code (string, 3-1.05M chars) | repo_name (string, 5-104 chars) | path (string, 4-251 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 3-1.05M)
---|---|---|---|---|---|
# $Id: mc.py,v 1.1.2.1 2013/06/03 13:47:14 paus Exp $
import FWCore.ParameterSet.Config as cms
process = cms.Process('FILEFI')
# import of standard configurations
process.load('Configuration/StandardSequences/Services_cff')
process.load('FWCore/MessageService/MessageLogger_cfi')
process.load('Configuration.StandardSequences.GeometryDB_cff')
process.load('Configuration/StandardSequences/MagneticField_38T_cff')
process.load('Configuration/StandardSequences/FrontierConditions_GlobalTag_cff')
process.load('Configuration/EventContent/EventContent_cff')
process.load('RecoVertex/PrimaryVertexProducer/OfflinePrimaryVertices_cfi')
process.configurationMetadata = cms.untracked.PSet(
version = cms.untracked.string('Mit_029'),
annotation = cms.untracked.string('AODSIM'),
name = cms.untracked.string('BambuProduction')
)
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(-1)
)
process.options = cms.untracked.PSet(
Rethrow = cms.untracked.vstring('ProductNotFound'),
fileMode = cms.untracked.string('NOMERGE'),
)
# input source
process.source = cms.Source("PoolSource",
fileNames = cms.untracked.vstring('/store/relval/CMSSW_5_3_2-START53_V6/RelValProdTTbar/AODSIM/v2/0000/9A630BD7-C3B9-E111-BAB3-00304867918E.root')
)
process.source.inputCommands = cms.untracked.vstring("keep *",
"drop *_MEtoEDMConverter_*_*",
"drop L1GlobalTriggerObjectMapRecord_hltL1GtObjectMap__HLT")
# other statements
process.GlobalTag.globaltag = 'START53_V10::All'
process.add_(cms.Service("ObjectService"))
process.load("MitProd.BAMBUSequences.BambuFillAODSIM_cfi")
process.MitTreeFiller.TreeWriter.fileName = 'XX-MITDATASET-XX'
#process.output = cms.OutputModule("PoolOutputModule",
# outputCommands = cms.untracked.vstring('keep *'),
# fileName = cms.untracked.string ("test.root")
#)
process.bambu_step = cms.Path(process.BambuFillAODSIM)
# schedule definition
process.schedule = cms.Schedule(process.bambu_step)
#process.outpath = cms.EndPath(process.output)
| cpausmit/Kraken | filefi/030/mc.py | Python | mit | 2,176 |
from django.shortcuts import render_to_response
import datetime
import urllib
import urllib2
import os
from dixy_viz import qfa_parser
from django.template import RequestContext
def get_input_list(site='dixy'):
if site == 'dixy':
input_list = ['YKU70_23GIS_NEW.txt',
'CDC13-1_20GIS_NEW.txt',
'CDC13-1_27GIS_NEW.txt',
'CDC13-1_36GIS_NEW.txt',
'YKU70_30GIS_NEW.txt',
'YKU70_37.5GIS_NEW.txt',
'YKU70_37GIS_NEW.txt',
]
if site == 'dixy-private':
input_list = ['YKU70_23GIS_NEW.txt',
'CDC13-1_20GIS_NEW.txt',
'CDC13-1_27GIS_NEW.txt',
'CDC13-1_36GIS_NEW.txt',
'YKU70_30GIS_NEW.txt',
'YKU70_37.5GIS_NEW.txt',
'YKU70_37GIS_NEW.txt',
]
if site == 'dixy-pol':
input_list = ['cdc13-1.txt',
'cdc2-2 (Pol delta).txt',
'HU_100mM.txt',
'pol1-4 (Pol alpha).txt',
'pol2-12 (Pol epsilon).txt',
]
if site == 'dixy-telo':
input_list = ['APB_QFA0142_cdc13-1_rad9D_27_SDM_rhlk_CTGNH_vs_QFA0140_cdc13-1_27_SDM_rhlk_CTGH_MDRMDP_GIS.txt',
'APB_QFA0142_cdc13-1_rad9D_UD_X1_SDM_rhlk_CTGNH_vs_QFA0140_cdc13-1_UD_X3_SDM_rhlk_CTGH_MDRMDP_GIS.txt',
'DAL_QFA0051_cdc13-1_exo1D_30_SDM_rhlk_CTGNH_vs_QFA0140_cdc13-1_27_SDM_rhlk_CTGH_MDRMDP_GIS.txt',
'DAL_QFA0136_stn1-13_33_SDM_rhlk_CTGNH_vs_QFA0018_lyp1_HLN_33_SDM_rhlk_CTGNH_MDRMDP_GIS.txt',
'DAL_QFA0139_Yku70_37_5_SDM_rhk_CTGN_vs_QFA0141_ura3_37_SDM_rhk_CTGN_MDRMDP_GIS.txt',
'DAL_QFA0140_cdc13-1_27_SDM_rhlk_CTGH_vs_QFA0141_ura3_27_SDM_rhk_CTGN_MDRMDP_GIS.txt',
'DAL_QFA0140_cdc13-1_UD_X3_SDM_rhlk_CTGH_vs_QFA0141_ura3_UD_X3_SDM_rhk_CTGN_MDRMDP_GIS.txt',
'MJG_QFA0131_rfa3-313_30_SDM_rhlk_CTGNH_vs_QFA0018_lyp1_HLN_30_SDM_rhlk_CTGNH_MDRMDP_GIS.txt',
]
return input_list
def get_url_stem(path):
    if path.startswith('/dixy-telo'):
        return 'dixy-telo'
    elif path.startswith('/dixy-private'):
        return 'dixy-private'
    elif path.startswith('/dixy-pol'):
        return 'dixy-pol'
    else:
        return 'dixy'
def home(request):
url_stem = get_url_stem(request.get_full_path())
return render_to_response(url_stem+'/index.html', {'year':datetime.datetime.now().year,
'context': 'home',
'path_var': url_stem,})
def about(request):
url_stem = get_url_stem(request.get_full_path())
return render_to_response(url_stem+'/about.html', {'year':datetime.datetime.now().year,
'context': 'about'})
def contact(request):
url_stem = get_url_stem(request.get_full_path())
return render_to_response(url_stem+'/contact.html', {'year':datetime.datetime.now().year,
'context': 'contact'})
def test(request):
url_stem = get_url_stem(request.get_full_path())
chart_index = 0
input_files = get_input_list(url_stem)
i = 0
x_files = []
y_files = []
metadata = []
button_labels = {}
for input_file in input_files:
x_files.append(input_file+".x")
y_files.append(input_file+".y")
metadata.append(input_file+".x_"+input_file+".y")
this_label = os.path.splitext(input_file)[0]
button_labels[i] = this_label
i += 1
return render_to_response(url_stem+'/dviz2.html', {
'data': url_stem+'-data.js',
'metadata': url_stem+'-metadata.js',
'x_files': x_files,
'y_files': y_files,
'metadata_labels': metadata,
'inputs': button_labels,
'context': 'viz',
'year':datetime.datetime.now().year,
},
context_instance=RequestContext(request)
)
def d3_viz(request, index=None):
url_stem = get_url_stem(request.get_full_path())
#set the required datasets
if request.method != 'POST':
#render the form to select datasets
#parse input headers (to get individual axes & complete sets)
headers = {}
input_list = get_input_list(url_stem)
for f in input_list:
headers[f] = qfa_parser.get_header_only(f, url_stem)
return render_to_response(url_stem+'/viz_form.html', {'context': 'viz',
'headers': headers,
'year':datetime.datetime.now().year,
},
context_instance=RequestContext(request))
else:
#data_file = p.main(input_list)
#os.chmod(data_file, 420)
#data_file_name = data_file.split('/')[-1]
x_files = []
y_files = []
parsers = []
metadata = []
button_labels = {}
chart_index = 0
i = 0
for thing in request.POST:
if thing.endswith('.txt'):
x_files.append(thing+".x")
y_files.append(thing+".y")
metadata.append(thing+".x_"+thing+".y")
this_label = os.path.splitext(thing)[0]
button_labels[i] = this_label
i += 1
is_custom = request.POST.get('custom1', False)
if is_custom:
button_labels[i] = 'Custom Set'
x_file = request.POST['xdata1']
y_file = request.POST['ydata1']
x_files.append(x_file)
y_files.append(y_file)
metadata.append(x_file+"_"+y_file)
#dc = d3charts.D3charts(x_files, y_files)
return render_to_response(url_stem+'/dviz2.html', {#'charts':dc.javascript,
#'data': data_file_name,
'data': url_stem+'-data.js',
'metadata': url_stem+'-metadata.js',
'x_files': x_files,
'y_files': y_files,
'metadata_labels': metadata,
'inputs': button_labels,
'context': 'viz',
'year':datetime.datetime.now().year,
},
context_instance=RequestContext(request)
)
def get_interactions(start, gene_list):
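    # Fetch one page of tab-delimited, low-throughput S. cerevisiae (taxId 559292)
    # interaction records from the BioGRID REST service, starting at offset `start`.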
accesskey = "095db6c6e05853ca59a01352d80528d6"
url = "http://webservice.thebiogrid.org/interactions/"
params = urllib.urlencode({
'searchIds': 'true',
'geneList': gene_list,
'taxId': '559292',
'interSpeciesExcluded': 'true',
'includeInteractorInteractions': 'true',
'throughputTag': 'low',
'start': start,
'accesskey': accesskey,
})
fh = urllib2.urlopen(url, params)
return fh.readlines()
def process_interactions(i, g, e, gene_array):
for line in i:
line = line.rstrip()
tokens = line.split('\t')
sys_name1 = tokens[5]
sys_name2 = tokens[6]
gene1 = tokens[7]
gene2 = tokens[8]
int_type = tokens[12]
if sys_name1 in gene_array:
g.append((gene1, True))
else:
g.append((gene1, False))
if sys_name2 in gene_array:
g.append((gene2, True))
else:
g.append((gene2, False))
e.append((gene1, gene2, int_type))
return g, e
def network(request, in_genes=None):
url_stem = get_url_stem(request.get_full_path())
if in_genes:
gene_list = in_genes
gene_array = gene_list.split('|')
else:
gene_array = ['YPL089C',]
gene_list = '|'.join(gene_array)
start = 0
interactions = get_interactions(start, gene_list)
genes = []
edges = []
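    # BioGRID appears to cap each response at 10,000 rows, so keep requesting
    # further pages until a short (final) page comes back.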
while len(interactions) == 10000:
genes, edges = process_interactions(interactions, genes, edges, gene_array)
start += 10000
interactions = get_interactions(start, gene_list)
genes, edges = process_interactions(interactions, genes, edges, gene_array)
genes = list(set(genes))
edges = list(set(edges))
json_genes = []
json_edges = []
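    # Build node/edge element strings for the network.html template (presumably a
    # Cytoscape.js-style elements list); genes from the query list are coloured red.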
i = 0
for gene in genes:
i += 1
if gene[1]:
favColor = '#C40233'
else:
favColor = '#888'
if i == len(genes):
s = "{ data: { id: '"+str(i)+"', name: '"+gene[0]+"', favColor: '"+favColor+"' } }"
else:
s = "{ data: { id: '"+str(i)+"', name: '"+gene[0]+"', favColor: '"+favColor+"' } },"
json_genes.append(s)
i = 0
for edge in edges:
i += 1
source = str([x[0] for x in genes].index(edge[0]) + 1)
target = str([x[0] for x in genes].index(edge[1]) + 1)
if edge[2] == 'physical':
favColor = "#6FB1FC"
elif edge[2] == 'genetic':
favColor = "#86B342"
if i == len(edges):
s = "{ data: { source: '"+source+"', target: '"+target+"', favColor: '"+favColor+"' } }"
else:
s = "{ data: { source: '"+source+"', target: '"+target+"', favColor: '"+favColor+"' } },"
json_edges.append(s)
return render_to_response(url_stem+'/network.html', {
'request': in_genes,
'genes':json_genes,
'edges':json_edges,
'start':start,
'year':datetime.datetime.now().year,})
def get_data(f):
url_slug = 'http://research.ncl.ac.uk/colonyzer/QFAVis/GISDatasets/'
    fh = urllib.urlopen(url_slug + f)  # urlopen takes no mode argument; passing 'r' would turn this into a POST
data_lines = fh.read().splitlines()
fh.close()
return data_lines
| Bioinformatics-Support-Unit/dixy | dixy/dixy_viz/views.py | Python | mit | 9,546 |
#!/usr/bin/env python
#coding:utf-8
# Purpose: test observer object
# Created: 23.01.2011
# Copyright (C) , Manfred Moitzi
# License: MIT
from __future__ import unicode_literals, print_function, division
__author__ = "mozman <[email protected]>"
# Standard Library
import unittest
# objects to test
from ezodf2.observer import Observer
class Listener:
msg = 'fail'
def __init__(self, xmlnode=None):
self.xmlnode = xmlnode
def on_save_handler(self, msg):
self.msg = 'ok'
def get_xmlroot(self):
return self.xmlnode
class ListenerWithoutGetXMLRootMethod:
msg = 'fail'
def on_save_handler(self, msg):
self.msg = 'ok'
class TestObserver(unittest.TestCase):
def setUp(self):
self.observer = Observer()
def test_count_listener(self):
self.assertEqual(0, self.observer._count_listeners('test'))
def test_can_subscribe_with_no_get_root(self):
        listener = ListenerWithoutGetXMLRootMethod()
        self.observer.subscribe('save', listener)
self.assertTrue(self.observer._has_listener('save'))
def test_multiple_listeners(self):
L1 = Listener()
L2 = Listener()
self.observer.subscribe('save', L1)
self.observer.subscribe('save', L2)
self.assertEqual(self.observer._count_listeners('save'), 2)
def test_subscribe_event(self):
listener = Listener()
self.observer.subscribe('save', listener)
self.assertTrue(self.observer._has_listener('save'))
def test_subscribe_without_event_handler(self):
listener = Listener()
with self.assertRaises(AttributeError):
self.observer.subscribe('open', listener)
def test_unsubscribe_existing_event(self):
listener = Listener()
self.observer.subscribe('save', listener)
self.observer.unsubscribe('save', listener)
self.assertFalse(self.observer._has_listener('save'))
def test_unsubscribe_not_existing_event(self):
listener = Listener()
with self.assertRaises(KeyError):
self.observer.unsubscribe('save', listener)
def test_unsubscribe_not_existing_listener(self):
listener = Listener()
self.observer.subscribe('save', listener)
with self.assertRaises(KeyError):
self.observer.unsubscribe('save', self)
def test_broadcast(self):
listener = Listener()
self.assertEqual(listener.msg, 'fail')
self.observer.subscribe('save', listener)
self.observer.broadcast(event='save', msg=self)
self.assertEqual(listener.msg, 'ok')
def test_broadcast_without_listeners_is_ok(self):
self.observer.broadcast(event='save', msg=self)
self.assertTrue(True)
def test_broadcast_to_destroyed_listeners(self):
listener = Listener()
self.observer.subscribe('save', listener)
del listener
self.assertEqual(self.observer._count_listeners('save'), 0)
self.observer.broadcast(event='save', msg=self)
def test_broadcast_to_different_roots(self):
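        # Only the listener whose xml root matches the `root` passed to broadcast()
        # should receive the event; the other listeners keep their initial msg.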
root1 = object()
root2 = object()
listeners = [Listener(root1), Listener(root2), ListenerWithoutGetXMLRootMethod()]
for listener in listeners:
self.observer.subscribe('save', listener)
self.observer.broadcast(event='save', msg=self, root=root1)
self.assertEqual(listeners[0].msg, 'ok')
self.assertEqual(listeners[1].msg, 'fail')
self.assertEqual(listeners[2].msg, 'fail')
if __name__=='__main__':
unittest.main()
| iwschris/ezodf2 | tests/test_observer.py | Python | mit | 3,678 |
# coding: utf-8
from .. import fixtures, config
from ..assertions import eq_
from ..config import requirements
from sqlalchemy import Integer, Unicode, UnicodeText, select
from sqlalchemy import Date, DateTime, Time, MetaData, String, \
Text, Numeric, Float
from ..schema import Table, Column
from ... import testing
import decimal
import datetime
class _UnicodeFixture(object):
__requires__ = 'unicode_data',
data = u"Alors vous imaginez ma surprise, au lever du jour, "\
u"quand une drôle de petite voix m’a réveillé. Elle "\
u"disait: « S’il vous plaît… dessine-moi un mouton! »"
@classmethod
def define_tables(cls, metadata):
Table('unicode_table', metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('unicode_data', cls.datatype),
)
def test_round_trip(self):
unicode_table = self.tables.unicode_table
config.db.execute(
unicode_table.insert(),
{
'unicode_data': self.data,
}
)
row = config.db.execute(
select([
unicode_table.c.unicode_data,
])
).first()
eq_(
row,
(self.data, )
)
assert isinstance(row[0], unicode)
def test_round_trip_executemany(self):
unicode_table = self.tables.unicode_table
config.db.execute(
unicode_table.insert(),
[
{
'unicode_data': self.data,
}
for i in xrange(3)
]
)
rows = config.db.execute(
select([
unicode_table.c.unicode_data,
])
).fetchall()
eq_(
rows,
[(self.data, ) for i in xrange(3)]
)
for row in rows:
assert isinstance(row[0], unicode)
def _test_empty_strings(self):
unicode_table = self.tables.unicode_table
config.db.execute(
unicode_table.insert(),
{"unicode_data": u''}
)
row = config.db.execute(
select([unicode_table.c.unicode_data])
).first()
eq_(row, (u'',))
class UnicodeVarcharTest(_UnicodeFixture, fixtures.TablesTest):
__requires__ = 'unicode_data',
datatype = Unicode(255)
@requirements.empty_strings_varchar
def test_empty_strings_varchar(self):
self._test_empty_strings()
class UnicodeTextTest(_UnicodeFixture, fixtures.TablesTest):
__requires__ = 'unicode_data', 'text_type'
datatype = UnicodeText()
@requirements.empty_strings_text
def test_empty_strings_text(self):
self._test_empty_strings()
class TextTest(fixtures.TablesTest):
@classmethod
def define_tables(cls, metadata):
Table('text_table', metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('text_data', Text),
)
def test_text_roundtrip(self):
text_table = self.tables.text_table
config.db.execute(
text_table.insert(),
{"text_data": 'some text'}
)
row = config.db.execute(
select([text_table.c.text_data])
).first()
eq_(row, ('some text',))
def test_text_empty_strings(self):
text_table = self.tables.text_table
config.db.execute(
text_table.insert(),
{"text_data": ''}
)
row = config.db.execute(
select([text_table.c.text_data])
).first()
eq_(row, ('',))
class StringTest(fixtures.TestBase):
@requirements.unbounded_varchar
def test_nolength_string(self):
metadata = MetaData()
foo = Table('foo', metadata,
Column('one', String)
)
foo.create(config.db)
foo.drop(config.db)
class _DateFixture(object):
compare = None
@classmethod
def define_tables(cls, metadata):
Table('date_table', metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('date_data', cls.datatype),
)
def test_round_trip(self):
date_table = self.tables.date_table
config.db.execute(
date_table.insert(),
{'date_data': self.data}
)
row = config.db.execute(
select([
date_table.c.date_data,
])
).first()
compare = self.compare or self.data
eq_(row,
(compare, ))
assert isinstance(row[0], type(compare))
def test_null(self):
date_table = self.tables.date_table
config.db.execute(
date_table.insert(),
{'date_data': None}
)
row = config.db.execute(
select([
date_table.c.date_data,
])
).first()
eq_(row, (None,))
class DateTimeTest(_DateFixture, fixtures.TablesTest):
__requires__ = 'datetime',
datatype = DateTime
data = datetime.datetime(2012, 10, 15, 12, 57, 18)
class DateTimeMicrosecondsTest(_DateFixture, fixtures.TablesTest):
__requires__ = 'datetime_microseconds',
datatype = DateTime
data = datetime.datetime(2012, 10, 15, 12, 57, 18, 396)
class TimeTest(_DateFixture, fixtures.TablesTest):
__requires__ = 'time',
datatype = Time
data = datetime.time(12, 57, 18)
class TimeMicrosecondsTest(_DateFixture, fixtures.TablesTest):
__requires__ = 'time_microseconds',
datatype = Time
data = datetime.time(12, 57, 18, 396)
class DateTest(_DateFixture, fixtures.TablesTest):
__requires__ = 'date',
datatype = Date
data = datetime.date(2012, 10, 15)
class DateTimeCoercedToDateTimeTest(_DateFixture, fixtures.TablesTest):
__requires__ = 'date',
datatype = Date
data = datetime.datetime(2012, 10, 15, 12, 57, 18)
compare = datetime.date(2012, 10, 15)
class DateTimeHistoricTest(_DateFixture, fixtures.TablesTest):
__requires__ = 'datetime_historic',
datatype = DateTime
data = datetime.datetime(1850, 11, 10, 11, 52, 35)
class DateHistoricTest(_DateFixture, fixtures.TablesTest):
__requires__ = 'date_historic',
datatype = Date
data = datetime.date(1727, 4, 1)
class NumericTest(fixtures.TestBase):
@testing.emits_warning(r".*does \*not\* support Decimal objects natively")
@testing.provide_metadata
def _do_test(self, type_, input_, output, filter_=None, check_scale=False):
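        # Round-trip `input_` through a table whose column has type `type_` and
        # compare the set of values read back against `output`; `filter_` lets the
        # caller normalise values before comparison.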
metadata = self.metadata
t = Table('t', metadata, Column('x', type_))
t.create()
t.insert().execute([{'x':x} for x in input_])
result = set([row[0] for row in t.select().execute()])
output = set(output)
if filter_:
result = set(filter_(x) for x in result)
output = set(filter_(x) for x in output)
eq_(result, output)
if check_scale:
eq_(
[str(x) for x in result],
[str(x) for x in output],
)
def test_numeric_as_decimal(self):
self._do_test(
Numeric(precision=8, scale=4),
[15.7563, decimal.Decimal("15.7563"), None],
[decimal.Decimal("15.7563"), None],
)
def test_numeric_as_float(self):
self._do_test(
Numeric(precision=8, scale=4, asdecimal=False),
[15.7563, decimal.Decimal("15.7563"), None],
[15.7563, None],
)
def test_float_as_decimal(self):
self._do_test(
Float(precision=8, asdecimal=True),
[15.7563, decimal.Decimal("15.7563"), None],
[decimal.Decimal("15.7563"), None],
)
def test_float_as_float(self):
self._do_test(
Float(precision=8),
[15.7563, decimal.Decimal("15.7563")],
[15.7563],
filter_=lambda n: n is not None and round(n, 5) or None
)
@testing.requires.precision_numerics_general
def test_precision_decimal(self):
numbers = set([
decimal.Decimal("54.234246451650"),
decimal.Decimal("0.004354"),
decimal.Decimal("900.0"),
])
self._do_test(
Numeric(precision=18, scale=12),
numbers,
numbers,
)
@testing.requires.precision_numerics_enotation_large
def test_enotation_decimal(self):
"""test exceedingly small decimals.
        Decimal reports values with E notation when the exponent
        drops below -6.
"""
numbers = set([
decimal.Decimal('1E-2'),
decimal.Decimal('1E-3'),
decimal.Decimal('1E-4'),
decimal.Decimal('1E-5'),
decimal.Decimal('1E-6'),
decimal.Decimal('1E-7'),
decimal.Decimal('1E-8'),
decimal.Decimal("0.01000005940696"),
decimal.Decimal("0.00000005940696"),
decimal.Decimal("0.00000000000696"),
decimal.Decimal("0.70000000000696"),
decimal.Decimal("696E-12"),
])
self._do_test(
Numeric(precision=18, scale=14),
numbers,
numbers
)
@testing.requires.precision_numerics_enotation_large
def test_enotation_decimal_large(self):
"""test exceedingly large decimals.
"""
numbers = set([
decimal.Decimal('4E+8'),
decimal.Decimal("5748E+15"),
decimal.Decimal('1.521E+15'),
decimal.Decimal('00000000000000.1E+12'),
])
self._do_test(
Numeric(precision=25, scale=2),
numbers,
numbers
)
@testing.requires.precision_numerics_many_significant_digits
def test_many_significant_digits(self):
numbers = set([
decimal.Decimal("31943874831932418390.01"),
decimal.Decimal("319438950232418390.273596"),
decimal.Decimal("87673.594069654243"),
])
self._do_test(
Numeric(precision=38, scale=12),
numbers,
numbers
)
@testing.requires.precision_numerics_retains_significant_digits
def test_numeric_no_decimal(self):
numbers = set([
decimal.Decimal("1.000")
])
self._do_test(
Numeric(precision=5, scale=3),
numbers,
numbers,
check_scale=True
)
__all__ = ('UnicodeVarcharTest', 'UnicodeTextTest',
'DateTest', 'DateTimeTest', 'TextTest',
'NumericTest',
'DateTimeHistoricTest', 'DateTimeCoercedToDateTimeTest',
'TimeMicrosecondsTest', 'TimeTest', 'DateTimeMicrosecondsTest',
'DateHistoricTest', 'StringTest')
| VagrantApe/flaskMicroblog | venv/lib/python2.7/site-packages/sqlalchemy/testing/suite/test_types.py | Python | bsd-3-clause | 11,192 |
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Spark CLI Fabric File
"""
__author__ = 'Dongjoon Hyun ([email protected])'
__copyright__ = 'Copyright (c) 2015-2016'
__license__ = 'Apache License'
__version__ = '0.1'
from fabric.api import task, run, env, cd
@task
def head(inpath, count=5):
"""
fab spark.head:/data/image/imagenet/*.txt,5
"""
run('mkdir %s' % env.dir)
with cd(env.dir):
run('''cat <<EOF > spark.head.py
# -*- coding: utf-8 -*-
from pyspark import SparkContext
sc = SparkContext(appName='Head')
for x in sc.textFile('%(inpath)s').take(%(count)s):
print x.encode('utf8')
EOF''' % locals())
cmd = '/opt/spark/bin/spark-submit --num-executors 300 spark.head.py 2> /dev/null'
run(cmd)
@task
def sql(sql):
"""
fab spark.sql:'select count(*) from data.news'
"""
run('mkdir %s' % env.dir)
with cd(env.dir):
run('''cat <<EOF > spark.sql.py
# -*- coding: utf-8 -*-
from pyspark import SparkContext
from pyspark.sql import HiveContext
sc = SparkContext(appName='HiveContext')
sqlContext = HiveContext(sc)
for x in sqlContext.sql('%(sql)s').collect():
print x
EOF''' % locals())
cmd = 'HADOOP_CONF_DIR=/etc/hive/conf /opt/spark/bin/spark-submit --num-executors 300 spark.sql.py 2> /dev/null'
run(cmd)
@task
def count_line(inpath):
"""
fab spark.count_line:/data/image/imagenet/*.txt
"""
run('mkdir %s' % env.dir)
with cd(env.dir):
run('''cat <<EOF > spark.count_line.py
# -*- coding: utf-8 -*-
from pyspark import SparkContext
sc = SparkContext(appName='Count Line')
print sc.textFile('%(inpath)s').count()
EOF''' % locals())
cmd = '/opt/spark/bin/spark-submit --num-executors 300 spark.count_line.py 2> /dev/null'
run(cmd)
@task
def count_line_with(inpath, keyword):
"""
fab spark.count_line_with:/data/text/wikipedia/ko*,'<page>'
"""
run('mkdir %s' % env.dir)
with cd(env.dir):
run('''cat <<EOF > spark.count_line_with.py
# -*- coding: utf-8 -*-
from pyspark import SparkContext
sc = SparkContext(appName='Count Line With')
words = '%(keyword)s'.split(':')
def isIn(line):
line = line.encode('utf8')
for w in words:
if w in line:
return True
return False
print sc.textFile('%(inpath)s').filter(isIn).count()
EOF''' % locals())
cmd = '/opt/spark/bin/spark-submit --num-executors 300 spark.count_line_with.py 2> /dev/null'
run(cmd)
@task
def grep(inpath, outpath, keyword):
"""
fab spark.grep:/data/text/wikipedia/ko*,/user/hadoop/grep_result,'<page>'
"""
run('mkdir %s' % env.dir)
with cd(env.dir):
run('''cat <<EOF > spark.grep.py
# -*- coding: utf-8 -*-
from pyspark import SparkContext
sc = SparkContext(appName='Grep')
sc.textFile('%(inpath)s').filter(lambda line: '%(keyword)s' in line).saveAsTextFile('%(outpath)s')
EOF''' % locals())
cmd = '/opt/spark/bin/spark-submit --num-executors 300 spark.grep.py 2> /dev/null'
run(cmd)
@task
def select(inpath, outpath, columns='*', sep='\01'):
"""
fab spark.select:/data/text/news/hani/*,/user/hadoop/selected,1:0
"""
run('mkdir %s' % env.dir)
with cd(env.dir):
run('''cat <<EOF > spark.select.py
# -*- coding: utf-8 -*-
from pyspark import SparkContext
import re
sc = SparkContext(appName='Select')
columns = '%(columns)s'
def select(alist, cols):
    # when no explicit column list is given ('*'), keep every field of the line
    if cols is None:
        cols = range(len(alist))
    blist = [alist[c] for c in cols]
    return ('%%c' %% (1)).join(blist)
if '*' == columns:
    cols = None
else:
    cols = [int(i) for i in columns.split(':')]
sc.textFile('%(inpath)s').map(lambda line: select(re.split('%%c' %% (1),line), cols)).saveAsTextFile('%(outpath)s')
EOF''' % locals())
cmd = '/opt/spark/bin/spark-submit --num-executors 300 spark.select.py 2> /dev/null'
run(cmd)
@task
def tf_ko(inpath, outpath, sep='\01'):
"""
fab spark.tf_ko:/data/text/news/hani/*,/user/hadoop/tf_result
"""
if not (outpath.startswith('/tmp/') or outpath.startswith('/user/hadoop/')):
print 'Unauthorized path: %(outpath)s' % locals()
return
run('mkdir %s' % env.dir)
with cd(env.dir):
run('''cat <<EOF > spark.tf_ko.py
# -*- coding: utf-8 -*-
from pyspark import SparkContext
import re
import string
# NOTE: 'regex' is used below but was never defined in the original file; as an
# assumption, split on runs of whitespace and periods, the only separator
# characters that normalize() keeps.
regex = re.compile(u'[\s\.]+')
def normalize(str):
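    # Map digits to the placeholder 'N' and keep only whitespace, '.', 'N' and
    # characters in the Hangul syllable range (U+AC00..U+D7AF); drop everything else.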
chars = [u'N' if c.isdigit() else c for c in str]
chars = [c for c in chars if c.isspace() or c == u'.' or c == u'N' or (44032 <= ord(c) and ord(c)<=55215)]
return ''.join(chars)
sc = SparkContext(appName='Term Frequency')
counts = sc.textFile('%(inpath)s') \
.map(lambda line: re.split('%%c' %% (1),line)[1]) \
.map(normalize) \
.flatMap(lambda line: regex.split(line)) \
.filter(lambda word: len(word.strip())>0) \
.map(lambda word: (word, 1)) \
.reduceByKey(lambda a,b: a+b) \
.map(lambda (a,b): (b,a)) \
.sortByKey(0,1) \
.map(lambda (a,b): '%%s%%c%%s' %% (b,1,a))
counts.saveAsTextFile('%(outpath)s')
EOF''' % locals())
cmd = '/opt/spark/bin/spark-submit --num-executors 300 spark.tf_ko.py 2> /dev/null'
run(cmd)
@task
def ngram_ko(n, min, inpath, outpath, sep='\01'):
"""
fab spark.ngram_ko:2,1000,/user/hadoop/tf_result,/user/hadoop/ngram_result
"""
if not (outpath.startswith('/tmp/') or outpath.startswith('/user/hadoop/')):
print 'Unauthorized path: %(outpath)s' % locals()
return
run('mkdir %s' % env.dir)
with cd(env.dir):
run('''cat <<EOF > spark.ngram_ko.py
# -*- coding: utf-8 -*-
from pyspark import SparkContext
import re
sc = SparkContext(appName='%(n)s-gram')
def ngram(line):
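    # Each input line is 'term<0x01>count' (the tf_ko output format); emit every
    # n-character substring of the term together with that count.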
n = %(n)s
str = re.split('%%c' %% (1),line)[0]
count = int(re.split('%%c' %% (1),line)[1])
return [(str[i:i+n],count) for i in range(len(str)-n+1)]
counts = sc.textFile('%(inpath)s') \
.flatMap(ngram) \
.reduceByKey(lambda a,b: a+b) \
.filter(lambda (a,b): b>=%(min)s) \
.map(lambda (a,b): (b,a)) \
.sortByKey(0,1) \
.map(lambda (a,b): '%%s%%c%%s' %% (b,1,a))
counts.saveAsTextFile('%(outpath)s')
EOF''' % locals())
cmd = '/opt/spark/bin/spark-submit --num-executors 300 spark.ngram_ko.py 2> /dev/null'
run(cmd)
@task
def word2vec(inpath, queryword):
"""
fab spark.word2vec:/sample/sample_hani_kma,대통령
"""
cmd = '/opt/spark/bin/spark-submit /hdfs/user/hadoop/demo/nlp/SparkWord2Vec.py %(inpath)s %(queryword)s \
2> /dev/null' % locals()
run(cmd)
@task
def naivebayes_train(inpath, lambda_, outpath):
"""
fab spark.naivebayes_train:/sample/sample_naive_bayes_data.txt,1.0,/tmp/nb.model
"""
run('mkdir %s' % env.dir)
with cd(env.dir):
run('''cat <<EOF > spark.naivebayes_train.py
# -*- coding: utf-8 -*-
from pyspark import SparkContext
from pyspark.mllib.classification import NaiveBayes
from pyspark.mllib.linalg import Vectors
from pyspark.mllib.regression import LabeledPoint
def parseLine(line):
parts = line.split(',')
label = float(parts[0])
features = Vectors.dense([float(x) for x in parts[1].split(' ')])
return LabeledPoint(label, features)
sc = SparkContext(appName='Naive Bayes Train')
data = sc.textFile('%(inpath)s').map(parseLine)
model = NaiveBayes.train(data, %(lambda_)s)
model.save(sc, '%(outpath)s')
EOF''' % locals())
cmd = '/opt/spark/bin/spark-submit spark.naivebayes_train.py 2> /dev/null'
run(cmd)
@task
def naivebayes_predict(model, inpath, outpath):
"""
fab spark.naivebayes_predict:/tmp/nb.model,/sample/naive_bayes_test.txt,/tmp/nb.result
"""
run('mkdir %s' % env.dir)
with cd(env.dir):
run('''cat <<EOF > spark.naivebayes_test.py
# -*- coding: utf-8 -*-
from pyspark import SparkContext
from pyspark.mllib.classification import NaiveBayesModel
from pyspark.mllib.linalg import Vectors
from pyspark.mllib.regression import LabeledPoint
def parseLine(line):
features = Vectors.dense([float(x) for x in line.split(' ')])
return features
sc = SparkContext(appName='Naive Bayes Predict')
model = NaiveBayesModel.load(sc, '%(model)s')
sc.textFile('%(inpath)s').map(parseLine).map(model.predict).saveAsTextFile('%(outpath)s')
EOF''' % locals())
cmd = '/opt/spark/bin/spark-submit spark.naivebayes_test.py 2> /dev/null'
run(cmd)
@task
def sample(inpath, replacement, fraction, seed, outpath):
"""
fab spark.sample:/sample/sample_movielens_movies.txt,False,0.5,0,/tmp/sampled_movielens
"""
run('mkdir %s' % env.dir)
with cd(env.dir):
run('''cat <<EOF > spark.sample.py
# -*- coding: utf-8 -*-
from pyspark import SparkContext
sc = SparkContext(appName='Sampling')
sc.textFile('%(inpath)s').sample(%(replacement)s,%(fraction)s,%(seed)s).saveAsTextFile('%(outpath)s')
EOF''' % locals())
cmd = '/opt/spark/bin/spark-submit --num-executors 300 spark.sample.py 2> /dev/null'
run(cmd)
@task
def lm(inpath, outpath, step, maxiter):
"""
fab spark.lm:/sample/sample_regression,/user/hadoop/lm_result,0.1,1000
"""
if not (outpath.startswith('/tmp/') or outpath.startswith('/user/hadoop/')):
print 'Unauthorized path: %(outpath)s' % locals()
return
run('mkdir %s' % env.dir)
with cd(env.dir):
run('''cat <<'EOF' > spark.lm.py
# -*- coding: utf-8 -*-
from pyspark import SparkContext
from pyspark.mllib.linalg import Vectors
from pyspark.mllib.feature import StandardScaler
from pyspark.mllib.regression import LabeledPoint, LinearRegressionWithSGD
sc = SparkContext(appName='Linear Regression')
data = sc.textFile('%(inpath)s').filter(lambda x: not x.startswith('#')).map(lambda x: x.split())
label = data.map(lambda x: x[-1])
feature = data.map(lambda x: x[0:-1])
scaler = StandardScaler().fit(feature)
feature = scaler.transform(feature)
model = LinearRegressionWithSGD.train(label.zip(feature).map(lambda (x,y): LabeledPoint(x,y)), intercept=True, \
iterations=%(maxiter)s, step=%(step)s)
print model
EOF''' % locals())
cmd = '/opt/spark/bin/spark-submit spark.lm.py 2> /dev/null'
run(cmd)
| dongjoon-hyun/tools | cli/spark.py | Python | apache-2.0 | 10,655 |
from djangomako.shortcuts import render_to_response
from bellum.common.session.login import must_be_logged
from bellum.common.session import getAccount
from bellum.common.gui import PrimaryGUIObject
from bellum.stats.larf.storage import fileFor
@must_be_logged
def process(request):
acc = getAccount(request)
ll = fileFor(acc.id)
a = []
tmp = []
for x in ll.readlines():
tmp += [x]
        if len(tmp) == 2:  # a complete two-line entry has been read; store it
a.append(tmp)
tmp = []
return render_to_response('stats/larf/larf.html',{'pgo':PrimaryGUIObject(request),
                                                      'entries':a})
| piotrmaslanka/bellum | stats/views/larf.py | Python | agpl-3.0 | 668 |
import os.path as op
import numpy as nm
import sfepy
from sfepy.discrete.common import Field
import sfepy.discrete.common.global_interp as gi
from sfepy.base.testing import TestCommon
class Test(TestCommon):
@staticmethod
def from_conf(conf, options):
test = Test(conf=conf, options=options)
return test
def test_ref_coors_fem(self):
from sfepy.discrete.fem import Mesh, FEDomain
mesh = Mesh.from_file('meshes/3d/special/cross3d.mesh',
prefix_dir=sfepy.data_dir)
domain = FEDomain('domain', mesh)
omega = domain.create_region('Omega', 'all')
field = Field.from_args('linear', nm.float64, 'scalar', omega,
approx_order=1)
mcoors = field.domain.get_mesh_coors()
conn = field.domain.get_conn()
bbox = field.domain.get_mesh_bounding_box()
ray = nm.linspace(bbox[0, 0], bbox[1, 0], 7)
coors = nm.zeros((ray.shape[0], 3), dtype=nm.float64)
def gen_rays():
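            # Note: the same `coors` array is reused and mutated in place, yielding
            # x-, y- and z-aligned probe rays through the mesh bounding box.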
coors[:, 0] = ray
yield coors
coors.fill(0.0)
coors[:, 1] = ray
yield coors
coors.fill(0.0)
coors[:, 2] = ray
yield coors
ok = True
ctx = field.create_basis_context()._geo_ctx
for ir, coors in enumerate(gen_rays()):
self.report('ray %d' % ir)
ref_coors, cells, status = gi.get_ref_coors(field, coors,
strategy='general',
close_limit=0.0,
verbose=False)
self.report(ref_coors)
self.report(cells)
self.report(status)
# In the distorted cell 2, the Newton method finds a solution
# outside of the cell. This will be fixed when box constraints
# are applied.
_ok = nm.all((status == 0) | ((cells == 2) & (status == 3)))
if not _ok:
self.report('wrong status %s for ray %d!' % (status, ir))
ok = ok and _ok
for ic, cell in enumerate(cells):
ctx.iel = cell
bf = ctx.evaluate(ref_coors[ic:ic+1], check_errors=False)
cell_coors = mcoors[conn[cell]]
coor = nm.dot(bf, cell_coors).ravel()
_ok = nm.allclose(coor, coors[ic], atol=1e-14, rtol=0.0)
if not _ok:
self.report('ray %d point %d:' % (ir, ic))
self.report(' - wrong reference coordinates %s!'
% ref_coors[ic])
self.report(' - given point: %s' % coors[ic])
self.report(' - found point: %s' % coor)
ok = ok and _ok
return ok
def test_ref_coors_iga(self):
from sfepy.discrete.iga.domain import IGDomain
domain = IGDomain.from_file(op.join(sfepy.data_dir,
'meshes/iga/block2d.iga'))
omega = domain.create_region('Omega', 'all')
field = Field.from_args('iga', nm.float64, 'scalar', omega,
approx_order='iga', poly_space_base='iga')
mcoors = field.nurbs.cps
conn = field.get_econn('volume', field.region)
bbox = domain.eval_mesh.get_bounding_box()
ray = nm.linspace(bbox[0, 0], bbox[1, 0], 11)
coors = nm.c_[ray, ray]
ref_coors, cells, status = gi.get_ref_coors(field, coors,
strategy='general',
close_limit=0.0,
verbose=False)
self.report(ref_coors)
self.report(cells)
self.report(status)
ok = nm.all(status == 0)
ctx = field.create_basis_context()
for ic, cell in enumerate(cells):
ctx.iel = cell
bf = ctx.evaluate(ref_coors[ic:ic+1])
cell_coors = mcoors[conn[cell]]
coor = nm.dot(bf, cell_coors).ravel()
_ok = nm.allclose(coor, coors[ic], atol=1e-14, rtol=0.0)
if not _ok:
self.report('point %d:' % ic)
self.report(' - wrong reference coordinates %s!'
% ref_coors[ic])
self.report(' - given point: %s' % coors[ic])
self.report(' - found point: %s' % coor)
ok = ok and _ok
return ok
| RexFuzzle/sfepy | tests/test_ref_coors.py | Python | bsd-3-clause | 4,619 |
from django.shortcuts import render,get_object_or_404,redirect
from .models import Post
from .forms import PostForm
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator,EmptyPage,PageNotAnInteger
from haystack.forms import SearchForm
# import qiniu
def full_search(request):
"""全局搜索"""
keywords = request.GET['q']
sform = SearchForm(request.GET)
posts = sform.search()
print(posts)
    return render(request, 'blog/post_search_list.html', {'test': sform, 'posts': posts, 'list_header': "Search results for keyword '{}'".format(keywords)})
# Create your views here.
def post_list(request):
postsAll=Post.objects.filter(published_date__isnull=False).order_by('-published_date')
paginator=Paginator(postsAll,1)
page=request.GET.get('page')
try:
posts = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
posts = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
posts = paginator.page(paginator.num_pages)
return render(request, 'blog/post_list.html', {'posts': posts, 'page': True})
def post_detail(request,pk):
post=get_object_or_404(Post,pk=pk)
return render(request,'blog/post_detail.html',{ 'post': post })
@login_required
def post_new(request):
if request.method=="POST":
form=PostForm(request.POST)
if form.is_valid():
post=form.save(commit=False)
post.author=request.user
post.save()
return redirect('post_detail',pk=post.pk)
else:
form=PostForm()
return render(request, 'blog/post_edit.html', {'form': form})
def post_edit(request, pk):
post = get_object_or_404(Post, pk=pk)
if request.method == "POST":
form = PostForm(request.POST, instance=post)
if form.is_valid():
post = form.save(commit=False)
post.author = request.user
post.save()
return redirect('post_detail', pk=post.pk)
else:
form = PostForm(instance=post)
return render(request, 'blog/post_edit.html', {'form': form})
def post_draft_list(request):
posts = Post.objects.filter(published_date__isnull=True).order_by('-created_date')
return render(request, 'blog/post_draft_list.html', {'posts': posts})
def post_publish(request, pk):
post = get_object_or_404(Post, pk=pk)
post.publish()
return redirect('post_detail', pk=pk)
def post_remove(request, pk):
post = get_object_or_404(Post, pk=pk)
post.delete()
    return redirect('post_list')
| kleinzh/PythonBlog | mysite/blog/views.py | Python | apache-2.0 | 2,496 |
"""
Real spectrum tranforms (DCT, DST, MDCT)
"""
from __future__ import division, print_function, absolute_import
__all__ = ['dct', 'idct', 'dst', 'idst']
import numpy as np
from scipy.fftpack import _fftpack
from scipy.fftpack.basic import _datacopied
import atexit
atexit.register(_fftpack.destroy_ddct1_cache)
atexit.register(_fftpack.destroy_ddct2_cache)
atexit.register(_fftpack.destroy_dct1_cache)
atexit.register(_fftpack.destroy_dct2_cache)
atexit.register(_fftpack.destroy_ddst1_cache)
atexit.register(_fftpack.destroy_ddst2_cache)
atexit.register(_fftpack.destroy_dst1_cache)
atexit.register(_fftpack.destroy_dst2_cache)
def dct(x, type=2, n=None, axis=-1, norm=None, overwrite_x=0):
"""
Return the Discrete Cosine Transform of arbitrary type sequence x.
Parameters
----------
x : array_like
The input array.
type : {1, 2, 3}, optional
Type of the DCT (see Notes). Default type is 2.
n : int, optional
Length of the transform.
axis : int, optional
Axis over which to compute the transform.
norm : {None, 'ortho'}, optional
Normalization mode (see Notes). Default is None.
overwrite_x : bool, optional
If True the contents of x can be destroyed. (default=False)
Returns
-------
y : ndarray of real
The transformed input array.
See Also
--------
idct
Notes
-----
For a single dimension array ``x``, ``dct(x, norm='ortho')`` is equal to
MATLAB ``dct(x)``.
There are theoretically 8 types of the DCT, only the first 3 types are
implemented in scipy. 'The' DCT generally refers to DCT type 2, and 'the'
Inverse DCT generally refers to DCT type 3.
**type I**
There are several definitions of the DCT-I; we use the following
(for ``norm=None``)::
N-2
y[k] = x[0] + (-1)**k x[N-1] + 2 * sum x[n]*cos(pi*k*n/(N-1))
n=1
Only None is supported as normalization mode for DCT-I. Note also that the
DCT-I is only supported for input size > 1
**type II**
There are several definitions of the DCT-II; we use the following
(for ``norm=None``)::
N-1
y[k] = 2* sum x[n]*cos(pi*k*(2n+1)/(2*N)), 0 <= k < N.
n=0
If ``norm='ortho'``, ``y[k]`` is multiplied by a scaling factor `f`::
f = sqrt(1/(4*N)) if k = 0,
f = sqrt(1/(2*N)) otherwise.
Which makes the corresponding matrix of coefficients orthonormal
(``OO' = Id``).
**type III**
There are several definitions, we use the following
(for ``norm=None``)::
N-1
y[k] = x[0] + 2 * sum x[n]*cos(pi*(k+0.5)*n/N), 0 <= k < N.
n=1
or, for ``norm='ortho'`` and 0 <= k < N::
N-1
y[k] = x[0] / sqrt(N) + sqrt(1/N) * sum x[n]*cos(pi*(k+0.5)*n/N)
n=1
The (unnormalized) DCT-III is the inverse of the (unnormalized) DCT-II, up
to a factor `2N`. The orthonormalized DCT-III is exactly the inverse of
the orthonormalized DCT-II.
References
----------
.. [1] 'A Fast Cosine Transform in One and Two Dimensions', by J.
Makhoul, `IEEE Transactions on acoustics, speech and signal
processing` vol. 28(1), pp. 27-34,
http://dx.doi.org/10.1109/TASSP.1980.1163351 (1980).
.. [2] Wikipedia, "Discrete cosine transform",
http://en.wikipedia.org/wiki/Discrete_cosine_transform
"""
if type == 1 and norm is not None:
raise NotImplementedError(
"Orthonormalization not yet supported for DCT-I")
return _dct(x, type, n, axis, normalize=norm, overwrite_x=overwrite_x)
def idct(x, type=2, n=None, axis=-1, norm=None, overwrite_x=0):
"""
Return the Inverse Discrete Cosine Transform of an arbitrary type sequence.
Parameters
----------
x : array_like
The input array.
type : {1, 2, 3}, optional
Type of the DCT (see Notes). Default type is 2.
n : int, optional
Length of the transform.
axis : int, optional
Axis over which to compute the transform.
norm : {None, 'ortho'}, optional
Normalization mode (see Notes). Default is None.
overwrite_x : bool, optional
If True the contents of x can be destroyed. (default=False)
Returns
-------
idct : ndarray of real
The transformed input array.
See Also
--------
dct
Notes
-----
For a single dimension array `x`, ``idct(x, norm='ortho')`` is equal to
MATLAB ``idct(x)``.
'The' IDCT is the IDCT of type 2, which is the same as DCT of type 3.
IDCT of type 1 is the DCT of type 1, IDCT of type 2 is the DCT of type
3, and IDCT of type 3 is the DCT of type 2. For the definition of these
types, see `dct`.
"""
if type == 1 and norm is not None:
raise NotImplementedError(
"Orthonormalization not yet supported for IDCT-I")
# Inverse/forward type table
_TP = {1:1, 2:3, 3:2}
return _dct(x, _TP[type], n, axis, normalize=norm, overwrite_x=overwrite_x)
def _dct(x, type, n=None, axis=-1, overwrite_x=0, normalize=None):
"""
Return Discrete Cosine Transform of arbitrary type sequence x.
Parameters
----------
x : array-like
input array.
n : int, optional
Length of the transform.
axis : int, optional
Axis along which the dct is computed. (default=-1)
overwrite_x : bool, optional
If True the contents of x can be destroyed. (default=False)
Returns
-------
z : real ndarray
"""
tmp = np.asarray(x)
if not np.isrealobj(tmp):
raise TypeError("1st argument must be real sequence")
if n is None:
n = tmp.shape[axis]
else:
        raise NotImplementedError("Padding/truncating not yet implemented")
if tmp.dtype == np.double:
if type == 1:
f = _fftpack.ddct1
elif type == 2:
f = _fftpack.ddct2
elif type == 3:
f = _fftpack.ddct3
else:
raise ValueError("Type %d not understood" % type)
elif tmp.dtype == np.float32:
if type == 1:
f = _fftpack.dct1
elif type == 2:
f = _fftpack.dct2
elif type == 3:
f = _fftpack.dct3
else:
raise ValueError("Type %d not understood" % type)
else:
raise ValueError("dtype %s not supported" % tmp.dtype)
if normalize:
if normalize == "ortho":
nm = 1
else:
raise ValueError("Unknown normalize mode %s" % normalize)
else:
nm = 0
if type == 1 and n < 2:
raise ValueError("DCT-I is not defined for size < 2")
overwrite_x = overwrite_x or _datacopied(tmp, x)
if axis == -1 or axis == len(tmp.shape) - 1:
return f(tmp, n, nm, overwrite_x)
#else:
# raise NotImplementedError("Axis arg not yet implemented")
tmp = np.swapaxes(tmp, axis, -1)
tmp = f(tmp, n, nm, overwrite_x)
return np.swapaxes(tmp, axis, -1)
###########
def dst(x, type=2, n=None, axis=-1, norm=None, overwrite_x=0):
"""
Return the Discrete Sine Transform of arbitrary type sequence x.
.. versionadded:: 0.11.0
Parameters
----------
x : array_like
The input array.
type : {1, 2, 3}, optional
Type of the DST (see Notes). Default type is 2.
n : int, optional
Length of the transform.
axis : int, optional
Axis over which to compute the transform.
norm : {None, 'ortho'}, optional
Normalization mode (see Notes). Default is None.
overwrite_x : bool, optional
If True the contents of x can be destroyed. (default=False)
Returns
-------
dst : ndarray of reals
The transformed input array.
See Also
--------
idst
Notes
-----
For a single dimension array ``x``.
There are theoretically 8 types of the DST for different combinations of
even/odd boundary conditions and boundary off sets [1]_, only the first
3 types are implemented in scipy.
**type I**
There are several definitions of the DST-I; we use the following
for ``norm=None``. DST-I assumes the input is odd around n=-1 and n=N. ::
N-1
y[k] = 2 * sum x[n]*sin(pi*(k+1)*(n+1)/(N+1))
n=0
    Only None is supported as normalization mode for DST-I. Note also that the
    DST-I is only supported for input size > 1.
    The (unnormalized) DST-I is its own inverse, up to a factor `2(N+1)`.
**type II**
There are several definitions of the DST-II; we use the following
for ``norm=None``. DST-II assumes the input is odd around n=-1/2 and
n=N-1/2; the output is odd around k=-1 and even around k=N-1 ::
N-1
y[k] = 2* sum x[n]*sin(pi*(k+1)*(n+0.5)/N), 0 <= k < N.
n=0
if ``norm='ortho'``, ``y[k]`` is multiplied by a scaling factor `f` ::
f = sqrt(1/(4*N)) if k == 0
f = sqrt(1/(2*N)) otherwise.
**type III**
There are several definitions of the DST-III, we use the following
(for ``norm=None``). DST-III assumes the input is odd around n=-1
and even around n=N-1 ::
N-2
y[k] = x[N-1]*(-1)**k + 2* sum x[n]*sin(pi*(k+0.5)*(n+1)/N), 0 <= k < N.
n=0
    The (unnormalized) DST-III is the inverse of the (unnormalized) DST-II, up
to a factor `2N`. The orthonormalized DST-III is exactly the inverse of
the orthonormalized DST-II.
References
----------
.. [1] Wikipedia, "Discrete sine transform",
http://en.wikipedia.org/wiki/Discrete_sine_transform
"""
if type == 1 and norm is not None:
raise NotImplementedError(
"Orthonormalization not yet supported for IDCT-I")
return _dst(x, type, n, axis, normalize=norm, overwrite_x=overwrite_x)
def idst(x, type=2, n=None, axis=-1, norm=None, overwrite_x=0):
"""
Return the Inverse Discrete Sine Transform of an arbitrary type sequence.
.. versionadded:: 0.11.0
Parameters
----------
x : array_like
The input array.
type : {1, 2, 3}, optional
Type of the DST (see Notes). Default type is 2.
n : int, optional
Length of the transform.
axis : int, optional
Axis over which to compute the transform.
norm : {None, 'ortho'}, optional
Normalization mode (see Notes). Default is None.
overwrite_x : bool, optional
If True the contents of x can be destroyed. (default=False)
Returns
-------
idst : ndarray of real
The transformed input array.
See Also
--------
dst
Notes
-----
'The' IDST is the IDST of type 2, which is the same as DST of type 3.
IDST of type 1 is the DST of type 1, IDST of type 2 is the DST of type
3, and IDST of type 3 is the DST of type 2. For the definition of these
types, see `dst`.
"""
if type == 1 and norm is not None:
raise NotImplementedError(
"Orthonormalization not yet supported for IDCT-I")
# Inverse/forward type table
_TP = {1:1, 2:3, 3:2}
return _dst(x, _TP[type], n, axis, normalize=norm, overwrite_x=overwrite_x)
def _dst(x, type, n=None, axis=-1, overwrite_x=0, normalize=None):
"""
Return Discrete Sine Transform of arbitrary type sequence x.
Parameters
----------
x : array-like
input array.
n : int, optional
Length of the transform.
axis : int, optional
Axis along which the dst is computed. (default=-1)
overwrite_x : bool, optional
If True the contents of x can be destroyed. (default=False)
Returns
-------
z : real ndarray
"""
tmp = np.asarray(x)
if not np.isrealobj(tmp):
raise TypeError("1st argument must be real sequence")
if n is None:
n = tmp.shape[axis]
else:
        raise NotImplementedError("Padding/truncating not yet implemented")
if tmp.dtype == np.double:
if type == 1:
f = _fftpack.ddst1
elif type == 2:
f = _fftpack.ddst2
elif type == 3:
f = _fftpack.ddst3
else:
raise ValueError("Type %d not understood" % type)
elif tmp.dtype == np.float32:
if type == 1:
f = _fftpack.dst1
elif type == 2:
f = _fftpack.dst2
elif type == 3:
f = _fftpack.dst3
else:
raise ValueError("Type %d not understood" % type)
else:
raise ValueError("dtype %s not supported" % tmp.dtype)
if normalize:
if normalize == "ortho":
nm = 1
else:
raise ValueError("Unknown normalize mode %s" % normalize)
else:
nm = 0
if type == 1 and n < 2:
raise ValueError("DST-I is not defined for size < 2")
overwrite_x = overwrite_x or _datacopied(tmp, x)
if axis == -1 or axis == len(tmp.shape) - 1:
return f(tmp, n, nm, overwrite_x)
#else:
# raise NotImplementedError("Axis arg not yet implemented")
tmp = np.swapaxes(tmp, axis, -1)
tmp = f(tmp, n, nm, overwrite_x)
return np.swapaxes(tmp, axis, -1)
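if __name__ == "__main__":
    # Illustrative sanity check (not part of the original module): evaluate the
    # unnormalized DCT-II definition quoted in the dct() docstring above and
    # compare it against the fast transform; sample values are arbitrary.
    x = np.array([1.0, 2.0, 1.0, -1.0, 1.5])
    N = len(x)
    naive = np.array([2 * sum(x[n] * np.cos(np.pi * k * (2 * n + 1) / (2.0 * N))
                              for n in range(N)) for k in range(N)])
    assert np.allclose(naive, dct(x, type=2))
    print("DCT-II definition matches dct(x, type=2)")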
| kmspriyatham/symath | scipy/scipy/fftpack/realtransforms.py | Python | apache-2.0 | 13,425 |
# -*- coding: utf-8 -*-
"""
Bok choy acceptance tests for problems in the LMS
See also old lettuce tests in lms/djangoapps/courseware/features/problems.feature
"""
from textwrap import dedent
from nose.plugins.attrib import attr
from common.test.acceptance.fixtures.course import CourseFixture, XBlockFixtureDesc
from common.test.acceptance.pages.common.auto_auth import AutoAuthPage
from common.test.acceptance.pages.lms.courseware import CoursewarePage
from common.test.acceptance.pages.lms.login_and_register import CombinedLoginAndRegisterPage
from common.test.acceptance.pages.lms.problem import ProblemPage
from common.test.acceptance.tests.helpers import EventsTestMixin, UniqueCourseTest
class ProblemsTest(UniqueCourseTest):
"""
Base class for tests of problems in the LMS.
"""
def setUp(self):
super(ProblemsTest, self).setUp()
self.username = "test_student_{uuid}".format(uuid=self.unique_id[0:8])
self.email = "{username}@example.com".format(username=self.username)
self.password = "keep it secret; keep it safe."
self.xqueue_grade_response = None
self.courseware_page = CoursewarePage(self.browser, self.course_id)
# Install a course with a hierarchy and problems
course_fixture = CourseFixture(
self.course_info['org'], self.course_info['number'],
self.course_info['run'], self.course_info['display_name']
)
problem = self.get_problem()
sequential = self.get_sequential()
course_fixture.add_children(
XBlockFixtureDesc('chapter', 'Test Section').add_children(
sequential.add_children(problem)
)
).install()
# Auto-auth register for the course.
AutoAuthPage(
self.browser,
username=self.username,
email=self.email,
password=self.password,
course_id=self.course_id,
staff=True
).visit()
def get_problem(self):
""" Subclasses should override this to complete the fixture """
raise NotImplementedError()
def get_sequential(self):
""" Subclasses can override this to add a sequential with metadata """
return XBlockFixtureDesc('sequential', 'Test Subsection')
@attr(shard=9)
class ProblemClarificationTest(ProblemsTest):
"""
Tests the <clarification> element that can be used in problem XML.
"""
def get_problem(self):
"""
Create a problem with a <clarification>
"""
xml = dedent("""
<problem markdown="null">
<text>
<p>
Given the data in Table 7 <clarification>Table 7: "Example PV Installation Costs",
Page 171 of Roberts textbook</clarification>, compute the ROI
<clarification>Return on Investment <strong>(per year)</strong></clarification> over 20 years.
</p>
<numericalresponse answer="6.5">
<label>Enter the annual ROI</label>
<textline trailing_text="%" />
</numericalresponse>
</text>
</problem>
""")
return XBlockFixtureDesc('problem', 'TOOLTIP TEST PROBLEM', data=xml)
def test_clarification(self):
"""
Test that we can see the <clarification> tooltips.
"""
self.courseware_page.visit()
problem_page = ProblemPage(self.browser)
self.assertEqual(problem_page.problem_name, 'TOOLTIP TEST PROBLEM')
problem_page.click_clarification(0)
self.assertIn('"Example PV Installation Costs"', problem_page.visible_tooltip_text)
problem_page.click_clarification(1)
tooltip_text = problem_page.visible_tooltip_text
self.assertIn('Return on Investment', tooltip_text)
self.assertIn('per year', tooltip_text)
self.assertNotIn('strong', tooltip_text)
@attr(shard=9)
class ProblemHintTest(ProblemsTest, EventsTestMixin):
"""
Base test class for problem hint tests.
"""
def verify_check_hint(self, answer, answer_text, expected_events):
"""
Verify clicking Check shows the extended hint in the problem message.
"""
self.courseware_page.visit()
problem_page = ProblemPage(self.browser)
self.assertEqual(problem_page.problem_text[0], u'question text')
problem_page.fill_answer(answer)
problem_page.click_submit()
self.assertEqual(problem_page.message_text, answer_text)
# Check for corresponding tracking event
actual_events = self.wait_for_events(
event_filter={'event_type': 'edx.problem.hint.feedback_displayed'},
number_of_matches=1
)
self.assert_events_match(expected_events, actual_events)
def verify_demand_hints(self, first_hint, second_hint, expected_events):
"""
Test clicking through the demand hints and verify the events sent.
"""
self.courseware_page.visit()
problem_page = ProblemPage(self.browser)
# The hint notification should not be visible on load
self.assertFalse(problem_page.is_hint_notification_visible())
        # The two Hint buttons should be enabled: one visible, one present but not visible in the DOM.
self.assertEqual([None, None], problem_page.get_hint_button_disabled_attr())
# The hint button rotates through multiple hints
problem_page.click_hint()
self.assertTrue(problem_page.is_hint_notification_visible())
self.assertEqual(problem_page.hint_text, first_hint)
# Now there are two "hint" buttons, as there is also one in the hint notification.
self.assertEqual([None, None], problem_page.get_hint_button_disabled_attr())
problem_page.click_hint()
self.assertEqual(problem_page.hint_text, second_hint)
# Now both "hint" buttons should be disabled, as there are no more hints.
self.assertEqual(['true', 'true'], problem_page.get_hint_button_disabled_attr())
# Now click on "Review" and make sure the focus goes to the correct place.
problem_page.click_review_in_notification(notification_type='hint')
problem_page.wait_for_focus_on_problem_meta()
# Check corresponding tracking events
actual_events = self.wait_for_events(
event_filter={'event_type': 'edx.problem.hint.demandhint_displayed'},
number_of_matches=2
)
self.assert_events_match(expected_events, actual_events)
def get_problem(self):
""" Subclasses should override this to complete the fixture """
raise NotImplementedError()
@attr(shard=9)
class ProblemNotificationTests(ProblemsTest):
"""
Tests that the notifications are visible when expected.
"""
def get_problem(self):
"""
Problem structure.
"""
xml = dedent("""
<problem>
<label>Which of the following countries has the largest population?</label>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice">
<choice correct="false">Brazil <choicehint>timely feedback -- explain why an almost correct answer is wrong</choicehint></choice>
<choice correct="false">Germany</choice>
<choice correct="true">Indonesia</choice>
<choice correct="false">Russia</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
return XBlockFixtureDesc('problem', 'TEST PROBLEM', data=xml,
metadata={'max_attempts': 10},
grader_type='Final Exam')
def test_notification_updates(self):
"""
Verifies that the notification is removed and not visible when it should be
"""
self.courseware_page.visit()
problem_page = ProblemPage(self.browser)
problem_page.click_choice("choice_2")
self.assertFalse(problem_page.is_success_notification_visible())
problem_page.click_submit()
problem_page.wait_success_notification()
self.assertEqual('Question 1: correct', problem_page.status_sr_text)
# Clicking Save should clear the submit notification
problem_page.click_save()
self.assertFalse(problem_page.is_success_notification_visible())
problem_page.wait_for_save_notification()
# Changing the answer should clear the save notification
problem_page.click_choice("choice_1")
self.assertFalse(problem_page.is_save_notification_visible())
problem_page.click_save()
problem_page.wait_for_save_notification()
# Submitting the problem again should clear the save notification
problem_page.click_submit()
problem_page.wait_incorrect_notification()
self.assertEqual('Question 1: incorrect', problem_page.status_sr_text)
self.assertFalse(problem_page.is_save_notification_visible())
@attr(shard=9)
class ProblemFeedbackNotificationTests(ProblemsTest):
"""
Tests that the feedback notifications are visible when expected.
"""
def get_problem(self):
"""
Problem structure.
"""
xml = dedent("""
<problem>
<label>Which of the following countries has the largest population?</label>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice">
<choice correct="false">Brazil <choicehint>timely feedback -- explain why an almost correct answer is wrong</choicehint></choice>
<choice correct="false">Germany</choice>
<choice correct="true">Indonesia</choice>
<choice correct="false">Russia</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
return XBlockFixtureDesc('problem', 'TEST PROBLEM', data=xml,
metadata={'max_attempts': 10},
grader_type='Final Exam')
def test_feedback_notification_hides_after_save(self):
self.courseware_page.visit()
problem_page = ProblemPage(self.browser)
problem_page.click_choice("choice_0")
problem_page.click_submit()
problem_page.wait_for_feedback_message_visibility()
problem_page.click_choice("choice_1")
problem_page.click_save()
self.assertFalse(problem_page.is_feedback_message_notification_visible())
@attr(shard=9)
class ProblemSaveStatusUpdateTests(ProblemsTest):
"""
Tests the problem status updates correctly with an answer change and save.
"""
def get_problem(self):
"""
Problem structure.
"""
xml = dedent("""
<problem>
<label>Which of the following countries has the largest population?</label>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice">
<choice correct="false">Brazil <choicehint>timely feedback -- explain why an almost correct answer is wrong</choicehint></choice>
<choice correct="false">Germany</choice>
<choice correct="true">Indonesia</choice>
<choice correct="false">Russia</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
return XBlockFixtureDesc('problem', 'TEST PROBLEM', data=xml,
metadata={'max_attempts': 10},
grader_type='Final Exam')
def test_status_removed_after_save_before_submit(self):
"""
Scenario: User should see the status removed when saving after submitting an answer and reloading the page.
Given that I have loaded the problem page
And a choice has been selected and submitted
When I change the choice
And Save the problem
And reload the problem page
Then I should see the save notification and I should not see any indication of problem status
"""
self.courseware_page.visit()
problem_page = ProblemPage(self.browser)
problem_page.click_choice("choice_1")
problem_page.click_submit()
problem_page.wait_incorrect_notification()
problem_page.wait_for_expected_status('label.choicegroup_incorrect', 'incorrect')
problem_page.click_choice("choice_2")
self.assertFalse(problem_page.is_expected_status_visible('label.choicegroup_incorrect'))
problem_page.click_save()
problem_page.wait_for_save_notification()
# Refresh the page and the status should not be added
self.courseware_page.visit()
self.assertFalse(problem_page.is_expected_status_visible('label.choicegroup_incorrect'))
self.assertTrue(problem_page.is_save_notification_visible())
@attr(shard=9)
class ProblemSubmitButtonMaxAttemptsTest(ProblemsTest):
"""
Tests that the Submit button disables after the number of max attempts is reached.
"""
def get_problem(self):
"""
Problem structure.
"""
xml = dedent("""
<problem>
<label>Which of the following countries has the largest population?</label>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice">
<choice correct="false">Brazil <choicehint>timely feedback -- explain why an almost correct answer is wrong</choicehint></choice>
<choice correct="false">Germany</choice>
<choice correct="true">Indonesia</choice>
<choice correct="false">Russia</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
return XBlockFixtureDesc('problem', 'TEST PROBLEM', data=xml,
metadata={'max_attempts': 2},
grader_type='Final Exam')
def test_max_attempts(self):
"""
Verifies that the Submit button disables when the max number of attempts is reached.
"""
self.courseware_page.visit()
problem_page = ProblemPage(self.browser)
# Submit first answer (correct)
problem_page.click_choice("choice_2")
self.assertFalse(problem_page.is_submit_disabled())
problem_page.click_submit()
problem_page.wait_success_notification()
# Submit second and final answer (incorrect)
problem_page.click_choice("choice_1")
problem_page.click_submit()
problem_page.wait_incorrect_notification()
# Make sure that the Submit button disables.
problem_page.wait_for_submit_disabled()
@attr(shard=9)
class ProblemSubmitButtonPastDueTest(ProblemsTest):
"""
Tests that the Submit button is disabled if it is past the due date.
"""
def get_problem(self):
"""
Problem structure.
"""
xml = dedent("""
<problem>
<label>Which of the following countries has the largest population?</label>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice">
<choice correct="false">Brazil <choicehint>timely feedback -- explain why an almost correct answer is wrong</choicehint></choice>
<choice correct="false">Germany</choice>
<choice correct="true">Indonesia</choice>
<choice correct="false">Russia</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
return XBlockFixtureDesc('problem', 'TEST PROBLEM', data=xml,
metadata={'max_attempts': 2},
grader_type='Final Exam')
def get_sequential(self):
""" Subclasses can override this to add a sequential with metadata """
return XBlockFixtureDesc('sequential', 'Test Subsection', metadata={'due': "2016-10-01T00"})
def test_past_due(self):
"""
Verifies that the Submit button is disabled when the problem is past its due date.
"""
self.courseware_page.visit()
problem_page = ProblemPage(self.browser)
# Should have Submit button disabled on original rendering.
problem_page.wait_for_submit_disabled()
# Select a choice, and make sure that the Submit button remains disabled.
problem_page.click_choice("choice_2")
problem_page.wait_for_submit_disabled()
@attr(shard=9)
class ProblemExtendedHintTest(ProblemHintTest, EventsTestMixin):
"""
Test that extended hint features plumb through to the page html and tracking log.
"""
def get_problem(self):
"""
Problem with extended hint features.
"""
xml = dedent("""
<problem>
<p>question text</p>
<stringresponse answer="A">
<stringequalhint answer="B">hint</stringequalhint>
<textline size="20"/>
</stringresponse>
<demandhint>
<hint>demand-hint1</hint>
<hint>demand-hint2</hint>
</demandhint>
</problem>
""")
return XBlockFixtureDesc('problem', 'TITLE', data=xml)
def test_check_hint(self):
"""
Test clicking Check shows the extended hint in the problem message.
"""
self.verify_check_hint(
'B',
u'Answer\nIncorrect: hint',
[
{
'event':
{
'hint_label': u'Incorrect:',
'trigger_type': 'single',
'student_answer': [u'B'],
'correctness': False,
'question_type': 'stringresponse',
'hints': [{'text': 'hint'}]
}
}
]
)
def test_demand_hint(self):
"""
Test clicking hint button shows the demand hint in its div.
"""
self.verify_demand_hints(
u'Hint (1 of 2): demand-hint1',
u'Hint (1 of 2): demand-hint1\nHint (2 of 2): demand-hint2',
[
{'event': {u'hint_index': 0, u'hint_len': 2, u'hint_text': u'demand-hint1'}},
{'event': {u'hint_index': 1, u'hint_len': 2, u'hint_text': u'demand-hint2'}}
]
)
@attr(shard=9)
class ProblemHintWithHtmlTest(ProblemHintTest, EventsTestMixin):
"""
Tests that hints containing html get rendered properly
"""
def get_problem(self):
"""
Problem with extended hint features.
"""
xml = dedent("""
<problem>
<p>question text</p>
<stringresponse answer="A">
<stringequalhint answer="C"><a href="#">aa bb</a> cc</stringequalhint>
<textline size="20"/>
</stringresponse>
<demandhint>
<hint>aa <a href="#">bb</a> cc</hint>
<hint><a href="#">dd ee</a> ff</hint>
</demandhint>
</problem>
""")
return XBlockFixtureDesc('problem', 'PROBLEM HTML HINT TEST', data=xml)
def test_check_hint(self):
"""
Test clicking Check shows the extended hint in the problem message.
"""
self.verify_check_hint(
'C',
u'Answer\nIncorrect: aa bb cc',
[
{
'event':
{
'hint_label': u'Incorrect:',
'trigger_type': 'single',
'student_answer': [u'C'],
'correctness': False,
'question_type': 'stringresponse',
'hints': [{'text': '<a href="#">aa bb</a> cc'}]
}
}
]
)
def test_demand_hint(self):
"""
Test clicking hint button shows the demand hints in a notification area.
"""
self.verify_demand_hints(
u'Hint (1 of 2): aa bb cc',
u'Hint (1 of 2): aa bb cc\nHint (2 of 2): dd ee ff',
[
{'event': {u'hint_index': 0, u'hint_len': 2, u'hint_text': u'aa <a href="#">bb</a> cc'}},
{'event': {u'hint_index': 1, u'hint_len': 2, u'hint_text': u'<a href="#">dd ee</a> ff'}}
]
)
@attr(shard=9)
class ProblemWithMathjax(ProblemsTest):
"""
Tests that MathJax used in the problem renders correctly
"""
def get_problem(self):
"""
Create a problem with MathJax in the body and hint
"""
xml = dedent(r"""
<problem>
<p>Check mathjax has rendered [mathjax]E=mc^2[/mathjax]</p>
<multiplechoiceresponse>
<label>Answer this?</label>
<choicegroup type="MultipleChoice">
<choice correct="true">Choice1 <choicehint>Correct choice message</choicehint></choice>
<choice correct="false">Choice2<choicehint>Wrong choice message</choicehint></choice>
</choicegroup>
</multiplechoiceresponse>
<demandhint>
<hint>mathjax should work1 \(E=mc^2\) </hint>
<hint>mathjax should work2 [mathjax]E=mc^2[/mathjax]</hint>
</demandhint>
</problem>
""")
return XBlockFixtureDesc('problem', 'MATHJAX TEST PROBLEM', data=xml)
def test_mathjax_in_hint(self):
"""
Test that MathJax has rendered successfully in the problem hint
"""
self.courseware_page.visit()
problem_page = ProblemPage(self.browser)
self.assertEqual(problem_page.problem_name, "MATHJAX TEST PROBLEM")
problem_page.verify_mathjax_rendered_in_problem()
# The hint button rotates through multiple hints
problem_page.click_hint()
self.assertEqual(
["<strong>Hint (1 of 2): </strong>mathjax should work1"],
problem_page.extract_hint_text_from_html
)
problem_page.verify_mathjax_rendered_in_hint()
# Rotate the hint and check the problem hint
problem_page.click_hint()
self.assertEqual(
[
"<strong>Hint (1 of 2): </strong>mathjax should work1",
"<strong>Hint (2 of 2): </strong>mathjax should work2"
],
problem_page.extract_hint_text_from_html
)
problem_page.verify_mathjax_rendered_in_hint()
@attr(shard=9)
class ProblemPartialCredit(ProblemsTest):
"""
Makes sure that the partial credit is appearing properly.
"""
def get_problem(self):
"""
Create a problem with partial credit.
"""
xml = dedent("""
<problem>
<p>The answer is 1. Partial credit for -1.</p>
<numericalresponse answer="1" partial_credit="list">
<label>How many miles away from Earth is the sun? Use scientific notation to answer.</label>
<formulaequationinput/>
<responseparam type="tolerance" default="0.01" />
<responseparam partial_answers="-1" />
</numericalresponse>
</problem>
""")
return XBlockFixtureDesc('problem', 'PARTIAL CREDIT TEST PROBLEM', data=xml)
def test_partial_credit(self):
"""
Test that we can see the partial credit value and feedback.
"""
self.courseware_page.visit()
problem_page = ProblemPage(self.browser)
self.assertEqual(problem_page.problem_name, 'PARTIAL CREDIT TEST PROBLEM')
problem_page.fill_answer_numerical('-1')
problem_page.click_submit()
problem_page.wait_for_status_icon()
self.assertTrue(problem_page.simpleprob_is_partially_correct())
@attr(shard=9)
class LogoutDuringAnswering(ProblemsTest):
"""
Tests for the scenario where a user is logged out (their session expires
or is revoked) just before they click "check" on a problem.
"""
def get_problem(self):
"""
Create a problem.
"""
xml = dedent("""
<problem>
<numericalresponse answer="1">
<label>The answer is 1</label>
<formulaequationinput/>
<responseparam type="tolerance" default="0.01" />
</numericalresponse>
</problem>
""")
return XBlockFixtureDesc('problem', 'TEST PROBLEM', data=xml)
def log_user_out(self):
"""
Log the user out by deleting their session cookie.
"""
self.browser.delete_cookie('sessionid')
def test_logout_after_click_redirect(self):
"""
1) User goes to a problem page.
2) User fills out an answer to the problem.
3) User is logged out because their session id is invalidated or removed.
4) User clicks "check", and sees a confirmation modal asking them to
re-authenticate, since they've just been logged out.
5) User clicks "ok".
6) User is redirected to the login page.
7) User logs in.
8) User is redirected back to the problem page they started out on.
9) User is able to submit an answer
"""
self.courseware_page.visit()
problem_page = ProblemPage(self.browser)
self.assertEqual(problem_page.problem_name, 'TEST PROBLEM')
problem_page.fill_answer_numerical('1')
self.log_user_out()
with problem_page.handle_alert(confirm=True):
problem_page.click_submit()
login_page = CombinedLoginAndRegisterPage(self.browser)
login_page.wait_for_page()
login_page.login(self.email, self.password)
problem_page.wait_for_page()
self.assertEqual(problem_page.problem_name, 'TEST PROBLEM')
problem_page.fill_answer_numerical('1')
problem_page.click_submit()
self.assertTrue(problem_page.simpleprob_is_correct())
def test_logout_cancel_no_redirect(self):
"""
1) User goes to a problem page.
2) User fills out an answer to the problem.
3) User is logged out because their session id is invalidated or removed.
4) User clicks "check", and sees a confirmation modal asking them to
re-authenticate, since they've just been logged out.
5) User clicks "cancel".
6) User is not redirected to the login page.
"""
self.courseware_page.visit()
problem_page = ProblemPage(self.browser)
self.assertEqual(problem_page.problem_name, 'TEST PROBLEM')
problem_page.fill_answer_numerical('1')
self.log_user_out()
with problem_page.handle_alert(confirm=False):
problem_page.click_submit()
problem_page.wait_for_page()
self.assertEqual(problem_page.problem_name, 'TEST PROBLEM')
@attr(shard=9)
class ProblemQuestionDescriptionTest(ProblemsTest):
"""TestCase Class to verify question and description rendering."""
descriptions = [
"A vegetable is an edible part of a plant in tuber form.",
"A fruit is a fertilized ovary of a plant and contains seeds."
]
def get_problem(self):
"""
Create a problem with question and description.
"""
xml = dedent("""
<problem>
<choiceresponse>
<label>Eggplant is a _____?</label>
<description>{}</description>
<description>{}</description>
<checkboxgroup>
<choice correct="true">vegetable</choice>
<choice correct="false">fruit</choice>
</checkboxgroup>
</choiceresponse>
</problem>
""".format(*self.descriptions))
return XBlockFixtureDesc('problem', 'Label with Description', data=xml)
def test_question_with_description(self):
"""
Scenario: Test that question and description are rendered as expected.
Given I am enrolled in a course.
When I visit a unit page with a CAPA question.
Then label and description should be rendered correctly.
"""
self.courseware_page.visit()
problem_page = ProblemPage(self.browser)
self.assertEqual(problem_page.problem_name, 'Label with Description')
self.assertEqual(problem_page.problem_question, 'Eggplant is a _____?')
self.assertEqual(problem_page.problem_question_descriptions, self.descriptions)
class CAPAProblemA11yBaseTestMixin(object):
"""Base TestCase Class to verify CAPA problem accessibility."""
def test_a11y(self):
"""
Verifies that there are no accessibility issues for a particular problem type
"""
self.courseware_page.visit()
problem_page = ProblemPage(self.browser)
# Set the scope to the problem question
problem_page.a11y_audit.config.set_scope(
include=['.wrapper-problem-response']
)
# Run the accessibility audit.
problem_page.a11y_audit.check_for_accessibility_errors()
@attr('a11y')
class CAPAProblemChoiceA11yTest(CAPAProblemA11yBaseTestMixin, ProblemsTest):
"""TestCase Class to verify accessibility for checkboxes and multiplechoice CAPA problems."""
def get_problem(self):
"""
Problem structure.
"""
xml = dedent("""
<problem>
<choiceresponse>
<label>question 1 text here</label>
<description>description 2 text 1</description>
<description>description 2 text 2</description>
<checkboxgroup>
<choice correct="true">True</choice>
<choice correct="false">False</choice>
</checkboxgroup>
</choiceresponse>
<multiplechoiceresponse>
<label>question 2 text here</label>
<description>description 2 text 1</description>
<description>description 2 text 2</description>
<choicegroup type="MultipleChoice">
<choice correct="false">Alpha <choicehint>A hint</choicehint></choice>
<choice correct="true">Beta</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
return XBlockFixtureDesc('problem', 'Problem A11Y TEST', data=xml)
@attr('a11y')
class ProblemTextInputA11yTest(CAPAProblemA11yBaseTestMixin, ProblemsTest):
"""TestCase Class to verify TextInput problem accessibility."""
def get_problem(self):
"""
TextInput problem XML.
"""
xml = dedent("""
<problem>
<stringresponse answer="fight" type="ci">
<label>who wishes to _____ must first count the cost.</label>
<description>Appear weak when you are strong, and strong when you are weak.</description>
<description>In the midst of chaos, there is also opportunity.</description>
<textline size="40"/>
</stringresponse>
<stringresponse answer="force" type="ci">
<label>A leader leads by example not by _____.</label>
<description>The supreme art of war is to subdue the enemy without fighting.</description>
<description>Great results, can be achieved with small forces.</description>
<textline size="40"/>
</stringresponse>
</problem>""")
return XBlockFixtureDesc('problem', 'TEXTINPUT PROBLEM', data=xml)
@attr('a11y')
class CAPAProblemDropDownA11yTest(CAPAProblemA11yBaseTestMixin, ProblemsTest):
"""TestCase Class to verify accessibility for dropdowns(optioninput) CAPA problems."""
def get_problem(self):
"""
Problem structure.
"""
xml = dedent("""
<problem>
<optionresponse>
<p>You can use this template as a guide to the simple editor markdown and OLX markup to use for
dropdown problems. Edit this component to replace this template with your own assessment.</p>
<label>Which of the following is a fruit</label>
<description>Choose wisely</description>
<optioninput>
<option correct="False">radish</option>
<option correct="True">appple</option>
<option correct="False">carrot</option>
</optioninput>
</optionresponse>
</problem>
""")
return XBlockFixtureDesc('problem', 'Problem A11Y TEST', data=xml)
@attr('a11y')
class ProblemNumericalInputA11yTest(CAPAProblemA11yBaseTestMixin, ProblemsTest):
"""Tests NumericalInput accessibility."""
def get_problem(self):
"""NumericalInput problem XML."""
xml = dedent("""
<problem>
<numericalresponse answer="10*i">
<label>The square of what number is -100?</label>
<description>Use scientific notation to answer.</description>
<formulaequationinput/>
</numericalresponse>
</problem>""")
return XBlockFixtureDesc('problem', 'NUMERICALINPUT PROBLEM', data=xml)
@attr('a11y')
class ProblemMathExpressionInputA11yTest(CAPAProblemA11yBaseTestMixin, ProblemsTest):
"""Tests MathExpressionInput accessibility."""
def get_problem(self):
"""MathExpressionInput problem XML."""
xml = dedent(r"""
<problem>
<script type="loncapa/python">
derivative = "n*x^(n-1)"
</script>
<formularesponse type="ci" samples="x,n@1,2:3,4#10" answer="$derivative">
<label>Let \( x\) be a variable, and let \( n\) be an arbitrary constant. What is the derivative of \( x^n\)?</label>
<description>Enter the equation</description>
<responseparam type="tolerance" default="0.00001"/>
<formulaequationinput size="40"/>
</formularesponse>
</problem>""")
return XBlockFixtureDesc('problem', 'MATHEXPRESSIONINPUT PROBLEM', data=xml)
class ProblemMetaGradedTest(ProblemsTest):
"""
TestCase Class to verify that the graded variable is passed
"""
def get_problem(self):
"""
Problem structure
"""
xml = dedent("""
<problem>
<label>Which of the following countries has the largest population?</label>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice">
<choice correct="false">Brazil <choicehint>timely feedback -- explain why an almost correct answer is wrong</choicehint></choice>
<choice correct="false">Germany</choice>
<choice correct="true">Indonesia</choice>
<choice correct="false">Russia</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
return XBlockFixtureDesc('problem', 'TEST PROBLEM', data=xml, grader_type='Final Exam')
def test_grader_type_displayed(self):
self.courseware_page.visit()
problem_page = ProblemPage(self.browser)
self.assertEqual(problem_page.problem_name, 'TEST PROBLEM')
self.assertEqual(problem_page.problem_progress_graded_value, "1 point possible (graded)")
class ProblemMetaUngradedTest(ProblemsTest):
"""
TestCase Class to verify that the ungraded variable is passed
"""
def get_problem(self):
"""
Problem structure
"""
xml = dedent("""
<problem>
<label>Which of the following countries has the largest population?</label>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice">
<choice correct="false">Brazil <choicehint>timely feedback -- explain why an almost correct answer is wrong</choicehint></choice>
<choice correct="false">Germany</choice>
<choice correct="true">Indonesia</choice>
<choice correct="false">Russia</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
return XBlockFixtureDesc('problem', 'TEST PROBLEM', data=xml)
def test_grader_type_displayed(self):
self.courseware_page.visit()
problem_page = ProblemPage(self.browser)
self.assertEqual(problem_page.problem_name, 'TEST PROBLEM')
self.assertEqual(problem_page.problem_progress_graded_value, "1 point possible (ungraded)")
| pepeportela/edx-platform | common/test/acceptance/tests/lms/test_lms_problems.py | Python | agpl-3.0 | 37,713 |
from __future__ import absolute_import, print_function, division
from collections import MutableSet
import types
import weakref
from six import string_types
def check_deterministic(iterable):
# Most places where OrderedSet is used, theano interprets any exception
# whatsoever as a problem that an optimization introduced into the graph.
# If I raise a TypeError when the DestroyHandler tries to do something
# non-deterministic, it will just result in optimizations getting ignored.
# So I must use an assert here. In the long term we should fix the rest of
# theano to use exceptions correctly, so that this can be a TypeError.
if iterable is not None:
if not isinstance(iterable, (
list, tuple, OrderedSet,
types.GeneratorType, string_types)):
if len(iterable) > 1:
# We need to accept length 1 size to allow unpickle in tests.
raise AssertionError(
"Get an not ordered iterable when one was expected")
# Copyright (C) 2009 Raymond Hettinger
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# {{{ http://code.activestate.com/recipes/576696/ (r5)
class Link(object):
# This means we need to use a pickle protocol different from the
# default. Otherwise, there are pickling errors.
__slots__ = 'prev', 'next', 'key', '__weakref__'
def __getstate__(self):
# weakref.proxy doesn't pickle well, so we use weakref.ref
# manually and don't pickle the weakref.
# We restore the weakref when we unpickle.
ret = [self.prev(), self.next()]
try:
ret.append(self.key)
except AttributeError:
pass
return ret
def __setstate__(self, state):
self.prev = weakref.ref(state[0])
self.next = weakref.ref(state[1])
if len(state) == 3:
self.key = state[2]
class OrderedSet(MutableSet):
'Set that remembers the order in which elements were added'
# Big-O running times for all methods are the same as for regular sets.
# The internal self.__map dictionary maps keys to links in a doubly linked list.
# The circular doubly linked list starts and ends with a sentinel element.
# The sentinel element never gets deleted (this simplifies the algorithm).
# The prev/next links are weakref proxies (to prevent circular references).
# Individual links are kept alive by the hard reference in self.__map.
# Those hard references disappear when a key is deleted from an OrderedSet.
# Added by IG-- pre-existing theano code expected sets
# to have this method
def update(self, iterable):
check_deterministic(iterable)
self |= iterable
def __init__(self, iterable=None):
# Checks added by IG
check_deterministic(iterable)
self.__root = root = Link() # sentinel node for doubly linked list
root.prev = root.next = weakref.ref(root)
self.__map = {} # key --> link
if iterable is not None:
self |= iterable
def __len__(self):
return len(self.__map)
def __contains__(self, key):
return key in self.__map
def add(self, key):
# Store new key in a new link at the end of the linked list
if key not in self.__map:
self.__map[key] = link = Link()
root = self.__root
last = root.prev
link.prev, link.next, link.key = last, weakref.ref(root), key
last().next = root.prev = weakref.ref(link)
def union(self, s):
check_deterministic(s)
n = self.copy()
for elem in s:
if elem not in n:
n.add(elem)
return n
def intersection_update(self, s):
l = []
for elem in self:
if elem not in s:
l.append(elem)
for elem in l:
self.remove(elem)
return self
def difference_update(self, s):
check_deterministic(s)
for elem in s:
if elem in self:
self.remove(elem)
return self
def copy(self):
n = OrderedSet()
n.update(self)
return n
def discard(self, key):
# Remove an existing item using self.__map to find the link which is
# then removed by updating the links in the predecessor and successors.
if key in self.__map:
link = self.__map.pop(key)
link.prev().next = link.next
link.next().prev = link.prev
def __iter__(self):
# Traverse the linked list in order.
root = self.__root
curr = root.next()
while curr is not root:
yield curr.key
curr = curr.next()
def __reversed__(self):
# Traverse the linked list in reverse order.
root = self.__root
curr = root.prev()
while curr is not root:
yield curr.key
curr = curr.prev()
def pop(self, last=True):
if not self:
raise KeyError('set is empty')
if last:
key = next(reversed(self))
else:
key = next(iter(self))
self.discard(key)
return key
def __repr__(self):
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, list(self))
def __eq__(self, other):
# Note that we implement only the comparison to another
# `OrderedSet`, and not to a regular `set`, because otherwise we
# could have a non-symmetric equality relation like:
# my_ordered_set == my_set and my_set != my_ordered_set
if isinstance(other, OrderedSet):
return len(self) == len(other) and list(self) == list(other)
elif isinstance(other, set):
# Raise exception to avoid confusion.
raise TypeError(
'Cannot compare an `OrderedSet` to a `set` because '
'this comparison cannot be made symmetric: please '
'manually cast your `OrderedSet` into `set` before '
'performing this comparison.')
else:
return NotImplemented
# end of http://code.activestate.com/recipes/576696/ }}}
if __name__ == '__main__':
print(list(OrderedSet('abracadaba')))
print(list(OrderedSet('simsalabim')))
print(OrderedSet('boom') == OrderedSet('moob'))
print(OrderedSet('boom') == 'moob')
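# Extra sketch (not part of the original recipe): insertion order is kept
# across add()/discard(), and pop() removes from the end by default.
extra = OrderedSet('abcd')
extra.add('e')
extra.discard('b')
print(list(extra))  # -> ['a', 'c', 'd', 'e']
print(extra.pop())  # -> 'e'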
| JazzeYoung/VeryDeepAutoEncoder | theano/misc/ordered_set.py | Python | bsd-3-clause | 7,517 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#from marconi.openstack.common.gettextutils import _
import marconi.openstack.common.log as logging
from marconi.queues import storage
#from marconi.queues.storage import errors
LOG = logging.getLogger(__name__)
class ClaimController(storage.Claim):
def get(self, queue, claim_id, project=None):
"""Base method for getting a claim.
:param queue: Name of the queue this
claim belongs to.
:param claim_id: The claim id
:param project: Project id
:returns: (Claim's metadata, claimed messages)
:raises: DoesNotExist
"""
raise NotImplementedError
def create(self, queue, metadata, project=None, limit=10):
"""Base method for creating a claim.
:param queue: Name of the queue this
claim belongs to.
:param metadata: Claim's parameters
to be stored.
:param project: Project id
:param limit: (Default 10) Max number
of messages to claim.
:returns: (Claim ID, claimed messages)
"""
raise NotImplementedError
def update(self, queue, claim_id, metadata, project=None):
"""Base method for updating a claim.
:param queue: Name of the queue this
claim belongs to.
:param claim_id: Claim to be updated
:param metadata: Claim's parameters
to be updated.
:param project: Project id
"""
raise NotImplementedError
def delete(self, queue, claim_id, project=None):
"""Base method for deleting a claim.
:param queue: Name of the queue this
claim belongs to.
:param claim_id: Claim to be deleted
:param project: Project id
"""
raise NotImplementedError
| FlaPer87/cookiecutter-marconi-storage | {{cookiecutter.repo_name}}/{{cookiecutter.module_name}}/claims.py | Python | apache-2.0 | 2,321 |
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2011 Frédéric Bertolus.
#
# This file is part of Perroquet.
#
# Perroquet is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Perroquet is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Perroquet. If not, see <http://www.gnu.org/licenses/>.
import gettext
import gtk
from perroquetlib.config import config
_ = gettext.gettext
class GuiMessageDialog:
def __init__(self, parent):
self.config = config
self.parent = parent
self.builder = gtk.Builder()
self.builder.set_translation_domain("perroquet")
self.builder.add_from_file(self.config.get("ui_message_path"))
self.builder.connect_signals(self)
self.dialog = self.builder.get_object("dialog_message")
self.dialog.set_modal(True)
self.dialog.set_transient_for(self.parent)
self.result = False
def set_message(self, title, message):
self.builder.get_object("label_message").set_text(message)
self.dialog.set_title(title)
def run(self):
self.dialog.run()
self.dialog.destroy()
def on_button_reset_ok_clicked(self, widget, data=None):
self.dialog.response(gtk.RESPONSE_OK)
def on_button_reset_cancel_clicked(self, widget, data=None):
self.result = None
self.dialog.response(gtk.RESPONSE_CANCEL)
def on_entry_password_activate(self, widget, data=None):
self.result = self.builder.get_object("entry_password").get_text()
self.dialog.response(gtk.RESPONSE_OK)
def on_dialog_password_delete_event(self, widget, data=None):
self.result = None
self.dialog.response(gtk.RESPONSE_CANCEL)
return True
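# Hypothetical usage sketch (the parent window variable is assumed):
#   dialog = GuiMessageDialog(main_window)
#   dialog.set_message(_("Error"), _("Something went wrong"))
#   dialog.run()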
| niavok/perroquet | perroquetlib/gui/gui_message_dialog.py | Python | gpl-3.0 | 2,149 |
import os
import shutil
import unittest
from PySide.QtCore import QSettings
from ibl_stuff.libs import base as libs
LIBRARY = libs.get_library()
ENV_LIBRARY = os.environ.get("IBL_LIBRARY")
class SimpleTests(unittest.TestCase):
def test_clear_cache(self):
self.assertTrue(libs.clear_cache())
def test_save_fail(self):
self.assertFalse(libs.save_ibl("unmatched_title"))
def test_load_fail(self):
self.assertFalse(libs.load_ibl("unmatched_title"))
class TestLibrary(unittest.TestCase):
def setUp(self):
_library = os.path.join(os.path.expanduser("~"), "test_library")
if os.path.exists(_library):
shutil.rmtree(_library)
if ENV_LIBRARY:
del os.environ["IBL_LIBRARY"]
libs.set_library(_library)
for i, prj in enumerate(["foo", "bar", "baz"]):
x = libs.new_ibl("test%d" % i)
x["projects"] = [prj]
x["tags"] = [prj]
def tearDown(self):
_library = os.path.join(os.path.expanduser("~"), "test_library")
shutil.rmtree(_library)
if ENV_LIBRARY:
os.environ["IBL_LIBRARY"] = ENV_LIBRARY
libs.set_library(LIBRARY)
def test_get_library(self):
os.environ["IBL_LIBRARY"] = "test"
self.assertEqual(libs.get_library(), "test")
del os.environ["IBL_LIBRARY"]
libs.set_library("test")
self.assertEqual(libs.get_library(), "test")
QSettings("csaez", "ibl_stuff").clear()
self.assertEqual(libs.get_library(),
os.path.join(os.path.expanduser("~"), "ibl_stuff"))
def test_get_projects(self):
prjs = ["bar", "baz", "foo"]
for prj in libs.get_projects():
self.assertIn(prj, prjs)
self.assertEqual(sorted(libs.get_projects()), prjs)
def test_search_ibl(self):
for i, prj in enumerate(("foo", "bar", "baz")):
self.assertEqual(len(libs.search_ibl(prj)), 1)
self.assertEqual(libs.search_ibl(prj)[0].get("title"),
"test%d" % i)
def test_get_ibl(self):
for i, prj in enumerate(("foo", "bar", "baz")):
self.assertEqual(libs.get_ibl("test%d" % i)["projects"], [prj])
self.assertIsNone(libs.get_ibl("foo"))
libs.clear_cache()
self.assertIsNotNone(libs.get_ibl("test0"))
def test_save_success(self):
for i in range(3):
self.assertTrue(libs.save_ibl("test%d" % i))
def test_load_success(self):
for i in range(3):
self.assertTrue(libs.load_ibl("test%d" % i))
def test_remove_ibl(self):
self.assertFalse(libs.remove_ibl("foo"))
for i in range(3):
self.assertTrue(libs.remove_ibl("test%d" % i))
def test_get_tags(self):
self.assertEqual(sorted(libs.get_tags()), ["bar", "baz", "foo"])
if __name__ == "__main__":
unittest.main(verbosity=2)
| csaez/ibl_stuff | ibl_stuff/tests/test_libs.py | Python | mit | 2,944 |
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <[email protected]>
# License: http://pyasn1.sf.net/license.html
#
from sys import version_info
__all__ = ['callable']
if (2, 7) < version_info[:2] < (3, 2):
import collections
def callable(x):
return isinstance(x, collections.Callable)
else:
callable = callable
| saurabhbajaj207/CarpeDiem | venv/Lib/site-packages/pyasn1/compat/calling.py | Python | mit | 373 |
import random
from collections import Counter
from datetime import date, timedelta
from django.test import TestCase
from django.contrib.auth import get_user_model
from django.utils import timezone
from trackstats.models import (
Domain,
Metric,
StatisticByDate,
StatisticByDateAndObject,
Period)
from trackstats.trackers import (
CountObjectsByDateTracker,
CountObjectsByDateAndObjectTracker)
from trackstats.tests.models import Comment
def to_date(dt):
return timezone.make_naive(dt).date()
class TrackersTestCase(TestCase):
def setUp(self):
self.User = get_user_model()
self.users_domain = Domain.objects.register(ref='users')
self.user_count = Metric.objects.register(
domain=self.users_domain,
ref='user_count')
self.expected_signups = {}
dt = timezone.now() - timedelta(days=7)
# TODO: Add timezone testing
# dt.replace(hour=2)
signups_lifetime = 0
while to_date(dt) != date.today():
signups_on_day = random.randint(1, 5)
signups_lifetime += signups_on_day
self.expected_signups[to_date(dt)] = {
'lifetime': signups_lifetime,
'day': signups_on_day
}
for i in range(signups_on_day):
self.User.objects.create(
username='user{}_{}'.format(to_date(dt), i),
date_joined=dt)
dt += timedelta(days=1)
self.expected_signups[date.today()] = self.expected_signups[
date.today() - timedelta(days=1)]
def test_count_lifetime(self):
CountObjectsByDateTracker(
period=Period.LIFETIME,
metric=self.user_count,
date_field='date_joined').track(self.User.objects.all())
stats = StatisticByDate.objects.narrow(
metrics=[self.user_count],
period=Period.LIFETIME)
for stat in stats:
self.assertEqual(
stat.value,
self.expected_signups[stat.date]['lifetime'])
self.assertEqual(
stats.count(),
len(self.expected_signups))
def test_count_daily(self):
CountObjectsByDateTracker(
period=Period.DAY,
metric=self.user_count,
date_field='date_joined').track(self.User.objects.all())
stats = StatisticByDate.objects.narrow(
metrics=[self.user_count],
period=Period.DAY)
for stat in stats:
self.assertEqual(
stat.value,
self.expected_signups[stat.date]['day'])
self.assertEqual(
stats.count(),
# Today is not in there due to group by.
len(self.expected_signups) - 1)
class ObjectTrackersTestCase(TestCase):
def setUp(self):
self.User = get_user_model()
domain = Domain.objects.register(ref='comments')
self.comment_count = Metric.objects.register(
domain=domain,
ref='comment_count')
users = self.users = [
self.User.objects.create(username='user{}'.format(i))
for i in range(5)]
dt = timezone.now() - timedelta(days=7)
self.expected_daily = {}
self.expected_lifetime = Counter()
while to_date(dt) <= date.today():
for user in users:
comment_count = random.randint(1, 5)
for i in range(comment_count):
Comment.objects.create(
timestamp=dt,
user=user)
self.expected_lifetime[
(to_date(dt), user.pk)] = self.expected_lifetime[(
to_date(dt) - timedelta(days=1),
user.pk)] + comment_count
self.expected_daily[(to_date(dt), user.pk)] = comment_count
dt += timedelta(days=1)
def test_count_lifetime(self):
CountObjectsByDateAndObjectTracker(
period=Period.LIFETIME,
metric=self.comment_count,
object_model=self.User,
object_field='user',
date_field='timestamp').track(Comment.objects.all())
stats = StatisticByDateAndObject.objects.narrow(
metrics=[self.comment_count],
period=Period.LIFETIME)
for stat in stats:
self.assertEqual(
self.expected_lifetime[
(stat.date, stat.object.pk)],
stat.value)
self.assertEqual(
stats.count(),
len(self.expected_lifetime))
def test_count_daily(self):
CountObjectsByDateAndObjectTracker(
period=Period.DAY,
metric=self.comment_count,
object_model=self.User,
object_field='user',
date_field='timestamp').track(Comment.objects.all())
stats = StatisticByDateAndObject.objects.narrow(
metric=self.comment_count,
period=Period.DAY)
for stat in stats:
self.assertEqual(
self.expected_daily[(stat.date, stat.object.pk)],
stat.value)
self.assertEqual(
stats.count(),
len(self.expected_daily))
| pennersr/django-trackstats | trackstats/tests/test_trackers.py | Python | mit | 5,290 |
#title : checksum-homework.py
#description : This provides functionality to verify and correct checksums provided
#author : Lewis Relph
#date : 12/03/17
#notes : This code assumes the checksums are provided in the way specified by the AQA specification.
# We assume the leftmost column and bottom row are the checksums.
#python_version : 3.5.2
#==============================================================================
"""
Verify if there are any errors in the grid
Return boolean
"""
def check_for_errors(grid):
if(locate_errors(grid) or locate_errors(list(zip(*grid)))):
return True
return False
"""
Get location of errors for the provided grid (scans x-direction)
Return x-coordinates (1D array)
"""
def locate_errors(grid):
errors = []
for i in range(0, len(grid)):
check = grid[i][0]
total = 0
for digit in grid[i][1:]:
total += digit
if (total % 2 != check):
# Even, has bad check digit
errors.append(i)
return errors
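# Minimal usage sketch (the grid below is hypothetical, not taken from the
# AQA specification): each row is [check_digit, d1, d2, ...]; the second row
# has a bad parity bit, so its index is reported.
def _demo_locate_errors():
    grid = [
        [1, 1, 0, 0],  # parity of 1+0+0 is 1 and matches the check digit
        [0, 1, 1, 1],  # parity of 1+1+1 is 1 but the check digit is 0 -> error
        [0, 1, 0, 1],  # parity of 1+0+1 is 0 and matches the check digit
    ]
    return locate_errors(grid)  # expected: [1]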
"""
Correct errors in the provided grid
Return corrected grid (2D array)
"""
def correct_error(grid):
# Reverse grid for y check
grid1 = list(zip(*grid))
x_errors = locate_errors(grid[:-1])
y_errors = locate_errors(grid1[:-1])
errors = list(zip(x_errors, y_errors))
for each in errors:
if(grid[each[0]][each[1]]):
grid[each[0]][each[1]] = 0
else:
grid[each[0]][each[1]] = 1
return grid | sleawnis/python-codebank | checksum-homework.py | Python | mit | 1,621 |
# Create your views here.
from oauth2_provider.ext.rest_framework import OAuth2Authentication
from rest_framework import permissions, generics
from rest_framework.viewsets import ModelViewSet
from feedback_survey.filters import FeedbackStudentFilter
from rest_framework.response import Response
from feedback_survey.models import Feedback, Course, Teacher, Section, Student, SectionField
from feedback_survey.serializers import FeedbackSerializer, CourseSerializer, TeacherSerializer, SectionSerializer, \
StudentSerializer, FeedbackCreateSerializer, StudentCreateSerializer, CourseCreateSerializer, \
SectionCreateSerializer, SectionFieldSerializer
from feedback_survey.utils import CustomMetaDataMixin
from rest_framework import filters
class FeedbackSurveyViewSet(CustomMetaDataMixin, ModelViewSet):
"""
OAuth is supported: if you want it, uncomment the authentication and permission classes and make sure an access
token has been created at your end.
"""
queryset = Feedback.objects.all()
serializer_class = FeedbackSerializer
filter_backends = (filters.DjangoFilterBackend,)
filter_class = FeedbackStudentFilter
# authentication_classes = [OAuth2Authentication, ]
# permission_classes = [permissions.IsAuthenticated, permissions.IsAdminUser]
def get_serializer_class(self):
if self.is_create_api:
return FeedbackCreateSerializer
return FeedbackSerializer
@property
def is_create_api(self):
return self.request.method == 'POST'
class CourseViewSet(CustomMetaDataMixin, ModelViewSet):
"""
"""
queryset = Course.objects.all()
serializer_class = CourseSerializer
# authentication_classes = [OAuth2Authentication, ]
# permission_classes = [permissions.IsAuthenticated, permissions.IsAdminUser]
def get_serializer_class(self):
if self.is_create_api:
return CourseCreateSerializer
return CourseSerializer
@property
def is_create_api(self):
return self.request.method == 'POST'
class TeacherSerializerViewSet(CustomMetaDataMixin, ModelViewSet):
"""
"""
queryset = Teacher.objects.all()
serializer_class = TeacherSerializer
# authentication_classes = [OAuth2Authentication, ]
# permission_classes = [permissions.IsAuthenticated, permissions.IsAdminUser]
class SectionSerializerViewSet(CustomMetaDataMixin, ModelViewSet):
"""
"""
queryset = Section.objects.all()
serializer_class = SectionSerializer
def get_serializer_class(self):
if self.is_create_api:
return SectionCreateSerializer
return SectionSerializer
@property
def is_create_api(self):
return self.request.method == 'POST'
class StudentSerializerViewSet(CustomMetaDataMixin, ModelViewSet):
"""
Creating Student with course and university
"""
queryset = Student.objects.all()
serializer_class = StudentSerializer
def get_serializer_class(self):
if self.is_create_api:
return StudentCreateSerializer
return StudentSerializer
@property
def is_create_api(self):
return self.request.method == 'POST'
class SectionFieldsSerializerViewSet(CustomMetaDataMixin, ModelViewSet):
"""
"""
queryset = SectionField.objects.all()
serializer_class = SectionFieldSerializer
class StudentPendingFeedback(CustomMetaDataMixin, generics.RetrieveAPIView):
"""
Return the pending feedback entries for a given student (looked up by ``student_id``).
1. **authentication:** yes
2. **authorization:** authenticated user
"""
# authentication_classes = [OAuth2Authentication, ]
# permission_classes = [permissions.IsAuthenticated, ]
queryset = Feedback.objects.all()
serializer_class = FeedbackSerializer
def retrieve(self, request, *args, **kwargs):
student_id = self.request.query_params.get('student_id', None)
instance = self.queryset.filter(state="1", student_id=student_id)
serializer = self.get_serializer(instance, many=True)
return Response(serializer.data)
| mushahid54/feedback_survey | feedback_survey/views.py | Python | mit | 4,099 |
from __future__ import absolute_import, unicode_literals
# Third Party Stuff
from django.contrib.auth.models import AbstractBaseUser, BaseUserManager, PermissionsMixin, Group
from django.db import models
from django.utils import timezone
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
import uuid
# Create your models here.
GENDER_CHOICES = {
'M': "Male",
'F': "Female",
'O': 'Others'
}
USER_TYPE = {
'ADMIN': 'admin',
'TEAM_LEAD': 'teamlead',
'EMP': 'employee',
}
class UUIDModel(models.Model):
"""
An abstract base class model that makes primary key `id` as UUID
instead of default auto incremented number.
"""
id = models.UUIDField(primary_key=True, editable=False, default=uuid.uuid4)
class Meta:
abstract = True
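# Hypothetical usage sketch (no such model exists in this app): a concrete
# model inherits UUIDModel to get a UUID primary key instead of the default
# auto-incrementing id, e.g.
#
#     class Note(UUIDModel):
#         body = models.TextField(blank=True)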
class UserManager(BaseUserManager):
use_in_migrations = True
def _create_user(self, email, password, is_staff, is_superuser, **extra_fields):
"""
Creates and saves a User with the given email and password.
"""
if not email:
raise ValueError('The given email must be set')
email = self.normalize_email(email)
user = self.model(email=email, is_staff=is_staff, is_active=True,
is_superuser=is_superuser, **extra_fields)
user.set_password(password)
user.save(using=self._db)
return user
def create_user(self, email, password=None, **extra_fields):
return self._create_user(email, password, False, False, **extra_fields)
def create_superuser(self, email, password, **extra_fields):
return self._create_user(email, password, True, True, **extra_fields)
class Address(models.Model):
line1 = models.CharField(max_length=100)
line2 = models.CharField(max_length=100)
city = models.CharField(max_length=100)
state = models.CharField(max_length=100)
zipcode = models.CharField(max_length=6)
def __unicode__(self):
return "{}, {}, {} - {}".format(self.line1, self.line2,
self.city, self.state, self.zipcode)
class Meta:
verbose_name = _('Address')
verbose_name_plural = _('Addresses')
@python_2_unicode_compatible
class User(AbstractBaseUser, PermissionsMixin):
first_name = models.CharField(_('First Name'), max_length=120, blank=True)
last_name = models.CharField(_('Last Name'), max_length=120, blank=True)
# E-Mail is the username for accounts
email = models.EmailField(_('email address'), unique=True, db_index=True)
username = models.CharField(max_length=60, unique=True, db_index=True)
github_handle = models.CharField(max_length=60)
is_admin = models.BooleanField(default=False)
is_organization_mail = models.BooleanField(default=False)
phone = models.CharField(max_length=10, null=True, blank=True)
age = models.PositiveSmallIntegerField(blank=True, null=True)
gender = models.CharField(
max_length=1, choices=GENDER_CHOICES.items(), default='M')
address = models.OneToOneField(Address, null=True, blank=True)
profile_image_url = models.URLField()
is_staff = models.BooleanField(_('staff status'), default=False,
help_text='Designates whether the user can log into this admin site.')
is_active = models.BooleanField('active', default=True,
help_text='Designates whether this user should be treated as '
'active. Unselect this instead of deleting accounts.')
date_joined = models.DateTimeField(_('date joined'), default=timezone.now)
REQUIRED_FIELDS = ['email']
USERNAME_FIELD = 'username'
objects = UserManager()
class Meta:
verbose_name = _('user')
verbose_name_plural = _('users')
ordering = ('-date_joined', )
def __str__(self):
return str(self.email)
def get_full_name(self):
"""
Returns the first_name plus the last_name, with a space in between.
"""
full_name = '{} {}'.format(self.first_name, self.last_name)
return full_name.strip()
def get_short_name(self):
"Returns the short name for the user."
return self.first_name.strip()
def add_to_group(self, grp):
try:
g = Group.objects.get(name=USER_TYPE[grp])
except Exception as e:
print e
return False
self.groups.add(g)
self.save()
return True
| CuriousLearner/standup | src/authen/models.py | Python | gpl-3.0 | 4,591 |
from jsonrpc import ServiceProxy
import sys
import string
# ===== BEGIN USER SETTINGS =====
# if you do not set these you will be prompted for a password for every command
rpcuser = ""
rpcpass = ""
# ====== END USER SETTINGS ======
if rpcpass == "":
access = ServiceProxy("http://127.0.0.1:1824")
else:
access = ServiceProxy("http://"+rpcuser+":"+rpcpass+"@127.0.0.1:1824")
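# Usage sketch (assumed invocation; the command names match the branches
# below): the first command-line argument selects the RPC call, e.g.
#   python bitrpc.py getinfo
#   python bitrpc.py getbalance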
cmd = sys.argv[1].lower()
if cmd == "backupwallet":
try:
path = raw_input("Enter destination path/filename: ")
print access.backupwallet(path)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccount":
try:
addr = raw_input("Enter a Makicoin address: ")
print access.getaccount(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccountaddress":
try:
acct = raw_input("Enter an account name: ")
print access.getaccountaddress(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getaddressesbyaccount":
try:
acct = raw_input("Enter an account name: ")
print access.getaddressesbyaccount(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getbalance":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getbalance(acct, mc)
except:
print access.getbalance()
except:
print "\n---An error occurred---\n"
elif cmd == "getblockbycount":
try:
height = raw_input("Height: ")
print access.getblockbycount(height)
except:
print "\n---An error occurred---\n"
elif cmd == "getblockcount":
try:
print access.getblockcount()
except:
print "\n---An error occurred---\n"
elif cmd == "getblocknumber":
try:
print access.getblocknumber()
except:
print "\n---An error occurred---\n"
elif cmd == "getconnectioncount":
try:
print access.getconnectioncount()
except:
print "\n---An error occurred---\n"
elif cmd == "getdifficulty":
try:
print access.getdifficulty()
except:
print "\n---An error occurred---\n"
elif cmd == "getgenerate":
try:
print access.getgenerate()
except:
print "\n---An error occurred---\n"
elif cmd == "gethashespersec":
try:
print access.gethashespersec()
except:
print "\n---An error occurred---\n"
elif cmd == "getinfo":
try:
print access.getinfo()
except:
print "\n---An error occurred---\n"
elif cmd == "getnewaddress":
try:
acct = raw_input("Enter an account name: ")
try:
print access.getnewaddress(acct)
except:
print access.getnewaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaccount":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaccount(acct, mc)
except:
print access.getreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaddress":
try:
addr = raw_input("Enter a Makicoin address (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaddress(addr, mc)
except:
print access.getreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "gettransaction":
try:
txid = raw_input("Enter a transaction ID: ")
print access.gettransaction(txid)
except:
print "\n---An error occurred---\n"
elif cmd == "getwork":
try:
data = raw_input("Data (optional): ")
try:
print access.getwork(data)
except:
print access.getwork()
except:
print "\n---An error occurred---\n"
elif cmd == "help":
try:
cmd = raw_input("Command (optional): ")
try:
print access.help(cmd)
except:
print access.help()
except:
print "\n---An error occurred---\n"
elif cmd == "listaccounts":
try:
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.listaccounts(mc)
except:
print access.listaccounts()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaccount":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaccount(mc, incemp)
except:
print access.listreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaddress":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaddress(mc, incemp)
except:
print access.listreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "listtransactions":
try:
acct = raw_input("Account (optional): ")
count = raw_input("Number of transactions (optional): ")
frm = raw_input("Skip (optional):")
try:
print access.listtransactions(acct, count, frm)
except:
print access.listtransactions()
except:
print "\n---An error occurred---\n"
elif cmd == "move":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.move(frm, to, amt, mc, comment)
except:
print access.move(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendfrom":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendfrom(frm, to, amt, mc, comment, commentto)
except:
print access.sendfrom(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendmany":
try:
frm = raw_input("From: ")
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.sendmany(frm,to,mc,comment)
except:
print access.sendmany(frm,to)
except:
print "\n---An error occurred---\n"
elif cmd == "sendtoaddress":
try:
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
amt = raw_input("Amount:")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendtoaddress(to,amt,comment,commentto)
except:
print access.sendtoaddress(to,amt)
except:
print "\n---An error occurred---\n"
elif cmd == "setaccount":
try:
addr = raw_input("Address: ")
acct = raw_input("Account:")
print access.setaccount(addr,acct)
except:
print "\n---An error occurred---\n"
elif cmd == "setgenerate":
try:
gen= raw_input("Generate? (true/false): ")
cpus = raw_input("Max processors/cores (-1 for unlimited, optional):")
try:
print access.setgenerate(gen, cpus)
except:
print access.setgenerate(gen)
except:
print "\n---An error occurred---\n"
elif cmd == "settxfee":
try:
amt = raw_input("Amount:")
print access.settxfee(amt)
except:
print "\n---An error occurred---\n"
elif cmd == "stop":
try:
print access.stop()
except:
print "\n---An error occurred---\n"
elif cmd == "validateaddress":
try:
addr = raw_input("Address: ")
print access.validateaddress(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrase":
try:
pwd = raw_input("Enter wallet passphrase: ")
access.walletpassphrase(pwd, 60)
print "\n---Wallet unlocked---\n"
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrasechange":
try:
pwd = raw_input("Enter old wallet passphrase: ")
pwd2 = raw_input("Enter new wallet passphrase: ")
access.walletpassphrasechange(pwd, pwd2)
print
print "\n---Passphrase changed---\n"
except:
print
print "\n---An error occurred---\n"
print
else:
print "Command not found or not supported"
| itamae/Makicoin | contrib/bitrpc/bitrpc.py | Python | mit | 7,838 |
# Authors:
# Rob Crittenden <[email protected]>
#
# Copyright (C) 2008 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import shlex
import re
import six
# The Python re module doesn't do nested parentheses
# Break the ACI into 3 pieces: target, name, permissions/bind_rules
ACIPat = re.compile(r'\(version\s+3.0\s*;\s*ac[li]\s+\"([^\"]*)\"\s*;'
r'\s*(.*);\s*\)', re.UNICODE)
# Break the permissions/bind_rules out
PermPat = re.compile(r'(\w+)\s*\(([^()]*)\)\s*(.*)', re.UNICODE)
# Break the bind rule out
BindPat = re.compile(r'\(?([a-zA-Z0-9;\.]+)\s*(\!?=)\s*\"(.*)\"\)?',
re.UNICODE)
ACTIONS = ["allow", "deny"]
PERMISSIONS = ["read", "write", "add", "delete", "search", "compare",
"selfwrite", "proxy", "all"]
class ACI:
"""
Holds the basic data for an ACI entry, as stored in the cn=accounts
entry in LDAP. Has methods to parse an ACI string and export to an
ACI String.
"""
__hash__ = None
def __init__(self,acistr=None):
self.name = None
self.source_group = None
self.dest_group = None
self.orig_acistr = acistr
self.target = {}
self.action = "allow"
self.permissions = ["write"]
self.bindrule = {}
if acistr is not None:
self._parse_acistr(acistr)
def __getitem__(self,key):
"""Fake getting attributes by key for sorting"""
if key == 0:
return self.name
if key == 1:
return self.source_group
if key == 2:
return self.dest_group
raise TypeError("Unknown key value %s" % key)
def __repr__(self):
"""An alias for export_to_string()"""
return self.export_to_string()
def export_to_string(self):
"""Output a Directory Server-compatible ACI string"""
self.validate()
aci = ""
for t, v in sorted(self.target.items()):
op = v['operator']
if type(v['expression']) in (tuple, list):
target = ""
for l in self._unique_list(v['expression']):
target = target + l + " || "
target = target[:-4]
aci = aci + "(%s %s \"%s\")" % (t, op, target)
else:
aci = aci + "(%s %s \"%s\")" % (t, op, v['expression'])
aci = aci + "(version 3.0;acl \"%s\";%s (%s) %s %s \"%s\"" % (self.name, self.action, ",".join(self.permissions), self.bindrule['keyword'], self.bindrule['operator'], self.bindrule['expression']) + ";)"
return aci
def _unique_list(self, l):
"""
A set() doesn't maintain order so make a list unique ourselves.
        The number of entries in our lists is always going to be
        relatively low and this code will be called infrequently
        anyway, so the overhead will be small.
"""
unique = []
for item in l:
if item not in unique:
unique.append(item)
return unique
def _remove_quotes(self, s):
# Remove leading and trailing quotes
if s.startswith('"'):
s = s[1:]
if s.endswith('"'):
s = s[:-1]
return s
def _parse_target(self, aci):
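        # Tokenize the target portion and load each (keyword op "value") clause into self.target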
if six.PY2:
aci = aci.encode('utf-8')
lexer = shlex.shlex(aci)
lexer.wordchars = lexer.wordchars + "."
var = False
op = "="
for token in lexer:
# We should have the form (a = b)(a = b)...
if token == "(":
var = next(lexer).strip()
operator = next(lexer)
if operator not in ("=", "!="):
# Peek at the next char before giving up
operator = operator + next(lexer)
if operator not in ("=", "!="):
raise SyntaxError("No operator in target, got '%s'" % operator)
op = operator
val = next(lexer).strip()
val = self._remove_quotes(val)
end = next(lexer)
if end != ")":
raise SyntaxError('No end parenthesis in target, got %s' % end)
if var == 'targetattr':
# Make a string of the form attr || attr || ... into a list
t = re.split(r'[^a-zA-Z0-9;\*]+', val)
self.target[var] = {}
self.target[var]['operator'] = op
self.target[var]['expression'] = t
else:
self.target[var] = {}
self.target[var]['operator'] = op
self.target[var]['expression'] = val
def _parse_acistr(self, acistr):
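        # Split the raw ACI into the target part (before "(version 3.0") and the version/acl part, then parse both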
vstart = acistr.find('version 3.0')
if vstart < 0:
raise SyntaxError("malformed ACI, unable to find version %s" % acistr)
acimatch = ACIPat.match(acistr[vstart-1:])
if not acimatch or len(acimatch.groups()) < 2:
raise SyntaxError("malformed ACI, match for version and bind rule failed %s" % acistr)
self._parse_target(acistr[:vstart-1])
self.name = acimatch.group(1)
bindperms = PermPat.match(acimatch.group(2))
if not bindperms or len(bindperms.groups()) < 3:
raise SyntaxError("malformed ACI, permissions match failed %s" % acistr)
self.action = bindperms.group(1)
self.permissions = self._unique_list(
bindperms.group(2).replace(' ','').split(',')
)
self.set_bindrule(bindperms.group(3))
def validate(self):
"""Do some basic verification that this will produce a
valid LDAP ACI.
returns True if valid
"""
if type(self.permissions) not in (tuple, list):
raise SyntaxError("permissions must be a list")
for p in self.permissions:
if p.lower() not in PERMISSIONS:
raise SyntaxError("invalid permission: '%s'" % p)
if not self.name:
raise SyntaxError("name must be set")
if not isinstance(self.name, str):
raise SyntaxError("name must be a string")
if not isinstance(self.target, dict) or len(self.target) == 0:
raise SyntaxError("target must be a non-empty dictionary")
if not isinstance(self.bindrule, dict):
raise SyntaxError("bindrule must be a dictionary")
if not self.bindrule.get('operator') or not self.bindrule.get('keyword') or not self.bindrule.get('expression'):
raise SyntaxError("bindrule is missing a component")
return True
def set_permissions(self, permissions):
if type(permissions) not in (tuple, list):
permissions = [permissions]
self.permissions = self._unique_list(permissions)
def set_target_filter(self, filter, operator="="):
self.target['targetfilter'] = {}
if not filter.startswith("("):
filter = "(" + filter + ")"
self.target['targetfilter']['expression'] = filter
self.target['targetfilter']['operator'] = operator
def set_target_attr(self, attr, operator="="):
if not attr:
if 'targetattr' in self.target:
del self.target['targetattr']
return
if type(attr) not in (tuple, list):
attr = [attr]
self.target['targetattr'] = {}
self.target['targetattr']['expression'] = self._unique_list(attr)
self.target['targetattr']['operator'] = operator
def set_target(self, target, operator="="):
assert target.startswith("ldap:///")
self.target['target'] = {}
self.target['target']['expression'] = target
self.target['target']['operator'] = operator
def set_bindrule(self, bindrule):
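        # A bind rule looks like: groupdn = "ldap:///cn=admins,cn=groups,cn=accounts,..." (optionally parenthesized)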
if bindrule.startswith('(') != bindrule.endswith(')'):
raise SyntaxError("non-matching parentheses in bindrule")
match = BindPat.match(bindrule)
if not match or len(match.groups()) < 3:
raise SyntaxError("malformed bind rule")
self.set_bindrule_keyword(match.group(1))
self.set_bindrule_operator(match.group(2))
self.set_bindrule_expression(match.group(3).replace('"',''))
def set_bindrule_keyword(self, keyword):
self.bindrule['keyword'] = keyword
def set_bindrule_operator(self, operator):
self.bindrule['operator'] = operator
def set_bindrule_expression(self, expression):
self.bindrule['expression'] = expression
def isequal(self, b):
"""
Compare the current ACI to another one to see if they are
the same.
returns True if equal, False if not.
"""
assert isinstance(b, ACI)
try:
if self.name.lower() != b.name.lower():
return False
if set(self.permissions) != set(b.permissions):
return False
if self.bindrule.get('keyword') != b.bindrule.get('keyword'):
return False
if self.bindrule.get('operator') != b.bindrule.get('operator'):
return False
if self.bindrule.get('expression') != b.bindrule.get('expression'):
return False
if self.target.get('targetfilter',{}).get('expression') != b.target.get('targetfilter',{}).get('expression'):
return False
if self.target.get('targetfilter',{}).get('operator') != b.target.get('targetfilter',{}).get('operator'):
return False
if set(self.target.get('targetattr', {}).get('expression', ())) != set(b.target.get('targetattr',{}).get('expression', ())):
return False
if self.target.get('targetattr',{}).get('operator') != b.target.get('targetattr',{}).get('operator'):
return False
if self.target.get('target',{}).get('expression') != b.target.get('target',{}).get('expression'):
return False
if self.target.get('target',{}).get('operator') != b.target.get('target',{}).get('operator'):
return False
except Exception:
# If anything throws up then they are not equal
return False
# We got this far so lets declare them the same
return True
__eq__ = isequal
def __ne__(self, b):
return not self == b
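# A minimal usage sketch for the parser above, using a hypothetical ACI string:
#
#   acistr = ('(targetattr = "givenName || sn")'
#             '(version 3.0;acl "print-names";allow (read) '
#             'groupdn = "ldap:///cn=admins,cn=groups,cn=accounts,dc=example,dc=com";)')
#   aci = ACI(acistr)
#   assert aci.name == "print-names"
#   assert aci.permissions == ["read"]
#   assert aci.target['targetattr']['expression'] == ['givenName', 'sn']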
| encukou/freeipa | ipalib/aci.py | Python | gpl-3.0 | 11,029 |
from django.http import HttpResponse
from django.views.generic import View
from RadioCanada.service import NewsService
class Home(View):
    def get(self, request):
return HttpResponse('This is the home page')
class AllNews(View):
def __init__(self):
self.news_service = NewsService()
    def get(self, request):
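        # Delegate retrieval to the service layer and return its result as the response body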
news = self.news_service.get_all()
return HttpResponse(news)
| antoinedube/django-spine-news-display | Server/RadioCanada/views.py | Python | gpl-3.0 | 413 |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
'''
Cart test Case
'''
import unittest
from decimal import Decimal
from nereid.globals import session, current_website
from trytond.tests.test_tryton import USER, DB_NAME, CONTEXT, POOL
from trytond.transaction import Transaction
from test_product import BaseTestCase
class TestCart(BaseTestCase):
"""Test Cart"""
def _create_pricelists(self):
"""
Create the pricelists
"""
# Setup the pricelists
self.party_pl_margin = Decimal('1')
self.guest_pl_margin = Decimal('1')
user_price_list, = self.PriceList.create([{
'name': 'PL 1',
'company': self.company.id,
'lines': [
('create', [{
'formula': 'unit_price * %s' % self.party_pl_margin
}])
],
}])
guest_price_list, = self.PriceList.create([{
'name': 'PL 2',
'company': self.company.id,
'lines': [
('create', [{
'formula': 'unit_price * %s' % self.guest_pl_margin
}])
],
}])
return guest_price_list.id, user_price_list.id
def setup_defaults(self):
super(TestCart, self).setup_defaults()
self.ProductTemplate.write(
[self.template2], {
'list_price': Decimal('10')
}
)
def test_0010_cart_wo_login(self):
"""
Check if cart works without login
* Add 5 units of item to cart
* Check that the number of orders in system is 1
* Check if the lines is 1 for that order
"""
quantity = 5
with Transaction().start(DB_NAME, USER, CONTEXT):
self.setup_defaults()
app = self.get_app()
with app.test_client() as c:
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
c.post(
'/cart/add',
data={
'product': self.product1.id,
'quantity': quantity,
}
)
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
sales = self.Sale.search([])
self.assertEqual(len(sales), 1)
sale = sales[0]
self.assertEqual(len(sale.lines), 1)
self.assertEqual(
sale.lines[0].product, self.product1
)
self.assertEqual(sale.lines[0].quantity, quantity)
def test_0015_cart_duplication(self):
"""
duplicating a webshop order from Tryton should not
make it a cart order.
"""
quantity = 5
with Transaction().start(DB_NAME, USER, CONTEXT):
self.setup_defaults()
app = self.get_app()
with app.test_client() as c:
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
c.post(
'/cart/add',
data={
'product': self.product1.id,
'quantity': quantity,
}
)
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
sales = self.Sale.search([])
self.assertEqual(len(sales), 1)
sale = sales[0]
# Now duplicate the order
new_sale, = self.Sale.copy([sale])
self.assertFalse(new_sale.is_cart)
def test_0020_cart_diff_apps(self):
"""
Call the cart with two different applications
and assert they are different but same empty carts
"""
with Transaction().start(DB_NAME, USER, CONTEXT):
self.setup_defaults()
app = self.get_app()
with app.test_client() as c1:
rv1 = c1.get('/cart')
self.assertEqual(rv1.status_code, 200)
data1 = rv1.data
with app.test_client() as c2:
rv2 = c2.get('/cart')
self.assertEqual(rv2.status_code, 200)
data2 = rv2.data
# Both are empty active records
self.assertTrue(data1 == data2 == 'Cart:None,0,')
def test_0025_cart_diff_apps(self):
"""
Call the cart with two different applications
        and assert they are not equal. They only become different
        once items are added, since each session then gets its own cart.
"""
with Transaction().start(DB_NAME, USER, CONTEXT):
self.setup_defaults()
app = self.get_app()
with app.test_client() as c1:
c1.post(
'/cart/add',
data={
'product': self.product1.id, 'quantity': 5
}
)
rv1 = c1.get('/cart')
self.assertEqual(rv1.status_code, 200)
data1 = rv1.data
with app.test_client() as c2:
c2.post(
'/cart/add',
data={
'product': self.product1.id, 'quantity': 5
}
)
rv2 = c2.get('/cart')
self.assertEqual(rv2.status_code, 200)
data2 = rv2.data
self.assertTrue(data1 != data2)
def test_0030_add_items_n_login(self):
"""User browses cart, adds items and logs in
Expected behaviour : The items in the guest cart is added to the
registered cart of the user upon login
"""
with Transaction().start(DB_NAME, USER, CONTEXT):
self.setup_defaults()
app = self.get_app()
with app.test_client() as c:
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
c.post(
'/cart/add',
data={
'product': self.product1.id, 'quantity': 5
}
)
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
cart_data1 = rv.data[6:]
# Login now and access cart
self.login(c, '[email protected]', 'password')
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
cart_data2 = rv.data[6:]
self.assertEqual(cart_data1, cart_data2)
def test_0035_add_to_cart(self):
"""
Test the add and set modes of add_to_cart
"""
with Transaction().start(DB_NAME, USER, CONTEXT):
self.setup_defaults()
app = self.get_app()
with app.test_client() as c:
self.login(c, '[email protected]', 'password')
c.post(
'/cart/add',
data={
'product': self.product1.id, 'quantity': 7
}
)
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
self.assertEqual(rv.data, 'Cart:1,7,70.00')
c.post('/cart/add', data={
'product': self.product1.id, 'quantity': 7
})
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
self.assertEqual(rv.data, 'Cart:1,7,70.00')
c.post(
'/cart/add',
data={
'product': self.product1.id,
'quantity': 7, 'action': 'add'
}
)
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
self.assertEqual(rv.data, 'Cart:1,14,140.00')
def test_0040_user_logout(self):
"""
        When the user logs out, the guest cart will always be empty
* Login
* Add a product to cart
* Logout
* Check the cart, should have 0 quantity and different cart id
"""
with Transaction().start(DB_NAME, USER, CONTEXT):
self.setup_defaults()
app = self.get_app()
with app.test_client() as c:
self.login(c, '[email protected]', 'password')
c.post(
'/cart/add',
data={
'product': self.product1.id, 'quantity': 7
}
)
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
self.assertEqual(rv.data, 'Cart:1,7,70.00')
response = c.get('/logout')
self.assertEqual(response.status_code, 302)
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
self.assertEqual(rv.data, 'Cart:None,0,')
def test_0050_same_user_two_session(self):
"""
Registered user on two different sessions should see the same cart
"""
with Transaction().start(DB_NAME, USER, CONTEXT):
self.setup_defaults()
app = self.get_app()
with app.test_client() as c:
self.login(c, '[email protected]', 'password2')
rv = c.post(
'/cart/add',
data={
'product': self.product1.id,
'quantity': 6
}
)
self.assertEqual(rv.status_code, 302)
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
self.assertEqual(rv.data, 'Cart:1,6,60.00')
with app.test_client() as c:
self.login(c, '[email protected]', 'password2')
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
self.assertEqual(rv.data, 'Cart:1,6,60.00')
def test_0060_delete_line(self):
"""
Try deleting a line from the cart
"""
with Transaction().start(DB_NAME, USER, CONTEXT):
self.setup_defaults()
app = self.get_app()
with app.test_client() as c:
self.login(c, '[email protected]', 'password2')
# Add 6 of first product
rv = c.post(
'/cart/add',
data={
'product': self.product1.id,
'quantity': 6
}
)
self.assertEqual(rv.status_code, 302)
# Add 10 of next product
rv = c.post(
'/cart/add',
data={
'product': self.template2.products[0].id,
'quantity': 10
}
)
self.assertEqual(rv.status_code, 302)
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
self.assertEqual(rv.data, 'Cart:1,16,160.00')
# Find the line with product1 and delete it
cart = self.Cart(1)
for line in cart.sale.lines:
if line.product.id == self.product1.id:
break
else:
self.fail("Order line not found")
with app.test_client() as c:
self.login(c, '[email protected]', 'password2')
c.post('/cart/delete/%d' % line.id)
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
self.assertEqual(rv.data, 'Cart:1,10,100.00')
# Test that ValueError is not raised if someone tries to delete
# already removed item
c.post('/cart/delete/%d' % line.id)
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
self.assertEqual(rv.data, 'Cart:1,10,100.00')
def test_0070_clear_cart(self):
"""
Clear the cart completely
"""
with Transaction().start(DB_NAME, USER, CONTEXT):
self.setup_defaults()
app = self.get_app()
with app.test_client() as c:
self.login(c, '[email protected]', 'password2')
# Add 6 of first product
rv = c.post(
'/cart/add',
data={
'product': self.product1.id,
'quantity': 6
}
)
self.assertEqual(rv.status_code, 302)
cart = self.Cart(1)
sale = cart.sale.id
with app.test_client() as c:
self.login(c, '[email protected]', 'password2')
c.post('/cart/clear')
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
self.assertEqual(rv.data, 'Cart:None,0,')
self.assertFalse(self.Sale.search([('id', '=', sale)]))
def test_0080_reject_negative_quantity(self):
"""
If a negative quantity is sent to add to cart, then reject it
"""
with Transaction().start(DB_NAME, USER, CONTEXT):
self.setup_defaults()
app = self.get_app()
with app.test_client() as c:
self.login(c, '[email protected]', 'password2')
rv = c.post(
'/cart/add',
data={
'product': self.template2.products[0].id,
'quantity': 10
}
)
self.assertEqual(rv.status_code, 302)
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
self.assertEqual(rv.data, 'Cart:1,10,100.00')
#: Add a negative quantity and nothing should change
rv = c.post(
'/cart/add',
data={
'product': self.template2.products[0].id,
'quantity': -10
}
)
self.assertEqual(rv.status_code, 302)
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
self.assertEqual(rv.data, 'Cart:1,10,100.00')
def test_0090_create_sale_order(self):
"""
Create a sale order and it should work
"""
with Transaction().start(DB_NAME, USER, CONTEXT):
self.setup_defaults()
sale, = self.Sale.create([{
'party': self.registered_user.party.id,
'company': self.company.id,
'currency': self.usd.id,
}])
self.assertEqual(sale.party, self.registered_user.party)
def test_0100_create_draft_sale(self):
"""
Create draft sale method
"""
with Transaction().start(DB_NAME, USER, CONTEXT):
self.setup_defaults()
app = self.get_app()
Cart = POOL.get('nereid.cart')
with app.test_request_context('/'):
# Guest cart
cart, = Cart.create([{
'user': None,
'sessionid': session.sid,
}])
cart.create_draft_sale()
self.assertEqual(
cart.sale.party, current_website.guest_user.party
)
self.assertEqual(
cart.sale.nereid_user, current_website.guest_user
)
def test_0110_cart_cache_header(self):
"""
Ensure that the cart page has a no cache header
"""
with Transaction().start(DB_NAME, USER, CONTEXT):
self.setup_defaults()
app = self.get_app()
with app.test_client() as c:
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
self.assertEqual(rv.headers['Cache-Control'], 'max-age=0')
def test_0120_add_non_salable_product_to_cart(self):
"""
Try to add a non-salable product to cart.
"""
with Transaction().start(DB_NAME, USER, CONTEXT):
self.setup_defaults()
app = self.get_app()
# Make product1 non-salable
self.assertTrue(self.product1.salable)
self.product1.template.salable = False
self.product1.template.save()
with app.test_client() as c:
self.login(c, '[email protected]', 'password')
                # You are adding a non-salable product to the cart
self.assertFalse(self.product1.salable)
rv = c.post(
'/cart/add',
data={
'product': self.product1.id, 'quantity': 7
}
)
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
self.assertEqual(rv.data, 'Cart:1,0,0')
rv = c.get('/')
self.assert_('This product is not for sale' in rv.data)
def test_0130_cart_sale_taxes(self):
"""
Test taxes and sale.refresh_taxes
"""
with Transaction().start(DB_NAME, USER, CONTEXT):
self.setup_defaults()
app = self.get_app()
self.template1.customer_taxes = [self.sale_tax.id]
self.template1.save()
with app.test_client() as c:
self.login(c, '[email protected]', 'password')
c.post(
'/cart/add',
data={
'product': self.product1.id, 'quantity': 7
}
)
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
# 70 (10 x 7) + 3.5 (5% Tax) = 73.50
self.assertEqual(rv.data, 'Cart:1,7,73.50')
c.post('/cart/add', data={
'product': self.product1.id, 'quantity': 7
})
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
# 70 (10 x 7) + 3.5 (5% Tax) = 73.50
self.assertEqual(rv.data, 'Cart:1,7,73.50')
sale, = self.Sale.search([])
sale.refresh_taxes() # Refresh Taxes
self.assertEqual(sale.tax_amount, Decimal('3.50'))
def test_0140_price_change_on_quantity(self):
"""
        Test that the unit price changes with quantity when a price list applies
"""
with Transaction().start(DB_NAME, USER, CONTEXT):
self.setup_defaults()
price_list, = self.PriceList.create([{
'name': 'Crazy Pricelist',
'company': self.company.id,
'lines': [
('create', [{
'product': self.product1.id,
'quantity': 2,
'formula': 'unit_price - 1',
}])
],
}])
self.assertTrue(price_list)
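            # With quantity >= 2 the formula knocks 1 off the unit price (10 -> 9), so two units total 18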
app = self.get_app()
with app.test_client() as c:
self.login(c, '[email protected]', 'password')
c.post(
'/cart/add',
data={
'product': self.product1.id, 'quantity': 1
}
)
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
self.assertEqual(rv.data, 'Cart:1,1,10.00')
sale = self.Sale.search([])
self.assertEqual(len(sale), 1)
sale[0].price_list = price_list
sale[0].save()
self.templates.update({
'shopping-cart.jinja':
'Cart:{{ cart.id }},{{get_cart_size()|round|int}},'
'{{cart.sale.total_amount}},{{get_flashed_messages()}}',
})
c.post(
'/cart/add',
data={
'product': self.product1.id,
'quantity': 1, 'action': 'add'
}
)
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
# Cart total must be 18 and not 20 due to price list
self.assertTrue('Cart:1,2,18.00' in rv.data)
self.assertTrue('dropped from' in rv.data)
# Set quantity back to 1
c.post(
'/cart/add',
data={
'product': self.product1.id,
'quantity': 1, 'action': 'set'
}
)
rv = c.get('/cart')
self.assertEqual(rv.status_code, 200)
# Cart total must be 18 and not 20 due to price list
self.assertTrue('Cart:1,1,10.00' in rv.data)
self.assertTrue('increased from' in rv.data)
def suite():
"Cart test suite"
suite = unittest.TestSuite()
suite.addTests([
unittest.TestLoader().loadTestsFromTestCase(TestCart),
])
return suite
if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite())
| priyankarani/nereid-cart-b2c | tests/test_cart.py | Python | gpl-3.0 | 21,563 |
from __future__ import division, print_function
import numpy as np
from functools import partial
from itertools import product
import warnings
from sklearn import datasets
from sklearn import svm
from sklearn.datasets import make_multilabel_classification
from sklearn.preprocessing import LabelBinarizer, MultiLabelBinarizer
from sklearn.utils.fixes import np_version
from sklearn.utils.validation import check_random_state
from sklearn.utils.testing import assert_raises, clean_warning_registry
from sklearn.utils.testing import assert_raise_message
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_warns
from sklearn.utils.testing import assert_no_warnings
from sklearn.utils.testing import assert_warns_message
from sklearn.utils.testing import assert_not_equal
from sklearn.utils.testing import ignore_warnings
from sklearn.metrics import accuracy_score
from sklearn.metrics import average_precision_score
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
from sklearn.metrics import f1_score
from sklearn.metrics import fbeta_score
from sklearn.metrics import hamming_loss
from sklearn.metrics import hinge_loss
from sklearn.metrics import jaccard_similarity_score
from sklearn.metrics import log_loss
from sklearn.metrics import matthews_corrcoef
from sklearn.metrics import precision_recall_fscore_support
from sklearn.metrics import precision_score
from sklearn.metrics import recall_score
from sklearn.metrics import zero_one_loss
from sklearn.metrics.classification import _check_targets
from sklearn.metrics.base import UndefinedMetricWarning
###############################################################################
# Utilities for testing
def make_prediction(dataset=None, binary=False):
"""Make some classification predictions on a toy dataset using a SVC
If binary is True restrict to a binary classification problem instead of a
multiclass classification problem
"""
if dataset is None:
# import some data to play with
dataset = datasets.load_iris()
X = dataset.data
y = dataset.target
if binary:
# restrict to a binary classification task
X, y = X[y < 2], y[y < 2]
n_samples, n_features = X.shape
p = np.arange(n_samples)
rng = check_random_state(37)
rng.shuffle(p)
X, y = X[p], y[p]
half = int(n_samples / 2)
# add noisy features to make the problem harder and avoid perfect results
rng = np.random.RandomState(0)
X = np.c_[X, rng.randn(n_samples, 200 * n_features)]
# run classifier, get class probabilities and label predictions
clf = svm.SVC(kernel='linear', probability=True, random_state=0)
probas_pred = clf.fit(X[:half], y[:half]).predict_proba(X[half:])
if binary:
# only interested in probabilities of the positive case
# XXX: do we really want a special API for the binary case?
probas_pred = probas_pred[:, 1]
y_pred = clf.predict(X[half:])
y_true = y[half:]
return y_true, y_pred, probas_pred
###############################################################################
# Tests
def test_multilabel_accuracy_score_subset_accuracy():
# Dense label indicator matrix format
y1 = np.array([[0, 1, 1], [1, 0, 1]])
y2 = np.array([[0, 0, 1], [1, 0, 1]])
assert_equal(accuracy_score(y1, y2), 0.5)
assert_equal(accuracy_score(y1, y1), 1)
assert_equal(accuracy_score(y2, y2), 1)
assert_equal(accuracy_score(y2, np.logical_not(y2)), 0)
assert_equal(accuracy_score(y1, np.logical_not(y1)), 0)
assert_equal(accuracy_score(y1, np.zeros(y1.shape)), 0)
assert_equal(accuracy_score(y2, np.zeros(y1.shape)), 0)
with ignore_warnings(): # sequence of sequences is deprecated
# List of tuple of label
y1 = [(1, 2,), (0, 2,)]
y2 = [(2,), (0, 2,)]
assert_equal(accuracy_score(y1, y2), 0.5)
assert_equal(accuracy_score(y1, y1), 1)
assert_equal(accuracy_score(y2, y2), 1)
assert_equal(accuracy_score(y2, [(), ()]), 0)
assert_equal(accuracy_score(y1, y2, normalize=False), 1)
assert_equal(accuracy_score(y1, y1, normalize=False), 2)
assert_equal(accuracy_score(y2, y2, normalize=False), 2)
assert_equal(accuracy_score(y2, [(), ()], normalize=False), 0)
def test_precision_recall_f1_score_binary():
"""Test Precision Recall and F1 Score for binary classification task"""
y_true, y_pred, _ = make_prediction(binary=True)
# detailed measures for each class
p, r, f, s = precision_recall_fscore_support(y_true, y_pred, average=None)
assert_array_almost_equal(p, [0.73, 0.85], 2)
assert_array_almost_equal(r, [0.88, 0.68], 2)
assert_array_almost_equal(f, [0.80, 0.76], 2)
assert_array_equal(s, [25, 25])
# individual scoring function that can be used for grid search: in the
# binary class case the score is the value of the measure for the positive
# class (e.g. label == 1)
ps = precision_score(y_true, y_pred)
assert_array_almost_equal(ps, 0.85, 2)
rs = recall_score(y_true, y_pred)
assert_array_almost_equal(rs, 0.68, 2)
fs = f1_score(y_true, y_pred)
assert_array_almost_equal(fs, 0.76, 2)
assert_almost_equal(fbeta_score(y_true, y_pred, beta=2),
(1 + 2 ** 2) * ps * rs / (2 ** 2 * ps + rs), 2)
@ignore_warnings
def test_precision_recall_f_binary_single_class():
"""Test precision, recall and F1 score behave with a single positive or
negative class
Such a case may occur with non-stratified cross-validation"""
assert_equal(1., precision_score([1, 1], [1, 1]))
assert_equal(1., recall_score([1, 1], [1, 1]))
assert_equal(1., f1_score([1, 1], [1, 1]))
assert_equal(0., precision_score([-1, -1], [-1, -1]))
assert_equal(0., recall_score([-1, -1], [-1, -1]))
assert_equal(0., f1_score([-1, -1], [-1, -1]))
def test_average_precision_score_score_non_binary_class():
"""Test that average_precision_score function returns an error when trying
to compute average_precision_score for multiclass task.
"""
rng = check_random_state(404)
y_pred = rng.rand(10)
# y_true contains three different class values
y_true = rng.randint(0, 3, size=10)
assert_raise_message(ValueError, "multiclass format is not supported",
average_precision_score, y_true, y_pred)
def test_average_precision_score_duplicate_values():
# Duplicate values with precision-recall require a different
# processing than when computing the AUC of a ROC, because the
# precision-recall curve is a decreasing curve
# The following situtation corresponds to a perfect
# test statistic, the average_precision_score should be 1
y_true = [0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1]
y_score = [0, .1, .1, .4, .5, .6, .6, .9, .9, 1, 1]
assert_equal(average_precision_score(y_true, y_score), 1)
def test_average_precision_score_tied_values():
# Here if we go from left to right in y_true, the 0 values are
    # separated from the 1 values, so it appears that we've
# Correctly sorted our classifications. But in fact the first two
# values have the same score (0.5) and so the first two values
# could be swapped around, creating an imperfect sorting. This
# imperfection should come through in the end score, making it less
# than one.
y_true = [0, 1, 1]
y_score = [.5, .5, .6]
assert_not_equal(average_precision_score(y_true, y_score), 1.)
def test_precision_recall_fscore_support_errors():
y_true, y_pred, _ = make_prediction(binary=True)
# Bad beta
assert_raises(ValueError, precision_recall_fscore_support,
y_true, y_pred, beta=0.0)
# Bad pos_label
assert_raises(ValueError, precision_recall_fscore_support,
y_true, y_pred, pos_label=2, average='macro')
# Bad average option
assert_raises(ValueError, precision_recall_fscore_support,
[0, 1, 2], [1, 2, 0], average='mega')
def test_confusion_matrix_binary():
"""Test confusion matrix - binary classification case"""
y_true, y_pred, _ = make_prediction(binary=True)
def test(y_true, y_pred):
cm = confusion_matrix(y_true, y_pred)
assert_array_equal(cm, [[22, 3], [8, 17]])
tp, fp, fn, tn = cm.flatten()
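        # Recompute the Matthews correlation coefficient by hand from the flattened confusion matrix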
num = (tp * tn - fp * fn)
den = np.sqrt((tp + fp) * (tp + fn) * (tn + fp) * (tn + fn))
true_mcc = 0 if den == 0 else num / den
mcc = matthews_corrcoef(y_true, y_pred)
assert_array_almost_equal(mcc, true_mcc, decimal=2)
assert_array_almost_equal(mcc, 0.57, decimal=2)
test(y_true, y_pred)
test([str(y) for y in y_true],
[str(y) for y in y_pred])
@ignore_warnings
def test_matthews_corrcoef_nan():
assert_equal(matthews_corrcoef([0], [1]), 0.0)
assert_equal(matthews_corrcoef([0, 0], [0, 1]), 0.0)
def test_precision_recall_f1_score_multiclass():
"""Test Precision Recall and F1 Score for multiclass classification task"""
y_true, y_pred, _ = make_prediction(binary=False)
# compute scores with default labels introspection
p, r, f, s = precision_recall_fscore_support(y_true, y_pred, average=None)
assert_array_almost_equal(p, [0.83, 0.33, 0.42], 2)
assert_array_almost_equal(r, [0.79, 0.09, 0.90], 2)
assert_array_almost_equal(f, [0.81, 0.15, 0.57], 2)
assert_array_equal(s, [24, 31, 20])
# averaging tests
ps = precision_score(y_true, y_pred, pos_label=1, average='micro')
assert_array_almost_equal(ps, 0.53, 2)
rs = recall_score(y_true, y_pred, average='micro')
assert_array_almost_equal(rs, 0.53, 2)
fs = f1_score(y_true, y_pred, average='micro')
assert_array_almost_equal(fs, 0.53, 2)
ps = precision_score(y_true, y_pred, average='macro')
assert_array_almost_equal(ps, 0.53, 2)
rs = recall_score(y_true, y_pred, average='macro')
assert_array_almost_equal(rs, 0.60, 2)
fs = f1_score(y_true, y_pred, average='macro')
assert_array_almost_equal(fs, 0.51, 2)
ps = precision_score(y_true, y_pred, average='weighted')
assert_array_almost_equal(ps, 0.51, 2)
rs = recall_score(y_true, y_pred, average='weighted')
assert_array_almost_equal(rs, 0.53, 2)
fs = f1_score(y_true, y_pred, average='weighted')
assert_array_almost_equal(fs, 0.47, 2)
assert_raises(ValueError, precision_score, y_true, y_pred,
average="samples")
assert_raises(ValueError, recall_score, y_true, y_pred, average="samples")
assert_raises(ValueError, f1_score, y_true, y_pred, average="samples")
assert_raises(ValueError, fbeta_score, y_true, y_pred, average="samples",
beta=0.5)
    # same prediction but with an explicit label ordering
p, r, f, s = precision_recall_fscore_support(
y_true, y_pred, labels=[0, 2, 1], average=None)
assert_array_almost_equal(p, [0.83, 0.41, 0.33], 2)
assert_array_almost_equal(r, [0.79, 0.90, 0.10], 2)
assert_array_almost_equal(f, [0.81, 0.57, 0.15], 2)
assert_array_equal(s, [24, 20, 31])
def test_precision_recall_f1_score_multiclass_pos_label_none():
"""Test Precision Recall and F1 Score for multiclass classification task
GH Issue #1296
"""
# initialize data
y_true = np.array([0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1])
y_pred = np.array([1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1])
# compute scores with default labels introspection
p, r, f, s = precision_recall_fscore_support(y_true, y_pred,
pos_label=None,
average='weighted')
def test_zero_precision_recall():
"""Check that pathological cases do not bring NaNs"""
old_error_settings = np.seterr(all='raise')
try:
y_true = np.array([0, 1, 2, 0, 1, 2])
y_pred = np.array([2, 0, 1, 1, 2, 0])
assert_almost_equal(precision_score(y_true, y_pred,
average='weighted'), 0.0, 2)
assert_almost_equal(recall_score(y_true, y_pred, average='weighted'),
0.0, 2)
assert_almost_equal(f1_score(y_true, y_pred, average='weighted'),
0.0, 2)
finally:
np.seterr(**old_error_settings)
def test_confusion_matrix_multiclass():
"""Test confusion matrix - multi-class case"""
y_true, y_pred, _ = make_prediction(binary=False)
def test(y_true, y_pred, string_type=False):
# compute confusion matrix with default labels introspection
cm = confusion_matrix(y_true, y_pred)
assert_array_equal(cm, [[19, 4, 1],
[4, 3, 24],
[0, 2, 18]])
# compute confusion matrix with explicit label ordering
labels = ['0', '2', '1'] if string_type else [0, 2, 1]
cm = confusion_matrix(y_true,
y_pred,
labels=labels)
assert_array_equal(cm, [[19, 1, 4],
[0, 18, 2],
[4, 24, 3]])
test(y_true, y_pred)
test(list(str(y) for y in y_true),
list(str(y) for y in y_pred),
string_type=True)
def test_confusion_matrix_multiclass_subset_labels():
"""Test confusion matrix - multi-class case with subset of labels"""
y_true, y_pred, _ = make_prediction(binary=False)
# compute confusion matrix with only first two labels considered
cm = confusion_matrix(y_true, y_pred, labels=[0, 1])
assert_array_equal(cm, [[19, 4],
[4, 3]])
# compute confusion matrix with explicit label ordering for only subset
# of labels
cm = confusion_matrix(y_true, y_pred, labels=[2, 1])
assert_array_equal(cm, [[18, 2],
[24, 3]])
def test_classification_report_multiclass():
"""Test performance report"""
iris = datasets.load_iris()
y_true, y_pred, _ = make_prediction(dataset=iris, binary=False)
# print classification report with class names
expected_report = """\
precision recall f1-score support
setosa 0.83 0.79 0.81 24
versicolor 0.33 0.10 0.15 31
virginica 0.42 0.90 0.57 20
avg / total 0.51 0.53 0.47 75
"""
report = classification_report(
y_true, y_pred, labels=np.arange(len(iris.target_names)),
target_names=iris.target_names)
assert_equal(report, expected_report)
# print classification report with label detection
expected_report = """\
precision recall f1-score support
0 0.83 0.79 0.81 24
1 0.33 0.10 0.15 31
2 0.42 0.90 0.57 20
avg / total 0.51 0.53 0.47 75
"""
report = classification_report(y_true, y_pred)
assert_equal(report, expected_report)
def test_classification_report_multiclass_with_digits():
"""Test performance report with added digits in floating point values"""
iris = datasets.load_iris()
y_true, y_pred, _ = make_prediction(dataset=iris, binary=False)
# print classification report with class names
expected_report = """\
precision recall f1-score support
setosa 0.82609 0.79167 0.80851 24
versicolor 0.33333 0.09677 0.15000 31
virginica 0.41860 0.90000 0.57143 20
avg / total 0.51375 0.53333 0.47310 75
"""
report = classification_report(
y_true, y_pred, labels=np.arange(len(iris.target_names)),
target_names=iris.target_names, digits=5)
assert_equal(report, expected_report)
# print classification report with label detection
expected_report = """\
precision recall f1-score support
0 0.83 0.79 0.81 24
1 0.33 0.10 0.15 31
2 0.42 0.90 0.57 20
avg / total 0.51 0.53 0.47 75
"""
report = classification_report(y_true, y_pred)
assert_equal(report, expected_report)
def test_classification_report_multiclass_with_string_label():
y_true, y_pred, _ = make_prediction(binary=False)
y_true = np.array(["blue", "green", "red"])[y_true]
y_pred = np.array(["blue", "green", "red"])[y_pred]
expected_report = """\
precision recall f1-score support
blue 0.83 0.79 0.81 24
green 0.33 0.10 0.15 31
red 0.42 0.90 0.57 20
avg / total 0.51 0.53 0.47 75
"""
report = classification_report(y_true, y_pred)
assert_equal(report, expected_report)
expected_report = """\
precision recall f1-score support
a 0.83 0.79 0.81 24
b 0.33 0.10 0.15 31
c 0.42 0.90 0.57 20
avg / total 0.51 0.53 0.47 75
"""
report = classification_report(y_true, y_pred,
target_names=["a", "b", "c"])
assert_equal(report, expected_report)
def test_classification_report_multiclass_with_unicode_label():
y_true, y_pred, _ = make_prediction(binary=False)
labels = np.array([u"blue\xa2", u"green\xa2", u"red\xa2"])
y_true = labels[y_true]
y_pred = labels[y_pred]
expected_report = u"""\
precision recall f1-score support
blue\xa2 0.83 0.79 0.81 24
green\xa2 0.33 0.10 0.15 31
red\xa2 0.42 0.90 0.57 20
avg / total 0.51 0.53 0.47 75
"""
if np_version[:3] < (1, 7, 0):
expected_message = ("NumPy < 1.7.0 does not implement"
" searchsorted on unicode data correctly.")
assert_raise_message(RuntimeError, expected_message,
classification_report, y_true, y_pred)
else:
report = classification_report(y_true, y_pred)
assert_equal(report, expected_report)
@ignore_warnings # sequence of sequences is deprecated
def test_multilabel_classification_report():
n_classes = 4
n_samples = 50
make_ml = make_multilabel_classification
_, y_true_ll = make_ml(n_features=1, n_classes=n_classes, random_state=0,
n_samples=n_samples)
_, y_pred_ll = make_ml(n_features=1, n_classes=n_classes, random_state=1,
n_samples=n_samples)
expected_report = """\
precision recall f1-score support
0 0.50 0.67 0.57 24
1 0.51 0.74 0.61 27
2 0.29 0.08 0.12 26
3 0.52 0.56 0.54 27
avg / total 0.45 0.51 0.46 104
"""
lb = MultiLabelBinarizer()
lb.fit([range(4)])
y_true_bi = lb.transform(y_true_ll)
y_pred_bi = lb.transform(y_pred_ll)
for y_true, y_pred in [(y_true_ll, y_pred_ll), (y_true_bi, y_pred_bi)]:
report = classification_report(y_true, y_pred)
assert_equal(report, expected_report)
def test_multilabel_zero_one_loss_subset():
# Dense label indicator matrix format
y1 = np.array([[0, 1, 1], [1, 0, 1]])
y2 = np.array([[0, 0, 1], [1, 0, 1]])
assert_equal(zero_one_loss(y1, y2), 0.5)
assert_equal(zero_one_loss(y1, y1), 0)
assert_equal(zero_one_loss(y2, y2), 0)
assert_equal(zero_one_loss(y2, np.logical_not(y2)), 1)
assert_equal(zero_one_loss(y1, np.logical_not(y1)), 1)
assert_equal(zero_one_loss(y1, np.zeros(y1.shape)), 1)
assert_equal(zero_one_loss(y2, np.zeros(y1.shape)), 1)
with ignore_warnings(): # sequence of sequences is deprecated
# List of tuple of label
y1 = [(1, 2,), (0, 2,)]
y2 = [(2,), (0, 2,)]
assert_equal(zero_one_loss(y1, y2), 0.5)
assert_equal(zero_one_loss(y1, y1), 0)
assert_equal(zero_one_loss(y2, y2), 0)
assert_equal(zero_one_loss(y2, [(), ()]), 1)
assert_equal(zero_one_loss(y2, [tuple(), (10, )]), 1)
def test_multilabel_hamming_loss():
# Dense label indicator matrix format
y1 = np.array([[0, 1, 1], [1, 0, 1]])
y2 = np.array([[0, 0, 1], [1, 0, 1]])
assert_equal(hamming_loss(y1, y2), 1 / 6)
assert_equal(hamming_loss(y1, y1), 0)
assert_equal(hamming_loss(y2, y2), 0)
assert_equal(hamming_loss(y2, np.logical_not(y2)), 1)
assert_equal(hamming_loss(y1, np.logical_not(y1)), 1)
assert_equal(hamming_loss(y1, np.zeros(y1.shape)), 4 / 6)
assert_equal(hamming_loss(y2, np.zeros(y1.shape)), 0.5)
with ignore_warnings(): # sequence of sequences is deprecated
# List of tuple of label
y1 = [(1, 2,), (0, 2,)]
y2 = [(2,), (0, 2,)]
assert_equal(hamming_loss(y1, y2), 1 / 6)
assert_equal(hamming_loss(y1, y1), 0)
assert_equal(hamming_loss(y2, y2), 0)
assert_equal(hamming_loss(y2, [(), ()]), 0.75)
assert_equal(hamming_loss(y1, [tuple(), (10, )]), 0.625)
assert_almost_equal(hamming_loss(y2, [tuple(), (10, )],
classes=np.arange(11)), 0.1818, 2)
def test_multilabel_jaccard_similarity_score():
# Dense label indicator matrix format
y1 = np.array([[0, 1, 1], [1, 0, 1]])
y2 = np.array([[0, 0, 1], [1, 0, 1]])
# size(y1 \inter y2) = [1, 2]
# size(y1 \union y2) = [2, 2]
assert_equal(jaccard_similarity_score(y1, y2), 0.75)
assert_equal(jaccard_similarity_score(y1, y1), 1)
assert_equal(jaccard_similarity_score(y2, y2), 1)
assert_equal(jaccard_similarity_score(y2, np.logical_not(y2)), 0)
assert_equal(jaccard_similarity_score(y1, np.logical_not(y1)), 0)
assert_equal(jaccard_similarity_score(y1, np.zeros(y1.shape)), 0)
assert_equal(jaccard_similarity_score(y2, np.zeros(y1.shape)), 0)
with ignore_warnings(): # sequence of sequences is deprecated
# List of tuple of label
y1 = [(1, 2,), (0, 2,)]
y2 = [(2,), (0, 2,)]
assert_equal(jaccard_similarity_score(y1, y2), 0.75)
assert_equal(jaccard_similarity_score(y1, y1), 1)
assert_equal(jaccard_similarity_score(y2, y2), 1)
assert_equal(jaccard_similarity_score(y2, [(), ()]), 0)
# |y3 inter y4 | = [0, 1, 1]
# |y3 union y4 | = [2, 1, 3]
y3 = [(0,), (1,), (3,)]
y4 = [(4,), (4,), (5, 6)]
assert_almost_equal(jaccard_similarity_score(y3, y4), 0)
# |y5 inter y6 | = [0, 1, 1]
# |y5 union y6 | = [2, 1, 3]
y5 = [(0,), (1,), (2, 3)]
y6 = [(1,), (1,), (2, 0)]
assert_almost_equal(jaccard_similarity_score(y5, y6), (1 + 1 / 3) / 3)
@ignore_warnings
def test_precision_recall_f1_score_multilabel_1():
""" Test precision_recall_f1_score on a crafted multilabel example
"""
# First crafted example
y_true_ll = [(0,), (1,), (2, 3)]
y_pred_ll = [(1,), (1,), (2, 0)]
lb = LabelBinarizer()
lb.fit([range(4)])
y_true_bi = lb.transform(y_true_ll)
y_pred_bi = lb.transform(y_pred_ll)
for y_true, y_pred in [(y_true_ll, y_pred_ll), (y_true_bi, y_pred_bi)]:
p, r, f, s = precision_recall_fscore_support(y_true, y_pred,
average=None)
#tp = [0, 1, 1, 0]
#fn = [1, 0, 0, 1]
#fp = [1, 1, 0, 0]
# Check per class
assert_array_almost_equal(p, [0.0, 0.5, 1.0, 0.0], 2)
assert_array_almost_equal(r, [0.0, 1.0, 1.0, 0.0], 2)
assert_array_almost_equal(f, [0.0, 1 / 1.5, 1, 0.0], 2)
assert_array_almost_equal(s, [1, 1, 1, 1], 2)
f2 = fbeta_score(y_true, y_pred, beta=2, average=None)
support = s
assert_array_almost_equal(f2, [0, 0.83, 1, 0], 2)
# Check macro
p, r, f, s = precision_recall_fscore_support(y_true, y_pred,
average="macro")
assert_almost_equal(p, 1.5 / 4)
assert_almost_equal(r, 0.5)
assert_almost_equal(f, 2.5 / 1.5 * 0.25)
assert_equal(s, None)
assert_almost_equal(fbeta_score(y_true, y_pred, beta=2,
average="macro"),
np.mean(f2))
# Check micro
p, r, f, s = precision_recall_fscore_support(y_true, y_pred,
average="micro")
assert_almost_equal(p, 0.5)
assert_almost_equal(r, 0.5)
assert_almost_equal(f, 0.5)
assert_equal(s, None)
assert_almost_equal(fbeta_score(y_true, y_pred, beta=2,
average="micro"),
(1 + 4) * p * r / (4 * p + r))
        # Check weighted
p, r, f, s = precision_recall_fscore_support(y_true, y_pred,
average="weighted")
assert_almost_equal(p, 1.5 / 4)
assert_almost_equal(r, 0.5)
assert_almost_equal(f, 2.5 / 1.5 * 0.25)
assert_equal(s, None)
assert_almost_equal(fbeta_score(y_true, y_pred, beta=2,
average="weighted"),
np.average(f2, weights=support))
        # Check samples
# |h(x_i) inter y_i | = [0, 1, 1]
# |y_i| = [1, 1, 2]
# |h(x_i)| = [1, 1, 2]
p, r, f, s = precision_recall_fscore_support(y_true, y_pred,
average="samples")
assert_almost_equal(p, 0.5)
assert_almost_equal(r, 0.5)
assert_almost_equal(f, 0.5)
assert_equal(s, None)
assert_almost_equal(fbeta_score(y_true, y_pred, beta=2,
average="samples"),
0.5)
@ignore_warnings
def test_precision_recall_f1_score_multilabel_2():
""" Test precision_recall_f1_score on a crafted multilabel example 2
"""
# Second crafted example
y_true_ll = [(1,), (2,), (2, 3)]
y_pred_ll = [(4,), (4,), (2, 1)]
lb = LabelBinarizer()
lb.fit([range(1, 5)])
y_true_bi = lb.transform(y_true_ll)
y_pred_bi = lb.transform(y_pred_ll)
for y_true, y_pred in [(y_true_ll, y_pred_ll), (y_true_bi, y_pred_bi)]:
# tp = [ 0. 1. 0. 0.]
# fp = [ 1. 0. 0. 2.]
# fn = [ 1. 1. 1. 0.]
p, r, f, s = precision_recall_fscore_support(y_true, y_pred,
average=None)
assert_array_almost_equal(p, [0.0, 1.0, 0.0, 0.0], 2)
assert_array_almost_equal(r, [0.0, 0.5, 0.0, 0.0], 2)
assert_array_almost_equal(f, [0.0, 0.66, 0.0, 0.0], 2)
assert_array_almost_equal(s, [1, 2, 1, 0], 2)
f2 = fbeta_score(y_true, y_pred, beta=2, average=None)
support = s
assert_array_almost_equal(f2, [0, 0.55, 0, 0], 2)
p, r, f, s = precision_recall_fscore_support(y_true, y_pred,
average="micro")
assert_almost_equal(p, 0.25)
assert_almost_equal(r, 0.25)
assert_almost_equal(f, 2 * 0.25 * 0.25 / 0.5)
assert_equal(s, None)
assert_almost_equal(fbeta_score(y_true, y_pred, beta=2,
average="micro"),
(1 + 4) * p * r / (4 * p + r))
p, r, f, s = precision_recall_fscore_support(y_true, y_pred,
average="macro")
assert_almost_equal(p, 0.25)
assert_almost_equal(r, 0.125)
assert_almost_equal(f, 2 / 12)
assert_equal(s, None)
assert_almost_equal(fbeta_score(y_true, y_pred, beta=2,
average="macro"),
np.mean(f2))
p, r, f, s = precision_recall_fscore_support(y_true, y_pred,
average="weighted")
assert_almost_equal(p, 2 / 4)
assert_almost_equal(r, 1 / 4)
assert_almost_equal(f, 2 / 3 * 2 / 4)
assert_equal(s, None)
assert_almost_equal(fbeta_score(y_true, y_pred, beta=2,
average="weighted"),
np.average(f2, weights=support))
p, r, f, s = precision_recall_fscore_support(y_true, y_pred,
average="samples")
        # Check samples
# |h(x_i) inter y_i | = [0, 0, 1]
# |y_i| = [1, 1, 2]
# |h(x_i)| = [1, 1, 2]
assert_almost_equal(p, 1 / 6)
assert_almost_equal(r, 1 / 6)
assert_almost_equal(f, 2 / 4 * 1 / 3)
assert_equal(s, None)
assert_almost_equal(fbeta_score(y_true, y_pred, beta=2,
average="samples"),
0.1666, 2)
@ignore_warnings
def test_precision_recall_f1_score_with_an_empty_prediction():
y_true_ll = [(1,), (0,), (2, 1,)]
y_pred_ll = [tuple(), (3,), (2, 1)]
lb = LabelBinarizer()
lb.fit([range(4)])
y_true_bi = lb.transform(y_true_ll)
y_pred_bi = lb.transform(y_pred_ll)
for y_true, y_pred in [(y_true_ll, y_pred_ll), (y_true_bi, y_pred_bi)]:
# true_pos = [ 0. 1. 1. 0.]
# false_pos = [ 0. 0. 0. 1.]
# false_neg = [ 1. 1. 0. 0.]
p, r, f, s = precision_recall_fscore_support(y_true, y_pred,
average=None)
assert_array_almost_equal(p, [0.0, 1.0, 1.0, 0.0], 2)
assert_array_almost_equal(r, [0.0, 0.5, 1.0, 0.0], 2)
assert_array_almost_equal(f, [0.0, 1 / 1.5, 1, 0.0], 2)
assert_array_almost_equal(s, [1, 2, 1, 0], 2)
f2 = fbeta_score(y_true, y_pred, beta=2, average=None)
support = s
assert_array_almost_equal(f2, [0, 0.55, 1, 0], 2)
p, r, f, s = precision_recall_fscore_support(y_true, y_pred,
average="macro")
assert_almost_equal(p, 0.5)
assert_almost_equal(r, 1.5 / 4)
assert_almost_equal(f, 2.5 / (4 * 1.5))
assert_equal(s, None)
assert_almost_equal(fbeta_score(y_true, y_pred, beta=2,
average="macro"),
np.mean(f2))
p, r, f, s = precision_recall_fscore_support(y_true, y_pred,
average="micro")
assert_almost_equal(p, 2 / 3)
assert_almost_equal(r, 0.5)
assert_almost_equal(f, 2 / 3 / (2 / 3 + 0.5))
assert_equal(s, None)
assert_almost_equal(fbeta_score(y_true, y_pred, beta=2,
average="micro"),
(1 + 4) * p * r / (4 * p + r))
p, r, f, s = precision_recall_fscore_support(y_true, y_pred,
average="weighted")
assert_almost_equal(p, 3 / 4)
assert_almost_equal(r, 0.5)
assert_almost_equal(f, (2 / 1.5 + 1) / 4)
assert_equal(s, None)
assert_almost_equal(fbeta_score(y_true, y_pred, beta=2,
average="weighted"),
np.average(f2, weights=support))
p, r, f, s = precision_recall_fscore_support(y_true, y_pred,
average="samples")
# |h(x_i) inter y_i | = [0, 0, 2]
# |y_i| = [1, 1, 2]
# |h(x_i)| = [0, 1, 2]
assert_almost_equal(p, 1 / 3)
assert_almost_equal(r, 1 / 3)
assert_almost_equal(f, 1 / 3)
assert_equal(s, None)
assert_almost_equal(fbeta_score(y_true, y_pred, beta=2,
average="samples"),
0.333, 2)
def test_precision_recall_f1_no_labels():
y_true = np.zeros((20, 3))
y_pred = np.zeros_like(y_true)
# tp = [0, 0, 0]
# fn = [0, 0, 0]
# fp = [0, 0, 0]
# support = [0, 0, 0]
# |y_hat_i inter y_i | = [0, 0, 0]
# |y_i| = [0, 0, 0]
# |y_hat_i| = [0, 0, 0]
for beta in [1]:
p, r, f, s = assert_warns(UndefinedMetricWarning,
precision_recall_fscore_support,
y_true, y_pred, average=None, beta=beta)
assert_array_almost_equal(p, [0, 0, 0], 2)
assert_array_almost_equal(r, [0, 0, 0], 2)
assert_array_almost_equal(f, [0, 0, 0], 2)
assert_array_almost_equal(s, [0, 0, 0], 2)
fbeta = assert_warns(UndefinedMetricWarning, fbeta_score,
y_true, y_pred, beta=beta, average=None)
assert_array_almost_equal(fbeta, [0, 0, 0], 2)
for average in ["macro", "micro", "weighted", "samples"]:
p, r, f, s = assert_warns(UndefinedMetricWarning,
precision_recall_fscore_support,
y_true, y_pred, average=average,
beta=beta)
assert_almost_equal(p, 0)
assert_almost_equal(r, 0)
assert_almost_equal(f, 0)
assert_equal(s, None)
fbeta = assert_warns(UndefinedMetricWarning, fbeta_score,
y_true, y_pred,
beta=beta, average=average)
assert_almost_equal(fbeta, 0)
def test_prf_warnings():
# average of per-label scores
f, w = precision_recall_fscore_support, UndefinedMetricWarning
my_assert = assert_warns_message
for average in [None, 'weighted', 'macro']:
msg = ('Precision and F-score are ill-defined and '
'being set to 0.0 in labels with no predicted samples.')
my_assert(w, msg, f, [0, 1, 2], [1, 1, 2], average=average)
msg = ('Recall and F-score are ill-defined and '
'being set to 0.0 in labels with no true samples.')
my_assert(w, msg, f, [1, 1, 2], [0, 1, 2], average=average)
# average of per-sample scores
msg = ('Precision and F-score are ill-defined and '
'being set to 0.0 in samples with no predicted labels.')
my_assert(w, msg, f, np.array([[1, 0], [1, 0]]),
np.array([[1, 0], [0, 0]]), average='samples')
msg = ('Recall and F-score are ill-defined and '
'being set to 0.0 in samples with no true labels.')
my_assert(w, msg, f, np.array([[1, 0], [0, 0]]),
np.array([[1, 0], [1, 0]]),
average='samples')
# single score: micro-average
msg = ('Precision and F-score are ill-defined and '
'being set to 0.0 due to no predicted samples.')
my_assert(w, msg, f, np.array([[1, 1], [1, 1]]),
np.array([[0, 0], [0, 0]]), average='micro')
msg = ('Recall and F-score are ill-defined and '
'being set to 0.0 due to no true samples.')
my_assert(w, msg, f, np.array([[0, 0], [0, 0]]),
np.array([[1, 1], [1, 1]]), average='micro')
    # single positive label
msg = ('Precision and F-score are ill-defined and '
'being set to 0.0 due to no predicted samples.')
my_assert(w, msg, f, [1, 1], [-1, -1], average='macro')
msg = ('Recall and F-score are ill-defined and '
'being set to 0.0 due to no true samples.')
my_assert(w, msg, f, [-1, -1], [1, 1], average='macro')
def test_recall_warnings():
assert_no_warnings(recall_score,
np.array([[1, 1], [1, 1]]),
np.array([[0, 0], [0, 0]]),
average='micro')
clean_warning_registry()
with warnings.catch_warnings(record=True) as record:
warnings.simplefilter('always')
recall_score(np.array([[0, 0], [0, 0]]),
np.array([[1, 1], [1, 1]]),
average='micro')
assert_equal(str(record.pop().message),
'Recall is ill-defined and '
'being set to 0.0 due to no true samples.')
def test_precision_warnings():
clean_warning_registry()
with warnings.catch_warnings(record=True) as record:
warnings.simplefilter('always')
precision_score(np.array([[1, 1], [1, 1]]),
np.array([[0, 0], [0, 0]]),
average='micro')
assert_equal(str(record.pop().message),
'Precision is ill-defined and '
'being set to 0.0 due to no predicted samples.')
assert_no_warnings(precision_score,
np.array([[0, 0], [0, 0]]),
np.array([[1, 1], [1, 1]]),
average='micro')
def test_fscore_warnings():
clean_warning_registry()
with warnings.catch_warnings(record=True) as record:
warnings.simplefilter('always')
for score in [f1_score, partial(fbeta_score, beta=2)]:
score(np.array([[1, 1], [1, 1]]),
np.array([[0, 0], [0, 0]]),
average='micro')
assert_equal(str(record.pop().message),
'F-score is ill-defined and '
'being set to 0.0 due to no predicted samples.')
score(np.array([[0, 0], [0, 0]]),
np.array([[1, 1], [1, 1]]),
average='micro')
assert_equal(str(record.pop().message),
'F-score is ill-defined and '
'being set to 0.0 due to no true samples.')
def test_prf_average_compat():
"""Ensure warning if f1_score et al.'s average is implicit for multiclass
"""
y_true = [1, 2, 3, 3]
y_pred = [1, 2, 3, 1]
for metric in [precision_score, recall_score, f1_score,
partial(fbeta_score, beta=2)]:
score = assert_warns(DeprecationWarning, metric, y_true, y_pred)
score_weighted = assert_no_warnings(metric, y_true, y_pred,
average='weighted')
assert_equal(score, score_weighted,
'average does not act like "weighted" by default')
# check binary passes without warning
assert_no_warnings(metric, [0, 1, 1], [0, 1, 0])
@ignore_warnings # sequence of sequences is deprecated
def test__check_targets():
"""Check that _check_targets correctly merges target types, squeezes
output and fails if input lengths differ."""
IND = 'multilabel-indicator'
SEQ = 'multilabel-sequences'
MC = 'multiclass'
BIN = 'binary'
CNT = 'continuous'
MMC = 'multiclass-multioutput'
MCN = 'continuous-multioutput'
# all of length 3
EXAMPLES = [
(IND, np.array([[0, 1, 1], [1, 0, 0], [0, 0, 1]])),
# must not be considered binary
(IND, np.array([[0, 1], [1, 0], [1, 1]])),
(SEQ, [[2, 3], [1], [3]]),
(MC, [2, 3, 1]),
(BIN, [0, 1, 1]),
(CNT, [0., 1.5, 1.]),
(MC, np.array([[2], [3], [1]])),
(BIN, np.array([[0], [1], [1]])),
(CNT, np.array([[0.], [1.5], [1.]])),
(MMC, np.array([[0, 2], [1, 3], [2, 3]])),
(MCN, np.array([[0.5, 2.], [1.1, 3.], [2., 3.]])),
]
# expected type given input types, or None for error
# (types will be tried in either order)
EXPECTED = {
(IND, IND): IND,
(SEQ, SEQ): IND,
(MC, MC): MC,
(BIN, BIN): BIN,
(IND, SEQ): None,
(MC, SEQ): None,
(BIN, SEQ): None,
(MC, IND): None,
(BIN, IND): None,
(BIN, MC): MC,
# Disallowed types
(CNT, CNT): None,
(MMC, MMC): None,
(MCN, MCN): None,
(IND, CNT): None,
(SEQ, CNT): None,
(MC, CNT): None,
(BIN, CNT): None,
(MMC, CNT): None,
(MCN, CNT): None,
(IND, MMC): None,
(SEQ, MMC): None,
(MC, MMC): None,
(BIN, MMC): None,
(MCN, MMC): None,
(IND, MCN): None,
(SEQ, MCN): None,
(MC, MCN): None,
(BIN, MCN): None,
}
for (type1, y1), (type2, y2) in product(EXAMPLES, repeat=2):
try:
expected = EXPECTED[type1, type2]
except KeyError:
expected = EXPECTED[type2, type1]
if expected is None:
assert_raises(ValueError, _check_targets, y1, y2)
if type1 != type2:
assert_raise_message(
ValueError,
"Can't handle mix of {0} and {1}".format(type1, type2),
_check_targets, y1, y2)
else:
if type1 not in (BIN, MC, SEQ, IND):
assert_raise_message(ValueError,
"{0} is not supported".format(type1),
_check_targets, y1, y2)
else:
merged_type, y1out, y2out = _check_targets(y1, y2)
assert_equal(merged_type, expected)
if merged_type.startswith('multilabel'):
assert_equal(y1out.format, 'csr')
assert_equal(y2out.format, 'csr')
else:
assert_array_equal(y1out, np.squeeze(y1))
assert_array_equal(y2out, np.squeeze(y2))
assert_raises(ValueError, _check_targets, y1[:-1], y2)
def test_hinge_loss_binary():
y_true = np.array([-1, 1, 1, -1])
pred_decision = np.array([-8.5, 0.5, 1.5, -0.3])
assert_equal(hinge_loss(y_true, pred_decision), 1.2 / 4)
y_true = np.array([0, 2, 2, 0])
pred_decision = np.array([-8.5, 0.5, 1.5, -0.3])
assert_equal(hinge_loss(y_true, pred_decision), 1.2 / 4)
def test_hinge_loss_multiclass():
pred_decision = np.array([
[0.36, -0.17, -0.58, -0.99],
[-0.54, -0.37, -0.48, -0.58],
[-1.45, -0.58, -0.38, -0.17],
[-0.54, -0.38, -0.48, -0.58],
[-2.36, -0.79, -0.27, 0.24],
[-1.45, -0.58, -0.38, -0.17]
])
y_true = np.array([0, 1, 2, 1, 3, 2])
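    # Hand-computed per-sample hinge losses: 1 - (true class score) + (a competing class score), clipped at 0 below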
dummy_losses = np.array([
1 - pred_decision[0][0] + pred_decision[0][1],
1 - pred_decision[1][1] + pred_decision[1][2],
1 - pred_decision[2][2] + pred_decision[2][3],
1 - pred_decision[3][1] + pred_decision[3][2],
1 - pred_decision[4][3] + pred_decision[4][2],
1 - pred_decision[5][2] + pred_decision[5][3]
])
dummy_losses[dummy_losses <= 0] = 0
dummy_hinge_loss = np.mean(dummy_losses)
assert_equal(hinge_loss(y_true, pred_decision),
dummy_hinge_loss)
def test_hinge_loss_multiclass_missing_labels_with_labels_none():
y_true = np.array([0, 1, 2, 2])
pred_decision = np.array([
[1.27, 0.034, -0.68, -1.40],
[-1.45, -0.58, -0.38, -0.17],
[-2.36, -0.79, -0.27, 0.24],
[-2.36, -0.79, -0.27, 0.24]
])
error_message = ("Please include all labels in y_true "
"or pass labels as third argument")
assert_raise_message(ValueError,
error_message,
hinge_loss, y_true, pred_decision)
def test_hinge_loss_multiclass_with_missing_labels():
pred_decision = np.array([
[0.36, -0.17, -0.58, -0.99],
[-0.55, -0.38, -0.48, -0.58],
[-1.45, -0.58, -0.38, -0.17],
[-0.55, -0.38, -0.48, -0.58],
[-1.45, -0.58, -0.38, -0.17]
])
y_true = np.array([0, 1, 2, 1, 2])
labels = np.array([0, 1, 2, 3])
dummy_losses = np.array([
1 - pred_decision[0][0] + pred_decision[0][1],
1 - pred_decision[1][1] + pred_decision[1][2],
1 - pred_decision[2][2] + pred_decision[2][3],
1 - pred_decision[3][1] + pred_decision[3][2],
1 - pred_decision[4][2] + pred_decision[4][3]
])
dummy_losses[dummy_losses <= 0] = 0
dummy_hinge_loss = np.mean(dummy_losses)
assert_equal(hinge_loss(y_true, pred_decision, labels=labels),
dummy_hinge_loss)
def test_hinge_loss_multiclass_invariance_lists():
# Currently, invariance of string and integer labels cannot be tested
# in common invariance tests because invariance tests for multiclass
    # decision functions are not implemented yet.
y_true = ['blue', 'green', 'red',
'green', 'white', 'red']
pred_decision = [
[0.36, -0.17, -0.58, -0.99],
[-0.55, -0.38, -0.48, -0.58],
[-1.45, -0.58, -0.38, -0.17],
[-0.55, -0.38, -0.48, -0.58],
[-2.36, -0.79, -0.27, 0.24],
[-1.45, -0.58, -0.38, -0.17]]
dummy_losses = np.array([
1 - pred_decision[0][0] + pred_decision[0][1],
1 - pred_decision[1][1] + pred_decision[1][2],
1 - pred_decision[2][2] + pred_decision[2][3],
1 - pred_decision[3][1] + pred_decision[3][2],
1 - pred_decision[4][3] + pred_decision[4][2],
1 - pred_decision[5][2] + pred_decision[5][3]
])
dummy_losses[dummy_losses <= 0] = 0
dummy_hinge_loss = np.mean(dummy_losses)
assert_equal(hinge_loss(y_true, pred_decision),
dummy_hinge_loss)
def test_log_loss():
# binary case with symbolic labels ("no" < "yes")
y_true = ["no", "no", "no", "yes", "yes", "yes"]
y_pred = np.array([[0.5, 0.5], [0.1, 0.9], [0.01, 0.99],
[0.9, 0.1], [0.75, 0.25], [0.001, 0.999]])
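    # log_loss is the mean of -log(probability assigned to the true class);
    # with the label ordering ("no", "yes"), the true-class probabilities here
    # are 0.5, 0.1, 0.01, 0.1, 0.25 and 0.999.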
loss = log_loss(y_true, y_pred)
assert_almost_equal(loss, 1.8817971)
# multiclass case; adapted from http://bit.ly/RJJHWA
y_true = [1, 0, 2]
y_pred = [[0.2, 0.7, 0.1], [0.6, 0.2, 0.2], [0.6, 0.1, 0.3]]
loss = log_loss(y_true, y_pred, normalize=True)
assert_almost_equal(loss, 0.6904911)
# check that we got all the shapes and axes right
# by doubling the length of y_true and y_pred
y_true *= 2
y_pred *= 2
loss = log_loss(y_true, y_pred, normalize=False)
assert_almost_equal(loss, 0.6904911 * 6, decimal=6)
# check eps and handling of absolute zero and one probabilities
y_pred = np.asarray(y_pred) > .5
loss = log_loss(y_true, y_pred, normalize=True, eps=.1)
assert_almost_equal(loss, log_loss(y_true, np.clip(y_pred, .1, .9)))
# raise error if number of classes are not equal.
y_true = [1, 0, 2]
y_pred = [[0.2, 0.7], [0.6, 0.5], [0.4, 0.1]]
assert_raises(ValueError, log_loss, y_true, y_pred)
# case when y_true is a string array object
y_true = ["ham", "spam", "spam", "ham"]
y_pred = [[0.2, 0.7], [0.6, 0.5], [0.4, 0.1], [0.7, 0.2]]
loss = log_loss(y_true, y_pred)
assert_almost_equal(loss, 1.0383217, decimal=6)
| ashhher3/scikit-learn | sklearn/metrics/tests/test_classification.py | Python | bsd-3-clause | 47,245 |
# (DWang) The Cloud Service Provider must ensure that all the permissions that
# need to be protected are listed here.
# If a permission is not found in this dict, it is left to the enforcer, which
# will deny requests for that permission by default.
class DefaultRules(object):
def __init__(self):
self.dflt_rules = {
'keystone': {
'cru_check': 'role:cloud_admin',
# service
"list_services": "role:domain_admin",
"get_service": "role:domain_admin",
"create_service": "role:domain_admin",
"update_service": "role:domain_admin",
"delete_service": "role:domain_admin",
# endpoint
"get_endpoint": "role:domain_admin",
"list_endpoints": "role:domain_admin",
"create_endpoint": "role:domain_admin",
"update_endpoint": "role:domain_admin",
"delete_endpoint": "role:domain_admin",
# domain
"list_domains": "role:domain_admin",
"create_domain": "role:domain_admin",
"delete_domain": "role:domain_admin",
"get_domain": "role:domain_admin",
"update_domain": "role:domain_admin",
# project
"list_projects": "role:domain_admin or role:project_admin",
"list_user_projects": "role:domain_admin or user_id:%(obj.user.id)s",
"create_project": "role:domain_admin",
"get_project": "role:domain_admin or role:project_admin",
"update_project": "role:domain_admin or role:project_admin",
"delete_project": "role:domain_admin",
# user
"list_users": "@",
"get_user": "@",
"create_user": "role:domain_admin",
"update_user": "role:domain_admin or user_id:%(obj.user.id)s",
"delete_user": "role:domain_admin",
"change_password": "role:domain_admin or user_id:%(obj.user.id)s",
# group
"get_group": "role:domain_admin or role:project_admin",
"list_groups": "role:domain_admin or role:project_admin",
"list_groups_for_user": "role:domain_admin or role:project_admin or user_id:%(obj.user.id)s",
"create_group": "role:domain_admin",
"update_group": "role:domain_admin",
"delete_group": "role:domain_admin",
"remove_user_from_group": "role:domain_admin or role:project_admin",
"check_user_in_group": "role:domain_admin or role:project_admin",
"add_user_to_group": "role:domain_admin or role:project_admin",
"list_users_in_group": "role:domain_admin or role:project_admin",
"list_projects_for_groups": "@",
"list_domains_for_groups": "@",
# grant
"check_grant": "role:domain_admin or role:project_admin",
"list_grants": "role:domain_admin or role:project_admin",
"create_grant": "role:domain_admin or role:project_admin",
"revoke_grant": "role:domain_admin or role:project_admin",
# system grant
"check_sys_grant": "role:domain_admin",
"list_sys_grants": "role:domain_admin",
"create_sys_grant": "role:domain_admin",
"revoke_sys_grant": "role:domain_admin",
# role assignment
"list_role_assignments": "role:domain_admin or role:project_admin",
# role
"get_role": "role:domain_admin or role:project_admin",
"list_roles": "role:domain_admin or role:project_admin",
"create_role": "role:domain_admin",
"update_role": "role:domain_admin",
"delete_role": "role:domain_admin",
# system role
"get_sys_role": "role:domain_admin",
"list_sys_roles": "role:domain_admin",
"create_sys_role": "role:domain_admin",
"update_sys_role": "role:domain_admin",
"delete_sys_role": "role:domain_admin",
# policy
"get_policy": "role:domain_admin",
"list_policies": "role:domain_admin",
"create_policy": "role:domain_admin",
"update_policy": "role:domain_admin",
"delete_policy": "role:domain_admin",
# rule
"get_rule": "role:domain_admin",
"list_rules": "role:domain_admin",
"create_rule": "role:domain_admin",
"update_rule": "role:domain_admin",
"delete_rule": "role:domain_admin",
# token
"check_token": "@",
"validate_token": "@",
"revocation_list": "@",
"revoke_token": "@",
# other
"get_auth_catalog": "@",
"get_auth_projects": "@",
"get_auth_domains": "@",
"list_revoke_events": "@"
},
# Glance related
'glance': {
"add_image": "role:domain_admin",
"delete_image": "role:domain_admin",
"get_image": "role:domain_admin",
"get_images": "role:domain_admin",
"modify_image": "role:domain_admin",
"publicize_image": "role:domain_admin",
"download_image": "role:domain_admin",
"upload_image": "role:domain_admin",
"context_is_admin": "role:domain_admin"
}
}
| darren-wang/op | oslo_policy/_default_domain.py | Python | apache-2.0 | 4,982 |
#!/usr/bin/env python
import numpy as np
from selfdrive.car.honda.can_parser import CANParser
from selfdrive.boardd.boardd import can_capnp_to_can_list
from cereal import car
import zmq
from selfdrive.services import service_list
import selfdrive.messaging as messaging
def _create_radard_can_parser():
dbc_f = 'acura_ilx_2016_nidec.dbc'
radar_messages = range(0x430, 0x43A) + range(0x440, 0x446)
signals = zip(['LONG_DIST'] * 16 + ['NEW_TRACK'] * 16 + ['LAT_DIST'] * 16 +
['REL_SPEED'] * 16, radar_messages * 4,
[255] * 16 + [1] * 16 + [0] * 16 + [0] * 16)
checks = zip(radar_messages, [20]*16)
return CANParser(dbc_f, signals, checks)
class RadarInterface(object):
def __init__(self):
# radar
self.pts = {}
self.track_id = 0
# Nidec
self.rcp = _create_radard_can_parser()
context = zmq.Context()
self.logcan = messaging.sub_sock(context, service_list['can'].port)
def update(self):
canMonoTimes = []
can_pub_radar = []
# TODO: can hang if no packets show up
while 1:
for a in messaging.drain_sock(self.logcan, wait_for_one=True):
canMonoTimes.append(a.logMonoTime)
can_pub_radar.extend(can_capnp_to_can_list(a.can, [1, 3]))
# only run on the 0x445 packets, used for timing
if any(x[0] == 0x445 for x in can_pub_radar):
break
updated_messages = self.rcp.update_can(can_pub_radar)
ret = car.RadarState.new_message()
errors = []
if not self.rcp.can_valid:
errors.append("notValid")
ret.errors = errors
ret.canMonoTimes = canMonoTimes
for ii in updated_messages:
cpt = self.rcp.vl[ii]
if cpt['LONG_DIST'] < 255:
if ii not in self.pts or cpt['NEW_TRACK']:
self.pts[ii] = car.RadarState.RadarPoint.new_message()
self.pts[ii].trackId = self.track_id
self.track_id += 1
self.pts[ii].dRel = cpt['LONG_DIST'] # from front of car
self.pts[ii].yRel = -cpt['LAT_DIST'] # in car frame's y axis, left is positive
self.pts[ii].vRel = cpt['REL_SPEED']
self.pts[ii].aRel = float('nan')
self.pts[ii].yvRel = float('nan')
else:
if ii in self.pts:
del self.pts[ii]
ret.points = self.pts.values()
return ret
if __name__ == "__main__":
RI = RadarInterface()
while 1:
ret = RI.update()
print(chr(27) + "[2J")
print ret
| heidecjj/openpilot | selfdrive/radar/nidec/interface.py | Python | mit | 2,420 |
import numpy as np
import cv2
def color_transfer(source, target):
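    # Reinhard-style colour transfer: work in the L*a*b* colour space, remove
    # the target's per-channel means, rescale by the ratio of the standard
    # deviations, add the source's means back in, and convert back to BGR.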
source = cv2.cvtColor(source, cv2.COLOR_BGR2LAB).astype("float32")
target = cv2.cvtColor(target, cv2.COLOR_BGR2LAB).astype("float32")
(lMeanSrc, lStdSrc, aMeanSrc, aStdSrc, bMeanSrc, bStdSrc) = image_stats(source)
(lMeanTar, lStdTar, aMeanTar, aStdTar, bMeanTar, bStdTar) = image_stats(target)
(l, a, b) = cv2.split(target)
l -= lMeanTar
a -= aMeanTar
b -= bMeanTar
l = (lStdTar / lStdSrc) * l
a = (aStdTar / aStdSrc) * a
b = (bStdTar / bStdSrc) * b
l += lMeanSrc
a += aMeanSrc
b += bMeanSrc
l = np.clip(l, 0, 255)
a = np.clip(a, 0, 255)
b = np.clip(b, 0, 255)
transfer = cv2.merge([l, a, b])
transfer = cv2.cvtColor(transfer.astype("uint8"), cv2.COLOR_LAB2BGR)
return transfer
def image_stats(image):
(l, a, b) = cv2.split(image)
(lMean, lStd) = (l.mean(), l.std())
(aMean, aStd) = (a.mean(), a.std())
(bMean, bStd) = (b.mean(), b.std())
return (lMean, lStd, aMean, aStd, bMean, bStd)
| meizhoubao/pyimagesearch | color-transfer/color_transfer.py | Python | gpl-3.0 | 1,066 |
# -- external imports --
import numpy as np
from onezone import imf as _imf # to do IMF sampling
import time
from fastloop import loop_over_rnum
MASS_THRESHOLD = 8.0
#
#
#
np.random.seed(12345)
_imf.np.random.seed(12345)
# make IMF function a global for now
# and set to assumed defaults --- make this
# easily modifiable in the future
_IMF = _imf.salpeter()
_IMF.M_min = 1.0
_IMF.M_max = 100.0
def _check_scalar_input(x):
x = np.asarray(x)
scalar_input = False
if x.ndim == 0:
x = x[None]
scalar_input = True
return x, scalar_input
def _check_scalar_output(x, scalar_input):
if scalar_input:
return np.squeeze(x)
else:
return x
def exponential_disk(r, z, a, b):
"""
An un-normalized Miyamoto-Nagai density
function. The full density function can be computed
by multiplying by (b^2 * M)/(4*pi), where M is the
total disk mass. This is not needed for establishing
the IC's since it gets normalized out in computing the probability
distribution anyway.
"""
# allow for computing at many r/z values with only one z/r value
if np.size(r) != np.size(z):
if not (np.size(r) == 1 or np.size(z) == 1):
return "r and z must be same size OR one (or both) must be scalar"
if np.size(z) > np.size(r):
r = np.ones(np.size(z)) * r
else:
z = np.ones(np.size(r)) * z
zs_bs = np.sqrt(z*z + b*b)
numerator = a*r*r + (a + 3.0 * zs_bs)*(a + zs_bs)
denominator = (r*r + (a + zs_bs)**2)**(2.5) * (zs_bs)**3
return numerator / denominator
def _find_bin(x, array):
"""
Wrapper on argmin to ensure selected bin
is less than desired value. I.e:
array[index] <= x < array[index+1]
where index is the found and returned bin index.
array must be sorted in increasing order.
"""
if x < array[0] or x > array[-1]:
print("search value out of bounds on array")
print(x, array[0], array[-1])
return -1
index = np.abs(array - x).argmin()
if x < array[index] and index > 0:
index = index - 1
if x < array[index] and index > 0:
index = index - 1
if x < array[index] and index > 0:
        print(index+2, array[index+2], x)
        print(index, array[index], x)
print("Failure finding bin")
return index
class particleIC(object):
def __init__(self, M_star, a, b, rmax, zmax,
Z = 0.0043, rmin = 0.1, zmin = 0.1,
IMF = _IMF, npoints = 1000):
#
# Set properties
#
self._M_star = M_star
self._a = a
self._b = b
self._rmin = rmin
self._zmin = zmin
self._rmax = rmax
self._zmax = zmax
self._IMF = IMF
self._npoints = npoints
self._metallicity = Z
return
def generate(self):
"""
Given initial condition properties, generate a particle
        distribution. Particles are randomly sampled from an IMF
and deposited over an exponential disk profile with
specified scale radius, height, and cutoff radius.
"""
if not self._is_dist_tabulated():
self._tabulate_probability_distribution()
# sample the IMF - this sets the number of stars we need
self.M = self._IMF.sample(M = self._M_star)
self.number_of_particles = np.size(self.M)
# initialize position arrays (r,theta,z)
self.r = np.zeros(self.number_of_particles)
self.z = np.zeros(self.number_of_particles)
self.theta = np.zeros(self.number_of_particles)
# get random numbers for radial position and set r
delta_r = self._tabulated_prob_r[-1] - self._tabulated_prob_r[0]
rnum = (np.random.rand(self.number_of_particles) * delta_r) + self._tabulated_prob_r[0]
for i in np.arange(self.number_of_particles):
bin = _find_bin(rnum[i], self._tabulated_prob_r)
self.r[i] = 10.0**(bin*self._tabulated_dr + self._r_o)
# get random numbers for vertical position and set z
delta_z = self._tabulated_prob_z[-1] - self._tabulated_prob_z[0]
rnum = np.random.rand(self.number_of_particles) * delta_z + self._tabulated_prob_z[0]
for i in np.arange(self.number_of_particles):
bin = _find_bin(rnum[i], self._tabulated_prob_z)
self.z[i] = 10.0**(bin*self._tabulated_dz + self._z_o)
# now randomly set z's to positive or negative
rnum = np.random.rand(self.number_of_particles)
ones = np.ones(self.number_of_particles)
ones[rnum < 0.5] = -1
self.z = self.z * ones
# and set angle (easy)
rnum = np.random.rand(self.number_of_particles)
self.theta = 2.0 * np.pi * rnum
self.metallicity = np.ones(self.number_of_particles)*self._metallicity
# done
start = time.time()
self.write_IC()
end = time.time()
print("write out took ", end - start)
return
def _tabulate_probability_distribution(self):
dr = np.log10(self._rmax / self._rmin) / (1.0*(self._npoints-1))
dz = np.log10(self._zmax / self._zmin) / (1.0*(self._npoints-1))
r_o = np.log10(self._rmin)
z_o = np.log10(self._zmin)
r = 10.0**(r_o + np.arange(0,self._npoints)*dr)
z = 10.0**(z_o + np.arange(0,self._npoints)*dz)
# sample exponential disk at z = 0
r_dist = np.cumsum( exponential_disk(r, 0.0, self._a, self._b))
r_dist = r_dist / (r_dist[-1])
# sample exponential disk at r = 0
z_dist = np.cumsum( exponential_disk(0.0, z, self._a, self._b))
z_dist = z_dist / (z_dist[-1])
# save tabulated properties
self._r_o = r_o
self._z_o = z_o
self._tabulated_dr = dr
self._tabulated_dz = dz
self._tabulated_r = r
self._tabulated_z = z
self._tabulated_prob_r = r_dist
self._tabulated_prob_z = z_dist
return
def _is_dist_tabulated(self):
if hasattr(self, '_tabulated_r'):
return (np.size(self._tabulated_r) == self._npoints)
else:
return False
def write_IC(self, outfile = './particle_IC.in'):
with open(outfile, 'w') as f:
header = "# M Z x y z\n"
fmt = "%.3f %3.3E %5.5E %5.5E %5.5E\n"
f.write(header)
x = self.x
y = self.y
for i in np.arange(self.number_of_particles):
if self.M[i] < MASS_THRESHOLD:
continue
f.write(fmt%(self.M[i], self.metallicity[i],\
x[i], y[i], self.z[i]))
print("wrote IC's for %i particles to "%(self.number_of_particles) + outfile)
return
@property
def x(self):
return self.r * np.cos(self.theta)
@property
def y(self):
return self.r * np.sin(self.theta)
if __name__ == "__main__":
import matplotlib.pyplot as plt
# perform a test of the IC's and plot
#SFR = 5.5E-5 # Msun / yr
SFR = 1.0E-3
dt = 25.0E6 # yr
a = 1.0
b = 0.030
# b = 350.0
M_star = SFR * dt
# a = 700.0 # high
# b = 350.0 # high
#a = 450.0 medium
#b = 225.0 medium
# a = 300.0 # parsec for lowest mass
# b = 150.0 # parsec for lowest mass
# SFR = 1.0E-4
# dt = 25.0E6
# M_star = SFR*dt
# a = 250.0 # parsec
# b = 125.0 # parsec
rmax = a*2.0
zmax = b*2.0
start = time.time()
IC = particleIC(M_star, a, b, rmax, zmax, Z = 0.1 * 0.0183)
IC.generate()
end = time.time()
print("generation took ", end - start)
# now plot these
fig, ax = plt.subplots(1,3)
ax[0].scatter(IC.x, IC.z, color='black', s = IC.M)
ax[1].scatter(IC.y, IC.z, color='black', s = IC.M)
ax[2].scatter(IC.x, IC.y, color='black', s = IC.M)
ax[0].set_xlim(-rmax,rmax)
ax[0].set_ylim(-rmax,rmax)
# ax[0].set_ylim(-b,b)
ax[1].set_xlim(-rmax,rmax)
ax[1].set_ylim(-rmax,rmax)
# ax[1].set_ylim(-b,b)
ax[2].set_xlim(-rmax,rmax)
ax[2].set_ylim(-rmax,rmax)
ax[0].set_xlabel("x (pc)")
ax[0].set_ylabel("z (pc)")
ax[1].set_xlabel("y (pc)")
ax[1].set_ylabel("z (pc)")
ax[2].set_xlabel("x (pc)")
ax[2].set_ylabel("y (pc)")
fig.set_size_inches(18,6)
plt.tight_layout()
plt.savefig("particle_IC_test.png")
plt.close(fig)
# now plot the surface density profile
rmin = 0.0
rmax = rmax
dr = 50.0
rbin = np.arange(rmin*rmin, (rmax)**2 + dr**2, dr**2)
rbin = np.sqrt(rbin)
centers = 0.5 * ( rbin[1:] + rbin[:-1])
Mtot = np.zeros(np.size(rbin)-1)
SD = np.zeros(np.size(rbin)-1)
for i in np.arange(1, np.size(rbin)-1):
Mtot[i] = np.sum( IC.M[ (IC.r < rbin[i]) * (IC.r >= rbin[i-1])])
SD[i] = Mtot[i] / (np.pi * ( rbin[i]**2 - rbin[i-1]**2))
fig, ax = plt.subplots(1,2)
x = rbin[:-1]
ax[0].plot(x, Mtot, label = 'mass', color = 'black', ls = '--', lw = 3, drawstyle='steps-pre')
ax[0].plot(x, np.cumsum(Mtot), label = 'cumulative', color = 'black', ls = '-', lw = 3, drawstyle='steps-pre')
ax[0].set_xlim(rmin, rmax)
ax[0].semilogy()
ax[0].set_ylabel(r'M (M$_{\odot}$)')
ax[0].set_xlabel(r'r (pc)')
ax[0].legend(loc='best')
ax[1].plot(x, SD, label = 'Surface Density', color = 'black', lw = 3, drawstyle='steps-pre')
# rsqr = 1.0 / (centers * centers)
# rsqr = (np.max(SD) / np.max(rsqr)) * rsqr
# ax[1].plot(centers, rsqr, lw = 3, color = 'black', ls = '--')
ax[1].set_xlim(rmin, rmax)
ax[1].semilogy()
ax[1].set_xlabel(r'r (pc)')
ax[1].set_ylabel(r'$\Sigma$ (M$_{\odot}$ pc$^{-2}$')
fig.set_size_inches(12,6)
plt.tight_layout()
plt.savefig("particle_IC_profile.png")
plt.close(fig)
print(np.min(IC.M), np.max(IC.M))
| aemerick/galaxy_analysis | initial_conditions/IC.py | Python | mit | 10,054 |
import _plotly_utils.basevalidators
class SizeValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(
self, plotly_name="size", parent_name="surface.colorbar.title.font", **kwargs
):
super(SizeValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
min=kwargs.pop("min", 1),
role=kwargs.pop("role", "style"),
**kwargs
)
| plotly/python-api | packages/python/plotly/plotly/validators/surface/colorbar/title/font/_size.py | Python | mit | 505 |
# -*- coding: utf-8 -*-
from pprint import pprint
from genshi.filters import Transformer
from genshi.builder import tag
from trac.core import *
from trac.util.html import html
from trac.web import IRequestHandler, HTTPInternalError
from trac.web.chrome import INavigationContributor, ITemplateProvider, add_stylesheet, add_script, add_warning, add_notice
from trac.web.api import IRequestFilter, ITemplateStreamFilter
from trac.ticket.api import ITicketChangeListener, ITicketManipulator
from trac.prefs import IPreferencePanelProvider
from trac.ticket.model import Ticket
from trac.notification import NotifyEmail
from genshi.template.text import NewTextTemplate
import requests, re
from pkg_resources import resource_filename
#from IPython import embed
# Configuration
DEFAULT_BOUNTYFUNDING_URL='http://localhost:8080'
DEFAULT_TOKEN = 'default'
DEFAULT_MAPPING_READY = ['new', 'accepted', 'reopened']
DEFAULT_MAPPING_STARTED = ['assigned']
DEFAULT_MAPPING_COMPLETED = ['closed']
BOUNTYFUNDING_PATTERN = re.compile("(?:/(?P<ticket>ticket)/(?P<ticket_id>[0-9]+)/(?P<ticket_action>sponsor|update_sponsorship|confirm|validate|pay))|(?:/(?P<bountyfunding>bountyfunding)/(?P<bountyfunding_action>status|email|sync))")
class Sponsorship:
def __init__(self, dictionary={}):
self.amount = dictionary.get('amount', 0)
self.status = dictionary.get('status')
class Email:
def __init__(self, dictionary):
self.id = dictionary.get('id')
self.recipient = dictionary.get('recipient')
self.issue_id = dictionary.get('issue_id')
self.body = dictionary.get('body')
class GenericNotifyEmail(NotifyEmail):
template_name = 'email.txt'
def __init__(self, env, recipient, body, link):
NotifyEmail.__init__(self, env)
self.recipient = recipient
self.data = {
'body': body,
'link': link,
'project_name': env.project_name,
'project_url': env.project_url or self.env.abs_href(),
}
def get_recipients(self, resid):
return ([self.recipient], [])
def sum_amounts(sponsorships, statuses=None):
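    # e.g. sum_amounts(sponsorships.values(), ('CONFIRMED', 'VALIDATED')) totals
    # only the sponsorships whose status is one of the given values.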
if statuses != None:
sponsorships = [s for s in sponsorships if s.status in statuses]
total_amount = sum(map(lambda s: s.amount, sponsorships))
return total_amount
class BountyFundingPlugin(Component):
implements(ITemplateStreamFilter, IRequestFilter, IRequestHandler, ITemplateProvider, ITicketChangeListener, ITicketManipulator, IPreferencePanelProvider)
def __init__(self):
self.configure()
def configure(self):
self.bountyfunding_url = self.config.get('bountyfunding', 'url', DEFAULT_BOUNTYFUNDING_URL)
self.token = self.config.get('bountyfunding', 'token', DEFAULT_TOKEN)
self.status_mapping = {}
for m in self.get_config_array(
'bountyfunding', 'status_mapping_ready', DEFAULT_MAPPING_READY):
self.status_mapping[m] = 'READY'
for m in self.get_config_array(
'bountyfunding', 'status_mapping_started', DEFAULT_MAPPING_STARTED):
self.status_mapping[m] = 'STARTED'
for m in self.get_config_array(
'bountyfunding', 'status_mapping_completed', DEFAULT_MAPPING_COMPLETED):
self.status_mapping[m] = 'COMPLETED'
def get_config_array(self, section, option, default):
value = self.config.get(section, option, None)
if value != None:
return [v.strip() for v in value.split(",")]
else:
return default
def call_api(self, method, path, **kwargs):
url = self.bountyfunding_url + path
params = kwargs
params['token'] = self.token
try:
response = requests.request(method, url, params=kwargs)
except requests.exceptions.ConnectionError:
self.log.warn("Error connecting to BountyFunding")
response = None
return response
def convert_status(self, status):
return self.status_mapping[status]
def get_sponsorships(self, ticket_id):
sponsorships = {}
request = self.call_api('GET', '/issue/%s/sponsorships' % ticket_id)
if request.status_code == 200:
sponsorships = dict(map(lambda (k,v): (k, Sponsorship(v)), request.json().items()))
return sponsorships
#TODO: not entirely safe from race conditions, fix it
def update_ticket(self, ticket, refresh_amount=True, author=None, comment=None):
update = (comment != None)
if refresh_amount:
sponsorships = self.get_sponsorships(ticket.id)
amount = sum_amounts(sponsorships.values())
if amount == 0:
if ticket["bounty"]:
ticket["bounty"] = None
update = True
else:
amount = u"%d\u20ac" % amount
if ticket["bounty"] != amount:
ticket["bounty"] = amount
update = True
if update:
ticket.save_changes(author, comment)
return update
def update_api_ticket(self, ticket):
r = self.call_api('GET', '/issue/%s' % ticket.id)
if r.status_code != 200:
return False
api_ticket = r.json()
title = ticket['summary']
status = self.convert_status(ticket['status'])
owner = ticket['owner']
changes = {}
if title != api_ticket.get('title'):
changes['title'] = title
if status != api_ticket.get('status'):
changes['status'] = status
if owner != api_ticket.get('owner'):
changes['owner'] = owner
if changes:
self.call_api('PUT', '/issue/%s' % ticket.id, **changes)
return True
return False
def get_link(self, ticket_id):
return '/ticket/%s' % ticket_id
def send_email(self, recipient, ticket_id, body):
ticket = Ticket(self.env, ticket_id)
subject = self.format_email_subject(ticket)
link = self.env.abs_href.ticket(ticket_id)
email = GenericNotifyEmail(self.env, recipient, body, link)
email.notify('', subject)
def format_email_subject(self, ticket):
template = self.config.get('notification','ticket_subject_template')
template = NewTextTemplate(template.encode('utf8'))
prefix = self.config.get('notification', 'smtp_subject_prefix')
if prefix == '__default__':
prefix = '[%s]' % self.env.project_name
data = {
'prefix': prefix,
'summary': ticket['summary'],
'ticket': ticket,
'env': self.env,
}
return template.generate(**data).render('text', encoding=None).strip()
# ITemplateStreamFilter methods
def filter_stream(self, req, method, filename, stream, data):
"""
        Quick and dirty solution - modify the page on the fly to inject a special field. It would
        be nicer to do this by creating a custom field, as this approach depends on the page structure.
"""
#embed(header='Ticket Stream Filter')
if filename == 'ticket.html':
# Disable any direct bounty input
filter = Transformer('.//input[@id="field-bounty"]')
stream |= filter.attr("disabled", "disabled")
ticket = data.get('ticket')
if ticket and ticket.exists:
identifier = ticket.id
user = req.authname if req.authname != 'anonymous' else None
request = self.call_api('GET', '/issue/%s' % identifier)
fragment = tag()
sponsorships = {}
status = self.convert_status(ticket.values['status'])
owner = ticket.values['owner']
tooltip = None
if request != None and (request.status_code == 200 or request.status_code == 404):
sponsorships = self.get_sponsorships(identifier)
pledged_amount = sum_amounts(sponsorships.values())
user_sponsorship = sponsorships.get(user, Sponsorship())
# Bounty
tooltip = u"Pledged: %d\u20ac" % pledged_amount
if status == 'STARTED' or status == 'COMPLETED':
confirmed_amount = sum_amounts(sponsorships.values(), ('CONFIRMED', 'VALIDATED', 'REJECTED', 'TRANSFERRED', 'REFUNDED'))
tooltip += u" \nConfirmed: %d\u20ac" % confirmed_amount
if status == 'COMPLETED':
validated_amount = sum_amounts(sponsorships.values(), 'VALIDATED')
tooltip += u" \nValidated: %d\u20ac" % validated_amount
# Action
action = None
if (((status == 'STARTED' or status == 'COMPLETED')
and user_sponsorship.status == 'PLEDGED')
or (status == 'STARTED' and user != None and user != owner
and user_sponsorship.status == None)):
response = self.call_api('GET', '/config/payment_gateways')
gateways = response.json().get('gateways')
gateway_tags = []
if 'DUMMY' in gateways:
gateway_tags.append(tag.input(type="submit", value="Payment Card", name='DUMMY'))
if 'PAYPAL_STANDARD' in gateways:
gateway_tags.append(tag.input(type="submit", value="PayPal", name='PAYPAL_STANDARD'))
if 'PAYPAL_ADAPTIVE' in gateways:
gateway_tags.append(tag.input(type="submit", value="PayPal", name='PAYPAL_ADAPTIVE'))
if user_sponsorship.status == 'PLEDGED':
action = tag.form(
tag.input(type="button", name="confirm", value=u"Confirm %d\u20ac" % user_sponsorship.amount, id="confirm-button"),
tag.span(gateway_tags, id="confirm-options"),
tag.input(type="submit", name="delete", value="Delete"),
method="post", action=req.href.ticket(identifier, "confirm"))
else:
#TODO: should be separate action
action = tag.form(
tag.input(name="amount", type="text", size="3", value="0", pattern="[0-9]*", title="money amount"),
tag.input(type="button", value="Pledge & Confirm", id="confirm-button"),
tag.span(gateway_tags, id="confirm-options"),
method="post", action=req.href.ticket(identifier, "confirm"))
elif status == 'COMPLETED' and user_sponsorship.status in ('CONFIRMED', 'REJECTED', 'VALIDATED'):
action = tag.form(method="post", action=req.href.ticket(identifier, "validate"))
if user_sponsorship.status == 'CONFIRMED' or user_sponsorship.status == 'REJECTED':
action.append(tag.input(type="submit", name='validate', value=u"Validate %d\u20ac" % user_sponsorship.amount))
if user_sponsorship.status == 'CONFIRMED' or user_sponsorship.status == 'VALIDATED':
action.append(tag.input(type="submit", name='reject', value="Reject"))
elif (status == 'READY' and user != None):
if user_sponsorship.status == None:
action = tag.form(tag.input(name="amount", type="text", size="3", value=user_sponsorship.amount, pattern="[0-9]*", title="money amount"), tag.input(type="submit", value="Pledge"), method="post", action=req.href.ticket(identifier, "sponsor"))
elif user_sponsorship.status == 'PLEDGED':
action = tag.form(tag.input(name="amount", type="text", size=3, value=user_sponsorship.amount, pattern="[0-9]*", title="money amount"), tag.input(type="submit", name="update", value="Update"), tag.input(type="submit", name="delete", value="Delete"), method="post", action=req.href.ticket(identifier, "update_sponsorship"))
elif (user == None):
action = tag.span(u"\u00A0", tag.a("Login", href=req.href.login()), " or ", tag.a("Register", href=req.href.register()), " to sponsor")
if action != None:
fragment.append(" ")
fragment.append(action)
else:
error = "Connection error"
if request:
error = request.json().get("error", "Unknown error")
fragment.append(tag.span("[BountyFunding Error]", title=error))
#chrome = Chrome(self.env)
#chrome.add_jquery_ui(req)
add_stylesheet(req, 'htdocs/styles/bountyfunding.css')
add_script(req, 'htdocs/scripts/bountyfunding.js')
if tooltip != None:
filter = Transformer('.//td[@headers="h_bounty"]/text()')
stream |= filter.wrap(tag.span(title=tooltip))
filter = Transformer('.//td[@headers="h_bounty"]')
stream |= filter.attr("class", "bountyfunding")
stream |= filter.append(fragment)
return stream
# IRequestFilter methods
def pre_process_request(self, req, handler):
return handler
def post_process_request(self, req, template, data, content_type):
#if template == 'ticket.html':
# ticket = data.get('ticket')
# if ticket and ticket.exists:
# ticket.values['bounty'] = '100'
return template, data, content_type
# IRequestHandler methods
def match_request(self, req):
return BOUNTYFUNDING_PATTERN.match(req.path_info) != None
def process_request(self, req):
match = BOUNTYFUNDING_PATTERN.match(req.path_info)
if match.group('ticket'):
ticket_id = match.group('ticket_id')
action = match.group('ticket_action')
user = req.authname
ticket = Ticket(self.env, ticket_id)
ticket_title = ticket['summary']
ticket_link = self.get_link(ticket_id)
ticket_owner = ticket['owner']
ticket_status = self.convert_status(ticket['status'])
if action == 'sponsor':
amount = req.args.get('amount')
if self.call_api('GET', '/issue/%s' % ticket_id).status_code == 404:
self.call_api('POST', '/issues', ref=ticket_id, status=ticket_status, title=ticket_title, link=ticket_link, owner=ticket_owner)
response = self.call_api('POST', '/issue/%s/sponsorships' % ticket_id, user=user, amount=amount)
if response.status_code != 200:
add_warning(req, "Unable to pledge - %s" % response.json().get('error', ''))
else:
self.update_ticket(ticket, True, user)
if action == 'update_sponsorship':
if req.args.get('update'):
amount = req.args.get('amount')
response = self.call_api('PUT', '/issue/%s/sponsorship/%s' % (ticket_id, user), amount=amount)
if response.status_code != 200:
add_warning(req, "Unable to pledge - %s" % response.json().get('error', ''))
else:
self.update_ticket(ticket, True, user)
elif req.args.get('delete'):
response = self.call_api('DELETE', '/issue/%s/sponsorship/%s' % (ticket_id, user))
if response.status_code != 200:
add_warning(req, "Unable to delete pledge - %s" % response.json().get('error', ''))
else:
self.update_ticket(ticket, True, user)
elif action == 'confirm':
if req.args.get('delete'):
response = self.call_api('DELETE', '/issue/%s/sponsorship/%s' % (ticket_id, user))
if response.status_code != 200:
add_warning(req, "Unable to delete pledge - %s" % response.json().get('error', ''))
else:
self.update_ticket(ticket, True, user)
else:
if req.args.get('DUMMY'):
gateway = 'DUMMY'
elif req.args.get('PAYPAL_STANDARD'):
gateway = 'PAYPAL_STANDARD'
elif req.args.get('PAYPAL_ADAPTIVE'):
gateway = 'PAYPAL_ADAPTIVE'
else:
#TODO: raise exception instead
gateway = None
response = self.call_api('GET', '/issue/%s/sponsorship/%s' % (ticket_id, user))
if response.status_code == 404:
# Security: can't sponsor not started tickets
if ticket_status != 'STARTED':
#TODO: prevent confirming, exception would be much nicer
gateway = None
else:
amount = req.args.get('amount')
if self.call_api('GET', '/issue/%s' % ticket_id).status_code == 404:
self.call_api('POST', '/issues', ref=ticket_id, status=ticket_status, title=ticket_title, link=ticket_link, owner=ticket_owner)
response = self.call_api('POST', '/issue/%s/sponsorships' % ticket_id, user=user, amount=amount)
if response.status_code != 200:
add_warning(req, "Unable to pledge - %s" % response.json().get('error', ''))
#TODO: prevent confirming, exception would be much nicer
gateway = None
if gateway == 'DUMMY':
pay = req.args.get('pay')
card_number = req.args.get('card_number')
card_date = req.args.get('card_date')
error = ""
if pay != None:
if not card_number or not card_date:
error = 'Please specify card number and expiry date'
if card_number and card_date:
response = self.call_api('POST',
'/issue/%s/sponsorship/%s/payments' % (ticket_id, user),
gateway='DUMMY')
if response.status_code != 200:
error = 'BountyFunding cannot create plain payment'
response = self.call_api('PUT',
'/issue/%s/sponsorship/%s/payment' % (ticket_id, user),
status='CONFIRMED', card_number=card_number, card_date=card_date)
if response.status_code != 200:
error = 'BountyFunding refused your plain payment'
else:
self.update_ticket(ticket, True, user, 'Confirmed sponsorship.')
if pay == None or error:
return "payment.html", {'error': error}, None
elif gateway == 'PAYPAL_STANDARD' or gateway == 'PAYPAL_ADAPTIVE':
return_url = req.abs_href('ticket', ticket_id, 'pay')
response = self.call_api('POST',
'/issue/%s/sponsorship/%s/payments' % (ticket_id, user),
gateway=gateway, return_url=return_url)
if response.status_code == 200:
response = self.call_api('GET',
'/issue/%s/sponsorship/%s/payment' % (ticket_id, user))
if response.status_code == 200:
redirect_url = response.json().get('url')
req.redirect(redirect_url)
else:
error = 'BountyFunding cannot retrieve created PayPal payment'
else:
error = 'BountyFunding cannot create PayPal payment'
add_warning(req, error)
elif action == 'pay':
args = dict(req.args)
args['status'] = 'CONFIRMED'
response = self.call_api('PUT', '/issue/%s/sponsorship/%s/payment' % (ticket_id, user),
**args)
if response.status_code == 200:
self.update_ticket(ticket, True, user, 'Confirmed sponsorship.')
add_notice(req, "Thank you for your payment. Your transaction has been completed, and a receipt for your purchase has been emailed to you.")
elif action == 'validate':
if req.args.get('validate'):
response = self.call_api('PUT', '/issue/%s/sponsorship/%s' % (ticket_id, user),
status='VALIDATED')
if response.status_code == 200:
self.update_ticket(ticket, True, user, 'Validated sponsorship.')
elif req.args.get('reject'):
response = self.call_api('PUT', '/issue/%s/sponsorship/%s' % (ticket_id, user),
status='REJECTED')
if response.status_code == 200:
self.update_ticket(ticket, True, user, 'Rejected sponsorship.')
req.redirect(req.href.ticket(ticket_id))
elif match.group('bountyfunding'):
action = match.group('bountyfunding_action')
if action == 'email':
request = self.call_api('GET', '/emails')
if request.status_code == 200:
emails = [Email(email) for email in request.json().get('data')]
for email in emails:
self.send_email(email.recipient, int(email.issue_id), email.body)
self.call_api('DELETE', '/email/%s' % email.id),
req.send_no_content()
if action == 'status':
request = self.call_api('GET', '/version')
if request == None:
raise HTTPInternalError('Unable to connect to BountyFunding')
elif request.status_code == 403:
raise HTTPInternalError('Not permitted to connect to BountyFunding, check token')
elif request.status_code != 200:
raise HTTPInternalError('Invalid HTTP status code %s when connecting to'
' BountyFunding' % request.status_code)
else:
try:
version = request.json().get('version')
return "status.html", {'version': version}, None
except (ValueError, KeyError):
raise HTTPInternalError('Invalid response body from BountyFunding')
if action == 'sync':
#TODO: optimize by calling /issues, setting amount to 0 if not found
updated_ids = set()
user = req.authname
if 'TICKET_ADMIN' in req.perm:
for row in self.env.db_query("SELECT id from ticket ORDER BY id ASC"):
ticket_id = row[0]
ticket = Ticket(self.env, ticket_id)
if self.update_ticket(ticket, True, user):
updated_ids.add(ticket_id)
if self.update_api_ticket(ticket):
updated_ids.add(ticket_id)
else:
add_warning(req, "You are not permitted to sync")
return "sync.html", {"ids": sorted(updated_ids)}, None
# ITicketChangeListener methods
def ticket_created(self, ticket):
pass
def ticket_changed(self, ticket, comment, author, old_values):
changes = {}
if 'status' in old_values:
changes['status'] = self.convert_status(ticket.values['status'])
if 'summary' in old_values:
changes['title'] = ticket['summary']
if 'owner' in old_values:
changes['owner'] = ticket['owner']
if changes:
# Ignore error 404
self.call_api('PUT', '/issue/%s' % ticket.id, **changes)
def ticket_deleted(self, ticket):
pass
# ITicketManipulator methods
def prepare_ticket(self, req, ticket, fields, actions):
pass
def validate_ticket(self, req, ticket):
if ticket.exists:
old_ticket = Ticket(self.env, ticket.id)
if ticket['bounty'] != old_ticket['bounty']:
return [('bounty', 'Bounty cannot be changed')]
else:
if ticket['bounty']:
return [('bounty', 'Bounty cannot be set')]
return []
# ITemplateProvider methods
def get_templates_dirs(self):
return [resource_filename(__name__, 'templates')]
def get_htdocs_dirs(self):
"""Return a list of directories with static resources (such as style
sheets, images, etc.)
Each item in the list must be a `(prefix, abspath)` tuple. The
`prefix` part defines the path in the URL that requests to these
resources are prefixed with.
The `abspath` is the absolute path to the directory containing the
resources on the local file system.
"""
return [('htdocs', resource_filename(__name__, 'htdocs'))]
# IPreferencePanelProvider methods
def get_preference_panels(self, req):
#TODO: this should probably be only visible when using adaptive payments
yield ('bountyfunding', 'BountyFunding')
def render_preference_panel(self, req, panel):
user = req.authname
if req.method == 'POST':
paypal_email = req.args.get('bountyfunding_paypal_email')
if paypal_email != None:
#TODO: perform some validation if possible - see what FreedomSponsors is doing
request = self.call_api('PUT', '/user/%s' % user, paypal_email=paypal_email)
if request and request.status_code == 200:
add_notice(req, 'Your BountyFunding settings have been been saved.')
else:
add_warning(req, 'Error saving BountyFunding settings.')
req.redirect(req.href.prefs(panel or None))
paypal_email = ''
request = self.call_api('GET', '/user/%s' % user)
if request and request.status_code == 200:
paypal_email = request.json().get('paypal_email', '')
return 'bountyfunding_prefs.html', {
'bountyfunding_paypal_email': paypal_email,
}
| bountyfunding/bountyfunding | plugin/trac/bountyfunding/trac/bountyfunding.py | Python | agpl-3.0 | 27,871 |
import os
long_description = """HTTP server and request handler built on top of BaseHTTPServer
intended for Raspberry Pi projects with a web interface"""
if os.path.exists('README.rst'):
long_description = open('README.rst').read()
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='RPiHTTPServer',
author='Maurizio Manetti',
author_email='[email protected]',
version='0.4.2',
url='http://github.com/mauntrelio/RPiHTTPServer',
long_description=long_description,
description='HTTP server and request handler '
'built on top of BaseHTTPServer '
'intended for Raspberry Pi projects '
'with a web interface',
zip_safe=False,
py_modules=['RPiHTTPServer'],
license='MIT'
)
| mauntrelio/RPiHTTPServer | setup.py | Python | mit | 794 |
import pa2checker
create_type_annotation = pa2checker.create_type_annotation
update_ontology_utils = pa2checker.update_ontology_utils
recompile_checker_framework = pa2checker.recompile_checker_framework
revert_checker_source = pa2checker.revert_checker_source
| aas-integration/integration-test | pa2checker/__init__.py | Python | mit | 261 |
import shutil
import subprocess
from tut.tools import tool
class PythonTool(tool.LangTool):
def resolve_version(self, version_identifier):
version = None
if version_identifier in {'default', 'python'}:
version = self._get_ver_from_env('python')
elif version_identifier in {'python3', '3', 3}:
version = self._get_ver_from_env('python3')
elif version_identifier in {'python2', '2', 2}:
version = self._get_ver_from_env('python2')
else:
version = str(version_identifier)
return version
@staticmethod
def _get_ver_from_env(version_string):
return subprocess.check_output(
[shutil.which(version_string), '--version'])[-5:]
@staticmethod
def is_newer(v1, v2):
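        # True when v2 is a newer (larger) version than v1, padding v1 with
        # zeros when it is shorter, e.g. is_newer("3.4", "3.5.1") compares
        # [3, 4, 0] < [3, 5, 1] and returns True.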
v1_split = [int(x) for x in v1.split('.')]
v2_split = [int(x) for x in v2.split('.')]
if len(v1_split) < len(v2_split):
v1_split.extend([0] * (len(v2_split) - len(v1_split)))
return v1_split < v2_split
def initialize_environment(self):
"""Ensure python is availible in our environment.
"""
python_version = self.project.env.run('python --version').strip()[-5:]
if self.version[0] != python_version[0]:
            raise EnvironmentError('Your environment was instantiated with '
                                   'python2 but you requested python3')
if not self.is_newer(self.version, python_version):
            raise EnvironmentError('Your environment was instantiated with '
                                   'an older version of python than you expected')
| joshuamorton/tut | tut/tools/lang.py | Python | mit | 1,621 |
import cgi
import errno
import mimetypes
import os
import posixpath
import re
import shutil
import stat
import sys
import tempfile
from os import path
import django
from django.core.management.base import BaseCommand, CommandError
from django.core.management.utils import handle_extensions
from django.template import Context, Engine
from django.utils import archive, six
from django.utils.six.moves.urllib.request import urlretrieve
from django.utils.version import get_docs_version
_drive_re = re.compile('^([a-z]):', re.I)
_url_drive_re = re.compile('^([a-z])[:|]', re.I)
class TemplateCommand(BaseCommand):
"""
Copies either a Django application layout template or a Django project
layout template into the specified directory.
:param style: A color style object (see django.core.management.color).
:param app_or_project: The string 'app' or 'project'.
:param name: The name of the application or project.
:param directory: The directory to which the template should be copied.
:param options: The additional variables passed to project or app templates
"""
requires_system_checks = False
    # Can't import settings during this command, because they haven't
# necessarily been created.
can_import_settings = False
# The supported URL schemes
url_schemes = ['http', 'https', 'ftp']
# Can't perform any active locale changes during this command, because
# setting might not be available at all.
leave_locale_alone = True
def add_arguments(self, parser):
parser.add_argument('name', help='Name of the application or project.')
parser.add_argument('directory', nargs='?', help='Optional destination directory')
parser.add_argument('--template',
help='The path or URL to load the template from.')
parser.add_argument('--extension', '-e', dest='extensions',
action='append', default=['py'],
help='The file extension(s) to render (default: "py"). '
'Separate multiple extensions with commas, or use '
'-e multiple times.')
parser.add_argument('--name', '-n', dest='files',
action='append', default=[],
help='The file name(s) to render. '
                                 'Separate multiple file names with commas, or use '
'-n multiple times.')
def handle(self, app_or_project, name, target=None, **options):
self.app_or_project = app_or_project
self.paths_to_remove = []
self.verbosity = options['verbosity']
self.validate_name(name, app_or_project)
# if some directory is given, make sure it's nicely expanded
if target is None:
top_dir = path.join(os.getcwd(), name)
try:
os.makedirs(top_dir)
except OSError as e:
if e.errno == errno.EEXIST:
message = "'%s' already exists" % top_dir
else:
message = e
raise CommandError(message)
else:
top_dir = os.path.abspath(path.expanduser(target))
if not os.path.exists(top_dir):
raise CommandError("Destination directory '%s' does not "
"exist, please create it first." % top_dir)
extensions = tuple(handle_extensions(options['extensions']))
extra_files = []
for file in options['files']:
extra_files.extend(map(lambda x: x.strip(), file.split(',')))
if self.verbosity >= 2:
self.stdout.write("Rendering %s template files with "
"extensions: %s\n" %
(app_or_project, ', '.join(extensions)))
self.stdout.write("Rendering %s template files with "
"filenames: %s\n" %
(app_or_project, ', '.join(extra_files)))
base_name = '%s_name' % app_or_project
base_subdir = '%s_template' % app_or_project
base_directory = '%s_directory' % app_or_project
camel_case_name = 'camel_case_%s_name' % app_or_project
camel_case_value = ''.join(x for x in name.title() if x != '_')
context = Context(dict(options, **{
base_name: name,
base_directory: top_dir,
camel_case_name: camel_case_value,
'docs_version': get_docs_version(),
'django_version': django.__version__,
'unicode_literals': '' if six.PY3 else 'from __future__ import unicode_literals\n\n',
}), autoescape=False)
        # Setup a stub settings environment for template rendering
from django.conf import settings
if not settings.configured:
settings.configure()
template_dir = self.handle_template(options['template'],
base_subdir)
prefix_length = len(template_dir) + 1
for root, dirs, files in os.walk(template_dir):
path_rest = root[prefix_length:]
relative_dir = path_rest.replace(base_name, name)
if relative_dir:
target_dir = path.join(top_dir, relative_dir)
if not path.exists(target_dir):
os.mkdir(target_dir)
for dirname in dirs[:]:
if dirname.startswith('.') or dirname == '__pycache__':
dirs.remove(dirname)
for filename in files:
if filename.endswith(('.pyo', '.pyc', '.py.class')):
# Ignore some files as they cause various breakages.
continue
old_path = path.join(root, filename)
new_path = path.join(top_dir, relative_dir,
filename.replace(base_name, name))
if path.exists(new_path):
raise CommandError("%s already exists, overlaying a "
"project or app into an existing "
"directory won't replace conflicting "
"files" % new_path)
# Only render the Python files, as we don't want to
# accidentally render Django templates files
with open(old_path, 'rb') as template_file:
content = template_file.read()
if filename.endswith(extensions) or filename in extra_files:
content = content.decode('utf-8')
template = Engine().from_string(content)
content = template.render(context)
content = content.encode('utf-8')
with open(new_path, 'wb') as new_file:
new_file.write(content)
if self.verbosity >= 2:
self.stdout.write("Creating %s\n" % new_path)
try:
shutil.copymode(old_path, new_path)
self.make_writeable(new_path)
except OSError:
self.stderr.write(
"Notice: Couldn't set permission bits on %s. You're "
"probably using an uncommon filesystem setup. No "
"problem." % new_path, self.style.NOTICE)
if self.paths_to_remove:
if self.verbosity >= 2:
self.stdout.write("Cleaning up temporary files.\n")
for path_to_remove in self.paths_to_remove:
if path.isfile(path_to_remove):
os.remove(path_to_remove)
else:
shutil.rmtree(path_to_remove)
def handle_template(self, template, subdir):
"""
Determines where the app or project templates are.
Use django.__path__[0] as the default because we don't
know into which directory Django has been installed.
"""
if template is None:
return path.join(django.__path__[0], 'conf', subdir)
else:
if template.startswith('file://'):
template = template[7:]
expanded_template = path.expanduser(template)
expanded_template = path.normpath(expanded_template)
if path.isdir(expanded_template):
return expanded_template
if self.is_url(template):
# downloads the file and returns the path
absolute_path = self.download(template)
else:
absolute_path = path.abspath(expanded_template)
if path.exists(absolute_path):
return self.extract(absolute_path)
raise CommandError("couldn't handle %s template %s." %
(self.app_or_project, template))
def validate_name(self, name, app_or_project):
if name is None:
raise CommandError("you must provide %s %s name" % (
"an" if app_or_project == "app" else "a", app_or_project))
# If it's not a valid directory name.
if not re.search(r'^[_a-zA-Z]\w*$', name):
# Provide a smart error message, depending on the error.
if not re.search(r'^[_a-zA-Z]', name):
message = 'make sure the name begins with a letter or underscore'
else:
message = 'use only numbers, letters and underscores'
raise CommandError("%r is not a valid %s name. Please %s." %
(name, app_or_project, message))
def download(self, url):
"""
Downloads the given URL and returns the file name.
"""
def cleanup_url(url):
tmp = url.rstrip('/')
filename = tmp.split('/')[-1]
if url.endswith('/'):
display_url = tmp + '/'
else:
display_url = url
return filename, display_url
prefix = 'django_%s_template_' % self.app_or_project
tempdir = tempfile.mkdtemp(prefix=prefix, suffix='_download')
self.paths_to_remove.append(tempdir)
filename, display_url = cleanup_url(url)
if self.verbosity >= 2:
self.stdout.write("Downloading %s\n" % display_url)
try:
the_path, info = urlretrieve(url, path.join(tempdir, filename))
except IOError as e:
raise CommandError("couldn't download URL %s to %s: %s" %
(url, filename, e))
used_name = the_path.split('/')[-1]
# Trying to get better name from response headers
content_disposition = info.get('content-disposition')
if content_disposition:
_, params = cgi.parse_header(content_disposition)
guessed_filename = params.get('filename') or used_name
else:
guessed_filename = used_name
# Falling back to content type guessing
ext = self.splitext(guessed_filename)[1]
content_type = info.get('content-type')
if not ext and content_type:
ext = mimetypes.guess_extension(content_type)
if ext:
guessed_filename += ext
# Move the temporary file to a filename that has better
# chances of being recognized by the archive utils
if used_name != guessed_filename:
guessed_path = path.join(tempdir, guessed_filename)
shutil.move(the_path, guessed_path)
return guessed_path
# Giving up
return the_path
def splitext(self, the_path):
"""
Like os.path.splitext, but takes off .tar, too
"""
base, ext = posixpath.splitext(the_path)
if base.lower().endswith('.tar'):
ext = base[-4:] + ext
base = base[:-4]
return base, ext
def extract(self, filename):
"""
        Extracts the given file to a temporary directory and returns
the path of the directory with the extracted content.
"""
prefix = 'django_%s_template_' % self.app_or_project
tempdir = tempfile.mkdtemp(prefix=prefix, suffix='_extract')
self.paths_to_remove.append(tempdir)
if self.verbosity >= 2:
self.stdout.write("Extracting %s\n" % filename)
try:
archive.extract(filename, tempdir)
return tempdir
except (archive.ArchiveException, IOError) as e:
raise CommandError("couldn't extract file %s to %s: %s" %
(filename, tempdir, e))
def is_url(self, template):
"""
Returns True if the name looks like a URL
"""
if ':' not in template:
return False
scheme = template.split(':', 1)[0].lower()
return scheme in self.url_schemes
def make_writeable(self, filename):
"""
Make sure that the file is writeable.
Useful if our source is read-only.
"""
if sys.platform.startswith('java'):
# On Jython there is no os.access()
return
if not os.access(filename, os.W_OK):
st = os.stat(filename)
new_permissions = stat.S_IMODE(st.st_mode) | stat.S_IWUSR
os.chmod(filename, new_permissions)
| bretlowery/snakr | lib/django/core/management/templates.py | Python | bsd-3-clause | 13,259 |
# -*- coding: utf-8 -*-
"""
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, see <http://www.gnu.org/licenses/>.
@author: jeix
"""
from os.path import join
from os.path import exists
from os import makedirs
import re
import sys
import time
import socket, struct
from select import select
from module.utils import save_join
from module.plugins.Hoster import Hoster
class Xdcc(Hoster):
__name__ = "Xdcc"
__version__ = "0.3"
__pattern__ = r'xdcc://.*?(/#?.*?)?/.*?/#?\d+/?' # xdcc://irc.Abjects.net/#channel/[XDCC]|Shit/#0004/
__type__ = "hoster"
__config__ = [
("nick", "str", "Nickname", "pyload"),
("ident", "str", "Ident", "pyloadident"),
("realname", "str", "Realname", "pyloadreal")
]
__description__ = """A Plugin that allows you to download from an IRC XDCC bot"""
__author_name__ = ("jeix")
__author_mail__ = ("[email protected]")
def setup(self):
self.debug = 0 #0,1,2
self.timeout = 30
self.multiDL = False
def process(self, pyfile):
# change request type
self.req = pyfile.m.core.requestFactory.getRequest(self.__name__, type="XDCC")
self.pyfile = pyfile
for i in range(0,3):
try:
nmn = self.doDownload(pyfile.url)
self.log.debug("%s: Download of %s finished." % (self.__name__, nmn))
return
except socket.error, e:
if hasattr(e, "errno"):
errno = e.errno
else:
errno = e.args[0]
if errno in (10054,):
self.log.debug("XDCC: Server blocked our ip, retry in 5 min")
self.setWait(300)
self.wait()
continue
self.fail("Failed due to socket errors. Code: %d" % errno)
self.fail("Server blocked our ip, retry again later manually")
def doDownload(self, url):
self.pyfile.setStatus("waiting") # real link
download_folder = self.config['general']['download_folder']
location = join(download_folder, self.pyfile.package().folder.decode(sys.getfilesystemencoding()))
if not exists(location):
makedirs(location)
m = re.search(r'xdcc://(.*?)/#?(.*?)/(.*?)/#?(\d+)/?', url)
server = m.group(1)
chan = m.group(2)
bot = m.group(3)
pack = m.group(4)
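        # e.g. for "xdcc://irc.Abjects.net/#channel/[XDCC]|Shit/#0004/" this gives
        # server="irc.Abjects.net", chan="channel", bot="[XDCC]|Shit", pack="0004"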
nick = self.getConf('nick')
ident = self.getConf('ident')
real = self.getConf('realname')
temp = server.split(':')
ln = len(temp)
if ln == 2:
host, port = temp
elif ln == 1:
host, port = temp[0], 6667
else:
self.fail("Invalid hostname for IRC Server (%s)" % server)
#######################
# CONNECT TO IRC AND IDLE FOR REAL LINK
dl_time = time.time()
sock = socket.socket()
sock.connect((host, int(port)))
if nick == "pyload":
nick = "pyload-%d" % (time.time() % 1000) # last 3 digits
sock.send("NICK %s\r\n" % nick)
sock.send("USER %s %s bla :%s\r\n" % (ident, host, real))
time.sleep(3)
sock.send("JOIN #%s\r\n" % chan)
sock.send("PRIVMSG %s :xdcc send #%s\r\n" % (bot, pack))
# IRC recv loop
readbuffer = ""
done = False
retry = None
m = None
while True:
# done is set if we got our real link
if done:
break
if retry:
if time.time() > retry:
retry = None
dl_time = time.time()
sock.send("PRIVMSG %s :xdcc send #%s\r\n" % (bot, pack))
else:
if (dl_time + self.timeout) < time.time(): # todo: add in config
sock.send("QUIT :byebye\r\n")
sock.close()
self.fail("XDCC Bot did not answer")
fdset = select([sock], [], [], 0)
if sock not in fdset[0]:
continue
readbuffer += sock.recv(1024)
temp = readbuffer.split("\n")
readbuffer = temp.pop()
for line in temp:
                if self.debug == 2: print "*> " + unicode(line, errors='ignore')
line = line.rstrip()
first = line.split()
if first[0] == "PING":
sock.send("PONG %s\r\n" % first[1])
if first[0] == "ERROR":
self.fail("IRC-Error: %s" % line)
msg = line.split(None, 3)
if len(msg) != 4:
continue
msg = { \
"origin":msg[0][1:], \
"action":msg[1], \
"target":msg[2], \
"text" :msg[3][1:] \
}
if nick == msg["target"][0:len(nick)] and "PRIVMSG" == msg["action"]:
if msg["text"] == "\x01VERSION\x01":
self.log.debug("XDCC: Sending CTCP VERSION.")
sock.send("NOTICE %s :%s\r\n" % (msg['origin'], "pyLoad! IRC Interface"))
elif msg["text"] == "\x01TIME\x01":
self.log.debug("Sending CTCP TIME.")
sock.send("NOTICE %s :%d\r\n" % (msg['origin'], time.time()))
elif msg["text"] == "\x01LAG\x01":
pass # don't know how to answer
if not (bot == msg["origin"][0:len(bot)]
and nick == msg["target"][0:len(nick)]
and msg["action"] in ("PRIVMSG", "NOTICE")):
continue
            if self.debug == 1:
print "%s: %s" % (msg["origin"], msg["text"])
if "You already requested that pack" in msg["text"]:
retry = time.time() + 300
if "you must be on a known channel to request a pack" in msg["text"]:
self.fail("Wrong channel")
m = re.match('\x01DCC SEND (.*?) (\d+) (\d+)(?: (\d+))?\x01', msg["text"])
if m:
done = True
# get connection data
ip = socket.inet_ntoa(struct.pack('L', socket.ntohl(int(m.group(2)))))
port = int(m.group(3))
packname = m.group(1)
                    if m.group(4):
self.req.filesize = int(m.group(4))
self.pyfile.name = packname
filename = save_join(location, packname)
self.log.info("XDCC: Downloading %s from %s:%d" % (packname, ip, port))
self.pyfile.setStatus("downloading")
newname = self.req.download(ip, port, filename, sock, self.pyfile.setProgress)
if newname and newname != filename:
self.log.info("%(name)s saved as %(newname)s" % {"name": self.pyfile.name, "newname": newname})
filename = newname
# kill IRC socket
# sock.send("QUIT :byebye\r\n")
sock.close()
self.lastDownload = filename
return self.lastDownload
| fener06/pyload | module/plugins/hoster/Xdcc.py | Python | gpl-3.0 | 8,399 |
from direct.directnotify.DirectNotifyGlobal import *
from pandac.PandaModules import *
from toontown.building import DoorTypes
from toontown.building.DistributedDoorAI import DistributedDoorAI
from toontown.building.DistributedKartShopInteriorAI import DistributedKartShopInteriorAI
from toontown.toon import NPCToons
if __debug__:
import pdb
class KartShopBuildingAI:
notify = directNotify.newCategory('KartShopBuildingAI')
def __init__(self, air, exteriorZone, interiorZone, blockNumber):
self.air = air
self.exteriorZone = exteriorZone
self.interiorZone = interiorZone
self.setup(blockNumber)
def cleanup(self):
for npc in self.npcs:
npc.requestDelete()
del self.npcs
self.outsideDoor0.requestDelete()
self.outsideDoor1.requestDelete()
self.insideDoor0.requestDelete()
self.insideDoor1.requestDelete()
del self.outsideDoor0
del self.insideDoor0
del self.outsideDoor1
del self.insideDoor1
self.kartShopInterior.requestDelete()
del self.kartShopInterior
def setup(self, blockNumber):
self.kartShopInterior = DistributedKartShopInteriorAI(
blockNumber, self.air, self.interiorZone)
self.kartShopInterior.generateWithRequired(self.interiorZone)
self.npcs = NPCToons.createNpcsInZone(self.air, self.interiorZone)
self.outsideDoor0 = DistributedDoorAI(
self.air, blockNumber, DoorTypes.EXT_KS, doorIndex=1)
self.outsideDoor1 = DistributedDoorAI(
self.air, blockNumber, DoorTypes.EXT_KS, doorIndex=2)
self.insideDoor0 = DistributedDoorAI(
self.air, blockNumber, DoorTypes.INT_KS, doorIndex=1)
self.insideDoor1 = DistributedDoorAI(
self.air, blockNumber, DoorTypes.INT_KS, doorIndex=2)
self.outsideDoor0.setOtherDoor(self.insideDoor0)
self.outsideDoor1.setOtherDoor(self.insideDoor1)
self.insideDoor0.setOtherDoor(self.outsideDoor0)
self.insideDoor1.setOtherDoor(self.outsideDoor1)
self.outsideDoor0.zoneId = self.exteriorZone
self.outsideDoor1.zoneId = self.exteriorZone
self.insideDoor0.zoneId = self.interiorZone
self.insideDoor1.zoneId = self.interiorZone
self.outsideDoor0.generateWithRequired(self.exteriorZone)
self.outsideDoor1.generateWithRequired(self.exteriorZone)
self.insideDoor0.generateWithRequired(self.interiorZone)
self.insideDoor1.generateWithRequired(self.interiorZone)
self.outsideDoor0.sendUpdate('setDoorIndex', [self.outsideDoor0.getDoorIndex()])
self.outsideDoor1.sendUpdate('setDoorIndex', [self.outsideDoor1.getDoorIndex()])
self.insideDoor0.sendUpdate('setDoorIndex', [self.insideDoor0.getDoorIndex()])
self.insideDoor1.sendUpdate('setDoorIndex', [self.insideDoor1.getDoorIndex()])
| Spiderlover/Toontown | toontown/building/KartShopBuildingAI.py | Python | mit | 2,918 |
# coding=utf-8
#
# BSD 3-Clause License
#
# Copyright (c) 2016-21, University of Liverpool
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Command line object for bbcontacts contact filtering application
"""
__author__ = "Felix Simkovic"
__date__ = "10 Aug 2016"
__version__ = "0.13.1"
from Bio.Application import _Argument
from Bio.Application import _Option
from Bio.Application import _Switch
from Bio.Application import AbstractCommandline
class BbcontactsCommandline(AbstractCommandline):
"""
Command line object for bbcontacts [#]_ contact filtering application
https://github.com/soedinglab/bbcontacts
The bbcontacts program is a Python program predicting residue-level
contacts between beta-strands by detecting patterns in matrices of
predicted couplings. bbcontacts can make use of a secondary structure
assignment or a secondary structure prediction.
.. [#] Andreani J., Söding J. (2015). bbcontacts: prediction of beta-strand
pairing from direct coupling patterns. Bioinformatics 31(11), 1729-1737.
Examples
--------
To filter a contact map using a Multiple Sequence Alignment in
CCMpred format, use:
>>> from conkit.applications import BbcontactsCommandline
>>> bbcontacts_cline = BbcontactsCommandline(
... matfile='test.mat', diversity_score=0.482, prefix='test'
... )
>>> print(bbcontacts_cline)
bbcontacts
You would typically run the command line with :func:`bbcontacts_cline` or via
the :mod:`~subprocess` module.
Note
----
Installation instructions are available via the `GitHub repository
<https://github.com/soedinglab/bbcontacts>`_.
"""
def __init__(self, cmd="bbcontacts", **kwargs):
# TODO: figure a way to group CL arguments as in `mutually_exclusive_group`
if "dssp_file" in list(kwargs.keys()) and "psipred_file" in list(kwargs.keys()):
msg = "Provide only one of [dssp_file|psipred_file]!"
raise RuntimeError(msg)
elif not ("dssp_file" in list(kwargs.keys()) or "psipred_file" in list(kwargs.keys())):
msg = "Provide one of [dssp_file|psipred_file]!"
raise RuntimeError(msg)
self.parameters = [
_Option(["-c", "config_file"], "bbcontacts configuration file", filename=True, equate=False),
_Option(
["-s", "smoothing_size"],
"Perform local background correction of the coupling matrix "
"before decoding: from each coupling, subtract the average "
"coupling (smoothed background) over an area extending by "
"SMOOTHINGSIZE in each direction [default=10, use 0 for no "
"local background correction]",
equate=False,
),
_Switch(
["-l", "long_predictions"],
"Turn off (slow) prediction-shortening mode (this mode is on "
"by default but will only get triggered when long predictions occur)",
),
_Option(
["-n", "pdb_name"],
"Provide a PDB identifier (when also using -e, this will be the "
"PDB name to look for in EVALUATIONFILE)",
equate=False,
),
_Option(
["-e", "evaluation_file"],
"Provide a file containing the true contacts (BetaSheet916.dat, "
"BetaSheet1452.dat or same format) for evaluation",
filename=True,
equate=False,
),
_Argument(["matfile"], "CCMpred-like coupling matrix", filename=True, is_required=True),
_Argument(["diversity_score"], "sequence-dependent diversity score", is_required=True),
_Argument(["prefix"], "output prefix", is_required=True),
_Option(["-d", "dssp_file"], "DSSP secondary structure prediction file", filename=True, equate=False),
_Option(["-p", "psipred_file"], "PSIPRED secondary structure prediction file", filename=True, equate=False),
]
AbstractCommandline.__init__(self, cmd, **kwargs)
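

if __name__ == "__main__":
    # Minimal usage sketch: the file names are hypothetical, and one of
    # dssp_file/psipred_file is mandatory for this wrapper.
    bbcontacts_cline = BbcontactsCommandline(
        matfile="toxd.mat", diversity_score=0.482, prefix="toxd", dssp_file="toxd.dssp"
    )
    print(bbcontacts_cline)  # the full command line that would be executed
    # stdout, stderr = bbcontacts_cline()  # uncomment to actually run bbcontacts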
| rigdenlab/conkit | conkit/applications/bbcontacts.py | Python | bsd-3-clause | 5,619 |
# -*- coding: utf-8 -*-
#
# Copyright 2004-2006 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""Convert Qt Linguist (.ts) files to Gettext PO localization files.
See: http://docs.translatehouse.org/projects/translate-toolkit/en/latest/commands/ts2po.html
for examples and usage instructions.
"""
from translate.storage import po, ts
class ts2po(object):
def __init__(self, duplicatestyle="msgctxt", pot=False):
self.duplicatestyle = duplicatestyle
self.pot = pot
def convertmessage(self, contextname, messagenum, source, target, msgcomments, transtype):
"""makes a pounit from the given message"""
thepo = po.pounit(encoding="UTF-8")
thepo.addlocation("%s#%d" % (contextname, messagenum))
thepo.source = source
if not self.pot:
thepo.target = target
if len(msgcomments) > 0:
thepo.addnote(msgcomments)
if transtype == "unfinished" and thepo.istranslated():
thepo.markfuzzy()
if transtype == "obsolete":
# This should use the Gettext obsolete method but it would require quite a bit of work
thepo.addnote("(obsolete)", origin="developer")
# using the fact that -- quote -- "(this is nonsense)"
return thepo
def convertfile(self, inputfile):
"""converts a .ts file to .po format"""
tsfile = ts.QtTsParser(inputfile)
thetargetfile = po.pofile()
for contextname, messages in tsfile.iteritems():
messagenum = 0
for message in messages:
messagenum += 1
source = tsfile.getmessagesource(message)
translation = tsfile.getmessagetranslation(message)
comment = tsfile.getmessagecomment(message)
transtype = tsfile.getmessagetype(message)
thepo = self.convertmessage(contextname, messagenum, source, translation, comment, transtype)
thetargetfile.addunit(thepo)
thetargetfile.removeduplicates(self.duplicatestyle)
return thetargetfile
def convertts(inputfile, outputfile, templates, pot=False, duplicatestyle="msgctxt"):
"""reads in stdin using fromfileclass, converts using convertorclass, writes to stdout"""
convertor = ts2po(duplicatestyle=duplicatestyle, pot=pot)
outputstore = convertor.convertfile(inputfile)
if outputstore.isempty():
return 0
outputstore.serialize(outputfile)
return 1
def main(argv=None):
from translate.convert import convert
formats = {"ts": ("po", convertts)}
parser = convert.ConvertOptionParser(formats, usepots=True, description=__doc__)
parser.add_duplicates_option()
parser.passthrough.append("pot")
parser.run(argv)
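

def _example_convert_ts_to_po():
    """Minimal programmatic sketch of this converter (never called here).

    The file names are hypothetical; command-line use goes through main() above.
    """
    with open("app_de.ts", "rb") as ts_file, open("app_de.po", "wb") as po_file:
        convertts(ts_file, po_file, None)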
| phlax/translate | translate/convert/ts2po.py | Python | gpl-2.0 | 3,410 |
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
version = '2.0.0rc3.dev0'
long_description = (
open('README.rst').read() + '\n' +
open('CHANGES.rst').read()+ '\n'
)
setup(
name='plone.app.mosaic',
version=version,
description='Plone Mosaic',
long_description=long_description,
# Get more strings from
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Plone',
'Framework :: Plone :: 5.0',
'License :: OSI Approved :: GNU General Public License v2 (GPLv2)',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
],
keywords='',
author='',
author_email='',
url='https://github.com/plone/plone.app.mosaic',
license='gpl',
packages=find_packages('src'),
package_dir={'': 'src'},
namespace_packages=['plone', 'plone.app'],
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'plone.api',
'plone.subrequest>=1.7.0',
'plone.tiles>=1.8.0',
'plone.app.blocks>=4.0.0rc1',
'plone.app.tiles>=3.0.0',
'plone.app.drafts>=1.1.0',
'plone.app.standardtiles>=2.0.0rc1',
'Products.CMFPlone>=5.0.4'
],
extras_require={'test': [
'plone.app.testing',
'plone.app.contenttypes',
'plone.app.widgets>=1.8.0.dev0',
'plone.app.robotframework',
'robotframework-selenium2library',
'robotframework-selenium2screenshots'
]},
entry_points="""
# -*- Entry points: -*-
[z3c.autoinclude.plugin]
target = plone
""",
)
| davilima6/plone.app.mosaic | setup.py | Python | gpl-2.0 | 1,916 |
# Copyright 2015 Antiun Ingenieria S.L. - Javier Iniesta
# Copyright 2016 Tecnativa S.L. - Vicent Cubells
# Copyright 2016 Tecnativa S.L. - Pedro M. Baeza
# Copyright 2018 Eficent Business and IT Consulting Services, S.L.
# Copyright 2019 Tecnativa - Cristina Martin R.
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from odoo import _, api, exceptions, fields, models
class ResPartnerIndustry(models.Model):
_inherit = "res.partner.industry"
_order = "parent_path"
_parent_order = "name"
_parent_store = True
name = fields.Char(required=True)
parent_id = fields.Many2one(
comodel_name="res.partner.industry", ondelete="restrict"
)
child_ids = fields.One2many(
comodel_name="res.partner.industry", inverse_name="parent_id", string="Children"
)
parent_path = fields.Char(index=True)
def name_get(self):
def get_names(cat):
""" Return the list [cat.name, cat.parent_id.name, ...] """
res = []
while cat:
res.insert(0, cat.name)
cat = cat.parent_id
return res
return [(cat.id, " / ".join(get_names(cat))) for cat in self]
@api.constrains("parent_id")
def _check_parent_id(self):
if not self._check_recursion():
raise exceptions.ValidationError(
_("Error! You cannot create recursive industries.")
)
@api.constrains("name", "parent_id")
def _check_uniq_name(self):
if (
self.search_count(
[("name", "=", self.name), ("parent_id", "=", self.parent_id.id)]
)
> 1
):
raise exceptions.ValidationError(
_("Error! Industry with same name and parent already exists.")
)
def copy(self, default=None):
default = default or {}
if "name" not in default or default["name"] == self.name:
default["name"] = self.name + " 2"
return super(ResPartnerIndustry, self).copy(default=default)
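

def _example_industry_hierarchy(env):
    """Illustrative sketch only (never called by the module).

    Assumes an Odoo environment ``env``, e.g. from an ``odoo shell`` session;
    the industry names are made up.
    """
    parent = env["res.partner.industry"].create({"name": "Manufacturing"})
    child = env["res.partner.industry"].create(
        {"name": "Automotive", "parent_id": parent.id}
    )
    # name_get() joins the parent chain, e.g. [(child.id, "Manufacturing / Automotive")]
    return child.name_get()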
| OCA/partner-contact | partner_industry_secondary/models/res_partner_industry.py | Python | agpl-3.0 | 2,067 |
#!/usr/bin/env python
"""Module for setting up statistical models"""
from __future__ import division
import numpy as np
import pymc as mc
import evac_flow_exit_mu_single_graphics as graphics
import data_evac
def model1():
"""
PyMC configuration with Model 1.
    Power-law fit of alpha against beta: alpha = theta[0]*beta**theta[1]
"""
# Priors
theta = mc.Uniform('theta',
lower=[-10.0, -10.0],
upper=[ 10.0, 10.0],
value=[ 0.1, 0.1])
sigma = mc.Uniform('sigma', lower=0., upper=100., value=1.)
# Model
@mc.deterministic
def y_mean(theta=theta,
beta = data_evac.data_alphabeta['beta']):
return theta[0]*beta**theta[1]
# Likelihood
# The likelihood is N(y_mean, sigma^2), where sigma
# is pulled from a uniform distribution.
y_obs = mc.Normal('y_obs',
value=data_evac.data_alphabeta['alpha'],
mu=y_mean,
tau=sigma**-2,
observed=True)
return vars()
def model2():
"""
PyMC configuration with Model 2.
    Power-law fit of beta against alpha: beta = theta[0]*alpha**theta[1]
"""
# Priors
theta = mc.Uniform('theta',
lower=[-10.0, -10.0],
upper=[ 10.0, 10.0],
value=[ 0.1, 0.1])
sigma = mc.Uniform('sigma', lower=0., upper=100., value=1.)
# Model
@mc.deterministic
def y_mean(theta=theta,
alpha = data_evac.data_alphabeta['alpha']):
return theta[0]*alpha**theta[1]
# Likelihood
# The likelihood is N(y_mean, sigma^2), where sigma
# is pulled from a uniform distribution.
y_obs = mc.Normal('y_obs',
value=data_evac.data_alphabeta['beta'],
mu=y_mean,
tau=sigma**-2,
observed=True)
return vars()
def model3():
"""
    PyMC configuration with Model 3.
    Linear fit of alpha against beta: alpha = theta[0]*beta + theta[1]
"""
# Priors
theta = mc.Uniform('theta',
lower=[-10.0, -10.0],
upper=[ 10.0, 10.0],
value=[ 0.1, 0.1])
sigma = mc.Uniform('sigma', lower=0., upper=100., value=1.)
# Model
@mc.deterministic
def y_mean(theta=theta,
beta = data_evac.data_alphabeta['beta']):
return theta[0]*beta + theta[1]
# Likelihood
# The likelihood is N(y_mean, sigma^2), where sigma
# is pulled from a uniform distribution.
y_obs = mc.Normal('y_obs',
value=data_evac.data_alphabeta['alpha'],
mu=y_mean,
tau=sigma**-2,
observed=True)
return vars()
def model4():
"""
    PyMC configuration with Model 4.
    Linear fit of beta against alpha: beta = theta[0]*alpha + theta[1]
"""
# Priors
theta = mc.Uniform('theta',
lower=[-10.0, -10.0],
upper=[ 10.0, 10.0],
value=[ 0.1, 0.1])
sigma = mc.Uniform('sigma', lower=0., upper=100., value=1.)
# Model
@mc.deterministic
def y_mean(theta=theta,
alpha = data_evac.data_alphabeta['alpha']):
return theta[0]*alpha + theta[1]
# Likelihood
# The likelihood is N(y_mean, sigma^2), where sigma
# is pulled from a uniform distribution.
y_obs = mc.Normal('y_obs',
value=data_evac.data_alphabeta['beta'],
mu=y_mean,
tau=sigma**-2,
observed=True)
return vars()
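

if __name__ == '__main__':
    # Minimal sampling sketch for one of the model factories above (PyMC 2 API);
    # the iteration counts are illustrative only.
    mcmc = mc.MCMC(model1())
    mcmc.sample(iter=20000, burn=5000, thin=10)
    print(mcmc.stats()['theta']['mean'])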
| koverholt/bayes-fire | Example_Cases/Evac_Stairs/Scripts/evac_alphabeta_paper_model.py | Python | bsd-3-clause | 3,889 |
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import http.server
import itertools
import json
import logging
import mimetypes
import os
import pkgutil
import re
from builtins import bytes, object, open, range, str, zip
from collections import namedtuple
from datetime import date, datetime
from textwrap import dedent
import pystache
from future.moves.urllib.parse import parse_qs, urlencode, urlsplit, urlunparse
from pants.base.build_environment import get_buildroot
from pants.base.mustache import MustacheRenderer
from pants.base.run_info import RunInfo
from pants.pantsd.process_manager import ProcessManager
logger = logging.getLogger(__name__)
# Google Prettyprint plugin files.
PPP_RE = re.compile(r"^lang-.*\.js$")
class PantsHandler(http.server.BaseHTTPRequestHandler):
"""A handler that demultiplexes various pants reporting URLs."""
def __init__(self, settings, renderer, request, client_address, server):
self._settings = settings # An instance of ReportingServer.Settings.
self._root = self._settings.root
self._renderer = renderer
self._client_address = client_address
# The underlying handlers for specific URL prefixes.
self._GET_handlers = [
('/runs/', self._handle_runs), # Show list of known pants runs.
('/run/', self._handle_run), # Show a report for a single pants run.
('/browse/', self._handle_browse), # Browse filesystem under build root.
('/content/', self._handle_content), # Show content of file.
('/assets/', self._handle_assets), # Statically serve assets (css, js etc.)
('/poll', self._handle_poll), # Handle poll requests for raw file content.
('/latestrunid', self._handle_latest_runid), # Return id of latest pants run.
('/favicon.ico', self._handle_favicon) # Return favicon.
]
# TODO(#6071): BaseHTTPServer.BaseHTTPRequestHandler is an old-style class, so we must
# invoke its __init__ like this.
# TODO: Replace this entirely with a proper server as part of the pants daemon.
http.server.BaseHTTPRequestHandler.__init__(self, request, client_address, server)
def do_GET(self):
"""GET method implementation for BaseHTTPRequestHandler."""
if not self._client_allowed():
return
try:
(_, _, path, query, _) = urlsplit(self.path)
params = parse_qs(query)
# Give each handler a chance to respond.
for prefix, handler in self._GET_handlers:
if self._maybe_handle(prefix, handler, path, params):
return
# If no path specified, default to showing the list of all runs.
if path == '/':
self._handle_runs('', {})
return
      content = 'Invalid GET request {}'.format(self.path).encode('utf-8')
self._send_content(content, 'text/html', code=400)
except (IOError, ValueError):
pass # Printing these errors gets annoying, and there's nothing to do about them anyway.
#sys.stderr.write('Invalid GET request {}'.format(self.path))
def _handle_runs(self, relpath, params):
"""Show a listing of all pants runs since the last clean-all."""
runs_by_day = self._partition_runs_by_day()
args = self._default_template_args('run_list.html')
args['runs_by_day'] = runs_by_day
content = self._renderer.render_name('base.html', args).encode("utf-8")
self._send_content(content, 'text/html')
_collapsible_fmt_string = dedent("""
<div class="{class_prefix}" id="{id}">
<div class="{class_prefix}-header toggle-header" id="{id}-header">
<div class="{class_prefix}-header-icon toggle-header-icon" onclick="pants.collapsible.toggle('{id}')">
<i id="{id}-icon" class="visibility-icon icon-large icon-caret-right hidden"></i>
</div>
<div class="{class_prefix}-header-text toggle-header-text">
[<span id="{id}-header-text">{title}</span>]
</div>
</div>
<div class="{class_prefix}-content toggle-content nodisplay" id="{id}-content"></div>
</div>
""")
def _handle_run(self, relpath, params):
"""Show the report for a single pants run."""
args = self._default_template_args('run.html')
run_id = relpath
run_info = self._get_run_info_dict(run_id)
if run_info is None:
args['no_such_run'] = relpath
if run_id == 'latest':
args['is_latest'] = 'none'
else:
report_abspath = run_info['default_report']
report_relpath = os.path.relpath(report_abspath, self._root)
report_dir = os.path.dirname(report_relpath)
self_timings_path = os.path.join(report_dir, 'self_timings')
cumulative_timings_path = os.path.join(report_dir, 'cumulative_timings')
artifact_cache_stats_path = os.path.join(report_dir, 'artifact_cache_stats')
run_info['timestamp_text'] = \
datetime.fromtimestamp(float(run_info['timestamp'])).strftime('%H:%M:%S on %A, %B %d %Y')
timings_and_stats = '\n'.join([
self._collapsible_fmt_string.format(id='cumulative-timings-collapsible',
title='Cumulative timings', class_prefix='aggregated-timings'),
self._collapsible_fmt_string.format(id='self-timings-collapsible',
title='Self timings', class_prefix='aggregated-timings'),
self._collapsible_fmt_string.format(id='artifact-cache-stats-collapsible',
title='Artifact cache stats', class_prefix='artifact-cache-stats')
])
args.update({'run_info': run_info,
'report_path': report_relpath,
'self_timings_path': self_timings_path,
'cumulative_timings_path': cumulative_timings_path,
'artifact_cache_stats_path': artifact_cache_stats_path,
'timings_and_stats': timings_and_stats})
if run_id == 'latest':
args['is_latest'] = run_info['id']
content = self._renderer.render_name('base.html', args).encode("utf-8")
self._send_content(content, 'text/html')
def _handle_browse(self, relpath, params):
"""Handle requests to browse the filesystem under the build root."""
abspath = os.path.normpath(os.path.join(self._root, relpath))
if not abspath.startswith(self._root):
raise ValueError # Prevent using .. to get files from anywhere other than root.
if os.path.isdir(abspath):
self._serve_dir(abspath, params)
elif os.path.isfile(abspath):
self._serve_file(abspath, params)
def _handle_content(self, relpath, params):
"""Render file content for pretty display."""
abspath = os.path.normpath(os.path.join(self._root, relpath))
if os.path.isfile(abspath):
with open(abspath, 'rb') as infile:
content = infile.read()
else:
content = 'No file found at {}'.format(abspath).encode('utf-8')
content_type = mimetypes.guess_type(abspath)[0] or 'text/plain'
if not content_type.startswith('text/') and not content_type == 'application/xml':
# Binary file. Display it as hex, split into lines.
n = 120 # Display lines of this max size.
content = repr(content)[1:-1] # Will escape non-printables etc, dropping surrounding quotes.
content = '\n'.join([content[i:i + n] for i in range(0, len(content), n)])
prettify = False
prettify_extra_langs = []
else:
prettify = True
if self._settings.assets_dir:
prettify_extra_dir = os.path.join(self._settings.assets_dir, 'js', 'prettify_extra_langs')
prettify_extra_langs = [{'name': x} for x in os.listdir(prettify_extra_dir)]
else:
# TODO: Find these from our package, somehow.
prettify_extra_langs = []
linenums = True
args = {'prettify_extra_langs': prettify_extra_langs, 'content': content,
'prettify': prettify, 'linenums': linenums}
content = self._renderer.render_name('file_content.html', args).encode("utf-8")
self._send_content(content, 'text/html')
def _handle_assets(self, relpath, params):
"""Statically serve assets: js, css etc."""
if self._settings.assets_dir:
abspath = os.path.normpath(os.path.join(self._settings.assets_dir, relpath))
with open(abspath, 'rb') as infile:
content = infile.read()
else:
content = pkgutil.get_data(__name__, os.path.join('assets', relpath))
content_type = mimetypes.guess_type(relpath)[0] or 'text/plain'
self._send_content(content, content_type)
def _handle_poll(self, relpath, params):
"""Handle poll requests for raw file contents."""
request = json.loads(params.get('q')[0])
ret = {}
# request is a polling request for multiple files. For each file:
# - id is some identifier assigned by the client, used to differentiate the results.
# - path is the file to poll.
# - pos is the last byte position in that file seen by the client.
for poll in request:
_id = poll.get('id', None)
path = poll.get('path', None)
pos = poll.get('pos', 0)
if path:
abspath = os.path.normpath(os.path.join(self._root, path))
if os.path.isfile(abspath):
with open(abspath, 'rb') as infile:
if pos:
infile.seek(pos)
content = infile.read()
ret[_id] = content.decode("utf-8")
content = json.dumps(ret).encode("utf-8")
self._send_content(content, 'application/json')
def _handle_latest_runid(self, relpath, params):
"""Handle request for the latest run id.
Used by client-side javascript to detect when there's a new run to display.
"""
latest_runinfo = self._get_run_info_dict('latest')
if latest_runinfo is None:
self._send_content(b'none', 'text/plain')
else:
self._send_content(latest_runinfo['id'].encode("utf-8"), 'text/plain')
def _handle_favicon(self, relpath, params):
"""Statically serve the favicon out of the assets dir."""
self._handle_assets('favicon.ico', params)
def _partition_runs_by_day(self):
"""Split the runs by day, so we can display them grouped that way."""
run_infos = self._get_all_run_infos()
for x in run_infos:
ts = float(x['timestamp'])
x['time_of_day_text'] = datetime.fromtimestamp(ts).strftime('%H:%M:%S')
def date_text(dt):
delta_days = (date.today() - dt).days
if delta_days == 0:
return 'Today'
elif delta_days == 1:
return 'Yesterday'
elif delta_days < 7:
return dt.strftime('%A') # Weekday name.
else:
d = dt.day % 10
suffix = 'st' if d == 1 else 'nd' if d == 2 else 'rd' if d == 3 else 'th'
return dt.strftime('%B %d') + suffix # E.g., October 30th.
keyfunc = lambda x: datetime.fromtimestamp(float(x['timestamp']))
sorted_run_infos = sorted(run_infos, key=keyfunc, reverse=True)
return [{'date_text': date_text(dt), 'run_infos': [x for x in infos]}
for dt, infos in itertools.groupby(sorted_run_infos, lambda x: keyfunc(x).date())]
def _get_run_info_dict(self, run_id):
"""Get the RunInfo for a run, as a dict."""
run_info_path = os.path.join(self._settings.info_dir, run_id, 'info')
if os.path.exists(run_info_path):
# We copy the RunInfo as a dict, so we can add stuff to it to pass to the template.
return RunInfo(run_info_path).get_as_dict()
else:
return None
def _get_all_run_infos(self):
"""Find the RunInfos for all runs since the last clean-all."""
info_dir = self._settings.info_dir
if not os.path.isdir(info_dir):
return []
paths = [os.path.join(info_dir, x) for x in os.listdir(info_dir)]
# We copy the RunInfo as a dict, so we can add stuff to it to pass to the template.
# We filter only those that have a timestamp, to avoid a race condition with writing
# that field.
return [d for d in
[RunInfo(os.path.join(p, 'info')).get_as_dict() for p in paths
if os.path.isdir(p) and not os.path.islink(p)]
if 'timestamp' in d]
def _serve_dir(self, abspath, params):
"""Show a directory listing."""
relpath = os.path.relpath(abspath, self._root)
breadcrumbs = self._create_breadcrumbs(relpath)
entries = [{'link_path': os.path.join(relpath, e), 'name': e} for e in os.listdir(abspath)]
args = self._default_template_args('dir.html')
args.update({'root_parent': os.path.dirname(self._root),
'breadcrumbs': breadcrumbs,
'entries': entries,
'params': params})
content = self._renderer.render_name('base.html', args).encode("utf-8")
self._send_content(content, 'text/html')
def _serve_file(self, abspath, params):
"""Show a file.
The actual content of the file is rendered by _handle_content.
"""
relpath = os.path.relpath(abspath, self._root)
breadcrumbs = self._create_breadcrumbs(relpath)
link_path = urlunparse(['', '', relpath, '', urlencode(params), ''])
args = self._default_template_args('file.html')
args.update({'root_parent': os.path.dirname(self._root),
'breadcrumbs': breadcrumbs,
'link_path': link_path})
content = self._renderer.render_name('base.html', args).encode("utf-8")
self._send_content(content, 'text/html')
def _send_content(self, content, content_type, code=200):
"""Send content to client."""
assert isinstance(content, bytes)
self.send_response(code)
self.send_header('Content-Type', content_type)
self.send_header('Content-Length', str(len(content)))
self.end_headers()
self.wfile.write(content)
def _client_allowed(self):
"""Check if client is allowed to connect to this server."""
client_ip = self._client_address[0]
if not client_ip in self._settings.allowed_clients and \
not 'ALL' in self._settings.allowed_clients:
content = 'Access from host {} forbidden.'.format(client_ip).encode('utf-8')
self._send_content(content, 'text/html')
return False
return True
def _maybe_handle(self, prefix, handler, path, params, data=None):
"""Apply the handler if the prefix matches."""
if path.startswith(prefix):
relpath = path[len(prefix):]
if data:
handler(relpath, params, data)
else:
handler(relpath, params)
return True
else:
return False
def _create_breadcrumbs(self, relpath):
"""Create filesystem browsing breadcrumb navigation.
That is, make each path segment into a clickable element that takes you to that dir.
"""
if relpath == '.':
breadcrumbs = []
else:
path_parts = [os.path.basename(self._root)] + relpath.split(os.path.sep)
path_links = ['/'.join(path_parts[1:i + 1]) for i, name in enumerate(path_parts)]
breadcrumbs = [{'link_path': link_path, 'name': name}
for link_path, name in zip(path_links, path_parts)]
return breadcrumbs
def _default_template_args(self, content_template):
"""Initialize template args."""
def include(text, args):
template_name = pystache.render(text, args)
return self._renderer.render_name(template_name, args)
# Our base template calls include on the content_template.
ret = {'content_template': content_template}
ret['include'] = lambda text: include(text, ret)
return ret
def log_message(self, fmt, *args):
"""Silence BaseHTTPRequestHandler's logging."""
class ReportingServer(object):
"""Reporting Server HTTP server."""
class Settings(namedtuple('Settings', ['info_dir', 'template_dir', 'assets_dir', 'root',
'allowed_clients'])):
"""Reporting server settings.
info_dir: path to dir containing RunInfo files.
template_dir: location of mustache template files. If None, the templates
embedded in our package are used.
assets_dir: location of assets (js, css etc.) If None, the assets
embedded in our package are used.
root: build root.
allowed_clients: list of ips or ['ALL'].
"""
def __init__(self, port, settings):
renderer = MustacheRenderer(settings.template_dir, __name__)
class MyHandler(PantsHandler):
def __init__(self, request, client_address, server):
PantsHandler.__init__(self, settings, renderer, request, client_address, server)
self._httpd = http.server.HTTPServer(('', port), MyHandler)
self._httpd.timeout = 0.1 # Not the network timeout, but how often handle_request yields.
def server_port(self):
return self._httpd.server_port
def start(self):
self._httpd.serve_forever()
class ReportingServerManager(ProcessManager):
def __init__(self, context=None, options=None):
ProcessManager.__init__(self, name='reporting_server')
self.context = context
self.options = options
def post_fork_child(self):
"""Post-fork() child callback for ProcessManager.daemonize()."""
# The server finds run-specific info dirs by looking at the subdirectories of info_dir,
# which is conveniently and obviously the parent dir of the current run's info dir.
info_dir = os.path.dirname(self.context.run_tracker.run_info_dir)
settings = ReportingServer.Settings(info_dir=info_dir,
root=get_buildroot(),
template_dir=self.options.template_dir,
assets_dir=self.options.assets_dir,
allowed_clients=self.options.allowed_clients)
server = ReportingServer(self.options.port, settings)
self.write_socket(server.server_port())
# Block forever.
server.start()
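

if __name__ == '__main__':
  # Minimal sketch of the client side of the /poll endpoint handled above; the
  # report path is hypothetical. A GET of this URL against a running reporting
  # server returns {'buildlog': '<new bytes of that file>'} as JSON.
  poll_request = [{'id': 'buildlog', 'path': 'reports/latest/build.log', 'pos': 0}]
  print('/poll?' + urlencode({'q': json.dumps(poll_request)}))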
| twitter/pants | src/python/pants/reporting/reporting_server.py | Python | apache-2.0 | 17,903 |
import logging
import os.path
import yaml
from kpm.manifest import ManifestBase
__all__ = ['ManifestChart']
logger = logging.getLogger(__name__)
MANIFEST_FILES = ["Chart.yaml", "Chart.yml"]
class ManifestChart(ManifestBase):
def __init__(self, package=None, values=None):
self.values = values
if package is None:
self._load_from_path()
else:
self._load_yaml(package.manifest)
def _load_yaml(self, yamlstr):
try:
self.update(yaml.load(yamlstr))
except yaml.YAMLError, exc:
print "Error in configuration file:"
if hasattr(exc, 'problem_mark'):
mark = exc.problem_mark
print "Error position: (%s:%s)" % (mark.line + 1, mark.column + 1)
raise exc
    def _load_from_path(self):
        mfile = None
        for f in MANIFEST_FILES:
            if os.path.exists(f):
                mfile = f
                break
        if mfile is None:
            raise IOError("no Chart.yaml or Chart.yml found in the current directory")
        with open(mfile) as f:
            self._load_yaml(f.read())
@property
def keywords(self):
return self.get("keywords", [])
@property
def engine(self):
return self.get("engine", "gotpl")
@property
def home(self):
return self.get("home", "")
@property
def description(self):
return self.get("description", "")
@property
def version(self):
return self.get("version", "")
@property
def maintainers(self):
return self.get("maintainers", [])
@property
def sources(self):
return self.get("sources", [])
@property
def name(self):
return self.get("name", [])
def metadata(self):
return {"maintainers": self.maintainers, "source": self.sources}
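

if __name__ == '__main__':
    # Minimal parsing sketch: the real `package` comes from kpm and only needs a
    # `manifest` attribute holding Chart.yaml text; the chart below is made up.
    class _FakePackage(object):
        manifest = ("name: jenkins\n"
                    "version: 1.0.0\n"
                    "description: example chart\n"
                    "maintainers:\n"
                    "  - name: Jane Doe\n")

    chart = ManifestChart(package=_FakePackage())
    print chart.name, chart.version, chart.maintainers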
| kubespray/kpm | kpm/manifest_chart.py | Python | apache-2.0 | 1,740 |
_unsorted = None
_in_sorted_order = None
import glob
import datetime
import os
files = glob.glob('*.markdown')
for f in files:
print(f)
date_part = f[0:10]
name_part = f[11:]
the_date = datetime.datetime.strptime(date_part, "%Y-%m-%d").date()
dir = the_date.strftime("%Y/")
if not os.path.exists(dir):
os.makedirs(dir)
new_path = dir + name_part
with open(f) as f1:
with open(new_path, 'w') as f2:
f2.write('date: {}\n'.format(date_part))
for line in f1:
f2.write(line)
| mahnve/sinor | tools/convert.py | Python | mit | 562 |
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2012 thomasv@gitorious
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import socket
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import PyQt4.QtCore as QtCore
from electrum_vtc.i18n import _
from electrum_vtc.network import DEFAULT_PORTS
from electrum_vtc.network import serialize_server, deserialize_server
from util import *
protocol_names = ['TCP', 'SSL']
protocol_letters = 'ts'
class NetworkDialog(QDialog):
def __init__(self, network, config):
QDialog.__init__(self)
self.setWindowTitle(_('Network'))
self.setMinimumSize(500, 20)
self.nlayout = NetworkChoiceLayout(network, config)
vbox = QVBoxLayout(self)
vbox.addLayout(self.nlayout.layout())
vbox.addLayout(Buttons(CloseButton(self)))
self.connect(self, QtCore.SIGNAL('updated'), self.on_update)
network.register_callback(self.on_network, ['updated', 'interfaces'])
def on_network(self, event, *args):
self.emit(QtCore.SIGNAL('updated'), event, *args)
def on_update(self):
self.nlayout.update()
class NodesListWidget(QTreeWidget):
def __init__(self, parent):
QTreeWidget.__init__(self)
self.parent = parent
self.setHeaderLabels([_('Node'), _('Height')])
self.setContextMenuPolicy(Qt.CustomContextMenu)
self.customContextMenuRequested.connect(self.create_menu)
def create_menu(self, position):
item = self.currentItem()
if not item:
return
is_server = not bool(item.data(0, Qt.UserRole).toInt()[0])
menu = QMenu()
if is_server:
server = unicode(item.data(1, Qt.UserRole).toString())
menu.addAction(_("Use as server"), lambda: self.parent.follow_server(server))
else:
index = item.data(1, Qt.UserRole).toInt()[0]
menu.addAction(_("Follow this branch"), lambda: self.parent.follow_branch(index))
menu.exec_(self.viewport().mapToGlobal(position))
def keyPressEvent(self, event):
if event.key() in [ Qt.Key_F2, Qt.Key_Return ]:
self.on_activated(self.currentItem(), self.currentColumn())
else:
QTreeWidget.keyPressEvent(self, event)
def on_activated(self, item, column):
# on 'enter' we show the menu
pt = self.visualItemRect(item).bottomLeft()
pt.setX(50)
self.emit(SIGNAL('customContextMenuRequested(const QPoint&)'), pt)
def update(self, network):
self.clear()
self.addChild = self.addTopLevelItem
chains = network.get_blockchains()
n_chains = len(chains)
for k, items in chains.items():
b = network.blockchains[k]
name = b.get_name()
if n_chains >1:
x = QTreeWidgetItem([name + '@%d'%b.get_checkpoint(), '%d'%b.height()])
x.setData(0, Qt.UserRole, 1)
x.setData(1, Qt.UserRole, b.checkpoint)
else:
x = self
for i in items:
star = ' *' if i == network.interface else ''
item = QTreeWidgetItem([i.host + star, '%d'%i.tip])
item.setData(0, Qt.UserRole, 0)
item.setData(1, Qt.UserRole, i.server)
x.addChild(item)
if n_chains>1:
self.addTopLevelItem(x)
x.setExpanded(True)
h = self.header()
h.setStretchLastSection(False)
h.setResizeMode(0, QHeaderView.Stretch)
h.setResizeMode(1, QHeaderView.ResizeToContents)
class ServerListWidget(QTreeWidget):
def __init__(self, parent):
QTreeWidget.__init__(self)
self.parent = parent
self.setHeaderLabels([_('Host'), _('Port')])
self.setContextMenuPolicy(Qt.CustomContextMenu)
self.customContextMenuRequested.connect(self.create_menu)
def create_menu(self, position):
item = self.currentItem()
if not item:
return
menu = QMenu()
server = unicode(item.data(1, Qt.UserRole).toString())
menu.addAction(_("Use as server"), lambda: self.set_server(server))
menu.exec_(self.viewport().mapToGlobal(position))
def set_server(self, s):
host, port, protocol = s.split(':')
self.parent.server_host.setText(host)
self.parent.server_port.setText(port)
self.parent.set_server()
def keyPressEvent(self, event):
if event.key() in [ Qt.Key_F2, Qt.Key_Return ]:
self.on_activated(self.currentItem(), self.currentColumn())
else:
QTreeWidget.keyPressEvent(self, event)
def on_activated(self, item, column):
# on 'enter' we show the menu
pt = self.visualItemRect(item).bottomLeft()
pt.setX(50)
self.emit(SIGNAL('customContextMenuRequested(const QPoint&)'), pt)
def update(self, servers, protocol, use_tor):
self.clear()
for _host, d in sorted(servers.items()):
if _host.endswith('.onion') and not use_tor:
continue
port = d.get(protocol)
if port:
x = QTreeWidgetItem([_host, port])
server = _host+':'+port+':'+protocol
x.setData(1, Qt.UserRole, server)
self.addTopLevelItem(x)
h = self.header()
h.setStretchLastSection(False)
h.setResizeMode(0, QHeaderView.Stretch)
h.setResizeMode(1, QHeaderView.ResizeToContents)
class NetworkChoiceLayout(object):
def __init__(self, network, config, wizard=False):
self.network = network
self.config = config
self.protocol = None
self.tor_proxy = None
self.tabs = tabs = QTabWidget()
server_tab = QWidget()
proxy_tab = QWidget()
blockchain_tab = QWidget()
tabs.addTab(blockchain_tab, _('Overview'))
tabs.addTab(proxy_tab, _('Proxy'))
tabs.addTab(server_tab, _('Server'))
# server tab
grid = QGridLayout(server_tab)
grid.setSpacing(8)
self.server_host = QLineEdit()
self.server_host.setFixedWidth(200)
self.server_port = QLineEdit()
self.server_port.setFixedWidth(60)
self.ssl_cb = QCheckBox(_('Use SSL'))
self.autoconnect_cb = QCheckBox(_('Select server automatically'))
self.autoconnect_cb.setEnabled(self.config.is_modifiable('auto_connect'))
self.server_host.editingFinished.connect(self.set_server)
self.server_port.editingFinished.connect(self.set_server)
self.ssl_cb.clicked.connect(self.change_protocol)
self.autoconnect_cb.clicked.connect(self.set_server)
self.autoconnect_cb.clicked.connect(self.update)
grid.addWidget(QLabel(_('Server') + ':'), 1, 0)
grid.addWidget(self.server_host, 1, 1, 1, 2)
grid.addWidget(self.server_port, 1, 3)
label = _('Server peers') if network.is_connected() else _('Default Servers')
grid.addWidget(QLabel(label), 2, 0, 1, 5)
self.servers_list = ServerListWidget(self)
grid.addWidget(self.servers_list, 3, 0, 1, 5)
# Proxy tab
grid = QGridLayout(proxy_tab)
grid.setSpacing(8)
# proxy setting
self.proxy_mode = QComboBox()
self.proxy_mode.addItems(['NONE', 'SOCKS4', 'SOCKS5', 'HTTP'])
self.proxy_host = QLineEdit()
self.proxy_host.setFixedWidth(200)
self.proxy_port = QLineEdit()
self.proxy_port.setFixedWidth(60)
self.proxy_user = QLineEdit()
self.proxy_user.setPlaceholderText(_("Proxy user"))
self.proxy_password = QLineEdit()
self.proxy_password.setPlaceholderText(_("Password"))
self.proxy_password.setEchoMode(QLineEdit.Password)
self.proxy_password.setFixedWidth(60)
self.proxy_mode.currentIndexChanged.connect(self.set_proxy)
self.proxy_host.editingFinished.connect(self.set_proxy)
self.proxy_port.editingFinished.connect(self.set_proxy)
self.proxy_user.editingFinished.connect(self.set_proxy)
self.proxy_password.editingFinished.connect(self.set_proxy)
self.check_disable_proxy()
self.proxy_mode.connect(self.proxy_mode, SIGNAL('currentIndexChanged(int)'), self.check_disable_proxy)
self.proxy_mode.connect(self.proxy_mode, SIGNAL('currentIndexChanged(int)'), self.proxy_settings_changed)
self.proxy_host.connect(self.proxy_host, SIGNAL('textEdited(QString)'), self.proxy_settings_changed)
self.proxy_port.connect(self.proxy_port, SIGNAL('textEdited(QString)'), self.proxy_settings_changed)
self.proxy_user.connect(self.proxy_user, SIGNAL('textEdited(QString)'), self.proxy_settings_changed)
self.proxy_password.connect(self.proxy_password, SIGNAL('textEdited(QString)'), self.proxy_settings_changed)
self.tor_cb = QCheckBox(_("Use Tor Proxy"))
self.tor_cb.setIcon(QIcon(":icons/tor_logo.png"))
self.tor_cb.hide()
self.tor_cb.clicked.connect(self.use_tor_proxy)
grid.addWidget(self.ssl_cb, 0, 0, 1, 3)
grid.addWidget(self.tor_cb, 1, 0, 1, 3)
grid.addWidget(self.proxy_mode, 4, 1)
grid.addWidget(self.proxy_host, 4, 2)
grid.addWidget(self.proxy_port, 4, 3)
grid.addWidget(self.proxy_user, 5, 2)
grid.addWidget(self.proxy_password, 5, 3)
grid.setRowStretch(6, 1)
# Blockchain Tab
grid = QGridLayout(blockchain_tab)
msg = ' '.join([
_("Electrum connects to several nodes in order to download block headers and find out the longest blockchain."),
_("This blockchain is used to verify the transactions sent by your transaction server.")
])
self.status_label = QLabel('')
grid.addWidget(QLabel(_('Status') + ':'), 0, 0)
grid.addWidget(self.status_label, 0, 1, 1, 3)
grid.addWidget(HelpButton(msg), 0, 4)
self.server_label = QLabel('')
msg = _("Electrum sends your wallet addresses to a single server, in order to receive your transaction history.")
grid.addWidget(QLabel(_('Server') + ':'), 1, 0)
grid.addWidget(self.server_label, 1, 1, 1, 3)
grid.addWidget(HelpButton(msg), 1, 4)
self.height_label = QLabel('')
msg = _('This is the height of your local copy of the blockchain.')
grid.addWidget(QLabel(_('Blockchain') + ':'), 2, 0)
grid.addWidget(self.height_label, 2, 1)
grid.addWidget(HelpButton(msg), 2, 4)
self.split_label = QLabel('')
grid.addWidget(self.split_label, 3, 0, 1, 3)
msg = ' '.join([
_("If auto-connect is enabled, Electrum will always use a server that is on the longest blockchain."),
_("If it is disabled, you have to choose a server you want to use. Electrum will warn you if your server is lagging.")
])
grid.addWidget(self.autoconnect_cb, 4, 0, 1, 3)
grid.addWidget(HelpButton(msg), 4, 4)
self.nodes_list_widget = NodesListWidget(self)
grid.addWidget(self.nodes_list_widget, 5, 0, 1, 5)
vbox = QVBoxLayout()
vbox.addWidget(tabs)
self.layout_ = vbox
# tor detector
self.td = td = TorDetector()
td.found_proxy.connect(self.suggest_proxy)
td.start()
self.update()
def check_disable_proxy(self, index = False):
if self.config.is_modifiable('proxy'):
for w in [self.proxy_host, self.proxy_port, self.proxy_user, self.proxy_password]:
w.setEnabled(self.proxy_mode.currentText() != 'NONE')
else:
for w in [self.proxy_host, self.proxy_port, self.proxy_mode]: w.setEnabled(False)
def enable_set_server(self):
if self.config.is_modifiable('server'):
enabled = not self.autoconnect_cb.isChecked()
self.server_host.setEnabled(enabled)
self.server_port.setEnabled(enabled)
self.servers_list.setEnabled(enabled)
self.tabs.setTabEnabled(2, enabled)
else:
for w in [self.autoconnect_cb, self.server_host, self.server_port, self.ssl_cb, self.servers_list]:
w.setEnabled(False)
def update(self):
host, port, protocol, proxy_config, auto_connect = self.network.get_parameters()
if not proxy_config:
proxy_config = { "mode":"none", "host":"localhost", "port":"9050"}
self.server_host.setText(host)
self.server_port.setText(port)
self.ssl_cb.setChecked(protocol=='s')
self.autoconnect_cb.setChecked(auto_connect)
host = self.network.interface.host if self.network.interface else _('None')
self.server_label.setText(host)
self.set_protocol(protocol)
self.servers = self.network.get_servers()
self.servers_list.update(self.servers, self.protocol, self.tor_cb.isChecked())
self.enable_set_server()
# proxy tab
self.proxy_mode.setCurrentIndex(self.proxy_mode.findText(str(proxy_config.get("mode").upper())))
self.proxy_host.setText(proxy_config.get("host"))
self.proxy_port.setText(proxy_config.get("port"))
self.proxy_user.setText(proxy_config.get("user", ""))
self.proxy_password.setText(proxy_config.get("password", ""))
height_str = "%d "%(self.network.get_local_height()) + _('blocks')
self.height_label.setText(height_str)
n = len(self.network.get_interfaces())
status = _("Connected to %d nodes.")%n if n else _("Not connected")
self.status_label.setText(status)
chains = self.network.get_blockchains()
if len(chains)>1:
chain = self.network.blockchain()
checkpoint = chain.get_checkpoint()
name = chain.get_name()
msg = _('Chain split detected at block %d')%checkpoint + '\n'
msg += (_('You are following branch') if auto_connect else _('Your server is on branch'))+ ' ' + name
msg += ' (%d %s)' % (chain.get_branch_size(), _('blocks'))
else:
msg = ''
self.split_label.setText(msg)
self.nodes_list_widget.update(self.network)
def layout(self):
return self.layout_
def set_protocol(self, protocol):
if protocol != self.protocol:
self.protocol = protocol
def change_protocol(self, use_ssl):
p = 's' if use_ssl else 't'
host = unicode(self.server_host.text())
pp = self.servers.get(host, DEFAULT_PORTS)
if p not in pp.keys():
p = pp.keys()[0]
port = pp[p]
self.server_host.setText(host)
self.server_port.setText(port)
self.set_protocol(p)
self.set_server()
def follow_branch(self, index):
self.network.follow_chain(index)
self.update()
def follow_server(self, server):
self.network.switch_to_interface(server)
host, port, protocol, proxy, auto_connect = self.network.get_parameters()
host, port, protocol = server.split(':')
self.network.set_parameters(host, port, protocol, proxy, auto_connect)
self.update()
def server_changed(self, x):
if x:
self.change_server(str(x.text(0)), self.protocol)
def change_server(self, host, protocol):
pp = self.servers.get(host, DEFAULT_PORTS)
if protocol and protocol not in protocol_letters:
protocol = None
if protocol:
port = pp.get(protocol)
if port is None:
protocol = None
if not protocol:
if 's' in pp.keys():
protocol = 's'
port = pp.get(protocol)
else:
protocol = pp.keys()[0]
port = pp.get(protocol)
self.server_host.setText(host)
self.server_port.setText(port)
self.ssl_cb.setChecked(protocol=='s')
def accept(self):
pass
def set_server(self):
host, port, protocol, proxy, auto_connect = self.network.get_parameters()
host = str(self.server_host.text())
port = str(self.server_port.text())
protocol = 's' if self.ssl_cb.isChecked() else 't'
auto_connect = self.autoconnect_cb.isChecked()
self.network.set_parameters(host, port, protocol, proxy, auto_connect)
def set_proxy(self):
host, port, protocol, proxy, auto_connect = self.network.get_parameters()
if self.proxy_mode.currentText() != 'NONE':
proxy = { 'mode':str(self.proxy_mode.currentText()).lower(),
'host':str(self.proxy_host.text()),
'port':str(self.proxy_port.text()),
'user':str(self.proxy_user.text()),
'password':str(self.proxy_password.text())}
else:
proxy = None
self.network.set_parameters(host, port, protocol, proxy, auto_connect)
def suggest_proxy(self, found_proxy):
self.tor_proxy = found_proxy
self.tor_cb.setText("Use Tor proxy at port " + str(found_proxy[1]))
if self.proxy_mode.currentIndex() == 2 \
and self.proxy_host.text() == "127.0.0.1" \
and self.proxy_port.text() == str(found_proxy[1]):
self.tor_cb.setChecked(True)
self.tor_cb.show()
def use_tor_proxy(self, use_it):
# 2 = SOCKS5
if not use_it:
self.proxy_mode.setCurrentIndex(0)
self.tor_cb.setChecked(False)
else:
self.proxy_mode.setCurrentIndex(2)
self.proxy_host.setText("127.0.0.1")
self.proxy_port.setText(str(self.tor_proxy[1]))
self.proxy_user.setText("")
self.proxy_password.setText("")
self.tor_cb.setChecked(True)
self.set_proxy()
def proxy_settings_changed(self):
self.tor_cb.setChecked(False)
class TorDetector(QThread):
found_proxy = pyqtSignal(object)
def __init__(self):
QThread.__init__(self)
def run(self):
# Probable ports for Tor to listen at
ports = [9050, 9150]
for p in ports:
if TorDetector.is_tor_port(p):
self.found_proxy.emit(("127.0.0.1", p))
return
@staticmethod
def is_tor_port(port):
try:
s = socket._socketobject(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(0.1)
s.connect(("127.0.0.1", port))
# Tor responds uniquely to HTTP-like requests
s.send("GET\n")
if "Tor is not an HTTP Proxy" in s.recv(1024):
return True
except socket.error:
pass
return False
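

if __name__ == '__main__':
    # Minimal standalone sketch: probe the usual Tor ports the same way the
    # TorDetector thread does, without starting any of the Qt machinery.
    for candidate_port in (9050, 9150):
        if TorDetector.is_tor_port(candidate_port):
            print "Tor proxy detected on 127.0.0.1:%d" % candidate_port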
| pknight007/electrum-vtc | gui/vtc/network_dialog.py | Python | mit | 19,825 |
from tendrl.commons.event import Event
from tendrl.commons.flows.exceptions import FlowExecutionFailedError
from tendrl.commons.message import Message
def get_node_ips(parameters):
node_ips = []
for node, config in parameters["Cluster.node_configuration"].iteritems():
node_ips.append(config["provisioning_ip"])
return node_ips
def create_gluster(parameters):
node_ips = get_node_ips(parameters)
plugin = NS.gluster_provisioner.get_plugin()
Event(
Message(
job_id=parameters['job_id'],
flow_id=parameters['flow_id'],
priority="info",
publisher=NS.publisher_id,
payload={"message": "Setting up gluster nodes %s" %
parameters['TendrlContext.integration_id']
}
)
)
ret_val = plugin.setup_gluster_node(
node_ips,
repo=NS.config.data.get('glusterfs_repo', None)
)
if ret_val is not True:
raise FlowExecutionFailedError("Error setting up gluster node")
Event(
Message(
job_id=parameters['job_id'],
flow_id=parameters['flow_id'],
priority="info",
publisher=NS.publisher_id,
payload={"message": "Creating gluster cluster %s" %
parameters['TendrlContext.integration_id']
}
)
)
ret_val = plugin.create_gluster_cluster(node_ips)
if ret_val is not True:
raise FlowExecutionFailedError("Error creating gluster cluster")
Event(
Message(
job_id=parameters['job_id'],
flow_id=parameters['flow_id'],
priority="info",
publisher=NS.publisher_id,
payload={"message": "Created Gluster Cluster %s" %
parameters['TendrlContext.integration_id']
}
)
)
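

if __name__ == '__main__':
    # Minimal sketch of the `parameters` structure these helpers expect; the node
    # names and addresses are made up. create_gluster() additionally needs the
    # tendrl NS namespace and a provisioner plugin, so only get_node_ips is shown.
    sample_parameters = {
        "Cluster.node_configuration": {
            "node-1.example.com": {"provisioning_ip": "10.0.0.11"},
            "node-2.example.com": {"provisioning_ip": "10.0.0.12"},
        }
    }
    print(sorted(get_node_ips(sample_parameters)))  # ['10.0.0.11', '10.0.0.12']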
| rishubhjain/commons | tendrl/commons/flows/create_cluster/gluster_help.py | Python | lgpl-2.1 | 1,926 |
from collections import OrderedDict
from allure_commons.types import AttachmentType
from allure_commons.model2 import ExecutableItem
from allure_commons.model2 import TestResult
from allure_commons.model2 import Attachment, ATTACHMENT_PATTERN
from allure_commons.utils import now
from allure_commons._core import plugin_manager
class AllureReporter(object):
def __init__(self):
self._items = OrderedDict()
self._orphan_items = []
def _update_item(self, uuid, **kwargs):
item = self._items[uuid] if uuid else self._items[next(reversed(self._items))]
for name, value in kwargs.items():
attr = getattr(item, name)
if isinstance(attr, list):
attr.append(value)
else:
setattr(item, name, value)
def _last_executable(self):
for _uuid in reversed(self._items):
if isinstance(self._items[_uuid], ExecutableItem):
return _uuid
def get_item(self, uuid):
return self._items.get(uuid)
def get_last_item(self, item_type=None):
for _uuid in reversed(self._items):
if item_type is None:
return self._items.get(_uuid)
if type(self._items[_uuid]) == item_type:
return self._items.get(_uuid)
def start_group(self, uuid, group):
self._items[uuid] = group
def stop_group(self, uuid, **kwargs):
self._update_item(uuid, **kwargs)
group = self._items.pop(uuid)
plugin_manager.hook.report_container(container=group)
def update_group(self, uuid, **kwargs):
self._update_item(uuid, **kwargs)
def start_before_fixture(self, parent_uuid, uuid, fixture):
self._items.get(parent_uuid).befores.append(fixture)
self._items[uuid] = fixture
def stop_before_fixture(self, uuid, **kwargs):
self._update_item(uuid, **kwargs)
self._items.pop(uuid)
def start_after_fixture(self, parent_uuid, uuid, fixture):
self._items.get(parent_uuid).afters.append(fixture)
self._items[uuid] = fixture
def stop_after_fixture(self, uuid, **kwargs):
self._update_item(uuid, **kwargs)
fixture = self._items.pop(uuid)
fixture.stop = now()
def schedule_test(self, uuid, test_case):
self._items[uuid] = test_case
def get_test(self, uuid):
return self.get_item(uuid) if uuid else self.get_last_item(TestResult)
def close_test(self, uuid):
test_case = self._items.pop(uuid)
plugin_manager.hook.report_result(result=test_case)
def drop_test(self, uuid):
self._items.pop(uuid)
def start_step(self, parent_uuid, uuid, step):
parent_uuid = parent_uuid if parent_uuid else self._last_executable()
if parent_uuid is None:
self._orphan_items.append(uuid)
else:
self._items[parent_uuid].steps.append(step)
self._items[uuid] = step
def stop_step(self, uuid, **kwargs):
if uuid in self._orphan_items:
self._orphan_items.remove(uuid)
else:
self._update_item(uuid, **kwargs)
self._items.pop(uuid)
def _attach(self, uuid, name=None, attachment_type=None, extension=None, parent_uuid=None):
mime_type = attachment_type
extension = extension if extension else 'attach'
if type(attachment_type) is AttachmentType:
extension = attachment_type.extension
mime_type = attachment_type.mime_type
file_name = ATTACHMENT_PATTERN.format(prefix=uuid, ext=extension)
attachment = Attachment(source=file_name, name=name, type=mime_type)
last_uuid = parent_uuid if parent_uuid else self._last_executable()
self._items[last_uuid].attachments.append(attachment)
return file_name
def attach_file(self, uuid, source, name=None, attachment_type=None, extension=None, parent_uuid=None):
file_name = self._attach(uuid, name=name, attachment_type=attachment_type,
extension=extension, parent_uuid=parent_uuid)
plugin_manager.hook.report_attached_file(source=source, file_name=file_name)
def attach_data(self, uuid, body, name=None, attachment_type=None, extension=None, parent_uuid=None):
file_name = self._attach(uuid, name=name, attachment_type=attachment_type,
extension=extension, parent_uuid=parent_uuid)
plugin_manager.hook.report_attached_data(body=body, file_name=file_name)
| allure-framework/allure-python | allure-python-commons/src/reporter.py | Python | apache-2.0 | 4,553 |
#!/usr/bin/env python
#
# Copyright (C) 2017 ShadowMan
#
import base64
import random
def to_string(object, encoding = 'utf-8'):
if isinstance(object, str):
return object
if isinstance(object, bytes):
return object.decode(encoding)
if hasattr(object, '__str__'):
return object.__str__()
raise TypeError('object to string error occurs, object invalid')
def to_bytes(object, encoding = 'utf-8'):
if isinstance(object, bytes):
return object
if isinstance(object, str):
return object.encode(encoding)
if hasattr(object, '__str__'):
return object.__str__().encode(encoding)
raise TypeError('object to bytes error occurs, object invalid')
def to_integer(object, *, encoding = 'utf-8'):
if isinstance(object, int):
return object
if isinstance(object, str):
object = object.encode(encoding)
if isinstance(object, bytes):
        return int(''.join(['{:02x}'.format(ch) for ch in object]), 16)
def to_int8(object):
return to_integer(object) & 0xff
def to_int16(object):
return to_integer(object) & 0xffff
def to_int32(object):
return to_integer(object) & 0xffffffff
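# Illustrative sketch (not part of the original module): assuming the
# two-digit hex conversion above, the integer helpers behave like big-endian
# byte concatenation, e.g.
#
#   to_integer('ab')    # b'ab' -> '6162' -> 0x6162 == 24930
#   to_int8('ab')       # keep only the lowest byte: 0x62 == 98
#   to_int16('abcd')    # 0x61626364 & 0xffff == 0x6364 == 25444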
# a nonce consisting of a randomly selected 16-byte value
# 1-byte(0 - 127), not 0 - 255
def random_bytes_string(length, start = 0, stop = 0x7f, encoding = 'utf-8'):
rst_string = \
''.join([ chr(random.randint(start, stop)) for _ in range(length) ])
return to_bytes(rst_string, encoding)
# check base64.b64decode string length
def base64_decode_length(key):
return len(base64.b64decode(to_bytes(key)))
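# Illustrative sketch (not part of the original module): building a
# Sec-WebSocket-Key style nonce with the helpers above; the names are only
# for demonstration.
#
#   nonce = random_bytes_string(16)   # 16 random bytes
#   key = base64.b64encode(nonce)     # what a client would send
#   base64_decode_length(key)         # -> 16, i.e. a valid key length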
# [ 1, 2, (3, 4), [ 5, (6, 7), 8 ], 9 ] =>
# [ 1, 2, 3, 4, 5, 6, 7, 8, 9 ]
def flatten_list(array):
def _flatten_generator(array):
for item in array:
if isinstance(item, (tuple, list)):
yield from _flatten_generator(item)
else:
yield item
return list(_flatten_generator(array))
| shadow-share/websocket | websocket/utils/generic.py | Python | mit | 1,961 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Copyright (C) 2007-2014 Jeff Wang(<http://[email protected]>).
| vileopratama/vitech | src/addons/l10n_cn_standard/__init__.py | Python | mit | 162 |
"""Added last_updated columns strategically
Revision ID: 4e4814bebf31
Revises: 32aede9e7e69
Create Date: 2017-09-29 14:10:22.478961
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '4e4814bebf31'
down_revision = '32aede9e7e69'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('fort_sightings', sa.Column('last_updated', sa.TIMESTAMP(), nullable=True))
op.add_column('pokestops', sa.Column('last_updated', sa.TIMESTAMP(), nullable=True))
op.add_column('sightings', sa.Column('last_updated', sa.TIMESTAMP(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('sightings', 'last_updated')
op.drop_column('pokestops', 'last_updated')
op.drop_column('fort_sightings', 'last_updated')
# ### end Alembic commands ###
| DavisPoGo/Monocle | migrations/versions/4e4814bebf31_added_last_updated_columns_strategically.py | Python | mit | 983 |
# PYTHON 3
#
# Author: Kate Willett
# Created: 4 March 2019
# Last update: 15 April 2019
# Location: /data/local/hadkw/HADCRUH2/UPDATE2014/PROGS/PYTHON/
# GitHub: https://github.com/Kate-Willett/PYTHON
# -----------------------
# CODE PURPOSE AND OUTPUT
# -----------------------
# This code reads in monthly mean gridded (5by5) netCDF files and produces area average time series
# in netCDF and ASCII
#
# Note that the mdi (-1e30) is different between IDL (float?) and Python (double?) and at the moment
# I have netCDF files created in both IDL and Python. So - first thing is to reset all missing values to
# the Python mdi used here.
# Actually now I make netCDF files with -999. as the missing data!
#
# This code was originally IDL written by Kate Willett make_area_avg_ts.pro and used
# globalmean.pro to do the area averaging which was written in IDL by Tim Osborn
#
# <references to related published material, e.g. that describes data set>
#
# -----------------------
# LIST OF MODULES
# -----------------------
## Modules
#from datetime import datetime
#import numpy as np
#from matplotlib.dates import date2num,num2date
#import sys, os
#from scipy.optimize import curve_fit,fsolve,leastsq
#from scipy import pi,sqrt,exp
#from scipy.special import erf
#import scipy.stats
#from math import sqrt,pi,radians,sin,cos,acos
#import struct
#from netCDF4 import Dataset
#from netCDF4 import stringtoarr # for putting strings in as netCDF variables
#import pdb
#
## Kates:
#import TestLeap
#from ReadNetCDF import GetGrid4
#from ReadNetCDF import GetGrid4Slice
#from GetNiceTimes import MakeDaysSince
#
# -----------------------
# DATA
# -----------------------
# HadISDH-land:
# /data/local/hadkw/HADCRUH2/UPDATE2016/STATISTICS/GRIDS/
# HadISDH.landq.3.0.0.2016p_FLATgridIDPHA5by5_anoms7605_JAN2017_cf.nc
# HadISDH-marine
# /data/local/hadkw/HADCRUH2/UPDATE2016/STATISTICS/GRIDS/
# HadISDH.marineq.1.0.0.2016p_OBSclim2BClocal_anoms8110_JAN2017_cf.nc
# HadISDH.marineq.1.0.0.2016p_OBSclim2BClocalship_anoms8110_JAN2017_cf.nc
# HadISDH-blend:
# /data/local/hadkw/HADCRUH2/UPDATE2016/STATISTICS/GRIDS/
# HadISDH.blendq.1.0.0.2016p_FULL_anoms8110_JAN2017_cf.nc
# HadISDH.blendq.1.0.0.2016p_FULLship_anoms8110_JAN2017_cf.nc
# Other:
#
# -----------------------
# HOW TO RUN THE CODE
# -----------------------
# Make sure all of the EDITABLES are correct
# module load scitools/default-current
# python MakeAreaAvgTS.py
#
# NOT ANYMORE: if you want era5 data masked to HadISDH then set MaskIt = True internally
# if you want different years or regions then reset internally
#> module load scitools/default-current
#> python MakeAreaAvgTS.py --var <var> --typee <type> --year1 <yyyy> --year2 <yyyy>
#
## Which variable?
# var = 'dpd' #'dpd','td','t','tw','e','q','rh'
#
## Which homog type?
# typee = 'LAND', 'RAW','OTHER', 'BLEND', 'BLENDSHIP', 'MARINE','MARINESHIP', 'ERA5','EAR5MASK','ERA5LAND','ERA5MARINE','ERA5LANDMASK','ERA5MARINEMASK'
#
# year1 and year2 are start and end year of trends
#
# -----------------------
# OUTPUT
# -----------------------
# /data/local/hadkw/HADCRUH2/UPDATE2016/STATISTICS/TIMESERIES/
# HadISDH.landq.3.0.0.2016p_FLATgridIDPHA5by5_anoms7605_JAN2017_areaTS_19732016.nc
# HadISDH.blendq.1.0.0.2016p_FULL_anoms8110_JAN2017_areaTS_19732016.nc
# HadISDH.blendq.1.0.0.2016p_FULLship_anoms8110_JAN2017_areaTS_19732016.nc
# HadISDH.marineq.1.0.0.2016p_OBSclim2BClocal_anoms8110_JAN2017_areaTS_19732016.nc
# HadISDH.marineq.1.0.0.2016p_OBSclim2BClocalship_anoms8110_JAN2017_areaTS_19732016.nc
#
# -----------------------
# VERSION/RELEASE NOTES
# -----------------------
#
# Version 2 (5 November 2020)
# ---------
#
# Enhancements
# Now runs from command line
# Now works with ERA5 anoms and masks if desired.
#
# Changes
#
# Bug fixes
#
#
# Version 1 (15 April 2019)
# ---------
#
# Enhancements
#
# Changes
#
# Bug fixes
#
# -----------------------
# OTHER INFORMATION
# -----------------------
# Based on original IDL code by Kate Willett make_area_avg_ts.pro
############################################################################
# Modules
from datetime import datetime
import numpy as np
from matplotlib.dates import date2num,num2date
import sys, os, getopt
from scipy.optimize import curve_fit,fsolve,leastsq
from scipy import pi,sqrt,exp
from scipy.special import erf
import scipy.stats
from math import sqrt,pi,radians,sin,cos,acos
import struct
from netCDF4 import Dataset
from netCDF4 import stringtoarr # for putting strings in as netCDF variables
import pdb
# Kates:
import TestLeap
from ReadNetCDF import GetGrid4
from ReadNetCDF import GetGrid4Slice
from GetNiceTimes import MakeDaysSince
# Start and end years of complete dataset if HardWire = 1
startyr = 1973 # Candidate DAtaset start year - reset later if ERA or by HardWire=0
styrh = 1973 # HadISDH start year (for masking) reset by HardWire=0
edyr = 2020 # reset by HardWire=0
# Which climatology period to work with?
climST = str(1981) #1976 or 1981
climED = str(2010) #2005 or 2010
climBIT = 'anoms'+climST[2:4]+climED[2:4]
# Dataset version if HardWire = 1
lversion = '4.3.0.2020f'
mversion = '1.1.0.2020f'
bversion = '1.1.0.2020f'
# HARDWIRED SET UP!!!
# If HardWire = 1 then program reads from the above run choices
# If HardWire = 0 then program reads in from F1_HadISDHBuildConfig.txt
HardWire = 0
if (HardWire == 0):
    # Read in the config file to get all of the info
with open('F1_HadISDHBuildConfig.txt') as f:
ConfigDict = dict(x.rstrip().split('=', 1) for x in f)
lversion = ConfigDict['VersionDots']
mversion = ConfigDict['MVersionDots']
bversion = ConfigDict['BVersionDots']
startyr = int(ConfigDict['StartYear'])
edyr = int(ConfigDict['EndYear'])
# Note that ConfigDict is still held in memory and contains all the Global Attribute Elements for the output NetCDF File
mdi = -1e30 # missing data indicator
#
############################################################################
# SUBROUTINES #
############################################################################
# AreaMean
def AreaMean(DataField,TheLats,MaskField=None,Cover=None):
'''
This function computes the spatial area average using cosine weighting of the latitudes
    It is based on original IDL code by Tim Osborn (globalmean.pro)
Computes a global mean from a field (or set of fields if fd is 3D),
accounting for missing data.
A separate mask (of same dimension) can (has to be at the moment) be supplied if required.
Single hemisphere means can also be returned (not at the moment)
The number of boxes with data is returned in cover (not at the moment)
mdi is passed from above
DOES NOT ASSUME MASK IS IDENTICAL FOR EACH TIME STEP!!!
INPUTS:
DataField[:,:,:] - time, lat, lon np.array of data - can cope with missing data (mdi)
TheLats[:] - np.array of latitudes
Optional:
MaskField[:,:,:] - if not supplied then average computed over entire DataField
Cover[:] - if supplied then the number of boxes non-missing is returned per time step
OUTPUTS:
DataTS[:] - np.array time series of area averages
'''
# Find dimensions
fullsize = np.shape(DataField)
if (len(fullsize) < 2) | (len(fullsize) > 3):
print('DataField must be 2D or 3D')
pdb.set_trace()
# Set up dimensions depending on whether its 2D or 3D
if (len(fullsize) == 3):
Ntims = fullsize[0]
Nlons = fullsize[2] #nx = fullsize(1)
Nlats = fullsize[1] #ny = fullsize(2)
else:
Ntims = 1
Nlons = fullsize[1] #nx = fullsize(1)
Nlats = fullsize[0] #ny = fullsize(2)
# if a mask is supplied then use to remove points from fd
masksize = np.shape(MaskField)
if (len(masksize) > 0):
if (len(masksize) != len(fullsize)) & (len(masksize) != 2):
print('Mask is wrong size')
pdb.set_trace()
# Set up dimensions depending on whether its 2D or 3D
if (len(masksize) == 3):
if (masksize[0] != Ntims):
print('Mask is wrong size')
pdb.set_trace()
if (masksize[2] != Nlons) | (masksize[1] != Nlats):
print('Mask is wrong size')
pdb.set_trace()
Ntimsmask = masksize[0]
else:
if (masksize[1] != Nlons) | (masksize[0] != Nlats):
print('Mask is wrong size')
pdb.set_trace()
Ntimsmask = 1
# In the case of no mask then compute over all boxes
else:
Ntimsmask = 1
MaskField = np.empty((Nlats,Nlons),dtype = float) # IDL was lons,lats
# Now make arrays
# IDL code below but it seems redundant to me because data was already lon, lat, time in IDL
# In python its time, lat, lon!!!
#fd = reform(fd,nx,ny,nz)
#mask=reform(mask,nx,ny,nzmask)
sumval = np.zeros(Ntims,dtype = float)
sumarea = np.zeros(Ntims,dtype = float)
# # For southern hemisphere component
# sval = np.zeros(Ntims,dtype = float)
# sarea = np.zeros(Ntims,dtype = float)
# # For northern hemisphere component
# nval = np.zeros(Ntims,dtype = float)
# narea = np.zeros(Ntims,dtype = float)
# If Cover exists then set up for filling it
CoverTest = np.shape(Cover)
if (len(CoverTest) > 0):
# For number of non-mdi boxes contributing
Cover = np.zeros(Ntims,dtype = float)
# print('Test AreaMean set up so far')
# pdb.set_trace()
# If the MaskField has been supplied then it should have the same dimensions as DataField
    # Does not assume that the mask is identical for each time step
for ln in range(Nlons): #i
for lt in range(Nlats): #j
# print(ln,lt)
# Is this lat/lon a 1 or an mdi in the mask - 1 = compute!
temp_data = np.copy(DataField[:,lt,ln])
CarryOn = 0
if (Ntims == Ntimsmask):
temp_mask = np.copy(MaskField[:,lt,ln])
mask_cover = np.where(temp_mask == mdi)
if (len(mask_cover[0]) > 0):
temp_data[mask_cover] = mdi #
kl = np.where(temp_data != mdi)
CarryOn = 1
else:
if (MaskField[lt,ln] != mdi):
kl = np.where(temp_data != mdi)
CarryOn = 1
            # np.where returns a tuple, so count matches via kl[0]; short-circuit
            # 'and' avoids touching kl when this mask box is missing
            if (CarryOn == 1) and (len(kl[0]) > 0):
# print('Test kl values and how this bit works')
# pdb.set_trace()
sumval[kl] = sumval[kl] + temp_data[kl]*cos(radians(TheLats[lt]))
sumarea[kl] = sumarea[kl] + cos(radians(TheLats[lt]))
if (len(CoverTest) > 0):
Cover[kl] = Cover[kl] + 1.
# if (TheLats[lt] < 0.):
# sval[kl] = sval[kl] + DataField[kl,lt,ln]*cos(radians(TheLats[lt]))
# sarea[kl] = sarea[kl] + cos(radians(TheLats[lt]))
# else:
# nval[kl] = nval[kl] + DataField[kl,lt,ln]*cos(radians(TheLats[lt]))
# narea[kl] = narea[kl] + cos(radians(TheLats[lt]))
gots = np.where(sumarea > 0)
if (len(gots[0]) > 0):
sumval[gots] = sumval[gots] / sumarea[gots]
misses = np.where(sumarea == 0)
if (len(misses[0]) > 0):
sumval[misses] = mdi
if (Ntims == 1): # convert to scalars
sumval = sumval[0]
if (len(CoverTest) > 0):
return sumval, Cover
else:
return sumval
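# Illustrative sketch (not part of the original code): a minimal call to
# AreaMean on a tiny synthetic grid, just to show the expected shapes. The
# numbers are hypothetical.
#
# demo_lats = np.array([-45., 45.])       # two latitude bands
# demo_field = np.full((3, 2, 4), 2.0)    # time, lat, lon - constant field
# demo_mask = np.ones((2, 4))             # compute over every box
# demo_ts = AreaMean(demo_field, demo_lats, demo_mask)
# # demo_ts -> array([2., 2., 2.]) because every box holds the same value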
############################################################################
# WriteNetCDF
def WriteNetCDF(Filename,TheGArray,TheNHArray,TheTArray,TheSHArray,TheTimes,TheStYr, TheEdYr, TheClimStart, TheClimEnd, TheName, TheStandardName, TheLongName, TheUnit, TheRegions):
'''
This function writes out a NetCDF 4 file
INPUTS:
Filename - string file name
TheGArray[:] - time array of global average values
TheNHArray[:] - time array of nhem average values
TheTArray[:] - time array of tropical average values
TheSHArray[:] - time array of shem average values
TheTimes[:] - times in days since TheStYr, Jan 1st
TheStYr - integer start year assumes Jan start
TheEdYr - integer end year assumes Dec start
TheClimStart - integer start of clim Jan start
TheClimEnd - integer end of clim Dec start
TheName - string short name of var q2m
TheStandardName - string standard name of variable
TheUnit - string unit of variable
TheRegions - dictionary with G, NH, T and SH [lower lat, upper lat] boundaries
OUTPUTS:
None
'''
# No need to convert float data using given scale_factor and add_offset to integers - done within writing program (packV = (V-offset)/scale
# Not sure what this does to float precision though...
# Create a new netCDF file - have tried zlib=True,least_significant_digit=3 (and 1) - no difference
ncfw = Dataset(Filename+'.nc','w',format='NETCDF4_CLASSIC') # need to try NETCDF4 and also play with compression but test this first
# Set up the dimension names and quantities
ncfw.createDimension('time',len(TheTimes))
# Go through each dimension and set up the variable and attributes for that dimension if needed
MyVarT = ncfw.createVariable('time','f4',('time',))
MyVarT.standard_name = 'time'
MyVarT.long_name = 'time'
MyVarT.units = 'days since '+str(TheStYr)+'-1-1 00:00:00'
MyVarT.start_year = str(TheStYr)
MyVarT.end_year = str(TheEdYr)
MyVarT[:] = TheTimes
# Go through each variable and set up the variable attributes
# I've added zlib=True so that the file is in compressed form
    # I've added least_significant_digit=4 because we do not need to store information beyond 4 significant figures.
MyVarG = ncfw.createVariable('glob_'+TheName+'_anoms','f4',('time',),fill_value = mdi,zlib=True,least_significant_digit=4)
#MyVarG.standard_name = TheStandardName
MyVarG.long_name = TheLongName+' global average anomalies '+'%5.1f' % (TheRegions['G'][0])+' to '+'%5.1f' % (TheRegions['G'][1])
MyVarG.units = TheUnit
# MyVarG.valid_min = np.min(TheGArray)
# MyVarG.valid_max = np.max(TheGArray)
# MyVarG.missing_value = mdi
    # Provide the data to the variable - depending on how many dimensions there are
MyVarG[:] = TheGArray[:]
MyVarN = ncfw.createVariable('nhem_'+TheName+'_anoms','f4',('time',),fill_value = mdi,zlib=True,least_significant_digit=4)
#MyVarN.standard_name = TheStandardName
MyVarN.long_name = TheLongName+' northern hemisphere average anomalies '+'%5.1f' % (TheRegions['NH'][0])+' to '+'%5.1f' % (TheRegions['NH'][1])
MyVarN.units = TheUnit
# MyVarN.valid_min = np.min(TheNHArray)
# MyVarN.valid_max = np.max(TheNHArray)
# MyVarN.missing_value = mdi
    # Provide the data to the variable - depending on how many dimensions there are
MyVarN[:] = TheNHArray[:]
MyVarT = ncfw.createVariable('trop_'+TheName+'_anoms','f4',('time',),fill_value = mdi,zlib=True,least_significant_digit=4)
#MyVarT.standard_name = TheStandardName
MyVarT.long_name = TheLongName+' tropical average anomalies '+'%5.1f' % (TheRegions['T'][0])+' to '+'%5.1f' % (TheRegions['T'][1])
MyVarT.units = TheUnit
# MyVarT.valid_min = np.min(TheTArray)
# MyVarT.valid_max = np.max(TheTArray)
# MyVarT.missing_value = mdi
    # Provide the data to the variable - depending on how many dimensions there are
MyVarT[:] = TheTArray[:]
MyVarS = ncfw.createVariable('shem_'+TheName+'_anoms','f4',('time',),fill_value = mdi,zlib=True,least_significant_digit=4)
#MyVarS.standard_name = TheStandardName
MyVarS.long_name = TheLongName+' southern hemisphere average anomalies '+'%5.1f' % (TheRegions['SH'][0])+' to '+'%5.1f' % (TheRegions['SH'][1])
MyVarS.units = TheUnit
# MyVarS.valid_min = np.min(TheSHArray)
# MyVarS.valid_max = np.max(TheSHArray)
# MyVarS.missing_value = mdi
    # Provide the data to the variable - depending on how many dimensions there are
MyVarS[:] = TheSHArray[:]
ncfw.close()
return
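# Illustrative sketch (not part of the original code): reading one of the
# area-average series back out of the file written above. The variable name
# follows the 'glob_<name>_anoms' pattern used in WriteNetCDF; the file name
# and 'q' are only examples.
#
# ncf = Dataset('HadISDH.landq.4.3.0.2020f_areaTS_19732020.nc', 'r')
# glob_series = ncf.variables['glob_q_anoms'][:]
# times = ncf.variables['time'][:]
# ncf.close()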
############################################################################
# WriteText
def WriteText(Filename,TheGArray,TheNHArray,TheTArray,TheSHArray,TheTimes,TheStYr,TheEdYr):
'''
This function writes out two files with year or yearmonth and then Global, N Hemi, Tropics and S Hemi time series
There has to be at least 11 months of the year present to calculate an annual value
'''
# Minimum missing data threshold
MinThresh = 11
    # Check for mdi (which is currently -1e30) and change to -99.99
Amdi = -99.99
    TheGArray[np.where(TheGArray == mdi)] = Amdi # to catch floating point oddity (-1e30 + 1 is still a ridiculously large -ve number)
    TheNHArray[np.where(TheNHArray == mdi)] = Amdi # to catch floating point oddity (-1e30 + 1 is still a ridiculously large -ve number)
    TheTArray[np.where(TheTArray == mdi)] = Amdi # to catch floating point oddity (-1e30 + 1 is still a ridiculously large -ve number)
    TheSHArray[np.where(TheSHArray == mdi)] = Amdi # to catch floating point oddity (-1e30 + 1 is still a ridiculously large -ve number)
# Open the file for annual and monthly
ann = open(Filename+'_annual.dat', "a")
mon = open(Filename+'_monthly.dat', "a")
# Write the file header
ann.write("DATE GLOBAL N_HEMI TROPICS S_HEMI\n")
mon.write(" DATE GLOBAL N_HEMI TROPICS S_HEMI\n")
# Loop through each year and month and write out
yy = 0
mm = 0
for tt in range(len(TheTimes)):
# Write monthlies to file
m = '%02i' % (mm+1)
# pdb.set_trace()
mon.write('{:4d}{:2s} {:6.2f} {:6.2f} {:6.2f} {:6.2f}\n'.format(yy+TheStYr,m,TheGArray[tt],TheNHArray[tt],TheTArray[tt],TheSHArray[tt]))
mm = mm+1
if (mm == 12):
# Get annual mean value and write to file
TmpArr = TheGArray[tt-11:tt+1]
gots = np.where(TmpArr > Amdi)
if (len(gots[0]) >= MinThresh):
TheGVal = np.mean(TmpArr[gots])
else:
TheGVal = Amdi
TmpArr = TheNHArray[tt-11:tt+1]
gots = np.where(TmpArr > Amdi)
if (len(gots[0]) >= MinThresh):
TheNHVal = np.mean(TmpArr[gots])
else:
TheNHVal = Amdi
TmpArr = TheTArray[tt-11:tt+1]
gots = np.where(TmpArr > Amdi)
if (len(gots[0]) >= MinThresh):
TheTVal = np.mean(TmpArr[gots])
else:
TheTVal = Amdi
TmpArr = TheSHArray[tt-11:tt+1]
gots = np.where(TmpArr > Amdi)
if (len(gots[0]) >= MinThresh):
TheSHVal = np.mean(TmpArr[gots])
else:
TheSHVal = Amdi
ann.write('{:4d} {:6.2f} {:6.2f} {:6.2f} {:6.2f}\n'.format(yy+TheStYr,TheGVal, TheNHVal, TheTVal, TheSHVal))
yy = yy+1
mm = 0
# CLose the files
ann.close()
mon.close()
return
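# Illustrative sketch (not part of the original code): how the annual-mean
# threshold in WriteText behaves. With MinThresh = 11, a year with one missing
# month still gets an annual value, while a year with two or more missing
# months is set to the -99.99 missing indicator.
#
# one_year = np.array([1.0]*11 + [-99.99])   # 11 valid months
# gots = np.where(one_year > -99.99)
# annual = np.mean(one_year[gots]) if len(gots[0]) >= 11 else -99.99   # -> 1.0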
############################################################################
# MAIN #
############################################################################
def main(argv):
# INPUT PARAMETERS AS STRINGS!!!!
var = 'q' # 'q','rh','e','td','tw','t','dpd'
typee = 'LAND' # 'LAND','RAW','OTHER', 'BLEND', 'BLENDSHIP', 'MARINE', 'MARINESHIP' # domain does not need to be set correctly!!!
# can also be 'ERA5' 'ERA5LAND','ERA5MARINE' 'ERA5MARINEMASK' ERA5LANDMASK'
year1 = '1973' # Start year of trend
year2 = '2018' # End year of trend
try:
opts, args = getopt.getopt(argv, "hi:",
["var=","typee=","year1=","year2="])
except getopt.GetoptError:
        print('Usage (as strings) MakeAreaAvgTS.py --var <q> --typee <LAND> --year1 <1973> --year2 <2018>')
sys.exit(2)
for opt, arg in opts:
if opt == "--var":
try:
var = arg
except:
sys.exit("Failed: var not a string")
elif opt == "--typee":
try:
typee = arg
except:
sys.exit("Failed: typee not a string")
elif opt == "--year1":
try:
year1 = arg
except:
sys.exit("Failed: year1 not an integer")
elif opt == "--year2":
try:
year2 = arg
except:
sys.exit("Failed: year2 not an integer")
assert year1 != -999 and year2 != -999, "Year not specified."
print(var,typee,year1, year2)
# *** CHOOSE WHETHER TO WORK WITH ANOMALIES OR ACTUALS - COULD ADD RENORMALISATION IF DESIRED ***
isanom = True # 'false' for actual values, 'true' for anomalies
# What domain?
if (typee == 'MARINE') | (typee == 'MARINESHIP') | (typee == 'ERA5MARINE') | (typee == 'ERA5MARINEMASK'):
domain = 'marine'
version = mversion
elif (typee == 'BLEND') | (typee == 'BLENDSHIP') | (typee == 'ERA5') | (typee == 'ERA5MASK'):
domain = 'blend'
version = bversion
else:
domain = 'land'
version = lversion
# Set up the trend years
sttrd = int(year1)
edtrd = int(year2)
# Latitude and Longitude gridbox width and variable names
latlg = 5.
lonlg = 5.
#latlg = 1.
#lonlg = 1.
LatInfo = ['latitude']
LonInfo = ['longitude']
    # Set up area average masks
MaskDict = dict([('G',[-70.,70.]),
('NH',[20.,70.]),
('T',[-20.,20.]),
('SH',[-70.,-20.])])
# Time and dimension variables
# nyrs = (edyr+1)-styr
# nmons = nyrs*12
nyrs = (edtrd+1)-sttrd
nmons = nyrs*12
stlt = -90+(latlg/2.)
stln = -180+(lonlg/2.)
nlats = int(180/latlg)
nlons = int(360/lonlg)
lats = (np.arange(nlats)*latlg) + stlt
lons = (np.arange(nlons)*lonlg) + stln
WORKINGDIR = '/scratch/hadkw/UPDATE20'+str(edyr)[2:4]
# WORKINGDIR = '/data/users/hadkw/WORKING_HADISDH/UPDATE20'+str(edyr)[2:4]
indir = WORKINGDIR+'/STATISTICS/GRIDS/'
outdir = WORKINGDIR+'/STATISTICS/TIMESERIES/'
# If we're working with ERA5 then set INDIR to OTHERDATA
if (typee.find('ERA5') >= 0):
indir = WORKINGDIR+'/OTHERDATA/'
indirH = WORKINGDIR+'/STATISTICS/GRIDS/'
# END OF EDITABLES**********************************************************
# Dictionaries for filename and other things
ParamDict = dict([('q',['q','q2m','g/kg']),
('rh',['RH','rh2m','%rh']),
('t',['T','t2m','deg C']),
('td',['Td','td2m','deg C']),
('tw',['Tw','tw2m','deg C']),
('e',['e','e2m','hPa']),
('dpd',['DPD','dpd2m','deg C']),
('evap',['q','evap','cm w.e.'])])
# Dictionary for looking up variable standard (not actually always standard!!!) names for netCDF output of variables
NameDict = dict([('q',['specific_humidity',' decadal trend in specific humidity anomaly ('+climST+' to '+climED+' base period)']),
('rh',['relative_humidity',' decadal trend in relative humidity anomaly ('+climST+' to '+climED+' base period)']),
('e',['vapour_pressure',' decadal trend in vapour pressure anomaly ('+climST+' to '+climED+' base period)']),
('tw',['wetbulb_temperature',' decadal trend in wetbulb temperature anomaly ('+climST+' to '+climED+' base period)']),
('t',['drybulb_temperature',' decadal trend in dry bulb temperature anomaly ('+climST+' to '+climED+' base period)']),
('td',['dewpoint_temperature',' decadal trend in dew point temperature anomaly ('+climST+' to '+climED+' base period)']),
('dpd',['dewpoint depression',' decadal trend in dew point depression anomaly ('+climST+' to '+climED+' base period)']),
('evap',['evaporation',' decadal trend in evaporation anomaly ('+climST+' to '+climED+' base period)'])])
# unitees = ParamDict[param][2]
# varname = param
unitees = ParamDict[var][2]
varname = var
if domain == 'land':
DatTyp = 'IDPHA'
if (var == 'dpd'):
DatTyp = 'PHA'
if (var == 'td'):
DatTyp = 'PHADPD'
fileblurb = 'FLATgridHOM5by5'
# fileblurb = 'FLATgrid'+DatTyp+'5by5'
elif domain == 'marine':
if (typee == 'MARINE'):
fileblurb = 'BClocal5by5both'
elif (typee == 'MARINESHIP') | (typee == 'ERA5MARINEMASK') | (typee == 'ERA5MARINE'):
fileblurb = 'BClocalSHIP5by5both'
elif domain == 'blend':
DatTyp = 'IDPHA'
if (var == 'dpd'):
DatTyp = 'PHA'
if (var == 'td'):
DatTyp = 'PHADPD'
if (typee == 'BLEND'):
fileblurb = 'FLATgridHOMBClocalboth5by5'
# fileblurb = 'FLATgrid'+DatTyp+'BClocalboth5by5'
elif (typee == 'BLENDSHIP') | (typee == 'ERA5MASK') | (typee == 'ERA5'):
fileblurb = 'FLATgridHOMBClocalSHIPboth5by5'
# fileblurb = 'FLATgrid'+DatTyp+'BClocalSHIPboth5by5'
inlandcover = WORKINGDIR+'/OTHERDATA/HadCRUT.4.3.0.0.land_fraction.nc'
infile = 'HadISDH.'+domain+ParamDict[var][0]+'.'+version+'_'+fileblurb+'_'+climBIT
# infile = 'HadISDH.'+domain+ParamDict[var][0]+'.'+version+'_'+fileblurb+'_'+climBIT+'_'+thenmon+thenyear+'_cf'
outfile = 'HadISDH.'+domain+ParamDict[var][0]+'.'+version+'_'+fileblurb+'_'+climBIT+'_areaTS_'+str(sttrd)+str(edtrd) #70S-70N
if (typee.find('ERA5') >= 0):
infile = var+'2m_monthly_5by5_ERA5_1979'+str(edyr)
outfile = var+'2m_monthly_5by5_ERA5_'+climBIT+'_areaTS_'+str(sttrd)+str(edtrd) #70S-70N
infileH = 'HadISDH.'+domain+ParamDict[var][0]+'.'+version+'_'+fileblurb+'_'+climBIT
# infileH = 'HadISDH.'+domain+ParamDict[var][0]+'.'+version+'_'+fileblurb+'_'+climBIT+'_'+thenmon+thenyear+'_cf'
outfileH = 'HadISDH.'+domain+ParamDict[var][0]+'.'+version+'_'+fileblurb+'_'+climBIT+'_areaTS_'+str(sttrd)+str(edtrd) #70S-70N
# Removed the nowmonnowyear thenmonthenyear bits
# infile = 'HadISDH.'+domain+ParamDict[var][0]+'.'+version+'_'+fileblurb+'_'+climBIT+'_'+thenmon+thenyear+'_cf'
# outfile = 'HadISDH.'+domain+ParamDict[var][0]+'.'+version+'_'+fileblurb+'_'+climBIT+'_'+nowmon+nowyear+'_areaTS_'+str(sttrd)+str(edtrd) #70S-70N
if (isanom == False):
outfile = outfile+'_ABS'
# Get Data
styr = startyr
if (typee.find('ERA') >= 0):
styr = 1979
if (isanom == True):
if (domain == 'land'):
ReadInfo = [var+'2m_anoms_land','time']
outfile = outfile+'_land'
if (domain == 'marine'):
ReadInfo = [var+'2m_anoms_ocean','time']
outfile = outfile+'_marine'
if (domain == 'blend'):
ReadInfo = [var+'2m_anoms','time']
ReadInfoH = [var+'_anoms','time']
else:
ReadInfo = [var+'2m','time']
ReadInfoH = [var+'_abs','time']
else:
if (isanom == True):
ReadInfo = [var+'_anoms','time']
else:
ReadInfo = [var+'_abs','time']
## read in files
#LatInfo = ['latitude']
#LonInfo = ['longitude']
#
#if (isanom == True):
# if (homogtype == 'ERA-Interim') | (homogtype == 'ERA5'):
# if (domain == 'land'):
# ReadInfo = [varname+'_anoms_land','time']
# outfile = outfile+'_land'
# if (domain == 'marine'):
# ReadInfo = [varname+'_anoms_ocean','time']
# outfile = outfile+'_marine'
# else:
# ReadInfo = [varname+'_anoms','time']
#else:
# if (homogtype == 'ERA-Interim') | (homogtype == 'ERA5'):
# if (domain == 'land'):
# ReadInfo = [varname+'_land','time']
# outfile = outfile+'_land'
# if (domain == 'marine'):
# ReadInfo = [varname+'_ocean','time']
# outfile = outfile+'_land'
# else:
# ReadInfo = [varname+'_abs','time']#
#
print('Reading in the data for :',typee)
#print('Reading in the data for :',homogtype)
# TmpVals,Latitudes,Longitudes = GetGrid4(infile,ReadInfo,LatInfo,LonInfo)
TmpVals,Latitudes,Longitudes = GetGrid4(indir+infile+'.nc',ReadInfo,LatInfo,LonInfo)
    # Separate out data and times
TheData = TmpVals[0]
Times = TmpVals[1]
TmpVals = []
# Check the mdis = IDL output netCDF differs from Python output
bads = np.where(TheData < -10000)
if (len(bads[0]) > 0):
TheData[bads] = mdi
# If we're masking ERA then read in HadISDH
if (typee.find('MASK') >= 0):
print('Masking ERA5')
outfile = outfile+'_mask'
TmpValsH,LatitudesH,LongitudesH = GetGrid4(indirH+infileH+'.nc',ReadInfoH,LatInfo,LonInfo)
        # Separate out data and times
TheDataH = TmpValsH[0]
TimesH = TmpValsH[1]
TmpValsH = []
# Check the mdis = IDL output netCDF differs from Python output
bads = np.where(TheDataH < -10000)
if (len(bads[0]) > 0):
TheDataH[bads] = mdi
# Make HadISDH start in the same years
TheDataH = TheDataH[(styr-styrh)*12:((edyr-styrh) + 1)*12,:,:]
# Now mask the ERA data with HadISDH missing data
TheData[np.where(TheDataH == mdi)] = mdi
## Now if we're masking then read in the mask for the time slice of ERA-Interim
#if (mask == True):
#
# SliceInfo = dict([('TimeSlice',[mskstpt,mskedpt]),
# ('LatSlice',[0,nlats]),
# ('LonSlice',[0,nlons])])
#
# if (isanom == True):
# ReadInfo = [param+'_anoms']
# else:
# ReadInfo = [param+'_abs']#
#
# print('Reading in the mask data for :',typee)
# print('Reading in the mask data for :',homogtype)
# TmpVals,Latitudes,Longitudes = GetGrid4Slice(maskfile,ReadInfo,SliceInfo,LatInfo,LonInfo)
#
# # Seperate out data and times
# MSKTheData = TmpVals
## MSKTimes = TmpVals[1]
# TmpVals = []
#
# # Check the mdis = IDL output netCDF differs from Python output
# bads = np.where(MSKTheData < -10000)
# if (len(bads[0]) > 0):
# MSKTheData[bads] = mdi
#
# # mask out points in candidate that do not have data in the mask
# bads = np.where(MSKTheData <= mdi)
## pdb.set_trace()
# if (len(bads[0]) > 0):
# TheData[bads] = mdi
## # make anomalies from the monthlies if you want to be precise about anomalising with same coverage as HadISDH
## newq_values=make_array(nlons,nlats,nmons,/float,value=mdi)
## FOR ltt=0,nlats-1 DO BEGIN
## FOR lnn=0,nlons-1 DO BEGIN
## subarr=REFORM(q_values(lnn,ltt,*),12,nyrs)
## FOR mm=0,11 DO BEGIN
## gots=WHERE(subarr(mm,*) NE mdi,count)
## climsub=subarr(mm,mclimst-styr:mclimst-styr)
## gotsC=WHERE(climsub NE mdi,countC)
## IF (countC GE 15) THEN subarr(mm,gots)=subarr(mm,gots)-MEAN(climsub(gotsC)) ELSE subarr(mm,*)=mdi
## ENDFOR
## newq_values(lnn,ltt,*)=REFORM(subarr,nmons)
## ENDFOR
## ENDFOR
## #stop
## q_values=newq_values
# make spatial area masks - set anything greater than 70 deg lat to mdi
global_mask = np.zeros((nlats,nlons),dtype = float)
global_mask.fill(1)
nhem_mask = np.copy(global_mask)
shem_mask = np.copy(global_mask)
trop_mask = np.copy(global_mask)
for deg in range(nlats):
if (lats[deg] < MaskDict['G'][0]) | (lats[deg] > MaskDict['G'][1]):
global_mask[deg,:] = mdi
if (lats[deg] < MaskDict['NH'][0]) | (lats[deg] > MaskDict['NH'][1]):
nhem_mask[deg,:] = mdi
if (lats[deg] < MaskDict['T'][0]) | (lats[deg] > MaskDict['T'][1]):
trop_mask[deg,:] = mdi
if (lats[deg] < MaskDict['SH'][0]) | (lats[deg] > MaskDict['SH'][1]):
shem_mask[deg,:] = mdi
global_mask_3d = np.repeat(global_mask[np.newaxis,:,:],nmons, axis = 0)
nhem_mask_3d = np.repeat(nhem_mask[np.newaxis,:,:],nmons, axis = 0)
shem_mask_3d = np.repeat(shem_mask[np.newaxis,:,:],nmons, axis = 0)
trop_mask_3d = np.repeat(trop_mask[np.newaxis,:,:],nmons, axis = 0)
##CoverTS = np.empty(nmons,dtype = float)
##CoverTS.fill(mdi)
##glob_avg_ts,CoverTS = AreaMean(TheData,lats,global_mask_3d,CoverTS)
glob_avg_ts = AreaMean(TheData,lats,global_mask_3d)
print(len(glob_avg_ts),np.max(glob_avg_ts),np.min(glob_avg_ts))
#pdb.set_trace()
nhem_avg_ts = AreaMean(TheData,lats,nhem_mask_3d)
print(len(nhem_avg_ts),np.max(nhem_avg_ts),np.min(nhem_avg_ts))
trop_avg_ts = AreaMean(TheData,lats,trop_mask_3d)
print(len(trop_avg_ts),np.max(trop_avg_ts),np.min(trop_avg_ts))
shem_avg_ts = AreaMean(TheData,lats,shem_mask_3d)
print(len(shem_avg_ts),np.max(shem_avg_ts),np.min(shem_avg_ts))
# save to file as netCDF and .dat
# WriteNetCDF(outfile,glob_avg_ts,nhem_avg_ts,trop_avg_ts,shem_avg_ts,Times,styr, edyr, climst, climed, ParamDict[param][0], StandardNameDict[param], LongNameDict[param], unitees, MaskDict)
WriteNetCDF(outdir+outfile,glob_avg_ts,nhem_avg_ts,trop_avg_ts,shem_avg_ts,Times,styr, edyr, climST, climED, ParamDict[var][0], NameDict[var][0], NameDict[var][1], unitees, MaskDict)
# WriteText(outfile,glob_avg_ts,nhem_avg_ts,trop_avg_ts,shem_avg_ts,Times,styr, edyr)
WriteText(outdir+outfile,glob_avg_ts,nhem_avg_ts,trop_avg_ts,shem_avg_ts,Times,styr, edyr)
    # Flag any missing months in the series - at these large spatial scales there should not be any
if (len(np.where(glob_avg_ts <= mdi)[0]) > 0):
print('Missing months for Global average: ',len(np.where(glob_avg_ts <= mdi)[0]))
pdb.set_trace()
if (len(np.where(nhem_avg_ts <= mdi)[0]) > 0):
print('Missing months for NHemi average: ',len(np.where(nhem_avg_ts <= mdi)[0]))
pdb.set_trace()
if (len(np.where(trop_avg_ts <= mdi)[0]) > 0):
print('Missing months for Tropics average: ',len(np.where(trop_avg_ts <= mdi)[0]))
pdb.set_trace()
if (len(np.where(shem_avg_ts <= mdi)[0]) > 0):
print('Missing months for Shemi average: ',len(np.where(shem_avg_ts <= mdi)[0]))
# pdb.set_trace()
print('And we are done!')
if __name__ == '__main__':
main(sys.argv[1:])
| Kate-Willett/HadISDH_Build | MakeAreaAvgTS.py | Python | cc0-1.0 | 34,336 |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v10.enums",
marshal="google.ads.googleads.v10",
manifest={"LocalPlaceholderFieldEnum",},
)
class LocalPlaceholderFieldEnum(proto.Message):
r"""Values for Local placeholder fields.
For more information about dynamic remarketing feeds, see
https://support.google.com/google-ads/answer/6053288.
"""
class LocalPlaceholderField(proto.Enum):
r"""Possible values for Local placeholder fields."""
UNSPECIFIED = 0
UNKNOWN = 1
DEAL_ID = 2
DEAL_NAME = 3
SUBTITLE = 4
DESCRIPTION = 5
PRICE = 6
FORMATTED_PRICE = 7
SALE_PRICE = 8
FORMATTED_SALE_PRICE = 9
IMAGE_URL = 10
ADDRESS = 11
CATEGORY = 12
CONTEXTUAL_KEYWORDS = 13
FINAL_URLS = 14
FINAL_MOBILE_URLS = 15
TRACKING_URL = 16
ANDROID_APP_LINK = 17
SIMILAR_DEAL_IDS = 18
IOS_APP_LINK = 19
IOS_APP_STORE_ID = 20
__all__ = tuple(sorted(__protobuf__.manifest))
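# Illustrative usage sketch (not part of the generated module): the nested
# enum behaves like a standard proto-plus IntEnum, e.g.
#
#   LocalPlaceholderFieldEnum.LocalPlaceholderField.PRICE.value     # -> 6
#   LocalPlaceholderFieldEnum.LocalPlaceholderField(10).name        # -> 'IMAGE_URL'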
| googleads/google-ads-python | google/ads/googleads/v10/enums/types/local_placeholder_field.py | Python | apache-2.0 | 1,702 |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import itertools
from oslo_log import log as logging
import six
from heat.common import exception
from heat.common.i18n import _
from heat.common.i18n import _LI
from heat.engine import attributes
from heat.engine import constraints
from heat.engine import function
from heat.engine import properties
from heat.engine import resource
from heat.engine import support
try:
from pyrax.exceptions import NotFound # noqa
PYRAX_INSTALLED = True
except ImportError:
# Setup fake exception for testing without pyrax
class NotFound(Exception):
pass
PYRAX_INSTALLED = False
LOG = logging.getLogger(__name__)
def lb_immutable(exc):
if 'immutable' in six.text_type(exc):
return True
return False
class LoadbalancerBuildError(exception.HeatException):
msg_fmt = _("There was an error building the loadbalancer:%(lb_name)s.")
class CloudLoadBalancer(resource.Resource):
"""Represents a Rackspace Cloud Loadbalancer."""
support_status = support.SupportStatus(
status=support.UNSUPPORTED,
message=_('This resource is not supported, use at your own risk.'))
PROPERTIES = (
NAME, NODES, PROTOCOL, ACCESS_LIST, HALF_CLOSED, ALGORITHM,
CONNECTION_LOGGING, METADATA, PORT, TIMEOUT,
CONNECTION_THROTTLE, SESSION_PERSISTENCE, VIRTUAL_IPS,
CONTENT_CACHING, HEALTH_MONITOR, SSL_TERMINATION, ERROR_PAGE,
HTTPS_REDIRECT,
) = (
'name', 'nodes', 'protocol', 'accessList', 'halfClosed', 'algorithm',
'connectionLogging', 'metadata', 'port', 'timeout',
'connectionThrottle', 'sessionPersistence', 'virtualIps',
'contentCaching', 'healthMonitor', 'sslTermination', 'errorPage',
'httpsRedirect',
)
LB_UPDATE_PROPS = (NAME, ALGORITHM, PROTOCOL, HALF_CLOSED, PORT, TIMEOUT,
HTTPS_REDIRECT)
_NODE_KEYS = (
NODE_ADDRESSES, NODE_PORT, NODE_CONDITION, NODE_TYPE,
NODE_WEIGHT,
) = (
'addresses', 'port', 'condition', 'type',
'weight',
)
_ACCESS_LIST_KEYS = (
ACCESS_LIST_ADDRESS, ACCESS_LIST_TYPE,
) = (
'address', 'type',
)
_CONNECTION_THROTTLE_KEYS = (
CONNECTION_THROTTLE_MAX_CONNECTION_RATE,
CONNECTION_THROTTLE_MIN_CONNECTIONS,
CONNECTION_THROTTLE_MAX_CONNECTIONS,
CONNECTION_THROTTLE_RATE_INTERVAL,
) = (
'maxConnectionRate',
'minConnections',
'maxConnections',
'rateInterval',
)
_VIRTUAL_IP_KEYS = (
VIRTUAL_IP_TYPE, VIRTUAL_IP_IP_VERSION, VIRTUAL_IP_ID
) = (
'type', 'ipVersion', 'id'
)
_HEALTH_MONITOR_KEYS = (
HEALTH_MONITOR_ATTEMPTS_BEFORE_DEACTIVATION, HEALTH_MONITOR_DELAY,
HEALTH_MONITOR_TIMEOUT, HEALTH_MONITOR_TYPE, HEALTH_MONITOR_BODY_REGEX,
HEALTH_MONITOR_HOST_HEADER, HEALTH_MONITOR_PATH,
HEALTH_MONITOR_STATUS_REGEX,
) = (
'attemptsBeforeDeactivation', 'delay',
'timeout', 'type', 'bodyRegex',
'hostHeader', 'path',
'statusRegex',
)
_HEALTH_MONITOR_CONNECT_KEYS = (
HEALTH_MONITOR_ATTEMPTS_BEFORE_DEACTIVATION, HEALTH_MONITOR_DELAY,
HEALTH_MONITOR_TIMEOUT, HEALTH_MONITOR_TYPE,
)
_SSL_TERMINATION_KEYS = (
SSL_TERMINATION_SECURE_PORT, SSL_TERMINATION_PRIVATEKEY,
SSL_TERMINATION_CERTIFICATE, SSL_TERMINATION_INTERMEDIATE_CERTIFICATE,
SSL_TERMINATION_SECURE_TRAFFIC_ONLY,
) = (
'securePort', 'privatekey',
'certificate', 'intermediateCertificate',
'secureTrafficOnly',
)
ATTRIBUTES = (
PUBLIC_IP, VIPS
) = (
'PublicIp', 'virtualIps'
)
ALGORITHMS = ["LEAST_CONNECTIONS", "RANDOM", "ROUND_ROBIN",
"WEIGHTED_LEAST_CONNECTIONS", "WEIGHTED_ROUND_ROBIN"]
_health_monitor_schema = {
HEALTH_MONITOR_ATTEMPTS_BEFORE_DEACTIVATION: properties.Schema(
properties.Schema.NUMBER,
required=True,
constraints=[
constraints.Range(1, 10),
]
),
HEALTH_MONITOR_DELAY: properties.Schema(
properties.Schema.NUMBER,
required=True,
constraints=[
constraints.Range(1, 3600),
]
),
HEALTH_MONITOR_TIMEOUT: properties.Schema(
properties.Schema.NUMBER,
required=True,
constraints=[
constraints.Range(1, 300),
]
),
HEALTH_MONITOR_TYPE: properties.Schema(
properties.Schema.STRING,
required=True,
constraints=[
constraints.AllowedValues(['CONNECT', 'HTTP', 'HTTPS']),
]
),
HEALTH_MONITOR_BODY_REGEX: properties.Schema(
properties.Schema.STRING
),
HEALTH_MONITOR_HOST_HEADER: properties.Schema(
properties.Schema.STRING
),
HEALTH_MONITOR_PATH: properties.Schema(
properties.Schema.STRING
),
HEALTH_MONITOR_STATUS_REGEX: properties.Schema(
properties.Schema.STRING
),
}
properties_schema = {
NAME: properties.Schema(
properties.Schema.STRING,
update_allowed=True
),
NODES: properties.Schema(
properties.Schema.LIST,
schema=properties.Schema(
properties.Schema.MAP,
schema={
NODE_ADDRESSES: properties.Schema(
properties.Schema.LIST,
required=True,
description=(_("IP addresses for the load balancer "
"node. Must have at least one "
"address.")),
schema=properties.Schema(
properties.Schema.STRING
)
),
NODE_PORT: properties.Schema(
properties.Schema.INTEGER,
required=True
),
NODE_CONDITION: properties.Schema(
properties.Schema.STRING,
default='ENABLED',
constraints=[
constraints.AllowedValues(['ENABLED',
'DISABLED',
'DRAINING']),
]
),
NODE_TYPE: properties.Schema(
properties.Schema.STRING,
default='PRIMARY',
constraints=[
constraints.AllowedValues(['PRIMARY',
'SECONDARY']),
]
),
NODE_WEIGHT: properties.Schema(
properties.Schema.NUMBER,
default=1,
constraints=[
constraints.Range(1, 100),
]
),
},
),
required=True,
update_allowed=True
),
PROTOCOL: properties.Schema(
properties.Schema.STRING,
required=True,
constraints=[
constraints.AllowedValues(['DNS_TCP', 'DNS_UDP', 'FTP',
'HTTP', 'HTTPS', 'IMAPS',
'IMAPv4', 'LDAP', 'LDAPS',
'MYSQL', 'POP3', 'POP3S', 'SMTP',
'TCP', 'TCP_CLIENT_FIRST', 'UDP',
'UDP_STREAM', 'SFTP']),
],
update_allowed=True
),
ACCESS_LIST: properties.Schema(
properties.Schema.LIST,
schema=properties.Schema(
properties.Schema.MAP,
schema={
ACCESS_LIST_ADDRESS: properties.Schema(
properties.Schema.STRING,
required=True
),
ACCESS_LIST_TYPE: properties.Schema(
properties.Schema.STRING,
required=True,
constraints=[
constraints.AllowedValues(['ALLOW', 'DENY']),
]
),
},
)
),
HALF_CLOSED: properties.Schema(
properties.Schema.BOOLEAN,
update_allowed=True
),
ALGORITHM: properties.Schema(
properties.Schema.STRING,
constraints=[
constraints.AllowedValues(ALGORITHMS)
],
update_allowed=True
),
CONNECTION_LOGGING: properties.Schema(
properties.Schema.BOOLEAN,
update_allowed=True
),
METADATA: properties.Schema(
properties.Schema.MAP,
update_allowed=True
),
PORT: properties.Schema(
properties.Schema.INTEGER,
required=True,
update_allowed=True
),
TIMEOUT: properties.Schema(
properties.Schema.NUMBER,
constraints=[
constraints.Range(1, 120),
],
update_allowed=True
),
CONNECTION_THROTTLE: properties.Schema(
properties.Schema.MAP,
schema={
CONNECTION_THROTTLE_MAX_CONNECTION_RATE: properties.Schema(
properties.Schema.NUMBER,
constraints=[
constraints.Range(0, 100000),
]
),
CONNECTION_THROTTLE_MIN_CONNECTIONS: properties.Schema(
properties.Schema.INTEGER,
constraints=[
constraints.Range(1, 1000),
]
),
CONNECTION_THROTTLE_MAX_CONNECTIONS: properties.Schema(
properties.Schema.INTEGER,
constraints=[
constraints.Range(1, 100000),
]
),
CONNECTION_THROTTLE_RATE_INTERVAL: properties.Schema(
properties.Schema.NUMBER,
constraints=[
constraints.Range(1, 3600),
]
),
},
update_allowed=True
),
SESSION_PERSISTENCE: properties.Schema(
properties.Schema.STRING,
constraints=[
constraints.AllowedValues(['HTTP_COOKIE', 'SOURCE_IP']),
],
update_allowed=True
),
VIRTUAL_IPS: properties.Schema(
properties.Schema.LIST,
schema=properties.Schema(
properties.Schema.MAP,
schema={
VIRTUAL_IP_TYPE: properties.Schema(
properties.Schema.STRING,
"The type of VIP (public or internal). This property"
" cannot be specified if 'id' is specified. This "
"property must be specified if id is not specified.",
constraints=[
constraints.AllowedValues(['SERVICENET',
'PUBLIC']),
]
),
VIRTUAL_IP_IP_VERSION: properties.Schema(
properties.Schema.STRING,
"IP version of the VIP. This property cannot be "
"specified if 'id' is specified. This property must "
"be specified if id is not specified.",
constraints=[
constraints.AllowedValues(['IPV6', 'IPV4']),
]
),
VIRTUAL_IP_ID: properties.Schema(
properties.Schema.NUMBER,
"ID of a shared VIP to use instead of creating a "
"new one. This property cannot be specified if type"
" or version is specified."
)
},
),
required=True,
constraints=[
constraints.Length(min=1)
]
),
CONTENT_CACHING: properties.Schema(
properties.Schema.STRING,
constraints=[
constraints.AllowedValues(['ENABLED', 'DISABLED']),
],
update_allowed=True
),
HEALTH_MONITOR: properties.Schema(
properties.Schema.MAP,
schema=_health_monitor_schema,
update_allowed=True
),
SSL_TERMINATION: properties.Schema(
properties.Schema.MAP,
schema={
SSL_TERMINATION_SECURE_PORT: properties.Schema(
properties.Schema.INTEGER,
default=443
),
SSL_TERMINATION_PRIVATEKEY: properties.Schema(
properties.Schema.STRING,
required=True
),
SSL_TERMINATION_CERTIFICATE: properties.Schema(
properties.Schema.STRING,
required=True
),
# only required if configuring intermediate ssl termination
# add to custom validation
SSL_TERMINATION_INTERMEDIATE_CERTIFICATE: properties.Schema(
properties.Schema.STRING
),
# pyrax will default to false
SSL_TERMINATION_SECURE_TRAFFIC_ONLY: properties.Schema(
properties.Schema.BOOLEAN,
default=False
),
},
update_allowed=True
),
ERROR_PAGE: properties.Schema(
properties.Schema.STRING,
update_allowed=True
),
HTTPS_REDIRECT: properties.Schema(
properties.Schema.BOOLEAN,
_("Enables or disables HTTP to HTTPS redirection for the load "
"balancer. When enabled, any HTTP request returns status code "
"301 (Moved Permanently), and the requester is redirected to "
"the requested URL via the HTTPS protocol on port 443. Only "
"available for HTTPS protocol (port=443), or HTTP protocol with "
"a properly configured SSL termination (secureTrafficOnly=true, "
"securePort=443)."),
update_allowed=True,
default=False,
support_status=support.SupportStatus(version="2015.1")
)
}
attributes_schema = {
PUBLIC_IP: attributes.Schema(
_('Public IP address of the specified instance.')
),
VIPS: attributes.Schema(
_("A list of assigned virtual ip addresses")
)
}
ACTIVE_STATUS = 'ACTIVE'
DELETED_STATUS = 'DELETED'
PENDING_DELETE_STATUS = 'PENDING_DELETE'
PENDING_UPDATE_STATUS = 'PENDING_UPDATE'
def __init__(self, name, json_snippet, stack):
super(CloudLoadBalancer, self).__init__(name, json_snippet, stack)
self.clb = self.cloud_lb()
def cloud_lb(self):
return self.client('cloud_lb')
def _setup_properties(self, properties, function):
"""Use defined schema properties as kwargs for loadbalancer objects."""
if properties and function:
return [function(**self._remove_none(item_dict))
for item_dict in properties]
elif function:
return [function()]
def _alter_properties_for_api(self):
"""Set up required, but useless, key/value pairs.
The following properties have useless key/value pairs which must
be passed into the api. Set them up to make template definition easier.
"""
session_persistence = None
if self.SESSION_PERSISTENCE in self.properties.data:
session_persistence = {'persistenceType':
self.properties[self.SESSION_PERSISTENCE]}
connection_logging = None
if self.CONNECTION_LOGGING in self.properties.data:
connection_logging = {"enabled":
self.properties[self.CONNECTION_LOGGING]}
metadata = None
if self.METADATA in self.properties.data:
metadata = [{'key': k, 'value': v}
for k, v
in six.iteritems(self.properties[self.METADATA])]
return (session_persistence, connection_logging, metadata)
def _check_active(self, lb=None):
"""Update the loadbalancer state, check the status."""
if not lb:
lb = self.clb.get(self.resource_id)
if lb.status == self.ACTIVE_STATUS:
return True
else:
return False
def _valid_HTTPS_redirect_with_HTTP_prot(self):
"""Determine if HTTPS redirect is valid when protocol is HTTP"""
proto = self.properties[self.PROTOCOL]
redir = self.properties[self.HTTPS_REDIRECT]
termcfg = self.properties.get(self.SSL_TERMINATION) or {}
seconly = termcfg.get(self.SSL_TERMINATION_SECURE_TRAFFIC_ONLY, False)
secport = termcfg.get(self.SSL_TERMINATION_SECURE_PORT, 0)
if (redir and (proto == "HTTP") and seconly and (secport == 443)):
return True
return False
def _process_node(self, node):
for addr in node.get(self.NODE_ADDRESSES, []):
norm_node = copy.deepcopy(node)
norm_node['address'] = addr
del norm_node[self.NODE_ADDRESSES]
yield norm_node
def _process_nodes(self, node_list):
node_itr = six.moves.map(self._process_node, node_list)
return itertools.chain.from_iterable(node_itr)
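    # Illustrative sketch (not part of the original resource): _process_nodes
    # expands a node entry that lists several addresses into one dict per
    # address, e.g.
    #   {'addresses': ['10.0.0.1', '10.0.0.2'], 'port': 80, 'condition': 'ENABLED', ...}
    # becomes
    #   {'address': '10.0.0.1', 'port': 80, 'condition': 'ENABLED', ...}
    #   {'address': '10.0.0.2', 'port': 80, 'condition': 'ENABLED', ...}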
def _validate_https_redirect(self):
redir = self.properties[self.HTTPS_REDIRECT]
proto = self.properties[self.PROTOCOL]
if (redir and (proto != "HTTPS") and
not self._valid_HTTPS_redirect_with_HTTP_prot()):
message = _("HTTPS redirect is only available for the HTTPS "
"protocol (port=443), or the HTTP protocol with "
"a properly configured SSL termination "
"(secureTrafficOnly=true, securePort=443).")
raise exception.StackValidationFailed(message=message)
def handle_create(self):
node_list = self._process_nodes(self.properties.get(self.NODES))
nodes = [self.clb.Node(**node) for node in node_list]
vips = self.properties.get(self.VIRTUAL_IPS)
virtual_ips = self._setup_properties(vips, self.clb.VirtualIP)
(session_persistence, connection_logging, metadata
) = self._alter_properties_for_api()
lb_body = {
'port': self.properties[self.PORT],
'protocol': self.properties[self.PROTOCOL],
'nodes': nodes,
'virtual_ips': virtual_ips,
'algorithm': self.properties.get(self.ALGORITHM),
'halfClosed': self.properties.get(self.HALF_CLOSED),
'connectionThrottle': self.properties.get(
self.CONNECTION_THROTTLE),
'metadata': metadata,
'healthMonitor': self.properties.get(self.HEALTH_MONITOR),
'sessionPersistence': session_persistence,
'timeout': self.properties.get(self.TIMEOUT),
'connectionLogging': connection_logging,
self.HTTPS_REDIRECT: self.properties[self.HTTPS_REDIRECT]
}
if self._valid_HTTPS_redirect_with_HTTP_prot():
lb_body[self.HTTPS_REDIRECT] = False
self._validate_https_redirect()
lb_name = (self.properties.get(self.NAME) or
self.physical_resource_name())
LOG.debug("Creating loadbalancer: %s" % {lb_name: lb_body})
lb = self.clb.create(lb_name, **lb_body)
self.resource_id_set(str(lb.id))
def check_create_complete(self, *args):
lb = self.clb.get(self.resource_id)
return (self._check_active(lb) and
self._create_access_list(lb) and
self._create_errorpage(lb) and
self._create_ssl_term(lb) and
self._create_redirect(lb) and
self._create_cc(lb))
def _create_access_list(self, lb):
if not self.properties[self.ACCESS_LIST]:
return True
old_access_list = lb.get_access_list()
new_access_list = self.properties[self.ACCESS_LIST]
if not self._access_list_needs_update(old_access_list,
new_access_list):
return True
try:
lb.add_access_list(new_access_list)
except Exception as exc:
if lb_immutable(exc):
return False
raise
return False
def _create_errorpage(self, lb):
if not self.properties[self.ERROR_PAGE]:
return True
old_errorpage = lb.get_error_page()
new_errorpage_content = self.properties[self.ERROR_PAGE]
new_errorpage = {'errorpage': {'content': new_errorpage_content}}
if not self._errorpage_needs_update(old_errorpage, new_errorpage):
return True
try:
lb.set_error_page(new_errorpage_content)
except Exception as exc:
if lb_immutable(exc):
return False
raise
return False
def _create_ssl_term(self, lb):
if not self.properties[self.SSL_TERMINATION]:
return True
old_ssl_term = lb.get_ssl_termination()
new_ssl_term = self.properties[self.SSL_TERMINATION]
if not self._ssl_term_needs_update(old_ssl_term, new_ssl_term):
return True
try:
lb.add_ssl_termination(**new_ssl_term)
except Exception as exc:
if lb_immutable(exc):
return False
raise
return False
def _create_redirect(self, lb):
if not self._valid_HTTPS_redirect_with_HTTP_prot():
return True
old_redirect = lb.httpsRedirect
new_redirect = self.properties[self.HTTPS_REDIRECT]
if not self._redirect_needs_update(old_redirect, new_redirect):
return True
try:
lb.update(httpsRedirect=True)
except Exception as exc:
if lb_immutable(exc):
return False
raise
return False
def _create_cc(self, lb):
if not self.properties[self.CONTENT_CACHING]:
return True
old_cc = lb.content_caching
new_cc = self.properties[self.CONTENT_CACHING] == 'ENABLED'
if not self._cc_needs_update(old_cc, new_cc):
return True
try:
lb.content_caching = new_cc
except Exception as exc:
if lb_immutable(exc):
return False
raise
return False
def handle_check(self):
lb = self.clb.get(self.resource_id)
if not self._check_active():
raise exception.Error(_("Cloud Loadbalancer is not ACTIVE "
"(was: %s)") % lb.status)
def handle_update(self, json_snippet, tmpl_diff, prop_diff):
return prop_diff
def check_update_complete(self, prop_diff):
lb = self.clb.get(self.resource_id)
return (lb.status != self.PENDING_UPDATE_STATUS and # lb immutable?
self._update_props(lb, prop_diff) and
self._update_nodes_add(lb, prop_diff) and
self._update_nodes_delete(lb, prop_diff) and
self._update_nodes_change(lb, prop_diff) and
self._update_health_monitor(lb, prop_diff) and
self._update_session_persistence(lb, prop_diff) and
self._update_ssl_termination(lb, prop_diff) and
self._update_metadata(lb, prop_diff) and
self._update_errorpage(lb, prop_diff) and
self._update_connection_logging(lb, prop_diff) and
self._update_connection_throttle(lb, prop_diff) and
self._update_content_caching(lb, prop_diff))
def _nodes_need_update_add(self, old, new):
if not old:
return True
new = list(self._process_nodes(new))
new_nodes = ["%s%s" % (x['address'], x['port']) for x in new]
old_nodes = ["%s%s" % (x.address, x.port) for x in old]
for node in new_nodes:
if node not in old_nodes:
return True
return False
def _nodes_need_update_delete(self, old, new):
if not new:
return True
new = list(self._process_nodes(new))
new_nodes = ["%s%s" % (x['address'], x['port']) for x in new]
old_nodes = ["%s%s" % (x.address, x.port) for x in old]
for node in old_nodes:
if node not in new_nodes:
return True
return False
def _nodes_need_update_change(self, old, new):
def find_node(nodes, address, port):
for node in nodes:
if node['address'] == address and node['port'] == port:
return node
new = list(self._process_nodes(new))
for old_node in old:
new_node = find_node(new, old_node.address, old_node.port)
if (new_node['condition'] != old_node.condition or
new_node['type'] != old_node.type or
new_node['weight'] != old_node.weight):
return True
return False
def _needs_update_comparison(self, old, new):
if old != new:
return True
return False
def _needs_update_comparison_bool(self, old, new):
if new is None:
return old
return self._needs_update_comparison(old, new)
def _needs_update_comparison_nullable(self, old, new):
if not old and not new:
return False
return self._needs_update_comparison(old, new)
def _props_need_update(self, old, new):
return self._needs_update_comparison_nullable(old, new) # dict
def _hm_needs_update(self, old, new):
return self._needs_update_comparison_nullable(old, new) # dict
def _sp_needs_update(self, old, new):
return self._needs_update_comparison_bool(old, new) # bool
def _metadata_needs_update(self, old, new):
return self._needs_update_comparison_nullable(old, new) # dict
def _errorpage_needs_update(self, old, new):
return self._needs_update_comparison_nullable(old, new) # str
def _cl_needs_update(self, old, new):
return self._needs_update_comparison_bool(old, new) # bool
def _ct_needs_update(self, old, new):
return self._needs_update_comparison_nullable(old, new) # dict
def _cc_needs_update(self, old, new):
return self._needs_update_comparison_bool(old, new) # bool
def _ssl_term_needs_update(self, old, new):
if new is None:
return self._needs_update_comparison_nullable(
old, new) # dict
# check all relevant keys
if (old.get(self.SSL_TERMINATION_SECURE_PORT) !=
new[self.SSL_TERMINATION_SECURE_PORT]):
return True
if (old.get(self.SSL_TERMINATION_SECURE_TRAFFIC_ONLY) !=
new[self.SSL_TERMINATION_SECURE_TRAFFIC_ONLY]):
return True
if (old.get(self.SSL_TERMINATION_CERTIFICATE, '').strip() !=
new.get(self.SSL_TERMINATION_CERTIFICATE, '').strip()):
return True
if (new.get(self.SSL_TERMINATION_INTERMEDIATE_CERTIFICATE, '')
and (old.get(self.SSL_TERMINATION_INTERMEDIATE_CERTIFICATE,
'').strip()
!= new.get(self.SSL_TERMINATION_INTERMEDIATE_CERTIFICATE,
'').strip())):
return True
return False
def _access_list_needs_update(self, old, new):
old = [{key: al[key] for key in self._ACCESS_LIST_KEYS} for al in old]
old = set([frozenset(s.items()) for s in old])
new = set([frozenset(s.items()) for s in new])
return old != new
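    # Illustrative note (not part of the original resource): the frozenset
    # comparison above treats the access list as an unordered set of entries,
    # so reordering entries such as {'address': '1.2.3.4', 'type': 'ALLOW'}
    # does not trigger an update, while changing an address or type does.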
def _redirect_needs_update(self, old, new):
return self._needs_update_comparison_bool(old, new) # bool
def _update_props(self, lb, prop_diff):
old_props = {}
new_props = {}
for prop in six.iterkeys(prop_diff):
if prop in self.LB_UPDATE_PROPS:
old_props[prop] = getattr(lb, prop)
new_props[prop] = prop_diff[prop]
if new_props and self._props_need_update(old_props, new_props):
try:
lb.update(**new_props)
except Exception as exc:
if lb_immutable(exc):
return False
raise
return False
return True
def _nodes_update_data(self, lb, prop_diff):
current_nodes = lb.nodes
diff_nodes = self._process_nodes(prop_diff[self.NODES])
        # Nodes can be uniquely identified by address and
# port. Old is a dict of all nodes the loadbalancer
# currently knows about.
old = dict(("{0.address}{0.port}".format(node), node)
for node in current_nodes)
# New is a dict of the nodes the loadbalancer will know
# about after this update.
new = dict(("%s%s" % (node["address"],
node[self.NODE_PORT]), node)
for node in diff_nodes)
old_set = set(six.iterkeys(old))
new_set = set(six.iterkeys(new))
deleted = old_set.difference(new_set)
added = new_set.difference(old_set)
updated = new_set.intersection(old_set)
return old, new, deleted, added, updated
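    # Illustrative sketch (not part of the original resource): the helper above
    # keys nodes by the concatenation of address and port, so if the balancer
    # currently has a node '10.0.0.1' on port 80 and the template now only
    # lists '10.0.0.2' on port 80, it returns
    #   deleted = {'10.0.0.180'}, added = {'10.0.0.280'}, updated = set()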
def _update_nodes_add(self, lb, prop_diff):
"""Add loadbalancers in the new map that are not in the old map."""
if self.NODES not in prop_diff:
return True
old_nodes = lb.nodes if hasattr(lb, self.NODES) else None
new_nodes = prop_diff[self.NODES]
if not self._nodes_need_update_add(old_nodes, new_nodes):
return True
old, new, deleted, added, updated = self._nodes_update_data(lb,
prop_diff)
new_nodes = [self.clb.Node(**new[lb_node]) for lb_node in added]
if new_nodes:
try:
lb.add_nodes(new_nodes)
except Exception as exc:
if lb_immutable(exc):
return False
raise
return False
def _update_nodes_delete(self, lb, prop_diff):
"""Delete loadbalancers in the old dict that aren't in the new dict."""
if self.NODES not in prop_diff:
return True
old_nodes = lb.nodes if hasattr(lb, self.NODES) else None
new_nodes = prop_diff[self.NODES]
if not self._nodes_need_update_delete(old_nodes, new_nodes):
return True
old, new, deleted, added, updated = self._nodes_update_data(lb,
prop_diff)
for node in deleted:
try:
old[node].delete()
except Exception as exc:
if lb_immutable(exc):
return False
raise
return False
def _update_nodes_change(self, lb, prop_diff):
"""Update nodes that have been changed."""
if self.NODES not in prop_diff:
return True
old_nodes = lb.nodes if hasattr(lb, self.NODES) else None
new_nodes = prop_diff[self.NODES]
if not self._nodes_need_update_change(old_nodes, new_nodes):
return True
old, new, deleted, added, updated = self._nodes_update_data(lb,
prop_diff)
for node in updated:
node_changed = False
for attribute in six.iterkeys(new[node]):
new_value = new[node][attribute]
if new_value and new_value != getattr(old[node], attribute):
node_changed = True
setattr(old[node], attribute, new_value)
if node_changed:
try:
old[node].update()
except Exception as exc:
if lb_immutable(exc):
return False
raise
return False
def _update_health_monitor(self, lb, prop_diff):
if self.HEALTH_MONITOR not in prop_diff:
return True
old_hm = lb.get_health_monitor()
new_hm = prop_diff[self.HEALTH_MONITOR]
if not self._hm_needs_update(old_hm, new_hm):
return True
try:
if new_hm is None:
lb.delete_health_monitor()
else:
                # Adding a health monitor is destructive, so there's
                # no need to delete and then add
lb.add_health_monitor(**new_hm)
except Exception as exc:
if lb_immutable(exc):
return False
raise
return False
def _update_session_persistence(self, lb, prop_diff):
if self.SESSION_PERSISTENCE not in prop_diff:
return True
old_sp = lb.session_persistence
new_sp = prop_diff[self.SESSION_PERSISTENCE]
if not self._sp_needs_update(old_sp, new_sp):
return True
try:
if new_sp is None:
lb.session_persistence = ''
else:
# Adding session persistence is destructive
lb.session_persistence = new_sp
except Exception as exc:
if lb_immutable(exc):
return False
raise
return False
def _update_ssl_termination(self, lb, prop_diff):
if self.SSL_TERMINATION not in prop_diff:
return True
old_ssl_term = lb.get_ssl_termination()
new_ssl_term = prop_diff[self.SSL_TERMINATION]
if not self._ssl_term_needs_update(old_ssl_term, new_ssl_term):
return True
try:
if new_ssl_term is None:
lb.delete_ssl_termination()
else:
# Adding SSL termination is destructive
lb.add_ssl_termination(**new_ssl_term)
except Exception as exc:
if lb_immutable(exc):
return False
raise
return False
def _update_metadata(self, lb, prop_diff):
if self.METADATA not in prop_diff:
return True
old_metadata = lb.get_metadata()
new_metadata = prop_diff[self.METADATA]
if not self._metadata_needs_update(old_metadata, new_metadata):
return True
try:
if new_metadata is None:
lb.delete_metadata()
else:
lb.set_metadata(new_metadata)
except Exception as exc:
if lb_immutable(exc):
return False
raise
return False
def _update_errorpage(self, lb, prop_diff):
if self.ERROR_PAGE not in prop_diff:
return True
old_errorpage = lb.get_error_page()['errorpage']['content']
new_errorpage = prop_diff[self.ERROR_PAGE]
if not self._errorpage_needs_update(old_errorpage, new_errorpage):
return True
try:
if new_errorpage is None:
lb.clear_error_page()
else:
lb.set_error_page(new_errorpage)
except Exception as exc:
if lb_immutable(exc):
return False
raise
return False
def _update_connection_logging(self, lb, prop_diff):
if self.CONNECTION_LOGGING not in prop_diff:
return True
old_cl = lb.connection_logging
new_cl = prop_diff[self.CONNECTION_LOGGING]
if not self._cl_needs_update(old_cl, new_cl):
return True
try:
if new_cl:
lb.connection_logging = True
else:
lb.connection_logging = False
except Exception as exc:
if lb_immutable(exc):
return False
raise
return False
def _update_connection_throttle(self, lb, prop_diff):
if self.CONNECTION_THROTTLE not in prop_diff:
return True
old_ct = lb.get_connection_throttle()
new_ct = prop_diff[self.CONNECTION_THROTTLE]
if not self._ct_needs_update(old_ct, new_ct):
return True
try:
if new_ct is None:
lb.delete_connection_throttle()
else:
lb.add_connection_throttle(**new_ct)
except Exception as exc:
if lb_immutable(exc):
return False
raise
return False
def _update_content_caching(self, lb, prop_diff):
if self.CONTENT_CACHING not in prop_diff:
return True
old_cc = lb.content_caching
new_cc = prop_diff[self.CONTENT_CACHING] == 'ENABLED'
if not self._cc_needs_update(old_cc, new_cc):
return True
try:
lb.content_caching = new_cc
except Exception as exc:
if lb_immutable(exc):
return False
raise
return False
def check_delete_complete(self, *args):
if self.resource_id is None:
return True
try:
loadbalancer = self.clb.get(self.resource_id)
except NotFound:
return True
if loadbalancer.status == self.DELETED_STATUS:
return True
elif loadbalancer.status == self.PENDING_DELETE_STATUS:
return False
else:
try:
loadbalancer.delete()
except Exception as exc:
if lb_immutable(exc):
return False
raise
return False
def _remove_none(self, property_dict):
"""Remove None values that would cause schema validation problems.
These are values that may be initialized to None.
"""
return dict((key, value)
for (key, value) in six.iteritems(property_dict)
if value is not None)
def validate(self):
"""Validate any of the provided params."""
res = super(CloudLoadBalancer, self).validate()
if res:
return res
if self.properties.get(self.HALF_CLOSED):
if not (self.properties[self.PROTOCOL] == 'TCP' or
self.properties[self.PROTOCOL] == 'TCP_CLIENT_FIRST'):
message = (_('The %s property is only available for the TCP '
'or TCP_CLIENT_FIRST protocols')
% self.HALF_CLOSED)
raise exception.StackValidationFailed(message=message)
# health_monitor connect and http types require completely different
# schema
if self.properties.get(self.HEALTH_MONITOR):
prop_val = self.properties[self.HEALTH_MONITOR]
health_monitor = self._remove_none(prop_val)
schema = self._health_monitor_schema
if health_monitor[self.HEALTH_MONITOR_TYPE] == 'CONNECT':
schema = dict((k, v) for k, v in schema.items()
if k in self._HEALTH_MONITOR_CONNECT_KEYS)
properties.Properties(schema,
health_monitor,
function.resolve,
self.name).validate()
# validate if HTTPS_REDIRECT is true
self._validate_https_redirect()
        # if a vip specifies an id, it can't specify version or type;
        # otherwise version and type are required
for vip in self.properties.get(self.VIRTUAL_IPS, []):
has_id = vip.get(self.VIRTUAL_IP_ID) is not None
has_version = vip.get(self.VIRTUAL_IP_IP_VERSION) is not None
has_type = vip.get(self.VIRTUAL_IP_TYPE) is not None
if has_id:
if (has_version or has_type):
message = _("Cannot specify type or version if VIP id is"
" specified.")
raise exception.StackValidationFailed(message=message)
elif not (has_version and has_type):
message = _("Must specify VIP type and version if no id "
"specified.")
raise exception.StackValidationFailed(message=message)
def _public_ip(self, lb):
for ip in lb.virtual_ips:
if ip.type == 'PUBLIC':
return six.text_type(ip.address)
def _resolve_attribute(self, key):
if self.resource_id:
lb = self.clb.get(self.resource_id)
attribute_function = {
self.PUBLIC_IP: self._public_ip(lb),
self.VIPS: [{"id": vip.id,
"type": vip.type,
"ip_version": vip.ip_version,
"address": vip.address}
for vip in lb.virtual_ips]
}
if key not in attribute_function:
raise exception.InvalidTemplateAttribute(resource=self.name,
key=key)
function = attribute_function[key]
LOG.info(_LI('%(name)s.GetAtt(%(key)s) == %(function)s'),
{'name': self.name, 'key': key, 'function': function})
return function
def resource_mapping():
return {'Rackspace::Cloud::LoadBalancer': CloudLoadBalancer}
def available_resource_mapping():
if PYRAX_INSTALLED:
return resource_mapping()
return {}
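

# Minimal sketch of how the mapping above is meant to be consumed; Heat
# normally calls available_resource_mapping() itself during plugin loading,
# so this guard exists only for illustration.
if __name__ == '__main__':
    # Prints {'Rackspace::Cloud::LoadBalancer': <class CloudLoadBalancer>}
    # when pyrax is importable, otherwise an empty dict.
    print(available_resource_mapping())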
| dims/heat | contrib/rackspace/rackspace/resources/cloud_loadbalancer.py | Python | apache-2.0 | 43,181 |
from ..core import Storage
from ..parameters import InterpolatedLevelParameter
from ..parameters.groundwater import KeatingStreamFlowParameter
import numbers
from scipy.interpolate import interp1d
class KeatingAquifer(Storage):
def __init__(self, model, name,
num_streams, num_additional_inputs,
stream_flow_levels, transmissivity, coefficient,
levels, volumes=None, area=None, storativity=None,
**kwargs):
"""Storage node with one or more Keating outflows
Parameters
----------
model : pywr.core.Model
The Pywr Model.
name : string
A unique name for the node in the model.
num_streams : integer
Number of keating outflows.
num_additional_inputs : integer
Number of additional outflows (e.g. for direct abstraction or
discharge from the aquifer).
stream_flow_levels : list of list of floats
For each stream a list of levels to pass to the keating streamflow
parameter.
transmissivity : list of floats
The transmissivity for each stream flow level.
coefficient : list of floats
The coefficient for each stream flow level.
levels : list of floats
A list of levels for the level-volume relationship. The length
should be greater than 1.
volumes : list of floats (optional)
A list of volumes for the level-volume relationship. The length
should be the same as `levels`.
area : float (optional)
Area of the aquifer in m2.
storativity : list of floats (optional)
Storativity of the aquifer as a factor (e.g. 0.05). This defines
part of the volume-level relationship. The length should be one
less than `levels`.
Either supply the `volumes` argument or both the `area` and
`storativity` arguments.
See also documentation for the `KeatingStreamFlowParameter`.
"""
super(KeatingAquifer, self).__init__(model, name,
num_inputs=(num_streams + num_additional_inputs), **kwargs)
if not (num_streams > 0):
raise ValueError("Keating aquifer must have at least one stream outflow")
if len(stream_flow_levels) != num_streams:
raise ValueError("Stream flow levels must have `num_streams` items")
for i in stream_flow_levels:
if len(i) != len(transmissivity):
raise ValueError("Items in stream flow levels should have the same length as transmissivity")
if not isinstance(coefficient, numbers.Number):
raise ValueError("Coefficient must be a scalar")
if volumes is None:
if not isinstance(area, numbers.Number):
raise ValueError("Area must be a scalar")
if len(storativity) != (len(levels) - 1):
raise ValueError("Storativity must have one less item than levels")
heights = [levels[n+1] - levels[n] for n in range(0, len(levels)-1)]
volumes = [0.0]
for n, (s, h) in enumerate(zip(storativity, heights)):
volumes.append(volumes[-1] + area * s * h * 0.001)
else:
# check volumes
if len(volumes) != len(levels):
raise ValueError("Volumes must have the same length as levels")
self.area = area
if len(levels) != len(volumes):
raise ValueError("Levels and volumes must have the same length")
self._volumes = volumes
self._levels = levels
self._level_to_volume = interp1d(levels, volumes)
self.max_volume = max(volumes)
self.min_volume = min(volumes)
self.level = InterpolatedLevelParameter(self, volumes, levels)
# initialise streamflow parameters
for n, node in enumerate(self.inputs[0:num_streams]):
parameter = KeatingStreamFlowParameter(self, stream_flow_levels[n],
transmissivity,
coefficient)
node.max_flow = parameter
node.min_flow = parameter
def initial_level():
def fget(self):
# get the initial level from the volume
return self.level.interp(self.initial_volume)
def fset(self, value):
# actually sets the initial volume
volume = self._level_to_volume(value)
self.initial_volume = volume
return locals()
initial_level = property(**initial_level())
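

# Minimal usage sketch (illustrative only; the numbers and the Model setup are
# assumptions, not values taken from this module):
#
#   from pywr.core import Model
#   model = Model()
#   aquifer = KeatingAquifer(
#       model, "aquifer",
#       num_streams=1, num_additional_inputs=1,
#       stream_flow_levels=[[100.0, 110.0]],   # one list of levels per stream
#       transmissivity=[5.0, 10.0],            # one value per stream flow level
#       coefficient=1.0,
#       levels=[90.0, 100.0, 110.0],
#       area=5.0e6,                            # m2
#       storativity=[0.05, 0.05],              # one value per level interval
#   )
#   aquifer.initial_level = 105.0              # sets initial_volume via interpolation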
| jetuk/pywr | pywr/domains/groundwater.py | Python | gpl-3.0 | 4,671 |
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM14.IEC61970.Dynamics.MetaBlockConnectable import MetaBlockConnectable
class MetaBlockState(MetaBlockConnectable):
def __init__(self, MemberOf_MetaBlock=None, *args, **kw_args):
"""Initialises a new 'MetaBlockState' instance.
@param MemberOf_MetaBlock:
"""
self._MemberOf_MetaBlock = None
self.MemberOf_MetaBlock = MemberOf_MetaBlock
super(MetaBlockState, self).__init__(*args, **kw_args)
_attrs = []
_attr_types = {}
_defaults = {}
_enums = {}
_refs = ["MemberOf_MetaBlock"]
_many_refs = []
def getMemberOf_MetaBlock(self):
return self._MemberOf_MetaBlock
def setMemberOf_MetaBlock(self, value):
if self._MemberOf_MetaBlock is not None:
filtered = [x for x in self.MemberOf_MetaBlock.MetaBlockState if x != self]
self._MemberOf_MetaBlock._MetaBlockState = filtered
self._MemberOf_MetaBlock = value
if self._MemberOf_MetaBlock is not None:
if self not in self._MemberOf_MetaBlock._MetaBlockState:
self._MemberOf_MetaBlock._MetaBlockState.append(self)
MemberOf_MetaBlock = property(getMemberOf_MetaBlock, setMemberOf_MetaBlock)
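

# Minimal sketch of the two-way reference maintained by the property above;
# the import path and the MetaBlockState list attribute on MetaBlock are
# assumptions based on this package's layout.
#
#   from CIM14.IEC61970.Dynamics.MetaBlock import MetaBlock
#   block = MetaBlock()
#   state = MetaBlockState(MemberOf_MetaBlock=block)
#   assert state in block.MetaBlockState      # reverse link added by the setter
#   state.MemberOf_MetaBlock = None
#   assert state not in block.MetaBlockState  # reverse link removed again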
| rwl/PyCIM | CIM14/IEC61970/Dynamics/MetaBlockState.py | Python | mit | 2,325 |
from django.utils.datastructures import MultiValueDict
class HttpMergeParameters(object):
def process_request(self, request):
        if request.method.lower() == 'get':
base = request.POST
override = request.GET
else:
base = request.GET
override = request.POST
request.params = MultiValueDict(dict(base, **override))
class HttpMethodOverride(object):
def process_request(self, request):
if 'HTTP_X_HTTP_METHOD' in request.META: # (Microsoft)
request.method = request.META['HTTP_X_HTTP_METHOD']
return
elif 'HTTP_X_HTTP_METHOD_OVERRIDE' in request.META: # (Google/GData)
request.method = request.META['HTTP_X_HTTP_METHOD_OVERRIDE']
return
elif 'X_METHOD_OVERRIDE' in request.META: # (IBM)
request.method = request.META['X_METHOD_OVERRIDE']
return
elif 'X-Method' in request.params: # custom
request.method = request.params.get('X-Method')
return
class ResponseFormatDetection(object):
def _expected_types(self, request):
header = request.META.get('HTTP_ACCEPT', '*/*')
header = request.params.get('X-Accept', header)
header_types = header.split(',')
clean_types = []
        for accept_type in header_types:
            accept_type = accept_type.strip()
            if accept_type.find(';') > 0:
                accept_type = accept_type[:accept_type.find(';')]
            clean_types.append(accept_type)
return clean_types
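    # Illustrative sketch: for a request carrying
    #   Accept: text/csv;q=0.8, application/json
    # (and no X-Accept override) this returns
    #   ['text/csv', 'application/json']
    # -- parameters after ';' are dropped and the original order is kept.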
def process_template_response(self, request, response):
expected = self._expected_types(request)
json_types = ['application/json']
csv_types = ['text/comma-separated-values', 'text/csv', 'application/csv']
# todo think of adding Content-Disposition: like
# Content-Disposition: attachment; filename="download.csv"
if len(filter(set(expected).__contains__, json_types)) > 0:
ext = '.json'
response['Content-Type'] = 'application/json; charset='+response._charset
elif len(filter(set(expected).__contains__, csv_types)) > 0:
ext = '.csv'
response['Content-Type'] = 'text/csv; charset='+response._charset
else:
ext = '.html'
response.template_name += ext
return response | radoraykov/rating-gov-representatives | apps/website/core/middleware/rest.py | Python | unlicense | 2,339 |
# -*- coding: utf-8 -*-
class HTTPError(IOError):
"""Unified HTTPError used across all http_client implementations.
"""
def __init__(self, response, message=None, swagger_result=None):
"""
:type response: :class:`bravado_core.response.IncomingResponse`
:param message: Optional string message
:param swagger_result: If the response for this HTTPError is
documented in the swagger spec, then this should be the result
value of the response.
"""
self.response = response
self.message = message
self.swagger_result = swagger_result
def __str__(self):
# Try to surface the most useful/relevant information available
# since this is the first thing a developer sees when bad things
# happen.
status_and_reason = str(self.response)
message = ': ' + self.message if self.message else ''
result = ': {0}'.format(self.swagger_result) \
if self.swagger_result is not None else ''
return '{0}{1}{2}'.format(status_and_reason, message, result)
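

# Minimal self-check sketch (not part of bravado): a plain object standing in
# for bravado_core's IncomingResponse is enough here because __str__ only
# needs str(response).
if __name__ == '__main__':
    class _FakeResponse(object):
        def __str__(self):
            return '404 Not Found'

    error = HTTPError(_FakeResponse(), message='pet missing',
                      swagger_result={'code': 1})
    # Prints: 404 Not Found: pet missing: {'code': 1}
    print(str(error))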
| MphasisWyde/eWamSublimeAdaptor | src/third-party/bravado/exception.py | Python | mit | 1,108 |
import threading
import datetime
import logging
import pymongo
import re
from importlib import import_module
from path import path
from xmodule.errortracker import null_error_tracker
from xmodule.x_module import XModuleDescriptor
from xmodule.modulestore.locator import BlockUsageLocator, DescriptionLocator, CourseLocator, VersionTree
from xmodule.modulestore.exceptions import InsufficientSpecificationError, VersionConflictError
from xmodule.modulestore import inheritance
from .. import ModuleStoreBase
from ..exceptions import ItemNotFoundError
from .definition_lazy_loader import DefinitionLazyLoader
from .caching_descriptor_system import CachingDescriptorSystem
log = logging.getLogger(__name__)
#==============================================================================
# Documentation is at
# https://edx-wiki.atlassian.net/wiki/display/ENG/Mongostore+Data+Structure
#
# Known issue:
# Inheritance for cached kvs doesn't work on edits. Use case.
# 1) attribute foo is inheritable
# 2) g.children = [p], p.children = [a]
# 3) g.foo = 1 on load
# 4) if g.foo > 0, if p.foo > 0, if a.foo > 0 all eval True
# 5) p.foo = -1
# 6) g.foo > 0, p.foo <= 0 all eval True BUT
# 7) BUG: a.foo > 0 still evals True but should be False
# 8) reread and everything works right
# 9) p.del(foo), p.foo > 0 is True! works
# 10) BUG: a.foo < 0!
#    Local fix won't permanently work b/c xblock may cache a.foo...
#
#==============================================================================
class SplitMongoModuleStore(ModuleStoreBase):
"""
A Mongodb backed ModuleStore supporting versions, inheritance,
and sharing.
"""
def __init__(self, host, db, collection, fs_root, render_template,
port=27017, default_class=None,
error_tracker=null_error_tracker,
user=None, password=None,
**kwargs):
ModuleStoreBase.__init__(self)
self.db = pymongo.database.Database(pymongo.MongoClient(
host=host,
port=port,
tz_aware=True,
**kwargs
), db)
# TODO add caching of structures to thread_cache to prevent repeated fetches (but not index b/c
# it changes w/o having a change in id)
self.course_index = self.db[collection + '.active_versions']
self.structures = self.db[collection + '.structures']
self.definitions = self.db[collection + '.definitions']
# ??? Code review question: those familiar w/ python threading. Should I instead
# use django cache? How should I expire entries?
# _add_cache could use a lru mechanism to control the cache size?
self.thread_cache = threading.local()
if user is not None and password is not None:
self.db.authenticate(user, password)
# every app has write access to the db (v having a flag to indicate r/o v write)
# Force mongo to report errors, at the expense of performance
        # the pymongo docs are thin here; for an explanation see:
# http://api.mongodb.org/java/2.10.1/com/mongodb/WriteConcern.html
self.course_index.write_concern = {'w': 1}
self.structures.write_concern = {'w': 1}
self.definitions.write_concern = {'w': 1}
if default_class is not None:
module_path, _, class_name = default_class.rpartition('.')
class_ = getattr(import_module(module_path), class_name)
self.default_class = class_
else:
self.default_class = None
self.fs_root = path(fs_root)
self.error_tracker = error_tracker
self.render_template = render_template
def cache_items(self, system, base_usage_ids, depth=0, lazy=True):
'''
Handles caching of items once inheritance and any other one time
per course per fetch operations are done.
:param system: a CachingDescriptorSystem
:param base_usage_ids: list of usage_ids to fetch
:param depth: how deep below these to prefetch
:param lazy: whether to fetch definitions or use placeholders
'''
new_module_data = {}
for usage_id in base_usage_ids:
new_module_data = self.descendants(system.course_entry['blocks'],
usage_id,
depth,
new_module_data)
        # remove any which were already in module_data; iterate over a copy of
        # the keys since entries are deleted from the dict while scanning it
        for newkey in new_module_data.keys():
            if newkey in system.module_data:
                del new_module_data[newkey]
if lazy:
for block in new_module_data.itervalues():
block['definition'] = DefinitionLazyLoader(self,
block['definition'])
else:
# Load all descendants by id
descendent_definitions = self.definitions.find({
'_id': {'$in': [block['definition']
for block in new_module_data.itervalues()]}})
# turn into a map
definitions = {definition['_id']: definition
for definition in descendent_definitions}
for block in new_module_data.itervalues():
if block['definition'] in definitions:
block['definition'] = definitions[block['definition']]
system.module_data.update(new_module_data)
return system.module_data
def _load_items(self, course_entry, usage_ids, depth=0, lazy=True):
'''
Load & cache the given blocks from the course. Prefetch down to the
given depth. Load the definitions into each block if lazy is False;
otherwise, use the lazy definition placeholder.
'''
system = self._get_cache(course_entry['_id'])
if system is None:
system = CachingDescriptorSystem(
self,
course_entry,
{},
lazy,
self.default_class,
self.error_tracker,
self.render_template
)
self._add_cache(course_entry['_id'], system)
self.cache_items(system, usage_ids, depth, lazy)
return [system.load_item(usage_id, course_entry) for usage_id in usage_ids]
def _get_cache(self, course_version_guid):
"""
Find the descriptor cache for this course if it exists
:param course_version_guid:
"""
if not hasattr(self.thread_cache, 'course_cache'):
self.thread_cache.course_cache = {}
system = self.thread_cache.course_cache
return system.get(course_version_guid)
def _add_cache(self, course_version_guid, system):
"""
Save this cache for subsequent access
:param course_version_guid:
:param system:
"""
if not hasattr(self.thread_cache, 'course_cache'):
self.thread_cache.course_cache = {}
self.thread_cache.course_cache[course_version_guid] = system
return system
def _clear_cache(self):
"""
Should only be used by testing or something which implements transactional boundary semantics
"""
self.thread_cache.course_cache = {}
def _lookup_course(self, course_locator):
'''
Decode the locator into the right series of db access. Does not
return the CourseDescriptor! It returns the actual db json from
structures.
        Semantics: if course_id and revision are given, it will get that revision. If
        a version_guid is also given, it will check whether the current head of that revision == that
        guid; if not, it raises VersionConflictError (the version now differs from what it was when
        you got your reference).
:param course_locator: any subclass of CourseLocator
'''
# NOTE: if and when this uses cache, the update if changed logic will break if the cache
# holds the same objects as the descriptors!
if not course_locator.is_fully_specified():
raise InsufficientSpecificationError('Not fully specified: %s' % course_locator)
if course_locator.course_id is not None and course_locator.revision is not None:
# use the course_id
index = self.course_index.find_one({'_id': course_locator.course_id})
if index is None:
raise ItemNotFoundError(course_locator)
if course_locator.revision not in index['versions']:
raise ItemNotFoundError(course_locator)
version_guid = index['versions'][course_locator.revision]
if course_locator.version_guid is not None and version_guid != course_locator.version_guid:
# This may be a bit too touchy but it's hard to infer intent
raise VersionConflictError(course_locator, CourseLocator(course_locator, version_guid=version_guid))
else:
# TODO should this raise an exception if revision was provided?
version_guid = course_locator.version_guid
# cast string to ObjectId if necessary
version_guid = course_locator.as_object_id(version_guid)
entry = self.structures.find_one({'_id': version_guid})
# b/c more than one course can use same structure, the 'course_id' is not intrinsic to structure
# and the one assoc'd w/ it by another fetch may not be the one relevant to this fetch; so,
# fake it by explicitly setting it in the in memory structure.
if course_locator.course_id:
entry['course_id'] = course_locator.course_id
entry['revision'] = course_locator.revision
return entry
def get_courses(self, revision, qualifiers=None):
'''
Returns a list of course descriptors matching any given qualifiers.
qualifiers should be a dict of keywords matching the db fields or any
legal query for mongo to use against the active_versions collection.
Note, this is to find the current head of the named revision type
(e.g., 'draft'). To get specific versions via guid use get_course.
'''
if qualifiers is None:
qualifiers = {}
qualifiers.update({"versions.{}".format(revision): {"$exists": True}})
matching = self.course_index.find(qualifiers)
# collect ids and then query for those
version_guids = []
id_version_map = {}
for course_entry in matching:
version_guid = course_entry['versions'][revision]
version_guids.append(version_guid)
id_version_map[version_guid] = course_entry['_id']
course_entries = self.structures.find({'_id': {'$in': version_guids}})
# get the block for the course element (s/b the root)
result = []
for entry in course_entries:
# structures are course agnostic but the caller wants to know course, so add it in here
entry['course_id'] = id_version_map[entry['_id']]
root = entry['root']
result.extend(self._load_items(entry, [root], 0, lazy=True))
return result
def get_course(self, course_locator):
'''
Gets the course descriptor for the course identified by the locator
which may or may not be a blockLocator.
raises InsufficientSpecificationError
'''
course_entry = self._lookup_course(course_locator)
root = course_entry['root']
result = self._load_items(course_entry, [root], 0, lazy=True)
return result[0]
def get_course_for_item(self, location):
'''
Provided for backward compatibility. Is equivalent to calling get_course
:param location:
'''
return self.get_course(location)
def has_item(self, block_location):
"""
Returns True if location exists in its course. Returns false if
the course or the block w/in the course do not exist for the given version.
raises InsufficientSpecificationError if the locator does not id a block
"""
if block_location.usage_id is None:
raise InsufficientSpecificationError(block_location)
try:
course_structure = self._lookup_course(block_location)
except ItemNotFoundError:
# this error only occurs if the course does not exist
return False
return course_structure['blocks'].get(block_location.usage_id) is not None
def get_item(self, location, depth=0):
"""
depth (int): An argument that some module stores may use to prefetch
descendants of the queried modules for more efficient results later
in the request. The depth is counted in the number of
calls to get_children() to cache. None indicates to cache all
descendants.
raises InsufficientSpecificationError or ItemNotFoundError
"""
assert isinstance(location, BlockUsageLocator)
if not location.is_initialized():
raise InsufficientSpecificationError("Not yet initialized: %s" % location)
course = self._lookup_course(location)
items = self._load_items(course, [location.usage_id], depth, lazy=True)
if len(items) == 0:
raise ItemNotFoundError(location)
return items[0]
# TODO refactor this and get_courses to use a constructed query
def get_items(self, locator, qualifiers):
'''
Get all of the modules in the given course matching the qualifiers. The
qualifiers should only be fields in the structures collection (sorry).
There will be a separate search method for searching through
definitions.
Common qualifiers are category, definition (provide definition id),
metadata: {display_name ..}, children (return
block if its children includes the one given value). If you want
substring matching use {$regex: /acme.*corp/i} type syntax.
Although these
look like mongo queries, it is all done in memory; so, you cannot
try arbitrary queries.
:param locator: CourseLocator or BlockUsageLocator restricting search scope
:param qualifiers: a dict restricting which elements should match
'''
# TODO extend to only search a subdag of the course?
course = self._lookup_course(locator)
items = []
for usage_id, value in course['blocks'].iteritems():
if self._block_matches(value, qualifiers):
items.append(usage_id)
if len(items) > 0:
return self._load_items(course, items, 0, lazy=True)
else:
return []
# What's the use case for usage_id being separate?
def get_parent_locations(self, locator, usage_id=None):
'''
Return the locations (Locators w/ usage_ids) for the parents of this location in this
course. Could use get_items(location, {'children': usage_id}) but this is slightly faster.
NOTE: does not actually ensure usage_id exists
If usage_id is None, then the locator must specify the usage_id
'''
if usage_id is None:
usage_id = locator.usage_id
course = self._lookup_course(locator)
items = []
for parent_id, value in course['blocks'].iteritems():
for child_id in value['children']:
if usage_id == child_id:
locator = locator.as_course_locator()
items.append(BlockUsageLocator(url=locator, usage_id=parent_id))
return items
def get_course_index_info(self, course_locator):
"""
The index records the initial creation of the indexed course and tracks the current version
heads. This function is primarily for test verification but may serve some
more general purpose.
:param course_locator: must have a course_id set
:return {'org': , 'prettyid': ,
versions: {'draft': the head draft version id,
'published': the head published version id if any,
},
'edited_by': who created the course originally (named edited for consistency),
'edited_on': when the course was originally created
}
"""
if course_locator.course_id is None:
return None
index = self.course_index.find_one({'_id': course_locator.course_id})
return index
# TODO figure out a way to make this info accessible from the course descriptor
def get_course_history_info(self, course_locator):
"""
Because xblocks doesn't give a means to separate the course structure's meta information from
the course xblock's, this method will get that info for the structure as a whole.
:param course_locator:
:return {'original_version': the version guid of the original version of this course,
'previous_version': the version guid of the previous version,
'edited_by': who made the last change,
'edited_on': when the change was made
}
"""
course = self._lookup_course(course_locator)
return {'original_version': course['original_version'],
'previous_version': course['previous_version'],
'edited_by': course['edited_by'],
'edited_on': course['edited_on']
}
def get_definition_history_info(self, definition_locator):
"""
Because xblocks doesn't give a means to separate the definition's meta information from
the usage xblock's, this method will get that info for the definition
:return {'original_version': the version guid of the original version of this course,
'previous_version': the version guid of the previous version,
'edited_by': who made the last change,
'edited_on': when the change was made
}
"""
definition = self.definitions.find_one({'_id': definition_locator.definition_id})
if definition is None:
return None
return {'original_version': definition['original_version'],
'previous_version': definition['previous_version'],
'edited_by': definition['edited_by'],
'edited_on': definition['edited_on']
}
def get_course_successors(self, course_locator, version_history_depth=1):
'''
Find the version_history_depth next versions of this course. Return as a VersionTree
Mostly makes sense when course_locator uses a version_guid, but because it finds all relevant
next versions, these do include those created for other courses.
:param course_locator:
'''
if version_history_depth < 1:
return None
if course_locator.version_guid is None:
            course = self._lookup_course(course_locator)
            # _lookup_course returns the raw structure document; its '_id' is
            # the version guid
            version_guid = course['_id']
else:
version_guid = course_locator.version_guid
# TODO if depth is significant, it may make sense to get all that have the same original_version
# and reconstruct the subtree from version_guid
next_entries = self.structures.find({'previous_version' : version_guid})
        # must only scan the cursor once
next_versions = [struct for struct in next_entries]
result = {version_guid: [CourseLocator(version_guid=struct['_id']) for struct in next_versions]}
depth = 1
while depth < version_history_depth and len(next_versions) > 0:
depth += 1
next_entries = self.structures.find({'previous_version':
{'$in': [struct['_id'] for struct in next_versions]}})
next_versions = [struct for struct in next_entries]
            for course_structure in next_versions:
                result.setdefault(course_structure['previous_version'], []).append(
                    CourseLocator(version_guid=course_structure['_id']))
return VersionTree(CourseLocator(course_locator, version_guid=version_guid), result)
def get_block_generations(self, block_locator):
'''
Find the history of this block. Return as a VersionTree of each place the block changed (except
deletion).
The block's history tracks its explicit changes; so, changes in descendants won't be reflected
as new iterations.
'''
block_locator = block_locator.version_agnostic()
course_struct = self._lookup_course(block_locator)
usage_id = block_locator.usage_id
update_version_field = 'blocks.{}.update_version'.format(usage_id)
all_versions_with_block = self.structures.find({'original_version': course_struct['original_version'],
update_version_field: {'$exists': True}})
# find (all) root versions and build map previous: [successors]
possible_roots = []
result = {}
for version in all_versions_with_block:
if version['_id'] == version['blocks'][usage_id]['update_version']:
if version['blocks'][usage_id].get('previous_version') is None:
possible_roots.append(version['blocks'][usage_id]['update_version'])
else:
result.setdefault(version['blocks'][usage_id]['previous_version'], set()).add(
version['blocks'][usage_id]['update_version'])
# more than one possible_root means usage was added and deleted > 1x.
if len(possible_roots) > 1:
# find the history segment including block_locator's version
element_to_find = course_struct['blocks'][usage_id]['update_version']
if element_to_find in possible_roots:
possible_roots = [element_to_find]
for possibility in possible_roots:
if self._find_local_root(element_to_find, possibility, result):
possible_roots = [possibility]
break
elif len(possible_roots) == 0:
return None
# convert the results value sets to locators
for k, versions in result.iteritems():
result[k] = [BlockUsageLocator(version_guid=version, usage_id=usage_id)
for version in versions]
return VersionTree(BlockUsageLocator(version_guid=possible_roots[0], usage_id=usage_id), result)
def get_definition_successors(self, definition_locator, version_history_depth=1):
'''
Find the version_history_depth next versions of this definition. Return as a VersionTree
'''
# TODO implement
pass
def create_definition_from_data(self, new_def_data, category, user_id):
"""
Pull the definition fields out of descriptor and save to the db as a new definition
w/o a predecessor and return the new id.
:param user_id: request.user object
"""
document = {"category" : category,
"data": new_def_data,
"edited_by": user_id,
"edited_on": datetime.datetime.utcnow(),
"previous_version": None,
"original_version": None}
new_id = self.definitions.insert(document)
definition_locator = DescriptionLocator(new_id)
document['original_version'] = new_id
self.definitions.update({'_id': new_id}, {'$set': {"original_version": new_id}})
return definition_locator
def update_definition_from_data(self, definition_locator, new_def_data, user_id):
"""
See if new_def_data differs from the persisted version. If so, update
the persisted version and return the new id.
:param user_id: request.user
"""
def needs_saved():
if isinstance(new_def_data, dict):
for key, value in new_def_data.iteritems():
if key not in old_definition['data'] or value != old_definition['data'][key]:
return True
for key, value in old_definition['data'].iteritems():
if key not in new_def_data:
return True
else:
return new_def_data != old_definition['data']
# if this looks in cache rather than fresh fetches, then it will probably not detect
# actual change b/c the descriptor and cache probably point to the same objects
old_definition = self.definitions.find_one({'_id': definition_locator.definition_id})
if old_definition is None:
raise ItemNotFoundError(definition_locator.url())
del old_definition['_id']
if needs_saved():
old_definition['data'] = new_def_data
old_definition['edited_by'] = user_id
old_definition['edited_on'] = datetime.datetime.utcnow()
old_definition['previous_version'] = definition_locator.definition_id
new_id = self.definitions.insert(old_definition)
return DescriptionLocator(new_id), True
else:
return definition_locator, False
def _generate_usage_id(self, course_blocks, category):
"""
Generate a somewhat readable block id unique w/in this course using the category
:param course_blocks: the current list of blocks.
:param category:
"""
# NOTE: a potential bug is that a block is deleted and another created which gets the old
# block's id. a possible fix is to cache the last serial in a dict in the structure
# {category: last_serial...}
# A potential confusion is if the name incorporates the parent's name, then if the child
# moves, its id won't change and will be confusing
serial = 1
while category + str(serial) in course_blocks:
serial += 1
return category + str(serial)
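    # Illustrative sketch: with course_blocks containing 'chapter1' and
    # 'chapter3', _generate_usage_id(course_blocks, 'chapter') returns
    # 'chapter2' -- the first free serial is reused, which is why the NOTE
    # above warns about deleted ids coming back.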
def _generate_course_id(self, id_root):
"""
Generate a somewhat readable course id unique w/in this db using the id_root
        :param id_root: the base string for the course id; a numeric suffix is
            appended if that root is already in use
"""
existing_uses = self.course_index.find({"_id": {"$regex": id_root}})
if existing_uses.count() > 0:
max_found = 0
matcher = re.compile(id_root + r'(\d+)')
for entry in existing_uses:
serial = re.search(matcher, entry['_id'])
                if serial is not None and len(serial.groups()) > 0:
value = int(serial.group(1))
if value > max_found:
max_found = value
return id_root + str(max_found + 1)
else:
return id_root
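    # Illustrative sketch: if the index already holds ids 'myorg', 'myorg1'
    # and 'myorg4', then _generate_course_id('myorg') returns 'myorg5'
    # (one past the highest numeric suffix); with no existing matches it
    # returns 'myorg' unchanged.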
# TODO I would love to write this to take a real descriptor and persist it BUT descriptors, kvs, and dbmodel
# all assume locators are set and unique! Having this take the model contents piecemeal breaks the separation
# of model from persistence layer
def create_item(self, course_or_parent_locator, category, user_id, definition_locator=None, new_def_data=None,
metadata=None, force=False):
"""
Add a descriptor to persistence as the last child of the optional parent_location or just as an element
of the course (if no parent provided). Return the resulting post saved version with populated locators.
If the locator is a BlockUsageLocator, then it's assumed to be the parent. If it's a CourseLocator, then it's
merely the containing course.
raises InsufficientSpecificationError if there is no course locator.
raises VersionConflictError if course_id and version_guid given and the current version head != version_guid
and force is not True.
        force: fork the structure and don't update the course draftVersion if the above conflict arises
The incoming definition_locator should either be None to indicate this is a brand new definition or
a pointer to the existing definition to which this block should point or from which this was derived.
If new_def_data is None, then definition_locator must have a value meaning that this block points
to the existing definition. If new_def_data is not None and definition_location is not None, then
new_def_data is assumed to be a new payload for definition_location.
Creates a new version of the course structure, creates and inserts the new block, makes the block point
to the definition which may be new or a new version of an existing or an existing.
Rules for course locator:
* If the course locator specifies a course_id and either it doesn't
specify version_guid or the one it specifies == the current draft, it progresses the course to point
to the new draft and sets the active version to point to the new draft
* If the locator has a course_id but its version_guid != current draft, it raises VersionConflictError.
NOTE: using a version_guid will end up creating a new version of the course. Your new item won't be in
the course id'd by version_guid but instead in one w/ a new version_guid. Ensure in this case that you get
the new version_guid from the locator in the returned object!
"""
# find course_index entry if applicable and structures entry
index_entry = self._get_index_if_valid(course_or_parent_locator, force)
structure = self._lookup_course(course_or_parent_locator)
# persist the definition if persisted != passed
if (definition_locator is None or definition_locator.definition_id is None):
definition_locator = self.create_definition_from_data(new_def_data, category, user_id)
elif new_def_data is not None:
definition_locator, _ = self.update_definition_from_data(definition_locator, new_def_data, user_id)
# copy the structure and modify the new one
new_structure = self._version_structure(structure, user_id)
# generate an id
new_usage_id = self._generate_usage_id(new_structure['blocks'], category)
update_version_keys = ['blocks.{}.update_version'.format(new_usage_id)]
if isinstance(course_or_parent_locator, BlockUsageLocator) and course_or_parent_locator.usage_id is not None:
parent = new_structure['blocks'][course_or_parent_locator.usage_id]
parent['children'].append(new_usage_id)
parent['edited_on'] = datetime.datetime.utcnow()
parent['edited_by'] = user_id
parent['previous_version'] = parent['update_version']
update_version_keys.append('blocks.{}.update_version'.format(course_or_parent_locator.usage_id))
new_structure['blocks'][new_usage_id] = {
"children": [],
"category": category,
"definition": definition_locator.definition_id,
"metadata": metadata if metadata else {},
'edited_on': datetime.datetime.utcnow(),
'edited_by': user_id,
'previous_version': None
}
new_id = self.structures.insert(new_structure)
update_version_payload = {key: new_id for key in update_version_keys}
self.structures.update({'_id': new_id},
{'$set': update_version_payload})
# update the index entry if appropriate
if index_entry is not None:
self._update_head(index_entry, course_or_parent_locator.revision, new_id)
course_parent = course_or_parent_locator.as_course_locator()
else:
course_parent = None
# fetch and return the new item--fetching is unnecessary but a good qc step
return self.get_item(BlockUsageLocator(course_id=course_parent,
usage_id=new_usage_id,
version_guid=new_id))
def create_course(self, org, prettyid, user_id, id_root=None, metadata=None, course_data=None,
master_version='draft', versions_dict=None, root_category='course'):
"""
Create a new entry in the active courses index which points to an existing or new structure. Returns
the course root of the resulting entry (the location has the course id)
id_root: allows the caller to specify the course_id. It's a root in that, if it's already taken,
this method will append things to the root to make it unique. (defaults to org)
metadata: if provided, will set the metadata of the root course object in the new draft course. If both
metadata and a starting version are provided, it will generate a successor version to the given version,
and update the metadata with any provided values (via update not setting).
course_data: if provided, will update the data of the new course xblock definition to this. Like metadata,
if provided, this will cause a new version of any given version as well as a new version of the
definition (which will point to the existing one if given a version). If not provided and given
a draft_version, it will reuse the same definition as the draft course (obvious since it's reusing the draft
course). If not provided and no draft is given, it will be empty and get the field defaults (hopefully) when
loaded.
master_version: the tag (key) for the version name in the dict which is the 'draft' version. Not the actual
version guid, but what to call it.
versions_dict: the starting version ids where the keys are the tags such as 'draft' and 'published'
and the values are structure guids. If provided, the new course will reuse this version (unless you also
provide any overrides such as metadata, see above). if not provided, will create a mostly empty course
structure with just a category course root xblock.
"""
if metadata is None:
metadata = {}
# build from inside out: definition, structure, index entry
# if building a wholly new structure
if versions_dict is None or master_version not in versions_dict:
# create new definition and structure
if course_data is None:
course_data = {}
definition_entry = {
'category': root_category,
'data': course_data,
'edited_by': user_id,
'edited_on': datetime.datetime.utcnow(),
'previous_version': None,
}
definition_id = self.definitions.insert(definition_entry)
definition_entry['original_version'] = definition_id
self.definitions.update({'_id': definition_id}, {'$set': {"original_version": definition_id}})
draft_structure = {
'root': 'course',
'previous_version': None,
'edited_by': user_id,
'edited_on': datetime.datetime.utcnow(),
'blocks': {
'course': {
'children':[],
'category': 'course',
'definition': definition_id,
'metadata': metadata,
'edited_on': datetime.datetime.utcnow(),
'edited_by': user_id,
'previous_version': None}}}
new_id = self.structures.insert(draft_structure)
draft_structure['original_version'] = new_id
self.structures.update({'_id': new_id},
{'$set': {"original_version": new_id,
'blocks.course.update_version': new_id}})
if versions_dict is None:
versions_dict = {master_version: new_id}
else:
versions_dict[master_version] = new_id
else:
# just get the draft_version structure
draft_version = CourseLocator(version_guid=versions_dict[master_version])
draft_structure = self._lookup_course(draft_version)
if course_data is not None or metadata:
draft_structure = self._version_structure(draft_structure, user_id)
root_block = draft_structure['blocks'][draft_structure['root']]
if metadata is not None:
root_block['metadata'].update(metadata)
if course_data is not None:
definition = self.definitions.find_one({'_id': root_block['definition']})
definition['data'].update(course_data)
definition['previous_version'] = definition['_id']
definition['edited_by'] = user_id
definition['edited_on'] = datetime.datetime.utcnow()
del definition['_id']
root_block['definition'] = self.definitions.insert(definition)
root_block['edited_on'] = datetime.datetime.utcnow()
root_block['edited_by'] = user_id
root_block['previous_version'] = root_block.get('update_version')
# insert updates the '_id' in draft_structure
new_id = self.structures.insert(draft_structure)
versions_dict[master_version] = new_id
self.structures.update({'_id': new_id},
{'$set': {'blocks.{}.update_version'.format(draft_structure['root']): new_id}})
# create the index entry
if id_root is None:
id_root = org
new_id = self._generate_course_id(id_root)
index_entry = {
'_id': new_id,
'org': org,
'prettyid': prettyid,
'edited_by': user_id,
'edited_on': datetime.datetime.utcnow(),
'versions': versions_dict}
new_id = self.course_index.insert(index_entry)
return self.get_course(CourseLocator(course_id=new_id, revision=master_version))
def update_item(self, descriptor, user_id, force=False):
"""
Save the descriptor's definition, metadata, & children references (i.e., it doesn't descend the tree).
Return the new descriptor (updated location).
raises ItemNotFoundError if the location does not exist.
Creates a new course version. If the descriptor's location has a course_id, it moves the course head
pointer. If the version_guid of the descriptor points to a non-head version and there's been an intervening
change to this item, it raises a VersionConflictError unless force is True. In the force case, it forks
the course but leaves the head pointer where it is (this change will not be in the course head).
        The implementation tries to detect which changes, if any, actually need to be saved and thus won't version
        the definition, structure, or course if they didn't change.
"""
original_structure = self._lookup_course(descriptor.location)
index_entry = self._get_index_if_valid(descriptor.location, force)
descriptor.definition_locator, is_updated = self.update_definition_from_data(
descriptor.definition_locator, descriptor.xblock_kvs.get_data(), user_id)
# check children
original_entry = original_structure['blocks'][descriptor.location.usage_id]
if (not is_updated and descriptor.has_children
and not self._xblock_lists_equal(original_entry['children'], descriptor.children)):
is_updated = True
# check metadata
if not is_updated:
is_updated = self._compare_metadata(descriptor.xblock_kvs.get_own_metadata(), original_entry['metadata'])
# if updated, rev the structure
if is_updated:
new_structure = self._version_structure(original_structure, user_id)
block_data = new_structure['blocks'][descriptor.location.usage_id]
if descriptor.has_children:
block_data["children"] = [self._usage_id(child) for child in descriptor.children]
block_data["definition"] = descriptor.definition_locator.definition_id
block_data["metadata"] = descriptor.xblock_kvs.get_own_metadata()
block_data['edited_on'] = datetime.datetime.utcnow()
block_data['edited_by'] = user_id
block_data['previous_version'] = block_data['update_version']
new_id = self.structures.insert(new_structure)
self.structures.update({'_id': new_id},
{'$set': {'blocks.{}.update_version'.format(descriptor.location.usage_id): new_id}})
# update the index entry if appropriate
if index_entry is not None:
self._update_head(index_entry, descriptor.location.revision, new_id)
# fetch and return the new item--fetching is unnecessary but a good qc step
return self.get_item(BlockUsageLocator(descriptor.location, version_guid=new_id))
else:
# nothing changed, just return the one sent in
return descriptor
def persist_xblock_dag(self, xblock, user_id, force=False):
"""
create or update the xblock and all of its children. The xblock's location must specify a course.
If it doesn't specify a usage_id, then it's presumed to be new and need creation. This function
descends the children performing the same operation for any that are xblocks. Any children which
are usage_ids just update the children pointer.
All updates go into the same course version (bulk updater).
Updates the objects which came in w/ updated location and definition_location info.
returns the post-persisted version of the incoming xblock. Note that its children will be ids not
objects.
:param xblock:
:param user_id:
"""
# find course_index entry if applicable and structures entry
index_entry = self._get_index_if_valid(xblock.location, force)
structure = self._lookup_course(xblock.location)
new_structure = self._version_structure(structure, user_id)
changed_blocks = self._persist_subdag(xblock, user_id, new_structure['blocks'])
if changed_blocks:
new_id = self.structures.insert(new_structure)
update_command = {}
for usage_id in changed_blocks:
update_command['blocks.{}.update_version'.format(usage_id)] = new_id
self.structures.update({'_id': new_id}, {'$set': update_command})
# update the index entry if appropriate
if index_entry is not None:
self._update_head(index_entry, xblock.location.revision, new_id)
# fetch and return the new item--fetching is unnecessary but a good qc step
return self.get_item(BlockUsageLocator(xblock.location, version_guid=new_id))
else:
return xblock
def _persist_subdag(self, xblock, user_id, structure_blocks):
# persist the definition if persisted != passed
new_def_data = xblock.xblock_kvs.get_data()
if (xblock.definition_locator is None or xblock.definition_locator.definition_id is None):
xblock.definition_locator = self.create_definition_from_data(new_def_data,
xblock.category, user_id)
is_updated = True
elif new_def_data is not None:
xblock.definition_locator, is_updated = self.update_definition_from_data(xblock.definition_locator,
new_def_data, user_id)
if xblock.location.usage_id is None:
# generate an id
is_new = True
is_updated = True
usage_id = self._generate_usage_id(structure_blocks, xblock.category)
xblock.location.usage_id = usage_id
else:
is_new = False
usage_id = xblock.location.usage_id
if (not is_updated and xblock.has_children
and not self._xblock_lists_equal(structure_blocks[usage_id]['children'], xblock.children)):
is_updated = True
children = []
updated_blocks = []
if xblock.has_children:
for child in xblock.children:
if isinstance(child, XModuleDescriptor):
updated_blocks += self._persist_subdag(child, user_id, structure_blocks)
children.append(child.location.usage_id)
else:
children.append(child)
is_updated = is_updated or updated_blocks
metadata = xblock.xblock_kvs.get_own_metadata()
if not is_new and not is_updated:
is_updated = self._compare_metadata(metadata, structure_blocks[usage_id]['metadata'])
if is_updated:
structure_blocks[usage_id] = {
"children": children,
"category": xblock.category,
"definition": xblock.definition_locator.definition_id,
"metadata": metadata if metadata else {},
'previous_version': structure_blocks.get(usage_id, {}).get('update_version'),
'edited_by': user_id,
'edited_on': datetime.datetime.utcnow()
}
updated_blocks.append(usage_id)
return updated_blocks
def _compare_metadata(self, metadata, original_metadata):
original_keys = original_metadata.keys()
if len(metadata) != len(original_keys):
return True
else:
new_keys = metadata.keys()
for key in original_keys:
if key not in new_keys or original_metadata[key] != metadata[key]:
return True
# TODO change all callers to update_item
def update_children(self, course_id, location, children):
raise NotImplementedError()
# TODO change all callers to update_item
def update_metadata(self, course_id, location, metadata):
raise NotImplementedError()
def update_course_index(self, course_locator, new_values_dict, update_versions=False):
"""
Change the given course's index entry for the given fields. new_values_dict
should be a subset of the dict returned by get_course_index_info.
        It cannot include '_id' (doing so will raise ValueError).
Provide update_versions=True if you intend this to replace the versions hash.
Note, this operation can be dangerous and break running courses.
If the dict includes versions and not update_versions, it will raise an exception.
If the dict includes edited_on or edited_by, it will raise an exception
Does not return anything useful.
"""
# TODO how should this log the change? edited_on and edited_by for this entry
# has the semantic of who created the course and when; so, changing those will lose
# that information.
if '_id' in new_values_dict:
raise ValueError("Cannot override _id")
if 'edited_on' in new_values_dict or 'edited_by' in new_values_dict:
raise ValueError("Cannot set edited_on or edited_by")
if not update_versions and 'versions' in new_values_dict:
raise ValueError("Cannot override versions without setting update_versions")
self.course_index.update({'_id': course_locator.course_id},
{'$set': new_values_dict})
def delete_item(self, usage_locator, user_id, force=False):
"""
Delete the tree rooted at block and any references w/in the course to the block
from a new version of the course structure.
returns CourseLocator for new version
raises ItemNotFoundError if the location does not exist.
raises ValueError if usage_locator points to the structure root
Creates a new course version. If the descriptor's location has a course_id, it moves the course head
pointer. If the version_guid of the descriptor points to a non-head version and there's been an intervening
change to this item, it raises a VersionConflictError unless force is True. In the force case, it forks
the course but leaves the head pointer where it is (this change will not be in the course head).
"""
assert isinstance(usage_locator, BlockUsageLocator) and usage_locator.is_initialized()
original_structure = self._lookup_course(usage_locator)
if original_structure['root'] == usage_locator.usage_id:
raise ValueError("Cannot delete the root of a course")
index_entry = self._get_index_if_valid(usage_locator, force)
new_structure = self._version_structure(original_structure, user_id)
new_blocks = new_structure['blocks']
parents = self.get_parent_locations(usage_locator)
update_version_keys = []
for parent in parents:
parent_block = new_blocks[parent.usage_id]
parent_block['children'].remove(usage_locator.usage_id)
parent_block['edited_on'] = datetime.datetime.utcnow()
parent_block['edited_by'] = user_id
parent_block['previous_version'] = parent_block['update_version']
update_version_keys.append('blocks.{}.update_version'.format(parent.usage_id))
# remove subtree
def remove_subtree(usage_id):
for child in new_blocks[usage_id]['children']:
remove_subtree(child)
del new_blocks[usage_id]
remove_subtree(usage_locator.usage_id)
# update index if appropriate and structures
new_id = self.structures.insert(new_structure)
if update_version_keys:
update_version_payload = {key: new_id for key in update_version_keys}
self.structures.update({'_id': new_id}, {'$set': update_version_payload})
result = CourseLocator(version_guid=new_id)
# update the index entry if appropriate
if index_entry is not None:
self._update_head(index_entry, usage_locator.revision, new_id)
result.course_id = usage_locator.course_id
result.revision = usage_locator.revision
return result
def delete_course(self, course_id):
"""
Remove the given course from the course index.
Only removes the course from the index. The data remains. You can use create_course
with a versions hash to restore the course; however, the edited_on and
edited_by won't reflect the originals, of course.
:param course_id: uses course_id rather than locator to emphasize its global effect
"""
index = self.course_index.find_one({'_id': course_id})
if index is None:
raise ItemNotFoundError(course_id)
# this is the only real delete in the system. should it do something else?
self.course_index.remove(index['_id'])
# TODO remove all callers and then this
def get_errored_courses(self):
"""
This function doesn't make sense for the mongo modulestore, as structures
are loaded on demand, rather than up front
"""
return {}
def inherit_metadata(self, block_map, block, inheriting_metadata=None):
"""
        Updates block with any values that exist in inheriting_metadata and
        don't appear in block['metadata'], and then passes block['metadata']
        on to all of the children in block['children']. Filters by
        inheritance.INHERITABLE_METADATA.
"""
if block is None:
return
if inheriting_metadata is None:
inheriting_metadata = {}
# the currently passed down values take precedence over any previously cached ones
# NOTE: this should show the values which all fields would have if inherited: i.e.,
# not set to the locally defined value but to value set by nearest ancestor who sets it
block.setdefault('_inherited_metadata', {}).update(inheriting_metadata)
# update the inheriting w/ what should pass to children
inheriting_metadata = block['_inherited_metadata'].copy()
for field in inheritance.INHERITABLE_METADATA:
if field in block['metadata']:
inheriting_metadata[field] = block['metadata'][field]
for child in block.get('children', []):
self.inherit_metadata(block_map, block_map[child], inheriting_metadata)
def descendants(self, block_map, usage_id, depth, descendent_map):
"""
adds block and its descendants out to depth to descendent_map
Depth specifies the number of levels of descendants to return
(0 => this usage only, 1 => this usage and its children, etc...)
A depth of None returns all descendants
"""
if usage_id not in block_map:
return descendent_map
if usage_id not in descendent_map:
descendent_map[usage_id] = block_map[usage_id]
if depth is None or depth > 0:
depth = depth - 1 if depth is not None else None
for child in block_map[usage_id].get('children', []):
descendent_map = self.descendants(block_map, child, depth,
descendent_map)
return descendent_map
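    # Worked example of the depth semantics above (illustrative, not part of
    # the original code): with depth=0 only the block itself is added, with
    # depth=1 the block plus its direct children, and with depth=None the
    # recursion continues all the way down to the leaves, e.g.:
    #
    #     result = self.descendants(block_map, some_usage_id, 1, {})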
def definition_locator(self, definition):
'''
Pull the id out of the definition w/ correct semantics for its
representation
'''
if isinstance(definition, DefinitionLazyLoader):
return definition.definition_locator
elif '_id' not in definition:
return None
else:
return DescriptionLocator(definition['_id'])
def _block_matches(self, value, qualifiers):
'''
Return True or False depending on whether the value (block contents)
matches the qualifiers as per get_items
:param value:
:param qualifiers:
'''
for key, criteria in qualifiers.iteritems():
if key in value:
target = value[key]
if not self._value_matches(target, criteria):
return False
elif criteria is not None:
return False
return True
def _value_matches(self, target, criteria):
''' helper for _block_matches '''
if isinstance(target, list):
return any(self._value_matches(ele, criteria)
for ele in target)
elif isinstance(criteria, dict):
if '$regex' in criteria:
return re.search(criteria['$regex'], target) is not None
elif not isinstance(target, dict):
return False
else:
return (isinstance(target, dict) and
self._block_matches(target, criteria))
else:
return criteria == target
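    # Illustrative sketch of the matching rules above (not part of the original
    # code); the block contents and qualifiers are made-up values:
    #
    #     value = {'category': 'problem', 'metadata': {'display_name': 'Quiz 1'}}
    #     self._block_matches(value, {'category': 'problem'})    # True
    #     self._block_matches(value, {'metadata': {'display_name': {'$regex': 'Quiz'}}})    # True
    #     self._block_matches(value, {'category': 'video'})    # False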
def _xblock_lists_equal(self, lista, listb):
"""
Do the 2 lists refer to the same xblocks in the same order (presumes they're from the
same course)
:param lista:
:param listb:
"""
if len(lista) != len(listb):
return False
        for idx in range(len(lista)):
if lista[idx] != listb[idx]:
itema = self._usage_id(lista[idx])
if itema != self._usage_id(listb[idx]):
return False
return True
def _usage_id(self, xblock_or_id):
"""
arg is either an xblock or an id. If an xblock, get the usage_id from its location. Otherwise, return itself.
:param xblock_or_id:
"""
if isinstance(xblock_or_id, XModuleDescriptor):
return xblock_or_id.location.usage_id
else:
return xblock_or_id
def _get_index_if_valid(self, locator, force=False):
"""
If the locator identifies a course and points to its draft (or plausibly its draft),
then return the index entry.
raises VersionConflictError if not the right version
:param locator:
"""
if locator.course_id is None or locator.revision is None:
return None
else:
index_entry = self.course_index.find_one({'_id': locator.course_id})
if (locator.version_guid is not None
and index_entry['versions'][locator.revision] != locator.version_guid
and not force):
raise VersionConflictError(
locator,
CourseLocator(
course_id=index_entry['_id'],
version_guid=index_entry['versions'][locator.revision],
revision=locator.revision))
else:
return index_entry
def _version_structure(self, structure, user_id):
"""
Copy the structure and update the history info (edited_by, edited_on, previous_version)
:param structure:
:param user_id:
"""
new_structure = structure.copy()
new_structure['blocks'] = new_structure['blocks'].copy()
del new_structure['_id']
new_structure['previous_version'] = structure['_id']
new_structure['edited_by'] = user_id
new_structure['edited_on'] = datetime.datetime.utcnow()
return new_structure
def _find_local_root(self, element_to_find, possibility, tree):
if possibility not in tree:
return False
if element_to_find in tree[possibility]:
return True
for subtree in tree[possibility]:
if self._find_local_root(element_to_find, subtree, tree):
return True
return False
def _update_head(self, index_entry, revision, new_id):
"""
Update the active index for the given course's revision to point to new_id
:param index_entry:
:param course_locator:
:param new_id:
"""
self.course_index.update(
{"_id": index_entry["_id"]},
{"$set": {"versions.{}".format(revision): new_id}})
| rationalAgent/edx-platform-custom | common/lib/xmodule/xmodule/modulestore/split_mongo/split.py | Python | agpl-3.0 | 58,181 |
import collections
import json
import pkg_resources
__version__ = "1.1.3"
def versionList(versionString):
"""
  Transform a version string into a list of integers so that versions can be compared.
  :param versionString: a string containing the version in the format '9.9.9.xxx'
  :return: an integer list containing the version in the format [9, 9, 9]. Alphanumeric components are ignored.
"""
versionInt = []
versionSplit = versionString.split(".")
for v in versionSplit:
if v.isdigit():
      versionInt.append(int(v))
else:
break
return versionInt
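# Illustrative example of the comparison used below (not part of the original
# module): alphanumeric components are dropped, so versionList("0.2.2.dev1")
# yields [0, 2, 2], and versionList("0.2.2") <= versionList("0.10.0") holds
# element-wise because the components are compared as integers.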
try:
import nupic
except ImportError:
raise ImportError("NuPIC library not found! Access https://github.com/numenta/nupic/ for get help on how install it.")
foundNupic = pkg_resources.get_distribution("nupic")
versionRequiredMin = "0.2.2"
versionRequiredMax = "99.99.99"
if not (versionList(versionRequiredMin) <= versionList(foundNupic.version) <= versionList(versionRequiredMax)):
raise Exception("Unexpected version of NuPIC Library! Expected between %s and %s, but detected %s in %s." % (versionRequiredMin, versionRequiredMax, foundNupic.version, foundNupic.location))
try:
from PyQt4 import Qt, QtCore, QtGui, QtOpenGL
except ImportError:
msg = "PyQt4 library not found! Access http://pyqt.sourceforge.net/Docs/PyQt4/installation.html for get help on how install it" \
"...or use a package manager like apt, yum, or brew:\n" \
" apt-get install python-qt4 python-qt4-gl\n" \
" yum install PyQt4\n" \
" brew install pyqt"
raise ImportError(msg)
class MachineState(object):
"""
This class consists of a queue with max length to store states for each time step.
"""
def __init__(self, defaultValue, maxLen):
self.defaultValue = defaultValue
self.maxLen = maxLen
self.list = [defaultValue] * maxLen
def getList(self):
"""
    Get the list of stored machine states.
"""
return self.list
def rotate(self):
"""
    Update the state machine by removing the first element and appending a new default element at the end.
"""
self.list.remove(self.list[0])
self.list.append(self.defaultValue)
def atGivenStepAgo(self, timeStep):
"""
Get the state for a given time step.
"""
return self.list[len(self.list) - timeStep - 1]
def setForCurrStep(self, value):
"""
Set the state for the current time step.
"""
self.list[len(self.list) - 1] = value
def atCurrStep(self):
"""
Get the state of the current time step.
"""
return self.list[len(self.list) - 1]
def atPreviousStep(self):
"""
Get the state of the previous time step.
"""
return self.list[len(self.list) - 2]
def atFirstStep(self):
"""
    Get the state of the first time step.
"""
return self.list[0]
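# Illustrative usage sketch (not part of the original module): a MachineState
# with maxLen=3 keeps a sliding window of per-step values.
#
#   state = MachineState(defaultValue=0, maxLen=3)
#   state.setForCurrStep(5)    # internal list becomes [0, 0, 5]
#   state.rotate()             # internal list becomes [0, 5, 0]
#   state.atPreviousStep()     # returns 5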
def getInstantiatedClass(moduleName, className, classParams):
"""
Return an instantiated class given a module, class, and constructor params
"""
# Remove single quote from parameter values
# foo: 'bar' => foo: bar
classParams = classParams.replace(": '", ": ")
classParams = classParams.replace("', ", ", ")
classParams = classParams.replace("'}", "}")
classParams = classParams.replace("'", "\"")
module = __import__(moduleName, fromlist=[className])
class_ = getattr(module, className)
params = json.loads(classParams, object_pairs_hook=collections.OrderedDict)
instance = class_(**params)
return instance
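# Illustrative usage sketch (not part of the original module); the module path,
# class name, and parameters below are assumptions chosen to show the expected
# format (numeric values only, keys single-quoted as in the project's config):
#
#   encoder = getInstantiatedClass("nupic.encoders.scalar", "ScalarEncoder",
#                                  "{'w': 21, 'n': 400, 'minval': 0, 'maxval': 100}")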
class ArrayTableModel(QtGui.QStandardItemModel):
def __init__(self, flags):
QtGui.QStandardItemModel.__init__(self)
self.flags = flags
self.header = []
self.data = []
def update(self, header, data):
self.header = header
self.data = data
numCols = len(self.header)
self.setColumnCount(numCols)
numRows = len(self.data)
self.setRowCount(numRows)
for col in range(numCols):
self.setHeaderData(col, QtCore.Qt.Horizontal, self.header[col])
for row in range(numRows):
for col in range(numCols):
value = self.data[row][col]
self.setData(self.index(row, col, QtCore.QModelIndex()), value)
def setData(self, index, value, role=None):
self.data[index.row()][index.column()] = value
self.dataChanged.emit(index, index)
return True
def data(self, index, role=None):
column, row = index.column(), index.row()
if role == QtCore.Qt.TextAlignmentRole:
return QtCore.Qt.AlignRight
elif role == QtCore.Qt.DisplayRole:
return self.data[row][column]
return
def columnCount(self, parent=None, **kwargs):
return len(self.header)
def rowCount(self, parent=None, **kwargs):
return len(self.data)
def flags(self, index):
return self.flags
| neuroidss/nupic.studio | nupic_studio/__init__.py | Python | gpl-2.0 | 4,707 |
DEBUG = True
SERVICE_NAME = 'tenykssame'
SERVICE_VERSION = '0.1.0'
SERVICE_UUID = '2b7b68b1-722d-4271-a82f-1ce0cf791d21'
SERVICE_DESCRIPTION = 'same'
ZMQ_CONNECTION = {
'out': 'tcp://localhost:61124',
'in': 'tcp://localhost:61123'
}
| kyleterry/tenyks-contrib | src/tenykssame/settings.py | Python | mit | 241 |
#!/usr/bin/python2.7
# Compresses the core Blockly files into a single JavaScript file.
#
# Copyright 2012 Google Inc.
# https://developers.google.com/blockly/
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script generates two versions of Blockly's core files:
# blockly_compressed.js
# blockly_uncompressed.js
# The compressed file is a concatenation of all of Blockly's core files which
# have been run through Google's Closure Compiler. This is done using the
# online API (which takes a few seconds and requires an Internet connection).
# The uncompressed file is a script that loads in each of Blockly's core files
# one by one. This takes much longer for a browser to load, but is useful
# when debugging code since line numbers are meaningful and variables haven't
# been renamed. The uncompressed file also allows for a faster developement
# cycle since there is no need to rebuild or recompile, just reload.
#
# This script also generates:
# blocks_compressed.js: The compressed common blocks.
# blocks_horizontal_compressed.js: The compressed Scratch horizontal blocks.
# blocks_vertical_compressed.js: The compressed Scratch vertical blocks.
# msg/js/<LANG>.js for every language <LANG> defined in msg/js/<LANG>.json.
import sys
if sys.version_info[0] != 2:
raise Exception("Blockly build only compatible with Python 2.x.\n"
"You are using: " + sys.version)
import errno, glob, httplib, json, os, re, subprocess, threading, urllib
def import_path(fullpath):
"""Import a file with full path specification.
Allows one to import from any directory, something __import__ does not do.
Args:
fullpath: Path and filename of import.
Returns:
An imported module.
"""
path, filename = os.path.split(fullpath)
filename, ext = os.path.splitext(filename)
sys.path.append(path)
module = __import__(filename)
reload(module) # Might be out of date.
del sys.path[-1]
return module
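# Illustrative usage (not part of the original script): this is how the Closure
# build helper is loaded near the bottom of this file, from a path outside the
# package tree:
#
#   calcdeps = import_path(os.path.join(
#       os.path.pardir, "closure-library", "closure", "bin", "calcdeps.py"))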
HEADER = ("// Do not edit this file; automatically generated by build.py.\n"
"'use strict';\n")
class Gen_uncompressed(threading.Thread):
"""Generate a JavaScript file that loads Blockly's raw files.
Runs in a separate thread.
"""
def __init__(self, search_paths, vertical):
threading.Thread.__init__(self)
self.search_paths = search_paths
self.vertical = vertical
def run(self):
if self.vertical:
target_filename = 'blockly_uncompressed_vertical.js'
else:
target_filename = 'blockly_uncompressed_horizontal.js'
f = open(target_filename, 'w')
f.write(HEADER)
f.write("""
var isNodeJS = !!(typeof module !== 'undefined' && module.exports &&
typeof window === 'undefined');
if (isNodeJS) {
var window = {};
require('../closure-library/closure/goog/bootstrap/nodejs');
}
window.BLOCKLY_DIR = (function() {
if (!isNodeJS) {
// Find name of current directory.
var scripts = document.getElementsByTagName('script');
var re = new RegExp('(.+)[\/]blockly_uncompressed(_vertical|_horizontal|)\.js$');
for (var i = 0, script; script = scripts[i]; i++) {
var match = re.exec(script.src);
if (match) {
return match[1];
}
}
alert('Could not detect Blockly\\'s directory name.');
}
return '';
})();
window.BLOCKLY_BOOT = function() {
var dir = '';
if (isNodeJS) {
require('../closure-library/closure/goog/bootstrap/nodejs');
dir = 'blockly';
} else {
// Execute after Closure has loaded.
if (!window.goog) {
alert('Error: Closure not found. Read this:\\n' +
'developers.google.com/blockly/guides/modify/web/closure');
}
dir = window.BLOCKLY_DIR.match(/[^\\/]+$/)[0];
}
""")
add_dependency = []
base_path = calcdeps.FindClosureBasePath(self.search_paths)
for dep in calcdeps.BuildDependenciesFromFiles(self.search_paths):
add_dependency.append(calcdeps.GetDepsLine(dep, base_path))
add_dependency = '\n'.join(add_dependency)
# Find the Blockly directory name and replace it with a JS variable.
# This allows blockly_uncompressed.js to be compiled on one computer and be
# used on another, even if the directory name differs.
m = re.search('[\\/]([^\\/]+)[\\/]core[\\/]blockly.js', add_dependency)
add_dependency = re.sub('([\\/])' + re.escape(m.group(1)) +
'([\\/]core[\\/])', '\\1" + dir + "\\2', add_dependency)
f.write(add_dependency + '\n')
provides = []
for dep in calcdeps.BuildDependenciesFromFiles(self.search_paths):
if not dep.filename.startswith(os.pardir + os.sep): # '../'
provides.extend(dep.provides)
provides.sort()
f.write('\n')
f.write('// Load Blockly.\n')
for provide in provides:
f.write("goog.require('%s');\n" % provide)
f.write("""
delete this.BLOCKLY_DIR;
delete this.BLOCKLY_BOOT;
};
if (isNodeJS) {
window.BLOCKLY_BOOT()
module.exports = Blockly;
} else {
// Delete any existing Closure (e.g. Soy's nogoog_shim).
document.write('<script>var goog = undefined;</script>');
// Load fresh Closure Library.
document.write('<script src="' + window.BLOCKLY_DIR +
'/../closure-library/closure/goog/base.js"></script>');
document.write('<script>window.BLOCKLY_BOOT();</script>');
}
""")
f.close()
print("SUCCESS: " + target_filename)
class Gen_compressed(threading.Thread):
"""Generate a JavaScript file that contains all of Blockly's core and all
required parts of Closure, compiled together.
Uses the Closure Compiler's online API.
Runs in a separate thread.
"""
def __init__(self, search_paths_vertical, search_paths_horizontal):
threading.Thread.__init__(self)
self.search_paths_vertical = search_paths_vertical
self.search_paths_horizontal = search_paths_horizontal
def run(self):
self.gen_core(True)
self.gen_core(False)
self.gen_blocks("horizontal")
self.gen_blocks("vertical")
self.gen_blocks("common")
self.gen_generator("arduino")
def gen_core(self, vertical):
if vertical:
target_filename = 'blockly_compressed_vertical.js'
search_paths = self.search_paths_vertical
else:
target_filename = 'blockly_compressed_horizontal.js'
search_paths = self.search_paths_horizontal
# Define the parameters for the POST request.
params = [
("compilation_level", "SIMPLE_OPTIMIZATIONS"),
("use_closure_library", "true"),
("output_format", "json"),
("output_info", "compiled_code"),
("output_info", "warnings"),
("output_info", "errors"),
("output_info", "statistics"),
]
# Read in all the source files.
filenames = calcdeps.CalculateDependencies(search_paths,
[os.path.join("core", "blockly.js")])
for filename in filenames:
# Filter out the Closure files (the compiler will add them).
if filename.startswith(os.pardir + os.sep): # '../'
continue
f = open(filename)
params.append(("js_code", "".join(f.readlines())))
f.close()
self.do_compile(params, target_filename, filenames, "")
def gen_blocks(self, block_type):
if block_type == "horizontal":
target_filename = "blocks_compressed_horizontal.js"
filenames = glob.glob(os.path.join("blocks_horizontal", "*.js"))
elif block_type == "vertical":
target_filename = "blocks_compressed_vertical.js"
filenames = glob.glob(os.path.join("blocks_vertical", "*.js"))
elif block_type == "common":
target_filename = "blocks_compressed.js"
filenames = glob.glob(os.path.join("blocks_common", "*.js"))
# Define the parameters for the POST request.
params = [
("compilation_level", "SIMPLE_OPTIMIZATIONS"),
("output_format", "json"),
("output_info", "compiled_code"),
("output_info", "warnings"),
("output_info", "errors"),
("output_info", "statistics"),
]
# Read in all the source files.
# Add Blockly.Blocks to be compatible with the compiler.
params.append(("js_code", "goog.provide('Blockly.Blocks');"))
# Add Blockly.Colours for use of centralized colour bank
filenames.append(os.path.join("core", "colours.js"))
filenames.append(os.path.join("core", "constants.js"))
for filename in filenames:
f = open(filename)
params.append(("js_code", "".join(f.readlines())))
f.close()
# Remove Blockly.Blocks to be compatible with Blockly.
remove = "var Blockly={Blocks:{}};"
self.do_compile(params, target_filename, filenames, remove)
def gen_generator(self, language):
target_filename = language + "_compressed.js"
# Define the parameters for the POST request.
params = [
("compilation_level", "SIMPLE_OPTIMIZATIONS"),
("output_format", "json"),
("output_info", "compiled_code"),
("output_info", "warnings"),
("output_info", "errors"),
("output_info", "statistics"),
]
# Read in all the source files.
# Add Blockly.Generator to be compatible with the compiler.
params.append(("js_code", "goog.provide('Blockly.Generator');"))
filenames = glob.glob(
os.path.join("generators", language, "*.js"))
filenames.insert(0, os.path.join("generators", language + ".js"))
for filename in filenames:
f = open(filename)
params.append(("js_code", "".join(f.readlines())))
f.close()
filenames.insert(0, "[goog.provide]")
# Remove Blockly.Generator to be compatible with Blockly.
remove = "var Blockly={Generator:{}};"
self.do_compile(params, target_filename, filenames, remove)
def do_compile(self, params, target_filename, filenames, remove):
# Send the request to Google.
headers = {"Content-type": "application/x-www-form-urlencoded"}
conn = httplib.HTTPConnection("closure-compiler.appspot.com")
conn.request("POST", "/compile", urllib.urlencode(params), headers)
response = conn.getresponse()
json_str = response.read()
conn.close()
# Parse the JSON response.
json_data = json.loads(json_str)
def file_lookup(name):
if not name.startswith("Input_"):
return "???"
n = int(name[6:]) - 1
return filenames[n]
if json_data.has_key("serverErrors"):
errors = json_data["serverErrors"]
for error in errors:
print("SERVER ERROR: %s" % target_filename)
print(error["error"])
elif json_data.has_key("errors"):
errors = json_data["errors"]
for error in errors:
print("FATAL ERROR")
print(error["error"])
if error["file"]:
print("%s at line %d:" % (
file_lookup(error["file"]), error["lineno"]))
print(error["line"])
print((" " * error["charno"]) + "^")
sys.exit(1)
else:
if json_data.has_key("warnings"):
warnings = json_data["warnings"]
for warning in warnings:
print("WARNING")
print(warning["warning"])
if warning["file"]:
print("%s at line %d:" % (
file_lookup(warning["file"]), warning["lineno"]))
print(warning["line"])
print((" " * warning["charno"]) + "^")
          print("")
if not json_data.has_key("compiledCode"):
print("FATAL ERROR: Compiler did not return compiledCode.")
sys.exit(1)
code = HEADER + "\n" + json_data["compiledCode"]
code = code.replace(remove, "")
# Trim down Google's Apache licences.
# The Closure Compiler used to preserve these until August 2015.
# Delete this in a few months if the licences don't return.
LICENSE = re.compile("""/\\*
[\w ]+
(Copyright \\d+ Google Inc.)
https://developers.google.com/blockly/
Licensed under the Apache License, Version 2.0 \(the "License"\);
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
\\*/""")
code = re.sub(LICENSE, r"\n// \1 Apache License 2.0", code)
stats = json_data["statistics"]
original_b = stats["originalSize"]
compressed_b = stats["compressedSize"]
if original_b > 0 and compressed_b > 0:
f = open(target_filename, "w")
f.write(code)
f.close()
original_kb = int(original_b / 1024 + 0.5)
compressed_kb = int(compressed_b / 1024 + 0.5)
ratio = int(float(compressed_b) / float(original_b) * 100 + 0.5)
print("SUCCESS: " + target_filename)
print("Size changed from %d KB to %d KB (%d%%)." % (
original_kb, compressed_kb, ratio))
else:
print("UNKNOWN ERROR")
class Gen_langfiles(threading.Thread):
"""Generate JavaScript file for each natural language supported.
Runs in a separate thread.
"""
def __init__(self):
threading.Thread.__init__(self)
def _rebuild(self, srcs, dests):
# Determine whether any of the files in srcs is newer than any in dests.
try:
return (max(os.path.getmtime(src) for src in srcs) >
min(os.path.getmtime(dest) for dest in dests))
except OSError as e:
# Was a file not found?
if e.errno == errno.ENOENT:
# If it was a source file, we can't proceed.
if e.filename in srcs:
print("Source file missing: " + e.filename)
sys.exit(1)
else:
# If a destination file was missing, rebuild.
return True
else:
print("Error checking file creation times: " + e)
def run(self):
# The files msg/json/{en,qqq,synonyms}.json depend on msg/messages.js.
if self._rebuild([os.path.join("msg", "messages.js")],
[os.path.join("msg", "json", f) for f in
["en.json", "qqq.json", "synonyms.json"]]):
try:
subprocess.check_call([
"python",
os.path.join("i18n", "js_to_json.py"),
"--input_file", "msg/messages.js",
"--output_dir", "msg/json/",
"--quiet"])
except (subprocess.CalledProcessError, OSError) as e:
# Documentation for subprocess.check_call says that CalledProcessError
# will be raised on failure, but I found that OSError is also possible.
print("Error running i18n/js_to_json.py: ", e)
sys.exit(1)
# Checking whether it is necessary to rebuild the js files would be a lot of
# work since we would have to compare each <lang>.json file with each
# <lang>.js file. Rebuilding is easy and cheap, so just go ahead and do it.
try:
# Use create_messages.py to create .js files from .json files.
cmd = [
"python",
os.path.join("i18n", "create_messages.py"),
"--source_lang_file", os.path.join("msg", "json", "en.json"),
"--source_synonym_file", os.path.join("msg", "json", "synonyms.json"),
"--key_file", os.path.join("msg", "json", "keys.json"),
"--output_dir", os.path.join("msg", "js"),
"--quiet"]
json_files = glob.glob(os.path.join("msg", "json", "*.json"))
json_files = [file for file in json_files if not
(file.endswith(("keys.json", "synonyms.json", "qqq.json")))]
cmd.extend(json_files)
subprocess.check_call(cmd)
except (subprocess.CalledProcessError, OSError) as e:
print("Error running i18n/create_messages.py: ", e)
sys.exit(1)
# Output list of .js files created.
for f in json_files:
# This assumes the path to the current directory does not contain "json".
f = f.replace("json", "js")
if os.path.isfile(f):
print("SUCCESS: " + f)
else:
print("FAILED to create " + f)
def exclude_vertical(item):
return not item.endswith("block_render_svg_vertical.js")
def exclude_horizontal(item):
return not item.endswith("block_render_svg_horizontal.js")
if __name__ == "__main__":
try:
calcdeps = import_path(os.path.join(
os.path.pardir, "closure-library", "closure", "bin", "calcdeps.py"))
except ImportError:
if os.path.isdir(os.path.join(os.path.pardir, "closure-library-read-only")):
# Dir got renamed when Closure moved from Google Code to GitHub in 2014.
print("Error: Closure directory needs to be renamed from"
"'closure-library-read-only' to 'closure-library'.\n"
"Please rename this directory.")
elif os.path.isdir(os.path.join(os.path.pardir, "google-closure-library")):
# When Closure is installed by npm, it is named "google-closure-library".
#calcdeps = import_path(os.path.join(
# os.path.pardir, "google-closure-library", "closure", "bin", "calcdeps.py"))
print("Error: Closure directory needs to be renamed from"
"'google-closure-library' to 'closure-library'.\n"
"Please rename this directory.")
else:
print("""Error: Closure not found. Read this:
developers.google.com/blockly/guides/modify/web/closure""")
sys.exit(1)
search_paths = calcdeps.ExpandDirectories(
["core", os.path.join(os.path.pardir, "closure-library")])
search_paths_horizontal = filter(exclude_vertical, search_paths)
search_paths_vertical = filter(exclude_horizontal, search_paths)
# Run all tasks in parallel threads.
# Uncompressed is limited by processor speed.
# Compressed is limited by network and server speed.
# Vertical:
Gen_uncompressed(search_paths_vertical, True).start()
# Horizontal:
Gen_uncompressed(search_paths_horizontal, False).start()
# Compressed forms of vertical and horizontal.
Gen_compressed(search_paths_vertical, search_paths_horizontal).start()
# This is run locally in a separate thread.
Gen_langfiles().start()
| kesl-scratch/PopconBot | scratch-blocks/build.py | Python | mit | 18,550 |
# BSD Licence
# Copyright (c) 2011, Science & Technology Facilities Council (STFC)
# All rights reserved.
#
# See the LICENSE file in the source distribution of this software for
# the full license text.
from drslib.cmip5 import CMIP5FileSystem
from drslib import p_cmip5
from drslib import drs_tree
import gen_drs
import os, shutil
import metaconfig
import tempfile
LISTING = 'multi_product.ls'
config = metaconfig.get_config('drslib')
shelve_dir = config.get('p_cmip5', 'shelve-dir')
def setup_module():
global p_cmip5, listing, tmpdir, dt
tmpdir = tempfile.mkdtemp(prefix='drs_tree-product-')
print 'TMPDIR ',tmpdir
shelves = p_cmip5.init._find_shelves(shelve_dir)
config_file = os.path.join(os.path.dirname(__file__), 'ukmo_sample.ini')
listing = os.path.join(os.path.dirname(__file__), LISTING)
gen_drs.write_listing(tmpdir, listing)
p_cmip5 = p_cmip5.product.cmip5_product(mip_table_shelve=shelves['stdo_mip'],
template=shelves['template'],
stdo=shelves['stdo'],
config=config_file,
not_ok_excpt=True)
drs_fs = CMIP5FileSystem(tmpdir)
dt = drs_tree.DRSTree(drs_fs)
dt.set_p_cmip5(p_cmip5)
def test_product_dup():
"""
Test scanning a set of files that are put into multiple products.
"""
filenames = [f.strip() for f in open(listing).readlines()]
def iter_files():
for filename in filenames:
yield filename, tmpdir
dt.discover_incoming_fromfiles(iter_files())
# There should be 2 pub-level datasets
assert len(dt.pub_trees) == 2
pt1, pt2 = dt.pub_trees.values()
# They should be in separate products
assert pt1.drs.product != pt2.drs.product
#!NOTE: if the test fails before here it isn't really testing multi-product ingests
# probably the p_cmip5 algorithm has changed to put the test data into 1 product.
# They should be disjoint
set1 = set(x[0] for x in pt1._todo)
set2 = set(x[0] for x in pt2._todo)
assert set1.isdisjoint(set2)
# Check the total number of files is right
assert len(set1) + len(set2) == len(filenames)
def test_product_fixed():
filenames = ['areacella_fx_IPSL-CM5A-LR_piControl_r0i0p0.nc']
def iter_files():
for filename in filenames:
yield filename, tmpdir
dt.discover_incoming_fromfiles(iter_files())
def teardown_module():
shutil.rmtree(tmpdir)
| ESGF/esgf-drslib | test/test_drs_tree_product.py | Python | bsd-3-clause | 2,575 |
#!/usr/bin/python
#
# Copyright 2008-2010 WebDriver committers
# Copyright 2008-2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from selenium import webdriver
from selenium.test.selenium.webdriver.common import api_examples
from selenium.test.selenium.webdriver.common.webserver import SimpleWebServer
def setup_module(module):
webserver = SimpleWebServer()
webserver.start()
FirefoxApiExampleTest.webserver = webserver
FirefoxApiExampleTest.driver = webdriver.Firefox()
class FirefoxApiExampleTest(api_examples.ApiExampleTest):
pass
def teardown_module(module):
FirefoxApiExampleTest.driver.quit()
FirefoxApiExampleTest.webserver.stop()
| gx1997/chrome-loongson | third_party/webdriver/python/test/selenium/webdriver/firefox/test_ff_api.py | Python | bsd-3-clause | 1,189 |
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Binary for training translation models and decoding from them.
Running this program without --decode will download the WMT corpus into
the directory specified as --data_dir and tokenize it in a very basic way,
and then start training a model saving checkpoints to --train_dir.
Running with --decode starts an interactive loop so you can see how
the current checkpoint translates English sentences into French.
See the following papers for more information on neural translation models.
* http://arxiv.org/abs/1409.3215
* http://arxiv.org/abs/1409.0473
* http://arxiv.org/pdf/1412.2007v2.pdf
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import os
import random
import sys
import time
import tensorflow.python.platform
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
from tensorflow.models.rnn.translate import data_utils
from tensorflow.models.rnn.translate import seq2seq_model
from tensorflow.python.platform import gfile
tf.app.flags.DEFINE_float("learning_rate", 0.5, "Learning rate.")
tf.app.flags.DEFINE_float("learning_rate_decay_factor", 0.99,
"Learning rate decays by this much.")
tf.app.flags.DEFINE_float("max_gradient_norm", 5.0,
"Clip gradients to this norm.")
tf.app.flags.DEFINE_integer("batch_size", 64,
"Batch size to use during training.")
tf.app.flags.DEFINE_integer("size", 1024, "Size of each model layer.")
tf.app.flags.DEFINE_integer("num_layers", 3, "Number of layers in the model.")
tf.app.flags.DEFINE_integer("en_vocab_size", 40000, "English vocabulary size.")
tf.app.flags.DEFINE_integer("fr_vocab_size", 40000, "French vocabulary size.")
tf.app.flags.DEFINE_string("data_dir", "/tmp", "Data directory")
tf.app.flags.DEFINE_string("train_dir", "/tmp", "Training directory.")
tf.app.flags.DEFINE_integer("max_train_data_size", 0,
"Limit on the size of training data (0: no limit).")
tf.app.flags.DEFINE_integer("steps_per_checkpoint", 200,
"How many training steps to do per checkpoint.")
tf.app.flags.DEFINE_boolean("decode", False,
"Set to True for interactive decoding.")
tf.app.flags.DEFINE_boolean("self_test", False,
"Run a self-test if this is set to True.")
FLAGS = tf.app.flags.FLAGS
# We use a number of buckets and pad to the closest one for efficiency.
# See seq2seq_model.Seq2SeqModel for details of how they work.
_buckets = [(5, 10), (10, 15), (20, 25), (40, 50)]
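# Worked example of the bucketing below (illustrative): a 4-token English
# sentence with an 8-token French target (plus EOS) satisfies 4 < 5 and 9 < 10,
# so it lands in bucket (5, 10); a 12-token source with a short enough target
# lands in (20, 25), the first bucket it fits; pairs exceeding (40, 50) are dropped.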
def read_data(source_path, target_path, max_size=None):
"""Read data from source and target files and put into buckets.
Args:
source_path: path to the files with token-ids for the source language.
target_path: path to the file with token-ids for the target language;
it must be aligned with the source file: n-th line contains the desired
output for n-th line from the source_path.
max_size: maximum number of lines to read, all other will be ignored;
if 0 or None, data files will be read completely (no limit).
Returns:
data_set: a list of length len(_buckets); data_set[n] contains a list of
(source, target) pairs read from the provided data files that fit
into the n-th bucket, i.e., such that len(source) < _buckets[n][0] and
len(target) < _buckets[n][1]; source and target are lists of token-ids.
"""
data_set = [[] for _ in _buckets]
with gfile.GFile(source_path, mode="r") as source_file:
with gfile.GFile(target_path, mode="r") as target_file:
source, target = source_file.readline(), target_file.readline()
counter = 0
while source and target and (not max_size or counter < max_size):
counter += 1
if counter % 100000 == 0:
print(" reading data line %d" % counter)
sys.stdout.flush()
source_ids = [int(x) for x in source.split()]
target_ids = [int(x) for x in target.split()]
target_ids.append(data_utils.EOS_ID)
for bucket_id, (source_size, target_size) in enumerate(_buckets):
if len(source_ids) < source_size and len(target_ids) < target_size:
data_set[bucket_id].append([source_ids, target_ids])
break
source, target = source_file.readline(), target_file.readline()
return data_set
def create_model(session, forward_only):
"""Create translation model and initialize or load parameters in session."""
model = seq2seq_model.Seq2SeqModel(
FLAGS.en_vocab_size, FLAGS.fr_vocab_size, _buckets,
FLAGS.size, FLAGS.num_layers, FLAGS.max_gradient_norm, FLAGS.batch_size,
FLAGS.learning_rate, FLAGS.learning_rate_decay_factor,
forward_only=forward_only)
ckpt = tf.train.get_checkpoint_state(FLAGS.train_dir)
if ckpt and gfile.Exists(ckpt.model_checkpoint_path):
print("Reading model parameters from %s" % ckpt.model_checkpoint_path)
model.saver.restore(session, ckpt.model_checkpoint_path)
else:
print("Created model with fresh parameters.")
session.run(tf.initialize_all_variables())
return model
def train():
"""Train a en->fr translation model using WMT data."""
# Prepare WMT data.
print("Preparing WMT data in %s" % FLAGS.data_dir)
en_train, fr_train, en_dev, fr_dev, _, _ = data_utils.prepare_wmt_data(
FLAGS.data_dir, FLAGS.en_vocab_size, FLAGS.fr_vocab_size)
with tf.Session() as sess:
# Create model.
print("Creating %d layers of %d units." % (FLAGS.num_layers, FLAGS.size))
model = create_model(sess, False)
# Read data into buckets and compute their sizes.
print ("Reading development and training data (limit: %d)."
% FLAGS.max_train_data_size)
dev_set = read_data(en_dev, fr_dev)
train_set = read_data(en_train, fr_train, FLAGS.max_train_data_size)
train_bucket_sizes = [len(train_set[b]) for b in xrange(len(_buckets))]
train_total_size = float(sum(train_bucket_sizes))
# A bucket scale is a list of increasing numbers from 0 to 1 that we'll use
# to select a bucket. Length of [scale[i], scale[i+1]] is proportional to
    # the size of the i-th training bucket, as used later.
train_buckets_scale = [sum(train_bucket_sizes[:i + 1]) / train_total_size
for i in xrange(len(train_bucket_sizes))]
# This is the training loop.
step_time, loss = 0.0, 0.0
current_step = 0
previous_losses = []
while True:
# Choose a bucket according to data distribution. We pick a random number
# in [0, 1] and use the corresponding interval in train_buckets_scale.
random_number_01 = np.random.random_sample()
bucket_id = min([i for i in xrange(len(train_buckets_scale))
if train_buckets_scale[i] > random_number_01])
# Get a batch and make a step.
start_time = time.time()
encoder_inputs, decoder_inputs, target_weights = model.get_batch(
train_set, bucket_id)
_, step_loss, _ = model.step(sess, encoder_inputs, decoder_inputs,
target_weights, bucket_id, False)
step_time += (time.time() - start_time) / FLAGS.steps_per_checkpoint
loss += step_loss / FLAGS.steps_per_checkpoint
current_step += 1
# Once in a while, we save checkpoint, print statistics, and run evals.
if current_step % FLAGS.steps_per_checkpoint == 0:
# Print statistics for the previous epoch.
perplexity = math.exp(loss) if loss < 300 else float('inf')
print ("global step %d learning rate %.4f step-time %.2f perplexity "
"%.2f" % (model.global_step.eval(), model.learning_rate.eval(),
step_time, perplexity))
# Decrease learning rate if no improvement was seen over last 3 times.
if len(previous_losses) > 2 and loss > max(previous_losses[-3:]):
sess.run(model.learning_rate_decay_op)
previous_losses.append(loss)
# Save checkpoint and zero timer and loss.
checkpoint_path = os.path.join(FLAGS.train_dir, "translate.ckpt")
model.saver.save(sess, checkpoint_path, global_step=model.global_step)
step_time, loss = 0.0, 0.0
# Run evals on development set and print their perplexity.
for bucket_id in xrange(len(_buckets)):
encoder_inputs, decoder_inputs, target_weights = model.get_batch(
dev_set, bucket_id)
_, eval_loss, _ = model.step(sess, encoder_inputs, decoder_inputs,
target_weights, bucket_id, True)
eval_ppx = math.exp(eval_loss) if eval_loss < 300 else float('inf')
print(" eval: bucket %d perplexity %.2f" % (bucket_id, eval_ppx))
sys.stdout.flush()
def decode():
with tf.Session() as sess:
# Create model and load parameters.
model = create_model(sess, True)
model.batch_size = 1 # We decode one sentence at a time.
# Load vocabularies.
en_vocab_path = os.path.join(FLAGS.data_dir,
"vocab%d.en" % FLAGS.en_vocab_size)
fr_vocab_path = os.path.join(FLAGS.data_dir,
"vocab%d.fr" % FLAGS.fr_vocab_size)
en_vocab, _ = data_utils.initialize_vocabulary(en_vocab_path)
_, rev_fr_vocab = data_utils.initialize_vocabulary(fr_vocab_path)
# Decode from standard input.
sys.stdout.write("> ")
sys.stdout.flush()
sentence = sys.stdin.readline()
while sentence:
# Get token-ids for the input sentence.
token_ids = data_utils.sentence_to_token_ids(sentence, en_vocab)
# Which bucket does it belong to?
bucket_id = min([b for b in xrange(len(_buckets))
if _buckets[b][0] > len(token_ids)])
# Get a 1-element batch to feed the sentence to the model.
encoder_inputs, decoder_inputs, target_weights = model.get_batch(
{bucket_id: [(token_ids, [])]}, bucket_id)
# Get output logits for the sentence.
_, _, output_logits = model.step(sess, encoder_inputs, decoder_inputs,
target_weights, bucket_id, True)
# This is a greedy decoder - outputs are just argmaxes of output_logits.
outputs = [int(np.argmax(logit, axis=1)) for logit in output_logits]
# If there is an EOS symbol in outputs, cut them at that point.
if data_utils.EOS_ID in outputs:
outputs = outputs[:outputs.index(data_utils.EOS_ID)]
# Print out French sentence corresponding to outputs.
print(" ".join([rev_fr_vocab[output] for output in outputs]))
print("> ", end="")
sys.stdout.flush()
sentence = sys.stdin.readline()
def self_test():
"""Test the translation model."""
with tf.Session() as sess:
print("Self-test for neural translation model.")
# Create model with vocabularies of 10, 2 small buckets, 2 layers of 32.
model = seq2seq_model.Seq2SeqModel(10, 10, [(3, 3), (6, 6)], 32, 2,
5.0, 32, 0.3, 0.99, num_samples=8)
sess.run(tf.initialize_all_variables())
# Fake data set for both the (3, 3) and (6, 6) bucket.
data_set = ([([1, 1], [2, 2]), ([3, 3], [4]), ([5], [6])],
[([1, 1, 1, 1, 1], [2, 2, 2, 2, 2]), ([3, 3, 3], [5, 6])])
for _ in xrange(5): # Train the fake model for 5 steps.
bucket_id = random.choice([0, 1])
encoder_inputs, decoder_inputs, target_weights = model.get_batch(
data_set, bucket_id)
model.step(sess, encoder_inputs, decoder_inputs, target_weights,
bucket_id, False)
def main(_):
if FLAGS.self_test:
self_test()
elif FLAGS.decode:
decode()
else:
train()
if __name__ == "__main__":
tf.app.run()
| DeepThoughtTeam/tensorflow | tensorflow/models/rnn/translate/translate.py | Python | apache-2.0 | 12,522 |
#!/usr/bin/python
# -*- encoding: utf-8; py-indent-offset: 4 -*-
# +------------------------------------------------------------------+
# | ____ _ _ __ __ _ __ |
# | / ___| |__ ___ ___| | __ | \/ | |/ / |
# | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / |
# | | |___| | | | __/ (__| < | | | | . \ |
# | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ |
# | |
# | Copyright Mathias Kettner 2014 [email protected] |
# +------------------------------------------------------------------+
#
# This file is part of Check_MK.
# The official homepage is at http://mathias-kettner.de/check_mk.
#
# check_mk is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation in version 2. check_mk is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; with-
# out even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more de-
# tails. You should have received a copy of the GNU General Public
# License along with GNU Make; see the file COPYING. If not, write
# to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
# Boston, MA 02110-1301 USA.
import userdb
multisite_cronjobs.append(userdb.execute_userdb_job)
| ypid-bot/check_mk | web/plugins/cron/user_sync.py | Python | gpl-2.0 | 1,544 |
# -*- coding: utf-8 -*-
#==============================================================================
# Copyright: Hybrid Labs
# Licence: See LICENSE
#==============================================================================
from functools import partial
class AttrFuncDecorator(object):
def __init__(self, f):
self.f = f
def __getattr__(self, name):
return partial(self.f, name)
def __call__(self, *args, **kwargs):
return self.f(*args, **kwargs)
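# Illustrative usage sketch (not part of the original module): attribute access
# curries the wrapped function with the attribute name as its first argument.
#
#     @AttrFuncDecorator
#     def emit(name, *args):
#         print(name, args)
#
#     emit.ready(1, 2)    # equivalent to emit('ready', 1, 2)
#     emit('direct')      # plain calls still work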
| nehz/avalon | avalon/utils.py | Python | mit | 522 |
## Copyright (c) 2012-2015 Aldebaran Robotics. All rights reserved.
## Use of this source code is governed by a BSD-style license that can be
## found in the COPYING file.
project = 'prebuild'
master_doc = 'index'
| dmerejkowsky/qibuild | python/qidoc/test/projects/world/source/conf.py | Python | bsd-3-clause | 214 |
"""
This module is part of Orion2GoogleSpreadsheet project.
Contains functionality used by Orion2GoogleSpreadsheet to normalize strings
when parsing attributes from received data.
"""
import unicodedata
import string
import logs
# Normalize strings
def string_normalizer(message):
try:
# Convert to unicode format
message = message.decode()
# Lower-case
message = message.lower()
# Replace some characters
message = message.replace('.', '_')
message = message.replace(' ', '_')
message = message.replace(':', '_')
# Get NFKD unicode format
message = unicodedata.normalize('NFKD', message)
# Delete not ascii_letters
message = ''.join(x for x in message if x in string.ascii_letters or x == "_" or x.isdigit())
except:
logs.logger.warn("An error occurred while trying to normalize string")
return ""
# Return normalized string
return message | dflazaro/Orion2GoogleSpreadsheet | normalizer.py | Python | gpl-3.0 | 977 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import warnings
from tempfile import NamedTemporaryFile
from typing import Iterable, Optional, Sequence, Union
from airflow.exceptions import AirflowException
from airflow.providers.amazon.aws.hooks.s3 import S3Hook
from airflow.providers.amazon.aws.operators.s3_list import S3ListOperator
from airflow.providers.google.cloud.hooks.gcs import GCSHook, _parse_gcs_url, gcs_object_is_directory
from airflow.utils.decorators import apply_defaults
class S3ToGCSOperator(S3ListOperator):
"""
Synchronizes an S3 key, possibly a prefix, with a Google Cloud Storage
destination path.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:S3ToGCSOperator`
:param bucket: The S3 bucket where to find the objects. (templated)
:type bucket: str
:param prefix: Prefix string which filters objects whose name begin with
such prefix. (templated)
:type prefix: str
:param delimiter: the delimiter marks key hierarchy. (templated)
:type delimiter: str
:param aws_conn_id: The source S3 connection
:type aws_conn_id: str
:param verify: Whether or not to verify SSL certificates for S3 connection.
By default SSL certificates are verified.
You can provide the following values:
- ``False``: do not validate SSL certificates. SSL will still be used
(unless use_ssl is False), but SSL certificates will not be
verified.
        - ``path/to/cert/bundle.pem``: A filename of the CA cert bundle to use.
You can specify this argument if you want to use a different
CA cert bundle than the one used by botocore.
:type verify: bool or str
:param gcp_conn_id: (Optional) The connection ID used to connect to Google Cloud.
:type gcp_conn_id: str
:param dest_gcs_conn_id: (Deprecated) The connection ID used to connect to Google Cloud.
This parameter has been deprecated. You should pass the gcp_conn_id parameter instead.
:type dest_gcs_conn_id: str
:param dest_gcs: The destination Google Cloud Storage bucket and prefix
where you want to store the files. (templated)
:type dest_gcs: str
:param delegate_to: Google account to impersonate using domain-wide delegation of authority,
if any. For this to work, the service account making the request must have
domain-wide delegation enabled.
:type delegate_to: str
:param replace: Whether you want to replace existing destination files
or not.
:type replace: bool
:param gzip: Option to compress file for upload
:type gzip: bool
:param google_impersonation_chain: Optional Google service account to impersonate using
short-term credentials, or chained list of accounts required to get the access_token
of the last account in the list, which will be impersonated in the request.
If set as a string, the account must grant the originating account
the Service Account Token Creator IAM role.
If set as a sequence, the identities from the list must grant
Service Account Token Creator IAM role to the directly preceding identity, with first
account from the list granting this role to the originating account (templated).
:type google_impersonation_chain: Union[str, Sequence[str]]
**Example**:
.. code-block:: python
s3_to_gcs_op = S3ToGCSOperator(
task_id='s3_to_gcs_example',
bucket='my-s3-bucket',
prefix='data/customers-201804',
dest_gcs_conn_id='google_cloud_default',
dest_gcs='gs://my.gcs.bucket/some/customers/',
replace=False,
gzip=True,
                dag=my_dag)
Note that ``bucket``, ``prefix``, ``delimiter`` and ``dest_gcs`` are
templated, so you can use variables in them if you wish.
"""
template_fields: Iterable[str] = (
'bucket',
'prefix',
'delimiter',
'dest_gcs',
'google_impersonation_chain',
)
ui_color = '#e09411'
# pylint: disable=too-many-arguments
@apply_defaults
def __init__(
self,
*,
bucket,
prefix='',
delimiter='',
aws_conn_id='aws_default',
verify=None,
gcp_conn_id='google_cloud_default',
dest_gcs_conn_id=None,
dest_gcs=None,
delegate_to=None,
replace=False,
gzip=False,
google_impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
**kwargs,
):
super().__init__(bucket=bucket, prefix=prefix, delimiter=delimiter, aws_conn_id=aws_conn_id, **kwargs)
if dest_gcs_conn_id:
warnings.warn(
"The dest_gcs_conn_id parameter has been deprecated. You should pass "
"the gcp_conn_id parameter.",
DeprecationWarning,
stacklevel=3,
)
gcp_conn_id = dest_gcs_conn_id
self.gcp_conn_id = gcp_conn_id
self.dest_gcs = dest_gcs
self.delegate_to = delegate_to
self.replace = replace
self.verify = verify
self.gzip = gzip
self.google_impersonation_chain = google_impersonation_chain
if dest_gcs and not gcs_object_is_directory(self.dest_gcs):
self.log.info(
'Destination Google Cloud Storage path is not a valid '
'"directory", define a path that ends with a slash "/" or '
'leave it empty for the root of the bucket.'
)
raise AirflowException(
'The destination Google Cloud Storage path must end with a slash "/" or be empty.'
)
def execute(self, context):
# use the super method to list all the files in an S3 bucket/key
files = super().execute(context)
gcs_hook = GCSHook(
google_cloud_storage_conn_id=self.gcp_conn_id,
delegate_to=self.delegate_to,
impersonation_chain=self.google_impersonation_chain,
)
# pylint: disable=too-many-nested-blocks
if not self.replace:
# if we are not replacing -> list all files in the GCS bucket
# and only keep those files which are present in
# S3 and not in Google Cloud Storage
bucket_name, object_prefix = _parse_gcs_url(self.dest_gcs)
existing_files_prefixed = gcs_hook.list(bucket_name, prefix=object_prefix)
existing_files = []
if existing_files_prefixed:
# Remove the object prefix itself, an empty directory was found
if object_prefix in existing_files_prefixed:
existing_files_prefixed.remove(object_prefix)
# Remove the object prefix from all object string paths
for f in existing_files_prefixed:
if f.startswith(object_prefix):
existing_files.append(f[len(object_prefix) :])
else:
existing_files.append(f)
files = list(set(files) - set(existing_files))
if len(files) > 0:
self.log.info('%s files are going to be synced: %s.', len(files), files)
else:
self.log.info('There are no new files to sync. Have a nice day!')
if files:
hook = S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify)
for file in files:
# GCS hook builds its own in-memory file so we have to create
# and pass the path
file_object = hook.get_key(file, self.bucket)
with NamedTemporaryFile(mode='wb', delete=True) as f:
file_object.download_fileobj(f)
f.flush()
dest_gcs_bucket, dest_gcs_object_prefix = _parse_gcs_url(self.dest_gcs)
# There will always be a '/' before file because it is
# enforced at instantiation time
dest_gcs_object = dest_gcs_object_prefix + file
# Sync is sequential and the hook already logs too much
# so skip this for now
# self.log.info(
# 'Saving file {0} from S3 bucket {1} in GCS bucket {2}'
# ' as object {3}'.format(file, self.bucket,
# dest_gcs_bucket,
# dest_gcs_object))
gcs_hook.upload(dest_gcs_bucket, dest_gcs_object, f.name, gzip=self.gzip)
self.log.info("All done, uploaded %d files to Google Cloud Storage", len(files))
else:
self.log.info('In sync, no files needed to be uploaded to Google Cloud Storage')
return files
| airbnb/airflow | airflow/providers/google/cloud/transfers/s3_to_gcs.py | Python | apache-2.0 | 9,700 |
import json
import datetime
from mock import MagicMock
import dateutil.parser
from django.utils import six
from django.utils.http import urlquote
from django.utils import timezone
from django.test import TestCase
from django.test.utils import override_settings
from django import template
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group, Permission
from django.core.urlresolvers import reverse
from django.core.files.uploadedfile import SimpleUploadedFile
from wagtail.tests.utils import unittest, WagtailTestUtils
from wagtail.wagtailimages.models import get_image_model
from wagtail.wagtailimages.formats import (
Format,
get_image_format,
register_image_format
)
from wagtail.wagtailimages.backends import get_image_backend
from wagtail.wagtailimages.backends.pillow import PillowBackend
from wagtail.wagtailimages.utils.crop import crop_to_point, CropBox
from wagtail.wagtailimages.utils.focal_point import FocalPoint
from wagtail.wagtailimages.utils.crypto import generate_signature, verify_signature
from wagtail.tests.models import EventPage, EventPageCarouselItem
from wagtail.wagtailcore.models import Page
def get_test_image_file(filename='test.png'):
from six import BytesIO
from PIL import Image
from django.core.files.images import ImageFile
f = BytesIO()
image = Image.new('RGB', (640, 480), 'white')
image.save(f, 'PNG')
return ImageFile(f, name=filename)
Image = get_image_model()
class TestImage(TestCase):
def setUp(self):
# Create an image for running tests on
self.image = Image.objects.create(
title="Test image",
file=get_test_image_file(),
)
def test_is_portrait(self):
self.assertFalse(self.image.is_portrait())
def test_is_landscape(self):
self.assertTrue(self.image.is_landscape())
class TestImagePermissions(TestCase):
def setUp(self):
# Create some user accounts for testing permissions
User = get_user_model()
self.user = User.objects.create_user(username='user', email='[email protected]', password='password')
self.owner = User.objects.create_user(username='owner', email='[email protected]', password='password')
self.editor = User.objects.create_user(username='editor', email='[email protected]', password='password')
self.editor.groups.add(Group.objects.get(name='Editors'))
self.administrator = User.objects.create_superuser(username='administrator', email='[email protected]', password='password')
# Owner user must have the add_image permission
self.owner.user_permissions.add(Permission.objects.get(codename='add_image'))
# Create an image for running tests on
self.image = Image.objects.create(
title="Test image",
uploaded_by_user=self.owner,
file=get_test_image_file(),
)
def test_administrator_can_edit(self):
self.assertTrue(self.image.is_editable_by_user(self.administrator))
def test_editor_can_edit(self):
self.assertTrue(self.image.is_editable_by_user(self.editor))
def test_owner_can_edit(self):
self.assertTrue(self.image.is_editable_by_user(self.owner))
def test_user_cant_edit(self):
self.assertFalse(self.image.is_editable_by_user(self.user))
class TestRenditions(TestCase):
def setUp(self):
# Create an image for running tests on
self.image = Image.objects.create(
title="Test image",
file=get_test_image_file(),
)
def test_default_backend(self):
# default backend should be pillow
backend = get_image_backend()
self.assertTrue(isinstance(backend, PillowBackend))
def test_minification(self):
rendition = self.image.get_rendition('width-400')
# Check size
self.assertEqual(rendition.width, 400)
self.assertEqual(rendition.height, 300)
def test_resize_to_max(self):
rendition = self.image.get_rendition('max-100x100')
# Check size
self.assertEqual(rendition.width, 100)
self.assertEqual(rendition.height, 75)
def test_resize_to_min(self):
rendition = self.image.get_rendition('min-120x120')
# Check size
self.assertEqual(rendition.width, 160)
self.assertEqual(rendition.height, 120)
def test_resize_to_original(self):
rendition = self.image.get_rendition('original')
# Check size
self.assertEqual(rendition.width, 640)
self.assertEqual(rendition.height, 480)
def test_cache(self):
# Get two renditions with the same filter
first_rendition = self.image.get_rendition('width-400')
second_rendition = self.image.get_rendition('width-400')
# Check that they are the same object
self.assertEqual(first_rendition, second_rendition)
class TestRenditionsWand(TestCase):
def setUp(self):
try:
import wand
except ImportError:
# skip these tests if Wand is not installed
raise unittest.SkipTest(
"Skipping image backend tests for wand, as wand is not installed")
# Create an image for running tests on
self.image = Image.objects.create(
title="Test image",
file=get_test_image_file(),
)
self.image.backend = 'wagtail.wagtailimages.backends.wand.WandBackend'
def test_minification(self):
rendition = self.image.get_rendition('width-400')
# Check size
self.assertEqual(rendition.width, 400)
self.assertEqual(rendition.height, 300)
def test_resize_to_max(self):
rendition = self.image.get_rendition('max-100x100')
# Check size
self.assertEqual(rendition.width, 100)
self.assertEqual(rendition.height, 75)
def test_resize_to_min(self):
rendition = self.image.get_rendition('min-120x120')
# Check size
self.assertEqual(rendition.width, 160)
self.assertEqual(rendition.height, 120)
def test_resize_to_original(self):
rendition = self.image.get_rendition('original')
# Check size
self.assertEqual(rendition.width, 640)
self.assertEqual(rendition.height, 480)
def test_cache(self):
# Get two renditions with the same filter
first_rendition = self.image.get_rendition('width-400')
second_rendition = self.image.get_rendition('width-400')
# Check that they are the same object
self.assertEqual(first_rendition, second_rendition)
class TestImageTag(TestCase):
def setUp(self):
# Create an image for running tests on
self.image = Image.objects.create(
title="Test image",
file=get_test_image_file(),
)
def render_image_tag(self, image, filter_spec):
temp = template.Template('{% load wagtailimages_tags %}{% image image_obj ' + filter_spec + '%}')
context = template.Context({'image_obj': image})
return temp.render(context)
def test_image_tag(self):
result = self.render_image_tag(self.image, 'width-400')
# Check that all the required HTML attributes are set
self.assertTrue('width="400"' in result)
self.assertTrue('height="300"' in result)
self.assertTrue('alt="Test image"' in result)
def render_image_tag_as(self, image, filter_spec):
temp = template.Template('{% load wagtailimages_tags %}{% image image_obj ' + filter_spec + ' as test_img %}<img {{ test_img.attrs }} />')
context = template.Context({'image_obj': image})
return temp.render(context)
def test_image_tag_attrs(self):
result = self.render_image_tag_as(self.image, 'width-400')
# Check that all the required HTML attributes are set
self.assertTrue('width="400"' in result)
self.assertTrue('height="300"' in result)
self.assertTrue('alt="Test image"' in result)
def render_image_tag_with_extra_attributes(self, image, title):
temp = template.Template('{% load wagtailimages_tags %}{% image image_obj width-400 class="photo" title=title|lower %}')
context = template.Context({'image_obj': image, 'title': title})
return temp.render(context)
def test_image_tag_with_extra_attributes(self):
result = self.render_image_tag_with_extra_attributes(self.image, 'My Wonderful Title')
# Check that all the required HTML attributes are set
self.assertTrue('width="400"' in result)
self.assertTrue('height="300"' in result)
self.assertTrue('class="photo"' in result)
self.assertTrue('title="my wonderful title"' in result)
## ===== ADMIN VIEWS =====
class TestImageIndexView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
def get(self, params={}):
return self.client.get(reverse('wagtailimages_index'), params)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/images/index.html')
def test_search(self):
response = self.get({'q': "Hello"})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['query_string'], "Hello")
def test_pagination(self):
pages = ['0', '1', '-1', '9999', 'Not a page']
for page in pages:
response = self.get({'p': page})
self.assertEqual(response.status_code, 200)
def test_ordering(self):
orderings = ['title', '-created_at']
for ordering in orderings:
response = self.get({'ordering': ordering})
self.assertEqual(response.status_code, 200)
class TestImageAddView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
def get(self, params={}):
return self.client.get(reverse('wagtailimages_add_image'), params)
def post(self, post_data={}):
return self.client.post(reverse('wagtailimages_add_image'), post_data)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/images/add.html')
def test_add(self):
response = self.post({
'title': "Test image",
'file': SimpleUploadedFile('test.png', get_test_image_file().file.getvalue()),
})
# Should redirect back to index
self.assertRedirects(response, reverse('wagtailimages_index'))
# Check that the image was created
images = Image.objects.filter(title="Test image")
self.assertEqual(images.count(), 1)
# Test that size was populated correctly
image = images.first()
self.assertEqual(image.width, 640)
self.assertEqual(image.height, 480)
class TestImageEditView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
# Create an image to edit
self.image = Image.objects.create(
title="Test image",
file=get_test_image_file(),
)
def get(self, params={}):
return self.client.get(reverse('wagtailimages_edit_image', args=(self.image.id,)), params)
def post(self, post_data={}):
return self.client.post(reverse('wagtailimages_edit_image', args=(self.image.id,)), post_data)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/images/edit.html')
def test_edit(self):
response = self.post({
'title': "Edited",
})
# Should redirect back to index
self.assertRedirects(response, reverse('wagtailimages_index'))
# Check that the image was edited
image = Image.objects.get(id=self.image.id)
self.assertEqual(image.title, "Edited")
class TestImageDeleteView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
# Create an image to edit
self.image = Image.objects.create(
title="Test image",
file=get_test_image_file(),
)
def get(self, params={}):
return self.client.get(reverse('wagtailimages_delete_image', args=(self.image.id,)), params)
def post(self, post_data={}):
return self.client.post(reverse('wagtailimages_delete_image', args=(self.image.id,)), post_data)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/images/confirm_delete.html')
def test_delete(self):
response = self.post({
'hello': 'world'
})
# Should redirect back to index
self.assertRedirects(response, reverse('wagtailimages_index'))
# Check that the image was deleted
images = Image.objects.filter(title="Test image")
self.assertEqual(images.count(), 0)
class TestImageChooserView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
def get(self, params={}):
return self.client.get(reverse('wagtailimages_chooser'), params)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/chooser/chooser.html')
self.assertTemplateUsed(response, 'wagtailimages/chooser/chooser.js')
def test_search(self):
response = self.get({'q': "Hello"})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['query_string'], "Hello")
def test_pagination(self):
pages = ['0', '1', '-1', '9999', 'Not a page']
for page in pages:
response = self.get({'p': page})
self.assertEqual(response.status_code, 200)
class TestImageChooserChosenView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
# Create an image to edit
self.image = Image.objects.create(
title="Test image",
file=get_test_image_file(),
)
def get(self, params={}):
return self.client.get(reverse('wagtailimages_image_chosen', args=(self.image.id,)), params)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/chooser/image_chosen.js')
# TODO: Test posting
class TestImageChooserUploadView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
def get(self, params={}):
return self.client.get(reverse('wagtailimages_chooser_upload'), params)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/chooser/chooser.html')
self.assertTemplateUsed(response, 'wagtailimages/chooser/chooser.js')
# TODO: Test uploading through chooser
class TestFormat(TestCase):
def setUp(self):
# test format
self.format = Format(
'test name',
'test label',
'test classnames',
'test filter spec'
)
# test image
self.image = MagicMock()
self.image.id = 0
def test_editor_attributes(self):
result = self.format.editor_attributes(
self.image,
'test alt text'
)
self.assertEqual(result,
'data-embedtype="image" data-id="0" data-format="test name" data-alt="test alt text" ')
def test_image_to_editor_html(self):
result = self.format.image_to_editor_html(
self.image,
'test alt text'
)
six.assertRegex(self, result,
'<img data-embedtype="image" data-id="0" data-format="test name" data-alt="test alt text" class="test classnames" src="[^"]+" width="1" height="1" alt="test alt text">',
)
def test_image_to_html_no_classnames(self):
self.format.classnames = None
result = self.format.image_to_html(self.image, 'test alt text')
six.assertRegex(self, result,
'<img src="[^"]+" width="1" height="1" alt="test alt text">'
)
self.format.classnames = 'test classnames'
def test_get_image_format(self):
register_image_format(self.format)
result = get_image_format('test name')
self.assertEqual(result, self.format)
class TestUsageCount(TestCase):
fixtures = ['wagtail/tests/fixtures/test.json']
def setUp(self):
self.image = Image.objects.create(
title="Test image",
file=get_test_image_file(),
)
@override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True)
def test_unused_image_usage_count(self):
self.assertEqual(self.image.get_usage().count(), 0)
@override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True)
def test_used_image_document_usage_count(self):
page = EventPage.objects.get(id=4)
event_page_carousel_item = EventPageCarouselItem()
event_page_carousel_item.page = page
event_page_carousel_item.image = self.image
event_page_carousel_item.save()
self.assertEqual(self.image.get_usage().count(), 1)
class TestGetUsage(TestCase):
fixtures = ['wagtail/tests/fixtures/test.json']
def setUp(self):
self.image = Image.objects.create(
title="Test image",
file=get_test_image_file(),
)
def test_image_get_usage_not_enabled(self):
self.assertEqual(list(self.image.get_usage()), [])
@override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True)
def test_unused_image_get_usage(self):
self.assertEqual(list(self.image.get_usage()), [])
@override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True)
def test_used_image_document_get_usage(self):
page = EventPage.objects.get(id=4)
event_page_carousel_item = EventPageCarouselItem()
event_page_carousel_item.page = page
event_page_carousel_item.image = self.image
event_page_carousel_item.save()
self.assertTrue(issubclass(Page, type(self.image.get_usage()[0])))
class TestMultipleImageUploader(TestCase, WagtailTestUtils):
"""
This tests the multiple image upload views located in wagtailimages/views/multiple.py
"""
def setUp(self):
self.login()
# Create an image for running tests on
self.image = Image.objects.create(
title="Test image",
file=get_test_image_file(),
)
def test_add(self):
"""
This tests that the add view responds correctly on a GET request
"""
# Send request
response = self.client.get(reverse('wagtailimages_add_multiple'))
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/multiple/add.html')
def test_add_post(self):
"""
This tests that a POST request to the add view saves the image and returns an edit form
"""
response = self.client.post(reverse('wagtailimages_add_multiple'), {
'files[]': SimpleUploadedFile('test.png', get_test_image_file().file.getvalue()),
}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
# Check response
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertTemplateUsed(response, 'wagtailimages/multiple/edit_form.html')
# Check image
self.assertIn('image', response.context)
self.assertEqual(response.context['image'].title, 'test.png')
# Check form
self.assertIn('form', response.context)
self.assertEqual(response.context['form'].initial['title'], 'test.png')
# Check JSON
response_json = json.loads(response.content.decode())
self.assertIn('image_id', response_json)
self.assertIn('form', response_json)
self.assertIn('success', response_json)
self.assertEqual(response_json['image_id'], response.context['image'].id)
self.assertTrue(response_json['success'])
def test_add_post_noajax(self):
"""
This tests that only AJAX requests are allowed to POST to the add view
"""
response = self.client.post(reverse('wagtailimages_add_multiple'), {})
# Check response
self.assertEqual(response.status_code, 400)
def test_add_post_nofile(self):
"""
This tests that the add view checks for a file when a user POSTs to it
"""
response = self.client.post(reverse('wagtailimages_add_multiple'), {}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
# Check response
self.assertEqual(response.status_code, 400)
def test_add_post_badfile(self):
"""
        This tests that the add view rejects a file that is not a valid image
"""
response = self.client.post(reverse('wagtailimages_add_multiple'), {
'files[]': SimpleUploadedFile('test.png', b"This is not an image!"),
}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
# Check response
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
# Check JSON
response_json = json.loads(response.content.decode())
self.assertNotIn('image_id', response_json)
self.assertNotIn('form', response_json)
self.assertIn('success', response_json)
self.assertIn('error_message', response_json)
self.assertFalse(response_json['success'])
self.assertEqual(response_json['error_message'], 'Not a valid image. Please use a gif, jpeg or png file with the correct file extension (*.gif, *.jpg or *.png).')
def test_edit_get(self):
"""
This tests that a GET request to the edit view returns a 405 "METHOD NOT ALLOWED" response
"""
# Send request
response = self.client.get(reverse('wagtailimages_edit_multiple', args=(self.image.id, )))
# Check response
self.assertEqual(response.status_code, 405)
def test_edit_post(self):
"""
This tests that a POST request to the edit view edits the image
"""
# Send request
response = self.client.post(reverse('wagtailimages_edit_multiple', args=(self.image.id, )), {
('image-%d-title' % self.image.id): "New title!",
('image-%d-tags' % self.image.id): "",
}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
# Check response
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
# Check JSON
response_json = json.loads(response.content.decode())
self.assertIn('image_id', response_json)
self.assertNotIn('form', response_json)
self.assertIn('success', response_json)
self.assertEqual(response_json['image_id'], self.image.id)
self.assertTrue(response_json['success'])
def test_edit_post_noajax(self):
"""
This tests that a POST request to the edit view without AJAX returns a 400 response
"""
# Send request
response = self.client.post(reverse('wagtailimages_edit_multiple', args=(self.image.id, )), {
('image-%d-title' % self.image.id): "New title!",
('image-%d-tags' % self.image.id): "",
})
# Check response
self.assertEqual(response.status_code, 400)
def test_edit_post_validation_error(self):
"""
This tests that a POST request to the edit page returns a json document with "success=False"
and a form with the validation error indicated
"""
# Send request
response = self.client.post(reverse('wagtailimages_edit_multiple', args=(self.image.id, )), {
('image-%d-title' % self.image.id): "", # Required
('image-%d-tags' % self.image.id): "",
}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
# Check response
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertTemplateUsed(response, 'wagtailimages/multiple/edit_form.html')
# Check that a form error was raised
self.assertFormError(response, 'form', 'title', "This field is required.")
# Check JSON
response_json = json.loads(response.content.decode())
self.assertIn('image_id', response_json)
self.assertIn('form', response_json)
self.assertIn('success', response_json)
self.assertEqual(response_json['image_id'], self.image.id)
self.assertFalse(response_json['success'])
def test_delete_get(self):
"""
This tests that a GET request to the delete view returns a 405 "METHOD NOT ALLOWED" response
"""
# Send request
response = self.client.get(reverse('wagtailimages_delete_multiple', args=(self.image.id, )))
# Check response
self.assertEqual(response.status_code, 405)
def test_delete_post(self):
"""
This tests that a POST request to the delete view deletes the image
"""
# Send request
response = self.client.post(reverse('wagtailimages_delete_multiple', args=(self.image.id, )), HTTP_X_REQUESTED_WITH='XMLHttpRequest')
# Check response
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
# Make sure the image is deleted
self.assertFalse(Image.objects.filter(id=self.image.id).exists())
# Check JSON
response_json = json.loads(response.content.decode())
self.assertIn('image_id', response_json)
self.assertIn('success', response_json)
self.assertEqual(response_json['image_id'], self.image.id)
self.assertTrue(response_json['success'])
    def test_delete_post_noajax(self):
"""
This tests that a POST request to the delete view without AJAX returns a 400 response
"""
# Send request
response = self.client.post(reverse('wagtailimages_delete_multiple', args=(self.image.id, )))
# Check response
self.assertEqual(response.status_code, 400)
class TestSignatureGeneration(TestCase):
def test_signature_generation(self):
self.assertEqual(generate_signature(100, 'fill-800x600'), b'xnZOzQyUg6pkfciqcfRJRosOrGg=')
def test_signature_verification(self):
self.assertTrue(verify_signature(b'xnZOzQyUg6pkfciqcfRJRosOrGg=', 100, 'fill-800x600'))
def test_signature_changes_on_image_id(self):
self.assertFalse(verify_signature(b'xnZOzQyUg6pkfciqcfRJRosOrGg=', 200, 'fill-800x600'))
def test_signature_changes_on_filter_spec(self):
self.assertFalse(verify_signature(b'xnZOzQyUg6pkfciqcfRJRosOrGg=', 100, 'fill-800x700'))
class TestFrontendServeView(TestCase):
def setUp(self):
# Create an image for running tests on
self.image = Image.objects.create(
title="Test image",
file=get_test_image_file(),
)
def test_get(self):
"""
Test a valid GET request to the view
"""
# Generate signature
signature = generate_signature(self.image.id, 'fill-800x600')
# Get the image
response = self.client.get(reverse('wagtailimages_serve', args=(signature, self.image.id, 'fill-800x600')))
# Check response
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'image/jpeg')
# Make sure the cache headers are set to expire after at least one month
self.assertIn('Cache-Control', response)
self.assertEqual(response['Cache-Control'].split('=')[0], 'max-age')
self.assertTrue(int(response['Cache-Control'].split('=')[1]) > datetime.timedelta(days=30).seconds)
self.assertIn('Expires', response)
self.assertTrue(dateutil.parser.parse(response['Expires']) > timezone.now() + datetime.timedelta(days=30))
def test_get_invalid_signature(self):
"""
Test that an invalid signature returns a 403 response
"""
# Generate a signature for the incorrect image id
signature = generate_signature(self.image.id + 1, 'fill-800x600')
# Get the image
response = self.client.get(reverse('wagtailimages_serve', args=(signature, self.image.id, 'fill-800x600')))
# Check response
self.assertEqual(response.status_code, 403)
def test_get_invalid_filter_spec(self):
"""
Test that an invalid filter spec returns a 400 response
        This is very unlikely to happen in reality. A user would have
        to create a signature for the invalid filter spec, which can't be
        done with Wagtail's built-in URL generator. We should test it
        anyway though.
"""
# Generate a signature with the invalid filterspec
signature = generate_signature(self.image.id, 'bad-filter-spec')
# Get the image
response = self.client.get(reverse('wagtailimages_serve', args=(signature, self.image.id, 'bad-filter-spec')))
# Check response
self.assertEqual(response.status_code, 400)
class TestURLGeneratorView(TestCase, WagtailTestUtils):
def setUp(self):
# Create an image for running tests on
self.image = Image.objects.create(
title="Test image",
file=get_test_image_file(),
)
# Login
self.user = self.login()
def test_get(self):
"""
This tests that the view responds correctly for a user with edit permissions on this image
"""
# Get
response = self.client.get(reverse('wagtailimages_url_generator', args=(self.image.id, )))
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/images/url_generator.html')
def test_get_bad_permissions(self):
"""
        This tests that the view gives a 403 if a user without correct permissions attempts to access it
"""
# Remove privileges from user
self.user.is_superuser = False
self.user.user_permissions.add(
Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
)
self.user.save()
# Get
response = self.client.get(reverse('wagtailimages_url_generator', args=(self.image.id, )))
# Check response
self.assertEqual(response.status_code, 403)
class TestGenerateURLView(TestCase, WagtailTestUtils):
def setUp(self):
# Create an image for running tests on
self.image = Image.objects.create(
title="Test image",
file=get_test_image_file(),
)
# Login
self.user = self.login()
def test_get(self):
"""
This tests that the view responds correctly for a user with edit permissions on this image
"""
# Get
response = self.client.get(reverse('wagtailimages_generate_url', args=(self.image.id, 'fill-800x600')))
# Check response
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
# Check JSON
content_json = json.loads(response.content.decode())
self.assertEqual(set(content_json.keys()), set(['url', 'local_url']))
expected_url = 'http://localhost/images/%(signature)s/%(image_id)d/fill-800x600/' % {
'signature': urlquote(generate_signature(self.image.id, 'fill-800x600').decode()),
'image_id': self.image.id,
}
self.assertEqual(content_json['url'], expected_url)
expected_local_url = '/images/%(signature)s/%(image_id)d/fill-800x600/' % {
'signature': urlquote(generate_signature(self.image.id, 'fill-800x600').decode()),
'image_id': self.image.id,
}
self.assertEqual(content_json['local_url'], expected_local_url)
def test_get_bad_permissions(self):
"""
        This tests that the view gives a 403 if a user without correct permissions attempts to access it
"""
# Remove privileges from user
self.user.is_superuser = False
self.user.user_permissions.add(
Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
)
self.user.save()
# Get
response = self.client.get(reverse('wagtailimages_generate_url', args=(self.image.id, 'fill-800x600')))
# Check response
self.assertEqual(response.status_code, 403)
self.assertEqual(response['Content-Type'], 'application/json')
# Check JSON
self.assertJSONEqual(response.content.decode(), json.dumps({
'error': 'You do not have permission to generate a URL for this image.',
}))
def test_get_bad_image(self):
"""
This tests that the view gives a 404 response if a user attempts to use it with an image which doesn't exist
"""
# Get
response = self.client.get(reverse('wagtailimages_generate_url', args=(self.image.id + 1, 'fill-800x600')))
# Check response
self.assertEqual(response.status_code, 404)
self.assertEqual(response['Content-Type'], 'application/json')
# Check JSON
self.assertJSONEqual(response.content.decode(), json.dumps({
'error': 'Cannot find image.',
}))
def test_get_bad_filter_spec(self):
"""
This tests that the view gives a 400 response if the user attempts to use it with an invalid filter spec
"""
# Get
response = self.client.get(reverse('wagtailimages_generate_url', args=(self.image.id, 'bad-filter-spec')))
# Check response
self.assertEqual(response.status_code, 400)
self.assertEqual(response['Content-Type'], 'application/json')
# Check JSON
self.assertJSONEqual(response.content.decode(), json.dumps({
'error': 'Invalid filter spec.',
}))
class TestCropToPoint(TestCase):
def test_basic(self):
"Test basic cropping in the centre of the image"
self.assertEqual(
crop_to_point((640, 480), (100, 100), FocalPoint(x=320, y=240)),
CropBox(270, 190, 370, 290),
)
def test_basic_no_focal_point(self):
"If focal point is None, it should make one in the centre of the image"
self.assertEqual(
crop_to_point((640, 480), (100, 100), None),
CropBox(270, 190, 370, 290),
)
def test_doesnt_exit_top_left(self):
"Test that the cropbox doesn't exit the image at the top left"
self.assertEqual(
crop_to_point((640, 480), (100, 100), FocalPoint(x=0, y=0)),
CropBox(0, 0, 100, 100),
)
def test_doesnt_exit_bottom_right(self):
"Test that the cropbox doesn't exit the image at the bottom right"
self.assertEqual(
crop_to_point((640, 480), (100, 100), FocalPoint(x=640, y=480)),
CropBox(540, 380, 640, 480),
)
def test_doesnt_get_smaller_than_focal_point(self):
"Test that the cropbox doesn't get any smaller than the focal point"
self.assertEqual(
crop_to_point((640, 480), (10, 10), FocalPoint(x=320, y=240, width=100, height=100)),
CropBox(270, 190, 370, 290),
)
def test_keeps_composition(self):
"Test that the cropbox tries to keep the composition of the original image as much as it can"
self.assertEqual(
crop_to_point((300, 300), (150, 150), FocalPoint(x=100, y=200)),
CropBox(50, 100, 200, 250), # Focal point is 1/3 across and 2/3 down in the crop box
)
def test_keeps_focal_point_in_view_bottom_left(self):
"""
Even though it tries to keep the composition of the image,
it shouldn't let that get in the way of keeping the entire subject in view
"""
self.assertEqual(
crop_to_point((300, 300), (150, 150), FocalPoint(x=100, y=200, width=150, height=150)),
CropBox(25, 125, 175, 275),
)
def test_keeps_focal_point_in_view_top_right(self):
"""
Even though it tries to keep the composition of the image,
it shouldn't let that get in the way of keeping the entire subject in view
"""
self.assertEqual(
crop_to_point((300, 300), (150, 150), FocalPoint(x=200, y=100, width=150, height=150)),
CropBox(125, 25, 275, 175),
)
class TestIssue573(TestCase):
"""
    This tests for a bug which causes the filename length limit on Renditions to be
    exceeded when the Image has a long original filename and a big focal point key
"""
def test_issue_573(self):
# Create an image with a big filename and focal point
image = Image.objects.create(
title="Test image",
file=get_test_image_file('thisisaverylongfilename-abcdefghijklmnopqrstuvwxyz-supercalifragilisticexpialidocious.png'),
focal_point_x=1000,
focal_point_y=1000,
focal_point_width=1000,
focal_point_height=1000,
)
# Try creating a rendition from that image
# This would crash if the bug is present
image.get_rendition('fill-800x600')
| thenewguy/wagtail | wagtail/wagtailimages/tests.py | Python | bsd-3-clause | 37,784 |
# -*- encoding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import sys
import logging
import argparse
import traceback
from mozdevice import DeviceManagerADB, DMError, ADBError
from mozlog.structured import commandline, get_default_logger
from time import sleep
from mcts.utils.device.devicehelper import DeviceHelper
from mozdevice.adb import ADBError
# ######################################################################################################################
# Test class that all tests must be derived from
###############################################
class ExtraTest(object):
"""
Parent class for all tests in this suite.
Every child must set its .group string and implement
its .run() method.
"""
@classmethod
def groupname(cls):
"""
Getter that returns a test's group name.
"""
if cls.group:
return cls.group
else:
return 'unknown'
@staticmethod
def group_list(mode='phone'):
"""
Returns a list of all groups in the test suite.
"""
if mode == 'stingray':
return ['ssl']
groups = []
for t in ExtraTest.__subclasses__():
if t.groupname() not in groups:
groups.append(t.groupname())
return groups
@staticmethod
def test_list(group=None, mode='phone'):
"""
Returns a list of all tests, optionally filtered by group.
"""
if mode == 'stingray' and group is not None:
group = 'ssl'
if group is None:
return ExtraTest.__subclasses__()
else:
tests = []
for t in ExtraTest.__subclasses__():
if t.groupname() == group:
tests.append(t)
return tests
@staticmethod
def run_groups(groups=[], version=None, host='localhost', port=2828, mode='phone'):
hasadb = mode == 'phone'
logger = get_default_logger()
if groups is None or len(groups) == 0: # run all groups
logger.debug('running securitysuite tests for all groups %s' % str(ExtraTest.group_list(mode=mode)))
groups = ExtraTest.group_list(mode=mode)
else:
logger.debug('running securitysuite tests for groups %s' % str(groups))
logger.suite_start(tests=groups)
# setup marionette before any test
marionette = DeviceHelper.getMarionette(host=host, port=port)
# setup device before any test
device = DeviceHelper.getDevice()
for g in groups:
logger.debug("running securitysuite test group %s" % g)
logger.test_start(g)
try:
ExtraTest.run(g, version=version)
logger.test_end(g, 'OK')
except:
logger.critical(traceback.format_exc())
logger.test_end(g, 'FAIL')
raise
logger.suite_end()
@classmethod
def run(cls, group=None, version=None):
"""
Runs all the tests, optionally just within the specified group.
"""
for t in cls.test_list(group):
t.run(version=version)
@classmethod
def log_status(cls, status, msg):
logger = get_default_logger()
logger.test_status(cls.groupname(), cls.__name__, status, message=msg)
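# A minimal sketch of how a concrete test plugs into this suite (the group and
# class names below are hypothetical, shown for illustration only):
#
#     class ExampleTest(ExtraTest):
#         group = 'example'
#
#         @classmethod
#         def run(cls, version=None):
#             # perform the actual checks, then report through the shared logger
#             cls.log_status('PASS', 'example check completed')
#
# Defining such a subclass is all that is needed for it to be picked up by
# group_list(), test_list() and run_groups() via ExtraTest.__subclasses__().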
#######################################################################################################################
# Shared module functionality
#############################
def wait_for_adb_device():
try:
device = DeviceHelper.getDevice()
except ADBError:
device = None
print "Waiting for adb connection..."
while device is None:
try:
device = DeviceHelper.getDevice()
except ADBError:
sleep(0.2)
if len(device.devices()) < 1:
print "Waiting for adb device..."
while len(device.devices()) < 1:
sleep(0.2)
def adb_has_root():
# normally this should check via root=True to .shell_output, but doesn't work
device = DeviceHelper.getDevice()
return device.shell_output("id").startswith("uid=0(root)")
#######################################################################################################################
# Command line handler
######################
def securitycli():
"""
Entry point for the runner defined in setup.py.
"""
parser = argparse.ArgumentParser(description="Runner for security test suite")
parser.add_argument("-l", "--list-test-groups", action="store_true",
help="List all logical test groups")
parser.add_argument("-a", "--list-all-tests", action="store_true",
help="List all tests")
parser.add_argument("-i", "--include", metavar="GROUP", action="append", default=[],
help="Only include specified group(s) in run, include several "
"groups by repeating flag")
parser.add_argument("--version", action="store", dest="version",
help="B2G version")
parser.add_argument("--ipython", dest="ipython", action="store_true",
help="drop to ipython session")
parser.add_argument('-H', '--host',
help='Hostname or ip for target device',
action='store', default='localhost')
parser.add_argument('-P', '--port',
help='Port for target device',
action='store', default=2828)
parser.add_argument('-m', '--mode',
help='Test mode (stingray, phone) default (phone)',
action='store', default='phone')
parser.add_argument("-v", dest="verbose", action="store_true",
help="Verbose output")
# add specialized mozilla logger options
commandline.add_logging_group(parser)
args = parser.parse_args()
# set up mozilla logger
logger = commandline.setup_logging("securitysuite", vars(args), {"raw": sys.stdout})
try:
if args.list_test_groups:
for group in ExtraTest.group_list(args.mode):
print group
elif args.list_all_tests:
            for test in ExtraTest.test_list(mode=args.mode):
print "%s.%s" % (test.group, test.__name__)
elif args.ipython:
from IPython import embed
embed()
elif args.mode == 'stingray':
logger.debug("security cli runnng with args %s" % args)
ExtraTest.run_groups(args.include,
version=args.version,
host=args.host, port=int(args.port),
mode=args.mode)
else:
logger.debug("security cli runnng with args %s" % args)
wait_for_adb_device()
if not adb_has_root():
logger.warning("adb has no root. Results will be incomplete.")
ExtraTest.run_groups(args.include, version=args.version)
except:
logger.critical(traceback.format_exc())
raise
if __name__ == "__main__":
securitycli()
| mozilla-b2g/fxos-certsuite | mcts/securitysuite/suite.py | Python | mpl-2.0 | 7,388 |
import requests
from flask import session, Blueprint, redirect
from flask import request
from grano import authz
from grano.lib.exc import BadRequest
from grano.lib.serialisation import jsonify
from grano.views.cache import validate_cache
from grano.core import db, url_for, app
from grano.providers import github, twitter, facebook
from grano.model import Account
from grano.logic import accounts
blueprint = Blueprint('sessions_api', __name__)
@blueprint.route('/api/1/sessions', methods=['GET'])
def status():
permissions = {}
if authz.logged_in():
for permission in request.account.permissions:
permissions[permission.project.slug] = {
'reader': permission.reader,
'editor': permission.editor,
'admin': permission.admin
}
keys = {
'p': repr(permissions),
'i': request.account.id if authz.logged_in() else None
}
validate_cache(keys=keys)
return jsonify({
'logged_in': authz.logged_in(),
'api_key': request.account.api_key if authz.logged_in() else None,
'account': request.account if request.account else None,
'permissions': permissions
})
def provider_not_enabled(name):
return jsonify({
'status': 501,
'name': 'Provider not configured: %s' % name,
'message': 'There are no OAuth credentials given for %s' % name,
}, status=501)
@blueprint.route('/api/1/sessions/logout', methods=['GET'])
def logout():
#authz.require(authz.logged_in())
session.clear()
return redirect(request.args.get('next_url', '/'))
@blueprint.route('/api/1/sessions/login/github', methods=['GET'])
def github_login():
if not app.config.get('GITHUB_CLIENT_ID'):
return provider_not_enabled('github')
callback=url_for('sessions_api.github_authorized')
session.clear()
if not request.args.get('next_url'):
raise BadRequest("No 'next_url' is specified.")
session['next_url'] = request.args.get('next_url')
return github.authorize(callback=callback)
@blueprint.route('/api/1/sessions/callback/github', methods=['GET'])
@github.authorized_handler
def github_authorized(resp):
next_url = session.get('next_url', '/')
if resp is None or not 'access_token' in resp:
return redirect(next_url)
access_token = resp['access_token']
session['access_token'] = access_token, ''
res = requests.get('https://api.github.com/user?access_token=%s' % access_token,
verify=False)
data = res.json()
account = Account.by_github_id(data.get('id'))
data_ = {
'full_name': data.get('name'),
'login': data.get('login'),
'email': data.get('email'),
'github_id': data.get('id')
}
account = accounts.save(data_, account=account)
db.session.commit()
session['id'] = account.id
return redirect(next_url)
@blueprint.route('/api/1/sessions/login/twitter', methods=['GET'])
def twitter_login():
if not app.config.get('TWITTER_API_KEY'):
return provider_not_enabled('twitter')
callback=url_for('sessions_api.twitter_authorized')
session.clear()
if not request.args.get('next_url'):
raise BadRequest("No 'next_url' is specified.")
session['next_url'] = request.args.get('next_url')
return twitter.authorize(callback=callback)
@blueprint.route('/api/1/sessions/callback/twitter', methods=['GET'])
@twitter.authorized_handler
def twitter_authorized(resp):
next_url = session.get('next_url', '/')
if resp is None or not 'oauth_token' in resp:
return redirect(next_url)
session['twitter_token'] = (resp['oauth_token'],
resp['oauth_token_secret'])
res = twitter.get('users/show.json?user_id=%s' % resp.get('user_id'))
account = Account.by_twitter_id(res.data.get('id'))
data_ = {
'full_name': res.data.get('name'),
'login': res.data.get('screen_name'),
'twitter_id': res.data.get('id')
}
account = accounts.save(data_, account=account)
db.session.commit()
session['id'] = account.id
return redirect(next_url)
@blueprint.route('/api/1/sessions/login/facebook', methods=['GET'])
def facebook_login():
if not app.config.get('FACEBOOK_APP_ID'):
return provider_not_enabled('facebook')
callback=url_for('sessions_api.facebook_authorized')
session.clear()
if not request.args.get('next_url'):
raise BadRequest("No 'next_url' is specified.")
session['next_url'] = request.args.get('next_url')
return facebook.authorize(callback=callback)
@blueprint.route('/api/1/sessions/callback/facebook', methods=['GET'])
@facebook.authorized_handler
def facebook_authorized(resp):
next_url = session.get('next_url', '/')
if resp is None or not 'access_token' in resp:
return redirect(next_url)
session['facebook_token'] = (resp.get('access_token'), '')
data = facebook.get('/me').data
account = Account.by_facebook_id(data.get('id'))
data_ = {
'full_name': data.get('name'),
'login': data.get('username'),
'email': data.get('email'),
'facebook_id': data.get('id')
}
account = accounts.save(data_, account=account)
db.session.commit()
session['id'] = account.id
return redirect(next_url)
| clkao/grano | grano/views/sessions_api.py | Python | mit | 5,328 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# seeder.py - Exports reachable nodes into DNS zone files for DNS seeder.
#
# Copyright (c) Addy Yeow Chin Heng <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
Exports reachable nodes into DNS zone files for DNS seeder.
"""
import glob
import json
import logging
import operator
import os
import random
import sys
import time
from collections import defaultdict
from ConfigParser import ConfigParser
from utils import new_redis_conn
REDIS_CONN = None
CONF = {}
class Seeder(object):
"""
Implements seeding mechanic by exporting reachable nodes as A and AAAA
records into DNS zone files. A separate DNS server software is expected to
consume and serve the zone files to the public.
"""
def __init__(self):
self.dump = None
self.nodes = []
self.addresses = defaultdict(list)
self.now = 0
def export_nodes(self, dump):
"""
Exports nodes to generate A and AAAA records from the latest snapshot.
"""
self.now = int(time.time())
if dump != self.dump:
try:
self.nodes = json.loads(open(dump, "r").read(),
encoding="latin-1")
except ValueError:
logging.warning("Write pending")
return
if len(self.nodes) == 0:
logging.warning("len(self.nodes): %d", len(self.nodes))
return
self.addresses = defaultdict(list)
for address, services in self.filter_nodes():
self.addresses[services].append(address)
self.dump = dump
self.save_zone_files()
def save_zone_files(self):
"""
Saves A and AAAA records in DNS zone files.
"""
default_zone = os.path.basename(CONF['zone_file'])
for i in range(0xf + 1):
if i == 0:
zone = default_zone
zone_file = CONF['zone_file']
wildcard = "".join([
"\n",
"*.{0}.\tIN\tCNAME\t{0}.".format(default_zone),
])
addresses = []
for services, addrs in self.addresses.iteritems():
if services & 1 == 1: # NODE_NETWORK
addresses.extend(addrs)
else:
zone = 'x%x.%s' % (i, default_zone)
zone_file = CONF['zone_file'].replace(default_zone, zone)
wildcard = ""
addresses = self.addresses[i]
logging.debug("Zone file: %s", zone_file)
serial = str(self.now)
logging.debug("Serial: %s", serial)
template = open(CONF['template'], "r") \
.read() \
.replace("1501826735", serial) \
.replace("seed.bitnodes.io.", zone.replace("zone", ""))
content = "".join([
template,
wildcard,
"\n",
self.get_records(addresses),
]).strip() + "\n"
open(zone_file, "w").write(content)
def get_records(self, addresses):
"""
Returns addresses formatted in A, AAAA, TXT records for a zone file.
"""
a_records = []
aaaa_records = []
txt_records = []
for address in addresses:
if address.endswith(".onion"):
txt_records.append("@\tIN\tTXT\t{}".format(address))
elif ":" in address:
aaaa_records.append("@\tIN\tAAAA\t{}".format(address))
else:
a_records.append("@\tIN\tA\t{}".format(address))
logging.debug("A records: %d", len(a_records))
logging.debug("AAAA records: %d", len(aaaa_records))
logging.debug("TXT records: %d", len(txt_records))
random.shuffle(a_records)
random.shuffle(aaaa_records)
random.shuffle(txt_records)
records = "".join([
"\n".join(a_records[:CONF['a_records']]),
"\n",
"\n".join(aaaa_records[:CONF['aaaa_records']]),
"\n",
"\n".join(txt_records[:CONF['txt_records']]),
])
return records
def filter_nodes(self):
"""
Returns nodes that satisfy the minimum requirements listed below:
1) Height must be at most 2 blocks away from the consensus height
2) Uptime must be equal or greater than the configured min. age
3) Max. one node per ASN
4) Uses default port
"""
consensus_height = self.get_consensus_height()
min_age = self.get_min_age()
asns = set()
for node in self.nodes:
address = node[0]
port = node[1]
age = self.now - node[4]
services = node[5]
height = node[6]
asn = node[13]
if port != CONF['port'] or asn is None or age < min_age:
continue
if consensus_height and abs(consensus_height - height) > 2:
continue
if asn in asns and not address.endswith(".onion"):
continue
yield address, services
asns.add(asn)
def get_consensus_height(self):
"""
Returns the most common height from Redis.
"""
height = REDIS_CONN.get('height')
if height:
height = int(height)
logging.info("Consensus. height: %s", height)
return height
def get_min_age(self):
"""
Returns the minimum required uptime. If the oldest node cannot satisfy
the configured value, use a fallback value of max. 1 percent away from
the uptime of the oldest node.
"""
min_age = CONF['min_age']
oldest = self.now - min(self.nodes, key=operator.itemgetter(4))[4]
logging.info("Longest uptime: %d", oldest)
if oldest < min_age:
min_age = oldest - (0.01 * oldest) # Max. 1% newer than oldest
logging.info("Min. age: %d", min_age)
return min_age
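# For illustration, the zone file entries emitted by Seeder.get_records() are
# tab-separated lines roughly like the following (addresses are made-up
# documentation values, not real nodes):
#
#     @   IN  A       203.0.113.7
#     @   IN  AAAA    2001:db8::1
#     @   IN  TXT     exampleonionaddress.onion
#
# i.e. bare "@" records, which are relative to the zone origin defined in the
# zone file template.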
def cron():
"""
Periodically fetches latest snapshot to sample nodes for DNS zone files.
"""
seeder = Seeder()
while True:
time.sleep(5)
try:
dump = max(glob.iglob("{}/*.json".format(CONF['export_dir'])))
except ValueError as err:
logging.warning(err)
continue
logging.info("Dump: %s", dump)
seeder.export_nodes(dump)
def init_conf(argv):
"""
Populates CONF with key-value pairs from configuration file.
"""
conf = ConfigParser()
conf.read(argv[1])
CONF['logfile'] = conf.get('seeder', 'logfile')
CONF['port'] = conf.getint('seeder', 'port')
CONF['db'] = conf.getint('seeder', 'db')
CONF['debug'] = conf.getboolean('seeder', 'debug')
CONF['export_dir'] = conf.get('seeder', 'export_dir')
CONF['min_age'] = conf.getint('seeder', 'min_age')
CONF['zone_file'] = conf.get('seeder', 'zone_file')
CONF['template'] = conf.get('seeder', 'template')
CONF['a_records'] = conf.getint('seeder', 'a_records')
CONF['aaaa_records'] = conf.getint('seeder', 'aaaa_records')
CONF['txt_records'] = conf.getint('seeder', 'txt_records')
zone_dir = os.path.dirname(CONF['zone_file'])
if not os.path.exists(zone_dir):
os.makedirs(zone_dir)
def main(argv):
if len(argv) < 2 or not os.path.exists(argv[1]):
print("Usage: seeder.py [config]")
return 1
# Initialize global conf
init_conf(argv)
# Initialize logger
loglevel = logging.INFO
if CONF['debug']:
loglevel = logging.DEBUG
logformat = ("%(asctime)s,%(msecs)05.1f %(levelname)s (%(funcName)s) "
"%(message)s")
logging.basicConfig(level=loglevel,
format=logformat,
filename=CONF['logfile'],
filemode='w')
print("Log: {}, press CTRL+C to terminate..".format(CONF['logfile']))
global REDIS_CONN
REDIS_CONN = new_redis_conn(db=CONF['db'])
cron()
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
| ayeowch/bitnodes | seeder.py | Python | mit | 9,236 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('accounts', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='user',
options={},
),
]
| mod2/codex | accounts/migrations/0002_auto_20141031_1649.py | Python | mit | 337 |
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Externalmodule(Package):
homepage = "http://somewhere.com"
url = "http://somewhere.com/module-1.0.tar.gz"
version('1.0', '1234567890abcdef1234567890abcdef')
depends_on('externalprereq')
| LLNL/spack | var/spack/repos/builtin.mock/packages/externalmodule/package.py | Python | lgpl-2.1 | 435 |
#
# Copyright (C) 2016 Intel Corporation
#
# SPDX-License-Identifier: MIT
#
from oeqa.core.case import OETestCase
class AnotherTest(OETestCase):
def testAnother(self):
self.assertTrue(True, msg='How is this possible?')
| schleichdi2/OPENNFR-6.3-CORE | opennfr-openembedded-core/meta/lib/oeqa/core/tests/cases/loader/valid/another.py | Python | gpl-2.0 | 234 |
# Copyright 2009-2010 by Ka-Ping Yee
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Geographical functions. All measurements are in metres."""
from math import asin, cos, pi, sin, sqrt
EARTH_RADIUS = 6371009
def hav(theta):
"""Computes the haversine of an angle given in radians."""
return sin(theta/2)**2
def central_angle((phi_s, lam_s), (phi_f, lam_f)):
"""Returns the central angle between two points on a sphere, whose
locations are given as (latitude, longitude) pairs in radians."""
d_phi = phi_s - phi_f
d_lam = lam_s - lam_f
return 2*asin(sqrt(hav(d_phi) + cos(phi_s)*cos(phi_f)*hav(d_lam)))
def distance(start, finish):
"""Approximates the distance in metres between two points on the Earth,
which are given as {'lat':y, 'lon':x} objects in degrees."""
start_rad = (start['lat']*pi/180, start['lon']*pi/180)
finish_rad = (finish['lat']*pi/180, finish['lon']*pi/180)
return central_angle(start_rad, finish_rad)*EARTH_RADIUS
def point_inside_polygon(point, poly):
"""Returns true if the given point is inside the given polygon.
point is given as an {'lat':y, 'lon':x} object in degrees
poly is given as a list of (longitude, latitude) tuples. The last vertex
is assumed to be the same as the first vertex.
TODO(shakusa): poly should probably be expressed in a less-confusing way"""
lat = point['lat']
lon = point['lon']
n = len(poly)
inside = False
# Count the parity of intersections of a horizontal eastward ray starting
# at (lon, lat). If even, point is outside, odd, point is inside
lon1, lat1 = poly[0]
for i in range(n + 1):
lon2, lat2 = poly[i % n]
# if our ray falls within the vertical coords of the edge
if min(lat1, lat2) < lat <= max(lat1, lat2):
# if our (eastward) ray starts before the edge and the edge is not
# horizontal
if lon <= max(lon1, lon2) and lat1 != lat2:
lon_inters = lon1 + (lat - lat1) * (lon2 - lon1) / (lat2 - lat1)
# if the intersection is beyond the start of the ray,
# we've crossed it
if lon <= lon_inters:
inside = not inside
lon1, lat1 = lon2, lat2
return inside
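# Illustrative usage only; the coordinates and polygon below are made up for
# this example and are not part of the original module.
if __name__ == '__main__':
    # Approximate great-circle distance between two points given in degrees.
    start = {'lat': 37.4220, 'lon': -122.0841}
    finish = {'lat': 37.7749, 'lon': -122.4194}
    print 'distance: %.1f m' % distance(start, finish)
    # Point-in-polygon check; the polygon is a unit square given as
    # (longitude, latitude) tuples.
    square = [(0, 0), (1, 0), (1, 1), (0, 1)]
    print 'inside:', point_inside_polygon({'lat': 0.5, 'lon': 0.5}, square)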
| Princessgladys/googleresourcefinder | lib/feedlib/geo.py | Python | apache-2.0 | 2,777 |
#!/usr/bin/env python
#-*- coding: utf-8 -*-
from setuptools import setup
from setuptools import find_packages
import os
import re
# https://github.com/zzzeek/sqlalchemy/blob/master/setup.py
v_file = open(os.path.join(os.path.dirname(__file__), 'easylinker', '__init__.py'))
VERSION = re.compile(r".*__version__ = '(.*?)'", re.S).match(v_file.read()).group(1)
v_file.close()
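# For reference, the pattern above expects easylinker/__init__.py to contain a
# line of the form (the version value shown is purely illustrative):
#
#     __version__ = '0.1.3'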
download_url = 'https://github.com/if1live/easylinker/tarball/{}'.format(VERSION)
packages = [
'easylinker',
]
dependency_links = [
'github.com/if1live/ntfs/tarball/master#egg=0.1.3',
]
requires = [
'jinja2>=2.7.3',
]
tests_requires = [
'nose>=1.3.4',
]
setup(
name="easylinker",
version=VERSION,
url='https://github.com/if1live/easylinker',
download_url=download_url,
author="libsora",
author_email="[email protected]",
maintainer="libsora",
maintainer_email="[email protected]",
description="Tool for creating hard/symbolic link easily",
long_description=open("README.rst").read(),
license="MIT",
packages=packages,
package_data={'': ['LICENSE', ]},
include_package_data=True,
install_requires=requires,
tests_require=tests_requires,
dependency_links=dependency_links,
keywords=['hardlink'],
classifiers=[
'Development Status :: 4 - Beta',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
| if1live/easylinker | setup.py | Python | mit | 1,689 |
from django.shortcuts import render
from django.urls import reverse
from .forms import CustomSurveyForm
from .models import SenateProjects, StudentProjects
from django.shortcuts import get_object_or_404
from django.http import HttpResponseRedirect
def senate_project_survey(request, projectid):
"""
Generalized view for a custom senate project survey
**Context**
''survey''
CustomSurvey form that is related to the project
**Template:**
:template:'senate-project-survey.html'
"""
project = get_object_or_404(SenateProjects, pk=projectid)
survey = project.survey
if request.method == "POST":
user = request.user
survey = CustomSurveyForm(request.POST, user=user, survey=survey)
if survey.is_valid():
survey.save()
return HttpResponseRedirect(reverse('senate-projects-home', current_app='Project'))
else:
user = request.user
survey = CustomSurveyForm(user=user, survey=survey)
return render(request, 'senate-project-survey.html', {'survey': survey})
def projects_home(request):
"""
    Home view for all projects (Deprecated, soon to be made into a drop-down)
**Template:**
:template:'projects-home.html'
"""
return render(request, 'projects-home.html')
def student_projects(request):
"""
Home view for all student projects
**Context**
''student_projects''
All projects from the studentprojects model
**Template:**
:template:'student-projects.html'
"""
student_projects = StudentProjects.objects.all()
return render(request, 'student-projects.html', {'student_projects': student_projects})
def senate_projects(request):
"""
Home view for all senate projects
**Context**
''student_projects''
All projects from the senateprojects model
**Template:**
:template:'senate-projects.html'
"""
senate_projects = SenateProjects.objects.all()
return render(request, 'senate-projects.html', {'senate_projects': senate_projects})
def senate_project_specific(request, projectid):
"""
Generalized view for a senate project
**Context**
''project''
A specific senate project object
**Template:**
:template:'senate-project-specific.html'
"""
project = get_object_or_404(SenateProjects, pk=projectid)
return render(request, 'senate-project-specific.html', {'project': project})
def student_project_specific(request):
"""
    Generalized view for a student project
    **Template:**
    :template:'student-project-specific.html'
"""
    return render(request, 'student-project-specific.html')
| ByrdOfAFeather/AlphaTrion | Project/views.py | Python | mit | 2528
# -*- coding: utf-8 -*-
#
## copyright (C) 2018
# The Icecube Collaboration
#
# $Id$
#
# @version $Revision$
# @date $LastChangedDate$
# @author Hershal Pandya <[email protected]> Last changed by: $LastChangedBy$
#
from icecube import shield
tray.AddModule(IceTopPulseMerger, 'IceTopPulseMerger',
output=pulses_merged,
input=[icetop_globals.icetop_hlc_vem_pulses, correctedSLC],
If = field_exists(icetop_globals.icetop_hlc_vem_pulses))
def IceTopPulseMerger(frame, output='', input=[]):
''' Merges inputs in output. Used to merge SLC and HLC IT pulses '''
if output not in frame.keys():
pulses = []
for i in input:
if not i in frame:
continue
ps = frame[i]
pulses.append(i)
frame[output] = dataclasses.I3RecoPulseSeriesMapUnion(frame, pulses)
#pulses_output = dataclasses.I3RecoPulseSeriesMapUnion(frame, pulses)
#frame[output] = pulses_output.apply(frame)
tray.AddModule("I3ShieldDataCollector",
InputRecoPulses=veto_globals.pulses_tank_merged,
InputTrack=fit_type,
OutputName=hit_output_flat,
BadDomList = icetop_globals.icetop_bad_doms,
ReportUnhitDOMs=True,
ReportCharge = True,
useCurvatureApproximation=False,
If = field_exists(veto_globals.pulses_tank_merged))
#if you are using a curvature to subtract out of times. can be done in next iteration.
# hit_output_curved = "Shield_" + veto_globals.pulses_tank_merged +'_'+ fit_type
# unhit_output_curved = hit_output_curved +"_UnHit"
# print(hit_output_curved)
# tray.AddModule("I3ShieldDataCollector",
# InputRecoPulses=veto_globals.pulses_tank_merged,
# InputTrack=fit_type,
# OutputName=hit_output_curved,
# BadDomList = icetop_globals.icetop_bad_doms,
# ReportUnhitDOMs=True,
# ReportCharge = True,
# useCurvatureApproximation= True,
# coefficients = veto_globals.coefficients,
# If = field_exists(veto_globals.pulses_tank_merged))
def assemble_excluded_doms_list(frame, geometry='I3Geometry', icetop_excluded_tanks_lists=[], out=veto_globals.icetop_excluded_doms):
    if out in frame:
        log_warn('%s already in frame; being deleted' % out)
        frame.Delete(out)
    geo = frame[geometry]
    mergedExcludedList = dataclasses.I3VectorOMKey()
for container in icetop_excluded_tanks_lists:
icetop_excluded_tanks = frame[container]
for tank in icetop_excluded_tanks:
for omkey in geo.stationgeo[tank.string][tank.tank].omkey_list:
if omkey not in mergedExcludedList:
mergedExcludedList.append(omkey)
    frame.Put(out, mergedExcludedList)
| jrbourbeau/cr-composition | processing/legacy/icetop_llhratio/python/shield_snippets.py | Python | mit | 2,763 |
import os
import unittest
from bs4 import BeautifulSoup
from django.test import TestCase
from bikestats.models import Make, Model, Stat, parse_all
from bikestats.scraper import Scraper
TEST_DIR = os.path.join(os.path.dirname(__file__), '../../www.motorcyclespecs.co.za')
class TestScraper(TestCase):
""" Test our scraped data. Note: you must have scraped the entire website before this test! """
def test_scrape_root(self):
parse_all(TEST_DIR)
# TODO some sort of functional test, not just a count of makes
self.assertEqual(132, Make.objects.count())
self.assertEqual(4287, Model.objects.count())
self.assertEqual(26482, Stat.objects.count())
if __name__ == '__main__':
unittest.main()
| narfman0/bikestats | bikestats/tests/test_models.py | Python | gpl-3.0 | 743 |
import numpy as np
import gelato
import pytest
@pytest.fixture()
def seeded():
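    """Seed gelato's Theano RNG and NumPy's RNG so tests are reproducible."""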
gelato.set_tt_rng(42)
np.random.seed(42)
| ferrine/gelato | gelato/tests/conftest.py | Python | mit | 130 |
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import time
import unittest
from grpc._adapter import _c
from grpc._adapter import _types
class CTypeSmokeTest(unittest.TestCase):
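  """Smoke tests: create and destroy the core _c objects (CompletionQueue, Server, Channel)."""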
def testCompletionQueueUpDown(self):
completion_queue = _c.CompletionQueue()
del completion_queue
def testServerUpDown(self):
completion_queue = _c.CompletionQueue()
serv = _c.Server(completion_queue, [])
del serv
del completion_queue
def testChannelUpDown(self):
channel = _c.Channel('[::]:0', [])
del channel
if __name__ == '__main__':
unittest.main(verbosity=2)
| fichter/grpc | src/python/grpcio_test/grpc_test/_adapter/_c_test.py | Python | bsd-3-clause | 2,073 |
from .base import CreateBaseCommand
class HelperCloseOpenIndexCommand(CreateBaseCommand):
command_name = "elasticsearch:helper-close-open-index"
def is_enabled(self):
return True
def run_request(self, index=None):
if not index:
self.show_index_list_panel(self.run)
return
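        # Close and immediately re-open the index. This close/open cycle is
        # typically used to make Elasticsearch re-initialise the index (for
        # example after updating analysis settings).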
response_close = self.client.indices.close(index=index)
response_open = self.client.indices.open(index=index)
return dict(
command=self.command_name,
close=response_close, open=response_open)
| KunihikoKido/sublime-elasticsearch-client | commands/helper_close_open_index.py | Python | mit | 569 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os.path
import numpy as np
from numpy.testing import assert_allclose, assert_equal, assert_almost_equal
import abel
from abel.benchmark import is_symmetric
DATA_DIR = os.path.join(os.path.split(__file__)[0], 'data')
def assert_allclose_msg(x, y, message, rtol=1e-5):
    assert np.allclose(x, y, rtol=rtol), message
def test_speeds():
# This very superficial test checks that angular_integration is able to
# execute (no syntax errors)
n = 101
IM = np.random.randn(n, n)
abel.tools.vmi.angular_integration(IM)
def test_centering_function_shape():
    # (y, x) -> original image shape
    # center_image must preserve this shape
for (y, x) in [(20, 11), # crop image
(21, 11),
(5, 11), # pad image
(4, 11)]:
data = np.zeros((y, x))
res = abel.tools.center.center_image(data, (y//2, x//2))
assert_equal( res.shape, (y, x),
'Centering preserves shapes for ni={}, n={}'.format(y, x))
def test_centering_function():
# ni -> original shape of the data is (ni, ni)
# n_c -> the image center is (n_c, n_c)
for (ni, n_c) in [(10, 5),
(10, 5),
]:
arr = np.zeros((ni, ni))
# arr[n_c-1:n_c+2,n_c-1:n_c+2] = 1
# # else:
arr[n_c-1:n_c+1,n_c-1:n_c+1] = 1.0
res = abel.tools.center.center_image(arr, (n_c, n_c), odd_size=False)
        # The print statements below can be re-enabled while debugging the centering issue
# print('Original array')
# print(arr)
# print('Centered array')
# print(res)
assert_equal( is_symmetric(res), True,\
'Validating the centering function for ni={}, n_c={}'.format(ni, n_c))
def test_speeds_non_integer_center():
# ensures that the rest speeds function can work with a non-integer center
n = 101
IM = np.random.randn(n, n)
abel.tools.vmi.angular_integration(IM, origin=(50.5, 50.5))
def test_anisotropy_parameter():
# anisotropy parameter from test image (not transformed)
IM = abel.tools.analytical.sample_image(name='dribinski')
Beta, Amp, Rmid, Ivstheta, theta = abel.tools.vmi.radial_integration(IM,
radial_ranges=([(0, 33), (92, 108)]))
assert_almost_equal(-0.14, Beta[0][0], decimal=2)
if __name__ == "__main__":
test_anisotropy_parameter()
| DhrubajyotiDas/PyAbel | abel/tests/test_tools.py | Python | mit | 2,564 |
# -*- coding: utf-8 -*-
{
'name': 'SaaS System Administration',
'summary': "System Administration Framework for SAAS Tools",
'version': '11.0.1.0.0',
'author': 'Salton Massally <[email protected]> (iDT Labs), Nicolas JEUDY',
'license': 'LGPL-3',
'category': 'SaaS',
'website': 'idtlabs.sl',
'depends': ['saas_portal'],
'data': [
'views/saas_portal_views.xml',
],
'installable': True,
}
| it-projects-llc/odoo-saas-tools | saas_sysadmin/__manifest__.py | Python | lgpl-3.0 | 441 |
"""
This package contains packages that are primarily used for semantic analysis.
"""
__all__ = ['scope']
if __name__ == "__main__":
pass
| ldionne/nstl-lang | nstl/sema/__init__.py | Python | bsd-3-clause | 146 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
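# Plots the surge acceleration (u-dot) time history of the turning-circle
# manoeuvre for the baseline ("padrão") case and three modified cases
# (labelled 1.1r, 1.2r, 1.3r).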
# Is the data dimensionless (non-dimensional)?
adi = False
# Save the figures (True|False)?
save = True
# If saving, which file format?
formato = 'jpg'
# If saving, which output directory?
dircg = 'fig-sen'
# If saving, which file name?
nome = 'r-acel-u-cg'
# Title for the plots
titulo = ''#'Curva de Giro'
# Plot colours
pc = 'k'
r1c = 'b'
r2c = 'y'
r3c = 'r'
# Line styles
ps = '-'
r1s = '-'
r2s = '-'
r3s = '-'
import os
import scipy as sp
import matplotlib.pyplot as plt
from libplot import *
acelhis = sp.genfromtxt('../entrada/padrao/CurvaGiro/acel.dat')
acelhis2 = sp.genfromtxt('../entrada/r/saida1.1/CurvaGiro/acel.dat')
acelhis3 = sp.genfromtxt('../entrada/r/saida1.2/CurvaGiro/acel.dat')
acelhis4 = sp.genfromtxt('../entrada/r/saida1.3/CurvaGiro/acel.dat')
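# Axis limits for the plot: [t_min, t_max, udot_min, udot_max]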
axl = [0, 1000, -0.04, 0.0]
# Plotting the turning circle (Curva de Giro)
if adi:
    ylabel = r'$\dot u\prime$'
    xacellabel = r'$t\prime$'
else:
    ylabel = r'$\dot u \quad m/s^2$'
    xacellabel = r'$t \quad segundos$'
plt.subplot2grid((1,4),(0,0), colspan=3)
# Baseline (padrão)
plt.plot(acelhis[:, 0], acelhis[:, 1], color = pc, linestyle = ps,
linewidth = 2, label=ur'padrão')
plt.plot(acelhis2[:, 0], acelhis2[:, 1], color = r1c,linestyle = r1s,
linewidth = 2, label=ur'1.1r')
plt.plot(acelhis3[:, 0], acelhis3[:, 1], color = r2c, linestyle = r2s,
linewidth = 2, label=ur'1.2r')
plt.plot(acelhis4[:, 0], acelhis4[:, 1], color = r3c, linestyle = r3s,
linewidth = 2, label=ur'1.3r')
plt.title(titulo)
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
plt.ylabel(ylabel)
plt.xlabel(xacellabel)
plt.axis(axl)
plt.grid(True)
if save:
if not os.path.exists(dircg):
os.makedirs(dircg)
if os.path.exists(dircg + '/' + nome + '.' + formato):
os.remove(dircg + '/' + nome + '.' + formato)
plt.savefig(dircg + '/' + nome + '.' + formato , format=formato)
else:
plt.show()
| asoliveira/NumShip | scripts/plot/r-ace-u-cg-plt.py | Python | gpl-3.0 | 2,027 |
#
# Copyright 2011-2016 Thomas Chiroux
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.
# If not, see <http://www.gnu.org/licenses/gpl.html>
#
# This module is part of dipplanner, a Dive planning Tool written in python
# pylint: disable=too-many-public-methods, protected-access, no-self-use
# pylint: disable=too-few-public-methods, duplicate-code, invalid-name
# pylint: disable=too-many-ancestors, attribute-defined-outside-init
"""Test Dives with air."""
import json
import pkg_resources
from dipplanner.dive import Dive
from dipplanner.segment import SegmentDive
from dipplanner.segment import UnauthorizedMod
from dipplanner import settings
from dipplanner.tests.common import TestDive, TMethodsMixin
# ============================================================================
# ======= S Y S T E M A T I C T E S T S ===============================
# ============================================================================
class TestDiveCCAirBase(TestDive):
"""Class for test air dive."""
def setUp(self):
"""Init of the tests."""
super().setUp()
# load json results file
self.results = json.loads(
pkg_resources.resource_string(
"dipplanner.tests",
__name__.split('.')[-1] + '.json').decode('utf-8'))
class TestDiveCCAir(TestDiveCCAirBase):
"""Class for test air dive."""
def setUp(self):
"""Init of the tests."""
super().setUp()
settings.RUN_TIME = False
self.setpoint = 1.2
self.dive_tank = self.ccair
self.all_tanks = [self.ccair]
self.do_dive()
# AIR =========================================================================
# ==================================================== 10m tests ==============
class TestDiveCCAir10m10min(TestDiveCCAir, TMethodsMixin):
"""Test air 10m 10min."""
params = ((10, 10), )
class TestDiveCCAir10m20min(TestDiveCCAir, TMethodsMixin):
"""Test air 10m 20min."""
params = ((10, 20), )
class TestDiveCCAir10m30min(TestDiveCCAir, TMethodsMixin):
"""Test air 10m 30min."""
params = ((10, 30), )
class TestDiveCCAir10m40min(TestDiveCCAir, TMethodsMixin):
"""Test air 10m 40min."""
params = ((10, 40), )
class TestDiveCCAir10m50min(TestDiveCCAir, TMethodsMixin):
"""Test air 10m 50min."""
params = ((10, 50), )
class TestDiveCCAir10m60min(TestDiveCCAir, TMethodsMixin):
"""Test air 10m 60min."""
params = ((10, 60), )
class TestDiveCCAir10m70min(TestDiveCCAir, TMethodsMixin):
"""Test air 10m 70min."""
params = ((10, 70), )
# ==================================================== 20m tests ==============
class TestDiveCCAir20m10min(TestDiveCCAir, TMethodsMixin):
"""Test air 20m 10min."""
params = ((20, 10), )
class TestDiveCCAir20m20min(TestDiveCCAir, TMethodsMixin):
"""Test air 20m 20min."""
params = ((20, 20), )
class TestDiveCCAir20m30min(TestDiveCCAir, TMethodsMixin):
"""Test air 20m 30min."""
params = ((20, 30), )
class TestDiveCCAir20m40min(TestDiveCCAir, TMethodsMixin):
"""Test air 20m 40min."""
params = ((20, 40), )
class TestDiveCCAir20m50min(TestDiveCCAir, TMethodsMixin):
"""Test air 20m 50min."""
params = ((20, 50), )
# ==================================================== 30m tests ==============
class TestDiveCCAir30m10min(TestDiveCCAir, TMethodsMixin):
"""Test air 30m 10min."""
params = ((30, 10), )
class TestDiveCCAir30m20min(TestDiveCCAir, TMethodsMixin):
"""Test air 30m 20min."""
params = ((30, 20), )
class TestDiveCCAir30m30min(TestDiveCCAir, TMethodsMixin):
"""Test air 30m 30min."""
params = ((30, 30), )
class TestDiveCCAir30m40min(TestDiveCCAir, TMethodsMixin):
"""Test air 30m 40min."""
params = ((30, 40), )
class TestDiveCCAir30m50min(TestDiveCCAir, TMethodsMixin):
"""Test air 30m 50min."""
params = ((30, 50), )
# ==================================================== 40m tests ==============
class TestDiveCCAir40m10min(TestDiveCCAir, TMethodsMixin):
"""Test air 40m 10min."""
params = ((40, 10), )
class TestDiveCCAir40m20min(TestDiveCCAir, TMethodsMixin):
"""Test air 40m 20min."""
params = ((40, 20), )
class TestDiveCCAir40m30min(TestDiveCCAir, TMethodsMixin):
"""Test air 40m 30min."""
params = ((40, 30), )
class TestDiveCCAir40m40min(TestDiveCCAir, TMethodsMixin):
"""Test air 40m 40min."""
params = ((40, 40), )
class TestDiveCCAir40m50min(TestDiveCCAir, TMethodsMixin):
"""Test air 40m 50min."""
params = ((40, 50), )
# ==================================================== 50m tests ==============
class TestDiveCCAir50m10min(TestDive):
    """Test air 50m 10min."""
params = ((50, 10), )
def runTest(self):
"""Run one test."""
try:
self.setpoint = 1.2
self.dive_tank = self.ccair
self.all_tanks = [self.ccair]
diveseg1 = SegmentDive(self.params[0][0], self.params[0][1] * 60,
self.dive_tank, self.setpoint)
self.profile1 = Dive([diveseg1], self.all_tanks)
self.profile1.do_dive()
except UnauthorizedMod:
pass
else:
self.fail("should raise UnauthorizedMod")
# ======================= Multilevel Dive =====================================
class TestDiveCCAirMultilevel1(TestDiveCCAir, TMethodsMixin):
"""Multilevel dive test."""
params = ((40, 10), (45, 12), (30, 15))
name = 'multilevel1'
| ThomasChiroux/dipplanner | dipplanner/tests/dive_cc_air_test.py | Python | gpl-3.0 | 6,118 |
import json
import pytest
import time
import azrael.igor
import azrael.aztypes
import azrael.leonard
import azrael.datastore
import azrael.vectorgrid
import azrael.eventstore
import numpy as np
import unittest.mock as mock
import azrael.leo_api as leoAPI
from azrael.aztypes import RetVal
from IPython import embed as ipshell
from azrael.test.test import getCSBox, getCSSphere, getCSEmpty
from azrael.test.test import getP2P, getLeonard, getRigidBody
# List all available engines. This simplifies the parameterisation of those
# tests that must pass for all engines.
allEngines = [
azrael.leonard.LeonardBase,
azrael.leonard.LeonardBullet,
azrael.leonard.LeonardSweeping,
azrael.leonard.LeonardDistributedZeroMQ,
]
class TestLeonardAllEngines:
@classmethod
def setup_class(cls):
assert azrael.vectorgrid.deleteAllGrids().ok
cls.igor = azrael.igor.Igor()
@classmethod
def teardown_class(cls):
assert azrael.vectorgrid.deleteAllGrids().ok
def setup_method(self, method):
assert azrael.vectorgrid.deleteAllGrids().ok
azrael.datastore.init(flush=True)
def teardown_method(self, method):
azrael.test.test.shutdownLeonard()
@pytest.mark.parametrize('clsLeonard', allEngines)
def test_getGridForces(self, clsLeonard):
"""
        Define a force grid, populate it, and verify that ``getGridForces``
        returns the correct force value for every queried position.
"""
# Convenience.
vg = azrael.vectorgrid
        # Create a pristine force grid.
assert vg.defineGrid(name='force', vecDim=3, granularity=1).ok
# Get a Leonard instance.
leo = getLeonard(clsLeonard)
# Grid parameters.
Nx, Ny, Nz = 2, 3, 4
ofs = np.array([-1.1, -2.7, 3.5], np.float64)
force = np.zeros((Nx, Ny, Nz, 3))
# Compute grid values.
idPos, idVal = {}, {}
val, objID = 0, 0
for x in range(Nx):
for y in range(Ny):
for z in range(Nz):
                    # Assign integer values (allows for equality comparisons
# later on without having to worry about rounding effects).
force[x, y, z] = [val, val + 1, val + 2]
# Build the input dictionary for ``getGridForces``.
idPos[objID] = np.array(
[x + ofs[0], y + ofs[1], z + ofs[2]])
# Keep track of the value assigned to this position.
idVal[objID] = force[x, y, z]
# Update the counters.
val += 3
objID += 1
# Set the grid values with a region operator.
ret = vg.setRegion('force', ofs, force)
# Query the grid values at the positions specified in idPos.
ret = leo.getGridForces(idPos)
assert ret.ok
gridForces = ret.data
# Verify the value at every position we used in this test.
for objID in idPos:
# Convenience.
val_direct = idVal[objID]
val_gridforce = gridForces[objID]
# Compare: direct <--> getGridForces.
assert np.array_equal(val_direct, val_gridforce)
@pytest.mark.parametrize('clsLeonard', allEngines)
def test_setRigidBody_basic(self, clsLeonard):
"""
Spawn an object, specify its State Variables explicitly, and verify the
change propagated through Azrael.
"""
# Get a Leonard instance.
leo = getLeonard(clsLeonard)
# Parameters and constants for this test.
id_1 = '1'
# Body data.
p = np.array([1, 2, 5])
vl = np.array([8, 9, 10.5])
vr = vl + 1
body = {'position': p, 'velocityLin': vl, 'velocityRot': vr}
del p, vl, vr
# Spawn a new object. It must have ID=1.
assert leoAPI.addCmdSpawn([(id_1, getRigidBody())]).ok
# Update the object's body.
assert leoAPI.addCmdModifyBodyState(id_1, body).ok
# Sync the commands to Leonard.
leo.processCommandsAndSync()
# Verify that the attributes were correctly updated.
ret = leo.allBodies[id_1]
assert np.array_equal(ret.position, body['position'])
assert np.array_equal(ret.velocityLin, body['velocityLin'])
assert np.array_equal(ret.velocityRot, body['velocityRot'])
@pytest.mark.parametrize('clsLeonard', allEngines)
def test_setRigidBody_advanced(self, clsLeonard):
"""
Similar to test_setRigidBody_basic but modify the collision shape
information as well, namely their mass- and type.
"""
# Get a Leonard instance.
leo = getLeonard(clsLeonard)
# Parameters and constants for this test.
cshape_box = {'1': getCSBox()}
cshape_sphere = {'1': getCSSphere()}
body = getRigidBody(imass=2, scale=3, cshapes=cshape_sphere)
# Spawn an object.
objID = '1'
assert leoAPI.addCmdSpawn([(objID, body)]).ok
del body
# Verify the body data.
leo.processCommandsAndSync()
assert leo.allBodies[objID].imass == 2
assert leo.allBodies[objID].scale == 3
assert leo.allBodies[objID].cshapes == cshape_sphere
# Update the body.
cs_new = {'imass': 4, 'scale': 5, 'cshapes': cshape_box}
assert leoAPI.addCmdModifyBodyState(objID, cs_new).ok
# Verify the body data.
leo.processCommandsAndSync()
ret = leo.allBodies[objID]
assert (ret.imass == 4) and (ret.scale == 5)
assert ret.cshapes == cshape_box
@pytest.mark.parametrize('clsLeonard', allEngines)
def test_move_single_object(self, clsLeonard):
"""
Create a single object with non-zero initial speed and ensure
Leonard moves it accordingly.
"""
# Get a Leonard instance.
leo = getLeonard(clsLeonard)
# Constants and parameters for this test.
id_0 = '0'
# Spawn an object.
assert leoAPI.addCmdSpawn([(id_0, getRigidBody())]).ok
# Advance the simulation by 1s and verify that nothing has moved.
leo.step(1.0, 60)
assert np.array_equal(leo.allBodies[id_0].position, [0, 0, 0])
# Give the object a velocity.
body = {'velocityLin': np.array([1, 0, 0])}
assert leoAPI.addCmdModifyBodyState(id_0, body).ok
del body
# Advance the simulation by another second and verify the objects have
# moved accordingly.
leo.step(1.0, 60)
body = leo.allBodies[id_0]
assert 0.9 <= body.position[0] < 1.1
assert body.position[1] == body.position[2] == 0
@pytest.mark.parametrize('clsLeonard', allEngines)
def test_move_two_objects_no_collision(self, clsLeonard):
"""
Same as previous test but with two objects.
"""
# Get a Leonard instance.
leo = getLeonard(clsLeonard)
# Constants and parameters for this test.
id_0, id_1 = '0', '1'
body_0 = getRigidBody(position=[0, 0, 0], velocityLin=[1, 0, 0])
body_1 = getRigidBody(position=[0, 10, 0], velocityLin=[0, -1, 0])
# Create two objects.
tmp = [(id_0, body_0), (id_1, body_1)]
assert leoAPI.addCmdSpawn(tmp).ok
# Advance the simulation by 1s.
leo.step(1.0, 60)
# The objects must have moved according to their initial velocity.
pos_0 = leo.allBodies[id_0].position
pos_1 = leo.allBodies[id_1].position
assert pos_0[1] == pos_0[2] == 0
assert pos_1[0] == pos_1[2] == 0
assert 0.9 <= pos_0[0] <= 1.1
assert 8.9 <= pos_1[1] <= 9.1
@pytest.mark.parametrize('clsLeonard', allEngines)
def test_force_grid(self, clsLeonard):
"""
Create a force grid and ensure Leonard applies its values to the
center of the mass.
"""
# Convenience.
vg = azrael.vectorgrid
# Get a Leonard instance.
leo = getLeonard(clsLeonard)
# Constants and parameters for this test.
id_0 = '0'
# Spawn one object.
assert leoAPI.addCmdSpawn([(id_0, getRigidBody())]).ok
# Advance the simulation by 1s and verify that nothing has moved.
leo.step(1.0, 60)
assert np.array_equal(leo.allBodies[id_0].position, [0, 0, 0])
# Define a force grid.
assert vg.defineGrid(name='force', vecDim=3, granularity=1).ok
# Specify a non-zero value somewhere away from the object. This means
# the object must still not move.
pos = np.array([1, 2, 3], np.float64)
value = np.ones(3, np.float64)
assert vg.setValues('force', [(pos, value)]).ok
# Step the simulation and verify the object remained where it was.
leo.step(1.0, 60)
assert np.array_equal(leo.allBodies[id_0].position, [0, 0, 0])
# Specify a grid value of 1 Newton in x-direction.
pos = np.array([0, 0, 0], np.float64)
value = np.array([1, 0, 0], np.float64)
assert vg.setValues('force', [(pos, value)]).ok
# Step the simulation and verify the object moved accordingly.
leo.step(1.0, 60)
body = leo.allBodies[id_0]
assert 0.4 <= body.position[0] < 0.6
assert body.position[1] == body.position[2] == 0
@pytest.mark.parametrize('clsLeonard', allEngines)
def test_collision_contacts_mock(self, clsLeonard):
"""
Create two touching objects and verify that Bullet dispatches the
event. This test uses a mock to intercept the exact information to
dispatch.
"""
# Get a Leonard instance and verify it has an EventStore instance.
leo = getLeonard(clsLeonard)
assert hasattr(leo, 'events')
# Install the mock.
mock_es = mock.create_autospec(azrael.eventstore.EventStore)
mock_es.publish.return_value = RetVal(True, None, None)
leo.events = mock_es
# Constants and parameters for this test.
id_0, id_1 = '0', '1'
# Step the empty simulation, spawn one object, step it again. Verify
# that 'publish' is never called because no collisions have occurred.
assert mock_es.publish.call_count == 0
leo.step(1, 1)
assert mock_es.publish.call_count == 0
assert leoAPI.addCmdSpawn([(id_0, getRigidBody())]).ok
assert mock_es.publish.call_count == 0
leo.step(1, 1)
assert mock_es.publish.call_count == 0
# Duplicate the object at the same location. This implies they collide
# and 'publish' must be triggered *after* stepping the simulation.
assert leoAPI.addCmdSpawn([(id_1, getRigidBody())]).ok
assert mock_es.publish.call_count == 0
leo.step(1, 1)
if clsLeonard == azrael.leonard.LeonardBase:
# The Base class does not compute collisions and therefore must
# never publish anything.
assert mock_es.publish.call_count == 0
else:
assert mock_es.publish.call_count == 1
# Unpack the keyword arguments with which 'publish' was called.
_, kwargs = mock_es.publish.call_args
# Verify the argument values.
assert kwargs['topic'] == 'phys.collisions'
msg = json.loads(kwargs['msg'].decode('utf8'))
assert len(msg) == 1
            # Unpack the constituents of the one collision contact entry.
bodyA, bodyB, colldata = msg[0]
# Verify the IDs of the bodies that collided. Verify further that
# each body has exactly one collision point. The collision point
# itself we cannot check because it is difficult to
# deterministically predict where Bullet will think they touch. As
# such, we only verify that there are two contacts (one for each
# object) and that it is a 3-Vector.
            assert (bodyA == id_0) and (bodyB == id_1)
assert len(colldata) == 2
assert len(colldata[0]) == len(colldata[1]) == 3
@pytest.mark.parametrize('clsLeonard', allEngines)
def test_collision_contacts_eventstore(self, clsLeonard):
"""
Create two touching bodies and step the simulation. Verify the
collision event via the event store API.
"""
# Skip this test for LeonardBase because it does not emit any messages.
if clsLeonard == azrael.leonard.LeonardBase:
return
# Instantiate the message store API and start it in a thread.
es = azrael.eventstore.EventStore(topics=['#'])
es.start()
# Instantiate Leonard.
leo = getLeonard(clsLeonard)
# Spawn two identical objects and step the simulation.
assert leoAPI.addCmdSpawn([('0', getRigidBody()), ('1', getRigidBody())]).ok
leo.step(1, 1)
# Wait for the message to arrive.
for ii in range(10):
time.sleep(0.1)
if len(es.messages) > 0:
break
assert ii < 9
ret = es.getMessages()
assert ret.ok
# Verify there is exactly one message. That message must have been
# published to the 'phys.collisions' topic.
assert len(ret.data) == 1
topic, msg = ret.data[0]
assert topic == 'phys.collisions'
# The payload must be JSON.
msg = json.loads(msg.decode('utf8'))
# The payload is a list of lists. Since only one pair of objects must
# have collided, that list must contain only one element.
assert len(msg) == 1
msg = msg[0]
# The content of the list comprises the AIDs of the two objects first,
# followed by information about the position of the collisions (which
# we ignore here because we cannot safely predict their values).
assert (msg[0] == '0') and (msg[1] == '1')
# Stop the thread.
es.stop()
es.join()
class TestWorkerManager:
@classmethod
def setup_class(cls):
pass
@classmethod
def teardown_class(cls):
pass
def setup_method(self, method):
pass
def teardown_method(self, method):
azrael.test.test.shutdownLeonard()
def test_workerManager_basic(self):
"""
"""
# Convenience.
m_workerCls = mock.MagicMock()
m_worker = mock.MagicMock()
m_workerCls.return_value = m_worker
WM = azrael.leonard.WorkerManager
# Number of workers and/or steps can be zero but not negative.
with pytest.raises(AssertionError):
WM(numWorkers=-1, minSteps=5, maxSteps=10, workerCls=m_workerCls)
with pytest.raises(AssertionError):
WM(numWorkers=1, minSteps=-5, maxSteps=10, workerCls=m_workerCls)
with pytest.raises(AssertionError):
WM(numWorkers=1, minSteps=5, maxSteps=-10, workerCls=m_workerCls)
        # 'minSteps' must be smaller than or equal to 'maxSteps'.
WM(numWorkers=1, minSteps=50, maxSteps=50, workerCls=m_workerCls)
with pytest.raises(AssertionError):
WM(numWorkers=1, minSteps=50, maxSteps=10, workerCls=m_workerCls)
# Start a fleet of zero workers and verify that the WorkerManager did
# not make any attempts to instantiate the WorkerCls.
assert not m_workerCls.called
wm = WM(numWorkers=0, minSteps=10, maxSteps=50, workerCls=m_workerCls)
assert wm.workers == []
wm.maintainFleet()
assert not m_workerCls.called
del m_worker
# Start a fleet of two (mock) workers.
m_workerCls.reset_mock()
m_worker1, m_worker2 = mock.MagicMock(), mock.MagicMock()
m_workerCls.side_effect = [m_worker1, m_worker2]
assert not m_workerCls.called
wm = WM(numWorkers=2, minSteps=10, maxSteps=50, workerCls=m_workerCls)
assert wm.workers == [None, None]
wm.maintainFleet()
assert m_worker1.start.called
assert m_worker2.start.called
@mock.patch.object(azrael.leonard.os, 'kill')
def test_workerManager_stop(self, m_oskill):
"""
"""
WM = azrael.leonard.WorkerManager
# Create mocked Workers, one alive, one already terminated.
m_worker_alive, m_worker_dead = mock.MagicMock(), mock.MagicMock()
m_worker_alive.is_alive.return_value = True
m_worker_alive.pid = 1
m_worker_dead.is_alive.return_value = False
m_worker_dead.pid = 2
# Create a WorkerManager and install a list of mocked Worker instances.
m_workerCls = mock.MagicMock()
wm = WM(numWorkers=2, minSteps=1, maxSteps=5, workerCls=m_workerCls)
assert wm.workers == [None, None]
wm.workers = [m_worker_alive, m_worker_dead]
# Call the 'stop' method. This must first send SIGTERM to all children
# still alive and then join them.
assert m_oskill.call_count == 0
wm.stopAll()
assert m_worker_alive.is_alive.called
assert m_worker_dead.is_alive.called
assert m_oskill.call_count == 1
        m_oskill.assert_called_with(m_worker_alive.pid, azrael.leonard.signal.SIGTERM)
assert m_worker_alive.join.called
assert m_worker_dead.join.called
assert wm.workers == [None, None]
def test_worker_respawn(self):
"""
Ensure the objects move correctly even though the Workers will restart
themselves after every step.
The test code is similar to ``test_move_two_objects_no_collision``.
"""
# Instantiate Leonard.
leo = getLeonard(azrael.leonard.LeonardDistributedZeroMQ)
# Define a force grid (not used in this test but prevents a plethora
# of meaningless warning messages).
vg = azrael.vectorgrid
assert vg.defineGrid(name='force', vecDim=3, granularity=1).ok
# Constants and parameters for this test.
id_0, id_1 = '0', '1'
cshapes = {'cssphere': getCSSphere(radius=1)}
# Define two bodies and create the objects.
body_0 = getRigidBody(
position=[0, 0, 0], velocityLin=[1, 0, 0], cshapes=cshapes)
body_1 = getRigidBody(
position=[0, 10, 0], velocityLin=[0, -1, 0], cshapes=cshapes)
tmp = [(id_0, body_0), (id_1, body_1)]
assert leoAPI.addCmdSpawn(tmp).ok
# Advance the simulation by 1s via many small time steps. This
# ensures the Workers will restart themselves frequently.
for ii in range(60):
leo.step(1.0 / 60, 1)
# The objects must have moved according to their initial velocity.
pos_0 = leo.allBodies[id_0].position
pos_1 = leo.allBodies[id_1].position
assert pos_0[1] == pos_0[2] == 0
assert pos_1[0] == pos_1[2] == 0
assert 0.9 <= pos_0[0] <= 1.1
assert 8.9 <= pos_1[1] <= 9.1
class TestLeonardOther:
@classmethod
def setup_class(cls):
cls.igor = azrael.igor.Igor()
@classmethod
def teardown_class(cls):
pass
def setup_method(self, method):
assert azrael.vectorgrid.deleteAllGrids().ok
azrael.datastore.init(flush=True)
def teardown_method(self, method):
azrael.test.test.shutdownLeonard()
def test_createWorkPackages(self):
"""
Create a Work Package and verify its content.
"""
# Get a Leonard instance.
leo = getLeonard(azrael.leonard.LeonardDistributedZeroMQ)
# Constants.
id_1, id_2 = '1', '2'
dt, maxsteps = 2, 3
# Invalid call: list of IDs must not be empty.
assert not leo.createWorkPackage([], dt, maxsteps).ok
# Invalid call: Leonard has no object with ID 10.
assert not leo.createWorkPackage([10], dt, maxsteps).ok
# Test data.
body_1 = getRigidBody(imass=1)
body_2 = getRigidBody(imass=2)
# Add two new objects to Leonard.
tmp = [(id_1, body_1), (id_2, body_2)]
assert leoAPI.addCmdSpawn(tmp).ok
leo.processCommandsAndSync()
# Create a Work Package with two objects. The WPID must be 1.
ret = leo.createWorkPackage([id_1], dt, maxsteps)
ret_wpid, ret_wpdata = ret.data['wpid'], ret.data['wpdata']
assert (ret.ok, ret_wpid, len(ret_wpdata)) == (True, 0, 1)
# Create a second WP: it must have WPID=2 and contain two objects.
ret = leo.createWorkPackage([id_1, id_2], dt, maxsteps)
ret_wpid, ret_wpdata = ret.data['wpid'], ret.data['wpdata']
assert (ret.ok, ret_wpid, len(ret_wpdata)) == (True, 1, 2)
# Check the WP content.
WPDataOut = azrael.aztypes.WPDataOut
WPMeta = azrael.aztypes.WPMeta
data = [WPDataOut(*_) for _ in ret.data['wpdata']]
meta = WPMeta(*ret.data['wpmeta'])
assert (meta.dt, meta.maxsteps) == (dt, maxsteps)
assert (ret.ok, len(data)) == (True, 2)
assert (data[0].aid, data[1].aid) == (id_1, id_2)
assert getRigidBody(*data[0].rbs) == body_1
assert getRigidBody(*data[1].rbs) == body_2
assert np.array_equal(data[0].force, [0, 0, 0])
assert np.array_equal(data[1].force, [0, 0, 0])
def test_updateLocalCache(self):
"""
Update the local object cache in Leonard based on a Work Package.
"""
# Get a Leonard instance.
leo = getLeonard(azrael.leonard.LeonardDistributedZeroMQ)
# Convenience.
body_1 = getRigidBody(imass=1)
body_2 = getRigidBody(imass=2)
id_1, id_2 = '1', '2'
# Spawn new objects.
tmp = [(id_1, body_1), (id_2, body_2)]
assert leoAPI.addCmdSpawn(tmp).ok
leo.processCommandsAndSync()
# Create a new State Vector to replace the old one.
body_3 = getRigidBody(imass=4, position=[1, 2, 3])
newWP = [azrael.aztypes.WPDataRet(id_1, body_3)]
# Check the state variables for objID=id_1 before and after the update.
assert getRigidBody(*leo.allBodies[id_1]) == body_1
leo.updateLocalCache(newWP, None)
assert getRigidBody(*leo.allBodies[id_1]) == body_3
def test_processCommandQueue(self):
"""
Create commands to spawn-, delete, and modify objects or their booster
        values. Then verify that ``processCommandQueue`` correctly updates
Leonard's object cache.
"""
# Get a Leonard instance.
leo = getLeonard(azrael.leonard.LeonardDistributedZeroMQ)
# Convenience.
body_1 = getRigidBody(imass=1)
body_2 = getRigidBody(imass=2)
id_1, id_2 = '1', '2'
# Cache must be empty.
assert len(leo.allBodies) == len(leo.allForces) == 0
# Spawn two objects.
tmp = [(id_1, body_1), (id_2, body_2)]
assert leoAPI.addCmdSpawn(tmp).ok
leo.processCommandsAndSync()
# Verify the local cache (forces and torques must default to zero).
assert getRigidBody(*leo.allBodies[id_1]) == body_1
assert getRigidBody(*leo.allBodies[id_2]) == body_2
tmp = leo.allForces[id_1]
assert tmp.forceDirect == tmp.torqueDirect == [0, 0, 0]
assert tmp.forceBoost == tmp.torqueBoost == [0, 0, 0]
del tmp
# Remove first object.
assert leoAPI.addCmdRemoveObject(id_1).ok
leo.processCommandsAndSync()
assert id_1 not in leo.allBodies
assert id_1 not in leo.allForces
# Change the State Vector of id_2.
pos = (10, 11.5, 12)
body_3 = {'position': pos}
assert leo.allBodies[id_2].position == (0, 0, 0)
assert leoAPI.addCmdModifyBodyState(id_2, body_3).ok
leo.processCommandsAndSync()
assert leo.allBodies[id_2].position == pos
# Apply a direct force and torque to id_2.
force, torque = [1, 2, 3], [4, 5, 6]
assert leoAPI.addCmdDirectForce(id_2, force, torque).ok
leo.processCommandsAndSync()
assert leo.allForces[id_2].forceDirect == force
assert leo.allForces[id_2].torqueDirect == torque
# Specify a new force- and torque value due to booster activity.
force, torque = [1, 2, 3], [4, 5, 6]
assert leoAPI.addCmdBoosterForce(id_2, force, torque).ok
leo.processCommandsAndSync()
assert leo.allForces[id_2].forceBoost == force
assert leo.allForces[id_2].torqueBoost == torque
def test_maintain_forces(self):
"""
Leonard must not reset any forces from one iteration to the next
(used to be the case at some point and thus requires a dedicated
test now).
"""
# Get a Leonard instance.
leo = getLeonard(azrael.leonard.LeonardDistributedZeroMQ)
# Convenience.
sv = getRigidBody(imass=1)
objID = '1'
# Spawn object.
assert leoAPI.addCmdSpawn([(objID, sv)]).ok
leo.processCommandsAndSync()
# Initial force and torque must be zero.
tmp = leo.allForces[objID]
assert tmp.forceDirect == tmp.torqueDirect == [0, 0, 0]
assert tmp.forceBoost == tmp.torqueBoost == [0, 0, 0]
del tmp
# Change the direct force and verify that Leonard does not reset it.
assert leoAPI.addCmdDirectForce(objID, [1, 2, 3], [4, 5, 6]).ok
for ii in range(10):
leo.processCommandsAndSync()
tmp = leo.allForces[objID]
assert tmp.forceDirect == [1, 2, 3]
assert tmp.torqueDirect == [4, 5, 6]
assert tmp.forceBoost == [0, 0, 0]
assert tmp.torqueBoost == [0, 0, 0]
# Change the booster force and verify that Leonard does not change
# it (or the direct force specified earlier)
assert leoAPI.addCmdBoosterForce(objID, [-1, -2, -3], [-4, -5, -6]).ok
for ii in range(10):
leo.processCommandsAndSync()
tmp = leo.allForces[objID]
assert tmp.forceDirect == [1, 2, 3]
assert tmp.torqueDirect == [4, 5, 6]
assert tmp.forceBoost == [-1, -2, -3]
assert tmp.torqueBoost == [-4, -5, -6]
# Change the direct forces again.
assert leoAPI.addCmdDirectForce(objID, [3, 2, 1], [6, 5, 4]).ok
for ii in range(10):
leo.processCommandsAndSync()
tmp = leo.allForces[objID]
assert tmp.forceDirect == [3, 2, 1]
assert tmp.torqueDirect == [6, 5, 4]
assert tmp.forceBoost == [-1, -2, -3]
assert tmp.torqueBoost == [-4, -5, -6]
# Change the booster forces again.
assert leoAPI.addCmdBoosterForce(objID, [-3, -2, -1], [-6, -5, -4]).ok
for ii in range(10):
leo.processCommandsAndSync()
tmp = leo.allForces[objID]
assert tmp.forceDirect == [3, 2, 1]
assert tmp.torqueDirect == [6, 5, 4]
assert tmp.forceBoost == [-3, -2, -1]
assert tmp.torqueBoost == [-6, -5, -4]
def test_totalForceAndTorque_no_rotation(self):
"""
Verify that 'totalForceAndTorque' correctly adds up the direct-
and booster forces for an object that is in neutral position (ie
without rotation).
"""
# Get a Leonard instance.
leo = getLeonard(azrael.leonard.LeonardDistributedZeroMQ)
# Spawn one object.
sv = getRigidBody(imass=1, rotation=(0, 0, 0, 1))
objID = '1'
assert leoAPI.addCmdSpawn([(objID, sv)]).ok
leo.processCommandsAndSync()
del sv
# Initial force and torque must be zero.
assert leo.totalForceAndTorque(objID) == ([0, 0, 0], [0, 0, 0])
# Change the direct force.
assert leoAPI.addCmdDirectForce(objID, [1, 2, 3], [4, 5, 6]).ok
leo.processCommandsAndSync()
assert leo.totalForceAndTorque(objID) == ([1, 2, 3], [4, 5, 6])
# Change the direct force.
assert leoAPI.addCmdDirectForce(objID, [1, 2, 30], [4, 5, 60]).ok
leo.processCommandsAndSync()
assert leo.totalForceAndTorque(objID) == ([1, 2, 30], [4, 5, 60])
# Reset the direct force and change the booster force.
assert leoAPI.addCmdDirectForce(objID, [0, 0, 0], [0, 0, 0]).ok
assert leoAPI.addCmdBoosterForce(objID, [-1, -2, -3], [-4, -5, -6]).ok
leo.processCommandsAndSync()
assert leo.totalForceAndTorque(objID) == ([-1, -2, -3], [-4, -5, -6])
        # Direct- and booster forces must perfectly balance each other.
assert leoAPI.addCmdDirectForce(objID, [1, 2, 3], [4, 5, 6]).ok
assert leoAPI.addCmdBoosterForce(objID, [-1, -2, -3], [-4, -5, -6]).ok
leo.processCommandsAndSync()
assert leo.totalForceAndTorque(objID) == ([0, 0, 0], [0, 0, 0])
def test_totalForceAndTorque_with_rotation(self):
"""
        Similar to the previous 'test_totalForceAndTorque_no_rotation'
but this time the object does not have a neutral rotation in
world coordinates. This must have no effect on the direct force
values, but the booster forces must be re-oriented accordingly.
"""
# Get a Leonard instance.
leo = getLeonard(azrael.leonard.LeonardDistributedZeroMQ)
# Spawn one object rotated 180 degress around x-axis.
sv = getRigidBody(imass=1, rotation=(1, 0, 0, 0))
objID = '1'
assert leoAPI.addCmdSpawn([(objID, sv)]).ok
leo.processCommandsAndSync()
del sv
# Initial force and torque must be zero.
assert leo.totalForceAndTorque(objID) == ([0, 0, 0], [0, 0, 0])
# Add booster force in z-direction.
assert leoAPI.addCmdBoosterForce(objID, [1, 2, 3], [-1, -2, -3]).ok
leo.processCommandsAndSync()
        # The net forces must have their signs flipped in the y/z
# directions, and remain unchanged for x since the object itself is
# rotated 180 degrees around the x-axis.
assert leo.totalForceAndTorque(objID) == ([1, -2, -3], [-1, 2, 3])
# The object's rotation must not effect the direct force and torque.
assert leoAPI.addCmdBoosterForce(objID, [0, 0, 0], [0, 0, 0]).ok
assert leoAPI.addCmdDirectForce(objID, [1, 2, 3], [4, 5, 6]).ok
leo.processCommandsAndSync()
assert leo.totalForceAndTorque(objID) == ([1, 2, 3], [4, 5, 6])
def test_mergeConstraintSets(self):
"""
Create a few disjoint sets, specify some constraints, and verify that
they are merged correctly.
"""
def _verify(_coll_sets, _correct_answer):
"""
Assert that the ``_coll_sets`` are reduced to ``_correct_answer``
by the `mergeConstraintSets` algorithm.
"""
# Fetch all unique object pairs connected by a constraint.
assert self.igor.updateLocalCache().ok
ret = self.igor.uniquePairs()
assert ret.ok
# Merge all the sets `_coll_sets` that are connected by at least
# one constraint.
ret = azrael.leonard.mergeConstraintSets(ret.data, _coll_sets)
assert ret.ok
computed = ret.data
# Compare the computed- with the expected output.
computed = [sorted(tuple(_)) for _ in computed]
correct = [sorted(tuple(_)) for _ in _correct_answer]
assert sorted(computed) == sorted(correct)
# Convenience.
igor = self.igor
mergeConstraintSets = azrael.leonard.mergeConstraintSets
# Empty set.
self.igor.reset()
assert self.igor.updateLocalCache().ok
ret = self.igor.uniquePairs()
assert ret.ok
assert mergeConstraintSets(ret.data, []) == (True, None, [])
_verify([], [])
# Set with only one subset.
self.igor.reset()
assert self.igor.updateLocalCache().ok
ret = self.igor.uniquePairs()
assert ret.ok
assert mergeConstraintSets(ret.data, [[1]]) == (True, None, [[1]])
tmp = [[1, 2, 3]]
assert mergeConstraintSets(ret.data, tmp) == (True, None, tmp)
del tmp
# Two disjoint sets.
self.igor.reset()
s = [['1'], ['2']]
_verify(s, s)
assert igor.addConstraints([getP2P()]).ok
_verify(s, [['1', '2']])
self.igor.reset()
_verify(s, s)
# Two disjoint sets but the constraint does not link them.
self.igor.reset()
s = [['1'], ['2']]
_verify(s, s)
assert igor.addConstraints([getP2P(rb_a='1', rb_b='3')]).ok
_verify(s, s)
# Three disjoint sets and the constraint links two of them.
self.igor.reset()
s = [['1', '2', '3'], ['4', '5'], ['6']]
_verify(s, s)
assert igor.addConstraints([getP2P(rb_a='1', rb_b='6')]).ok
_verify(s, [['1', '2', '3', '6'], ['4', '5']])
# Three disjoint sets and two constraint link both of them.
self.igor.reset()
s = [['1', '2', '3'], ['4', '5'], ['6']]
_verify(s, s)
assert igor.addConstraints([getP2P(rb_a='1', rb_b='6')]).ok
assert igor.addConstraints([getP2P(rb_a='3', rb_b='4')]).ok
_verify(s, [['1', '2', '3', '6', '4', '5']])
@pytest.mark.parametrize('clsLeonard', [
azrael.leonard.LeonardBullet,
azrael.leonard.LeonardSweeping,
azrael.leonard.LeonardDistributedZeroMQ])
def test_constraint_p2p(self, clsLeonard):
"""
Link two bodies together with a Point2Point constraint and verify that
they move together.
"""
# Get a Leonard instance.
leo = getLeonard(clsLeonard)
# Convenience.
id_a, id_b = '1', '2'
pos_a, pos_b = (-2, 0, 0), (2, 0, 0)
distance = abs(pos_a[0] - pos_b[0])
assert distance >= 4
body_a = getRigidBody(position=pos_a, cshapes={'cssphere': getCSSphere()})
body_b = getRigidBody(position=pos_b, cshapes={'cssphere': getCSSphere()})
# Specify the constraints.
con = getP2P(rb_a=id_a, rb_b=id_b, pivot_a=pos_b, pivot_b=pos_a)
assert self.igor.addConstraints([con]).ok
# Spawn both objects.
assert leoAPI.addCmdSpawn([(id_a, body_a), (id_b, body_b)]).ok
leo.processCommandsAndSync()
# Apply a force to the left sphere only.
assert leoAPI.addCmdDirectForce(id_a, [-10, 0, 0], [0, 0, 0]).ok
leo.processCommandsAndSync()
# Both object must have moved the same distance 'delta' because they
# are linked. Their distance must not have changed.
leo.step(1.0, 60)
allObjs = leo.allBodies
delta_a = allObjs[id_a].position - np.array(pos_a)
delta_b = allObjs[id_b].position - np.array(pos_b)
assert delta_a[0] < pos_a[0]
assert np.allclose(delta_a, delta_b)
tmp = abs(allObjs[id_a].position[0] - allObjs[id_b].position[0])
assert abs(tmp - distance) < 0.01
del tmp
# Unlink the objects again, apply a right-pointing force to the
# right object and verify that the left continues to move left and the
# right does not.
assert self.igor.removeConstraints([con]) == (True, None, 1)
assert leoAPI.addCmdDirectForce(id_b, [10, 0, 0], [0, 0, 0]).ok
leo.processCommandsAndSync()
leo.step(1.0, 60)
        # The distance between the spheres must have increased since they are
# not linked anymore.
tmp = abs(allObjs[id_a].position[0] - allObjs[id_b].position[0])
assert tmp > (distance + 1)
class TestBroadphase:
@classmethod
def setup_class(cls):
assert azrael.vectorgrid.deleteAllGrids().ok
cls.igor = azrael.igor.Igor()
@classmethod
def teardown_class(cls):
assert azrael.vectorgrid.deleteAllGrids().ok
def setup_method(self, method):
assert azrael.vectorgrid.deleteAllGrids().ok
self.igor.reset()
def teardown_method(self, method):
azrael.test.test.shutdownLeonard()
def verifySweeping(self, aabbsIn, correct_answer):
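        """
        Run the sweeping algorithm on ``aabbsIn`` (same data in x, y, and z)
        and assert that it produces the ``correct_answer`` sets.
        """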
# Create the AABB dictionaries. For this test the data is
# identical in all three dimensions.
aabbs = {}
for k, v in aabbsIn.items():
aabbs[k] = {'x': v, 'y': v, 'z': v}
# Turn ``correct_answer`` into a well ordered list of lists to make
# the comparison with the actual result easier (see loop below).
correct_answer = sorted([tuple(set(_)) for _ in correct_answer])
# Run the sweeping algorithm for every dimension and verify the
# outputs match the expected 'correct_answer'.
for dim in ('x', 'y', 'z'):
# Run sweeping algorithm and inspect its return value.
ret = azrael.leonard.sweeping(aabbs, dim)
assert ret.ok
computed = ret.data
# Turn the output into a well ordered list of lists and compare
# it to the expected answer.
computed = sorted([tuple(set(_)) for _ in computed])
assert computed == correct_answer
def test_sweeping_2objects_multi_aabb(self):
"""
        Use objects that need more than one AABB to describe their collision sets.
"""
        # Define a force grid (not used in this test, but it prevents a
        # plethora of meaningless warning messages).
vg = azrael.vectorgrid
assert vg.defineGrid(name='force', vecDim=3, granularity=1).ok
# Convenience variables.
_verify = self.verifySweeping
# Two orthogonal objects; the first has two AABBs, the second only one.
_verify({0: [['4', '5'], ['6', '7']], 1: [['0', '1']]},
correct_answer=[['0'], ['1']])
# Self intersecting object: the two objects do not touch, but the AABBs
# of the first do. This must result in to independent objects.
_verify({0: [[4, 5], [4, 5]], 1: [[0, 1]]},
correct_answer=[['0'], ['1']])
        # Two objects; the first has two AABBs, the second only one. The second
# object touches the first AABB of the first object.
_verify({0: [[4, 5], [6, 7]], 1: [[3, 5]]},
correct_answer=[['0', '1']])
# Two identical objects with two AABBs (each). This must produce one
# set.
_verify({0: [[4, 5], [6, 7]], 1: [[4, 5], [6, 7]]},
correct_answer=[['0', '1']])
# Three objects with one-, two-, and one AABB. The first touches the
# left AABB of the middle object in 'x', whereas the third touches the
# right AABB of the middle object in the 'y' dimensions.
_verify({0: [[3, 5]], 1: [[4, 5], [7, 8]], 2: [[7, 9]]},
correct_answer=[['0', '1', '2']])
# Same as above, but the third object does not touch.
_verify({0: [[3, 5]], 1: [[4, 5], [7, 8]], 2: [[70, 90]]},
correct_answer=[['0', '1'], ['2']])
def test_sweeping_2objects(self):
"""
Ensure the Sweeping algorithm finds the correct sets.
        The algorithm takes a dictionary of AABBs and returns a list of
        lists.
        Each input entry contains the AABB coordinates. The output
        list contains the sets of overlapping AABBs.
"""
# Define a force grid (not used in this test but prevents a plethora
# of meaningless warning messages).
vg = azrael.vectorgrid
assert vg.defineGrid(name='force', vecDim=3, granularity=1).ok
# Convenience variables.
_verify = self.verifySweeping
# Two orthogonal objects.
_verify({0: [[4, 5]], 1: [[1, 2]]},
correct_answer=[['0'], ['1']])
# Repeat the test but use a different set of ID labels.
_verify({3: [[4, 5]], 10: [[1, 2]]},
correct_answer=[['3'], ['10']])
# One object inside the other.
_verify({0: [[2, 4]], 1: [[0, 1]]},
correct_answer=[['0'], ['1']])
# Partially overlapping to the right of the first object.
_verify({0: [[1, 5]], 1: [[2, 4]]},
correct_answer=[['0', '1']])
# Partially overlapping to the left of the first object.
_verify({0: [[1, 5]], 1: [[0, 2]]},
correct_answer=[['0', '1']])
# Pass no object to the Sweeping algorithm.
assert azrael.leonard.sweeping({}, 'x').data == []
# Pass only a single object to the Sweeping algorithm.
_verify({0: [[1, 5]]},
correct_answer=[['0']])
def test_sweeping_3objects(self):
"""
Same as test_sweeping_2objects but with three objects.
"""
        # Define a force grid (not used in this test, but it prevents a
        # plethora of meaningless warning messages).
vg = azrael.vectorgrid
assert vg.defineGrid(name='force', vecDim=3, granularity=1).ok
# Convenience variable.
_verify = self.verifySweeping
# Three non-overlapping objects.
_verify({0: [[1, 2]], 1: [[3, 4]], 2: [[5, 6]]},
correct_answer=[['0'], ['1'], ['2']])
# First and second overlap.
_verify({0: [[1, 2]], 1: [[1.5, 4]], 2: [[5, 6]]},
correct_answer=[['0', '1'], ['2']])
# Repeat test with different set of ID labels.
_verify({2: [[1, 2]], 4: [[1.5, 4]], 10: [[5, 6]]},
correct_answer=[['2', '4'], ['10']])
# First overlaps with second, second overlaps with third, but third
# does not overlap with first. The algorithm must nevertheless return
# all three in a single set.
_verify({0: [[1, 2]], 1: [[1.5, 4]], 2: [[3, 6]]},
correct_answer=[['0', '1', '2']])
# First and third overlap.
_verify({0: [[1, 2]], 1: [[10, 11]], 2: [[0, 1.5]]},
correct_answer=[['0', '2'], ['1']])
@mock.patch('azrael.leonard.sweeping')
def test_computeCollisionSetsAABB_mocksweeping(self, mock_sweeping):
"""
Create three bodies. Then alter their AABBs to create various
combinations of overlap.
"""
# Install a mock for the sweeping algorithm.
azrael.leonard.sweeping = mock_sweeping
mock_sweeping.return_value = RetVal(True, None, [])
# Single body with no AABBs.
bodies = {5: getRigidBody(position=(0, 0, 0))}
aabbs = {5: []}
correct = {5: {'x': [], 'y': [], 'z': []}}
azrael.leonard.computeCollisionSetsAABB(bodies, aabbs)
mock_sweeping.assert_called_with(correct, 'x')
# Single body with one AABB.
bodies = {5: getRigidBody(position=(0, 0, 0))}
aabbs = {5: {'1': (0, 0, 0, 1, 2, 3)}}
correct = {5: {'x': [[-1, 1]],
'y': [[-2, 2]],
'z': [[-3, 3]]}}
azrael.leonard.computeCollisionSetsAABB(bodies, aabbs)
mock_sweeping.assert_called_with(correct, 'x')
# Single body with two AABBs.
bodies = {5: getRigidBody(position=(0, 0, 0))}
aabbs = {5: {'1': (0, 0, 0, 1, 1, 1),
'2': (2, 3, 4, 2, 4, 8)}}
correct = {5: {'x': [[-1, 1], [0, 4]],
'y': [[-1, 1], [-1, 7]],
'z': [[-1, 1], [-4, 12]]}}
azrael.leonard.computeCollisionSetsAABB(bodies, aabbs)
mock_sweeping.assert_called_with(correct, 'x')
# Single body at an offset with two AABBs.
bodies = {5: getRigidBody(position=(0, 1, 2))}
aabbs = {5: {'1': (0, 0, 0, 1, 1, 1),
'2': (2, 3, 4, 2, 4, 8)}}
correct = {5: {'x': [[-1, 1], [0, 4]],
'y': [[0, 2], [0, 8]],
'z': [[1, 3], [-2, 14]]}}
azrael.leonard.computeCollisionSetsAABB(bodies, aabbs)
mock_sweeping.assert_called_with(correct, 'x')
# Three bodies with 0, 1, and 2 AABBs, respectively.
bodies = {6: getRigidBody(position=(0, 0, 0)),
7: getRigidBody(position=(0, 0, 0)),
8: getRigidBody(position=(0, 0, 0))}
aabbs = {6: {},
7: {'1': (0, 0, 0, 1, 1, 1)},
8: {'1': (0, 0, 0, 1, 1, 1),
'2': (2, 3, 4, 2, 4, 8)}}
correct = {6: {'x': [],
'y': [],
'z': []},
7: {'x': [[-1, 1]],
'y': [[-1, 1]],
'z': [[-1, 1]]},
8: {'x': [[-1, 1], [0, 4]],
'y': [[-1, 1], [-1, 7]],
'z': [[-1, 1], [-4, 12]]}}
azrael.leonard.computeCollisionSetsAABB(bodies, aabbs)
mock_sweeping.assert_called_with(correct, 'x')
def test_computeCollisionSetsAABB_basic(self):
"""
Create three bodies. Then alter their AABBs to create various
combinations of overlap.
"""
def testCCS(pos, AABBs, expected_objIDs):
"""
Compute broadphase results for bodies at ``pos`` with ``AABBs``
and verify that the ``expected_objIDs`` sets were produced.
            This function assumes that every body has exactly one AABB with
no relative offset to the body's position.
"""
# Compile the set of bodies- and their AABBs for this test run.
assert len(pos) == len(aabbs)
bodies = [getRigidBody(position=_) for _ in pos]
# By assumption for this function, every object has exactly one AABB
            # centered at position zero relative to its rigid body.
AABBs = [{'1': (0, 0, 0, _[0], _[1], _[2])} for _ in AABBs]
# Convert to dictionaries: the key is the bodyID in Azrael; here it
# is a simple enumeration.
bodies = {str(idx): val for (idx, val) in enumerate(bodies)}
AABBs = {str(idx): val for (idx, val) in enumerate(AABBs)}
# Determine the list of broadphase collision sets.
ret = azrael.leonard.computeCollisionSetsAABB(bodies, AABBs)
assert ret.ok
# Convert the reference data to a sorted list of sets.
expected_objIDs = [sorted(tuple(_)) for _ in expected_objIDs]
computed_objIDs = [sorted(tuple(_)) for _ in ret.data]
# Return the equality of the two list of lists.
assert sorted(expected_objIDs) == sorted(computed_objIDs)
del bodies, AABBs, ret, expected_objIDs, computed_objIDs
# First overlaps with second, second with third, but first not with
# third. This must result in a single broadphase set containing all
# three bodies.
pos = [(0, 0, 0), (1, 1, 1), (2, 2, 2)]
aabbs = [(0.9, 0.9, 0.9), (0.9, 0.9, 0.9), (0.9, 0.9, 0.9)]
correct_answer = (['0', '1', '2'], )
testCCS(pos, aabbs, correct_answer)
# Move the middle object away: three independent objects.
pos = [(0, 0, 0), (1, 10, 1), (2, 2, 2)]
aabbs = [(0.9, 0.9, 0.9), (0.9, 0.9, 0.9), (0.9, 0.9, 0.9)]
correct_answer = (['0'], ['1'], ['2'])
testCCS(pos, aabbs, correct_answer)
# Move the middle object back but make it so small in 'y' direction
# that it does not intersect with the other two: three independent
# objects.
pos = [(0, 0, 0), (1, 1, 1), (2, 2, 2)]
aabbs = [(0.9, 0.9, 0.9), (0.05, 0.05, 0.05), (0.9, 0.9, 0.9)]
correct_answer = (['0'], ['1'], ['2'])
testCCS(pos, aabbs, correct_answer)
# Second and third overlap, but first is by itself.
pos = [(0, 0, 0), (1, 1, 1), (2, 2, 2)]
aabbs = ([0.1, 0.1, 0.1], [0.1, 0.1, 0.1], [1, 1, 1])
correct_answer = (['0'], ['1', '2'])
testCCS(pos, aabbs, correct_answer)
# Objects overlap in 'x' and 'z', but not 'y': three independent
# objects.
pos = [(0, 0, 0), (1, 1, 1), (2, 2, 2)]
aabbs = ([1, 0.4, 1], [1, 0.4, 1], [1, 0.4, 1])
correct_answer = (['0'], ['1'], ['2'])
testCCS(pos, aabbs, correct_answer)
# Middle object has no size, but the first/third objects are large
# enough to touch each other: First/third must be connected, middle one
# must be by itself.
pos = [(0, 0, 0), (1, 1, 1), (2, 2, 2)]
aabbs = ([1.01, 1.01, 1.01], [0, 0, 0], [1.01, 1.01, 1.01])
correct_answer = (['0', '2'], ['1'])
testCCS(pos, aabbs, correct_answer)
def test_computeCollisionSetsAABB_rotate_scale(self):
"""
Test broadphase when body has a different scale and/or is rotated.
Create two bodies with one AABB each. The AABB of the first body is
a centered unit cube used for testing. The second body has an AABB with
an offset. Use different scales and rotations to verify it is
correctly taken into account during the broadphase.
"""
# Create the test body at the center. It is a centered unit cube.
body_a = getRigidBody(position=(0, 0, 0), cshapes={'csbox': getCSBox()})
def _verify(rba, pos, rot, scale, intersect: bool):
"""
Assert that body ``rba`` and a new body (specified by ``pos``,
``rot``, and ``scale``) ``intersect``.
This is a convenience function to facilitate more readable tests.
"""
# Hard code collision shape offset for second object.
cs_ofs = (1, 0, 0)
# Create the second body. Its collision shape is a unit cube
# at position `cs_ofs`.
body_b = getRigidBody(position=pos, scale=scale, rotation=rot,
cshapes={'csbox': getCSBox()})
# Compile the input dictionaries for the broadphase algorithm.
bodies = {'1': rba, '2': body_b}
aabbs = {'1': {'1': [0, 0, 0, 1, 1, 1]},
'2': {'1': [cs_ofs[0], cs_ofs[1], cs_ofs[2], 1, 1, 1]}}
# Compute the broadphase collision sets.
ret = azrael.leonard.computeCollisionSetsAABB(bodies, aabbs)
assert ret.ok
coll_sets = ret.data
# If the bodies intersect there must be exactly one collision set
# with two entries, otherwise it is the other way around.
if intersect:
assert len(coll_sets) == 1
assert len(coll_sets[0]) == 2
else:
assert len(coll_sets) == 2
assert len(coll_sets[0]) == len(coll_sets[1]) == 1
# Test object intersects (just) to the right of probe.
pos = (0.99, 0, 0)
rot = (0, 0, 0, 1)
scale = 1
_verify(body_a, pos, rot, scale, intersect=True)
# Test object does (just) not intersect with probe.
pos = (1.01, 0, 0)
rot = (0, 0, 0, 1)
scale = 1
_verify(body_a, pos, rot, scale, intersect=False)
# Dummy is rotated 180 degrees around y axis. This causes the AABBs to
# intersect again.
pos = (2, 0, 0)
rot = (0, 0, 0, 1)
scale = 1
_verify(body_a, pos, rot=(0, 0, 0, 1), scale=1, intersect=False)
_verify(body_a, pos, rot=(0, 1, 0, 0), scale=1, intersect=True)
        # Place the dummy out of reach of the probe. Then enlarge the
        # probe (scale=3), which makes the objects overlap.
pos = (-4, 0, 0)
rot = (0, 0, 0, 1)
body_a_scaled = body_a._replace(scale=3)
_verify(body_a, pos, rot, scale=1, intersect=False)
_verify(body_a_scaled, pos, rot, scale=1, intersect=True)
@pytest.mark.parametrize('dim', [0, 1, 2])
def test_computeCollisionSetsAABB_viaLeonard(self, dim):
"""
Create a sequence of 10 test objects and sync them to Leonard. Their
positions only differ in the ``dim`` dimension.
Then use subsets of these 10 objects to test basic collision detection.
This uses the Azrael toolchain to create objects and sync them the
Leonard. This ensures the data propagates coorectly from the
interfaces, via Leonard, to the broadphase algorithm.
"""
# Get a Leonard instance.
leo = getLeonard(azrael.leonard.LeonardBase)
# Create the IDs for the test bodies.
num_bodies = 10
# Create several rigid bodies with a spherical collision shape.
cs = {'1': getCSSphere(radius=1)}
if dim == 0:
states = [getRigidBody(position=[_, 0, 0], cshapes=cs) for _ in range(10)]
elif dim == 1:
states = [getRigidBody(position=[0, _, 0], cshapes=cs) for _ in range(10)]
elif dim == 2:
states = [getRigidBody(position=[0, 0, _], cshapes=cs) for _ in range(10)]
else:
print('Invalid dimension for this test')
assert False
# Add all objects to the Body State DB and sync with Leonard.
for objID, bs in enumerate(states):
assert leoAPI.addCmdSpawn([(str(objID), bs)]).ok
del states
leo.processCommandsAndSync()
# Sanity check: the number of test IDs must match the number of objects
# in Leonard.
assert len(leo.allBodies) == num_bodies
def ccsWrapper(test_objIDs, expected_objIDs):
"""
Assert that ``test_objIDs`` form the ``expected_objIDs`` collision
sets.
This is a convenience wrapper to facilitate readable tests.
"""
            # Compile the dictionaries of bodies and their AABBs for this test run.
bodies = {_: leo.allBodies[_] for _ in test_objIDs}
AABBs = {_: leo.allAABBs[_] for _ in test_objIDs}
# Determine the list of broadphase collision sets.
ret = azrael.leonard.computeCollisionSetsAABB(bodies, AABBs)
assert ret.ok
# Convert the reference data to a sorted list of sets.
expected_objIDs = sorted([set(_) for _ in expected_objIDs])
computed_objIDs = sorted([set(_) for _ in ret.data])
            # Verify that the computed collision sets match the expected ones.
assert expected_objIDs == computed_objIDs
# Two non-overlapping objects.
ccsWrapper(['0', '9'],
[['0'], ['9']])
# Two overlapping objects.
ccsWrapper(['0', '1'],
[['0', '1']])
# Three sets.
ccsWrapper(['0', '1', '5', '8', '9'],
[['0', '1'], ['5'], ['8', '9']])
# Same test, but objects are passed in a different sequence. This must
# not alter the test outcome.
ccsWrapper(['0', '5', '1', '9', '8'],
[['0', '1'], ['5'], ['8', '9']])
# All objects must form one connected set.
ccsWrapper([str(_) for _ in range(10)], [[str(_) for _ in range(10)]])
def test_computeCollisionSetsAABB_static(self):
"""
        Static bodies (i.e. every body with imass=0) must be added to every
        collision set.
"""
def testCCS(pos, AABBs, imasses, expected_objIDs):
"""
            Compute broadphase results for bodies at ``pos`` with ``imasses``
            and ``AABBs``, and verify that the ``expected_objIDs`` sets were
            produced.
            This function assumes that every body has exactly one AABB with
            no relative offset to the body's position.
"""
            # Compile the dictionaries of bodies and their AABBs for this test run.
            assert len(pos) == len(AABBs) == len(imasses)
bodies = [getRigidBody(position=p, imass=m) for (p, m) in zip(pos, imasses)]
            # By assumption in this function, every object has exactly one
            # AABB centered at zero.
AABBs = [{'1': (0, 0, 0, _[0], _[1], _[2])} for _ in AABBs]
# Convert to dictionaries: the key is the bodyID in Azrael; here it
# is a simple enumeration.
bodies = {str(idx): val for (idx, val) in enumerate(bodies)}
AABBs = {str(idx): val for (idx, val) in enumerate(AABBs)}
# Determine the list of broadphase collision sets.
ret = azrael.leonard.computeCollisionSetsAABB(bodies, AABBs)
assert ret.ok
# Convert the reference data to a sorted list of sets.
expected_objIDs = [sorted(tuple(_)) for _ in expected_objIDs]
computed_objIDs = [sorted(tuple(_)) for _ in ret.data]
            # Verify that the computed collision sets match the expected ones.
assert sorted(expected_objIDs) == sorted(computed_objIDs)
del bodies, AABBs, ret, expected_objIDs, computed_objIDs
        # Three dynamic bodies: first overlaps with second, second with third,
# but first not with third. This must result in a single broadphase set
# containing all three bodies.
pos = [(0, 0, 0), (1, 1, 1), (2, 2, 2)]
aabbs = [(0.9, 0.9, 0.9), (0.9, 0.9, 0.9), (0.9, 0.9, 0.9)]
imasses = [1, 1, 1]
correct_answer = (['0', '1', '2'], )
testCCS(pos, aabbs, imasses, correct_answer)
        # Same test, but this time the middle body is static (i.e. imass=0). This
# must result in two collision sets, each containing the static body.
pos = [(0, 0, 0), (1, 1, 1), (2, 2, 2)]
aabbs = [(0.9, 0.9, 0.9), (0.9, 0.9, 0.9), (0.9, 0.9, 0.9)]
imasses = [1, 0, 1]
correct_answer = (['0', '1'], ['1', '2'])
testCCS(pos, aabbs, imasses, correct_answer)
def test_skipEmpty(self):
"""
Verify that _skipEmptyBodies removes all bodies that have a) exactly
one collision shape and b) that collision shape is empty.
"""
# Convenience: some collision shapes.
empty = getCSEmpty()
sphere = getCSSphere(radius=1)
# Create several bodies with various collision shape combinations.
bodies = {
1: getRigidBody(cshapes={'foo': empty}),
2: getRigidBody(cshapes={'bar': sphere}),
3: getRigidBody(cshapes={'foo': empty, 'bar': sphere}),
4: getRigidBody(cshapes={'foo': empty, 'bar': empty})
}
# Shallow copy of the original dictionary for the comparison
# afterwards.
bodies_copy = dict(bodies)
ret = azrael.leonard._skipEmptyBodies(bodies_copy)
# Verify that the function has no side effect (ie that it does not
# alter the dictionary we pass in).
assert bodies == bodies_copy
# The function must have removed the first body.
assert ret == {2: bodies[2], 3: bodies[3]}
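# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original test suite): a condensed,
# direct call to the broadphase helper using the same helpers imported above
# (getRigidBody, azrael.leonard). Bodies 'a' and 'b' overlap, 'c' is far
# away, so the expected collision sets are {'a', 'b'} and {'c'}.
# ---------------------------------------------------------------------------
def _broadphase_demo():
    bodies = {
        'a': getRigidBody(position=(0, 0, 0)),
        'b': getRigidBody(position=(1, 0, 0)),
        'c': getRigidBody(position=(10, 0, 0)),
    }
    # One centered unit AABB per body: (offset_x, offset_y, offset_z,
    # half_width_x, half_width_y, half_width_z).
    aabbs = {name: {'1': [0, 0, 0, 1, 1, 1]} for name in bodies}
    ret = azrael.leonard.computeCollisionSetsAABB(bodies, aabbs)
    assert ret.ok
    # Expected result (as sets): [{'a', 'b'}, {'c'}]
    return [set(_) for _ in ret.data]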
| olitheolix/azrael | azrael/test/test_leonard.py | Python | agpl-3.0 | 58,822 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
class ResourceNamespacePatch(Resource):
"""Definition of resource.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param tags: Resource tags.
:type tags: dict[str, str]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(self, tags=None):
super(ResourceNamespacePatch, self).__init__()
self.tags = tags
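# ---------------------------------------------------------------------------
# Illustrative note (not part of the generated code): callers only set
# ``tags``; ``id``/``name``/``type`` are read-only and filled in by the
# service when a response is deserialized. A typical construction looks like:
#
#     patch = ResourceNamespacePatch(tags={'environment': 'dev'})
#
# The instance is then passed to the relay management client's namespace
# update operation (see the SDK's operations classes for the exact call).
# ---------------------------------------------------------------------------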
| lmazuel/azure-sdk-for-python | azure-mgmt-relay/azure/mgmt/relay/models/resource_namespace_patch.py | Python | mit | 1,372 |
import ConfigParser
import os
import glob
from tworpus import settings
def get_converter_dirs():
pathname = settings.CONVERTERS_DIR
dirs = []
for dir in os.walk(pathname):
        if os.path.isdir(dir[0]) and dir[0] != pathname:
cfgfile = glob.glob(os.path.join(dir[0], "config.cfg"))
            if len(cfgfile) > 0:
dirs.append(dir[0])
return dirs
def get_converter_data():
converters = []
dirs = get_converter_dirs()
for dir in dirs:
config = ConfigParser.ConfigParser()
config.read(os.path.join(dir, "config.cfg"))
try:
data = {}
data["class_name"] = config.get("Data Converter", "class")
data["module_name"] = config.get("Data Converter", "module")
data["author_name"] = config.get("Data Converter", "author")
data["company_name"] = config.get("Data Converter", "company")
data["description"] = config.get("Data Converter", "description")
data["title"] = config.get("Data Converter", "title")
data["id"] = config.get("Data Converter", "id")
data["package"] = os.path.basename(dir)
converters.append(data)
except:
pass
return converters
def get_converters_from_ids(ids):
filteredconverters = []
converters = get_converter_data()
for id in ids:
for converter in converters:
if id == converter["id"]:
filteredconverters.append(converter)
    return filteredconverters
| markusmichel/Tworpus-Client | tworpus/data_converter.py | Python | apache-2.0 | 1,560 |
#!/usr/bin/python3
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
# bsc#1031358
# This script dumps stuck requests for every disk on the system
from crash.subsystem.storage import for_each_disk
from crash.subsystem.storage.decoders import for_each_bio_in_stack
from crash.subsystem.storage import gendisk_name
from crash.subsystem.storage.block import for_each_request_in_queue
from crash.types.list import list_for_each_entry
from crash.util import get_symbol_value
from crash.cache.syscache import kernel, jiffies_to_msec
empty = []
flush_end_io = get_symbol_value('flush_end_io')
for b in for_each_disk():
name = gendisk_name(b)
count = 0
for r in for_each_request_in_queue(b['queue']):
age_in_jiffies = kernel.jiffies - r['start_time']
age = float(int(kernel.jiffies_to_msec(age_in_jiffies))) / 1000
if count == 0:
            print(name)
if r['bio']:
print "{}: {:x} request: age={}s, bio chain".format(
count, int(r.address), age, int(r['bio']))
n=0
for entry in for_each_bio_in_stack(r['bio']):
print " {}: {}".format(n, entry['description'])
n += 1
else:
if r['end_io'] == flush_end_io:
print "{}: {:x} request: age={}s, pending flush request".format(
count, int(r.address), age)
else:
print "{}: {:x} request: start={}, undecoded".format(
count, int(r.address), age)
count += 1
        print()
if count == 0:
empty.append(name)
#print "Queues for the following devices were empty: {}".format(", ".join(empty))
| jeffmahoney/crash-python | contrib/stuck-requests.py | Python | gpl-2.0 | 1,702 |
# Licensed under the MIT License - https://opensource.org/licenses/MIT
import unittest
import numpy as np
from pycobra.cobra import Cobra
from pycobra.ewa import Ewa
from pycobra.kernelcobra import KernelCobra
import logging
from sklearn.utils.estimator_checks import check_estimator
class TestPrediction(unittest.TestCase):
def setUp(self):
# setting up our random data-set
rng = np.random.RandomState(42)
# D1 = train machines; D2 = create COBRA; D3 = calibrate epsilon, alpha; D4 = testing
n_features = 20
D1, D2, D3, D4 = 200, 200, 200, 200
D = D1 + D2 + D3 + D4
X = rng.uniform(-1, 1, D * n_features).reshape(D, n_features)
Y = np.power(X[:,1], 2) + np.power(X[:,3], 3) + np.exp(X[:,10])
# training data-set
X_train = X[:D1 + D2]
X_test = X[D1 + D2 + D3:D1 + D2 + D3 + D4]
# for testing
Y_train = Y[:D1 + D2]
Y_test = Y[D1 + D2 + D3:D1 + D2 + D3 + D4]
cobra = Cobra(random_state=0, epsilon=0.5)
cobra.fit(X_train, Y_train)
ewa = Ewa(random_state=0)
ewa.fit(X_train, Y_train)
kernel = KernelCobra(random_state=0)
kernel.fit(X_train, Y_train)
self.test_data = X_test
self.cobra = cobra
self.ewa = ewa
self.kernelcobra = kernel
def test_cobra_predict(self):
expected = 2.7310842344617035
result = self.cobra.predict(self.test_data[0].reshape(1, -1))
self.assertAlmostEqual(expected, result)
def test_ewa_predict(self):
expected = 2.7656847636961603
result = self.ewa.predict(self.test_data[0].reshape(1, -1))
self.assertAlmostEqual(expected, result[0])
def test_kernel_predict(self):
expected = 2.613685190585763
result = self.kernelcobra.predict(self.test_data[0].reshape(1, -1))
self.assertAlmostEqual(expected, result[0])
def test_estimators(self):
check_estimator(Cobra)
check_estimator(Ewa)
check_estimator(KernelCobra)
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.DEBUG)
    unittest.main()
| bhargavvader/pycobra | tests/test_cobra.py | Python | mit | 2,212 |
#!/usr/bin/env python
import socket
import sys
import requests
import string
r = requests.get("http://4.ipquail.com/ip")
if r.status_code == 200:
ipv4 = r.content.translate(None, string.whitespace)
else:
ipv4 = 'err'
#print ipv4
# Create a UDP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
server_address = ('192.168.203.22', 10000)
message = "This is a test message from\n%s" % ipv4
try:
# Send data
print >>sys.stderr, 'sending "%s"' % message
sent = sock.sendto(message, server_address)
# Receive response
# print >>sys.stderr, 'waiting to receive'
# data, server = sock.recvfrom(4096)
# print >>sys.stderr, 'received "%s"' % data
finally:
print >>sys.stderr, 'closing socket'
sock.close()
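# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original script): a minimal matching
# UDP receiver for the test message sent above. Run it on the machine that
# ``server_address`` points at; the bind address and port here are
# assumptions and are not called automatically.
# ---------------------------------------------------------------------------
def run_test_receiver(bind_addr='0.0.0.0', port=10000):
    rx_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    rx_sock.bind((bind_addr, port))
    try:
        while True:
            data, sender = rx_sock.recvfrom(4096)
            sys.stderr.write('received %d bytes from %s:\n%s\n' % (len(data), sender[0], data))
    finally:
        rx_sock.close()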
| tbaschak/bcp38-tests | bcp38-client.py | Python | mit | 761 |
from typing import *
from .component import Component
from ..observable import Observable
class Button(Component, Observable):
"""
An Observable Component that represents a Button with some text
"""
def __init__(self, text: AnyStr):
super().__init__()
self.text = text
def on_clicked(self, *handlers: Callable, **kwargs):
return self.on('clicked', *handlers, **kwargs)
def __getstate__(self) -> dict:
return dict(
**super(Button, self).__getstate__(),
text=self.text,
uuid=self.uuid
)
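# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module), assuming the
# Observable base class dispatches 'clicked' events to handlers registered
# via ``on_clicked``. The handler signature is kept permissive on purpose,
# since the exact event payload depends on the surrounding UI framework.
# ---------------------------------------------------------------------------
def _button_demo() -> Button:
    def handle_click(*args, **kwargs):
        print('button was clicked', args, kwargs)

    button = Button('Upload Program')
    button.on_clicked(handle_click)
    return button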
| purduesigbots/pros-cli | pros/common/ui/interactive/components/button.py | Python | mpl-2.0 | 591 |
from JumpScale import j
import copy
def extractDetails(remoteException):
"""Extract the details of an xml-rpc exception error message"""
try:
details = remoteException.faultString or remoteException.message
details = details.split(":", 2)[-1].replace("'>", "").lstrip()
except AttributeError:
details = str(remoteException)
return details.replace('\n', '; ') # just in case the returned error message contains an newline
class WikiClientConfluence:
def __init__(self, spacename, erasespace=False, erasepages=False):
self.spacename = spacename
spaces = j.clients.confluence.listSpaces()
spacenames = [item.key for item in spaces]
if spacename in spacenames and erasespace:
j.clients.confluence.removeSpace(spacename)
if spacename not in spacenames or erasespace:
j.clients.confluence.addSpace(spacename, spacename)
if erasepages and not erasespace:
self.removeAllPages()
self.actions = {}
def removeAllPages(self, exclude=[]):
"""
remove all pages in space
"""
pages = j.clients.confluence.listPages(self.spacename)
pageids = [item.id for item in pages if not (item.title == "Home" or item.title in exclude)]
for id in pageids:
j.console.echo("Remove page %s" % id)
j.clients.confluence.removePage("%s" % id)
def pageExists(self, pagename):
try:
page = j.clients.confluence.findPage(self.spacename, pagename)
return page
except Exception as ex:
if str(ex).find("does not exist") != -1:
return False
raise Exception('Unable to find page %s in space %s' % (pagename, self.spacename) +
'. Reason %s' % extractDetails(ex))
def pageDelete(self, pagename):
page = self.pageExists(pagename)
if page != False:
print "delete page %s" % page.title
j.clients.confluence.removePage(page.id)
def pageContentGet(self, pagename):
pageid = self.pageExists(pagename)
if pageid != False:
page = j.clients.confluence.getPage(pageid)
return page.content
else:
raise RuntimeError("cannot find page with space %s and name %s" % (self.spacename, pagename))
def createPagetree(self, pagetree):
"""
@pagetree $toppage/$subpage1/$subpage2/...
"""
if pagetree == None:
return None
def getContent(pagename):
return "h2. %s\n\n{children}\n" % pagename
pagetree = pagetree.replace("\\", "/")
pagetree = pagetree.replace("//", "/")
if pagetree[0] == "/":
pagetree = pagetree[1:]
pagenames = pagetree.split("/")
if len(pagenames) == 1:
return pagenames[0]
if len(pagenames) == 0:
raise RuntimeError("Cannot create pagetree because pagetree empty")
if not self.pageExists(pagenames[0]):
raise RuntimeError("Cannot create createPagetree: %s because could not find parent %s" % (pagetree, pagenames[0]))
parentname = pagenames.pop(0)
for pagename in pagenames:
page = self.pageExists(pagename)
if page == False:
self.pageContentSet(pagename, getContent(pagename), parent=parentname)
parentname = pagename
return pagename
def pageContentSet(self, pagename, content, parent=None):
"""
@param parent can be a single name of a home page or a pagetree e.g. $toppage/$subpage1/$subpage2/...
"""
parentid = None
parent = self.createPagetree(parent)
if parent != None:
parentpage = self.pageExists(parent)
if parentpage:
parentid = parentpage.id
if parent != None and parentpage == False:
raise RuntimeError("Cannot find parent page with name %s" % parent)
page = self.pageExists(pagename)
if page != False:
pageid = page.id
if page != False and parent != None and page.parent.id != parentid:
j.console.echo("Warning: page %s is connected to wrong parent %s, should be %s" % (pagename, page.parent.id, parentid))
# print "delete page %s" % page.title
# self.pageDelete(pagename)
pageid = False
if page != False:
page.content = content
print "editpage %s" % page.title
result = j.clients.confluence.editPage(page)
else:
print "add page %s" % pagename
result = j.clients.confluence.addPage(self.spacename, pagename, parentid, content)
def pageNew(self, pagename):
page = Page(pagename, "")
page.actions = self.actions
return page
def pageGet(self, pagename):
content = self.pageContentGet(pagename)
page = Page(pagename, content)
page.actions = self.actions
return page
def pageSet(self, page, parent=None):
"""
@param parent is name of wikipage to use as parent
"""
return self.pageContentSet(page.name, page.content, parent)
def generate(self, page, parent=None):
if parent == None and page.parent != None:
parent = page.parent
if parent == "":
parent = None
return self.pageContentSet(page.name, page.content, parent)
def generatePagegroup(self, pagegroup):
for key in pagegroup.pages.keys():
page = pagegroup.pages[key]
self.pageContentSet(page.name, page.content, page.parent)
def initActions(self, actions):
"""
        @actions is a dict keyed by action name; the value is the link with {params} which will be filled in with the remainder of the link
"""
self.actions = actions
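# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module), assuming a JumpScale
# environment with a configured Confluence client; the space, page and parent
# names below are placeholders. The function is defined only as an example
# and is never called at import time.
# ---------------------------------------------------------------------------
def _example_publish():
    client = WikiClientConfluence("DOCS")
    page = client.pageNew("Release Notes")
    page.content = "h1. Release Notes\n\nGenerated content goes here."
    # Parent may be a page tree; missing intermediate pages are created.
    client.pageSet(page, parent="Home/Releases")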
| Jumpscale/jumpscale6_core | lib/JumpScale/portal/docgenerator/WikiClientConfluence.py | Python | bsd-2-clause | 5,951 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import url, include
from . import views, Registry
urlpatterns = [
url(r'^$', views.LoginFormView.as_view(), name="login"),
url(r'^logout$', views.LogoutActionView.as_view(), name="logout"),
url(r'^settings', views.SettingsView.as_view(), name="settings"),
url(r'^list$', views.FactorListView.as_view(), name="list"),
]
for id, cfg in Registry.items():
urlpatterns.append(url("{}/".format(id), include((cfg.urlpatterns, "watchdog_id.auth_factories." + id), namespace=id)))
| watchdogpolska/watchdog-id | watchdog_id/auth_factories/urls.py | Python | mit | 582 |
# as_nrf_stream.py uasyncio stream interface for nRF24l01 radio
# (C) Peter Hinch 2020
# Released under the MIT licence
import io
import ustruct
import uasyncio as asyncio
from time import ticks_ms, ticks_diff
from micropython import const
from nrf24l01 import NRF24L01
__version__ = (0, 1, 0)
# I/O interface
MP_STREAM_POLL_RD = const(1)
MP_STREAM_POLL_WR = const(4)
MP_STREAM_POLL = const(3)
MP_STREAM_ERROR = const(-1)
# Command bits. Notionally LS 4 bits are command, upper 4 status
MSG = const(0) # Normal packet. May carry data.
ACK = const(1) # Acknowledge. May carry data.
PWR = const(0x40) # Node has powered up: peer clears rxq.
PID = const(0x80) # 1-bit PID.
CMDMASK = const(0x0f) # LS bits is cmd
# Timing
SEND_DELAY = const(10) # Transmit delay (give remote time to turn round)
# Optional statistics
S_RX_TIMEOUTS = 0
S_TX_TIMEOUTS = 1
S_RX_ALL = 2
S_RX_DATA = 3
# Packet class creates a fixed-size 32-byte nRF24l01 packet from the tx queue
class Packet:
def __init__(self):
self._fmt = 'BB30s' # Format is cmd nbytes data
class TxPacket(Packet):
def __init__(self):
super().__init__()
self._buf = bytearray(32)
self._pid = 0
self._len = 0
self._ploads = 0 # No. of payloads sent
# Update command byte prior to transmit. Send PWR bit until 2nd update: by
# then we must have had an ACK from 1st payload.
def __call__(self, txcmd):
self._buf[0] = txcmd | self._pid if self else txcmd
# 1st packet has PWR bit set so RX clears down rxq.
if self._ploads < 2: # Stop with 2nd payload.
self._buf[0] |= PWR
return self._buf
# Update the buffer with data from the tx queue. Return the new reduced
# queue instance.
def update(self, txq):
txd = txq[:30] # Get current data for tx up to interface maximum
self._len = len(txd)
if self: # Has payload
self._pid ^= PID
ustruct.pack_into(self._fmt, self._buf, 0, 0, self._len, txd)
if self._ploads < 2:
self._ploads += 1 # Payloads sent.
return txq[30:]
def __bool__(self): # True if packet has payload
return self._len > 0
class RxPacket(Packet):
def __init__(self):
super().__init__()
self._pid = None # PID from last data packet
def __call__(self, data): # Split a raw 32 byte packet into fields
rxcmd, nbytes, d = ustruct.unpack(self._fmt, data)
cmd = rxcmd & CMDMASK # Split rxcmd byte
rxpid = rxcmd & PID
pwr = bool(rxcmd & PWR) # Peer has power cycled.
dupe = False # Assume success
if nbytes: # Dupe detection only relevant to a data payload
if (self._pid is None) or (rxpid != self._pid):
# 1st packet or new PID received. Not a dupe.
self._pid = rxpid # Save PID to check next packet
else:
dupe = True
return d[:nbytes], cmd, dupe, pwr
# Base class for Master and Slave
class AS_NRF24L01(io.IOBase):
pipes = (b'\xf0\xf0\xf0\xf7\xe1', b'\xf0\xf0\xf0\xf7\xd2')
def __init__(self, config):
master = int(isinstance(self, Master))
# Support gathering statistics. Delay until protocol running.
self._is_running = False
if config.stats:
self._stats = [0, 0, 0, 0]
self._do_stats = self._stat_update
else:
self._stats = None
self._do_stats = lambda _ : None
self._tx_ms = config.tx_ms # Max time master or slave can transmit
radio = NRF24L01(config.spi, config.csn, config.ce, config.channel, 32)
radio.open_tx_pipe(self.pipes[master ^ 1])
radio.open_rx_pipe(1, self.pipes[master])
self._radio = radio
self._txq = b'' # Transmit and receive queues
self._rxq = b''
self._txpkt = TxPacket()
self._rxpkt = RxPacket()
self._tlast = ticks_ms() # Time of last communication
self._txbusy = False # Don't call ._radio.any() while sending.
# **** uasyncio stream interface ****
def ioctl(self, req, arg):
ret = MP_STREAM_ERROR
if req == MP_STREAM_POLL:
ret = 0
if arg & MP_STREAM_POLL_RD:
if not self._txbusy and (self._radio.any() or self._rxq):
ret |= MP_STREAM_POLL_RD
if arg & MP_STREAM_POLL_WR:
if not self._txq:
ret |= MP_STREAM_POLL_WR
return ret
# .write is called by drain - ioctl postpones until .txq is empty
def write(self, buf):
self._txq = bytes(buf) # Arg is a memoryview
return len(buf) # Assume eventual success.
# Return a maximum of one line; ioctl postpones until .rxq is not
def readline(self): # empty or if radio has a packet to read
if self._radio.any():
self._process_packet() # Update ._rxq
n = self._rxq.find(b'\n') + 1
if not n: # Leave incomplete line on queue.
return b''
res = self._rxq[:n] # Return 1st line on queue
self._rxq = self._rxq[n:]
return res
def read(self, n):
if self._radio.any():
self._process_packet()
res = self._rxq[:n]
self._rxq = self._rxq[n:]
return res
# **** private methods ****
# Control radio tx/rx
def _listen(self, val):
if val:
self._radio.start_listening() # Turn off tx
self._txbusy = False
else:
self._txbusy = True # Prevent calls to ._process_packet
self._radio.stop_listening()
# Send a 32 byte buffer subject to a timeout. The value returned by
# .send_done does not reliably distinguish success from failure.
# Consequently ._send makes no attempt to distinguish success, fail and
# timeout. This is handled by the protocol.
async def _send(self, buf):
self._listen(False)
await asyncio.sleep_ms(SEND_DELAY) # Give remote time to start listening
t = ticks_ms()
self._radio.send_start(buf) # Initiate tx
while self._radio.send_done() is None: # tx in progress
if ticks_diff(ticks_ms(), t) > self._tx_ms:
self._do_stats(S_TX_TIMEOUTS) # Optionally count instances
break
await asyncio.sleep_ms(0) # Await completion, timeout or failure
self._listen(True) # Turn off tx
# Update an individual statistic
def _stat_update(self, idx):
if self._stats is not None and self._is_running:
self._stats[idx] += 1
# **** API ****
def t_last_ms(self): # Return the time (in ms) since last communication
return ticks_diff(ticks_ms(), self._tlast)
def stats(self):
return self._stats
# Master sends one ACK. If slave doesn't receive the ACK it retransmits same data.
# Master discards it as a dupe and sends another ACK.
class Master(AS_NRF24L01):
def __init__(self, config):
from uasyncio import Event
super().__init__(config)
self._txcmd = MSG
self._pkt_rec = Event()
asyncio.create_task(self._run())
async def _run(self):
# Await incoming for 1.5x max slave transmit time
rx_time = int(SEND_DELAY + 1.5 * self._tx_ms) / 1000 # Seem to have lost wait_for_ms
while True:
self._pkt_rec.clear()
await self._send(self._txpkt(self._txcmd))
# Default command for next packet may be changed by ._process_packet
self._txcmd = MSG
try:
await asyncio.wait_for(self._pkt_rec.wait(), rx_time)
except asyncio.TimeoutError:
self._do_stats(S_RX_TIMEOUTS) # Loop again to retransmit pkt.
else: # Pkt was received so last was acknowledged. Create the next one.
self._txq = self._txpkt.update(self._txq)
self._is_running = True # Start gathering stats now
# A packet is ready. Any response implies an ACK: slave never transmits
# unsolicited messages
def _process_packet(self):
rxdata, _, dupe, pwrup = self._rxpkt(self._radio.recv())
if pwrup: # Slave has had a power outage
self._rxq = b''
self._tlast = ticks_ms() # User outage detection
self._pkt_rec.set()
if rxdata: # Packet has data. ACK even if a dupe.
self._do_stats(S_RX_ALL) # Optionally count instances
self._txcmd = ACK
if not dupe: # Add new packets to receive queue
self._do_stats(S_RX_DATA)
self._rxq = b''.join((self._rxq, rxdata))
class Slave(AS_NRF24L01):
def __init__(self, config):
super().__init__(config)
self._listen(True)
self._is_running = True # Start gathering stats immediately
def _process_packet(self):
rxdata, rxcmd, dupe, pwrup = self._rxpkt(self._radio.recv())
if pwrup: # Master has had a power outage
self._rxq = b''
self._tlast = ticks_ms()
if rxdata:
self._do_stats(S_RX_ALL) # Optionally count instances
if not dupe: # New data received.
self._do_stats(S_RX_DATA)
self._rxq = b''.join((self._rxq, rxdata))
# If last packet was empty or was acknowledged, get next one.
if (rxcmd == ACK) or not self._txpkt:
self._txq = self._txpkt.update(self._txq) # Replace txq
asyncio.create_task(self._send(self._txpkt(MSG)))
# Issues start_listening when done.
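# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original driver). It assumes a
# MicroPython board; the SPI bus, pin names and channel below are placeholders
# standing in for whatever configuration object the accompanying demo code
# provides. Adapt them to your wiring before running this on hardware.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    from machine import SPI, Pin

    class _Config:
        def __init__(self):
            self.spi = SPI(1, baudrate=4000000)     # placeholder bus
            self.csn = Pin('X5', Pin.OUT, value=1)  # placeholder pins
            self.ce = Pin('X4', Pin.OUT, value=0)
            self.channel = 97
            self.tx_ms = 200
            self.stats = False

    async def _echo_forever():
        device = Slave(_Config())  # use Master(_Config()) on the peer node
        sreader = asyncio.StreamReader(device)
        swriter = asyncio.StreamWriter(device, {})
        while True:
            line = await sreader.readline()  # b'' until a full line arrives
            if line:
                swriter.write(line)          # echo the line back to the peer
                await swriter.drain()
            await asyncio.sleep_ms(50)

    asyncio.run(_echo_forever())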
| peterhinch/micropython-radio | async/as_nrf_stream.py | Python | mit | 9,620 |
from lomnikol import *
import matplotlib.pyplot as plt
import statistics as st
#
# dif = []
# diferr = []
# with open('Sheet1.dat') as file:
# for line in file:
# temp = line.strip().split('\t')
# dif.append(float(temp[0]))
# diferr.append(float(temp[1]))
#
# temperatura = [24.7, 24.7, 24.7, 24.8, 24.8, 24.8, 24.9, 24.9, 24.9, 23.2, 23.3, 23.7, 24.1, 24.3, 23.3, 23.1, 23.1, 24.5, 24.5]
# # temperatura.append(float(input('Enter the temperature of the last measurement (use a decimal point!): ')))
#
#
#
# skupno = []
# for i in range(len(temperatura)):
# skupno.append([dif[i], diferr[i], temperatura[i]])
#
# #print(skupno)
#
#
# f = open('normalizirano.txt', 'w')
# for i in range(len(temperatura)):
# a = skupno[i][0]*((skupno[i][2]+273)/298)*(visk(skupno[i][2])/visk(25))
# aer = skupno[i][1]*((skupno[i][2]+273)/298)*(visk(skupno[i][2])/visk(25))
#
# f.write(str(a) + '\t' + str(aer) + '\n')
#
# f.close()
temperatura = []
with open('Book1.txt', 'r') as file:
for line in file:
temperatura.append(float(line.split('\t')[0]))
# print(temperatura)
D = []
q = q2(70)
visk26 = visk(26)
err = []
with open('fji.txt', 'r') as file:
for line in file:
try:
# print(line)
D.append(float(line.split()[0])/q)
err.append(float(line.split()[1])/float(line.split()[0]))
except IndexError:
            print('There are more lines than measurements!')
pass
Dnorm = []
for i in range(len(temperatura)):
temp = D[i]*(299/(temperatura[i]+273))*(visk(temperatura[i])/visk26)*1000
Dnorm.append(temp)
print(Dnorm)
try:
for i in range(len(Dnorm)):
err[i]=Dnorm[i]*err[i]
except IndexError:
    print('Something is too long or too short')
pass
# plt.plot(temperatura, Dnorm)
Dpovp = st.mean(Dnorm)
Ddev = st.stdev(Dnorm)
fig, axs = plt.subplots(nrows=2, ncols=1, sharex=True)
ax = axs[0]
ax.errorbar(temperatura, Dnorm, xerr=None, yerr=err[:59])
ax.set_ylim(0,9e-12)
tex = '$\\overline{}={:.4e}$'.format('{D}', Dpovp) + '\n' + '$\sigma = {:.4e}$'.format(Ddev) + '\n' + \
'$\\overline{}={:.4f}$'.format('{D}', Dpovp*10e11) + '$(1\\pm{:.4f})e-12$'.format( Ddev/Dpovp)
props = dict(boxstyle='round', facecolor='wheat', alpha=0.5)
ax.text(0.58, 0.40, tex, transform=ax.transAxes, fontsize=14,
verticalalignment='top', bbox=props)
ax.set_title('D normalized')
ax = axs[1]
ax.errorbar(temperatura, Dnorm, xerr=None, yerr=err[:59])
ax.set_title('D normalized, zoomed in')
plt.show()
| Ossada/DLS-UVVis | module2.py | Python | mit | 2,711 |