code | repo_name | path | language | license | size
---|---|---|---|---|---|
'''
Displays volume on Adafruit NeoPixel FeatherWing
'''
import gc
from time import sleep, time
import network
from config import hosts, ssid, pw
from umqtt_client import MQTTClient as umc
from machine import Pin
from neopixel import NeoPixel
with open('mqtt_id', 'r') as f:
mqtt_id = f.read().strip()
with open('location', 'r') as f:
loc = f.read().strip()
host = hosts[loc]
print("NewPixel Volume script")
print("mqtt_id =", mqtt_id)
print("location =", loc)
print("host =", host)
np = NeoPixel(Pin(13, Pin.OUT), 32)
def clear():
for i in range(32):
np[i] = (0, 0, 0)
np.write()
c = umc(mqtt_id, host, 1883)
def run():
wlan = network.WLAN(network.STA_IF)
wlan.active(True)
if not wlan.isconnected():
print('connecting to network...')
wlan.connect(ssid, pw)
while not wlan.isconnected():
pass
print('network config:', wlan.ifconfig())
c.connect()
c.subscribe('sonos/{}/volume'.format(loc))
sleep(2)
cur_time = time()
cur_volume = 0
bb = True
while 1:
z = c.check_msg()
if z:
print(z)
if isinstance(z, int):
print("returned a integer")
# blink one neopixel (index 31) below to show that we're still connected
if bb:
np[31] = (50,50,50)
else:
np[31] = (0,0,0)
bb = not bb
np.write()
continue
topic, volume = z
print("topic =", topic)
print("volume =", volume)
## update the neopixels to reflect the new volume
clear()
# volume maxes at 100
n = min(int(volume)//3, 32) # there are 32 neopixels; cap n so a volume of 99-100 can't index past the strip
for i in range(n):
np[i] = (50,0,0)#100
np.write()
t = time()
if t > cur_time + 30:
c.ping()
cur_time = t
gc.collect()
#print(gc.mem_free())
sleep(1)
#run()
| slzatz/esp8266 | neo_volume.py | Python | mit | 1,830 |
## Script (Python) "require_login"
##bind container=container
##bind context=context
##bind namespace=
##bind script=script
##bind subpath=traverse_subpath
##parameters=
##title=Login
##
login = 'login'
from Products.CMFCore.utils import getToolByName
import zope.i18nmessageid
from Products.CMFPlone.i18nl10n import utranslate
_ = zope.i18nmessageid.MessageFactory('opencore')
portal = context.portal_url.getPortalObject()
request = context.REQUEST
# if cookie crumbler did a traverse instead of a redirect,
# this would be the way to get the value of came_from
#url = portal.getCurrentUrl()
#context.REQUEST.set('came_from', url)
referer = request.environ.get('HTTP_REFERER')
if context.portal_membership.isAnonymousUser():
msg = _(u'psm_please_sign_in', u'Please sign in to continue.')
msg = utranslate('opencore', msg, context=context)
plone_utils = getToolByName(portal, 'plone_utils')
plone_utils.addPortalMessage(msg)
if referer is not None:
request.form['referer'] = referer
return portal.restrictedTraverse(login)()
else:
# We're already logged in.
if request.form.get('came_from'):
referer = referer or ''
if request.form['came_from'].split('?')[0] == referer.split('?')[0]:
# AFAICT, the HTTP referer and the came_from value are equal when
# Flunc (and possibly other clients) update the referer on
# every redirect, which causes an infinite redirect loop
# in our login code. Break the loop by redirecting
# somewhere innocuous. Firefox doesn't seem to have this
# problem, it sets the referer header to the URL before
# the one that caused a redirect.
#
# To accomplish this, we need to clobber the request vars
# that insufficient_privileges.cpy looks for.
# - pw/slinkp
request.form['came_from'] = ''
request.environ['HTTP_REFERER'] = portal.absolute_url()
return portal.restrictedTraverse('insufficient_privileges')()
| socialplanning/opencore | Products/OpenPlans/skins/openplans_login_patches/require_login.py | Python | gpl-3.0 | 2,059 |
# Copyright DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from decimal import Decimal
from datetime import datetime, date, time
from uuid import uuid1, uuid4
import six
from cassandra.util import OrderedMap, Date, Time, sortedset, Duration
from tests.integration import get_server_versions
PRIMITIVE_DATATYPES = sortedset([
'ascii',
'bigint',
'blob',
'boolean',
'decimal',
'double',
'float',
'inet',
'int',
'text',
'timestamp',
'timeuuid',
'uuid',
'varchar',
'varint',
])
PRIMITIVE_DATATYPES_KEYS = PRIMITIVE_DATATYPES.copy()
COLLECTION_TYPES = sortedset([
'list',
'set',
'map',
])
def update_datatypes():
_cass_version, _cql_version = get_server_versions()
if _cass_version >= (2, 1, 0):
COLLECTION_TYPES.add('tuple')
if _cass_version >= (2, 2, 0):
PRIMITIVE_DATATYPES.update(['date', 'time', 'smallint', 'tinyint'])
PRIMITIVE_DATATYPES_KEYS.update(['date', 'time', 'smallint', 'tinyint'])
if _cass_version >= (3, 10):
PRIMITIVE_DATATYPES.add('duration')
global SAMPLE_DATA
SAMPLE_DATA = get_sample_data()
def get_sample_data():
sample_data = {}
for datatype in PRIMITIVE_DATATYPES:
if datatype == 'ascii':
sample_data[datatype] = 'ascii'
elif datatype == 'bigint':
sample_data[datatype] = 2 ** 63 - 1
elif datatype == 'blob':
sample_data[datatype] = bytearray(b'hello world')
elif datatype == 'boolean':
sample_data[datatype] = True
elif datatype == 'decimal':
sample_data[datatype] = Decimal('12.3E+7')
elif datatype == 'double':
sample_data[datatype] = 1.23E+8
elif datatype == 'float':
sample_data[datatype] = 3.4028234663852886e+38
elif datatype == 'inet':
sample_data[datatype] = ('123.123.123.123', '2001:db8:85a3:8d3:1319:8a2e:370:7348')
if six.PY3:
import ipaddress
sample_data[datatype] += (ipaddress.IPv4Address("123.123.123.123"),
ipaddress.IPv6Address('2001:db8:85a3:8d3:1319:8a2e:370:7348'))
elif datatype == 'int':
sample_data[datatype] = 2147483647
elif datatype == 'text':
sample_data[datatype] = 'text'
elif datatype == 'timestamp':
sample_data[datatype] = datetime(2013, 12, 31, 23, 59, 59, 999000)
elif datatype == 'timeuuid':
sample_data[datatype] = uuid1()
elif datatype == 'uuid':
sample_data[datatype] = uuid4()
elif datatype == 'varchar':
sample_data[datatype] = 'varchar'
elif datatype == 'varint':
sample_data[datatype] = int(str(2147483647) + '000')
elif datatype == 'date':
sample_data[datatype] = Date(date(2015, 1, 15))
elif datatype == 'time':
sample_data[datatype] = Time(time(16, 47, 25, 7))
elif datatype == 'tinyint':
sample_data[datatype] = 123
elif datatype == 'smallint':
sample_data[datatype] = 32523
elif datatype == 'duration':
sample_data[datatype] = Duration(months=2, days=12, nanoseconds=21231)
else:
raise Exception("Missing handling of {0}".format(datatype))
return sample_data
SAMPLE_DATA = get_sample_data()
def get_sample(datatype):
"""
Helper method to access created sample data for primitive types
"""
if isinstance(SAMPLE_DATA[datatype], tuple):
return SAMPLE_DATA[datatype][0]
return SAMPLE_DATA[datatype]
def get_all_samples(datatype):
"""
Helper method to access all created sample values for a primitive type
"""
if isinstance(SAMPLE_DATA[datatype], tuple):
return SAMPLE_DATA[datatype]
return SAMPLE_DATA[datatype],
def get_collection_sample(collection_type, datatype):
"""
Helper method to access created sample data for collection types
"""
if collection_type == 'list':
return [get_sample(datatype), get_sample(datatype)]
elif collection_type == 'set':
return sortedset([get_sample(datatype)])
elif collection_type == 'map':
return OrderedMap([(get_sample(datatype), get_sample(datatype))])
elif collection_type == 'tuple':
return (get_sample(datatype),)
else:
raise Exception('Missing handling of non-primitive type {0}.'.format(collection_type))
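# Illustrative usage (values taken from get_sample above):
#   get_collection_sample('map', 'int')  -> OrderedMap([(2147483647, 2147483647)])
#   get_collection_sample('set', 'text') -> sortedset(['text'])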
| datastax/python-driver | tests/integration/datatype_utils.py | Python | apache-2.0 | 5,034 |
from django.conf import settings
from math import pow, trunc
def validate_ai_script(text):
"""
Verify the input AI script. Returns False if the script is empty or contains a disallowed keyword such as "import" or "exec"
:param text: String
:return: True/False
"""
if text.strip() == "":
return False
for line in text.splitlines():
if any(kw in line for kw in settings.NOT_ALLOWED_KW):
return False
return True
def calc_user_level(win, lose):
"""
Calculate the points and money gained by a user. If the winner's level equals the loser's, points = 10, money = 300.
If the winner's level is higher than the loser's, points = 4 and money = 200 when the gap is under 3 levels, otherwise points = 2 and money = 150.
If the winner's level is lower than the loser's, points = 10 + 7*dif and money = 300 + 50*dif, where dif is the level difference.
In other words, the winner gets a bonus for beating a higher-level opponent.
:param win: level of winner
:param lose: level of loser
:return: points, money
"""
if win < lose:
dif = lose - win
return (10 + 7 * dif), (300 + 50 * dif)
elif win == lose:
return 10, 300
else:
dif = win - lose
if dif < 3:
return 4, 200
return 2, 150
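# Worked example of the rules above: a level-3 winner over a level-5 loser gives dif = 2,
# so the winner receives 10 + 7*2 = 24 points and 300 + 50*2 = 400 money; a level-5 winner
# over a level-3 loser receives only 4 points and 200 money.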
def calcul_difference_elo_pts(win_points, lose_points):
diff = win_points - lose_points
print("win pts %s - lose pts %s" % (win_points, lose_points))
if abs(diff) > settings.ELO_PTS_MAX_DIFF and diff < 0:
return -settings.ELO_PTS_MAX_DIFF
elif abs(diff) > settings.ELO_PTS_MAX_DIFF and diff > 0:
return settings.ELO_PTS_MAX_DIFF
else:
return diff
def probability_award(D):
if D < 0: # launcher WIN
prob_win = 1 / (1 + pow(10, (-D / 400)))
prob_lose = 1 / (1 + pow(10, (D / 400)))
print("probability launcher win %s - %s" % (prob_win, prob_lose))
else: # launcher LOSE
prob_win = 1 / (1 + pow(10, (D / 400)))
prob_lose = 1 / (1 + pow(10, (-D / 400)))
print("probability launcher lose %s - %s" % (prob_win, prob_lose))
return prob_win, prob_lose
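# This is the standard Elo expected-score formula E = 1 / (1 + 10**(-D/400)).
# Illustrative values: D = 0 gives 0.5 for both sides; a 400-point advantage gives the
# stronger player an expected score of roughly 0.91.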
def calcul_coefficient_K(player):
print("Début: ", player.points, player.coeff_K)
if player.nb_games <= 30:
player.nb_games += 1
elif player.points < 2400:
if player.coeff_K != 20:
player.coeff_K = 20
player.nb_games += 1
else:
if player.coeff_K != 10:
player.coeff_K = 10
player.nb_games += 1
player.save()
print("Fin: ", player.points, player.coeff_K)
return player.coeff_K
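# Note: these tiers appear to mirror the FIDE-style K-factor scheme (a provisional period
# for the first 30 games, K = 20 below 2400 points, K = 10 at or above 2400).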
def award_battle(winner, loser, mode):
"""
Increase the players' points, money, level, and tank HP if possible
:param winner: UserProfile of winner
:param loser: UserProfile of loser
:param mode: yes [VERSUS] - no [CHAMPIONNAT]
:return: void
"""
w_points, w_money = calc_user_level(winner.level, loser.level)
if mode == 'no':
winner.points += w_points
if winner.points >= winner.next_level_exp:
winner.level += 1
winner.calc_next_level_exp()
tank = winner.get_tank()
# tank.hp_value += 10
tank.save()
winner.money += w_money
winner.save()
if mode == 'no':
loser.points += 2
if loser.points >= loser.next_level_exp:
loser.level += 1
loser.calc_next_level_exp()
tank = loser.get_tank()
# tank.hp_value += 10
tank.save()
loser.money += 100
loser.save()
def award_battle_elo(winner, loser, mode):
"""
Increase the players' points (based on ELO)
:param winner: UserProfile of winner
:param loser: UserProfile of loser
:param mode: yes [ENTRAINEMENT] - no [CHAMPIONNAT]
:return: void
"""
if mode == 'no':
difference = calcul_difference_elo_pts(winner.points, loser.points)
print("ELO points différence: %s" % difference)
p_D_win, p_D_lose = probability_award(difference)
print("Probability win %s - lose %s" % (p_D_win, p_D_lose))
player_K_win = calcul_coefficient_K(winner)
player_K_lose = calcul_coefficient_K(loser)
print("Coefficient K win %s - lose %s" % (player_K_win, player_K_lose))
new_pts_win = trunc(winner.points + player_K_win * (settings.ELO_PTS_AWARD_WIN - p_D_win))
new_pts_lose = trunc(loser.points + player_K_lose * (settings.ELO_PTS_AWARD_LOSE - p_D_lose))
if new_pts_lose < 0:
# new_pts_lose = loser.points
new_pts_lose = loser.points + 10*(player_K_win * (settings.ELO_PTS_AWARD_WIN - p_D_win))/100
print("New points win %s (%s) - lose %s (%s)" % (new_pts_win, player_K_win * (settings.ELO_PTS_AWARD_WIN - p_D_win), new_pts_lose, player_K_lose * (settings.ELO_PTS_AWARD_LOSE - p_D_lose)))
winner.points = new_pts_win
loser.points = new_pts_lose
winner.save()
loser.save()
| Petrole/MaturePyRobots | WebPyRobot/backend/utils.py | Python | gpl-3.0 | 4,839 |
'''
Created on Mar 28, 2015
Copyright 2015, Institute for Systems Biology.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@author: michael
'''
import logging
import urllib
import util
def process_metadata_current(config, run_dir, log_name):
"""
return type:
barcode2term2value: for each sample barcode, finds the AliquotUUID and CENTER_CODE values and stores CENTER_CODE as the DataCenterCode field
"""
log = logging.getLogger(log_name)
log.info('start processing metadata.current.txt')
barcode2term2value = {}
metadataURL = config['downloads']['metadata_current']
try:
# metadata = util.getURLData(metadataURL, 'metadata.current.txt', log)
metadata = urllib.urlopen(metadataURL)
contents = metadata.read()
lines = contents.split('\n')
util.post_run_file(run_dir, 'metadata.current.txt', contents)
except Exception as e:
log.exception('problem fetching metadata.current.txt')
if 'test' == config['mode']:
metadata = open('metadata.current.txt')
lines = metadata.read()
lines = lines.split('\n')
log.warning('using local copy for testing purposes')
else:
raise e
try:
column2term = config['metadata_locations']['metadata.current.txt']
headers = lines[0].split('\t')
column2index = {}
for column in column2term:
column2index[column] = headers.index(column)
except Exception as e:
log.exception('problem parsing metadata.current.txt header: %s' % (headers))
raise e
try:
for line in lines[1:]:
if not line:
continue
fields = line.split('\t')
term2value = {}
for column, term in column2term.iteritems():
term2value[term] = fields[column2index[column]]
barcode2term2value[fields[1]] = term2value
except Exception as e:
log.exception('problem parsing metadata.current.txt: %s' % (line))
raise e
log.info('finished processing metadata.current.txt')
return barcode2term2value
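# Shape of the returned structure (illustrative):
#   { '<sample barcode>': { '<term>': '<value>', ... }, ... }
# where the terms come from config['metadata_locations']['metadata.current.txt'].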
| isb-cgc/ISB-CGC-data-proc | data_upload/util/process_metadata_current.py | Python | apache-2.0 | 2,703 |
'''
@author: Frank
'''
import unittest
from zstackwoodpecker.engine import engine
class Test(unittest.TestCase):
def testName(self):
logfd = open('/tmp/log', 'w')
engine.execute_case('test/testcase2.py', logfd)
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main() | zstackio/zstack-woodpecker | zstackwoodpecker/test/test_engine.py | Python | apache-2.0 | 351 |
# Copyright 2015 Lukas Lalinsky
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import time
import datetime
from decimal import Decimal
from phoenixdb.avatica.proto import common_pb2
__all__ = [
'Date', 'Time', 'Timestamp', 'DateFromTicks', 'TimeFromTicks', 'TimestampFromTicks',
'Binary', 'STRING', 'BINARY', 'NUMBER', 'DATETIME', 'ROWID', 'BOOLEAN',
'JAVA_CLASSES', 'JAVA_CLASSES_MAP', 'TypeHelper',
]
def Date(year, month, day):
"""Constructs an object holding a date value."""
return datetime.date(year, month, day)
def Time(hour, minute, second):
"""Constructs an object holding a time value."""
return datetime.time(hour, minute, second)
def Timestamp(year, month, day, hour, minute, second):
"""Constructs an object holding a datetime/timestamp value."""
return datetime.datetime(year, month, day, hour, minute, second)
def DateFromTicks(ticks):
"""Constructs an object holding a date value from the given UNIX timestamp."""
return Date(*time.localtime(ticks)[:3])
def TimeFromTicks(ticks):
"""Constructs an object holding a time value from the given UNIX timestamp."""
return Time(*time.localtime(ticks)[3:6])
def TimestampFromTicks(ticks):
"""Constructs an object holding a datetime/timestamp value from the given UNIX timestamp."""
return Timestamp(*time.localtime(ticks)[:6])
def Binary(value):
"""Constructs an object capable of holding a binary (long) string value."""
return bytes(value)
def time_from_java_sql_time(n):
dt = datetime.datetime(1970, 1, 1) + datetime.timedelta(milliseconds=n)
return dt.time()
def time_to_java_sql_time(t):
return ((t.hour * 60 + t.minute) * 60 + t.second) * 1000 + t.microsecond // 1000
def date_from_java_sql_date(n):
return datetime.date(1970, 1, 1) + datetime.timedelta(days=n)
def date_to_java_sql_date(d):
if isinstance(d, datetime.datetime):
d = d.date()
td = d - datetime.date(1970, 1, 1)
return td.days
def datetime_from_java_sql_timestamp(n):
return datetime.datetime(1970, 1, 1) + datetime.timedelta(milliseconds=n)
def datetime_to_java_sql_timestamp(d):
td = d - datetime.datetime(1970, 1, 1)
return td.microseconds // 1000 + (td.seconds + td.days * 24 * 3600) * 1000
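# The helpers above convert between Python date/time objects and the integer offsets used by the
# corresponding java.sql types (milliseconds since midnight, days or milliseconds since the
# 1970-01-01 epoch), e.g. date_to_java_sql_date(datetime.date(1970, 1, 2)) == 1.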
class ColumnType(object):
def __init__(self, eq_types):
self.eq_types = tuple(eq_types)
self.eq_types_set = set(eq_types)
def __eq__(self, other):
return other in self.eq_types_set
def __cmp__(self, other):
if other in self.eq_types_set:
return 0
if other < self.eq_types:
return 1
else:
return -1
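# Note: __cmp__ is only consulted by Python 2; on Python 3 equality checks go through __eq__
# above, so ordering comparisons against a ColumnType are a Python 2-only feature.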
STRING = ColumnType(['VARCHAR', 'CHAR'])
"""Type object that can be used to describe string-based columns."""
BINARY = ColumnType(['BINARY', 'VARBINARY'])
"""Type object that can be used to describe (long) binary columns."""
NUMBER = ColumnType([
'INTEGER', 'UNSIGNED_INT', 'BIGINT', 'UNSIGNED_LONG', 'TINYINT', 'UNSIGNED_TINYINT',
'SMALLINT', 'UNSIGNED_SMALLINT', 'FLOAT', 'UNSIGNED_FLOAT', 'DOUBLE', 'UNSIGNED_DOUBLE', 'DECIMAL'
])
"""Type object that can be used to describe numeric columns."""
DATETIME = ColumnType(['TIME', 'DATE', 'TIMESTAMP', 'UNSIGNED_TIME', 'UNSIGNED_DATE', 'UNSIGNED_TIMESTAMP'])
"""Type object that can be used to describe date/time columns."""
ROWID = ColumnType([])
"""Only implemented for DB API 2.0 compatibility, not used."""
BOOLEAN = ColumnType(['BOOLEAN'])
"""Type object that can be used to describe boolean columns. This is a phoenixdb-specific extension."""
# XXX ARRAY
if sys.version_info[0] < 3:
_long = long # noqa: F821
else:
_long = int
JAVA_CLASSES = {
'bool_value': [
('java.lang.Boolean', common_pb2.BOOLEAN, None, None),
],
'string_value': [
('java.lang.Character', common_pb2.CHARACTER, None, None),
('java.lang.String', common_pb2.STRING, None, None),
('java.math.BigDecimal', common_pb2.BIG_DECIMAL, str, Decimal),
],
'number_value': [
('java.lang.Integer', common_pb2.INTEGER, None, int),
('java.lang.Short', common_pb2.SHORT, None, int),
('java.lang.Long', common_pb2.LONG, None, _long),
('java.lang.Byte', common_pb2.BYTE, None, int),
('java.sql.Time', common_pb2.JAVA_SQL_TIME, time_to_java_sql_time, time_from_java_sql_time),
('java.sql.Date', common_pb2.JAVA_SQL_DATE, date_to_java_sql_date, date_from_java_sql_date),
('java.sql.Timestamp', common_pb2.JAVA_SQL_TIMESTAMP, datetime_to_java_sql_timestamp, datetime_from_java_sql_timestamp),
],
'bytes_value': [
('[B', common_pb2.BYTE_STRING, Binary, None),
],
'double_value': [
# if common_pb2.FLOAT is used, incorrect values are sent
('java.lang.Float', common_pb2.DOUBLE, float, float),
('java.lang.Double', common_pb2.DOUBLE, float, float),
]
}
"""Groups of Java classes."""
JAVA_CLASSES_MAP = dict((v[0], (k, v[1], v[2], v[3])) for k in JAVA_CLASSES for v in JAVA_CLASSES[k])
"""Flips the available types to allow for faster lookup by Java class.
This mapping should be structured as:
{
'java.math.BigDecimal': ('string_value', common_pb2.BIG_DECIMAL, str, Decimal),),
...
'<java class>': (<field_name>, <Rep enum>, <mutate_to function>, <cast_from function>),
}
"""
class TypeHelper(object):
@staticmethod
def from_class(klass):
"""Retrieves a Rep and functions to cast to/from based on the Java class.
:param klass:
The string of the Java class for the column or parameter.
:returns: tuple ``(field_name, rep, mutate_to, cast_from)``
WHERE
``field_name`` is the attribute in ``common_pb2.TypedValue``
``rep`` is the common_pb2.Rep enum
``mutate_to`` is the function to cast values into Phoenix values, if any
``cast_from`` is the function to cast from the Phoenix value to the Python value, if any
:raises:
NotImplementedError
"""
if klass not in JAVA_CLASSES_MAP:
raise NotImplementedError('type {} is not supported'.format(klass))
return JAVA_CLASSES_MAP[klass]
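# Illustrative lookup (values taken from JAVA_CLASSES above):
#   TypeHelper.from_class('java.math.BigDecimal')
#   -> ('string_value', common_pb2.BIG_DECIMAL, str, Decimal)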
| lalinsky/python-phoenixdb | phoenixdb/types.py | Python | apache-2.0 | 6,740 |
#
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# BitBake Toaster Implementation
#
# Copyright (C) 2013 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import operator
from django.db.models import Q
from django.shortcuts import render
from orm.models import Build, Target, Task, Layer, Layer_Version, Recipe, LogMessage, Variable
from orm.models import Task_Dependency, Recipe_Dependency, Package, Package_File, Package_Dependency
from orm.models import Target_Installed_Package, VariableHistory, Target_Image_File, Target_File
from django.views.decorators.cache import cache_control
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
def _build_page_range(paginator, index = 1):
try:
page = paginator.page(index)
except PageNotAnInteger:
page = paginator.page(1)
except EmptyPage:
page = paginator.page(paginator.num_pages)
page.page_range = [page.number]
crt_range = 0
for i in range(1,5):
if (page.number + i) <= paginator.num_pages:
page.page_range = page.page_range + [ page.number + i]
crt_range +=1
if (page.number - i) > 0:
page.page_range = [page.number -i] + page.page_range
crt_range +=1
if crt_range == 4:
break
return page
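# The computed page.page_range is a window of up to five page numbers centred on the current
# page (the current page plus up to four neighbours, drawn from whichever sides still have pages).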
@cache_control(no_store=True)
def build(request):
template = 'simple_build.html'
logs = LogMessage.objects.all()
build_info = _build_page_range(Paginator(Build.objects.order_by("-id"), 10),request.GET.get('page', 1))
context = {'objects': build_info, 'logs': logs ,
'hideshowcols' : [
{'name': 'Output', 'order':10},
{'name': 'Log', 'order':11},
]}
return render(request, template, context)
def _find_task_revdep(task):
tp = []
for p in Task_Dependency.objects.filter(depends_on=task):
tp.append(p.task)
return tp
def _find_task_provider(task):
task_revdeps = _find_task_revdep(task)
for tr in task_revdeps:
if tr.outcome != Task.OUTCOME_COVERED:
return tr
for tr in task_revdeps:
trc = _find_task_provider(tr)
if trc is not None:
return trc
return None
def task(request, build_id):
template = 'simple_task.html'
tasks = _build_page_range(Paginator(Task.objects.filter(build=build_id, order__gt=0), 100),request.GET.get('page', 1))
for t in tasks:
if t.outcome == Task.OUTCOME_COVERED:
t.provider = _find_task_provider(t)
context = {'build': Build.objects.filter(pk=build_id)[0], 'objects': tasks}
return render(request, template, context)
def configuration(request, build_id):
template = 'simple_configuration.html'
variables = _build_page_range(Paginator(Variable.objects.filter(build=build_id), 50), request.GET.get('page', 1))
context = {'build': Build.objects.filter(pk=build_id)[0], 'objects' : variables}
return render(request, template, context)
def bpackage(request, build_id):
template = 'simple_bpackage.html'
packages = Package.objects.filter(build = build_id)
context = {'build': Build.objects.filter(pk=build_id)[0], 'objects' : packages}
return render(request, template, context)
def bfile(request, build_id, package_id):
template = 'simple_bfile.html'
files = Package_File.objects.filter(package = package_id)
context = {'build': Build.objects.filter(pk=build_id)[0], 'objects' : files}
return render(request, template, context)
def tpackage(request, build_id, target_id):
template = 'simple_package.html'
packages = map(lambda x: x.package, list(Target_Installed_Package.objects.filter(target=target_id)))
context = {'build': Build.objects.filter(pk=build_id)[0], 'objects' : packages}
return render(request, template, context)
def layer(request):
template = 'simple_layer.html'
layer_info = Layer.objects.all()
for li in layer_info:
li.versions = Layer_Version.objects.filter(layer = li)
for liv in li.versions:
liv.count = Recipe.objects.filter(layer_version__id = liv.id).count()
context = {'objects': layer_info}
return render(request, template, context)
def layer_versions_recipes(request, layerversion_id):
template = 'simple_recipe.html'
recipes = Recipe.objects.filter(layer_version__id = layerversion_id)
context = {'objects': recipes,
'layer_version' : Layer_Version.objects.filter( id = layerversion_id )[0]
}
return render(request, template, context)
#### API
import json
from django.core import serializers
from django.http import HttpResponse, HttpResponseBadRequest
def model_explorer(request, model_name):
DESCENDING = 'desc'
response_data = {}
model_mapping = {
'build': Build,
'target': Target,
'target_file': Target_File,
'target_image_file': Target_Image_File,
'task': Task,
'task_dependency': Task_Dependency,
'package': Package,
'layer': Layer,
'layerversion': Layer_Version,
'recipe': Recipe,
'recipe_dependency': Recipe_Dependency,
'package_dependency': Package_Dependency,
'target_installed_package': Target_Installed_Package,
'build_file': Package_File,
'variable': Variable,
'variablehistory': VariableHistory,
'logmessage': LogMessage,
}
if model_name not in model_mapping.keys():
return HttpResponseBadRequest()
model = model_mapping[model_name]
try:
limit = int(request.GET.get('limit', 0))
except ValueError:
limit = 0
try:
offset = int(request.GET.get('offset', 0))
except ValueError:
offset = 0
ordering_string, invalid = _validate_input(request.GET.get('orderby', ''),
model)
if invalid:
return HttpResponseBadRequest()
filter_string, invalid = _validate_input(request.GET.get('filter', ''),
model)
if invalid:
return HttpResponseBadRequest()
search_term = request.GET.get('search', '')
if filter_string:
filter_terms = _get_filtering_terms(filter_string)
try:
queryset = model.objects.filter(**filter_terms)
except ValueError:
queryset = []
else:
queryset = model.objects.all()
if search_term:
queryset = _get_search_results(search_term, queryset, model)
if ordering_string and queryset:
column, order = ordering_string.split(':')
if order.lower() == DESCENDING:
queryset = queryset.order_by('-' + column)
else:
queryset = queryset.order_by(column)
if offset and limit:
queryset = queryset[offset:(offset+limit)]
elif offset:
queryset = queryset[offset:]
elif limit:
queryset = queryset[:limit]
if queryset:
response_data['count'] = queryset.count()
else:
response_data['count'] = 0
response_data['list'] = serializers.serialize('json', queryset)
return HttpResponse(json.dumps(response_data),
content_type='application/json')
def _get_filtering_terms(filter_string):
search_terms = filter_string.split(":")
keys = search_terms[0].split(',')
values = search_terms[1].split(',')
return dict(zip(keys, values))
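# Illustrative filter string (hypothetical field names): "outcome,order:2,1" is parsed into
# {'outcome': '2', 'order': '1'} and then passed to model.objects.filter(**filter_terms) above.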
def _validate_input(input, model):
invalid = 0
if input:
input_list = input.split(":")
# Check we have only one colon
if len(input_list) != 2:
invalid = 1
return None, invalid
# Check we have an equal number of terms both sides of the colon
if len(input_list[0].split(',')) != len(input_list[1].split(',')):
invalid = 1
return None, invalid
# Check we are looking for a valid field
valid_fields = model._meta.get_all_field_names()
for field in input_list[0].split(','):
if field not in valid_fields:
invalid = 1
return None, invalid
return input, invalid
def _get_search_results(search_term, queryset, model):
search_objects = []
for st in search_term.split(" "):
q_map = map(lambda x: Q(**{x+'__icontains': st}),
model.search_allowed_fields)
search_objects.append(reduce(operator.or_, q_map))
search_object = reduce(operator.and_, search_objects)
queryset = queryset.filter(search_object)
return queryset
| wwright2/dcim3-angstrom1 | sources/bitbake/lib/toaster/bldviewer/views.py | Python | mit | 9,287 |
#IMPORTANT: pydevd_constants must be the 1st thing defined because it'll keep a reference to the original sys._getframe
from pydevd_constants import * #@UnusedWildImport
import pydev_imports
from pydevd_comm import CMD_CHANGE_VARIABLE, \
CMD_EVALUATE_EXPRESSION, \
CMD_EXEC_EXPRESSION, \
CMD_GET_COMPLETIONS, \
CMD_GET_FRAME, \
CMD_SET_PY_EXCEPTION, \
CMD_GET_VARIABLE, \
CMD_LIST_THREADS, \
CMD_REMOVE_BREAK, \
CMD_RUN, \
CMD_SET_BREAK, \
CMD_SET_NEXT_STATEMENT,\
CMD_STEP_INTO, \
CMD_STEP_OVER, \
CMD_STEP_RETURN, \
CMD_THREAD_CREATE, \
CMD_THREAD_KILL, \
CMD_THREAD_RUN, \
CMD_THREAD_SUSPEND, \
CMD_RUN_TO_LINE, \
CMD_RELOAD_CODE, \
CMD_VERSION, \
CMD_GET_FILE_CONTENTS, \
GetGlobalDebugger, \
InternalChangeVariable, \
InternalGetCompletions, \
InternalEvaluateExpression, \
InternalGetFrame, \
InternalGetVariable, \
InternalTerminateThread, \
InternalRunThread, \
InternalStepThread, \
NetCommand, \
NetCommandFactory, \
PyDBDaemonThread, \
PydevQueue, \
ReaderThread, \
SetGlobalDebugger, \
WriterThread, \
PydevdFindThreadById, \
PydevdLog, \
StartClient, \
StartServer, \
InternalSetNextStatementThread
from pydevd_file_utils import NormFileToServer, GetFilenameAndBase
import pydevd_import_class
import pydevd_vars
import traceback
import pydevd_vm_type
import pydevd_tracing
import pydevd_io
from pydevd_additional_thread_info import PyDBAdditionalThreadInfo
import time
threadingEnumerate = threading.enumerate
threadingCurrentThread = threading.currentThread
DONT_TRACE = {
#commonly used things from the stdlib that we don't want to trace
'threading.py':1,
'Queue.py':1,
'socket.py':1,
#things from pydev that we don't want to trace
'pydevd_additional_thread_info.py':1,
'pydevd_comm.py':1,
'pydevd_constants.py':1,
'pydevd_file_utils.py':1,
'pydevd_frame.py':1,
'pydevd_io.py':1 ,
'pydevd_resolver.py':1 ,
'pydevd_tracing.py':1 ,
'pydevd_vars.py':1,
'pydevd_vm_type.py':1,
'pydevd.py':1 ,
'pydevd_psyco_stub.py':1
}
if IS_PY3K:
#if we try to trace io.py it seems it can get halted (see http://bugs.python.org/issue4716)
DONT_TRACE['io.py'] = 1
connected = False
bufferStdOutToServer = False
bufferStdErrToServer = False
#=======================================================================================================================
# PyDBCommandThread
#=======================================================================================================================
class PyDBCommandThread(PyDBDaemonThread):
def __init__(self, pyDb):
PyDBDaemonThread.__init__(self)
self.pyDb = pyDb
self.setName('pydevd.CommandThread')
def OnRun(self):
time.sleep(5) #this one will only start later on (because otherwise we may not have any non-daemon threads)
run_traced = True
if pydevd_vm_type.GetVmType() == pydevd_vm_type.PydevdVmType.JYTHON and sys.hexversion <= 0x020201f0:
#don't run untraced threads if we're in jython 2.2.1 or lower
#jython bug: if we start a thread and another thread changes the tracing facility
#it affects other threads (it's not set only for the thread but globally)
#Bug: http://sourceforge.net/tracker/index.php?func=detail&aid=1870039&group_id=12867&atid=112867
run_traced = False
if run_traced:
pydevd_tracing.SetTrace(None) # no debugging on this thread
try:
while not self.killReceived:
try:
self.pyDb.processInternalCommands()
except:
PydevdLog(0, 'Finishing debug communication...(2)')
time.sleep(0.5)
except:
pass
#only got this error in interpreter shutdown
#PydevdLog(0, 'Finishing debug communication...(3)')
_original_excepthook = None
#=======================================================================================================================
# excepthook
#=======================================================================================================================
def excepthook(exctype, value, tb):
#Always call the original excepthook before going on to call the debugger post mortem to show it.
_original_excepthook(exctype, value, tb)
debugger = GetGlobalDebugger()
if debugger is None or not debugger.break_on_uncaught:
return
if debugger.handle_exceptions is not None:
if not issubclass(exctype, debugger.handle_exceptions):
return
frames = []
while tb:
frames.append(tb.tb_frame)
tb = tb.tb_next
thread = threadingCurrentThread()
frames_byid = dict([(id(frame),frame) for frame in frames])
frame = frames[-1]
thread.additionalInfo.pydev_force_stop_at_exception = (frame, frames_byid)
debugger = GetGlobalDebugger()
debugger.force_post_mortem_stop += 1
#=======================================================================================================================
# set_pm_excepthook
#=======================================================================================================================
def set_pm_excepthook(handle_exceptions=None):
'''
This function is now deprecated (PyDev provides an UI to handle that now).
'''
raise DeprecationWarning(
'This function is now replaced by GetGlobalDebugger().setExceptHook and is now controlled by the PyDev UI.')
try:
import thread
except ImportError:
import _thread as thread #Py3K changed it.
_original_start_new_thread = thread.start_new_thread
#=======================================================================================================================
# NewThreadStartup
#=======================================================================================================================
class NewThreadStartup:
def __init__(self, original_func, args, kwargs):
self.original_func = original_func
self.args = args
self.kwargs = kwargs
def __call__(self):
global_debugger = GetGlobalDebugger()
pydevd_tracing.SetTrace(global_debugger.trace_dispatch)
self.original_func(*self.args, **self.kwargs)
#=======================================================================================================================
# pydev_start_new_thread
#=======================================================================================================================
def pydev_start_new_thread(function, args, kwargs={}):
'''
We need to replace the original thread.start_new_thread with this function so that threads started through
it and not through the threading module are properly traced.
'''
return _original_start_new_thread(NewThreadStartup(function, args, kwargs), ())
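# Illustrative effect (hypothetical worker/arg names): once PyDB.run() swaps this function in for
# thread.start_new_thread, a call like thread.start_new_thread(worker, (arg,)) first installs the
# debugger's trace function on the new thread (NewThreadStartup.__call__) and only then runs
# worker(arg), so threads created outside the threading module get traced as well.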
#=======================================================================================================================
# PyDB
#=======================================================================================================================
class PyDB:
""" Main debugging class
Lots of stuff going on here:
PyDB starts two threads on startup that connect to remote debugger (RDB)
The threads continuously read & write commands to RDB.
PyDB communicates with these threads through command queues.
Every RDB command is processed by calling processNetCommand.
Every PyDB net command is sent to the net by posting NetCommand to WriterThread queue
Some commands need to be executed on the right thread (suspend/resume & friends)
These are placed on the internal command queue.
"""
def __init__(self):
SetGlobalDebugger(self)
pydevd_tracing.ReplaceSysSetTraceFunc()
self.reader = None
self.writer = None
self.quitting = None
self.cmdFactory = NetCommandFactory()
self._cmd_queue = {} # the hash of Queues. Key is thread id, value is the internal command queue for that thread
self.breakpoints = {}
self.readyToRun = False
self._main_lock = threading.Lock()
self._lock_running_thread_ids = threading.Lock()
self._finishDebuggingSession = False
self.force_post_mortem_stop = 0
self.break_on_uncaught = False
self.break_on_caught = False
self.handle_exceptions = None
#this is a dict of thread ids pointing to threads. Whenever a command is passed to the java end that
#acknowledges that a thread was created, the thread id should be passed here -- and if at some time we do not
#find that thread alive anymore, we must remove it from this list and make the java side know that the thread
#was killed.
self._running_thread_ids = {}
def FinishDebuggingSession(self):
self._finishDebuggingSession = True
def initializeNetwork(self, sock):
try:
sock.settimeout(None) # infinite, no timeouts from now on - jython does not have it
except:
pass
self.writer = WriterThread(sock)
self.reader = ReaderThread(sock)
self.writer.start()
self.reader.start()
time.sleep(0.1) # give threads time to start
def connect(self, host, port):
if host:
s = StartClient(host, port)
else:
s = StartServer(port)
self.initializeNetwork(s)
def getInternalQueue(self, thread_id):
""" returns internal command queue for a given thread.
if new queue is created, notify the RDB about it """
try:
return self._cmd_queue[thread_id]
except KeyError:
return self._cmd_queue.setdefault(thread_id, PydevQueue.Queue()) #@UndefinedVariable
def postInternalCommand(self, int_cmd, thread_id):
""" if thread_id is *, post to all """
if thread_id == "*":
for k in self._cmd_queue.keys():
self._cmd_queue[k].put(int_cmd)
else:
queue = self.getInternalQueue(thread_id)
queue.put(int_cmd)
def checkOutput(self, out, outCtx):
'''Checks the output to see if we have to send some buffered output to the debug server
@param out: sys.stdout or sys.stderr
@param outCtx: the context indicating: 1=stdout and 2=stderr (to know the colors to write it)
'''
try:
v = out.getvalue()
if v:
self.cmdFactory.makeIoMessage(v, outCtx, self)
except:
traceback.print_exc()
def processInternalCommands(self):
'''This function processes internal commands
'''
curr_thread_id = GetThreadId(threadingCurrentThread())
program_threads_alive = {}
all_threads = threadingEnumerate()
program_threads_dead = []
self._main_lock.acquire()
try:
if bufferStdOutToServer:
self.checkOutput(sys.stdoutBuf, 1) #@UndefinedVariable
if bufferStdErrToServer:
self.checkOutput(sys.stderrBuf, 2) #@UndefinedVariable
self._lock_running_thread_ids.acquire()
try:
for t in all_threads:
thread_id = GetThreadId(t)
if not isinstance(t, PyDBDaemonThread) and t.isAlive():
program_threads_alive[thread_id] = t
if not DictContains(self._running_thread_ids, thread_id):
if not hasattr(t, 'additionalInfo'):
#see http://sourceforge.net/tracker/index.php?func=detail&aid=1955428&group_id=85796&atid=577329
#Let's create the additional info right away!
t.additionalInfo = PyDBAdditionalThreadInfo()
self._running_thread_ids[thread_id] = t
self.writer.addCommand(self.cmdFactory.makeThreadCreatedMessage(t))
queue = self.getInternalQueue(thread_id)
cmdsToReadd = [] #some commands must be processed by the thread itself... if that's the case,
#we will re-add the commands to the queue after executing.
try:
while True:
int_cmd = queue.get(False)
if int_cmd.canBeExecutedBy(curr_thread_id):
PydevdLog(2, "processing internal command ", str(int_cmd))
int_cmd.doIt(self)
else:
PydevdLog(2, "NOT processing internal command ", str(int_cmd))
cmdsToReadd.append(int_cmd)
except PydevQueue.Empty: #@UndefinedVariable
for int_cmd in cmdsToReadd:
queue.put(int_cmd)
# this is how we exit
thread_ids = list(self._running_thread_ids.keys())
for tId in thread_ids:
if not DictContains(program_threads_alive, tId):
program_threads_dead.append(tId)
finally:
self._lock_running_thread_ids.release()
for tId in program_threads_dead:
try:
self.processThreadNotAlive(tId)
except:
sys.stderr.write('Error iterating through %s (%s) - %s\n' % (
program_threads_alive, program_threads_alive.__class__, dir(program_threads_alive)))
raise
if len(program_threads_alive) == 0:
self.FinishDebuggingSession()
for t in all_threads:
if hasattr(t, 'doKillPydevThread'):
t.doKillPydevThread()
finally:
self._main_lock.release()
def setTracingForUntracedContexts(self):
#Enable the tracing for existing threads (because there may be frames being executed that
#are currently untraced).
threads = threadingEnumerate()
for t in threads:
if not t.getName().startswith('pydevd.'):
#TODO: optimize so that we only actually add that tracing if it's in
#the new breakpoint context.
additionalInfo = None
try:
additionalInfo = t.additionalInfo
except AttributeError:
pass #that's ok, no info currently set
if additionalInfo is not None:
for frame in additionalInfo.IterFrames():
self.SetTraceForFrameAndParents(frame)
del frame
def processNetCommand(self, cmd_id, seq, text):
'''Processes a command received from the Java side
@param cmd_id: the id of the command
@param seq: the sequence of the command
@param text: the text received in the command
@note: this method is run as a big switch... after doing some tests, it's not clear whether changing it for
a dict id --> function call will have better performance result. A simple test with xrange(10000000) showed
that the gains from having a fast access to what should be executed are lost because of the function call in
a way that if we had 10 elements in the switch the if..elif are better -- but growing the number of choices
makes the solution with the dispatch look better -- so, if this gets more than 20-25 choices at some time,
it may be worth refactoring it (actually, reordering the ifs so that the ones used mostly come before
probably will give better performance).
'''
self._main_lock.acquire()
try:
try:
cmd = None
if cmd_id == CMD_RUN:
self.readyToRun = True
elif cmd_id == CMD_VERSION:
# response is version number
cmd = self.cmdFactory.makeVersionMessage(seq)
elif cmd_id == CMD_LIST_THREADS:
# response is a list of threads
cmd = self.cmdFactory.makeListThreadsMessage(seq)
elif cmd_id == CMD_THREAD_KILL:
int_cmd = InternalTerminateThread(text)
self.postInternalCommand(int_cmd, text)
elif cmd_id == CMD_THREAD_SUSPEND:
#Yes, thread suspend is still done at this point, not through an internal command!
t = PydevdFindThreadById(text)
if t:
additionalInfo = None
try:
additionalInfo = t.additionalInfo
except AttributeError:
pass #that's ok, no info currently set
if additionalInfo is not None:
for frame in additionalInfo.IterFrames():
self.SetTraceForFrameAndParents(frame)
del frame
self.setSuspend(t, CMD_THREAD_SUSPEND)
elif cmd_id == CMD_THREAD_RUN:
t = PydevdFindThreadById(text)
if t:
thread_id = GetThreadId(t)
int_cmd = InternalRunThread(thread_id)
self.postInternalCommand(int_cmd, thread_id)
elif cmd_id == CMD_STEP_INTO or cmd_id == CMD_STEP_OVER or cmd_id == CMD_STEP_RETURN:
#we received some command to make a single step
t = PydevdFindThreadById(text)
if t:
thread_id = GetThreadId(t)
int_cmd = InternalStepThread(thread_id, cmd_id)
self.postInternalCommand(int_cmd, thread_id)
elif cmd_id == CMD_RUN_TO_LINE or cmd_id == CMD_SET_NEXT_STATEMENT:
#we received some command to make a single step
thread_id, line, func_name = text.split('\t', 2)
t = PydevdFindThreadById(thread_id)
if t:
int_cmd = InternalSetNextStatementThread(thread_id, cmd_id, line, func_name)
self.postInternalCommand(int_cmd, thread_id)
elif cmd_id == CMD_RELOAD_CODE:
#we received some command to make a reload of a module
module_name = text.strip()
from pydevd_reload import xreload
if not DictContains(sys.modules, module_name):
if '.' in module_name:
new_module_name = module_name.split('.')[-1]
if DictContains(sys.modules, new_module_name):
module_name = new_module_name
if not DictContains(sys.modules, module_name):
sys.stderr.write('pydev debugger: Unable to find module to reload: "'+module_name+'".\n')
sys.stderr.write('pydev debugger: This usually means you are trying to reload the __main__ module (which cannot be reloaded).\n')
else:
sys.stderr.write('pydev debugger: Reloading: '+module_name+'\n')
xreload(sys.modules[module_name])
elif cmd_id == CMD_CHANGE_VARIABLE:
#the text is: thread\tstackframe\tFRAME|GLOBAL\tattribute_to_change\tvalue_to_change
try:
thread_id, frame_id, scope, attr_and_value = text.split('\t', 3)
tab_index = attr_and_value.rindex('\t')
attr = attr_and_value[0:tab_index].replace('\t', '.')
value = attr_and_value[tab_index + 1:]
int_cmd = InternalChangeVariable(seq, thread_id, frame_id, scope, attr, value)
self.postInternalCommand(int_cmd, thread_id)
except:
traceback.print_exc()
elif cmd_id == CMD_GET_VARIABLE:
#we received some command to get a variable
#the text is: thread_id\tframe_id\tFRAME|GLOBAL\tattributes*
try:
thread_id, frame_id, scopeattrs = text.split('\t', 2)
if scopeattrs.find('\t') != -1: # there are attributes beyond scope
scope, attrs = scopeattrs.split('\t', 1)
else:
scope, attrs = (scopeattrs, None)
int_cmd = InternalGetVariable(seq, thread_id, frame_id, scope, attrs)
self.postInternalCommand(int_cmd, thread_id)
except:
traceback.print_exc()
elif cmd_id == CMD_GET_COMPLETIONS:
#we received some command to get a variable
#the text is: thread_id\tframe_id\tactivation token
try:
thread_id, frame_id, scope, act_tok = text.split('\t', 3)
int_cmd = InternalGetCompletions(seq, thread_id, frame_id, act_tok)
self.postInternalCommand(int_cmd, thread_id)
except:
traceback.print_exc()
elif cmd_id == CMD_GET_FRAME:
thread_id, frame_id, scope = text.split('\t', 2)
int_cmd = InternalGetFrame(seq, thread_id, frame_id)
self.postInternalCommand(int_cmd, thread_id)
elif cmd_id == CMD_SET_BREAK:
#func name: 'None': match anything. Empty: match global, specified: only method context.
#command to add some breakpoint.
# text is file\tline. Add to breakpoints dictionary
file, line, condition = text.split('\t', 2)
if condition.startswith('**FUNC**'):
func_name, condition = condition.split('\t', 1)
#We must restore new lines and tabs as done in
#AbstractDebugTarget.breakpointAdded
condition = condition.replace("@_@NEW_LINE_CHAR@_@", '\n').\
replace("@_@TAB_CHAR@_@", '\t').strip()
func_name = func_name[8:]
else:
func_name = 'None' #Match anything if not specified.
file = NormFileToServer(file)
if not os.path.exists(file):
sys.stderr.write('pydev debugger: warning: trying to add breakpoint'\
' to file that does not exist: %s (will have no effect)\n' % (file,))
line = int(line)
if DEBUG_TRACE_BREAKPOINTS > 0:
sys.stderr.write('Added breakpoint:%s - line:%s - func_name:%s\n' % (file, line, func_name))
if DictContains(self.breakpoints, file):
breakDict = self.breakpoints[file]
else:
breakDict = {}
if len(condition) <= 0 or condition == None or condition == "None":
breakDict[line] = (True, None, func_name)
else:
breakDict[line] = (True, condition, func_name)
self.breakpoints[file] = breakDict
self.setTracingForUntracedContexts()
elif cmd_id == CMD_REMOVE_BREAK:
#command to remove some breakpoint
#text is file\tline. Remove from breakpoints dictionary
file, line = text.split('\t', 1)
file = NormFileToServer(file)
try:
line = int(line)
except ValueError:
pass
else:
try:
del self.breakpoints[file][line] #remove the breakpoint in that line
if DEBUG_TRACE_BREAKPOINTS > 0:
sys.stderr.write('Removed breakpoint:%s\n' % (file,))
except KeyError:
#ok, it's not there...
if DEBUG_TRACE_BREAKPOINTS > 0:
#Sometimes, when adding a breakpoint, it adds a remove command before (don't really know why)
sys.stderr.write("breakpoint not found: %s - %s\n" % (file, line))
elif cmd_id == CMD_EVALUATE_EXPRESSION or cmd_id == CMD_EXEC_EXPRESSION:
#command to evaluate the given expression
#text is: thread\tstackframe\tLOCAL\texpression
thread_id, frame_id, scope, expression = text.split('\t', 3)
int_cmd = InternalEvaluateExpression(seq, thread_id, frame_id, expression,
cmd_id == CMD_EXEC_EXPRESSION)
self.postInternalCommand(int_cmd, thread_id)
elif cmd_id == CMD_SET_PY_EXCEPTION:
# Command which receives set of exceptions on which user wants to break the debugger
# text is: break_on_uncaught;break_on_caught;TypeError;ImportError;zipimport.ZipImportError;
splitted = text.split(';')
if len(splitted) >= 2:
if splitted[0] == 'true':
break_on_uncaught = True
else:
break_on_uncaught = False
if splitted[1] == 'true':
break_on_caught = True
else:
break_on_caught = False
handle_exceptions = []
for exception_type in splitted[2:]:
exception_type = exception_type.strip()
if not exception_type:
continue
try:
handle_exceptions.append(eval(exception_type))
except:
try:
handle_exceptions.append(pydevd_import_class.ImportName(exception_type))
except:
sys.stderr.write("Unable to Import: %s when determining exceptions to break.\n" % (exception_type,))
if DEBUG_TRACE_BREAKPOINTS > 0:
sys.stderr.write("Exceptions to hook : %s\n" % (handle_exceptions,))
self.setExceptHook(tuple(handle_exceptions), break_on_uncaught, break_on_caught)
self.setTracingForUntracedContexts()
else:
sys.stderr.write("Error when setting exception list. Received: %s\n" % (text,))
elif cmd_id == CMD_GET_FILE_CONTENTS:
if os.path.exists(text):
f = open(text, 'r')
try:
source = f.read()
finally:
f.close()
cmd = self.cmdFactory.makeGetFileContents(seq, source)
else:
#I have no idea what this is all about
cmd = self.cmdFactory.makeErrorMessage(seq, "unexpected command " + str(cmd_id))
if cmd is not None:
self.writer.addCommand(cmd)
del cmd
except Exception:
traceback.print_exc()
cmd = self.cmdFactory.makeErrorMessage(seq,
"Unexpected exception in processNetCommand.\nInitial params: %s" % ((cmd_id, seq, text),))
self.writer.addCommand(cmd)
finally:
self._main_lock.release()
def setExceptHook(self, handle_exceptions, break_on_uncaught, break_on_caught):
'''
Should be called to set the exceptions to be handled and whether it should break on uncaught and
caught exceptions.
Can receive a parameter to stop only on some exceptions.
E.g.:
set_pm_excepthook((IndexError, ValueError), True, True)
or
set_pm_excepthook(IndexError, True, False)
if passed without a parameter, will break on any exception
@param handle_exceptions: exception or tuple(exceptions)
The exceptions that should be handled.
@param break_on_uncaught bool
Whether it should break on uncaught exceptions.
@param break_on_caught: bool
Whether it should break on caught exceptions.
'''
global _original_excepthook
if sys.excepthook != excepthook:
#Only keep the original if it's not our own excepthook (if called many times).
_original_excepthook = sys.excepthook
self.handle_exceptions = handle_exceptions
#Note that we won't set to break if we don't have any exception to break on
self.break_on_uncaught = handle_exceptions and break_on_uncaught
self.break_on_caught = handle_exceptions and break_on_caught
sys.excepthook = excepthook
def processThreadNotAlive(self, threadId):
""" if thread is not alive, cancel trace_dispatch processing """
self._lock_running_thread_ids.acquire()
try:
thread = self._running_thread_ids.pop(threadId, None)
if thread is None:
return
wasNotified = thread.additionalInfo.pydev_notify_kill
if not wasNotified:
thread.additionalInfo.pydev_notify_kill = True
finally:
self._lock_running_thread_ids.release()
cmd = self.cmdFactory.makeThreadKilledMessage(threadId)
self.writer.addCommand(cmd)
def setSuspend(self, thread, stop_reason):
thread.additionalInfo.pydev_state = STATE_SUSPEND
thread.stop_reason = stop_reason
def doWaitSuspend(self, thread, frame, event, arg): #@UnusedVariable
""" busy waits until the thread state changes to RUN
it expects thread's state as attributes of the thread.
Upon running, processes any outstanding Stepping commands.
"""
self.processInternalCommands()
cmd = self.cmdFactory.makeThreadSuspendMessage(GetThreadId(thread), frame, thread.stop_reason)
self.writer.addCommand(cmd)
info = thread.additionalInfo
while info.pydev_state == STATE_SUSPEND and not self._finishDebuggingSession:
self.processInternalCommands()
time.sleep(0.01)
#process any stepping instructions
if info.pydev_step_cmd == CMD_STEP_INTO:
info.pydev_step_stop = None
elif info.pydev_step_cmd == CMD_STEP_OVER:
info.pydev_step_stop = frame
self.SetTraceForFrameAndParents(frame)
elif info.pydev_step_cmd == CMD_RUN_TO_LINE or info.pydev_step_cmd == CMD_SET_NEXT_STATEMENT :
self.SetTraceForFrameAndParents(frame)
if event == 'line' or event == 'exception':
#If we're already in the correct context, we have to stop it now, because we can act only on
#line events -- if a return was the next statement it wouldn't work (so, we have this code
#repeated at pydevd_frame).
stop = False
curr_func_name = frame.f_code.co_name
#global context is set with an empty name
if curr_func_name in ('?', '<module>'):
curr_func_name = ''
if curr_func_name == info.pydev_func_name:
line = info.pydev_next_line
if frame.f_lineno == line:
stop = True
else:
if frame.f_trace is None:
frame.f_trace = self.trace_dispatch
frame.f_lineno = line
frame.f_trace = None
stop = True
if stop:
info.pydev_state = STATE_SUSPEND
self.doWaitSuspend(thread, frame, event, arg)
return
elif info.pydev_step_cmd == CMD_STEP_RETURN:
back_frame = frame.f_back
if back_frame is not None:
#steps back to the same frame (in a return call it will stop in the 'back frame' for the user)
info.pydev_step_stop = frame
self.SetTraceForFrameAndParents(frame)
else:
#No back frame?!? -- this happens in jython when we have some frame created from an awt event
#(the previous frame would be the awt event, but this doesn't make part of 'jython', only 'java')
#so, if we're doing a step return in this situation, it's the same as just making it run
info.pydev_step_stop = None
info.pydev_step_cmd = None
info.pydev_state = STATE_RUN
del frame
cmd = self.cmdFactory.makeThreadRunMessage(GetThreadId(thread), info.pydev_step_cmd)
self.writer.addCommand(cmd)
def trace_dispatch(self, frame, event, arg):
''' This is the callback used when we enter some context in the debugger.
We also decorate the thread we are in with info about the debugging.
The attributes added are:
pydev_state
pydev_step_stop
pydev_step_cmd
pydev_notify_kill
'''
try:
if self._finishDebuggingSession:
#that was not working very well because jython gave some socket errors
threads = threadingEnumerate()
for t in threads:
if hasattr(t, 'doKillPydevThread'):
t.doKillPydevThread()
return None
filename, base = GetFilenameAndBase(frame)
is_file_to_ignore = DictContains(DONT_TRACE, base) #we don't want to debug threading or anything related to pydevd
if not self.force_post_mortem_stop: #If we're in post mortem mode, we might not have another chance to show that info!
if is_file_to_ignore:
return None
#print('trace_dispatch', base, frame.f_lineno, event, frame.f_code.co_name)
try:
#this shouldn't give an exception, but it could happen... (python bug)
#see http://mail.python.org/pipermail/python-bugs-list/2007-June/038796.html
#and related bug: http://bugs.python.org/issue1733757
t = threadingCurrentThread()
except:
frame.f_trace = self.trace_dispatch
return self.trace_dispatch
try:
additionalInfo = t.additionalInfo
except:
additionalInfo = t.additionalInfo = PyDBAdditionalThreadInfo()
if self.force_post_mortem_stop: #If we're in post mortem mode, we might not have another chance to show that info!
if additionalInfo.pydev_force_stop_at_exception:
self.force_post_mortem_stop -= 1
frame, frames_byid = additionalInfo.pydev_force_stop_at_exception
thread_id = GetThreadId(t)
used_id = pydevd_vars.addAdditionalFrameById(thread_id, frames_byid)
try:
self.setSuspend(t, CMD_STEP_INTO)
self.doWaitSuspend(t, frame, 'exception', None)
finally:
additionalInfo.pydev_force_stop_at_exception = None
pydevd_vars.removeAdditionalFrameById(thread_id)
# if thread is not alive, cancel trace_dispatch processing
if not t.isAlive():
self.processThreadNotAlive(GetThreadId(t))
return None # suspend tracing
if is_file_to_ignore:
return None
#each new frame...
return additionalInfo.CreateDbFrame((self, filename, additionalInfo, t, frame)).trace_dispatch(frame, event, arg)
except SystemExit:
return None
except Exception:
#Log it
if traceback is not None:
#This can actually happen during the interpreter shutdown in Python 2.7
traceback.print_exc()
return None
if USE_PSYCO_OPTIMIZATION:
try:
import psyco
trace_dispatch = psyco.proxy(trace_dispatch)
processNetCommand = psyco.proxy(processNetCommand)
processInternalCommands = psyco.proxy(processInternalCommands)
doWaitSuspend = psyco.proxy(doWaitSuspend)
getInternalQueue = psyco.proxy(getInternalQueue)
except ImportError:
if hasattr(sys, 'exc_clear'): #jython does not have it
sys.exc_clear() #don't keep the traceback (let's keep it clear for when we go to the point of executing client code)
if not IS_PY3K and not IS_PY27 and not IS_64_BITS and not sys.platform.startswith("java") and not sys.platform.startswith("cli"):
sys.stderr.write("pydev debugger: warning: psyco not available for speedups (the debugger will still work correctly, but a bit slower)\n")
def SetTraceForFrameAndParents(self, frame, also_add_to_passed_frame=True):
dispatch_func = self.trace_dispatch
if also_add_to_passed_frame:
if frame.f_trace is None:
frame.f_trace = dispatch_func
else:
try:
#If it's the trace_exception, go back to the frame trace dispatch!
if frame.f_trace.im_func.__name__ == 'trace_exception':
frame.f_trace = frame.f_trace.im_self.trace_dispatch
except AttributeError:
pass
frame = frame.f_back
while frame:
if frame.f_trace is None:
frame.f_trace = dispatch_func
else:
try:
#If it's the trace_exception, go back to the frame trace dispatch!
if frame.f_trace.im_func.__name__ == 'trace_exception':
frame.f_trace = frame.f_trace.im_self.trace_dispatch
except AttributeError:
pass
frame = frame.f_back
del frame
def run(self, file, globals=None, locals=None):
if globals is None:
#patch provided by: Scott Schlesier - when script is run, it does not
#use globals from pydevd:
#This will prevent the pydevd script from contaminating the namespace for the script to be debugged
#pretend pydevd is not the main module, and
#convince the file to be debugged that it was loaded as main
sys.modules['pydevd'] = sys.modules['__main__']
sys.modules['pydevd'].__name__ = 'pydevd'
from imp import new_module
m = new_module('__main__')
sys.modules['__main__'] = m
m.__file__ = file
globals = m.__dict__
try:
globals['__builtins__'] = __builtins__
except NameError:
pass #Not there on Jython...
if locals is None:
locals = globals
#Predefined (writable) attributes: __name__ is the module's name;
#__doc__ is the module's documentation string, or None if unavailable;
#__file__ is the pathname of the file from which the module was loaded,
#if it was loaded from a file. The __file__ attribute is not present for
#C modules that are statically linked into the interpreter; for extension modules
#loaded dynamically from a shared library, it is the pathname of the shared library file.
        #I think this is an ugly hack, but it works (seems to) for the bug that says that sys.path should be the same in
#debug and run.
if m.__file__.startswith(sys.path[0]):
#print >> sys.stderr, 'Deleting: ', sys.path[0]
del sys.path[0]
#now, the local directory has to be added to the pythonpath
#sys.path.insert(0, os.getcwd())
#Changed: it's not the local directory, but the directory of the file launched
        #The file being run must be in the pythonpath (even if it was not before)
sys.path.insert(0, os.path.split(file)[0])
        # for completeness, we'll register the pydevd.reader & pydevd.writer threads
net = NetCommand(str(CMD_THREAD_CREATE), 0, '<xml><thread name="pydevd.reader" id="-1"/></xml>')
self.writer.addCommand(net)
net = NetCommand(str(CMD_THREAD_CREATE), 0, '<xml><thread name="pydevd.writer" id="-1"/></xml>')
self.writer.addCommand(net)
pydevd_tracing.SetTrace(self.trace_dispatch)
try:
#not available in jython!
threading.settrace(self.trace_dispatch) # for all future threads
except:
pass
try:
thread.start_new_thread = pydev_start_new_thread
thread.start_new = pydev_start_new_thread
except:
pass
while not self.readyToRun:
time.sleep(0.1) # busy wait until we receive run command
PyDBCommandThread(debugger).start()
pydev_imports.execfile(file, globals, locals) #execute the script
def processCommandLine(argv):
""" parses the arguments.
removes our arguments from the command line """
retVal = {}
retVal['client'] = ''
retVal['server'] = False
retVal['port'] = 0
retVal['file'] = ''
i = 0
del argv[0]
while (i < len(argv)):
if (argv[i] == '--port'):
del argv[i]
retVal['port'] = int(argv[i])
del argv[i]
elif (argv[i] == '--vm_type'):
del argv[i]
retVal['vm_type'] = argv[i]
del argv[i]
elif (argv[i] == '--client'):
del argv[i]
retVal['client'] = argv[i]
del argv[i]
elif (argv[i] == '--server'):
del argv[i]
retVal['server'] = True
elif (argv[i] == '--file'):
del argv[i]
            retVal['file'] = argv[i]
i = len(argv) # pop out, file is our last argument
elif (argv[i] == '--DEBUG_RECORD_SOCKET_READS'):
del argv[i]
retVal['DEBUG_RECORD_SOCKET_READS'] = True
else:
raise ValueError("unexpected option " + argv[i])
return retVal
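# Hedged worked example (illustrative, not part of the original file): given
#     sys.argv == ['pydevd.py', '--client', '127.0.0.1', '--port', '5678',
#                  '--file', '/tmp/my_script.py', '--my-flag']
# processCommandLine(sys.argv) returns
#     {'client': '127.0.0.1', 'server': False, 'port': 5678, 'file': '/tmp/my_script.py'}
# and leaves ['/tmp/my_script.py', '--my-flag'] in sys.argv, i.e. everything
# from the --file value onwards is kept for the debugged program itself.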
def usage(doExit=0):
sys.stdout.write('Usage:\n')
    sys.stdout.write('pydevd.py --port N [(--client hostname) | --server] --file executable [file_options]\n')
if doExit:
sys.exit(0)
#=======================================================================================================================
# patch_django_autoreload
#=======================================================================================================================
def patch_django_autoreload():
'''
Patch Django to work with remote debugger without adding an explicit
pydevd.settrace to set a breakpoint (i.e.: setup the remote debugger machinery
and don't suspend now -- this will load the breakpoints and will listen to
changes in them so that we do stop on the breakpoints set in the editor).
    Checked with Django 1.2.5.
    Checked with Django 1.3.
'''
if ('runserver' in sys.argv or 'testserver' in sys.argv):
from django.utils import autoreload
original_main = autoreload.main
def main(main_func, args=None, kwargs=None):
if os.environ.get("RUN_MAIN") == "true":
original_main_func = main_func
def pydev_debugger_main_func(*args, **kwargs):
settrace(suspend=False)
return original_main_func(*args, **kwargs)
main_func = pydev_debugger_main_func
return original_main(main_func, args, kwargs)
autoreload.main = main
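# Hedged usage sketch (illustrative, not part of the original file): a project
# that wants breakpoint-driven debugging of `runserver` without an explicit
# pydevd.settrace() call in its code could run the patch from its own
# manage.py before handing control to Django (the exact management entry
# point is an assumption and varies across Django versions):
#
#     import pydevd
#     pydevd.patch_django_autoreload()
#     # ...then invoke Django's usual manage.py machinery as normal.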
#=======================================================================================================================
# settrace
#=======================================================================================================================
def settrace(host=None, stdoutToServer=False, stderrToServer=False, port=5678, suspend=True, trace_only_current_thread=True):
'''Sets the tracing function with the pydev debug function and initializes needed facilities.
@param host: the user may specify another host, if the debug server is not in the same machine (default is the local host)
@param stdoutToServer: when this is true, the stdout is passed to the debug server
@param stderrToServer: when this is true, the stderr is passed to the debug server
so that they are printed in its console and not in this process console.
@param port: specifies which port to use for communicating with the server (note that the server must be started
in the same port). @note: currently it's hard-coded at 5678 in the client
@param suspend: whether a breakpoint should be emulated as soon as this function is called.
@param trace_only_current_thread: determines if only the current thread will be traced or all future threads will also have the tracing enabled.
'''
_set_trace_lock.acquire()
try:
_locked_settrace(host, stdoutToServer, stderrToServer, port, suspend, trace_only_current_thread)
finally:
_set_trace_lock.release()
_set_trace_lock = threading.Lock()
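# Hedged usage sketch (illustrative, not part of the original file; host and
# port below are assumptions).  A script that wants to attach to an
# already-running pydev debug server typically does:
#
#     import pydevd
#     pydevd.settrace('localhost', port=5678, suspend=False,
#                     stdoutToServer=True, stderrToServer=True)
#
# With suspend=False the call only installs the tracing machinery; execution
# pauses later, when a breakpoint set in the IDE is hit.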
def _locked_settrace(host, stdoutToServer, stderrToServer, port, suspend, trace_only_current_thread):
if host is None:
import pydev_localhost
host = pydev_localhost.get_localhost()
global connected
global bufferStdOutToServer
global bufferStdErrToServer
if not connected :
connected = True
bufferStdOutToServer = stdoutToServer
bufferStdErrToServer = stderrToServer
pydevd_vm_type.SetupType()
debugger = PyDB()
debugger.connect(host, port)
net = NetCommand(str(CMD_THREAD_CREATE), 0, '<xml><thread name="pydevd.reader" id="-1"/></xml>')
debugger.writer.addCommand(net)
net = NetCommand(str(CMD_THREAD_CREATE), 0, '<xml><thread name="pydevd.writer" id="-1"/></xml>')
debugger.writer.addCommand(net)
if bufferStdOutToServer:
sys.stdoutBuf = pydevd_io.IOBuf()
sys.stdout = pydevd_io.IORedirector(sys.stdout, sys.stdoutBuf) #@UndefinedVariable
if bufferStdErrToServer:
sys.stderrBuf = pydevd_io.IOBuf()
sys.stderr = pydevd_io.IORedirector(sys.stderr, sys.stderrBuf) #@UndefinedVariable
debugger.SetTraceForFrameAndParents(GetFrame(), False)
t = threadingCurrentThread()
try:
additionalInfo = t.additionalInfo
except AttributeError:
additionalInfo = PyDBAdditionalThreadInfo()
t.additionalInfo = additionalInfo
while not debugger.readyToRun:
time.sleep(0.1) # busy wait until we receive run command
if suspend:
debugger.setSuspend(t, CMD_SET_BREAK)
        #note that we do that through pydevd_tracing.SetTrace so that the user
        #is not warned about the tracing!
pydevd_tracing.SetTrace(debugger.trace_dispatch)
if not trace_only_current_thread:
#Trace future threads?
try:
#not available in jython!
threading.settrace(debugger.trace_dispatch) # for all future threads
except:
pass
try:
thread.start_new_thread = pydev_start_new_thread
thread.start_new = pydev_start_new_thread
except:
pass
PyDBCommandThread(debugger).start()
else:
#ok, we're already in debug mode, with all set, so, let's just set the break
debugger = GetGlobalDebugger()
debugger.SetTraceForFrameAndParents(GetFrame(), False)
t = threadingCurrentThread()
try:
additionalInfo = t.additionalInfo
except AttributeError:
additionalInfo = PyDBAdditionalThreadInfo()
t.additionalInfo = additionalInfo
pydevd_tracing.SetTrace(debugger.trace_dispatch)
if not trace_only_current_thread:
#Trace future threads?
try:
#not available in jython!
threading.settrace(debugger.trace_dispatch) # for all future threads
except:
pass
try:
thread.start_new_thread = pydev_start_new_thread
thread.start_new = pydev_start_new_thread
except:
pass
if suspend:
debugger.setSuspend(t, CMD_SET_BREAK)
#=======================================================================================================================
# main
#=======================================================================================================================
if __name__ == '__main__':
sys.stderr.write("pydev debugger: starting\n")
# parse the command line. --file is our last argument that is required
try:
setup = processCommandLine(sys.argv)
except ValueError:
traceback.print_exc()
usage(1)
    #since by the time we get here all our imports are already resolved, the psyco module can be
#changed and we'll still get the speedups in the debugger, as those functions
#are already compiled at this time.
try:
import psyco
except ImportError:
if hasattr(sys, 'exc_clear'): #jython does not have it
sys.exc_clear() #don't keep the traceback -- clients don't want to see it
pass #that's ok, no need to mock psyco if it's not available anyways
else:
#if it's available, let's change it for a stub (pydev already made use of it)
import pydevd_psyco_stub
sys.modules['psyco'] = pydevd_psyco_stub
PydevdLog(2, "Executing file ", setup['file'])
PydevdLog(2, "arguments:", str(sys.argv))
pydevd_vm_type.SetupType(setup.get('vm_type', None))
DebugInfoHolder.DEBUG_RECORD_SOCKET_READS = setup.get('DEBUG_RECORD_SOCKET_READS', False)
debugger = PyDB()
debugger.connect(setup['client'], setup['port'])
connected = True #Mark that we're connected when started from inside eclipse.
debugger.run(setup['file'], None, None)
| tenXer/PyDevSrc | pydevsrc/pydevd.py | Python | epl-1.0 | 55,520 |
#
# constants.py: anaconda constants
#
# Copyright (C) 2001 Red Hat, Inc. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author(s): Erik Troan <[email protected]>
#
import string
from pyanaconda.i18n import N_
SELINUX_DEFAULT = 1
# where to look for 3rd party addons
ADDON_PATHS = ["/usr/share/anaconda/addons"]
# pull in kickstart constants as well
# pylint: disable-msg=W0401
from pykickstart.constants import *
# common string needs to be easy to change
from pyanaconda import product
productName = product.productName
productVersion = product.productVersion
productArch = product.productArch
bugzillaUrl = product.bugUrl
isFinal = product.isFinal
# for use in device names, eg: "fedora", "rhel"
shortProductName = productName.lower()
if productName.count(" "):
shortProductName = ''.join(s[0] for s in shortProductName.split())
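# Hedged worked example (illustrative): a single-word productName such as
# "Fedora" simply becomes "fedora", while a multi-word name keeps one letter
# per word:
#
#     >>> ''.join(s[0] for s in "Red Hat Enterprise Linux".lower().split())
#     'rhel'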
# DriverDisc Paths
DD_ALL = "/tmp/DD"
DD_FIRMWARE = "/tmp/DD/lib/firmware"
DD_RPMS = "/tmp/DD-*"
TRANSLATIONS_UPDATE_DIR="/tmp/updates/po"
ANACONDA_CLEANUP = "anaconda-cleanup"
ROOT_PATH = "/mnt/sysimage"
MOUNT_DIR = "/mnt/install"
DRACUT_REPODIR = "/run/install/repo"
DRACUT_ISODIR = "/run/install/source"
ISO_DIR = MOUNT_DIR + "/isodir"
IMAGE_DIR = MOUNT_DIR + "/image"
INSTALL_TREE = MOUNT_DIR + "/source"
BASE_REPO_NAME = "anaconda"
# NOTE: this should be LANG_TERRITORY.CODESET, e.g. en_US.UTF-8
DEFAULT_LANG = "en_US.UTF-8"
DEFAULT_VC_FONT = "latarcyrheb-sun16"
DEFAULT_KEYBOARD = "us"
DRACUT_SHUTDOWN_EJECT = "/run/initramfs/usr/lib/dracut/hooks/shutdown/99anaconda-eject.sh"
# VNC questions
USEVNC = N_("Start VNC")
USETEXT = N_("Use text mode")
# Runlevel files
RUNLEVELS = {3: 'multi-user.target', 5: 'graphical.target'}
# Network
NETWORK_CONNECTION_TIMEOUT = 45 # in seconds
NETWORK_CONNECTED_CHECK_INTERVAL = 0.1 # in seconds
# DBus
DEFAULT_DBUS_TIMEOUT = -1 # use default
# Thread names
THREAD_EXECUTE_STORAGE = "AnaExecuteStorageThread"
THREAD_STORAGE = "AnaStorageThread"
THREAD_STORAGE_WATCHER = "AnaStorageWatcher"
THREAD_CHECK_STORAGE = "AnaCheckStorageThread"
THREAD_CUSTOM_STORAGE_INIT = "AnaCustomStorageInit"
THREAD_WAIT_FOR_CONNECTING_NM = "AnaWaitForConnectingNMThread"
THREAD_PAYLOAD = "AnaPayloadThread"
THREAD_PAYLOAD_MD = "AnaPayloadMDThread"
THREAD_INPUT_BASENAME = "AnaInputThread"
THREAD_SYNC_TIME_BASENAME = "AnaSyncTime"
THREAD_EXCEPTION_HANDLING_TEST = "AnaExceptionHandlingTest"
THREAD_LIVE_PROGRESS = "AnaLiveProgressThread"
THREAD_SOFTWARE_WATCHER = "AnaSoftwareWatcher"
THREAD_CHECK_SOFTWARE = "AnaCheckSoftwareThread"
THREAD_SOURCE_WATCHER = "AnaSourceWatcher"
THREAD_INSTALL = "AnaInstallThread"
THREAD_CONFIGURATION = "AnaConfigurationThread"
THREAD_FCOE = "AnaFCOEThread"
THREAD_ISCSI_DISCOVER = "AnaIscsiDiscoverThread"
THREAD_ISCSI_LOGIN = "AnaIscsiLoginThread"
THREAD_GEOLOCATION_REFRESH = "AnaGeolocationRefreshThread"
THREAD_DATE_TIME = "AnaDateTimeThread"
THREAD_TIME_INIT = "AnaTimeInitThread"
THREAD_XKL_WRAPPER_INIT = "AnaXklWrapperInitThread"
THREAD_KEYBOARD_INIT = "AnaKeyboardThread"
THREAD_ADD_LAYOUTS_INIT = "AnaAddLayoutsInitThread"
# Geolocation constants
# geolocation providers
# - values are used by the geoloc CLI/boot option
GEOLOC_PROVIDER_FEDORA_GEOIP = "provider_fedora_geoip"
GEOLOC_PROVIDER_HOSTIP = "provider_hostip"
GEOLOC_PROVIDER_GOOGLE_WIFI = "provider_google_wifi"
# geocoding provider
GEOLOC_GEOCODER_NOMINATIM = "geocoder_nominatim"
# default providers
GEOLOC_DEFAULT_PROVIDER = GEOLOC_PROVIDER_FEDORA_GEOIP
GEOLOC_DEFAULT_GEOCODER = GEOLOC_GEOCODER_NOMINATIM
# timeout (in seconds)
GEOLOC_TIMEOUT = 3
ANACONDA_ENVIRON = "anaconda"
FIRSTBOOT_ENVIRON = "firstboot"
# Tainted hardware
UNSUPPORTED_HW = 1 << 28
# Password validation
PASSWORD_MIN_LEN = 6
PASSWORD_EMPTY_ERROR = N_("The password is empty.")
PASSWORD_CONFIRM_ERROR_GUI = N_("The passwords do not match.")
PASSWORD_CONFIRM_ERROR_TUI = N_("The passwords you entered were different. Please try again.")
PASSWORD_WEAK = N_("The password you have provided is weak. You will have to press Done twice to confirm it.")
PASSWORD_WEAK_WITH_ERROR = N_("The password you have provided is weak: %s. You will have to press Done twice to confirm it.")
PASSWORD_WEAK_CONFIRM = N_("You have provided a weak password. Press Done again to use anyway.")
PASSWORD_WEAK_CONFIRM_WITH_ERROR = N_("You have provided a weak password: %s. Press Done again to use anyway.")
PASSWORD_STRENGTH_DESC = [N_("Empty"), N_("Weak"), N_("Fair"), N_("Good"), N_("Strong")]
# the number of seconds we consider a noticeable freeze of the UI
NOTICEABLE_FREEZE = 0.1
# all ASCII characters
PW_ASCII_CHARS = string.digits + string.ascii_letters + string.punctuation + " "
| mairin/anaconda | pyanaconda/constants.py | Python | gpl-2.0 | 5,251 |
"""Networking overhead (200 trials on 200 nodes)
In this run, we will start 100 trials and run them on 100 different nodes.
This test will thus measure the overhead that comes with network communication
and specifically log synchronization.
Cluster: cluster_100x2.yaml
Test owner: krfricke
Acceptance criteria: Should run faster than 500 seconds.
Theoretical minimum time: 300 seconds
"""
import argparse
import ray
from ray import tune
from ray.tune.utils.release_test_util import timed_tune_run
def main(smoke_test: bool = False):
ray.init(address="auto")
num_samples = 100 if not smoke_test else 20
results_per_second = 0.01
trial_length_s = 300
max_runtime = 1000
timed_tune_run(
name="result network overhead",
num_samples=num_samples,
results_per_second=results_per_second,
trial_length_s=trial_length_s,
max_runtime=max_runtime,
resources_per_trial={"cpu": 2}, # One per node
sync_config=tune.SyncConfig(syncer="auto"),
)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"--smoke-test",
action="store_true",
default=False,
help="Finish quickly for training.",
)
args = parser.parse_args()
main(args.smoke_test)
| ray-project/ray | release/tune_tests/scalability_tests/workloads/test_network_overhead.py | Python | apache-2.0 | 1,303 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright (C) 2005-2008 Francisco José Rodríguez Bogado, #
# Diego Muñoz Escalante. #
# ([email protected], [email protected]) #
# #
# This file is part of GeotexInn. #
# #
# GeotexInn is free software; you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation; either version 2 of the License, or #
# (at your option) any later version. #
# #
# GeotexInn is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with GeotexInn; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA #
###############################################################################
###################################################################
## utils_administracion.py - A few utilities for the
## administration module.
###################################################################
## NOTES:
## DEPRECATED
##
###################################################################
## Changelog:
## 14 October 2005 -> Started
##
###################################################################
raise ImportError, "Este módulo está obsoleto."
def id_propia_empresa_proveedor():
"""
    Return the id of our own company in the suppliers (proveedores) table.
"""
from framework.pclases import DatosDeLaEmpresa, Proveedor
try:
empresa = DatosDeLaEmpresa.select()[0]
except:
print "ERROR: No hay datos de la empresa."
return 0
try:
empresa = Proveedor.select(Proveedor.q.nombre == empresa.nombre)[0]
except: #IndexError? SQLObjectNotFound?
print "ERROR: La empresa no está en la tabla de de proveedores."
return 0
return empresa.id
def ultimo_pedido_de_venta():
"""
    Return the last valid sales order number for the
    current year, or 0 if there are no sales orders.
"""
from framework import pclases
# Los pedidos de compra son aquellos que tienen como proveedor a
# la propia empresa:
idcliente = id_propia_empresa_cliente()
try:
ultimopedido = pclases.PedidoVenta.select(
pclases.PedidoVenta.q.clienteID == idcliente,
orderBy="-numpedido")[0]
ultimonumpedido = ultimopedido.numpedido
except IndexError:
        # There are no sales orders, so ultimonumpedido is 0:
ultimonumpedido = 0
return ultimonumpedido
def ultimo_numfactura():
"""
    Return the highest invoice number.
"""
from framework import pclases
try:
fact = pclases.FacturaVenta.select(orderBy = "-numfactura")[0]
except IndexError:
return 0
return fact.numfactura
def id_propia_empresa_cliente():
"""
    Return the id of our own company in the clients (clientes) table.
"""
from framework.pclases import DatosDeLaEmpresa, Cliente, Auditoria
try:
empresa = DatosDeLaEmpresa.select()[0]
except:
print "ERROR: No hay datos de la empresa."
return 0
try:
empresa = Cliente.select(Cliente.q.nombre == empresa.nombre)[0]
    except IndexError: # Then create it.
try:
empresa = Cliente(nombre = empresa.nombre,
tarifa = None,
contador = None,
cliente = None)
Auditoria.nuevo(empresa, None, __file__)
        except TypeError: # Some field is missing.
print "utils_administracion.py::id_propia_empresa_cliente -> "\
"ERROR: TypeError al crear empresa como cliente."
return 0
except: # ¿SQLObjectNotFound?
print "utils_administracion.py::id_propia_empresa_cliente -> "\
"ERROR: La empresa no está en la tabla de clientes."
return 0
return empresa.id
def get_albaranes_from_pedido(pedido):
"""
    Return a list of Albaran objects that are related
    to the Pedido "pedido" through its
    LineaDeVenta objects.
    NOTE: sales orders and outgoing delivery notes.
"""
albs = []
for ldv in pedido.lineasDeVenta:
if (ldv.albaranSalida != None) and (not ldv.albaranSalida in albs):
            albs.append(ldv.albaranSalida)
return albs
| pacoqueen/ginn | ginn/formularios/utils_administracion.py | Python | gpl-2.0 | 5,324 |
# coding: utf-8
import subprocess
import os
def deploycron(filename="", content="", override=False):
"""install crontabs into the system if it's not installed.
    This will not remove the other crontabs installed in the system if not
    specified as override. It just merges the new one with the existing one.
    If you provide `filename`, then it will install the crontabs in that file,
    otherwise it installs the crontabs specified in content.
filename - file contains crontab, one crontab for a line
content - string that contains crontab, one crontab for a line
override - override the origin crontab
"""
if not filename and not content:
raise ValueError("neither filename or crontab must be specified")
if filename:
try:
with open(filename, 'r') as f:
content = f.read()
except Exception as e:
raise ValueError("cannot open the file: %s" % str(e))
if override:
installed_content = ""
else:
installed_content = _get_installed_content()
installed_content = installed_content.rstrip("\n")
installed_crontabs = installed_content.split("\n")
for crontab in content.split("\n"):
if crontab and crontab not in installed_crontabs:
if not installed_content:
installed_content += crontab
else:
installed_content += "\n%s" % crontab
if installed_content:
installed_content += "\n"
# install back
_install_content(installed_content)
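# Hedged usage sketch (illustrative only; the schedules and paths below are
# assumptions, and this helper is never called by the library itself):
def _deploycron_usage_example():
    # Merge a single job into whatever crontab is already installed.
    deploycron(content="*/5 * * * * /usr/bin/env python /opt/jobs/sync.py")
    # Install every line from a file, replacing the existing crontab entirely.
    deploycron(filename="/opt/jobs/crontab.txt", override=True)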
def undeploycron_between(start_line, stop_line, occur_start=1, occur_stop=1):
"""uninstall crontab parts between two lines (included).
If the start_line or the stop_line is not found into the installed crontab,
it won't be modified.
`start_line` - start crontab line (the actual line, not the line number)
to delimit the crontab block to remove
`stop_line` - stop crontab line (the actual line, not the line number)
to delimit the crontab block to remove
    `occur_start` - nth occurrence you want to consider as start_line (e.g.
    choose 2 if you want the 2nd occurrence to be chosen as start_line)
    `occur_stop` - nth occurrence you want to consider as stop_line (e.g.
    choose 2 if you want the 2nd occurrence to be chosen as stop_line)
"""
lines_installed = [x.strip() for x in
_get_installed_content().splitlines()]
start_line = start_line.strip()
stop_line = stop_line.strip()
if start_line not in lines_installed:
return False
if stop_line not in lines_installed:
return False
if occur_start is None or occur_start <= 0:
return False
if occur_stop is None or occur_stop <= 0:
return False
# Check if stop_line is before start_line by getting their indices
index_start = -1
index_stop = -1
try:
        # Find the occurrence we are interested in
for j in range(occur_start):
index_start = lines_installed.index(start_line, index_start + 1)
except ValueError:
        # If the occurrence number is too high (nth occurrence not found)
return False
try:
for j in range(occur_stop):
index_stop = lines_installed.index(stop_line, index_stop + 1)
except ValueError:
return False
    # If stop is before start, swap them
    if index_stop < index_start:
        index_start, index_stop = index_stop, index_start
lines_to_install = []
for i in range(len(lines_installed)):
if i < index_start or i > index_stop:
lines_to_install.append(lines_installed[i])
if len(lines_to_install) > 0:
lines_to_install.append("")
content_to_install = "\n".join(lines_to_install)
_install_content(content_to_install)
return True
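# Hedged usage sketch (illustrative only; the marker lines are assumptions --
# crontab treats lines starting with '#' as comments, so they make convenient
# block delimiters; this helper is never called by the library itself):
def _undeploycron_usage_example():
    # Remove everything previously installed between the two marker lines,
    # markers included.
    return undeploycron_between("# BEGIN managed jobs", "# END managed jobs")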
def _get_installed_content():
"""get the current installed crontab.
"""
retcode, err, installed_content = _runcmd("crontab -l")
if retcode != 0 and b'no crontab for' not in err:
raise OSError("crontab not supported in your system")
return installed_content.decode("utf-8")
def _install_content(content):
"""install (replace) the given (multilines) string as new crontab...
"""
retcode, err, out = _runcmd("crontab", content)
if retcode != 0:
raise ValueError("failed to install crontab, check if crontab is "
"valid")
def _runcmd(cmd, input=None):
    '''run shell command and return a tuple of the cmd's return code, std
error and std out.
WARN: DO NOT RUN COMMANDS THAT NEED TO INTERACT WITH STDIN WITHOUT SPECIFY
INPUT, (eg cat), IT WILL NEVER TERMINATE.
'''
if input is not None:
p = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
close_fds=True, preexec_fn=os.setsid)
input = input.encode()
else:
p = subprocess.Popen(cmd, shell=True,
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
close_fds=True, preexec_fn=os.setsid)
stdoutdata, stderrdata = p.communicate(input)
return p.returncode, stderrdata, stdoutdata
| Hawker65/deploycron | deploycron/__init__.py | Python | mit | 5,288 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable= arguments-differ
"""Basic neural network layers."""
__all__ = ['Sequential', 'HybridSequential', 'Dense', 'Dropout', 'Embedding',
'BatchNorm', 'InstanceNorm', 'LayerNorm', 'Flatten', 'Lambda', 'HybridLambda']
import warnings
import numpy as np
from .activations import Activation
from ..block import Block, HybridBlock
from ..utils import _indent
from ... import nd, sym
class Sequential(Block):
"""Stacks Blocks sequentially.
Example::
net = nn.Sequential()
# use net's name_scope to give child Blocks appropriate names.
with net.name_scope():
net.add(nn.Dense(10, activation='relu'))
net.add(nn.Dense(20))
"""
def __init__(self, prefix=None, params=None):
super(Sequential, self).__init__(prefix=prefix, params=params)
def add(self, *blocks):
"""Adds block on top of the stack."""
for block in blocks:
self.register_child(block)
def forward(self, x):
for block in self._children.values():
x = block(x)
return x
def __repr__(self):
s = '{name}(\n{modstr}\n)'
modstr = '\n'.join([' ({key}): {block}'.format(key=key,
block=_indent(block.__repr__(), 2))
for key, block in self._children.items()])
return s.format(name=self.__class__.__name__,
modstr=modstr)
def __getitem__(self, key):
layers = list(self._children.values())[key]
if isinstance(layers, list):
net = type(self)(prefix=self._prefix)
with net.name_scope():
net.add(*layers)
return net
else:
return layers
def __len__(self):
return len(self._children)
def hybridize(self, active=True, **kwargs):
"""Activates or deactivates `HybridBlock`s recursively. Has no effect on
non-hybrid children.
Parameters
----------
active : bool, default True
Whether to turn hybrid on or off.
**kwargs : string
Additional flags for hybridized operator.
"""
if self._children and all(isinstance(c, HybridBlock) for c in self._children.values()):
warnings.warn(
"All children of this Sequential layer '%s' are HybridBlocks. Consider "
"using HybridSequential for the best performance."%self.prefix, stacklevel=2)
super(Sequential, self).hybridize(active, **kwargs)
class HybridSequential(HybridBlock):
"""Stacks HybridBlocks sequentially.
Example::
net = nn.HybridSequential()
# use net's name_scope to give child Blocks appropriate names.
with net.name_scope():
net.add(nn.Dense(10, activation='relu'))
net.add(nn.Dense(20))
net.hybridize()
"""
def __init__(self, prefix=None, params=None):
super(HybridSequential, self).__init__(prefix=prefix, params=params)
def add(self, *blocks):
"""Adds block on top of the stack."""
for block in blocks:
self.register_child(block)
def hybrid_forward(self, F, x):
for block in self._children.values():
x = block(x)
return x
def __repr__(self):
s = '{name}(\n{modstr}\n)'
modstr = '\n'.join([' ({key}): {block}'.format(key=key,
block=_indent(block.__repr__(), 2))
for key, block in self._children.items()])
return s.format(name=self.__class__.__name__,
modstr=modstr)
def __getitem__(self, key):
layers = list(self._children.values())[key]
if isinstance(layers, list):
net = type(self)(prefix=self._prefix)
with net.name_scope():
net.add(*layers)
return net
else:
return layers
def __len__(self):
return len(self._children)
class Dense(HybridBlock):
r"""Just your regular densely-connected NN layer.
`Dense` implements the operation:
`output = activation(dot(input, weight) + bias)`
where `activation` is the element-wise activation function
passed as the `activation` argument, `weight` is a weights matrix
created by the layer, and `bias` is a bias vector created by the layer
(only applicable if `use_bias` is `True`).
Note: the input must be a tensor with rank 2. Use `flatten` to convert it
to rank 2 manually if necessary.
Parameters
----------
units : int
Dimensionality of the output space.
activation : str
Activation function to use. See help on `Activation` layer.
If you don't specify anything, no activation is applied
(ie. "linear" activation: `a(x) = x`).
use_bias : bool
Whether the layer uses a bias vector.
flatten: bool
Whether the input tensor should be flattened.
If true, all but the first axis of input data are collapsed together.
If false, all but the last axis of input data are kept the same, and the transformation
applies on the last axis.
dtype : str or np.dtype, default 'float32'
Data type of output embeddings.
weight_initializer : str or `Initializer`
Initializer for the `kernel` weights matrix.
bias_initializer: str or `Initializer`
Initializer for the bias vector.
in_units : int, optional
Size of the input data. If not specified, initialization will be
deferred to the first time `forward` is called and `in_units`
will be inferred from the shape of input data.
prefix : str or None
See document of `Block`.
params : ParameterDict or None
See document of `Block`.
Inputs:
- **data**: if `flatten` is True, `data` should be a tensor with shape
`(batch_size, x1, x2, ..., xn)`, where x1 * x2 * ... * xn is equal to
`in_units`. If `flatten` is False, `data` should have shape
`(x1, x2, ..., xn, in_units)`.
Outputs:
- **out**: if `flatten` is True, `out` will be a tensor with shape
`(batch_size, units)`. If `flatten` is False, `out` will have shape
`(x1, x2, ..., xn, units)`.
"""
def __init__(self, units, activation=None, use_bias=True, flatten=True,
dtype='float32', weight_initializer=None, bias_initializer='zeros',
in_units=0, **kwargs):
super(Dense, self).__init__(**kwargs)
self._flatten = flatten
with self.name_scope():
self._units = units
self._in_units = in_units
self.weight = self.params.get('weight', shape=(units, in_units),
init=weight_initializer, dtype=dtype,
allow_deferred_init=True)
if use_bias:
self.bias = self.params.get('bias', shape=(units,),
init=bias_initializer, dtype=dtype,
allow_deferred_init=True)
else:
self.bias = None
if activation is not None:
self.act = Activation(activation, prefix=activation+'_')
else:
self.act = None
def hybrid_forward(self, F, x, weight, bias=None):
act = F.FullyConnected(x, weight, bias, no_bias=bias is None, num_hidden=self._units,
flatten=self._flatten, name='fwd')
if self.act is not None:
act = self.act(act)
return act
def __repr__(self):
s = '{name}({layout}, {act})'
shape = self.weight.shape
return s.format(name=self.__class__.__name__,
act=self.act if self.act else 'linear',
layout='{0} -> {1}'.format(shape[1] if shape[1] else None, shape[0]))
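# Hedged usage sketch (illustrative, not part of the original file): with the
# deferred shape inference described above, `in_units` can be omitted and is
# picked up on the first forward pass.
#
#     net = Dense(64, activation='relu')            # weight shape deferred
#     net.initialize()
#     out = net(nd.random.uniform(shape=(8, 32)))   # out.shape == (8, 64)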
class Dropout(HybridBlock):
"""Applies Dropout to the input.
Dropout consists in randomly setting a fraction `rate` of input units
to 0 at each update during training time, which helps prevent overfitting.
Parameters
----------
rate : float
Fraction of the input units to drop. Must be a number between 0 and 1.
axes : tuple of int, default ()
The axes on which dropout mask is shared. If empty, regular dropout is applied.
Inputs:
- **data**: input tensor with arbitrary shape.
Outputs:
- **out**: output tensor with the same shape as `data`.
References
----------
`Dropout: A Simple Way to Prevent Neural Networks from Overfitting
<http://www.cs.toronto.edu/~rsalakhu/papers/srivastava14a.pdf>`_
"""
def __init__(self, rate, axes=(), **kwargs):
super(Dropout, self).__init__(**kwargs)
self._rate = rate
self._axes = axes
def hybrid_forward(self, F, x):
return F.Dropout(x, p=self._rate, axes=self._axes, name='fwd')
def __repr__(self):
s = '{name}(p = {_rate}, axes={_axes})'
return s.format(name=self.__class__.__name__,
**self.__dict__)
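# Hedged usage sketch (illustrative, not part of the original file): with the
# default empty `axes` every element is dropped independently; sharing the
# mask over the spatial axes of NCHW data drops whole feature maps per sample
# (often called spatial dropout).
#
#     drop = Dropout(0.5, axes=(2, 3))   # one mask value per (N, C) position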
class BatchNorm(HybridBlock):
"""Batch normalization layer (Ioffe and Szegedy, 2014).
Normalizes the input at each batch, i.e. applies a transformation
that maintains the mean activation close to 0 and the activation
standard deviation close to 1.
Parameters
----------
axis : int, default 1
The axis that should be normalized. This is typically the channels
(C) axis. For instance, after a `Conv2D` layer with `layout='NCHW'`,
set `axis=1` in `BatchNorm`. If `layout='NHWC'`, then set `axis=3`.
momentum: float, default 0.9
Momentum for the moving average.
epsilon: float, default 1e-5
Small float added to variance to avoid dividing by zero.
center: bool, default True
If True, add offset of `beta` to normalized tensor.
If False, `beta` is ignored.
scale: bool, default True
If True, multiply by `gamma`. If False, `gamma` is not used.
When the next layer is linear (also e.g. `nn.relu`),
this can be disabled since the scaling
will be done by the next layer.
use_global_stats: bool, default False
        If True, use global moving statistics instead of local batch-norm. This will
        force batch-norm to become a scale-shift operator.
If False, use local batch-norm.
beta_initializer: str or `Initializer`, default 'zeros'
Initializer for the beta weight.
gamma_initializer: str or `Initializer`, default 'ones'
Initializer for the gamma weight.
moving_mean_initializer: str or `Initializer`, default 'zeros'
Initializer for the moving mean.
moving_variance_initializer: str or `Initializer`, default 'ones'
Initializer for the moving variance.
in_channels : int, default 0
Number of channels (feature maps) in input data. If not specified,
initialization will be deferred to the first time `forward` is called
and `in_channels` will be inferred from the shape of input data.
Inputs:
- **data**: input tensor with arbitrary shape.
Outputs:
- **out**: output tensor with the same shape as `data`.
"""
def __init__(self, axis=1, momentum=0.9, epsilon=1e-5, center=True, scale=True,
use_global_stats=False, beta_initializer='zeros', gamma_initializer='ones',
running_mean_initializer='zeros', running_variance_initializer='ones',
in_channels=0, **kwargs):
super(BatchNorm, self).__init__(**kwargs)
self._kwargs = {'axis': axis, 'eps': epsilon, 'momentum': momentum,
'fix_gamma': not scale, 'use_global_stats': use_global_stats}
if in_channels != 0:
self.in_channels = in_channels
self.gamma = self.params.get('gamma', grad_req='write' if scale else 'null',
shape=(in_channels,), init=gamma_initializer,
allow_deferred_init=True,
differentiable=scale)
self.beta = self.params.get('beta', grad_req='write' if center else 'null',
shape=(in_channels,), init=beta_initializer,
allow_deferred_init=True,
differentiable=center)
self.running_mean = self.params.get('running_mean', grad_req='null',
shape=(in_channels,),
init=running_mean_initializer,
allow_deferred_init=True,
differentiable=False)
self.running_var = self.params.get('running_var', grad_req='null',
shape=(in_channels,),
init=running_variance_initializer,
allow_deferred_init=True,
differentiable=False)
def cast(self, dtype):
if np.dtype(dtype).name == 'float16':
dtype = 'float32'
super(BatchNorm, self).cast(dtype)
def hybrid_forward(self, F, x, gamma, beta, running_mean, running_var):
return F.BatchNorm(x, gamma, beta, running_mean, running_var,
name='fwd', **self._kwargs)
def __repr__(self):
s = '{name}({content}'
in_channels = self.gamma.shape[0]
s += ', in_channels={0}'.format(in_channels if in_channels else None)
s += ')'
return s.format(name=self.__class__.__name__,
content=', '.join(['='.join([k, v.__repr__()])
for k, v in self._kwargs.items()]))
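# Hedged usage sketch (illustrative, not part of the original file): BatchNorm
# is normally placed between a linear/convolution layer and its activation,
# with `in_channels` left to be inferred:
#
#     net = HybridSequential()
#     with net.name_scope():
#         net.add(Dense(128), BatchNorm(), Activation('relu'), Dense(10))
#     net.initialize()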
class Embedding(HybridBlock):
r"""Turns non-negative integers (indexes/tokens) into dense vectors
of fixed size. eg. [4, 20] -> [[0.25, 0.1], [0.6, -0.2]]
Note: if `sparse_grad` is set to True, the gradient w.r.t weight will be
sparse. Only a subset of optimizers support sparse gradients, including SGD, AdaGrad
and Adam. By default lazy updates is turned on, which may perform differently
from standard updates. For more details, please check the Optimization API at:
https://mxnet.incubator.apache.org/api/python/optimization/optimization.html
Parameters
----------
input_dim : int
Size of the vocabulary, i.e. maximum integer index + 1.
output_dim : int
Dimension of the dense embedding.
dtype : str or np.dtype, default 'float32'
Data type of output embeddings.
weight_initializer : Initializer
Initializer for the `embeddings` matrix.
sparse_grad: bool
If True, gradient w.r.t. weight will be a 'row_sparse' NDArray.
Inputs:
- **data**: (N-1)-D tensor with shape: `(x1, x2, ..., xN-1)`.
Output:
- **out**: N-D tensor with shape: `(x1, x2, ..., xN-1, output_dim)`.
"""
def __init__(self, input_dim, output_dim, dtype='float32',
weight_initializer=None, sparse_grad=False, **kwargs):
super(Embedding, self).__init__(**kwargs)
grad_stype = 'row_sparse' if sparse_grad else 'default'
self._kwargs = {'input_dim': input_dim, 'output_dim': output_dim,
'dtype': dtype, 'sparse_grad': sparse_grad}
self.weight = self.params.get('weight', shape=(input_dim, output_dim),
init=weight_initializer, dtype=dtype,
allow_deferred_init=True, grad_stype=grad_stype)
def hybrid_forward(self, F, x, weight):
return F.Embedding(x, weight, name='fwd', **self._kwargs)
def __repr__(self):
s = '{block_name}({input_dim} -> {output_dim}, {dtype})'
return s.format(block_name=self.__class__.__name__,
**self._kwargs)
class Flatten(HybridBlock):
r"""Flattens the input to two dimensional.
Inputs:
- **data**: input tensor with arbitrary shape `(N, x1, x2, ..., xn)`
Output:
- **out**: 2D tensor with shape: `(N, x1 \cdot x2 \cdot ... \cdot xn)`
"""
def __init__(self, **kwargs):
super(Flatten, self).__init__(**kwargs)
def hybrid_forward(self, F, x):
return x.reshape((0, -1))
def __repr__(self):
return self.__class__.__name__
class InstanceNorm(HybridBlock):
r"""
Applies instance normalization to the n-dimensional input array.
This operator takes an n-dimensional input array where (n>2) and normalizes
the input using the following formula:
.. math::
\bar{C} = \{i \mid i \neq 0, i \neq axis\}
out = \frac{x - mean[data, \bar{C}]}{ \sqrt{Var[data, \bar{C}]} + \epsilon}
* gamma + beta
Parameters
----------
axis : int, default 1
The axis that will be excluded in the normalization process. This is typically the channels
(C) axis. For instance, after a `Conv2D` layer with `layout='NCHW'`,
set `axis=1` in `InstanceNorm`. If `layout='NHWC'`, then set `axis=3`. Data will be
normalized along axes excluding the first axis and the axis given.
epsilon: float, default 1e-5
Small float added to variance to avoid dividing by zero.
center: bool, default True
If True, add offset of `beta` to normalized tensor.
If False, `beta` is ignored.
scale: bool, default True
If True, multiply by `gamma`. If False, `gamma` is not used.
When the next layer is linear (also e.g. `nn.relu`),
this can be disabled since the scaling
will be done by the next layer.
beta_initializer: str or `Initializer`, default 'zeros'
Initializer for the beta weight.
gamma_initializer: str or `Initializer`, default 'ones'
Initializer for the gamma weight.
in_channels : int, default 0
Number of channels (feature maps) in input data. If not specified,
initialization will be deferred to the first time `forward` is called
and `in_channels` will be inferred from the shape of input data.
Inputs:
- **data**: input tensor with arbitrary shape.
Outputs:
- **out**: output tensor with the same shape as `data`.
References
----------
`Instance Normalization: The Missing Ingredient for Fast Stylization
<https://arxiv.org/abs/1607.08022>`_
Examples
--------
>>> # Input of shape (2,1,2)
>>> x = mx.nd.array([[[ 1.1, 2.2]],
... [[ 3.3, 4.4]]])
>>> # Instance normalization is calculated with the above formula
>>> layer = InstanceNorm()
>>> layer.initialize(ctx=mx.cpu(0))
>>> layer(x)
[[[-0.99998355 0.99998331]]
[[-0.99998319 0.99998361]]]
<NDArray 2x1x2 @cpu(0)>
"""
def __init__(self, axis=1, epsilon=1e-5, center=True, scale=False,
beta_initializer='zeros', gamma_initializer='ones',
in_channels=0, **kwargs):
super(InstanceNorm, self).__init__(**kwargs)
self._kwargs = {'eps': epsilon, 'axis': axis, 'center': center, 'scale': scale}
self._axis = axis
self._epsilon = epsilon
self.gamma = self.params.get('gamma', grad_req='write' if scale else 'null',
shape=(in_channels,), init=gamma_initializer,
allow_deferred_init=True)
self.beta = self.params.get('beta', grad_req='write' if center else 'null',
shape=(in_channels,), init=beta_initializer,
allow_deferred_init=True)
def hybrid_forward(self, F, x, gamma, beta):
if self._axis == 1:
return F.InstanceNorm(x, gamma, beta,
name='fwd', eps=self._epsilon)
x = x.swapaxes(1, self._axis)
return F.InstanceNorm(x, gamma, beta, name='fwd',
eps=self._epsilon).swapaxes(1, self._axis)
def __repr__(self):
s = '{name}({content}'
in_channels = self.gamma.shape[0]
s += ', in_channels={0}'.format(in_channels)
s += ')'
return s.format(name=self.__class__.__name__,
content=', '.join(['='.join([k, v.__repr__()])
for k, v in self._kwargs.items()]))
class LayerNorm(HybridBlock):
r"""
Applies layer normalization to the n-dimensional input array.
This operator takes an n-dimensional input array and normalizes
the input using the given axis:
.. math::
out = \frac{x - mean[data, axis]}{ \sqrt{Var[data, axis]} + \epsilon} * gamma + beta
Parameters
----------
axis : int, default -1
The axis that should be normalized. This is typically the axis of the channels.
epsilon: float, default 1e-5
Small float added to variance to avoid dividing by zero.
center: bool, default True
If True, add offset of `beta` to normalized tensor.
If False, `beta` is ignored.
scale: bool, default True
If True, multiply by `gamma`. If False, `gamma` is not used.
beta_initializer: str or `Initializer`, default 'zeros'
Initializer for the beta weight.
gamma_initializer: str or `Initializer`, default 'ones'
Initializer for the gamma weight.
in_channels : int, default 0
Number of channels (feature maps) in input data. If not specified,
initialization will be deferred to the first time `forward` is called
and `in_channels` will be inferred from the shape of input data.
Inputs:
- **data**: input tensor with arbitrary shape.
Outputs:
- **out**: output tensor with the same shape as `data`.
References
----------
`Layer Normalization
<https://arxiv.org/pdf/1607.06450.pdf>`_
Examples
--------
>>> # Input of shape (2, 5)
>>> x = mx.nd.array([[1, 2, 3, 4, 5], [1, 1, 2, 2, 2]])
>>> # Layer normalization is calculated with the above formula
>>> layer = LayerNorm()
>>> layer.initialize(ctx=mx.cpu(0))
>>> layer(x)
[[-1.41421 -0.707105 0. 0.707105 1.41421 ]
[-1.2247195 -1.2247195 0.81647956 0.81647956 0.81647956]]
<NDArray 2x5 @cpu(0)>
"""
def __init__(self, axis=-1, epsilon=1e-5, center=True, scale=True,
beta_initializer='zeros', gamma_initializer='ones',
in_channels=0, prefix=None, params=None):
super(LayerNorm, self).__init__(prefix=prefix, params=params)
self._kwargs = {'eps': epsilon, 'axis': axis, 'center': center, 'scale': scale}
self._axis = axis
self._epsilon = epsilon
self._center = center
self._scale = scale
self.gamma = self.params.get('gamma', grad_req='write' if scale else 'null',
shape=(in_channels,), init=gamma_initializer,
allow_deferred_init=True)
self.beta = self.params.get('beta', grad_req='write' if center else 'null',
shape=(in_channels,), init=beta_initializer,
allow_deferred_init=True)
def hybrid_forward(self, F, data, gamma, beta):
norm_data = F.LayerNorm(data, gamma=gamma, beta=beta, axis=self._axis, eps=self._epsilon)
return norm_data
def __repr__(self):
s = '{name}({content}'
in_channels = self.gamma.shape[0]
s += ', in_channels={0}'.format(in_channels)
s += ')'
return s.format(name=self.__class__.__name__,
content=', '.join(['='.join([k, v.__repr__()])
for k, v in self._kwargs.items()]))
class Lambda(Block):
r"""Wraps an operator or an expression as a Block object.
Parameters
----------
function : str or function
Function used in lambda must be one of the following:
1) the name of an operator that is available in ndarray. For example::
block = Lambda('tanh')
2) a function that conforms to "def function(*args)". For example::
block = Lambda(lambda x: nd.LeakyReLU(x, slope=0.1))
Inputs:
- ** *args **: one or more input data. Their shapes depend on the function.
Output:
- ** *outputs **: one or more output data. Their shapes depend on the function.
"""
def __init__(self, function, prefix=None):
super(Lambda, self).__init__(prefix=prefix)
if isinstance(function, str):
assert hasattr(nd, function), \
"Function name %s is not found in ndarray." % function
self._func_impl = getattr(nd, function)
elif callable(function):
self._func_impl = function
else:
raise ValueError(
"Unrecognized function in lambda: {} of type {}"
.format(function, type(function)))
def forward(self, *args):
return self._func_impl(*args)
def __repr__(self):
return '{name}({function})'.format(name=self.__class__.__name__,
function=self._func_impl.__name__)
class HybridLambda(HybridBlock):
r"""Wraps an operator or an expression as a HybridBlock object.
Parameters
----------
function : str or function
Function used in lambda must be one of the following:
1) the name of an operator that is available in both symbol and ndarray. For example::
block = HybridLambda('tanh')
2) a function that conforms to "def function(F, data, *args)". For example::
block = HybridLambda(lambda F, x: F.LeakyReLU(x, slope=0.1))
Inputs:
- ** *args **: one or more input data. First argument must be symbol or ndarray.
Their shapes depend on the function.
Output:
- ** *outputs **: one or more output data. Their shapes depend on the function.
"""
def __init__(self, function, prefix=None):
super(HybridLambda, self).__init__(prefix=prefix)
if isinstance(function, str):
assert hasattr(nd, function) and hasattr(sym, function), \
"Function name %s is not found in symbol/ndarray." % function
func_dict = {sym: getattr(sym, function), nd: getattr(nd, function)}
self._func = lambda F, *args: func_dict[F](*args)
self._func_name = function
elif callable(function):
self._func = function
self._func_name = function.__name__
else:
raise ValueError(
"Unrecognized function in lambda: {} of type {}"
.format(function, type(function)))
def hybrid_forward(self, F, x, *args):
return self._func(F, x, *args)
def __repr__(self):
return '{name}({function})'.format(name=self.__class__.__name__,
function=self._func_name)
| precedenceguo/mxnet | python/mxnet/gluon/nn/basic_layers.py | Python | apache-2.0 | 28,158 |
import copy
import pylab
import random
import numpy as np
from environment import Env
from keras.layers import Dense
from keras.optimizers import Adam
from keras.models import Sequential
EPISODES = 1000
# Deep SARSA agent for the grid world example
class DeepSARSAgent:
def __init__(self):
self.load_model = False
        # define all possible actions the agent can take
self.action_space = [0, 1, 2, 3, 4]
        # define the sizes of the state and the action spaces
self.action_size = len(self.action_space)
self.state_size = 15
self.discount_factor = 0.99
self.learning_rate = 0.001
self.epsilon = 1. # exploration
self.epsilon_decay = .9999
self.epsilon_min = 0.01
self.model = self.build_model()
if self.load_model:
self.epsilon = 0.05
self.model.load_weights('./save_model/deep_sarsa_trained.h5')
    # build a neural network that takes the state as input and outputs Q-values
def build_model(self):
model = Sequential()
model.add(Dense(30, input_dim=self.state_size, activation='relu'))
model.add(Dense(30, activation='relu'))
model.add(Dense(self.action_size, activation='linear'))
model.summary()
model.compile(loss='mse', optimizer=Adam(lr=self.learning_rate))
return model
    # choose an action with an epsilon-greedy policy
def get_action(self, state):
if np.random.rand() <= self.epsilon:
            # return a random action
return random.randrange(self.action_size)
else:
            # derive the action from the model
state = np.float32(state)
q_values = self.model.predict(state)
return np.argmax(q_values[0])
def train_model(self, state, action, reward, next_state, next_action, done):
if self.epsilon > self.epsilon_min:
self.epsilon *= self.epsilon_decay
state = np.float32(state)
next_state = np.float32(next_state)
target = self.model.predict(state)[0]
        # SARSA Q-function update rule
if done:
target[action] = reward
else:
target[action] = (reward + self.discount_factor *
self.model.predict(next_state)[0][next_action])
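            # i.e. the on-policy SARSA target  r + gamma * Q(s', a'), where a' is
            # the action actually selected by the same epsilon-greedy policy
            # (Q-learning would use max_a Q(s', a) instead)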
        # reshape the target output
target = np.reshape(target, [1, 5])
        # update the neural network
self.model.fit(state, target, epochs=1, verbose=0)
if __name__ == "__main__":
    # create the environment and the agent
env = Env()
agent = DeepSARSAgent()
global_step = 0
scores, episodes = [], []
for e in range(EPISODES):
done = False
score = 0
state = env.reset()
state = np.reshape(state, [1, 15])
while not done:
            # initialize env
global_step += 1
            # choose an action for the current state
action = agent.get_action(state)
            # take one timestep in the environment with the chosen action and collect a sample
next_state, reward, done = env.step(action)
next_state = np.reshape(next_state, [1, 15])
next_action = agent.get_action(next_state)
            # train the model with the sample
agent.train_model(state, action, reward, next_state, next_action,
done)
state = next_state
score += reward
state = copy.deepcopy(next_state)
if done:
                # print the training result at the end of each episode
scores.append(score)
episodes.append(e)
pylab.plot(episodes, scores, 'b')
pylab.savefig("./save_graph/deep_sarsa_.png")
print("episode:", e, " score:", score, "global_step",
global_step, " epsilon:", agent.epsilon)
            # save the model every 100 episodes
if e % 100 == 0:
agent.model.save_weights("./save_model/deep_sarsa.h5")
| rlcode/reinforcement-learning-kr | 1-grid-world/6-deep-sarsa/deep_sarsa_agent.py | Python | mit | 4,030 |
"""
File info
"""
# FIXME: No idea if this is necessary
__all__ = ["addup", "treebuilder", "treeprinter", "node"] | fourpoints/addup | __init__.py | Python | mit | 114 |
"""Entrance of Postprocess for SEIMS.
@author : Liangjun Zhu, Huiran Gao
@changelog:
- 17-08-17 - lj - redesign and rewrite the plotting program.
- 18-02-09 - lj - compatible with Python3.
"""
from __future__ import absolute_import, unicode_literals
import os
import sys
if os.path.abspath(os.path.join(sys.path[0], '..')) not in sys.path:
sys.path.insert(0, os.path.abspath(os.path.join(sys.path[0], '..')))
from postprocess.config import parse_ini_configuration
from postprocess.plot_timeseries import TimeSeriesPlots
def main():
"""Main workflow."""
cfg = parse_ini_configuration()
TimeSeriesPlots(cfg).generate_plots()
if __name__ == "__main__":
main()
| lreis2415/SEIMS | seims/postprocess/main.py | Python | gpl-3.0 | 707 |
"""
Poll for market data every 1 second and print bid/ask in blue/red
"""
import sys
import threading
from citytrader.request_client import RequestClient
from citytrader.helpers import price_to_decimal
# gather user auth input and init RequestClient()
def main():
client_id = raw_input('Enter your client_id: ')
client_secret = raw_input('Enter your client_secret: ')
username = raw_input('Enter your username: ')
password = raw_input('Enter your password: ')
rc = RequestClient(server="https://devservices.optionshop.com", client_id=client_id, client_secret=client_secret, username=username, password=password)
instrument_id = raw_input('Enter instrument_id: ')
instrument = rc.request(request_type="GET", url="instruments/%s" % instrument_id)
product = rc.request(request_type="GET", url="products/%s" % instrument["data"]["product_id"])
display_factor = product["data"]["display_factor"]
base_factor = product["data"]["base_factor"]
sys.stdout.write("\x1b]2;" + instrument["data"]["name"] + " market data poller" + "\x07")
print '\033[1;34m' + '%8s' % 'Bid' + '\033[1;m', '\033[1;31m' + '%20s' % 'Ask' + '\033[1;m', '\033[0m'
poll_md(rc, instrument_id, display_factor, base_factor)
# print md
def poll_md(rc, instrument_id, display_factor, base_factor):
threading.Timer(1.0, poll_md, [rc, instrument_id, display_factor, base_factor]).start()
md_message = rc.request(request_type="GET", url="marketdata?instrument_ids=%s" % instrument_id)
buy_price = None
buy_quantity = None
sell_price = None
sell_quantity = None
for i in md_message["data"]:
if i['side'] == "Buy":
buy_price = price_to_decimal(i['price'], display_factor, base_factor)
buy_quantity = i['quantity']
elif i['side'] == "Sell":
sell_price = price_to_decimal(i['price'], display_factor, base_factor)
sell_quantity = i['quantity']
if buy_price and sell_price:
bid_str = "%s @ %s" % (buy_quantity, buy_price)
ask_str = "%s @ %s" % (sell_quantity, sell_price)
print '\033[1;34m' + bid_str + '\033[1;m', '\033[1;31m' + '%20s' % ask_str + '\033[1;m', '\033[0m'
if __name__ == "__main__":
main()
| optionscity/city-trader-python-lib | examples/market_data_poller.py | Python | gpl-2.0 | 2,246 |
'''
This software is released under an MIT/X11 open source license.
Copyright 2012-2013 Diffeo, Inc.
'''
from __future__ import division
import os
import math
import hashlib
from itertools import ifilter, imap
try:
from collections import Counter
except ImportError:
from backport_collections import Counter
from nltk.corpus import stopwords
from _clean_visible import cleanse
def tps(text, min_token_len=2, quant_rate=0.01):
'''
:param text: tag-free UTF-8 string of free text
:returns string: 32-character md5 hash in hexadecimal
Python implementation of the TextProfileSignature provided in
SOLR. Unlike most other locality sensitive hashes, a TPS can be
indexed as a searchable property of each document that does not
require n^2 comparisons to find duplicates.
http://wiki.apache.org/solr/TextProfileSignature
'''
counts = Counter(
ifilter(lambda x: len(x) >= min_token_len,
imap(cleanse, text.split())))
max_freq = counts.most_common(1)[0][1]
if max_freq <= 1:
quant = 1
else:
quant = max(2, round(max_freq * quant_rate))
to_hash = []
for word, count in counts.most_common():
if count <= quant:
break
to_hash += [word, str(int(math.floor(count / quant)))]
to_hash = u' '.join(to_hash)
return hashlib.md5(to_hash.encode('utf8')).hexdigest()
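# Hedged usage sketch (illustrative only; never called by the library): because
# the signature is an ordinary 32-character hex string, near-duplicates can be
# grouped with a dictionary lookup instead of O(n^2) pairwise comparisons.
def _tps_dedup_sketch(documents):
    '''documents: iterable of (doc_id, clean_visible_text) pairs (assumed shape).'''
    by_signature = {}
    for doc_id, clean_visible in documents:
        by_signature.setdefault(tps(clean_visible), []).append(doc_id)
    # every bucket with more than one doc_id is a candidate duplicate cluster
    return [ids for ids in by_signature.values() if len(ids) > 1]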
| trec-kba/streamcorpus-pipeline | streamcorpus_pipeline/text_profile_signature.py | Python | mit | 1,404 |
# Copyright 2017 Capital One Services, LLC
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
import json
from .common import BaseTest, functional, Bag
from botocore.exceptions import ClientError
from c7n.exceptions import PolicyValidationError
from c7n.resources.ecr import lifecycle_rule_validate
class TestECR(BaseTest):
def test_rule_validation(self):
policy = Bag(name='xyz')
with self.assertRaises(PolicyValidationError) as ecm:
lifecycle_rule_validate(
policy, {'selection': {'tagStatus': 'tagged'}})
self.assertIn('tagPrefixList required', str(ecm.exception))
with self.assertRaises(PolicyValidationError) as ecm:
lifecycle_rule_validate(
policy, {'selection': {
'tagStatus': 'untagged',
'countNumber': 10, 'countUnit': 'days',
'countType': 'imageCountMoreThan'}})
self.assertIn('countUnit invalid', str(ecm.exception))
def create_repository(self, client, name):
""" Create the named repository. Delete existing one first if applicable. """
existing_repos = {
r["repositoryName"]
for r in client.describe_repositories().get("repositories")
}
if name in existing_repos:
client.delete_repository(repositoryName=name)
client.create_repository(repositoryName=name)
self.addCleanup(client.delete_repository, repositoryName=name)
def test_ecr_set_scanning(self):
factory = self.replay_flight_data('test_ecr_set_scanning')
p = self.load_policy({
'name': 'ecr-set-scanning',
'resource': 'aws.ecr',
'filters': [
{'repositoryName': 'testrepo'},
{'imageScanningConfiguration.scanOnPush': False}],
'actions': ['set-scanning']}, session_factory=factory)
resources = p.run()
self.assertEqual(len(resources), 1)
self.assertEqual(resources[0]['repositoryName'], 'testrepo')
client = factory().client('ecr')
repo = client.describe_repositories(repositoryNames=['testrepo'])[
'repositories'][0]
self.assertJmes(
'imageScanningConfiguration.scanOnPush', repo, True)
def test_ecr_set_immutability(self):
factory = self.replay_flight_data('test_ecr_set_immutability')
p = self.load_policy({
'name': 'ecr-set-immutability',
'resource': 'aws.ecr',
'filters': [
{'repositoryName': 'testrepo'},
{'imageTagMutability': 'MUTABLE'}],
'actions': [{'type': 'set-immutability'}]},
session_factory=factory)
resources = p.run()
self.assertEqual(len(resources), 1)
self.assertEqual(resources[0]['repositoryName'], 'testrepo')
client = factory().client('ecr')
repo = client.describe_repositories(repositoryNames=['testrepo'])[
'repositories'][0]
self.assertEqual(repo['imageTagMutability'], 'IMMUTABLE')
def test_ecr_lifecycle_policy(self):
session_factory = self.replay_flight_data('test_ecr_lifecycle_update')
rule = {
"rulePriority": 1,
"description": "Expire images older than 14 days",
"selection": {
"tagStatus": "untagged",
"countType": "sinceImagePushed",
"countUnit": "days",
"countNumber": 14
},
"action": {
"type": "expire"
}
}
p = self.load_policy({
'name': 'ecr-update',
'resource': 'aws.ecr',
'filters': [
{'repositoryName': 'c7n'},
{'type': 'lifecycle-rule',
'state': False}],
'actions': [{
'type': 'set-lifecycle',
'rules': [rule]}]},
session_factory=session_factory)
resources = p.run()
self.assertEqual(len(resources), 1)
client = session_factory().client('ecr')
policy = json.loads(
client.get_lifecycle_policy(
repositoryName='c7n')['lifecyclePolicyText'])
self.assertEqual(policy, {'rules': [rule]})
def test_ecr_lifecycle_delete(self):
session_factory = self.replay_flight_data('test_ecr_lifecycle_delete')
p = self.load_policy({
'name': 'ecr-update',
'resource': 'aws.ecr',
'filters': [
{'repositoryName': 'c7n'},
{'type': 'lifecycle-rule',
'state': True,
'match': [
{'action.type': 'expire'},
{'selection.tagStatus': 'untagged'}]}],
'actions': [{
'type': 'set-lifecycle',
'state': False}]},
session_factory=session_factory)
resources = p.run()
self.assertEqual(len(resources), 1)
client = session_factory().client('ecr')
self.assertRaises(
client.exceptions.ClientError,
client.get_lifecycle_policy,
repositoryName='c7n')
def test_ecr_tags(self):
factory = self.replay_flight_data('test_ecr_tags')
p = self.load_policy({
'name': 'ecr-tag',
'resource': 'ecr',
'filters': [{'tag:Role': 'Dev'}],
'actions': [
{'type': 'tag',
'tags': {'Env': 'Dev'}},
{'type': 'remove-tag',
'tags': ['Role']},
{'type': 'mark-for-op',
'op': 'post-finding',
'days': 2}]},
session_factory=factory)
resources = p.run()
self.assertEqual(len(resources), 1)
client = factory().client('ecr')
tags = {t['Key']: t['Value'] for t in
client.list_tags_for_resource(
resourceArn=resources[0]['repositoryArn']).get('tags')}
self.assertEqual(
tags,
{'Env': 'Dev',
'maid_status': 'Resource does not meet policy: post-finding@2019/02/07'})
@functional
def test_ecr_no_policy(self):
# running against a registry with no policy causes no issues.
session_factory = self.replay_flight_data("test_ecr_no_policy")
client = session_factory().client("ecr")
name = "test-ecr-no-policy"
self.create_repository(client, name)
p = self.load_policy(
{
"name": "ecr-stat-3",
"resource": "ecr",
"filters": [{"repositoryName": name}],
"actions": [{"type": "remove-statements", "statement_ids": ["abc"]}],
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual([r["repositoryName"] for r in resources], [name])
@functional
def test_ecr_remove_matched(self):
session_factory = self.replay_flight_data("test_ecr_remove_matched")
client = session_factory().client("ecr")
name = "test-ecr-remove-matched"
self.create_repository(client, name)
client.set_repository_policy(
repositoryName=name,
policyText=json.dumps(
{
"Version": "2008-10-17",
"Statement": [
{
"Sid": "SpecificAllow",
"Effect": "Allow",
"Principal": {"AWS": "arn:aws:iam::185106417252:root"},
"Action": [
"ecr:GetDownloadUrlForLayer",
"ecr:BatchGetImage",
"ecr:BatchCheckLayerAvailability",
"ecr:ListImages",
"ecr:DescribeImages",
],
},
{
"Sid": "Public",
"Effect": "Allow",
"Principal": "*",
"Action": [
"ecr:GetDownloadUrlForLayer",
"ecr:BatchGetImage",
"ecr:BatchCheckLayerAvailability",
],
},
],
}
),
)
p = self.load_policy(
{
"name": "ecr-stat-2",
"resource": "ecr",
"filters": [
{"repositoryName": name},
{"type": "cross-account", "whitelist": ["185106417252"]},
],
"actions": [{"type": "remove-statements", "statement_ids": "matched"}],
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual([r["repositoryName"] for r in resources], [name])
data = json.loads(
client.get_repository_policy(
repositoryName=resources[0]["repositoryName"]
).get(
"policyText"
)
)
self.assertEqual(
[s["Sid"] for s in data.get("Statement", ())], ["SpecificAllow"]
)
@functional
def test_ecr_remove_named(self):
# pre-requisites empty repo - no policy
# pre-requisites abc repo - policy w/ matched statement id
session_factory = self.replay_flight_data("test_ecr_remove_named")
client = session_factory().client("ecr")
name = "test-xyz"
self.create_repository(client, name)
client.set_repository_policy(
repositoryName=name,
policyText=json.dumps(
{
"Version": "2008-10-17",
"Statement": [
{
"Sid": "WhatIsIt",
"Effect": "Allow",
"Principal": "*",
"Action": ["ecr:Get*", "ecr:Batch*"],
}
],
}
),
)
p = self.load_policy(
{
"name": "ecr-stat",
"resource": "ecr",
"filters": [{"repositoryName": name}],
"actions": [
{"type": "remove-statements", "statement_ids": ["WhatIsIt"]}
],
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 1)
self.assertRaises(
ClientError,
client.get_repository_policy,
repositoryName=resources[0]["repositoryArn"],
)
def test_ecr_set_lifecycle(self):
pass
| capitalone/cloud-custodian | tests/test_ecr.py | Python | apache-2.0 | 10,970 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11a1 on 2017-02-09 13:12
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('lang', '0004_auto_20161222_1459'),
('trans', '0073_auto_20170209_1359'),
]
operations = [
migrations.RemoveField(
model_name='comment',
name='contentsum',
),
migrations.RemoveField(
model_name='suggestion',
name='contentsum',
),
migrations.AlterUniqueTogether(
name='check',
unique_together=set([('content_hash', 'project', 'language', 'check')]),
),
migrations.RemoveField(
model_name='check',
name='contentsum',
),
migrations.AlterUniqueTogether(
name='source',
unique_together=set([('id_hash', 'subproject')]),
),
migrations.RemoveField(
model_name='source',
name='checksum',
),
migrations.AlterUniqueTogether(
name='unit',
unique_together=set([('translation', 'id_hash')]),
),
migrations.RemoveField(
model_name='unit',
name='checksum',
),
migrations.RemoveField(
model_name='unit',
name='contentsum',
),
]
| lem9/weblate | weblate/trans/migrations/0074_auto_20170209_1412.py | Python | gpl-3.0 | 1,412 |
# -*- coding: utf-8 -*-
"""
Created on Tue Oct 25 09:48:58 2016
@author: aitor
"""
from collections import Counter
import json
import sys
from keras.models import Sequential
from keras.models import model_from_json
from keras.layers import Dense, Activation, Dropout
from keras.layers import Convolution2D, MaxPooling2D, Flatten
import numpy as np
# Dataset with vectors but without the action timestamps
DATASET_NO_TIME = 'dataset_no_time.json'
# List of unique actions in the dataset
UNIQUE_ACTIVITIES = 'unique_activities.json'
# Maximum number of actions in an activity
ACTIVITY_MAX_LENGHT = 32
# Number of dimensions in an action
ACTION_MAX_LENGHT = 50
def save_model(model):
json_string = model.to_json()
model_name = 'model_activity_cnn'
open(model_name + '.json', 'w').write(json_string)
model.save_weights(model_name + '.h5', overwrite=True)
def load_model(model_file, weights_file):
    model = model_from_json(open(model_file).read())
    model.load_weights(weights_file)
    return model
def check_activity_distribution(y_np, unique_activities):
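    """Print a Counter of the activity labels decoded from the one-hot rows of y_np."""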
activities = []
for activity_np in y_np:
index = activity_np.tolist().index(1.0)
activities.append(unique_activities[index])
print Counter(activities)
unique_activities = json.load(open(UNIQUE_ACTIVITIES, 'r'))
total_activities = len(unique_activities)
print 'Building model...'
sys.stdout.flush()
classifier = Sequential()
# filter = 3 x ACTION_MAX_LENGHT (50 filters)
# input = 1 channel x ACTIVITY_MAX_LENGHT x ACTION_MAX_LENGHT (1 x 32 x 50)
classifier.add(Convolution2D(50, 3, ACTION_MAX_LENGHT, input_shape=(1,ACTIVITY_MAX_LENGHT,ACTION_MAX_LENGHT),border_mode='valid',activation='relu'))
# Pool over the full height of the convolution output (32 - 3 + 1 = 30)
classifier.add(MaxPooling2D(pool_size=(30,1)))
classifier.add(Flatten())
classifier.add(Dense(512, activation='relu'))
classifier.add(Dropout(0.5))
classifier.add(Dense(total_activities))
classifier.add(Activation('softmax'))
classifier.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy', 'mse', 'mae'])
print 'Model built'
sys.stdout.flush()
print 'Preparing training set...'
print ' - Reading dataset'
sys.stdout.flush()
with open(DATASET_NO_TIME, 'r') as dataset_file:
activities = json.load(dataset_file)
print ' - processing activities'
X = []
y = []
for i, activity in enumerate(activities):
if i % 10000 == 0:
print ' - Number of activities processed:', i
sys.stdout.flush()
actions = []
for action in activity['actions']:
actions.append(np.array(action))
actions_array = np.array(actions)
X.append(actions_array)
y.append(np.array(activity['activity']))
total_examples = len(X)
test_per = 0.2
limit = int(test_per * total_examples)
X_train = X[limit:]
X_test = X[:limit]
y_train = y[limit:]
y_test = y[:limit]
print 'Total examples:', total_examples
print 'Train examples:', len(X_train), len(y_train)
print 'Test examples:', len(X_test), len(y_test)
sys.stdout.flush()
X = np.array(X_train)
y = np.array(y_train)
print 'Activity distribution for training:'
check_activity_distribution(y, unique_activities)
X = X.reshape(X.shape[0], 1, ACTIVITY_MAX_LENGHT, ACTION_MAX_LENGHT)
X_test = np.array(X_test)
y_test = np.array(y_test)
print 'Activity distribution for testing:'
check_activity_distribution(y_test, unique_activities)
X_test = X_test.reshape(X_test.shape[0], 1, ACTIVITY_MAX_LENGHT, ACTION_MAX_LENGHT)
print 'Shape (X,y):'
print X.shape
print y.shape
print 'Training set prepared'
sys.stdout.flush()
print 'Training...'
sys.stdout.flush()
classifier.fit(X, y, batch_size=20, nb_epoch=2000, validation_data=(X_test, y_test))
print 'Saving model...'
sys.stdout.flush()
save_model(classifier)
print 'Model saved'
| aitoralmeida/dl_activity_recognition | cnn/old/cnn_simple_relu.py | Python | gpl-3.0 | 3,729 |
#!/usr/bin/env python
##
from __future__ import print_function
import os
import argparse
import sys
import warnings
import copy
import imp
nodes = imp.load_source('nodes', 'steps/nnet3/components.py')
nnet3_train_lib = imp.load_source('ntl', 'steps/nnet3/nnet3_train_lib.py')
chain_lib = imp.load_source('ncl', 'steps/nnet3/chain/nnet3_chain_lib.py')
def GetArgs():
# we add compulsary arguments as named arguments for readability
parser = argparse.ArgumentParser(description="Writes config files and variables "
"for LSTMs creation and training",
epilog="See steps/nnet3/lstm/train.sh for example.")
# Only one of these arguments can be specified, and one of them has to
# be compulsarily specified
feat_group = parser.add_mutually_exclusive_group(required = True)
feat_group.add_argument("--feat-dim", type=int,
help="Raw feature dimension, e.g. 13")
feat_group.add_argument("--feat-dir", type=str,
help="Feature directory, from which we derive the feat-dim")
# only one of these arguments can be specified
ivector_group = parser.add_mutually_exclusive_group(required = False)
ivector_group.add_argument("--ivector-dim", type=int,
help="iVector dimension, e.g. 100", default=0)
ivector_group.add_argument("--ivector-dir", type=str,
help="iVector dir, which will be used to derive the ivector-dim ", default=None)
num_target_group = parser.add_mutually_exclusive_group(required = True)
num_target_group.add_argument("--num-targets", type=int,
help="number of network targets (e.g. num-pdf-ids/num-leaves)")
num_target_group.add_argument("--ali-dir", type=str,
help="alignment directory, from which we derive the num-targets")
num_target_group.add_argument("--tree-dir", type=str,
help="directory with final.mdl, from which we derive the num-targets")
# General neural network options
parser.add_argument("--splice-indexes", type=str,
help="Splice indexes at input layer, e.g. '-3,-2,-1,0,1,2,3'", required = True, default="0")
parser.add_argument("--xent-regularize", type=float,
help="For chain models, if nonzero, add a separate output for cross-entropy "
"regularization (with learning-rate-factor equal to the inverse of this)",
default=0.0)
parser.add_argument("--include-log-softmax", type=str, action=nnet3_train_lib.StrToBoolAction,
help="add the final softmax layer ", default=True, choices = ["false", "true"])
# LSTM options
parser.add_argument("--num-lstm-layers", type=int,
help="Number of LSTM layers to be stacked", default=1)
parser.add_argument("--cell-dim", type=int,
help="dimension of lstm-cell")
parser.add_argument("--recurrent-projection-dim", type=int,
help="dimension of recurrent projection")
parser.add_argument("--non-recurrent-projection-dim", type=int,
help="dimension of non-recurrent projection")
parser.add_argument("--hidden-dim", type=int,
help="dimension of fully-connected layers")
    # Natural gradient options
parser.add_argument("--ng-per-element-scale-options", type=str,
help="options to be supplied to NaturalGradientPerElementScaleComponent", default="")
parser.add_argument("--ng-affine-options", type=str,
help="options to be supplied to NaturalGradientAffineComponent", default="")
# Gradient clipper options
parser.add_argument("--norm-based-clipping", type=str, action=nnet3_train_lib.StrToBoolAction,
help="use norm based clipping in ClipGradient components ", default=True, choices = ["false", "true"])
parser.add_argument("--clipping-threshold", type=float,
help="clipping threshold used in ClipGradient components, if clipping-threshold=0 no clipping is done", default=30)
parser.add_argument("--self-repair-scale", type=float,
help="A non-zero value activates the self-repair mechanism in the sigmoid and tanh non-linearities of the LSTM", default=None)
# Delay options
parser.add_argument("--label-delay", type=int, default=None,
help="option to delay the labels to make the lstm robust")
parser.add_argument("--lstm-delay", type=str, default=None,
help="option to have different delays in recurrence for each lstm")
parser.add_argument("config_dir",
help="Directory to write config files and variables")
print(' '.join(sys.argv))
args = parser.parse_args()
args = CheckArgs(args)
return args
def CheckArgs(args):
if not os.path.exists(args.config_dir):
os.makedirs(args.config_dir)
## Check arguments.
if args.feat_dir is not None:
args.feat_dim = nnet3_train_lib.GetFeatDim(args.feat_dir)
if args.ali_dir is not None:
args.num_targets = nnet3_train_lib.GetNumberOfLeaves(args.ali_dir)
elif args.tree_dir is not None:
args.num_targets = chain_lib.GetNumberOfLeaves(args.tree_dir)
if args.ivector_dir is not None:
args.ivector_dim = nnet3_train_lib.GetIvectorDim(args.ivector_dir)
if not args.feat_dim > 0:
raise Exception("feat-dim has to be postive")
if not args.num_targets > 0:
print(args.num_targets)
raise Exception("num_targets has to be positive")
if not args.ivector_dim >= 0:
raise Exception("ivector-dim has to be non-negative")
if (args.num_lstm_layers < 1):
sys.exit("--num-lstm-layers has to be a positive integer")
if (args.clipping_threshold < 0):
sys.exit("--clipping-threshold has to be a non-negative")
if args.lstm_delay is None:
args.lstm_delay = [[-1]] * args.num_lstm_layers
else:
try:
args.lstm_delay = ParseLstmDelayString(args.lstm_delay.strip())
except ValueError:
sys.exit("--lstm-delay has incorrect format value. Provided value is '{0}'".format(args.lstm_delay))
if len(args.lstm_delay) != args.num_lstm_layers:
        st = len(args.lstm_delay)
        print('Number of lstm delays provided is ' + repr(st))
sys.exit("--lstm-delay: Number of delays provided has to match --num-lstm-layers")
return args
def PrintConfig(file_name, config_lines):
f = open(file_name, 'w')
f.write("\n".join(config_lines['components'])+"\n")
f.write("\n#Component nodes\n")
f.write("\n".join(config_lines['component-nodes'])+"\n")
f.close()
def ParseSpliceString(splice_indexes, label_delay=None):
## Work out splice_array e.g. splice_array = [ [ -3,-2,...3 ], [0], [-2,2], .. [ -8,8 ] ]
split1 = splice_indexes.split(" "); # we already checked the string is nonempty.
if len(split1) < 1:
splice_indexes = "0"
left_context=0
right_context=0
if label_delay is not None:
left_context = -label_delay
right_context = label_delay
splice_array = []
try:
for i in range(len(split1)):
indexes = map(lambda x: int(x), split1[i].strip().split(","))
print(indexes)
if len(indexes) < 1:
raise ValueError("invalid --splice-indexes argument, too-short element: "
+ splice_indexes)
if (i > 0) and ((len(indexes) != 1) or (indexes[0] != 0)):
raise ValueError("elements of --splice-indexes splicing is only allowed initial layer.")
if not indexes == sorted(indexes):
raise ValueError("elements of --splice-indexes must be sorted: "
+ splice_indexes)
left_context += -indexes[0]
right_context += indexes[-1]
splice_array.append(indexes)
except ValueError as e:
raise ValueError("invalid --splice-indexes argument " + splice_indexes + str(e))
left_context = max(0, left_context)
right_context = max(0, right_context)
return {'left_context':left_context,
'right_context':right_context,
'splice_indexes':splice_array,
'num_hidden_layers':len(splice_array)
}
def ParseLstmDelayString(lstm_delay):
## Work out lstm_delay e.g. "-1 [-1,1] -2" -> list([ [-1], [-1, 1], [-2] ])
split1 = lstm_delay.split(" ");
lstm_delay_array = []
try:
for i in range(len(split1)):
indexes = map(lambda x: int(x), split1[i].strip().lstrip('[').rstrip(']').strip().split(","))
if len(indexes) < 1:
raise ValueError("invalid --lstm-delay argument, too-short element: "
+ lstm_delay)
elif len(indexes) == 2 and indexes[0] * indexes[1] >= 0:
                raise ValueError('Warning: ' + str(indexes) + ' is not a standard BLSTM mode. There should be a negative delay for the forward, and a positive delay for the backward.')
lstm_delay_array.append(indexes)
except ValueError as e:
raise ValueError("invalid --lstm-delay argument " + lstm_delay + str(e))
return lstm_delay_array
def MakeConfigs(config_dir, feat_dim, ivector_dim, num_targets,
splice_indexes, lstm_delay, cell_dim,
recurrent_projection_dim, non_recurrent_projection_dim,
num_lstm_layers, num_hidden_layers,
norm_based_clipping, clipping_threshold,
ng_per_element_scale_options, ng_affine_options,
label_delay, include_log_softmax, xent_regularize, self_repair_scale):
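    """Build init.config plus one layer<i>.config per hidden layer: LSTM/BLSTM
    layers first (one per entry in lstm_delay), then plain affine+ReLU layers,
    each config ending in a final output layer (and an optional 'xent' output
    when xent_regularize is nonzero)."""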
config_lines = {'components':[], 'component-nodes':[]}
config_files={}
prev_layer_output = nodes.AddInputLayer(config_lines, feat_dim, splice_indexes[0], ivector_dim)
# Add the init config lines for estimating the preconditioning matrices
init_config_lines = copy.deepcopy(config_lines)
init_config_lines['components'].insert(0, '# Config file for initializing neural network prior to')
init_config_lines['components'].insert(0, '# preconditioning matrix computation')
nodes.AddOutputLayer(init_config_lines, prev_layer_output)
config_files[config_dir + '/init.config'] = init_config_lines
prev_layer_output = nodes.AddLdaLayer(config_lines, "L0", prev_layer_output, config_dir + '/lda.mat')
#prev_layer_output = nodes.AddAffRelNormLayer(config_lines, "L{0}".format(i+1),
# prev_layer_output, hidden_dim,
# ng_affine_options, self_repair_scale = self_repair_scale)
for i in range(num_lstm_layers):
if len(lstm_delay[i]) == 2: # BLSTM layer case, add both forward and backward
prev_layer_output1 = nodes.AddLstmLayer(config_lines, "BLstm{0}_forward".format(i+1), prev_layer_output, cell_dim,
recurrent_projection_dim, non_recurrent_projection_dim,
clipping_threshold, norm_based_clipping,
ng_per_element_scale_options, ng_affine_options,
lstm_delay = lstm_delay[i][0], self_repair_scale = self_repair_scale)
prev_layer_output2 = nodes.AddLstmLayer(config_lines, "BLstm{0}_backward".format(i+1), prev_layer_output, cell_dim,
recurrent_projection_dim, non_recurrent_projection_dim,
clipping_threshold, norm_based_clipping,
ng_per_element_scale_options, ng_affine_options,
lstm_delay = lstm_delay[i][1], self_repair_scale = self_repair_scale)
prev_layer_output['descriptor'] = 'Append({0}, {1})'.format(prev_layer_output1['descriptor'], prev_layer_output2['descriptor'])
prev_layer_output['dimension'] = prev_layer_output1['dimension'] + prev_layer_output2['dimension']
else: # LSTM layer case
prev_layer_output = nodes.AddLstmLayer(config_lines, "Lstm{0}".format(i+1), prev_layer_output, cell_dim,
recurrent_projection_dim, non_recurrent_projection_dim,
clipping_threshold, norm_based_clipping,
ng_per_element_scale_options, ng_affine_options,
lstm_delay = lstm_delay[i][0], self_repair_scale = self_repair_scale)
# make the intermediate config file for layerwise discriminative
# training
nodes.AddFinalLayer(config_lines, prev_layer_output, num_targets, ng_affine_options, label_delay = label_delay, include_log_softmax = include_log_softmax)
if xent_regularize != 0.0:
nodes.AddFinalLayer(config_lines, prev_layer_output, num_targets,
include_log_softmax = True,
name_affix = 'xent')
config_files['{0}/layer{1}.config'.format(config_dir, i+1)] = config_lines
config_lines = {'components':[], 'component-nodes':[]}
if len(lstm_delay[i]) == 2:
# since the form 'Append(Append(xx, yy), zz)' is not allowed, here we don't wrap the descriptor with 'Append()' so that we would have the form
# 'Append(xx, yy, zz)' in the next lstm layer
prev_layer_output['descriptor'] = '{0}, {1}'.format(prev_layer_output1['descriptor'], prev_layer_output2['descriptor'])
if len(lstm_delay[i]) == 2:
# since there is no 'Append' in 'AffRelNormLayer', here we wrap the descriptor with 'Append()'
prev_layer_output['descriptor'] = 'Append({0})'.format(prev_layer_output['descriptor'])
for i in range(num_lstm_layers, num_hidden_layers):
prev_layer_output = nodes.AddAffRelNormLayer(config_lines, "L{0}".format(i+1),
prev_layer_output, 1024,ng_affine_options, self_repair_scale = self_repair_scale)
# make the intermediate config file for layerwise discriminative
# training
nodes.AddFinalLayer(config_lines, prev_layer_output, num_targets, ng_affine_options, label_delay = label_delay, include_log_softmax = include_log_softmax)
if xent_regularize != 0.0:
nodes.AddFinalLayer(config_lines, prev_layer_output, num_targets,
include_log_softmax = True,
name_affix = 'xent')
config_files['{0}/layer{1}.config'.format(config_dir, i+1)] = config_lines
config_lines = {'components':[], 'component-nodes':[]}
# printing out the configs
# init.config used to train lda-mllt train
for key in config_files.keys():
PrintConfig(key, config_files[key])
def ProcessSpliceIndexes(config_dir, splice_indexes, label_delay, num_lstm_layers):
parsed_splice_output = ParseSpliceString(splice_indexes.strip(), label_delay)
left_context = parsed_splice_output['left_context']
right_context = parsed_splice_output['right_context']
num_hidden_layers = parsed_splice_output['num_hidden_layers']
splice_indexes = parsed_splice_output['splice_indexes']
#if (num_hidden_layers < num_lstm_layers):
# raise Exception("num-lstm-layers : number of lstm layers has to be greater than number of layers, decided based on splice-indexes")
# write the files used by other scripts like steps/nnet3/get_egs.sh
f = open(config_dir + "/vars", "w")
print('model_left_context=' + str(left_context), file=f)
print('model_right_context=' + str(right_context), file=f)
print('num_hidden_layers=' + str(num_hidden_layers), file=f)
# print('initial_right_context=' + str(splice_array[0][-1]), file=f)
f.close()
return [left_context, right_context, num_hidden_layers, splice_indexes]
def Main():
args = GetArgs()
[left_context, right_context, num_hidden_layers, splice_indexes] = ProcessSpliceIndexes(args.config_dir, args.splice_indexes, args.label_delay, args.num_lstm_layers)
print('Value of num_hidden_layers is '+ str(num_hidden_layers))
MakeConfigs(args.config_dir,
args.feat_dim, args.ivector_dim, args.num_targets,
splice_indexes, args.lstm_delay, args.cell_dim,
args.recurrent_projection_dim, args.non_recurrent_projection_dim,
args.num_lstm_layers, num_hidden_layers,
args.norm_based_clipping,
args.clipping_threshold,
args.ng_per_element_scale_options, args.ng_affine_options,
args.label_delay, args.include_log_softmax, args.xent_regularize,
args.self_repair_scale)
if __name__ == "__main__":
Main()
| shubhujf/Compare_GRUandLSTM | KALDI.V2/steps/nnet3/lstm/make_configs.py | Python | apache-2.0 | 17,057 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-09-26 14:53
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import djangocms_text_ckeditor.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
('cms', '0016_auto_20160608_1535'),
]
operations = [
migrations.CreateModel(
name='NewsItem',
fields=[
('cmsplugin_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, related_name='hrptinfo_newsitem', serialize=False, to='cms.CMSPlugin')),
('title', models.CharField(max_length=255)),
('date', models.DateField(auto_now_add=True)),
('content', djangocms_text_ckeditor.fields.HTMLField()),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
| hrpt-se/hrpt | apps/hrptinfo/migrations/0001_initial.py | Python | agpl-3.0 | 1,028 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from . import models
from . import tests
| ddico/odoo | addons/l10n_it_edi/__init__.py | Python | agpl-3.0 | 141 |
# coding=utf-8
# This file is part of SickRage.
#
# URL: https://SickRage.GitHub.io
# Git: https://github.com/SickRage/SickRage.git
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
"""
Test ShowFanArt
"""
from generic_media_tests import GenericMediaTests
import os
import sys
import unittest
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../lib')))
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..')))
from sickrage.media.ShowFanArt import ShowFanArt
class ShowFanArtTests(GenericMediaTests):
"""
Test ShowFanArt
"""
def test_get_default_media_name(self):
"""
Test get_default_media_name
"""
self.assertEqual(ShowFanArt(0, '').get_default_media_name(), 'fanart.png')
if __name__ == '__main__':
print('=====> Testing {0}'.format(__file__))
SUITE = unittest.TestLoader().loadTestsFromTestCase(ShowFanArtTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
| Jusedawg/SickRage | tests/sickrage_tests/media/show_fan_art_tests.py | Python | gpl-3.0 | 1,573 |
from airy.core.web import *
from airy.core.db import *
from airy.core.serializers.json import JSONSerializer
def expose_method(f):
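    """Only dispatch to the wrapped handler method if its name is listed in
    self.methods; otherwise write "Method Not Available" and finish."""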
def wrapped(self, *args, **kwargs):
if f.__name__ in self.methods:
return f(self, *args, **kwargs)
else:
self.write("Method Not Available")
self.finish()
return wrapped
class APIHandler(AiryRequestHandler):
model = Document
serializer = JSONSerializer
fields = set()
exclude = set()
levels = 1
methods = ('get', 'post', 'put', 'delete')
def __init__(self, *args, **kwargs):
super(APIHandler, self).__init__(*args, **kwargs)
if not self.fields:
self.fields = set(self.model._fields.keys())
def _generate_model(self, **kwargs):
model_fields = {}
for key, value in kwargs.items():
field = self.model._fields.get(key, None)
if field:
if isinstance(field, ReferenceField):
value = field.document_type.objects.get(pk=value)
model_fields[key] = value
return self.model(**model_fields)
def check_xsrf_cookie(self):
pass
def deserialize_query(self, query_dict):
for field_name in query_dict:
field = self.model._fields.get(field_name)
if isinstance(field, BooleanField):
query_dict[field_name] = bool(query_dict[field_name])
if isinstance(field, IntField):
query_dict[field_name] = int(query_dict[field_name])
return query_dict
def get_filter_query(self):
arguments = self.get_flat_arguments()
use_fields = set(self.fields) & set(arguments.keys())
use_fields = set(use_fields) - set(self.exclude)
query_dict = dict((field, arguments[field]) for field in use_fields)
query_dict = self.deserialize_query(query_dict)
return Q(**query_dict)
def get_queryset(self, id=None):
try:
if id:
queryset = self.model.objects.get(id=id)
else:
queryset = self.model.objects.filter(self.get_filter_query())
except Exception, e:
if settings.debug:
raise
logging.warn("API Error: %s" % e)
queryset = None
return queryset
def serialize(self, queryset):
try:
return self.serializer(levels=self.levels, fields=self.fields, exclude=self.exclude).serialize(queryset)
except ValidationError, e:
logging.warn("API Error: %s" % e)
if settings.debug:
return "API Error: %s" % e
else:
return ''
@report_on_fail
@expose_method
def get(self, id=None):
queryset = self.get_queryset(id)
self.set_header("Content-Type", "application/json")
self.write(self.serialize(queryset))
self.finish()
@report_on_fail
@expose_method
def put(self, id=None):
queryset = self.get_queryset(id)
if queryset:
queryset.update(**dict([("set__%s" % key, value) for key, value in self.get_flat_arguments().items()]))
self.set_header("Content-Type", "application/json")
self.write(self.serialize(queryset))
self.finish()
@report_on_fail
@expose_method
def post(self, id=None):
if id:
queryset = self.get_queryset(id)
if queryset:
queryset.update(**dict([("set__%s" % key, value) for key, value in self.get_flat_arguments().items()]))
else:
queryset = self._generate_model(**self.get_flat_arguments())
if queryset:
queryset.save()
self.set_header("Content-Type", "application/json")
self.write(self.serialize(queryset))
self.finish()
@report_on_fail
@expose_method
def delete(self, id=None):
queryset = self.get_queryset(id)
if queryset:
queryset.delete()
self.set_header("Content-Type", "application/json")
self.write(self.serialize(queryset))
        self.finish()
 | letolab/airy | airy/contrib/api/handlers.py | Python | bsd-2-clause | 4,151 |
from __future__ import absolute_import
from __future__ import with_statement
from functools import wraps
from kombu.pidbox import Mailbox
from celery.app import app_or_default
from celery.task import control
from celery.task import PingTask
from celery.utils import uuid
from celery.tests.utils import unittest
class MockMailbox(Mailbox):
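    """Mailbox stub that records each broadcast command name in
    MockMailbox.sent instead of publishing it to a real broker."""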
sent = []
def _publish(self, command, *args, **kwargs):
self.__class__.sent.append(command)
def close(self):
pass
def _collect(self, *args, **kwargs):
pass
class Control(control.Control):
Mailbox = MockMailbox
def with_mock_broadcast(fun):
@wraps(fun)
def _resets(*args, **kwargs):
MockMailbox.sent = []
try:
return fun(*args, **kwargs)
finally:
MockMailbox.sent = []
return _resets
class test_inspect(unittest.TestCase):
def setUp(self):
app = app_or_default()
self.i = Control(app=app).inspect()
def test_prepare_reply(self):
self.assertDictEqual(self.i._prepare([{"w1": {"ok": 1}},
{"w2": {"ok": 1}}]),
{"w1": {"ok": 1}, "w2": {"ok": 1}})
i = control.inspect(destination="w1")
self.assertEqual(i._prepare([{"w1": {"ok": 1}}]),
{"ok": 1})
@with_mock_broadcast
def test_active(self):
self.i.active()
self.assertIn("dump_active", MockMailbox.sent)
@with_mock_broadcast
def test_scheduled(self):
self.i.scheduled()
self.assertIn("dump_schedule", MockMailbox.sent)
@with_mock_broadcast
def test_reserved(self):
self.i.reserved()
self.assertIn("dump_reserved", MockMailbox.sent)
@with_mock_broadcast
def test_stats(self):
self.i.stats()
self.assertIn("stats", MockMailbox.sent)
@with_mock_broadcast
def test_revoked(self):
self.i.revoked()
self.assertIn("dump_revoked", MockMailbox.sent)
@with_mock_broadcast
    def test_tasks(self):
self.i.registered()
self.assertIn("dump_tasks", MockMailbox.sent)
@with_mock_broadcast
def test_enable_events(self):
self.i.enable_events()
self.assertIn("enable_events", MockMailbox.sent)
@with_mock_broadcast
def test_disable_events(self):
self.i.disable_events()
self.assertIn("disable_events", MockMailbox.sent)
@with_mock_broadcast
def test_ping(self):
self.i.ping()
self.assertIn("ping", MockMailbox.sent)
@with_mock_broadcast
def test_add_consumer(self):
self.i.add_consumer("foo")
self.assertIn("add_consumer", MockMailbox.sent)
@with_mock_broadcast
def test_cancel_consumer(self):
self.i.cancel_consumer("foo")
self.assertIn("cancel_consumer", MockMailbox.sent)
class test_Broadcast(unittest.TestCase):
def setUp(self):
self.app = app_or_default()
self.control = Control(app=self.app)
self.app.control = self.control
def tearDown(self):
del(self.app.control)
def test_discard_all(self):
self.control.discard_all()
@with_mock_broadcast
def test_broadcast(self):
self.control.broadcast("foobarbaz", arguments=[])
self.assertIn("foobarbaz", MockMailbox.sent)
@with_mock_broadcast
def test_broadcast_limit(self):
self.control.broadcast("foobarbaz1", arguments=[], limit=None,
destination=[1, 2, 3])
self.assertIn("foobarbaz1", MockMailbox.sent)
@with_mock_broadcast
def test_broadcast_validate(self):
with self.assertRaises(ValueError):
self.control.broadcast("foobarbaz2",
destination="foo")
@with_mock_broadcast
def test_rate_limit(self):
self.control.rate_limit(PingTask.name, "100/m")
self.assertIn("rate_limit", MockMailbox.sent)
@with_mock_broadcast
def test_revoke(self):
self.control.revoke("foozbaaz")
self.assertIn("revoke", MockMailbox.sent)
@with_mock_broadcast
def test_ping(self):
self.control.ping()
self.assertIn("ping", MockMailbox.sent)
@with_mock_broadcast
def test_revoke_from_result(self):
self.app.AsyncResult("foozbazzbar").revoke()
self.assertIn("revoke", MockMailbox.sent)
@with_mock_broadcast
def test_revoke_from_resultset(self):
r = self.app.TaskSetResult(uuid(),
map(self.app.AsyncResult,
[uuid() for i in range(10)]))
r.revoke()
self.assertIn("revoke", MockMailbox.sent)
| mzdaniel/oh-mainline | vendor/packages/celery/celery/tests/test_task/test_task_control.py | Python | agpl-3.0 | 4,711 |
#!/usr/bin/python -u
import numpy as np
from itdbase import Cell
import itin
def countbond(bondfile):
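    """Classify C-C, C-N and N-N bonds from bondfile by length into single,
    double and triple bonds, and count sp/sp2/sp3 atoms by coordination
    number, assuming atoms 1-144 are carbon and 145-216 are nitrogen."""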
xcell = set_cell_from_vasp('POSCAR')
pos = xcell.get_cart_positions()
buff = []
with open(bondfile) as f:
for line in f:
buff.append(line.split())
binfo = {}
j = 0
for item in buff:
# print 'item', item
for i in range(0, len(item)-2, 2):
# print i, i+1, i+2
yz = tuple(sorted([int(item[0]), int(item[i+1])]))
binfo[yz] = float(item[i+2])
j += 1
# binfo.append([yz, float(item[i+2])])
# print binfo
# print len(binfo)
# print k
CC1 = []
CC2 = []
CC3 = []
CN1 = []
CN2 = []
CN3 = []
NN1 = []
NN2 = []
NN3 = []
for k, v in binfo.items():
if k[0] < 145 and k[1] < 145:
if v < 1.3: CC3.append(k)
if 1.3 <= v < 1.4: CC2.append(k)
if v >= 1.4: CC1.append(k)
if k[0] < 145 and k[1] > 144:
if v < 1.180: CN3.append(k)
if 1.180 <= v < 1.34: CN2.append(k)
if v >= 1.34: CN1.append(k)
if k[0] > 144 and k[1] > 144:
print 'NN', v
if v < 1.175: NN3.append(k)
if 1.175 <= v < 1.3: NN2.append(k)
if v >= 1.3: NN1.append(k)
print 'C-C', len(CC1)
print 'C=C', len(CC2)
print 'C#C', len(CC3)
print 'C-N', len(CN1)
print 'C=N', len(CN2)
print 'C#N', len(CN3)
print 'N-N', len(NN1)
print 'N=N', len(NN2)
print 'N#N', len(NN3)
Csp = []
Csp2 = []
Csp3 = []
Nsp = []
Nsp2 = []
Nsp3 = []
for x in buff[:144]:
if (len(x)-1) / 2 == 1:
Csp.append(x[0])
if (len(x)-1) / 2 == 2:
n0 = int(x[0]) - 1
n1 = int(x[1]) - 1
n2 = int(x[3]) - 1
if jiaodu(pos[n0], pos[n1], pos[n2]):
Csp.append(x[0])
else:
Csp2.append(x[0])
if (len(x)-1) / 2 == 3:
Csp2.append(x[0])
if (len(x)-1) / 2 == 4:
Csp3.append(x[0])
for x in buff[144:]:
if (len(x)-1) / 2 == 1:
Nsp.append(x[0])
if (len(x)-1) / 2 == 2:
n0 = int(x[0]) - 1
n1 = int(x[1]) - 1
n2 = int(x[3]) - 1
if jiaodu(pos[n0], pos[n1], pos[n2]):
Nsp.append(x[0])
else:
Nsp2.append(x[0])
if (len(x)-1) / 2 == 3:
Nsp2.append(x[0])
if (len(x)-1) / 2 == 4:
Nsp3.append(x[0])
print 'Csp', len(Csp)
print 'Csp2', len(Csp2)
print 'Csp3', len(Csp3)
print 'Nsp', len(Nsp)
print 'Nsp2', len(Nsp2)
print 'Nsp3', len(Nsp3)
print Csp
print Nsp
nco = 0
for x in buff:
nco = nco + (len(x)-1) / 2
print 'ave coor', nco/216.
nco = 0
for x in buff[:144]:
nco = nco + (len(x)-1) / 2
print 'c coor', nco/144.
nco = 0
for x in buff[144:]:
nco = nco + (len(x)-1) / 2
print 'n coor', nco/72.
def jiaodu(p0, p1, p2):
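    """Return True if the angle p1-p0-p2 is nearly straight (> 170 degrees)."""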
a = p1 - p0
b = p2 - p0
cosa = np.dot(a, b) / np.sqrt(np.dot(a,a) * np.dot(b, b))
du = np.arccos(cosa)/np.pi * 180.
if du > 170:
return True
else:
return False
def set_cell_from_vasp(pcar):
xcell = Cell()
buff = []
with open(pcar) as f:
for line in f:
buff.append(line.split())
lat = np.array(buff[2:5], float)
try:
typt = np.array(buff[5], int)
except:
del(buff[5])
typt = np.array(buff[5], int)
pos = np.array(buff[7:7+itin.nat], float)
xcell.set_name(itin.sname)
xcell.set_lattice(lat)
if buff[6][0].strip()[0] == 'D':
xcell.set_positions(pos)
else:
xcell.set_cart_positions(pos)
xcell.set_typt(typt)
xcell.set_znucl(itin.znucl)
xcell.set_types()
xcell.cal_fp(itin.fpcut, itin.lmax)
return xcell
if __name__ == '__main__':
countbond('cnbond')
# xcell = set_cell_from_vasp('POSCAR')
# pos = xcell.get_cart_positions()
# jiaodu(pos[215], pos[143], pos[69])
# jiaodu(pos[27], pos[16], pos[99])
| zhuligs/abinitioCafe | countbond.py | Python | gpl-3.0 | 4,211 |
# python3
# coding=utf-8
# Copyright 2020 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base Airflow DAG for TCRM workflow."""
import abc
import datetime
import logging
from typing import Any, Optional
from airflow import utils
from airflow.exceptions import AirflowException
from airflow.models import baseoperator
from airflow.models import dag
from airflow.models import variable
from plugins.pipeline_plugins.operators import error_report_operator
from plugins.pipeline_plugins.operators import monitoring_cleanup_operator
from plugins.pipeline_plugins.utils import errors
# Airflow DAG configurations.
_DAG_RETRIES = 0
_DAG_RETRY_DELAY_MINUTES = 3
_DAG_SCHEDULE = '@once'
# Indicates whether the tasks will return a run report or not. The report will
# be returned as the operator's output. Not all operators have reports.
_ENABLE_RETURN_REPORT = False
# Whether or not the DAG should use the monitoring storage for logging.
# Enable monitoring to enable retry and reporting later.
_DAG_ENABLE_MONITORING = True
_DEFAULT_MONITORING_DATASET_ID = 'tcrm_monitoring_dataset'
_DEFAULT_MONITORING_TABLE_ID = 'tcrm_monitoring_table'
# Whether or not the cleanup operator should run automatically after a DAG
# completes.
_DEFAULT_DAG_ENABLE_MONITORING_CLEANUP = False
# Number of days data can live in the monitoring table before being removed.
_DEFAULT_MONITORING_DATA_DAYS_TO_LIVE = 50
# BigQuery connection ID for the monitoring table. Refer to
# https://cloud.google.com/composer/docs/how-to/managing/connections
# for more details on Managing Airflow connections.
# This could be the same or different from the input BQ connection ID.
_MONITORING_BQ_CONN_ID = 'bigquery_default'
# Whether or not the DAG should include a retry task. This is an internal retry
# to send failed events from previous similar runs. It is different from the
# Airflow retry of the whole DAG.
# If True, the input resource will be the monitoring BigQuery table and dataset
# (as described in _MONITORING_DATASET and _MONITORING_TABLE). Previously failed
# events will be resent to the same output resource.
_DAG_IS_RETRY = True
# Whether or not the DAG should include a main run. This option can be used
# should the user want to skip the main run and only run the retry operation.
_DAG_IS_RUN = True
def create_error_report_task(
error_report_dag: Optional[dag.DAG],
error: Exception) -> error_report_operator.ErrorReportOperator:
"""Creates an error task and attaches it to the DAG.
In case there was an error during a task creation in the DAG, that task will
be replaced with an error task. An error task will make the DAG
fail with the appropriate error message from the initial task so the user
can fix the issue that prevented the task creation (e.g. due to missing
params).
Args:
error_report_dag: The DAG that tasks attach to.
error: The error to display.
Returns:
An ErrorReportOperator instance task.
"""
return error_report_operator.ErrorReportOperator(
task_id='configuration_error',
error=error,
dag=error_report_dag)
class BaseDag(abc.ABC):
"""Base Airflow DAG.
Attributes:
dag_name: The name of the dag.
    dag_retries: The number of Airflow retries for the DAG.
dag_retry_delay: The interval between Airflow DAG retries.
dag_schedule: The schedule for the dag.
dag_is_retry: Whether or not the DAG should include a retry task. This is
an internal retry to send failed events from previous
similar runs. It is different from the Airflow retry of the
whole DAG.
dag_is_run: Whether or not the DAG should include a main run.
dag_enable_run_report: Indicates whether the tasks will return a run report
or not.
dag_enable_monitoring: Whether or not the DAG should use the monitoring
storage for logging. Enable monitoring to enable
retry and reporting later.
dag_enable_monitoring_cleanup: Whether or not the cleanup operator should
run automatically after a DAG completes.
days_to_live: Number of days data can live in the monitoring table before
being removed.
monitoring_dataset: Dataset id of the monitoring table.
monitoring_table: Table name of the monitoring table.
monitoring_bq_conn_id: BigQuery connection ID for the monitoring table.
"""
def __init__(self, dag_name: str) -> None:
"""Initializes the base DAG.
Args:
dag_name: The name of the DAG.
"""
self.dag_name = dag_name
self.dag_retries = int(
variable.Variable.get(f'{self.dag_name}_retries', _DAG_RETRIES))
self.dag_retry_delay = int(
variable.Variable.get(f'{self.dag_name}_retry_delay',
_DAG_RETRY_DELAY_MINUTES))
self.dag_schedule = variable.Variable.get(f'{self.dag_name}_schedule',
_DAG_SCHEDULE)
self.dag_is_retry = bool(
int(variable.Variable.get(f'{self.dag_name}_is_retry', _DAG_IS_RETRY)))
self.dag_is_run = bool(
int(variable.Variable.get(f'{self.dag_name}_is_run', _DAG_IS_RUN)))
self.dag_enable_run_report = bool(
int(
variable.Variable.get(f'{self.dag_name}_enable_run_report',
_ENABLE_RETURN_REPORT)))
self.dag_enable_monitoring = bool(
int(
variable.Variable.get(f'{self.dag_name}_enable_monitoring',
_DAG_ENABLE_MONITORING)))
self.dag_enable_monitoring_cleanup = bool(
int(
variable.Variable.get(f'{self.dag_name}_enable_monitoring_cleanup',
_DEFAULT_DAG_ENABLE_MONITORING_CLEANUP)))
self.days_to_live = int(
variable.Variable.get('monitoring_data_days_to_live',
_DEFAULT_MONITORING_DATA_DAYS_TO_LIVE))
self.monitoring_dataset = variable.Variable.get(
'monitoring_dataset', _DEFAULT_MONITORING_DATASET_ID)
self.monitoring_table = variable.Variable.get('monitoring_table',
_DEFAULT_MONITORING_TABLE_ID)
self.monitoring_bq_conn_id = variable.Variable.get('monitoring_bq_conn_id',
_MONITORING_BQ_CONN_ID)
def _initialize_dag(self) -> dag.DAG:
"""Initializes an Airflow DAG with appropriate default args.
Returns:
models.DAG: Instance models.DAG.
"""
logging.info('Running pipeline at schedule %s.', self.dag_schedule)
default_args = {
'retries': self.dag_retries,
'retry_delay': datetime.timedelta(minutes=self.dag_retry_delay),
'start_date': utils.dates.days_ago(1)
}
return dag.DAG(
dag_id=self.dag_name,
schedule_interval=self.dag_schedule,
catchup=False,
default_args=default_args)
@abc.abstractmethod
def create_task(
self,
main_dag: Optional[dag.DAG] = None,
is_retry: bool = False) -> baseoperator.BaseOperator:
"""Creates a task and attaches it to the DAG.
Args:
main_dag: The DAG that tasks attach to.
is_retry: Whether or not the operator should include a retry task.
Returns:
An instance of models.BaseOperator.
"""
pass
def _try_create_task(self, main_dag: dag.DAG, is_retry: bool) -> Any:
"""Tries to create an Airflow task.
Args:
main_dag: The DAG that tasks attach to.
is_retry: Whether or not the operator should include a retry task.
Raises:
DAGError raised when task is failed to create.
Returns:
Airflow task instance.
"""
try:
return self.create_task(main_dag=main_dag, is_retry=is_retry)
except (errors.DataOutConnectorValueError,
errors.DataInConnectorValueError,
AirflowException,
ValueError) as error:
raise errors.DAGError(error=error, msg='Couldn\'t create task.')
def _create_cleanup_task(self, main_dag: dag.DAG) -> Any:
"""Creates and initializes the cleanup task.
Args:
main_dag: The dag that the task attaches to.
Returns:
MonitoringCleanupOperator.
"""
try:
return monitoring_cleanup_operator.MonitoringCleanupOperator(
task_id='monitoring_cleanup_task',
monitoring_bq_conn_id=self.monitoring_bq_conn_id,
monitoring_dataset=self.monitoring_dataset,
monitoring_table=self.monitoring_table,
days_to_live=self.days_to_live,
dag_name=self.dag_name,
dag=main_dag)
except (errors.DataOutConnectorValueError,
errors.DataInConnectorValueError,
AirflowException,
ValueError) as error:
raise errors.DAGError(error=error, msg='Couldn\'t create cleanup task.')
def create_dag(self) -> dag.DAG:
"""Creates the DAG.
Returns:
Airflow DAG instance.
"""
main_dag = self._initialize_dag()
try:
if self.dag_is_retry:
retry_task = self._try_create_task(main_dag=main_dag, is_retry=True)
if self.dag_is_run:
run_task = self._try_create_task(main_dag=main_dag, is_retry=False)
if self.dag_is_retry:
run_task.set_upstream(retry_task)
if self.dag_enable_monitoring_cleanup:
cleanup_task = self._create_cleanup_task(main_dag=main_dag)
run_task.set_upstream(cleanup_task)
except errors.DAGError as error:
main_dag = self._initialize_dag()
create_error_report_task(error_report_dag=main_dag, error=error)
return main_dag
def get_task_id(self, task_name: str, is_retry: bool) -> str:
"""Gets task_id by task type.
Args:
task_name: The name of the task.
is_retry: Whether or not the operator should include a retry task.
Returns:
Task id.
"""
if is_retry:
return task_name + '_retry_task'
else:
return task_name + '_task'
def get_variable_value(self,
prefix: str,
variable_name: str,
expected_type: Any = str,
fallback_value: object = '',
var_not_found_flag: Any = None) -> Any:
"""Try to get value by the prefixed name first, then the name directly.
Args:
prefix: The prefix of the variable.
variable_name: The name of the variable.
expected_type: The expected type of the value, can be str, int and bool.
fallback_value: The default value if no such variable is found.
var_not_found_flag: The flag that indicates no value is found by prefixed
variable_name and should try to retrieve value by variable_name only.
Returns:
The value of expected type of the corresponding variable.
"""
if fallback_value is not None:
if not isinstance(fallback_value, expected_type):
raise TypeError(
f'type of fallback mismatch expected type {expected_type}')
val = variable.Variable.get(
f'{prefix}_{variable_name}', var_not_found_flag)
if val == var_not_found_flag:
val = variable.Variable.get(f'{variable_name}', fallback_value)
try:
return expected_type(val)
except ValueError:
return fallback_value
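# --- Editor's sketch (an assumption, not part of TCRM) ----------------------
# A concrete DAG module would subclass BaseDag, implement create_task(), and
# expose the DAG returned by create_dag() at module level, e.g.:
#
#     class ExampleDag(BaseDag):
#         def create_task(self, main_dag=None, is_retry=False):
#             # Any airflow operator attached to main_dag works here; the
#             # error reporter from this module is reused purely as a
#             # placeholder (it makes the DAG fail with the given error).
#             return error_report_operator.ErrorReportOperator(
#                 task_id=self.get_task_id('example', is_retry),
#                 error=RuntimeError('placeholder'), dag=main_dag)
#
#     dag = ExampleDag('tcrm_example_dag').create_dag()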
| google/TaglessCRM | src/dags/base_dag.py | Python | apache-2.0 | 11,861 |
"""
A collection of linear classifier models.
.. autoclass:: revscoring.scoring.models.LogisticRegression
:members:
:member-order:
"""
import logging
from sklearn.linear_model import LogisticRegression as sklearn_LR
from .sklearn import ProbabilityClassifier
logger = logging.getLogger(__name__)
class LogisticRegression(ProbabilityClassifier):
"""
    Implements a Logistic Regression model using
    :class:`sklearn.linear_model.LogisticRegression` as the underlying
    estimator. ``label_weights`` are not supported and only trigger a warning.
"""
Estimator = sklearn_LR
def __init__(self, *args, label_weights=None, **kwargs):
if label_weights:
logger.warn("LogisticRegression does not support label_weights.")
super().__init__(*args, **kwargs)
| he7d3r/revscoring | revscoring/scoring/models/linear.py | Python | mit | 647 |
#!/usr/bin/env python3
import os
import subprocess
import sys
if not os.environ.get('DESTDIR'):
icon_dir = os.path.join(sys.argv[1], 'icons', 'hicolor')
# schema_dir = os.path.join(sys.argv[1], 'glib-2.0', 'schemas')
print('Update icon cache...')
subprocess.call(['gtk-update-icon-cache', '-f', '-t', icon_dir])
# print('Compiling gsettings schemas...')
# subprocess.call(['glib-compile-schemas', schema_dir])
| GNOME/atomix | meson_post_install.py | Python | gpl-2.0 | 423 |
import hnco
from hnco import function as fn
from hnco import algorithm as algo
from hnco.function import controller as ctrl
from hnco.random import Generator
Generator.set_seed()
size = 100
f = fn.OneMax(size)
c1 = ctrl.ProgressTracker(f)
c1.set_log_improvement(True)
c2 = ctrl.StopOnMaximum(c1)
a = algo.OnePlusOneEa(size)
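# StopOnMaximum ends the run by raising hnco.TargetReached (caught below) once
# the OneMax optimum is reached; ProgressTracker logs every improvement.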
try:
a.maximize([c2])
except hnco.TargetReached:
pass
| courros/hnco | bindings/examples/progress-tracker.py | Python | lgpl-3.0 | 392 |
# -*- coding: utf-8 -*-
from flask import current_app as app, g, request, url_for
from flask.ext.restful import abort, fields, marshal
from werkzeug.exceptions import NotFound
from shiva.auth import Roles
from shiva.constants import HTTP
from shiva.exceptions import (InvalidFileTypeError, IntegrityError,
ObjectExistsError)
from shiva.http import Resource
from shiva.models import Album, Artist, db, Track, User, Playlist
from shiva.resources.fields import (ForeignKeyField, InstanceURI, TrackFiles,
ManyToManyField, PlaylistField)
from shiva.utils import parse_bool, get_list, get_by_name
class ArtistResource(Resource):
""" The resource responsible for artists. """
db_model = Artist
def get_resource_fields(self):
return {
'id': fields.String(attribute='pk'),
'name': fields.String,
'slug': fields.String,
'uri': InstanceURI('artists'),
'image': fields.String(default=app.config['DEFAULT_ARTIST_IMAGE']),
'events_uri': fields.String(attribute='events'),
}
def post(self):
name = request.form.get('name', '').strip()
if not name:
abort(HTTP.BAD_REQUEST)
image_url = request.form.get('image_url')
try:
artist = self.create(name, image_url)
except (IntegrityError, ObjectExistsError):
abort(HTTP.CONFLICT)
response = marshal(artist, self.get_resource_fields())
headers = {'Location': url_for('artists', id=artist.pk)}
return response, 201, headers
def create(self, name, image_url):
artist = Artist(name=name, image=image_url)
db.session.add(artist)
db.session.commit()
return artist
def update(self, artist):
if 'name' in request.form:
name = request.form.get('name', '').strip()
if not name:
abort(HTTP.BAD_REQUEST)
artist.name = name
if 'image' in request.form:
artist.image = request.form.get('image_url')
return artist
def get_full_tree(self, artist):
_artist = marshal(artist, self.get_resource_fields())
_artist['albums'] = []
albums = AlbumResource()
for album in artist.albums:
_artist['albums'].append(albums.get_full_tree(album))
no_album = artist.tracks.filter_by(albums=None).all()
track_fields = TrackResource().get_resource_fields()
_artist['no_album_tracks'] = marshal(no_album, track_fields)
return _artist
class AlbumResource(Resource):
""" The resource responsible for albums. """
db_model = Album
def get_resource_fields(self):
return {
'id': fields.String(attribute='pk'),
'name': fields.String,
'slug': fields.String,
'year': fields.Integer,
'uri': InstanceURI('albums'),
'artists': ManyToManyField(Artist, {
'id': fields.String(attribute='pk'),
'uri': InstanceURI('artists'),
}),
'cover': fields.String(default=app.config['DEFAULT_ALBUM_COVER']),
}
def post(self):
params = {
'name': request.form.get('name', '').strip(),
'year': request.form.get('year'),
'cover_url': request.form.get('cover_url'),
}
if not params['name']:
abort(HTTP.BAD_REQUEST)
album = self.create(**params)
response = marshal(album, self.get_resource_fields())
headers = {'Location': url_for('albums', id=album.pk)}
return response, 201, headers
def create(self, name, year, cover_url):
album = Album(name=name, year=year, cover=cover_url)
db.session.add(album)
db.session.commit()
return album
def update(self, album):
"""
Updates an album object with the given attributes. The `artists`
attribute, however, is treated as a calculated value so it cannot be
set through a PUT request. It has to be done through the Track model.
"""
if 'name' in request.form:
name = request.form.get('name', '').strip()
if not name:
abort(HTTP.BAD_REQUEST)
            album.name = name
if 'year' in request.form:
album.year = request.form.get('year')
if 'cover_url' in request.form:
album.cover = request.form.get('cover_url')
return album
def get_filters(self):
return (
('artist', 'artist_filter'),
)
def artist_filter(self, queryset, artist_pk):
try:
pk = artist_pk if int(artist_pk) > 0 else None
except ValueError:
abort(HTTP.BAD_REQUEST)
return queryset.join(Album.artists).filter(Artist.pk == pk)
def get_full_tree(self, album):
_album = marshal(album, self.get_resource_fields())
_album['tracks'] = []
tracks = TrackResource()
for track in album.tracks.order_by(Track.ordinal, Track.title):
_album['tracks'].append(tracks.get_full_tree(track))
return _album
class TrackResource(Resource):
""" The resource responsible for tracks. """
db_model = Track
def get_resource_fields(self):
return {
'id': fields.String(attribute='pk'),
'uri': InstanceURI('tracks'),
'files': TrackFiles,
'bitrate': fields.Integer,
'length': fields.Integer,
'title': fields.String,
'slug': fields.String,
'artists': ManyToManyField(Artist, {
'id': fields.String(attribute='pk'),
'uri': InstanceURI('artists'),
}),
'albums': ManyToManyField(Album, {
'id': fields.String(attribute='pk'),
'uri': InstanceURI('albums'),
}),
'ordinal': fields.Integer,
}
def post(self):
params = {
'title': request.form.get('title', '').strip(),
'artists': request.form.getlist('artist_id'),
'albums': request.form.getlist('album_id'),
'ordinal': request.form.get('ordinal'),
}
if 'track' not in request.files:
abort(HTTP.BAD_REQUEST)
try:
track = self.create(**params)
except (IntegrityError, ObjectExistsError):
abort(HTTP.CONFLICT)
response = marshal(track, self.get_resource_fields())
headers = {'Location': url_for('tracks', id=track.pk)}
return response, 201, headers
def create(self, title, artists, albums, ordinal):
UploadHandler = app.config.get('UPLOAD_HANDLER')
try:
handler = UploadHandler(track=request.files.get('track'))
except InvalidFileTypeError, e:
abort(HTTP.UNSUPPORTED_MEDIA_TYPE)
handler.save()
hash_file = parse_bool(request.args.get('hash_file', True))
no_metadata = parse_bool(request.args.get('no_metadata', False))
track = Track(path=handler.path, hash_file=hash_file,
no_metadata=no_metadata)
db.session.add(track)
# If an artist (or album) is given as argument, it will take precedence
# over whatever the file's metadata say.
artist_list = []
if artists:
try:
artist_list.extend(get_list(Artist, artists))
except ValueError:
abort(HTTP.BAD_REQUEST)
else:
if handler.artist:
artist_list.append(get_by_name(Artist, handler.artist))
album_list = []
if albums:
try:
album_list.extend(get_list(Album, albums))
except ValueError:
abort(HTTP.BAD_REQUEST)
else:
if handler.album:
                album_list.append(get_by_name(Album, handler.album))
for artist in artist_list:
db.session.add(artist)
artist.tracks.append(track)
for album in album_list:
db.session.add(album)
album.tracks.append(track)
db.session.commit()
return track
def update(self, track):
track.title = request.form.get('title')
track.ordinal = request.form.get('ordinal')
# The track attribute cannot be updated. A new track has to be created
# with the new value instead.
if 'track' in request.form:
abort(HTTP.BAD_REQUEST)
for artist_pk in request.form.getlist('artist_id'):
try:
artist = Artist.query.get(artist_pk)
track.artists.append(artist)
except:
pass
for album_pk in request.form.getlist('album_id'):
try:
album = Album.query.get(album_pk)
track.albums.append(album)
except:
pass
return track
def get_filters(self):
return (
('artist', 'artist_filter'),
('album', 'album_filter'),
)
def artist_filter(self, queryset, artist_pk):
try:
pk = artist_pk if int(artist_pk) > 0 else None
except ValueError:
abort(HTTP.BAD_REQUEST)
return queryset.filter(Track.artist_pk == pk)
def album_filter(self, queryset, album_pk):
try:
pk = album_pk if int(album_pk) > 0 else None
except ValueError:
abort(HTTP.BAD_REQUEST)
return queryset.filter_by(album_pk=pk)
def get_full_tree(self, track, include_scraped=False,
include_related=True):
"""
        Retrieves the full tree for a track. If the include_related option is
        set, related objects like artists and albums are expanded to provide
        all their respective information; otherwise a plain track structure is
        returned. If include_scraped is set, external resources that need to
        be scraped, like lyrics, are also included.
        Scraping is disabled by default to avoid DoS'ing lyrics websites when
        requesting many tracks at once.
"""
resource_fields = self.get_resource_fields()
if include_related:
artist = ArtistResource()
resource_fields['artists'] = ManyToManyField(
Artist,
artist.get_resource_fields())
album = AlbumResource()
resource_fields['albums'] = ManyToManyField(
Album,
album.get_resource_fields())
_track = marshal(track, resource_fields)
if include_scraped:
lyrics = LyricsResource()
try:
_track['lyrics'] = lyrics.get_for(track)
except NotFound:
_track['lyrics'] = None
# tabs = TabsResource()
# _track['tabs'] = tabs.get()
return _track
class PlaylistResource(Resource):
"""
Playlist are just a logical collection of tracks. Tracks must not be
necessarily related between them in any way.
To access a user's playlists filter by user id:
/playlists?user_id=6
"""
db_model = Playlist
def get_resource_fields(self):
return {
'id': fields.String(attribute='pk'),
'name': fields.String,
'user': ForeignKeyField(User, {
'id': fields.String(attribute='pk'),
'uri': InstanceURI('users'),
}),
'read_only': fields.Boolean,
'creation_date': fields.DateTime,
'length': fields.Integer,
'tracks': PlaylistField({
'id': fields.String(attribute='pk'),
'uri': InstanceURI('tracks'),
}),
}
def post(self):
if g.user is None:
abort(HTTP.BAD_REQUEST)
name = request.form.get('name', '').strip()
if not name:
abort(HTTP.BAD_REQUEST)
read_only = request.form.get('read_only', True)
playlist = self.create(name=name, read_only=read_only, user=g.user)
response = marshal(playlist, self.get_resource_fields())
headers = {'Location': url_for('playlists', id=playlist.pk)}
return response, 201, headers
def create(self, name, read_only, user):
playlist = Playlist(name=name, read_only=read_only, user=user)
db.session.add(playlist)
db.session.commit()
return playlist
def update(self, playlist):
if 'name' in request.form:
playlist.name = request.form.get('name')
if 'read_only' in request.form:
            playlist.read_only = parse_bool(request.form.get('read_only'))
return playlist
class PlaylistTrackResource(Resource):
def post(self, id, verb):
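        # The URL verb picks the handler: 'add' dispatches to add_track and
        # 'remove' to remove_track.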
        handler = getattr(self, '%s_track' % verb, None)
if not handler:
abort(HTTP.BAD_REQUEST)
playlist = self.get_playlist(id)
if not playlist:
abort(HTTP.NOT_FOUND)
return handler(playlist)
def add_track(self, playlist):
if 'track' not in request.form:
abort(HTTP.BAD_REQUEST)
track = self.get_track(request.form.get('track'))
if not track:
abort(HTTP.BAD_REQUEST)
try:
playlist.insert(request.form.get('index'), track)
except ValueError:
abort(HTTP.BAD_REQUEST)
return self.Response('')
def remove_track(self, playlist):
if 'index' not in request.form:
abort(HTTP.BAD_REQUEST)
try:
playlist.remove_at(request.form.get('index'))
except (ValueError, IndexError):
abort(HTTP.BAD_REQUEST)
return self.Response('')
def get_playlist(self, playlist_id):
try:
playlist = Playlist.query.get(playlist_id)
except:
playlist = None
return playlist
def get_track(self, track_id):
try:
track = Track.query.get(track_id)
except:
track = None
return track
class UserResource(Resource):
""" The resource responsible for users. """
db_model = User
def get_resource_fields(self):
return {
'id': fields.String(attribute='pk'),
'display_name': fields.String,
'creation_date': fields.DateTime,
}
def get(self, id=None):
if id == 'me':
return marshal(g.user, self.get_resource_fields())
return super(UserResource, self).get(id)
def get_all(self):
return self.db_model.query.filter_by(is_public=True)
def post(self, id=None):
if id == 'me':
abort(HTTP.METHOD_NOT_ALLOWED)
if g.user is None:
abort(HTTP.METHOD_NOT_ALLOWED)
email = request.form.get('email')
if not email:
abort(HTTP.BAD_REQUEST)
display_name = request.form.get('display_name')
is_active = False
password = request.form.get('password')
if password:
is_active = parse_bool(request.form.get('is_active', False))
# FIXME: Check permissions
is_admin = parse_bool(request.form.get('admin', False))
try:
user = self.create(display_name=display_name, email=email,
password=password, is_active=is_active,
is_admin=is_admin)
except (IntegrityError, ObjectExistsError):
abort(HTTP.CONFLICT)
response = marshal(user, self.get_resource_fields())
headers = {'Location': url_for('users', id=user.pk)}
return response, 201, headers
def create(self, display_name, email, password, is_active, is_admin):
role = Roles.get('ADMIN' if is_admin else 'USER')
user = User(display_name=display_name, email=email, password=password,
is_active=is_active, role=role)
db.session.add(user)
db.session.commit()
return user
def put(self, id=None):
if id == 'me':
abort(HTTP.METHOD_NOT_ALLOWED)
return super(UserResource, self).put(id)
def update(self, user):
if 'email' in request.form:
email = request.form.get('email', '').strip()
if not email:
abort(HTTP.BAD_REQUEST)
user.email = email
if 'display_name' in request.form:
user.display_name = request.form.get('display_name')
if 'password' in request.form:
user.password = request.form.get('password')
if user.password == '':
user.is_active = False
else:
if 'is_active' in request.form:
user.is_active = parse_bool(request.form.get('is_active'))
if 'is_admin' in request.form:
is_admin = parse_bool(request.form.get('is_admin'))
user.role = Roles.get('ADMIN' if is_admin else 'USER')
return user
def delete(self, id=None):
if id == 'me':
abort(HTTP.METHOD_NOT_ALLOWED)
return super(UserResource, self).delete(id)
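# A minimal sketch of how these resources could be mounted, assuming `api` is
# a flask.ext.restful Api instance (hypothetical URL layout; the project wires
# its actual routes elsewhere):
#     api.add_resource(ArtistResource, '/artists', '/artists/<id>',
#                      endpoint='artists')
#     api.add_resource(AlbumResource, '/albums', '/albums/<id>',
#                      endpoint='albums')
#     api.add_resource(TrackResource, '/tracks', '/tracks/<id>',
#                      endpoint='tracks')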
| tooxie/shiva-server | shiva/resources/base.py | Python | mit | 17,331 |
#!/usr/bin/env python3
# Version 1.0
# Author Alexis Blanchet-Cohen
# Date: 15/06/2014
import argparse
import glob
import os
import subprocess
import util
# Read the command line arguments.
parser = argparse.ArgumentParser(description='Generate scripts to convert bedgraph files from one-based start to zero-based start.')
parser.add_argument("-s", "--scriptsDirectory", help="Scripts directory.", default="convert1StartTo0Start_with_threshold")
parser.add_argument("-i", "--inputDirectory", help="Input directory with bedgraph files.", default="../bedgraph/methylation_counts_sorted/")
parser.add_argument("-o", "--outputDirectory", help="Output directory with sorted bedgraph files.", default="../bedgraph/methylation_counts_sorted_0_start/")
parser.add_argument("-q", "--submitJobsToQueue", help="Submit jobs to queue immediately.", choices=["yes", "no", "y", "n"], default="no")
args = parser.parse_args()
# Process the command line arguments.
scriptsDirectory = os.path.abspath(args.scriptsDirectory)
inputDirectory = os.path.abspath(args.inputDirectory)
outputDirectory = os.path.abspath(args.outputDirectory)
samples = util.getMergedsamples()
# Read configuration files.
config = util.readConfigurationFiles()
# Create scripts directory, if it does not exist yet, and cd to it.
if not os.path.exists(scriptsDirectory):
os.mkdir(scriptsDirectory)
os.chdir(scriptsDirectory)
# Create output directory, if it does not exist yet.
if not os.path.exists(outputDirectory):
os.mkdir(outputDirectory)
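# Write one job script per bedgraph file found in the input directory.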
for file in os.listdir(inputDirectory):
file = os.path.splitext(file)[0]
# Create script file.
scriptName = 'convert1StartTo0Start_with_threshold_' + file + '.sh'
script = open(scriptName, 'w')
util.writeHeader(script, config, "convert1StartTo0Start_with_threshold")
script.write("convert1StartTo0Start_with_threshold.py " + "\\\n")
script.write("--one_start_bedgraph " + inputDirectory + "/" + file + ".bedgraph " + "\\\n")
script.write("--zero_start_bedgraph_with_threshold " + outputDirectory + "/" + file + ".bedgraph")
script.close()
if (args.submitJobsToQueue.lower() == "yes") or (args.submitJobsToQueue.lower() == "y"):
subprocess.call("submitJobs.py", shell=True)
| blancha/abcngspipelines | bischipseq/convert1StartTo0Start_with_threshold_batch.py | Python | gpl-3.0 | 2,231 |
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
"""
Purpose
Shows how to use the AWS SDK for Python (Boto3) with AWS Key Management Service (AWS KMS)
to manage key aliases.
"""
# snippet-start:[python.example_code.kms.Scenario_AliasManagement]
import logging
from pprint import pprint
import boto3
from botocore.exceptions import ClientError
logger = logging.getLogger(__name__)
# snippet-start:[python.example_code.kms.AliasManager]
class AliasManager:
def __init__(self, kms_client):
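        """
        :param kms_client: A Boto3 AWS KMS client.
        """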
self.kms_client = kms_client
self.created_key = None
# snippet-end:[python.example_code.kms.AliasManager]
def setup(self):
"""
Sets up a key for the demo. Either creates a new key or uses one supplied by
the user.
:return: The ARN or ID of the key to use for the demo.
"""
answer = input("Do you want to create a new key for the demo (y/n)? ")
if answer.lower() == 'y':
try:
key = self.kms_client.create_key(
Description="Alias management demo key")['KeyMetadata']
self.created_key = key
except ClientError as err:
logger.error(
"Couldn't create key. Here's why: %s", err.response['Error']['Message'])
raise
else:
key_id = key['KeyId']
else:
key_id = input("Enter a key ID or ARN to use for the demo: ")
if key_id == '':
key_id = None
return key_id
def teardown(self):
"""
Deletes any resources that were created for the demo.
"""
if self.created_key is not None:
answer = input(
f"Key {self.created_key['KeyId']} was created for this demo. Do you "
f"want to delete it (y/n)? ")
if answer.lower() == 'y':
try:
self.kms_client.schedule_key_deletion(
KeyId=self.created_key['KeyId'], PendingWindowInDays=7)
except ClientError as err:
logging.error(
"Couldn't delete key. Here's why: %s", err.response['Error']['Message'])
else:
print(f"Key scheduled for deletion in 7 days.")
# snippet-start:[python.example_code.kms.CreateAlias]
def create_alias(self, key_id):
"""
Creates an alias for the specified key.
:param key_id: The ARN or ID of a key to give an alias.
:return: The alias given to the key.
"""
alias = ''
while alias == '':
alias = input(f"What alias would you like to give to key {key_id}? ")
try:
self.kms_client.create_alias(AliasName=alias, TargetKeyId=key_id)
except ClientError as err:
logger.error(
"Couldn't create alias %s. Here's why: %s",
alias, err.response['Error']['Message'])
else:
print(f"Created alias {alias} for key {key_id}.")
return alias
# snippet-end:[python.example_code.kms.CreateAlias]
# snippet-start:[python.example_code.kms.ListAliases]
def list_aliases(self):
"""
Lists aliases for the current account.
"""
answer = input("\nLet's list your key aliases. Ready (y/n)? ")
if answer.lower() == 'y':
try:
page_size = 10
alias_paginator = self.kms_client.get_paginator('list_aliases')
                for alias_page in alias_paginator.paginate(PaginationConfig={'PageSize': page_size}):
print(f"Here are {page_size} aliases:")
pprint(alias_page['Aliases'])
if alias_page['Truncated']:
answer = input(
f"Do you want to see the next {page_size} aliases (y/n)? ")
if answer.lower() != 'y':
break
else:
print("That's all your aliases!")
except ClientError as err:
logging.error(
"Couldn't list your aliases. Here's why: %s",
err.response['Error']['Message'])
# snippet-end:[python.example_code.kms.ListAliases]
# snippet-start:[python.example_code.kms.UpdateAlias]
def update_alias(self, alias, current_key_id):
"""
Updates an alias by assigning it to another key.
:param alias: The alias to reassign.
:param current_key_id: The ARN or ID of the key currently associated with the alias.
"""
new_key_id = input(
f"Alias {alias} is currently associated with {current_key_id}. "
f"Enter another key ID or ARN that you want to associate with {alias}: ")
if new_key_id != '':
try:
self.kms_client.update_alias(AliasName=alias, TargetKeyId=new_key_id)
except ClientError as err:
logger.error(
"Couldn't associate alias %s with key %s. Here's why: %s",
alias, new_key_id, err.response['Error']['Message'])
else:
print(f"Alias {alias} is now associated with key {new_key_id}.")
else:
print("Skipping alias update.")
# snippet-end:[python.example_code.kms.UpdateAlias]
# snippet-start:[python.example_code.kms.DeleteAlias]
def delete_alias(self):
"""
Deletes an alias.
"""
alias = input(f"Enter an alias that you'd like to delete: ")
if alias != '':
try:
self.kms_client.delete_alias(AliasName=alias)
except ClientError as err:
logger.error(
"Couldn't delete alias %s. Here's why: %s",
alias, err.response['Error']['Message'])
else:
print(f"Deleted alias {alias}.")
else:
print("Skipping alias deletion.")
# snippet-end:[python.example_code.kms.DeleteAlias]
def alias_management(kms_client):
logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')
print('-'*88)
print("Welcome to the AWS Key Management Service (AWS KMS) alias management demo.")
print('-'*88)
alias_manager = AliasManager(kms_client)
key_id = None
while key_id is None:
key_id = alias_manager.setup()
print('-'*88)
alias = None
while alias is None:
alias = alias_manager.create_alias(key_id)
print('-'*88)
alias_manager.list_aliases()
print('-'*88)
alias_manager.update_alias(alias, key_id)
print('-'*88)
alias_manager.delete_alias()
print('-'*88)
alias_manager.teardown()
print("\nThanks for watching!")
print('-'*88)
if __name__ == '__main__':
try:
alias_management(boto3.client('kms'))
except Exception:
logging.exception("Something went wrong with the demo!")
# snippet-end:[python.example_code.kms.Scenario_AliasManagement]
| awsdocs/aws-doc-sdk-examples | python/example_code/kms/alias_management.py | Python | apache-2.0 | 7,114 |
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Main driver logic for managing accounts on GCE instances."""
import json
import logging
import os
import time
LOCKFILE = '/var/lock/manage-accounts.lock'
class AccountsManager(object):
"""Create accounts on a machine."""
def __init__(self, accounts_module, desired_accounts, system, lock_file,
lock_fname, single_pass=True):
"""Construct an AccountsFromMetadata with the given module injections."""
if not lock_fname:
lock_fname = LOCKFILE
self.accounts = accounts_module
self.desired_accounts = desired_accounts
self.lock_file = lock_file
self.lock_fname = lock_fname
self.system = system
self.single_pass = single_pass
def Main(self):
logging.debug('AccountsManager main loop')
# If this is a one-shot execution, then this can be run normally.
# Otherwise, run the actual operations in a subprocess so that any
# errors don't kill the long-lived process.
if self.single_pass:
self.RegenerateKeysAndCreateAccounts()
return
# Run this forever in a loop.
while True:
# Fork and run the key regeneration and account creation while the
# parent waits for the subprocess to finish before continuing.
# Create a pipe used to get the new etag value from child
reader, writer = os.pipe() # these are file descriptors, not file objects
pid = os.fork()
if pid:
# we are the parent
os.close(writer)
reader = os.fdopen(reader) # turn r into a file object
json_tags = reader.read()
if json_tags:
etags = json.loads(json_tags)
if etags:
self.desired_accounts.attributes_etag = etags[0]
self.desired_accounts.instance_sshkeys_etag = etags[1]
reader.close()
logging.debug('New etag: %s', self.desired_accounts.attributes_etag)
os.waitpid(pid, 0)
else:
# we are the child
os.close(reader)
writer = os.fdopen(writer, 'w')
try:
self.RegenerateKeysAndCreateAccounts()
except Exception as e:
logging.warning('error while trying to create accounts: %s', e)
# An error happened while trying to create the accounts. Lets sleep a
# bit to avoid getting stuck in a loop for intermittent errors.
time.sleep(5)
# Write the etag to pass to parent
json_tags = json.dumps(
[self.desired_accounts.attributes_etag,
self.desired_accounts.instance_sshkeys_etag])
writer.write(json_tags)
writer.close()
# The use of os._exit here is recommended for subprocesses spawned
# by forking to avoid issues with running the cleanup tasks that
# sys.exit() runs by preventing issues from the cleanup being run
# once by the subprocess and once by the parent process.
os._exit(0)
def RegenerateKeysAndCreateAccounts(self):
"""Regenerate the keys and create accounts as needed."""
logging.debug('RegenerateKeysAndCreateAccounts')
if self.system.IsExecutable('/usr/share/google/first-boot'):
self.system.RunCommand('/usr/share/google/first-boot')
self.lock_file.RunExclusively(self.lock_fname, self.CreateAccounts)
def CreateAccounts(self):
"""Create all accounts that should be present."""
desired_accounts = self.desired_accounts.GetDesiredAccounts()
if not desired_accounts:
return
for username, ssh_keys in desired_accounts.iteritems():
if not username:
continue
self.accounts.CreateUser(username, ssh_keys)
| kimasaki/compute-image-packages | google-daemon/usr/share/google/google_daemon/accounts_manager.py | Python | apache-2.0 | 4,198 |
# -*- coding: utf-8 -*-
import scrapy
class PDFItem(scrapy.Item):
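    """Item for PDF links, following the field convention of Scrapy's
    FilesPipeline: the pipeline reads source URLs from ``file_urls`` and
    stores the download results (url, path, checksum) in ``files``."""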
file_urls = scrapy.Field()
files = scrapy.Field()
| chrisdev/barbados_stockex_scraper | barbados_stockex_scraper/items.py | Python | bsd-2-clause | 127 |
"""Tests for group models."""
# pylint: disable=invalid-name, no-self-use
from unittest import skipIf
from django.conf import settings
from django.contrib.auth.models import Group as AuthGroup
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from django.db.models.query import EmptyQuerySet
from django.test import TestCase
from django.utils.timezone import now
from mock import patch
from model_mommy import mommy
from open_connect.connect_core.utils.basetests import ConnectTestMixin
from open_connect.groups import models
from open_connect.groups.models import Category, Group, GroupRequest
from open_connect.media.models import ShortenedURL
from open_connect.media.tests import get_in_memory_image_instance
IS_SQLITE = settings.DATABASES['default']['ENGINE'].endswith('sqlite3')
class GroupManagerTest(ConnectTestMixin, TestCase):
"""Group manager tests."""
def test_create_with_name(self):
"""Group.group.name should be the name passed to create."""
group = Group.objects.create(name='Some group')
self.assertEqual(group.group.name, 'Some group')
def test_create_with_name_and_auth_group(self):
"""If both group and name are given, prefer group over name."""
auth_group = AuthGroup.objects.create(name='Some group')
group = Group.objects.create(group=auth_group, name='Blahhhh')
self.assertEqual(group.group.name, 'Some group')
def test_published_has_published_groups(self):
"""Published groups should be returned in manager published method."""
group = Group.objects.create(name='Some group')
self.assertIn(group, Group.objects.published())
def test_published_no_unpublished_groups(self):
"""Unpublished groups should not be returned in manager published."""
group = Group.objects.create(name='Some group', published=False)
self.assertNotIn(group, Group.objects.published())
def test_active_groups_in_result(self):
"""Groups with active status should be included in all."""
group = Group.objects.create(name='Hello')
self.assertIn(group, Group.objects.all())
def test_deleted_groups_not_in_result(self):
"""Groups with deleted status """
group = Group.objects.create(name='Delete me!')
group.delete()
self.assertNotIn(group, Group.objects.all())
def test_with_deleted_includes_deleted(self):
"""Deleted groups should still be in with_deleted."""
group = Group.objects.create(name='find meeeee', status='deleted')
self.assertIn(group, Group.objects.with_deleted())
class GroupManagerSearchTest(ConnectTestMixin, TestCase):
"""Tests for the group search manager."""
@classmethod
def setUpClass(cls):
"""Only create these once."""
cls.regular_group = Group.objects.create(
name='Regular',
description='This is a group for people who like cheese.',
latitude=41.903123,
longitude=-87.685318,
radius=10
)
cls.regular_group.tags.add(
'Cheddar is better', 'I like muenster', 'Parmesan is yummy')
cls.unpublished_group = Group.objects.create(
name='Unpublished',
published=False)
# As `slug` is unique in category we must delete all non-default
# categories to ensure that our 2 slugs have not been created
# elsewhere
Category.objects.exclude(pk=1).delete()
cls.gvp_category = mommy.make('groups.Category', slug='gvp')
cls.lgbt_category = mommy.make('groups.Category', slug='lgbt')
cls.gvp_group = Group.objects.create(
name='GVP',
category=cls.gvp_category,
description="No more violence.",
latitude=42.397934,
longitude=-87.836380,
radius=50,
state='NY'
)
cls.gvp_group.tags.add('guns', 'violence')
cls.lgbt_group = Group.objects.create(
name='LGBT',
category=cls.lgbt_category,
description='Gay pride!',
latitude=43.655267,
longitude=-79.384460,
radius=50,
state='IL'
)
cls.lgbt_group.tags.add('lesbian', 'gay')
@classmethod
def tearDownClass(cls):
"""Delete groups that aren't needed anymore."""
cls.regular_group.delete()
cls.unpublished_group.delete()
cls.gvp_group.delete()
cls.lgbt_group.delete()
def test_no_targeting_options(self):
"""If nothing is passed to search, it should equal published."""
self.assertQuerysetEqual(
Group.objects.search(),
[repr(item) for item in Group.objects.published()],
ordered=False
)
def test_search_matches_category(self):
"""Searching by a string matches a group's category."""
gvp_groups = Group.objects.search('gvp')
self.assertIn(self.gvp_group, gvp_groups)
self.assertNotIn(self.lgbt_group, gvp_groups)
self.assertNotIn(self.regular_group, gvp_groups)
self.assertNotIn(self.unpublished_group, gvp_groups)
def test_search_matches_name(self):
"""Searching by a string matches a group's name."""
name_groups = Group.objects.search(search='LGB')
self.assertIn(self.lgbt_group, name_groups)
self.assertNotIn(self.regular_group, name_groups)
self.assertNotIn(self.unpublished_group, name_groups)
self.assertNotIn(self.gvp_group, name_groups)
def test_search_matches_description(self):
"""Searching by a string matches a group's description."""
groups = Group.objects.search(search='violence')
self.assertIn(self.gvp_group, groups)
self.assertNotIn(self.regular_group, groups)
self.assertNotIn(self.unpublished_group, groups)
self.assertNotIn(self.lgbt_group, groups)
def test_search_matches_tags(self):
"""Searching by a string matches a group's tags."""
groups = Group.objects.search(search='parmesan-is-yummy')
self.assertIn(self.regular_group, groups)
self.assertNotIn(self.gvp_group, groups)
self.assertNotIn(self.unpublished_group, groups)
self.assertNotIn(self.lgbt_group, groups)
@skipIf(IS_SQLITE, "Test not supported when using sqlite backend.")
def test_search_location_has_distances(self):
"""Searching by location matches groups that are close."""
groups = Group.objects.search(location=60657)
self.assertTrue(
all((hasattr(group, 'distance') for group in groups)))
@skipIf(IS_SQLITE, "Test not supported when using sqlite backend.")
def test_coords_targeting(self):
"""Searching by coordinates matches groups that are close."""
groups = Group.objects.location_search((43.603097, -79.592514))
self.assertIn(self.lgbt_group, groups)
self.assertNotIn(self.regular_group, groups)
self.assertNotIn(self.unpublished_group, groups)
self.assertNotIn(self.gvp_group, groups)
@skipIf(IS_SQLITE, "Test not supported when using sqlite backend.")
def test_coords_string_targeting(self):
"""Searching by coordinates in a string matches close groups."""
groups = Group.objects.location_search('43.603097, -79.592514')
self.assertIn(self.lgbt_group, groups)
self.assertNotIn(self.regular_group, groups)
self.assertNotIn(self.unpublished_group, groups)
self.assertNotIn(self.gvp_group, groups)
@patch('open_connect.groups.models.get_coordinates')
def test_coords_no_targets(self, mock):
"""If no coordinates are returned return an empty QuerySet"""
mock.return_value = None
groups = Group.objects.location_search('None, None')
mock.assert_called_once_with('None, None')
self.assertIsInstance(groups, EmptyQuerySet)
class GroupTest(ConnectTestMixin, TestCase):
"""Test group methods."""
def test_unicode(self):
"""Unicode conversion returns unicode of the group's name."""
group = Group.objects.create(name='Some group')
self.assertEqual(unicode(group), u'Some group')
def test_get_absolute_url(self):
"""Test getting the absolute URL of a group"""
group = mommy.make(Group)
url = group.get_absolute_url()
self.assertEqual(url, reverse(
'group_details', args=(group.pk,)))
def test_full_url(self):
"""Unicode conversion returns unicode of the group's name."""
group = mommy.make(Group)
url = group.full_url
self.assertEqual(url, settings.ORIGIN + reverse(
'group_details', args=(group.pk,)))
self.assertIn('http', url)
self.assertIn(str(group.pk), url)
def test_clean_location_fields(self):
"""Clean should run without errors with coordinates and radius."""
group = Group.objects.create(
name='Some group',
latitude='123',
longitude='123',
radius='10'
)
self.assertIsNone(group.clean())
def test_clean_location_fields_latitude_missing(self):
"""Clean raises ValidationError if latitude is missing."""
group = Group.objects.create(
name='Some group',
longitude='123',
radius='10'
)
self.assertRaises(ValidationError, group.clean)
def test_clean_location_fields_longitude_missing(self):
"""Clean raises ValidationError if longitude is missing."""
group = Group.objects.create(
name='Some group',
latitude='123',
radius='10'
)
self.assertRaises(ValidationError, group.clean)
def test_clean_location_fields_radius_missing(self):
"""Clean raises ValidationError if radius is missing."""
group = Group.objects.create(
name='Some group',
latitude='123',
longitude='123',
)
self.assertRaises(ValidationError, group.clean)
def test_clean_location_fields_no_location_fields(self):
"""Clean runs without errors if no location data is set."""
group = Group.objects.create(name='Some group')
self.assertIsNone(group.clean())
def test_get_members(self):
"""Group.get_members() returns users that are in a group."""
group = mommy.make(Group)
user = self.create_user()
user.add_to_group(group.pk)
self.assertIn(user, group.get_members())
user.remove_from_group(group)
self.assertNotIn(user, group.get_members())
def test_get_members_avatar_prioritized(self):
"""
Group.get_members_avatar_prioritized() returns users that are in a
group ordered by if they have an image.
"""
group = mommy.make(Group)
image = mommy.make('media.Image')
user1 = self.create_user()
user2 = self.create_user(image=image)
user1.add_to_group(group.pk)
user2.add_to_group(group.pk)
members = group.get_members_avatar_prioritized()
self.assertEqual(user2, members[0])
self.assertEqual(user1, members[1])
user1.remove_from_group(group)
self.assertNotIn(user1, group.get_members_avatar_prioritized())
@patch.object(models, 'Thread')
def test_public_threads_by_user(self, mock_thread):
"""Get threads that this user can see for the current group."""
# Thread.public.by_user is already tested so we'll just verify
# that the right thing gets called.
group = mommy.make(Group)
user = self.create_user()
group.public_threads_by_user(user)
mock_thread.public.by_user.assert_called_with(user)
mock_thread.public.by_user().filter.assert_called_with(group=group)
@patch.object(models, 'Thread')
def test_public_threads(self, mock_thread):
"""Test that this gets public threads for this group."""
# pylint: disable=no-self-use
# Thread.public.by_group is already tested, so just verify it is called.
group = mommy.make(Group)
group.public_threads()
mock_thread.public.by_group.assert_called_with(group=group)
def test_is_national(self):
"""Should be True if all geolocation fields are missing."""
group = Group.objects.create(
name='some local group', latitude=None, longitude=None, radius=None)
self.assertTrue(group.is_national)
def test_is_national_some_geoloc_fields_missing(self):
"""Should be True if any geolocation field is missing."""
group = Group.objects.create(
name='some local group', latitude=1, longitude=1, radius=None)
self.assertTrue(group.is_national)
def test_is_national_with_geolocated_group(self):
"""Should be false if all geolocation fields are entered."""
group = Group.objects.create(
name='some local group', latitude=1, longitude=1, radius=1)
self.assertFalse(group.is_national)
def test_delete(self):
"""Test delete method"""
group = Group.objects.create(name="please don't delete me :(")
self.assertEqual(group.status, 'active')
user = self.create_user()
user.add_to_group(group.pk)
group.delete()
group = Group.objects.with_deleted().get(pk=group.pk)
self.assertEqual(group.status, 'deleted')
self.assertFalse(group.group.user_set.filter(pk=user.pk).exists())
class GroupOwnersChangedReceiverTest(ConnectTestMixin, TestCase):
"""Tests for group_owners_changed reciever."""
def test_user_added_gets_permissions(self):
"""When a user is added as owner, they should get new permissions."""
group = mommy.make('groups.Group')
user = self.create_user()
group.owners.add(user)
self.assertTrue(user.has_perm('accounts.can_initiate_direct_messages'))
def test_user_added_already_in_group(self):
"""If a user already has owner permissions, shouldn't have any error."""
group = mommy.make('groups.Group')
user = self.create_user()
group.owners.add(user)
group.owners.add(user)
self.assertTrue(user.has_perm('accounts.can_initiate_direct_messages'))
@patch('open_connect.groups.models.cache')
def test_adding_clears_cache(self, mock):
"""Test that the cache is cleared for each owner added"""
group = self.create_group()
user1 = self.create_user()
user2 = self.create_user()
group.owners.add(user1)
group.owners.add(user2)
mock.delete.assert_any_call(user1.cache_key + 'owned_groups')
mock.delete.assert_any_call(user2.cache_key + 'owned_groups')
def test_removing_clears_cache(self):
"""Test that the cache is cleared for each owner removing"""
group = self.create_group()
user1 = self.create_user()
user2 = self.create_user()
# Because the cache is cleared when we add the user to the group using
# the same receiver, we should separate out those calls and confirm
# they happened
with patch('open_connect.groups.models.cache') as cache_add_mock:
group.owners.add(user1)
group.owners.add(user2)
cache_add_mock.delete.assert_any_call(
user1.cache_key + 'owned_groups')
cache_add_mock.delete.assert_any_call(
user2.cache_key + 'owned_groups')
self.assertEqual(cache_add_mock.delete.call_count, 2)
with patch('open_connect.groups.models.cache') as cache_remove_mock:
group.owners.remove(user1)
group.owners.remove(user2)
cache_remove_mock.delete.assert_any_call(
user1.cache_key + 'owned_groups')
cache_remove_mock.delete.assert_any_call(
user2.cache_key + 'owned_groups')
self.assertEqual(cache_remove_mock.delete.call_count, 2)
class GroupImagesTest(ConnectTestMixin, TestCase):
"""Test images method"""
def setUp(self):
"""Setup the images test"""
# Make a popular image and an unpopular image
self.superuser = self.create_superuser()
self.popular_image = get_in_memory_image_instance(user=self.superuser)
self.popular_image.view_count = 10000
self.popular_image.save()
self.unpopular_image = get_in_memory_image_instance(user=self.superuser)
def get_images_message(self, group, images=None):
"""Make a message that has the images in the specified group."""
# Create a message
thread = mommy.make('connectmessages.Thread')
message = mommy.make(
'connectmessages.Message', thread=thread, sender=self.superuser)
# Default images
if not images:
images = [self.popular_image, self.unpopular_image]
# Add images to message
for image in images:
message.images.add(image)
# Add message to group
message.thread.group = group
message.thread.save()
return message
def test_image_posted_to_group_present(self):
"""An image posted to the group should be present."""
group = mommy.make('groups.Group')
message = self.get_images_message(group)
self.assertQuerysetEqual(
group.images(user=message.sender).all(),
[repr(item) for item in message.images.all()],
ordered=False
)
def test_image_posted_to_another_group_not_present(self):
"""An image posted to another group should not be present."""
group = mommy.make('groups.Group')
other_group = mommy.make('groups.Group')
message = self.get_images_message(other_group)
for image in message.images.all():
self.assertNotIn(image, group.images(user=message.sender))
class GroupLinksTest(ConnectTestMixin, TestCase):
"""Test the links method"""
def setUp(self):
"""Setup the links test"""
super(GroupLinksTest, self).setUp()
# Make a popular link and an unpopular link
self.popular_link = ShortenedURL(url='http://something.com')
self.popular_link.click_count = 10000
self.popular_link.save()
self.unpopular_link = ShortenedURL.objects.create(
url='http://somethingelse.com')
def get_links_message(self, group, links=None):
"""Make a message that has the links in the specified group."""
# Create a message
thread = mommy.make('connectmessages.Thread')
message = mommy.make(
'connectmessages.Message',
thread=thread,
sender=self.create_superuser()
)
# Default images
if not links:
links = [self.popular_link, self.unpopular_link]
# Add images to message
for link in links:
message.links.add(link)
# Add message to group
message.thread.group = group
message.thread.save()
return message
def test_link_posted_to_group_present(self):
"""An image posted to the group should be present."""
group = mommy.make('groups.Group')
message = self.get_links_message(group)
self.assertQuerysetEqual(
group.links().all(),
[repr(item) for item in message.links.all()],
ordered=False
)
def test_link_posted_to_another_group_not_present(self):
"""An image posted to another group should not be present."""
group = mommy.make('groups.Group')
other_group = mommy.make('groups.Group')
message = self.get_links_message(other_group)
for link in message.links.all():
self.assertNotIn(link, group.links())
class GroupRequestManagerTest(ConnectTestMixin, TestCase):
"""Group join requests manager tests."""
def test_unapproved_present(self):
"""Unapproved requests are returned in unapproved method."""
group_request = GroupRequest.objects.create(
user=self.create_user(), group=mommy.make('groups.Group'))
self.assertIn(group_request, GroupRequest.objects.unapproved())
def test_approved_not_present(self):
"""Approved requests are not returned in unapproved method."""
group_request = GroupRequest.objects.create(
user=self.create_user(),
group=mommy.make('groups.Group'),
moderated_by=self.create_superuser(),
moderated_at=now(),
approved=True
)
self.assertNotIn(group_request, GroupRequest.objects.unapproved())
def test_rejected_not_present(self):
"""Rejected requests are not returned in unapproved method."""
group_request = GroupRequest.objects.create(
user=self.create_user(),
group=mommy.make('groups.Group'),
moderated_by=self.create_superuser(),
moderated_at=now(),
approved=False
)
self.assertNotIn(group_request, GroupRequest.objects.unapproved())
class GroupRequestTest(ConnectTestMixin, TestCase):
"""GroupRequest method tests."""
def test_unicode(self):
"""Unicode conversion returns the expected unicode string."""
user = mommy.make(
'accounts.User',
email='[email protected]',
first_name='Katherine',
last_name='Janeway',
state='IL',
zip_code='60657'
)
group_request = mommy.prepare(GroupRequest, user=user)
self.assertEqual(
unicode(group_request),
u'<a href="{url}">Katherine Janeway ([email protected] / IL, 60657)'
u' requested to join {group}.</a>'.format(
url=user.get_absolute_url(),
group=group_request.group
)
)
| lpatmo/actionify_the_news | open_connect/groups/tests/test_models.py | Python | mit | 21,994 |
#
# replcode.py
# By Joel Gould
# [email protected]
# http://www.gouldhome.com/
#
# This subroutine takes a string, which may have embedded Python code, and
# executes that embedded code, returning the resulting string. It is useful
# for things like dynamically producing webpages.
#
# We first execute any code in [!! ... !!]. Then we evaluate any code in
# [?? ... ??]. We do allow nested block of executed code. To use a nested
# block, include an integer in the block delimiters, ex: [1!! ... !!1]
#
# You can pass an errorLogger function to runPythonCode. This function
# will be called to print additional error messages. Default is
# sys.stdout.write().
#
# Use the special function "OUTPUT" inside the embeded Python code to add text
# to the output.
#
# Here is a sample of using replcode:
#
# >>> import replcode
# >>> input_text = """
# ... Normal line.
# ... Expression [?? 1+2 ??].
# ... Global variable [?? variable ??].
# ... [!!
# ... def foo(x):
# ... return x+x !!].
# ... Function [?? foo('abc') ??].
# ... [!!
# ... OUTPUT('Nested call [?? variable ??]') !!].
# ... [!!
# ... OUTPUT('''Double nested [1!!
# ... myVariable = '456' !!1][?? myVariable ??]''') !!].
# ... """
# >>> global_dict = { 'variable': '123' }
# >>> output_text = replcode.runPythonCode(input_text,global_dict)
# >>> print output_text
#
# Normal line.
# Expression 3.
# Global variable 123.
# .
# Function abcabc.
# Nested call 123.
# Double nested 456.
#
import re
import sys
import string
# Exception raised when an embedded code block cannot be parsed.
class ParsingError(Exception):
    pass
#---------------------------------------------------------------------------
def runPythonCode(data, global_dict={}, local_dict=None, errorLogger=None):
eval_state = EvalState(global_dict, local_dict, errorLogger)
data = re.sub(r'(?s)\[(?P<num>\d?)!!(?P<code>.+?)!!(?P=num)\]', eval_state.exec_python, data)
data = re.sub(r'(?s)\[\?\?(?P<code>.+?)\?\?\]', eval_state.eval_python, data)
return data
#---------------------------------------------------------------------------
# This class is used to encapsulate the global and local dictionaries with
# the exec_python and eval_python functions.
class EvalState:
def __init__(self, global_dict, local_dict, errorLogger):
self.global_dict = global_dict
self.local_dict = local_dict
if errorLogger:
self.errorLogger = errorLogger
else:
self.errorLogger = sys.stdout.write
# copy these things into the global dictionary
self.global_dict['OUTPUT'] = OUTPUT
self.global_dict['sys'] = sys
self.global_dict['string'] = string
self.global_dict['__builtins__'] = __builtins__
# Subroutine called from re module for every block of code to be
    # executed. Executed code cannot return anything, but it is allowed to
    # call the OUTPUT subroutine. Any string produced from OUTPUT will be
    # added to the OUTPUT_TEXT global variable and returned.
def exec_python(self, result):
# Condition the code. Replace all tabs with four spaces. Then make
# sure that we unindent every line by the indentation level of the
# first line.
code = result.group('code')
code = string.replace(code, '\t', ' ')
result2 = re.search(r'(?P<prefix>\n[ ]*)[#a-zA-Z0-9''"]', code)
if not result2:
            raise ParsingError('Invalid template code expression: ' + code)
code = string.replace(code, result2.group('prefix'), '\n')
code = code + '\n'
try:
self.global_dict['OUTPUT_TEXT'] = ''
if self.local_dict:
exec code in self.global_dict, self.local_dict
else:
exec code in self.global_dict
return self.global_dict['OUTPUT_TEXT']
except:
self.errorLogger('\n---- Error parsing: ----\n')
self.errorLogger(code)
self.errorLogger('\n------------------------\n')
raise
# Subroutine called from re module for every block of code to be
# evaluated. Returned the result of the evaluation (should be a string).
def eval_python(self, result):
code = result.group('code')
code = string.replace(code, '\t', ' ')
try:
if self.local_dict:
result = eval(code, self.global_dict, self.local_dict)
else:
result = eval(code, self.global_dict)
return str(result)
except:
self.errorLogger('\n---- Error parsing: ----\n')
self.errorLogger(code)
self.errorLogger('\n------------------------\n')
raise
#---------------------------------------------------------------------------
# This routine is only called when OUTPUT() is included in executed Python
# code from the templates. It evaluates its parameter as if it was a
# template and appends the result to the OUTPUT_TEXT variable in the global
# dictionary.
def OUTPUT(data):
# This magic python code extracts the local and global dictionaries in
# the stack frame which was in effect when OUTPUT was called.
try:
raise ZeroDivisionError
except ZeroDivisionError:
local_dict = sys.exc_info()[2].tb_frame.f_back.f_locals
global_dict = sys.exc_info()[2].tb_frame.f_back.f_globals
global_dict['OUTPUT_TEXT'] = global_dict['OUTPUT_TEXT'] + runPythonCode(data, global_dict, local_dict)
| ActiveState/code | recipes/Python/52217_Replace_embedded_Pythcode_string_results/recipe-52217.py | Python | mit | 5,534 |
import xbmcgui, xbmcplugin
import urllib
import sys
# xbmc-hockey-streams
# author: craig mcnicholas, swedemon
# contact: [email protected], [email protected]
addonId = 'plugin.video.xbmc-hockey-streams-frodo'
dataPath = 'special://profile/addon_data/' + addonId
# Represents an enumeration for application modes
class Mode:
HOME = 1
ONDEMAND = 2
ONDEMAND_BYDATE = 3
ONDEMAND_BYDATE_YEARMONTH = 4
ONDEMAND_BYDATE_YEARMONTH_DAY = 5
ONDEMAND_BYDATE_YEARMONTH_DAY_EVENT = 6
ONDEMAND_BYDATE_CUSTOM = 7
ONDEMAND_BYDATE_CUSTOM_YEARMONTH = 8
ONDEMAND_BYDATE_CUSTOM_YEARMONTH_RANGE = 9
ONDEMAND_BYTEAM = 10
ONDEMAND_BYTEAM_LEAGUE = 11
ONDEMAND_BYTEAM_LEAGUE_TEAM = 12
ONDEMAND_BYTEAM_LEAGUE_TEAM_EVENT = 13
LIVE = 14
LIVE_EVENT = 15
LIVE_FINALEVENT = 16
LIVEEVENT = 17
# Method to get the parameters for the current view
# @return a dict of parameter names to values
def getParams():
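    # Example: for sys.argv[2] == '?mode=2&league=nhl' this returns
    # {'mode': '2', 'league': 'nhl'} (parameter names are illustrative).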
param = {}
paramString = sys.argv[2]
if len(paramString) >= 2:
cleanedParams = paramString.replace('?', '')
if (paramString[len(paramString) - 1] == '/'):
paramString = paramString[0 : len(paramString) - 2]
pairsOfParams = cleanedParams.split('&')
for i in range(len(pairsOfParams)):
splitParams = pairsOfParams[i].split('=')
if (len(splitParams)) == 2:
param[splitParams[0]] = splitParams[1]
return param
# Method to parse a parameter as an int
# @param params the parameters to parse
# @key the key name of the parameter to parse
# @return the int value of the parameter or None
def parseParamInt(params, key):
value = None
try:
value = int(params[key])
except:
pass
return value
# Method to parse a parameter as a string
# @param params the parameters to parse
# @key the key name of the parameter to parse
# @return the string value of the parameter or None
def parseParamString(params, key):
value = None
try:
value = urllib.unquote_plus(params[key])
except:
pass
return value
# Method to add a link to the xbmc gui
# @param name the name of the link to show
# @param url the url of the link
# @param image the image to display as the thumbnail
# @param totalItems [optional] the total number of items to add to show progress
# @return a flag indicating success
def addLink(name, url, image, totalItems = None, showfanart = None, icon = None):
ok = True
thumbnail = icon.save() if (icon) else 'special://home/addons/' + addonId + '/Ice-Hockey-icon.png'
item = xbmcgui.ListItem(name, iconImage = 'DefaultVideo.png', thumbnailImage = thumbnail)
item.setInfo(type = 'Video', infoLabels = { 'Title': name })
if showfanart:
item.setProperty( "Fanart_Image", 'special://home/addons/' + addonId + '/fanart.jpg' )
if totalItems == None:
ok = xbmcplugin.addDirectoryItem(handle = int(sys.argv[1]), url = url, listitem = item)
else:
ok = xbmcplugin.addDirectoryItem(handle = int(sys.argv[1]), url = url, listitem = item, totalItems = totalItems)
return ok
# Method to add a directory to the xbmc gui
# @param name the name of the directory to show
# @param mode the mode number
# @param image the image to display as the thumbnail
# @param params a dictionary of params to append
# @param totalItems [optional] the total number of items to add to show progress
# @return a flag indicating success
def addDir(name, mode, image, params, totalItems = None, showfanart = None, icon = None):
thumbnail = icon.save() if (icon) else 'special://home/addons/' + addonId + '/Ice-Hockey-icon.png'
url = sys.argv[0] + "?mode=" + str(mode)
if params != None:
for k, v in params.iteritems():
url += '&' + k + '=' + urllib.quote_plus(v)
ok = True
item = xbmcgui.ListItem(name, iconImage = 'DefaultFolder.png', thumbnailImage = thumbnail)
item.setInfo(type = 'Video', infoLabels = { 'Title': name })
if showfanart:
item.setProperty( "Fanart_Image", 'special://home/addons/' + addonId + '/fanart.jpg' )
if totalItems == None:
ok = xbmcplugin.addDirectoryItem(handle = int(sys.argv[1]), url = url, listitem = item, isFolder = True)
else:
ok = xbmcplugin.addDirectoryItem(handle = int(sys.argv[1]), url = url, listitem = item, isFolder = True, totalItems = totalItems)
return ok
# Method to show a dialog message
# @param title the title of the dialog
# @param message the message of the dialog
def showMessage(title, message):
dialog = xbmcgui.Dialog()
ok = dialog.ok(title, message)
| actionbronson/xbmc-hockey-streams | source/plugin.video.xbmc-hockey-streams-frodo/utils.py | Python | gpl-2.0 | 4,628 |
# Copyright (c) 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os.path
import testtools
from neutron.agent.linux import ip_lib
from neutron.agent.linux import iptables_manager
from neutron.agent.linux import utils
from neutron.tests import base
from neutron.tests.common import machine_fixtures
from neutron.tests.common import net_helpers
from neutron.tests.functional.agent.linux import base as linux_base
from neutron.tests.functional.agent.linux.bin import ipt_binname
from neutron.tests.functional.agent.linux import helpers
from neutron.tests.functional import base as functional_base
class IptablesManagerTestCase(functional_base.BaseSudoTestCase):
DIRECTION_CHAIN_MAPPER = {'ingress': 'INPUT',
'egress': 'OUTPUT'}
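    # iptables rule templates: drop all traffic for a protocol, or only
    # traffic to a specific destination port.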
PROTOCOL_BLOCK_RULE = '-p %s -j DROP'
PROTOCOL_PORT_BLOCK_RULE = '-p %s --dport %d -j DROP'
def setUp(self):
super(IptablesManagerTestCase, self).setUp()
bridge = self.useFixture(net_helpers.VethBridgeFixture()).bridge
self.client, self.server = self.useFixture(
machine_fixtures.PeerMachines(bridge)).machines
self.client_fw, self.server_fw = self.create_firewalls()
        # The port is used in an isolated namespace, which precludes the
        # possibility of port conflicts.
self.port = helpers.get_free_namespace_port(self.server.namespace)
def create_firewalls(self):
client_iptables = iptables_manager.IptablesManager(
namespace=self.client.namespace)
server_iptables = iptables_manager.IptablesManager(
namespace=self.server.namespace)
return client_iptables, server_iptables
def filter_add_rule(self, fw_manager, address, direction, protocol, port):
self._ipv4_filter_execute(fw_manager, 'add_rule', direction, protocol,
port)
def filter_remove_rule(self, fw_manager, address, direction, protocol,
port):
self._ipv4_filter_execute(fw_manager, 'remove_rule', direction,
protocol, port)
def _ipv4_filter_execute(self, fw_manager, method, direction, protocol,
port):
chain, rule = self._get_chain_and_rule(direction, protocol, port)
method = getattr(fw_manager.ipv4['filter'], method)
method(chain, rule)
fw_manager.apply()
def _get_chain_and_rule(self, direction, protocol, port):
chain = self.DIRECTION_CHAIN_MAPPER[direction]
if port:
rule = self.PROTOCOL_PORT_BLOCK_RULE % (protocol, port)
else:
rule = self.PROTOCOL_BLOCK_RULE % protocol
return chain, rule
def _test_with_nc(self, fw_manager, direction, port, udp):
netcat = helpers.NetcatTester(
ip_lib.IPWrapper(self.client.namespace),
ip_lib.IPWrapper(self.server.namespace),
self.server.ip, self.port, run_as_root=True, udp=udp)
self.addCleanup(netcat.stop_processes)
protocol = 'tcp'
if udp:
protocol = 'udp'
self.assertTrue(netcat.test_connectivity())
self.filter_add_rule(
fw_manager, self.server.ip, direction, protocol, port)
with testtools.ExpectedException(RuntimeError):
netcat.test_connectivity()
self.filter_remove_rule(
fw_manager, self.server.ip, direction, protocol, port)
self.assertTrue(netcat.test_connectivity(True))
def test_icmp(self):
self.client.assert_ping(self.server.ip)
self.server_fw.ipv4['filter'].add_rule('INPUT',
linux_base.ICMP_BLOCK_RULE)
self.server_fw.apply()
self.client.assert_no_ping(self.server.ip)
self.server_fw.ipv4['filter'].remove_rule('INPUT',
linux_base.ICMP_BLOCK_RULE)
self.server_fw.apply()
self.client.assert_ping(self.server.ip)
def test_mangle_icmp(self):
self.client.assert_ping(self.server.ip)
self.server_fw.ipv4['mangle'].add_rule('INPUT',
linux_base.ICMP_MARK_RULE)
self.server_fw.ipv4['filter'].add_rule('INPUT',
linux_base.MARKED_BLOCK_RULE)
self.server_fw.apply()
self.client.assert_no_ping(self.server.ip)
self.server_fw.ipv4['mangle'].remove_rule('INPUT',
linux_base.ICMP_MARK_RULE)
self.server_fw.ipv4['filter'].remove_rule('INPUT',
linux_base.MARKED_BLOCK_RULE)
self.server_fw.apply()
self.client.assert_ping(self.server.ip)
def test_tcp_input_port(self):
self._test_with_nc(self.server_fw, 'ingress', self.port, udp=False)
def test_tcp_output_port(self):
self._test_with_nc(self.client_fw, 'egress', self.port, udp=False)
def test_tcp_input(self):
self._test_with_nc(self.server_fw, 'ingress', port=None, udp=False)
def test_tcp_output(self):
self._test_with_nc(self.client_fw, 'egress', port=None, udp=False)
def test_udp_input_port(self):
self._test_with_nc(self.server_fw, 'ingress', self.port, udp=True)
def test_udp_output_port(self):
self._test_with_nc(self.client_fw, 'egress', self.port, udp=True)
def test_udp_input(self):
self._test_with_nc(self.server_fw, 'ingress', port=None, udp=True)
def test_udp_output(self):
self._test_with_nc(self.client_fw, 'egress', port=None, udp=True)
class IptablesManagerNonRootTestCase(base.BaseTestCase):
@staticmethod
def _normalize_module_name(name):
for suf in ['.pyc', '.pyo']:
if name.endswith(suf):
return name[:-len(suf)] + '.py'
return name
def _test_binary_name(self, module, *extra_options):
executable = self._normalize_module_name(module.__file__)
expected = os.path.basename(executable)[:16]
observed = utils.execute([executable] + list(extra_options)).rstrip()
self.assertEqual(expected, observed)
def test_binary_name(self):
self._test_binary_name(ipt_binname)
def test_binary_name_eventlet_spawn(self):
self._test_binary_name(ipt_binname, 'spawn')
| yuewko/neutron | neutron/tests/functional/agent/linux/test_iptables.py | Python | apache-2.0 | 6,946 |
import os
import pytest
from eg import config
from eg import substitute
from mock import patch
# Support python 2 and 3
try:
import ConfigParser
except ImportError:
from configparser import ConfigParser
PATH_EGRC_WITH_DATA = os.path.join(
'test',
'assets',
'egrc_withdata'
)
PATH_EGRC_NO_DATA = os.path.join(
'test',
'assets',
'egrc_nodata'
)
PATH_EGRC_SINGLE_SUB = os.path.join(
'test',
'assets',
'egrc_single_substitution'
)
def _create_dummy_egrc_config():
"""
Return a dummy Config object as if constructed from an egrc.
"""
egrc_examples_dir = 'egrc_examples_dir'
egrc_custom_dir = 'egrc_custom_dir'
egrc_use_color = 'the_egrc_says_yes_color'
egrc_pager_cmd = 'the_egrc_pages_with_more'
egrc_squeeze = 'egrc_says_squeeze'
egrc_subs = ['sub1', 'sub2']
egrc_editor_cmd = 'vim from egrc'
result = config.Config(
examples_dir=egrc_examples_dir,
custom_dir=egrc_custom_dir,
color_config=config.get_default_color_config(),
use_color=egrc_use_color,
pager_cmd=egrc_pager_cmd,
squeeze=egrc_squeeze,
subs=egrc_subs,
editor_cmd=egrc_editor_cmd,
)
return result
@patch('os.path.isfile', return_value=True)
@patch('eg.config.get_config_tuple_from_egrc')
def test_config_returns_egrc_values_if_present(mock_get_config, mock_isfile):
"""
If values are present from an egrc, make sure we take them.
Doesn't make sure they are extracted correctly from an egrc file.
"""
examples_dir = 'test_eg_dir_from_egrc'
custom_dir = 'test_custom_dir_from_egrc'
test_color_config = _get_color_config_from_egrc_withdata()
test_use_color = True
test_pager_cmd = 'more baby'
test_editor_cmd = 'vim is the best'
test_squeeze = True
test_subs = ['alpha', 'beta']
def_config = config.Config(
examples_dir=examples_dir,
custom_dir=custom_dir,
color_config=test_color_config,
use_color=test_use_color,
pager_cmd=test_pager_cmd,
editor_cmd=test_editor_cmd,
squeeze=test_squeeze,
subs=test_subs,
)
mock_get_config.return_value = def_config
resolved_config = config.get_resolved_config(
None,
None,
None,
None,
None,
None,
)
assert resolved_config.examples_dir == examples_dir
assert resolved_config.custom_dir == custom_dir
assert resolved_config.color_config == test_color_config
assert resolved_config.use_color == test_use_color
assert resolved_config.pager_cmd == test_pager_cmd
assert resolved_config.editor_cmd == test_editor_cmd
assert resolved_config.squeeze == test_squeeze
assert resolved_config.subs == test_subs
def _call_get_resolved_config_with_defaults(
egrc_path=None,
examples_dir=None,
custom_dir=None,
use_color=None,
pager_cmd=None,
squeeze=None,
debug=False,
):
"""
    Wraps config.get_resolved_config with default values to allow callers
    to set fewer arguments.
"""
return config.get_resolved_config(
egrc_path=egrc_path,
examples_dir=examples_dir,
custom_dir=custom_dir,
use_color=use_color,
pager_cmd=pager_cmd,
squeeze=squeeze,
debug=debug,
)
@patch('eg.config._inform_if_path_does_not_exist')
def test_inform_if_paths_invalid_selectively_informs(mock_inform):
"""
We should only inform the user if the values are truthy.
"""
config.inform_if_paths_invalid(None, None, None)
assert mock_inform.call_count == 0
egrc_path = 'egrc'
ex_dir = 'ex dir'
cu_dir = 'custom'
config.inform_if_paths_invalid(egrc_path, ex_dir, cu_dir)
assert mock_inform.call_count == 3
mock_inform.assert_any_call(egrc_path)
mock_inform.assert_any_call(ex_dir)
mock_inform.assert_any_call(cu_dir)
@patch('os.path.isfile', return_value=True)
@patch('eg.config.inform_if_paths_invalid')
@patch('eg.config.get_config_tuple_from_egrc')
@patch('eg.config.get_expanded_path')
def test_get_resolved_config_uses_custom_egrc_path(
mock_expand, mock_get_config, mock_inform, mock_isfile
):
"""Make sure we use the passed in egrc path rather than the default."""
egrc_path = 'test/path/to/egrc'
expanded_path = egrc_path + '/expanded'
mock_expand.return_value = expanded_path
_call_get_resolved_config_with_defaults(egrc_path=egrc_path)
# We should have expanded the path as well as tried to retrieve the tuple
# with the path.
mock_expand.return_value = expanded_path
mock_get_config.assert_called_once_with(expanded_path)
def test_get_egrc_config_reads_from_command_line():
"""
get_egrc_config should use the command line path if it is provided.
"""
cli_path = 'path/from/command/line'
expected = 'mock config from egrc'
_assert_about_get_egrc_config(
cli_path=cli_path, path_to_expand=cli_path, expected_config=expected
)
def test_get_egrc_config_uses_default():
"""
get_egrc_config should use the default path if not provided on the command
line.
"""
expected = 'mock config from default position'
_assert_about_get_egrc_config(
cli_path=None,
path_to_expand=config.DEFAULT_EGRC_PATH,
expected_config=expected,
)
def test_get_egrc_returns_empty_if_no_egrc():
"""
We should return an empty config if no file is given.
"""
expected = config.get_empty_config()
_assert_about_get_egrc_config(
cli_path=None,
path_to_expand=config.DEFAULT_EGRC_PATH,
expected_config=expected,
is_file=False,
)
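# Note on the decorated helper below: mock.patch decorators are applied
# bottom-up, so when a test calls it with keyword arguments only, the generated
# mocks are injected positionally in the order (mock_isfile, mock_expand,
# mock_get_config), matching its parameter list, e.g.:
#   _assert_about_get_egrc_config(cli_path='egrc', path_to_expand='egrc')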
@patch('eg.config.get_config_tuple_from_egrc')
@patch('eg.config.get_expanded_path')
@patch('os.path.isfile')
def _assert_about_get_egrc_config(
mock_isfile,
mock_expand,
mock_get_config,
cli_path=None,
path_to_expand=None,
is_file=True,
expected_config=None
):
expanded_path = path_to_expand + 'expanded'
mock_isfile.return_value = is_file
mock_expand.return_value = expanded_path
mock_get_config.return_value = expected_config
actual = config.get_egrc_config(cli_path)
assert actual == expected_config
mock_expand.assert_called_once_with(path_to_expand)
mock_isfile.assert_called_once_with(expanded_path)
    if is_file:
mock_get_config.assert_called_once_with(expanded_path)
@patch('eg.config.get_expanded_path')
@patch('eg.config.get_egrc_config')
def test_get_resolved_config_calls_expand_paths(
mock_get_egrc_config, mock_expand
):
"""
We expect the examples_dir and custom_dir to be expanded.
"""
def pretend_to_expand(path):
        if path:
return path + '/expanded'
else:
return None
mock_get_egrc_config.return_value = config.get_empty_config()
mock_expand.side_effect = pretend_to_expand
# We are going to check against the default values, as the other paths have
# an opportunity to already be expanded at this point. The function that
# parses from the egrc returns the values expanded, eg.
expected_examples_dir = pretend_to_expand(config.DEFAULT_EXAMPLES_DIR)
expected_custom_dir = pretend_to_expand(config.DEFAULT_CUSTOM_DIR)
actual = _call_get_resolved_config_with_defaults()
assert actual.examples_dir == expected_examples_dir
assert actual.custom_dir == expected_custom_dir
@patch('eg.config.get_editor_cmd_from_environment')
@patch('eg.config.inform_if_paths_invalid')
@patch('eg.config.get_egrc_config')
def _assert_about_get_resolved_config(
mock_get_egrc_config,
mock_inform,
mock_get_editor,
cli_egrc_path=None,
cli_examples_dir=None,
cli_custom_dir=None,
cli_use_color=None,
cli_pager_cmd=None,
cli_squeeze=None,
egrc_config=None,
environment_editor_cmd=None,
expected_config=None,
):
"""
Helper for assertions surrounding get_resolved_config.
"""
mock_get_egrc_config.return_value = expected_config
mock_get_editor.return_value = environment_editor_cmd
actual = config.get_resolved_config(
cli_egrc_path,
cli_examples_dir,
cli_custom_dir,
cli_use_color,
cli_pager_cmd,
cli_squeeze,
debug=False
)
assert actual.examples_dir == expected_config.examples_dir
assert actual.custom_dir == expected_config.custom_dir
assert actual.use_color == expected_config.use_color
assert actual.color_config == expected_config.color_config
assert actual.pager_cmd == expected_config.pager_cmd
assert actual.squeeze == expected_config.squeeze
assert actual.subs == expected_config.subs
assert actual.editor_cmd == expected_config.editor_cmd
mock_get_egrc_config.assert_called_once_with(cli_egrc_path)
mock_get_editor.assert_called_once_with()
def test_get_resolved_config_prioritizes_cli():
"""
Options passed in at the command line should override those in the egrc.
"""
cli_examples_dir = 'test_eg_dir_user_defined'
cli_custom_dir = 'test_custom_dir_user_defined'
cli_use_color = 'we_should_use_color'
cli_pager_cmd = 'command_line_says_pager_with_cat'
cli_squeeze = 'command_line_wants_to_squeeze'
egrc_config = _create_dummy_egrc_config()
expected = config.Config(
examples_dir=cli_examples_dir,
custom_dir=cli_custom_dir,
use_color=cli_use_color,
color_config=egrc_config.color_config,
pager_cmd=cli_pager_cmd,
squeeze=cli_squeeze,
subs=egrc_config.subs,
editor_cmd=egrc_config.editor_cmd,
)
_assert_about_get_resolved_config(
cli_egrc_path=None,
cli_examples_dir=cli_examples_dir,
cli_custom_dir=cli_custom_dir,
cli_use_color=cli_use_color,
cli_pager_cmd=cli_pager_cmd,
cli_squeeze=cli_squeeze,
egrc_config=egrc_config,
environment_editor_cmd=None,
expected_config=expected,
)
def test_get_resolved_config_defaults_to_egrc():
"""
When no command line options are passed, we should prefer those in the
egrc.
"""
egrc_config = _create_dummy_egrc_config()
# The second level of priority for editor_cmd is the environment variable,
# so we include that here rather than from the egrc. Slightly hacky.
editor_cmd = 'value from env'
_assert_about_get_resolved_config(
egrc_config=egrc_config,
environment_editor_cmd=editor_cmd,
expected_config=egrc_config,
)
def test_get_resolved_config_falls_back_to_defaults():
"""
When no cli arguments or egrc arguments are present, we should use the raw
defaults.
"""
empty_config = config.get_empty_config()
expected = config.Config(
examples_dir=config.DEFAULT_EXAMPLES_DIR,
custom_dir=config.DEFAULT_CUSTOM_DIR,
use_color=config.DEFAULT_USE_COLOR,
color_config=config.get_default_color_config(),
pager_cmd=config.DEFAULT_PAGER_CMD,
squeeze=config.DEFAULT_SQUEEZE,
subs=config.get_default_subs(),
editor_cmd=config.DEFAULT_EDITOR_CMD,
)
_assert_about_get_resolved_config(
egrc_config=empty_config,
environment_editor_cmd=None,
expected_config=expected
)
def test_get_config_tuple_from_egrc_all_none_when_not_present():
"""
Return correct data if the egrc has no data.
We should return None for all values and an empty color_config if there is
no data in the egrc.
"""
actual = config.get_config_tuple_from_egrc(PATH_EGRC_NO_DATA)
empty_color_config = config.get_empty_color_config()
target = config.Config(
examples_dir=None,
custom_dir=None,
color_config=empty_color_config,
use_color=None,
pager_cmd=None,
squeeze=None,
subs=None,
editor_cmd=None,
)
assert actual == target
@patch('eg.config.get_expanded_path')
def test_get_config_tuple_from_egrc_when_present(mock_expand):
"""
Make sure we extract values correctly from the egrc.
"""
# These are the values hardcoded into the files.
egrc_examples_dir = 'test/example/dir/in/egrc_withdata'
egrc_custom_dir = 'test/custom/dir/in/egrc_withdata'
egrc_use_color = True
egrc_pager_cmd = 'more egrc'
egrc_editor_cmd = 'vim egrc'
color_config_from_file = _get_color_config_from_egrc_withdata()
egrc_squeeze = True
# Order matters--we apply substitutions alphabetically.
egrc_subs = [
substitute.Substitution(r' ', r'', False),
substitute.Substitution('\n\n\n', '\n\n', True)
]
def return_expanded_path(*args, **kwargs):
if args[0] == egrc_examples_dir:
return egrc_examples_dir
elif args[0] == egrc_custom_dir:
return egrc_custom_dir
else:
raise TypeError(
args[0] +
' was an unexpected path--should be ' +
egrc_examples_dir +
' or ' +
egrc_custom_dir
)
mock_expand.side_effect = return_expanded_path
actual = config.get_config_tuple_from_egrc(PATH_EGRC_WITH_DATA)
expected = config.Config(
examples_dir=egrc_examples_dir,
custom_dir=egrc_custom_dir,
color_config=color_config_from_file,
use_color=egrc_use_color,
pager_cmd=egrc_pager_cmd,
squeeze=egrc_squeeze,
subs=egrc_subs,
editor_cmd=egrc_editor_cmd,
)
assert actual == expected
mock_expand.assert_any_call(egrc_examples_dir)
mock_expand.assert_any_call(egrc_custom_dir)
def _get_color_config_from_egrc_withdata():
"""Get the color_config that is defined in the egrc_withdata test file."""
test_color_config = config.ColorConfig(
pound='\x1b[32m',
heading='heading_val',
code='code_val',
backticks='backticks_val',
prompt='prompt_val',
pound_reset='pound_reset_val',
heading_reset='heading_reset_val',
code_reset='code_reset_val',
backticks_reset='backticks_reset_val',
prompt_reset='prompt_reset_val'
)
return test_color_config
def test_merge_color_configs_first_all_none():
second = config.get_default_color_config()
first = config.ColorConfig(
pound=None,
heading=None,
code=None,
backticks=None,
prompt=None,
pound_reset=None,
heading_reset=None,
code_reset=None,
backticks_reset=None,
prompt_reset=None
)
merged = config.merge_color_configs(first, second)
assert merged == second
def test_merge_color_configs_take_all_first():
second = config.get_default_color_config()
first = config.ColorConfig(
pound='pound_color',
heading='heading_color',
code='code_color',
backticks='backticks_color',
prompt='prompt_color',
pound_reset='p_reset',
heading_reset='h_reset',
code_reset='c_reset',
backticks_reset='b_reset',
prompt_reset='prmpt_reset'
)
merged = config.merge_color_configs(first, second)
assert merged == first
def test_merge_color_configs_mixed():
second = config.get_default_color_config()
first = config.ColorConfig(
pound='pound_color',
heading=None,
code='code_color',
backticks=None,
prompt=None,
pound_reset=None,
heading_reset=None,
code_reset=None,
backticks_reset=None,
prompt_reset=None
)
merged = config.merge_color_configs(first, second)
target = config.ColorConfig(
pound=first.pound,
heading=second.heading,
code=first.code,
backticks=second.backticks,
prompt=second.prompt,
pound_reset=second.pound_reset,
heading_reset=second.heading_reset,
code_reset=second.code_reset,
backticks_reset=second.backticks_reset,
prompt_reset=second.prompt_reset
)
assert merged == target
def test_default_color_config():
"""Make sure the default color config is set to the right values."""
actual = config.get_default_color_config()
assert actual.pound == config.DEFAULT_COLOR_POUND
assert actual.heading == config.DEFAULT_COLOR_HEADING
assert actual.code == config.DEFAULT_COLOR_CODE
assert actual.backticks == config.DEFAULT_COLOR_BACKTICKS
assert actual.prompt == config.DEFAULT_COLOR_PROMPT
assert actual.pound_reset == config.DEFAULT_COLOR_POUND_RESET
assert actual.heading_reset == config.DEFAULT_COLOR_HEADING_RESET
assert actual.code_reset == config.DEFAULT_COLOR_CODE_RESET
assert actual.backticks_reset == config.DEFAULT_COLOR_BACKTICKS_RESET
assert actual.prompt_reset == config.DEFAULT_COLOR_PROMPT_RESET
def test_parse_bool_true_for_truthy_values():
"""We should parse both 'True' and 'true' to True."""
assert config._parse_bool_from_raw_egrc_value('True') == True
assert config._parse_bool_from_raw_egrc_value('true') == True
def test_parse_bool_false_for_non_truthy_values():
"""Make sure we parse the likely non-truthy things as false."""
assert config._parse_bool_from_raw_egrc_value('') == False
assert config._parse_bool_from_raw_egrc_value(None) == False
assert config._parse_bool_from_raw_egrc_value('false') == False
assert config._parse_bool_from_raw_egrc_value('False') == False
def test_get_priority_first():
"""The first non-None value should always be returned."""
target = 'alpha'
actual = config.get_priority(target, 'second', 'third')
assert target == actual
def test_get_priority_second():
"""The second non-None should be returned if the first is None."""
target = 'beta'
actual = config.get_priority(None, target, 'third')
assert target == actual
def test_get_priority_third():
"""The last should be taken if the first two are None."""
target = 'gamma'
actual = config.get_priority(None, None, target)
assert target == actual
def test_get_priority_respect_false():
"""
We should accept False as a priority-worthy value.
False should be able to be specified and respected as non-None.
"""
target = False
actual = config.get_priority(False, 'second', 'third')
assert target == actual
def test_parse_substitution_from_list_without_is_multiline():
"""
Make sure we can parse a list without the is_multiline option set, i.e.
just a two element list.
"""
target = substitute.Substitution('foo', 'bar', False)
list_rep = ['foo', 'bar']
actual = config.parse_substitution_from_list(list_rep)
assert actual == target
def test_parse_substitution_from_list_with_is_multiline():
"""
    We should be able to parse a Substitution if is_multiline is set.
"""
target = substitute.Substitution('patt', 'repl', True)
list_rep = ['patt', 'repl', True]
actual = config.parse_substitution_from_list(list_rep)
assert actual == target
def test_parse_substitution_error_if_not_list():
"""
Raise a SyntaxError if the value is not a list.
"""
with pytest.raises(SyntaxError):
config.parse_substitution_from_list('foo_str')
def test_parse_substitution_error_if_wrong_length():
"""
    Raise a SyntaxError if the list has fewer than two elements.
"""
with pytest.raises(SyntaxError):
config.parse_substitution_from_list(['foo'])
def test_parse_substitution_error_if_third_element_not_bool():
"""
Raise a SyntaxError if the third element in the list is not a boolean.
"""
with pytest.raises(SyntaxError):
bad_args = ['foo', 'bar', 'intentionally_not_a_bool']
config.parse_substitution_from_list(bad_args)
def test_get_substitution_from_config_finds_single_substitution():
"""
Retrieve a single substitution from the config. Integration test--actually
pulls from a file.
"""
    # This is hardcoded to match the value in the file.
single_sub = substitute.Substitution('foo', 'bar', False)
target = [single_sub]
config_obj = _get_egrc_config(PATH_EGRC_SINGLE_SUB)
actual = config.get_substitutions_from_config(config_obj)
assert actual == target
def test_get_substitution_from_config_finds_multiple_substitutions():
"""
Retrieve multiple substitutions from a config in the appropriate order.
Integration test--actually pulls from a file.
"""
    # These are hardcoded to match the values in the file. They will be sorted
# alphabetically by pattern name.
first_sub = substitute.Substitution(r' ', r'', False)
second_sub = substitute.Substitution('\n\n\n', '\n\n', True)
target = [first_sub, second_sub]
config_obj = _get_egrc_config(PATH_EGRC_WITH_DATA)
actual = config.get_substitutions_from_config(config_obj)
assert actual == target
def _get_egrc_config(egrc_path):
"""
Return a config object based on the config file at the given path.
"""
with open(egrc_path, 'r') as egrc:
try:
config = ConfigParser.RawConfigParser()
except AttributeError:
config = ConfigParser()
config.readfp(egrc)
return config
| scorphus/eg | test/config_test.py | Python | mit | 21,317 |
revision = '1b8378b8914'
down_revision = '55c008192aa'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('person',
sa.Column('romania_curata', sa.Text(), nullable=True))
def downgrade():
op.drop_column('person', 'romania_curata')
| mgax/mptracker | alembic/versions/1b8378b8914_person_romania_curat.py | Python | mit | 281 |
"""Append module search paths for third-party packages to sys.path.
****************************************************************
* This module is automatically imported during initialization. *
****************************************************************
In earlier versions of Python (up to 1.5a3), scripts or modules that
needed to use site-specific modules would place ``import site''
somewhere near the top of their code. Because of the automatic
import, this is no longer necessary (but code that does it still
works).
This will append site-specific paths to the module search path. On
Unix, it starts with sys.prefix and sys.exec_prefix (if different) and
appends lib/python<version>/site-packages as well as lib/site-python.
On other platforms (mainly Mac and Windows), it uses just sys.prefix
(and sys.exec_prefix, if different, but this is unlikely). The
resulting directories, if they exist, are appended to sys.path, and
also inspected for path configuration files.
A path configuration file is a file whose name has the form
<package>.pth; its contents are additional directories (one per line)
to be added to sys.path. Non-existing directories (or
non-directories) are never added to sys.path; no directory is added to
sys.path more than once. Blank lines and lines beginning with
\code{#} are skipped.
For example, suppose sys.prefix and sys.exec_prefix are set to
/usr/local and there is a directory /usr/local/lib/python1.5/site-packages
with three subdirectories, foo, bar and spam, and two path
configuration files, foo.pth and bar.pth. Assume foo.pth contains the
following:
# foo package configuration
foo
bar
bletch
and bar.pth contains:
# bar package configuration
bar
Then the following directories are added to sys.path, in this order:
/usr/local/lib/python1.5/site-packages/bar
/usr/local/lib/python1.5/site-packages/foo
Note that bletch is omitted because it doesn't exist; bar precedes foo
because bar.pth comes alphabetically before foo.pth; and spam is
omitted because it is not mentioned in either path configuration file.
After these path manipulations, an attempt is made to import a module
named sitecustomize, which can perform arbitrary additional
site-specific customizations. If this import fails with an
ImportError exception, it is silently ignored.
"""
import sys, os
def makepath(*paths):
dir = os.path.join(*paths)
return os.path.normcase(os.path.abspath(dir))
L = sys.modules.values()
for m in L:
if hasattr(m, "__file__"):
m.__file__ = makepath(m.__file__)
del m, L
# This ensures that the initial path provided by the interpreter contains
# only absolute pathnames, even if we're running from the build directory.
L = []
for dir in sys.path:
dir = makepath(dir)
if dir not in L:
L.append(dir)
sys.path[:] = L
del dir, L
def addsitedir(sitedir):
sitedir = makepath(sitedir)
if sitedir not in sys.path:
sys.path.append(sitedir) # Add path component
try:
names = os.listdir(sitedir)
except os.error:
return
names = map(os.path.normcase, names)
names.sort()
for name in names:
if name[-4:] == ".pth":
addpackage(sitedir, name)
def addpackage(sitedir, name):
fullname = os.path.join(sitedir, name)
try:
f = open(fullname)
except IOError:
return
while 1:
dir = f.readline()
if not dir:
break
if dir[0] == '#':
continue
if dir[-1] == '\n':
dir = dir[:-1]
dir = makepath(sitedir, dir)
if dir not in sys.path and os.path.exists(dir):
sys.path.append(dir)
prefixes = [sys.prefix]
if sys.exec_prefix != sys.prefix:
prefixes.append(sys.exec_prefix)
for prefix in prefixes:
if prefix:
if os.sep == '/':
sitedirs = [makepath(prefix,
"lib",
"python" + sys.version[:3],
"site-packages"),
makepath(prefix, "lib", "site-python")]
else:
sitedirs = [prefix]
for sitedir in sitedirs:
if os.path.isdir(sitedir):
addsitedir(sitedir)
# Define new built-ins 'quit' and 'exit'.
# These are simply strings that display a hint on how to exit.
if os.sep == ':':
exit = 'Use Cmd-Q to quit.'
elif os.sep == '\\':
exit = 'Use Ctrl-Z plus Return to exit.'
else:
exit = 'Use Ctrl-D (i.e. EOF) to exit.'
import __builtin__
__builtin__.quit = __builtin__.exit = exit
del exit
# interactive prompt objects for printing the license text, a list of
# contributors and the copyright notice.
class _Printer:
MAXLINES = 23
def __init__(self, name, data, files=(), dirs=()):
self.__name = name
self.__data = data
self.__files = files
self.__dirs = dirs
self.__lines = None
def __setup(self):
if self.__lines:
return
data = None
for dir in self.__dirs:
for file in self.__files:
file = os.path.join(dir, file)
try:
fp = open(file)
data = fp.read()
fp.close()
break
except IOError:
pass
if data:
break
if not data:
data = self.__data
self.__lines = data.split('\n')
self.__linecnt = len(self.__lines)
def __repr__(self):
self.__setup()
if len(self.__lines) <= self.MAXLINES:
return "\n".join(self.__lines)
else:
return "Type %s() to see the full %s text" % ((self.__name,)*2)
def __call__(self):
self.__setup()
prompt = 'Hit Return for more, or q (and Return) to quit: '
lineno = 0
while 1:
try:
for i in range(lineno, lineno + self.MAXLINES):
print self.__lines[i]
except IndexError:
break
else:
lineno += self.MAXLINES
key = None
while key is None:
key = raw_input(prompt)
if key not in ('', 'q'):
key = None
if key == 'q':
break
__builtin__.copyright = _Printer("copyright", sys.copyright)
if sys.platform[:4] == 'java':
__builtin__.credits = _Printer("credits",
"Jython is maintained by the Jython developers (www.jython.org).")
else:
__builtin__.credits = _Printer("credits",
"Python development is led by BeOpen PythonLabs (www.pythonlabs.com).")
here = sys.prefix + "/Lib" # os.path.dirname(os.__file__)
__builtin__.license = _Printer(
"license", "See http://www.pythonlabs.com/products/python2.0/license.html",
["LICENSE.txt", "LICENSE"],
[here, os.path.join(here, os.pardir), os.curdir])
# Set the string encoding used by the Unicode implementation. The
# default is 'ascii', but if you're willing to experiment, you can
# change this.
encoding = "ascii" # Default value set by _PyUnicode_Init()
if 0:
# Enable to support locale aware default string encodings.
import locale
loc = locale.getdefaultlocale()
if loc[1]:
encoding = loc[1]
if 0:
# Enable to switch off string to Unicode coercion and implicit
# Unicode to string conversion.
encoding = "undefined"
if encoding != "ascii":
sys.setdefaultencoding(encoding)
#
# Run custom site specific code, if available.
#
try:
import sitecustomize
except ImportError:
pass
#
# Remove sys.setdefaultencoding() so that users cannot change the
# encoding after initialization. The test for presence is needed when
# this module is run as a script, because this code is executed twice.
#
if hasattr(sys, "setdefaultencoding"):
del sys.setdefaultencoding
def _test():
print "sys.path = ["
for dir in sys.path:
print " %s," % `dir`
print "]"
if __name__ == '__main__':
_test()
| carvalhomb/tsmells | guess/src/Lib/site.py | Python | gpl-2.0 | 8,373 |
'''
Created on May 2, 2012
@package: superdesk posts
@copyright: 2012 Sourcefabric o.p.s.
@license: http://www.gnu.org/licenses/gpl-3.0.txt
@author: Gabriel Nistor
Contains the SQL alchemy meta for post type API.
'''
from ..api.type import PostType
from sqlalchemy.dialects.mysql.base import INTEGER
from sqlalchemy.schema import Column
from sqlalchemy.types import String
from superdesk.meta.metadata_superdesk import Base
# --------------------------------------------------------------------
class PostTypeMapped(Base, PostType):
'''
Provides the mapping for PostType.
'''
__tablename__ = 'post_type'
__table_args__ = dict(mysql_engine='InnoDB', mysql_charset='utf8')
Key = Column('key', String(100), nullable=False, unique=True)
    # Non REST model attribute --------------------------------------
id = Column('id', INTEGER(unsigned=True), primary_key=True)
| superdesk/Live-Blog | plugins/superdesk-post/superdesk/post/meta/type.py | Python | agpl-3.0 | 898 |
# Copyright (c) 2003, Taro Ogawa. All Rights Reserved.
# Copyright (c) 2013, Savoir-faire Linux inc. All Rights Reserved.
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301 USA
from __future__ import unicode_literals
from .lang_EN import Num2Word_EN
class Num2Word_EN_GB(Num2Word_EN):
def to_currency(self, val, longval=True):
return self.to_splitnum(val, hightxt="pound/s", lowtxt="pence",
jointxt="and", longval=longval)
n2w = Num2Word_EN_GB()
to_card = n2w.to_cardinal
to_ord = n2w.to_ordinal
to_ordnum = n2w.to_ordinal_num
to_year = n2w.to_year
def main():
for val in [ 1, 11, 12, 21, 31, 33, 71, 80, 81, 91, 99, 100, 101, 102, 155,
180, 300, 308, 832, 1000, 1001, 1061, 1100, 1500, 1701, 3000,
8280, 8291, 150000, 500000, 1000000, 2000000, 2000001,
-21212121211221211111, -2.121212, -1.0000100]:
n2w.test(val)
n2w.test(1325325436067876801768700107601001012212132143210473207540327057320957032975032975093275093275093270957329057320975093272950730)
for val in [1,120,1000,1120,1800, 1976,2000,2010,2099,2171]:
print val, "is", n2w.to_currency(val)
print val, "is", n2w.to_year(val)
if __name__ == "__main__":
main()
| golismero/golismero | thirdparty_libs/num2words/lang_EN_GB.py | Python | gpl-2.0 | 1,930 |
import numpy as np
from new_feature_set import NewFeatureSet
from sklearn.metrics import log_loss
from sklearn.model_selection import cross_val_score
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import get_scorer
class Evaluator:
    def __init__(self, cv_fold=5):
self._metric = 'neg_log_loss'
self._cv_folds = cv_fold
def evaluate(self, X, y, X_vald = None, y_vald = None):
clf = RandomForestClassifier(n_estimators=32, max_depth=3, n_jobs=-1) # used as base classifier
if X_vald is None:
return cross_val_score(clf, X, y, scoring=self._metric, cv=self._cv_folds, n_jobs=-1).mean()
else:
clf.fit(X, y)
sk = get_scorer(self._metric)
return sk(clf, X_vald, y_vald)
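# Minimal usage sketch (hypothetical data, not part of the library API):
# assumes binary labels so that the 'neg_log_loss' scorer is well defined.
#   X = np.random.rand(100, 4)
#   y = np.random.randint(0, 2, size=100)
#   score = Evaluator(cv_fold=3).evaluate(X, y)  # mean negative log-loss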
| pplonski/gafe | gafe/evaluator.py | Python | apache-2.0 | 844 |
# -*- coding: utf-8 -*-
# FOGLAMP_BEGIN
# See: http://foglamp.readthedocs.io/
# FOGLAMP_END
import json
import urllib.parse
import aiohttp
from aiohttp import web
from foglamp.common import utils
from foglamp.common import logger
from foglamp.common.service_record import ServiceRecord
from foglamp.common.storage_client.exceptions import StorageServerError
from foglamp.common.configuration_manager import ConfigurationManager
from foglamp.services.core import connect
from foglamp.services.core.service_registry.service_registry import ServiceRegistry
from foglamp.services.core.service_registry import exceptions as service_registry_exceptions
from foglamp.common.audit_logger import AuditLogger
__author__ = "Amarendra K Sinha"
__copyright__ = "Copyright (c) 2018 Dianomic Systems"
__license__ = "Apache 2.0"
__version__ = "${VERSION}"
_help = """
-------------------------------------------------------------------------------
| GET | /foglamp/notification/plugin |
| GET POST PUT DELETE | /foglamp/notification |
-------------------------------------------------------------------------------
"""
_logger = logger.setup()
NOTIFICATION_TYPE = ["one shot", "retriggered", "toggled"]
async def get_plugin(request):
""" GET lists of rule plugins and delivery plugins
:Example:
curl -X GET http://localhost:8081/foglamp/notification/plugin
"""
try:
notification_service = ServiceRegistry.get(s_type=ServiceRecord.Type.Notification.name)
_address, _port = notification_service[0]._address, notification_service[0]._port
except service_registry_exceptions.DoesNotExist:
raise web.HTTPNotFound(reason="No Notification service available.")
try:
url = 'http://{}:{}/notification/rules'.format(_address, _port)
rule_plugins = json.loads(await _hit_get_url(url))
url = 'http://{}:{}/notification/delivery'.format(_address, _port)
delivery_plugins = json.loads(await _hit_get_url(url))
except Exception as ex:
raise web.HTTPInternalServerError(reason=ex)
else:
return web.json_response({'rules': rule_plugins, 'delivery': delivery_plugins})
async def get_type(request):
""" GET the list of available notification types
:Example:
curl -X GET http://localhost:8081/foglamp/notification/type
"""
return web.json_response({'notification_type': NOTIFICATION_TYPE})
async def get_notification(request):
""" GET an existing notification
:Example:
curl -X GET http://localhost:8081/foglamp/notification/<notification_name>
"""
try:
notif = request.match_info.get('notification_name', None)
if notif is None:
raise ValueError("Notification name is required.")
notification = {}
storage = connect.get_storage_async()
config_mgr = ConfigurationManager(storage)
notification_config = await config_mgr._read_category_val(notif)
if notification_config:
rule_config = await config_mgr._read_category_val("rule{}".format(notif))
delivery_config = await config_mgr._read_category_val("delivery{}".format(notif))
notification = {
"name": notification_config['name']['value'],
"description": notification_config['description']['value'],
"rule": notification_config['rule']['value'],
"ruleConfig": rule_config,
"channel": notification_config['channel']['value'],
"deliveryConfig": delivery_config,
"notificationType": notification_config['notification_type']['value'],
"enable": notification_config['enable']['value'],
}
else:
raise ValueError("The Notification: {} does not exist.".format(notif))
except ValueError as ex:
raise web.HTTPBadRequest(reason=str(ex))
except Exception as ex:
raise web.HTTPInternalServerError(reason=ex)
else:
return web.json_response({'notification': notification})
async def get_notifications(request):
""" GET list of notifications
:Example:
curl -X GET http://localhost:8081/foglamp/notification
"""
try:
storage = connect.get_storage_async()
config_mgr = ConfigurationManager(storage)
all_notifications = await config_mgr._read_all_child_category_names("Notifications")
notifications = []
for notification in all_notifications:
notification_config = await config_mgr._read_category_val(notification['child'])
notification = {
"name": notification_config['name']['value'],
"rule": notification_config['rule']['value'],
"channel": notification_config['channel']['value'],
"notificationType": notification_config['notification_type']['value'],
"enable": notification_config['enable']['value'],
}
notifications.append(notification)
except Exception as ex:
raise web.HTTPInternalServerError(reason=ex)
else:
return web.json_response({'notifications': notifications})
async def post_notification(request):
"""
Create a new notification to run a specific plugin
:Example:
curl -X POST http://localhost:8081/foglamp/notification -d '{"name": "Test Notification", "description":"Test Notification", "rule": "threshold", "channel": "email", "notification_type": "one shot", "enabled": false}'
curl -X POST http://localhost:8081/foglamp/notification -d '{"name": "Test Notification", "description":"Test Notification", "rule": "threshold", "channel": "email", "notification_type": "one shot", "enabled": false, "rule_config": {}, "delivery_config": {}}'
"""
try:
notification_service = ServiceRegistry.get(s_type=ServiceRecord.Type.Notification.name)
_address, _port = notification_service[0]._address, notification_service[0]._port
except service_registry_exceptions.DoesNotExist:
raise web.HTTPNotFound(reason="No Notification service available.")
try:
data = await request.json()
if not isinstance(data, dict):
raise ValueError('Data payload must be a valid JSON')
name = data.get('name', None)
description = data.get('description', None)
rule = data.get('rule', None)
channel = data.get('channel', None)
notification_type = data.get('notification_type', None)
enabled = data.get('enabled', None)
rule_config = data.get('rule_config', {})
delivery_config = data.get('delivery_config', {})
if name is None or name.strip() == "":
raise ValueError('Missing name property in payload.')
if description is None:
raise ValueError('Missing description property in payload.')
if rule is None:
raise ValueError('Missing rule property in payload.')
if channel is None:
raise ValueError('Missing channel property in payload.')
if notification_type is None:
raise ValueError('Missing notification_type property in payload.')
if utils.check_reserved(name) is False:
raise ValueError('Invalid name property in payload.')
if utils.check_reserved(rule) is False:
raise ValueError('Invalid rule property in payload.')
if utils.check_reserved(channel) is False:
raise ValueError('Invalid channel property in payload.')
if notification_type not in NOTIFICATION_TYPE:
raise ValueError('Invalid notification_type property in payload.')
if enabled is not None:
if enabled not in ['true', 'false', True, False]:
raise ValueError('Only "true", "false", true, false are allowed for value of enabled.')
is_enabled = "true" if ((type(enabled) is str and enabled.lower() in ['true']) or (
(type(enabled) is bool and enabled is True))) else "false"
storage = connect.get_storage_async()
config_mgr = ConfigurationManager(storage)
curr_config = await config_mgr.get_category_all_items(name)
if curr_config is not None:
raise ValueError("A Category with name {} already exists.".format(name))
try:
# Get default config for rule and channel plugins
url = '{}/plugin'.format(request.url)
try:
# When authentication is mandatory we need to pass token in request header
auth_token = request.token
except AttributeError:
auth_token = None
list_plugins = json.loads(await _hit_get_url(url, auth_token))
r = list(filter(lambda rules: rules['name'] == rule, list_plugins['rules']))
c = list(filter(lambda channels: channels['name'] == channel, list_plugins['delivery']))
if len(r) == 0 or len(c) == 0: raise KeyError
rule_plugin_config = r[0]['config']
delivery_plugin_config = c[0]['config']
except KeyError:
raise ValueError("Invalid rule plugin {} and/or delivery plugin {} supplied.".format(rule, channel))
# Verify if rule_config contains valid keys
if rule_config != {}:
for k, v in rule_config.items():
if k not in rule_plugin_config:
raise ValueError("Invalid key {} in rule_config {} supplied for plugin {}.".format(k, rule_config, rule))
# Verify if delivery_config contains valid keys
if delivery_config != {}:
for k, v in delivery_config.items():
if k not in delivery_plugin_config:
raise ValueError(
"Invalid key {} in delivery_config {} supplied for plugin {}.".format(k, delivery_config, channel))
        # First create templates for the notification and the rule/channel plugins
post_url = 'http://{}:{}/notification/{}'.format(_address, _port, urllib.parse.quote(name))
await _hit_post_url(post_url) # Create Notification template
post_url = 'http://{}:{}/notification/{}/rule/{}'.format(_address, _port, urllib.parse.quote(name),
urllib.parse.quote(rule))
await _hit_post_url(post_url) # Create Notification rule template
post_url = 'http://{}:{}/notification/{}/delivery/{}'.format(_address, _port, urllib.parse.quote(name),
urllib.parse.quote(channel))
await _hit_post_url(post_url) # Create Notification delivery template
# Create configurations
notification_config = {
"description": description,
"rule": rule,
"channel": channel,
"notification_type": notification_type,
"enable": is_enabled,
}
await _update_configurations(config_mgr, name, notification_config, rule_config, delivery_config)
audit = AuditLogger(storage)
await audit.information('NTFAD', {"name": name})
except ValueError as ex:
raise web.HTTPBadRequest(reason=str(ex))
except Exception as e:
raise web.HTTPInternalServerError(reason=str(e))
else:
return web.json_response({'result': "Notification {} created successfully".format(name)})
class NotFoundError(Exception):
pass
async def put_notification(request):
"""
Update an existing notification
:Example:
curl -X PUT http://localhost:8081/foglamp/notification/<notification_name> -d '{"description":"Test Notification modified"}'
curl -X PUT http://localhost:8081/foglamp/notification/<notification_name> -d '{"rule": "threshold", "channel": "email"}'
curl -X PUT http://localhost:8081/foglamp/notification/<notification_name> -d '{"notification_type": "one shot", "enabled": false}'
curl -X PUT http://localhost:8081/foglamp/notification/<notification_name> -d '{"enabled": false}'
curl -X PUT http://localhost:8081/foglamp/notification/<notification_name> -d '{"description":"Test Notification", "rule": "threshold", "channel": "email", "notification_type": "one shot", "enabled": false, "rule_config": {}, "delivery_config": {}}'
"""
try:
notification_service = ServiceRegistry.get(s_type=ServiceRecord.Type.Notification.name)
_address, _port = notification_service[0]._address, notification_service[0]._port
except service_registry_exceptions.DoesNotExist:
raise web.HTTPNotFound(reason="No Notification service available.")
try:
notif = request.match_info.get('notification_name', None)
if notif is None:
raise ValueError("Notification name is required for updation.")
# TODO: Stop notification before update
data = await request.json()
if not isinstance(data, dict):
raise ValueError('Data payload must be a valid JSON')
description = data.get('description', None)
rule = data.get('rule', None)
channel = data.get('channel', None)
notification_type = data.get('notification_type', None)
enabled = data.get('enabled', None)
rule_config = data.get('rule_config', {})
delivery_config = data.get('delivery_config', {})
if utils.check_reserved(notif) is False:
raise ValueError('Invalid notification instance name.')
if rule is not None and utils.check_reserved(rule) is False:
raise ValueError('Invalid rule property in payload.')
if channel is not None and utils.check_reserved(channel) is False:
raise ValueError('Invalid channel property in payload.')
if notification_type is not None and notification_type not in NOTIFICATION_TYPE:
raise ValueError('Invalid notification_type property in payload.')
if enabled is not None:
if enabled not in ['true', 'false', True, False]:
raise ValueError('Only "true", "false", true, false are allowed for value of enabled.')
is_enabled = "true" if ((type(enabled) is str and enabled.lower() in ['true']) or (
(type(enabled) is bool and enabled is True))) else "false"
storage = connect.get_storage_async()
config_mgr = ConfigurationManager(storage)
current_config = await config_mgr._read_category_val(notif)
if current_config is None:
raise NotFoundError('No {} notification instance found'.format(notif))
rule_changed = True if rule is not None and rule != current_config['rule']['value'] else False
channel_changed = True if channel is not None and channel != current_config['channel']['value'] else False
try:
# Get default config for rule and channel plugins
url = str(request.url)
url_parts = url.split("/foglamp/notification")
url = '{}/foglamp/notification/plugin'.format(url_parts[0])
try:
# When authentication is mandatory we need to pass token in request header
auth_token = request.token
except AttributeError:
auth_token = None
list_plugins = json.loads(await _hit_get_url(url, auth_token))
search_rule = rule if rule_changed else current_config['rule']['value']
r = list(filter(lambda rules: rules['name'] == search_rule, list_plugins['rules']))
if len(r) == 0:
raise KeyError
rule_plugin_config = r[0]['config']
search_channel = channel if channel_changed else current_config['channel']['value']
c = list(filter(lambda channels: channels['name'] == search_channel, list_plugins['delivery']))
if len(c) == 0:
raise KeyError
delivery_plugin_config = c[0]['config']
except KeyError:
raise ValueError("Invalid rule plugin:{} and/or delivery plugin:{} supplied.".format(rule, channel))
# Verify if rule_config contains valid keys
if rule_config != {}:
for k, v in rule_config.items():
if k not in rule_plugin_config:
raise ValueError("Invalid key:{} in rule plugin:{}".format(k, rule_plugin_config))
# Verify if delivery_config contains valid keys
if delivery_config != {}:
for k, v in delivery_config.items():
if k not in delivery_plugin_config:
raise ValueError(
"Invalid key:{} in delivery plugin:{}".format(k, delivery_plugin_config))
if rule_changed: # A new rule has been supplied
category_desc = rule_plugin_config['plugin']['description']
category_name = "rule{}".format(notif)
await config_mgr.create_category(category_name=category_name,
category_description=category_desc,
category_value=rule_plugin_config,
keep_original_items=False)
if channel_changed: # A new delivery has been supplied
category_desc = delivery_plugin_config['plugin']['description']
category_name = "delivery{}".format(notif)
await config_mgr.create_category(category_name=category_name,
category_description=category_desc,
category_value=delivery_plugin_config,
keep_original_items=False)
notification_config = {}
if description is not None:
notification_config.update({"description": description})
if rule is not None:
notification_config.update({"rule": rule})
if channel is not None:
notification_config.update({"channel": channel})
if notification_type is not None:
notification_config.update({"notification_type": notification_type})
if enabled is not None:
notification_config.update({"enable": is_enabled})
await _update_configurations(config_mgr, notif, notification_config, rule_config, delivery_config)
except ValueError as e:
raise web.HTTPBadRequest(reason=str(e))
except NotFoundError as e:
raise web.HTTPNotFound(reason=str(e))
except Exception as ex:
raise web.HTTPInternalServerError(reason=str(ex))
else:
# TODO: Start notification after update
return web.json_response({'result': "Notification {} updated successfully".format(notif)})
async def delete_notification(request):
""" Delete an existing notification
:Example:
curl -X DELETE http://localhost:8081/foglamp/notification/<notification_name>
"""
try:
notification_service = ServiceRegistry.get(s_type=ServiceRecord.Type.Notification.name)
_address, _port = notification_service[0]._address, notification_service[0]._port
except service_registry_exceptions.DoesNotExist:
raise web.HTTPNotFound(reason="No Notification service available.")
try:
notif = request.match_info.get('notification_name', None)
if notif is None:
raise ValueError("Notification name is required for deletion.")
# Stop & remove notification
url = 'http://{}:{}/notification/{}'.format(_address, _port, urllib.parse.quote(notif))
notification = json.loads(await _hit_delete_url(url))
        # Remove the child categories for the rule and delivery plugins, then
        # remove the category for the notification itself.
storage = connect.get_storage_async()
config_mgr = ConfigurationManager(storage)
await config_mgr.delete_category_and_children_recursively(notif)
audit = AuditLogger(storage)
await audit.information('NTFDL', {"name": notif})
except ValueError as ex:
raise web.HTTPBadRequest(reason=str(ex))
except Exception as ex:
raise web.HTTPInternalServerError(reason=str(ex))
else:
return web.json_response({'result': 'Notification {} deleted successfully.'.format(notif)})
async def _hit_get_url(get_url, token=None):
headers = {"Authorization": token} if token else None
try:
async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
async with session.get(get_url, headers=headers) as resp:
status_code = resp.status
jdoc = await resp.text()
if status_code not in range(200, 209):
_logger.error("Error code: %d, reason: %s, details: %s, url: %s", resp.status, resp.reason, jdoc,
get_url)
raise StorageServerError(code=resp.status, reason=resp.reason, error=jdoc)
except Exception:
raise
else:
return jdoc
async def _hit_post_url(post_url, data=None):
try:
async with aiohttp.ClientSession() as session:
async with session.post(post_url, data=data) as resp:
status_code = resp.status
jdoc = await resp.text()
if status_code not in range(200, 209):
_logger.error("Error code: %d, reason: %s, details: %s, url: %s", resp.status, resp.reason, jdoc,
post_url)
raise StorageServerError(code=resp.status, reason=resp.reason, error=jdoc)
except Exception:
raise
else:
return jdoc
async def _update_configurations(config_mgr, name, notification_config, rule_config, delivery_config):
try:
# Update main notification
if notification_config != {}:
await config_mgr.update_configuration_item_bulk(name, notification_config)
# Replace rule configuration
if rule_config != {}:
category_name = "rule{}".format(name)
await config_mgr.update_configuration_item_bulk(category_name, rule_config)
# Replace delivery configuration
if delivery_config != {}:
category_name = "delivery{}".format(name)
await config_mgr.update_configuration_item_bulk(category_name, delivery_config)
except Exception as ex:
_logger.exception("Failed to update notification configuration. %s", str(ex))
raise web.HTTPInternalServerError(reason='Failed to update notification configuration. {}'.format(ex))
async def _hit_delete_url(delete_url, data=None):
try:
async with aiohttp.ClientSession() as session:
async with session.delete(delete_url, data=data) as resp:
status_code = resp.status
jdoc = await resp.text()
if status_code not in range(200, 209):
_logger.error("Error code: %d, reason: %s, details: %s, url: %s",
resp.status,
resp.reason,
jdoc,
delete_url)
raise StorageServerError(code=resp.status,
reason=resp.reason,
error=jdoc)
except Exception:
raise
else:
return jdoc
| foglamp/FogLAMP | python/foglamp/services/core/api/notification.py | Python | apache-2.0 | 23,568 |
# ===============================================================================
# Copyright 2011 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from traits.api import Any, List
# ============= standard library imports ========================
# ============= local library imports ==========================
from pychron.core.helpers.strtools import to_bool
from pychron.core.ui.progress_dialog import myProgressDialog
from pychron.envisage.initialization.initialization_parser import InitializationParser
from pychron.globals import globalv
from pychron.hardware.core.i_core_device import ICoreDevice
from pychron.loggable import Loggable
class InitializerError(BaseException):
pass
class Initializer(Loggable):
name = "Initializer"
_init_list = List
_parser = Any
_pd = Any
def add_initialization(self, a):
""" """
self.debug("add initialization {}".format(a))
self._init_list.append(a)
def run(self, application=None):
self._parser = InitializationParser()
self.info("Initialization Path: {}".format(self._parser.path))
self.application = application
ok = True
self.info("Running Initializer")
nsteps = (
sum([self._get_nsteps(idict["plugin_name"]) for idict in self._init_list])
+ 1
)
pd = self._setup_progress(nsteps)
try:
for idict in self._init_list:
ok = self._run(**idict)
if not ok:
break
msg = "Complete" if ok else "Failed"
self.info("Initialization {}".format(msg))
pd.close()
except BaseException as e:
import traceback
traceback.print_exc()
self.debug("Initializer Exception: {}".format(e))
raise e
return ok
def info(self, msg, **kw):
pd = self._pd
if pd is not None:
offset = pd.get_value()
if offset == pd.max - 1:
pd.max += 1
pd.change_message(msg)
super(Initializer, self).info(msg, **kw)
def _run(self, name=None, manager=None, plugin_name=None):
parser = self._parser
if manager is not None:
self.info("Manager loading {}".format(name))
manager.application = self.application
manager.load()
else:
return False
managers = []
if plugin_name:
mp = self._get_plugin(plugin_name)
else:
mp, name = self._get_plugin_by_name(name)
if mp is not None:
if not globalv.ignore_initialization_required:
if not self._check_required(mp):
return False
managers = parser.get_managers(mp)
if managers:
self.info("loading managers - {}".format(", ".join(managers)))
manager.name = name
self._load_managers(manager, managers, plugin_name)
self._load_elements(mp, manager, name, plugin_name)
if manager is not None:
self.info("finish {} loading".format(name))
manager.finish_loading()
return True
def _load_elements(self, element, manager, name, plugin_name):
mp = element
parser = self._parser
devices = parser.get_devices(mp)
flags = parser.get_flags(mp)
timed_flags = parser.get_timed_flags(mp)
valve_flags = parser.get_valve_flags(mp, element=True)
valve_flags_attrs = []
if valve_flags:
for vf in valve_flags:
vs = vf.find("valves")
if vs:
vs = vs.split(",")
valve_flags_attrs.append((vf.text.strip(), vs))
if devices:
self.info("loading devices - {}".format(", ".join(devices)))
self._load_devices(manager, name, devices, plugin_name)
if flags:
self.info("loading flags - {}".format(", ".join(flags)))
self._load_flags(manager, flags)
if timed_flags:
self.info("loading timed flags - {}".format(",".join(timed_flags)))
self._load_timed_flags(manager, timed_flags)
if valve_flags_attrs:
self.info("loading valve flags - {}".format(",".join(valve_flags_attrs)))
self._load_valve_flags(manager, valve_flags_attrs)
# loaders
def _load_flags(self, manager, flags):
for f in flags:
self.info("loading {}".format(f))
manager.add_flag(f)
def _load_timed_flags(self, manager, flags):
for f in flags:
self.info("loading {}".format(f))
manager.add_timed_flag(f)
def _load_valve_flags(self, manager, flags):
for f, v in flags:
self.info("loading {}, valves={}".format(f, v))
manager.add_valve_flag(f, v)
def _load_devices(
self,
manager,
name,
devices,
plugin_name,
):
""" """
devs = []
if manager is None:
return
for device in devices:
if not device:
continue
pdev = self._parser.get_device(name, device, plugin_name, element=True)
dev_class = pdev.find("klass")
if dev_class is not None:
dev_class = dev_class.text.strip()
try:
dev = getattr(manager, device)
if dev is None:
dev = manager.create_device(device, dev_class=dev_class)
else:
if dev_class and dev.__class__.__name__ != dev_class:
dev = manager.create_device(
device, dev_class=dev_class, obj=dev
)
except AttributeError:
dev = manager.create_device(device, dev_class=dev_class)
if dev is None:
self.warning("No device for {}".format(device))
continue
self.info("loading {}".format(dev.name))
dev.application = self.application
if dev.load():
# register the device
if self.application is not None:
# display with the HardwareManager
self.info("Register device name={}, {}".format(dev.name, dev))
self.application.register_service(
ICoreDevice, dev, {"display": True}
)
devs.append(dev)
self.info("opening {}".format(dev.name))
if not dev.open(prefs=self.device_prefs):
self.info("failed connecting to {}".format(dev.name))
else:
self.info("failed loading {}".format(dev.name))
for od in devs:
self.info("Initializing {}".format(od.name))
result = od.initialize(progress=self._pd)
if result is not True:
self.warning("Failed setting up communications to {}".format(od.name))
od.set_simulation(True)
elif result is None:
self.debug(
"{} initialize function does not return a boolean".format(od.name)
)
raise NotImplementedError
od.application = self.application
od.post_initialize()
manager.devices.append(od)
def _load_managers(self, manager, managers, plugin_name):
for mi in managers:
man = None
self.info("load {}".format(mi))
try:
man = getattr(manager, mi)
if man is None:
man = manager.create_manager(mi)
except AttributeError as e:
self.warning(e)
try:
man = manager.create_manager(mi)
except InitializerError:
import traceback
traceback.print_exc()
if man is None:
self.debug("trouble creating manager {}".format(mi))
continue
if self.application is not None:
# register this manager as a service
man.application = self.application
self.application.register_service(type(man), man)
man.load()
element = self._get_manager(mi, plugin_name)
if not globalv.ignore_initialization_required:
if not self._check_required(element):
return False
self._load_elements(element, man, mi, plugin_name)
self.info("finish {} loading".format(mi))
man.finish_loading()
# helpers
def _setup_progress(self, n):
"""
n: int, initialize progress dialog with n steps
return a myProgressDialog object
"""
pd = myProgressDialog(
max=n, message="Welcome", position=(100, 100), size=(500, 50)
)
self._pd = pd
self._pd.open()
return pd
def _check_required(self, subtree):
# check the subtree has all required devices enabled
devs = self._parser.get_devices(subtree, all_=True, element=True)
for di in devs:
required = True
req = self._parser.get_parameter(di, "required")
if req:
required = to_bool(req)
enabled = to_bool(di.get("enabled"))
if required and not enabled:
name = di.text.strip().upper()
msg = """Device {} is REQUIRED but is not ENABLED.
Do you want to quit to enable {} in the Initialization File?""".format(
name, name
)
result = self.confirmation_dialog(msg, title="Quit Pychron")
if result:
raise InitializerError()
return True
def _get_manager(self, name, plugin_name):
parser = self._parser
man = parser.get_manager(name, plugin_name)
return man
def _get_plugin(self, name):
parser = self._parser
mp = parser.get_plugin(name)
return mp
def _get_nsteps(self, plugin_name):
parser = self._parser
mp = self._get_plugin(plugin_name)
ns = 0
if mp is not None:
ns += 2 * (len(parser.get_managers(mp)) + 1)
ns += 3 * (len(parser.get_devices(mp)) + 1)
ns += len(parser.get_flags(mp)) + 1
ns += len(parser.get_timed_flags(mp)) + 1
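            # Worked example (hypothetical counts): a plugin with 2 managers,
            # 3 devices, no flags and no timed flags contributes
            # 2*(2+1) + 3*(3+1) + (0+1) + (0+1) = 20 progress steps.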
return ns
# ========================= EOF ===================================
| USGSDenverPychron/pychron | pychron/envisage/initialization/initializer.py | Python | apache-2.0 | 11,331 |
import collections
import numpy as np
from guesswhat.statistics.abstract_plotter import *
import seaborn as sns
import pandas as pd
class SuccessArea(AbstractPlotter):
def __init__(self, path, games, logger, suffix):
super(SuccessArea, self).__init__(path, self.__class__.__name__, suffix)
status = []
area_list = []
status_count = collections.defaultdict(int)
for game in games:
status_count[game.status] += 1
status.append(game.status)
area_list.append(float(game.object.area))
success = np.array([s == "success" for s in status])
failure = np.array([s == "failure" for s in status])
incomp = np.array([s == "incomplete" for s in status])
sns.set(style="whitegrid", color_codes=True)
rng = range(4, 13)
if sum(incomp) > 0:
columns = ['Area', 'Success', 'Failure', 'Incomplete']
data = np.array([np.log(area_list), success, failure, incomp]).transpose()
else:
columns = ['Area', 'Success', 'Failure']
data = np.array([np.log(area_list), success, failure]).transpose()
df = pd.DataFrame(data, columns=columns)
df = df.convert_objects(convert_numeric=True)
df = df.groupby(pd.cut(df["Area"], range(4,14))).sum()
df = df.drop('Area', 1)
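        # At this point df holds one row per unit-wide bin of log(area)
        # (4-5, 5-6, ..., up to 13) with the per-bin counts of success,
        # failure (and incomplete) dialogues; e.g. a row might read
        # (hypothetical numbers) Success=1200, Failure=300, Incomplete=50.
        # The stacked bar chart below plots exactly these per-bin counts.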
f = df.plot(kind="bar", stacked=True, width=1, alpha=0.3, figsize=(9,6), color=["g", "r", "b"])
#f.set_xlim(-0.5,8.5)
#f.set_ylim(0,30000)
f.set_xlabel("log of object area", {'size':'14'})
f.set_ylabel("Number of dialogues", {'size':'14'})
sns.regplot(x=np.array([0]), y=np.array([0]), scatter=False, line_kws={'linestyle':'--'}, label="% Success",ci=None, color="b")
f.legend(loc="upper left", fontsize='x-large')
###########################################
histo_success = np.histogram(np.log(area_list)[success], bins=rng)
histo_failure = np.histogram(np.log(area_list)[failure], bins=rng)
histo_incomp = np.histogram(np.log(area_list)[incomp] , bins=rng)
normalizer = histo_success[0] + histo_failure[0] + histo_incomp[0]
histo_success = 1.0*histo_success[0] / normalizer
histo_failure = 1.0*histo_failure[0] / normalizer
histo_incomp = 1.0*histo_incomp[0] / normalizer
ax2 = f.twinx()
curve = np.ones(len(normalizer))-histo_failure-histo_incomp
f = sns.regplot(x=np.linspace(1, 10, 8), y=curve, order=3, scatter=False, line_kws={'linestyle':'--'},ci=None, truncate=False, color="b")
ax2.set_ylim(0,1)
ax2.grid(None)
ax2.set_ylabel("Success ratio", {'size':'14'})
| GuessWhatGame/guesswhat | src/guesswhat/statistics/success_area.py | Python | apache-2.0 | 2,720 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('accounts', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='userprofile',
name='last_ip_used',
field=models.GenericIPAddressField(null=True, blank=True),
),
migrations.AlterField(
model_name='userprofile',
name='registration_ip',
field=models.GenericIPAddressField(null=True, blank=True),
),
]
| codefisher/djangopress | djangopress/accounts/migrations/0002_auto_20140927_1229.py | Python | mit | 610 |
import pyb
p = pyb.Pin(8)
p.init(pyb.Pin.OUT_PP,pyb.Pin.PULL_NONE)
print(p)
while True:
#p.value(True)
p.high()
print("value:"+str(p.value()))
pyb.delay(1000)
#p.value(False)
p.low()
print("value:"+str(p.value()))
pyb.delay(1000)
| martinribelotta/uPyIDE | share/uPyIDE/snipplet/Main_gpios.py | Python | gpl-3.0 | 244 |
# Copyright (C) 2014 eNovance SAS <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib import constants
from sqlalchemy import func
from sqlalchemy import sql
from neutron.callbacks import events
from neutron.callbacks import registry
from neutron.callbacks import resources
from neutron.db import l3_agentschedulers_db as l3_sch_db
from neutron.db.models import agent as agent_model
from neutron.db.models import l3 as l3_models
from neutron.db.models import l3_attrs
from neutron.db.models import l3agent as rb_model
from neutron.extensions import portbindings
from neutron import manager
from neutron.plugins.common import constants as service_constants
class L3_HA_scheduler_db_mixin(l3_sch_db.AZL3AgentSchedulerDbMixin):
def get_ha_routers_l3_agents_count(self, context):
"""Return a map between HA routers and how many agents every
router is scheduled to.
"""
# Postgres requires every column in the select to be present in
# the group by statement when using an aggregate function.
# One solution is to generate a subquery and join it with the desired
# columns.
binding_model = rb_model.RouterL3AgentBinding
sub_query = (context.session.query(
binding_model.router_id,
func.count(binding_model.router_id).label('count')).
join(l3_attrs.RouterExtraAttributes,
binding_model.router_id ==
l3_attrs.RouterExtraAttributes.router_id).
join(l3_models.Router).
filter(l3_attrs.RouterExtraAttributes.ha == sql.true()).
group_by(binding_model.router_id).subquery())
query = (context.session.query(l3_models.Router, sub_query.c.count).
join(sub_query))
return [(self._make_router_dict(router), agent_count)
for router, agent_count in query]
def get_l3_agents_ordered_by_num_routers(self, context, agent_ids):
if not agent_ids:
return []
query = (context.session.query(agent_model.Agent, func.count(
rb_model.RouterL3AgentBinding.router_id)
.label('count')).
outerjoin(rb_model.RouterL3AgentBinding).
group_by(agent_model.Agent.id).
filter(agent_model.Agent.id.in_(agent_ids)).
order_by('count'))
return [record[0] for record in query]
def _get_agents_dict_for_router(self, agents_and_states):
agents = []
for agent, ha_state in agents_and_states:
l3_agent_dict = self._make_agent_dict(agent)
l3_agent_dict['ha_state'] = ha_state
agents.append(l3_agent_dict)
return {'agents': agents}
def list_l3_agents_hosting_router(self, context, router_id):
with context.session.begin(subtransactions=True):
router_db = self._get_router(context, router_id)
if router_db.extra_attributes.ha:
bindings = self.get_l3_bindings_hosting_router_with_ha_states(
context, router_id)
else:
bindings = self._get_l3_bindings_hosting_routers(
context, [router_id])
bindings = [(binding.l3_agent, None) for binding in bindings]
return self._get_agents_dict_for_router(bindings)
def _notify_l3_agent_ha_port_update(resource, event, trigger, **kwargs):
new_port = kwargs.get('port')
original_port = kwargs.get('original_port')
context = kwargs.get('context')
host = new_port[portbindings.HOST_ID]
if new_port and original_port and host:
new_device_owner = new_port.get('device_owner', '')
if (new_device_owner == constants.DEVICE_OWNER_ROUTER_HA_INTF and
new_port['status'] == constants.PORT_STATUS_ACTIVE and
original_port['status'] != new_port['status']):
l3plugin = manager.NeutronManager.get_service_plugins().get(
service_constants.L3_ROUTER_NAT)
l3plugin.l3_rpc_notifier.routers_updated_on_host(
context, [new_port['device_id']], host)
def subscribe():
registry.subscribe(
_notify_l3_agent_ha_port_update, resources.PORT, events.AFTER_UPDATE)
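# ---------------------------------------------------------------------------
# Illustrative sketch (not part of Neutron): the "aggregate in a subquery,
# then join" pattern used in get_ha_routers_l3_agents_count(), shown with a
# minimal throwaway schema so the Postgres GROUP BY restriction described
# there is easier to see.  Assumes SQLAlchemy 1.4+ and Python 3; the table and
# column names below are made up.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    from sqlalchemy import (Column, ForeignKey, Integer, String, create_engine,
                            func)
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class _Router(Base):
        __tablename__ = 'router'
        id = Column(Integer, primary_key=True)
        name = Column(String)

    class _Binding(Base):
        __tablename__ = 'binding'
        id = Column(Integer, primary_key=True)
        router_id = Column(Integer, ForeignKey('router.id'))

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = Session(engine)
    router = _Router(name='r1')
    session.add(router)
    session.flush()
    session.add_all([_Binding(router_id=router.id), _Binding(router_id=router.id)])

    # Aggregate once per router in a subquery, then join the aggregate back to
    # the full router rows instead of listing every column in the GROUP BY.
    sub = (session.query(_Binding.router_id,
                         func.count(_Binding.router_id).label('count'))
           .group_by(_Binding.router_id)
           .subquery())
    for rtr, count in session.query(_Router, sub.c.count).join(
            sub, _Router.id == sub.c.router_id):
        print('%s %s' % (rtr.name, count))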
| sebrandon1/neutron | neutron/db/l3_hascheduler_db.py | Python | apache-2.0 | 4,750 |
# Copyright 2009 Shikhar Bhushan
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'Session-related NETCONF operations'
from ncclient.xml_ import *
from rpc import RPC
class CloseSession(RPC):
"*<close-session>* RPC. The connection to NETCONF server is also closed."
def request(self):
try:
return self._request(new_ele("close-session"))
finally:
self.session.close()
class KillSession(RPC):
"*<kill-session>* RPC."
def request(self, session_id):
"""
:arg session_id: *session-id* of NETCONF session to kill
:type session_id: `string`
"""
node = new_ele("kill-session")
if not isinstance(session_id, basestring): # make sure
session_id = str(session_id)
sub_ele(node, "session-id").text = session_id
return self._request(node)
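# Illustrative usage (sketch, not part of this module): these RPC classes are
# normally reached through ncclient's high-level manager API; host, port and
# credentials below are placeholders.
#
#     from ncclient import manager
#
#     m = manager.connect(host="192.0.2.1", port=830, username="admin",
#                         password="admin", hostkey_verify=False)
#     m.kill_session("12345")   # <kill-session> targeting another session-id
#     m.close_session()         # <close-session>; also closes this connection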
| travelping/ncclient | ncclient/operations/session.py | Python | apache-2.0 | 1,368 |
# encoding = utf-8
import importlib
import copy
import logging
import os
import sys
import json
import tempfile
from solnlib.packages.splunklib import modularinput as smi
from solnlib.log import Logs
from solnlib.modular_input import checkpointer
from solnlib import utils as sutils
from splunktaucclib.global_config import GlobalConfig, GlobalConfigSchema
from splunk_aoblib.rest_helper import TARestHelper
from splunk_aoblib.setup_util import Setup_Util
DATA_INPUTS_OPTIONS = "data_inputs_options"
AOB_TEST_FLAG = 'AOB_TEST'
FIELD_TYPE = "type"
FIELD_FORMAT = "format_type"
CUSTOMIZED_VAR = "customized_var"
TYPE_CHECKBOX = "checkbox"
TYPE_ACCOUNT = "global_account"
class BaseModInput(smi.Script):
'''
This is a modular input wrapper, which provides some helper
functions to read the paramters from setup pages and the arguments
from input definition
'''
LogLevelMapping = {'debug': logging.DEBUG,
'info': logging.INFO,
'warning': logging.WARNING,
'error': logging.ERROR,
'critical': logging.CRITICAL}
def __init__(self, app_namespace, input_name, use_single_instance=False):
super(BaseModInput, self).__init__()
self.use_single_instance = use_single_instance
self._canceled = False
self.input_type = input_name
self.input_stanzas = {}
self.context_meta = {}
self.namespace = app_namespace
# redirect all the logging to one file
Logs.set_context(namespace=app_namespace,
root_logger_log_file=input_name)
self.logger = logging.getLogger()
self.logger.setLevel(logging.INFO)
self.rest_helper = TARestHelper(self.logger)
# check point
self.ckpt = None
self.setup_util = None
@property
def app(self):
return self.get_app_name()
@property
def global_setup_util(self):
"""
This is a private API used in AoB code internally. It is not allowed to be used in user's code.
:return: setup util instance to read global configurations
"""
return self.setup_util
def get_app_name(self):
"""Get TA name.
:return: the name of TA this modular input is in
"""
        raise NotImplementedError()
def get_scheme(self):
"""Get basic scheme, with use_single_instance field set.
:return: a basic input scheme
"""
scheme = smi.Scheme(self.input_type)
scheme.use_single_instance = self.use_single_instance
return scheme
def stream_events(self, inputs, ew):
"""The method called to stream events into Splunk.
This method overrides method in splunklib modular input.
It pre-processes the input args and call collect_events to stream events.
:param inputs: An ``InputDefinition`` object.
:param ew: An object with methods to write events and log messages to Splunk.
"""
# the input metadata is like
# {
# 'server_uri': 'https://127.0.0.1:8089',
# 'server_host': 'localhost',
# 'checkpoint_dir': '...',
# 'session_key': 'ceAvf3z^hZHYxe7wjTyTNo6_0ZRpf5cvWPdtSg'
# }
self.context_meta = inputs.metadata
# init setup util
uri = inputs.metadata["server_uri"]
session_key = inputs.metadata['session_key']
self.setup_util = Setup_Util(uri, session_key, self.logger)
input_definition = smi.input_definition.InputDefinition()
input_definition.metadata = copy.deepcopy(inputs.metadata)
input_definition.inputs = copy.deepcopy(inputs.inputs)
try:
self.parse_input_args(input_definition)
except Exception as e:
import traceback
self.log_error(traceback.format_exc())
print(traceback.format_exc(), file=sys.stderr)
# print >> sys.stderr, traceback.format_exc()
self.input_stanzas = {}
if not self.input_stanzas:
# if no stanza found. Just return
return
try:
self.set_log_level(self.log_level)
except:
self.log_debug('set log level fails.')
try:
self.collect_events(ew)
except Exception as e:
import traceback
self.log_error('Get error when collecting events.\n' + traceback.format_exc())
print(traceback.format_exc(), file=sys.stderr)
# print >> sys.stderr, traceback.format_exc()
raise RuntimeError(str(e))
def collect_events(self, event_writer):
"""Collect events and stream to Splunk using event writer provided.
Note: This method is originally collect_events(self, inputs, event_writer).
:param event_writer: An object with methods to write events and log messages to Splunk.
"""
        raise NotImplementedError()
def parse_input_args(self, inputs):
"""Parse input arguments, either from os environment when testing or from global configuration.
:param inputs: An ``InputDefinition`` object.
:return:
"""
if os.environ.get(AOB_TEST_FLAG, 'false') == 'true':
self._parse_input_args_from_env(inputs)
else:
self._parse_input_args_from_global_config(inputs)
if not self.use_single_instance:
assert len(self.input_stanzas) == 1
def _parse_input_args_from_global_config(self, inputs):
"""Parse input arguments from global configuration.
:param inputs:
"""
dirname = os.path.dirname
config_path = os.path.join(dirname(dirname(dirname(dirname(dirname(__file__))))), 'appserver', 'static', 'js', 'build',
'globalConfig.json')
with open(config_path) as f:
schema_json = ''.join([l for l in f])
global_schema = GlobalConfigSchema(json.loads(schema_json))
uri = inputs.metadata["server_uri"]
session_key = inputs.metadata['session_key']
global_config = GlobalConfig(uri, session_key, global_schema)
ucc_inputs = global_config.inputs.load(input_type=self.input_type)
all_stanzas = ucc_inputs.get(self.input_type, {})
if not all_stanzas:
# for single instance input. There might be no input stanza.
# Only the default stanza. In this case, modinput should exit.
self.log_warning("No stanza found for input type: " + self.input_type)
sys.exit(0)
account_fields = self.get_account_fields()
checkbox_fields = self.get_checkbox_fields()
self.input_stanzas = {}
for stanza in all_stanzas:
full_stanza_name = '{}://{}'.format(self.input_type, stanza.get('name'))
if full_stanza_name in inputs.inputs:
if stanza.get('disabled', False):
raise RuntimeError("Running disabled data input!")
stanza_params = {}
for k, v in stanza.items():
if k in checkbox_fields:
stanza_params[k] = sutils.is_true(v)
elif k in account_fields:
stanza_params[k] = copy.deepcopy(v)
else:
stanza_params[k] = v
self.input_stanzas[stanza.get('name')] = stanza_params
def _parse_input_args_from_env(self, inputs):
"""Parse input arguments from os environment. This is used for testing inputs.
:param inputs:
"""
data_inputs_options = json.loads(os.environ.get(DATA_INPUTS_OPTIONS, '[]'))
account_fields = self.get_account_fields()
checkbox_fields = self.get_checkbox_fields()
self.input_stanzas = {}
while len(inputs.inputs) > 0:
input_stanza, stanza_args = inputs.inputs.popitem()
kind_and_name = input_stanza.split("://")
if len(kind_and_name) == 2:
stanza_params = {}
for arg_name, arg_value in stanza_args.items():
try:
arg_value_trans = json.loads(arg_value)
except ValueError:
arg_value_trans = arg_value
stanza_params[arg_name] = arg_value_trans
if arg_name in account_fields:
stanza_params[arg_name] = self.get_user_credential_by_id(arg_value_trans)
elif arg_name in checkbox_fields:
stanza_params[arg_name] = sutils.is_true(arg_value_trans)
self.input_stanzas[kind_and_name[1]] = stanza_params
def get_account_fields(self):
"""Get the names of account variables.
Should be implemented in subclass.
:return: a list of variable names
"""
        raise NotImplementedError()
def get_checkbox_fields(self):
"""Get the names of checkbox variables.
Should be implemented in subclass.
:return: a list of variable names
"""
        raise NotImplementedError()
def get_global_checkbox_fields(self):
"""Get the names of checkbox global parameters.
:return: a list of global variable names
"""
        raise NotImplementedError()
# Global setting related functions.
# Global settings consist of log setting, proxy, account(user_credential) and customized settings.
@property
def log_level(self):
return self.get_log_level()
def get_log_level(self):
"""Get the log level configured in global configuration.
:return: log level set in global configuration or "INFO" by default.
"""
return self.setup_util.get_log_level()
def set_log_level(self, level):
"""Set the log level this python process uses.
:param level: log level in `string`. Accept "DEBUG", "INFO", "WARNING", "ERROR" and "CRITICAL".
"""
if isinstance(level, str):
level = level.lower()
if level in self.LogLevelMapping:
level = self.LogLevelMapping[level]
else:
level = logging.INFO
self.logger.setLevel(level)
def log(self, msg):
"""Log msg using logging level in global configuration.
:param msg: log `string`
"""
self.logger.log(level=self.log_level, msg=msg)
def log_debug(self, msg):
"""Log msg using logging.DEBUG level.
:param msg: log `string`
"""
self.logger.debug(msg)
def log_info(self, msg):
"""Log msg using logging.INFO level.
:param msg: log `string`
"""
self.logger.info(msg)
def log_warning(self, msg):
"""Log msg using logging.WARNING level.
:param msg: log `string`
"""
self.logger.warning(msg)
def log_error(self, msg):
"""Log msg using logging.ERROR level.
:param msg: log `string`
"""
self.logger.error(msg)
def log_critical(self, msg):
"""Log msg using logging.CRITICAL level.
:param msg: log `string`
"""
self.logger.critical(msg)
@property
def proxy(self):
return self.get_proxy()
def get_proxy(self):
"""Get proxy settings in global configuration.
Proxy settings include fields "proxy_url", "proxy_port", "proxy_username", "proxy_password", "proxy_type" and "proxy_rdns".
:return: a `dict` containing proxy parameters or empty `dict` if proxy is not set.
"""
return self.setup_util.get_proxy_settings()
def get_user_credential_by_username(self, username):
"""Get global credential information based on username.
Credential settings include fields "name"(account id), "username" and "password".
:param username: `string`
:return: if credential with username exists, return a `dict`, else None.
"""
return self.setup_util.get_credential_by_username(username)
def get_user_credential_by_id(self, account_id):
"""Get global credential information based on account id.
Credential settings include fields "name"(account id), "username" and "password".
:param account_id: `string`
:return: if credential with account_id exists, return a `dict`, else None.
"""
return self.setup_util.get_credential_by_id(account_id)
def get_global_setting(self, var_name):
"""Get customized setting value configured in global configuration.
:param var_name: `string`
:return: customized global configuration value or None
"""
var_value = self.setup_util.get_customized_setting(var_name)
if var_value is not None and var_name in self.get_global_checkbox_fields():
var_value = sutils.is_true(var_value)
return var_value
# Functions to help create events.
def new_event(self, data, time=None, host=None, index=None, source=None, sourcetype=None, done=True, unbroken=True):
"""Create a Splunk event object.
:param data: ``string``, the event's text.
:param time: ``float``, time in seconds, including up to 3 decimal places to represent milliseconds.
:param host: ``string``, the event's host, ex: localhost.
:param index: ``string``, the index this event is specified to write to, or None if default index.
:param source: ``string``, the source of this event, or None to have Splunk guess.
:param sourcetype: ``string``, source type currently set on this event, or None to have Splunk guess.
:param done: ``boolean``, is this a complete ``Event``? False if an ``Event`` fragment.
:param unbroken: ``boolean``, Is this event completely encapsulated in this ``Event`` object?
:return: ``Event`` object
"""
return smi.Event(data=data, time=time, host=host, index=index,
source=source, sourcetype=sourcetype, done=done, unbroken=unbroken)
# Basic get functions. To get params in input stanza.
def get_input_type(self):
"""Get input type.
:return: the modular input type
"""
return self.input_type
def get_input_stanza(self, input_stanza_name=None):
"""Get input stanzas.
If stanza name is None, return a dict with stanza name as key and params as values.
Else return a dict with param name as key and param value as value.
:param input_stanza_name: None or `string`
:return: `dict`
"""
if input_stanza_name:
return self.input_stanzas.get(input_stanza_name, None)
return self.input_stanzas
def get_input_stanza_names(self):
"""Get all stanza names this modular input instance is given.
For multi instance mode, a single string value will be returned.
For single instance mode, stanza names will be returned in a list.
:return: `string` or `list`
"""
if self.input_stanzas:
names = list(self.input_stanzas.keys())
if self.use_single_instance:
return names
else:
assert len(names) == 1
return names[0]
return None
def get_arg(self, arg_name, input_stanza_name=None):
"""Get the input argument.
If input_stanza_name is not provided:
For single instance mode, return a dict <input_name, arg_value>.
For multi instance mode, return a single value or None.
If input_stanza_name is provided, return a single value or None.
:param arg_name: `string`, argument name
:param input_stanza_name: None or `string`, a stanza name
:return: `dict` or `string` or None
"""
if input_stanza_name is None:
args_dict = {k: args[
arg_name] for k, args in self.input_stanzas.items() if arg_name in args}
if self.use_single_instance:
return args_dict
else:
if len(args_dict) == 1:
return list(args_dict.values())[0]
return None
else:
return self.input_stanzas.get(input_stanza_name, {}).get(arg_name, None)
def get_output_index(self, input_stanza_name=None):
"""Get output Splunk index.
:param input_stanza_name: `string`
:return: `string` output index
"""
return self.get_arg('index', input_stanza_name)
def get_sourcetype(self, input_stanza_name=None):
"""Get sourcetype to index.
:param input_stanza_name: `string`
:return: the sourcetype to index to
"""
return self.get_arg('sourcetype', input_stanza_name)
# HTTP request helper
def send_http_request(self, url, method, parameters=None, payload=None, headers=None, cookies=None, verify=True,
cert=None, timeout=None, use_proxy=True):
"""Send http request and get response.
:param url: URL for the new Request object.
:param method: method for the new Request object. Can be "GET", "POST", "PUT", "DELETE"
:param parameters: (optional) Dictionary or bytes to be sent in the query string for the Request.
:param payload: (optional) Dictionary, bytes, or file-like object to send in the body of the Request.
:param headers: (optional) Dictionary of HTTP Headers to send with the Request.
:param cookies: (optional) Dict or CookieJar object to send with the Request.
:param verify: (optional) whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
:param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
:param timeout: (optional) How long to wait for the server to send data before giving up, as a float,
or a (connect timeout, read timeout) tuple. Default to (10.0, 5.0).
:param use_proxy: (optional) whether to use proxy. If set to True, proxy in global setting will be used.
:return: Response
"""
return self.rest_helper.send_http_request(url=url, method=method, parameters=parameters, payload=payload,
headers=headers, cookies=cookies, verify=verify, cert=cert,
timeout=timeout,
proxy_uri=self._get_proxy_uri() if use_proxy else None)
def _get_proxy_uri(self):
uri = None
proxy = self.get_proxy()
if proxy and proxy.get('proxy_url') and proxy.get('proxy_type'):
uri = proxy['proxy_url']
if proxy.get('proxy_port'):
uri = '{0}:{1}'.format(uri, proxy.get('proxy_port'))
if proxy.get('proxy_username') and proxy.get('proxy_password'):
uri = '{0}://{1}:{2}@{3}/'.format(proxy['proxy_type'], proxy[
'proxy_username'], proxy['proxy_password'], uri)
else:
uri = '{0}://{1}'.format(proxy['proxy_type'], uri)
return uri
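    # For illustration (values are made up): proxy_type 'http', proxy_url
    # 'proxy.example.com', proxy_port 8080 plus a username/password yield
    # 'http://user:pass@proxy.example.com:8080/'; without credentials the
    # result is 'http://proxy.example.com:8080'.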
# Checkpointing related functions
def _init_ckpt(self):
if self.ckpt is None:
if 'AOB_TEST' in os.environ:
ckpt_dir = self.context_meta.get('checkpoint_dir', tempfile.mkdtemp())
if not os.path.exists(ckpt_dir):
os.makedirs(ckpt_dir)
self.ckpt = checkpointer.FileCheckpointer(ckpt_dir)
else:
if 'server_uri' not in self.context_meta:
raise ValueError('server_uri not found in input meta.')
if 'session_key' not in self.context_meta:
raise ValueError('session_key not found in input meta.')
dscheme, dhost, dport = sutils.extract_http_scheme_host_port(self.context_meta[
'server_uri'])
self.ckpt = checkpointer.KVStoreCheckpointer(self.app + "_checkpointer",
self.context_meta['session_key'], self.app,
scheme=dscheme, host=dhost, port=dport)
def get_check_point(self, key):
"""Get checkpoint.
:param key: `string`
:return: Checkpoint state if exists else None.
"""
if self.ckpt is None:
self._init_ckpt()
return self.ckpt.get(key)
def save_check_point(self, key, state):
"""Update checkpoint.
:param key: Checkpoint key. `string`
:param state: Checkpoint state.
"""
if self.ckpt is None:
self._init_ckpt()
self.ckpt.update(key, state)
def batch_save_check_point(self, states):
"""Batch update checkpoint.
:param states: a `dict` states with checkpoint key as key and checkpoint state as value.
"""
if self.ckpt is None:
self._init_ckpt()
self.ckpt.batch_update(states)
def delete_check_point(self, key):
"""Delete checkpoint.
:param key: Checkpoint key. `string`
"""
if self.ckpt is None:
self._init_ckpt()
self.ckpt.delete(key)
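# ---------------------------------------------------------------------------
# Illustrative sketch (not part of this module): the methods a concrete data
# input built on BaseModInput typically overrides.  The add-on name 'my_ta',
# the input name 'demo_input' and the field lists below are placeholders, not
# values from any real add-on.
# ---------------------------------------------------------------------------
class _ExampleDataInput(BaseModInput):

    def __init__(self):
        super(_ExampleDataInput, self).__init__('my_ta', 'demo_input',
                                                use_single_instance=False)

    def get_app_name(self):
        return 'my_ta'

    def get_account_fields(self):
        return ['global_account']

    def get_checkbox_fields(self):
        return []

    def get_global_checkbox_fields(self):
        return []

    def collect_events(self, ew):
        # one event per run; index and sourcetype come from the input stanza
        name = self.get_input_stanza_names()
        event = self.new_event(data='hello from %s' % name,
                               source=self.input_type,
                               index=self.get_output_index(),
                               sourcetype=self.get_sourcetype())
        ew.write_event(event)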
| PaloAltoNetworks-BD/SplunkforPaloAltoNetworks | Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/modinput_wrapper/base_modinput.py | Python | isc | 21,425 |
# -*- coding: utf-8 -*-
# Copyright 2020 Green Valley Belgium NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @@license_version:1.7@@
from google.appengine.ext import ndb
from rogerthat.models import NdbModel
class ParticipationCity(NdbModel):
secret = ndb.StringProperty(indexed=False)
@property
def community_id(self):
return self.key.id()
@classmethod
def create_key(cls, community_id):
return ndb.Key(cls, community_id)
| our-city-app/oca-backend | src/solutions/common/models/participation.py | Python | apache-2.0 | 971 |
from discord.ext import commands
from clembot.core.logs import Logger
class RaidLevelConverter(commands.Converter):
level_alias_map = {
'MEGA': 'M',
'EX': 'E'
}
label_map = {
'M' : 'mega',
'E' : 'ex'
}
@classmethod
def to_level(cls, argument) -> str:
case_corrected_level = argument.upper()
if case_corrected_level in RaidLevelMaster.get_all_levels():
return case_corrected_level
if case_corrected_level in RaidLevelConverter.level_alias_map.keys():
return RaidLevelConverter.level_alias_map.get(case_corrected_level)
return None
async def convert(self, ctx, argument) -> str:
return RaidLevelConverter.to_level(argument)
@classmethod
def label(cls, level):
if level in RaidLevelConverter.label_map.keys():
return RaidLevelConverter.label_map.get(level)
return level
class RaidLevelMaster:
TABLE_NAME = 'raid_master'
by_level = dict()
def __init__(self, bot, db_dict=dict()):
self.bot = bot
self.db_dict = {
'raid_level' : None,
'raid_boss': [],
'next_raid_boss': [],
'egg_key': None,
'egg_image': None,
'egg_timer': None,
'raid_timer': None
}
self.db_dict.update(db_dict)
@property
def level(self):
return self['raid_level']
@property
def egg_timer(self):
return self['egg_timer']
@property
def raid_timer(self):
return self['raid_timer']
@property
def egg_image(self):
return self['egg_image']
@property
def raid_boss_list(self):
return self['raid_boss']
def update_dict(self, db_dict):
self.db_dict.update(db_dict)
async def insert(self):
table = self.bot.dbi.table(RaidLevelMaster.TABLE_NAME)
insert_query = table.insert(**self.db_dict)
await insert_query.commit()
RaidLevelMaster.cache(self)
async def update(self):
table = self.bot.dbi.table(RaidLevelMaster.TABLE_NAME)
update_query = table.update(**self.db_dict).where(raid_level=self.level)
await update_query.commit()
RaidLevelMaster.cache(self)
@staticmethod
async def find_all(bot):
table = bot.dbi.table(RaidLevelMaster.TABLE_NAME)
query = table.query().select()
record_list = await query.getjson()
return record_list
@classmethod
async def find_by(cls, bot, raid_level):
table = bot.dbi.table(RaidLevelMaster.TABLE_NAME)
query = table.query().select().where(raid_level=raid_level)
try:
record_list = await query.getjson()
if record_list:
return cls(bot, record_list[0])
except Exception as error:
Logger.info(f"{error}")
return None
@classmethod
async def load(cls, bot, force=False):
Logger.info("load()")
if len(cls.by_level) == 0 or force:
table = bot.dbi.table(RaidLevelMaster.TABLE_NAME)
records = await table.query().select().getjson()
for record in records:
raid_master = RaidLevelMaster(bot, record)
RaidLevelMaster.cache(raid_master)
@classmethod
def cache(cls, raid_master):
cls.by_level[raid_master.level] = raid_master
pass
@classmethod
def from_cache(cls, level):
if len(cls.by_level) < 1:
raise Exception("Error : Raid bosses are not loaded.")
if level:
raid_master = cls.by_level.get(str(level), None)
return raid_master
raise Exception(f"Error : Raid bosses (level - {level}) are not loaded.")
def __getitem__(self, item):
"""use [] operator to access members, simpler to create entity objects"""
return self.db_dict.get(item)
def __setitem__(self, key, value):
"""use [] operator to access members, simpler to create entity objects. Handles array kind of values."""
self.db_dict[key] = value
@classmethod
def get_level(cls, pokeform):
"""get_level(pokemon) - return None if the boss is listed."""
if 'MEGA' in pokeform.id:
return 'M'
for raid_master_level in RaidLevelMaster.by_level.values():
if pokeform.id in raid_master_level.raid_boss_list:
return raid_master_level.level
return None
@classmethod
def get_all_levels(cls):
"""get_all_levels(pokemon) - return pokemon raid levels"""
return RaidLevelMaster.by_level.keys()
@classmethod
def get_boss_list(cls, level):
"""get_boss_list(level) - returns a list of raid bosses for that level"""
raid_master = RaidLevelMaster.by_level.get(str(level), None)
if raid_master:
return raid_master.raid_boss_list
return []
@classmethod
def is_current_raid_boss(cls, pokeform):
return RaidLevelMaster.get_level(pokeform) is not None
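# Illustrative usage (sketch): how a command might resolve a raid level and its
# boss list once the cache is warm.  The `bot` object (with a `dbi` attribute)
# is assumed to come from the Clembot runtime; the values shown are examples.
#
#     await RaidLevelMaster.load(bot)               # populate the by_level cache
#     level = RaidLevelConverter.to_level('mega')   # -> 'M'
#     bosses = RaidLevelMaster.get_boss_list(level) # raid bosses for level 'M'
#     label = RaidLevelConverter.label(level)       # -> 'mega'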
| TrainingB/Clembot | clembot/exts/pkmn/raid_boss.py | Python | gpl-3.0 | 5,091 |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# raise: actively throw (raise) an exception
raise IOError, "file not exist" | zhangyage/Python-oldboy | day04/exceptions/Raise.py | Python | apache-2.0 | 105 |
# -*- coding: iso-8859-1 -*-
# Copyright (C) 2014 Daniele Simonetti
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
from .xmlutils import *
import uuid
import lxml.etree as ET
from .packitem import PackItem
class PerkCateg(PackItem):
def __init__(self):
super(PerkCateg, self).__init__()
self.id = uuid.uuid1().hex
self.name = None
@staticmethod
def build_from_xml(elem):
f = PerkCateg()
f.id = elem.attrib['id']
f.name = elem.text
return f
def write_into(self, elem):
pass
def __str__(self):
return self.name
def __unicode__(self):
return self.name
def __eq__(self, obj):
return obj and obj.id == self.id
def __ne__(self, obj):
return not self.__eq__(obj)
def __hash__(self):
return self.id.__hash__()
class PerkException(object):
def __init__(self):
self.tag = None
self.value = None
@staticmethod
def build_from_xml(elem):
f = PerkException()
f.tag = elem.attrib['tag']
f.value = int(elem.attrib['value'])
return f
def write_into(self, elem):
ec = ET.SubElement(elem, "Exception",
{
'tag' : self.tag,
'value' : str(self.value),
})
class PerkRank(object):
def __init__(self):
self.id = None
self.value = None
self.exceptions = []
@staticmethod
def build_from_xml(elem):
f = PerkRank()
f.id = int(elem.attrib['id'])
f.value = int(elem.attrib['value'])
f.exceptions = []
for se in elem.iter():
if se.tag == 'Exception':
f.exceptions.append(PerkException.build_from_xml(se))
return f
def write_into(self, elem):
ec = ET.SubElement(elem, "Rank",
{
'id' : str(self.id),
'value' : str(self.value),
})
for e in self.exceptions:
e.write_into( ec )
class Perk(object):
def __init__(self):
self.id = uuid.uuid1().hex
self.name = None
self.type = None
self.rule = None
self.desc = None
self.ranks = []
@staticmethod
def build_from_xml(elem):
f = Perk()
f.name = elem.attrib['name']
f.id = elem.attrib['id']
f.type = elem.attrib['type']
f.rule = elem.attrib['rule'] if ('rule' in elem.attrib) else f.id
f.desc = read_sub_element_text(elem, 'Description', "")
f.ranks = []
for se in elem.iter():
if se.tag == 'Rank':
f.ranks.append(PerkRank.build_from_xml(se))
return f
def write_into(self, name, elem):
ec = ET.SubElement(elem, name,
{ 'name' : self.name,
'id' : self.id,
'type' : self.type,
'rule' : self.rule
})
for r in self.ranks:
r.write_into( ec )
ET.SubElement(ec, 'Description').text = self.desc
def get_rank_value(self, rank):
for r in self.ranks:
if r.id == rank: return r.value
return None
def __str__(self):
return self.name or self.id
def __unicode__(self):
return self.name or self.id
def __eq__(self, obj):
return obj and obj.id == self.id
def __ne__(self, obj):
return not self.__eq__(obj)
def __hash__(self):
return self.id.__hash__() | OpenNingia/l5rcm-data-access | l5rdal/perk.py | Python | lgpl-3.0 | 4,184 |
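# Illustrative usage of the Perk classes above (sketch; the XML below is made
# up but follows the layout build_from_xml expects):
#
#     import lxml.etree as ET
#     xml = ('<Perk name="Strength of the Earth" id="str_earth" type="Advantage">'
#            '<Rank id="1" value="3"/><Description>...</Description></Perk>')
#     perk = Perk.build_from_xml(ET.fromstring(xml))
#     perk.get_rank_value(1)   # -> 3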
# coding: utf-8
import struct
import pytest
from pandas.compat import OrderedDict, u
from pandas import compat
from pandas.io.msgpack import Packer, Unpacker, packb, unpackb
class TestPack(object):
def check(self, data, use_list=False):
re = unpackb(packb(data), use_list=use_list)
assert re == data
def testPack(self):
test_data = [
0, 1, 127, 128, 255, 256, 65535, 65536,
-1, -32, -33, -128, -129, -32768, -32769,
1.0,
b"", b"a", b"a" * 31, b"a" * 32,
None, True, False,
(), ((),), ((), None,),
{None: 0},
(1 << 23),
]
for td in test_data:
self.check(td)
def testPackUnicode(self):
test_data = [u(""), u("abcd"), [u("defgh")], u("Русский текст"), ]
for td in test_data:
re = unpackb(
packb(td, encoding='utf-8'), use_list=1, encoding='utf-8')
assert re == td
packer = Packer(encoding='utf-8')
data = packer.pack(td)
re = Unpacker(
compat.BytesIO(data), encoding='utf-8', use_list=1).unpack()
assert re == td
def testPackUTF32(self):
test_data = [
compat.u(""),
compat.u("abcd"),
[compat.u("defgh")],
compat.u("Русский текст"),
]
for td in test_data:
re = unpackb(
packb(td, encoding='utf-32'), use_list=1, encoding='utf-32')
assert re == td
def testPackBytes(self):
test_data = [b"", b"abcd", (b"defgh", ), ]
for td in test_data:
self.check(td)
def testIgnoreUnicodeErrors(self):
re = unpackb(
packb(b'abc\xeddef'), encoding='utf-8', unicode_errors='ignore',
use_list=1)
assert re == "abcdef"
def testStrictUnicodeUnpack(self):
msg = (r"'utf-*8' codec can't decode byte 0xed in position 3:"
" invalid continuation byte")
with pytest.raises(UnicodeDecodeError, match=msg):
unpackb(packb(b'abc\xeddef'), encoding='utf-8', use_list=1)
def testStrictUnicodePack(self):
msg = (r"'ascii' codec can't encode character u*'\\xed' in position 3:"
r" ordinal not in range\(128\)")
with pytest.raises(UnicodeEncodeError, match=msg):
packb(compat.u("abc\xeddef"), encoding='ascii',
unicode_errors='strict')
def testIgnoreErrorsPack(self):
re = unpackb(
packb(
compat.u("abcФФФdef"), encoding='ascii',
unicode_errors='ignore'), encoding='utf-8', use_list=1)
assert re == compat.u("abcdef")
def testNoEncoding(self):
msg = "Can't encode unicode string: no encoding is specified"
with pytest.raises(TypeError, match=msg):
packb(compat.u("abc"), encoding=None)
def testDecodeBinary(self):
re = unpackb(packb("abc"), encoding=None, use_list=1)
assert re == b"abc"
def testPackFloat(self):
assert packb(1.0,
use_single_float=True) == b'\xca' + struct.pack('>f', 1.0)
assert packb(
1.0, use_single_float=False) == b'\xcb' + struct.pack('>d', 1.0)
def testArraySize(self, sizes=[0, 5, 50, 1000]):
bio = compat.BytesIO()
packer = Packer()
for size in sizes:
bio.write(packer.pack_array_header(size))
for i in range(size):
bio.write(packer.pack(i))
bio.seek(0)
unpacker = Unpacker(bio, use_list=1)
for size in sizes:
assert unpacker.unpack() == list(range(size))
def test_manualreset(self, sizes=[0, 5, 50, 1000]):
packer = Packer(autoreset=False)
for size in sizes:
packer.pack_array_header(size)
for i in range(size):
packer.pack(i)
bio = compat.BytesIO(packer.bytes())
unpacker = Unpacker(bio, use_list=1)
for size in sizes:
assert unpacker.unpack() == list(range(size))
packer.reset()
assert packer.bytes() == b''
def testMapSize(self, sizes=[0, 5, 50, 1000]):
bio = compat.BytesIO()
packer = Packer()
for size in sizes:
bio.write(packer.pack_map_header(size))
for i in range(size):
bio.write(packer.pack(i)) # key
bio.write(packer.pack(i * 2)) # value
bio.seek(0)
unpacker = Unpacker(bio)
for size in sizes:
assert unpacker.unpack() == {i: i * 2 for i in range(size)}
def test_odict(self):
seq = [(b'one', 1), (b'two', 2), (b'three', 3), (b'four', 4)]
od = OrderedDict(seq)
assert unpackb(packb(od), use_list=1) == dict(seq)
def pair_hook(seq):
return list(seq)
assert unpackb(
packb(od), object_pairs_hook=pair_hook, use_list=1) == seq
def test_pairlist(self):
pairlist = [(b'a', 1), (2, b'b'), (b'foo', b'bar')]
packer = Packer()
packed = packer.pack_map_pairs(pairlist)
unpacked = unpackb(packed, object_pairs_hook=list)
assert pairlist == unpacked
| GuessWhoSamFoo/pandas | pandas/tests/io/msgpack/test_pack.py | Python | bsd-3-clause | 5,296 |
from sqlalchemy import create_engine, MetaData
from sqlalchemy.orm import scoped_session, create_session
engine = None
metadata = MetaData()
db_session = scoped_session(
lambda: create_session(autocommit=False, autoflush=False, bind=engine))
def init_engine(uri):
global engine
engine = create_engine(uri, convert_unicode=True)
return engine
def init_db():
global engine
metadata.create_all(bind=engine)
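# Illustrative wiring (sketch): how an application might use this module.  The
# in-memory SQLite URI stands in for the real database URI, and the SQLAlchemy
# version is assumed to be one that still accepts these session/engine flags.
if __name__ == '__main__':
    init_engine('sqlite://')      # bind the module-level engine
    init_db()                     # create any tables registered on `metadata`
    session = db_session()        # scoped session binds lazily to `engine`
    print(session.bind)
    db_session.remove()           # typically called at the end of each request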
| myblt/myblt-website | database.py | Python | gpl-2.0 | 434 |
def DoDDL(con):
con.query("use tiaraboom")
con.query("""create reference table if not exists sources(
user_id bigint not null,
source_id bigint auto_increment,
source blob default null,
primary key(source_id) using hash)""")
con.query("""create table if not exists sentences(
source_id bigint not null,
sentence_id bigint auto_increment,
sentence_index bigint not null,
sentence blob not null,
key (source_id, sentence_id) using clustered columnstore,
shard (sentence_id))""")
con.query("""create table if not exists tokens(
source_id bigint not null,
sentence_id bigint not null,
token_id bigint not null,
token blob not null,
lemma blob not null,
start_char bigint not null,
end_char bigint not null,
pos blob not null,
pre blob not null,
post blob not null,
ner blob not null,
normalized_ner blob,
speaker blob not null,
original_text blob not null,
key (source_id, sentence_id, token_id) using clustered columnstore,
shard (sentence_id))""")
con.query("""create table if not exists relations(
source_id bigint not null,
sentence_id bigint not null,
relation_id bigint not null,
subject blob not null,
subject_start bigint not null,
subject_end bigint not null,
relation blob not null,
relation_start bigint not null,
relation_end bigint not null,
object blob not null,
object_start bigint not null,
object_end bigint not null,
key (subject, object, relation) using clustered columnstore,
shard (sentence_id))""")
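# Illustrative call (sketch): DoDDL only needs an object exposing a .query()
# method.  The snippet assumes the memsql-python helper is what produces `con`
# in this project -- that import and the connection details are assumptions.
#
#     from memsql.common import database
#     con = database.connect(host='127.0.0.1', port=3306, user='root', password='')
#     DoDDL(con)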
| jvictor0/TiaraBoom | sharat/sharat_ddl.py | Python | mit | 2,217 |
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.rst')).read()
requires = [
'ecreall_dace',
'pyramid_layout',
'substanced',
'pillow'
]
setup(name='ecreall_pontus',
version='1.1.1.dev0',
description='An application programming interface built upon the Pyramid web framework and substanced application. It provides libraries which make it easy to manage complex and imbricated views. For that purpose, Pontus introduces the concept of operations on views.',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 3.4",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
],
author='Amen Souissi',
author_email='[email protected]',
url='https://github.com/ecreall/pontus/',
keywords='process',
license="AGPLv3+",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="pontus",
message_extractors={
'pontus': [
('**.py', 'python', None), # babel extractor supports plurals
('**.pt', 'chameleon', None),
],
},
extras_require = dict(
test=['WebTest'],
),
entry_points="""\
""",
)
| ecreall/pontus | setup.py | Python | agpl-3.0 | 1,701 |
from dbt.exceptions import CompilationException
from dbt.contracts.graph.manifest import Manifest
from dbt.contracts.files import ParseFileType
from test.integration.base import DBTIntegrationTest, use_profile, normalize, get_manifest
import shutil
import os
class TestDocs(DBTIntegrationTest):
@property
def schema(self):
return "test_068docs"
@property
def models(self):
return "models"
@property
def project_config(self):
return {
'config-version': 2,
'seed-paths': ['seeds'],
'test-paths': ['tests'],
'macro-paths': ['macros'],
'analysis-paths': ['analyses'],
'snapshot-paths': ['snapshots'],
'seeds': {
'quote_columns': False,
},
}
def setup_directories(self):
os.mkdir(os.path.join(self.test_root_dir, 'models'))
os.mkdir(os.path.join(self.test_root_dir, 'tests'))
os.mkdir(os.path.join(self.test_root_dir, 'seeds'))
os.mkdir(os.path.join(self.test_root_dir, 'macros'))
os.mkdir(os.path.join(self.test_root_dir, 'analyses'))
os.mkdir(os.path.join(self.test_root_dir, 'snapshots'))
os.environ['DBT_PP_TEST'] = 'true'
@use_profile('postgres')
def test_postgres_pp_docs(self):
# initial run
self.setup_directories()
self.copy_file('test-files/model_one.sql', 'models/model_one.sql')
self.copy_file('test-files/raw_customers.csv', 'seeds/raw_customers.csv')
self.copy_file('test-files/my_macro-docs.sql', 'macros/my_macro.sql')
results = self.run_dbt(["run"])
self.assertEqual(len(results), 1)
# Add docs file customers.md
self.copy_file('test-files/customers1.md', 'models/customers.md')
results = self.run_dbt(["--partial-parse", "run"])
manifest = get_manifest()
self.assertEqual(len(manifest.docs), 2)
model_one_node = manifest.nodes['model.test.model_one']
# Add schema file with 'docs' description
self.copy_file('test-files/schema-docs.yml', 'models/schema.yml')
results = self.run_dbt(["--partial-parse", "run"])
manifest = get_manifest()
self.assertEqual(len(manifest.docs), 2)
doc_id = 'test.customer_table'
self.assertIn(doc_id, manifest.docs)
doc = manifest.docs[doc_id]
doc_file_id = doc.file_id
self.assertIn(doc_file_id, manifest.files)
source_file = manifest.files[doc_file_id]
self.assertEqual(len(source_file.nodes), 1)
model_one_id = 'model.test.model_one'
self.assertIn(model_one_id, source_file.nodes)
model_node = manifest.nodes[model_one_id]
self.assertEqual(model_node.description, 'This table contains customer data')
# Update the doc file
self.copy_file('test-files/customers2.md', 'models/customers.md')
results = self.run_dbt(["--partial-parse", "run"])
manifest = get_manifest()
self.assertEqual(len(manifest.docs), 2)
doc_node = manifest.docs[doc_id]
model_one_id = 'model.test.model_one'
self.assertIn(model_one_id, manifest.nodes)
model_node = manifest.nodes[model_one_id]
self.assertRegex(model_node.description, r'LOTS')
# Add a macro patch, source and exposure with doc
self.copy_file('test-files/schema-docs2.yml', 'models/schema.yml')
results = self.run_dbt(["--partial-parse", "run"])
self.assertEqual(len(results), 1)
manifest = get_manifest()
doc_file = manifest.files[doc_file_id]
expected_nodes = ['model.test.model_one', 'source.test.seed_sources.raw_customers', 'macro.test.my_macro', 'exposure.test.proxy_for_dashboard']
self.assertEqual(expected_nodes, doc_file.nodes)
source_id = 'source.test.seed_sources.raw_customers'
self.assertEqual(manifest.sources[source_id].source_description, 'LOTS of customer data')
macro_id = 'macro.test.my_macro'
self.assertEqual(manifest.macros[macro_id].description, 'LOTS of customer data')
exposure_id = 'exposure.test.proxy_for_dashboard'
self.assertEqual(manifest.exposures[exposure_id].description, 'LOTS of customer data')
# update the doc file again
self.copy_file('test-files/customers1.md', 'models/customers.md')
results = self.run_dbt(["--partial-parse", "run"])
manifest = get_manifest()
source_file = manifest.files[doc_file_id]
model_one_id = 'model.test.model_one'
self.assertIn(model_one_id, source_file.nodes)
model_node = manifest.nodes[model_one_id]
self.assertEqual(model_node.description, 'This table contains customer data')
self.assertEqual(manifest.sources[source_id].source_description, 'This table contains customer data')
self.assertEqual(manifest.macros[macro_id].description, 'This table contains customer data')
self.assertEqual(manifest.exposures[exposure_id].description, 'This table contains customer data')
# check that _lock is working
with manifest._lock:
self.assertIsNotNone(manifest._lock)
| analyst-collective/dbt | test/integration/068_partial_parsing_tests/test_pp_docs.py | Python | apache-2.0 | 5,212 |
from pytest import mark
from intervals import IntInterval
class TestArithmeticOperators(object):
@mark.parametrize(('first', 'second', 'result'), (
(IntInterval([1, 3]), IntInterval([1, 2]), [2, 5]),
(IntInterval([1, 3]), 1, [2, 4]),
(1, IntInterval([1, 3]), [2, 4]),
([1, 2], IntInterval([1, 2]), [2, 4])
))
def test_add_operator(self, first, second, result):
assert first + second == IntInterval(result)
@mark.parametrize(('first', 'second', 'result'), (
(IntInterval([1, 3]), IntInterval([1, 2]), [-1, 2]),
(IntInterval([1, 3]), 1, [0, 2]),
(1, IntInterval([1, 3]), [-2, 0])
))
def test_sub_operator(self, first, second, result):
assert first - second == IntInterval(result)
def test_isub_operator(self):
range_ = IntInterval([1, 3])
range_ -= IntInterval([1, 2])
assert range_ == IntInterval([-1, 2])
def test_iadd_operator(self):
range_ = IntInterval([1, 2])
range_ += IntInterval([1, 2])
assert range_ == IntInterval([2, 4])
class TestArithmeticFunctions(object):
@mark.parametrize(('first', 'second', 'result'), (
(IntInterval([1, 3]), IntInterval([1, 2]), [1, 2]),
(IntInterval([-2, 2]), 1, [-2, 1])
))
def test_glb(self, first, second, result):
assert first.glb(second) == IntInterval(result)
@mark.parametrize(('first', 'second', 'result'), (
(IntInterval([1, 3]), IntInterval([1, 2]), [1, 3]),
(IntInterval([-2, 2]), 1, [1, 2])
))
def test_lub(self, first, second, result):
assert first.lub(second) == IntInterval(result)
@mark.parametrize(('first', 'second', 'result'), (
(IntInterval([1, 3]), IntInterval([1, 2]), [1, 2]),
(IntInterval([-2, 2]), 1, [1, 1])
))
def test_inf(self, first, second, result):
assert first.inf(second) == IntInterval(result)
@mark.parametrize(('first', 'second', 'result'), (
(IntInterval([1, 3]), IntInterval([1, 2]), [1, 3]),
(IntInterval([-2, 2]), 1, [-2, 2])
))
def test_sup(self, first, second, result):
assert first.sup(second) == IntInterval(result)
| kvesteri/intervals | tests/interval/test_arithmetic.py | Python | bsd-3-clause | 2,203 |
import os
from dummy.collectors.generic import PassFailCollector
from dummy.statistics.generic import CountEngine
TEMP_DIR = '.tmp' # directory for temp files
TESTS_DIR = 'tests' # directory with all the tests
TEST_OUTPUT_DIR = os.path.join( TEMP_DIR, 'logs' )
TARGET_DIR = 'results' # Store results in this directory
SRC_DIR = 'src'
ENV = [
'TEMP_DIR',
'TESTS_DIR',
'SRC_DIR',
]
SUITES = {
'all': [ '%s/*' % TESTS_DIR ]
}
GIT_TAG_RELEASE_REGEX = r".*"
RELEASES = {}
TEST_RUNNER = '%s/bin/runner.sh'
METRICS = {
# passing/failing of a test is configured as a collector
'pass/fail': {
'collector': PassFailCollector()
},
}
STATISTICS = {
# we can configure what statistics are gathered over the different tests
# several statistics engines exist in the python module `dummy.statistics.generic`
# let's count for example the amount of tests failed/passed
'tests passing': {
'engine': CountEngine()
},
}
| ElessarWebb/dummy | docs/code/example_dummy_config.py | Python | mit | 1,006 |
"""
get aggregate and cluster information of asphaltene molecules from .lammpstrj file.
"""
import numpy
import pp
import gc
from datetime import datetime
import matplotlib.pyplot as plt
a, b, c = 86.62, 86.62, 86.62
NProcess = 16
stride = 10
dirName = r"F:\simulations\asphaltenes\production\longtime\athInHeptane\nvt\analysis/"
fileName = '../ath-dump.lammpstrj'
# label = 'ATH in Toluene'
label = 'ATH in Heptane'
time0 = datetime.now()
print 'opening trajctory file...'
with open(dirName + fileName, 'r') as foo:
dataFile = foo.read().split('ITEM: TIMESTEP\n')
time1 = datetime.now()
print 'time elapsed:', time1-time0
criteriaCluster = 4.0
def ave_accum(list):
avelist = []
avelist.append(list[0])
for i in range(1, len(list)):
avelist.append((avelist[i - 1] * i + list[i]) / float((i + 1)))
return avelist
def dis(pbca, pbcb, pbcc, t1, t2):
xmin = numpy.abs(t1[0] - t2[0]) - pbca * numpy.round(numpy.abs(t1[0] - t2[0]) / pbca)
ymin = numpy.abs(t1[1] - t2[1]) - pbcb * numpy.round(numpy.abs(t1[1] - t2[1]) / pbcb)
zmin = numpy.abs(t1[2] - t2[2]) - pbcc * numpy.round(numpy.abs(t1[2] - t2[2]) / pbcc)
return numpy.sqrt(xmin * xmin + ymin * ymin + zmin * zmin)
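# Worked example (sketch) of the minimum-image convention used in dis(): with
# a cubic box of side 10.0, points at x = 1.0 and x = 9.5 give |dx| = 8.5 and
# 8.5 - 10.0*round(8.5/10.0) = -1.5, so the wrapped separation is 1.5, not 8.5.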
# minimal distance between any two atoms of two molecules (with PBC)
def disMol(a, b, c, molecule1, molecule2):
# molecule1,2 = [[id1,mol1,type1,x1,y1,z1],[id2,mol2,type2,x2,y2,z2],...]
dis0 = max(a, b, c)
for i in range(len(molecule1)):
for j in range(len(molecule2)):
distmp = dis(a, b, c, molecule1[i][3:], molecule2[j][3:])
if distmp <= dis0:
dis0 = distmp
return dis0
print "spliting trajectory..."
trj = []
# remove -1 after testing
'''
for iframe, frame in enumerate(dataFile[1:]):
if iframe % 100 == 0:
print 'Frame No.: %d' % iframe
lines = frame.split('\n')[8:-1]
config = []
for line in lines:
id, mol, type, x, y, z = line.split()
config.append([int(id), int(mol), int(type), float(x), float(y), float(z)])
# use sorted() function not list.sort() method, the later have no return value
trj.append(sorted(config))
'''
# remove -1 after testing
def split(iframe,frame):
if iframe % 500 == 0:
print 'Frame No.: %d' % iframe
lines = frame.split('\n')[8:-1]
config = []
for line in lines:
id, mol, type, x, y, z = line.split()
config.append([int(id), int(mol), int(type), float(x), float(y), float(z)])
# use sorted() function not list.sort() method, the later have no return value
return sorted(config)
print 'using %d CPUs to split file.' % NProcess
job_server = pp.Server(NProcess)
jobs1 = [job_server.submit(split, args=(iframe, frame,), depfuncs=(),
modules=()) for iframe, frame in enumerate(dataFile[1:])]
for job in jobs1:
trj.append(job())
del dataFile
gc.collect()
time2 = datetime.now()
print 'time elapsed:',time2-time1
print 'split completed\n'
print 'total frames: %d' % len(trj)
# get molecule index list using the first frame
mol = []
for xatom in trj[0]:
if xatom[1] not in mol:
mol.append(xatom[1])
print 'total molecules:%d' % len(mol)
# generate in each frame, split config by molecules
# get the min_min and max_min for each two molecules
print "caculating distance..."
# for iframe, frame in enumerate(trj):
def clustering(iframe, frame, mol, criteriaCluster, a, b, c):
if iframe % 500 == 0:
print 'Frame No.: %d' % iframe
molecules = [[] for i in range(len(mol))]
for xatom in frame:
for j, imol in enumerate(mol):
if xatom[1] == imol:
molecules[j].append(xatom)
xclusterLabel = [i for i in range(len(molecules))]
for i in range(len(molecules) - 1):
for j in range(i + 1, len(molecules)):
xdis = disMol(a, b, c, molecules[i], molecules[j])
if xdis <= criteriaCluster:
if xclusterLabel[j] == j:
xclusterLabel[j] = xclusterLabel[i]
else:
xclusterLabel[i] = xclusterLabel[j]
unionCluster = set(xclusterLabel)
count = []
for i in unionCluster:
cnttmp = xclusterLabel.count(i)
count.append(cnttmp)
avecount = float(sum(count)) / len(count)
return [avecount, len(unionCluster)]
print 'using %d CPUs to calculate distance.' % NProcess
# job_server2 = pp.Server(NProcess)
jobs2 = [job_server.submit(clustering, args=(iframe, frame, mol, criteriaCluster, a, b, c,), depfuncs=(disMol, dis,),
modules=("numpy","datetime"), globals=globals()) for iframe, frame in enumerate(trj[::stride])]
cluster = []
for job in jobs2:
cluster.append(job())
# print cluster
xcluster = [x[0] for x in cluster]
cumulave = ave_accum(xcluster)
with open(dirName + 'cluster.dat', 'w') as foo:
print >> foo, '#frame clustersize ave_accum No_of_clusters'
for i, t in enumerate(cluster):
print >> foo, '%5d%10.4f%10.4f%5d' % (i, t[0], cumulave[i], t[1])
time3 = datetime.now()
print 'timing: ', time3 - time2
print 'total time:',time3-time0
plt.figure(0, figsize=(8, 4))
figName = dirName + 'average cluster size.png'
plt.title('Average Cluster Size', fontsize=12)
plt.grid(True)
plt.xlabel('Frames', fontsize=12)
plt.ylabel('Number of molecules', fontsize=12)
plt.plot(xcluster, label='%s' % label)
plt.plot(cumulave, label='average', linewidth=3)
plt.legend(loc='best', ncol=4, fontsize=12, shadow=False)
plt.savefig(figName, format='png', dpi=300)
# plt.show()
plt.close(0)
| riddlezyc/geolab | src/structure/aggregateClusterAnalysis.py | Python | gpl-3.0 | 5,696 |
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def get_install_requires():
with open('requirements.txt', 'r') as f:
return [line.strip() for line in f]
def get_test_requires():
with open('test-requirements.txt', 'r') as f:
return [line.strip() for line in f if not line.startswith('-r ')]
setup(
name='libpagure',
packages=['libpagure'],
version='0.10',
description='A Python library for Pagure APIs.',
author='Lei Yang',
author_email='[email protected]',
url='https://github.com/yangl1996/libpagure',
keywords=['pagure', 'api', 'library'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: GNU General Public License v2 '
'or later (GPLv2+)',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries',
],
license='GNU General Public License v2.0',
install_requires=get_install_requires(),
      tests_require=get_test_requires(),
)
| yangl1996/libpagure | setup.py | Python | gpl-2.0 | 1,365 |
#! /usr/bin/env python
from collections import OrderedDict
from codonutils import translate_codon
from data_reader import (
CONSENSUS, any_fasta_reader)
MUTINS = 0
MUTDEL = 1
FSINS = 2
FSDEL = 3
def build_mutation(pos, cons, codon, indel_type=-1, indel=None):
codon = codon if indel_type != MUTDEL else ''
aa = translate_codon(codon) if codon else ''
if aa == cons:
return
return {
'Position': pos,
'ReferenceText': cons,
'CodonText': codon,
'AminoAcidText': aa,
'InsertedCodonsText': indel if indel_type == MUTINS else '',
'IsInsertion': indel_type == MUTINS,
'IsDeletion': indel_type == MUTDEL,
'IsPartial': '-' in codon
}
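# For illustration (made-up values): build_mutation(10, 'T', 'GCA') translates
# 'GCA' to 'A', which differs from the consensus 'T', so a mutation dict with
# Position 10 is returned; if the codon had translated back to 'T' the function
# would return None and no mutation would be recorded.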
def lanl_reader(gene, filename):
consensus = CONSENSUS[gene]['AASeq']
sequences = list(any_fasta_reader(filename))
patients = OrderedDict()
for header, naseq in sequences:
accession, ptid, subtype = header.split('.', 2)
if accession == 'K03455':
pass
elif not ptid or ptid == '-':
continue
elif ptid in patients:
continue
patients[ptid] = (accession, naseq, subtype)
headers = [header for header, _, _ in patients.values()]
sequences = [seq.upper() for _, seq, _ in patients.values()]
subtypes = [subtype for _, _, subtype in patients.values()]
hxb2idx = headers.index('K03455')
insertions = set()
for i, nas in enumerate(zip(*sequences)):
# use HXB2 to identify indels
is_insertion = nas[hxb2idx] == '-'
if is_insertion:
insertions.add(i)
header = ['Accession']
results = []
for j, sequence in enumerate(sequences):
all_codons = []
all_indels = {}
cur_codon = ''
cur_ins = ''
cur_pos = 0
for i, na in enumerate(sequence):
if len(cur_codon) == 3:
cur_pos += 1
numdel = cur_codon.count('-')
if numdel == 3:
all_indels[(cur_pos, MUTDEL)] = True
elif numdel > 0:
all_indels[(cur_pos, FSDEL)] = numdel
all_codons.append(cur_codon)
cur_codon = ''
if i in insertions:
if na != '-':
cur_ins += na
else:
if cur_ins:
mut_ins = cur_ins[:len(cur_ins) // 3 * 3]
                    fs_ins = cur_ins[len(cur_ins) // 3 * 3:]  # bases left over after full codons (frameshift)
if mut_ins:
all_indels[(cur_pos, MUTINS)] = mut_ins
if fs_ins:
all_indels[(cur_pos, FSINS)] = fs_ins
cur_ins = ''
cur_codon += na
else:
if len(cur_codon) != 3:
raise ValueError(
'Sequence {} is not fully aligned'.format(headers[j]))
cur_pos += 1
numdel = cur_codon.count('-')
if numdel == 3:
all_indels[(cur_pos, MUTDEL)] = True
elif numdel > 0:
all_indels[(cur_pos, FSDEL)] = numdel
all_codons.append(cur_codon)
seqresult = {
'Accession': headers[j],
'FirstAA': 1,
'LastAA': len(consensus),
'Subtype': subtypes[j],
'Mutations': [],
'NumFrameShifts': len([p for p, t in all_indels.keys()
if t in (FSINS, FSDEL)]),
'NASequence': sequence.replace('-', ''),
'AlignedNASequence': ''.join(all_codons)
}
for posm1, codon in enumerate(all_codons):
pos = posm1 + 1
cons = consensus[posm1]
mut = None
if (pos, MUTINS) in all_indels:
# ins
mut = build_mutation(pos, cons, codon, MUTINS,
all_indels[(pos, MUTINS)])
elif (pos, MUTDEL) in all_indels:
# del
mut = build_mutation(pos, cons, codon, MUTDEL,
all_indels[(pos, MUTDEL)])
else:
mut = build_mutation(pos, cons, codon)
if mut:
seqresult['Mutations'].append(mut)
results.append(seqresult)
return results
if __name__ == '__main__':
lanl_reader('Gag', '/app/local/hiv-db_gag_squeeze.fasta')
| hivdb/gag-gp41 | scripts/lanl_reader.py | Python | cc0-1.0 | 4,425 |
import functools
from multiprocessing.pool import ThreadPool
from avocado.models import DataView
from avocado.formatters import FormatterMismatchError, registry as formatters
from cStringIO import StringIO
class BaseExporter(object):
"Base class for all exporters"
short_name = 'base'
file_extension = 'txt'
content_type = 'text/plain'
preferred_formats = ()
# List of available readers by name. Call reader(name) to return
# the specified reader.
readers = (
'default',
'manual',
'threaded',
'cached',
'cached_threaded',
)
def __init__(self, concepts=None, preferred_formats=None):
if preferred_formats is not None:
self.preferred_formats = preferred_formats
if concepts is None:
concepts = ()
elif isinstance(concepts, DataView):
node = concepts.parse()
concepts = node.get_concepts_for_select()
self.params = []
self.row_length = 0
self.concepts = concepts
self._header = []
self._header_checked = False
for concept in concepts:
formatter_class = formatters.get(concept.formatter)
self.add_formatter(formatter_class, concept=concept)
self._format_cache = {}
def __repr__(self):
return u'<{0}: {1}/{2}>'.format(self.__class__.__name__,
len(self.params), self.row_length)
def add_formatter(self, formatter_class, concept=None, keys=None,
index=None):
# Initialize a formatter instance.
formatter = formatter_class(concept=concept,
keys=keys,
formats=self.preferred_formats)
length = len(formatter.field_names)
params = (formatter, length)
self.row_length += length
if index is not None:
self.params.insert(index, params)
else:
self.params.append(params)
# Get the expected fields from this formatter to build
# up the header.
meta = formatter.get_meta(exporter=self.short_name.lower())
header = meta['header']
if index is not None:
self._header.insert(index, header)
else:
self._header.append(header)
@property
def header(self):
header = []
for fields in self._header:
header.extend(fields)
return header
def get_file_obj(self, name=None):
if name is None:
return StringIO()
if isinstance(name, basestring):
return open(name, 'w+')
return name
def _check_header(self, row, context):
self._header_checked = True
errors = []
# Compare the header fields with the row output.
for i, (formatter, length) in enumerate(self.params):
values, row = row[:length], row[length:]
fields = self._header[i]
output = formatter(values, context)
if len(fields) != len(output):
errors.append('Formatter "{0}" header is size {1}, '
'but outputs a record of size {2} for '
'concept "{3}"'
.format(formatter, len(fields), len(output),
formatter.concept))
if errors:
raise FormatterMismatchError(errors)
def _format_row(self, row, context=None):
if not self._header_checked:
self._check_header(row, context)
_row = []
for formatter, length in self.params:
values, row = row[:length], row[length:]
_row.extend(formatter(values, context=context))
return tuple(_row)
def _cache_format_row(self, row, context=None):
if not self._header_checked:
self._check_header(row, context)
_row = []
for formatter, length in self.params:
values, row = row[:length], row[length:]
key = (formatter, values)
if key not in self._format_cache:
segment = formatter(values, context=context)
self._format_cache[key] = segment
else:
segment = self._format_cache[key]
_row.extend(segment)
return tuple(_row)
def reader(self, name=None):
if name == 'threaded':
return self.threaded_read
if name == 'cached':
return self.cached_read
if name == 'cached_threaded':
return self.cached_threaded_read
if name == 'manual':
return self.manual_read
if not name or name == 'default':
return self.read
raise ValueError('No reader named {0}. Choices are {1}'
.format(name, ', '.join(self.readers)))
def read(self, iterable, *args, **kwargs):
"Reads an iterable and generates formatted rows."
for row in iterable:
yield self._format_row(row, context=kwargs)
def cached_read(self, iterable, *args, **kwargs):
"""Reads an iterable and generates formatted rows.
This read implementation caches the output segments of the input
values and can significantly speed up formatting at the expense of
memory.
The benefit of this method is dependent on the data. If there is
high variability in the data, this method may not perform well.
"""
self._format_cache = {}
for row in iterable:
yield self._cache_format_row(row, context=kwargs)
def threaded_read(self, iterable, threads=None, *args, **kwargs):
"""Reads an iterable and generates formatted rows.
This read implementation starts a pool of worker threads to format
the data in parallel.
"""
pool = ThreadPool(threads)
f = functools.partial(self._format_row,
context=kwargs)
for row in pool.map(f, iterable):
yield row
def cached_threaded_read(self, iterable, threads=None, *args, **kwargs):
"""Reads an iterable and generates formatted rows.
This read implementation combines the `cached_read` and `threaded_read`
methods.
"""
self._format_cache = {}
pool = ThreadPool(threads)
f = functools.partial(self._cache_format_row,
context=kwargs)
for row in pool.map(f, iterable):
yield row
def manual_read(self, iterable, force_distinct=True, offset=None,
limit=None, *args, **kwargs):
"""Reads an iterable and generates formatted rows.
This method must be used if columns were added to the query to get
particular ordering, but are not part of the concepts being handled.
If `force_distinct` is true, rows will be filtered based on the slice
of the row that is going to be formatted.
If `offset` is defined, only rows that are produced after the offset
index are returned.
If `limit` is defined, once the limit has been reached (or the
iterator is exhausted), the loop will exit.
"""
emitted = 0
unique_rows = set()
for i, row in enumerate(iterable):
if limit is not None and emitted >= limit:
break
_row = row[:self.row_length]
if force_distinct:
_row_hash = hash(tuple(_row))
if _row_hash in unique_rows:
continue
unique_rows.add(_row_hash)
if offset is None or i >= offset:
emitted += 1
yield self._format_row(_row, context=kwargs)
def write(self, iterable, *args, **kwargs):
for row in iterable:
yield row
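# Illustrative usage sketch (assumes a concrete exporter subclass such as a CSV
# exporter and an iterable of raw result rows; `CSVExporter`, `view`, `rows`
# and `writer` are hypothetical names, not part of this module):
#
#     exporter = CSVExporter(concepts=view)        # `view` may be a DataView
#     read = exporter.reader('cached_threaded')    # pick one of `readers`
#     for formatted_row in read(rows, threads=4):
#         writer.writerow(formatted_row)
#
# The cached readers trade memory for speed when input values repeat often;
# the threaded readers format rows in a worker thread pool; `manual_read`
# supports distinct/offset/limit handling when extra ordering columns were
# added to the query.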
| murphyke/avocado | avocado/export/_base.py | Python | bsd-2-clause | 7,956 |
#coding: utf-8
"""
this file for define params of program
"""
G_DEBUG = True #调试阶段为True 正式部署至北京服务器为False
# =====================================================================================================================
if G_DEBUG:  # development configuration
    End = 0
    # primary domain
    G_DOMAIN = 'http://127.0.0.1:8000'
else:  # production release
    End = 0
    # primary domain
    G_DOMAIN = ''
# sender email account
G_EMAIL_SENDER_NAME = '[email protected]'
# sender account password
G_EMAIL_SENDER_PW = '**********'
# SMTP server address
G_EMAIL_SMTP = 'smtp.exmail.qq.com'
# SMTP server port
G_EMAIL_PORT = 25
# =====================================================================================================================
| hupeng/Blog | public_define/pub_define.py | Python | gpl-2.0 | 837 |
import json
import logging
import random
import time
from datetime import date, datetime, timedelta
from django.conf import settings
from django.contrib import auth
from django.contrib import messages
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.core.exceptions import PermissionDenied
from django.core.paginator import EmptyPage, PageNotAnInteger
from django.db.models import Q
from django.http import (HttpResponseRedirect, HttpResponse, Http404,
HttpResponseBadRequest, HttpResponseForbidden)
from django.shortcuts import get_object_or_404, render, redirect
from django.template.loader import render_to_string
from django.utils.translation import ugettext as _, ugettext_lazy as _lazy
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST, require_GET, require_http_methods
import waffle
from ordereddict import OrderedDict
from mobility.decorators import mobile_template
from session_csrf import anonymous_csrf
from statsd import statsd
from taggit.models import Tag
from tidings.events import ActivationRequestFailed
from tidings.models import Watch
from kitsune.access.decorators import permission_required, login_required
from kitsune.community.utils import top_contributors_questions
from kitsune.products.api import ProductSerializer, TopicSerializer
from kitsune.products.models import Product, Topic
from kitsune.questions import config
from kitsune.questions.events import QuestionReplyEvent, QuestionSolvedEvent
from kitsune.questions.feeds import (
QuestionsFeed, AnswersFeed, TaggedQuestionsFeed)
from kitsune.questions.forms import (
NewQuestionForm, EditQuestionForm, AnswerForm, WatchQuestionForm,
FREQUENCY_CHOICES, MarketplaceAaqForm, MarketplaceRefundForm,
MarketplaceDeveloperRequestForm, StatsForm)
from kitsune.questions.marketplace import (
MARKETPLACE_CATEGORIES, ZendeskError)
from kitsune.questions.models import (
Question, Answer, QuestionVote, AnswerVote, QuestionMappingType,
QuestionLocale)
from kitsune.questions.signals import tag_added
from kitsune.search.es_utils import (ES_EXCEPTIONS, Sphilastic, F,
es_query_with_analyzer)
from kitsune.search.utils import locale_or_default, clean_excerpt
from kitsune.sumo.api_utils import JSONRenderer
from kitsune.sumo.decorators import ssl_required, ratelimit
from kitsune.sumo.templatetags.jinja_helpers import urlparams
from kitsune.sumo.urlresolvers import reverse, split_path
from kitsune.sumo.utils import paginate, simple_paginate, build_paged_url, is_ratelimited
from kitsune.tags.utils import add_existing_tag
from kitsune.upload.api import ImageAttachmentSerializer
from kitsune.upload.models import ImageAttachment
from kitsune.upload.views import upload_imageattachment
from kitsune.users.forms import RegisterForm
from kitsune.users.templatetags.jinja_helpers import display_name
from kitsune.users.models import Setting
from kitsune.users.utils import handle_login, handle_register
from kitsune.wiki.facets import documents_for, topics_for
from kitsune.wiki.models import Document, DocumentMappingType
log = logging.getLogger('k.questions')
UNAPPROVED_TAG = _lazy(u'That tag does not exist.')
NO_TAG = _lazy(u'Please provide a tag.')
FILTER_GROUPS = {
'all': OrderedDict([
('recently-unanswered', _lazy('Recently unanswered')),
]),
'needs-attention': OrderedDict([
('new', _lazy('New')),
('unhelpful-answers', _lazy('Answers didn\'t help')),
]),
'responded': OrderedDict([
('needsinfo', _lazy('Needs info')),
('solution-provided', _lazy('Solution provided')),
]),
'done': OrderedDict([
('solved', _lazy('Solved')),
('locked', _lazy('Locked')),
]),
}
ORDER_BY = OrderedDict([
('updated', ('updated', _lazy('Updated'))),
('views', ('questionvisits__visits', _lazy('Views'))),
('votes', ('num_votes_past_week', _lazy('Votes'))),
('replies', ('num_answers', _lazy('Replies'))),
])
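# Illustrative note (not part of the original module): the structures above
# drive question_list below via query-string parameters. For example, a
# hypothetical request ending in
#
#     ?show=done&filter=solved&order=views&sort=desc
#
# selects the 'done' filter group, its 'solved' filter, and sorts by
# 'questionvisits__visits' descending; unrecognized values fall back to the
# 'needs-attention' group and the 'updated' ordering.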
@mobile_template('questions/{mobile/}product_list.html')
def product_list(request, template):
"""View to select a product to see related questions."""
return render(request, template, {
'products': Product.objects.filter(questions_locales__locale=request.LANGUAGE_CODE)
})
@mobile_template('questions/{mobile/}question_list.html')
def question_list(request, template, product_slug):
"""View the list of questions."""
filter_ = request.GET.get('filter')
owner = request.GET.get(
'owner', request.session.get('questions_owner', 'all'))
show = request.GET.get('show')
# Show defaults to NEEDS ATTENTION
if show not in FILTER_GROUPS:
show = 'needs-attention'
escalated = request.GET.get('escalated')
tagged = request.GET.get('tagged')
tags = None
topic_slug = request.GET.get('topic')
order = request.GET.get('order', 'updated')
if order not in ORDER_BY:
        order = 'updated'
sort = request.GET.get('sort', 'desc')
product_slugs = product_slug.split(',')
products = []
if len(product_slugs) > 1 or product_slugs[0] != 'all':
for slug in product_slugs:
products.append(get_object_or_404(Product, slug=slug))
multiple = len(products) > 1
else:
# We want all products (no product filtering at all).
products = None
multiple = True
if topic_slug and not multiple:
# We don't support topics when there is more than one product.
# There is no way to know what product the topic applies to.
try:
topic = Topic.objects.get(slug=topic_slug, product=products[0])
except Topic.DoesNotExist:
topic = None
else:
topic = None
question_qs = Question.objects
if filter_ not in FILTER_GROUPS[show]:
filter_ = None
if escalated:
filter_ = None
if filter_ == 'new':
question_qs = question_qs.new()
elif filter_ == 'unhelpful-answers':
question_qs = question_qs.unhelpful_answers()
elif filter_ == 'needsinfo':
question_qs = question_qs.needs_info()
elif filter_ == 'solution-provided':
question_qs = question_qs.solution_provided()
elif filter_ == 'solved':
question_qs = question_qs.solved()
elif filter_ == 'locked':
question_qs = question_qs.locked()
elif filter_ == 'recently-unanswered':
question_qs = question_qs.recently_unanswered()
else:
if show == 'needs-attention':
question_qs = question_qs.needs_attention()
if show == 'responded':
question_qs = question_qs.responded()
if show == 'done':
question_qs = question_qs.done()
if escalated:
question_qs = question_qs.filter(
tags__name__in=[config.ESCALATE_TAG_NAME])
question_qs = question_qs.select_related(
'creator', 'last_answer', 'last_answer__creator')
question_qs = question_qs.prefetch_related('topic', 'topic__product')
question_qs = question_qs.filter(creator__is_active=1)
if not request.user.has_perm('flagit.can_moderate'):
question_qs = question_qs.filter(is_spam=False)
if owner == 'mine' and request.user.is_authenticated():
criteria = Q(answers__creator=request.user) | Q(creator=request.user)
question_qs = question_qs.filter(criteria).distinct()
else:
owner = None
feed_urls = ((urlparams(reverse('questions.feed'),
product=product_slug, topic=topic_slug),
QuestionsFeed().title()),)
if tagged:
tag_slugs = tagged.split(',')
tags = Tag.objects.filter(slug__in=tag_slugs)
if tags:
for t in tags:
question_qs = question_qs.filter(tags__name__in=[t.name])
if len(tags) == 1:
feed_urls += ((reverse('questions.tagged_feed',
args=[tags[0].slug]),
TaggedQuestionsFeed().title(tags[0])),)
else:
question_qs = Question.objects.none()
# Exclude questions over 90 days old without an answer.
oldest_date = date.today() - timedelta(days=90)
question_qs = question_qs.exclude(created__lt=oldest_date, num_answers=0)
# Filter by products.
if products:
# This filter will match if any of the products on a question have the
# correct id.
question_qs = question_qs.filter(product__in=products).distinct()
# Filter by topic.
if topic:
# This filter will match if any of the topics on a question have the
# correct id.
question_qs = question_qs.filter(topic__id=topic.id)
# Filter by locale for AAQ locales, and by locale + default for others.
if request.LANGUAGE_CODE in QuestionLocale.objects.locales_list():
forum_locale = request.LANGUAGE_CODE
locale_query = Q(locale=request.LANGUAGE_CODE)
else:
forum_locale = settings.WIKI_DEFAULT_LANGUAGE
locale_query = Q(locale=request.LANGUAGE_CODE)
locale_query |= Q(locale=settings.WIKI_DEFAULT_LANGUAGE)
question_qs = question_qs.filter(locale_query)
# Set the order.
order_by = ORDER_BY[order][0]
question_qs = question_qs.order_by(
order_by if sort == 'asc' else '-%s' % order_by)
try:
with statsd.timer('questions.view.paginate.%s' % filter_):
questions_page = simple_paginate(
request, question_qs, per_page=config.QUESTIONS_PER_PAGE)
except (PageNotAnInteger, EmptyPage):
# If we aren't on page 1, redirect there.
# TODO: Is 404 more appropriate?
if request.GET.get('page', '1') != '1':
url = build_paged_url(request)
return HttpResponseRedirect(urlparams(url, page=1))
# Recent answered stats
extra_filters = locale_query
if products:
extra_filters &= Q(product__in=products)
recent_asked_count = Question.recent_asked_count(extra_filters)
recent_unanswered_count = Question.recent_unanswered_count(extra_filters)
if recent_asked_count:
recent_answered_percent = int(
(float(recent_asked_count - recent_unanswered_count) /
recent_asked_count) * 100)
else:
recent_answered_percent = 0
# List of products to fill the selector.
product_list = Product.objects.filter(visible=True)
# List of topics to fill the selector. Only shows if there is exactly
# one product selected.
if products and not multiple:
topic_list = Topic.objects.filter(
visible=True, product=products[0])[:10]
else:
topic_list = []
# Store current filters in the session
if request.user.is_authenticated():
request.session['questions_owner'] = owner
# Get the top contributors for the locale and product.
# If we are in a product forum, limit the top contributors to that product.
if products and len(products) == 1:
product = products[0]
else:
product = None
try:
top_contributors, _ = top_contributors_questions(
locale=forum_locale, product=product)
except ES_EXCEPTIONS:
top_contributors = []
statsd.incr('questions.topcontributors.eserror')
log.exception('Support Forum Top contributors query failed.')
data = {'questions': questions_page,
'feeds': feed_urls,
'filter': filter_,
'owner': owner,
'show': show,
'filters': FILTER_GROUPS[show],
'order': order,
'orders': ORDER_BY,
'sort': sort,
'escalated': escalated,
'tags': tags,
'tagged': tagged,
'recent_asked_count': recent_asked_count,
'recent_unanswered_count': recent_unanswered_count,
'recent_answered_percent': recent_answered_percent,
'product_list': product_list,
'products': products,
'product_slug': product_slug,
'multiple_products': multiple,
'all_products': product_slug == 'all',
'top_contributors': top_contributors,
'topic_list': topic_list,
'topic': topic}
with statsd.timer('questions.view.render'):
return render(request, template, data)
def parse_troubleshooting(troubleshooting_json):
"""Normalizes the troubleshooting data from `question`.
Returns a normalized version, or `None` if something was wrong.
This does not try very hard to fix bad data.
"""
if not troubleshooting_json:
return None
try:
parsed = json.loads(troubleshooting_json)
except ValueError:
return None
# This is a spec of what is expected to be in the parsed
# troubleshooting data. The format here is a list of tuples. The
# first item in the tuple is a list of keys to access to get to the
# item in question. The second item in the tuple is the type the
# referenced item should be. For example, this line
#
# (('application', 'name'), basestring),
#
# means that parse['application']['name'] must be a basestring.
#
# An empty path means the parsed json.
spec = (
((), dict),
(('accessibility', ), dict),
(('accessibility', 'isActive'), bool),
(('application', ), dict),
(('application', 'name'), basestring),
(('application', 'supportURL'), basestring),
(('application', 'userAgent'), basestring),
(('application', 'version'), basestring),
(('extensions', ), list),
(('graphics', ), dict),
(('javaScript', ), dict),
(('modifiedPreferences', ), dict),
(('userJS', ), dict),
(('userJS', 'exists'), bool),
)
for path, type_ in spec:
item = parsed
for piece in path:
item = item.get(piece)
if item is None:
return None
if not isinstance(item, type_):
return None
# The data has been inspected, and should be in the right format.
# Now remove all the printing preferences, because they are noisy.
# TODO: If the UI for this gets better, we can include these prefs
# and just make them collapsible.
parsed['modifiedPreferences'] = dict(
(key, val) for (key, val) in parsed['modifiedPreferences'].items()
if not key.startswith('print'))
return parsed
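# Illustrative example (hypothetical values) of minimal troubleshooting JSON
# that satisfies the spec enforced above; real payloads contain many more keys:
#
#     {
#       "accessibility": {"isActive": false},
#       "application": {"name": "Firefox", "supportURL": "https://support.mozilla.org/",
#                       "userAgent": "Mozilla/5.0 ...", "version": "38.0"},
#       "extensions": [],
#       "graphics": {},
#       "javaScript": {},
#       "modifiedPreferences": {"print.printer": "dropped by the parser"},
#       "userJS": {"exists": false}
#     }
#
# Any modifiedPreferences keys starting with "print" are stripped from the
# returned dict, as done above.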
@mobile_template('questions/{mobile/}question_details.html')
@anonymous_csrf # Need this so the anon csrf gets set for watch forms.
def question_details(request, template, question_id, form=None,
watch_form=None, answer_preview=None, **extra_kwargs):
"""View the answers to a question."""
ans_ = _answers_data(request, question_id, form, watch_form,
answer_preview)
question = ans_['question']
if question.is_spam and not request.user.has_perm('flagit.can_moderate'):
raise Http404('No question matches the given query.')
# Try to parse troubleshooting data as JSON.
troubleshooting_json = question.metadata.get('troubleshooting')
question.metadata['troubleshooting_parsed'] = (
parse_troubleshooting(troubleshooting_json))
if request.user.is_authenticated():
ct = ContentType.objects.get_for_model(request.user)
ans_['images'] = ImageAttachment.objects.filter(creator=request.user,
content_type=ct)
extra_kwargs.update(ans_)
products = Product.objects.filter(visible=True)
topics = topics_for(product=question.product)
related_documents = question.related_documents
related_questions = question.related_questions
extra_kwargs.update({'all_products': products, 'all_topics': topics,
'product': question.product, 'topic': question.topic,
'related_documents': related_documents,
'related_questions': related_questions})
if not request.MOBILE:
# Add noindex to questions without a solution.
if not question.solution_id:
extra_kwargs.update(robots_noindex=True)
return render(request, template, extra_kwargs)
@require_POST
@permission_required('questions.change_question')
def edit_details(request, question_id):
try:
product = Product.objects.get(id=request.POST.get('product'))
topic = Topic.objects.get(id=request.POST.get('topic'),
product=product)
locale = request.POST.get('locale')
        # If the locale is not one of the AAQ locales, .index() raises a ValueError
tuple(QuestionLocale.objects.locales_list()).index(locale)
except (Product.DoesNotExist, Topic.DoesNotExist, ValueError):
return HttpResponseBadRequest()
question = get_object_or_404(Question, pk=question_id)
question.product = product
question.topic = topic
question.locale = locale
question.save()
return redirect(reverse('questions.details',
kwargs={'question_id': question_id}))
@ssl_required
@anonymous_csrf
def aaq_react(request):
request.session['in-aaq'] = True
to_json = JSONRenderer().render
products = ProductSerializer(
Product.objects.filter(questions_locales__locale=request.LANGUAGE_CODE),
many=True)
topics = TopicSerializer(Topic.objects.filter(in_aaq=True), many=True)
ctx = {
'products_json': to_json(products.data),
'topics_json': to_json(topics.data),
}
if request.user.is_authenticated():
user_ct = ContentType.objects.get_for_model(request.user)
images = ImageAttachmentSerializer(
ImageAttachment.objects.filter(creator=request.user, content_type=user_ct), many=True)
ctx['images_json'] = to_json(images.data)
else:
ctx['images_json'] = to_json([])
return render(request, 'questions/new_question_react.html', ctx)
@ssl_required
@mobile_template('questions/{mobile/}new_question.html')
@anonymous_csrf # This view renders a login form
def aaq(request, product_key=None, category_key=None, showform=False,
template=None, step=0):
"""Ask a new question."""
# Use react version if waffle flag is set
if waffle.flag_is_active(request, 'new_aaq'):
return aaq_react(request)
# This tells our LogoutDeactivatedUsersMiddleware not to
# boot this user.
request.session['in-aaq'] = True
if (request.LANGUAGE_CODE not in QuestionLocale.objects.locales_list() and
request.LANGUAGE_CODE != settings.WIKI_DEFAULT_LANGUAGE):
locale, path = split_path(request.path)
path = '/' + settings.WIKI_DEFAULT_LANGUAGE + '/' + path
old_lang = settings.LANGUAGES_DICT[request.LANGUAGE_CODE.lower()]
new_lang = settings.LANGUAGES_DICT[settings.WIKI_DEFAULT_LANGUAGE
.lower()]
msg = (_(u"The questions forum isn't available in {old_lang}, we "
u"have redirected you to the {new_lang} questions forum.")
.format(old_lang=old_lang, new_lang=new_lang))
messages.add_message(request, messages.WARNING, msg)
return HttpResponseRedirect(path)
if product_key is None:
product_key = request.GET.get('product')
if request.MOBILE and product_key is None:
# Firefox OS is weird. The best way we can detect it is to
# look for a mobile Firefox that is not Android.
ua = request.META.get('HTTP_USER_AGENT', '').lower()
if 'firefox' in ua and 'android' not in ua:
product_key = 'firefox-os'
elif 'fxios' in ua:
product_key = 'ios'
else:
product_key = 'mobile'
product_config = config.products.get(product_key)
if product_key and not product_config:
raise Http404
if product_config and 'product' in product_config:
try:
product = Product.objects.get(slug=product_config['product'])
except Product.DoesNotExist:
pass
else:
if not product.questions_locales.filter(locale=request.LANGUAGE_CODE).count():
locale, path = split_path(request.path)
path = '/' + settings.WIKI_DEFAULT_LANGUAGE + '/' + path
old_lang = settings.LANGUAGES_DICT[request.LANGUAGE_CODE.lower()]
new_lang = settings.LANGUAGES_DICT[settings.WIKI_DEFAULT_LANGUAGE.lower()]
msg = (_(u"The questions forum isn't available for {product} in {old_lang}, we "
u"have redirected you to the {new_lang} questions forum.")
.format(product=product.title, old_lang=old_lang, new_lang=new_lang))
messages.add_message(request, messages.WARNING, msg)
return HttpResponseRedirect(path)
if category_key is None:
category_key = request.GET.get('category')
if product_config and category_key:
product_obj = Product.objects.get(slug=product_config.get('product'))
category_config = product_config['categories'].get(category_key)
if not category_config:
# If we get an invalid category, redirect to previous step.
return HttpResponseRedirect(reverse('questions.aaq_step2', args=[product_key]))
deadend = category_config.get('deadend', False)
topic = category_config.get('topic')
if topic:
html = None
articles, fallback = documents_for(
locale=request.LANGUAGE_CODE,
products=[product_obj],
topics=[Topic.objects.get(slug=topic, product=product_obj)])
else:
html = category_config.get('html')
articles = category_config.get('articles')
else:
        category_config = None
        product_obj = None
deadend = product_config.get('deadend', False) if product_config else False
html = product_config.get('html') if product_config else None
articles = None
if product_config:
# User is on the select category step
statsd.incr('questions.aaq.select-category')
else:
# User is on the select product step
statsd.incr('questions.aaq.select-product')
if request.MOBILE:
login_t = 'questions/mobile/new_question_login.html'
else:
login_t = 'questions/new_question_login.html'
if request.method == 'GET':
search = request.GET.get('search', '')
if search:
results = _search_suggestions(
request,
search,
locale_or_default(request.LANGUAGE_CODE),
[product_config.get('product')])
tried_search = True
else:
results = []
tried_search = False
if category_config:
# User is on the "Ask This" step
statsd.incr('questions.aaq.search-form')
if showform or request.GET.get('showform'):
# Before we show the form, make sure the user is auth'd:
if not request.user.is_authenticated():
# User is on the login or register Step
statsd.incr('questions.aaq.login-or-register')
login_form = AuthenticationForm()
register_form = RegisterForm()
return render(request, login_t, {
'product': product_config,
'category': category_config,
'title': search,
'register_form': register_form,
'login_form': login_form})
form = NewQuestionForm(
product=product_config,
category=category_config,
initial={'title': search})
# User is on the question details step
statsd.incr('questions.aaq.details-form')
else:
form = None
if search:
# User is on the article and questions suggestions step
statsd.incr('questions.aaq.suggestions')
return render(request, template, {
'form': form,
'results': results,
'tried_search': tried_search,
'products': config.products,
'current_product': product_config,
'current_category': category_config,
'current_html': html,
'current_articles': articles,
'current_step': step,
'deadend': deadend,
'host': Site.objects.get_current().domain})
# Handle the form post.
if not request.user.is_authenticated():
if request.POST.get('login'):
login_form = handle_login(request, only_active=False)
statsd.incr('questions.user.login')
register_form = RegisterForm()
if login_form.is_valid():
statsd.incr('questions.user.login.success')
else:
statsd.incr('questions.user.login.fail')
elif request.POST.get('register'):
login_form = AuthenticationForm()
register_form = handle_register(
request=request,
text_template='users/email/activate.ltxt',
html_template='users/email/activate.html',
subject=_('Please confirm your Firefox Help question'),
email_data=request.GET.get('search'),
reg='aaq')
if register_form.is_valid(): # Now try to log in.
user = auth.authenticate(
username=request.POST.get('username'),
password=request.POST.get('password'))
auth.login(request, user)
statsd.incr('questions.user.register')
else:
# L10n: This shouldn't happen unless people tamper with POST data.
message = _lazy('Request type not recognized.')
return render(request, 'handlers/400.html', {'message': message}, status=400)
if request.user.is_authenticated():
# Redirect to GET the current URL replacing the step parameter.
# This is also required for the csrf middleware to set the auth'd
# tokens appropriately.
url = urlparams(request.get_full_path(), step='aaq-question')
return HttpResponseRedirect(url)
else:
return render(request, login_t, {
'product': product_config,
'category': category_config,
'title': request.POST.get('title'),
'register_form': register_form,
'login_form': login_form})
form = NewQuestionForm(product=product_config, category=category_config, data=request.POST)
# NOJS: upload image
if 'upload_image' in request.POST:
upload_imageattachment(request, request.user)
user_ct = ContentType.objects.get_for_model(request.user)
if form.is_valid() and not is_ratelimited(request, 'aaq-day', '5/d'):
question = Question(
creator=request.user,
title=form.cleaned_data['title'],
content=form.cleaned_data['content'],
locale=request.LANGUAGE_CODE)
if product_obj:
question.product = product_obj
if category_config:
t = category_config.get('topic')
if t:
question.topic = Topic.objects.get(slug=t, product=product_obj)
question.save()
qst_ct = ContentType.objects.get_for_model(question)
# Move over to the question all of the images I added to the reply form
up_images = ImageAttachment.objects.filter(creator=request.user, content_type=user_ct)
up_images.update(content_type=qst_ct, object_id=question.id)
# User successfully submitted a new question
statsd.incr('questions.new')
question.add_metadata(**form.cleaned_metadata)
if product_config:
# TODO: This add_metadata call should be removed once we are
            # fully IA-driven (sync isn't a special case anymore).
question.add_metadata(product=product_config['key'])
if category_config:
# TODO: This add_metadata call should be removed once we are
            # fully IA-driven (sync isn't a special case anymore).
question.add_metadata(category=category_config['key'])
# The first time a question is saved, automatically apply some tags:
question.auto_tag()
# Submitting the question counts as a vote
question_vote(request, question.id)
if request.user.is_active:
messages.add_message(
request, messages.SUCCESS,
_('Done! Your question is now posted on the Mozilla community support forum.'))
# Done with AAQ.
request.session['in-aaq'] = False
url = reverse('questions.details', kwargs={'question_id': question.id})
return HttpResponseRedirect(url)
return HttpResponseRedirect(reverse('questions.aaq_confirm'))
if getattr(request, 'limited', False):
raise PermissionDenied
images = ImageAttachment.objects.filter(creator=request.user,
content_type=user_ct)
statsd.incr('questions.aaq.details-form-error')
return render(request, template, {
'form': form,
'images': images,
'products': config.products,
'current_product': product_config,
'current_category': category_config,
'current_articles': articles})
@ssl_required
def aaq_step2(request, product_key):
"""Step 2: The product is selected."""
return aaq(request, product_key=product_key, step=1)
@ssl_required
def aaq_step3(request, product_key, category_key):
"""Step 3: The product and category is selected."""
return aaq(request, product_key=product_key, category_key=category_key,
step=1)
@ssl_required
def aaq_step4(request, product_key, category_key):
"""Step 4: Search query entered."""
return aaq(request, product_key=product_key, category_key=category_key,
step=1)
@ssl_required
def aaq_step5(request, product_key, category_key):
"""Step 5: Show full question form."""
return aaq(request, product_key=product_key, category_key=category_key,
showform=True, step=3)
def aaq_confirm(request):
"""AAQ confirm email step for new users."""
if request.user.is_authenticated():
email = request.user.email
auth.logout(request)
statsd.incr('questions.user.logout')
else:
email = None
# Done with AAQ.
request.session['in-aaq'] = False
confirm_t = ('questions/mobile/confirm_email.html' if request.MOBILE
else 'questions/confirm_email.html')
return render(request, confirm_t, {'email': email})
@require_http_methods(['GET', 'POST'])
@login_required
def edit_question(request, question_id):
"""Edit a question."""
question = get_object_or_404(Question, pk=question_id)
user = request.user
if not question.allows_edit(user):
raise PermissionDenied
ct = ContentType.objects.get_for_model(question)
images = ImageAttachment.objects.filter(content_type=ct, object_id=question.pk)
if request.method == 'GET':
initial = question.metadata.copy()
initial.update(title=question.title, content=question.content)
form = EditQuestionForm(product=question.product_config,
category=question.category_config,
initial=initial)
else:
form = EditQuestionForm(data=request.POST,
product=question.product_config,
category=question.category_config)
# NOJS: upload images, if any
upload_imageattachment(request, question)
if form.is_valid():
question.title = form.cleaned_data['title']
question.content = form.cleaned_data['content']
question.updated_by = user
question.save()
# TODO: Factor all this stuff up from here and new_question into
# the form, which should probably become a ModelForm.
question.clear_mutable_metadata()
question.add_metadata(**form.cleaned_metadata)
return HttpResponseRedirect(reverse('questions.details',
kwargs={'question_id': question.id}))
return render(request, 'questions/edit_question.html', {
'question': question,
'form': form,
'images': images,
'current_product': question.product_config,
'current_category': question.category_config})
def _skip_answer_ratelimit(request):
"""Exclude image uploading and deleting from the reply rate limiting.
Also exclude users with the questions.bypass_ratelimit permission.
"""
return 'delete_images' in request.POST or 'upload_image' in request.POST
@require_POST
@login_required
@ratelimit('answer-min', '4/m', skip_if=_skip_answer_ratelimit)
@ratelimit('answer-day', '100/d', skip_if=_skip_answer_ratelimit)
def reply(request, question_id):
"""Post a new answer to a question."""
question = get_object_or_404(Question, pk=question_id, is_spam=False)
answer_preview = None
if not question.allows_new_answer(request.user):
raise PermissionDenied
form = AnswerForm(request.POST)
# NOJS: delete images
if 'delete_images' in request.POST:
for image_id in request.POST.getlist('delete_image'):
ImageAttachment.objects.get(pk=image_id).delete()
return question_details(request, question_id=question_id, form=form)
# NOJS: upload image
if 'upload_image' in request.POST:
upload_imageattachment(request, request.user)
return question_details(request, question_id=question_id, form=form)
if form.is_valid() and not request.limited:
answer = Answer(question=question, creator=request.user,
content=form.cleaned_data['content'])
if 'preview' in request.POST:
answer_preview = answer
else:
answer.save()
ans_ct = ContentType.objects.get_for_model(answer)
# Move over to the answer all of the images I added to the
# reply form
user_ct = ContentType.objects.get_for_model(request.user)
up_images = ImageAttachment.objects.filter(creator=request.user,
content_type=user_ct)
up_images.update(content_type=ans_ct, object_id=answer.id)
statsd.incr('questions.answer')
# Handle needsinfo tag
if 'needsinfo' in request.POST:
question.set_needs_info()
elif 'clear_needsinfo' in request.POST:
question.unset_needs_info()
if Setting.get_for_user(request.user,
'questions_watch_after_reply'):
QuestionReplyEvent.notify(request.user, question)
return HttpResponseRedirect(answer.get_absolute_url())
return question_details(
request, question_id=question_id, form=form,
answer_preview=answer_preview)
def solve(request, question_id, answer_id):
"""Accept an answer as the solution to the question."""
question = get_object_or_404(Question, pk=question_id, is_spam=False)
# It is possible this was clicked from the email.
if not request.user.is_authenticated():
watch_secret = request.GET.get('watch', None)
try:
watch = Watch.objects.get(secret=watch_secret,
event_type='question reply',
user=question.creator)
# Create a new secret.
distinguishable_letters = \
'abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRTUVWXYZ'
new_secret = ''.join(random.choice(distinguishable_letters)
for x in xrange(10))
watch.update(secret=new_secret)
request.user = question.creator
except Watch.DoesNotExist:
# This user is neither authenticated nor using the correct secret
return HttpResponseForbidden()
answer = get_object_or_404(Answer, pk=answer_id, is_spam=False)
if not question.allows_solve(request.user):
raise PermissionDenied
if (question.creator != request.user and
not request.user.has_perm('questions.change_solution')):
return HttpResponseForbidden()
if not question.solution:
question.set_solution(answer, request.user)
messages.add_message(request, messages.SUCCESS,
_('Thank you for choosing a solution!'))
else:
# The question was already solved.
messages.add_message(request, messages.ERROR,
_('This question already has a solution.'))
return HttpResponseRedirect(question.get_absolute_url())
@require_POST
@login_required
def unsolve(request, question_id, answer_id):
"""Accept an answer as the solution to the question."""
question = get_object_or_404(Question, pk=question_id)
get_object_or_404(Answer, pk=answer_id)
if not question.allows_unsolve(request.user):
raise PermissionDenied
if (question.creator != request.user and
not request.user.has_perm('questions.change_solution')):
return HttpResponseForbidden()
question.solution = None
question.save()
question.remove_metadata('solver_id')
statsd.incr('questions.unsolve')
messages.add_message(request, messages.SUCCESS,
_("The solution was undone successfully."))
return HttpResponseRedirect(question.get_absolute_url())
@require_POST
@csrf_exempt
@ratelimit('question-vote', '10/d')
def question_vote(request, question_id):
"""I have this problem too."""
question = get_object_or_404(Question, pk=question_id, is_spam=False)
if not question.editable:
raise PermissionDenied
if not question.has_voted(request):
vote = QuestionVote(question=question)
if request.user.is_authenticated():
vote.creator = request.user
else:
vote.anonymous_id = request.anonymous.anonymous_id
if not request.limited:
vote.save()
if 'referrer' in request.REQUEST:
referrer = request.REQUEST.get('referrer')
vote.add_metadata('referrer', referrer)
if referrer == 'search' and 'query' in request.REQUEST:
vote.add_metadata('query', request.REQUEST.get('query'))
ua = request.META.get('HTTP_USER_AGENT')
if ua:
vote.add_metadata('ua', ua)
statsd.incr('questions.votes.question')
if request.is_ajax():
tmpl = 'questions/includes/question_vote_thanks.html'
form = _init_watch_form(request)
html = render_to_string(tmpl, {
'question': question,
'user': request.user,
'watch_form': form,
})
return HttpResponse(json.dumps({
'html': html,
'ignored': request.limited
}))
return HttpResponseRedirect(question.get_absolute_url())
@csrf_exempt
@ratelimit('answer-vote', '10/d')
def answer_vote(request, question_id, answer_id):
"""Vote for Helpful/Not Helpful answers"""
answer = get_object_or_404(Answer, pk=answer_id, question=question_id,
is_spam=False, question__is_spam=False)
if not answer.question.editable:
raise PermissionDenied
if request.limited:
if request.is_ajax():
return HttpResponse(json.dumps({'ignored': True}))
else:
return HttpResponseRedirect(answer.get_absolute_url())
if not answer.has_voted(request):
vote = AnswerVote(answer=answer)
if 'helpful' in request.REQUEST:
vote.helpful = True
message = _('Glad to hear it!')
else:
message = _('Sorry to hear that.')
if request.user.is_authenticated():
vote.creator = request.user
else:
vote.anonymous_id = request.anonymous.anonymous_id
vote.save()
if 'referrer' in request.REQUEST:
referrer = request.REQUEST.get('referrer')
vote.add_metadata('referrer', referrer)
if referrer == 'search' and 'query' in request.REQUEST:
vote.add_metadata('query', request.REQUEST.get('query'))
ua = request.META.get('HTTP_USER_AGENT')
if ua:
vote.add_metadata('ua', ua)
statsd.incr('questions.votes.answer')
else:
message = _('You already voted on this reply.')
if request.is_ajax():
return HttpResponse(json.dumps({'message': message}))
return HttpResponseRedirect(answer.get_absolute_url())
@permission_required('questions.tag_question')
def add_tag(request, question_id):
"""Add a (case-insensitive) tag to question.
If the question already has the tag, do nothing.
"""
# If somebody hits Return in the address bar after provoking an error from
# the add form, nicely send them back to the question:
if request.method == 'GET':
return HttpResponseRedirect(
reverse('questions.details', args=[question_id]))
try:
question, canonical_name = _add_tag(request, question_id)
except Tag.DoesNotExist:
template_data = _answers_data(request, question_id)
template_data['tag_adding_error'] = UNAPPROVED_TAG
template_data['tag_adding_value'] = request.POST.get('tag-name', '')
return render(
request, 'questions/question_details.html', template_data)
if canonical_name: # success
question.clear_cached_tags()
return HttpResponseRedirect(
reverse('questions.details', args=[question_id]))
# No tag provided
template_data = _answers_data(request, question_id)
template_data['tag_adding_error'] = NO_TAG
return render(request, 'questions/question_details.html', template_data)
@permission_required('questions.tag_question')
@require_POST
def add_tag_async(request, question_id):
"""Add a (case-insensitive) tag to question asyncronously. Return empty.
If the question already has the tag, do nothing.
"""
try:
question, canonical_name = _add_tag(request, question_id)
except Tag.DoesNotExist:
return HttpResponse(json.dumps({'error': unicode(UNAPPROVED_TAG)}),
content_type='application/json',
status=400)
if canonical_name:
question.clear_cached_tags()
tag = Tag.objects.get(name=canonical_name)
tag_url = urlparams(reverse(
'questions.list', args=[question.product_slug]), tagged=tag.slug)
data = {'canonicalName': canonical_name,
'tagUrl': tag_url}
return HttpResponse(json.dumps(data),
content_type='application/json')
return HttpResponse(json.dumps({'error': unicode(NO_TAG)}),
content_type='application/json',
status=400)
@permission_required('questions.tag_question')
@require_POST
def remove_tag(request, question_id):
"""Remove a (case-insensitive) tag from question.
Expects a POST with the tag name embedded in a field name, like
remove-tag-tagNameHere. If question doesn't have that tag, do nothing.
"""
prefix = 'remove-tag-'
names = [k for k in request.POST if k.startswith(prefix)]
if names:
name = names[0][len(prefix):]
question = get_object_or_404(Question, pk=question_id)
question.tags.remove(name)
question.clear_cached_tags()
return HttpResponseRedirect(
reverse('questions.details', args=[question_id]))
@permission_required('questions.tag_question')
@require_POST
def remove_tag_async(request, question_id):
"""Remove a (case-insensitive) tag from question.
If question doesn't have that tag, do nothing. Return value is JSON.
"""
name = request.POST.get('name')
if name:
question = get_object_or_404(Question, pk=question_id)
question.tags.remove(name)
question.clear_cached_tags()
return HttpResponse('{}', content_type='application/json')
return HttpResponseBadRequest(json.dumps({'error': unicode(NO_TAG)}),
content_type='application/json')
@permission_required('flagit.can_moderate')
@require_POST
def mark_spam(request):
"""Mark a question or an answer as spam"""
if request.POST.get('question_id'):
question_id = request.POST.get('question_id')
obj = get_object_or_404(Question, pk=question_id)
else:
answer_id = request.POST.get('answer_id')
obj = get_object_or_404(Answer, pk=answer_id)
question_id = obj.question.id
obj.mark_as_spam(request.user)
return HttpResponseRedirect(reverse('questions.details',
kwargs={'question_id': question_id}))
@permission_required('flagit.can_moderate')
@require_POST
def unmark_spam(request):
"""Mark a question or an answer as spam"""
if request.POST.get('question_id'):
question_id = request.POST.get('question_id')
obj = get_object_or_404(Question, pk=question_id)
else:
answer_id = request.POST.get('answer_id')
obj = get_object_or_404(Answer, pk=answer_id)
question_id = obj.question.id
obj.is_spam = False
obj.marked_as_spam = None
obj.marked_as_spam_by = None
obj.save()
return HttpResponseRedirect(reverse('questions.details',
kwargs={'question_id': question_id}))
@login_required
def delete_question(request, question_id):
"""Delete a question"""
question = get_object_or_404(Question, pk=question_id)
if not question.allows_delete(request.user):
raise PermissionDenied
if request.method == 'GET':
# Render the confirmation page
return render(request, 'questions/confirm_question_delete.html', {
'question': question})
# Capture the product slug to build the questions.list url below.
product = question.product_slug
# Handle confirm delete form POST
log.warning('User %s is deleting question with id=%s' %
(request.user, question.id))
question.delete()
statsd.incr('questions.delete')
return HttpResponseRedirect(reverse('questions.list', args=[product]))
@login_required
def delete_answer(request, question_id, answer_id):
"""Delete an answer"""
answer = get_object_or_404(Answer, pk=answer_id, question=question_id)
if not answer.allows_delete(request.user):
raise PermissionDenied
if request.method == 'GET':
# Render the confirmation page
return render(request, 'questions/confirm_answer_delete.html', {
'answer': answer})
# Handle confirm delete form POST
log.warning('User %s is deleting answer with id=%s' %
(request.user, answer.id))
answer.delete()
statsd.incr('questions.delete_answer')
return HttpResponseRedirect(reverse('questions.details',
args=[question_id]))
@require_POST
@login_required
def lock_question(request, question_id):
"""Lock or unlock a question"""
question = get_object_or_404(Question, pk=question_id)
if not question.allows_lock(request.user):
raise PermissionDenied
question.is_locked = not question.is_locked
log.info("User %s set is_locked=%s on question with id=%s " %
(request.user, question.is_locked, question.id))
question.save()
if question.is_locked:
statsd.incr('questions.lock')
else:
statsd.incr('questions.unlock')
return HttpResponseRedirect(question.get_absolute_url())
@require_POST
@login_required
def archive_question(request, question_id):
"""Archive or unarchive a question"""
question = get_object_or_404(Question, pk=question_id)
if not question.allows_archive(request.user):
raise PermissionDenied
question.is_archived = not question.is_archived
log.info("User %s set is_archived=%s on question with id=%s " %
(request.user, question.is_archived, question.id))
question.save()
return HttpResponseRedirect(question.get_absolute_url())
@login_required
def edit_answer(request, question_id, answer_id):
"""Edit an answer."""
answer = get_object_or_404(Answer, pk=answer_id, question=question_id)
answer_preview = None
if not answer.allows_edit(request.user):
raise PermissionDenied
# NOJS: upload images, if any
upload_imageattachment(request, answer)
if request.method == 'GET':
form = AnswerForm({'content': answer.content})
return render(request, 'questions/edit_answer.html', {
'form': form, 'answer': answer})
form = AnswerForm(request.POST)
if form.is_valid():
answer.content = form.cleaned_data['content']
answer.updated_by = request.user
if 'preview' in request.POST:
answer.updated = datetime.now()
answer_preview = answer
else:
log.warning('User %s is editing answer with id=%s' %
(request.user, answer.id))
answer.save()
return HttpResponseRedirect(answer.get_absolute_url())
return render(request, 'questions/edit_answer.html', {
'form': form, 'answer': answer,
'answer_preview': answer_preview})
@require_POST
@anonymous_csrf
def watch_question(request, question_id):
"""Start watching a question for replies or solution."""
question = get_object_or_404(Question, pk=question_id, is_spam=False)
form = WatchQuestionForm(request.user, request.POST)
# Process the form
msg = None
if form.is_valid():
user_or_email = (request.user if request.user.is_authenticated()
else form.cleaned_data['email'])
try:
if form.cleaned_data['event_type'] == 'reply':
QuestionReplyEvent.notify(user_or_email, question)
else:
QuestionSolvedEvent.notify(user_or_email, question)
statsd.incr('questions.watches.new')
except ActivationRequestFailed:
msg = _('Could not send a message to that email address.')
# Respond to ajax request
if request.is_ajax():
if form.is_valid():
msg = msg or (_('You will be notified of updates by email.') if
request.user.is_authenticated() else
_('You should receive an email shortly '
'to confirm your subscription.'))
return HttpResponse(json.dumps({'message': msg}))
if request.POST.get('from_vote'):
tmpl = 'questions/includes/question_vote_thanks.html'
else:
tmpl = 'questions/includes/email_subscribe.html'
html = render_to_string(tmpl, {'question': question, 'watch_form': form})
return HttpResponse(json.dumps({'html': html}))
if msg:
messages.add_message(request, messages.ERROR, msg)
return HttpResponseRedirect(question.get_absolute_url())
@require_POST
@login_required
def unwatch_question(request, question_id):
"""Stop watching a question."""
question = get_object_or_404(Question, pk=question_id)
QuestionReplyEvent.stop_notifying(request.user, question)
QuestionSolvedEvent.stop_notifying(request.user, question)
return HttpResponseRedirect(question.get_absolute_url())
@require_GET
def unsubscribe_watch(request, watch_id, secret):
"""Stop watching a question, for anonymous users."""
watch = get_object_or_404(Watch, pk=watch_id)
question = watch.content_object
success = False
if watch.secret == secret and isinstance(question, Question):
user_or_email = watch.user or watch.email
QuestionReplyEvent.stop_notifying(user_or_email, question)
QuestionSolvedEvent.stop_notifying(user_or_email, question)
success = True
return render(request, 'questions/unsubscribe_watch.html', {
'question': question, 'success': success})
@require_GET
def activate_watch(request, watch_id, secret):
"""Activate watching a question."""
watch = get_object_or_404(Watch, pk=watch_id)
question = watch.content_object
if watch.secret == secret and isinstance(question, Question):
watch.activate().save()
statsd.incr('questions.watches.activate')
return render(request, 'questions/activate_watch.html', {
'question': question,
'unsubscribe_url': reverse('questions.unsubscribe',
args=[watch_id, secret]),
'is_active': watch.is_active})
@login_required
@require_POST
def answer_preview_async(request):
"""Create an HTML fragment preview of the posted wiki syntax."""
statsd.incr('questions.preview')
answer = Answer(creator=request.user,
content=request.POST.get('content', ''))
template = 'questions/includes/answer_preview.html'
return render(request, template, {'answer_preview': answer})
@mobile_template('questions/{mobile/}marketplace.html')
def marketplace(request, template=None):
"""AAQ landing page for Marketplace."""
return render(request, template, {
'categories': MARKETPLACE_CATEGORIES})
ZENDESK_ERROR_MESSAGE = _lazy(
u'There was an error submitting the ticket. '
u'Please try again later.')
@anonymous_csrf
@mobile_template('questions/{mobile/}marketplace_category.html')
def marketplace_category(request, category_slug, template=None):
"""AAQ category page. Handles form post that submits ticket."""
try:
category_name = MARKETPLACE_CATEGORIES[category_slug]
except KeyError:
raise Http404
error_message = None
if request.method == 'GET':
form = MarketplaceAaqForm(request.user)
else:
form = MarketplaceAaqForm(request.user, request.POST)
if form.is_valid():
# Submit ticket
try:
form.submit_ticket()
return HttpResponseRedirect(
reverse('questions.marketplace_aaq_success'))
except ZendeskError:
error_message = ZENDESK_ERROR_MESSAGE
return render(request, template, {
'category': category_name,
'category_slug': category_slug,
'categories': MARKETPLACE_CATEGORIES,
'form': form,
'error_message': error_message})
@anonymous_csrf
@mobile_template('questions/{mobile/}marketplace_refund.html')
def marketplace_refund(request, template):
"""Form page that handles refund requests for Marketplace."""
error_message = None
if request.method == 'GET':
form = MarketplaceRefundForm(request.user)
else:
form = MarketplaceRefundForm(request.user, request.POST)
if form.is_valid():
# Submit ticket
try:
form.submit_ticket()
return HttpResponseRedirect(
reverse('questions.marketplace_aaq_success'))
except ZendeskError:
error_message = ZENDESK_ERROR_MESSAGE
return render(request, template, {
'form': form,
'error_message': error_message})
@anonymous_csrf
@mobile_template('questions/{mobile/}marketplace_developer_request.html')
def marketplace_developer_request(request, template):
"""Form page that handles developer requests for Marketplace."""
error_message = None
if request.method == 'GET':
form = MarketplaceDeveloperRequestForm(request.user)
else:
form = MarketplaceDeveloperRequestForm(request.user, request.POST)
if form.is_valid():
# Submit ticket
try:
form.submit_ticket()
return HttpResponseRedirect(
reverse('questions.marketplace_aaq_success'))
except ZendeskError:
error_message = ZENDESK_ERROR_MESSAGE
return render(request, template, {
'form': form,
'error_message': error_message})
@mobile_template('questions/{mobile/}marketplace_success.html')
def marketplace_success(request, template=None):
"""Confirmation of ticket submitted successfully."""
return render(request, template)
def stats_topic_data(bucket_days, start, end, locale=None, product=None):
"""Gets a zero filled histogram for each question topic.
Uses elastic search.
"""
search = Sphilastic(QuestionMappingType)
bucket = 24 * 60 * 60 * bucket_days
# datetime is a subclass of date.
if isinstance(start, date):
start = int(time.mktime(start.timetuple()))
if isinstance(end, date):
end = int(time.mktime(end.timetuple()))
f = F(model='questions_question')
f &= F(created__gt=start)
f &= F(created__lt=end)
if locale:
f &= F(question_locale=locale)
if product:
f &= F(product=product.slug)
topics = Topic.objects.values('slug', 'title')
facets = {}
# TODO: If we change to using datetimes in ES, 'histogram' below
# should change to 'date_histogram'.
for topic in topics:
filters = search._process_filters([f & F(topic=topic['slug'])])
facets[topic['title']] = {
'histogram': {'interval': bucket, 'field': 'created'},
'facet_filter': filters
}
# Get some sweet histogram data.
search = search.facet_raw(**facets).values_dict()
try:
histograms_data = search.facet_counts()
except ES_EXCEPTIONS:
return []
# The data looks like this right now:
# {
# 'topic-1': [{'key': 1362774285, 'count': 100}, ...],
# 'topic-2': [{'key': 1362774285, 'count': 100}, ...],
# }
# Massage the data to achieve 2 things:
# - All points between the earliest and the latest values have data,
# at a resolution of 1 day.
# - It is in a format usable by k.Graph.
# - ie: [{"created": 1362774285, 'topic-1': 10, 'topic-2': 20}, ...]
for series in histograms_data.itervalues():
if series['entries']:
earliest_point = series['entries'][0]['key']
break
else:
return []
latest_point = earliest_point
interim_data = {}
for key, data in histograms_data.iteritems():
if not data:
continue
for point in data:
timestamp = point['key']
value = point['count']
earliest_point = min(earliest_point, timestamp)
latest_point = max(latest_point, timestamp)
datum = interim_data.get(timestamp, {'date': timestamp})
datum[key] = value
interim_data[timestamp] = datum
# Interim data is now like
# {
# 1362774285: {'date': 1362774285, 'topic-1': 100, 'topic-2': 200},
# }
# Zero fill the interim data.
timestamp = earliest_point
while timestamp <= latest_point:
datum = interim_data.get(timestamp, {'date': timestamp})
for key in histograms_data.iterkeys():
if key not in datum:
datum[key] = 0
        interim_data[timestamp] = datum
        timestamp += bucket
# The keys are irrelevant, and the values are exactly what we want.
return interim_data.values()
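# Illustrative sketch (hypothetical numbers) of the shape returned above for
# bucket_days=1 and two topics, after zero filling:
#
#     [
#         {'date': 1362700800, 'Topic A': 4, 'Topic B': 0},
#         {'date': 1362787200, 'Topic A': 0, 'Topic B': 2},
#     ]
#
# Every bucket between the earliest and latest facet timestamps is present,
# and missing topic counts are filled with zeros so k.Graph receives a
# rectangular data set.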
def metrics(request, locale_code=None):
"""The Support Forum metrics dashboard."""
template = 'questions/metrics.html'
product = request.GET.get('product')
if product:
product = get_object_or_404(Product, slug=product)
form = StatsForm(request.GET)
if form.is_valid():
bucket_days = form.cleaned_data['bucket']
start = form.cleaned_data['start']
end = form.cleaned_data['end']
else:
bucket_days = 1
start = date.today() - timedelta(days=30)
end = date.today()
graph_data = stats_topic_data(bucket_days, start, end, locale_code, product)
for group in graph_data:
for name, count in group.items():
if count == 0:
del group[name]
data = {
'graph': graph_data,
'form': form,
'current_locale': locale_code,
'product': product,
'products': Product.objects.filter(visible=True),
}
return render(request, template, data)
@require_POST
@permission_required('users.screen_share')
def screen_share(request, question_id):
question = get_object_or_404(Question, pk=question_id, is_spam=False)
if not question.allows_new_answer(request.user):
raise PermissionDenied
content = _(u"I invited {user} to a screen sharing session, "
u"and I'll give an update here once we are done.")
answer = Answer(question=question, creator=request.user,
content=content.format(user=display_name(question.creator)))
answer.save()
statsd.incr('questions.answer')
question.add_metadata(screen_sharing='true')
if Setting.get_for_user(request.user, 'questions_watch_after_reply'):
QuestionReplyEvent.notify(request.user, question)
message = render_to_string('questions/message/screen_share.ltxt', {
'asker': display_name(question.creator), 'contributor': display_name(request.user)})
return HttpResponseRedirect('%s?to=%s&message=%s' % (reverse('messages.new'),
question.creator.username,
message))
def _search_suggestions(request, text, locale, product_slugs):
"""Return an iterable of the most relevant wiki pages and questions.
:arg text: full text to search on
:arg locale: locale to limit to
:arg product_slugs: list of product slugs to filter articles on
(["desktop", "mobile", ...])
Items are dicts of::
{
'type':
'search_summary':
'title':
'url':
'object':
}
:returns: up to 3 wiki pages, then up to 3 questions.
"""
# TODO: this can be reworked to pull data from ES rather than
# hit the db.
question_s = QuestionMappingType.search()
wiki_s = DocumentMappingType.search()
# Max number of search results per type.
WIKI_RESULTS = QUESTIONS_RESULTS = 3
default_categories = settings.SEARCH_DEFAULT_CATEGORIES
# Apply product filters
if product_slugs:
wiki_s = wiki_s.filter(product__in=product_slugs)
question_s = question_s.filter(product__in=product_slugs)
results = []
try:
# Search for relevant KB documents.
query = dict(('%s__match' % field, text)
for field in DocumentMappingType.get_query_fields())
query.update(dict(('%s__match_phrase' % field, text)
for field in DocumentMappingType.get_query_fields()))
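        # Build per-field match and match_phrase clauses; they are OR-ed together in the query below.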
query = es_query_with_analyzer(query, locale)
filter = F()
filter |= F(document_locale=locale)
filter |= F(document_locale=settings.WIKI_DEFAULT_LANGUAGE)
filter &= F(document_category__in=default_categories)
filter &= F(document_is_archived=False)
raw_results = (
wiki_s.filter(filter)
.query(or_=query)
.values_list('id')[:WIKI_RESULTS])
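        # Each raw result is a tuple whose single element is a list of ids, hence the double indexing below.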
raw_results = [result[0][0] for result in raw_results]
for id_ in raw_results:
try:
doc = (Document.objects.select_related('current_revision')
.get(pk=id_))
results.append({
'search_summary': clean_excerpt(
doc.current_revision.summary),
'url': doc.get_absolute_url(),
'title': doc.title,
'type': 'document',
'object': doc,
})
except Document.DoesNotExist:
pass
# Search for relevant questions.
query = dict(('%s__match' % field, text)
for field in QuestionMappingType.get_query_fields())
query.update(dict(('%s__match_phrase' % field, text)
for field in QuestionMappingType.get_query_fields()))
# Filter questions by language. Questions should be either in English
# or in the locale's language. This is because we have some questions
# marked English that are really in other languages. The assumption
# being that if a native speakers submits a query in given language,
# the items that are written in that language will automatically match
# better, so questions incorrectly marked as english can be found too.
question_filter = F(question_locale=locale)
question_filter |= F(question_locale=settings.WIKI_DEFAULT_LANGUAGE)
question_filter &= F(question_is_archived=False)
raw_results = (question_s.query(or_=query)
.filter(question_filter)
.values_list('id')[:QUESTIONS_RESULTS])
raw_results = [result[0][0] for result in raw_results]
for id_ in raw_results:
try:
q = Question.objects.get(pk=id_)
results.append({
'search_summary': clean_excerpt(q.content[0:500]),
'url': q.get_absolute_url(),
'title': q.title,
'type': 'question',
'object': q,
'last_updated': q.updated,
'is_solved': q.is_solved,
'num_answers': q.num_answers,
'num_votes': q.num_votes,
'num_votes_past_week': q.num_votes_past_week
})
except Question.DoesNotExist:
pass
except ES_EXCEPTIONS as exc:
statsd.incr('questions.suggestions.eserror')
log.debug(exc)
return results
def _answers_data(request, question_id, form=None, watch_form=None,
answer_preview=None):
"""Return a map of the minimal info necessary to draw an answers page."""
question = get_object_or_404(Question, pk=question_id)
answers_ = question.answers.all()
if not request.user.has_perm('flagit.can_moderate'):
answers_ = answers_.filter(is_spam=False)
if not request.MOBILE:
answers_ = paginate(request, answers_,
per_page=config.ANSWERS_PER_PAGE)
feed_urls = ((reverse('questions.answers.feed',
kwargs={'question_id': question_id}),
AnswersFeed().title(question)),)
frequencies = dict(FREQUENCY_CHOICES)
is_watching_question = (
request.user.is_authenticated() and (
QuestionReplyEvent.is_notifying(request.user, question) or
QuestionSolvedEvent.is_notifying(request.user, question)))
return {'question': question,
'answers': answers_,
'form': form or AnswerForm(),
'answer_preview': answer_preview,
'watch_form': watch_form or _init_watch_form(request, 'reply'),
'feeds': feed_urls,
'frequencies': frequencies,
'is_watching_question': is_watching_question,
'can_tag': request.user.has_perm('questions.tag_question'),
'can_create_tags': request.user.has_perm('taggit.add_tag')}
def _add_tag(request, question_id):
"""Add a named tag to a question, creating it first if appropriate.
Tag name (case-insensitive) must be in request.POST['tag-name'].
If there is no such tag and the user is not allowed to make new tags, raise
Tag.DoesNotExist. If no tag name is provided, return None. Otherwise,
return the canonicalized tag name.
"""
tag_name = request.POST.get('tag-name', '').strip()
if tag_name:
question = get_object_or_404(Question, pk=question_id)
try:
canonical_name = add_existing_tag(tag_name, question.tags)
except Tag.DoesNotExist:
if request.user.has_perm('taggit.add_tag'):
question.tags.add(tag_name) # implicitly creates if needed
canonical_name = tag_name
else:
raise
# Fire off the tag_added signal.
tag_added.send(sender=Question, question_id=question.id,
tag_name=canonical_name)
return question, canonical_name
return None, None
# Initialize a WatchQuestionForm
def _init_watch_form(request, event_type='solution'):
initial = {'event_type': event_type}
return WatchQuestionForm(request.user, initial=initial)
| brittanystoroz/kitsune | kitsune/questions/views.py | Python | bsd-3-clause | 69,523 |
import gzip
import shutil
import pathlib
import subprocess
from urllib.request import urlretrieve
from clldutils.db import DB
from clldutils.path import md5
import attr
@attr.s
class Release:
tag = attr.ib()
version = attr.ib()
cdstar_oid = attr.ib()
sql_dump_md5 = attr.ib()
sql_dump_url = attr.ib()
@classmethod
def from_config(cls, cfg, section):
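        # The section name becomes the release tag; the remaining fields
        # (version, cdstar_oid, sql_dump_md5, sql_dump_url) are read from cfg[section].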
return cls(tag=section, **cfg[section])
def dump_fname(self, zipped=False):
return pathlib.Path('glottolog-{0}.sql{1}'.format(self.version, '.gz' if zipped else ''))
def download_sql_dump(self, log):
target = self.dump_fname(zipped=True)
log.info('retrieving {0}'.format(self.sql_dump_url))
urlretrieve(self.sql_dump_url, str(target))
assert md5(target) == self.sql_dump_md5
unpacked = target.with_suffix('')
with gzip.open(str(target)) as f, unpacked.open('wb') as u:
shutil.copyfileobj(f, u)
target.unlink()
log.info('SQL dump for Glottolog release {0} written to {1}'.format(self.version, unpacked))
def load_sql_dump(self, log):
dump = self.dump_fname()
dbname = dump.stem
db = DB('postgresql://postgres@/{0.stem}'.format(dump))
if db.exists():
log.warn('db {0} exists! Drop first to recreate.'.format(dump.stem))
else:
if not dump.exists():
self.download_sql_dump(log)
db.create()
subprocess.check_call(['psql', '-d', dbname, '-f', str(dump)])
log.info('db {0} created'.format(dbname))
| clld/glottolog3 | glottolog3/releases.py | Python | mit | 1,597 |
import operator
# A slightly efficient superset of primes.
def PrimesPlus():
yield 2
yield 3
i = 5
while True:
yield i
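    # candidates step through 5, 7, 11, 13, ... i.e. numbers of the form 6k-1 and 6k+1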
if i % 6 == 1:
i += 2
i += 2
# Returns a dict d with n = product p ^ d[p]
def GetPrimeDecomp(n):
d = {}
primes = PrimesPlus()
for p in primes:
while n % p == 0:
n /= p
d[p] = d.setdefault(p, 0) + 1
if n == 1:
return d
def divisors(n):
d = GetPrimeDecomp(n)
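  # if n = p1^a1 * p2^a2 * ..., the number of divisors is (a1+1)*(a2+1)*...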
powers_plus = map(lambda x: x+1, d.values())
return reduce(operator.mul, powers_plus, 1)
def find_triangle_number(estimate, min_divisors):
curr_divisors = divisors(estimate)
while curr_divisors < min_divisors:
# Estimate the distance to the
# triangle number with more than
# min_divisors
factor = min_divisors / curr_divisors +1
print "Estimate", estimate
print "Divisors", curr_divisors
print "Factor", factor
last = estimate
estimate *= factor
curr_divisors = divisors(estimate)
print estimate
print last
for n in xrange(last+1, estimate+1):
print n
if divisors(n) >= min_divisors:
break
print "Estimate", n
print "Divisors", divisors(n)
find_triangle_number(393600000, 500)
| mre/the-coding-interview | problems/euler/12/triangle_alt.py | Python | mit | 1,258 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2014 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Plugin to parse the OLECF summary/document summary information items."""
from plaso.lib import event
from plaso.lib import eventdata
from plaso.parsers.olecf_plugins import interface
class OleCfSummaryInfoEvent(event.FiletimeEvent):
"""Convenience class for an OLECF Summary info event."""
DATA_TYPE = 'olecf:summary_info'
def __init__(self, timestamp, usage, attributes):
"""Initializes the event.
Args:
timestamp: The FILETIME timestamp value.
usage: The usage string, describing the timestamp value.
attributes: A dict object containing all extracted attributes.
"""
super(OleCfSummaryInfoEvent, self).__init__(
timestamp, usage)
self.name = u'Summary Information'
for attribute_name, attribute_value in attributes.iteritems():
setattr(self, attribute_name, attribute_value)
# TODO: Move this class to a higher level (to the interface)
# so the these functions can be shared by other plugins.
class OleCfSummaryInfo(object):
"""An OLECF Summary Info object."""
_CLASS_IDENTIFIER = 'f29f85e0-4ff9-1068-ab91-08002b27b3d9'
_PROPERTY_NAMES_INT32 = {
0x000e: 'number_of_pages', # PIDSI_PAGECOUNT
0x000f: 'number_of_words', # PIDSI_WORDCOUNT
0x0010: 'number_of_characters', # PIDSI_CHARCOUNT
0x0013: 'security', # PIDSI_SECURITY
}
_PROPERTY_NAMES_STRING = {
0x0002: 'title', # PIDSI_TITLE
0x0003: 'subject', # PIDSI_SUBJECT
0x0004: 'author', # PIDSI_AUTHOR
0x0005: 'keywords', # PIDSI_KEYWORDS
0x0006: 'comments', # PIDSI_COMMENTS
0x0007: 'template', # PIDSI_TEMPLATE
0x0008: 'last_saved_by', # PIDSI_LASTAUTHOR
0x0009: 'revision_number', # PIDSI_REVNUMBER
0x0012: 'application', # PIDSI_APPNAME
}
PIDSI_CODEPAGE = 0x0001
PIDSI_EDITTIME = 0x000a
PIDSI_LASTPRINTED = 0x000b
PIDSI_CREATE_DTM = 0x000c
PIDSI_LASTSAVE_DTM = 0x000d
PIDSI_THUMBNAIL = 0x0011
def __init__(self, olecf_item, root_creation_time, root_modification_time):
"""Initialize the OLECF summary object.
Args:
olecf_item: The OLECF item (instance of pyolecf.property_set_stream).
root_creation_time: The creation time of the root OLECF item.
root_modification_time: The modification time of the root OLECF item.
"""
super(OleCfSummaryInfo, self).__init__()
self._root_creation_time = root_creation_time
self._root_modification_time = root_modification_time
self._events = []
self.attributes = {}
self._InitFromPropertySet(olecf_item.set)
def _InitFromPropertySet(self, property_set):
"""Initializes the object from a property set.
Args:
property_set: The OLECF property set (pyolecf.property_set).
"""
# Combine the values of multiple property sections
# but do not override properties that are already set.
for property_section in property_set.sections:
if property_section.class_identifier != self._CLASS_IDENTIFIER:
continue
for property_value in property_section.properties:
self._InitFromPropertyValue(property_value)
def _InitFromPropertyValue(self, property_value):
"""Initializes the object from a property value.
Args:
property_value: The OLECF property value (pyolecf.property_value).
"""
if property_value.type == interface.OleDefinitions.VT_I2:
self._InitFromPropertyValueTypeInt16(property_value)
elif property_value.type == interface.OleDefinitions.VT_I4:
self._InitFromPropertyValueTypeInt32(property_value)
elif (property_value.type == interface.OleDefinitions.VT_LPSTR or
property_value.type == interface.OleDefinitions.VT_LPWSTR):
self._InitFromPropertyValueTypeString(property_value)
elif property_value.type == interface.OleDefinitions.VT_FILETIME:
self._InitFromPropertyValueTypeFiletime(property_value)
def _InitFromPropertyValueTypeInt16(self, property_value):
"""Initializes the object from a 16-bit int type property value.
Args:
property_value: The OLECF property value (pyolecf.property_value
of type VT_I2).
"""
if property_value.identifier == self.PIDSI_CODEPAGE:
# TODO: can the codepage vary per property section?
# And is it needed to interpret the ASCII strings?
# codepage = property_value.data_as_integer
pass
def _InitFromPropertyValueTypeInt32(self, property_value):
"""Initializes the object from a 32-bit int type property value.
Args:
property_value: The OLECF property value (pyolecf.property_value
of type VT_I4).
"""
property_name = self._PROPERTY_NAMES_INT32.get(
property_value.identifier, None)
if property_name and not property_name in self.attributes:
self.attributes[property_name] = property_value.data_as_integer
def _InitFromPropertyValueTypeString(self, property_value):
"""Initializes the object from a string type property value.
Args:
property_value: The OLECF property value (pyolecf.property_value
of type VT_LPSTR or VT_LPWSTR).
"""
property_name = self._PROPERTY_NAMES_STRING.get(
property_value.identifier, None)
if property_name and not property_name in self.attributes:
self.attributes[property_name] = property_value.data_as_string
def _InitFromPropertyValueTypeFiletime(self, property_value):
"""Initializes the object from a filetime type property value.
Args:
property_value: The OLECF property value (pyolecf.property_value
of type VT_FILETIME).
"""
if property_value.identifier == self.PIDSI_LASTPRINTED:
self._events.append(
(property_value.data_as_integer, 'Document Last Printed Time'))
elif property_value.identifier == self.PIDSI_CREATE_DTM:
self._events.append(
(property_value.data_as_integer, 'Document Creation Time'))
elif property_value.identifier == self.PIDSI_LASTSAVE_DTM:
self._events.append(
(property_value.data_as_integer, 'Document Last Save Time'))
elif property_value.identifier == self.PIDSI_EDITTIME:
# property_name = 'total_edit_time'
# TODO: handle duration.
pass
def GetEventObjects(self):
"""Yields extracted event objects."""
for timestamp, timestamp_description in self._events:
yield OleCfSummaryInfoEvent(
timestamp, timestamp_description, self.attributes)
if self._root_creation_time:
yield OleCfSummaryInfoEvent(
self._root_creation_time, eventdata.EventTimestamp.CREATION_TIME,
self.attributes)
if self._root_modification_time:
yield OleCfSummaryInfoEvent(
self._root_modification_time,
eventdata.EventTimestamp.MODIFICATION_TIME, self.attributes)
class OleCfDocumentSummaryInfoEvent(event.FiletimeEvent):
"""Convenience class for an OLECF Document Summary info event."""
DATA_TYPE = 'olecf:document_summary_info'
_CLASS_IDENTIFIER = 'd5cdd502-2e9c-101b-9397-08002b2cf9ae'
_PROPERTY_NAMES_BOOL = {
0x0013: 'shared_document', # PIDDSI_SHAREDDOC
}
_PROPERTY_NAMES_INT32 = {
0x0004: 'number_of_bytes', # PIDDSI_BYTECOUNT
0x0005: 'number_of_lines', # PIDDSI_LINECOUNT
0x0006: 'number_of_paragraphs', # PIDDSI_PARCOUNT
0x0007: 'number_of_slides', # PIDDSI_SLIDECOUNT
0x0008: 'number_of_notes', # PIDDSI_NOTECOUNT
0x0009: 'number_of_hidden_slides', # PIDDSI_HIDDENCOUNT
0x000a: 'number_of_clips', # PIDDSI_MMCLIPCOUNT
0x0011: 'number_of_characters_with_white_space', # PIDDSI_CCHWITHSPACES
0x0017: 'application_version', # PIDDSI_VERSION
}
_PROPERTY_NAMES_STRING = {
0x000e: 'manager', # PIDDSI_MANAGER
0x000f: 'company', # PIDDSI_COMPANY
0x001a: 'content_type', # PIDDSI_CONTENTTYPE
0x001b: 'content_status', # PIDDSI_CONTENTSTATUS
0x001c: 'language', # PIDDSI_LANGUAGE
0x001d: 'document_version', # PIDDSI_DOCVERSION
}
PIDDSI_CODEPAGE = 0x0001
PIDDSI_CATEGORY = 0x0002
PIDDSI_PRESFORMAT = 0x0003
PIDDSI_SCALE = 0x000b
PIDDSI_HEADINGPAIR = 0x000c
PIDDSI_DOCPARTS = 0x000d
PIDDSI_LINKSDIRTY = 0x0010
PIDDSI_VERSION = 0x0017
def __init__(self, timestamp, usage, olecf_item):
"""Initializes the event.
Args:
timestamp: The FILETIME timestamp value.
usage: The usage string, describing the timestamp value.
olecf_item: The OLECF item (pyolecf.property_set_stream).
"""
super(OleCfDocumentSummaryInfoEvent, self).__init__(
timestamp, usage)
self.name = u'Document Summary Information'
self._InitFromPropertySet(olecf_item.set)
def _InitFromPropertySet(self, property_set):
"""Initializes the event from a property set.
Args:
property_set: The OLECF property set (pyolecf.property_set).
"""
# Combine the values of multiple property sections
# but do not override properties that are already set.
for property_section in property_set.sections:
if property_section.class_identifier != self._CLASS_IDENTIFIER:
continue
for property_value in property_section.properties:
self._InitFromPropertyValue(property_value)
def _InitFromPropertyValue(self, property_value):
"""Initializes the event from a property value.
Args:
property_value: The OLECF property value (pyolecf.property_value).
"""
if property_value.type == interface.OleDefinitions.VT_I2:
self._InitFromPropertyValueTypeInt16(property_value)
elif property_value.type == interface.OleDefinitions.VT_I4:
self._InitFromPropertyValueTypeInt32(property_value)
elif property_value.type == interface.OleDefinitions.VT_BOOL:
self._InitFromPropertyValueTypeBool(property_value)
elif (property_value.type == interface.OleDefinitions.VT_LPSTR or
property_value.type == interface.OleDefinitions.VT_LPWSTR):
self._InitFromPropertyValueTypeString(property_value)
def _InitFromPropertyValueTypeInt16(self, property_value):
"""Initializes the event from a 16-bit int type property value.
Args:
property_value: The OLECF property value (pyolecf.property_value
of type VT_I2).
"""
if property_value.identifier == self.PIDDSI_CODEPAGE:
# TODO: can the codepage vary per property section?
# And is it needed to interpret the ASCII strings?
# codepage = property_value.data_as_integer
pass
def _InitFromPropertyValueTypeInt32(self, property_value):
"""Initializes the event from a 32-bit int type property value.
Args:
property_value: The OLECF property value (pyolecf.property_value
of type VT_I4).
"""
property_name = self._PROPERTY_NAMES_INT32.get(
property_value.identifier, None)
# The application version consists of 2 16-bit values that make up
# the version number. Where the upper 16-bit is the major number
# and the lower 16-bit the minor number.
if property_value.identifier == self.PIDDSI_VERSION:
application_version = property_value.data_as_integer
setattr(self, property_name, u'{0:d}.{1:d}'.format(
application_version >> 16, application_version & 0xffff))
elif property_name and not hasattr(self, property_name):
setattr(self, property_name, property_value.data_as_integer)
def _InitFromPropertyValueTypeBool(self, property_value):
"""Initializes the event from a boolean type property value.
Args:
property_value: The OLECF property value (pyolecf.property_value
of type VT_BOOL).
"""
property_name = self._PROPERTY_NAMES_BOOL.get(
property_value.identifier, None)
if property_name and not hasattr(self, property_name):
setattr(self, property_name, property_value.data_as_boolean)
def _InitFromPropertyValueTypeString(self, property_value):
"""Initializes the event from a string type property value.
Args:
property_value: The OLECF property value (pyolecf.property_value
of type VT_LPSTR or VT_LPWSTR).
"""
property_name = self._PROPERTY_NAMES_STRING.get(
property_value.identifier, None)
if property_name and not hasattr(self, property_name):
setattr(self, property_name, property_value.data_as_string)
class DocumentSummaryPlugin(interface.OlecfPlugin):
"""Plugin that parses DocumentSummary information from an OLECF file."""
NAME = 'olecf_document_summary'
REQUIRED_ITEMS = frozenset(['\005DocumentSummaryInformation'])
def GetEntries(self, root_item, items, **unused_kwargs):
"""Generate event based on the document summary item.
Args:
root_item: The root item of the OLECF file.
      items: A list of all OLECF items discovered in the root.
Yields:
Event objects (instance of OleCfDocumentSummaryInfoEvent).
"""
creation_time, modification_time = self.GetTimestamps(root_item)
for item in items:
if creation_time:
yield OleCfDocumentSummaryInfoEvent(
creation_time, eventdata.EventTimestamp.CREATION_TIME, item)
if modification_time:
yield OleCfDocumentSummaryInfoEvent(
modification_time, eventdata.EventTimestamp.MODIFICATION_TIME,
item)
class SummaryInfoPlugin(interface.OlecfPlugin):
"""Plugin that parses the SummaryInformation item from an OLECF file."""
NAME = 'olecf_summary'
REQUIRED_ITEMS = frozenset(['\005SummaryInformation'])
def GetEntries(self, root_item, items, **unused_kwargs):
"""Generate event based on the summary information item.
Args:
root_item: The root item of the OLECF file.
      items: A list of all OLECF items discovered in the root.
Yields:
Event objects (instance of OleCfSummaryInfoEvent).
"""
root_creation_time, root_modification_time = self.GetTimestamps(root_item)
for item in items:
summary_information_object = OleCfSummaryInfo(
item, root_creation_time, root_modification_time)
for event_object in summary_information_object.GetEventObjects():
yield event_object
| iwm911/plaso | plaso/parsers/olecf_plugins/summary.py | Python | apache-2.0 | 14,885 |
__author__ = 'guillaumediallo-mulliez'
| Guitoof/qnao | python/__init__.py | Python | gpl-3.0 | 39 |
from datetime import timedelta
import socket
import t
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
import logging
import tempfile
import shutil
import os
from gunicorn.config import Config
from gunicorn.instrument.statsd import Statsd
class TestException(Exception):
pass
class MockSocket(object):
"Pretend to be a UDP socket"
def __init__(self, failp):
self.failp = failp
self.msgs = [] # accumulate messages for later inspection
def send(self, msg):
if self.failp:
raise TestException("Should not interrupt the logger")
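        # Relay the payload through a short-lived AF_UNIX datagram socket pair so the
        # exact bytes "sent" can be inspected later through self.msgs.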
sock_dir = tempfile.mkdtemp()
sock_file = os.path.join(sock_dir, "test.sock")
server = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
client = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
try:
server.bind(sock_file)
client.connect(sock_file)
client.send(msg)
self.msgs.append(server.recv(1024))
finally:
client.close()
server.close()
shutil.rmtree(sock_dir)
def reset(self):
self.msgs = []
class MockResponse(object):
def __init__(self, status):
self.status = status
def test_statsd_fail():
"UDP socket fails"
logger = Statsd(Config())
logger.sock = MockSocket(True)
logger.info("No impact on logging")
logger.debug("No impact on logging")
logger.critical("No impact on logging")
logger.error("No impact on logging")
logger.warning("No impact on logging")
logger.exception("No impact on logging")
def test_instrument():
logger = Statsd(Config())
# Capture logged messages
sio = StringIO()
logger.error_log.addHandler(logging.StreamHandler(sio))
logger.sock = MockSocket(False)
# Regular message
logger.info("Blah", extra={"mtype": "gauge", "metric": "gunicorn.test", "value": 666})
t.eq(logger.sock.msgs[0], b"gunicorn.test:666|g")
t.eq(sio.getvalue(), "Blah\n")
logger.sock.reset()
# Only metrics, no logging
logger.info("", extra={"mtype": "gauge", "metric": "gunicorn.test", "value": 666})
t.eq(logger.sock.msgs[0], b"gunicorn.test:666|g")
t.eq(sio.getvalue(), "Blah\n") # log is unchanged
logger.sock.reset()
# Debug logging also supports metrics
logger.debug("", extra={"mtype": "gauge", "metric": "gunicorn.debug", "value": 667})
t.eq(logger.sock.msgs[0], b"gunicorn.debug:667|g")
t.eq(sio.getvalue(), "Blah\n") # log is unchanged
logger.sock.reset()
logger.critical("Boom")
t.eq(logger.sock.msgs[0], b"gunicorn.log.critical:1|c|@1.0")
logger.sock.reset()
logger.access(MockResponse("200 OK"), None, {}, timedelta(seconds=7))
t.eq(logger.sock.msgs[0], b"gunicorn.request.duration:7000.0|ms")
t.eq(logger.sock.msgs[1], b"gunicorn.requests:1|c|@1.0")
t.eq(logger.sock.msgs[2], b"gunicorn.request.status.200:1|c|@1.0")
def test_prefix():
c = Config()
c.set("statsd_prefix", "test.")
logger = Statsd(c)
logger.sock = MockSocket(False)
logger.info("Blah", extra={"mtype": "gauge", "metric": "gunicorn.test", "value": 666})
t.eq(logger.sock.msgs[0], b"test.gunicorn.test:666|g")
def test_prefix_no_dot():
c = Config()
c.set("statsd_prefix", "test")
logger = Statsd(c)
logger.sock = MockSocket(False)
logger.info("Blah", extra={"mtype": "gauge", "metric": "gunicorn.test", "value": 666})
t.eq(logger.sock.msgs[0], b"test.gunicorn.test:666|g")
def test_prefix_multiple_dots():
c = Config()
c.set("statsd_prefix", "test...")
logger = Statsd(c)
logger.sock = MockSocket(False)
logger.info("Blah", extra={"mtype": "gauge", "metric": "gunicorn.test", "value": 666})
t.eq(logger.sock.msgs[0], b"test.gunicorn.test:666|g")
def test_prefix_nested():
c = Config()
c.set("statsd_prefix", "test.asdf.")
logger = Statsd(c)
logger.sock = MockSocket(False)
logger.info("Blah", extra={"mtype": "gauge", "metric": "gunicorn.test", "value": 666})
t.eq(logger.sock.msgs[0], b"test.asdf.gunicorn.test:666|g")
| ccl0326/gunicorn | tests/test_010-statsd.py | Python | mit | 4,144 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('head', '0009_auto_20150417_1709'),
]
operations = [
migrations.AddField(
model_name='org',
name='color',
field=models.CharField(default='#002366', max_length=10),
preserve_default=False,
),
]
| ayys/bloodData | head/migrations/0010_org_color.py | Python | gpl-3.0 | 448 |
import shlex
import collections
import os
from string import find
from string import split
"""
TODO: Syntax extensions
1. Enumerated parameters split by the ',' sign, placed inside square brackets.
2. Regions don't generate all values when created, but operate with min/max ranges.
3. Single-value parameters are interpreted as a group id.
4. Range group parameters are interpreted as a tuple of groups.
5. Text labels are enclosed in quotes inside square brackets after the value, split from the value by a colon.
6. Correct group id detection.
7. Use a meta-class to create a more accurate and extensible parameters class.
8. Locations may have a descriptive name. Optional for plain locations and required for parametrized ones.
"""
"""
YAPLC locations: Input, Memory, Output(Q)
"""
YAPLCLocationTypes = ['I', 'M', 'Q']
"""
YAPLC locations data types: bool, byte, word, double-word, long, string
"""
YAPLCLocationDataTypes = ['X', 'B', 'W', 'D', 'L', 'S']
"""
YAPLC location parameter types
"""
YAPLCParameterType = {'Number': 0, 'Range': 1, 'Items': 2}
"""
"""
YAPLCNameIllegal = ['.', ',', '"', '*', ':', '#', '@', '!', '(', ')', '{', '}']
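# Illustrative (hypothetical) template snippet in the syntax handled by the parser below:
#   GRP "DigitalInputs" [channel:0..7]
#       LOC IX "din" 0..7
#   ENDGRP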
class ParseError(BaseException):
""" Exception reports parsing errors when processing YAPLC template files """
def __init__(self, message=None):
self._message = message
def message(self):
return self._message
class YAPLCLocationBase:
def __init__(self):
self._parameters = list()
self._parametrized = False
def addParameters(self, values, name=""):
if find(values, '..') >= 0:
# range of channels
bounds = split(values, '..')
if len(bounds) != 2:
raise ParseError(_("Wrong range syntax"))
if not bounds[0].isdigit() or not bounds[1].isdigit():
raise ParseError(_("Incorrect bounds format %s..%s") % bounds[0] % bounds[1])
lbound = int(bounds[0])
rbound = int(bounds[1])
self._parameters.append({"name": name,
"type": 'Range',
"min": lbound,
"max": rbound
})
elif find(values, ',') >= 0:
items = split(values, ',')
self._parameters.append({"name": name,
"type": 'Items',
"items": items
})
else:
self._parameters.append({"name": name,
"type": 'Number',
"value": values
})
def parameters(self):
return self._parameters
def parametrized(self):
return self._parametrized
class YAPLCLocation(YAPLCLocationBase):
"""
    YAPLC location abstraction to represent a location described by the syntax
"""
def __init__(self, typestr, group, unique=False, *args):
YAPLCLocationBase.__init__(self)
self._descriptive = None
if len(typestr) != 2:
raise ParseError(_("Incorrect type coding %s") % typestr)
if typestr[0] not in YAPLCLocationTypes:
raise ParseError(_("Type %s not recognized") % typestr[0])
else:
self._type = typestr[0]
if typestr[1] not in YAPLCLocationDataTypes:
raise ParseError(_("Data type %s not recognized") % typestr[1])
else:
self._datatype = typestr[1]
for p in args:
if str(p).startswith('['):
# this is named parameter
param = str(p).rstrip(']').lstrip('[')
name, value = param.split(':')
# print name, value
self.addParameters(value, name)
self._parametrized = True
if not self._descriptive:
raise ParseError(_("Parametrized locations requires descriptive name"))
elif str(p).startswith('"'):
# descriptive name of location
self._descriptive = str(p).rstrip('"').lstrip('"')
if any(s in self._descriptive for s in YAPLCNameIllegal):
raise ParseError(_("Illegal symbol in group's name: %s") % self._descriptive)
elif str(p).isdigit():
self.addParameters(p)
else:
# this is the unnamed range or items
self.addParameters(p)
self._unique = unique
self._group = group # group to this location
def type(self):
return self._type
def datatype(self):
return self._datatype
def unique(self):
return self._unique
def descriptive(self):
return self._descriptive
def __str__(self):
return '{0}{1}'.format(self._type, self._datatype)
def name(self):
return self.__str__()
def __repr__(self):
return self.__str__()
class YAPLCGroup(YAPLCLocationBase):
"""
    YAPLC group abstraction that stores info about a group extracted from the DSL
"""
def __init__(self, name, values=None, unique=False, parent=None, *args):
YAPLCLocationBase.__init__(self)
self._name = str(name).rstrip('"').lstrip('"')
if any(s in self._name for s in YAPLCNameIllegal):
raise ParseError(_("Illegal symbol in group's name: %s") % self._name)
if len(values) > 1:
raise ParseError(_("Too many parameters for group: %s") % self._name)
for v in values:
if str(v).startswith('['):
param = str(v).rstrip(']').lstrip('[')
name, value = param.split(':')
self.addParameters(value, name)
self._parametrized = True
else:
self.addParameters(v)
self._unique = unique
self._locations = list()
self._parent = parent
self._children = list()
def name(self):
return self._name
def group(self):
return None
def append(self, location):
self._locations.append(location)
def locations(self):
return self._locations
def getlocation(self, name):
for loc in self._locations:
if loc.name() == name or loc.descriptive() == name:
return loc
return None
def children(self):
return self._children
def unique(self):
return self._unique
def parent(self):
return self._parent
def hasParametrized(self):
for child in self._children:
if child.parametrized():
return True
else:
return child.hasParametrized()
for loc in self._locations:
if loc.parametrized():
return True
return False
def addsubgroup(self, group):
self._children.append(group)
# YAPLC Extensions configuration parser
class YAPLCConfigParser:
class yaplcparser(shlex.shlex):
def __init__(self, instream=None, infile=None, posix=False):
shlex.shlex.__init__(self, instream=instream, infile=infile, posix=posix)
            # add this to the usual shlex parser
self.brackets = "[]"
def read_token(self):
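            # Essentially a copy of shlex.shlex.read_token(), extended with an extra
            # bracket state so '[...]' parameters are returned as single tokens.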
quoted = False
enclosed = False
escapedstate = ' '
while True:
nextchar = self.instream.read(1)
if nextchar == '\n':
self.lineno += 1
if self.debug >= 3:
print "shlex: in state", repr(self.state), \
"I see character:", repr(nextchar)
if self.state is None:
self.token = '' # past end of file
break
elif self.state == ' ':
if not nextchar:
self.state = None # end of file
break
elif nextchar in self.whitespace:
if self.debug >= 2:
print "shlex: I see whitespace in whitespace state"
if self.token or (self.posix and quoted) or (self.posix and enclosed):
break # emit current token
else:
continue
elif nextchar in self.commenters:
self.instream.readline()
self.lineno += 1
elif self.posix and nextchar in self.escape:
escapedstate = 'a'
self.state = nextchar
elif nextchar in self.wordchars:
self.token = nextchar
self.state = 'a'
elif nextchar in self.quotes:
if not self.posix:
self.token = nextchar
self.state = nextchar
elif self.whitespace_split:
self.token = nextchar
self.state = 'a'
elif nextchar in self.brackets:
self.token = nextchar
self.state = '['
else:
self.token = nextchar
if self.token or (self.posix and quoted) or (self.posix and enclosed):
break # emit current token
else:
continue
elif self.state in self.quotes:
quoted = True
if not nextchar: # end of file
if self.debug >= 2:
print "shlex: I see EOF in quotes state"
# XXX what error should be raised here?
raise ValueError, "No closing quotation"
if nextchar == self.state:
if not self.posix:
self.token = self.token + nextchar
self.state = ' '
break
else:
self.state = 'a'
elif self.posix and nextchar in self.escape and \
self.state in self.escapedquotes:
escapedstate = self.state
self.state = nextchar
else:
self.token = self.token + nextchar
elif self.state in self.brackets:
enclosed = True
if not nextchar: # end of file
if self.debug >= 2:
print "shlex: I see EOF in quotes state"
# XXX what error should be raised here?
raise ValueError, "No closing bracket"
if nextchar == ']': # closing bracket
if not self.posix:
self.token = self.token + nextchar
self.state = ' '
break
else:
self.state = 'a'
elif self.posix and nextchar in self.escape and \
self.state in self.escapedquotes:
escapedstate = self.state
self.state = nextchar
else:
self.token = self.token + nextchar
elif self.state in self.escape:
if not nextchar: # end of file
if self.debug >= 2:
print "shlex: I see EOF in escape state"
# XXX what error should be raised here?
raise ValueError, "No escaped character"
# In posix shells, only the quote itself or the escape
# character may be escaped within quotes.
if escapedstate in self.quotes and \
nextchar != self.state and nextchar != escapedstate:
self.token = self.token + self.state
self.token = self.token + nextchar
self.state = escapedstate
elif self.state == 'a':
if not nextchar:
self.state = None # end of file
break
elif nextchar in self.whitespace:
if self.debug >= 2:
print "shlex: I see whitespace in word state"
self.state = ' '
if self.token or (self.posix and quoted) or (self.posix and enclosed):
break # emit current token
else:
continue
elif nextchar in self.commenters:
self.instream.readline()
self.lineno += 1
if self.posix:
self.state = ' '
if self.token or (self.posix and quoted) or (self.posix and enclosed):
break # emit current token
else:
continue
elif self.posix and nextchar in self.quotes:
self.state = nextchar
elif self.posix and nextchar in self.escape:
escapedstate = 'a'
self.state = nextchar
elif nextchar in self.wordchars or nextchar in self.quotes \
or self.whitespace_split or nextchar in self.brackets:
self.token = self.token + nextchar
else:
self.pushback.appendleft(nextchar)
if self.debug >= 2:
print "shlex: I see punctuation in word state"
self.state = ' '
if self.token:
break # emit current token
else:
continue
result = self.token
self.token = ''
if self.posix and not quoted and not enclosed and result == '':
result = None
if self.debug > 1:
if result:
print "shlex: raw token=" + repr(result)
else:
print "shlex: raw token=EOF"
return result
@staticmethod
def parseline(line):
"""Parse single line read from settings file
:param line: Line of text (string) to parse
:return: list of tokens split from line
"""
lexer = YAPLCConfigParser.yaplcparser(line)
lexer.commenters = '#'
lexer.wordchars += '.():,'
return list(lexer)
def groups(self):
"""Get groups parsed from configuration file
:return: list of groups keys
"""
return self._groups.values()
def getgroup(self, name):
def findgroup(name, group):
if name == group.name():
return group
for g in group.children():
if g.name() == name:
return g
if len(g.children()) > 0:
return findgroup(name, g)
return None
group = None
if name in self._groups:
# in root groups
group = self._groups[name]
else:
# in nested groups
for g in self._groups.values():
group = findgroup(name, g)
if group is not None:
break
return group
def getlocations(self, group):
"""Get locations of specified group
:param group: Group of locations
:return: Locations list
"""
if group in self._groups:
return self._groups[group].locations()
else:
return None
def addgroup(self, group):
if group not in self._groups:
self._groups[group.name()] = group
def addlocation(self, group, location):
if group in self._groups:
self._groups.get(group).append(location)
def __init__(self, dict_type=collections.defaultdict):
self._dict = dict_type
self._groups = self._dict()
def fparse(self, fileName = None):
if fileName is not None:
try:
with open(fileName) as f:
currentGroup = None
for line in f:
tokens = YAPLCConfigParser.parseline(line)
if tokens:
if tokens[0] == 'UGRP' or tokens[0] == 'GRP':
rest = []
if len(tokens) < 3:
raise ParseError("Arguments number for group less than required")
elif len(tokens) >= 3:
rest = tokens[2:]
# begin of the unique group/end of previous
if tokens[1] in self._groups:
if self._groups[tokens[1]].unique():
raise ParseError(_("Has the same unique group %s") % tokens[1])
if currentGroup is not None:
grp = YAPLCGroup(tokens[1], rest, (tokens[0] == 'UGRP'), currentGroup)
currentGroup.addsubgroup(grp)
else:
grp = YAPLCGroup(tokens[1], rest, (tokens[0] == 'UGRP'), None)
self.addgroup(grp) # also add to flat root groups table
currentGroup = grp
elif tokens[0] == 'LOC' or tokens[0] == 'ULOC':
# non-unique location description
if currentGroup is None:
raise ParseError(_("Location %s without group") % tokens[0])
if currentGroup.unique():
loc = YAPLCLocation(tokens[1], currentGroup,
(tokens[0] == 'ULOC'), *tokens[2:])
else:
# non-unique group could have no GID and parameters only
loc = YAPLCLocation(tokens[1], currentGroup,
(tokens[0] == 'ULOC'), *tokens[2:])
currentGroup.append(loc)
elif tokens[0] == 'ENDGRP':
# close current group and try to return to parent group
if currentGroup is None:
raise ParseError(_("Illegal end of group"))
currentGroup = currentGroup.parent()
else:
raise ParseError(_("Illegal instruction: %s") % tokens[0])
if currentGroup is not None:
raise ParseError(_("Group %s has not been closed properly!") % currentGroup.name())
except IOError:
raise ParseError(_("No template file for current target"))
if __name__ == '__main__':
parser = YAPLCConfigParser()
path = os.path.join(os.path.dirname(__file__),
'..', 'yaplctargets', 'nuc247',
r'extensions.cfg')
try:
parser.fparse(path)
except ParseError as pe:
print pe.message()
for grp in parser.groups():
print grp
print grp.locations()
| nucleron/IDE | yaplcconfig/yaplcparser.py | Python | gpl-3.0 | 20,172 |
# -*- coding: utf-8 -*-
# Copyright (C) Cardiff University (2020)
#
# This file is part of ciecplib.
#
# ciecplib is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ciecplib is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ciecplib. If not, see <http://www.gnu.org/licenses/>.
"""ECP-integated requests session
"""
from requests_ecp import Session as ECPSession
from .cookies import ECPCookieJar
from .env import _get_default_idp
from .kerberos import has_credential
from .utils import get_idp_url
__all__ = [
"Session",
]
class Session(ECPSession):
"""`requests.Session` with default ECP auth and pre-populated cookies
"""
def __init__(
self,
idp=_get_default_idp(),
kerberos=None,
username=None,
password=None,
cookiejar=None,
):
if kerberos is None:
kerberos = has_credential()
# open session with ECP authentication
super().__init__(
idp=get_idp_url(idp),
kerberos=kerberos,
username=username,
password=password,
)
# load cookies from existing jar or file
self.cookies = ECPCookieJar()
if cookiejar:
self.cookies.update(cookiejar)
| duncanmmacleod/ligo.org | ciecplib/sessions.py | Python | gpl-3.0 | 1,712 |
# -*- coding: utf-8 -*-
from __future__ import print_function
import urllib
def get_addon():
pass
def get_translation(key):
translation = {'30000': 'Recherche',
'30001': 'Recherche :',
'30100': 'Télécharger',
'30110': 'Information',
'30200': 'Erreur!',
'30300': 'Information',
'30301': 'Lancement du téléchargement',
'30302': 'Fichier téléchargé avec succès',
'30551': 'Debut de la mise à jour',
'30552': 'Weboob est maintenant à jour'}
return translation.get(key)
def get_addon_dir():
return '/home/benjamin'
def get_settings(key):
settings = {'downloadPath': get_addon_dir(),
'nbVideoPerBackend': '0',
'nsfw': 'False'}
return settings.get(key)
def display_error(error):
print("%s: %s" % ("ERROR", error))
def display_info(msg):
print("%s: %s" % ("INFO", msg))
def parse_params(paramStr):
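    # Parse a plugin-style query string ("?key=value&key2=value2") into a dict.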
paramDic = {}
# Parameters are on the 3rd arg passed to the script
if len(paramStr) > 1:
paramStr = paramStr.replace('?', '')
# Ignore last char if it is a '/'
if paramStr[len(paramStr) - 1] == '/':
paramStr = paramStr[0:len(paramStr) - 2]
# Processing each parameter splited on '&'
for param in paramStr.split('&'):
try:
# Spliting couple key/value
key, value = param.split('=')
except:
key = param
value = ''
key = urllib.unquote_plus(key)
value = urllib.unquote_plus(value)
# Filling dictionnary
paramDic[key] = value
return paramDic
def ask_user(content, title):
return raw_input(title)
def create_param_url(paramsDic, quote_plus=False):
#url = sys.argv[0]
url = ''
sep = '?'
try:
for param in paramsDic:
if quote_plus:
url = url + sep + urllib.quote_plus(param) + '=' + urllib.quote_plus(paramsDic[param])
else:
url = "%s%s%s=%s" % (url, sep, param, paramsDic[param])
sep = '&'
except Exception as msg:
display_error("create_param_url %s" % msg)
url = None
return url
def add_menu_item(params={}):
print('%s => "%s"' % (params.get('name'), create_param_url(params)))
def add_menu_link(params={}):
print('[%s] %s (%s)' % (params.get('id'), params.get('name'), params.get('url')))
#print params.get('itemInfoLabels')
#print params.get('c_items')
def end_of_directory(update=False):
print('******************************************************')
def download_video(url, name, dir='./'):
    print('Download a video %s from %s' % (name, url))
| sputnick-dev/weboob | contrib/plugin.video.videoobmc/resources/lib/test/common_test.py | Python | agpl-3.0 | 2,860 |
from shared_memory import SharedMemory
def print_session(key_id):
#memory = SharedMemory
print(SharedMemory.get(key_id)) | xmnlab/minilab | memcache/printing.py | Python | gpl-3.0 | 130 |
#*****************************************************************************
#
# Linux and OSX version of Audiovideocours
# Using Python 2.7 coming with OS X Lion
#
# (c) Universite de Strasbourg 2006-2014
# Conception and development : schnellf [AT] unistra.fr
#---
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#*******************************************************************************
__version__="1.23Mac"
## Python import (base Python 2.4)
import sys,os,time,datetime,tarfile,ConfigParser,threading,shutil,gettext,zipfile
if sys.platform=="darwin":
# To enable AppleScript launch:
if 0: # don't change path for now
os.chdir(os.path.expanduser("~/Documents/workspace/audiovideocours/"))
import subprocess, socket, traceback, webbrowser
#import winsound # windows libs
from thread import start_new_thread, exit
from urllib2 import urlopen
from os import chdir
from ftplib import FTP
## External Python libs import
import wx, wx.lib.colourdb # GUI
import wx.lib.hyperlink as hl
print wx.wx.__version__
#import msvcrt,pythoncom,pyHook,serial # access MS C/C++ Runtime lib, MS COM, hook, serial port
import cherrypy
if sys.platform =="darwin":
from AppKit import NSBeep,NSApp,NSApplication
if 0:
import PIL
from PIL import GifImagePlugin # Python Imaging Lib
from PIL import JpegImagePlugin # Static imports from PIL for py2exe
from PIL import Image
#from PIL import ImageGrab #Used for taking screenshots but only works on Windows (will use built in screencapture in OS X)
#import pymedia.audio.sound as sound # for mp3 or ogg encoding
#import pymedia.audio.acodec as acodec
#import pymedia.muxer as muxer
#from pywinauto import * # used to put app. back on foreground
#from reportlab.platypus.doctemplate import FrameBreak # PDF lib.
## Local imports
from FMEcmd import * # Script to control Flash Media Encoder and genrate profile.xml file
import htmlBits # HTML chuncks for html format output
## Linux hook specific
if sys.platform=="linux2": from pyxhook import *
## OS X hook specific
if sys.platform=="darwin":
from Quartz import *
def MyFunction(p, t, e, c):
#print "*",e,dir(e)
#print "Event Tye", CGEventGetType(e)
if CGEventGetType(e)==10: # Event from the keyboard
#print CGEventGetFlags(e) # Indicates modifier key used (shift, fn, etc)
keyPressed=CGEventGetIntegerValueField(e,9)
print "Key pressed", keyPressed
if keyPressed ==100:
print "- F8 detected! -"
stopFromKBhook()
if keyPressed in [49,124,126,125,123]:
screenshot()
# see constants event fields in http://developer.apple.com/library/mac/#documentation/Carbon/Reference/QuartzEventServicesRef/Reference/reference.html
# Key pressed is number 100 for F8
if CGEventGetType(e)==1 and recording==True:
print "Mouse left click"
screenshot()
tap = CGEventTapCreate(kCGHIDEventTap, kCGHeadInsertEventTap,
kCGEventTapOptionListenOnly, CGEventMaskBit(kCGEventLeftMouseDown) | CGEventMaskBit(kCGEventKeyDown),
MyFunction, None)
runLoopSource = CFMachPortCreateRunLoopSource(None, tap, 0);
CFRunLoopAddSource(CFRunLoopGetCurrent(), runLoopSource, kCFRunLoopDefaultMode);
CGEventTapEnable(tap, True);
#CGEventTapEnable(tap, False);
# CFRelease(tap); # something to do to manage memory ??
# CFRunLoopRun(); # No comprendo! I have the events if i comment this because i'm using the GUI loop?
#----------------------------------------------------------------------------------------
## Some default global variables in case no configuration file is found
remoteControl=False
"If True the client will also act as a mini server for maintenance purposes. Go to http://your-PC:port"
remotePort="8080"
"Remote access port"
standalone=True
"GUI design, False=amphi (always running in background, minimal choices), True= individual PC"
# Publishing form variables
publishingForm=False
"Indicates there's no publishing form on the client and everything is done on the website"
title=""
"Recording's title"
description=""
"Recording's description"
name=""
"Author's name"
firstname=""
"Author's firstname"
login=""
"Author's login"
genre=""
"Eventual access code"
ue=""
" To use the app without a webserver to publish to"
recording = False
" Recording Status : to know if we are recording now"
last_session_recording_start="None"
" Date/time of the last recording start since the app. is running"
last_session_recording_stop="None"
" Date/time of the last recording stop since the app. is running"
app_startup_date="None"
"Date/time when the app. has been launched"
last_session_publish_order="None"
"Date/time when the app. has been launched"
last_session_publish_problem="None"
"Date/time when the app. encountered an error while publishing "
workDirectory=""
"The working/current directory"
dirName=""
"The data directory"
pathData=None
"Path/name of the folder containing the recordings"
id= ""
"An id which can be received and send from the socket (for external control or monitoring)"
samplingFrequency= 48000
"Default sampling frequency for audio recording"
stopKey= "F8"
"Default key to start/stop recording if you don't have the serial Kb"
socketEnabled=False
"To listen to a socket for eventual orders from a server"
portNumber= 3737
"Socket port to listen to for server orders (sending an eventual ID also)"
READ_CHUNK= 512
"Chunck size for pymedia audio reading"
if sys.platform == 'win32':
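    # NOTE: the pymedia imports above are commented out in this Mac/Linux port,
    # so acodec is undefined and this win32-only block is effectively vestigial here.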
cparams= { 'id': acodec.getCodecID( 'mp3' ),
'bitrate': 64000,
'sample_rate': 48000 ,
'channels': 1 }
"Set of parameters for the pymedia audio Encoder"
nameRecord="enregistrement-micro.mp3"
"Dafault name of the audio recording"
tryFocus=False
"A boolean variable to inform when the app is trying to get focus back on a frame"
serialKeyboard=False
"To indicate if the special keyboard must be used"
keyboardPort=0
"Serial port number to use for serial keyboard"
videoprojectorInstalled=False
"To indicate if a videoprojector bust be used on a serial plug"
videoprojectorPort=1
"Port number of the serial port for the videoprojector"
tempo = 0
"If tempp = 0 => no tempo (Nb of seconds to wait before each screenshot)"
lastEvent=time.time()
"Initialize last Event time as a global variable"
videoProjON='PWR ON\x0D'
"Video proj protocol : ON (EPSON here)"
videoProjOFF='PWR OFF\x0D'
"Video proj protocol : OFF (EPSON here)"
ftpUrl="audiovideocours.u-strasbg.fr"
"FTP URL server for online publication"
urlserver= "https://audiovideocours.u-strasbg.fr/avc/publication"
"Default URL of the audiovideocours server containing the submit form"
urlLiveState="http://audiovideocours.u-strasbg.fr/audiocours_v2/servlet/LiveState"
"URL of the live form status"
eventDelay=1.5
"Number of seconds before allowing a new screenshot"
recordingPlace= "not given"
"Optional location indication for the log file and the server"
maxRecordingLength=18000
"Maximum recording length in seconds(1h=3600s,5h=18000s)"
usage="audio"
"Usage ='audio' for audio and 'video' for video recording "
"A generic access code"
videoEncoder="wmv"
"""Choice of the videoencoder to use if usage=video
'wmv' = Windows Media Encoder ; 'real'= Real producer """
smilBegin=""" <?xml version="1.0"?>
<!DOCTYPE smil PUBLIC "-//W3C//DTD SMIL 2.0//EN" "http://www.w3.org/2001/SMIL20/SMIL20.dtd">
<smil xmlns="http://www.w3.org/2001/SMIL20/Language">
<head>
<meta name="title" content="MediaCours"/>
<meta name="author" content="ULP Multimedia"/>
<meta name="copyright" content="Copyright ULP Multimedia"/>
<layout>
<root-layout width="1024" height="768"/>
<region id="Images" width="1024" height="768" fit="meet" />
</layout>
</head>
<body>
<par>
"""
"Smil template"
loginENT="initial"
"Login"
emailENT="initial"
"Email"
live=False
"Live choice enable or not in GUI"
language="French"
"Current language in the GUI"
videoinput="0"
"Video input"
audioinput="0"
"Audio input"
flashServerIP="130.79.188.196"
"Flash Server IP for live sessions"
formFormation="" # a default entry for "formation" in the publishing form
"Automatically fills the formation field with this value"
lastGlobalEvent=time.time()
"Indicates last keyboard or mouse activity"
liveCheckBox=False
"Indicates if user wants(checked) a live session from the GUI"
audioVideoChoice=False # give the possibility to choose between an audio or video recording
"Show in the GUI the choice between a video or audio recording"
ftpHandleReady=False
"For live session: indicates if we have an open FTP connection to send live screenshots"
previewPlayer="realplayer"
"Standalone preview player ( realplayer or browser), used in standalone mode only"
audiocue=True
"make a sound when a screenshot is taken"
if 1:# in case no server information is found in the configuration file
ftpLogin=""
"FTP login for publishing and live screenshots"
ftpPass=""
"FTP password for publishing and live screenshots"
#------- i18n settings ------------------------------------------------------------------
gettext.install("mediacours","locale")
#----------------------------------------------------------------------------------------
def readConfFile(confFile="mediacours.conf"):
"""
Read the configuration file and get those values as global vars
"""
print "Search and read configuration (if it exist):"
global confFileReport,id,urlserver,samplingFrequency,createMp3,stopKey,portNumber,pathData\
,serialKeyboard,startKey,videoprojectorInstalled,videoprojectorPort,keyboardPort\
,videoProjON,videoProjOFF,ftpUrl,eventDelay,maxRecordingLength,recordingPlace\
,usage,cparams,bitrate,socketEnabled,standalone,videoEncoder,amxKeyboard,liveCheckBox,\
language,ftpLogin,ftpPass,cparams, videoinput,audioinput,flashServerIP,audiocue\
,formFormation, audioVideoChoice,urlLiveState,publishingForm, remoteControl, remotePort,previewPlayer
confFileReport=""
section="mediacours"
def readParam(param):
global confFileReport
param=str(param)
paramValue= config.get(section,param)
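        # "True"/"False" strings from the .conf file are converted into real booleans below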
if paramValue=="True" or paramValue=="False":
paramValue=eval(paramValue)
if (param != "ftpPass") and (param != "ftpLogin"):
print "... "+param+" = ", paramValue
#writeInLogs("\n\t:"+param+"= "+paramValue)
confFileReport += "\n\t:"+str(param)+"= "+str(paramValue)
return paramValue
#try:
if 1:
fconf=open(confFile,"r")
config= ConfigParser.ConfigParser()
config.readfp(fconf)
if config.has_option(section,"language") == True: language=readParam("language")
if config.has_option(section,"usage") == True: usage=readParam("usage")
if config.has_option(section,"pathData") == True: pathData=readParam("pathData")
if config.has_option(section,"standalone") == True: standalone=readParam("standalone")
if config.has_option(section,"videoEncoder") == True: videoEncoder=readParam("videoEncoder")
if config.has_option(section,"urlserver") == True: urlserver=readParam("urlserver")
if sys.platform == 'win32':
if config.has_option(section,"samplingFrequency") == True: samplingFrequency=readParam("samplingFrequency")
if config.has_option(section,"bitrate") == True: cparams['bitrate']=eval(readParam("bitrate"))
if config.has_option(section,"stopKey") == True: stopKey=readParam("stopKey")
if config.has_option(section,"socketEnabled") == True: socketEnabled=readParam("socketEnabled")
if config.has_option(section,"portNumber") == True: portNumber=int(readParam("portNumber"))
if config.has_option(section,"serialKeyboard") == True: serialKeyboard=readParam("serialKeyboard")
if config.has_option(section,"amxKeyboard") == True: amxKeyboard=readParam("amxKeyboard")
if config.has_option(section,"keyboardPort") == True: keyboardPort=int(readParam("keyboardPort"))
if config.has_option(section,"videoprojectorInstalled") == True: videoprojectorInstalled=readParam("videoprojectorInstalled")
if config.has_option(section,"videoprojectorPort") == True: videoprojectorPort=int(readParam("videoprojectorPort"))
if config.has_option(section,"videoProjON") == True: videoProjON=readParam("videoProjON")
if config.has_option(section,"videoProjOFF") == True: videoProjOFF=readParam("videoProjOFF")
if config.has_option(section,"ftpUrl") == True: ftpUrl=readParam("ftpUrl")
if config.has_option(section,"eventDelay") == True: eventDelay=float(readParam("eventDelay"))
if config.has_option(section,"maxRecordingLength") == True: maxRecordingLength=float(readParam("maxRecordingLength"))
if config.has_option(section,"ftpLogin") == True: ftpLogin=readParam("ftpLogin")
if config.has_option(section,"ftpPass") == True: ftpPass=readParam("ftpPass")
if config.has_option(section,"live") == True: liveCheckBox=readParam("live")
if config.has_option(section,"videoinput") == True: videoinput=readParam("videoinput")
if config.has_option(section,"audioinput") == True: audioinput=readParam("audioinput")
if config.has_option(section,"flashServerIP") == True: flashServerIP=readParam("flashServerIP")
if config.has_option(section,"formFormation") == True: formFormation=readParam("formFormation")
if config.has_option(section,"audioVideoChoice") == True: audioVideoChoice=readParam("audioVideoChoice")
if config.has_option(section,"urlLiveState") == True: urlLiveState=readParam("urlLiveState")
if config.has_option(section,"publishingForm") == True: publishingForm=readParam("publishingForm")
if config.has_option(section,"remoteControl") == True: remoteControl=readParam("remoteControl")
if config.has_option(section,"remotePort") == True: remotePort=int(readParam("remotePort"))
if config.has_option(section,"previewPlayer") == True: previewPlayer=readParam("previewPlayer")
if config.has_option(section,"audiocue") == True: audiocue=readParam("audiocue")
fconf.close()
#except:
if 0:
print "Something went wrong while reading the configuration file..."
def showVuMeter():
""" If available in installation folder show VUMeter.exe
http://www.vuplayer.com/files/vumeter.zip """
try:
subprocess.Popen(["VUMeter.exe"])
except:
print "Couldn't find VUMeter.exe"
def stopFromKBhook():
"""
Start/stop recording when asked from the PC keyboard 'stopKey'
"""
global frameEnd, frameBegin, tryFocus,id
#screenshot()#gives a delay too long in case of live recording here
print ">> In stopFromKBhook now..."
print ">> In stopFromKB hook order recordStop() now"
recordStop()
#sys.exit()
print ">> In stopFromKBhook order recordStop() passed"
print ">> Closing hook now..."
if sys.platform!="darwin":
hm.cancel()
print ">> hm.cancel() passed..."
print "In stopFromKBhook order frameEndShow() now"
wx.CallAfter(frameEndShow)
if sys.platform=="darwin":
#frameEndShow()
print "."
print ">> In stopFromKBhook order frameEndShow() passed"
if 0:
#if recording==False and tryFocus==False:
if recording==False: # no tryFocus for Linux version?
print "!!!!! Trying to put frameBegin back in stopFromHook"
if sys.platform=="win32": showVuMeter()
# try to show a vumeter here
#showVuMeter()
if recording==True and tryFocus==False:
print "id:",id
if id=="":
print "Trying to put Ending frame back foreground..."
if live==False:
if sys.platform!="darwin": screenshot()
print "stop recording now recordStop()"
if sys.platform=="win32": windowBack(frameEnd)
if sys.platform=="linux2":
print "<<<< trying frameEnd.Show() on Linux >>>>>"
wx.CallAfter(frameEndShow)
recordStop()
#frameEnd.Show()
else:
print "hello?"
recordStop()
print "Not showing usual publishing form"
start_new_thread(confirmPublish,())
frameUnivr.Show()
# make sure buttons "publish" and "cancel" are enabled for user input
if 0:
try:
if btnPublish.IsEnabled()==False:
btnPublish.Enable(True)
if btnCancel.IsEnabled()==False:
btnCancel.Enable(True)
except:
print "warning! tried to check if buttons 'publish' and 'cancel' were enabled but had problems"
def OnKeyboardEventAfter(event):
    wx.CallAfter(OnKeyboardEvent, event)
def OnKeyboardEvent(event):
"""
Catching keyboard events from the hook and deciding what to do
"""
global stopKey,lastEvent,lastGlobalEvent,frameBegin,frameEnd
if 0: winsound.Beep(300,50) # For testing purposes
lastGlobalEvent=time.time()# For shutdownPC_if_noactivity
screenshotKeys=["Snapshot","Space","space","Return","Up","Down","Right",
"Left","Prior","Next"]
print "Key event seen : ", event.Key
if (stopKey!="") and (event.Key== stopKey)and (tryFocus==False):
print "Received order to stop recording ..."
print "from hook stopKey= :", stopKey
global recording, fen1
print "Escape Key pressed from the hook ..."
if sys.platform=="win32":
start_new_thread(stopFromKBhook,())
else:
print "launching CallAfter stopFromKBhook now"
stopFromKBhook()
print"after launch"
if 0:
if event.Key in screenshotKeys and( (time.time()-lastEvent)>eventDelay):
start_new_thread(screenshot,())
lastEvent=time.time()
# Show each key stroke (for debug only)
if 0:
print 'MessageName:',event.MessageName
print 'Message:',event.Message
print 'Time:',event.Time
print 'Window:',event.Window
print 'WindowName:',event.WindowName
print 'Ascii:', event.Ascii, chr(event.Ascii)
print 'Key:', event.Key
print 'KeyID:', event.KeyID
print 'ScanCode:', event.ScanCode
print 'Extended:', event.Extended
print 'Injected:', event.Injected
print 'Alt', event.Alt
print 'Transition', event.Transition
print '---'
return True # return True to pass the event to other handlers
def OnMouseEvent(event):
"""
Catching mouse events from the hook and deciding what to do
"""
global recording,lastEvent,lastGlobalEvent
lastGlobalEvent=time.time()# For shutdownPC_if_noactivity
if (recording == True) and (tryFocus == False)\
and( (time.time()-lastEvent)>eventDelay):
if (event.MessageName == "mouse left down") or (event.MessageName=="mouse wheel down")\
or (event.MessageName=="mouse wheel up"):
if 0: winsound.Beep(300,50) # For testing purposes
start_new_thread(screenshot,())
lastEvent=time.time()
if 0: # For debug purpose put 0 for example
print 'MessageName:',event.MessageName
print 'Message:',event.Message
print 'Time:',event.Time
print 'Window:',event.Window
print 'WindowName:',event.WindowName
print 'Position:',event.Position
print 'Wheel:',event.Wheel
print 'Injected:',event.Injected
print '---'
return True # return True to pass the event to other handlers
def recordNow():
"""
Record the audio input now with pymedia or video via an external encoder
"""
print "Entering recordNow() function"
global recording, diaId, timecodeFile, t0, dateTime0, dirName, workDirectory
    global snd,ac,cparams, nameRecord,usage,smil,pathData, last_session_recording_start, ftpHandleReady
usage=frameBegin.usage
recording= True
last_session_recording_start=getTime()
ftpHandleReady=False
if sys.platform in ("win32","darwin"):
# Visual cue to confirm recording state
tbicon.SetIcon(icon2, "Enregistrement en cours")
# Audio cue to confirm recording state
if sys.platform=="win32":
winsound.Beep(800,100)
diaId = 0 # initialize screenshot number and time
t0 = time.time()
dateTime0 = datetime.datetime.now()
dirName = str(dateTime0)
dirName = dirName[0:10]+'-'+ dirName[11:13] +"h-"+dirName[14:16] +"m-" +dirName[17:19]+"s"+"-"+recordingPlace#+ "-"+ dirName[20:22]
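    # The folder name built above looks like "2011-09-21-10h-05m-30s-<recordingPlace>"
    # (date, then time, then the configured recording place; example values are illustrative).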
workDirectory=pathData+"/"+dirName
print "workDirectory= ",workDirectory
os.mkdir(workDirectory)
writeInLogs("- Begin recording at "+ str(datetime.datetime.now())+"\n")
os.mkdir(workDirectory + "/screenshots")
if sys.platform=="linux2":
print "launching screenshot() thread from recordNow()"
start_new_thread(screenshot,())
def record():
""" Record audio only - mp3 - with pymedia"""
global recording, cparams
f= open( workDirectory+'/'+nameRecord, 'wb' )
ac= acodec.Encoder( cparams )
snd= sound.Input(cparams["sample_rate"],cparams["channels"], sound.AFMT_S16_LE)
snd.start()
while recording==True:
time.sleep(0.1)
if (time.time()-t0 > maxRecordingLength):
writeInLogs("- Recording duration > maxRecordingLength => Stop recording "+\
str(datetime.datetime.now())+"\n")
recordStop()
s= snd.getData()
if s and len( s ):
for fr in ac.encode(s):
f.write( fr )
#print "*",
if recording == False:
snd.stop()
print "-- stopped recording now --"
def windowsMediaEncoderRecord():
"""
Record Video with Windows Media Encoder Series 9
"""
scriptPath=r' cscript.exe C:\"Program Files\Windows Media Components\Encoder\WMCmd.vbs"'
arguments=" -adevice 1 -vdevice 1 -output "+\
dirName+"\enregistrement-video.wmv -duration "+str(maxRecordingLength)
os.system(scriptPath+arguments)
def realProducerRecord():
"""
Record video with Real Producer basic
"""
MaximumRecordingLength=str(maxRecordingLength)
fileVideo=workDirectory+ '\\enregistrement-video.rm'
if live==False:
print "Videoinput and audio port = ",videoinput,audioinput
os.system(('producer.exe -vc %s -ac %s -pid pid.txt -o "%s" -d %s')%(videoinput,audioinput,fileVideo,MaximumRecordingLength))
elif live==True:
#todoLiveReal=r'producer.exe -vc '+videoinput+' -ac '+videoinput+' -pid pid.txt -o '+fileVideo+" -sp 130.79.188.5/"+recordingPlace+".rm"
page = urlopen(urlLiveState,\
"recordingPlace="+recordingPlace+"&status="+"begin")
print "------ Response from Audiocours : -----"
serverAnswer= page.read() # Read/Check the result
print serverAnswer
todoLiveReal=('producer.exe -vc %s -ac %s -pid pid.txt -o "%s" -sp 130.79.188.5/%s.rm')%(videoinput,audioinput,fileVideo,recordingPlace)
print todoLiveReal
os.system(todoLiveReal)
def ffmpegLinuxAudioRecord():
""" Record mp3 in Linux with FFMPEG and liblamemp3 """
print "In ffmpegLinuxAudioRecord"
cmd="ffmpeg -f alsa -ac 2 -i pulse -acodec libmp3lame -aq 0 -y -loglevel 0 "+workDirectory+"/"+nameRecord
os.system(cmd)
def flashMediaEncoderRecord():
"""
Record video with Flash Media Encoder
"""
print "In flashMediaEncoderRecord()"
global flv,flashServer,FMLEpid,urlLiveState
if live==True:
print "Going for live==True"
liveParams="""<rtmp>
<url>rtmp://"""+flashServerIP+"""/live</url>
<backup_url></backup_url>
<stream>"""+recordingPlace+"""</stream>
</rtmp>"""
#Send the information that live is ON
#urlLiveState="http://audiovideocours.u-strasbg.fr/audiocours_v2/servlet/LiveState"
page = urlopen(urlLiveState,\
"recordingPlace="+recordingPlace+"&status="+"begin")
if 0:
print "------ Response from Audiocours : -----"
serverAnswer= page.read() # Read/Check the result
print serverAnswer
else:
liveParams=""
#flvPath=r"C:\Documents and Settings\franz\Bureau\newsample.flv"
if usage=="video":
flvPath=pathData+'/'+ dirName+ '/enregistrement-video.flv'
elif usage=="audio":
flvPath=pathData+'/'+ dirName+ '/enregistrement-micro.flv'
print flvPath
print "In FlashMediaRecord() videoinput=",videoinput,"audioinput=",audioinput
print "Current directory is", os.getcwd()
if os.path.isfile("startup.xml")==True:
>> Found startup.xml">
            print ">>> Found startup.xml in AudioVideoCours folder. This profile will be used by Flash Media Encoder instead of the configuration file parameters."
#subprocess.Popen(["FMEcmd.exe", "/P","startup.xml"])
flv=FMEcmd(videoDeviceName=videoinput,audioDeviceName=audioinput,
flvPath=flvPath,liveParams=liveParams,externalProfile=True,usage=usage,live=live,pathData=pathData)
flv.record()
#FMEprocess=flv.record()
#FMLEpid=FMEprocess.pid
FMLEpid=flvPath # FME use the full path of the flv not the pid...
else:
print "FME: using configuration file parameters"
flv=FMEcmd(videoDeviceName=videoinput,audioDeviceName=audioinput,
flvPath=flvPath,liveParams=liveParams,externalProfile=False,usage=usage,live=live,pathData=pathData)
flv.record()
#FMEprocess=flv.record()
#FMLEpid=FMEprocess.pid
FMLEpid=flvPath # FME use the full path of the flv not the pid...
def liveStream():
""" Control VLC for *audio* live stream """
global vlcPid,dirName
time.sleep(2)
print "Going audio live with VLC ..."
vlcapp='C:\\Program'+' '+'Files\\VideoLAN\\VLC\\vlc.exe'
command=r'C:\"Program Files"\VideoLAN\VLC\vlc.exe -vvvv '
file=pathData+"/"+dirName+"/enregistrement-micro.mp3"
typeout="#standard{access=http,mux=raw}"
# try to launch a pre-configured trayit!.exe to hide VLC GUI
try:
subprocess.Popen(['trayit!'])
#time.sleep(0.5)
except:
pass
if 0: # Using os.system (meaning there will be a DOS window visible)
os.system('%s -vvvv "%s" --sout %s'%(command,file,typeout))
if 1: # Using subprocess (no DOS window visible)
arg1= '-vvvv '+file
arg2= '"#standard{access=http,mux=asf}"'
subprocess.Popen(['%s'%(vlcapp),"-vvvv",file,"--sout","%s"%typeout])
# Check for usage and engage recording
if usage=="audio":
if sys.platform=="win32":
start_new_thread(record,())
if sys.platform=="linux2":
start_new_thread(ffmpegLinuxAudioRecord,())
if live==True:
start_new_thread(liveScreenshotStart,())
if live==True and usage=="audio":
#start_new_thread(liveStream,())
start_new_thread(flashMediaEncoderRecord,())
#Send the information that live is ON
page = urlopen(urlLiveState,\
"recordingPlace="+recordingPlace+"&status="+"begin")
if 0:#For Degub
print "------ Response from Audiocours : -----"
serverAnswer= page.read() # Read/Check the result
print serverAnswer
print "Usage is > ", usage
if usage=="video" and videoEncoder=="flash":
print "searching Flash Media Encoder"
start_new_thread(flashMediaEncoderRecord,())
if usage=="audio" and videoEncoder=="flash" and sys.platform=="darwin":
start_new_thread(flashMediaEncoderRecord,())
if usage=="video" and videoEncoder=="wmv":
print "searching Windows Media Encoder ..."
start_new_thread(windowsMediaEncoderRecord,())
if usage=="video" and videoEncoder=="real":
print "searching Real media encoder"
start_new_thread(realProducerRecord,())
def screenshot():
"""
Take a screenshot and thumbnails of the screen
"""
global recording, diaId, t0, timecodeFile,audiocue
time.sleep(tempo)
if recording == True:
if sys.platform=="win32":
myscreen= ImageGrab.grab() #print "screenshot from mouse"
t = time.time()
diaId += 1
myscreen.save(workDirectory+"/screenshots/" + 'D'+ str(diaId)+'.jpg')
timeStamp = str(round((t-t0),2))
print "Screenshot number ", diaId," taken at timeStamp = ", timeStamp
if audiocue==True:
winsound.Beep(500,70)
timecodeFile = open (workDirectory +'/timecode.csv','a')
timecodeFile.write(timeStamp+"\n")
timecodeFile.close()
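            # timecode.csv accumulates one offset per screenshot, in seconds since t0,
            # one value per line (e.g. "12.34", "27.8", ...); recordStop() reads it back
            # to build the SMIL and HTML players.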
"""smilFile.write('<a href="screenshots/D'+str(diaId)+'.jpg" external="true">\n'\
+ '<img begin="'+timeStamp+'" region="Images" src="screenshots/D'\
+ str(diaId)+'.jpg"/> </a>\n')"""
myscreen.thumbnail((256,192))
myscreen.save(workDirectory+"/screenshots/" + 'D'+ str(diaId)+'-thumb'+'.jpg')
if sys.platform=="darwin" or "linux2":
t = time.time()
diaId += 1
if sys.platform=="darwin" and audiocue==True:
os.system("screencapture -t jpg "+workDirectory+"/screenshots/" + 'D'+ str(diaId)+'.jpg')
if sys.platform=="darwin" and audiocue==False:
os.system("screencapture -x -t jpg "+workDirectory+"/screenshots/" + 'D'+ str(diaId)+'.jpg')
if sys.platform=="linux2":
os.system("scrot "+workDirectory+"/screenshots/" + 'D'+ str(diaId)+'.jpg')
t = time.time()
timeStamp = str(round((t-t0),2))
print "Screenshot number ", diaId," taken at timeStamp = ", timeStamp
timecodeFile = open (workDirectory +'/timecode.csv','a')
timecodeFile.write(timeStamp+"\n")
timecodeFile.close()
if sys.platform=="linux2":
myscreen= Image.open(workDirectory+"/screenshots/" + 'D'+ str(diaId)+'.jpg')
#myscreen= Image.open(workDirectory+"/screenshots/" + 'D'+ str(diaId-1)+'.png')
myscreen.thumbnail((256,192))
#print "WARNING: must see how to avoid this error when creating jpg thumbs, png work fine for now"
myscreen.save(workDirectory+"/screenshots/" + 'D'+ str(diaId)+'-thumb'+'.jpg')
#myscreen.save(workDirectory+"/screenshots/" + 'D'+ str(diaId)+'-thumb'+'.png')
if sys.platform=="darwin":
print "Creating thumb with sips"
os.system("sips -z 192 256 "+workDirectory+"/screenshots/" + 'D'+ str(diaId)+'.jpg --out '+workDirectory+"/screenshots/" + 'D'+ str(diaId)+'-thumb.jpg')
if live==True and ftpHandleReady:
time.sleep(3) # in live mode add a tempo to have the current dia (after an eventual transition)
#if 1:
print "sending screenshot live, note to self: code MUST BE OSXified!"
try:
livescreen= ImageGrab.grab()
livescreen.save(workDirectory+"/screenshots/Dlive.jpg")
print "[FTP] sending live screenshot"
f = open(workDirectory+"/screenshots/Dlive.jpg",'rb')
ftpHandle.storbinary('STOR '+recordingPlace+'.jpg', f)
f.close()
#if 0:
except:
print "Couldn't send live screenshot to FTP port"
if recording==True:
try:
print "trying to open a new FTP handle..."
liveScreenshotStart()
except:
print "Couldn't retry FTP send"
def liveScreenshotStream():
""" send screenshot at regular interval in live mode"""
# Function not currently used
while live==True and recording==True:
time.sleep(5) # in live mode add a tempo to have the current dia (after an eventual transition)
f = open(workDirectory+"/screenshots/" + 'D'+ str(diaId)+'.jpg','rb')
ftp.storbinary('STOR '+recordingPlace+'.jpg', f)
f.close()
def liveScreenshotStart():
""" Open ftpLiveHandle for live screenshots capabilities """
global ftpHandle, ftpHandleReady
#ftpHandleReady=False
try:
ftpHandle = FTP(ftpUrl)
ftpHandle.login(ftpLogin, ftpPass)
ftpHandle.cwd("live")
print "[FTP] Opened ftpLiveHandle for live screenshots capabilities "
ftpHandleReady=True
except:
print "couldn't open ftpHandle"
def liveScreenshotStop():
""" Close ftpLiveHandle """
    global ftpHandle, ftpHandleReady
ftpHandle.quit()
ftpHandleReady=False
def recordStop():
"""
Stop recording the audio input now
"""
global recording,timecodeFile,FMLEpid, last_session_recording_stop
print "In recordStop() now..."
## Create smile file
print "trying to create smile file now..."
try:
smil=SmilGen(usage,workDirectory)
f=open(workDirectory+"/timecode.csv")
diaTime=f.read().split("\n")[:-2]
f.close()
diaId=1
for timeStamp in diaTime:
smil.smilEvent(timeStamp,diaId+1)
diaId+=1
smil.smilEnd(usage,videoEncoder)
except:
writeInLogs("- Problem while genration smil file... "+ str(datetime.datetime.now())+"\n")
    ## Create html file and thirdparty folder
print "trying to create html file now..."
try:
htmlGen()
except:
writeInLogs("- Problem at generating html and thirdparty folder... "+ str(datetime.datetime.now())+"\n")
print "setting recording variable to False ..."
recording= False
last_session_recording_stop=getTime()
print "recording is now = ", recording
if sys.platform in ("win32","darwin"):
print "changing systray icon to non-recording"
# Visual cue to confirm recording state
tbicon.SetIcon(icon1, usage+"cours en attente")
        # Audio cue confirming recording state (two beeps when recording has stopped)
if sys.platform=="win32":
winsound.Beep(800,100)
time.sleep(0.2)
winsound.Beep(800,100)
if live==True:
flv.stop(FMLEpid="rtmp://"+flashServerIP+"/live+"+recordingPlace)
#if 1:
try:
liveScreenshotStop()
#if 0:
except:
print "problem with FTP connection"
if live==True:
page = urlopen(urlLiveState,\
"recordingPlace="+recordingPlace+"&status="+"end")
if 0:#For debug
print "------ Response from Audiocours : -----"
serverAnswer= page.read() # Read/Check the result
print serverAnswer
lastEvent=time.time()
#timecodeFile.close()
if usage=="video" and videoEncoder=="wmv":
os.popen("taskkill /F /IM cscript.exe")#stop MWE !!!
if usage=="video" and videoEncoder=="real":
os.popen("signalproducer.exe -P pid.txt")#stop Real producer
if usage=="video" and videoEncoder=="flash":
flv.stop(FMLEpid)
if usage=="audio" and sys.platform=="linux2":
print "trying to stop ffmpeg now"
os.popen("killall ffmpeg")
if usage=="audio" and sys.platform=="darwin":
try:
print "trying to stop FMLE"
flv.stop(FMLEpid)
except:
pass
if live==True:
liveFeed.SetValue(False) #uncheck live checkbox for next user in GUI
writeInLogs("- Stopped recording at "+ str(datetime.datetime.now())+"\n")
def playAudio():
"""
Play the sound file from the folder selected
"""
global workDirectory, fen1
print "... playSound"
mixer= sound.Mixer()
dm= muxer.Demuxer( 'mp3' )
dc= acodec.Decoder( cparams )
sndOut= sound.Output( cparams["sample_rate"],cparams["channels"], sound.AFMT_S16_LE )
bytesRead= 0
f= open( workDirectory+'/'+nameRecord, 'rb' )
s=' '
while len( s ):
s= f.read(READ_CHUNK)
frames= dm.parse( s )
if frames:
for fr in frames:
r= dc.decode( fr[ 1 ] )
if r and r.data:
sndOut.play( r.data )
else:
time.sleep( .01 )
while sndOut.isPlaying(): time.sleep( 0.05 )
sndOut.stop()
def createZip():
""" zip all recording data in a .zip folder """
frameEnd.statusBar.SetStatusText("Please wait ...(creating archive) ...")
#frameEnd.statusBar.w
zip = zipfile.ZipFile(pathData+"/"+dirName+".zip", 'w')
for fileName in os.listdir ( workDirectory ):
if os.path.isfile (workDirectory+"/"+fileName):
zip.write(workDirectory+"/"+fileName,
dirName+"/"+fileName,zipfile.ZIP_DEFLATED)
for fileName in os.listdir ( workDirectory+"/screenshots"):
zip.write(workDirectory+"/screenshots/"+fileName,
dirName+"/"+"screenshots/"+fileName,zipfile.ZIP_DEFLATED)
zip.close()
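    # The archive mirrors the session folder, e.g. (illustrative names):
    #   2011-09-21-10h-05m-30s-amphi1.zip
    #     +- 2011-.../enregistrement-micro.mp3, timecode.csv, cours.smil, recording.html, ...
    #     +- 2011-.../screenshots/D1.jpg, D1-thumb.jpg, D2.jpg, ...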
def confirmPublish(folder=''):
"""
Publish the recording when hitting the 'publish' button
"""
global id,entryTitle,entryDescription,entryTraining,workDirectoryToPublish, dirNameToPublish,loginENT
global emailENT,pathData, last_session_publish_order, last_session_publish_problem, standalone
idtosend=id
print "sockedEnabled: ", socketEnabled, "id: ",id
last_session_publish_order=getTime()
if socketEnabled==True and id != "": # useful for remote order only (doesn't go through publish())
print "=> Creating Zip file (no publishing form, distant order)"
workDirectoryToPublish=workDirectory # pathData + dirName
dirNameToPublish=dirName
#if 1:
try:
createZip()
#if 0:
except:
print "Warning! couldn't create zip file!"
id="" # if confirmPublsih fails id is back to ""
if sys.platform=='win32': # gives memory leak on OS X see what can be done ??
frameEnd.statusBar.SetStatusText(" Publication en cours, merci de patienter ...")
# !!!! Test: changing dirName and workDirectory to dirNameToPublish and workDirectoryToPublish
# to avoid conflicts when publishing and recording a new file straight away
if dirNameToPublish =="":
frameEnd.statusBar.SetStatusText("Rien a publier ...")
print ">>> before if dirNameToPublish != '' :"
if dirNameToPublish != "":
print ">>> after if dirNameToPublish != '' : and before writeInlogs"
writeInLogs("- Asked for publishing at "+ str(datetime.datetime.now())+\
" with id="+idtosend+" title="+title+" description="+description+" mediapath="+\
dirNameToPublish+".zip"+" prenom "+firstname+" name="+name+" genre="+genre+" ue="+ue+ " To server ="+urlserver+"\n")
print ">>> after writeInLogs and before if"
if 1:
#try:
# Send by ftp
print "Sending an FTP version..."
ftp = FTP(ftpUrl)
ftp.login(ftpLogin, ftpPass)
print "debut de ftp"
f = open(workDirectoryToPublish+".zip",'rb')# file to send
#f = open(pathData+"/"+dirName+".zip",'rb')# file to send
if folder=="canceled":
print "Trying to open cancel forlder"
ftp.cwd("canceled")
ftp.storbinary('STOR '+ dirNameToPublish+".zip", f) # Send the file
f.close() # Close file and FTP
ftp.quit()
print "fin de ftp"
if sys.platform=='win32':
if standalone == True:
frameEnd.Hide()
frameBegin.Show()
if 0:
#except:
print "!!! Something went wrong while sending the archive to the server !!!"
last_session_publish_problem=getTime()
if standalone == False:
start_new_thread(rss_warning_client_feed,())
writeInLogs("!!! Something went wrong while sending the Tar to the server at "\
+str(datetime.datetime.now())+" !!!\n")
frameEnd.statusBar.SetStatusText("Impossible d'ouvrir la connexion FTP")
# Information DialogBox
caption=_("!!! Publication ERROR !!!")
text=_("IMPOSSIBLE TO PUBLISH\
\nIs there an internet connection?\nIs the FTP port opened?")
dialog=wx.MessageDialog(None,message=text,caption=caption,
style=wx.OK|wx.ICON_INFORMATION)
dialog.ShowModal()
if folder=="":
#try:
if 1:
#Send data to the AudioCours server (submit form)
print "login ENT, emailENT >>>>>>>>> " ,loginENT, emailENT
if publishingForm==True:
urlParams="id="+idtosend+"&title="+title+"&description="+description+\
"&name="+name+"&firstname="+firstname+"&login="+loginENT+"&email="+emailENT+"&genre="+genre+"&ue="+ue+"&mediapath="+\
dirNameToPublish+".zip"
page = urlopen(urlserver,urlParams)
print "urlParams",urlParams
print "------ Response from Audiocours : -----"
serverAnswer= page.read() # Read/Check the result
print serverAnswer
if publishingForm==False:
if idtosend=="": idtosend="none"
#urlParams="mediapath="+dirNameToPublish+".zip"+"&id="+idtosend
urlParams="mediapath="+dirNameToPublish+".zip"
def launch():
print "params>>"+urlserver+"?"+urlParams
useBrowser(urlserver+"?"+urlParams)
#command='"c:\program files\internet explorer\iexplore" '+urlserver+'?'+urlParams
#print "commande URL= ", command
#os.system(command)
start_new_thread(launch,())
#except:
if 0:
print "Had problem while submitting the form"
# set the id variable to id="" again
idtosend= ""
print "setting entry fields back to empty"
entryTitle.SetValue("")
entryDescription.SetValue("")
entryLastname.SetValue("")
entryFirstname.SetValue("")
if formFormation=="": entryTraining.SetValue("")
frameEnd.statusBar.SetStatusText("---")
else:
print "Pas de publication: pas d'enregistrement effectue"
def rss_warning_client_feed(what=""):
"""
If a publication problem is encountered this code will attempt to write in an RSS feed on the server when the connection
is recovered. This function must be launched in a thread;
"""
global PathData
reporting_time_string=getTime() # time (string) at which the incident has been reported
reporting_time= time.time() # for seconds computation
t_span=172800 # total time during which to attempt server notification by RSS update (one day = 86400, 2 days= 172800)
t_interval=60 # time interval for each try in seconds (1 minute= 60 seconds)
print "Total time duration during which to attempt server notification (hours) =", str(t_span/3600.0)
print "Time interval for each try in seconds =",t_interval
def report(what=what):
        # RSS head, not used for now because we reuse the existing RSS file on the server
if what=="": what=" Publishing problem for "+ socket.gethostname()
rss_head="""<?xml version="1.0"?>
<rss version="2.0">
<channel>
<title>Audiovideocours RSS client warnings reports</title>
<link></link>
<description>This feed is updated when an audiovideocours client encounters a problem. </description>"""
item_new="""<item>
<title>"""+reporting_time_string+", "+what+"""</title>
<link>"""+"http://"+socket.gethostname()+"""</link>
<description>"""+reporting_time_string+", "+what+" IP:"+socket.gethostbyname(socket.gethostname())+"""</description>\n</item>"""
rss_tail="</channel></rss>"
# Retrieve RSS feed
print "Attempting to open FTP connection to server..."
# Retrieve server feed
ftp = FTP(ftpUrl)
ftp.login(ftpLogin, ftpPass)
ftp.cwd("releases")
# Checking if feed exists on server
filelist=[]
ftp.retrlines('LIST',filelist.append)
for f in filelist:
if "clients-warnings.xml" in f:
feedExists=True
break
else:
feedExists=False
if feedExists==False:
print "clients-warnings.xml d'ont exist on the server, creating a new feed"
content_new= rss_head+"\n"+item_new+"\n"+rss_tail
else:
print "clients-warnings.xml exists on the server, updating this feed"
gFile=open(pathData+"/clients-warnings.xml","wb")
ftp.retrbinary('RETR clients-warnings.xml',gFile.write)
gFile.close()
# Write new warning item in RSS field
f=open(pathData+"/clients-warnings.xml","r")
content_old=f.read()
#print content_old
f.close()
content_head=content_old.split("</description>",1)[0]+"</description>"
content_body=content_old.split("</description>",1)[1]
content_new= content_head+"\n"+item_new+content_body
#print content_new
# write new file
f=open(pathData+"/clients-warnings.xml","w")
f.write(content_new)
f.close()
# send file to FTP server
f=open(pathData+"/clients-warnings.xml","rb")
ftp.storbinary('STOR '+ "clients-warnings.xml", f) # Send the file
f.close()
# Close FTP session
ftp.close()
if 0: report(what)
if 1:
while (time.time()-reporting_time)<t_span:
try:
#print "time.time()-reporting_time =", time.time()-reporting_time," t_span =", t_span
print "Trying to update clients-warnings.xml on FTP server"
report(what)
print "Publishing should be ok, no exceptions encountered at publishing"
break
except:
print "Couldn't update clients-warnings.xml on FTP server, going to sleep for", str(t_interval),"seconds"
time.sleep(t_interval)
def LaunchSocketServer():
"""
Launch a socket server, listen to eventual orders
and decide what to do
"""
global id,recording,mySocket
print "Client is listening for socket order on port",str(portNumber)
mySocket = socket.socket ( socket.AF_INET, socket.SOCK_STREAM )
mySocket.bind ( ( '', portNumber ) )
mySocket.listen ( 1 )
while True:
channel, details = mySocket.accept()
print 'We have an opened connection with', details
writeInLogs('- We have an opened connection with '+str(details)+"\n")
received = channel.recv(100)
writeInLogs("- received = "+str(received)+"\n")
if received != "":
if received=="SHOW_AVC":
frameBegin.Show()
if received=="SHUTDOWN_PC" and recording==False:
os.system("shutdown -s -f")
if received=="VERSION":
channel.send ('v ' + __version__)
# search for an (id:xxxxx) pattern
iDbegin1= received.find("(id:")
iDbegin2= received.find("(title:")
iDbegin3= received.find("(description:")
iDend= received.find(")")
iDrecord= received.find("(order:record)")
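            # Examples of orders the parsing above recognises (message contents are illustrative):
            #   "(id:1234)"          -> store the recording id sent by the server
            #   "(title:My lecture)" -> store a title
            #   "(order:record)"     -> start recording immediately via recordNow()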
if (iDbegin1 > -1)and (iDend > -1):
id=received[(iDbegin1+4):iDend]
print "received ID number ", id
if recording==False:
channel.send ( 'Received ID' + str(id))
windowBack(frameBegin)
if recording==True:
channel.send ( 'Received ID' + str(id)+" !!! already recording !!!")
print "Already recording"
if 0:
caption="Message Univ-R Debut"
text="Veulliez appuyer sur le bouton audiovideocours\
\ndu clavier de commande de la salle."
dialog=wx.MessageDialog(None,message=text,caption=caption,
style=wx.OK|wx.CANCEL|wx.ICON_INFORMATION)
if dialog.ShowModal() == wx.ID_OK:
print "user clicked on OK in the Univ-r dialog"
else:
print "user canceled the Univ-R dialg"
print "putting id back to empty"
id=""
if (iDbegin2 > -1)and (iDend > -1):
title=received[(iDbegin2+6):iDend]
print "received title ", title
if (iDbegin3 > -1)and (iDend > -1):
description=received[(iDbegin3+12):iDend]
print "received description ", description
if (iDrecord>-1):
print "Reading now ..."
recordNow()
channel.close()
def windowBack(frame,windowTitle="Attention"):
"""
    Show the frame back in view
"""
global tryFocus, recording
print "windowTitle target= ", windowTitle
tryFocus=True
frame.Show()
#time.sleep(0.5)
frame.Show()
def comeBack():
print "-",
appAuto = application.Application()
#appAuto.connect_(handle = findwindows.find_windows(title = "Attention")[0])
appAuto.connect_(handle = findwindows.find_windows(title = windowTitle)[0])
appAuto.Attention.SetFocus()
appAuto.Attention.Restore()
for i in range(5):
#print "Try set focus"
try:
comeBack()
time.sleep(0.1)
except:
pass
tryFocus=False
def setupHooks():
"""
Setup hooks for Windows OS
"""
hm = pyHook.HookManager () # create a hook manager
    hm.KeyDown = OnKeyboardEvent # watch for keyboard events
#hm.MouseAll = OnMouseEvent # watch for all mouse events
hm.MouseLeftDown = OnMouseEvent
hm.MouseWheel= OnMouseEvent
hm.HookKeyboard() # set the hook
hm.HookMouse() # set the hook
def setupHooksLinux():
"""
Setup hooks for Linux OS
"""
print "In linux pyxhook now ..."
global hm
hm = HookManager()
hm.HookKeyboard()
hm.HookMouse()
#hm.KeyDown = toto
hm.KeyDown = OnKeyboardEvent
#hm.KeyUp = hm.printevent
# hm.MouseAllButtonsDown = hm.printevent
hm.MouseAllButtonsDown = OnMouseEvent
#hm.MouseAllButtonsUp = hm.printevent
hm.start()
#time.sleep(10)
#hm.cancel()
def toto(event):
""" A test for hook events callback """
print dir(event) # what is possible
print event.Key
def writeInLogs(what):
"""
    Write events in a log file (one per month)
"""
global logFile,pathData
yearMonth=str(datetime.datetime.now())
yearMonth=yearMonth[:7]
#logFile = open ("log-"+yearMonth+".txt","a")
logFile = open (pathData+"/log-audiovideocours-"+yearMonth+".txt","a")
#logFile = open (os.environ["USERPROFILE"]+"/audiovideocours/log-audiovideocours-"+yearMonth+".txt","a")
logFile.write(what)
logFile.close()
def writeStack():
"""
    Write current exception stack with date stamp in the data folder for further analysis (file: errlog.txt)
"""
f=open(pathData+"/errlog.txt","a")
f.write("\n"+str(datetime.datetime.now())+"\n")
f.close()
traceback.print_exc(file=open(pathData+"/errlog.txt","a"))
def kill_if_double():
"""
Kill an eventual running instance of mediacours.exe
"""
try:
print "Trying to kill an eventual running instance of mediacours.exe."
PID_f=open(os.environ["USERPROFILE"]+"/PID_mediacours.txt",'r')
PID=PID_f.readline()
#print "PID of mediacours is ",PID
#print "Killing PID ",PID
os.popen("tskill "+PID)
except:
print "Passed kill_if_double"
def shutdownPC_if_noactivity():
"""
This function must reside in a thread and be launched at startup
"""
    global lastGlobalEvent # last event time given by the KB and mouse hooks
tempoCheck=5 # time interval in seconds for each check
    noactivityMax=30 # time threshold in seconds over which the PC will ...
#... shutdown if no activity is present
while 1:
print "*",
time.sleep(tempoCheck)
if ((time.time()-lastGlobalEvent)>noactivityMax) and (recording==False):
print "No activity over "+str(noactivityMax)+" s => shutdown the PC"
#os.system("shutdown -s -f")
print 'would do: os.system("shutdown -s -f")'
def htmlGen():
""" Genereate html version for playback"""
global workDirectory, usage
f=open(workDirectory+"/timecode.csv")
diaTime=f.read().split("\n")[:-2]
f.close()
diaArray="("
for d in diaTime:
diaArray+= d+","
diaArray=diaArray[:-1]+")"
if usage=="audio":
media="enregistrement-micro.mp3"
if sys.platform=="darwin":
media="enregistrement-micro.flv"
playerHeight="20"
delayMediaSlides=0
else:
media="../enregistrement-video.flv"
playerHeight="250"
delayMediaSlides=-3
title=workDirectory.split("/")[-1]
htmlVars="// --- Variable generated from script\n// timecode of slides for this recording\n"\
+"var timecode=new Array"+diaArray+";\n"\
+"var media='"+media+"';\nvar playerHeight='"+playerHeight+"';//Give height='20' for audio and '200' for video\n"\
+"var title='"+title+"';\n"+"// ---"
file=open(workDirectory+"/recording.html",'w')
file.write(htmlBits.head)
file.write(htmlVars)
if sys.platform=="darwin":
message="<blink><p> Mac: Ecoute audio possible que via publication sur le serveur</p></blink>"
else:
message=""
file.write(htmlBits.tail(delayMediaSlides=delayMediaSlides,message=message))
file.close()
## copy third party script in a "thirdparty" folder
os.mkdir(workDirectory+"/thirdparty")
shutil.copyfile("thirdparty/player.swf",workDirectory+"/thirdparty/player.swf")
shutil.copyfile("thirdparty/swfobject.js",workDirectory+"/thirdparty/swfobject.js")
shutil.copyfile("thirdparty/JSFX_ImageZoom.js",workDirectory+"/thirdparty/JSFX_ImageZoom.js")
shutil.copyfile("thirdparty/README.txt",workDirectory+"/thirdparty/README.txt")
def useBrowser(what=""):
""" Defining the browser to use for 'what' content"""
print "In useBrowser function"
if 1:
print ">>> asking to open browser and publication form with what=", what
webbrowser.open_new(what)
if 0:
if os.path.isfile("c:/program files (x86)/internet explorer/iexplore.exe") == True:
print "useBrowser =","c:/program files (x86)/internet explorer/iexplore.exe"
useBrowser='"c:/program files (x86)/internet explorer/iexplore.exe"'
else:
print "useBrowser =","c:/program files/internet explorer/iexplore.exe"
useBrowser='"c:/program files/internet explorer/iexplore.exe"'
subprocess.Popen(useBrowser+" "+what) # console should be hidden
#os.system(useBrowser+" "+what)
#################################################################################################################
class SerialHook:
"""
A driver (soft hook) to an optional RS-232 keyboard used for amphi automation
"""
def __init__(self):
""" Open the serial port and initialize the serial keyboard"""
print "Opening the serial port of the serial keyboard"
self.ser = serial.Serial(int(keyboardPort))
print "Setting DTR to level 1 : +12 Volts"
self.ser.setDTR(level=1) #set DTR line to specified logic level
self.kb1=self.ser.getCD()
print "Initial kb1= ",self.kb1
self.kb2=self.ser.getDSR()
print "Initial kb2= ",self.kb2
self.kb3=self.ser.getCTS()
print "Initial kb3= ",self.kb3
self.kb4=self.ser.getRI()
print "Initial kb4= ",self.kb4
def listen(self,delta=0.001):
""" Reads the state of the Kb at each delta """
print "Entering listen loop ..."
while 1:
if (self.ser.getCD()!=self.kb1) and (self.ser.getCD()==True):
self.kb1=True
print "kb1 True"
windowBack(frameBegin)
if (self.ser.getCD()!=self.kb1) and (self.ser.getCD()==False):
self.kb1=False
print "kb1 False"
if recording== True:
print "id:",id,"(optional id for this recording)"
if id=="":
windowBack(frameEnd)
recordStop()
else:
recordStop()
frameUnivr.Show()
if recording==False:
frameBegin.Hide()
if (self.ser.getDSR()!=self.kb2) and (self.ser.getDSR()==True):
self.kb2=True
print "kb2 True"
if (self.ser.getDSR()!=self.kb2) and (self.ser.getDSR()==False):
self.kb2=False
print "kb2 False"
if (self.ser.getCTS()!=self.kb3) and (self.ser.getCTS()==True):
self.kb3=True
print "kb3 True"
if videoprojectorInstalled==True:
videoprojector.projOn()
print "Send order: videoprojector ON"
if (self.ser.getCTS()!=self.kb3) and (self.ser.getCTS()==False):
self.kb3=False
print "kb3 False"
if (self.ser.getRI()!=self.kb4) and (self.ser.getRI()==True):
self.kb4=True
print "kb4 True"
if videoprojectorInstalled==True:
videoprojector.projOff()
print "Send order: videoprojector OFF"
if (self.ser.getRI()!=self.kb4) and (self.ser.getRI()==False):
self.kb4=False
print "kb4 False"
time.sleep(delta)
class AMX:
"""
A class to read the optional 'capture ON/OFF' orders from the AMX
keyboard in many UDS amphitheatres and start the recording through it.
"""
def __init__(self):
self.ser = serial.Serial(int(keyboardPort))
print "AMX keyboard init"
def listen(self,frameEnd, frameBegin, tryFocus):
""" Listen to the AMX keyboard and decide what to do."""
while 1:
time.sleep(0.3)
inWaiting= self.ser.inWaiting()
if inWaiting>= 4:
print "inWaiting =", inWaiting
readBuffer= self.ser.read(inWaiting)
print readBuffer
if readBuffer == "START":
screenshot()
if recording==False and tryFocus==False:
windowBack(frameBegin)
if readBuffer == "STOP":
screenshot()
if recording==True and tryFocus==False:
windowBack(frameEnd)
recordStop()
class Videoprojector:
"""
A class to control a videoprojector through RS232
"""
def __init__(self):
""" Open the serial port of the videoprojector"""
print "Initiating videoprojector object..."
#print "Opening serial port of the videoprojector"
#self.ser = serial.Serial(videoprojectorPort)
def projOn(self):
"""Send the 'switch on' command to the videoprojector"""
self.ser = serial.Serial(videoprojectorPort)
self.ser.write(videoProjON)
self.ser.close()
print "- sending "+videoProjON+" to port com "+str(videoprojectorPort)
def projOff(self):
"""Send the 'switch off' command to the videoprojector"""
self.ser = serial.Serial(videoprojectorPort)
self.ser.write(videoProjOFF)
self.ser.close()
print "- sending "+videoProjOFF+" to port com "+str(videoprojectorPort)
class SmilGen:
"""
A class to produce a SMIL file on the fly
"""
def __init__(self,usage,workDirectory):
""" Create the first part of the smil file """
self.smilBegin='<?xml version="1.0"?>\n'\
+'<!DOCTYPE smil PUBLIC "-//W3C//DTD SMIL 2.0//EN" "http://www.w3.org/2001/SMIL20/SMIL20.dtd">\n'\
+'<smil xmlns="http://www.w3.org/2001/SMIL20/Language">\n'\
+'<head>\n'\
+'<meta name="title" content="MediaCours"/>\n'\
+'<meta name="author" content="ULP Multimedia"/>\n'\
+'<meta name="copyright" content="Copyright ULP Multimedia"/>\n'
if usage=="audio":
self.smilLayout='<layout>\n'\
+'<root-layout width="1024" height="768"/>\n'\
+'<region id="Images" width="1024" height="768" fit="meet" />\n'\
+'</layout>\n'\
+'</head>\n'\
+'<body>\n'\
+'<par>\n'
if usage=="video":
self.smilLayout="""<layout>
<root-layout width="800" height="600"/>
<region id="Images" left="0" width="800" height="600" fit="meet" />
<topLayout width="320" height="240">
<region id="Video" left="0" width="320" height="240" fit="meet"/>
</topLayout>
</layout>
</head>
<body>
<par>"""
self.smilFile=open (workDirectory +'/cours.smil','w')
self.smilFile.write(self.smilBegin)
self.smilFile.write(self.smilLayout)
def smilEvent(self,timeStamp,diaId):
"""
        When a screenshot occurs => write the event in the SMIL
and link it to the screenshot in the screenshots folder
Parameter:
timeStamp: a time stamp for the event (number of seconds since the
begining of the recording)
"""
self.smilFile.write('<a href="screenshots/D'+str(diaId)+'.jpg" external="true">\n'\
+ '<img begin="'+timeStamp+'" region="Images" src="screenshots/D'\
+ str(diaId)+'.jpg"/> </a>\n')
def smilEnd(self,usage,videoEncoder="real"):
"""
Write the end part of the SMIL file
Parameters:
usage= "audio" or "video"
videoEncoder= "real" or "wmv"
"""
if usage=="audio":
self.smilFile.write('<audio src="enregistrement-micro.mp3" />\
\n</par>\n</body>\n</smil>')
if usage=="video":
if videoEncoder=="real":
self.smilFile.write('<video region="Video"' \
+ ' src="enregistrement-video.rm" />\n'\
+'</par>\n'\
+'</body>\n'\
+'</smil>')
if videoEncoder=="wmv":
self.smilFile.write('<video region="Video" '\
+' src="enregistrement-video.wmv" />\n'\
+'</par>\n'\
+'</body>\n'\
+'</smil>')
if videoEncoder=="flash":
self.smilFile.write('<video region="Video" '\
+' src="enregistrement-video.flv" />\n'\
+'</par>\n'\
+'</body>\n'\
+'</smil>')
self.smilFile.close()
class BeginFrame(wx.Frame):
"""
A begining frame to warn the user he will begin to record
"""
def __init__(self, parent, title):
global liveFeed
#self.usage="audio"
self.usage=usage
"""Create the warning window"""
wx.Frame.__init__(self, parent, -1, title,
pos=(150, 150), size=(500, 400),
style=wx.DEFAULT_FRAME_STYLE ^ (wx.CLOSE_BOX|wx.RESIZE_BORDER|wx.MAXIMIZE_BOX))
favicon = wx.Icon('images/audiocours1.ico', wx.BITMAP_TYPE_ICO, 16, 16)
statusMessage= " AudioVideoCours Version "+__version__
self.statusBar=self.CreateStatusBar()
self.statusBar.SetStatusText(statusMessage)
wx.Frame.SetIcon(self, favicon)
panel=wx.Panel(self)
if sys.platform=="linux2":
panel.SetBackgroundColour("orange")
if standalone==True:
menubar=wx.MenuBar()
menuInformation=wx.Menu()
if sys.platform=="darwin":
menubar.Append(menuInformation,"Audiovideocours-Informations")
else:
menubar.Append(menuInformation,"Informations")
help=menuInformation.Append(wx.NewId(),_("Help"))
conf=menuInformation.Append(wx.NewId(),_("Configuration"))
version=menuInformation.Append(wx.NewId(),"Version")
self.Bind(wx.EVT_MENU,self.help,help)
self.Bind(wx.EVT_MENU,self.about,version)
self.Bind(wx.EVT_MENU,self.configuration,conf)
#self.SetMenuBar
if audioVideoChoice==True:
radio1=wx.RadioButton(panel,-1,"audio")
radio2=wx.RadioButton(panel,-1,"video")
if usage=="video":
radio2.SetValue(True)
if usage=="audio":
radio1.SetValue(True)
def onRadio(evt):
radioSelected=evt.GetEventObject()
self.usage=radioSelected.GetLabel()
print "Usage selected (audio or video):",self.usage
for eachRadio in [radio1,radio2]:
self.Bind(wx.EVT_RADIOBUTTON ,onRadio,eachRadio)
if 1:
im1 = wx.Image('images/ban1.jpg', wx.BITMAP_TYPE_ANY).ConvertToBitmap()
text1="\n\t"+\
_("By pressing the ' Record ! ' button, the recording will ")+"\n\t"+\
_("begin immediately and this window will disappear. ")
if serialKeyboard==False:
text1=text1+"\n\n\t"+\
_("To stop the recording press the following key: ")+stopKey+\
". "
text = wx.StaticText(panel, -1, text1,size=(420,100),style=wx.LEFT)
if sys.platform =="linux2":
text.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL,wx.NORMAL, False,"MS Sans Serif"))
else:
text.SetFont(wx.Font(11, wx.DEFAULT, wx.NORMAL,wx.NORMAL, False,"MS Sans Serif"))
if sys.platform=="linux2":
text.SetBackgroundColour("orange") # warning doesn't work on linux/GTK
text.SetForegroundColour("white")
else:
text.SetBackgroundColour("steel blue")
text.SetForegroundColour("white")
if 0: # for dev, what fonts are available on the system
e=wx.FontEnumerator()
e.EnumerateFacenames()
fontList=e.GetFacenames()
print fontList
if liveCheckBox==True:
liveFeed=wx.CheckBox(panel,-1,_("Live streaming"),)
btnRecord = wx.Button(parent=panel, id=-1, label=_("Record!"))
btnStopRecord=wx.Button(parent=panel, id=-1, label="Stop")
if sys.platform=="linux2": btnRecord.SetFont(wx.Font(9, wx.DEFAULT, wx.NORMAL,wx.NORMAL, False,"MS Sans Serif"))
if standalone == True:
btnNext = wx.Button(parent=panel, id=-1, label=_("Other choices"),size=(100,50))
btnQuit = wx.Button(parent=panel, id=-1, label=_("Quit"),size=(100,50))
if sys.platform=="linux2": btnNext.SetFont(wx.Font(9, wx.DEFAULT, wx.NORMAL,wx.NORMAL, False,"MS Sans Serif"))
if sys.platform=="linux2": btnQuit.SetFont(wx.Font(9, wx.DEFAULT, wx.NORMAL,wx.NORMAL, False,"MS Sans Serif"))
sizerV = wx.BoxSizer(wx.VERTICAL)
sizerH=wx.BoxSizer()
sizerV.Add(wx.StaticBitmap(panel, -1, im1, (5, 5)), 0, wx.ALIGN_CENTER|wx.ALL, 0)
sizerV.Add(text, 0, wx.ALIGN_CENTER|wx.ALL, 10)
if audioVideoChoice==True:
sizerH2=wx.BoxSizer()
sizerH2.Add(radio1,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=2)
sizerH2.Add(radio2,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=2)
sizerV.Add(sizerH2, 0, wx.ALIGN_CENTER|wx.ALL, 10)
if liveCheckBox==True:
sizerV.Add(liveFeed, 0, wx.ALIGN_CENTER|wx.ALL, 2)
sizerV.Add(sizerH, 0, wx.ALIGN_CENTER|wx.ALL, 10)
sizerH.Add(btnRecord, 0, wx.ALIGN_CENTER|wx.ALL,0)
sizerH.Add(btnStopRecord, 0, wx.ALIGN_CENTER|wx.ALL, 0)
if standalone == True:
sizerH.Add(btnNext, 0, wx.ALIGN_CENTER|wx.ALL, 10)
sizerH.Add(btnQuit, 0, wx.ALIGN_CENTER|wx.ALL, 10)
panel.SetSizer(sizerV)
panel.Layout()
# bind the button events to handlers
self.Bind(wx.EVT_BUTTON, self.engageRecording, btnRecord)
self.Bind(wx.EVT_BUTTON, self.stopRecording, btnStopRecord)
if standalone == True:
self.Bind(wx.EVT_BUTTON, self.SkiptoEndingFrame, btnNext)
self.Bind(wx.EVT_BUTTON, self.exitApp, btnQuit)
if standalone==True:
self.SetMenuBar(menubar)
def about(self,evt):
"""An about message dialog"""
text="AudioVideoCours version "+__version__+" \n\n"\
+_("Website:")+"\n\n"+\
"http://audiovideocours.u-strasbg.fr/"+"\n\n"\
+"(c) UDS 2007-2011"
dialog=wx.MessageDialog(self,message=text,
style=wx.OK|wx.CANCEL|wx.ICON_INFORMATION)
dialog.ShowModal()
def help(self,evt):
""" A function to provide help on how to use the software"""
global pathData
def launch():
print "I'm in launch in help"
try:
useBrowser(what="http://sites.google.com/site/audiovideocours/")
#subprocess.Popen([r'C:\Program Files\Internet Explorer\iexplore.exe',os.environ["USERPROFILE"]+"/audiovideocours/Aide_AudioCours_StandAlone.url"])
except:
print "Couldn't open or find Aide_AudioCours_StandAlone.url"
start_new_thread(launch,())
def configuration(self,evt):
""" A fucntion to open the configuration file"""
def launch():
if sys.platform=="win32":
subprocess.Popen([r'C:\Windows\System32\notepad.exe',os.environ["ALLUSERSPROFILE"]+"\\audiovideocours\\mediacours.conf"])
if sys.platform=="linux2":
#subprocess.Popen(["gedit","~/audiovideocours/mediacours.conf"])
os.popen("gedit ~/audiovideocours/mediacours.conf")
if sys.platform=="darwin":
os.popen("open -t mediacours.conf")
start_new_thread(launch,())
def exitApp(self,evt):
"""A function to quit the app"""
print "exit"
print "trying to close an eventual opened socket"
if 0:
try:
mySocket.close()
except:
pass
if 0:
exitAVC()
#sys.exit()
quit()
def SkiptoEndingFrame(self,evt):
"""Skip to Ending frame without recording"""
frameBegin.Hide()
frameEnd.Show()
def engageRecording(self,evt):
"""Confirms and engage recording"""
if sys.platform=="linux2": setupHooksLinux()
global live, recording
if recording==False:
if liveCheckBox==True:
live=liveFeed.GetValue()
if tryFocus==False and recording==False:
start_new_thread(recordNow,())
if 0:
self.Hide()
if 1:
self.Iconize( True )
def stopRecording(self,evt):
""" Stops recording from the stop button"""
self.Iconize( True )
if recording==True:
stopFromKBhook()
class EndingFrame(wx.Frame):
"""
An ending frame which also enable to publish the recordings on a webserver
"""
def __init__(self, parent, title):
"""Create the ending frame"""
global entryTitle,entryDescription,entryTraining,entryLastname,entryFirstname,entryCode,btnPublish\
,btnCancel,loginENT,emailENT,entryLoginENT,entryLoginENT,entryEmail,textWeb,dirName,workDirectory
windowXsize=500
windowYsize=650
fieldSize=420
if standalone==True:
windowXsize=500
fieldSize=420
if publishingForm==False:
windowYsize=250
wx.Frame.__init__(self, parent, -1, title,
pos=(150, 150), size=(windowXsize, windowYsize),
style=wx.DEFAULT_FRAME_STYLE ^ (wx.CLOSE_BOX|wx.RESIZE_BORDER|wx.MAXIMIZE_BOX))
favicon = wx.Icon('images/audiocours1.ico', wx.BITMAP_TYPE_ICO, 16, 16)
wx.Frame.SetIcon(self, favicon)
# Status bar
self.statusBar=self.CreateStatusBar()
self.statusBar.SetStatusText(_("Status bar"))
panel=wx.Panel(self)
#panel.SetBackgroundColour((244,180,56))
panel.SetBackgroundColour("steel blue")
logos = wx.Image('images/ban1.jpg', wx.BITMAP_TYPE_ANY).ConvertToBitmap()
textTitle = wx.StaticText(panel, -1, _("Title:"),size=(400,-1),style=wx.ALIGN_CENTER)
entryTitle = wx.TextCtrl(panel, -1,"", size=(fieldSize, -1))
textDescription = wx.StaticText(panel, -1, _("Eventual description:"),
size=(400,-1),style=wx.ALIGN_CENTER)
entryDescription = wx.TextCtrl(panel, -1,"", size=(fieldSize, -1))
textLastname= wx.StaticText(panel, -1, _("Name:"),size=(400,-1),style=wx.ALIGN_CENTER)
entryLastname = wx.TextCtrl(panel, -1,"", size=(fieldSize, -1))
textFirstname= wx.StaticText(panel, -1, _("First name:"),size=(400,-1),style=wx.ALIGN_CENTER)
entryFirstname = wx.TextCtrl(panel, -1,"", size=(fieldSize, -1))
textTraining = wx.StaticText(panel, -1, _("Degree:"),size=(400,-1),style=wx.ALIGN_CENTER)
entryTraining = wx.TextCtrl(panel,-1,"", size=(fieldSize, -1))
entryTraining.SetValue(formFormation)
textLoginENT=wx.StaticText(panel,-1, _("Identifiant ENT UDS:"),size=(400,-1),style=wx.ALIGN_CENTER)
entryLoginENT = wx.TextCtrl(panel,-1,"", size=(fieldSize, -1))
textEmail=wx.StaticText(panel,-1, _("Email :"),size=(400,-1),style=wx.ALIGN_CENTER)
entryEmail = wx.TextCtrl(panel,-1,"", size=(fieldSize, -1))
textCode=wx.StaticText(panel,-1, _("Access Code if you wish to set a limited access:"),
size=(400,-1),style=wx.ALIGN_CENTER)
entryCode = wx.TextCtrl(panel,-1,"", size=(fieldSize, -1))
if publishingForm==True:
linkWebsite=hl.HyperLinkCtrl(panel, wx.ID_ANY, (_("Access to")+" audiovideocours.u-strasbg.fr"),
URL="http://audiovideocours.u-strasbg.fr/",size=(300,-1),style=wx.ALIGN_CENTER)
linkWebsite.SetFont(wx.Font(11, wx.SWISS, wx.NORMAL,wx.NORMAL, False,'Arial'))
linkWebsite.SetForegroundColour("white")
linkWebsite.SetColours("white", "white", "white")
textWeb=wx.StaticText(panel,-1, _("Pour publier sur le serveur, cliquez sur 'Publier' et remplissez \nle forumlaire dans le navigateur qui se lancera."),size=(400,-1),style=wx.ALIGN_CENTER)
textWeb.SetForegroundColour("white")
if sys.platform=="linux2": textWeb.SetFont(wx.Font(9, wx.DEFAULT, wx.NORMAL,wx.NORMAL, False,"MS Sans Serif"))
for label in [textTitle,textDescription,textLastname,textFirstname,textTraining,textCode,
textLoginENT,textEmail]:
label.SetForegroundColour("white")
label.SetFont(wx.Font(11, wx.DEFAULT, wx.NORMAL,wx.BOLD, False,"MS Sans Serif"))
"""
for entry in [entryTitle,entryDescription,entryLastname,entryFirstname,entryTraining,entryCode]:
#entry.SetBackgroundColour((254,236,170))
#entry.SetBackgroundColour("light blue")
pass
"""
btnPublish = wx.Button(panel, -1, _("Publish!"),size=(130,50))
btnCancel=wx.Button(panel, -1, _("Cancel"),size=(70,50))
if standalone==True :
btnPreview=wx.Button(panel, -1, _("Read"),size=(70,50))
btnQuit=wx.Button(panel,-1,_("Quit"),size=(70,50))
btnOpen=wx.Button(panel,-1,_("Open"),size=(70,50))
hbox=wx.BoxSizer()
hbox.Add(btnPublish,proportion=0,flag=wx.RIGHT,border=5)
hbox.Add(btnCancel,proportion=0,flag=wx.RIGHT,border=5)
if standalone==True :
hbox.Add(btnPreview,proportion=0,flag=wx.RIGHT,border=5)
hbox.Add(btnOpen,proportion=0,flag=wx.RIGHT,border=5)
hbox.Add(btnQuit,proportion=0,flag=wx.RIGHT,border=5)
pad1=4
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(wx.StaticBitmap(panel, -1, logos, (5, 5)), 0, wx.ALIGN_CENTER|wx.ALL, 0)
if publishingForm==True:
sizer.Add(textTitle, 0, wx.ALIGN_CENTER|wx.ALL, pad1)
sizer.Add(entryTitle, 0, wx.ALIGN_CENTER|wx.ALL, pad1)
sizer.Add(textDescription, 0, wx.ALIGN_CENTER|wx.ALL, pad1)
sizer.Add(entryDescription, 0, wx.ALIGN_CENTER|wx.ALL, pad1)
sizer.Add(textLastname, 0, wx.ALIGN_CENTER|wx.ALL, pad1)
sizer.Add(entryLastname, 0, wx.ALIGN_CENTER|wx.ALL, pad1)
sizer.Add(textFirstname, 0, wx.ALIGN_CENTER|wx.ALL, pad1)
sizer.Add(entryFirstname, 0, wx.ALIGN_CENTER|wx.ALL, pad1)
sizer.Add(textTraining, 0, wx.ALIGN_CENTER|wx.ALL, pad1)
sizer.Add(entryTraining, 0, wx.ALIGN_CENTER|wx.ALL, pad1)
sizer.Add(textCode, 0, wx.ALIGN_CENTER|wx.ALL, pad1)
sizer.Add(entryCode, 0, wx.ALIGN_CENTER|wx.ALL, pad1)
sizer.Add(textLoginENT, 0, wx.ALIGN_CENTER|wx.ALL, pad1)
sizer.Add(entryLoginENT, 0, wx.ALIGN_CENTER|wx.ALL, pad1)
sizer.Add(textEmail, 0, wx.ALIGN_CENTER|wx.ALL, pad1)
sizer.Add(entryEmail, 0, wx.ALIGN_CENTER|wx.ALL, pad1)
if publishingForm==False:
for entry in [entryTitle,entryDescription,entryLastname,entryFirstname,entryTraining,entryCode,entryLoginENT,entryEmail]:
entry.Hide()
sizer.Add(hbox, 0, wx.ALIGN_CENTER|wx.ALL, 10)
if publishingForm==True:
sizer.Add(linkWebsite, 0, wx.ALIGN_CENTER|wx.ALL, 4)
if publishingForm==False:
sizer.Add(textWeb, 0, wx.ALIGN_CENTER|wx.ALL, 4)
panel.SetSizer(sizer)
panel.Layout() # what for ?
# bind the button events to handlers
self.Bind(wx.EVT_BUTTON, self.publish, btnPublish)
self.Bind(wx.EVT_BUTTON, self.exitPublish, btnCancel)
if standalone==True :
self.Bind(wx.EVT_BUTTON, self.readPreview, btnPreview)
self.Bind(wx.EVT_BUTTON, self.exitApp, btnQuit)
self.Bind(wx.EVT_BUTTON, self.openOldOne, btnOpen)
def openOldOne(self,evt):
"""
        Open an old recording for later playing or publishing
"""
        global workDirectory,dirName,previewPlayer,pathData
selectPreview=wx.FileDialog(self)
print "pathData=",pathData
if pathData=="":
            pathData = os.getcwd()
selectPreview.SetDirectory(pathData)
if previewPlayer=="realplayer":
print "Showing smil file for selection"
selectPreview.SetWildcard("*.smil")
else:
print "Showing html file for selection"
selectPreview.SetWildcard("*.html")
selectPreview.ShowModal()
toPlay= selectPreview.GetFilenames()
dirName=os.path.basename(selectPreview.GetDirectory())
print "dirName=",dirName
workDirectory=selectPreview.GetDirectory()
frameEnd.statusBar.SetStatusText(_("Current recording ")+workDirectory)
if len(toPlay)>0:
self.readPreview(self)
print workDirectory
def exitApp(self,evt):
"""A function to quit the app"""
print "time to exit"
#frameEnd.Close(True)
#frameBegin.Close(True)
exitAVC()
def exitPublish(self,evt):
"""Don't publich the recordings on the webserver"""
global dirNameToPublish, workDirectoryToPublish
frameBegin.Iconize( False )
writeInLogs("- 'Cancel' button pressed at"+ \
str(datetime.datetime.now())+"\n")
folder="canceled"
if tryFocus==False:
self.Hide()
if standalone == True:
frameBegin.Show()
if standalone==False:
global entryTitle,entryDescription,entryTraining,entryLastname,entryFirstname
global title,description,firstname,name,ue,login,loginENT,emailENT
title= entryTitle.GetValue()
description=entryDescription.GetValue()
name=entryLastname.GetValue()
firstname=entryFirstname.GetValue()
ue=entryTraining.GetValue()
genre=entryCode.GetValue()
loginENT=entryLoginENT.GetValue()
emailENT=entryEmail.GetValue()
#login=entry3.GetValue()
print "tile: ",title
print "description: ",description
print "name: ",name
print "ue: ",ue
print "prenom: ",firstname
print "Creating .zip file..."
createZip()
print "Zip file created"
workDirectoryToPublish=workDirectory
dirNameToPublish=dirName
start_new_thread(confirmPublish,(folder,))
entryTitle.SetValue("")
entryDescription.SetValue("")
entryLastname.SetValue("")
entryFirstname.SetValue("")
entryLoginENT.SetValue("")
entryEmail.SetValue("")
if formFormation=="": entryTraining.SetValue("")
def readPreview(self,evt):
"""Read the smil 'recording' file (audio or video + screenshots)"""
def readSmilNow():
if dirName =="":
frameEnd.statusBar.SetStatusText(_("Nothing to read."))
if workDirectory!="":
#os.system("realplay.exe file://"+workDirectory+"/cours.smil") #also works
try:
subprocess.Popen(["realplay.exe", "file://"+workDirectory+"/cours.smil"])
except:
try:
# try to search for the common full path of realplay.exe
realplay="C:\\Program"+" "+"Files\\Real\\RealPlayer\\realplay.exe"
subprocess.Popen([realplay, "file://"+workDirectory+"/cours.smil"])
except:
caption="Audiovideocours Error Message"
text="RealPlayer software not found"
dialog=wx.MessageDialog(None,message=text,caption=caption,
style=wx.OK|wx.ICON_INFORMATION)
dialog.ShowModal()
def readLocalWebPreview():
""" Read a local web preview of the recording using the integrated cherrypy server"""
if dirName =="":
frameEnd.statusBar.SetStatusText(_("Nothing to read."))
if workDirectory!="":
print "Attempting to read web preview"
useBrowser("http://localhost:"+str(remotePort)+"/"+dirName+"/recording.html")
if previewPlayer=="realplayer":
start_new_thread(readSmilNow,())
else:
start_new_thread(readLocalWebPreview,())
def publish(self,evt):
"""Publish the recording on the website"""
global workDirectoryToPublish, dirNameToPublish,loginENT,emailENT
writeInLogs("- 'Publish' button pressed at"+ \
str(datetime.datetime.now())+"\n")
workDirectoryToPublish=workDirectory
dirNameToPublish=dirName
print "dirNameToPublish =",dirNameToPublish
print "workDirectoryToPublish =",workDirectoryToPublish
if tryFocus==False:
global title,description,name,firstname, ue,genre
title= entryTitle.GetValue()
description=entryDescription.GetValue()
name=entryLastname.GetValue()
firstname=entryFirstname.GetValue()
ue=entryTraining.GetValue()
genre=entryCode.GetValue()
loginENT=entryLoginENT.GetValue()
emailENT=entryEmail.GetValue()
print "tile : ",title
print "description: ",description
print "name: ",name
print "prenom: ",firstname
print "ue: ",ue
print "login ENT",loginENT
print "email ENT",emailENT
entryTitle.SetValue("")
entryDescription.SetValue("")
entryLastname.SetValue("")
entryFirstname.SetValue("")
entryCode.SetValue("")
entryLoginENT.SetValue("")
entryEmail.SetValue("")
if formFormation=="": entryTraining.SetValue("")
print "Creating .zip file..."
btnPublish.Enable(False)
btnCancel.Enable(False)
try:
createZip()
except:
print "Warning! couldn't create zip file!"
print "If no above warning, Zip file created"
btnPublish.Enable(True)
btnCancel.Enable(True)
if standalone !=True:
self.Hide()
start_new_thread(confirmPublish,())
class univrEndFrame(wx.Frame):
"""
Optional: used when receiving an external order to stop a recording (from server)
"""
def __init__(self, parent, title):
global liveFeed
"""Create the warning window"""
wx.Frame.__init__(self, parent, -1, title,
pos=(150, 150), size=(300,110),
style=wx.DEFAULT_FRAME_STYLE)
panel=wx.Panel(self)
#panel.SetBackgroundColour("white")
text = wx.StaticText(panel, -1, "Enregistrement transmis pour Univ-r\net audiovideocours.u-strasbg.fr")
okButton=wx.Button(panel,size=(-1,-1),label="Fermer")
self.Bind(wx.EVT_BUTTON, self.hideNow,okButton)
vbox=wx.BoxSizer(wx.VERTICAL)
vbox.Add(text,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=5)
vbox.Add(okButton,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=5)
panel.SetSizer(vbox)
def hideNow(self,evt):
""" Hide window"""
self.Hide()
def onEndSession(evt):
import winsound
winsound.PlaySound("waves\\exit.wav",winsound.SND_FILENAME)
writeInLogs("!!! RED ALERT: Windows Session is ending at "+ str(datetime.datetime.now())+" launching emergency procedures...")
class AVCremote:
""" Remote http form form informations and remote control """
global welcome, pathData,recording
print "Inside AVCremote"
def index(self):
print "Inside index in AVCremote"
global welcome
welcome="<p> ((( AudioVideoCours Client - Web Interface ))) </p> "
welcome+="Recording now: "+str(recording)+" <br><br>"
welcome+= "> client version : "+__version__+"</br>"
welcome+= "> app_startup_date : "+app_startup_date+"</br>"
welcome+= "> last_session_recording_start : "+last_session_recording_start+"</br>"
welcome+= "> last_session_recording_stop : "+last_session_recording_stop+"</br>"
welcome+= "> last_session_publish_order : "+last_session_publish_order+"</br>"
welcome+= "> last_session_publish_problem : "+last_session_publish_problem+"</br>"
welcome+= "> session data folder : "+pathData+"</br>"
welcome+="<br><br><br>"
# Ask for an order
return welcome+'''
<form action="getOrder" method="GET">
Command :
<input type="text" name="order" size="50" />
<input type="submit" />
</form>'''
index.exposed = True
def getOrder(self, order = None):
if order:
print "received order:", order
if order=="list":
print "trying to retrieve list for pathData", pathData
return fileList(folderPath=pathData)[0]
elif order=="help":
helpList="""
<p>Current available commands:</p>
help -> returns a list of available commands.<br>
list -> returns a list of folders and files in your current data folder.<br>
            recover:NameOfFolder -> zip the folder and FTP it to the server. <br>
start -> start recording if not already recording. COMING SOON.<br>
stop -> Stop recording if recording currently. COMING SOON.<br>
"""
return helpList
elif order.find("recover:")>=0:
print "Attempting to recover an old recording folder..."
fileOrFolder=order.split("recover:")[1].strip()
recoverFeedback=recoverFileOrFolder(name=fileOrFolder, pathData=pathData, ftpUrl=ftpUrl, ftpLogin=ftpLogin, ftpPass=ftpPass)
return recoverFeedback
elif order=="start":
print "will start recording if not already recording"
elif order=="stop":
print "will stop recording if recording currently"
elif order.find("recover-f:")==True:
print "will do later"
else:
return welcome+"You have order: %s " % order
else:
if order is None:
# No name was specified
return 'Please enter your order <a href="./">here</a>.'
else:
return 'No, really, enter your order <a href="./">here</a>.'
getOrder.exposed = True
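# Hedged usage sketch (not part of the original client): with the integrated
# CherryPy server listening on remotePort, a remote order such as "list" could
# be issued from another machine roughly like this (the host and port below
# are illustrative assumptions):
#
#   import urllib
#   print urllib.urlopen("http://127.0.0.1:8080/getOrder?order=list").read()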
def goAVCremote(remPort=remotePort,pathData=pathData,hosts="127.0.0.1"):
"Create an instance of AVCremote"
print "Launch AVCremote thread"
global traceback,remotePort
cherrypy.config.update({'server.socket_host': hosts,
'server.socket_port': int(remPort),
'tools.staticdir.on': True,
#'tools.staticdir.dir': "C:\\",
'tools.staticdir.dir': pathData,
})
try:
print "trying to launch cherrypy at", hosts, remPort
cherrypy.quickstart(AVCremote())
except:
print "Hmm OS X doesn't come here in case of error?"
# My attempt to relaunch with another port number fail for now
# => display a dialog box to users in the meantime
print "!!! Couldn't launch integrated server at port "+str(remPort)+"!!!"
dialog=wx.MessageDialog(None,message="[French] Attention, le port 80 est deja occupe (Skype, serveur?), la lecture avant publication ne sera pas possible.\n\
Arretez l'application utilisant ce port ou changez le numero de port dans le fichier de configuration d'Audiovideocours.\n\n[English] Warning, port 80 is already used (Skype? server?), preview reading before publication won't be possible.\nStop the application using this port or change port number in configuration file",
caption="Port 80 non disponible, Port 80 busy", style=wx.OK|wx.ICON_INFORMATION)
dialog.ShowModal()
writeInLogs("\nCouldn't launch integrated server at port "+str(remPort)+"!!!")
writeStack()
writeInLogs("\nAttempting to launch server on redirected port 8080 now ")
"""
if 1:
time.sleep(5)
print "Trying port 8081 now..."
cherrypy.config.update({'server.socket_host': hosts,
'server.socket_port': 8081,
'tools.staticdir.on': True,
#'tools.staticdir.dir': "C:\\",
'tools.staticdir.dir': pathData,
})
remotePort=8081
remPort=remotePort
cherrypy.quickstart(AVCremote())
if 0:
print "!!! Couldn't launch integrated server at redirected 8080 port either !!!"
writeInLogs("\nCouldn't launch integrated server at redirected 8080 port either")
writeStack()
"""
def fileList(folderPath="."):
"return a list of the files in the data folder"
print "In def fileList folderPath=", folderPath
def get_dir_size(folder):
size = 0
for path, dirs, files in os.walk(folder):
for f in files:
size += os.path.getsize( os.path.join( path, f ) )
return str( round((float(size)/(1024*1024)),2) )+" Mo"
content= os.listdir(folderPath)
#print "Content list", content
files_list =[]
dirs_list=[]
for i in content:
if os.path.isfile(folderPath+"/"+i) == True:
files_list.append(i)
print os.path.getsize(folderPath+"/"+i)
elif os.path.isdir(folderPath+"/"+i) == True:
dirs_list.append(i)
print os.path.getsize(folderPath+"/"+i)
answer="<p>Listing of files and folders in "+folderPath+" </p>"
answer+="<p>Files:</p>"
print "Files list",files_list
print "Folders list", dirs_list
for index,name in enumerate(files_list):
size=os.path.getsize(folderPath+"/"+name)
answer+=str(index)+" - "+name+" - "+str(round(float(size)/1024,2))+" Ko <br>"
answer+="<p>Folders:</p>"
for index,name in enumerate(dirs_list):
answer+=str(index)+" - "+name+" - "+get_dir_size(folderPath+"/"+name)+"<br>"
print answer
return [answer,files_list,dirs_list]
def getTime():
"Returns current date/time in an appropriate string format"
time= str(datetime.datetime.now())[:-7]
return time
def recoverFileOrFolder(name,pathData, ftpUrl,ftpLogin,ftpPass):
"Recovering an folder back to the FTP server usualy following a remote order"
if os.path.isdir(pathData+"/"+name):
print "Creating Zip file of ...",name
workDirectory=pathData+"/"+name
zip = zipfile.ZipFile(pathData+"/"+name+".zip", 'w')
for fileName in os.listdir ( workDirectory ):
if os.path.isfile (workDirectory+"/"+fileName):
zip.write(workDirectory+"/"+fileName,
name+"/"+fileName,zipfile.ZIP_DEFLATED)
for fileName in os.listdir ( workDirectory+"/screenshots"):
zip.write(workDirectory+"/screenshots/"+fileName,
name+"/"+"screenshots/"+fileName,zipfile.ZIP_DEFLATED)
zip.close()
#writeInLogs("In recoverFolder, counldn't create ZIP file")
print "Opening FTP connection..."
ftp = FTP(ftpUrl)
ftp.login(ftpLogin, ftpPass)
f = open(workDirectory+".zip",'rb')
ftp.storbinary('STOR '+ name+".zip", f)
f.close()
ftp.quit()
print "FTP closed."
result="Folder "+name+" zipped and transfered through FTP to the FTP server."
return result
elif os.path.isfile(pathData+"/"+name):
ftp = FTP(ftpUrl)
ftp.login(ftpLogin, ftpPass)
f = open(pathData+"/"+name,'rb')
ftp.storbinary('STOR '+ name, f)
f.close()
ftp.quit()
print "FTP closed."
result="File "+name+" transfered through FTP to the FTP server."
return result
else:
result=name+ " is is not a folder or a file. No action taken."
return result
## Start app
if __name__=="__main__":
# Check if another instance is already launched and kill it if it exist
if sys.platform=="win32":
kill_if_double()
time.sleep(1)#delay to be sure serial port is free if just killed a double?
app_startup_date=getTime()
## GUI Define
app=wx.App(redirect=False)
# Create a default data audiovideocours folder if it doesn't exists
if sys.platform == 'win32':
if os.path.isdir(os.environ["USERPROFILE"]+"\\audiovideocours"):
print "Default user data exists at USERPROFILE\\audiovideocours : OK"
else:
print "Creating default data folter in USERPROFILE\\audiovideocours"
os.mkdir(os.environ["USERPROFILE"]+"\\audiovideocours")
if sys.platform == 'linux2':
if os.path.isdir(os.path.expanduser("~/audiovideocours")):
print "Default user data exists in ~/audiovideocours"
else:
print "Creating default data folter in ~/audiovideocours"
os.mkdir(os.path.expanduser("~/audiovideocours"))
if sys.platform == 'darwin':
if os.path.isdir(os.path.expanduser("~/audiovideocours-enregistrements")):
print "Default user data exists in ~/audiovideocours-enregistrements"
else:
print "Creating default data folter in ~/audiovideocours-enregistrements"
os.mkdir(os.path.expanduser("~/audiovideocours-enregistrements"))
confFileReport=""
# Check if a configuration file exist in USERPROFILE
# otherwise search for one in ALLUSERPROFILE
print "searching for a configuration file"
if sys.platform == 'win32':
if os.path.isfile(os.environ["USERPROFILE"]+"\\audiovideocours\\mediacours.conf"):
print "Found and using configuration file in USERPROFILE\\audiovideocours"
readConfFile(confFile=os.environ["USERPROFILE"]+"\\audiovideocours\\mediacours.conf")
elif os.path.isfile(os.environ["ALLUSERSPROFILE"]+"\\audiovideocours\\mediacours.conf"):
print "Found and using configuration file in ALLUSERSPROFILE\\audiovideocours"
readConfFile(confFile=os.environ["ALLUSERSPROFILE"]+"\\audiovideocours\\mediacours.conf")
else:
print "No configuration file found"
dialog=wx.MessageDialog(None,message="No configuration file found in either USERPROFILE or ALLUSERSPEOFILE",
caption="Audiovideocours Error Message", style=wx.OK|wx.ICON_INFORMATION)
dialog.ShowModal()
    if sys.platform in ('linux2', 'darwin'):
if os.path.isfile("mediacours.conf"):
print "Found and using configuration file in script folder ."
readConfFile(confFile=os.path.expanduser("mediacours.conf"))
elif os.path.isfile(os.path.expanduser("~/audiovideocours/mediacours.conf")):
print "Found and using configuration file in ~/audiovideocours"
readConfFile(confFile=os.path.expanduser("~/audiovideocours/mediacours.conf"))
else:
print "No configuration file found"
dialog=wx.MessageDialog(None,message="No configuration file found in ~/audiovideocours",
caption="Audiovideocours Error Message", style=wx.OK|wx.ICON_INFORMATION)
dialog.ShowModal()
    # Automatically detect IP of the recording place
recordingPlace=socket.gethostbyname(socket.gethostname()).replace(".","_")
#recordingPlace=socket.gethostbyaddr(socket.gethostname()) #gives also the litteral hostname (list)
print "... recordingPlace = ", recordingPlace
# Set-up language
if language=="French":
print "Setting French language..."
langFr = gettext.translation('mediacours', "./locale",languages=['fr'],codeset="iso-8859-1")
langFr.install(unicode=True)
if pathData == None or pathData=="":
#pathData=os.getcwd()
if sys.platform == 'win32':
pathData=os.environ["USERPROFILE"]+"\\audiovideocours"
        if sys.platform in ('darwin', 'linux2'):
pathData=os.path.expanduser("~/audiovideocours")
if sys.platform == 'darwin':
pathData=os.path.expanduser("~/audiovideocours-enregistrements")
print "pathData=None => PathData is now ", pathData
writeInLogs(confFileReport)
# Start-up message
print "AudioVideoCours client launched at ", datetime.datetime.now(), " ..."
writeInLogs("\nAudioVideoCours client launched at "+ \
str(datetime.datetime.now()))
writeInLogs("\npathData is "+pathData)
# Start socket server
if socketEnabled==True:
start_new_thread(LaunchSocketServer,())
# Set-up hooks
if sys.platform=="win32": setupHooks()
#if sys.platform=="linux2": setupHooksLinux()
if 0: # needs osxification ?
# Set-up videoprojector
if videoprojectorInstalled==True:
videoprojector=Videoprojector()
# start shutdown PC thread if no PC activity detected
if 0:
start_new_thread(shutdownPC_if_noactivity,())
# Write mediacours PID in a file (for use in the automatic updater)
PID_f=open(os.environ["USERPROFILE"]+"\\PID_mediacours.txt",'w')
PID_f.write(str(os.getpid()))
PID_f.close()
## GUI launch
#app=wx.App(redirect=False)
#frameUnivr=univrEndFrame(None,title="Message Univ-R")
#frameUnivr.Show()
frameBegin=BeginFrame(None,title="Attention")
#frameBegin.Bind(wx.EVT_END_SESSION,onEndSession)
#frameBegin.Show() # For debug
frameBegin.Show()
if standalone != True: frameBegin.Hide()
global frameEnd
frameEnd=EndingFrame(None,title="Attention")
def frameEndShow():
global frameEnd
frameEnd.Show()
frameEnd.RequestUserAttention()
if sys.platform=="darwin":
NSBeep()
time.sleep(0.3)
NSBeep()
def bounceDock():
#print ">>>>>>>>>>>", frameEnd.IsShownOnScreen(), str(frameEnd.FindFocus())
if 1:
for i in range(50):
if frameEnd.FindFocus()==None:
time.sleep(0.1)
#print "*"
frameEnd.RequestUserAttention()
def go_foreground():
NSApplication.sharedApplication()
NSApp().activateIgnoringOtherApps_(True)
go_foreground()
start_new_thread(bounceDock,())
#frameEnd.SetFocus()
#frameEnd.Raise()
#frameEnd.UpdateWindowUI()
#frameEnd.MoveXY(100,500)
def exitAVC():
print "In exitAVC in main thread"
#os.kill(str(os.getpid()),signal.SIGKILL)
if sys.platform!="darwin":
os.system("kill "+str(os.getpid()))
else:
sys.exit()
#frameEnd.Bind(wx.EVT_END_SESSION,onEndSession)
#frameEnd.Show()
#frameEnd.Hide()
#frameEnd.Show() # For debug
## Use a special serial keyboard ?
if serialKeyboard==True:
if amxKeyboard==True:
clavierAMX=AMX()
start_new_thread(clavierAMX.listen,(frameEnd, frameBegin, tryFocus))
else:
clavier=SerialHook()
start_new_thread(clavier.listen,())
## Systray
def onTaskbarActivate():
print ">>>>> yes my lord?"
print "setting up icons"
if usage=="audio" and sys.platform in("win32","darwin"):
print ">>> Setting up TaskBarIcon <<<"
icon1 = wx.Icon('images/audiocours1.ico', wx.BITMAP_TYPE_ICO)
icon2 = wx.Icon('images/audiocours2.ico', wx.BITMAP_TYPE_ICO)
if 1:
tbicon = wx.TaskBarIcon()
#print "setting up binding for left click event"
#app.Bind(wx.EVT_TASKBAR_LEFT_DCLICK, onTaskbarActivate)
def OnTaskBarRight(event):
print "test from tb icon"
tbicon.SetIcon(icon1, "AudioCours en attente")
#add taskbar icon event
if 0:
#wx.EVT_TASKBAR_CLICK(tbicon, OnTaskBarRight)
wx.EVT_KILL_FOCUS(tbicon, OnTaskBarRight)
wx.EVT_TASKBAR_CLICK(tbicon, OnTaskBarRight)
app.ExitMainLoop()
if 0:
app.Bind(wx.EVT_TASKBAR_CLICK,OnTaskBarRight,tbicon)
#print dir(tbicon)
#print dir(frameBegin)
if usage=="video":
icon1 = wx.Icon('images/videocours1.ico', wx.BITMAP_TYPE_ICO)
icon2 = wx.Icon('images/videocours2.ico', wx.BITMAP_TYPE_ICO)
tbicon = wx.TaskBarIcon()
tbicon.SetIcon(icon1, "VideoCours en attente")
if standalone==True:
showVuMeter()
print "remoteControl =",remoteControl
if remoteControl==False and standalone==True:
print "remote Control: False"
hosts="127.0.0.1"
else:
hosts="0.0.0.0"
if 1:
print "Launching integrated server with port", remotePort, "for hosts", hosts
remotePort=8080
start_new_thread(goAVCremote,(remotePort,pathData,hosts))
app.MainLoop()
| unistra/avc-client | avcOsxLinux.py | Python | gpl-2.0 | 107,248 |
# -*- coding: utf-8 -*-
"""
Fixtures factories.
"""
from __future__ import unicode_literals
import factory
from modoboa.core.factories import PermissionFactory, UserFactory
from . import models
class DomainFactory(PermissionFactory):
"""Factory to create Domains."""
class Meta:
model = models.Domain
django_get_or_create = ("name", )
type = "domain" # NOQA:A003
quota = 0
default_mailbox_quota = 10
enabled = True
class DomainAliasFactory(PermissionFactory):
"""Factory to create DomainAlias objects."""
class Meta:
model = models.DomainAlias
django_get_or_create = ("name", )
target = factory.SubFactory(DomainFactory)
enabled = True
class MailboxFactory(PermissionFactory):
"""A factory to create Mailbox instances."""
class Meta:
model = models.Mailbox
django_get_or_create = ("address", "domain")
domain = factory.SubFactory(DomainFactory)
user = factory.SubFactory(UserFactory)
quota = 10
class AliasRecipientFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.AliasRecipient
class AliasFactory(PermissionFactory):
class Meta:
model = models.Alias
enabled = True
class SenderAddressFactory(factory.django.DjangoModelFactory):
"""Factory for SenderAddress model."""
mailbox = factory.SubFactory(MailboxFactory)
class Meta:
model = models.SenderAddress
def populate_database():
"""Create test data.
    2 domains, 2 domain admins, 2 simple users
"""
dom = DomainFactory(name="test.com", quota=50)
admin = UserFactory(
username="[email protected]", groups=("DomainAdmins", ),
password="{PLAIN}toto"
)
MailboxFactory(address="admin", domain=dom, user=admin)
account = UserFactory.create(
username="[email protected]", groups=("SimpleUsers",),
)
MailboxFactory.create(address="user", domain=dom, user=account)
al = AliasFactory.create(
address="[email protected]", domain=dom
)
AliasRecipientFactory.create(
address="[email protected]", alias=al)
al = AliasFactory.create(
address="[email protected]", domain=dom
)
mb = account.mailbox
AliasRecipientFactory.create(
address=mb.full_address, alias=al, r_mailbox=mb)
al = AliasFactory.create(
address="[email protected]", domain=dom
)
for address in ["[email protected]", "[email protected]"]:
AliasRecipientFactory.create(address=address, alias=al)
dom.add_admin(admin)
dom2 = DomainFactory.create(name="test2.com", default_mailbox_quota=0)
admin = UserFactory.create(
username="[email protected]", groups=("DomainAdmins",),
password="{PLAIN}toto"
)
MailboxFactory.create(address="admin", domain=dom2, user=admin)
u = UserFactory.create(
username="[email protected]", groups=("SimpleUsers",)
)
MailboxFactory.create(address="user", domain=dom2, user=u)
dom2.add_admin(admin)
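# Hedged usage sketch (assumption, not part of the original module): these
# factories are normally driven from Django test cases, for example:
#
#   from django.test import TestCase
#   from modoboa.admin import factories
#
#   class DomainTestCase(TestCase):
#       def setUp(self):
#           factories.populate_database()
#           self.domain = factories.DomainFactory(name="example.net")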
| tonioo/modoboa | modoboa/admin/factories.py | Python | isc | 3,016 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'ExternalAccount.type'
db.alter_column('users_externalaccount', 'type', self.gf('django.db.models.fields.CharField')(max_length=30))
def backwards(self, orm):
# Changing field 'ExternalAccount.type'
db.alter_column('users_externalaccount', 'type', self.gf('django.db.models.fields.PositiveIntegerField')())
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'groups.group': {
'Meta': {'ordering': "['name']", 'object_name': 'Group'},
'always_auto_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'auto_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'irc_channel': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '63', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'steward': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['users.UserProfile']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'system': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'url': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'blank': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'wiki': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'blank': 'True'})
},
'groups.language': {
'Meta': {'ordering': "['name']", 'object_name': 'Language'},
'always_auto_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'auto_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'url': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'blank': 'True'})
},
'groups.skill': {
'Meta': {'ordering': "['name']", 'object_name': 'Skill'},
'always_auto_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'auto_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'url': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'blank': 'True'})
},
'users.externalaccount': {
'Meta': {'ordering': "['type']", 'object_name': 'ExternalAccount'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'privacy': ('django.db.models.fields.PositiveIntegerField', [], {'default': '3'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['users.UserProfile']"}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'users.usernameblacklist': {
'Meta': {'ordering': "['value']", 'object_name': 'UsernameBlacklist'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_regex': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'value': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'users.userprofile': {
'Meta': {'ordering': "['full_name']", 'object_name': 'UserProfile', 'db_table': "'profile'"},
'allows_community_sites': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'allows_mozilla_sites': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'basket_token': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'bio': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50'}),
'date_mozillian': ('django.db.models.fields.DateField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'date_vouched': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'members'", 'blank': 'True', 'to': "orm['groups.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ircname': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '63', 'blank': 'True'}),
'is_vouched': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'languages': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'members'", 'blank': 'True', 'to': "orm['groups.Language']"}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now': 'True', 'blank': 'True'}),
'photo': ('sorl.thumbnail.fields.ImageField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'privacy_bio': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_city': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_country': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_date_mozillian': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_email': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_full_name': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_groups': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_ircname': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_languages': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_photo': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_region': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_skills': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_timezone': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_title': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_tshirt': ('mozillians.users.models.PrivacyField', [], {'default': '1'}),
'privacy_vouched_by': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_website': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'region': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'skills': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'members'", 'blank': 'True', 'to': "orm['groups.Skill']"}),
'timezone': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '70', 'blank': 'True'}),
'tshirt': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'}),
'vouched_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vouchees'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['users.UserProfile']", 'blank': 'True', 'null': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'blank': 'True'})
}
}
complete_apps = ['users'] | justinpotts/mozillians | mozillians/users/migrations/0042_auto__chg_field_externalaccount_type.py | Python | bsd-3-clause | 12,259 |
from base import *
from urusan import Urusan
class Unit(Base, base):
__tablename__ ='units'
__table_args__ = {'extend_existing':True,
'schema' :'apbd','autoload':True}
@classmethod
def get_by_kode(cls,kode):
return DBSession.query(cls).filter_by(kode=kode).first()
@classmethod
def import_data(cls):
filenm ='unit.csv'
with open(filenm, 'rb') as csvfile:
reader = csv.DictReader(csvfile, delimiter=';', quotechar='"')
for row in reader:
print row
kode = row['kode'].strip()
data = cls.get_by_kode(kode)
if not data:
data=cls()
data.kode = kode
data.created = datetime.now()
data.create_uid = 1
#data.tahun = data.created.year
#data.level_id = data.kode.count('.')+1
#data.parent_id = DBSession.query(Rekening.id).filter(Rekening.kode==data.kode[:data.kode.rfind('.')]).scalar()
data.disabled = 0
#data.defsign = 1
data.urusan_id=Urusan.get_by_kode(row['urusan'].strip()).id
data.nama = row['nama'].strip()
DBSession.add(data)
DBSession.flush()
DBSession.commit()
if __name__ == '__main__':
Unit.import_data()
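# Hedged sketch (assumption, not part of the original file): import_data()
# expects a ';'-separated unit.csv providing at least the columns read above
# (kode, nama, urusan), for example:
#
#   kode;nama;urusan
#   1.01.01;Dinas Pendidikan;1.01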
| aagusti/sp2d | docs/ref/unit.py | Python | mit | 1,276 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_regressors
---------------
Tests for the `regressors` module.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import pandas as pd
import unittest2 as unittest
from sklearn import datasets
from sklearn import decomposition
from sklearn import linear_model
from sklearn import preprocessing
from regressors import regressors
from regressors import _utils
from regressors import stats
boston = datasets.load_boston()
which_betas = np.ones(13, dtype=bool)
which_betas[3] = False # Eliminate dummy variable
X = boston.data[:, which_betas]
y = boston.target
class TestStatsResiduals(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_classifier_type_assertion_raised(self):
# Test that assertion is raised for unsupported model
pcomp = decomposition.PCA()
pcomp.fit(X, y)
with self.assertRaises(AttributeError):
stats.residuals(pcomp, X, y)
def tests_classifier_type_assertion_not_raised(self):
# Test that assertion is not raise for supported models
for classifier in _utils.supported_linear_models:
clf = classifier()
clf.fit(X, y)
try:
stats.residuals(clf, X, y)
except Exception as e:
self.fail("Testing supported linear models in residuals "
"function failed unexpectedly: {0}".format(e))
def test_getting_raw_residuals(self):
ols = linear_model.LinearRegression()
ols.fit(X, y)
try:
stats.residuals(ols, X, y, r_type='raw')
except Exception as e:
self.fail("Testing raw residuals failed unexpectedly: "
"{0}".format(e))
def test_getting_standardized_residuals(self):
ols = linear_model.LinearRegression()
ols.fit(X, y)
try:
stats.residuals(ols, X, y, r_type='standardized')
except Exception as e:
self.fail("Testing standardized residuals failed unexpectedly: "
"{0}".format(e))
def test_getting_studentized_residuals(self):
ols = linear_model.LinearRegression()
ols.fit(X, y)
try:
stats.residuals(ols, X, y, r_type='studentized')
except Exception as e:
self.fail("Testing studentized residuals failed unexpectedly: "
"{0}".format(e))
class TestSummaryStats(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_error_not_raised_by_sse(self):
# Test that assertion is not raise for supported models
for classifier in _utils.supported_linear_models:
clf = classifier()
clf.fit(X, y)
try:
sse = stats.sse(clf, X, y)
except Exception as e:
self.fail("Testing SSE function for supported linear models "
"failed unexpectedly: {0}".format(e))
def test_error_not_raised_by_adj_r2_score(self):
# Test that assertion is not raise for supported models
for classifier in _utils.supported_linear_models:
clf = classifier()
clf.fit(X, y)
try:
stats.adj_r2_score(clf, X, y)
except Exception as e:
self.fail("Testing adjusted R2 function for supported linear "
"models failed unexpectedly: {0}".format(e))
def test_verify_adj_r2_score_return_type(self):
for classifier in _utils.supported_linear_models:
clf = classifier()
clf.fit(X, y)
adj_r2_score = stats.adj_r2_score(clf, X, y)
self.assertIsInstance(adj_r2_score, float)
def test_error_not_raised_by_coef_se(self):
# Test that assertion is not raise for supported models
for classifier in _utils.supported_linear_models:
clf = classifier()
clf.fit(X, y)
try:
stats.coef_se(clf, X, y).shape
except Exception as e:
self.fail("Testing standard error of coefficients function for "
"supported linear models failed "
"unexpectedly: {0}".format(e))
def test_length_of_returned_coef_se(self):
for classifier in _utils.supported_linear_models:
clf = classifier()
clf.fit(X, y)
coef_se = stats.coef_se(clf, X, y)
expected_length = X.shape[1] + 1 # Add 1 for the intercept
self.assertEqual(coef_se.shape[0], expected_length)
def test_error_not_raised_by_coef_tval(self):
# Test that assertion is not raise for supported models
for classifier in _utils.supported_linear_models:
clf = classifier()
clf.fit(X, y)
try:
stats.coef_tval(clf, X, y).shape
except Exception as e:
self.fail("Testing t-values of coefficients function for "
"supported linear models failed "
"unexpectedly: {0}".format(e))
def test_length_of_returned_coef_tval(self):
for classifier in _utils.supported_linear_models:
clf = classifier()
clf.fit(X, y)
coef_tval = stats.coef_tval(clf, X, y)
expected_length = X.shape[1] + 1 # Add 1 for the intercept
self.assertEqual(coef_tval.shape[0], expected_length)
def test_error_not_raised_by_coef_pval(self):
# Test that assertion is not raise for supported models
for classifier in _utils.supported_linear_models:
clf = classifier()
clf.fit(X, y)
try:
stats.coef_pval(clf, X, y).shape
except Exception as e:
self.fail("Testing p-values of coefficients function for "
"supported linear models failed "
"unexpectedly: {0}".format(e))
def test_length_of_returned_coef_pval(self):
for classifier in _utils.supported_linear_models:
clf = classifier()
clf.fit(X, y)
coef_pval = stats.coef_tval(clf, X, y)
expected_length = X.shape[1] + 1 # Add 1 for the intercept
self.assertEqual(coef_pval.shape[0], expected_length)
def test_error_not_raised_by_f_stat(self):
# Test that assertion is not raise for supported models
for classifier in _utils.supported_linear_models:
clf = classifier()
clf.fit(X, y)
try:
stats.f_stat(clf, X, y).shape
except Exception as e:
self.fail("Testing summary F-statistic function for "
"supported linear models failed "
"unexpectedly: {0}".format(e))
def test_verify_f_stat_return_type(self):
for classifier in _utils.supported_linear_models:
clf = classifier()
clf.fit(X, y)
adj_r2_score = stats.adj_r2_score(clf, X, y)
self.assertIsInstance(adj_r2_score, float)
def test_error_not_raised_by_summary_function(self):
for classifier in _utils.supported_linear_models:
clf = classifier()
clf.fit(X, y)
try:
stats.f_stat(clf, X, y).shape
except Exception as e:
self.fail("Testing summary function for "
"supported linear models failed "
"unexpectedly: {0}".format(e))
class TestPCRBetaCoef(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_pcr_beta_coef_returns_coefs_for_all_predictors(self):
# Just return the coefficients for the predictors because the intercept
# for PCR is the same as the intercept in the PCA regression model.
scaler = preprocessing.StandardScaler()
x_scaled = scaler.fit_transform(X)
pcomp = decomposition.PCA()
pcomp.fit(x_scaled)
x_reduced = pcomp.transform(x_scaled)
ols = linear_model.LinearRegression()
ols.fit(x_reduced, y)
beta_coef = regressors.pcr_beta_coef(ols, pcomp)
self.assertEqual(beta_coef.shape, ols.coef_.shape)
class TestPCRClass(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_fitting_a_pcr_model_with_various_regression_types(self):
for regression in ('ols', 'lasso', 'ridge', 'elasticnet'):
pcr = regressors.PCR(regression_type=regression)
try:
pcr.fit(X, y)
except Exception as e:
self.fail("Testing .fit() on a PCR class with regression model "
"{0} failed unexpectedly: {1}".format(regression, e))
def test_get_beta_coefficients_from_pcr_model(self):
pcr = regressors.PCR(n_components=10)
pcr.fit(X, y)
self.assertEqual(X.shape[1], pcr.beta_coef_.shape[0])
def test_get_intercept_from_pcr_model(self):
pcr = regressors.PCR(n_components=10)
pcr.fit(X, y)
self.assertIsInstance(pcr.intercept_, float)
def test_get_score_from_pcr_model(self):
pcr = regressors.PCR(n_components=10)
pcr.fit(X, y)
self.assertIsInstance(pcr.score(X, y), float)
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
| nsh87/regressors | tests/test_regressors.py | Python | isc | 9,685 |
# Author: Nic Wolfe <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import os
import traceback
import sickbeard
from common import SNATCHED, Quality, SEASON_RESULT, MULTI_EP_RESULT,ARCHIVED, IGNORED, UNAIRED, WANTED, SKIPPED
from sickbeard import logger, db, show_name_helpers, exceptions, helpers
from sickbeard import sab
from sickbeard import nzbget
from sickbeard import clients
from sickbeard import history
from sickbeard import notifiers
from sickbeard import nzbSplitter
from sickbeard import ui
from sickbeard import encodingKludge as ek
from sickbeard import providers
from sickbeard.exceptions import ex
from sickbeard.providers.generic import GenericProvider
def _downloadResult(result):
"""
Downloads a result to the appropriate black hole folder.
Returns a bool representing success.
result: SearchResult instance to download.
"""
resProvider = result.provider
newResult = False
if resProvider == None:
logger.log(u"Invalid provider name - this is a coding error, report it please", logger.ERROR)
return False
# nzbs with an URL can just be downloaded from the provider
if result.resultType == "nzb":
newResult = resProvider.downloadResult(result)
# if it's an nzb data result
elif result.resultType == "nzbdata":
# get the final file path to the nzb
fileName = ek.ek(os.path.join, sickbeard.NZB_DIR, result.name + ".nzb")
logger.log(u"Saving NZB to " + fileName)
newResult = True
# save the data to disk
try:
fileOut = open(fileName, "w")
fileOut.write(result.extraInfo[0])
fileOut.close()
helpers.chmodAsParent(fileName)
except IOError, e:
logger.log(u"Error trying to save NZB to black hole: "+ex(e), logger.ERROR)
newResult = False
elif result.resultType == "torrentdata":
# get the final file path to the nzb
fileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR, result.name + ".torrent")
logger.log(u"Saving Torrent to " + fileName)
newResult = True
# save the data to disk
try:
fileOut = open(fileName, "wb")
fileOut.write(result.extraInfo[0])
fileOut.close()
helpers.chmodAsParent(fileName)
except IOError, e:
logger.log(u"Error trying to save Torrent to black hole: "+ex(e), logger.ERROR)
newResult = False
elif resProvider.providerType == "torrent":
newResult = resProvider.downloadResult(result)
else:
logger.log(u"Invalid provider type - this is a coding error, report it please", logger.ERROR)
return False
if newResult:
ui.notifications.message('Episode snatched','<b>%s</b> snatched from <b>%s</b>' % (result.name, resProvider.name))
return newResult
def snatchEpisode(result, endStatus=SNATCHED):
"""
Contains the internal logic necessary to actually "snatch" a result that
has been found.
Returns a bool representing success.
result: SearchResult instance to be snatched.
endStatus: the episode status that should be used for the episode object once it's snatched.
"""
# NZBs can be sent straight to SAB or saved to disk
if hasattr(result,'resultType'):
if result.resultType in ("nzb", "nzbdata"):
if sickbeard.NZB_METHOD == "blackhole":
dlResult = _downloadResult(result)
elif sickbeard.NZB_METHOD == "sabnzbd":
dlResult = sab.sendNZB(result)
elif sickbeard.NZB_METHOD == "nzbget":
dlResult = nzbget.sendNZB(result)
else:
logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
dlResult = False
# TORRENTs can be sent to clients or saved to disk
elif result.resultType in ("torrent", "torrentdata"):
#if the torrent data has not been put in extraInfo yet we get it
if hasattr(result,'extraInfo') and hasattr(result,'item') and result.extraInfo.__len__() == 0:
result.extraInfo.append(result.provider.getURL(result.item.url))
# torrents are saved to disk when blackhole mode
if sickbeard.TORRENT_METHOD == "blackhole":
dlResult = _downloadResult(result)
else:
client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
if hasattr(result,'extraInfo') and result.resultType=="torrentdata":
result.content=result.extraInfo[0]
dlResult = client.sendTORRENT(result)
else:
logger.log(u"Unknown result type, unable to download it", logger.ERROR)
dlResult = False
if dlResult == False:
return False
history.logSnatch(result)
# don't notify when we re-download an episode
for curEpObj in result.episodes:
with curEpObj.lock:
curEpObj.status = Quality.compositeStatus(endStatus, result.quality)
curEpObj.audio_langs = result.audio_lang
curEpObj.saveToDB()
if curEpObj.status not in Quality.DOWNLOADED:
providerClass = result.provider
if providerClass != None:
provider = providerClass.name
else:
provider = "unknown"
notifiers.notify_snatch(curEpObj.prettyName()+ ' on ' + provider)
return True
else:
return False
def searchForNeededEpisodes():
logger.log(u"Searching all providers for any needed episodes")
foundResults = {}
didSearch = False
# ask all providers for any episodes it finds
for curProvider in providers.sortedProviderList():
if not curProvider.isActive():
continue
curFoundResults = {}
try:
curFoundResults = curProvider.searchRSS()
except exceptions.AuthException, e:
logger.log(u"Authentication error: "+ex(e), logger.ERROR)
continue
except Exception, e:
logger.log(u"Error while searching "+curProvider.name+", skipping: "+ex(e), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG)
continue
didSearch = True
# pick a single result for each episode, respecting existing results
for curEp in curFoundResults:
if curEp.show.paused:
logger.log(u"Show "+curEp.show.name+" is paused, ignoring all RSS items for "+curEp.prettyName(), logger.DEBUG)
continue
# find the best result for the current episode
bestResult = None
for curResult in curFoundResults[curEp]:
if not bestResult or bestResult.quality < curResult.quality:
bestResult = curResult
epi={}
epi[1]=curEp
bestResult = pickBestResult(curFoundResults[curEp],episode=epi)
# if it's already in the list (from another provider) and the newly found quality is no better then skip it
if curEp in foundResults and bestResult.quality <= foundResults[curEp].quality:
continue
foundResults[curEp] = bestResult
if not didSearch:
logger.log(u"No NZB/Torrent providers found or enabled in the sickbeard config. Please check your settings.", logger.ERROR)
return foundResults.values()
def pickBestResult(results, quality_list=None, episode=None, season=None):
logger.log(u"Picking the best result out of "+str([x.name for x in results]), logger.DEBUG)
links=[]
myDB = db.DBConnection()
if season !=None:
epidr=myDB.select("SELECT episode_id from tv_episodes where showid=? and season=?",[episode,season])
for epid in epidr:
listlink=myDB.select("SELECT link from episode_links where episode_id=?",[epid[0]])
for dlink in listlink:
links.append(dlink[0])
else:
for eps in episode.values():
if hasattr(eps,'tvdbid'):
epidr=myDB.select("SELECT episode_id from tv_episodes where tvdbid=?",[eps.tvdbid])
listlink=myDB.select("SELECT link from episode_links where episode_id=?",[epidr[0][0]])
for dlink in listlink:
links.append(dlink[0])
# find the best result for the current episode
bestResult = None
for cur_result in results:
curmethod="nzb"
bestmethod="nzb"
if cur_result.resultType == "torrentdata" or cur_result.resultType == "torrent":
curmethod="torrent"
if bestResult:
if bestResult.resultType == "torrentdata" or bestResult.resultType == "torrent":
bestmethod="torrent"
if hasattr(cur_result,'item'):
if hasattr(cur_result.item,'nzburl'):
eplink=cur_result.item.nzburl
elif hasattr(cur_result.item,'url'):
eplink=cur_result.item.url
elif hasattr(cur_result,'nzburl'):
eplink=cur_result.nzburl
elif hasattr(cur_result,'url'):
eplink=cur_result.url
else:
eplink=""
else:
if hasattr(cur_result,'nzburl'):
eplink=cur_result.nzburl
elif hasattr(cur_result,'url'):
eplink=cur_result.url
else:
eplink=""
logger.log("Quality of "+cur_result.name+" is "+Quality.qualityStrings[cur_result.quality])
if quality_list and cur_result.quality not in quality_list:
logger.log(cur_result.name+" is a quality we know we don't want, rejecting it", logger.DEBUG)
continue
if eplink in links:
logger.log(eplink +" was already downloaded so let's skip it assuming the download failed, you can erase the downloaded links for that episode if you want", logger.DEBUG)
continue
if ((not bestResult or bestResult.quality < cur_result.quality and cur_result.quality != Quality.UNKNOWN)) or (bestmethod != sickbeard.PREFERED_METHOD and curmethod == sickbeard.PREFERED_METHOD and cur_result.quality != Quality.UNKNOWN):
bestResult = cur_result
elif bestResult.quality == cur_result.quality:
if "proper" in cur_result.name.lower() or "repack" in cur_result.name.lower():
bestResult = cur_result
elif "internal" in bestResult.name.lower() and "internal" not in cur_result.name.lower():
bestResult = cur_result
if bestResult:
logger.log(u"Picked "+bestResult.name+" as the best", logger.DEBUG)
if hasattr(bestResult,'item'):
if hasattr(bestResult.item,'nzburl'):
eplink=bestResult.item.nzburl
elif hasattr(bestResult.item,'url'):
eplink=bestResult.item.url
elif hasattr(bestResult,'nzburl'):
eplink=bestResult.nzburl
elif hasattr(bestResult,'url'):
eplink=bestResult.url
else:
eplink=""
else:
if hasattr(bestResult,'nzburl'):
eplink=bestResult.nzburl
elif hasattr(bestResult,'url'):
eplink=bestResult.url
else:
eplink=""
if season !=None:
for epid in epidr:
count=myDB.select("SELECT count(*) from episode_links where episode_id=? and link=?",[epid[0],eplink])
if count[0][0]==0:
myDB.action("INSERT INTO episode_links (episode_id, link) VALUES (?,?)",[epid[0],eplink])
else:
count=myDB.select("SELECT count(*) from episode_links where episode_id=? and link=?",[epidr[0][0],eplink])
if count[0][0]==0:
myDB.action("INSERT INTO episode_links (episode_id, link) VALUES (?,?)",[epidr[0][0],eplink])
else:
logger.log(u"No result picked.", logger.DEBUG)
return bestResult
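# Hedged call-site sketch (not part of the original module): elsewhere in this
# file pickBestResult is invoked either with an explicit quality list for a
# whole season, or with a {1: episode} dict for a single episode, e.g.
#
#   any_q, best_q = Quality.splitQuality(show.quality)
#   best = pickBestResult(results, any_q + best_q, show.tvdbid, season)
#   best = pickBestResult(results, episode={1: ep_obj})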
def isFinalResult(result):
"""
Checks if the given result is good enough quality that we can stop searching for other ones.
If the result is the highest quality in both the any/best quality lists then this function
returns True, if not then it's False
"""
logger.log(u"Checking if we should keep searching after we've found "+result.name, logger.DEBUG)
links=[]
lists=[]
myDB = db.DBConnection()
show_obj = result.episodes[0].show
epidr=myDB.select("SELECT episode_id from tv_episodes where showid=?",[show_obj.tvdbid])
for eplist in epidr:
lists.append(eplist[0])
for i in lists:
listlink=myDB.select("SELECT link from episode_links where episode_id =?",[i])
for dlink in listlink:
links.append(dlink[0])
any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)
if hasattr(result,'item'):
if hasattr(result.item,'nzburl'):
eplink=result.item.nzburl
elif hasattr(result.item,'url'):
eplink=result.item.url
elif hasattr(result,'nzburl'):
eplink=result.nzburl
elif hasattr(result,'url'):
eplink=result.url
else:
eplink=""
else:
if hasattr(result,'nzburl'):
eplink=result.nzburl
elif hasattr(result,'url'):
eplink=result.url
else:
eplink=""
# if episode link seems to have been already downloaded continue searching:
if eplink in links:
logger.log(eplink +" was already downloaded so let's continue searching assuming the download failed", logger.DEBUG)
return False
# if there is a redownload that's higher than this then we definitely need to keep looking
if best_qualities and result.quality < max(best_qualities):
return False
# if there's no redownload that's higher (above) and this is the highest initial download then we're good
elif any_qualities and result.quality == max(any_qualities):
return True
elif best_qualities and result.quality == max(best_qualities):
# if this is the best redownload but we have a higher initial download then keep looking
if any_qualities and result.quality < max(any_qualities):
return False
# if this is the best redownload and we don't have a higher initial download then we're done
else:
return True
# if we got here than it's either not on the lists, they're empty, or it's lower than the highest required
else:
return False
def findEpisode(episode, manualSearch=False):
logger.log(u"Searching for " + episode.prettyName())
foundResults = []
didSearch = False
for curProvider in providers.sortedProviderList():
if not curProvider.isActive():
continue
try:
curFoundResults = curProvider.findEpisode(episode, manualSearch=manualSearch)
except exceptions.AuthException, e:
logger.log(u"Authentication error: "+ex(e), logger.ERROR)
continue
except Exception, e:
logger.log(u"Error while searching "+curProvider.name+", skipping: "+ex(e), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG)
continue
didSearch = True
# skip non-tv crap
curFoundResults = filter(lambda x: show_name_helpers.filterBadReleases(x.name) and show_name_helpers.isGoodResult(x.name, episode.show), curFoundResults)
# loop all results and see if any of them are good enough that we can stop searching
done_searching = False
for cur_result in curFoundResults:
done_searching = isFinalResult(cur_result)
logger.log(u"Should we stop searching after finding "+cur_result.name+": "+str(done_searching), logger.DEBUG)
if done_searching:
break
foundResults += curFoundResults
# if we did find a result that's good enough to stop then don't continue
if done_searching:
break
if not didSearch:
logger.log(u"No NZB/Torrent providers found or enabled in the sickbeard config. Please check your settings.", logger.ERROR)
epi={}
epi[1]=episode
bestResult = pickBestResult(foundResults,episode=epi)
return bestResult
def findSeason(show, season):
myDB = db.DBConnection()
allEps = [int(x["episode"]) for x in myDB.select("SELECT episode FROM tv_episodes WHERE showid = ? AND season = ?", [show.tvdbid, season])]
logger.log(u"Episode list: "+str(allEps), logger.DEBUG)
reallywanted=[]
notwanted=[]
finalResults = []
for curEpNum in allEps:
sqlResults = myDB.select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [show.tvdbid, season, curEpNum])
epStatus = int(sqlResults[0]["status"])
if epStatus ==3:
reallywanted.append(curEpNum)
else:
notwanted.append(curEpNum)
if notwanted != []:
for EpNum in reallywanted:
showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, show.tvdbid)
episode = showObj.getEpisode(season, EpNum)
res=findEpisode(episode, manualSearch=True)
snatchEpisode(res)
return
else:
logger.log(u"Searching for stuff we need from "+show.name+" season "+str(season))
foundResults = {}
didSearch = False
for curProvider in providers.sortedProviderList():
if not curProvider.isActive():
continue
try:
curResults = curProvider.findSeasonResults(show, season)
# make a list of all the results for this provider
for curEp in curResults:
# skip non-tv crap
curResults[curEp] = filter(lambda x: show_name_helpers.filterBadReleases(x.name) and show_name_helpers.isGoodResult(x.name, show), curResults[curEp])
if curEp in foundResults:
foundResults[curEp] += curResults[curEp]
else:
foundResults[curEp] = curResults[curEp]
except exceptions.AuthException, e:
logger.log(u"Authentication error: "+ex(e), logger.ERROR)
continue
except Exception, e:
logger.log(u"Error while searching "+curProvider.name+", skipping: "+ex(e), logger.DEBUG)
logger.log(traceback.format_exc(), logger.DEBUG)
continue
didSearch = True
if not didSearch:
logger.log(u"No NZB/Torrent providers found or enabled in the sickbeard config. Please check your settings.", logger.ERROR)
finalResults = []
anyQualities, bestQualities = Quality.splitQuality(show.quality)
# pick the best season NZB
bestSeasonNZB = None
if len(foundResults)==0:
logger.log(u"No multi eps found trying a single ep search", logger.DEBUG)
for EpNum in reallywanted:
showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, show.tvdbid)
episode = showObj.getEpisode(season, EpNum)
res=findEpisode(episode, manualSearch=True)
snatchEpisode(res)
return
if SEASON_RESULT in foundResults:
bestSeasonNZB = pickBestResult(foundResults[SEASON_RESULT], anyQualities+bestQualities,show.tvdbid,season)
highest_quality_overall = 0
for cur_season in foundResults:
for cur_result in foundResults[cur_season]:
if cur_result.quality != Quality.UNKNOWN and cur_result.quality > highest_quality_overall:
highest_quality_overall = cur_result.quality
logger.log(u"The highest quality of any match is "+Quality.qualityStrings[highest_quality_overall], logger.DEBUG)
# see if every episode is wanted
if bestSeasonNZB:
# get the quality of the season nzb
seasonQual = Quality.nameQuality(bestSeasonNZB.name)
seasonQual = bestSeasonNZB.quality
logger.log(u"The quality of the season NZB is "+Quality.qualityStrings[seasonQual], logger.DEBUG)
myDB = db.DBConnection()
allEps = [int(x["episode"]) for x in myDB.select("SELECT episode FROM tv_episodes WHERE showid = ? AND season = ?", [show.tvdbid, season])]
logger.log(u"Episode list: "+str(allEps), logger.DEBUG)
allWanted = True
anyWanted = False
for curEpNum in allEps:
if not show.wantEpisode(season, curEpNum, seasonQual):
allWanted = False
else:
anyWanted = True
# if we need every ep in the season and there's nothing better then just download this and be done with it
if allWanted and bestSeasonNZB.quality == highest_quality_overall:
logger.log(u"Every ep in this season is needed, downloading the whole NZB "+bestSeasonNZB.name)
epObjs = []
for curEpNum in allEps:
epObjs.append(show.getEpisode(season, curEpNum))
bestSeasonNZB.episodes = epObjs
return [bestSeasonNZB]
elif not anyWanted:
logger.log(u"No eps from this season are wanted at this quality, ignoring the result of "+bestSeasonNZB.name, logger.DEBUG)
else:
if bestSeasonNZB.provider.providerType == GenericProvider.NZB:
logger.log(u"Breaking apart the NZB and adding the individual ones to our results", logger.DEBUG)
# if not, break it apart and add them as the lowest priority results
individualResults = nzbSplitter.splitResult(bestSeasonNZB)
individualResults = filter(lambda x: show_name_helpers.filterBadReleases(x.name) and show_name_helpers.isGoodResult(x.name, show), individualResults)
for curResult in individualResults:
if len(curResult.episodes) == 1:
epNum = curResult.episodes[0].episode
elif len(curResult.episodes) > 1:
epNum = MULTI_EP_RESULT
if epNum in foundResults:
foundResults[epNum].append(curResult)
else:
foundResults[epNum] = [curResult]
            # If this is a torrent all we can do is leech the entire torrent, user will have to select which eps not to download in his torrent client
else:
# Season result from BTN must be a full-season torrent, creating multi-ep result for it.
logger.log(u"Adding multi-ep result for full-season torrent. Set the episodes you don't want to 'don't download' in your torrent client if desired!")
epObjs = []
for curEpNum in allEps:
epObjs.append(show.getEpisode(season, curEpNum))
bestSeasonNZB.episodes = epObjs
epNum = MULTI_EP_RESULT
if epNum in foundResults:
foundResults[epNum].append(bestSeasonNZB)
else:
foundResults[epNum] = [bestSeasonNZB]
# go through multi-ep results and see if we really want them or not, get rid of the rest
multiResults = {}
if MULTI_EP_RESULT in foundResults:
for multiResult in foundResults[MULTI_EP_RESULT]:
logger.log(u"Seeing if we want to bother with multi-episode result "+multiResult.name, logger.DEBUG)
# see how many of the eps that this result covers aren't covered by single results
neededEps = []
notNeededEps = []
for epObj in multiResult.episodes:
epNum = epObj.episode
# if we have results for the episode
if epNum in foundResults and len(foundResults[epNum]) > 0:
# but the multi-ep is worse quality, we don't want it
# TODO: wtf is this False for
#if False and multiResult.quality <= pickBestResult(foundResults[epNum]):
# notNeededEps.append(epNum)
#else:
neededEps.append(epNum)
else:
neededEps.append(epNum)
logger.log(u"Single-ep check result is neededEps: "+str(neededEps)+", notNeededEps: "+str(notNeededEps), logger.DEBUG)
if not neededEps:
logger.log(u"All of these episodes were covered by single nzbs, ignoring this multi-ep result", logger.DEBUG)
continue
# check if these eps are already covered by another multi-result
multiNeededEps = []
multiNotNeededEps = []
for epObj in multiResult.episodes:
epNum = epObj.episode
if epNum in multiResults:
multiNotNeededEps.append(epNum)
else:
multiNeededEps.append(epNum)
logger.log(u"Multi-ep check result is multiNeededEps: "+str(multiNeededEps)+", multiNotNeededEps: "+str(multiNotNeededEps), logger.DEBUG)
if not multiNeededEps:
logger.log(u"All of these episodes were covered by another multi-episode nzbs, ignoring this multi-ep result", logger.DEBUG)
continue
# if we're keeping this multi-result then remember it
for epObj in multiResult.episodes:
multiResults[epObj.episode] = multiResult
# don't bother with the single result if we're going to get it with a multi result
for epObj in multiResult.episodes:
epNum = epObj.episode
if epNum in foundResults:
logger.log(u"A needed multi-episode result overlaps with a single-episode result for ep #"+str(epNum)+", removing the single-episode results from the list", logger.DEBUG)
del foundResults[epNum]
finalResults += set(multiResults.values())
# of all the single ep results narrow it down to the best one for each episode
for curEp in foundResults:
if curEp in (MULTI_EP_RESULT, SEASON_RESULT):
continue
if len(foundResults[curEp]) == 0:
continue
        epi = {1: show.episodes[season][curEp]}
        finalResults.append(pickBestResult(foundResults[curEp], None, episode=epi))
return finalResults
| Pakoach/Sick-Beard | sickbeard/search.py | Python | gpl-3.0 | 28,256 |
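# Editor's note: the single/multi-episode narrowing above is interleaved with
# logging and provider handling, so here is a minimal standalone sketch of the
# same idea. Function and variable names are illustrative, not Sick-Beard API.

def narrow_results(single_results, multi_results):
    """single_results: {ep_num: [candidates]}; multi_results: [(release_name, [ep_nums])].

    Keep a multi-episode release only if it covers an episode that no single result
    (and no earlier multi) covers; drop singles that a kept multi overlaps.
    """
    kept = {}
    for name, eps in multi_results:
        needed = [ep for ep in eps if ep not in single_results and ep not in kept]
        if not needed:
            continue  # fully covered already, ignore this multi-ep release
        for ep in eps:
            kept[ep] = name
            single_results.pop(ep, None)  # prefer the multi over an overlapping single
    return single_results, sorted(set(kept.values()))

# Episode 2 only exists inside the double-episode release, so that release is kept
# and the overlapping single result for episode 1 is discarded.
singles = {1: ["Show.S01E01.720p"], 3: ["Show.S01E03.720p"]}
multis = [("Show.S01E01E02.720p", [1, 2])]
print(narrow_results(singles, multis))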
from django.utils.html import escape
from wagtail.images.formats import register_image_format, \
unregister_image_format, Format
from wagtail.images.models import SourceImageIOError
class FullWidthImgFormat(Format):
def image_to_editor_html(self, image, alt_text, extra_attributes=None):
if extra_attributes is None:
extra_attributes = {}
return self.image_to_html(image, alt_text, extra_attributes)
def image_to_html(self, image, alt_text, extra_attributes=None):
if extra_attributes is None:
extra_attributes = {}
try:
rendition = image.get_rendition(self.filter_spec)
except SourceImageIOError:
# Image file is (probably) missing from /media/original_images - generate a dummy
# rendition so that we just output a broken image, rather than crashing out completely
# during rendering
Rendition = image.renditions.model # pick up any custom Image / Rendition classes that may be in use
rendition = Rendition(image=image, width=0, height=0)
rendition.file.name = 'not-found'
try:
half_rendition = image.get_rendition('max-512x410')
except SourceImageIOError:
# Image file is (probably) missing from /media/original_images - generate a dummy
# rendition so that we just output a broken image, rather than crashing out completely
# during rendering
Rendition = image.renditions.model # pick up any custom Image / Rendition classes that may be in use
half_rendition = Rendition(image=image, width=0, height=0)
half_rendition.file.name = 'not-found'
if self.classnames:
class_attr = 'class="%s" ' % escape(self.classnames)
else:
class_attr = ''
        sizes = "(max-width: 480px) 512px, 100vw"
srcset = "%s 512w, %s" % (escape(half_rendition.url),
escape(rendition.url))
return ('<img %s%s '
'width="%d" height="%d" '
'alt="%s" srcset="%s" sizes="%s">') % (
extra_attributes, class_attr,
rendition.width, rendition.height, alt_text,
srcset, sizes
)
register_image_format(Format('halfwidth', 'Half Width (512px)', 'richtext-image half-width', 'max-512x410'))
unregister_image_format("fullwidth")
register_image_format(FullWidthImgFormat('fullwidth', 'Full width', 'richtext-image full-width', 'max-1400x1120'))
| PARINetwork/pari | article/image_formats.py | Python | bsd-3-clause | 2,575 |
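# Editor's note: for readers unfamiliar with Wagtail renditions, FullWidthImgFormat
# above is meant to emit a responsive <img> roughly like the one built below. The
# URLs, dimensions and alt text are made-up placeholders, not output of this project.

half_url = "/media/images/example.max-512x410.jpg"      # hypothetical 512px-wide rendition
full_url = "/media/images/example.max-1400x1120.jpg"    # hypothetical full-width rendition
html = (
    '<img class="richtext-image full-width" width="1400" height="1120" '
    'alt="Example image" srcset="%s 512w, %s" sizes="(max-width: 480px) 512px, 100vw">'
    % (half_url, full_url)
)
print(html)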
# -*- coding: utf-8 -*-
################################################
###### Copyright (c) 2016, Alexandre Popoff
###
import numpy as np
from .categoryaction import CatObject,CatMorphism,MonoidAction
class Noll_Monoid(MonoidAction):
"""Defines the Noll monoid acting on the set of the twelve pitch classes.
The Noll monoid is generated by the two transformations
- f(x)=3x+7
- g(x)=8x+4
"""
def __init__(self):
super(Noll_Monoid,self).__init__()
X = CatObject(".",["C","Cs","D","Eb","E","F","Fs","G","Gs","A","Bb","B"])
self.set_objects([X])
F = CatMorphism("f",X,X)
M_F = np.zeros((12,12),dtype=bool)
for i in range(12):
M_F[(3*i+7)%12,i] = True
F.set_mapping_matrix(M_F)
G = CatMorphism("g",X,X)
M_G = np.zeros((12,12),dtype=bool)
for i in range(12):
M_G[(8*i+4)%12,i] = True
G.set_mapping_matrix(M_G)
self.set_generators([F,G])
self.generate_category()
class TI_Group_PC(MonoidAction):
"""Defines the TI group acting on the set of the twelve pitch classes.
"""
def __init__(self):
super(TI_Group_PC,self).__init__()
X = CatObject(".",["C","Cs","D","Eb","E","F","Fs","G","Gs","A","Bb","B"])
self.set_objects([X])
T = CatMorphism("T1",X,X)
M_T = np.zeros((12,12),dtype=bool)
for i in range(12):
M_T[(i+1)%12,i]=True
T.set_mapping_matrix(M_T)
I = CatMorphism("I0",X,X)
M_I = np.zeros((12,12),dtype=bool)
for i in range(12):
M_I[(-i)%12,i]=True
I.set_mapping_matrix(M_I)
self.set_generators([T,I])
self._add_identities()
self._add_morphisms([T,I])
for i in range(2,12):
x = self.morphisms['id_.']
for j in range(i):
x = T*x
x.set_name("T"+str(i))
self._add_morphisms([x])
for i in range(1,12):
x = self.morphisms['id_.']
for j in range(i):
x = T*x
y = x*I
y.set_name("I"+str(i))
self._add_morphisms([y])
class TI_Group_Triads(MonoidAction):
"""Defines the TI group acting on the set of the 24 major and minor triads.
"""
def __init__(self):
super(TI_Group_Triads,self).__init__()
X = CatObject(".",["C_M","Cs_M","D_M","Eb_M","E_M","F_M","Fs_M","G_M","Gs_M","A_M","Bb_M","B_M",
"C_m","Cs_m","D_m","Eb_m","E_m","F_m","Fs_m","G_m","Gs_m","A_m","Bb_m","B_m"])
self.set_objects([X])
T = CatMorphism("T1",X,X)
M_T = np.zeros((24,24),dtype=bool)
for i in range(12):
M_T[(i+1)%12,i]=True
M_T[12+(i+1)%12,i+12]=True
T.set_mapping_matrix(M_T)
I = CatMorphism("I0",X,X)
M_I = np.zeros((24,24),dtype=bool)
for i in range(12):
M_I[(5-i)%12 + 12,i]=True
M_I[(5-i)%12, i+12]=True
I.set_mapping_matrix(M_I)
self.set_generators([T,I])
self._add_identities()
self._add_morphisms([T,I])
for i in range(2,12):
x = self.morphisms['id_.']
for j in range(i):
x = T*x
x.set_name("T"+str(i))
self._add_morphisms([x])
for i in range(1,12):
x = self.morphisms['id_.']
for j in range(i):
x = T*x
y = x*I
y.set_name("I"+str(i))
self._add_morphisms([y])
class PRL_Group(MonoidAction):
"""Defines the neo-Riemannian PRL group acting on the set
of the 24 major and minor triads.
"""
def __init__(self):
super(PRL_Group,self).__init__()
X = CatObject(".",["C_M","Cs_M","D_M","Eb_M","E_M","F_M","Fs_M","G_M","Gs_M","A_M","Bb_M","B_M",
"C_m","Cs_m","D_m","Eb_m","E_m","F_m","Fs_m","G_m","Gs_m","A_m","Bb_m","B_m"])
self.set_objects([X])
L = CatMorphism("L",X,X)
M_L = np.zeros((24,24),dtype=bool)
for i in range(12):
M_L[(i+4)%12 + 12,i]=True
M_L[(i+8)%12,12+i]=True
L.set_mapping_matrix(M_L)
R = CatMorphism("R",X,X)
M_R = np.zeros((24,24),dtype=bool)
for i in range(12):
M_R[(i+9)%12 + 12,i]=True
M_R[(i+3)%12,12+i]=True
R.set_mapping_matrix(M_R)
P = CatMorphism("P",X,X)
M_P = np.zeros((24,24),dtype=bool)
for i in range(12):
M_P[i%12 + 12,i]=True
M_P[i%12,12+i]=True
P.set_mapping_matrix(M_P)
self.set_generators([P,R,L])
self._add_identities()
self.generate_category()
class UTT_Group(MonoidAction):
"""Defines Hook's UTT group acting on the set of the 24 major and minor triads.
    Each element of the group (whose order is equal to 288) is of the form <p,q,s>.
s is a signature (+ or -):
- if s is +, the element sends a major triad of root n to n+p,
and a minor triad of root n to n+q
- if s is -, the element sends a major triad of root n to n+q,
and a minor triad of root n to n+p
"""
def __init__(self):
super(UTT_Group,self).__init__()
X = CatObject(".",["C_M","Cs_M","D_M","Eb_M","E_M","F_M","Fs_M","G_M","Gs_M","A_M","Bb_M","B_M",
"C_m","Cs_m","D_m","Eb_m","E_m","F_m","Fs_m","G_m","Gs_m","A_m","Bb_m","B_m"])
self.set_objects([X])
T = CatMorphism("T",X,X)
M_T = np.zeros((24,24),dtype=bool)
for i in range(12):
M_T[(i+1)%12,i]=True
M_T[i+12,i+12]=True
T.set_mapping_matrix(M_T)
I = CatMorphism("I",X,X)
M_I = np.zeros((24,24),dtype=bool)
for i in range(12):
M_I[i + 12,i]=True
M_I[i, i+12]=True
I.set_mapping_matrix(M_I)
self.set_generators([T,I])
self.generate_category()
## Quick rewriting of the operation names to conform to
## Hook's terminology for UTTs
new_operations = []
for name_f,f in self.get_morphisms():
op = [0,0,0]
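            # op = [p, q, s]: op[0]/op[1] accumulate the transpositions applied while the
            # signature is '+' / '-' respectively, and op[2] tracks the current signature
            # (0 for '+', 1 for '-') as the generator word is read right to left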
for j in name_f[::-1]:
if j=="T":
op[op[2]]=op[op[2]]+1
if j=="I":
op[2]=1-op[2]
new_name = "<"+str(op[0])+","+str(op[1])+","+("+"*(op[2]==0)+"-"*(op[2]==1))+">"
new_morphism = CatMorphism(new_name,X,X)
new_morphism.set_mapping_matrix(f.get_mapping_matrix())
new_operations.append(new_morphism)
self.set_objects([X]) ## This erases previous morphisms
self._add_morphisms(new_operations)
self.generators = {"<1,0,+>":self.morphisms["<1,0,+>"],"<0,0,->":self.morphisms["<0,0,->"]}
class Left_Z3Q8_Group(MonoidAction):
"""Defines a simply transitive generalized neo-Riemannian group acting
on the left on the set of the 24 major and minor triads.
The group is an extension of Z_12 by Z_2 with a non-trivial cocycle.
"""
def __init__(self):
super(Left_Z3Q8_Group,self).__init__()
X = CatObject(".",["C_M","Cs_M","D_M","Eb_M","E_M","F_M","Fs_M","G_M","Gs_M","A_M","Bb_M","B_M",
"C_m","Cs_m","D_m","Eb_m","E_m","F_m","Fs_m","G_m","Gs_m","A_m","Bb_m","B_m"])
self.set_objects([X])
T = CatMorphism("T1",X,X)
M_T = np.zeros((24,24),dtype=bool)
for i in range(12):
M_T[(i+1)%12,i]=True
M_T[12+(i+1)%12,i+12]=True
T.set_mapping_matrix(M_T)
J = CatMorphism("J0",X,X)
M_J = np.zeros((24,24),dtype=bool)
for i in range(12):
M_J[(-i)%12 + 12,i]=True
M_J[(-i+6)%12, i+12]=True
J.set_mapping_matrix(M_J)
self.set_generators([T,J])
self._add_identities()
self._add_morphisms([T,J])
for i in range(2,12):
x = self.morphisms['id_.']
for j in range(i):
x = x*T
x.set_name("T"+str(i))
self._add_morphisms([x])
for i in range(1,12):
x = self.morphisms['id_.']
for j in range(i):
x = x*T
y=x*J
y.set_name("J"+str(i))
self._add_morphisms([y])
class Right_Z3Q8_Group(MonoidAction):
"""Defines a simply transitive generalized neo-Riemannian group acting
on the right on the set of the 24 major and minor triads.
The group is an extension of Z_12 by Z_2 with a non-trivial cocycle.
"""
def __init__(self):
super(Right_Z3Q8_Group,self).__init__()
X = CatObject(".",["C_M","Cs_M","D_M","Eb_M","E_M","F_M","Fs_M","G_M","Gs_M","A_M","Bb_M","B_M",
"C_m","Cs_m","D_m","Eb_m","E_m","F_m","Fs_m","G_m","Gs_m","A_m","Bb_m","B_m"])
self.set_objects([X])
T = CatMorphism("T1",X,X)
M_T = np.zeros((24,24),dtype=bool)
for i in range(12):
M_T[(i+1)%12,i]=True
M_T[12+(i-1)%12,i+12]=True
T.set_mapping_matrix(M_T)
J = CatMorphism("J0",X,X)
M_J = np.zeros((24,24),dtype=bool)
for i in range(12):
M_J[i + 12,i]=True
M_J[(i+6)%12, i+12]=True
J.set_mapping_matrix(M_J)
self.set_generators([T,J])
self._add_identities()
self._add_morphisms([T,J])
for i in range(2,12):
x = self.morphisms['id_.']
for j in range(i):
x = x*T
x.set_name("T"+str(i))
self._add_morphisms([x])
for i in range(1,12):
x = self.morphisms['id_.']
for j in range(i):
x = x*T
y=x*J
y.set_name("J"+str(i))
self._add_morphisms([y])
class UPL_Monoid(MonoidAction):
"""Defines a monoid acting on the set of the 28 major, minor,
and augmented triads by relations.
It is generated by three operations:
- P and L are the relational equivalent of the neo-Riemannian
P and L operations.
- U is the relation such that we have xUy whenever x (or y)
is an augmented triad, and the other chord has
two tones in common with x (or y)
"""
def __init__(self):
super(UPL_Monoid,self).__init__()
X = CatObject(".",["C_M","Cs_M","D_M","Eb_M","E_M","F_M","Fs_M","G_M","Gs_M","A_M","Bb_M","B_M",
"C_m","Cs_m","D_m","Eb_m","E_m","F_m","Fs_m","G_m","Gs_m","A_m","Bb_m","B_m",
"C_aug","F_aug","D_aug","G_aug"])
self.set_objects([X])
P = CatMorphism("P",X,X)
M_P = np.zeros((28,28),dtype=bool)
for i in range(12):
M_P[i+12,i]=True
M_P[i,i+12]=True
for i in range(24,28):
M_P[i,i]=True
P.set_mapping_matrix(M_P)
L = CatMorphism("L",X,X)
M_L = np.zeros((28,28),dtype=bool)
for i in range(12):
M_L[(i+4)%12+12,i]=True
M_L[(i+8)%12,12+i]=True
for i in range(24,28):
M_L[i,i]=True
L.set_mapping_matrix(M_L)
U = CatMorphism("U",X,X)
M_U = np.zeros((28,28),dtype=bool)
for i in range(12):
M_U[24+(i%4),i]=True
M_U[i,24+(i%4)]=True
M_U[24+((i+3)%4),12+i]=True
M_U[12+i,24+((i+3)%4)]=True
U.set_mapping_matrix(M_U)
self.set_generators([P,L,U])
self.generate_category()
class S_Monoid(MonoidAction):
"""Defines a monoid acting on the set of the 28 major, minor,
and augmented triads by relations.
It is generated by the S=P_1,0 relation, i.e. we have x(P_1,0)y
    whenever the chord x differs from y by the movement
of a single note by a semitone.
"""
def __init__(self):
super(S_Monoid,self).__init__()
X = CatObject(".",["C_M","Cs_M","D_M","Eb_M","E_M","F_M","Fs_M","G_M","Gs_M","A_M","Bb_M","B_M",
"C_m","Cs_m","D_m","Eb_m","E_m","F_m","Fs_m","G_m","Gs_m","A_m","Bb_m","B_m",
"C_aug","F_aug","D_aug","G_aug"])
self.set_objects([X])
S = CatMorphism("S",X,X)
M_S = np.zeros((28,28),dtype=bool)
for i in range(12):
M_S[24+(i%4),i]=True
M_S[i,24+(i%4)]=True
M_S[24+((i+3)%4),12+i]=True
M_S[12+i,24+((i+3)%4)]=True
M_S[12+i,i]=True
M_S[i,12+i]=True
M_S[12+(i+4)%12,i]=True
M_S[i,12+(i+4)%12]=True
S.set_mapping_matrix(M_S)
self.set_generators([S])
self.generate_category()
class T_Monoid(MonoidAction):
"""Defines a monoid acting on the set of the 28 major, minor,
and augmented triads by relations.
It is generated by the T=P_2,0 relation, i.e. we have x(P_2,0)y
    whenever the chord x differs from y by the movement
of two notes by a semitone each.
"""
def __init__(self):
super(T_Monoid,self).__init__()
X = CatObject(".",["C_M","Cs_M","D_M","Eb_M","E_M","F_M","Fs_M","G_M","Gs_M","A_M","Bb_M","B_M",
"C_m","Cs_m","D_m","Eb_m","E_m","F_m","Fs_m","G_m","Gs_m","A_m","Bb_m","B_m",
"C_aug","F_aug","D_aug","G_aug"])
self.set_objects([X])
T = CatMorphism("T",X,X)
M_T = np.zeros((28,28),dtype=bool)
for i in range(12):
M_T[(i+4)%12,i]=True
M_T[i,(i+4)%12]=True
M_T[(i+8)%12,i]=True
M_T[i,(i+8)%12]=True
M_T[12+(i+1)%12,i]=True
M_T[i,12+(i+1)%12]=True
M_T[12+(i+5)%12,i]=True
M_T[i,12+(i+5)%12]=True
M_T[24+((i+3)%4),i]=True
M_T[i,24+((i+3)%4)]=True
for i in range(12):
M_T[24+(i%4),12+i]=True
M_T[12+i,24+(i%4)]=True
M_T[12+(i+4)%12,12+i]=True
M_T[12+i,12+(i+4)%12]=True
M_T[12+(i+8)%12,12+i]=True
M_T[12+i,12+(i+8)%12]=True
T.set_mapping_matrix(M_T)
self.set_generators([T])
self.generate_category()
class K_Monoid(MonoidAction):
"""Defines a monoid acting on the set of the 28 major, minor,
and augmented triads by relations.
It is generated by the K=P_2,1 relation, i.e. we have x(P_2,1)y
    whenever the chord x differs from y by the movement of two notes
by a semitone each, and the remaining note by a tone.
"""
def __init__(self):
super(K_Monoid,self).__init__()
X = CatObject(".",["C_M","Cs_M","D_M","Eb_M","E_M","F_M","Fs_M","G_M","Gs_M","A_M","Bb_M","B_M",
"C_m","Cs_m","D_m","Eb_m","E_m","F_m","Fs_m","G_m","Gs_m","A_m","Bb_m","B_m",
"C_aug","F_aug","D_aug","G_aug"])
self.set_objects([X])
K = CatMorphism("K",X,X)
M_K = np.zeros((28,28),dtype=bool)
for i in range(12):
M_K[12+(i+3)%12,i]=True
M_K[i,12+(i+3)%12]=True
M_K[12+(i+11)%12,i]=True
M_K[i,12+(i+11)%12]=True
M_K[24+((i+1)%4),i]=True
M_K[i,24+((i+1)%4)]=True
M_K[24+((i+2)%4),12+i]=True
M_K[12+i,24+((i+2)%4)]=True
K.set_mapping_matrix(M_K)
self.set_generators([K])
self.generate_category()
class W_Monoid(MonoidAction):
"""Defines a monoid acting on the set of the 28 major, minor,
and augmented triads by relations.
    It is generated by the W=P_1,2 relation, i.e. we have x(P_1,2)y
    whenever the chord x differs from y by the movement of a single note
    by a semitone, and the remaining notes by a tone each.
"""
def __init__(self):
super(W_Monoid,self).__init__()
X = CatObject(".",["C_M","Cs_M","D_M","Eb_M","E_M","F_M","Fs_M","G_M","Gs_M","A_M","Bb_M","B_M",
"C_m","Cs_m","D_m","Eb_m","E_m","F_m","Fs_m","G_m","Gs_m","A_m","Bb_m","B_m",
"C_aug","F_aug","D_aug","G_aug"])
self.set_objects([X])
W = CatMorphism("W",X,X)
M_W = np.zeros((28,28),dtype=bool)
for i in range(12):
M_W[12+(i+2)%12,i]=True
M_W[i,12+(i+2)%12]=True
M_W[12+(i+6)%12,i]=True
M_W[i,12+(i+6)%12]=True
M_W[24+((i+2)%4),i]=True
M_W[i,24+((i+2)%4)]=True
M_W[24+((i+1)%4),12+i]=True
M_W[12+i,24+((i+1)%4)]=True
W.set_mapping_matrix(M_W)
self.set_generators([W])
self.generate_category()
class ST_Monoid(MonoidAction):
"""Defines a monoid acting on the set of the 28 major, minor,
and augmented triads by relations.
It is generated by the S and T operations presented above.
"""
def __init__(self):
super(ST_Monoid,self).__init__()
X = CatObject(".",["C_M","Cs_M","D_M","Eb_M","E_M","F_M","Fs_M","G_M","Gs_M","A_M","Bb_M","B_M",
"C_m","Cs_m","D_m","Eb_m","E_m","F_m","Fs_m","G_m","Gs_m","A_m","Bb_m","B_m",
"C_aug","F_aug","D_aug","G_aug"])
self.set_objects([X])
S = CatMorphism("S",X,X)
M_S = np.zeros((28,28),dtype=bool)
for i in range(12):
M_S[24+(i%4),i]=True
M_S[i,24+(i%4)]=True
M_S[24+((i+3)%4),12+i]=True
M_S[12+i,24+((i+3)%4)]=True
M_S[12+i,i]=True
M_S[i,12+i]=True
M_S[12+(i+4)%12,i]=True
M_S[i,12+(i+4)%12]=True
S.set_mapping_matrix(M_S)
T = CatMorphism("T",X,X)
M_T = np.zeros((28,28),dtype=bool)
for i in range(12):
M_T[(i+4)%12,i]=True
M_T[i,(i+4)%12]=True
M_T[(i+8)%12,i]=True
M_T[i,(i+8)%12]=True
M_T[12+(i+1)%12,i]=True
M_T[i,12+(i+1)%12]=True
M_T[12+(i+5)%12,i]=True
M_T[i,12+(i+5)%12]=True
M_T[24+((i+3)%4),i]=True
M_T[i,24+((i+3)%4)]=True
for i in range(12):
M_T[24+(i%4),12+i]=True
M_T[12+i,24+(i%4)]=True
M_T[12+(i+4)%12,12+i]=True
M_T[12+i,12+(i+4)%12]=True
M_T[12+(i+8)%12,12+i]=True
M_T[12+i,12+(i+8)%12]=True
T.set_mapping_matrix(M_T)
self.set_generators([S,T])
self.generate_category()
| AlexPof/opycleid | opycleid/musicmonoids.py | Python | bsd-3-clause | 19,122 |
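# Editor's note: a small self-contained check of the Noll-monoid generator
# arithmetic used above (f(x) = 3x + 7 mod 12, g(x) = 8x + 4 mod 12), written
# directly with numpy rather than through the opycleid classes.

import numpy as np

pitch_classes = ["C", "Cs", "D", "Eb", "E", "F", "Fs", "G", "Gs", "A", "Bb", "B"]

def as_matrix(func):
    """Boolean mapping matrix M with M[func(i) % 12, i] = True, as built in the classes above."""
    m = np.zeros((12, 12), dtype=bool)
    for i in range(12):
        m[func(i) % 12, i] = True
    return m

f = lambda x: 3 * x + 7
g = lambda x: 8 * x + 4

M_F, M_G = as_matrix(f), as_matrix(g)

# f sends C (0) to G (7); g sends C (0) to E (4)
print(pitch_classes[f(0) % 12], pitch_classes[g(0) % 12])
# composing morphisms corresponds to the boolean matrix product: M_(g.f) = M_G * M_F
print(np.array_equal((M_G.astype(int) @ M_F.astype(int)) > 0,
                     as_matrix(lambda x: g(f(x)))))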
'''OpenGL extension MESA.ycbcr_texture
This module customises the behaviour of the
OpenGL.raw.GL.MESA.ycbcr_texture to provide a more
Python-friendly API
Overview (from the spec)
This extension supports texture images stored in the YCbCr format.
There is no support for converting YCbCr images to RGB or vice versa
during pixel transfer. The texture's YCbCr colors are converted to
	RGB during texture sampling, after which all the usual per-fragment
operations take place. Only 2D texture images are supported (not
glDrawPixels, glReadPixels, etc).
A YCbCr pixel (texel) is a 16-bit unsigned short with two components.
The first component is luminance (Y). For pixels in even-numbered
image columns, the second component is Cb. For pixels in odd-numbered
image columns, the second component is Cr. If one were to convert the
data to RGB one would need to examine two pixels from columns N and N+1
(where N is even) to deduce the RGB color.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/MESA/ycbcr_texture.txt
'''
from OpenGL import platform, constants, constant, arrays
from OpenGL import extensions, wrapper
from OpenGL.GL import glget
import ctypes
from OpenGL.raw.GL.MESA.ycbcr_texture import *
### END AUTOGENERATED SECTION | D4wN/brickv | src/build_data/windows/OpenGL/GL/MESA/ycbcr_texture.py | Python | gpl-2.0 | 1,306 |
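# Editor's note: a rough illustration of the texel layout described in the overview
# above -- each 16-bit texel carries luminance plus Cb (even columns) or Cr (odd
# columns). Component order within the 16-bit word depends on the type token passed
# to glTexImage2D (GL_UNSIGNED_SHORT_8_8_MESA vs the _REV variant); this sketch only
# builds the array and leaves the actual upload out.

import numpy as np

def pack_ycbcr_row(y_row, cb_row, cr_row):
    """Interleave one image row: columns 0,2,4,... carry (Y, Cb), columns 1,3,5,... carry (Y, Cr)."""
    width = len(y_row)
    texels = np.empty((width, 2), dtype=np.uint8)
    texels[:, 0] = y_row                            # luminance in every texel
    texels[0::2, 1] = cb_row[: (width + 1) // 2]    # chroma blue on even columns
    texels[1::2, 1] = cr_row[: width // 2]          # chroma red on odd columns
    return texels.view(np.uint16).reshape(width)    # one 16-bit texel per pixel

row = pack_ycbcr_row(np.full(8, 128, np.uint8),
                     np.arange(4, dtype=np.uint8),
                     np.arange(4, dtype=np.uint8))
print(row.dtype, row.shape)   # uint16 (8,)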
#!/usr/bin/env python
import rospy
from flexbe_core import EventState, Logger
'''
Created on 21.09.2017
@author: Philippe La Madeleine
'''
class SetRosParam(EventState):
'''
Store a value in the ros parameter server for later use.
    -- ParamName string The name of the ROS parameter to set.
    ># Value object The value to store; if it is empty/None, the parameter is deleted instead.
<= done The rosparam is set
'''
def __init__(self, ParamName):
'''
Constructor
'''
super(SetRosParam, self).__init__(outcomes=['done'], input_keys=['Value'])
self.ParamName = ParamName
def execute(self, userdata):
'''
Execute this state
'''
if userdata.Value:
rospy.set_param(self.ParamName, userdata.Value)
else:
if rospy.has_param(self.ParamName):
rospy.delete_param(self.ParamName)
return "done"
| WalkingMachine/sara_behaviors | sara_flexbe_states/src/sara_flexbe_states/SetRosParam.py | Python | bsd-3-clause | 917 |
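# Editor's note: a hedged sketch of how this state is typically wired into a FlexBE
# behavior. The state-machine outcomes, userdata key and parameter name are
# illustrative assumptions (and the import path is inferred from this repo layout);
# running it of course requires a ROS master.

from flexbe_core import OperatableStateMachine, Autonomy
from sara_flexbe_states.SetRosParam import SetRosParam

sm = OperatableStateMachine(outcomes=['finished'])
sm.userdata.value_to_store = 'kitchen'   # whatever should end up on the parameter server

with sm:
    OperatableStateMachine.add('store_target_room',
                               SetRosParam(ParamName='/sara/target_room'),
                               transitions={'done': 'finished'},
                               autonomy={'done': Autonomy.Off},
                               remapping={'Value': 'value_to_store'})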
# Download the Python helper library from twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/user/account
account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"
client = Client(account_sid, auth_token)
numbers = client.available_phone_numbers("US") \
.local \
.list(contains="STORM")
number = client.incoming_phone_numbers \
.create(phone_number=numbers[0].phone_number)
print(number.sid)
| teoreteetik/api-snippets | rest/available-phone-numbers/local-basic-example-3/local-get-basic-example-3.6.x.py | Python | mit | 528 |
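# Editor's note: the snippet above assumes the search returned at least one match.
# A slightly more defensive variant (same Twilio calls, made-up credentials) checks
# before purchasing:

from twilio.rest import Client

client = Client("ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX", "your_auth_token")

candidates = client.available_phone_numbers("US").local.list(contains="STORM")
if candidates:
    number = client.incoming_phone_numbers.create(phone_number=candidates[0].phone_number)
    print(number.sid)
else:
    print("No vanity numbers matching 'STORM' are currently available.")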
from simple_model import *
| grollins/foldkin | foldkin/simple/__init__.py | Python | bsd-2-clause | 27 |
#!/usr/bin/env python
#
# Copyright 2014 Institute for Theoretical Information Technology,
# RWTH Aachen University
# www.ti.rwth-aachen.de
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from numpy import concatenate
from gnuradio import gr
from gnuradio.eng_option import eng_option
from ofdm import frequency_shift_vcc
from ofdm import vector_sampler, peak_detector_02_fb
from ofdm import vector_mask
from gnuradio import blocks
from gnuradio.blocks import delay
from optparse import OptionParser
import schmidl
from gr_tools import log_to_file,terminate_stream
from morelli import morelli_foe
import ofdm as ofdm
class ofdm_receiver(gr.hier_block2):
"""
input: complex baseband
output: extracted frequency-offset corrected ofdm symbols
second stream, bytes, 1 if ofdm sym is first preamble, 0 else
consume second stream at same rate as first stream!
"""
def __init__(self, fft_length, block_length, frame_data_part, block_header,
options):
gr.hier_block2.__init__(self, "ofdm_receiver",
gr.io_signature (1,1,gr.sizeof_gr_complex),
gr.io_signature2(2,2,gr.sizeof_gr_complex*fft_length,
gr.sizeof_char))
frame_length = frame_data_part + block_header.no_pilotsyms
cp_length = block_length-fft_length
self.input=gr.kludge_copy(gr.sizeof_gr_complex)
self.connect(self, self.input)
self.blocks_out = (self,0)
self.frame_trigger_out = (self,1)
self.snr_out = (self,2)
if options.log:
log_to_file(self, self.input, "data/receiver_input.compl")
# peak detector: thresholds low, high
#self._pd_thres_lo = 0.09
#self._pd_thres_hi = 0.1
self._pd_thres = 0.2
self._pd_lookahead = fft_length / 2 # empirically chosen
#########################
# coarse timing offset estimator
# self.tm = schmidl.modified_timing_metric(fft_length,[1]*(fft_length))
self.tm = schmidl.recursive_timing_metric(fft_length)
self.connect(self.input,self.tm)
assert(hasattr(block_header, 'sc_preamble_pos'))
assert(block_header.sc_preamble_pos == 0) # TODO: relax this restriction
if options.filter_timingmetric:
timingmetric_shift = -2 #int(-cp_length * 0.8)
tmfilter = gr.fir_filter_fff(1, [1./cp_length]*cp_length)
self.connect( self.tm, tmfilter )
self.timing_metric = tmfilter
print "Filtering timing metric, experimental"
else:
self.timing_metric = self.tm
timingmetric_shift = int(-cp_length/4)
if options.log:
log_to_file(self, self.timing_metric, "data/tm.float")
# peak detection
#threshold = gr.threshold_ff(self._pd_thres_lo,self._pd_thres_hi,0)
#muted_tm = gr.multiply_ff()
peak_detector = peak_detector_02_fb(self._pd_lookahead, self._pd_thres)
#self.connect(self.timing_metric, threshold, (muted_tm,0))
#self.connect(self.timing_metric, (muted_tm,1))
#self.connect(muted_tm, peak_detector)
self.connect(self.timing_metric, peak_detector)
if options.log:
pd_float = gr.char_to_float()
self.connect(peak_detector,pd_float)
log_to_file(self, pd_float, "data/peakdetector.float")
if options.no_timesync:
terminate_stream( self, peak_detector )
trigger = [0]*(frame_length*block_length)
trigger[ block_length-1 ] = 1
peak_detector = blocks.vector_source_b( trigger, True )
print "Bypassing timing synchronisation"
# TODO: refine detected peaks with 90% average method as proposed
    # by Schmidl & Cox:
# Starting from peak, find first points to the left and right whose
# value is less than or equal 90% of the peak value. New trigger point
# is average of both
# Frequency Offset Estimation
# Used: Algorithm as proposed from Morelli & Mengali
# Idea: Use periodic preamble, correlate identical parts, determine
# phase offset. This phase offset is a function of the frequency offset.
assert(hasattr(block_header, 'mm_preamble_pos'))
foe = morelli_foe(fft_length,block_header.mm_periodic_parts)
self.connect(self.input,(foe,0))
if block_header.mm_preamble_pos > 0:
delayed_trigger = gr.delay(gr.sizeof_char,
block_header.mm_preamble_pos*block_length)
self.connect(peak_detector,delayed_trigger,(foe,1))
else:
self.connect(peak_detector,(foe,1))
self.freq_offset = foe
if options.log:
log_to_file(self, self.freq_offset, "data/freqoff_out.float")
if options.average_freqoff:
#avg_foe = gr.single_pole_iir_filter_ff( 0.1 )
avg_foe = ofdm.lms_fir_ff( 20, 1e-3 )
self.connect( self.freq_offset, avg_foe )
self.freq_offset = avg_foe
#log_to_file( self, avg_foe, "data/freqoff_out_avg.float" )
print "EXPERIMENTAL!!! Filtering frequency offset estimate"
if options.no_freqsync:
terminate_stream( self, self.freq_offset )
self.freq_offset = blocks.vector_source_f( [0.0], True )
print "Bypassing frequency offset estimator, offset=0.0"
# TODO: dynamic solution
frametrig_seq = concatenate([[1],[0]*(frame_length-1)])
self.time_sync = peak_detector
self.frame_trigger = blocks.vector_source_b(frametrig_seq,True)
self.connect(self.frame_trigger, self.frame_trigger_out)
##########################
# symbol extraction and processing
# First, we extract the whole ofdm block, then we divide this block into
# several ofdm symbols. This asserts that all symbols belonging to the
# same ofdm block will be a consecutive order.
# extract ofdm symbols
# compensate frequency offset
# TODO: use PLL and update/reset signals
delayed_timesync = gr.delay(gr.sizeof_char,
(frame_length-1)*block_length+timingmetric_shift)
self.connect( self.time_sync, delayed_timesync )
self.block_sampler = vector_sampler(gr.sizeof_gr_complex,block_length*frame_length)
self.discard_cp = vector_mask(block_length,cp_length,fft_length,[])
if options.use_dpll:
dpll = gr.dpll_bb( frame_length * block_length , .01 )
self.connect( delayed_timesync, dpll )
if options.log:
dpll_f = gr.char_to_float()
delayed_timesync_f = gr.char_to_float()
self.connect( dpll, dpll_f )
self.connect( delayed_timesync, delayed_timesync_f )
log_to_file( self, dpll_f, "data/dpll.float" )
log_to_file( self, delayed_timesync_f, "data/dpll_in.float" )
delayed_timesync = dpll
print "Using DPLL, EXPERIMENTAL!!!!!"
self.connect(self.input,self.block_sampler)
self.connect(delayed_timesync,(self.block_sampler,1))
if options.log:
log_to_file(self, self.block_sampler, "data/block_sampler_out.compl")
# TODO: dynamic solution
self.ofdm_symbols = blocks.vector_to_stream(gr.sizeof_gr_complex*block_length,
frame_length)
self.connect(self.block_sampler,self.ofdm_symbols,self.discard_cp)
if options.log:
log_to_file(self, self.discard_cp, "data/discard_cp_out.compl")
dcp_fft = gr.fft_vcc(fft_length, True, [], True)
self.connect(self.discard_cp,dcp_fft)
log_to_file(self, dcp_fft, "data/discard_cp_fft.compl")
# reset phase accumulator inside freq_shift on every block start
# setup output connection
freq_shift = frequency_shift_vcc(fft_length, -1.0/fft_length, cp_length)
self.connect(self.discard_cp,(freq_shift,0))
self.connect(self.freq_offset,(freq_shift,1))
self.connect(self.frame_trigger,(freq_shift,2))
self.connect(freq_shift, self.blocks_out)
if options.log:
log_to_file(self, freq_shift, "data/freqshift_out.compl")
if options.no_freqshift:
terminate_stream( self, freq_shift )
freq_shift = self.discard_cp
print "Bypassing frequency shift block"
def _print_verbage(self):
print "\nOFDM Receiver:"
def add_options(normal, expert):
expert.add_option("", "--no-timesync", action="store_true",
default=False,
help = "Debug time synchronisation, replace estimator"+
" with static fixed spaced trigger")
expert.add_option("", "--no-freqsync", action="store_true",
default=False,
help = "Debug frequency synchronisation, replace estimator"+
" with fixed offset 0.0")
expert.add_option("", "--no-freqshift", action="store_true",
default=False,
help="Debug frequency shift block, bypass")
expert.add_option("", "--use-dpll", action="store_true",
default=False,
help="Enable digital PLL")
expert.add_option("", "--average-freqoff", action="store_true",
default=False,
help="Experimental filtering/averaging of frequency "+
"offset estimate")
expert.add_option("", "--filter-timingmetric", action="store_true",
default=False,
help="Enable filtering of timing metric")
add_options = staticmethod(add_options)
| rwth-ti/gr-ofdm | python/ofdm/ofdm_receiver.py | Python | gpl-3.0 | 10,092 |
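# Editor's note: the timing synchronisation above is built on a Schmidl & Cox style
# metric (a periodic preamble whose two identical halves are correlated). The actual
# blocks (schmidl.recursive_timing_metric, morelli_foe) implement refined versions;
# the following is only a compact numpy sketch of the textbook metric, independent
# of GNU Radio.

import numpy as np

def schmidl_cox_metric(rx, half_len):
    """Timing metric M(d) = |P(d)|^2 / R(d)^2 for a preamble with two identical halves.

    P(d) correlates the two halves starting at sample d; R(d) is the energy of the
    second half, used for normalisation.
    """
    d_max = len(rx) - 2 * half_len
    metric = np.zeros(d_max)
    for d in range(d_max):
        first = rx[d:d + half_len]
        second = rx[d + half_len:d + 2 * half_len]
        p = np.sum(np.conj(first) * second)
        r = np.sum(np.abs(second) ** 2)
        metric[d] = np.abs(p) ** 2 / (r ** 2 + 1e-12)
    return metric

# toy check: a repeated random half-symbol yields a metric peak at its start (index 16)
rng = np.random.default_rng(0)
half = rng.standard_normal(32) + 1j * rng.standard_normal(32)
rx = np.concatenate([rng.standard_normal(16) * 0.1, half, half, rng.standard_normal(16) * 0.1])
print(np.argmax(schmidl_cox_metric(rx, 32)))   # expected near 16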
# coding: utf-8
"""
Swaggy Jenkins
Jenkins API clients generated from Swagger / Open API specification # noqa: E501
The version of the OpenAPI document: 1.1.2-pre.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import openapi_client
from openapi_client.models.string_parameter_value import StringParameterValue # noqa: E501
from openapi_client.rest import ApiException
class TestStringParameterValue(unittest.TestCase):
"""StringParameterValue unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test StringParameterValue
        include_optional is a boolean, when False only required
params are included, when True both required and
optional params are included """
# model = openapi_client.models.string_parameter_value.StringParameterValue() # noqa: E501
if include_optional :
return StringParameterValue(
_class = '',
name = '',
value = ''
)
else :
return StringParameterValue(
)
def testStringParameterValue(self):
"""Test StringParameterValue"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
| cliffano/swaggy-jenkins | clients/python-legacy/generated/test/test_string_parameter_value.py | Python | mit | 1,534 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-08-02 00:55
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('images', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='imageseries',
name='patient_id',
field=models.CharField(default=django.utils.timezone.now, max_length=64),
preserve_default=False,
),
]
| antonow/concept-to-clinic | interface/backend/images/migrations/0002_imageseries_patient_id.py | Python | mit | 546 |
import unittest
from troposphere import GetAtt, Template, Join
from troposphere.awslambda import Code, Function
class TestAWSLambda(unittest.TestCase):
def test_exclusive(self):
lambda_func = Function(
"AMIIDLookup",
Handler="index.handler",
Role=GetAtt("LambdaExecutionRole", "Arn"),
Code=Code(
S3Bucket="lambda-functions",
S3Key="amilookup.zip",
),
Runtime="nodejs",
Timeout="25",
)
t = Template()
t.add_resource(lambda_func)
t.to_json()
def test_zip_file(self):
lambda_func = Function(
"AMIIDLookup",
Handler="index.handler",
Role=GetAtt("LambdaExecutionRole", "Arn"),
Code=Code(
ZipFile=Join("", [
"var response = require('cfn-response');",
"exports.handler = function(event, context) {",
" var input = parseInt(event.ResourceProperties.Input);",
" var responseData = {Value: input * 5};",
" response.send("
" event, context, response.SUCCESS, responseData"
" );",
"};"
]),
),
Runtime="nodejs",
Timeout="25",
)
t = Template()
t.add_resource(lambda_func)
t.to_json()
if __name__ == '__main__':
unittest.main()
| WeAreCloudar/troposphere | tests/test_awslambda.py | Python | bsd-2-clause | 1,508 |
"""Top-level package for bittrader: exposes the API client, the bot and the helper library."""
# Define submodules
__all__ = ['lib']
import bittrader
import bittrader.lib
from bittrader.api import API
from bittrader.bot import bot
from bittrader.lib import *
| potterzot/bit-trader | bittrader/__init__.py | Python | mit | 189 |