code (string) | repo_name (string) | path (string) | language (string) | license (string) | size (int64)
---|---|---|---|---|---|
# Written by Boudewijn Schoon
# see LICENSE.txt for license information
# the extension id for the 'ut_metadata' message
EXTEND_MSG_METADATA = 'ut_metadata'
EXTEND_MSG_METADATA_ID = chr(224)
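# Added illustration (hedged sketch, not part of the original module): assuming
# the id above is the local extension id advertised for 'ut_metadata' in the
# BEP 10 extended handshake, the handshake's 'm' dictionary would map the
# extension name to that id roughly as below (the variable name is hypothetical).
EXAMPLE_EXTEND_HANDSHAKE_M = {EXTEND_MSG_METADATA: ord(EXTEND_MSG_METADATA_ID)}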
| egbertbouman/tribler-g | Tribler/Core/DecentralizedTracking/MagnetLink/__init__.py | Python | lgpl-2.1 | 203 |
#!/usr/bin/env python
# rebase.py
# Copyright (c) 2013-2016 Pablo Acosta-Serafini
# See LICENSE for details
# pylint: disable=C0111,F0401,R0914,W0141
# Standard library imports
from __future__ import print_function
import argparse
import difflib
import os
import shutil
import stat
import sys
# Putil imports
import sbin.functions
###
# Functions
###
def diff(file1, file2):
""" Diff two files """
with open(file1, 'r') as fobj1:
flines1 = [item.rstrip() for item in fobj1.readlines()]
with open(file2, 'r') as fobj2:
flines2 = [item.rstrip() for item in fobj2.readlines()]
diff_list = list(
difflib.unified_diff(flines1, flines2, fromfile=file1, tofile=file2)
)
return bool(diff_list)
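# Hedged usage sketch for diff() above (the file paths are hypothetical, not
# part of the original script): the function returns True when the files differ.
# if diff('setup.py', '/tmp/rebase/setup.py'):
#     print('setup.py differs from the rebase copy')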
def get_current_branch():
""" Get git current branch """
stdout = sbin.functions.shcmd(
['git', 'branch', '--list', '--no-color'],
'Cannot get branch names'
)
branches = [
item.strip() for item in stdout.split()
]
for num, item in enumerate(branches):
if item == '*':
return branches[num+1]
raise RuntimeError('Current Git branch could not be determined')
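# Illustration of the parsing above (hedged; the sample output is hypothetical):
# for git output such as "  develop\n* master\n" the whitespace split yields
# ['develop', '*', 'master'], and the element following '*' ('master') is
# returned as the current branch.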
def rebase(pkg_dir, output_dir, obranch, rbranch):
""" Rebase branch """
stdout = sbin.functions.shcmd(
['git', 'rev-list', '--first-parent', '^'+obranch, rbranch],
'Branches do not have common ancestor'
)
if get_current_branch() != rbranch:
raise RuntimeError('Current branch is not branch to rebase')
commits = [item.strip() for item in stdout.split()]
files = []
for commit in commits:
stdout = sbin.functions.shcmd(
[
'git',
'diff-tree',
'--no-commit-id',
'--name-only',
'-r',
commit
],
'Cannot get commit files for {0}'.format(commit)
)
cfiles = [item.strip() for item in stdout.split()]
files.extend(cfiles)
files = sorted(list(set(files)))
output_dir = os.path.abspath(output_dir)
if os.path.exists(output_dir):
shutil.rmtree(output_dir)
os.makedirs(output_dir)
diff_script = ['#!/bin/bash\n']
del_script = ['#!/bin/bash\n']
for item in files:
sfile = os.path.join(pkg_dir, item)
dfile = os.path.join(output_dir, item)
if not os.path.exists(sfile):
del_script.append('rm -rf '+sfile+'\n')
elif diff(sfile, dfile):
ddir = os.path.dirname(dfile)
if not os.path.exists(ddir):
os.makedirs(ddir)
shutil.copy(sfile, dfile)
diff_script.append('meld '+sfile+' '+dfile+'\n')
if len(del_script) > 1:
sfile = os.path.join(output_dir, 'rmfiles.sh')
with open(sfile, 'w') as fobj:
fobj.writelines(del_script)
cst = os.stat(sfile)
os.chmod(sfile, cst.st_mode | stat.S_IXUSR)
if len(diff_script) > 1:
sfile = os.path.join(output_dir, 'diff.sh')
with open(sfile, 'w') as fobj:
fobj.writelines(diff_script)
cst = os.stat(sfile)
os.chmod(sfile, cst.st_mode | stat.S_IXUSR)
def valid_branch(branch):
""" Argparse checker for branch name """
branch = branch.strip()
stdout = sbin.functions.shcmd(
['git', 'branch', '--list', '--no-color'],
'Cannot verify branch'
)
branches = [
item.strip() for item in stdout.split() if item.strip() != '*'
]
branches.append('master')
branches = list(set(branches))
if branch not in branches:
raise argparse.ArgumentTypeError(
'Branch {0} does not exist'.format(branch)
)
return branch
if __name__ == "__main__":
PKG_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PARSER = argparse.ArgumentParser(
description='Rebase Git branch'
)
PARSER.add_argument(
'-d', '--directory',
help=(
'directory where branch files are copied to '
'(default ${HOME}/rebase)'
),
nargs=1,
default=[os.path.join(os.environ['HOME'], 'rebase')]
)
PARSER.add_argument(
'-r', '--rebase_branch',
help='branch to rebase',
type=valid_branch,
nargs=1,
)
PARSER.add_argument(
'-o', '--origin_branch',
help='origin branch',
type=valid_branch,
nargs=1,
)
ARGS = PARSER.parse_args()
sys.exit(
rebase(
PKG_DIR,
ARGS.directory[0],
ARGS.origin_branch[0],
ARGS.rebase_branch[0]
)
)
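# Hedged usage sketch (the branch names are hypothetical): with my-feature
# checked out, the script would be invoked from the repository root roughly as
#   python sbin/rebase.py -o master -r my-feature -d ~/rebase
# which copies the files touched on my-feature into ~/rebase and writes the
# diff.sh / rmfiles.sh helper scripts there.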
| pmacosta/putil | sbin/rebase.py | Python | mit | 4,662 |
# partial --== Decompile ==-- with fixes
import Live
from _Framework.TransportComponent import TransportComponent
from _Framework.ButtonElement import ButtonElement
from _Framework.EncoderElement import EncoderElement #added
class ShiftableTransportComponent(TransportComponent):
__doc__ = ' TransportComponent that only uses certain buttons if a shift button is pressed '
def __init__(self):
TransportComponent.__init__(self)
self._shift_button = None
self._quant_toggle_button = None
self._shift_pressed = False
self._last_quant_value = Live.Song.RecordingQuantization.rec_q_eight
self.song().add_midi_recording_quantization_listener(self._on_quantisation_changed)
self._on_quantisation_changed()
self._undo_button = None #added from OpenLabs SpecialTransportComponent script
self._redo_button = None #added from OpenLabs SpecialTransportComponent script
self._bts_button = None #added from OpenLabs SpecialTransportComponent script
self._tempo_encoder_control = None #new addition
return None
def disconnect(self):
TransportComponent.disconnect(self)
if self._shift_button != None:
self._shift_button.remove_value_listener(self._shift_value)
self._shift_button = None
if self._quant_toggle_button != None:
self._quant_toggle_button.remove_value_listener(self._quant_toggle_value)
self._quant_toggle_button = None
self.song().remove_midi_recording_quantization_listener(self._on_quantisation_changed)
if (self._undo_button != None): #added from OpenLabs SpecialTransportComponent script
self._undo_button.remove_value_listener(self._undo_value)
self._undo_button = None
if (self._redo_button != None): #added from OpenLabs SpecialTransportComponent script
self._redo_button.remove_value_listener(self._redo_value)
self._redo_button = None
if (self._bts_button != None): #added from OpenLabs SpecialTransportComponent script
self._bts_button.remove_value_listener(self._bts_value)
self._bts_button = None
if (self._tempo_encoder_control != None): #new addition
self._tempo_encoder_control.remove_value_listener(self._tempo_encoder_value)
self._tempo_encoder_control = None
return None
def set_shift_button(self, button):
assert button is None or (isinstance(button, ButtonElement) and button.is_momentary())
if self._shift_button != button:
if self._shift_button != None:
self._shift_button.remove_value_listener(self._shift_value)
self._shift_button = button
if self._shift_button != None:
self._shift_button.add_value_listener(self._shift_value)
self._rebuild_callback()
self.update()
return None
def set_quant_toggle_button(self, button):
assert button is None or (isinstance(button, ButtonElement) and button.is_momentary())
if self._quant_toggle_button != button:
if self._quant_toggle_button != None:
self._quant_toggle_button.remove_value_listener(self._quant_toggle_value)
self._quant_toggle_button = button
if self._quant_toggle_button != None:
self._quant_toggle_button.add_value_listener(self._quant_toggle_value)
self._rebuild_callback()
self.update()
return None
def update(self):
self._on_metronome_changed()
self._on_overdub_changed()
self._on_quantisation_changed()
def _shift_value(self, value):
assert self._shift_button != None
assert value in range(128)
self._shift_pressed = value != 0
if self.is_enabled():
self.update()
return None
def _metronome_value(self, value):
if self._shift_pressed:
TransportComponent._metronome_value(self, value)
def _overdub_value(self, value):
if not self._shift_pressed:
TransportComponent._overdub_value(self, value)
def _tap_tempo_value(self, value): # Added
if not self._shift_pressed:
TransportComponent._tap_tempo_value(self, value)
def _quant_toggle_value(self, value):
assert (self._quant_toggle_button != None)
assert (value in range(128))
assert (self._last_quant_value != Live.Song.RecordingQuantization.rec_q_no_q)
if (self.is_enabled() and (not self._shift_pressed)):
if ((value != 0) or (not self._quant_toggle_button.is_momentary())):
quant_value = self.song().midi_recording_quantization
if (quant_value != Live.Song.RecordingQuantization.rec_q_no_q):
self._last_quant_value = quant_value
self.song().midi_recording_quantization = Live.Song.RecordingQuantization.rec_q_no_q
else:
self.song().midi_recording_quantization = self._last_quant_value
def _on_metronome_changed(self):
if self._shift_pressed:
TransportComponent._on_metronome_changed(self)
def _on_overdub_changed(self):
if not self._shift_pressed:
TransportComponent._on_overdub_changed(self)
def _on_quantisation_changed(self):
if self.is_enabled():
quant_value = self.song().midi_recording_quantization
quant_on = (quant_value != Live.Song.RecordingQuantization.rec_q_no_q)
if quant_on:
self._last_quant_value = quant_value
if ((not self._shift_pressed) and (self._quant_toggle_button != None)):
if quant_on:
self._quant_toggle_button.turn_on()
else:
self._quant_toggle_button.turn_off()
""" from OpenLabs module SpecialTransportComponent """
def set_undo_button(self, undo_button):
assert isinstance(undo_button, (ButtonElement,
type(None)))
if (undo_button != self._undo_button):
if (self._undo_button != None):
self._undo_button.remove_value_listener(self._undo_value)
self._undo_button = undo_button
if (self._undo_button != None):
self._undo_button.add_value_listener(self._undo_value)
self.update()
def set_redo_button(self, redo_button):
assert isinstance(redo_button, (ButtonElement,
type(None)))
if (redo_button != self._redo_button):
if (self._redo_button != None):
self._redo_button.remove_value_listener(self._redo_value)
self._redo_button = redo_button
if (self._redo_button != None):
self._redo_button.add_value_listener(self._redo_value)
self.update()
def set_bts_button(self, bts_button): #"back to start" button
assert isinstance(bts_button, (ButtonElement,
type(None)))
if (bts_button != self._bts_button):
if (self._bts_button != None):
self._bts_button.remove_value_listener(self._bts_value)
self._bts_button = bts_button
if (self._bts_button != None):
self._bts_button.add_value_listener(self._bts_value)
self.update()
def _undo_value(self, value):
assert (self._undo_button != None)
assert (value in range(128))
if self.is_enabled():
if ((value != 0) or (not self._undo_button.is_momentary())):
if self.song().can_undo:
self.song().undo()
def _redo_value(self, value):
assert (self._redo_button != None)
assert (value in range(128))
if self.is_enabled():
if ((value != 0) or (not self._redo_button.is_momentary())):
if self.song().can_redo:
self.song().redo()
def _bts_value(self, value):
assert (self._bts_button != None)
assert (value in range(128))
if self.is_enabled():
if ((value != 0) or (not self._bts_button.is_momentary())):
self.song().current_song_time = 0.0
def _tempo_encoder_value(self, value):
if not self._shift_pressed:
assert (self._tempo_encoder_control != None)
assert (value in range(128))
backwards = (value >= 64)
step = 0.1 #step = 1.0 #reduce this for finer control; 1.0 is 1 bpm
if backwards:
amount = (value - 128)
else:
amount = value
tempo = max(20, min(999, (self.song().tempo + (amount * step))))
self.song().tempo = tempo
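# Added illustration (standalone sketch, not part of the original component;
# the helper name is hypothetical): _tempo_encoder_value above decodes relative
# two's-complement CC values, treating 0..63 as positive steps and 64..127 as
# negative ones.
def _example_decode_relative_cc(value, step=0.1):
# mirrors the arithmetic above: values >= 64 wrap around to negative offsets
amount = (value - 128) if value >= 64 else value
return amount * step
# e.g. _example_decode_relative_cc(1) -> 0.1, _example_decode_relative_cc(127) -> -0.1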
def set_tempo_encoder(self, control):
assert ((control == None) or (isinstance(control, EncoderElement) and (control.message_map_mode() is Live.MidiMap.MapMode.relative_two_compliment)))
if (self._tempo_encoder_control != None):
self._tempo_encoder_control.remove_value_listener(self._tempo_encoder_value)
self._tempo_encoder_control = control
if (self._tempo_encoder_control != None):
self._tempo_encoder_control.add_value_listener(self._tempo_encoder_value)
self.update() | jim-cooley/abletonremotescripts | remote-scripts/samples/APC40_20/ShiftableTransportComponent.py | Python | apache-2.0 | 9,766 |
'''Bubble sort'''
import os
def pedirNumeros():
vetor = []
while True:
try:
num = int(input("Enter a number or any letter to quit: "))
vetor.append(num)
except:
break
return vetor
def bubbleSort(vetor):
if len(vetor) <= 1:
return vetor
else:
for x in range(0, len(vetor)):
for i in range(0,len(vetor)-1):
if vetor[i] > vetor[i+1]:
aux = vetor[i+1]
vetor[i+1] = vetor[i]
vetor[i] = aux
return vetor
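# Worked example of the sort above (added illustration): each pass bubbles the
# largest remaining value toward the end, so bubbleSort([5, 1, 4, 2]) returns
# [1, 2, 4, 5].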
def main():
vetor = pedirNumeros()
print(bubbleSort(vetor))
main()
| PablNico/ScriptsAndMore | python/bubblesort.py | Python | gpl-3.0 | 533 |
# -*- coding: utf-8 -*-
"""Init and utils."""
from zope.i18nmessageid import MessageFactory
_ = MessageFactory('stv.sitecontent')
def initialize(context):
"""Initializer called when used as a Zope 2 product."""
| a25kk/stv | src/stv.sitecontent/stv/sitecontent/__init__.py | Python | mit | 219 |
# Authors: Alexandre Gramfort <[email protected]>
# Denis Engemann <[email protected]>
# Martin Luessi <[email protected]>
# Eric Larson <[email protected]>
#
# License: Simplified BSD
import os.path as op
import warnings
import numpy as np
from numpy.testing import assert_raises, assert_array_equal
from nose.tools import assert_true, assert_equal
from mne import io, read_evokeds, read_proj
from mne.io.constants import FIFF
from mne.channels import read_layout, make_eeg_layout
from mne.datasets import testing
from mne.time_frequency.tfr import AverageTFR
from mne.utils import slow_test
from mne.viz import plot_evoked_topomap, plot_projs_topomap
from mne.viz.topomap import (_check_outlines, _onselect, plot_topomap,
_find_peaks)
# Set our plotters to test mode
import matplotlib
matplotlib.use('Agg') # for testing don't use X server
warnings.simplefilter('always') # enable b/c these tests throw warnings
data_dir = testing.data_path(download=False)
subjects_dir = op.join(data_dir, 'subjects')
ecg_fname = op.join(data_dir, 'MEG', 'sample', 'sample_audvis_ecg_proj.fif')
base_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
evoked_fname = op.join(base_dir, 'test-ave.fif')
fname = op.join(base_dir, 'test-ave.fif')
raw_fname = op.join(base_dir, 'test_raw.fif')
event_name = op.join(base_dir, 'test-eve.fif')
layout = read_layout('Vectorview-all')
def _get_raw():
return io.Raw(raw_fname, preload=False)
@slow_test
@testing.requires_testing_data
def test_plot_topomap():
"""Test topomap plotting
"""
import matplotlib.pyplot as plt
from matplotlib.patches import Circle
# evoked
warnings.simplefilter('always')
res = 16
evoked = read_evokeds(evoked_fname, 'Left Auditory',
baseline=(None, 0))
ev_bad = evoked.pick_types(meg=False, eeg=True, copy=True)
ev_bad.pick_channels(ev_bad.ch_names[:2])
ev_bad.plot_topomap(times=ev_bad.times[:2] - 1e-6) # auto, should plot EEG
assert_raises(ValueError, ev_bad.plot_topomap, ch_type='mag')
assert_raises(TypeError, ev_bad.plot_topomap, head_pos='foo')
assert_raises(KeyError, ev_bad.plot_topomap, head_pos=dict(foo='bar'))
assert_raises(ValueError, ev_bad.plot_topomap, head_pos=dict(center=0))
assert_raises(ValueError, ev_bad.plot_topomap, times=[-100]) # bad time
assert_raises(ValueError, ev_bad.plot_topomap, times=[[0]]) # bad time
evoked.plot_topomap(0.1, layout=layout, scale=dict(mag=0.1))
plt.close('all')
axes = [plt.subplot(221), plt.subplot(222)]
evoked.plot_topomap(axes=axes, colorbar=False)
plt.close('all')
evoked.plot_topomap(times=[-0.1, 0.2])
plt.close('all')
mask = np.zeros_like(evoked.data, dtype=bool)
mask[[1, 5], :] = True
evoked.plot_topomap(ch_type='mag', outlines=None)
times = [0.1]
evoked.plot_topomap(times, ch_type='eeg', res=res, scale=1)
evoked.plot_topomap(times, ch_type='grad', mask=mask, res=res)
evoked.plot_topomap(times, ch_type='planar1', res=res)
evoked.plot_topomap(times, ch_type='planar2', res=res)
evoked.plot_topomap(times, ch_type='grad', mask=mask, res=res,
show_names=True, mask_params={'marker': 'x'})
plt.close('all')
assert_raises(ValueError, evoked.plot_topomap, times, ch_type='eeg',
res=res, average=-1000)
assert_raises(ValueError, evoked.plot_topomap, times, ch_type='eeg',
res=res, average='hahahahah')
p = evoked.plot_topomap(times, ch_type='grad', res=res,
show_names=lambda x: x.replace('MEG', ''),
image_interp='bilinear')
subplot = [x for x in p.get_children() if
isinstance(x, matplotlib.axes.Subplot)][0]
assert_true(all('MEG' not in x.get_text()
for x in subplot.get_children()
if isinstance(x, matplotlib.text.Text)))
# Test title
def get_texts(p):
return [x.get_text() for x in p.get_children() if
isinstance(x, matplotlib.text.Text)]
p = evoked.plot_topomap(times, ch_type='eeg', res=res, average=0.01)
assert_equal(len(get_texts(p)), 0)
p = evoked.plot_topomap(times, ch_type='eeg', title='Custom', res=res)
texts = get_texts(p)
assert_equal(len(texts), 1)
assert_equal(texts[0], 'Custom')
plt.close('all')
# delaunay triangulation warning
with warnings.catch_warnings(record=True): # can't show
warnings.simplefilter('always')
evoked.plot_topomap(times, ch_type='mag', layout=None, res=res)
assert_raises(RuntimeError, plot_evoked_topomap, evoked, 0.1, 'mag',
proj='interactive') # projs have already been applied
# change to no-proj mode
evoked = read_evokeds(evoked_fname, 'Left Auditory',
baseline=(None, 0), proj=False)
with warnings.catch_warnings(record=True):
warnings.simplefilter('always')
evoked.plot_topomap(0.1, 'mag', proj='interactive', res=res)
assert_raises(RuntimeError, plot_evoked_topomap, evoked,
np.repeat(.1, 50))
assert_raises(ValueError, plot_evoked_topomap, evoked, [-3e12, 15e6])
with warnings.catch_warnings(record=True): # file conventions
warnings.simplefilter('always')
projs = read_proj(ecg_fname)
projs = [pp for pp in projs if pp['desc'].lower().find('eeg') < 0]
plot_projs_topomap(projs, res=res)
plt.close('all')
ax = plt.subplot(111)
plot_projs_topomap([projs[0]], res=res, axes=ax) # test axes param
plt.close('all')
for ch in evoked.info['chs']:
if ch['coil_type'] == FIFF.FIFFV_COIL_EEG:
ch['loc'].fill(0)
# Remove extra digitization point, so EEG digitization points
# correspond with the EEG electrodes
del evoked.info['dig'][85]
pos = make_eeg_layout(evoked.info).pos[:, :2]
pos, outlines = _check_outlines(pos, 'head')
assert_true('head' in outlines.keys())
assert_true('nose' in outlines.keys())
assert_true('ear_left' in outlines.keys())
assert_true('ear_right' in outlines.keys())
assert_true('autoshrink' in outlines.keys())
assert_true(outlines['autoshrink'])
assert_true('clip_radius' in outlines.keys())
assert_array_equal(outlines['clip_radius'], 0.5)
pos, outlines = _check_outlines(pos, 'skirt')
assert_true('head' in outlines.keys())
assert_true('nose' in outlines.keys())
assert_true('ear_left' in outlines.keys())
assert_true('ear_right' in outlines.keys())
assert_true('autoshrink' in outlines.keys())
assert_true(not outlines['autoshrink'])
assert_true('clip_radius' in outlines.keys())
assert_array_equal(outlines['clip_radius'], 0.625)
pos, outlines = _check_outlines(pos, 'skirt',
head_pos={'scale': [1.2, 1.2]})
assert_array_equal(outlines['clip_radius'], 0.75)
# Plot skirt
evoked.plot_topomap(times, ch_type='eeg', outlines='skirt')
# Pass custom outlines without patch
evoked.plot_topomap(times, ch_type='eeg', outlines=outlines)
plt.close('all')
# Pass custom outlines with patch callable
def patch():
return Circle((0.5, 0.4687), radius=.46,
clip_on=True, transform=plt.gca().transAxes)
outlines['patch'] = patch
plot_evoked_topomap(evoked, times, ch_type='eeg', outlines=outlines)
# Remove digitization points. Now topomap should fail
evoked.info['dig'] = None
assert_raises(RuntimeError, plot_evoked_topomap, evoked,
times, ch_type='eeg')
plt.close('all')
# Test error messages for invalid pos parameter
n_channels = len(pos)
data = np.ones(n_channels)
pos_1d = np.zeros(n_channels)
pos_3d = np.zeros((n_channels, 2, 2))
assert_raises(ValueError, plot_topomap, data, pos_1d)
assert_raises(ValueError, plot_topomap, data, pos_3d)
assert_raises(ValueError, plot_topomap, data, pos[:3, :])
pos_x = pos[:, :1]
pos_xyz = np.c_[pos, np.zeros(n_channels)[:, np.newaxis]]
assert_raises(ValueError, plot_topomap, data, pos_x)
assert_raises(ValueError, plot_topomap, data, pos_xyz)
# An #channels x 4 matrix should work though. In this case (x, y, width,
# height) is assumed.
pos_xywh = np.c_[pos, np.zeros((n_channels, 2))]
plot_topomap(data, pos_xywh)
plt.close('all')
# Test peak finder
axes = [plt.subplot(131), plt.subplot(132)]
evoked.plot_topomap(times='peaks', axes=axes)
plt.close('all')
evoked.data = np.zeros(evoked.data.shape)
evoked.data[50][1] = 1
assert_array_equal(_find_peaks(evoked, 10), evoked.times[1])
evoked.data[80][100] = 1
assert_array_equal(_find_peaks(evoked, 10), evoked.times[[1, 100]])
evoked.data[2][95] = 2
assert_array_equal(_find_peaks(evoked, 10), evoked.times[[1, 95]])
assert_array_equal(_find_peaks(evoked, 1), evoked.times[95])
def test_plot_tfr_topomap():
"""Test plotting of TFR data
"""
import matplotlib as mpl
import matplotlib.pyplot as plt
raw = _get_raw()
times = np.linspace(-0.1, 0.1, 200)
n_freqs = 3
nave = 1
rng = np.random.RandomState(42)
data = rng.randn(len(raw.ch_names), n_freqs, len(times))
tfr = AverageTFR(raw.info, data, times, np.arange(n_freqs), nave)
tfr.plot_topomap(ch_type='mag', tmin=0.05, tmax=0.150, fmin=0, fmax=10,
res=16)
eclick = mpl.backend_bases.MouseEvent('button_press_event',
plt.gcf().canvas, 0, 0, 1)
eclick.xdata = 0.1
eclick.ydata = 0.1
eclick.inaxes = plt.gca()
erelease = mpl.backend_bases.MouseEvent('button_release_event',
plt.gcf().canvas, 0.9, 0.9, 1)
erelease.xdata = 0.3
erelease.ydata = 0.2
pos = [[0.11, 0.11], [0.25, 0.5], [0.0, 0.2], [0.2, 0.39]]
_onselect(eclick, erelease, tfr, pos, 'mag', 1, 3, 1, 3, 'RdBu_r', list())
tfr._onselect(eclick, erelease, None, 'mean', None)
plt.close('all')
| rajegannathan/grasp-lift-eeg-cat-dog-solution-updated | python-packages/mne-python-0.10/mne/viz/tests/test_topomap.py | Python | bsd-3-clause | 10,198 |
#
# BitBox02 Electrum plugin code.
#
import hid
from typing import TYPE_CHECKING, Dict, Tuple, Optional, List, Any, Callable
from electrum_grs import bip32, constants
from electrum_grs.i18n import _
from electrum_grs.keystore import Hardware_KeyStore
from electrum_grs.transaction import PartialTransaction
from electrum_grs.wallet import Standard_Wallet, Multisig_Wallet, Deterministic_Wallet
from electrum_grs.util import bh2u, UserFacingException
from electrum_grs.base_wizard import ScriptTypeNotSupported, BaseWizard
from electrum_grs.logging import get_logger
from electrum_grs.plugin import Device, DeviceInfo, runs_in_hwd_thread
from electrum_grs.simple_config import SimpleConfig
from electrum_grs.json_db import StoredDict
from electrum_grs.storage import get_derivation_used_for_hw_device_encryption
from electrum_grs.bitcoin import OnchainOutputType
import electrum_grs.bitcoin as bitcoin
import electrum_grs.ecc as ecc
from ..hw_wallet import HW_PluginBase, HardwareClientBase
_logger = get_logger(__name__)
try:
from bitbox02 import bitbox02
from bitbox02 import util
from bitbox02.communication import (
devices,
HARDENED,
u2fhid,
bitbox_api_protocol,
FirmwareVersionOutdatedException,
)
requirements_ok = True
except ImportError as e:
if not (isinstance(e, ModuleNotFoundError) and e.name == 'bitbox02'):
_logger.exception('error importing bitbox02 plugin deps')
requirements_ok = False
class BitBox02Client(HardwareClientBase):
# handler is a BitBox02_Handler, importing it would lead to a circular dependency
def __init__(self, handler: Any, device: Device, config: SimpleConfig, *, plugin: HW_PluginBase):
HardwareClientBase.__init__(self, plugin=plugin)
self.bitbox02_device = None # type: Optional[bitbox02.BitBox02]
self.handler = handler
self.device_descriptor = device
self.config = config
self.bitbox_hid_info = None
if self.config.get("bitbox02") is None:
bitbox02_config: dict = {
"remote_static_noise_keys": [],
"noise_privkey": None,
}
self.config.set_key("bitbox02", bitbox02_config)
bitboxes = devices.get_any_bitbox02s()
for bitbox in bitboxes:
if (
bitbox["path"] == self.device_descriptor.path
and bitbox["interface_number"]
== self.device_descriptor.interface_number
):
self.bitbox_hid_info = bitbox
if self.bitbox_hid_info is None:
raise Exception("No BitBox02 detected")
def is_initialized(self) -> bool:
return True
@runs_in_hwd_thread
def close(self):
try:
self.bitbox02_device.close()
except:
pass
def has_usable_connection_with_device(self) -> bool:
if self.bitbox_hid_info is None:
return False
return True
@runs_in_hwd_thread
def get_soft_device_id(self) -> Optional[str]:
if self.handler is None:
# Can't do the pairing without the handler. This happens at wallet creation time, when
# listing the devices.
return None
if self.bitbox02_device is None:
self.pairing_dialog()
return self.bitbox02_device.root_fingerprint().hex()
@runs_in_hwd_thread
def pairing_dialog(self):
def pairing_step(code: str, device_response: Callable[[], bool]) -> bool:
msg = "Please compare and confirm the pairing code on your BitBox02:\n" + code
self.handler.show_message(msg)
try:
res = device_response()
except:
# Close the hid device on exception
hid_device.close()
raise
finally:
self.handler.finished()
return res
def exists_remote_static_pubkey(pubkey: bytes) -> bool:
bitbox02_config = self.config.get("bitbox02")
noise_keys = bitbox02_config.get("remote_static_noise_keys")
if noise_keys is not None:
if pubkey.hex() in [noise_key for noise_key in noise_keys]:
return True
return False
def set_remote_static_pubkey(pubkey: bytes) -> None:
if not exists_remote_static_pubkey(pubkey):
bitbox02_config = self.config.get("bitbox02")
if bitbox02_config.get("remote_static_noise_keys") is not None:
bitbox02_config["remote_static_noise_keys"].append(pubkey.hex())
else:
bitbox02_config["remote_static_noise_keys"] = [pubkey.hex()]
self.config.set_key("bitbox02", bitbox02_config)
def get_noise_privkey() -> Optional[bytes]:
bitbox02_config = self.config.get("bitbox02")
privkey = bitbox02_config.get("noise_privkey")
if privkey is not None:
return bytes.fromhex(privkey)
return None
def set_noise_privkey(privkey: bytes) -> None:
bitbox02_config = self.config.get("bitbox02")
bitbox02_config["noise_privkey"] = privkey.hex()
self.config.set_key("bitbox02", bitbox02_config)
def attestation_warning() -> None:
self.handler.show_error(
"The BitBox02 attestation failed.\nTry reconnecting the BitBox02.\nWarning: The device might not be genuine, if the\n problem persists please contact Shift support.",
blocking=True
)
class NoiseConfig(bitbox_api_protocol.BitBoxNoiseConfig):
"""NoiseConfig extends BitBoxNoiseConfig"""
def show_pairing(self, code: str, device_response: Callable[[], bool]) -> bool:
return pairing_step(code, device_response)
def attestation_check(self, result: bool) -> None:
if not result:
attestation_warning()
def contains_device_static_pubkey(self, pubkey: bytes) -> bool:
return exists_remote_static_pubkey(pubkey)
def add_device_static_pubkey(self, pubkey: bytes) -> None:
return set_remote_static_pubkey(pubkey)
def get_app_static_privkey(self) -> Optional[bytes]:
return get_noise_privkey()
def set_app_static_privkey(self, privkey: bytes) -> None:
return set_noise_privkey(privkey)
if self.bitbox02_device is None:
hid_device = hid.device()
hid_device.open_path(self.bitbox_hid_info["path"])
bitbox02_device = bitbox02.BitBox02(
transport=u2fhid.U2FHid(hid_device),
device_info=self.bitbox_hid_info,
noise_config=NoiseConfig(),
)
try:
bitbox02_device.check_min_version()
except FirmwareVersionOutdatedException:
raise
self.bitbox02_device = bitbox02_device
self.fail_if_not_initialized()
def fail_if_not_initialized(self) -> None:
assert self.bitbox02_device
if not self.bitbox02_device.device_info()["initialized"]:
raise Exception(
"Please initialize the BitBox02 using the BitBox app first before using the BitBox02 in electrum"
)
def coin_network_from_electrum_network(self) -> int:
if constants.net.TESTNET:
return bitbox02.btc.TBTC
return bitbox02.btc.BTC
@runs_in_hwd_thread
def get_password_for_storage_encryption(self) -> str:
derivation = get_derivation_used_for_hw_device_encryption()
derivation_list = bip32.convert_bip32_path_to_list_of_uint32(derivation)
xpub = self.bitbox02_device.electrum_encryption_key(derivation_list)
node = bip32.BIP32Node.from_xkey(xpub, net = constants.BitcoinMainnet()).subkey_at_public_derivation(())
return node.eckey.get_public_key_bytes(compressed=True).hex()
@runs_in_hwd_thread
def get_xpub(self, bip32_path: str, xtype: str, *, display: bool = False) -> str:
if self.bitbox02_device is None:
self.pairing_dialog()
if self.bitbox02_device is None:
raise Exception(
"Need to setup communication first before attempting any BitBox02 calls"
)
self.fail_if_not_initialized()
xpub_keypath = bip32.convert_bip32_path_to_list_of_uint32(bip32_path)
coin_network = self.coin_network_from_electrum_network()
if xtype == "p2wpkh":
if coin_network == bitbox02.btc.BTC:
out_type = bitbox02.btc.BTCPubRequest.ZPUB
else:
out_type = bitbox02.btc.BTCPubRequest.VPUB
elif xtype == "p2wpkh-p2sh":
if coin_network == bitbox02.btc.BTC:
out_type = bitbox02.btc.BTCPubRequest.YPUB
else:
out_type = bitbox02.btc.BTCPubRequest.UPUB
elif xtype == "p2wsh-p2sh":
if coin_network == bitbox02.btc.BTC:
out_type = bitbox02.btc.BTCPubRequest.CAPITAL_YPUB
else:
out_type = bitbox02.btc.BTCPubRequest.CAPITAL_UPUB
elif xtype == "p2wsh":
if coin_network == bitbox02.btc.BTC:
out_type = bitbox02.btc.BTCPubRequest.CAPITAL_ZPUB
else:
out_type = bitbox02.btc.BTCPubRequest.CAPITAL_VPUB
# The other legacy types are not supported
else:
raise Exception("invalid xtype:{}".format(xtype))
return self.bitbox02_device.btc_xpub(
keypath=xpub_keypath,
xpub_type=out_type,
coin=coin_network,
display=display,
)
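# Summary of the xtype mapping implemented above (added note; the mainnet /
# testnet SLIP-132 prefixes follow coin_network):
#   p2wpkh      -> zpub / vpub
#   p2wpkh-p2sh -> ypub / upub
#   p2wsh-p2sh  -> Ypub / Upub
#   p2wsh       -> Zpub / Vpub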
@runs_in_hwd_thread
def label(self) -> str:
if self.handler is None:
# Can't do the pairing without the handler. This happens at wallet creation time, when
# listing the devices.
return super().label()
if self.bitbox02_device is None:
self.pairing_dialog()
# We add the fingerprint to the label, as if there are two devices with the same label, the
# device manager can mistake one for another and fail.
return "%s (%s)" % (
self.bitbox02_device.device_info()["name"],
self.bitbox02_device.root_fingerprint().hex(),
)
@runs_in_hwd_thread
def request_root_fingerprint_from_device(self) -> str:
if self.bitbox02_device is None:
raise Exception(
"Need to setup communication first before attempting any BitBox02 calls"
)
return self.bitbox02_device.root_fingerprint().hex()
def is_pairable(self) -> bool:
if self.bitbox_hid_info is None:
return False
return True
@runs_in_hwd_thread
def btc_multisig_config(
self, coin, bip32_path: List[int], wallet: Multisig_Wallet, xtype: str,
):
"""
Set and get a multisig config with the current device and some other arbitrary xpubs.
Registers it on the device if not already registered.
xtype: 'p2wsh' | 'p2wsh-p2sh'
"""
assert xtype in ("p2wsh", "p2wsh-p2sh")
if self.bitbox02_device is None:
raise Exception(
"Need to setup communication first before attempting any BitBox02 calls"
)
account_keypath = bip32_path[:-2]
xpubs = wallet.get_master_public_keys()
our_xpub = self.get_xpub(
bip32.convert_bip32_intpath_to_strpath(account_keypath), xtype
)
multisig_config = bitbox02.btc.BTCScriptConfig(
multisig=bitbox02.btc.BTCScriptConfig.Multisig(
threshold=wallet.m,
xpubs=[util.parse_xpub(xpub) for xpub in xpubs],
our_xpub_index=xpubs.index(our_xpub),
script_type={
"p2wsh": bitbox02.btc.BTCScriptConfig.Multisig.P2WSH,
"p2wsh-p2sh": bitbox02.btc.BTCScriptConfig.Multisig.P2WSH_P2SH,
}[xtype]
)
)
is_registered = self.bitbox02_device.btc_is_script_config_registered(
coin, multisig_config, account_keypath
)
if not is_registered:
name = self.handler.name_multisig_account()
try:
self.bitbox02_device.btc_register_script_config(
coin=coin,
script_config=multisig_config,
keypath=account_keypath,
name=name,
)
except bitbox02.DuplicateEntryException:
raise
except:
raise UserFacingException("Failed to register multisig\naccount configuration on BitBox02")
return multisig_config
@runs_in_hwd_thread
def show_address(
self, bip32_path: str, address_type: str, wallet: Deterministic_Wallet
) -> str:
if self.bitbox02_device is None:
raise Exception(
"Need to setup communication first before attempting any BitBox02 calls"
)
address_keypath = bip32.convert_bip32_path_to_list_of_uint32(bip32_path)
coin_network = self.coin_network_from_electrum_network()
if address_type == "p2wpkh":
script_config = bitbox02.btc.BTCScriptConfig(
simple_type=bitbox02.btc.BTCScriptConfig.P2WPKH
)
elif address_type == "p2wpkh-p2sh":
script_config = bitbox02.btc.BTCScriptConfig(
simple_type=bitbox02.btc.BTCScriptConfig.P2WPKH_P2SH
)
elif address_type in ("p2wsh-p2sh", "p2wsh"):
if type(wallet) is Multisig_Wallet:
script_config = self.btc_multisig_config(
coin_network, address_keypath, wallet, address_type,
)
else:
raise Exception("Can only use p2wsh-p2sh or p2wsh with multisig wallets")
else:
raise Exception(
"invalid address xtype: {} is not supported by the BitBox02".format(
address_type
)
)
return self.bitbox02_device.btc_address(
keypath=address_keypath,
coin=coin_network,
script_config=script_config,
display=True,
)
def _get_coin(self):
return bitbox02.btc.TBTC if constants.net.TESTNET else bitbox02.btc.BTC
@runs_in_hwd_thread
def sign_transaction(
self,
keystore: Hardware_KeyStore,
tx: PartialTransaction,
wallet: Deterministic_Wallet,
):
if tx.is_complete():
return
if self.bitbox02_device is None:
raise Exception(
"Need to setup communication first before attempting any BitBox02 calls"
)
coin = self._get_coin()
tx_script_type = None
# Build BTCInputType list
inputs = []
for txin in tx.inputs():
my_pubkey, full_path = keystore.find_my_pubkey_in_txinout(txin)
if full_path is None:
raise Exception(
"A wallet owned pubkey was not found in the transaction input to be signed"
)
prev_tx = txin.utxo
if prev_tx is None:
raise UserFacingException(_('Missing previous tx.'))
prev_inputs: List[bitbox02.BTCPrevTxInputType] = []
prev_outputs: List[bitbox02.BTCPrevTxOutputType] = []
for prev_txin in prev_tx.inputs():
prev_inputs.append(
{
"prev_out_hash": prev_txin.prevout.txid[::-1],
"prev_out_index": prev_txin.prevout.out_idx,
"signature_script": prev_txin.script_sig,
"sequence": prev_txin.nsequence,
}
)
for prev_txout in prev_tx.outputs():
prev_outputs.append(
{
"value": prev_txout.value,
"pubkey_script": prev_txout.scriptpubkey,
}
)
inputs.append(
{
"prev_out_hash": txin.prevout.txid[::-1],
"prev_out_index": txin.prevout.out_idx,
"prev_out_value": txin.value_sats(),
"sequence": txin.nsequence,
"keypath": full_path,
"script_config_index": 0,
"prev_tx": {
"version": prev_tx.version,
"locktime": prev_tx.locktime,
"inputs": prev_inputs,
"outputs": prev_outputs,
},
}
)
if tx_script_type == None:
tx_script_type = txin.script_type
elif tx_script_type != txin.script_type:
raise Exception("Cannot mix different input script types")
if tx_script_type == "p2wpkh":
tx_script_type = bitbox02.btc.BTCScriptConfig(
simple_type=bitbox02.btc.BTCScriptConfig.P2WPKH
)
elif tx_script_type == "p2wpkh-p2sh":
tx_script_type = bitbox02.btc.BTCScriptConfig(
simple_type=bitbox02.btc.BTCScriptConfig.P2WPKH_P2SH
)
elif tx_script_type in ("p2wsh-p2sh", "p2wsh"):
if type(wallet) is Multisig_Wallet:
tx_script_type = self.btc_multisig_config(coin, full_path, wallet, tx_script_type)
else:
raise Exception("Can only use p2wsh-p2sh or p2wsh with multisig wallets")
else:
raise UserFacingException(
"invalid input script type: {} is not supported by the BitBox02".format(
tx_script_type
)
)
# Build BTCOutputType list
outputs = []
for txout in tx.outputs():
assert txout.address
# check for change
if txout.is_change:
my_pubkey, change_pubkey_path = keystore.find_my_pubkey_in_txinout(txout)
outputs.append(
bitbox02.BTCOutputInternal(
keypath=change_pubkey_path, value=txout.value, script_config_index=0,
)
)
else:
addrtype, pubkey_hash = bitcoin.address_to_hash(txout.address)
if addrtype == OnchainOutputType.P2PKH:
output_type = bitbox02.btc.P2PKH
elif addrtype == OnchainOutputType.P2SH:
output_type = bitbox02.btc.P2SH
elif addrtype == OnchainOutputType.WITVER0_P2WPKH:
output_type = bitbox02.btc.P2WPKH
elif addrtype == OnchainOutputType.WITVER0_P2WSH:
output_type = bitbox02.btc.P2WSH
else:
raise UserFacingException(
"Received unsupported output type during transaction signing: {} is not supported by the BitBox02".format(
addrtype
)
)
outputs.append(
bitbox02.BTCOutputExternal(
output_type=output_type,
output_hash=pubkey_hash,
value=txout.value,
)
)
keypath_account = full_path[:-2]
sigs = self.bitbox02_device.btc_sign(
coin,
[bitbox02.btc.BTCScriptConfigWithKeypath(
script_config=tx_script_type,
keypath=keypath_account,
)],
inputs=inputs,
outputs=outputs,
locktime=tx.locktime,
version=tx.version,
)
# Fill signatures
if len(sigs) != len(tx.inputs()):
raise Exception("Incorrect number of inputs signed.") # Should never occur
signatures = [bh2u(ecc.der_sig_from_sig_string(x[1])) + "01" for x in sigs]
tx.update_signatures(signatures)
def sign_message(self, keypath: str, message: bytes, xtype: str) -> bytes:
if self.bitbox02_device is None:
raise Exception(
"Need to setup communication first before attempting any BitBox02 calls"
)
try:
simple_type = {
"p2wpkh-p2sh":bitbox02.btc.BTCScriptConfig.P2WPKH_P2SH,
"p2wpkh": bitbox02.btc.BTCScriptConfig.P2WPKH,
}[xtype]
except KeyError:
raise UserFacingException("The BitBox02 does not support signing messages for this address type: {}".format(xtype))
_, _, signature = self.bitbox02_device.btc_sign_msg(
self._get_coin(),
bitbox02.btc.BTCScriptConfigWithKeypath(
script_config=bitbox02.btc.BTCScriptConfig(
simple_type=simple_type,
),
keypath=bip32.convert_bip32_path_to_list_of_uint32(keypath),
),
message,
)
return signature
class BitBox02_KeyStore(Hardware_KeyStore):
hw_type = "bitbox02"
device = "BitBox02"
plugin: "BitBox02Plugin"
def __init__(self, d: dict):
super().__init__(d)
self.force_watching_only = False
self.ux_busy = False
def get_client(self):
return self.plugin.get_client(self)
def give_error(self, message: Exception, clear_client: bool = False):
self.logger.info(message)
if not self.ux_busy:
self.handler.show_error(message)
else:
self.ux_busy = False
if clear_client:
self.client = None
raise UserFacingException(message)
def decrypt_message(self, pubkey, message, password):
raise UserFacingException(
_(
"Message encryption, decryption and signing are currently not supported for {}"
).format(self.device)
)
def sign_message(self, sequence, message, password):
if password:
raise Exception("BitBox02 does not accept a password from the host")
client = self.get_client()
keypath = self.get_derivation_prefix() + "/%d/%d" % sequence
xtype = self.get_bip32_node_for_xpub().xtype
return client.sign_message(keypath, message.encode("utf-8"), xtype)
@runs_in_hwd_thread
def sign_transaction(self, tx: PartialTransaction, password: str):
if tx.is_complete():
return
client = self.get_client()
assert isinstance(client, BitBox02Client)
try:
try:
self.handler.show_message("Authorize Transaction...")
client.sign_transaction(self, tx, self.handler.get_wallet())
finally:
self.handler.finished()
except Exception as e:
self.logger.exception("")
self.give_error(e, True)
return
@runs_in_hwd_thread
def show_address(
self, sequence: Tuple[int, int], txin_type: str, wallet: Deterministic_Wallet
):
client = self.get_client()
address_path = "{}/{}/{}".format(
self.get_derivation_prefix(), sequence[0], sequence[1]
)
try:
try:
self.handler.show_message(_("Showing address ..."))
dev_addr = client.show_address(address_path, txin_type, wallet)
finally:
self.handler.finished()
except Exception as e:
self.logger.exception("")
self.handler.show_error(e)
class BitBox02Plugin(HW_PluginBase):
keystore_class = BitBox02_KeyStore
minimum_library = (5, 2, 0)
DEVICE_IDS = [(0x03EB, 0x2403)]
SUPPORTED_XTYPES = ("p2wpkh-p2sh", "p2wpkh", "p2wsh", "p2wsh-p2sh")
def __init__(self, parent: HW_PluginBase, config: SimpleConfig, name: str):
super().__init__(parent, config, name)
self.libraries_available = self.check_libraries_available()
if not self.libraries_available:
return
self.device_manager().register_devices(self.DEVICE_IDS, plugin=self)
def get_library_version(self):
try:
from bitbox02 import bitbox02
version = bitbox02.__version__
except:
version = "unknown"
if requirements_ok:
return version
else:
raise ImportError()
# handler is a BitBox02_Handler
@runs_in_hwd_thread
def create_client(self, device: Device, handler: Any) -> BitBox02Client:
if not handler:
self.handler = handler
return BitBox02Client(handler, device, self.config, plugin=self)
def setup_device(
self, device_info: DeviceInfo, wizard: BaseWizard, purpose: int
):
device_id = device_info.device.id_
client = self.scan_and_create_client_for_device(device_id=device_id, wizard=wizard)
assert isinstance(client, BitBox02Client)
if client.bitbox02_device is None:
wizard.run_task_without_blocking_gui(
task=lambda client=client: client.pairing_dialog())
client.fail_if_not_initialized()
return client
def get_xpub(
self, device_id: str, derivation: str, xtype: str, wizard: BaseWizard
):
if xtype not in self.SUPPORTED_XTYPES:
raise ScriptTypeNotSupported(
_("This type of script is not supported with {}: {}").format(self.device, xtype)
)
client = self.scan_and_create_client_for_device(device_id=device_id, wizard=wizard)
assert isinstance(client, BitBox02Client)
assert client.bitbox02_device is not None
return client.get_xpub(derivation, xtype)
@runs_in_hwd_thread
def show_address(
self,
wallet: Deterministic_Wallet,
address: str,
keystore: BitBox02_KeyStore = None,
):
if keystore is None:
keystore = wallet.get_keystore()
if not self.show_address_helper(wallet, address, keystore):
return
txin_type = wallet.get_txin_type(address)
sequence = wallet.get_address_index(address)
keystore.show_address(sequence, txin_type, wallet)
@runs_in_hwd_thread
def show_xpub(self, keystore: BitBox02_KeyStore):
client = keystore.get_client()
assert isinstance(client, BitBox02Client)
derivation = keystore.get_derivation_prefix()
xtype = keystore.get_bip32_node_for_xpub().xtype
client.get_xpub(derivation, xtype, display=True)
def create_device_from_hid_enumeration(self, d: dict, *, product_key) -> 'Device':
device = super().create_device_from_hid_enumeration(d, product_key=product_key)
# The BitBox02's product_id is not unique per device, thus use the path instead to
# distinguish devices.
id_ = str(d['path'])
return device._replace(id_=id_)
| GroestlCoin/electrum-grs | electrum_grs/plugins/bitbox02/bitbox02.py | Python | gpl-3.0 | 27,314 |
"""
WSGI config for zulip project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
import sys
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(BASE_DIR)
from scripts.lib.setup_path import setup_path
setup_path()
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "zproject.settings")
from django.core.wsgi import get_wsgi_application
try:
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
application = get_wsgi_application()
except Exception:
# If /etc/zulip/settings.py contains invalid syntax, Django
# initialization will fail in django.setup(). In this case, our
# normal configuration to logs errors to /var/log/zulip/errors.log
# won't have been initialized. Since it's really valuable for the
# debugging process for a Zulip 500 error to always be "check
# /var/log/zulip/errors.log", we log to that file directly here.
import logging
logging.basicConfig(filename='/var/log/zulip/errors.log', level=logging.INFO,
format='%(asctime)s %(levelname)s %(name)s %(message)s')
logger = logging.getLogger(__name__)
logger.exception("get_wsgi_application() failed:")
raise
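# Hedged usage note (the server choice is an assumption, not part of this
# file): a WSGI server would typically be pointed at the ``application``
# object above, e.g. ``gunicorn zproject.wsgi:application``.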
| showell/zulip | zproject/wsgi.py | Python | apache-2.0 | 1,909 |
#! /usr/bin/env python
"""
A parser for the -m 8 blast output
"""
import sys
if len(sys.argv) != 2:
print "USAGE: blast-table-parser.py <blat file>"
sys.exit(1)
CONTIG_THRESHOLD = 1000
E_THRESHOLD = 1e-5
IDENTITY_THRESHOLD = 30
dict = {}
#Query id,Subject id,% identity,alignment length,mismatches,gap openings,q. start,q. end,s. start,s. end,e-value,bit score
for line in open(sys.argv[1]):
data = line.split('\t')
query = data[0]
hit = data[1]
identity = float(data[2])
length = int(data[3])
q_start = int(data[6])
q_end = int(data[7])
e_value = float(data[-2])
contig_length = int(query.split('_')[3])+33-1
'''only keep hits from contigs >= 1000 bp with percent identity >= 30 (the e-value column is parsed above but not applied in this filter)'''
if contig_length >= CONTIG_THRESHOLD and identity >= IDENTITY_THRESHOLD:
print line.rstrip()
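# Hedged illustration of the length arithmetic above (assumes velvet-style
# query ids of the form "NODE_<n>_length_<len>_cov_<cov>"): for the id
# "NODE_7_length_968_cov_5.1", query.split('_')[3] is "968", so the reported
# contig length is 968 + 33 - 1 = 1000, which just meets CONTIG_THRESHOLD.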
| fandemonium/code | tiedje-scripts/blast-table-parser.py | Python | mit | 848 |
#!/usr/bin/env python
# Copyright (c) 2005-2009 Jaroslav Gresula
#
# Distributed under the MIT license (See accompanying file
# LICENSE.txt or copy at http://jagpdf.org/LICENSE.txt)
#
import jagpdf
import jag.testlib as testlib
def do_document(argv, name, cfg_opts):
cfg = testlib.test_config()
for k, v in cfg_opts.iteritems():
cfg.set(k, v)
doc = testlib.create_test_doc(argv, name+'.pdf', cfg)
doc.page_start(400, 400)
canvas = doc.page().canvas()
canvas.text_start(20, 380)
l = [(k, v) for k, v in cfg_opts.iteritems()]
l.sort()
for k, v in l:
canvas.text("%s : %s" % (k, v))
canvas.text_translate_line(0, -15)
canvas.text_end()
doc.page_end()
doc.finalize()
def test_main(argv=None):
def doc(name, cfg={}, versions=('3','4')):
cfg['doc.encryption'] = 'standard'
cfg["doc.static_file_id"] = "1"
cfg["info.creation_date"] = "0"
cfg["info.static_producer"] = "1"
for ver in versions:
cfg['doc.version'] = ver
do_document(argv, 'enc_' + name + '1' + ver, cfg)
doc('usrpwd', {'stdsh.pwd_user': 'user'})
doc('ownpwd', {'stdsh.pwd_owner': 'owner'})
doc('no_restr')
doc('no_print', {'stdsh.permissions': 'no_print'})
doc('no_copy', {'stdsh.permissions': 'no_copy'})
doc('no_hires_print', {'stdsh.permissions': 'no_hires_print'}, ['4'])
doc('no_modify', {'stdsh.permissions': 'no_modify'})
doc('no_assemble', {'stdsh.permissions': 'no_assemble'}, ['4'])
doc('no_accessib', {'stdsh.permissions': 'no_copy',
'stdsh.pwd_owner': 'owner'}, ['4'])
if __name__ == "__main__":
test_main()
| jgresula/jagpdf | code/test/apitest/py/encrypt.py | Python | mit | 1,689 |
from .postman import Postman
from ._sanitise import sanitise
| wtsi-hgi/CoGS-Webapp | cogs/mail/__init__.py | Python | agpl-3.0 | 61 |
import ast
import collections
import contextlib
import copy
import functools
import itertools
import logging
import os
import subprocess
import sys
import tempfile
from .third_party.google.protobuf import text_format
from . import package_pb2
class UncleanFilesystemError(Exception):
pass
class InconsistentDependencyGraphError(Exception):
pass
class CyclicDependencyError(Exception):
pass
class InfraRepoConfig(object):
def to_recipes_cfg(self, repo_root):
# TODO(luqui): This is not always correct. It can be configured in
# infra/config:refs.cfg.
return os.path.join(repo_root, 'infra', 'config', 'recipes.cfg')
def from_recipes_cfg(self, recipes_cfg):
return os.path.dirname( # <repo root>
os.path.dirname( # infra
os.path.dirname( # config
os.path.abspath(recipes_cfg)))) # recipes.cfg
class ProtoFile(object):
"""A collection of functions operating on a proto path.
This is an object so that it can be mocked in the tests.
"""
def __init__(self, path):
self._path = path
@property
def path(self):
return os.path.realpath(self._path)
def read_text(self):
with open(self._path, 'r') as fh:
return fh.read()
def read(self):
text = self.read_text()
buf = package_pb2.Package()
text_format.Merge(text, buf)
return buf
def to_text(self, buf):
return text_format.MessageToString(buf)
def write(self, buf):
with open(self._path, 'w') as fh:
fh.write(self.to_text(buf))
class PackageContext(object):
"""Contains information about where the root package and its dependency
checkouts live.
- recipes_dir is the location of recipes/ and recipe_modules/ which contain
the actual recipes of the root package.
- package_dir is where dependency checkouts live, e.g.
package_dir/recipe_engine/recipes/...
- repo_root is the root of the repository containing the root package.
"""
def __init__(self, recipes_dir, package_dir, repo_root):
self.recipes_dir = recipes_dir
self.package_dir = package_dir
self.repo_root = repo_root
@classmethod
def from_proto_file(cls, repo_root, proto_file):
proto_path = proto_file.path
buf = proto_file.read()
recipes_path = buf.recipes_path.replace('/', os.sep)
return cls(os.path.join(repo_root, recipes_path),
os.path.join(repo_root, recipes_path, '.recipe_deps'),
repo_root)
class CommitInfo(object):
"""Holds the stuff we need to know about a commit."""
def __init__(self, author, message, repo_id, revision):
self.author = author
self.message = message
self.repo_id = repo_id
self.revision = revision
def dump(self):
return {
'author': self.author,
'message': self.message,
'repo_id': self.repo_id,
'revision': self.revision,
}
@functools.total_ordering
class RepoUpdate(object):
"""Wrapper class that specifies the sort order of roll updates when merging.
"""
def __init__(self, spec, commit_infos=()):
self.spec = spec
self.commit_infos = commit_infos
@property
def id(self):
return self.spec.id
def __eq__(self, other):
return (self.id, self.spec.revision) == (other.id, other.spec.revision)
def __le__(self, other):
return (self.id, self.spec.revision) <= (other.id, other.spec.revision)
class RepoSpec(object):
"""RepoSpec is the specification of a repository to check out.
The prototypical example is GitRepoSpec, which includes a url, revision,
and branch.
"""
def checkout(self, context):
"""Fetches the specified package and returns the path of the package root
(the directory that contains recipes and recipe_modules).
"""
raise NotImplementedError()
def check_checkout(self, context):
"""Checks that the package is already fetched and in a good state, without
actually changing anything.
Returns None in normal conditions, otherwise raises some sort of exception.
"""
raise NotImplementedError()
def repo_root(self, context):
"""Returns the root of this repository."""
raise NotImplementedError()
def proto_file(self, context):
"""Returns the ProtoFile of the recipes config file in this repository.
Requires a good checkout."""
return ProtoFile(InfraRepoConfig().to_recipes_cfg(self.repo_root(context)))
class GitRepoSpec(RepoSpec):
def __init__(self, id, repo, branch, revision, path):
self.id = id
self.repo = repo
self.branch = branch
self.revision = revision
self.path = path
def checkout(self, context):
dep_dir = self._dep_dir(context)
logging.info('Freshening repository %s' % dep_dir)
if not os.path.isdir(dep_dir):
_run_cmd([self._git, 'clone', self.repo, dep_dir])
elif not os.path.isdir(os.path.join(dep_dir, '.git')):
raise UncleanFilesystemError('%s exists but is not a git repo' % dep_dir)
try:
subprocess.check_output([self._git, 'rev-parse', '-q', '--verify',
'%s^{commit}' % self.revision], cwd=dep_dir)
except subprocess.CalledProcessError:
_run_cmd([self._git, 'fetch'], cwd=dep_dir)
_run_cmd([self._git, 'reset', '-q', '--hard', self.revision], cwd=dep_dir)
def check_checkout(self, context):
dep_dir = self._dep_dir(context)
if not os.path.isdir(dep_dir):
raise UncleanFilesystemError('Dependency %s does not exist' %
dep_dir)
elif not os.path.isdir(os.path.join(dep_dir, '.git')):
raise UncleanFilesystemError('Dependency %s is not a git repo' %
dep_dir)
git_status_command = [self._git, 'status', '--porcelain']
logging.info('%s', git_status_command)
output = subprocess.check_output(git_status_command, cwd=dep_dir)
if output:
raise UncleanFilesystemError('Dependency %s is unclean:\n%s' %
(dep_dir, output))
def repo_root(self, context):
return os.path.join(self._dep_dir(context), self.path)
def dump(self):
buf = package_pb2.DepSpec(
project_id=self.id,
url=self.repo,
branch=self.branch,
revision=self.revision)
if self.path:
buf.path_override = self.path
return buf
def updates(self, context):
"""Returns a list of all updates to the branch since the revision this
repo spec refers to, paired with their commit timestamps; i.e.
(timestamp, GitRepoSpec).
Although timestamps are not completely reliable, they are the best tool we
have to approximate global coherence.
"""
lines = filter(bool, self._raw_updates(context).strip().split('\n'))
updates = []
for rev in lines:
info = self._get_commit_info(rev, context)
updates.append(RepoUpdate(
GitRepoSpec(self.id, self.repo, self.branch, rev, self.path),
commit_infos=(info,)))
return updates
def _raw_updates(self, context):
self.checkout(context)
_run_cmd([self._git, 'fetch'], cwd=self._dep_dir(context))
# XXX(luqui): Should this just focus on the recipes subtree rather than
# the whole repo?
git = subprocess.Popen([self._git, 'log',
'%s..origin/%s' % (self.revision, self.branch),
'--pretty=%H',
'--reverse'],
stdout=subprocess.PIPE,
cwd=self._dep_dir(context))
(stdout, _) = git.communicate()
return stdout
def _get_commit_info(self, rev, context):
author = subprocess.check_output(
[self._git, 'show', '-s', '--pretty=%aE', rev],
cwd=self._dep_dir(context)).strip()
message = subprocess.check_output(
[self._git, 'show', '-s', '--pretty=%B', rev],
cwd=self._dep_dir(context)).strip()
return CommitInfo(author, message, self.id, rev)
def _dep_dir(self, context):
return os.path.join(context.package_dir, self.id)
@property
def _git(self):
if sys.platform.startswith(('win', 'cygwin')):
return 'git.bat'
else:
return 'git'
def _components(self):
return (self.id, self.repo, self.revision, self.path)
def __eq__(self, other):
return self._components() == other._components()
def __ne__(self, other):
return not self.__eq__(other)
class RootRepoSpec(RepoSpec):
def __init__(self, proto_file):
self._proto_file = proto_file
def checkout(self, context):
# We assume this is already checked out.
pass
def check_checkout(self, context):
pass
def repo_root(self, context):
return context.repo_root
def proto_file(self, context):
return self._proto_file
class Package(object):
"""Package represents a loaded package, and contains path and dependency
information.
This is accessed by loader.py through RecipeDeps.get_package.
"""
def __init__(self, repo_spec, deps, recipes_dir):
self.repo_spec = repo_spec
self.deps = deps
self.recipes_dir = recipes_dir
@property
def recipe_dirs(self):
return [os.path.join(self.recipes_dir, 'recipes')]
@property
def module_dirs(self):
return [os.path.join(self.recipes_dir, 'recipe_modules')]
def find_dep(self, dep_name):
return self.deps[dep_name]
def module_path(self, module_name):
return os.path.join(self.recipes_dir, 'recipe_modules', module_name)
class PackageSpec(object):
API_VERSION = 1
def __init__(self, project_id, recipes_path, deps):
self._project_id = project_id
self._recipes_path = recipes_path
self._deps = deps
@classmethod
def load_proto(cls, proto_file):
buf = proto_file.read()
assert buf.api_version == cls.API_VERSION
deps = { dep.project_id: GitRepoSpec(dep.project_id,
dep.url,
dep.branch,
dep.revision,
dep.path_override)
for dep in buf.deps }
return cls(buf.project_id, buf.recipes_path, deps)
@property
def project_id(self):
return self._project_id
@property
def recipes_path(self):
return self._recipes_path
@property
def deps(self):
return self._deps
def dump(self):
return package_pb2.Package(
api_version=self.API_VERSION,
project_id=self._project_id,
recipes_path=self._recipes_path,
deps=[ self._deps[dep].dump() for dep in sorted(self._deps.keys()) ])
def updates(self, context):
"""Returns a list of RepoUpdate<PackageSpec>s, corresponding to the updates
in self's dependencies.
See iterate_consistent_updates below."""
dep_updates = _merge([
self._deps[dep].updates(context) for dep in sorted(self._deps.keys()) ])
deps_so_far = self._deps
ret_updates = []
for update in dep_updates:
deps_so_far = _updated(deps_so_far, { update.id: update.spec })
ret_updates.append(RepoUpdate(PackageSpec(
self._project_id, self._recipes_path, deps_so_far),
commit_infos=update.commit_infos))
return ret_updates
def iterate_consistent_updates(self, proto_file, context):
"""Returns a list of RepoUpdate<PackageSpec>s, corresponding to the updates
in self's dependencies, with inconsistent dependency graphs filtered out.
This is the entry point of the rolling logic, which is called by recipes.py.
To roll, we look at all updates on the specified branches in each of our
direct dependencies. We don't look at transitive dependencies because
our direct dependencies are responsible for rolling those. If we have two
dependencies A and B, each with three updates, we can visualize this in
a two-dimensional space like so:
A1 A2 A3
+--------
B1 | . . .
B2 | . . .
B3 | . . .
Each of the 9 locations here corresponds to a possible PackageSpec. Some
of these will be inconsistent; e.g. A and B depend on the same package at
different versions. Let's mark a few with X's to indicate inconsistent
dependencies:
A1 A2 A3
+--------
B1 | . . X
B2 | . X .
B3 | X X .
We are trying to find which consistent versions to commit, and in which
order. We only want to commit in monotone order (left to right and top to
bottom); i.e. committing a spec depending on A3 then in the next commit
depending on A2 doesn't make sense. But as we can see here, there are
multiple monotone paths.
A1B1 A2B1 A3B2 A3B3
A1B1 A1B2 A3B2 A3B3
So we necessarily need to choose one over the other. We would like to go
for as fine a granularity as possible, so it would seem we need to choose
the longest one. But since the granularity of our updates depends on the
granularity of our dependencies' updates, what we would actually aim for is
"global coherence"; i.e. everybody chooses mutually consistent paths. So if
we update A2B1, somebody else who also depends on A and B will update to
A2B1, in order to be consistent for anybody downstream.
It also needs to be consistent with the future; e.g. we don't want to choose
A2B1 if there is an A2 and A1B2 otherwise, because in the future A2 might
become available, which would make the order of rolls depend on when you
did the roll. That leads to, as far as I can tell, the only global
coherence strategy, which is to roll along whichever axis has the smallest
time delta from the current configuration.
HOWEVER timestamps on git commits are not reliable, so we don't do any of
this logic. Instead, we rely on the fact that we expect the auto-roller bot
to roll frequently, which means that we will roll in minimum-time-delta
order anyway (at least up to an accuracy of the auto-roller bot's cycle
  time). So in the rare case that there are multiple commits to roll, we naively
choose to roll them in lexicographic order: roll all of A's commits, then
all of B's.
In the case that we need rolling to be more distributed, it will be
important to solve the timestamp issue so we ensure coherence.
"""
root_spec = RootRepoSpec(proto_file)
# We keep track of accumulated commit infos, so that we correctly attribute
# authors even when we skip a state due to inconsistent dependencies.
commit_infos_accum = []
for update in self.updates(context):
commit_infos_accum.extend(update.commit_infos)
try:
package_deps = PackageDeps(context)
# Inconsistent graphs will throw an exception here, thus skipping the
# yield.
package_deps._create_from_spec(root_spec, update.spec, allow_fetch=True)
new_update = RepoUpdate(update.spec, tuple(commit_infos_accum))
commit_infos_accum = []
yield new_update
except InconsistentDependencyGraphError:
pass
def __eq__(self, other):
return (
self._project_id == other._project_id and
self._recipes_path == other._recipes_path and
self._deps == other._deps)
def __ne__(self, other):
return not self.__eq__(other)
class PackageDeps(object):
"""An object containing all the transitive dependencies of the root package.
"""
def __init__(self, context):
self._context = context
self._repos = {}
@classmethod
def create(cls, repo_root, proto_file, allow_fetch=False):
"""Creates a PackageDeps object.
Arguments:
repo_root: the root of the repository containing this package.
      proto_file: a ProtoFile object corresponding to the repo's recipes.cfg
allow_fetch: whether to fetch dependencies rather than just checking for
them.
"""
context = PackageContext.from_proto_file(repo_root, proto_file)
package_deps = cls(context)
root_package = package_deps._create_package(
RootRepoSpec(proto_file), allow_fetch)
return package_deps
def _create_package(self, repo_spec, allow_fetch):
if allow_fetch:
repo_spec.checkout(self._context)
else:
try:
repo_spec.check_checkout(self._context)
except UncleanFilesystemError as e:
logging.warn(
'Unclean environment. You probably need to run "recipes.py fetch"\n'
'%s' % e.message)
package_spec = PackageSpec.load_proto(repo_spec.proto_file(self._context))
return self._create_from_spec(repo_spec, package_spec, allow_fetch)
def _create_from_spec(self, repo_spec, package_spec, allow_fetch):
project_id = package_spec.project_id
if project_id in self._repos:
if self._repos[project_id] is None:
raise CyclicDependencyError(
'Package %s depends on itself' % project_id)
if repo_spec != self._repos[project_id].repo_spec:
raise InconsistentDependencyGraphError(
'Package specs do not match: %s vs %s' %
(repo_spec, self._repos[project_id].repo_spec))
self._repos[project_id] = None
deps = {}
for dep, dep_repo in sorted(package_spec.deps.items()):
deps[dep] = self._create_package(dep_repo, allow_fetch)
package = Package(
repo_spec, deps,
os.path.join(repo_spec.repo_root(self._context),
package_spec.recipes_path))
self._repos[project_id] = package
return package
# TODO(luqui): Remove this, so all accesses to packages are done
# via other packages with properly scoped deps.
def get_package(self, package_id):
return self._repos[package_id]
@property
def all_recipe_dirs(self):
for repo in self._repos.values():
for subdir in repo.recipe_dirs:
yield str(subdir)
@property
def all_module_dirs(self):
for repo in self._repos.values():
for subdir in repo.module_dirs:
yield str(subdir)
def _run_cmd(cmd, cwd=None):
cwd_str = ' (in %s)' % cwd if cwd else ''
logging.info('%s%s', cmd, cwd_str)
subprocess.check_call(cmd, cwd=cwd)
def _merge2(xs, ys, compare=lambda x, y: x <= y):
"""Merges two sorted iterables, preserving sort order.
>>> list(_merge2([1, 3, 6], [2, 4, 5]))
[1, 2, 3, 4, 5, 6]
>>> list(_merge2([1, 2, 3], []))
[1, 2, 3]
>>> list(_merge2([], [4, 5, 6]))
[4, 5, 6]
>>> list(_merge2([], []))
[]
>>> list(_merge2([4, 2], [3, 1], compare=lambda x, y: x >= y))
[4, 3, 2, 1]
The merge is left-biased and preserves order within each argument.
>>> list(_merge2([1, 4], [3, 2], compare=lambda x, y: True))
[1, 4, 3, 2]
"""
nothing = object()
xs = iter(xs)
ys = iter(ys)
x = nothing
y = nothing
try:
x = xs.next()
y = ys.next()
while True:
if compare(x, y):
yield x
x = nothing
x = xs.next()
else:
yield y
y = nothing
y = ys.next()
except StopIteration:
if x is not nothing: yield x
for x in xs: yield x
if y is not nothing: yield y
for y in ys: yield y
def _merge(xss, compare=lambda x, y: x <= y):
"""Merges a sequence of sorted iterables in sorted order.
>>> list(_merge([ [1,5], [2,5,6], [], [0,7] ]))
[0, 1, 2, 5, 5, 6, 7]
>>> list(_merge([ [1,2,3] ]))
[1, 2, 3]
>>> list(_merge([]))
[]
"""
return reduce(lambda xs, ys: _merge2(xs, ys, compare=compare), xss, [])
def _updated(d, updates):
"""Updates a dictionary without mutation.
>>> d = { 'x': 1, 'y': 2 }
>>> sorted(_updated(d, { 'y': 3, 'z': 4 }).items())
[('x', 1), ('y', 3), ('z', 4)]
>>> sorted(d.items())
[('x', 1), ('y', 2)]
"""
d = copy.copy(d)
d.update(updates)
return d
| shishkander/recipes-py | recipe_engine/package.py | Python | bsd-3-clause | 19,649 |
# Problem 1581. Teamwork
# http://acm.timus.ru/problem.aspx?space=1&num=1581
total = int(raw_input())
numbers = map(int,raw_input().split())
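# Run-length encode the sequence: print each maximal run as "<count> <value>".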
i = 0
count = 1
while i < total:
if (i+1 < total) and (numbers[i+1] == numbers[i]):
count += 1
else:
print count, numbers[i],
count = 1
i += 1 | mil3na/code-challenges | timus/python/p1581.py | Python | gpl-2.0 | 296 |
from .core.State import State
import time
from .utils.stickers import is_sticker
from vkapp.bot.dao import usersDAO, newsDAO, moneyDAO
import json
PROPOSAL_AMOUNT=1
PUBLISH_AMOUNT=100
MAX_DAILY_NEWS=10
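# Sticker ids double as bot commands: 4639 -> propose news, 4659 -> statistics,
# 4650 -> chat with an admin, 4662 -> finish/confirm the current action.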
class BootStrapState(State):
def on_trigger(self, trigger):
usersDAO.new_blogger(trigger.current_uid)
trigger.send_message(trigger.current_uid,
message='Привет! Я умный бот для Лентача, разработанный компанией Cleverbots')
trigger.send_message(trigger.current_uid, message='https://cleverbots.ru')
time.sleep(3)
trigger.send_message(trigger.current_uid,
message='Тут ты сможешь предлагать новости и получать за них деньги!')
time.sleep(3)
trigger.send_message(trigger.current_uid, message='Держи набор стикеров!')
time.sleep(3)
trigger.send_message(trigger.current_uid, message='<набор стикеров тут>')
time.sleep(3)
trigger.send_message(trigger.current_uid, message='Это секретные стикеры, с их помощью ты сможешь управлять мной!')
time.sleep(3)
trigger.send_message(trigger.current_uid, message='Чтобы предложить новость, отправь мне стикер "Привет"')
time.sleep(1.5)
trigger.send_message(trigger.current_uid, sticker_id=4639)
time.sleep(3)
trigger.send_message(trigger.current_uid, message='Чтобы связаться с администратором Лентача, отправь мне стикер "Я Вам пишу"')
time.sleep(1.5)
trigger.send_message(trigger.current_uid, sticker_id=4650)
time.sleep(3)
trigger.send_message(trigger.current_uid, message='Чтобы просмотреть статистику по своим новостям и по балансу, '
'а также вывести деньги, отправь мне стикер "Чудеса"')
time.sleep(1.5)
trigger.send_message(trigger.current_uid, sticker_id=4659)
time.sleep(3)
trigger.send_message(trigger.current_uid,
message='Ну что, начнем? 😉')
return RootState()
class RootState(State):
def on_enter(self, trigger):
trigger.get_user_struct().erase_queue()
pass
def on_trigger(self, trigger):
update = trigger.get_update()
# trigger.send_message(trigger.current_uid,
# message=is_sticker(update))
sticker_result = is_sticker(update)
if sticker_result[0]:
if sticker_result[1]=='4639':
return ProposeNewsState()
elif sticker_result[1] == '4659':
return StatisticsState()
elif sticker_result[1] == '4650':
return AdminChatState()
else:
trigger.send_message(trigger.current_uid,
message='Неопознанный стикер!')
else:
trigger.send_message(trigger.current_uid, message='Неопознанное сообщение!')
class ProposeNewsState(State):
def on_enter(self, trigger):
trigger.get_user_struct().erase_queue()
news = newsDAO.get_news_proposed_today(trigger.current_uid)
trigger.send_message(trigger.current_uid, message='Сегодня ты отправил {} новостей'.format(len(news)))
if len(news) > MAX_DAILY_NEWS:
trigger.send_message(trigger.current_uid,
message='На сегодня превышен лимит отправки новостей')
return RootState()
# for news_i in news:
# trigger.send_message(trigger.current_uid, message=news_i.media)
trigger.send_message(trigger.current_uid,
message='Отлично! Пиши сюда все, что нужно. Как закончишь, отправь мне стикер "Все на бал"')
trigger.send_message(trigger.current_uid, sticker_id=4662)
def on_trigger(self, trigger):
update = trigger.get_update()
sticker_result = is_sticker(update)
if sticker_result[0]:
if sticker_result[1]=='4662':
media_news=''
pic=''
link=None
for entity in trigger.get_user_struct().message_queue[:-1]:
if len(entity)>6:
if 'attach1_photo' in entity[6]:
pic=entity[6]['attach1_photo']
print ('pic=', pic)
if 'attach1_url' in entity[6]:
link=entity[6]['attach1_url']
print ('link=', link)
media_news += entity[5]+' '
http_index = media_news.find('http')
print('http_index={}'.format(http_index))
if http_index != -1:
link = media_news[http_index:]
media_news = media_news[:http_index]
print (media_news)
news = newsDAO.new_news(link=link, media=media_news, uid=trigger.current_uid, pic=pic)
trigger.send_message(trigger.current_uid,
message='Ок, новость сохранена. Ее просмотрят администраторы, и ты получишь '
'уведомление о статусе ее рассмотрения')
moneyDAO.new_income_proposal(PROPOSAL_AMOUNT, news)
trigger.send_message(trigger.current_uid, message='Тебе начислено {} рублей за предложение новости. '
'Твой баланс составляет {} рублей. '
'Подробнее в режиме "Статистика"'
' (отправь стикер "Чудеса")'
.format(PROPOSAL_AMOUNT, moneyDAO.re_count_balance(trigger.current_uid)))
trigger.send_message(trigger.current_uid, sticker_id=4659)
return RootState()
class StatisticsState(State):
def on_enter(self, trigger):
trigger.get_user_struct().erase_queue()
trigger.send_message(trigger.current_uid, message='У тебя на счету {} рублей. Вот статус твоих постов:'
.format(moneyDAO.re_count_balance(trigger.current_uid)))
news = newsDAO.news_by_blogger(trigger.current_uid)
for i in range(len(news)):
trigger.send_message(trigger.current_uid, message='-------------------------')
trigger.send_message(trigger.current_uid, message='Пост №{}'.format(i+1))
#media_post = ' '.join(media_list)
# if len(media_list[i]) > 300:
# trigger.send_message(trigger.current_uid, message='{}...'.format(media_list[i][:30]))
# else:
if (news[i].pic is not None) and (news[i].pic != ''):
trigger.send_message(trigger.current_uid, attachment='photo'+news[i].pic)
if (news[i].media is not None) and (news[i].media != ''):
trigger.send_message(trigger.current_uid, message=news[i].media)
if (news[i].link is not None) and (news[i].link != ''):
trigger.send_message(trigger.current_uid, message='Ссылка: {}'.format(news[i].link))
review_rating =newsDAO.get_news_review_rating(news[i])
trigger.send_message(trigger.current_uid, message='Оценено администратором: '+('Да' if review_rating!=0 else 'Нет'))
if review_rating != 0:
trigger.send_message(trigger.current_uid,
                                 message='Оценка: ' + ('Лайк' if review_rating == 1 else 'Дизлайк'))
        trigger.send_message(trigger.current_uid, message='Опубликовано: '+('Да' if newsDAO.is_news_published(news[i]) else 'Нет'))
return RootState()
class AdminChatState(State):
def on_enter(self, trigger):
trigger.get_user_struct().erase_queue()
trigger.send_message(trigger.current_uid,
message='Включен режим диалога с администратором. В этом режиме бот не будет реагировать на твои команды.'
' Отправь стикер "Все на бал", чтобы возобновить использование бота')
trigger.send_message(trigger.current_uid, sticker_id=4662)
def on_trigger(self, trigger):
update = trigger.get_update()
sticker_result = is_sticker(update)
if sticker_result[0]:
if sticker_result[1] == '4662':
trigger.send_message(trigger.current_uid,
message='Режим диалога с администратором выключен. Чтобы предложить новость, отправь стикер "Привет",'
' а чтобы посмотреть статистику, отправь стикер "Чудеса"')
return RootState()
| ParuninPavel/lenta4_hack | vkapp/bot/logic/user_states.py | Python | mit | 9,904 |
"""
Taiga integration for Zulip.
Tips for notification output:
*Emojis*: most of the events have specific emojis e.g.
- :notebook: - change of subject/name/description
- :chart_with_upwards_trend: - change of status
etc. If there's no meaningful emoji for a certain event, the defaults are used:
- :thought_balloon: - event connected to commenting
- :busts_in_silhouette: - event connected to a certain user
- :package: - all other events connected to user story
- :calendar: - all other events connected to milestones
- :clipboard: - all other events connected to tasks
- :bulb: - all other events connected to issues
*Text formatting*: if there has been a change of a property, the new value should always be in bold; otherwise the
subject of US/task should be in bold.
"""
from __future__ import absolute_import
from typing import Any, Dict, List, Mapping, Optional, Tuple, Text
from django.utils.translation import ugettext as _
from django.http import HttpRequest, HttpResponse
from zerver.lib.actions import check_send_message
from zerver.lib.response import json_success, json_error
from zerver.decorator import REQ, has_request_variables, api_key_only_webhook_view
from zerver.models import UserProfile
import ujson
from six.moves import range
@api_key_only_webhook_view('Taiga')
@has_request_variables
def api_taiga_webhook(request, user_profile, message=REQ(argument_type='body'),
stream=REQ(default='taiga'), topic=REQ(default='General')):
# type: (HttpRequest, UserProfile, Dict[str, Any], Text, Text) -> HttpResponse
parsed_events = parse_message(message)
content_lines = []
for event in parsed_events:
content_lines.append(generate_content(event) + '\n')
content = "".join(sorted(content_lines))
check_send_message(user_profile, request.client, 'stream', [stream], topic, content)
return json_success()
templates = {
'userstory': {
'create': u':package: %(user)s created user story **%(subject)s**.',
'set_assigned_to': u':busts_in_silhouette: %(user)s assigned user story **%(subject)s** to %(new)s.',
'unset_assigned_to': u':busts_in_silhouette: %(user)s unassigned user story **%(subject)s**.',
'changed_assigned_to': u':busts_in_silhouette: %(user)s reassigned user story **%(subject)s**'
' from %(old)s to %(new)s.',
'points': u':game_die: %(user)s changed estimation of user story **%(subject)s**.',
'blocked': u':lock: %(user)s blocked user story **%(subject)s**.',
'unblocked': u':unlock: %(user)s unblocked user story **%(subject)s**.',
'set_milestone': u':calendar: %(user)s added user story **%(subject)s** to sprint %(new)s.',
'unset_milestone': u':calendar: %(user)s removed user story **%(subject)s** from sprint %(old)s.',
'changed_milestone': u':calendar: %(user)s changed sprint of user story **%(subject)s** from %(old)s'
' to %(new)s.',
'changed_status': u':chart_with_upwards_trend: %(user)s changed status of user story **%(subject)s**'
' from %(old)s to %(new)s.',
'closed': u':checkered_flag: %(user)s closed user story **%(subject)s**.',
'reopened': u':package: %(user)s reopened user story **%(subject)s**.',
'renamed': u':notebook: %(user)s renamed user story from %(old)s to **%(new)s**.',
'description_diff': u':notebook: %(user)s updated description of user story **%(subject)s**.',
'commented': u':thought_balloon: %(user)s commented on user story **%(subject)s**.',
'delete': u':x: %(user)s deleted user story **%(subject)s**.'
},
'milestone': {
'create': u':calendar: %(user)s created sprint **%(subject)s**.',
'renamed': u':notebook: %(user)s renamed sprint from %(old)s to **%(new)s**.',
'estimated_start': u':calendar: %(user)s changed estimated start of sprint **%(subject)s**'
' from %(old)s to %(new)s.',
'estimated_finish': u':calendar: %(user)s changed estimated finish of sprint **%(subject)s**'
' from %(old)s to %(new)s.',
'delete': u':x: %(user)s deleted sprint **%(subject)s**.'
},
'task': {
'create': u':clipboard: %(user)s created task **%(subject)s**.',
'set_assigned_to': u':busts_in_silhouette: %(user)s assigned task **%(subject)s** to %(new)s.',
'unset_assigned_to': u':busts_in_silhouette: %(user)s unassigned task **%(subject)s**.',
'changed_assigned_to': u':busts_in_silhouette: %(user)s reassigned task **%(subject)s**'
' from %(old)s to %(new)s.',
'blocked': u':lock: %(user)s blocked task **%(subject)s**.',
'unblocked': u':unlock: %(user)s unblocked task **%(subject)s**.',
'set_milestone': u':calendar: %(user)s added task **%(subject)s** to sprint %(new)s.',
'changed_milestone': u':calendar: %(user)s changed sprint of task **%(subject)s** from %(old)s to %(new)s.',
'changed_status': u':chart_with_upwards_trend: %(user)s changed status of task **%(subject)s**'
' from %(old)s to %(new)s.',
'renamed': u':notebook: %(user)s renamed task %(old)s to **%(new)s**.',
'description_diff': u':notebook: %(user)s updated description of task **%(subject)s**.',
'commented': u':thought_balloon: %(user)s commented on task **%(subject)s**.',
'delete': u':x: %(user)s deleted task **%(subject)s**.',
'changed_us': u':clipboard: %(user)s moved task **%(subject)s** from user story %(old)s to %(new)s.'
},
'issue': {
'create': u':bulb: %(user)s created issue **%(subject)s**.',
'set_assigned_to': u':busts_in_silhouette: %(user)s assigned issue **%(subject)s** to %(new)s.',
'unset_assigned_to': u':busts_in_silhouette: %(user)s unassigned issue **%(subject)s**.',
'changed_assigned_to': u':busts_in_silhouette: %(user)s reassigned issue **%(subject)s**'
' from %(old)s to %(new)s.',
'changed_priority': u':rocket: %(user)s changed priority of issue **%(subject)s** from %(old)s to %(new)s.',
'changed_severity': u':warning: %(user)s changed severity of issue **%(subject)s** from %(old)s to %(new)s.',
'changed_status': u':chart_with_upwards_trend: %(user)s changed status of issue **%(subject)s**'
' from %(old)s to %(new)s.',
'changed_type': u':bulb: %(user)s changed type of issue **%(subject)s** from %(old)s to %(new)s.',
'renamed': u':notebook: %(user)s renamed issue %(old)s to **%(new)s**.',
'description_diff': u':notebook: %(user)s updated description of issue **%(subject)s**.',
'commented': u':thought_balloon: %(user)s commented on issue **%(subject)s**.',
'delete': u':x: %(user)s deleted issue **%(subject)s**.'
},
}
def get_old_and_new_values(change_type, message):
# type: (str, Mapping[str, Any]) -> Tuple[Optional[Dict[str, Any]], Optional[Dict[str, Any]]]
""" Parses the payload and finds previous and current value of change_type."""
if change_type in ['subject', 'name', 'estimated_finish', 'estimated_start']:
old = message["change"]["diff"][change_type]["from"]
new = message["change"]["diff"][change_type]["to"]
return old, new
try:
old = message["change"]["diff"][change_type]["from"]
except KeyError:
old = None
try:
new = message["change"]["diff"][change_type]["to"]
except KeyError:
new = None
return old, new
def parse_comment(message):
# type: (Mapping[str, Any]) -> Dict[str, Any]
""" Parses the comment to issue, task or US. """
return {
'event': 'commented',
'type': message["type"],
'values': {
'user': get_owner_name(message),
'subject': get_subject(message)
}
}
def parse_create_or_delete(message):
# type: (Mapping[str, Any]) -> Dict[str, Any]
""" Parses create or delete event. """
return {
'type': message["type"],
'event': message["action"],
'values': {
'user': get_owner_name(message),
'subject': get_subject(message)
}
}
def parse_change_event(change_type, message):
# type: (str, Mapping[str, Any]) -> Dict[str, Any]
""" Parses change event. """
evt = {} # type: Dict[str, Any]
values = {
'user': get_owner_name(message),
'subject': get_subject(message)
} # type: Dict[str, Any]
if change_type in ["description_diff", "points"]:
event_type = change_type
elif change_type in ["milestone", "assigned_to"]:
old, new = get_old_and_new_values(change_type, message)
if not old:
event_type = "set_" + change_type
values["new"] = new
elif not new:
event_type = "unset_" + change_type
values["old"] = old
else:
event_type = "changed_" + change_type
values.update({'old': old, 'new': new})
elif change_type == "is_blocked":
if message["change"]["diff"]["is_blocked"]["to"]:
event_type = "blocked"
else:
event_type = "unblocked"
elif change_type == "is_closed":
if message["change"]["diff"]["is_closed"]["to"]:
event_type = "closed"
else:
event_type = "reopened"
elif change_type == "user_story":
old, new = get_old_and_new_values(change_type, message)
event_type = "changed_us"
values.update({'old': old, 'new': new})
elif change_type in ["subject", 'name']:
event_type = 'renamed'
old, new = get_old_and_new_values(change_type, message)
values.update({'old': old, 'new': new})
elif change_type in ["estimated_finish", "estimated_start"]:
old, new = get_old_and_new_values(change_type, message)
        if old != new:
event_type = change_type
values.update({'old': old, 'new': new})
else:
# date hasn't changed
return None
elif change_type in ["priority", "severity", "type", "status"]:
event_type = 'changed_' + change_type
old, new = get_old_and_new_values(change_type, message)
values.update({'old': old, 'new': new})
else:
# we are not supporting this type of event
return None
evt.update({"type": message["type"], "event": event_type, "values": values})
return evt
def parse_message(message):
# type: (Mapping[str, Any]) -> List[Dict[str, Any]]
""" Parses the payload by delegating to specialized functions. """
events = []
if message["action"] in ['create', 'delete']:
events.append(parse_create_or_delete(message))
elif message["action"] == 'change':
if message["change"]["diff"]:
for value in message["change"]["diff"]:
parsed_event = parse_change_event(value, message)
if parsed_event:
events.append(parsed_event)
if message["change"]["comment"]:
events.append(parse_comment(message))
return events
def generate_content(data):
# type: (Mapping[str, Any]) -> str
""" Gets the template string and formats it with parsed data. """
try:
return templates[data['type']][data['event']] % data['values']
except KeyError:
return json_error(_("Unknown message"))
def get_owner_name(message):
# type: (Mapping[str, Any]) -> str
return message["by"]["full_name"]
def get_subject(message):
# type: (Mapping[str, Any]) -> str
data = message["data"]
return data.get("subject", data.get("name"))
| jphilipsen05/zulip | zerver/webhooks/taiga/view.py | Python | apache-2.0 | 11,630 |
# encoding: utf-8
# module gtk.gdk
# from /usr/lib/python2.7/dist-packages/gtk-2.0/pynotify/_pynotify.so
# by generator 1.135
# no doc
# imports
from exceptions import Warning
import gio as __gio
import gobject as __gobject
import gobject._gobject as __gobject__gobject
import pango as __pango
import pangocairo as __pangocairo
from PixbufAnimation import PixbufAnimation
class PixbufSimpleAnim(PixbufAnimation):
"""
Object GdkPixbufSimpleAnim
Properties from GdkPixbufSimpleAnim:
loop -> gboolean: Loop
Whether the animation should loop when it reaches the end
Signals from GObject:
notify (GParam)
"""
def add_frame(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
__gtype__ = None # (!) real value is ''
| ProfessorX/Config | .PyCharm30/system/python_stubs/-1247972723/gtk/gdk/__init__/PixbufSimpleAnim.py | Python | gpl-2.0 | 866 |
"""
TR-specific Form helpers
"""
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import Field, RegexField, Select, CharField
from django.utils.encoding import smart_unicode
from django.utils.translation import ugettext_lazy as _
import re
phone_digits_re = re.compile(r'^(\+90|0)? ?(([1-9]\d{2})|\([1-9]\d{2}\)) ?([2-9]\d{2} ?\d{2} ?\d{2})$')
class TRPostalCodeField(RegexField):
default_error_messages = {
'invalid': _(u'Enter a postal code in the format XXXXX.'),
}
def __init__(self, *args, **kwargs):
super(TRPostalCodeField, self).__init__(r'^\d{5}$',
max_length=5, min_length=5, *args, **kwargs)
def clean(self, value):
value = super(TRPostalCodeField, self).clean(value)
if value in EMPTY_VALUES:
return u''
if len(value) != 5:
raise ValidationError(self.error_messages['invalid'])
province_code = int(value[:2])
if province_code == 0 or province_code > 81:
raise ValidationError(self.error_messages['invalid'])
return value
class TRPhoneNumberField(CharField):
default_error_messages = {
'invalid': _(u'Phone numbers must be in 0XXX XXX XXXX format.'),
}
def clean(self, value):
super(TRPhoneNumberField, self).clean(value)
if value in EMPTY_VALUES:
return u''
value = re.sub('(\(|\)|\s+)', '', smart_unicode(value))
m = phone_digits_re.search(value)
if m:
return u'%s%s' % (m.group(2), m.group(4))
raise ValidationError(self.error_messages['invalid'])
class TRIdentificationNumberField(Field):
"""
    A Turkish Identification Number.
See: http://tr.wikipedia.org/wiki/T%C3%BCrkiye_Cumhuriyeti_Kimlik_Numaras%C4%B1
Checks the following rules to determine whether the number is valid:
* The number is 11-digits.
* First digit is not 0.
    * Conforms to the following two formulas:
(sum(1st, 3rd, 5th, 7th, 9th)*7 - sum(2nd,4th,6th,8th)) % 10 = 10th digit
sum(1st to 10th) % 10 = 11th digit
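       For example, 10000000146 satisfies both checks: (1+0+0+0+1)*7 - 0 = 14
       and 14 % 10 = 4 (the 10th digit); the first ten digits sum to 6, which
       gives the 11th digit.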
"""
default_error_messages = {
'invalid': _(u'Enter a valid Turkish Identification number.'),
'not_11': _(u'Turkish Identification number must be 11 digits.'),
}
def clean(self, value):
super(TRIdentificationNumberField, self).clean(value)
if value in EMPTY_VALUES:
return u''
if len(value) != 11:
raise ValidationError(self.error_messages['not_11'])
if not re.match(r'^\d{11}$', value):
raise ValidationError(self.error_messages['invalid'])
if int(value[0]) == 0:
raise ValidationError(self.error_messages['invalid'])
chksum = (sum([int(value[i]) for i in xrange(0,9,2)])*7-
sum([int(value[i]) for i in xrange(1,9,2)])) % 10
if chksum != int(value[9]) or \
(sum([int(value[i]) for i in xrange(10)]) % 10) != int(value[10]):
raise ValidationError(self.error_messages['invalid'])
return value
class TRProvinceSelect(Select):
"""
A Select widget that uses a list of provinces in Turkey as its choices.
"""
def __init__(self, attrs=None):
from tr_provinces import PROVINCE_CHOICES
super(TRProvinceSelect, self).__init__(attrs, choices=PROVINCE_CHOICES)
| ychen820/microblog | y/google-cloud-sdk/platform/google_appengine/lib/django-1.3/django/contrib/localflavor/tr/forms.py | Python | bsd-3-clause | 3,430 |
# Copyright (C) 2018 ASTRON (Netherlands Institute for Radio Astronomy)
# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
#
# This file is part of the LOFAR software suite.
# The LOFAR software suite is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# The LOFAR software suite is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
# $Id$
from lofar.lta.ltastorageoverview import store
from lofar.lta.ingest.common.srm import *
from lofar.lta.ingest.client.ingestbuslistener import IngestEventMesssageBusListener, IngestEventMessageHandler
from lofar.messaging import DEFAULT_BROKER, DEFAULT_BUSNAME
import logging
logger = logging.getLogger(__name__)
class LTASOIngestEventHandler(IngestEventMessageHandler):
def __init__(self, dbcreds):
self._dbcreds = dbcreds
super().__init__(log_subject_filters=("JobFinished", "TaskFinished"))
def onJobFinished(self, job_dict):
"""onJobFinished is called upon receiving a JobFinished message.
In this LTASOIngestEventHandler, it calls _schedule_srmurl_for_visit to schedule the finished surl for a scraper visit.
        :param job_dict: dictionary with the finished job"""
self._schedule_srmurl_for_visit(job_dict.get('srm_url'))
def onTaskFinished(self, task_dict):
"""onTaskFinished is called upon receiving a TaskFinished message. (when all dataproducts of a observation/pipeline were ingested)
In this LTASOIngestEventHandler, it calls _schedule_srmurl_for_visit to schedule the finished surl for a scraper visit.
:param task_dict: dictionary with the finished task"""
self._schedule_srmurl_for_visit(task_dict.get('srm_url'))
def _schedule_srmurl_for_visit(self, srm_url):
"""process the given srm_url, insert it in the db if needed, and mark it as not visited,
so that the scraper will visit it soon.
:param srm_url: a valid srm url like: srm://lofar-srm.fz-juelich.de:8443/pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884/L652884_SAP000_B000_P001_bf_e619e5da.tar
:return: None
"""
if srm_url:
with store.LTAStorageDb(self._dbcreds) as db:
site = self._get_site_from_db(srm_url)
dir_path = get_dir_path_in_site(srm_url)
directory = db.directoryByName(dir_path, site['id'])
if directory is None:
dir_id = self._insert_missing_directory_tree_if_needed(srm_url).get(dir_path)
else:
dir_id = directory.get('dir_id')
if dir_id is not None:
self._mark_directory_for_a_visit(dir_id)
def _mark_directory_for_a_visit(self, dir_id):
"""
update the directory's last visit time to unix-epoch (which is the lowest possible visit timestamp), so that it
appears in the visitStats which are used by the scraper to determine the next directory to be visited.
:param int dir_id: the id of the directory
:return: None
"""
with store.LTAStorageDb(self._dbcreds) as db:
return db.updateDirectoryLastVisitTime(dir_id, datetime.fromtimestamp(0))
def _get_site_from_db(self, srm_url):
"""
find the site entry in the database for the given srm_url.
raises a lookup error if not found.
:param string srm_url: a valid srm url
:return: a site entry dict from the database
"""
site_url = get_site_surl(srm_url)
# find site in db
with store.LTAStorageDb(self._dbcreds) as db:
site = next((s for s in db.sites() if s['url'] == site_url), None)
if site is None:
raise LookupError('Could not find site %s in database %s' % (site_url, self._dbcreds.database))
return site
def _insert_missing_directory_tree_if_needed(self, srm_url):
# example url: srm://lofar-srm.fz-juelich.de:8443/pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884/L652884_SAP000_B000_P001_bf_e619e5da.tar
# or for a dir: srm://lofar-srm.fz-juelich.de:8443/pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884
# site_url then becomes: srm://lofar-srm.fz-juelich.de:8443
# dir_path then becomes: /pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884
site = self._get_site_from_db(srm_url)
dir_path = get_dir_path_in_site(srm_url)
with store.LTAStorageDb(self._dbcreds) as db:
return db.insert_missing_directory_tree_if_needed(dir_path, site['id'])
| kernsuite-debian/lofar | LTA/ltastorageoverview/lib/ingesteventhandler.py | Python | gpl-3.0 | 5,055 |
from __future__ import print_function
import torchfile
import numpy as np
EMBEDDING = torchfile.load('../lstm-char-cnn/param_init_1.t7')
KERNEL_1_W = torchfile.load('../lstm-char-cnn/param_init_2.t7')
KERNEL_1_B = torchfile.load('../lstm-char-cnn/param_init_3.t7')
LSTM_1_W = torchfile.load('../lstm-char-cnn/param_init_4.t7')
LSTM_B = torchfile.load('../lstm-char-cnn/param_init_5.t7')
LSTM_2_W = torchfile.load('../lstm-char-cnn/param_init_6.t7')
# following manipulations make LSTM_W usable with BasicLSTMCell - need to flip some blocks to convert from Karpathy's LSTM implementation
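# (The split blocks arrive in Karpathy's gate order [input, output, forget,
# transform]; re-stacking them as [a, d, c, b] yields [input, new-input,
# forget, output], the layout BasicLSTMCell expects.)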
LSTM_W = np.concatenate([LSTM_1_W, LSTM_2_W], axis=1)
a, b, c, d = np.split(LSTM_W, 4, axis=0)
LSTM_W = np.concatenate([a, d, c, b], axis=0)
LSTM_W = LSTM_W.transpose()
a, b, c, d = np.split(LSTM_B, 4)
LSTM_B = np.concatenate([a, d, c, b], axis=0)
SOFTMAX_W = torchfile.load('../lstm-char-cnn/param_init_7.t7')
SOFTMAX_B = torchfile.load('../lstm-char-cnn/param_init_8.t7')
if __name__ == '__main__':
print(EMBEDDING)
print(KERNEL_1_W)
print(KERNEL_1_B)
print(LSTM_1_W.shape)
print(LSTM_2_W.shape)
print(np.vstack([np.transpose(LSTM_1_W), np.transpose(LSTM_2_W)]))
print(LSTM_B)
'''
-- evaluate the input sums at once for efficiency
local i2h = nn.Linear(input_size_L, 4 * rnn_size)(x)
local h2h = nn.Linear(rnn_size, 4 * rnn_size, false)(prev_h)
local all_input_sums = nn.CAddTable()({i2h, h2h})
local sigmoid_chunk = nn.Narrow(2, 1, 3*rnn_size)(all_input_sums)
sigmoid_chunk = nn.Sigmoid()(sigmoid_chunk)
local in_gate = nn.Narrow(2,1,rnn_size)(sigmoid_chunk)
local out_gate = nn.Narrow(2, rnn_size+1, rnn_size)(sigmoid_chunk)
local forget_gate = nn.Narrow(2, 2*rnn_size + 1, rnn_size)(sigmoid_chunk)
local in_transform = nn.Tanh()(nn.Narrow(2,3*rnn_size + 1, rnn_size)(all_input_sums))
-- perform the LSTM update
local next_c = nn.CAddTable()({
nn.CMulTable()({forget_gate, prev_c}),
nn.CMulTable()({in_gate, in_transform})
})
-- gated cells form the output
local next_h = nn.CMulTable()({out_gate, nn.Tanh()(next_c)})
'''
x = np.array([-0.04201929, 0.02275813])
prev_h = np.array([0.0, 0.0, 0.0, 0.0, 0.0])
prev_c = np.array([0.0, 0.0, 0.0, 0.0, 0.0])
i2h = np.dot(LSTM_1_W, x) + LSTM_B
h2h = np.dot(LSTM_2_W, prev_h)
all_input_sums = i2h + h2h
print('ALL_INPUT_SUMS', all_input_sums)
'''
ALL_INPUT_SUMS [ 0.02735383 0.03522781 -0.03592717 -0.02283547 0.04040729
0.01193809 0.00140385 -0.01781952 -0.0431703 0.01421306
-0.02227222 -0.02860017 -0.0485126 0.02249379 -0.02521783
-0.03297023 0.00699924 0.02405969 0.03880194 0.01295331]
'''
sigmoid_chunk = all_input_sums[0:15]
def sigmoid(x):
return 1. / (1. + np.exp(-x))
sigmoid_chunk = sigmoid(sigmoid_chunk)
print(sigmoid_chunk)
in_gate = sigmoid_chunk[0:5]
out_gate = sigmoid_chunk[5:10]
forget_gate = sigmoid_chunk[10:15]
in_transform = all_input_sums[15:20]
print(forget_gate, prev_c)
print(in_gate, in_transform)
next_c = forget_gate * prev_c + in_gate * in_transform
print('next_c:', next_c)
next_h = out_gate * np.tanh(next_c)
print('next_h:', next_h)
'''
next_c: [-0.01671056 0.00356125 0.01181377 0.01917946 0.00660749]
next_h: [-0.00840437 0.00178187 0.00585398 0.00938162 0.00332717]
'''
| mkroutikov/tf-lstm-char-cnn | read_param_init.py | Python | mit | 3,410 |
import fs
import json
print('--- start ---')
t = fs.open_fs('osfs://.')
for f in t.scandir('/', namespaces=['details']):
print( f.raw )
t.close()
print('--- end ---')
| dagnelies/restfs | fstest.py | Python | mit | 176 |
"""
Dingemans Wave Shoaling
"""
from proteus import Domain, Context
from proteus.mprans import SpatialTools as st
from proteus import WaveTools as wt
from math import *
import numpy as np
opts=Context.Options([
# predefined test cases
("water_level", 0.86, "Height of free surface above seabed"),
# tank
("tank_dim", (58., 3.), "Dimensions of the tank"),
("generation", True, "Generate waves at the left boundary (True/False)"),
("absorption", True, "Absorb waves at the right boundary (True/False)"),
("tank_sponge", (5., 5.), "Length of relaxation zones zones (left, right)"),
("free_slip", True, "Should tank walls have free slip conditions "
"(otherwise, no slip conditions will be applied)."),
# waves
("waves", True, "Generate waves (True/False)"),
("wave_period", 2.02, "Period of the waves"),
("wave_height", 0.02, "Height of the waves"),
("wave_depth", 0.86, "Wave depth"),
("wave_dir", (1.,0.,0.), "Direction of the waves (from left boundary)"),
("wave_wavelength", 5.037, "Direction of the waves (from left boundary)"), #calculated by FFT
("wave_type", 'Fenton', "type of wave"),
("Bcoeff", np.array([0.01402408, 0.00008097, 0.00000013, 0.00000000, 0.00000000,
0.00000000, 0.00000000, 0.00000000]), "Bcoeffs"),
("Ycoeff", np.array([0.01246994, 0.00018698, 0.00000300, 0.00000006, 0.00000000,
0.00000000, 0.00000000, 0.00000000]), "Ycoeffs"),
("fast", True, "switch for fast cosh calculations in WaveTools"),
# mesh refinement
("refinement", False, "Gradual refinement"),
("he", 0.04, "Set characteristic element size"),
("he_max", 10, "Set maximum characteristic element size"),
("he_max_water", 10, "Set maximum characteristic in water phase"),
("refinement_freesurface", 0.1,"Set area of constant refinement around free surface (+/- value)"),
("refinement_caisson", 0.,"Set area of constant refinement (Box) around caisson (+/- value)"),
("refinement_grading", np.sqrt(1.1*4./np.sqrt(3.))/np.sqrt(1.*4./np.sqrt(3)), "Grading of refinement/coarsening (default: 10% volume)"),
# numerical options
("gen_mesh", True, "True: generate new mesh every time. False: do not generate mesh if file exists"),
("use_gmsh", True, "True: use Gmsh. False: use Triangle/Tetgen"),
("movingDomain", False, "True/False"),
("T", 30.0, "Simulation time"),
("dt_init", 0.001, "Initial time step"),
("dt_fixed", None, "Fixed (maximum) time step"),
("timeIntegration", "backwardEuler", "Time integration scheme (backwardEuler/VBDF)"),
("cfl", 0.5 , "Target cfl"),
("nsave", 5, "Number of time steps to save per second"),
("useRANS", 0, "RANS model"),
])
# ----- CONTEXT ------ #
# waves
omega = 1.
if opts.waves is True:
period = opts.wave_period
omega = 2*np.pi/opts.wave_period
height = opts.wave_height
mwl = opts.water_level
depth = opts.wave_depth
direction = opts.wave_dir
waves = wt.MonochromaticWaves(period=period, waveHeight=height, mwl=mwl, depth=depth,
g=np.array([0., -9.81, 0.]), waveDir=direction,
wavelength=opts.wave_wavelength,
waveType=opts.wave_type,
Ycoeff=np.array(opts.Ycoeff),
Bcoeff=np.array(opts.Bcoeff),
Nf=len(opts.Bcoeff),
fast=opts.fast)
wavelength = waves.wavelength
# tank options
waterLevel = opts.water_level
tank_dim = opts.tank_dim
tank_sponge = opts.tank_sponge
# ----- DOMAIN ----- #
domain = Domain.PlanarStraightLineGraphDomain()
# refinement
he = opts.he
smoothing = he*3.
# ----- TANK ------ #
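# Piecewise-linear (x, y) profile of the submerged bar on the tank floor,
# i.e. the Dingemans shoaling bathymetry used as an obstacle.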
sloped_shore = [[[9.22, 0.],
[9.64, 0.06],
[15.01, 0.06],
[27.04, 0.66],
[31.04, 0.66],
[37.07, 0.06],
[45.39, 0.06],
[45.81, 0.]],]
tank = st.TankWithObstacles2D(domain=domain,
dim=tank_dim,
obstacles=sloped_shore)
# ----- GENERATION / ABSORPTION LAYERS ----- #
tank.setSponge(x_n=tank_sponge[0], x_p=tank_sponge[1])
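# Relaxation-zone drag coefficient, scaled with the wave angular frequency;
# the 1e-6 divisor corresponds to the water kinematic viscosity nu_0 set below.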
dragAlpha = 10.*omega/1e-6
if opts.generation:
tank.setGenerationZones(x_n=True, waves=waves, dragAlpha=dragAlpha, smoothing = smoothing)
if opts.absorption:
tank.setAbsorptionZones(x_p=True, dragAlpha = dragAlpha)
# ----- BOUNDARY CONDITIONS ----- #
# Waves
tank.BC['x-'].setUnsteadyTwoPhaseVelocityInlet(waves, smoothing=smoothing, vert_axis=1)
# open top
tank.BC['y+'].setAtmosphere()
if opts.free_slip:
tank.BC['y-'].setFreeSlip()
tank.BC['x+'].setFreeSlip()
if not opts.generation:
tank.BC['x-'].setFreeSlip()
else: # no slip
tank.BC['y-'].setNoSlip()
tank.BC['x+'].setNoSlip()
# sponge
tank.BC['sponge'].setNonMaterial()
for bc in tank.BC_list:
bc.setFixedNodes()
# ----- GAUGES ----- #
gauge_x = [6.26, 10.26, 12.66, 23.26, 27.26, 29.26, 31.26, 33.66, 36.86, 40.26, 44.26]
gauge_y = []
column_gauge_locations = []
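# Interpolate each gauge's bed elevation from the piecewise-linear shore
# profile above, then attach a vertical sampling line from the bed up to the
# top of the tank.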
for i in range(len(gauge_x)):
if 9.22 < gauge_x[i] < 9.64:
gauge_y.append( (gauge_x[i]-9.22)*0.06/(9.64-9.22) )
elif 9.64 <= gauge_x[i] <= 15.01:
gauge_y.append(0.06)
elif 15.01 < gauge_x[i] < 27.04:
gauge_y.append( 0.06+(gauge_x[i]-15.01)*(0.66-0.06)/(27.04-15.01) )
elif 27.04 <= gauge_x[i] <= 31.04:
gauge_y.append(0.66)
elif 31.04 < gauge_x[i] < 37.07:
gauge_y.append( 0.66+(gauge_x[i]-31.04)*(0.06-0.66)/(37.07-31.04) )
elif 37.07 <= gauge_x[i] <= 45.39:
gauge_y.append(0.06)
elif 45.39 < gauge_x[i] < 45.81:
gauge_y.append( 0.06+(gauge_x[i]-45.39)*(0.-0.06)/(45.81-45.39) )
else:
gauge_y.append(0.)
column_gauge_locations.append(((gauge_x[i], gauge_y[i], 0.), (gauge_x[i], tank_dim[1], 0.)))
tank.attachLineIntegralGauges('vof', gauges=((('vof',),column_gauge_locations),), fileName='column_gauges.csv')
tank.facets = np.array([[[i for i in range(12)]]]+[[[11, 12, 13, 10]]]+[[[8, 14, 15, 9]]])
# ----- ASSEMBLE DOMAIN ----- #
domain.MeshOptions.use_gmsh = opts.use_gmsh
domain.MeshOptions.genMesh = opts.gen_mesh
domain.MeshOptions.he = he
domain.use_gmsh = opts.use_gmsh
st.assembleDomain(domain)
# ----- REFINEMENT OPTIONS ----- #
import py2gmsh
from MeshRefinement import geometry_to_gmsh
mesh = geometry_to_gmsh(domain)
field_list = []
box = 0.1001
box1 = py2gmsh.Fields.Box(mesh=mesh)
box1.VIn = 0.03
box1.VOut = he
box1.XMin = -tank_sponge[0]
box1.XMax = tank_dim[0]+tank_sponge[1]
box1.YMin = waterLevel-box
box1.YMax = waterLevel+box
field_list += [box1]
p0 = py2gmsh.Entity.Point([-tank_sponge[0], waterLevel+box, 0.], mesh=mesh)
p1 = py2gmsh.Entity.Point([tank_dim[0]+tank_sponge[1], waterLevel+box, 0.], mesh=mesh)
p2 = py2gmsh.Entity.Point([-tank_sponge[0], waterLevel-box, 0.], mesh=mesh)
p3 = py2gmsh.Entity.Point([tank_dim[0]+tank_sponge[1], waterLevel-box, 0.], mesh=mesh)
l1 = py2gmsh.Entity.Line([p0, p1], mesh=mesh)
l2 = py2gmsh.Entity.Line([p2, p3], mesh=mesh)
grading = 1.05
bl2 = py2gmsh.Fields.BoundaryLayer(mesh=mesh)
bl2.hwall_n = 0.03
bl2.ratio = grading
bl2.EdgesList = [l1, l2]
field_list += [bl2]
fmin = py2gmsh.Fields.Min(mesh=mesh)
fmin.FieldsList = field_list
mesh.setBackgroundField(fmin)
mesh.Options.Mesh.CharacteristicLengthMax = he
domain.MeshOptions.genMesh = opts.gen_mesh
domain.MeshOptions.use_gmsh = opts.use_gmsh
domain.use_gmsh = opts.use_gmsh
geofile = 'mesh'
mesh.writeGeo(geofile+'.geo')
domain.geofile = geofile
##########################################
# Numerical Options and other parameters #
##########################################
rho_0=998.2
nu_0 =1.004e-6
rho_1=1.205
nu_1 =1.500e-5
sigma_01=0.0
g = [0., -9.81]
from math import *
from proteus import MeshTools, AuxiliaryVariables
import numpy
import proteus.MeshTools
from proteus import Domain
from proteus.Profiling import logEvent
from proteus.default_n import *
from proteus.ctransportCoefficients import smoothedHeaviside
from proteus.ctransportCoefficients import smoothedHeaviside_integral
#----------------------------------------------------
# Boundary conditions and other flags
#----------------------------------------------------
movingDomain=opts.movingDomain
checkMass=False
applyCorrection=True
applyRedistancing=True
freezeLevelSet=True
#----------------------------------------------------
# Time stepping and velocity
#----------------------------------------------------
weak_bc_penalty_constant = 10.0/nu_0#Re
dt_init = opts.dt_init
T = opts.T
nDTout = int(opts.T*opts.nsave)
timeIntegration = opts.timeIntegration
if nDTout > 0:
dt_out= (T-dt_init)/nDTout
else:
dt_out = 0
runCFL = opts.cfl
dt_fixed = opts.dt_fixed
#----------------------------------------------------
# Discretization -- input options
useOldPETSc=False
useSuperlu = not True
spaceOrder = 1
useHex = False
useRBLES = 0.0
useMetrics = 1.0
useVF = 1.0
useOnlyVF = False
useRANS = opts.useRANS # 0 -- None
# 1 -- K-Epsilon
# 2 -- K-Omega, 1998
# 3 -- K-Omega, 1988
# Input checks
if spaceOrder not in [1,2]:
print "INVALID: spaceOrder" + spaceOrder
sys.exit()
if useRBLES not in [0.0, 1.0]:
print "INVALID: useRBLES" + useRBLES
sys.exit()
if useMetrics not in [0.0, 1.0]:
print "INVALID: useMetrics"
sys.exit()
# Discretization
nd = 2
if spaceOrder == 1:
hFactor=1.0
if useHex:
basis=C0_AffineLinearOnCubeWithNodalBasis
elementQuadrature = CubeGaussQuadrature(nd,3)
elementBoundaryQuadrature = CubeGaussQuadrature(nd-1,3)
else:
basis=C0_AffineLinearOnSimplexWithNodalBasis
elementQuadrature = SimplexGaussQuadrature(nd,3)
elementBoundaryQuadrature = SimplexGaussQuadrature(nd-1,3)
#elementBoundaryQuadrature = SimplexLobattoQuadrature(nd-1,1)
elif spaceOrder == 2:
hFactor=0.5
if useHex:
basis=C0_AffineLagrangeOnCubeWithNodalBasis
elementQuadrature = CubeGaussQuadrature(nd,4)
elementBoundaryQuadrature = CubeGaussQuadrature(nd-1,4)
else:
basis=C0_AffineQuadraticOnSimplexWithNodalBasis
elementQuadrature = SimplexGaussQuadrature(nd,4)
elementBoundaryQuadrature = SimplexGaussQuadrature(nd-1,4)
# Numerical parameters
sc = 0.5 # default: 0.5. Test: 0.25
sc_beta = 1.5 # default: 1.5. Test: 1.
epsFact_consrv_diffusion = 1. # default: 1.0. Test: 0.1
ns_forceStrongDirichlet = False
backgroundDiffusionFactor=0.01
if useMetrics:
ns_shockCapturingFactor = sc
ns_lag_shockCapturing = True
ns_lag_subgridError = True
ls_shockCapturingFactor = sc
ls_lag_shockCapturing = True
ls_sc_uref = 1.0
ls_sc_beta = sc_beta
vof_shockCapturingFactor = sc
vof_lag_shockCapturing = True
vof_sc_uref = 1.0
vof_sc_beta = sc_beta
rd_shockCapturingFactor =sc
rd_lag_shockCapturing = False
epsFact_density = 3.
epsFact_viscosity = epsFact_curvature = epsFact_vof = epsFact_consrv_heaviside = epsFact_consrv_dirac = epsFact_density
epsFact_redistance = 0.33
epsFact_consrv_diffusion = epsFact_consrv_diffusion
redist_Newton = True#False
kappa_shockCapturingFactor = sc
kappa_lag_shockCapturing = True
kappa_sc_uref = 1.0
kappa_sc_beta = sc_beta
dissipation_shockCapturingFactor = sc
dissipation_lag_shockCapturing = True
dissipation_sc_uref = 1.0
dissipation_sc_beta = sc_beta
else:
ns_shockCapturingFactor = 0.9
ns_lag_shockCapturing = True
ns_lag_subgridError = True
ls_shockCapturingFactor = 0.9
ls_lag_shockCapturing = True
ls_sc_uref = 1.0
ls_sc_beta = 1.0
vof_shockCapturingFactor = 0.9
vof_lag_shockCapturing = True
vof_sc_uref = 1.0
vof_sc_beta = 1.0
rd_shockCapturingFactor = 0.9
rd_lag_shockCapturing = False
epsFact_density = 1.5
epsFact_viscosity = epsFact_curvature = epsFact_vof = epsFact_consrv_heaviside = epsFact_consrv_dirac = epsFact_density
epsFact_redistance = 0.33
epsFact_consrv_diffusion = 10.0
redist_Newton = False#True
kappa_shockCapturingFactor = 0.9
kappa_lag_shockCapturing = True#False
kappa_sc_uref = 1.0
kappa_sc_beta = 1.0
dissipation_shockCapturingFactor = 0.9
dissipation_lag_shockCapturing = True#False
dissipation_sc_uref = 1.0
dissipation_sc_beta = 1.0
ns_nl_atol_res = 1e-6#max(1.0e-6,0.001*domain.MeshOptions.he**2)
vof_nl_atol_res = 1e-6#max(1.0e-6,0.001*domain.MeshOptions.he**2)
ls_nl_atol_res = 1e-6#max(1.0e-6,0.001*domain.MeshOptions.he**2)
mcorr_nl_atol_res = 1e-6#max(1.0e-6,0.0001*domain.MeshOptions.he**2)
rd_nl_atol_res = 1e-6#max(1.0e-6,0.01*domain.MeshOptions.he)
kappa_nl_atol_res = 1e-6#max(1.0e-6,0.001*domain.MeshOptions.he**2)
dissipation_nl_atol_res = 1e-6#max(1.0e-6,0.001*domain.MeshOptions.he**2)
mesh_nl_atol_res = 1e-6#max(1.0e-6,0.001*domain.MeshOptions.he**2)
mesh.writeGeo(geofile+'.geo')
#turbulence
ns_closure=0 #1-classic smagorinsky, 2-dynamic smagorinsky, 3 -- k-epsilon, 4 -- k-omega
if useRANS == 1:
ns_closure = 3
elif useRANS >= 2:
    ns_closure = 4
def twpflowPressure_init(x, t):
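    # Hydrostatic two-phase initial pressure: integrate the smoothed density
    # profile from the tank top (where p = p_L) down to the point x.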
p_L = 0.0
phi_L = tank_dim[nd-1] - waterLevel
phi = x[nd-1] - waterLevel
return p_L -g[nd-1]*(rho_0*(phi_L - phi)+(rho_1 -rho_0)*(smoothedHeaviside_integral(epsFact_consrv_heaviside*opts.he,phi_L)
-smoothedHeaviside_integral(epsFact_consrv_heaviside*opts.he,phi)))
| erdc-cm/air-water-vv | 2d/waveTransformation/Dingemans_wave_shoaling/DingemansWaveShoaling.py | Python | mit | 13,687 |
import pkg_resources
pkg_resources.require( "Cheetah" )
from Cheetah.Template import Template
def fill_template( template_text, context=None, **kwargs ):
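    """Fill a Cheetah template from a context dict (or keyword arguments).

    Minimal usage sketch (assuming Cheetah is available):
        fill_template('Hello $name', name='world')  # -> 'Hello world'
    """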
if not context:
context = kwargs
return str( Template( source=template_text, searchList=[context] ) )
| mikel-egana-aranguren/SADI-Galaxy-Docker | galaxy-dist/lib/galaxy/util/template.py | Python | gpl-3.0 | 274 |
# !/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for Test event formatter."""
import unittest
from plaso.formatters import test
from tests.formatters import test_lib
class TestTheuserFormatterTest(test_lib.EventFormatterTestCase):
"""Tests the Test theuser event formatter."""
def testInitialization(self):
"""Tests the initialization."""
event_formatter = test.TestTheuserFormatter()
self.assertIsNotNone(event_formatter)
def testGetFormatStringAttributeNames(self):
"""Tests the GetFormatStringAttributeNames function."""
event_formatter = test.TestTheuserFormatter()
expected_attribute_names = [
u'advertiser_account_type', u'analytics_type', u'bio_entities',
u'business_profile_state', u'could_be_stale', u'description',
u'device_following', u'extended_profile_fields', u'favorites_count',
u'followers_count', u'followers_count_fast', u'followers_count_normal',
u'following', u'following_count', u'has_collections',
u'has_extended_profile_fields', u'id', u'is_lifeline_institution',
u'is_translator', u'location', u'media_count', u'name',
u'pinned_tweet_id', u'profile_banner_url', u'profile_image_url',
u'profile_link_color_hex_triplet', u'protected', u'screen_name',
u'statuses_count', u'structured_location', u'url', u'url_entities',
u'verified'
]
self._TestGetFormatStringAttributeNames(
event_formatter, expected_attribute_names)
if __name__ == '__main__':
unittest.main()
| ClaudiaSaxer/PlasoScaffolder | src/tests/end_to_end_test/ExpectedEasyGenerationRowNameFiles/formatters_test.py | Python | apache-2.0 | 1,531 |
# -*- coding: utf-8 -*-
#
# OpenStack Command Line Client documentation build configuration file, created
# by sphinx-quickstart on Wed May 16 12:05:58 2012.
#
# This file is execfile()d with the current directory set to its containing
# dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
import pbr.version
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')))
# NOTE(blk-u): Path for our Sphinx extension, remove when
# https://launchpad.net/bugs/1260495 is fixed.
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
# -- General configuration ----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.todo',
'oslosphinx',
'ext.apidoc',
]
# Add any paths that contain templates here, relative to this directory.
#templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'OpenStack Command Line Client'
copyright = u'2012-2013 OpenStack Foundation'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
version_info = pbr.version.VersionInfo('python-openstackclient')
#
# The short X.Y version.
version = version_info.version_string()
# The full version, including alpha/beta/rc tags.
release = version_info.release_string()
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
modindex_common_prefix = ['openstackclient.']
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme_path = ["."]
#html_theme = '_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'OpenStackCommandLineClientdoc'
# -- Options for LaTeX output -------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'OpenStackCommandLineClient.tex',
u'OpenStack Command Line Client Documentation',
u'OpenStack', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output -------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(
'man/openstack',
'openstack',
u'OpenStack Command Line Client',
[u'OpenStack contributors'],
1,
),
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'OpenStackCommandLineClient',
u'OpenStack Command Line Client Documentation',
u'OpenStack', 'OpenStackCommandLineClient',
'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
| BjoernT/python-openstackclient | doc/source/conf.py | Python | apache-2.0 | 8,560 |
from ctypes import *
class TfsTask(Structure):
_fields_ = [
("title", c_char_p),
("description", c_char_p),
("id", c_long)
]
class TfsTaskList(Structure):
pass
TfsTaskList._fields_ = [
("task", TfsTask),
("next", POINTER(TfsTaskList))
]
class TfsWrapper(object):
def __init__(self):
self.lib = cdll.LoadLibrary("PyUtil.dll")
def connect(self, url, collection):
self.lib.ConnectToTfs.restype = c_bool
        self.lib.ConnectToTfs.argtypes = [c_char_p, c_char_p]
return self.lib.ConnectToTfs(url, collection)
def query(self, query):
self.lib.QueryTfs.restype = POINTER(TfsTaskList)
        self.lib.QueryTfs.argtypes = [c_char_p]
        result = self.lib.QueryTfs(query)
        task_list = result
        tasks = []
        while task_list:
            data = task_list.contents
            tasks.append(data.task)
            task_list = data.next
        tasks.pop()  # Remove last element, since it is a nullptr
        return tasks
| zaibacu/PyTfs | libs/TfsWrapper.py | Python | mit | 907
"""
QuerySets for the FOIA application
"""
# Django
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.db import models
from django.db.models import Count, F, Max, OuterRef, Q, Subquery, Sum
from django.utils import timezone
from django.utils.text import slugify
# Standard Library
import logging
from datetime import date, datetime, time
from itertools import groupby
# Third Party
import requests
# MuckRock
from muckrock.agency.constants import STALE_REPLIES
from muckrock.core.models import ExtractDay
logger = logging.getLogger(__name__)
class PreloadFileQuerysetMixin:
"""Mixin for preloading related files"""
files_path = "files"
comm_id = "id"
def __init__(self, *args, **kwargs):
super(PreloadFileQuerysetMixin, self).__init__(*args, **kwargs)
self._preload_files_amt = 0
self._preload_files_done = False
def _clone(self):
"""Add _preload_files_amt to vlaues to copy over in a clone"""
# pylint: disable=protected-access
clone = super(PreloadFileQuerysetMixin, self)._clone()
clone._preload_files_amt = self._preload_files_amt
return clone
def preload_files(self, limit=11):
"""Preload up to limit files for the communications
Mark as needing to be preloaded - actually preloading will be done lazily
"""
# pylint: disable=protected-access
self._preload_files_amt = limit
return self
def _do_preload_files(self):
"""Do the preloading of the files lazily"""
# pylint: disable=import-outside-toplevel
from muckrock.foia.models.file import FOIAFile
comm_ids = [getattr(i, self.comm_id) for i in self._result_cache]
if comm_ids:
files = FOIAFile.objects.preload(comm_ids, self._preload_files_amt)
for obj in self._result_cache:
self._process_preloaded_files(obj, files)
self._preload_files_done = True
def _process_preloaded_files(self, obj, files):
"""What to do with the preloaded files for each record"""
obj.display_files = files.get(obj.pk, [])
def _fetch_all(self):
"""Override fetch all to lazily preload files if needed"""
super(PreloadFileQuerysetMixin, self)._fetch_all()
if self._preload_files_amt > 0 and not self._preload_files_done:
self._do_preload_files()
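    # Illustrative use, assuming a model manager built from a queryset that
    # mixes this in (the names below are placeholders, not defined here):
    #
    #   comms = FOIACommunication.objects.filter(foia=foia).preload_files(5)
    #   for comm in comms:
    #       comm.display_files  # up to 5 preloaded files, fetched in one query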
class FOIARequestQuerySet(models.QuerySet):
"""Object manager for FOIA requests"""
# pylint: disable=too-many-public-methods
def get_done(self):
"""Get all FOIA requests with responses"""
return self.filter(status__in=["partial", "done"]).exclude(datetime_done=None)
def get_viewable(self, user):
"""Get all viewable FOIA requests for given user"""
if user.is_staff:
return self.all()
if user.is_authenticated:
# Requests are visible if you own them, have view or edit permissions,
# or if they are not embargoed
query = (
Q(composer__user=user)
| Q(proxy=user)
| Q(pk__in=user.edit_access.all())
| Q(pk__in=user.read_access.all())
| ~Q(embargo=True)
)
# agency users may also view requests for their agency
if user.profile.is_agency_user:
query = query | Q(agency=user.profile.agency)
# organizational users may also view requests from their org that are shared
query = query | Q(
composer__user__profile__org_share=True,
composer__organization__in=user.organizations.all(),
)
return self.exclude(deleted=True).filter(query)
else:
# anonymous user, filter out embargoes and noindex requests
return (
self.exclude(embargo=True).exclude(noindex=True).exclude(deleted=True)
)
def get_public(self):
"""Get all publically viewable FOIA requests"""
return self.get_viewable(AnonymousUser())
def get_overdue(self):
"""Get all overdue FOIA requests"""
return self.filter(status__in=["ack", "processed"], date_due__lt=date.today())
def get_followup(self):
"""Get requests that need follow up emails sent"""
return (
self.filter(
status__in=["ack", "processed"],
date_followup__lte=date.today(),
disable_autofollowups=False,
)
# Exclude requests which should be emailed or faxed,
# but need to have their email address or fax number updated.
# This is to not overwhelm snail mail tasks with autogenerated
# messages while trying to find new contact info
.exclude(
Q(email__status="error", fax=None)
| Q(email=None, fax__status="error")
| Q(email__status="error", fax__status="error")
)
)
def get_open(self):
"""Get requests which we are awaiting a response from"""
return self.filter(status__in=["ack", "processed", "appealing"])
def organization(self, organization):
"""Get requests belonging to an organization's members."""
return (
self.select_related("agency__jurisdiction__parent__parent")
.filter(composer__organization=organization)
.order_by("-composer__datetime_submitted")
)
def select_related_view(self):
"""Select related models for viewing"""
return self.select_related(
"agency__jurisdiction__parent__parent",
"composer__user__profile",
"crowdfund",
)
def get_public_file_count(self, limit=None):
"""Annotate the public file count"""
if limit is not None:
foias = list(self[:limit])
else:
foias = list(self)
count_qs = (
self.model.objects.filter(id__in=[f.pk for f in foias])
.values_list("id")
.annotate(Count("communications__files"))
)
counts = dict(count_qs)
for foia in foias:
foia.public_file_count = counts.get(foia.pk, 0)
return foias
def get_featured(self, user):
"""Get featured requests"""
return (
self.get_viewable(user)
.filter(featured=True)
.select_related_view()
.get_public_file_count()
)
def get_processing_days(self):
"""Get the number of processing days"""
return (
self.filter(status="submitted")
.exclude(date_processing=None)
.aggregate(days=ExtractDay(Sum(date.today() - F("date_processing"))))[
"days"
]
)
def get_submitted_range(self, start, end):
"""Get requests submitted within a date range"""
return self.filter(composer__datetime_submitted__range=(start, end))
def get_today(self):
"""Get requests submitted today"""
midnight = time(tzinfo=timezone.get_current_timezone())
today_midnight = datetime.combine(date.today(), midnight)
return self.filter(composer__datetime_submitted__gte=today_midnight)
def exclude_org_users(self):
"""Exclude requests made by org users"""
return self.filter(composer__organization__individual=True)
def create_new(self, composer, agency, no_proxy, contact_info):
"""Create a new request and submit it"""
# pylint: disable=import-outside-toplevel
from muckrock.foia.message import notify_proxy_user
if composer.agencies.count() > 1:
title = "%s (%s)" % (composer.title, agency.name)
else:
title = composer.title
if agency.jurisdiction.days:
calendar = agency.jurisdiction.get_calendar()
date_due = calendar.business_days_from(
date.today(), agency.jurisdiction.days
)
else:
date_due = None
if no_proxy:
proxy_user = None
missing_proxy = False
else:
proxy_info = agency.get_proxy_info()
proxy_user = proxy_info.get("from_user")
missing_proxy = proxy_info["missing_proxy"]
foia = self.create(
status="submitted",
title=title,
slug=slugify(title),
agency=agency,
embargo=composer.embargo,
permanent_embargo=composer.permanent_embargo,
composer=composer,
date_due=date_due,
proxy=proxy_user,
missing_proxy=missing_proxy,
)
foia.tags.set(composer.tags.all())
foia.create_initial_communication(composer.user, proxy=proxy_user)
if proxy_user:
notify_proxy_user(foia)
foia.process_attachments(composer.user, composer=True)
foia.set_address(agency, appeal=False, contact_info=contact_info, clear=False)
def get_stale(self):
"""Get stale requests"""
# pylint: disable=import-outside-toplevel
from muckrock.foia.models import FOIACommunication
with_response = (
self.filter(
communications__response=False,
communications__datetime__gt=Subquery(
FOIACommunication.objects.filter(foia=OuterRef("pk"), response=True)
.order_by()
.values("foia")
.annotate(max=Max("datetime"))
.values("max")
),
)
.annotate(count=Count("communications"))
.filter(count__gt=STALE_REPLIES, status__in=["processed", "appealing"])
)
without_response = self.annotate(count=Count("communications")).filter(
status="ack", count__gt=STALE_REPLIES
)
return with_response.union(without_response)
class FOIAComposerQuerySet(models.QuerySet):
"""Custom Query Set for FOIA Composers"""
def get_viewable(self, user):
"""Return all composers viewable to the user"""
if user.is_staff:
return self.all()
if user.is_authenticated:
# you can view if
# * you are the owner
# * you are a read or edit collaborator on at least one foia
# * the request is public
# * not a draft
            # * at least one foia request is not embargoed
query = (
Q(user=user)
| Q(foias__read_collaborators=user)
| Q(foias__edit_collaborators=user)
| (~Q(status="started") & Q(foias__embargo=False))
)
# organizational users may also view requests from their org
# that are shared
query = query | Q(user__profile__org_share=True, organization__users=user)
return self.filter(query)
else:
# anonymous user, filter out drafts and embargoes
return self.exclude(status="started").filter(foias__embargo=False)
def get_or_create_draft(self, user, organization):
"""Return an existing blank draft or create one"""
draft = self.filter(
user=user,
organization=organization,
title="Untitled",
slug="untitled",
status="started",
agencies=None,
requested_docs="",
edited_boilerplate=False,
datetime_submitted=None,
embargo=False,
permanent_embargo=False,
parent=None,
tags=None,
num_monthly_requests=0,
num_reg_requests=0,
).first()
if draft:
draft.datetime_created = timezone.now()
draft.save()
return draft
else:
return self.create(user=user, organization=organization)
class FOIACommunicationQuerySet(PreloadFileQuerysetMixin, models.QuerySet):
"""Object manager for FOIA Communications"""
prefetch_fields = ("emails", "faxes", "mails", "web_comms", "portals")
def visible(self):
"""Hide hidden communications"""
return self.filter(hidden=False)
def preload_list(self):
"""Preload the relations required for displaying a list of communications"""
return self.prefetch_related(*self.prefetch_fields).preload_files()
def get_viewable(self, user):
"""Get all viewable FOIA communications for given user"""
# This is only used for filtering API view
if user.is_staff:
return self.all()
if user.is_authenticated:
# Requests are visible if you own them, have view or edit permissions,
# or if they are not embargoed
query = (
Q(foia__composer__user=user)
| Q(foia__in=user.edit_access.all())
| Q(foia__in=user.read_access.all())
| Q(foia__embargo=False)
)
# organizational users may also view requests from their org that are shared
query = query | Q(
foia__composer__user__profile__org_share=True,
foia__composer__organization__in=user.organizations.all(),
)
return self.filter(query)
else:
# anonymous user, filter out embargoes
return self.filter(foia__embargo=False)
class FOIAFileQuerySet(models.QuerySet):
"""Custom Queryset for FOIA Files"""
def preload(self, comm_ids, limit=11):
"""Preload the top limit files for the communications in comm_ids"""
file_qs = self.raw(
"""
SELECT rank_filter.* FROM (
SELECT foia_foiafile.*, ROW_NUMBER() OVER (
PARTITION BY comm_id ORDER BY foia_foiafile.datetime DESC
) FROM foia_foiafile
WHERE comm_id IN %s
) rank_filter WHERE ROW_NUMBER <= %s
""",
[tuple(comm_ids), limit],
).prefetch_related("comm__foia")
return {
comm_id: list(files)
for comm_id, files in groupby(file_qs, lambda f: f.comm_id)
}
def get_doccloud(self):
"""Return files which can be uploaded to DocumentCloud"""
is_doccloud = Q()
for ext in settings.DOCCLOUD_EXTENSIONS:
is_doccloud |= Q(ffile__iendswith=ext)
return self.filter(is_doccloud)
class FOIATemplateQuerySet(models.QuerySet):
"""Custom Queryset for FOIA Templates"""
def render(self, agencies, user, requested_docs, **kwargs):
"""Render the template language for the given agencies"""
if kwargs.get("split"):
requested_docs = "$split$"
if len(agencies) == 1:
template = self._render_single(agencies[0], user, requested_docs, **kwargs)
elif kwargs.get("jurisdiction"):
template = self._render_single(None, user, requested_docs, **kwargs)
else:
template = self._render_generic(user, requested_docs, **kwargs)
if kwargs.get("split"):
return template.split(requested_docs, 1)
return template
def _render_single(self, agency, user, requested_docs, **kwargs):
"""Render the template for a single agency"""
if kwargs.get("edited_boilerplate"):
# if they edited the boilerplate, make a temporary template
template = self.model(template=requested_docs)
else:
jurisdiction = kwargs.get(
"jurisdiction", agency.jurisdiction if agency else None
)
template = self.filter(jurisdiction=jurisdiction).order_by("pk").first()
if template is None:
template = self.filter(jurisdiction=None).order_by("pk").first()
return template.render(agency, user, requested_docs, **kwargs)
def _render_generic(self, user, requested_docs, **kwargs):
"""Render the template in a generic way, suitable for more than one agency"""
if kwargs.get("edited_boilerplate"):
# if they edited the boilerplate, make a temporary template
template = self.model(template=requested_docs)
else:
template = self.filter(jurisdiction=None).order_by("pk").first()
return template.render_generic(user, requested_docs, **kwargs)
class RawEmailQuerySet(models.QuerySet):
"""Custom query set for Raw Emails"""
def make(self, message_id):
"""Store a raw email fetched from mailgun's API
Launch celery task
"""
# pylint: disable=import-outside-toplevel
from muckrock.foia.tasks import fetch_raw_email
if message_id:
fetch_raw_email.delay(message_id)
def make_async(self, emails):
"""Store a raw email fetched from mailgun's API
Perform retrieval, called from celery task
All emails should have the same message ID
"""
if not emails:
return
message_id = emails[0].message_id
response = requests.get(
settings.MAILGUN_API_URL + "/events",
auth=("api", settings.MAILGUN_ACCESS_KEY),
params={"event": "stored", "message-id": message_id},
)
response.raise_for_status()
items = response.json()["items"]
if not items:
logger.info(
"Fetching raw emails: message_id: %s - items not found, will retry",
message_id,
)
raise ValueError
url = items[0]["storage"]["url"]
response = requests.get(
url,
auth=("api", settings.MAILGUN_ACCESS_KEY),
headers={"Accept": "message/rfc2822"},
)
response.raise_for_status()
logger.info(
"Fetching raw emails: message_id: %s - saving raw email", message_id
)
raw_email_content = response.json()["body-mime"]
for email in emails:
raw_email = self.create(email=email)
# set explicitly to store in S3 (raw_email is a property)
raw_email.raw_email = raw_email_content
raw_email.save()
| MuckRock/muckrock | muckrock/foia/querysets.py | Python | agpl-3.0 | 18,338 |
# tlseabra@github
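# Prints every number from 1 to n that cannot be bought exactly with McNugget
# packs of 6, 9 and 20. The lambda treats a remainder x as buyable when it is
# a multiple of 3 other than 3 itself (reachable with 6s and 9s alone) or
# greater than 43, the largest non-buyable number; the loop then strips packs
# of 20 from each candidate and prints it only if no remainder is buyable.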
def not_McNugget(n):
mcn = lambda x: x != 3 and (x > 43 or x % 3 == 0)
for i in range(1, n+1):
d = i
while i >= 0:
if mcn(i): break
i -= 20
else: print(d)
| FreddieV4/DailyProgrammerChallenges | Intermediate Challenges/Challenge 0023 Intermediate/solutions/solution.py | Python | mit | 230 |
# General information about the project.
project = u'Triangula'
copyright = u'2015, Tom Oinn'
author = u'Tom Oinn'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.3'
# The full version, including alpha/beta/rc tags.
release = '0.3.1'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.pngmath',
'sphinx.ext.viewcode',
'sphinx.ext.autosummary',
'sphinx.ext.intersphinx',
'sphinx.ext.graphviz',
'sphinx.ext.mathjax',
'sphinxcontrib.youtube'
]
# Configure graphviz to generate PNG and set up some default colours and graph styling. We were using SVGs here, but
# it seems that pythonhosted.org isn't setting their MIME type correctly and is therefore failing to display.
graphviz_output_format = 'png'
graphviz_dark_colour = '#343131'
graphviz_background_colour = 'linen'
graphviz_dot_args = ['-Gbgcolor=transparent', '-Nshape=rectangle', '-Nfontname=courier', '-Nfontsize=12', '-Nheight=0',
'-Nwidth=0', '-Nfillcolor={}'.format(graphviz_background_colour),
'-Ncolor={}'.format(graphviz_dark_colour), '-Nstyle=filled',
'-Nfontcolor={}'.format(graphviz_dark_colour), '-Efontcolor={}'.format(graphviz_dark_colour),
'-Ecolor={}'.format(graphviz_dark_colour)]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# Static HTML, used to override style sheets in themes
html_static_path = ['_static']
html_context = {
'css_files': [
'_static/theme_overrides.css'
],
}
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# Define skip rules to exclude some functions and other members from autodoc
def skip(app, what, name, obj, skip, options):
if name == "__init__":
return False
if name == "as_dict" or name == "from_dict":
return True
return skip
def setup(app):
app.connect("autodoc-skip-member", skip)
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Configures links into the main Python language docs
intersphinx_mapping = {'python': ('https://docs.python.org/2.7', None)}
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
html_extra_path = ['_html_extra/.htaccess']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
html_use_opensearch = 'https://pythonhosted.org/triangula'
# This is the file name suffix for HTML files (e.g. ".xhtml").
html_file_suffix = '.html'
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
html_search_language = 'en'
| BaseBot/Triangula | src/docs/conf.py | Python | apache-2.0 | 6,075 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-10-12 13:15
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
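# Adds a nullable "borrower" ForeignKey (to AUTH_USER_MODEL, SET_NULL on
# delete) to the catalog app's BookInstance model.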
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('catalog', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='bookinstance',
name='borrower',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
),
]
| chrisdavidmills/django_local_library | catalog/migrations/0002_bookinstance_borrower.py | Python | mpl-2.0 | 663 |
# coding=utf-8
from qgis.core import (
QgsCoordinateTransform,
QgsRectangle,
QgsGeometry,
QgsPoint,
QgsCoordinateReferenceSystem,
QGis)
from qgis.gui import QgsRubberBand # pylint: disable=no-name-in-module
# noinspection PyPackageRequirements
from PyQt4.QtCore import QSettings, Qt
from safe.definitions.styles import (
user_analysis_color,
next_analysis_color,
last_analysis_color,
user_analysis_width,
next_analysis_width,
last_analysis_width
)
from safe.utilities.settings import set_setting
__copyright__ = "Copyright 2016, The InaSAFE Project"
__license__ = "GPL version 3"
__email__ = "[email protected]"
__revision__ = '$Format:%H$'
class Extent(object):
"""Extent class to handle analysis extent.
Rubber bands and extents for showing analysis extent etc.
Note that rubber bands are transient but their associated extents are
persistent for the session.
Rubberbands are stored in the map canvas CRS.
"""
def __init__(self, iface):
"""Constructor."""
self._map_canvas = iface.mapCanvas()
# Last analysis extents
self._last_analysis_rubberband = None
self._last_analysis_extent = None # QgsGeometry
# The AOI of the next analysis.
self._next_analysis_rubberband = None
self._next_analysis_extent = None # QgsGeometry
# Rectangle defining the user's preferred extent for the analysis
self._user_analysis_rubberband = None
self._user_extent = None # QgsRectangle
# Whether to show rubber band of last and next scenario
self._show_rubber_bands = False
@property
def show_rubber_bands(self):
"""Return if we display rubberbands
:return: Boolean if we display rubberbands
:rtype: bool
"""
return self._show_rubber_bands
@show_rubber_bands.setter
def show_rubber_bands(self, display):
"""Setter if we need to display rubberbands.
:param display: The boolean.
:type display: bool
"""
self._show_rubber_bands = display
if self._show_rubber_bands:
self.display_last_analysis()
self.display_next_analysis()
self.display_user_extent()
else:
self.hide_last_analysis_extent()
self.hide_user_analysis_extent()
self.hide_next_analysis_extent()
@property
def crs(self):
"""Return the CRS of the map canvas.
:return: The map canvas CRS.
        :rtype: QgsCoordinateReferenceSystem
"""
return self._map_canvas.mapSettings().destinationCrs()
@property
def user_extent(self):
"""The user extent in the canvas CRS.
:return: The user extent.
:rtype: QgsGeometry
"""
return self._user_extent
def set_user_extent(self, extent, crs):
"""Setter for the user requested extent.
This function will redraw the rubberband if needed.
:param extent: The user extent.
:type extent: QgsGeometry
:param crs: The CRS of the extent.
:type crs: QgsCoordinateReferenceSystem
"""
extent = QgsGeometry(extent)
transform = QgsCoordinateTransform(crs, self.crs)
extent.transform(transform)
self._user_extent = extent
set_setting('user_extent', extent.exportToWkt())
set_setting('user_extent_crs', crs.authid())
if self._show_rubber_bands:
self.display_user_extent()
@property
def last_analysis_extent(self):
"""The last analysis extent in the canvas CRS.
:return: The last analysis extent.
:rtype: QgsGeometry
"""
return self._last_analysis_extent
def set_last_analysis_extent(self, extent, crs):
"""Setter for the last analysis extent.
This function will redraw the rubberband if needed.
:param extent: The last analysis extent.
:type extent: QgsGeometry
:param crs: The CRS of the extent.
:type crs: QgsCoordinateReferenceSystem
"""
extent = QgsGeometry(extent)
transform = QgsCoordinateTransform(crs, self.crs)
extent.transform(transform)
self._last_analysis_extent = extent
if self._show_rubber_bands:
self.display_last_analysis()
@property
def next_analysis_extent(self):
"""The next analysis extent in the canvas CRS.
:return: The next analysis extent.
:rtype: QgsGeometry
"""
return self._next_analysis_extent
def set_next_analysis_extent(self, extent, crs):
"""Setter for the next analysis extent.
This function will redraw the rubberband if needed.
:param extent: The next analysis extent.
:type extent: QgsGeometry
:param crs: The CRS of the extent.
:type crs: QgsCoordinateReferenceSystem
"""
extent = QgsGeometry(extent)
transform = QgsCoordinateTransform(crs, self.crs)
extent.transform(transform)
self._next_analysis_extent = extent
if self._show_rubber_bands:
self.display_next_analysis()
def _draw_rubberband(self, geometry, colour, width):
"""Draw a rubber band on the canvas.
        .. versionadded:: 2.2.0
:param geometry: Extent that the rubber band should be drawn for.
:type geometry: QgsGeometry
:param colour: Colour for the rubber band.
:type colour: QColor
:param width: The width for the rubber band pen stroke.
:type width: int
:returns: Rubber band that should be set to the extent.
:rtype: QgsRubberBand
"""
# noinspection PyArgumentList
rubber_band = QgsRubberBand(
self._map_canvas, geometryType=QGis.Polygon)
rubber_band.setBrushStyle(Qt.NoBrush)
rubber_band.setColor(colour)
rubber_band.setWidth(width)
rubber_band.setToGeometry(geometry, None)
return rubber_band
def display_user_extent(self):
"""Display the user extent."""
self.hide_user_analysis_extent()
if self._user_extent:
self._user_analysis_rubberband = self._draw_rubberband(
self._user_extent, user_analysis_color, user_analysis_width)
def display_next_analysis(self):
"""Display the next analysis extent."""
self.hide_next_analysis_extent()
if self._next_analysis_extent:
self._next_analysis_rubberband = self._draw_rubberband(
self._next_analysis_extent,
next_analysis_color,
next_analysis_width)
def display_last_analysis(self):
"""Display the next analysis extent."""
self.hide_last_analysis_extent()
if self._last_analysis_extent:
self._last_analysis_rubberband = self._draw_rubberband(
self._last_analysis_extent,
last_analysis_color,
last_analysis_width)
def clear_user_analysis_extent(self):
"""Slot called when the users clears the analysis extents."""
self.hide_user_analysis_extent()
self._user_extent = None
def clear_next_analysis_extent(self):
"""Slot called when the users clears the analysis extents."""
self.hide_next_analysis_extent()
self._next_analysis_extent = None
def hide_user_analysis_extent(self):
"""Hide the rubber band showing extent of the next analysis.
.. versionadded: 2.2.0
"""
if self._user_analysis_rubberband is not None:
self._user_analysis_rubberband.reset(QGis.Polygon)
self._user_analysis_rubberband = None
def hide_next_analysis_extent(self):
"""Hide the rubber band showing extent of the next analysis.
.. versionadded:: 2.1.0
"""
if self._next_analysis_rubberband is not None:
self._next_analysis_rubberband.reset(QGis.Polygon)
self._next_analysis_rubberband = None
def hide_last_analysis_extent(self):
"""Clear extent rubber band if any.
This method can safely be called even if there is no rubber band set.
.. versionadded:: 2.1.0
"""
if self._last_analysis_rubberband is not None:
self._last_analysis_rubberband.reset(QGis.Polygon)
self._last_analysis_rubberband = None
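# Illustrative usage from a QGIS plugin (the iface object and the coordinates
# below are placeholders, not provided by this module):
#
#   extent = Extent(iface)
#   extent.show_rubber_bands = True
#   extent.set_user_extent(
#       QgsGeometry.fromRect(QgsRectangle(106.7, -6.3, 106.9, -6.1)),
#       QgsCoordinateReferenceSystem('EPSG:4326'))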
| Gustry/inasafe | safe/utilities/extent.py | Python | gpl-3.0 | 8,459 |
# encoding: utf-8
from __future__ import print_function
import argparse
import os
from os.path import splitext
import platform
import traceback
import sys
import warnings
# this is needed for vitables and needs to happen BEFORE
# matplotlib.use('Qt4Agg')
# (which imports PyQt)
try:
import sip
sip.setapi('QString', 2)
sip.setapi('QVariant', 2)
del sip
except ImportError:
pass
import yaml
import config
from console import Console
from context import EvaluationContext
from data import entities_from_h5, H5Source
from importer import csv2h5
from simulation import Simulation
from upgrade import upgrade
from utils import AutoFlushFile
from view import viewhdf
from version import __version__
def write_traceback(ex_type, e, tb):
try:
# output_directory might not be set at this point yet and it is
# only set for run and explore commands but when it is not set
# its default value of "." is used and thus we get the "old"
# behaviour: error.log in the working directory
out_dir = config.output_directory
error_path = os.path.join(out_dir, 'error.log')
error_path = os.path.abspath(error_path)
with file(error_path, 'w') as f:
traceback.print_exception(ex_type, e, tb, file=f)
if hasattr(e, 'liam2context'):
f.write(e.liam2context)
return error_path
except IOError, log_ex:
print("WARNING: could not save technical error log "
"({} on '{}')".format(log_ex.strerror, log_ex.filename))
except Exception, log_ex:
print(log_ex)
return None
def printerr(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
def print_exception_wh_context(ex_type, e, tb):
traceback.print_exception(ex_type, e, tb, file=sys.stderr)
if hasattr(e, 'liam2context'):
printerr(e.liam2context)
def print_exception_simplified(ex_type, e, tb):
# e.context | while parsing a block mapping
# e.context_mark | in "import.yml", line 18, column 9
# e.problem | expected <block end>, but found '<block sequence start>'
# e.problem_mark | in "import.yml", line 29, column 12
error_log_path = write_traceback(ex_type, e, tb)
if isinstance(e, yaml.parser.ParserError):
# eg, inconsistent spacing, no space after a - in a list, ...
printerr("SYNTAX ERROR {}".format(str(e.problem_mark).strip()))
elif isinstance(e, yaml.scanner.ScannerError):
# eg, tabs, missing colon for mapping. The reported problem is
# different when it happens on the first line (no context_mark) and
# when it happens on a subsequent line.
if e.context_mark is not None:
msg = e.problem if e.problem != "could not found expected ':'" \
else "could not find expected ':'"
mark = e.context_mark
else:
if (e.problem ==
"found character '\\t' that cannot start any token"):
msg = "found a TAB character instead of spaces"
else:
msg = ""
mark = e.problem_mark
if msg:
msg = ": " + msg
printerr("SYNTAX ERROR {}{}".format(str(mark).strip(), msg))
elif isinstance(e, yaml.reader.ReaderError):
if e.encoding == 'utf8':
printerr("\nERROR in '{}': invalid character found, this probably "
"means you have used non ASCII characters (accents and "
"other non-english characters) and did not save your file "
"using the UTF8 encoding".format(e.name))
else:
printerr("\nERROR:", str(e))
elif isinstance(e, SyntaxError):
printerr("SYNTAX ERROR:", e.msg.replace('EOF', 'end of block'))
if e.text is not None:
printerr(e.text)
offset_str = ' ' * (e.offset - 1) if e.offset > 0 else ''
printerr(offset_str + '^')
else:
printerr("\nERROR:", str(e))
if hasattr(e, 'liam2context'):
printerr(e.liam2context)
if error_log_path is not None:
printerr()
printerr("the technical error log can be found at", error_log_path)
def simulate(args):
print("Using simulation file: '{}'".format(args.fpath))
simulation = Simulation.from_yaml(args.fpath,
input_dir=args.input_path,
input_file=args.input_file,
output_dir=args.output_path,
output_file=args.output_file,
start_period=args.startperiod,
periods=args.periods, seed=args.seed,
skip_shows=args.skipshows,
skip_timings=args.skiptimings,
log_level=args.loglevel,
assertions=args.assertions,
autodump=args.autodump,
autodiff=args.autodiff)
simulation.run(args.interactive)
# import cProfile as profile
# profile.runctx('simulation.run(args.interactive)', vars(), {},
# 'c:\\tmp\\simulation.profile')
# to use profiling data:
# import pstats
# p = pstats.Stats('c:\\tmp\\simulation.profile')
# p.strip_dirs().sort_stats('cum').print_stats(30)
def explore(fpath):
_, ext = splitext(fpath)
ftype = 'data' if ext in ('.h5', '.hdf5') else 'simulation'
print("Using {} file: '{}'".format(ftype, fpath))
if ftype == 'data':
globals_def, entities = entities_from_h5(fpath)
simulation = Simulation(globals_def, None, None, None, None,
entities.values(), 'h5', fpath, None)
period, entity_name = None, None
else:
simulation = Simulation.from_yaml(fpath)
# use output as input
simulation.data_source = H5Source(simulation.data_sink.output_path)
period = simulation.start_period + simulation.periods - 1
entity_name = simulation.default_entity
dataset = simulation.load()
data_source = simulation.data_source
data_source.as_fake_output(dataset, simulation.entities_map)
data_sink = simulation.data_sink
entities = simulation.entities_map
if entity_name is None and len(entities) == 1:
entity_name = entities.keys()[0]
if period is None and entity_name is not None:
entity = entities[entity_name]
period = max(entity.output_index.keys())
eval_ctx = EvaluationContext(simulation, entities, dataset['globals'],
period, entity_name)
try:
c = Console(eval_ctx)
c.run()
finally:
data_source.close()
if data_sink is not None:
data_sink.close()
def display(fpath):
print("Launching ViTables...")
if fpath:
_, ext = splitext(fpath)
if ext in ('.h5', '.hdf5'):
files = [fpath]
else:
simulation = Simulation.from_yaml(fpath)
files = [simulation.data_source.input_path,
simulation.data_sink.output_path]
print("Trying to open:", " and ".join(str(f) for f in files))
else:
files = []
viewhdf(files)
class PrintVersionsAction(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
import numpy
import numexpr
import tables
try:
from cpartition import filter_to_indices
del filter_to_indices
cext = True
except ImportError:
cext = False
print("C extensions are" + (" NOT" if not cext else "") + " available")
# optional dependencies
try:
import vitables
vt_version = vitables.__version__
del vitables
except ImportError:
vt_version = 'N/A'
try:
import matplotlib
mpl_version = matplotlib.__version__
del matplotlib
except ImportError:
mpl_version = 'N/A'
try:
import bcolz
bcolz_version = bcolz.__version__
del bcolz
except ImportError:
bcolz_version = 'N/A'
py_version = '{} ({})'.format(platform.python_version(),
platform.architecture()[0])
print("""
python {py}
numpy {np}
numexpr {ne}
pytables {pt}
bcolz {bc}
pyyaml {yml}
vitables {vt}
matplotlib {mpl}
""".format(py=py_version, np=numpy.__version__, ne=numexpr.__version__,
pt=tables.__version__, vt=vt_version, mpl=mpl_version,
bc=bcolz_version, yml=yaml.__version__))
parser.exit()
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--versions', action=PrintVersionsAction, nargs=0,
help="display versions of dependencies")
parser.add_argument('--debug', action='store_true', default=False,
help="run in debug mode")
parser.add_argument('--input-path', dest='input_path',
help='override the input path')
parser.add_argument('--input-file', dest='input_file',
help='override the input file')
parser.add_argument('--output-path', dest='output_path',
help='override the output path')
parser.add_argument('--output-file', dest='output_file',
help='override the output file')
subparsers = parser.add_subparsers(dest='action')
# create the parser for the "run" command
parser_run = subparsers.add_parser('run', help='run a simulation')
parser_run.add_argument('fpath', help='simulation file')
parser_run.add_argument('-i', '--interactive', action='store_true',
help='show the interactive console after the '
'simulation')
parser_run.add_argument('-sp', '--startperiod', type=int,
help='first period to simulate (integer)')
parser_run.add_argument('-p', '--periods', type=int,
help='number of periods to simulate (integer)')
parser_run.add_argument('-s', '--seed', type=int,
help='defines the starting point of the '
'pseudo-random generator (integer)')
# action='store_const', const=True is NOT equivalent to action='store_true'
# (if the flag is not used, the result will be None vs False)
parser_run.add_argument('-ss', '--skipshows', action='store_const', const=True,
help='do not log shows')
parser_run.add_argument('-st', '--skiptimings', action='store_const', const=True,
help='do not log timings')
parser_run.add_argument('-ll', '--loglevel',
choices=['periods', 'functions', 'processes'],
help='defines the logging level')
parser_run.add_argument('--autodump', help='path of the autodump file')
parser_run.add_argument('--autodiff', help='path of the autodiff file')
parser_run.add_argument('--assertions', choices=['raise', 'warn', 'skip'],
help='determines behavior of assertions')
# create the parser for the "import" command
parser_import = subparsers.add_parser('import', help='import data')
parser_import.add_argument('file', help='import file')
# create the parser for the "explore" command
parser_explore = subparsers.add_parser('explore', help='explore data of a '
'past simulation')
parser_explore.add_argument('file', help='explore file')
# create the parser for the "upgrade" command
parser_upgrade = subparsers.add_parser('upgrade',
help='upgrade a simulation file to '
'the latest syntax')
parser_upgrade.add_argument('input', help='input simulation file')
out_help = "output simulation file. If missing, the original file will " \
"be backed up (to filename.bak) and the upgrade will be " \
"done in-place."
parser_upgrade.add_argument('output', help=out_help, nargs='?')
# create the parser for the "view" command
parser_import = subparsers.add_parser('view', help='view data')
parser_import.add_argument('file', nargs='?',
help='data (.h5) or simulation (.yml) file')
parsed_args = parser.parse_args()
if parsed_args.debug:
config.debug = True
# this can happen via the environment variable too!
if config.debug:
# by default, DeprecationWarning and PendingDeprecationWarning, and
# ImportWarning are ignored, this shows them.
warnings.simplefilter('default')
sys.excepthook = print_exception_wh_context
else:
sys.excepthook = print_exception_simplified
action = parsed_args.action
if action == 'run':
func, args = simulate, (parsed_args,)
elif action == "import":
func, args = csv2h5, (parsed_args.file,)
elif action == "explore":
func, args = explore, (parsed_args.file,)
elif action == "upgrade":
func, args = upgrade, (parsed_args.input, parsed_args.output)
elif action == "view":
func, args = display, (parsed_args.file,)
else:
raise ValueError("invalid action: {}".format(action))
return func(*args)
if __name__ == '__main__':
sys.stdout = AutoFlushFile(sys.stdout)
sys.stderr = AutoFlushFile(sys.stderr)
print("LIAM2 {} ({})".format(__version__[:-2] if __version__.endswith('.0') else __version__, platform.architecture()[0]))
print()
main()
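# Typical invocations (the file names below are placeholders, not shipped
# examples):
#
#   python main.py import import_description.yml
#   python main.py run model.yml --interactive
#   python main.py explore output/simulation.h5
#   python main.py upgrade old_model.yml new_model.yml
#   python main.py view output/simulation.h5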
| benjello/liam2 | liam2/main.py | Python | gpl-3.0 | 14,344 |
import unittest
from infi.clickhouse_orm import F
from .base_test_with_data import *
from time import sleep
class MutationsTestCase(TestCaseWithData):
def setUp(self):
super().setUp()
if self.database.server_version < (18,):
raise unittest.SkipTest('ClickHouse version too old')
self._insert_all()
def _wait_for_mutations(self):
sql = 'SELECT * FROM system.mutations WHERE is_done = 0'
while list(self.database.raw(sql)):
sleep(0.25)
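    # ClickHouse applies ALTER TABLE ... DELETE/UPDATE mutations
    # asynchronously, so the helper above polls system.mutations until every
    # mutation reports is_done before the tests make assertions on the data.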
def test_delete_all(self):
Person.objects_in(self.database).delete()
self._wait_for_mutations()
self.assertFalse(Person.objects_in(self.database))
def test_delete_with_where_cond(self):
cond = Person.first_name == 'Cassady'
self.assertTrue(Person.objects_in(self.database).filter(cond))
Person.objects_in(self.database).filter(cond).delete()
self._wait_for_mutations()
self.assertFalse(Person.objects_in(self.database).filter(cond))
self.assertTrue(Person.objects_in(self.database).exclude(cond))
def test_delete_with_prewhere_cond(self):
cond = F.toYear(Person.birthday) == 1977
self.assertTrue(Person.objects_in(self.database).filter(cond))
Person.objects_in(self.database).filter(cond, prewhere=True).delete()
self._wait_for_mutations()
self.assertFalse(Person.objects_in(self.database).filter(cond))
self.assertTrue(Person.objects_in(self.database).exclude(cond))
def test_update_all(self):
Person.objects_in(self.database).update(height=0)
self._wait_for_mutations()
for p in Person.objects_in(self.database): print(p.height)
self.assertFalse(Person.objects_in(self.database).exclude(height=0))
def test_update_with_where_cond(self):
cond = Person.first_name == 'Cassady'
Person.objects_in(self.database).filter(cond).update(height=0)
self._wait_for_mutations()
self.assertFalse(Person.objects_in(self.database).filter(cond).exclude(height=0))
def test_update_with_prewhere_cond(self):
cond = F.toYear(Person.birthday) == 1977
Person.objects_in(self.database).filter(cond, prewhere=True).update(height=0)
self._wait_for_mutations()
self.assertFalse(Person.objects_in(self.database).filter(cond).exclude(height=0))
def test_update_multiple_fields(self):
Person.objects_in(self.database).update(height=0, passport=None)
self._wait_for_mutations()
self.assertFalse(Person.objects_in(self.database).exclude(height=0))
self.assertFalse(Person.objects_in(self.database).exclude(passport=None))
def test_chained_update(self):
Person.objects_in(self.database).update(height=F.rand()).update(passport=99999)
self._wait_for_mutations()
self.assertFalse(Person.objects_in(self.database).exclude(passport=99999))
def test_invalid_state_for_mutations(self):
base_query = Person.objects_in(self.database)
queries = [
base_query[0:1],
base_query.limit_by(5, 'first_name'),
base_query.distinct(),
base_query.aggregate('first_name', count=F.count())
]
for query in queries:
print(query)
with self.assertRaises(AssertionError):
query.delete()
with self.assertRaises(AssertionError):
query.update(height=1.8)
def test_missing_fields_for_update(self):
with self.assertRaises(AssertionError):
Person.objects_in(self.database).update()
| Infinidat/infi.clickhouse_orm | tests/test_mutations.py | Python | bsd-3-clause | 3,619 |
# Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from boto.mashups.interactive import interactive_shell
import boto
import os
import time
import shutil
import StringIO
import paramiko
import socket
import subprocess
class SSHClient(object):
def __init__(self, server,
host_key_file='~/.ssh/known_hosts',
uname='root', timeout=None, ssh_pwd=None):
self.server = server
self.host_key_file = host_key_file
self.uname = uname
self._timeout = timeout
self._pkey = paramiko.RSAKey.from_private_key_file(server.ssh_key_file,
password=ssh_pwd)
self._ssh_client = paramiko.SSHClient()
self._ssh_client.load_system_host_keys()
self._ssh_client.load_host_keys(os.path.expanduser(host_key_file))
self._ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.connect()
def connect(self, num_retries=5):
retry = 0
while retry < num_retries:
try:
self._ssh_client.connect(self.server.hostname,
username=self.uname,
pkey=self._pkey,
timeout=self._timeout)
return
except socket.error, (value, message):
if value in (51, 61, 111):
print 'SSH Connection refused, will retry in 5 seconds'
time.sleep(5)
retry += 1
else:
raise
except paramiko.BadHostKeyException:
print "%s has an entry in ~/.ssh/known_hosts and it doesn't match" % self.server.hostname
print 'Edit that file to remove the entry and then hit return to try again'
raw_input('Hit Enter when ready')
retry += 1
except EOFError:
print 'Unexpected Error from SSH Connection, retry in 5 seconds'
time.sleep(5)
retry += 1
print 'Could not establish SSH connection'
def open_sftp(self):
return self._ssh_client.open_sftp()
def get_file(self, src, dst):
sftp_client = self.open_sftp()
sftp_client.get(src, dst)
def put_file(self, src, dst):
sftp_client = self.open_sftp()
sftp_client.put(src, dst)
def open(self, filename, mode='r', bufsize=-1):
"""
Open a file on the remote system and return a file-like object.
"""
sftp_client = self.open_sftp()
return sftp_client.open(filename, mode, bufsize)
def listdir(self, path):
sftp_client = self.open_sftp()
return sftp_client.listdir(path)
def isdir(self, path):
status = self.run('[ -d %s ] || echo "FALSE"' % path)
if status[1].startswith('FALSE'):
return 0
return 1
def exists(self, path):
status = self.run('[ -a %s ] || echo "FALSE"' % path)
if status[1].startswith('FALSE'):
return 0
return 1
def shell(self):
"""
Start an interactive shell session on the remote host.
"""
channel = self._ssh_client.invoke_shell()
interactive_shell(channel)
def run(self, command):
"""
Execute a command on the remote host. Return a tuple containing
an integer status and two strings, the first containing stdout
and the second containing stderr from the command.
"""
boto.log.debug('running:%s on %s' % (command, self.server.instance_id))
        status = 0
        std_out = ''
        std_err = ''
        try:
            t = self._ssh_client.exec_command(command)
            std_out = t[1].read()
            std_err = t[2].read()
            t[0].close()
            t[1].close()
            t[2].close()
        except paramiko.SSHException:
            status = 1
boto.log.debug('stdout: %s' % std_out)
boto.log.debug('stderr: %s' % std_err)
return (status, std_out, std_err)
def run_pty(self, command):
"""
Execute a command on the remote host with a pseudo-terminal.
Returns a string containing the output of the command.
"""
boto.log.debug('running:%s on %s' % (command, self.server.instance_id))
channel = self._ssh_client.get_transport().open_session()
channel.get_pty()
channel.exec_command(command)
return channel
def close(self):
transport = self._ssh_client.get_transport()
transport.close()
self.server.reset_cmdshell()
class LocalClient(object):
def __init__(self, server, host_key_file=None, uname='root'):
self.server = server
self.host_key_file = host_key_file
self.uname = uname
def get_file(self, src, dst):
shutil.copyfile(src, dst)
def put_file(self, src, dst):
shutil.copyfile(src, dst)
def listdir(self, path):
return os.listdir(path)
def isdir(self, path):
return os.path.isdir(path)
def exists(self, path):
return os.path.exists(path)
def shell(self):
raise NotImplementedError('shell not supported with LocalClient')
    def run(self, command):
        boto.log.info('running:%s' % command)
        log_fp = StringIO.StringIO()
        process = subprocess.Popen(command, shell=True, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
while process.poll() is None:
time.sleep(1)
t = process.communicate()
log_fp.write(t[0])
log_fp.write(t[1])
boto.log.info(log_fp.getvalue())
boto.log.info('output: %s' % log_fp.getvalue())
return (process.returncode, log_fp.getvalue())
def close(self):
pass
class FakeServer(object):
"""
    A little class to fake out SSHClient (which is expecting a
    :class:`boto.manage.server.Server` instance). This allows us
    to create an SSHClient directly from an EC2 instance object.
"""
def __init__(self, instance, ssh_key_file):
self.instance = instance
self.ssh_key_file = ssh_key_file
self.hostname = instance.dns_name
self.instance_id = self.instance.id
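# start() below picks the right command shell for a server: when the locally
# configured instance-id matches the server's, commands run through
# LocalClient; otherwise an SSH connection is opened with SSHClient.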
def start(server):
instance_id = boto.config.get('Instance', 'instance-id', None)
if instance_id == server.instance_id:
return LocalClient(server)
else:
return SSHClient(server)
def sshclient_from_instance(instance, ssh_key_file,
host_key_file='~/.ssh/known_hosts',
user_name='root', ssh_pwd=None):
"""
Create and return an SSHClient object given an
instance object.
:type instance: :class`boto.ec2.instance.Instance` object
:param instance: The instance object.
:type ssh_key_file: str
:param ssh_key_file: A path to the private key file used
to log into instance.
:type host_key_file: str
:param host_key_file: A path to the known_hosts file used
by the SSH client.
Defaults to ~/.ssh/known_hosts
:type user_name: str
:param user_name: The username to use when logging into
the instance. Defaults to root.
:type ssh_pwd: str
:param ssh_pwd: The passphrase, if any, associated with
private key.
"""
s = FakeServer(instance, ssh_key_file)
return SSHClient(s, host_key_file, user_name, ssh_pwd)
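# Illustrative usage (the region, instance id and key path are placeholders):
#
#   import boto.ec2
#   conn = boto.ec2.connect_to_region('us-east-1')
#   instance = conn.get_only_instances(['i-12345678'])[0]
#   ssh = sshclient_from_instance(instance, '/path/to/mykey.pem',
#                                 user_name='ec2-user')
#   status, stdout, stderr = ssh.run('uptime')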
| harshilasu/LinkurApp | y/google-cloud-sdk/platform/gsutil/third_party/boto/boto/manage/cmdshell.py | Python | gpl-3.0 | 8,585 |
"""Tests"""
import pytest
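# These tests rely on fixtures from the project's conftest.py: poff/pon appear
# to be builder instances with automatic spacing disabled/enabled, attr builds
# attribute strings, pdoc/doctypes cover doctype declarations, and read_file
# loads expected output from disk. (Inferred from usage; not defined here.)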
class TestDunder():
def test_contains(self, poff):
assert "a" not in poff
poff.append("a")
assert "a" in poff
def test_eq_repr_str(self, poff):
assert repr(poff) == str(poff) == poff == ""
poff.append("a")
assert repr(poff) == str(poff) == poff == "a"
def test_len(self, poff):
assert not poff
poff.append("a")
assert len(poff) == 1
class TestAttribute():
bad_in = [None, True, 1, [], {}, (), ("a",), ("a", "b", "c")] # non-strings and len(tuple) != 2
good_in = [["a"], ["a", "b"], [("a", 1)], [("a", 1), ("b", "2")], ["a", ("b", 2)], [("a", 1), "b"]]
good_out = ["a", "a b", 'a="1"', 'a="1" b="2"', 'a b="2"', 'a="1" b']
def test_empty_input(self, attr):
assert attr() == ""
@pytest.mark.parametrize("test_input, expected", list(zip(good_in, good_out)))
def test_good_input(self, attr, test_input, expected):
assert attr(*test_input) == expected
@pytest.mark.parametrize("bad", bad_in)
def test_bad_input(self, attr, bad):
with pytest.raises(ValueError):
attr(bad)
class TestAppend():
@pytest.mark.parametrize("repeat", range(4))
def test_with_newline(self, pon, repeat):
for _ in range(repeat):
pon.append("a")
assert pon == "a\n" * repeat
@pytest.mark.parametrize("repeat", range(4))
def test_without_newline(self, poff, repeat):
for _ in range(repeat):
poff.append("a")
assert poff == "a" * repeat
@pytest.mark.parametrize("bad", [None, True, 1])
def test_non_string_type(self, poff, bad):
with pytest.raises(ValueError):
poff.append(bad)
class TestIndent():
@pytest.mark.parametrize("spaces", range(4))
@pytest.mark.parametrize("depth", range(4))
def test_depth(self, poff, spaces, depth):
poff.depth = depth
poff.spaces = spaces
assert poff.indent() == " " * poff.spaces * poff.depth
class TestNewline():
@pytest.mark.parametrize("depth", range(2))
def test_depth_does_not_indent(self, poff, depth):
poff.depth = depth
poff.newline().newline()
assert poff == "\n\n"
poff.newline().newline()
assert " " not in poff
class TestDocType():
def test_doctype_declaration(self, doctypes, pdoc):
for key, value in doctypes.items():
assert value == pdoc(key)
def test_unknown_doctype(self, pdoc):
with pytest.raises(ValueError):
pdoc("unknown")
class TestVoidWrap():
def test_without_attribute(self, poff):
poff.vwrap("br")
assert poff == "<br>"
def test_with_attribute(self, attr, poff):
poff.vwrap("img", attr("abc", ("x", 1), ("y", 2), ("z", 3)))
assert poff == '<img abc x="1" y="2" z="3">'
@pytest.mark.parametrize("bad", ["a", "b", "c"])
def test_non_void_element(self, poff, bad):
with pytest.raises(ValueError):
poff.vwrap(bad)
def test_wrap_void_element(self, poff):
with pytest.raises(ValueError):
with poff.wrap("br"): pass
class TestWrapAutoSpacingOff():
def test_single(self, poff):
with poff.wrap("a"):
poff.append("1")
assert poff == "<a>1</a>"
def test_nested(self, poff):
with poff.wrap("a"), poff.wrap("b"):
poff.append("1 2")
assert poff == "<a><b>1 2</b></a>"
def test_double_nested(self, poff):
with poff.wrap("a"), poff.wrap("b"), poff.wrap("c"):
poff.append("1 2 3")
assert poff == "<a><b><c>1 2 3</c></b></a>"
def test_sibling(self, poff):
with poff.wrap("a"):
poff.append("1")
with poff.wrap("b"):
poff.append("2")
assert poff == "<a>1</a><b>2</b>"
def test_nested_sibling(self, poff):
with poff.wrap("a"):
with poff.wrap("b"):
poff.append("2")
with poff.wrap("c"):
poff.append("3")
assert poff == "<a><b>2</b><c>3</c></a>"
class TestWrapAutoSpacingOn():
def test_single(self, pon, read_file):
with pon.wrap("a"):
pon.append("1")
assert pon == read_file("expected_single.txt")
def test_nested(self, pon, read_file):
with pon.wrap("a"), pon.wrap("b"):
pon.append("1 2")
assert pon == read_file("expected_nested.txt")
def test_double_nested(self, pon, read_file):
with pon.wrap("a"), pon.wrap("b"), pon.wrap("c"):
pon.append("1 2 3")
assert pon == read_file("expected_double_nested.txt")
def test_sibling(self, pon, read_file):
with pon.wrap("a"):
pon.append("1")
with pon.wrap("b"):
pon.append("2")
assert pon == read_file("expected_sibling.txt")
def test_nested_sibling(self, pon, read_file):
with pon.wrap("a"):
with pon.wrap("b"):
pon.append("2")
with pon.wrap("c"):
pon.append("3")
assert pon == read_file("expected_nested_sibling.txt")
class TestWrapAutoSpacingMixed():
def test_nested_oneline(self, pon, read_file):
with pon.wrap("a"), pon.manual_spacing():
pon.indent()
with pon.wrap("b"):
pon.append("2")
pon.newline()
assert pon == read_file("expected_nested_oneline.txt")
def test_nested_oneline_sibling(self, pon, read_file):
with pon.wrap("a"), pon.manual_spacing():
pon.indent()
with pon.wrap("b"):
pon.append("2")
pon.newline().indent()
with pon.wrap("c"):
pon.append("3")
pon.newline()
assert pon == read_file("expected_nested_oneline_sibling.txt")
def test_complex(self, pon, read_file):
with pon.wrap("a"):
pon.append("1")
pon.newline()
with pon.wrap("b"):
pon.append("2")
pon.newline()
with pon.manual_spacing():
pon.indent()
with pon.wrap("c"):
pon.append("3")
pon.newline().indent()
with pon.wrap("d"):
pon.append("4")
pon.newline().newline()
with pon.wrap("e"):
pon.append("5")
pon.newline()
with pon.manual_spacing():
pon.indent()
with pon.wrap("f"):
pon.append("6")
pon.newline().newline()
with pon.wrap("g"):
with pon.wrap("h"):
pon.append("8")
with pon.manual_spacing():
pon.indent()
with pon.wrap("i"):
pon.append("9")
with pon.wrap("j"):
pon.append("10")
pon.newline().newline().indent()
with pon.wrap("k"), pon.wrap("l"):
pon.append("11 12")
pon.newline()
pon.append("7")
assert pon == read_file("expected_complex.txt")
| chingc/pxml | tests/test_pyhtml.py | Python | bsd-2-clause | 7,297 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals # unicode by default
import sys
import datetime
from collections import OrderedDict
import pandoc
#import bib
from flask import Flask
from flask import render_template, redirect, url_for
from flaskext.babel import Babel
from flask_flatpages import FlatPages
from flask_frozen import Freezer
# TODO:
# * Get babel locale from request path
# Create the Flask app
app = Flask(__name__)
# Load settings
app.config.from_pyfile('settings/common.py')
app.config.from_pyfile('settings/local_settings.py', silent=True)
if len(sys.argv) > 2:
extra_conf = sys.argv[2]
app.config.from_pyfile('settings/{}_settings.py'.format(extra_conf), silent=True)
# Add the babel extension
babel = Babel(app)
# Add the FlatPages extension
pages = FlatPages(app)
# Add the Frozen extension
freezer = Freezer(app)
#
# Utils
#
# Frozen url generators
@freezer.register_generator
def default_locale_urls():
    ''' Generates the urls for the default locale without prefix. '''
for page in pages:
yield '/{}/'.format(remove_l10n_prefix(page.path))
@freezer.register_generator
def page_urls():
    ''' Generates the urls with locale prefix. '''
for page in pages:
yield '/{}/'.format(page.path)
# l10n helpers
def has_l10n_prefix(path):
    ''' Verifies if the path has a localization prefix. '''
    return any(path.startswith(l)
               for l in app.config.get('AVAILABLE_LOCALES', []))
def add_l10n_prefix(path, locale=app.config.get('DEFAULT_LOCALE')):
    ''' Add localization prefix if necessary. '''
return path if has_l10n_prefix(path) else '{}/{}'.format(locale, path)
def remove_l10n_prefix(path, locale=app.config.get('DEFAULT_LOCALE')):
''' Remove specific localization prefix. '''
return path if not path.startswith(locale) else path[(len(locale) + 1):]
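# Example of the helpers above (a sketch, assuming DEFAULT_LOCALE is 'en' and
# 'pt' is listed in AVAILABLE_LOCALES):
#   add_l10n_prefix('blog/post')        -> 'en/blog/post'
#   add_l10n_prefix('pt/blog/post')     -> 'pt/blog/post'  (already prefixed)
#   remove_l10n_prefix('en/blog/post')  -> 'blog/post'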
# Make remove_l10n_prefix accessible to Jinja
app.jinja_env.globals.update(remove_l10n_prefix=remove_l10n_prefix)
# Structure helpers
def render_markdown(text):
''' Render Markdown text to HTML. '''
doc = pandoc.Document()
# doc.bib(app.config.get('BIB_FILE', 'static/papers.bib'))
doc.markdown = text.encode('utf8')
return unicode(doc.html, 'utf8')
app.config['FLATPAGES_HTML_RENDERER'] = render_markdown
#
# Routes
#
@app.route('/')
def root():
''' Main page '''
# Get the page
#path = 'Main'
#page = pages.get_or_404(add_l10n_prefix(path))
posts = [p for p in pages if 'blog' in p.path ]
latest = sorted(posts, reverse=True, key=lambda p: p.meta['post'])
last_post = latest[0]
bloco1 = pages.get(add_l10n_prefix('blocos/bloco1'))
bloco2 = pages.get(add_l10n_prefix('blocos/bloco2'))
bloco3 = pages.get(add_l10n_prefix('blocos/bloco3'))
today = datetime.datetime.now().strftime("%B %dth %Y")
return render_template('root.html', today=today,
#page=page,
last_post=last_post,
bloco1=bloco1,
bloco2=bloco2,
bloco3=bloco3,
pages=pages)
@app.route('/blog/')
def blog():
posts = [p for p in pages if 'blog' in p.path ]
latest = sorted(posts, reverse=True, key=lambda p: p.meta['post'])
return render_template('blog.html', posts=latest)
@app.route('/<path:path>/')
def page(path):
''' All pages from markdown files '''
# Get the page
page = pages.get_or_404(add_l10n_prefix(path))
# Get custom template
template = page.meta.get('template', 'page.html')
    # Check whether a redirect is needed
redirect_ = page.meta.get('redirect', None)
if redirect_:
return redirect(url_for('page', path=redirect_))
# if path == 'Papers' or path == add_l10n_prefix('Papers'):
# b = get_papers()
# return render_template(template, page=page, pages=pages, bib=b)
today = datetime.datetime.now().strftime("%B %dth %Y")
# Render the page
return render_template(template, page=page, today=today, pages=pages)
#
# Main
#
if __name__ == '__main__':
if len(sys.argv) > 1 and sys.argv[1] == 'build':
freezer.freeze()
else:
app.run(port=8000)
| lem-usp/site-bio208 | site.py | Python | mit | 4,354 |
# Copyright 2008 Mike Wakerly <[email protected]>
#
# This file is part of the Kegboard package of the Kegbot project.
# For more information on Kegboard or Kegbot, see http://kegbot.org/
#
# Kegboard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Kegboard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Kegboard. If not, see <http://www.gnu.org/licenses/>.
"""Python interfaces to a Kegboard device."""
import glob
import itertools
import os
import struct
import time
import gflags
import select
import serial
from . import crc16
from .message import *
from .exceptions import *
DEFAULT_GLOB_PATHS = (
'/dev/ttyUSB*',
'/dev/ttyACM*',
'/dev/cu.usbserial*',
'/dev/tty.usbmodem*'
)
def find_devices(glob_paths=DEFAULT_GLOB_PATHS):
paths = []
for p in glob_paths:
paths += glob.glob(p)
return paths
_GLOBS = find_devices()
if _GLOBS:
_DEFAULT_PORT = _GLOBS[0]
else:
_DEFAULT_PORT = '/dev/ttyUSB0'
FLAGS = gflags.FLAGS
gflags.DEFINE_string('kegboard_device', _DEFAULT_PORT,
'An explicit device file (eg /dev/ttyUSB0) on which to listen for kegboard '
'packets.')
gflags.DEFINE_integer('kegboard_speed', 115200,
'Baud rate of device at --kegboard_device')
gflags.DEFINE_boolean('verbose', os.environ.get('VERBOSE') is not None,
'Generate extra logging information.',
allow_override=True)
def get_kegboard(glob_paths=None):
"""Immediately returns a Kegboard if available, None otherwise.
Args:
glob_paths: Paths to test for a valid kegboard.
"""
return wait_for_kegboard(timeout=0, glob_paths=glob_paths)
def wait_for_kegboard(interval=0.1, timeout=None, glob_paths=None):
if not glob_paths:
glob_paths = DEFAULT_GLOB_PATHS
elapsed = 0
while True:
paths = find_devices(glob_paths)
if paths:
return Kegboard(paths[0])
elapsed += interval
if timeout is not None and elapsed >= timeout:
return None
time.sleep(interval)
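# Minimal usage sketch (assumes a board is attached on one of the default
# glob paths; get_kegboard() returns None when nothing is found):
#
#   board = get_kegboard()
#   if board:
#     board.open()
#     board.wait_for_ping()
#     for msg in board.drain_messages():
#       print msg
#     board.close_quietly()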
class Kegboard:
def __init__(self, device_path, speed=None):
self.device_path = device_path
self.incomplete_message = ""
if not speed:
speed = FLAGS.kegboard_speed
self.speed = speed
self.fd = None
def __str__(self):
return '<Kegboard path=%s speed=%s>' % (self.device_path, self.speed)
def open(self):
"""Opens the backing device; must be called before any operations."""
if self.fd:
raise IOError('Already open!')
if not os.path.isfile(self.device_path):
self.fd = serial.Serial(self.device_path, self.speed, timeout=0.1)
self.fd.flushInput()
else:
self.fd = open(self.device_path, 'rb')
def close(self):
"""Closes the backing device."""
self._assert_open()
self.fd.close()
self.fd = None
self.incomplete_message = ""
def close_quietly(self):
"""Similar to `close()`, but swallows any errors."""
try:
self.close()
except IOError:
pass
def read_message_nonblock(self):
"""Immediately returns a message if available, None otherwise."""
self._assert_open()
while True:
# Since we also support 'plain' fds, cannot use serial.inWaiting()
rr, wr, er = select.select([self.fd], [], [], 0)
if not rr:
break
c = self.fd.read(1)
if self.incomplete_message is None:
if c == '\n':
# Reset.
self.incomplete_message = ''
continue
self.incomplete_message += c
if len(self.incomplete_message) >= KBSP_MAXLEN:
# Packet too big; mark corrupt.
self.incomplete_message = None
elif not KBSP_PREFIX.startswith(self.incomplete_message[:len(KBSP_PREFIX)]):
# Bad packet start; mark corrupt.
self.incomplete_message = None
elif self.incomplete_message[-2:] == KBSP_TRAILER:
# Packet ended! Check it.
bytes = self.incomplete_message
self.incomplete_message = ''
header = bytes[:12]
payload = bytes[12:-4]
trailer = bytes[-4:]
crcd_bytes = bytes[:-2]
checked_crc = crc16.crc16_ccitt(crcd_bytes)
message_id, message_len = struct.unpack('<HH', header[8:])
try:
return get_message_by_id(message_id, payload)
except UnknownMessageError, e:
continue
else:
# Just continue.
continue
return None
def drain_messages(self):
"""Immediately returns all available messages without blocking.
This method is a convenience wrapper around `read_message_nonblock()`.
"""
self._assert_open()
ret = []
while True:
m = self.read_message_nonblock()
if not m:
break
ret.append(m)
return ret
def read_message(self, timeout=None, interval=0.1):
"""Blocks until a message is available, returning it.
If `timeout` given, the method will return None after this many seconds
have elapsed without reading a message.
"""
self._assert_open()
elapsed = 0
while True:
m = self.read_message_nonblock()
if m:
return m
elapsed += interval
if timeout is not None and elapsed >= timeout:
return None
time.sleep(interval)
def wait_for_ping(self, attempts=5):
self.drain_messages()
for i in xrange(attempts):
self.ping()
messages = [self.read_message(timeout=1)] + self.drain_messages()
for message in messages:
if isinstance(message, HelloMessage):
return message
def write_message(self, message):
"""Send a message to the device."""
self._assert_open()
return self.fd.write(message.ToBytes())
def ping(self):
return self.write_message(PingCommand())
def set_serial_number(self, serial_number):
command = SetSerialNumberCommand()
command.SetValue('serial_number', serial_number)
return self.write_message(command)
def set_output(self, output_id, enable):
command = SetOutputCommand()
command.SetValue('output_id', int(output_id))
command.SetValue('output_mode', int(enable))
return self.write_message(command)
def _assert_open(self):
if not self.fd:
raise IOError('Kegboard not open; call open() first.')
| Indemnity83/kegboard | python/kegbot/kegboard/kegboard.py | Python | gpl-2.0 | 6,596 |
'''
Created on Mar 31, 2011
@author: Stephen O'Hara
Colorado State University
All Rights Reserved
High level functions for implementing Cambridge gestures data partitioning.
Standard partition is set5 = training, sets 1-4 for testing.
It is also instructive to try leave-one-set-out protocols,
so a partitioning for that strategy is provided as well.
'''
import proximityforest as pf
import scipy
def partitionGestures(vidData, leaveout_set=0):
'''partitions gesture data into a leave-one-set-out test and training set'''
testvids = []
testlabels = []
trainvids = []
trainlabels = []
for i in range(5):
if i==leaveout_set:
testvids = vidData[i].getData()
testlabels = vidData[i].getLabels()
else:
tv = vidData[i].getData()
tl = vidData[i].getLabels()
trainvids += tv
trainlabels += tl
return ( (trainvids,trainlabels),(testvids,testlabels))
def partitionCambridgeProtocol(vidData, test_set=0):
'''
training set is set 5 (4, using zero-based index),
test set 0-3 specified by parameter.
'''
set5 = vidData[4]
trainvids = set5.getData()
trainlabels = set5.getLabels()
setX = vidData[test_set]
testvids = setX.getData()
testlabels = setX.getLabels()
return ( (trainvids,trainlabels),(testvids,testlabels))
def cambridgeStandardClassification(GDat, treeClass=pf.TensorSubspaceTree, N=9, Tau=21, method="knn", K=1, **kwargs):
'''
Performs the standard cambridge classification protocol, using
Set 5 for training, and testing on Sets 1-4.
@param GDat: The cambridge gestures tracklet data structure generated
via loadGestureData() function in the DataSource module, or via unPickling
a previously stored object.
'''
(train, _) = partitionCambridgeProtocol(GDat)
tracklet_data = pf.build_simple_TD_Structure(train[0], train[1])
#step 1: train a forest on set 5
forest = pf.TensorSubspaceForest(N, tracklet_data, treeClass=treeClass, Tau=Tau, **kwargs)
forest.addList(range(len(train[0])), train[1])
#step 2: test against each set 1-4 independently
errList = []
confusionList = []
for i in range(4):
print "Classifying Set%d vs. Set5..."%(i+1)
testvids = GDat[i].getData()
testlabels = GDat[i].getLabels()
pfa = pf.ProximityForestAnalysis(forest)
(err, cfList) = pfa.classifyAll(testvids, testlabels, 9, method, K, verbose=False)
#cfm = pf.confusionMatrix(cfList, [20]*9) #a single set has 20 repetitions of each of 9 gestures
errList.append(err)
confusionList.append(cfList)
print
print "Set%d error rate: %f"%(i+1,err)
print
avg_err = scipy.mean(errList)
print "Average Error Rate = %f"%avg_err
print "Error by Set = %s"%str(errList)
return (avg_err, errList, confusionList)
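# Usage sketch (assumes GDat was built elsewhere, e.g. by the DataSource
# module's loadGestureData() function, or restored by unpickling):
#   (train, test) = partitionCambridgeProtocol(GDat, test_set=0)
#   (avg_err, errs, confusions) = cambridgeStandardClassification(GDat, N=9, Tau=21)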
if __name__ == '__main__':
pass
| Sciumo/ProximityForest | evaluation/action_data_sets/cambridgeGestures/Protocol.py | Python | gpl-3.0 | 2,973 |
#!/usr/bin/python -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
# Basic string exercises
# Fill in the code for the functions below. main() is already set up
# to call the functions with a few different inputs,
# printing 'OK' when each function is correct.
# The starter code for each function includes a 'return'
# which is just a placeholder for your code.
# It's ok if you do not complete all the functions, and there
# are some additional functions to try in string2.py.
# A. donuts
# Given an int count of a number of donuts, return a string
# of the form 'Number of donuts: <count>', where <count> is the number
# passed in. However, if the count is 10 or more, then use the word 'many'
# instead of the actual count.
# So donuts(5) returns 'Number of donuts: 5'
# and donuts(23) returns 'Number of donuts: many'
def donuts(count):
# +++your code here+++
if (count>=10):
retValue='many'
else:
retValue=count
return 'Number of donuts: '+str(retValue)
# B. both_ends
# Given a string s, return a string made of the first 2
# and the last 2 chars of the original string,
# so 'spring' yields 'spng'. However, if the string length
# is less than 2, return instead the empty string.
def both_ends(s):
# +++your code here+++
firsttwo=s[:2]
lasttwo=s[-2:]
if (len(s)<2):
result=''
else:
result=firsttwo+lasttwo
return result
# C. fix_start
# Given a string s, return a string
# where all occurences of its first char have
# been changed to '*', except do not change
# the first char itself.
# e.g. 'babble' yields 'ba**le'
# Assume that the string is length 1 or more.
# Hint: s.replace(stra, strb) returns a version of string s
# where all instances of stra have been replaced by strb.
def fix_start(s):
# +++your code here+++
first=s[0]
s=s.replace(first,'*')
return first+s[1:]
# D. MixUp
# Given strings a and b, return a single string with a and b separated
# by a space '<a> <b>', except swap the first 2 chars of each string.
# e.g.
# 'mix', pod' -> 'pox mid'
# 'dog', 'dinner' -> 'dig donner'
# Assume a and b are length 2 or more.
def mix_up(a, b):
# +++your code here+++
return b[:2]+a[2:]+' '+a[:2]+b[2:]
# Provided simple test() function used in main() to print
# what each function returns vs. what it's supposed to return.
def test(got, expected):
if got == expected:
prefix = ' OK '
else:
prefix = ' X '
print '%s got: %s expected: %s' % (prefix, repr(got), repr(expected))
# Provided main() calls the above functions with interesting inputs,
# using test() to check if each result is correct or not.
def main():
print 'donuts'
# Each line calls donuts, compares its result to the expected for that call.
test(donuts(4), 'Number of donuts: 4')
test(donuts(9), 'Number of donuts: 9')
test(donuts(10), 'Number of donuts: many')
test(donuts(99), 'Number of donuts: many')
print
print 'both_ends'
test(both_ends('spring'), 'spng')
test(both_ends('Hello'), 'Helo')
test(both_ends('a'), '')
test(both_ends('xyz'), 'xyyz')
print
print 'fix_start'
test(fix_start('babble'), 'ba**le')
test(fix_start('aardvark'), 'a*rdv*rk')
test(fix_start('google'), 'goo*le')
test(fix_start('donut'), 'donut')
print
print 'mix_up'
test(mix_up('mix', 'pod'), 'pox mid')
test(mix_up('dog', 'dinner'), 'dig donner')
test(mix_up('gnash', 'sport'), 'spash gnort')
test(mix_up('pezzy', 'firm'), 'fizzy perm')
# Standard boilerplate to call the main() function.
if __name__ == '__main__':
main()
| DevilFruit99/GooglePythonClass | basic/string1.py | Python | apache-2.0 | 3,686 |
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.db import connection
from django.shortcuts import get_object_or_404
from tenant_schemas.utils import get_tenant_model, remove_www_and_dev, get_public_schema_name
class TenantMiddleware(object):
"""
This middleware should be placed at the very top of the middleware stack.
Selects the proper database schema using the request host. Can fail in
various ways which is better than corrupting or revealing data...
"""
def process_request(self, request):
"""
Resets to public schema
Some nasty weird bugs happened at the production environment without this call.
connection.pg_thread.schema_name would already be set and then terrible errors
would occur. Any idea why? My theory is django implements connection as some sort
of threading local variable.
"""
connection.set_schema_to_public()
hostname_without_port = remove_www_and_dev(request.get_host().split(':')[0])
request.tenant = get_object_or_404(get_tenant_model(), domain_url=hostname_without_port)
connection.set_tenant(request.tenant)
# content type can no longer be cached as public and tenant schemas have different
# models. if someone wants to change this, the cache needs to be separated between
# public and shared schemas. if this cache isn't cleared, this can cause permission
# problems. for example, on public, a particular model has id 14, but on the tenants
# it has the id 15. if 14 is cached instead of 15, the permissions for the wrong
# model will be fetched.
ContentType.objects.clear_cache()
Site.objects.clear_cache()
# do we have a public-specific token?
if hasattr(settings, 'PUBLIC_SCHEMA_URLCONF') and request.tenant.schema_name == get_public_schema_name():
request.urlconf = settings.PUBLIC_SCHEMA_URLCONF
| jrutila/django-tenant-schemas | tenant_schemas/middleware.py | Python | mit | 2,043 |
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2018 Florian Bruhin (The Compiler) <[email protected]>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""The main statusbar widget."""
import enum
import attr
from PyQt5.QtCore import pyqtSignal, pyqtSlot, pyqtProperty, Qt, QSize, QTimer
from PyQt5.QtWidgets import QWidget, QHBoxLayout, QStackedLayout, QSizePolicy
from qutebrowser.browser import browsertab
from qutebrowser.config import config
from qutebrowser.utils import usertypes, log, objreg, utils
from qutebrowser.mainwindow.statusbar import (backforward, command, progress,
keystring, percentage, url,
tabindex)
from qutebrowser.mainwindow.statusbar import text as textwidget
@attr.s
class ColorFlags:
"""Flags which change the appearance of the statusbar.
Attributes:
prompt: If we're currently in prompt-mode.
insert: If we're currently in insert mode.
command: If we're currently in command mode.
mode: The current caret mode (CaretMode.off/.on/.selection).
private: Whether this window is in private browsing mode.
passthrough: If we're currently in passthrough-mode.
"""
CaretMode = enum.Enum('CaretMode', ['off', 'on', 'selection'])
prompt = attr.ib(False)
insert = attr.ib(False)
command = attr.ib(False)
caret = attr.ib(CaretMode.off)
private = attr.ib(False)
passthrough = attr.ib(False)
def to_stringlist(self):
"""Get a string list of set flags used in the stylesheet.
This also combines flags in ways they're used in the sheet.
"""
strings = []
if self.prompt:
strings.append('prompt')
if self.insert:
strings.append('insert')
if self.command:
strings.append('command')
if self.private:
strings.append('private')
if self.passthrough:
strings.append('passthrough')
if self.private and self.command:
strings.append('private-command')
if self.caret == self.CaretMode.on:
strings.append('caret')
elif self.caret == self.CaretMode.selection:
strings.append('caret-selection')
else:
assert self.caret == self.CaretMode.off
return strings
def _generate_stylesheet():
flags = [
('private', 'statusbar.private'),
('caret', 'statusbar.caret'),
('caret-selection', 'statusbar.caret.selection'),
('prompt', 'prompts'),
('insert', 'statusbar.insert'),
('command', 'statusbar.command'),
('passthrough', 'statusbar.passthrough'),
('private-command', 'statusbar.command.private'),
]
stylesheet = """
QWidget#StatusBar,
QWidget#StatusBar QLabel,
QWidget#StatusBar QLineEdit {
font: {{ conf.fonts.statusbar }};
background-color: {{ conf.colors.statusbar.normal.bg }};
color: {{ conf.colors.statusbar.normal.fg }};
}
"""
for flag, option in flags:
stylesheet += """
QWidget#StatusBar[color_flags~="%s"],
QWidget#StatusBar[color_flags~="%s"] QLabel,
QWidget#StatusBar[color_flags~="%s"] QLineEdit {
color: {{ conf.colors.%s }};
background-color: {{ conf.colors.%s }};
}
""" % (flag, flag, flag, # noqa: S001
option + '.fg', option + '.bg')
return stylesheet
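# For illustration, the rule generated for the 'insert' flag looks roughly like:
#   QWidget#StatusBar[color_flags~="insert"], ... QLabel, ... QLineEdit {
#       color: {{ conf.colors.statusbar.insert.fg }};
#       background-color: {{ conf.colors.statusbar.insert.bg }};
#   }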
class StatusBar(QWidget):
"""The statusbar at the bottom of the mainwindow.
Attributes:
txt: The Text widget in the statusbar.
keystring: The KeyString widget in the statusbar.
percentage: The Percentage widget in the statusbar.
url: The UrlText widget in the statusbar.
prog: The Progress widget in the statusbar.
cmd: The Command widget in the statusbar.
_hbox: The main QHBoxLayout.
_stack: The QStackedLayout with cmd/txt widgets.
_win_id: The window ID the statusbar is associated with.
Signals:
resized: Emitted when the statusbar has resized, so the completion
widget can adjust its size to it.
arg: The new size.
moved: Emitted when the statusbar has moved, so the completion widget
can move to the right position.
arg: The new position.
"""
resized = pyqtSignal('QRect')
moved = pyqtSignal('QPoint')
_severity = None
_color_flags = []
STYLESHEET = _generate_stylesheet()
def __init__(self, *, win_id, private, parent=None):
super().__init__(parent)
objreg.register('statusbar', self, scope='window', window=win_id)
self.setObjectName(self.__class__.__name__)
self.setAttribute(Qt.WA_StyledBackground)
config.set_register_stylesheet(self)
self.setSizePolicy(QSizePolicy.Ignored, QSizePolicy.Fixed)
self._win_id = win_id
self._color_flags = ColorFlags()
self._color_flags.private = private
self._hbox = QHBoxLayout(self)
self._set_hbox_padding()
self._hbox.setSpacing(5)
self._stack = QStackedLayout()
self._hbox.addLayout(self._stack)
self._stack.setContentsMargins(0, 0, 0, 0)
self.cmd = command.Command(private=private, win_id=win_id)
self._stack.addWidget(self.cmd)
objreg.register('status-command', self.cmd, scope='window',
window=win_id)
self.txt = textwidget.Text()
self._stack.addWidget(self.txt)
self.cmd.show_cmd.connect(self._show_cmd_widget)
self.cmd.hide_cmd.connect(self._hide_cmd_widget)
self._hide_cmd_widget()
self.url = url.UrlText()
self.percentage = percentage.Percentage()
self.backforward = backforward.Backforward()
self.tabindex = tabindex.TabIndex()
self.keystring = keystring.KeyString()
self.prog = progress.Progress(self)
self._draw_widgets()
config.instance.changed.connect(self._on_config_changed)
QTimer.singleShot(0, self.maybe_hide)
def __repr__(self):
return utils.get_repr(self)
@pyqtSlot(str)
def _on_config_changed(self, option):
if option == 'statusbar.hide':
self.maybe_hide()
elif option == 'statusbar.padding':
self._set_hbox_padding()
elif option == 'statusbar.widgets':
self._draw_widgets()
def _draw_widgets(self):
"""Draw statusbar widgets."""
# Start with widgets hidden and show them when needed
for widget in [self.url, self.percentage,
self.backforward, self.tabindex,
self.keystring, self.prog]:
widget.hide()
self._hbox.removeWidget(widget)
tab = self._current_tab()
# Read the list and set widgets accordingly
for segment in config.val.statusbar.widgets:
if segment == 'url':
self._hbox.addWidget(self.url)
self.url.show()
elif segment == 'scroll':
self._hbox.addWidget(self.percentage)
self.percentage.show()
elif segment == 'scroll_raw':
self._hbox.addWidget(self.percentage)
self.percentage.raw = True
self.percentage.show()
elif segment == 'history':
self._hbox.addWidget(self.backforward)
self.backforward.enabled = True
if tab:
self.backforward.on_tab_changed(tab)
elif segment == 'tabs':
self._hbox.addWidget(self.tabindex)
self.tabindex.show()
elif segment == 'keypress':
self._hbox.addWidget(self.keystring)
self.keystring.show()
elif segment == 'progress':
self._hbox.addWidget(self.prog)
self.prog.enabled = True
if tab:
self.prog.on_tab_changed(tab)
@pyqtSlot()
def maybe_hide(self):
"""Hide the statusbar if it's configured to do so."""
tab = self._current_tab()
hide = config.val.statusbar.hide
if hide or (tab is not None and tab.data.fullscreen):
self.hide()
else:
self.show()
def _set_hbox_padding(self):
padding = config.val.statusbar.padding
self._hbox.setContentsMargins(padding.left, 0, padding.right, 0)
@pyqtProperty('QStringList')
def color_flags(self):
"""Getter for self.color_flags, so it can be used as Qt property."""
return self._color_flags.to_stringlist()
def _current_tab(self):
"""Get the currently displayed tab."""
window = objreg.get('tabbed-browser', scope='window',
window=self._win_id)
return window.widget.currentWidget()
def set_mode_active(self, mode, val):
"""Setter for self.{insert,command,caret}_active.
Re-set the stylesheet after setting the value, so everything gets
updated by Qt properly.
"""
if mode == usertypes.KeyMode.insert:
log.statusbar.debug("Setting insert flag to {}".format(val))
self._color_flags.insert = val
if mode == usertypes.KeyMode.passthrough:
log.statusbar.debug("Setting passthrough flag to {}".format(val))
self._color_flags.passthrough = val
if mode == usertypes.KeyMode.command:
log.statusbar.debug("Setting command flag to {}".format(val))
self._color_flags.command = val
elif mode in [usertypes.KeyMode.prompt, usertypes.KeyMode.yesno]:
log.statusbar.debug("Setting prompt flag to {}".format(val))
self._color_flags.prompt = val
elif mode == usertypes.KeyMode.caret:
if not val:
# Turning on is handled in on_current_caret_selection_toggled
log.statusbar.debug("Setting caret mode off")
self._color_flags.caret = ColorFlags.CaretMode.off
config.set_register_stylesheet(self, update=False)
def _set_mode_text(self, mode):
"""Set the mode text."""
if mode == 'passthrough':
key_instance = config.key_instance
all_bindings = key_instance.get_reverse_bindings_for('passthrough')
bindings = all_bindings.get('leave-mode')
if bindings:
suffix = ' ({} to leave)'.format(bindings[0])
else:
suffix = ''
else:
suffix = ''
text = "-- {} MODE --{}".format(mode.upper(), suffix)
self.txt.set_text(self.txt.Text.normal, text)
def _show_cmd_widget(self):
"""Show command widget instead of temporary text."""
self._stack.setCurrentWidget(self.cmd)
self.show()
def _hide_cmd_widget(self):
"""Show temporary text instead of command widget."""
log.statusbar.debug("Hiding cmd widget")
self._stack.setCurrentWidget(self.txt)
self.maybe_hide()
@pyqtSlot(str)
def set_text(self, val):
"""Set a normal (persistent) text in the status bar."""
self.txt.set_text(self.txt.Text.normal, val)
@pyqtSlot(usertypes.KeyMode)
def on_mode_entered(self, mode):
"""Mark certain modes in the commandline."""
keyparsers = objreg.get('keyparsers', scope='window',
window=self._win_id)
if keyparsers[mode].passthrough:
self._set_mode_text(mode.name)
if mode in [usertypes.KeyMode.insert,
usertypes.KeyMode.command,
usertypes.KeyMode.caret,
usertypes.KeyMode.prompt,
usertypes.KeyMode.yesno,
usertypes.KeyMode.passthrough]:
self.set_mode_active(mode, True)
@pyqtSlot(usertypes.KeyMode, usertypes.KeyMode)
def on_mode_left(self, old_mode, new_mode):
"""Clear marked mode."""
keyparsers = objreg.get('keyparsers', scope='window',
window=self._win_id)
if keyparsers[old_mode].passthrough:
if keyparsers[new_mode].passthrough:
self._set_mode_text(new_mode.name)
else:
self.txt.set_text(self.txt.Text.normal, '')
if old_mode in [usertypes.KeyMode.insert,
usertypes.KeyMode.command,
usertypes.KeyMode.caret,
usertypes.KeyMode.prompt,
usertypes.KeyMode.yesno,
usertypes.KeyMode.passthrough]:
self.set_mode_active(old_mode, False)
@pyqtSlot(browsertab.AbstractTab)
def on_tab_changed(self, tab):
"""Notify sub-widgets when the tab has been changed."""
self.url.on_tab_changed(tab)
self.prog.on_tab_changed(tab)
self.percentage.on_tab_changed(tab)
self.backforward.on_tab_changed(tab)
self.maybe_hide()
assert tab.private == self._color_flags.private
@pyqtSlot(bool)
def on_caret_selection_toggled(self, selection):
"""Update the statusbar when entering/leaving caret selection mode."""
log.statusbar.debug("Setting caret selection {}".format(selection))
if selection:
self._set_mode_text("caret selection")
self._color_flags.caret = ColorFlags.CaretMode.selection
else:
self._set_mode_text("caret")
self._color_flags.caret = ColorFlags.CaretMode.on
config.set_register_stylesheet(self, update=False)
def resizeEvent(self, e):
"""Extend resizeEvent of QWidget to emit a resized signal afterwards.
Args:
e: The QResizeEvent.
"""
super().resizeEvent(e)
self.resized.emit(self.geometry())
def moveEvent(self, e):
"""Extend moveEvent of QWidget to emit a moved signal afterwards.
Args:
e: The QMoveEvent.
"""
super().moveEvent(e)
self.moved.emit(e.pos())
def minimumSizeHint(self):
"""Set the minimum height to the text height plus some padding."""
padding = config.val.statusbar.padding
width = super().minimumSizeHint().width()
height = self.fontMetrics().height() + padding.top + padding.bottom
return QSize(width, height)
| toofar/qutebrowser | qutebrowser/mainwindow/statusbar/bar.py | Python | gpl-3.0 | 15,236 |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the `MapAndFilterFusion` optimization."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
from tensorflow.python.data.experimental.ops import optimization
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
def _map_and_filter_fusion_test_cases():
"""Generates test cases for the MapAndFilterFusion optimization."""
identity = lambda x: x
increment = lambda x: x + 1
minus_five = lambda x: x - 5
def increment_and_square(x):
y = x + 1
return y * y
take_all = lambda x: constant_op.constant(True)
is_zero = lambda x: math_ops.equal(x, 0)
  is_even = lambda x: math_ops.equal(x % 2, 0)
greater = lambda x: math_ops.greater(x + 5, 0)
functions = [identity, increment, minus_five, increment_and_square]
  filters = [take_all, is_zero, is_even, greater]
tests = []
for x, fun in enumerate(functions):
for y, predicate in enumerate(filters):
tests.append(("Mixed{}{}".format(x, y), fun, predicate))
# Multi output
tests.append(("Multi1", lambda x: (x, x),
lambda x, y: constant_op.constant(True)))
tests.append(
("Multi2", lambda x: (x, 2),
lambda x, y: math_ops.equal(x * math_ops.cast(y, dtypes.int64), 0)))
return tuple(tests)
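# The cross product above yields 16 "Mixed<x><y>" cases (4 map functions x 4
# filter predicates) plus the two multi-output cases appended at the end.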
class MapAndFilterFusionTest(test_base.DatasetTestBase, parameterized.TestCase):
def _testMapAndFilter(self, dataset, function, predicate):
iterator = dataset.make_one_shot_iterator()
get_next = iterator.get_next()
with self.cached_session() as sess:
for x in range(10):
r = function(x)
if isinstance(r, tuple):
b = predicate(*r) # Pass tuple as multiple arguments.
else:
b = predicate(r)
if sess.run(b):
result = sess.run(get_next)
self.assertAllEqual(r, result)
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
@parameterized.named_parameters(*_map_and_filter_fusion_test_cases())
def testMapFilterFusion(self, function, predicate):
dataset = dataset_ops.Dataset.range(10).apply(
optimization.assert_next(
["Map", "FilterByLastComponent"])).map(function).filter(predicate)
options = dataset_ops.Options()
options.experimental_map_and_filter_fusion = True
dataset = dataset.with_options(options)
self._testMapAndFilter(dataset, function, predicate)
def testCapturedInputs(self):
a = constant_op.constant(3, dtype=dtypes.int64)
b = constant_op.constant(4, dtype=dtypes.int64)
some_tensor = math_ops.mul(a, b)
function = lambda x: x * x
def predicate(y):
return math_ops.less(math_ops.cast(y, dtypes.int64), some_tensor)
# We are currently not supporting functions with captured inputs.
dataset = dataset_ops.Dataset.range(10).apply(
optimization.assert_next(["Map",
"Filter"])).map(function).filter(predicate)
options = dataset_ops.Options()
options.experimental_map_and_filter_fusion = True
dataset = dataset.with_options(options)
self._testMapAndFilter(dataset, function, predicate)
if __name__ == "__main__":
test.main()
| alshedivat/tensorflow | tensorflow/python/data/experimental/kernel_tests/optimization/map_and_filter_fusion_test.py | Python | apache-2.0 | 4,199 |
# Copyright 2017 Starbot Discord Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import os
import platform
import sys
import time
import discord
import psutil
import speedtest
from api import settings, logging, command, message, plugin, git
from api.bot import Bot
from libs import progressBar, readableTime, displayname
# Command names.
SERVERSCMD = "servers"
NICKNAMECMD = "nickname"
def commands_detect_dups():
duplicates = []
commands_list = []
for plugin_in in Bot.plugins:
for command_in in plugin_in.commands:
commands_list.append(command_in.name)
for command_in in commands_list:
commandOccurances = 0
for command2 in commands_list:
if command_in == command2:
commandOccurances += 1
if commandOccurances > 1:
duplicates.append(command_in)
return list(set(duplicates))
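# e.g. if two plugins both register a 'ping' command, the list returned above
# contains 'ping' exactly once.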
def convert_size(size_bytes):
if size_bytes == 0:
return '0B'
size_name = ("B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB")
i = int(math.floor(math.log(size_bytes, 1024)))
p = math.pow(1024, i)
s = round(size_bytes/p, 2)
return '%s %s' % (s, size_name[i])
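# e.g. convert_size(0) -> '0B', convert_size(1536) -> '1.5 KB',
# convert_size(1073741824) -> '1.0 GB'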
def onInit(plugin_in):
plugins_command = command.Command(plugin_in, 'plugins', shortdesc='Print a list of plugins', devcommand=True)
commands_command = command.Command(plugin_in, 'commands', shortdesc='Print a list of commands')
help_command = command.Command(plugin_in, 'help', shortdesc='Redirects to !commands')
info_command = command.Command(plugin_in, 'info', shortdesc='Print some basic bot info')
plugintree_command = command.Command(plugin_in, 'plugintree', shortdesc='Print a tree of plugins and commands', devcommand=True)
uptime_command = command.Command(plugin_in, 'uptime', shortdesc='Print the bot\'s uptime', devcommand=True)
hostinfo_command = command.Command(plugin_in, 'hostinfo', shortdesc='Prints information about the bots home', devcommand=True)
cpuinfo_command = command.Command(plugin_in, 'cpuinfo', shortdesc='Prints info about the system CPUs', devcommand=True)
setprefix_command = command.Command(plugin_in, 'setprefix', shortdesc='Set the server prefix', devcommand=True)
getprefix_command = command.Command(plugin_in, 'getprefix', shortdesc='Get the server prefix', devcommand=True)
speedtest_command = command.Command(plugin_in, 'speedtest', shortdesc='Run a speedtest', devcommand=True)
addowner_command = command.Command(plugin_in, 'addowner', shortdesc='Add a bot owner', devcommand=True)
owners_command = command.Command(plugin_in, 'owners', shortdesc='Print the bot owners', devcommand=True)
messages_command = command.Command(plugin_in, 'messages', shortdesc="Show how many messages the bot has seen since start")
servers_command = command.Command(plugin_in, SERVERSCMD, shortdesc="Show how many servers the bot is on")
invite_command = command.Command(plugin_in, 'invite', shortdesc="Invite the bot to your server!")
nickname_command = command.Command(plugin_in, NICKNAMECMD, shortdesc="Change the bot's nickname")
ping_command = command.Command(plugin_in, 'ping', shortdesc='Pong!')
restart_command = command.Command(plugin_in, 'restart', shortdesc='Restart (and update) the bot')
listservers_command= command.Command(plugin_in, 'listservers', shortdesc='List the servers the bot is in')
changegame_command = command.Command(plugin_in, 'changegame', shortdesc="Change the bot's game")
return plugin.Plugin(plugin_in, 'botutils', [plugins_command, commands_command, help_command, info_command, plugintree_command, uptime_command,
hostinfo_command, cpuinfo_command, setprefix_command, getprefix_command, speedtest_command, addowner_command,
owners_command, messages_command, servers_command, invite_command, nickname_command, ping_command,
restart_command, listservers_command, changegame_command])
async def onCommand(message_in):
# Get user.
if message_in.server:
me = message_in.server.me
else:
me = message_in.channel.me
if message_in.command == 'plugins':
plugin_list = []
for plugin_in in Bot.plugins:
plugin_list.append(plugin_in.name)
return message.Message(body='```{}```'.format(', '.join(plugin_list)))
if message_in.command == 'commands' or message_in.command == 'help':
cmd_names = []
cmd_descs = []
for botcommand in Bot.commands:
if botcommand.devcommand != True:
cmd_names.append(botcommand.name)
cmd_descs.append(botcommand.shortdesc)
cmd_list = []
pad_len = len(max(cmd_names, key=len))
for index, value in enumerate(cmd_names):
cmd_list.append('{} - {}'.format(cmd_names[index].ljust(pad_len), cmd_descs[index]))
return message.Message(body='```{}```'.format('\n'.join(cmd_list)))
if message_in.command == 'info':
sha = git.git_commit()
track = git.git_branch()
remote = git.get_remote()
link = git.get_url()
if track == 'master':
embed = discord.Embed(color=discord.Color.red())
elif track == 'unstable':
embed = discord.Embed(color=discord.Color.gold())
elif track == 'stable':
embed = discord.Embed(color=discord.Color.green())
else:
embed = discord.Embed(color=discord.Color.light_grey())
embed.set_author(name='Project StarBot v0.2.0-{} on track {}'.format(sha[:7], track),
url=link,
icon_url='https://pbs.twimg.com/profile_images/616309728688238592/pBeeJQDQ.png')
embed.add_field(name="Bot Team Alpha", value="CorpNewt\nSydney Erickson\nGoldfish64")
embed.add_field(name="Source Code", value="Interested in poking around inside the bot?\nClick on the link above!")
embed.set_footer(text="Pulled from {}".format(remote))
return message.Message(embed=embed)
if message_in.command == 'plugintree':
dups = commands_detect_dups()
plugin_string = '```\n'
for plugin_in in Bot.plugins:
plugin_string += '{}\n'.format(plugin_in.name)
plugin_commands = len(plugin_in.commands)
index = 0
for command_in in plugin_in.commands:
index += 1
if plugin_commands != index:
if command_in.name in dups:
plugin_string += '├ {} <-- duplicate\n'.format(command_in.name)
else:
plugin_string += '├ {}\n'.format(command_in.name)
else:
if command_in.name in dups:
plugin_string += '└ {} <-- duplicate\n'.format(command_in.name)
else:
plugin_string += '└ {}\n'.format(command_in.name)
plugin_string += '```'
return message.Message(body=plugin_string)
if message_in.command == 'uptime':
time_current = int(time.time())
time_str = readableTime.getReadableTimeBetween(Bot.startTime, time_current)
return message.Message(body='I\'ve been up for *{}*.'.format(time_str))
if message_in.command == 'hostinfo':
# Get information about host environment.
time_current = int(time.time())
# CPU stats.
cpu_threads = os.cpu_count()
cpu_usage = psutil.cpu_percent(interval=1)
# Memory stats.
mem_stats = psutil.virtual_memory()
mem_percent = mem_stats.percent
mem_used = convert_size(mem_stats.used)
mem_total = convert_size(mem_stats.total)
# Platform info.
platform_current = platform.platform()
# Python version info.
pyver_major = sys.version_info.major
pyver_minor = sys.version_info.minor
pyver_micro = sys.version_info.micro
pyver_release = sys.version_info.releaselevel
# Storage info.
stor = psutil.disk_usage('/')
stor_used = convert_size(stor.used)
stor_total = convert_size(stor.total)
stor_free = convert_size(stor.total - stor.used)
# Format hostinfo with OS, CPU, RAM, storage, and other bot info.
msg = '***{}\'s*** **Home:**\n'.format(displayname.name(message_in.server.me))
msg += '```Host OS : {}\n'.format(platform_current)
msg += 'Host Python : {}.{}.{} {}\n'.format(pyver_major, pyver_minor, pyver_micro, pyver_release)
if not isinstance(cpu_threads, int):
msg += 'Host CPU usage: {}% of {}\n'.format(cpu_usage, platform.machine())
elif cpu_threads > 1:
msg += 'Host CPU usage: {}% of {} ({} threads)\n'.format(cpu_usage, platform.machine(), cpu_threads)
else:
msg += 'Host CPU usage: {}% of {} ({} thread)\n'.format(cpu_usage, platform.machine(), cpu_threads)
msg += 'Host RAM : {} ({}%) of {}\n'.format(mem_used, mem_percent, mem_total)
msg += 'Host storage : {} ({}%) of {} - {} free\n'.format(stor_used, stor.percent, stor_total, stor_free)
msg += 'Hostname : {}\n'.format(platform.node())
msg += 'Host uptime : {}```'.format(readableTime.getReadableTimeBetween(psutil.boot_time(), time.time()))
# Return completed message.
return message.Message(body=msg)
if message_in.command == 'cpuinfo':
# Get CPU usage and create string for message.
cpu_pcts = psutil.cpu_percent(interval=0.1, percpu=True)
cpu_pct_str = '{}\n'.format(platform.processor())
cpu_threads = psutil.cpu_count()
cpu_cores = psutil.cpu_count(logical=False)
cpu_arch = platform.machine()
# First, check to see if we can accurately determine the number of physical cores. If not, omit the core count.
if not cpu_cores:
if cpu_threads > 1:
cpu_pct_str += '{} threads of {}'.format(cpu_threads, cpu_arch)
else:
cpu_pct_str += '{} thread of {}'.format(cpu_threads, cpu_arch)
elif cpu_cores > 1: # Multiple cores.
cpu_pct_str += '{} threads - {} cores of {}'.format(cpu_threads, cpu_cores, cpu_arch)
else:
if psutil.cpu_count() > 1: # Multiple threads, single core.
cpu_pct_str += '{} threads - {} core of {}'.format(cpu_threads, cpu_cores, cpu_arch)
else: # Single thread, single core.
cpu_pct_str += '{} thread - {} core of {}'.format(cpu_threads, cpu_cores, cpu_arch)
# Build CPU usage graph.
cpu_pct_str += '\n\n'
for index, value in enumerate(cpu_pcts):
cpu_pct_str += 'CPU {}: {}\n'.format(str(index), progressBar.makeBar(cpu_pcts[index]))
# Return completed message.
return message.Message(body='```{}```'.format(cpu_pct_str))
if message_in.command == 'setprefix':
if settings.owners_check(message_in.author.id):
prefix = message_in.body.split(' ', 1)[-1]
settings.prefix_set(message_in.server.id, prefix)
return message.Message(body='Prefix set to {}'.format(prefix))
else:
return message.Message(body='Only my owner can set the prefix!')
if message_in.command == 'getprefix':
return message.Message(body='Prefix is {}'.format(settings.prefix_get(message_in.server.id)))
if message_in.command == 'speedtest':
if settings.owners_check(message_in.author.id):
speed = speedtest.Speedtest()
speed.get_best_server()
msg = '**Speed Test Results:**\n'
msg += '```\n'
# msg += ' Ping: {}\n'.format(round(speed.ping(), 2)) - disabled with new module switch
msg += 'Download: {}MB/s\n'.format(round(speed.download()/1024/1024, 2))
msg += ' Upload: {}MB/s```'.format(round(speed.upload()/1024/1024, 2))
return message.Message(body=msg)
else:
            return message.Message(body='You do not have permission to run a speedtest.')
if message_in.command == "addowner":
if settings.owners_get():
try:
if settings.owners_check(message_in.author.id):
member = message_in.body.strip()
new_member = displayname.memberForName(member, message_in.server, me)
if settings.owners_check(new_member.id):
return message.Message(body="User is already an owner.")
elif new_member.bot:
return message.Message(body="Bots cannot be owners.")
else:
settings.owners_add(new_member.id)
return message.Message(body="Added owner successfully.")
else:
return message.Message(body="You aren't an owner of the bot.")
except AttributeError:
return message.Message(body="Invalid user.")
else:
settings.owners_add(message_in.author.id)
return message.Message(body="You have successfully claimed yourself as the first owner!")
if message_in.command == 'owners':
owners = []
if not settings.owners_get():
return message.Message(body='I have no owners')
for owner in settings.owners_get():
user = displayname.memberForID(str(owner), message_in.server, me)
if user:
owners.append(str(user.name))
else:
owners.append(str(owner))
owner_list = ', '.join(owners)
return message.Message(body=owner_list)
if message_in.command == SERVERSCMD:
# Get server count.
servercount = len(Bot.client.servers)
# Return message.
if servercount == 1:
return message.Message("I am a member of **{} server**!".format(servercount))
else:
return message.Message("I am a member of **{} servers**!".format(servercount))
if message_in.command == 'messages':
# Get server.
server = message_in.server
# If the server is null, show error.
if not server:
return message.Message("This is not a server. :wink:")
msg_count = Bot.messagesSinceStart
msg_count_server = logging.message_count_get(server.id)
msg = "I've witnessed *{} messages* since I started and *{} messages* overall!"
return message.Message(msg.format(msg_count, msg_count_server))
if message_in.command == 'invite':
class perm_admin:
value = 8
return message.Message(body=discord.utils.oauth_url(Bot.client.user.id, perm_admin))
if message_in.command == NICKNAMECMD:
if message_in.channel.permissions_for(message_in.author).manage_nicknames:
# Change nickname.
await Bot.client.change_nickname(message_in.server.me, message_in.body.strip())
# if message_in.server.me.nick:
# return message.Message("My new nickname in this server is **{}**".format(message_in.server.me.nick))
#else:
# return message.Message("My nickname has been removed.")
return message.Message("My nickname has been changed.")
else:
return message.Message("You cannot change nicknames on this server.")
if message_in.command == 'ping':
return message.Message(body='PONG! Bot is up!')
if message_in.command == 'restart':
if settings.owners_check(message_in.author.id):
print("Rebooting...")
os._exit(1)
else:
return message.Message(body="You do not have permisison to reboot the bot.")
if message_in.command == 'listservers':
# this from Sydney
server_names = ""
send = "I am in"
for server in Bot.client.servers:
server_names += " {}".format(server.name)
send += " {},".format(server.name)
send = send[:-1]
send = send + "."
return message.Message(send)
if message_in.command == 'changegame':
if settings.owners_check(message_in.author.id):
await Bot.client.change_presence(game=discord.Game(name=message_in.body.strip()))
return message.Message(body='The game has been changed.')
else:
return message.Message(body="You do not have permisison to change the bot's game.")
| dhinakg/BitSTAR | plugins/botutils.py | Python | apache-2.0 | 17,211 |
"""
Functional tests to test the AppLocation class and related methods.
"""
from unittest import TestCase
from mock import patch
from openlp.core.utils import AppLocation
class TestAppLocation(TestCase):
"""
A test suite to test out various methods around the AppLocation class.
"""
def get_data_path_test(self):
"""
Test the AppLocation.get_data_path() method
"""
with patch(u'openlp.core.utils.Settings') as mocked_class, \
patch(u'openlp.core.utils.AppLocation.get_directory') as mocked_get_directory, \
patch(u'openlp.core.utils.check_directory_exists') as mocked_check_directory_exists, \
patch(u'openlp.core.utils.os') as mocked_os:
# GIVEN: A mocked out Settings class and a mocked out AppLocation.get_directory()
mocked_settings = mocked_class.return_value
mocked_settings.contains.return_value = False
mocked_get_directory.return_value = u'test/dir'
mocked_check_directory_exists.return_value = True
mocked_os.path.normpath.return_value = u'test/dir'
# WHEN: we call AppLocation.get_data_path()
data_path = AppLocation.get_data_path()
# THEN: check that all the correct methods were called, and the result is correct
mocked_settings.contains.assert_called_with(u'advanced/data path')
mocked_get_directory.assert_called_with(AppLocation.DataDir)
mocked_check_directory_exists.assert_called_with(u'test/dir')
assert data_path == u'test/dir', u'Result should be "test/dir"'
def get_data_path_with_custom_location_test(self):
"""
Test the AppLocation.get_data_path() method when a custom location is set in the settings
"""
with patch(u'openlp.core.utils.Settings') as mocked_class,\
patch(u'openlp.core.utils.os') as mocked_os:
# GIVEN: A mocked out Settings class which returns a custom data location
mocked_settings = mocked_class.return_value
mocked_settings.contains.return_value = True
mocked_settings.value.return_value.toString.return_value = u'custom/dir'
mocked_os.path.normpath.return_value = u'custom/dir'
# WHEN: we call AppLocation.get_data_path()
data_path = AppLocation.get_data_path()
# THEN: the mocked Settings methods were called and the value returned was our set up value
mocked_settings.contains.assert_called_with(u'advanced/data path')
mocked_settings.value.assert_called_with(u'advanced/data path')
assert data_path == u'custom/dir', u'Result should be "custom/dir"'
def get_section_data_path_test(self):
"""
Test the AppLocation.get_section_data_path() method
"""
with patch(u'openlp.core.utils.AppLocation.get_data_path') as mocked_get_data_path, \
patch(u'openlp.core.utils.check_directory_exists') as mocked_check_directory_exists:
# GIVEN: A mocked out AppLocation.get_data_path()
mocked_get_data_path.return_value = u'test/dir'
mocked_check_directory_exists.return_value = True
# WHEN: we call AppLocation.get_data_path()
data_path = AppLocation.get_section_data_path(u'section')
# THEN: check that all the correct methods were called, and the result is correct
mocked_check_directory_exists.assert_called_with(u'test/dir/section')
assert data_path == u'test/dir/section', u'Result should be "test/dir/section"'
def get_directory_for_app_dir_test(self):
"""
Test the AppLocation.get_directory() method for AppLocation.AppDir
"""
with patch(u'openlp.core.utils._get_frozen_path') as mocked_get_frozen_path:
mocked_get_frozen_path.return_value = u'app/dir'
# WHEN: We call AppLocation.get_directory
directory = AppLocation.get_directory(AppLocation.AppDir)
# THEN:
assert directory == u'app/dir', u'Directory should be "app/dir"'
def get_directory_for_plugins_dir_test(self):
"""
Test the AppLocation.get_directory() method for AppLocation.PluginsDir
"""
with patch(u'openlp.core.utils._get_frozen_path') as mocked_get_frozen_path, \
patch(u'openlp.core.utils.os.path.abspath') as mocked_abspath, \
patch(u'openlp.core.utils.os.path.split') as mocked_split, \
patch(u'openlp.core.utils.sys') as mocked_sys:
mocked_abspath.return_value = u'plugins/dir'
mocked_split.return_value = [u'openlp']
mocked_get_frozen_path.return_value = u'plugins/dir'
mocked_sys.frozen = 1
mocked_sys.argv = ['openlp']
# WHEN: We call AppLocation.get_directory
directory = AppLocation.get_directory(AppLocation.PluginsDir)
# THEN:
assert directory == u'plugins/dir', u'Directory should be "plugins/dir"'
| marmyshev/transitions | tests/functional/openlp_core_utils/test_applocation.py | Python | gpl-2.0 | 5,072 |
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test PrioritiseTransaction code
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.mininode import COIN, MAX_BRICK_SIZE
class PrioritiseTransactionTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_wall = True
self.num_nodes = 1
self.txouts = gen_return_txouts()
def setup_network(self):
self.nodes = []
self.is_network_split = False
self.nodes.append(start_node(0, self.options.tmpdir, ["-debug", "-printpriority=1", "-maxmempool=10"]))
self.relayfee = self.nodes[0].getnetworkinfo()['relayfee']
def run_test(self):
utxo_count = 90
utxos = create_confirmed_utxos(self.relayfee, self.nodes[0], utxo_count)
base_fee = self.relayfee*100 # our transactions are smaller than 100kb
txids = []
# Create 3 batches of transactions at 3 different fee rate levels
range_size = utxo_count // 3
for i in range(3):
txids.append([])
start_range = i * range_size
end_range = start_range + range_size
txids[i] = create_lots_of_big_transactions(self.nodes[0], self.txouts, utxos[start_range:end_range], (i+1)*base_fee)
# Make sure that the size of each group of transactions exceeds
# MAX_BRICK_SIZE -- otherwise the test needs to be revised to create
# more transactions.
mempool = self.nodes[0].getrawmempool(True)
sizes = [0, 0, 0]
for i in range(3):
for j in txids[i]:
assert(j in mempool)
sizes[i] += mempool[j]['size']
assert(sizes[i] > MAX_BRICK_SIZE) # Fail => raise utxo_count
# add a fee delta to something in the cheapest bucket and make sure it gets mined
# also check that a different entry in the cheapest bucket is NOT mined (lower
# the priority to ensure its not mined due to priority)
self.nodes[0].prioritisetransaction(txids[0][0], 0, int(3*base_fee*COIN))
self.nodes[0].prioritisetransaction(txids[0][1], -1e15, 0)
self.nodes[0].generate(1)
mempool = self.nodes[0].getrawmempool()
print("Assert that prioritised transaction was mined")
assert(txids[0][0] not in mempool)
assert(txids[0][1] in mempool)
high_fee_tx = None
for x in txids[2]:
if x not in mempool:
high_fee_tx = x
# Something high-fee should have been mined!
assert(high_fee_tx != None)
# Add a prioritisation before a tx is in the mempool (de-prioritising a
# high-fee transaction so that it's now low fee).
self.nodes[0].prioritisetransaction(high_fee_tx, -1e15, -int(2*base_fee*COIN))
# Add everything back to mempool
self.nodes[0].invalidatebrick(self.nodes[0].getbestbrickhash())
# Check to make sure our high fee rate tx is back in the mempool
mempool = self.nodes[0].getrawmempool()
assert(high_fee_tx in mempool)
# Now verify the modified-high feerate transaction isn't mined before
# the other high fee transactions. Keep mining until our mempool has
# decreased by all the high fee size that we calculated above.
while (self.nodes[0].getmempoolinfo()['bytes'] > sizes[0] + sizes[1]):
self.nodes[0].generate(1)
# High fee transaction should not have been mined, but other high fee rate
# transactions should have been.
mempool = self.nodes[0].getrawmempool()
print("Assert that de-prioritised transaction is still in mempool")
assert(high_fee_tx in mempool)
for x in txids[2]:
if (x != high_fee_tx):
assert(x not in mempool)
# Create a free, low priority transaction. Should be rejected.
utxo_list = self.nodes[0].listunspent()
assert(len(utxo_list) > 0)
utxo = utxo_list[0]
inputs = []
outputs = {}
inputs.append({"txid" : utxo["txid"], "vout" : utxo["vout"]})
outputs[self.nodes[0].getnewaddress()] = utxo["amount"] - self.relayfee
raw_tx = self.nodes[0].createrawtransaction(inputs, outputs)
tx_hex = self.nodes[0].signrawtransaction(raw_tx)["hex"]
txid = self.nodes[0].sendrawtransaction(tx_hex)
# A tx that spends an in-mempool tx has 0 priority, so we can use it to
# test the effect of using prioritise transaction for mempool acceptance
inputs = []
inputs.append({"txid": txid, "vout": 0})
outputs = {}
outputs[self.nodes[0].getnewaddress()] = utxo["amount"] - self.relayfee
raw_tx2 = self.nodes[0].createrawtransaction(inputs, outputs)
tx2_hex = self.nodes[0].signrawtransaction(raw_tx2)["hex"]
tx2_id = self.nodes[0].decoderawtransaction(tx2_hex)["txid"]
try:
self.nodes[0].sendrawtransaction(tx2_hex)
except JSONRPCException as exp:
assert_equal(exp.error['code'], -26) # insufficient fee
assert(tx2_id not in self.nodes[0].getrawmempool())
else:
assert(False)
# This is a less than 1000-byte transaction, so just set the fee
# to be the minimum for a 1000 byte transaction and check that it is
# accepted.
self.nodes[0].prioritisetransaction(tx2_id, 0, int(self.relayfee*COIN))
print("Assert that prioritised free transaction is accepted to mempool")
assert_equal(self.nodes[0].sendrawtransaction(tx2_hex), tx2_id)
assert(tx2_id in self.nodes[0].getrawmempool())
if __name__ == '__main__':
PrioritiseTransactionTest().main()
| magacoin/magacoin | qa/rpc-tests/prioritise_transaction.py | Python | mit | 5,960 |
#!/usr/bin/env python3
# Copyright (c) 2014-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Helpful routines for regression testing."""
from base64 import b64encode
from binascii import unhexlify
from decimal import Decimal, ROUND_DOWN
from subprocess import CalledProcessError
import inspect
import json
import logging
import os
import random
import re
import time
from . import coverage
from .authproxy import AuthServiceProxy, JSONRPCException
from io import BytesIO
logger = logging.getLogger("TestFramework.utils")
# Assert functions
##################
def assert_approx(v, vexp, vspan=0.00001):
"""Assert that `v` is within `vspan` of `vexp`"""
if v < vexp - vspan:
raise AssertionError("%s < [%s..%s]" % (str(v), str(vexp - vspan), str(vexp + vspan)))
if v > vexp + vspan:
raise AssertionError("%s > [%s..%s]" % (str(v), str(vexp - vspan), str(vexp + vspan)))
def assert_fee_amount(fee, tx_size, fee_per_kB):
"""Assert the fee was in range"""
target_fee = round(tx_size * fee_per_kB / 1000, 8)
if fee < target_fee:
raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)" % (str(fee), str(target_fee)))
# allow the wallet's estimation to be at most 2 bytes off
if fee > (tx_size + 2) * fee_per_kB / 1000:
raise AssertionError("Fee of %s BTC too high! (Should be %s BTC)" % (str(fee), str(target_fee)))
def assert_equal(thing1, thing2, *args):
if thing1 != thing2 or any(thing1 != arg for arg in args):
raise AssertionError("not(%s)" % " == ".join(str(arg) for arg in (thing1, thing2) + args))
def assert_greater_than(thing1, thing2):
if thing1 <= thing2:
raise AssertionError("%s <= %s" % (str(thing1), str(thing2)))
def assert_greater_than_or_equal(thing1, thing2):
if thing1 < thing2:
raise AssertionError("%s < %s" % (str(thing1), str(thing2)))
def assert_raises(exc, fun, *args, **kwds):
assert_raises_message(exc, None, fun, *args, **kwds)
def assert_raises_message(exc, message, fun, *args, **kwds):
try:
fun(*args, **kwds)
except JSONRPCException:
raise AssertionError("Use assert_raises_rpc_error() to test RPC failures")
except exc as e:
if message is not None and message not in e.error['message']:
raise AssertionError(
"Expected substring not found in error message:\nsubstring: '{}'\nerror message: '{}'.".format(
message, e.error['message']))
except Exception as e:
raise AssertionError("Unexpected exception raised: " + type(e).__name__)
else:
raise AssertionError("No exception raised")
def assert_raises_process_error(returncode, output, fun, *args, **kwds):
"""Execute a process and asserts the process return code and output.
Calls function `fun` with arguments `args` and `kwds`. Catches a CalledProcessError
and verifies that the return code and output are as expected. Throws AssertionError if
no CalledProcessError was raised or if the return code and output are not as expected.
Args:
returncode (int): the process return code.
output (string): [a substring of] the process output.
fun (function): the function to call. This should execute a process.
args*: positional arguments for the function.
kwds**: named arguments for the function.
"""
try:
fun(*args, **kwds)
except CalledProcessError as e:
if returncode != e.returncode:
raise AssertionError("Unexpected returncode %i" % e.returncode)
if output not in e.output:
raise AssertionError("Expected substring not found:" + e.output)
else:
raise AssertionError("No exception raised")
def assert_raises_rpc_error(code, message, fun, *args, **kwds):
"""Run an RPC and verify that a specific JSONRPC exception code and message is raised.
Calls function `fun` with arguments `args` and `kwds`. Catches a JSONRPCException
and verifies that the error code and message are as expected. Throws AssertionError if
no JSONRPCException was raised or if the error code/message are not as expected.
Args:
code (int), optional: the error code returned by the RPC call (defined
in src/rpc/protocol.h). Set to None if checking the error code is not required.
message (string), optional: [a substring of] the error string returned by the
RPC call. Set to None if checking the error string is not required.
fun (function): the function to call. This should be the name of an RPC.
args*: positional arguments for the function.
kwds**: named arguments for the function.
"""
assert try_rpc(code, message, fun, *args, **kwds), "No exception raised"
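# Illustrative sketch (not part of the original framework): asserting that an
# RPC call fails with a specific JSONRPC error. The error code and message
# shown are an assumption about the getblockhash RPC, used only as an example.
def _example_assert_rpc_error(node):
    assert_raises_rpc_error(-8, "Block height out of range",
                            node.getblockhash, 2 ** 31)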
def try_rpc(code, message, fun, *args, **kwds):
"""Tries to run an rpc command.
Test against error code and message if the rpc fails.
Returns whether a JSONRPCException was raised."""
try:
fun(*args, **kwds)
except JSONRPCException as e:
# JSONRPCException was thrown as expected. Check the code and message values are correct.
if (code is not None) and (code != e.error["code"]):
raise AssertionError("Unexpected JSONRPC error code %i" % e.error["code"])
if (message is not None) and (message not in e.error['message']):
raise AssertionError(
"Expected substring not found in error message:\nsubstring: '{}'\nerror message: '{}'.".format(
message, e.error['message']))
return True
except Exception as e:
raise AssertionError("Unexpected exception raised: " + type(e).__name__)
else:
return False
def assert_is_hex_string(string):
try:
int(string, 16)
except Exception as e:
raise AssertionError(
"Couldn't interpret %r as hexadecimal; raised: %s" % (string, e))
def assert_is_hash_string(string, length=64):
if not isinstance(string, str):
raise AssertionError("Expected a string, got type %r" % type(string))
elif length and len(string) != length:
raise AssertionError(
"String of length %d expected; got %d" % (length, len(string)))
elif not re.match('[abcdef0-9]+$', string):
raise AssertionError(
"String %r contains invalid characters for a hash." % string)
def assert_array_result(object_array, to_match, expected, should_not_find=False):
"""
Pass in array of JSON objects, a dictionary with key/value pairs
to match against, and another dictionary with expected key/value
pairs.
If the should_not_find flag is true, to_match should not be found
in object_array
"""
if should_not_find:
assert_equal(expected, {})
num_matched = 0
for item in object_array:
all_match = True
for key, value in to_match.items():
if item[key] != value:
all_match = False
if not all_match:
continue
elif should_not_find:
num_matched = num_matched + 1
for key, value in expected.items():
if item[key] != value:
raise AssertionError("%s : expected %s=%s" % (str(item), str(key), str(value)))
num_matched = num_matched + 1
if num_matched == 0 and not should_not_find:
raise AssertionError("No objects matched %s" % (str(to_match)))
if num_matched > 0 and should_not_find:
raise AssertionError("Objects were found %s" % (str(to_match)))
# Utility functions
###################
def check_json_precision():
"""Make sure json library being used does not lose precision converting BTC values"""
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n))) * 1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def count_bytes(hex_string):
return len(bytearray.fromhex(hex_string))
def hex_str_to_bytes(hex_str):
return unhexlify(hex_str.encode('ascii'))
def str_to_b64str(string):
return b64encode(string.encode('utf-8')).decode('ascii')
def satoshi_round(amount):
return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=None):
if attempts == float('inf') and timeout == float('inf'):
timeout = 60
attempt = 0
time_end = time.time() + timeout
while attempt < attempts and time.time() < time_end:
if lock:
with lock:
if predicate():
return
else:
if predicate():
return
attempt += 1
time.sleep(0.05)
# Print the cause of the timeout
    predicate_source = "'''\n" + inspect.getsource(predicate) + "'''"
logger.error("wait_until() failed. Predicate: {}".format(predicate_source))
if attempt >= attempts:
raise AssertionError("Predicate {} not true after {} attempts".format(predicate_source, attempts))
elif time.time() >= time_end:
raise AssertionError("Predicate {} not true after {} seconds".format(predicate_source, timeout))
raise RuntimeError('Unreachable')
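# Illustrative sketch (not part of the original framework): polling a node with
# wait_until(). The target mempool size of 5 is an arbitrary placeholder.
def _example_wait_for_mempool_size(node, size=5):
    wait_until(lambda: node.getmempoolinfo()['size'] == size, timeout=10)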
# RPC/P2P connection constants and functions
############################################
# The maximum number of nodes a single test can spawn
MAX_NODES = 12
# Don't assign rpc or p2p ports lower than this
PORT_MIN = int(os.getenv('TEST_RUNNER_PORT_MIN', default=11000))
# The number of ports to "reserve" for p2p and rpc, each
PORT_RANGE = 5000
class PortSeed:
# Must be initialized with a unique integer for each process
n = None
def get_rpc_proxy(url, node_number, timeout=None, coveragedir=None):
"""
Args:
url (str): URL of the RPC server to call
node_number (int): the node number (or id) that this calls to
Kwargs:
timeout (int): HTTP timeout in seconds
Returns:
AuthServiceProxy. convenience object for making RPC calls.
"""
proxy_kwargs = {}
if timeout is not None:
proxy_kwargs['timeout'] = timeout
proxy = AuthServiceProxy(url, **proxy_kwargs)
proxy.url = url # store URL on proxy for info
coverage_logfile = coverage.get_filename(
coveragedir, node_number) if coveragedir else None
return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile)
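# Illustrative sketch (not part of the original framework): creating a proxy
# from a hypothetical URL and issuing a call through the coverage wrapper.
def _example_get_rpc_proxy():
    proxy = get_rpc_proxy("http://user:pass@127.0.0.1:18443", node_number=0, timeout=30)
    return proxy.getblockcount()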
def p2p_port(n):
assert n <= MAX_NODES
return PORT_MIN + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
def rpc_port(n):
return PORT_MIN + PORT_RANGE + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
def rpc_url(datadir, i, chain, rpchost):
rpc_u, rpc_p = get_auth_cookie(datadir, chain)
host = '127.0.0.1'
port = rpc_port(i)
if rpchost:
parts = rpchost.split(':')
if len(parts) == 2:
host, port = parts
else:
host = rpchost
return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, host, int(port))
# Node functions
################
def initialize_datadir(dirname, n, chain):
datadir = get_datadir_path(dirname, n)
if not os.path.isdir(datadir):
os.makedirs(datadir)
# Translate chain name to config name
if chain == 'testnet3':
chain_name_conf_arg = 'testnet'
chain_name_conf_section = 'test'
else:
chain_name_conf_arg = chain
chain_name_conf_section = chain
with open(os.path.join(datadir, "bitcoin.conf"), 'w', encoding='utf8') as f:
f.write("{}=1\n".format(chain_name_conf_arg))
f.write("[{}]\n".format(chain_name_conf_section))
f.write("port=" + str(p2p_port(n)) + "\n")
f.write("rpcport=" + str(rpc_port(n)) + "\n")
f.write("server=1\n")
f.write("keypool=1\n")
f.write("discover=0\n")
f.write("dnsseed=0\n")
f.write("listenonion=0\n")
f.write("printtoconsole=0\n")
f.write("upnp=0\n")
os.makedirs(os.path.join(datadir, 'stderr'), exist_ok=True)
os.makedirs(os.path.join(datadir, 'stdout'), exist_ok=True)
return datadir
def get_datadir_path(dirname, n):
return os.path.join(dirname, "node" + str(n))
def append_config(datadir, options):
with open(os.path.join(datadir, "bitcoin.conf"), 'a', encoding='utf8') as f:
for option in options:
f.write(option + "\n")
def get_auth_cookie(datadir, chain):
user = None
password = None
if os.path.isfile(os.path.join(datadir, "bitcoin.conf")):
with open(os.path.join(datadir, "bitcoin.conf"), 'r', encoding='utf8') as f:
for line in f:
if line.startswith("rpcuser="):
assert user is None # Ensure that there is only one rpcuser line
user = line.split("=")[1].strip("\n")
if line.startswith("rpcpassword="):
assert password is None # Ensure that there is only one rpcpassword line
password = line.split("=")[1].strip("\n")
try:
with open(os.path.join(datadir, chain, ".cookie"), 'r', encoding="ascii") as f:
userpass = f.read()
split_userpass = userpass.split(':')
user = split_userpass[0]
password = split_userpass[1]
except OSError:
pass
if user is None or password is None:
raise ValueError("No RPC credentials")
return user, password
# If a cookie file exists in the given datadir, delete it.
def delete_cookie_file(datadir, chain):
if os.path.isfile(os.path.join(datadir, chain, ".cookie")):
logger.debug("Deleting leftover cookie file")
os.remove(os.path.join(datadir, chain, ".cookie"))
def softfork_active(node, key):
"""Return whether a softfork is active."""
return node.getblockchaininfo()['softforks'][key]['active']
def set_node_times(nodes, t):
for node in nodes:
node.setmocktime(t)
def disconnect_nodes(from_connection, node_num):
for peer_id in [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']]:
try:
from_connection.disconnectnode(nodeid=peer_id)
except JSONRPCException as e:
# If this node is disconnected between calculating the peer id
# and issuing the disconnect, don't worry about it.
# This avoids a race condition if we're mass-disconnecting peers.
if e.error['code'] != -29: # RPC_CLIENT_NODE_NOT_CONNECTED
raise
# wait to disconnect
wait_until(lambda: [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']] == [], timeout=5)
def connect_nodes(from_connection, node_num):
ip_port = "127.0.0.1:" + str(p2p_port(node_num))
from_connection.addnode(ip_port, "onetry")
# poll until version handshake complete to avoid race conditions
# with transaction relaying
wait_until(lambda: all(peer['version'] != 0 for peer in from_connection.getpeerinfo()))
def sync_blocks(rpc_connections, *, wait=1, timeout=60):
"""
Wait until everybody has the same tip.
    sync_blocks needs to be called with an rpc_connections set that has at least
one node already synced to the latest, stable tip, otherwise there's a
chance it might return before all nodes are stably synced.
"""
stop_time = time.time() + timeout
while time.time() <= stop_time:
best_hash = [x.getbestblockhash() for x in rpc_connections]
if best_hash.count(best_hash[0]) == len(rpc_connections):
return
time.sleep(wait)
raise AssertionError("Block sync timed out:{}".format("".join("\n {!r}".format(b) for b in best_hash)))
def sync_mempools(rpc_connections, *, wait=1, timeout=60, flush_scheduler=True):
"""
Wait until everybody has the same transactions in their memory
pools
"""
stop_time = time.time() + timeout
while time.time() <= stop_time:
pool = [set(r.getrawmempool()) for r in rpc_connections]
if pool.count(pool[0]) == len(rpc_connections):
if flush_scheduler:
for r in rpc_connections:
r.syncwithvalidationinterfacequeue()
return
time.sleep(wait)
raise AssertionError("Mempool sync timed out:{}".format("".join("\n {!r}".format(m) for m in pool)))
# Transaction/Block functions
#############################
def find_output(node, txid, amount, *, blockhash=None):
"""
Return index to output of txid with value amount
Raises exception if there is none.
"""
txdata = node.getrawtransaction(txid, 1, blockhash)
for i in range(len(txdata["vout"])):
if txdata["vout"][i]["value"] == amount:
return i
raise RuntimeError("find_output txid %s : %s not found" % (txid, str(amount)))
def gather_inputs(from_node, amount_needed, confirmations_required=1):
"""
Return a random set of unspent txouts that are enough to pay amount_needed
"""
assert confirmations_required >= 0
utxo = from_node.listunspent(confirmations_required)
random.shuffle(utxo)
inputs = []
total_in = Decimal("0.00000000")
while total_in < amount_needed and len(utxo) > 0:
t = utxo.pop()
total_in += t["amount"]
inputs.append({"txid": t["txid"], "vout": t["vout"], "address": t["address"]})
if total_in < amount_needed:
raise RuntimeError("Insufficient funds: need %d, have %d" % (amount_needed, total_in))
return (total_in, inputs)
def make_change(from_node, amount_in, amount_out, fee):
"""
Create change output(s), return them
"""
outputs = {}
amount = amount_out + fee
change = amount_in - amount
if change > amount * 2:
# Create an extra change output to break up big inputs
change_address = from_node.getnewaddress()
# Split change in two, being careful of rounding:
outputs[change_address] = Decimal(change / 2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
change = amount_in - amount - outputs[change_address]
if change > 0:
outputs[from_node.getnewaddress()] = change
return outputs
def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
"""
Create a random transaction.
Returns (txid, hex-encoded-transaction-data, fee)
"""
from_node = random.choice(nodes)
to_node = random.choice(nodes)
fee = min_fee + fee_increment * random.randint(0, fee_variants)
(total_in, inputs) = gather_inputs(from_node, amount + fee)
outputs = make_change(from_node, total_in, amount, fee)
outputs[to_node.getnewaddress()] = float(amount)
rawtx = from_node.createrawtransaction(inputs, outputs)
signresult = from_node.signrawtransactionwithwallet(rawtx)
txid = from_node.sendrawtransaction(signresult["hex"], 0)
return (txid, signresult["hex"], fee)
# Helper to create at least "count" utxos
# Pass in a fee that is sufficient for relay and mining new transactions.
def create_confirmed_utxos(fee, node, count):
to_generate = int(0.5 * count) + 101
while to_generate > 0:
node.generate(min(25, to_generate))
to_generate -= 25
utxos = node.listunspent()
iterations = count - len(utxos)
addr1 = node.getnewaddress()
addr2 = node.getnewaddress()
if iterations <= 0:
return utxos
for i in range(iterations):
t = utxos.pop()
inputs = []
inputs.append({"txid": t["txid"], "vout": t["vout"]})
outputs = {}
send_value = t['amount'] - fee
outputs[addr1] = satoshi_round(send_value / 2)
outputs[addr2] = satoshi_round(send_value / 2)
raw_tx = node.createrawtransaction(inputs, outputs)
signed_tx = node.signrawtransactionwithwallet(raw_tx)["hex"]
node.sendrawtransaction(signed_tx)
while (node.getmempoolinfo()['size'] > 0):
node.generate(1)
utxos = node.listunspent()
assert len(utxos) >= count
return utxos
# Create large OP_RETURN txouts that can be appended to a transaction
# to make it large (helper for constructing large transactions).
def gen_return_txouts():
# Some pre-processing to create a bunch of OP_RETURN txouts to insert into transactions we create
# So we have big transactions (and therefore can't fit very many into each block)
# create one script_pubkey
script_pubkey = "6a4d0200" # OP_RETURN OP_PUSH2 512 bytes
for i in range(512):
script_pubkey = script_pubkey + "01"
# concatenate 128 txouts of above script_pubkey which we'll insert before the txout for change
txouts = []
from .messages import CTxOut
txout = CTxOut()
txout.nValue = 0
txout.scriptPubKey = hex_str_to_bytes(script_pubkey)
for k in range(128):
txouts.append(txout)
return txouts
# Create a spend of each passed-in utxo, splicing in "txouts" to each raw
# transaction to make it large. See gen_return_txouts() above.
def create_lots_of_big_transactions(node, txouts, utxos, num, fee):
addr = node.getnewaddress()
txids = []
from .messages import CTransaction
for _ in range(num):
t = utxos.pop()
inputs = [{"txid": t["txid"], "vout": t["vout"]}]
outputs = {}
change = t['amount'] - fee
outputs[addr] = satoshi_round(change)
rawtx = node.createrawtransaction(inputs, outputs)
tx = CTransaction()
tx.deserialize(BytesIO(hex_str_to_bytes(rawtx)))
for txout in txouts:
tx.vout.append(txout)
newtx = tx.serialize().hex()
signresult = node.signrawtransactionwithwallet(newtx, None, "NONE")
txid = node.sendrawtransaction(signresult["hex"], 0)
txids.append(txid)
return txids
def mine_large_block(node, utxos=None):
# generate a 66k transaction,
    # and 14 of them are close to the 1MB block limit
num = 14
txouts = gen_return_txouts()
utxos = utxos if utxos is not None else []
if len(utxos) < num:
utxos.clear()
utxos.extend(node.listunspent())
fee = 100 * node.getnetworkinfo()["relayfee"]
create_lots_of_big_transactions(node, txouts, utxos, num, fee=fee)
node.generate(1)
def find_vout_for_address(node, txid, addr):
"""
Locate the vout index of the given transaction sending to the
given address. Raises runtime error exception if not found.
"""
tx = node.getrawtransaction(txid, True)
for i in range(len(tx["vout"])):
if any([addr == a for a in tx["vout"][i]["scriptPubKey"]["addresses"]]):
return i
raise RuntimeError("Vout not found for address: txid=%s, addr=%s" % (txid, addr))
| Emercoin/emercoin | test/functional/test_framework/util.py | Python | gpl-3.0 | 22,858 |
import timeit
import gevent
import math
from gevent.queue import Queue
from detail import AnimeDetail
from gen_id_file import IDS_FILENAME
from gevent import monkey; monkey.patch_socket()
DETAILS_FILENAME = 'animes.xml'
BATCH_SIZE = 10
WORKER_NUM = 8
MAXIMUM_WORKER_NUM = 8
NAMES_FOR_WORKER = ['Joe', 'Adam', 'Matt', 'Bob', 'Sam', 'Mary', 'Jack', 'Peter']
FILE_SUFFIX = '_batch.xml'
# stores tuple like (start, end)
tasks = Queue()
def worker(name, work):
with open(name + FILE_SUFFIX, 'w') as f:
f.write('<root>')
gevent.sleep(0)
ad = AnimeDetail()
while not tasks.empty():
task = tasks.get()
request = '/'.join([id.strip() for id in work[task[0]:task[1]]])
print name + ' woke up doing work.. ' + request
batch_data = ad.fetch_details(request)
f.write(batch_data)
f.write('</root>')
def boss(name, work):
print name + ' woke up...'
count = 0
for i in range(int(math.ceil(float(len(work)) / BATCH_SIZE))):
start = i * BATCH_SIZE
end = min((i + 1) * BATCH_SIZE, len(work))
tasks.put((start, end))
count += 1
print 'Work has been divided into ' + str(count) + ' batches.'
def process(work, num_workers):
    # make sure worker num doesn't exceed the limitation
num_workers = min(num_workers, MAXIMUM_WORKER_NUM)
# boss starts
gevent.spawn(boss, 'Terence', work).join()
# workers start
gevent.joinall([gevent.spawn(worker, NAMES_FOR_WORKER[i], work) for i in range(num_workers)])
if __name__ == '__main__':
# put all details into string
ad = AnimeDetail()
work = []
try:
with open(IDS_FILENAME, 'r') as idsf:
work = [id for id in idsf]
except IOError as e:
print 'Please run gen_id_file.py first.'
start = timeit.default_timer()
process(work, WORKER_NUM)
stop = timeit.default_timer()
print 'It took ' + str(stop - start) + 's to run ' + str(len(work)) + ' queries.'
| hanjoes/anivis | api/gen_detail_file.py | Python | mit | 2,013 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# @Author: Mani
# @Date: 2017-08-28 19:20:58
# @Last Modified time: 2017-09-27 12:54:55
#
##############################################
import mani_config
import docker_config
import emby_config
SERVICE_CONFIG_OPTIONS = {
"embyserver" : {
"module":"emby_config"
}
}
class Controller(object):
def __init__(self, con=None, cache=None, debugging=None):
self.con = con
self.cache = cache
self.debugging = debugging
self.services = mani_config.Services()
self.emby_handle = emby_config.EmbyHandle(con, cache, debugging);
self.doc = docker_config.DockerHandler(self.con,self.cache,self.debugging)
def start_stop_handle(self,meta,start=False,stop=False):
if meta[1]["type"] == "docker":
if start:
return self.doc.start_container(meta[0])
elif stop:
return self.doc.stop_container(meta[0])
def start_service(self,command):
res = False
meta = self.services.find_service_meta(command)
if meta: # We know how to handle this command
res = self.start_stop_handle(meta, start=True)
return res
def stop_service(self,command):
res = False
meta = self.services.find_service_meta(command)
if meta:
res = self.start_stop_handle(meta, stop=True)
return res
def service_status(self,command):
meta = self.services.find_service_meta(command)
if meta:
if meta[1]["type"] == "docker":
data = self.doc.find_installed_container(meta[0])
if self.cache:
self.cache.docker_start_progress(data)
return True
return False
def configure(self,command):
result = {"error":True}
# Run the function to handle this command
_object = SERVICE_CONFIG_OPTIONS[command["service"]]["module"]
return getattr(self, _object)(command=command);
def emby_config(self,command):
result = {"error":True}
res = getattr(self.emby_handle, command["function"])(params=command["params"]);
try:
if res:
result["error"] = False
result["result"] = res
except:
result["result"] = "SOMETHING BAD HAPPENED"
return result
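    # Illustrative call sketch (assumption based on the dispatch tables above;
    # the function name and params are hypothetical placeholders):
    #
    #   controller = Controller(con, cache, debugging)
    #   controller.configure({"service": "embyserver",
    #                         "function": "get_sessions",
    #                         "params": {}})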
| maninator/manimediaserver | setup/lib/mani_controller.py | Python | gpl-3.0 | 2,431 |
# Copyright (C) 2013 University of Southampton
# Copyright (C) 2013 Daniel Alexander Smith
# Copyright (C) 2013 Max Van Kleek
# Copyright (C) 2013 Nigel R. Shadbolt
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3,
# as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import json
from indx import UNAUTH_USERNAME
from twisted.internet.defer import Deferred
from hashing_passwords import make_hash, check_hash
from twisted.python.failure import Failure
from indx.crypto import generate_rsa_keypair, load_key
class IndxUser:
""" INDX User handler. """
RSA_KEYSIZE = 3072
def __init__(self, db, username):
logging.debug("IndxUser, username: {0}".format(username))
self.db = db
self.username = username
def get_keys(self):
""" Get the user's key pair from the database. """
logging.debug("IndxUser, get_keys for user {0}".format(self.username))
return_d = Deferred()
# XXX TODO FIXME decrypt the private key
def connected_cb(conn):
logging.debug("IndxUser, get_keys, connected_cb")
check_q = "SELECT public_key_rsa, private_key_rsa_env FROM tbl_users WHERE username = %s"
check_p = [self.username]
def check_cb(rows, *args, **kw):
logging.debug("IndxUser, get_keys, connected_cb, check_cb")
if len(rows) < 1:
return_d.callback(None)
return
try:
return_d.callback({"public": load_key(rows[0][0]), "private": load_key(rows[0][1])})
except Exception as e:
logging.error("IndxUser, get_keys, Exception loading keys from database: {0}".format(e))
return_d.errback(Failure(e))
conn.runQuery(check_q, check_p).addCallbacks(check_cb, return_d.errback)
self.db.connect_indx_db().addCallbacks(connected_cb, return_d.errback)
return return_d
def generate_encryption_keys(self, overwrite = False):
""" Generate and save encryption keys for this user.
        overwrite -- Force overwriting the keys if they already exist. Default is False, which means that the function will check first and do nothing if the user already has encryption keys.
"""
logging.debug("IndxUser, generate_encryption_keys for user {0}".format(self.username))
return_d = Deferred()
# TODO FIXME XXX do this in a runInteraction transaction
def connected_cb(conn):
logging.debug("IndxUser, generate_encryption_keys, connected_cb")
check_q = "SELECT EXISTS(SELECT * FROM tbl_users WHERE username = %s AND public_key_rsa IS NOT NULL AND private_key_rsa_env IS NOT NULL)"
check_p = [self.username]
def check_cb(rows, *args, **kw):
logging.debug("IndxUser, generate_encryption_keys, connected_cb, check_cb")
exists = rows[0][0]
if exists and not overwrite:
return_d.callback(False)
return # already has keys, and we are not overwriting
keys = generate_rsa_keypair(self.RSA_KEYSIZE)
public_key = keys['public']
private_key = keys['private'] # TODO FIXME XXX encrypt private key with password / client cert (decide mechanism)
insert_q = "UPDATE tbl_users SET private_key_rsa_env = %s, public_key_rsa = %s WHERE username = %s"
insert_p = [private_key, public_key, self.username]
def insert_cb(empty):
logging.debug("IndxUser, generate_encryption_keys, insert_cb")
return_d.callback(True)
conn.runOperation(insert_q, insert_p).addCallbacks(insert_cb, return_d.errback)
conn.runQuery(check_q, check_p).addCallbacks(check_cb, return_d.errback)
self.db.connect_indx_db().addCallbacks(connected_cb, return_d.errback)
return return_d
def set_password(self, password):
""" Set the user's password. """
logging.debug("IndxUser, set_password for user {0}".format(self.username))
return_d = Deferred()
pw_hash = make_hash(password)
def connected_cb(conn):
logging.debug("IndxUser, set_password, connected_cb")
insert_q = "UPDATE tbl_users SET password_hash = %s WHERE username = %s"
insert_p = [pw_hash, self.username]
def inserted_cb(empty):
logging.debug("IndxUser, set_password, connected_cb, inserted_cb")
return_d.callback(True)
return
conn.runOperation(insert_q, insert_p).addCallbacks(inserted_cb, return_d.errback)
self.db.connect_indx_db().addCallbacks(connected_cb, return_d.errback)
return return_d
def get_user_info(self, decode_json = False):
""" Get user's info (type, metadata) in one call, optionally decode user_metadata JSON string."""
logging.debug("IndxUser, get_user_info for username {0}".format(self.username))
return_d = Deferred()
def connected_cb(conn):
logging.debug("IndxUser, get_user_info, connected_cb")
query = "SELECT username_type, user_metadata_json, username, root_box FROM tbl_users WHERE username = %s"
params = [self.username]
def query_cb(conn, rows):
logging.debug("IndxUser, get_user_info, connected_cb, query_cb, rows: {0}".format(rows))
if len(rows) < 1:
return_d.callback(None) # no user info available
else:
typ, user_metadata, username, root_box = rows[0]
user_info = {"type": typ, "user_metadata": user_metadata or '{}', "username": username, "root_box": root_box}
if decode_json:
user_info['user_metadata'] = json.loads(user_info['user_metadata'])
return_d.callback(user_info)
conn.runQuery(query, params).addCallbacks(lambda rows, *args, **kw: query_cb(conn, rows), return_d.errback)
self.db.connect_indx_db().addCallbacks(connected_cb, return_d.errback)
return return_d
def get_acls(self, database_name):
""" Get all ACLs for the specified database. """
logging.debug("IndxUser, get_acls database_name {0}".format(database_name))
return_d = Deferred()
def connected_cb(conn):
logging.debug("IndxUser, get_acls, connected_cb")
query = "SELECT acl_read, acl_write, acl_owner, acl_control, tbl_users.username FROM tbl_acl JOIN tbl_users ON (tbl_users.id_user = tbl_acl.user_id) WHERE database_name = %s"
params = [database_name]
results = []
def query_cb(conn, rows):
logging.debug("IndxUser, get_acl, connected_cb, query_cb, rows: {0}".format(rows))
for row in rows:
acl = {
"database": database_name,
"username": row[4],
"acl": {"read": row[0], "write": row[1], "owner": row[2], "control": row[3]}
}
results.append(acl)
return_d.callback(results)
conn.runQuery(query, params).addCallbacks(lambda rows, *args, **kwargs: query_cb(conn, rows), return_d.errback)
self.db.connect_indx_db().addCallbacks(connected_cb, return_d.errback)
return return_d
def get_acl(self, database_name):
""" Get the user's ACL permissions for specified database. """
logging.debug("IndxUser, get_acl database_name {0} for user {1}".format(database_name, self.username))
return_d = Deferred()
def connected_cb(conn):
logging.debug("IndxUser, get_acl, connected_cb")
query = "SELECT acl_read, acl_write, acl_owner, acl_control FROM tbl_acl JOIN tbl_users ON (tbl_users.id_user = tbl_acl.user_id) WHERE database_name = %s AND tbl_users.username = %s"
params = [database_name, self.username]
def query_cb(conn, rows):
logging.debug("IndxUser, get_acl, connected_cb, query_cb, rows: {0}".format(rows))
if len(rows) < 1:
permissions = {"read": False, "write": False, "owner": False, "control": False} # no acl available, all permissions set to False
else:
permissions = {"read": rows[0][0], "write": rows[0][1], "owner": rows[0][2], "control": rows[0][3]}
# make an ACL object
acl = {
"database": database_name,
"username": self.username,
"acl": permissions,
}
return_d.callback(acl)
conn.runQuery(query, params).addCallbacks(lambda rows, *args, **kwargs: query_cb(conn, rows), return_d.errback)
self.db.connect_indx_db().addCallbacks(connected_cb, return_d.errback)
return return_d
def set_acl(self, database_name, target_username, acl):
""" Sets an ACL by this user, for a different target user.
RULES (these are in box.py and in user.py)
The logged in user sets an ACL for a different, target user.
The logged in user must have a token, and the box of the token is the box that will have the ACL changed/set.
If there is already an ACL for the target user, it will be replaced.
The logged in user must have "control" permissions on the box.
The logged in user can give/take read, write or control permissions. They cannot change "owner" permissions.
        If the user has owner permissions, it doesn't matter if they don't have "control" permissions, they can change anything.
Only the user that created the box has owner permissions.
"""
logging.debug("IndxUser, set_acl database_name {0} for target_username {1} for acl {2}".format(database_name, target_username, acl))
return_d = Deferred()
# verify that 'acl' is valid, throw Failure to errback if not
try:
# check that the properties all exist, and all values are booleans
assert(type(acl['read']) == type(True))
assert(type(acl['write']) == type(True))
assert(type(acl['control']) == type(True))
except Exception as e:
logging.error("IndxUser, set_acl, error asserting types in 'acl', object is invalid: {0}, error: {1}".format(acl, e))
return_d.errback(Failure(e))
return return_d # immediately return
# TODO FIXME XXX do this in a runInteraction transaction !
def connected_cb(conn):
logging.debug("IndxUser, set_acl, connected_cb")
# verify the target user exists
user_check_q = "SELECT EXISTS(SELECT * FROM tbl_users WHERE username = %s)"
user_check_p = [target_username]
def user_check_cb(rows, *args, **kw):
logging.debug("IndxUser, set_acl, connected_cb, user_check_cb")
present = rows[0][0] # EXISTS returns true/false
def post_user_cb(empty):
# verify logged in user has 'control' ACL permissions on that database
# or verify that have owner permissions (either mean they can change something)
acl_check_q = "SELECT acl_control, acl_owner FROM tbl_acl WHERE user_id = (SELECT id_user FROM tbl_users WHERE username = %s) AND DATABASE_NAME = %s"
acl_check_p = [self.username, database_name]
def acl_check_cb(rows, *args, **kw):
logging.debug("IndxUser, set_acl, connected_cb, acl_check_cb")
if len(rows) < 1:
e = Exception("User '{0}' does not have permission to make this ACL change to database '{1}'.".format(self.username, database_name))
failure = Failure(e)
return_d.errback(failure)
return
existing_acl_control = rows[0][0]
existing_acl_owner = rows[0][1]
# check that logged in user is control or owner
if not (existing_acl_control or existing_acl_owner):
e = Exception("User '{0}' does not have permission to make this ACL change to database '{1}'.".format(self.username, database_name))
failure = Failure(e)
return_d.errback(failure)
return
# read the existing ACL - read the owner value and keep it the same (prevent non-owner users from de-ownering the original owner)
acl2_check_q = "SELECT acl_owner FROM tbl_acl WHERE user_id = (SELECT id_user FROM tbl_users WHERE username = %s) AND DATABASE_NAME = %s"
acl2_check_p = [target_username, database_name]
def acl2_check_cb(rows, *args, **kw):
logging.debug("IndxUser, set_acl, connected_cb, acl2_check_cb")
if len(rows) < 1:
current_owner_value = False
else:
current_owner_value = rows[0][0]
# delete any pre-existing acl for this database and target user
del_query = "DELETE FROM tbl_acl WHERE database_name = %s AND user_id = (SELECT id_user FROM tbl_users WHERE username = %s)"
del_params = [database_name, target_username]
def del_query_cb(empty):
logging.debug("IndxUser, set_acl, connected_cb, del_query_cb")
def acl_done_cb(empty):
logging.debug("IndxUser, set_acl, connected_cb, acl_done_cb")
# only transfer the 'rw' user if the user has been given the 'write' permission
# FIXME remove the 'rw' user row if their permission is revoked ?
user_types = ['ro']
if acl['write']:
user_types.append('rw')
def transfer_cb(empty):
logging.debug("IndxUser, set_acl, connected_cb, transfer_cb")
return_d.callback(True)
self.db.transfer_keychain_users(database_name, self.username, target_username, user_types).addCallbacks(transfer_cb, return_d.errback)
# create a new ACL
conn.runOperation("INSERT INTO tbl_acl (database_name, user_id, acl_read, acl_write, acl_owner, acl_control) VALUES (%s, (SELECT id_user FROM tbl_users WHERE username = %s), %s, %s, %s, %s)", [database_name, target_username, acl['read'], acl['write'], current_owner_value, acl['control']]).addCallbacks(acl_done_cb, return_d.errback)
conn.runOperation(del_query, del_params).addCallbacks(del_query_cb, return_d.errback)
conn.runQuery(acl2_check_q, acl2_check_p).addCallbacks(acl2_check_cb, return_d.errback)
conn.runQuery(acl_check_q, acl_check_p).addCallbacks(acl_check_cb, return_d.errback)
# check if username is present in the db
# if it isn't, but is the unauth user, then create the unauth user in the db
if not present and target_username != UNAUTH_USERNAME:
e = Exception("User with username '{0}' does not exist.".format(target_username))
failure = Failure(e)
return_d.errback(failure)
return
elif not present:
# create a new db user for the unauth user
self.db.create_user(UNAUTH_USERNAME, "", "internal").addCallbacks(post_user_cb, return_d.errback)
else:
# continue without creating a user
post_user_cb(None)
conn.runQuery(user_check_q, user_check_p).addCallbacks(user_check_cb, return_d.errback)
self.db.connect_indx_db().addCallbacks(connected_cb, return_d.errback)
return return_d
| sociam/indx | lib/indx/user.py | Python | agpl-3.0 | 17,310 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from unittest import mock
from unittest.mock import call
from airflow.providers.google.cloud.transfers.cassandra_to_gcs import CassandraToGCSOperator
TMP_FILE_NAME = "temp-file"
class TestCassandraToGCS(unittest.TestCase):
@mock.patch("airflow.providers.google.cloud.transfers.cassandra_to_gcs.NamedTemporaryFile")
@mock.patch("airflow.providers.google.cloud.transfers.cassandra_to_gcs.GCSHook.upload")
@mock.patch("airflow.providers.google.cloud.transfers.cassandra_to_gcs.CassandraHook")
def test_execute(self, mock_hook, mock_upload, mock_tempfile):
test_bucket = "test-bucket"
schema = "schema.json"
filename = "data.json"
gzip = True
mock_tempfile.return_value.name = TMP_FILE_NAME
operator = CassandraToGCSOperator(
task_id="test-cas-to-gcs",
cql="select * from keyspace1.table1",
bucket=test_bucket,
filename=filename,
schema_filename=schema,
gzip=gzip,
)
operator.execute(None)
mock_hook.return_value.get_conn.assert_called_once_with()
call_schema = call(
bucket_name=test_bucket,
object_name=schema,
filename=TMP_FILE_NAME,
mime_type="application/json",
gzip=gzip,
)
call_data = call(
bucket_name=test_bucket,
object_name=filename,
filename=TMP_FILE_NAME,
mime_type="application/json",
gzip=gzip,
)
mock_upload.assert_has_calls([call_schema, call_data], any_order=True)
def test_convert_value(self):
op = CassandraToGCSOperator
assert op.convert_value(None) is None
assert op.convert_value(1) == 1
assert op.convert_value(1.0) == 1.0
assert op.convert_value("text") == "text"
assert op.convert_value(True) is True
assert op.convert_value({"a": "b"}) == {"a": "b"}
from datetime import datetime
now = datetime.now()
assert op.convert_value(now) == str(now)
from cassandra.util import Date
date_str = "2018-01-01"
date = Date(date_str)
assert op.convert_value(date) == str(date_str)
import uuid
from base64 import b64encode
test_uuid = uuid.uuid4()
encoded_uuid = b64encode(test_uuid.bytes).decode("ascii")
assert op.convert_value(test_uuid) == encoded_uuid
byte_str = b"abc"
encoded_b = b64encode(byte_str).decode("ascii")
assert op.convert_value(byte_str) == encoded_b
from decimal import Decimal
decimal = Decimal(1.0)
assert op.convert_value(decimal) == float(decimal)
from cassandra.util import Time
time = Time(0)
assert op.convert_value(time) == "00:00:00"
date_str_lst = ["2018-01-01", "2018-01-02", "2018-01-03"]
date_lst = [Date(d) for d in date_str_lst]
assert op.convert_value(date_lst) == date_str_lst
date_tpl = tuple(date_lst)
assert op.convert_value(date_tpl) == {
"field_0": "2018-01-01",
"field_1": "2018-01-02",
"field_2": "2018-01-03",
}
| nathanielvarona/airflow | tests/providers/google/cloud/transfers/test_cassandra_to_gcs.py | Python | apache-2.0 | 4,021 |
# -*- coding: utf-8 -*-
"""Installer for this package."""
from setuptools import setup
from setuptools import find_packages
import os
# shamelessly stolen from Hexagon IT guys
def read(*rnames):
return open(os.path.join(os.path.dirname(__file__), *rnames)).read()
version = '0.1dev'
setup(name='pipaalarm',
version=version,
description="Send sms when specified devices get out of range.",
long_description=read('README.rst') +
read('LICENSE.txt'),
classifiers=[
"Programming Language :: Python",
],
keywords='web.py Python',
author='Jaka Hudoklin',
author_email='[email protected]',
url='http://www.github.com/offlinehacker/pipaalarm',
license='BSD',
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
zip_safe=False,
dependency_links =
["https://github.com/offlinehacker/scapy/tarball/master#egg=scapy-2.1.1-dev",
"https://github.com/offlinehacker/pysms/tarball/master#egg=pysms-0.2-dev"],
install_requires=[
'six',
'setuptools',
'configparser', # Config file parsing
'gevent', # Event loop routine
'web.py', # Web server
'scapy', # Arping
'pysms'
],
tests_require = [
"mock"
],
entry_points="""
[console_scripts]
pipaalarm = pipaalarm.pipaalarm:main""",
test_suite="pipaalarm.tests",
)
| kiberpipa/pipaalarm | setup.py | Python | bsd-3-clause | 1,507 |
from django.conf import settings
from .base import BaseStaticSiteRenderer
from .disk import DiskStaticSiteRenderer
from .appengine import GAEStaticSiteRenderer
from .s3 import S3StaticSiteRenderer
from importlib import import_module
__all__ = ('BaseStaticSiteRenderer', 'DiskStaticSiteRenderer',
'S3StaticSiteRenderer', 'GAEStaticSiteRenderer',
'StaticSiteRenderer')
def get_cls(renderer_name):
mod_path, cls_name = renderer_name.rsplit('.', 1)
mod = import_module(mod_path)
return getattr(mod, cls_name)
DEFAULT_RENDERER = 'medusa.renderers.BaseStaticSiteRenderer'
# Define the default "django_medusa.renderers.StaticSiteRenderer" class as
# whatever class we have chosen in settings (defaulting to Base which will
# throw NotImplementedErrors when attempting to render).
StaticSiteRenderer = get_cls(getattr(settings,
'MEDUSA_RENDERER_CLASS', DEFAULT_RENDERER
))
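# Illustrative configuration sketch (an assumption, not taken from this module):
# the renderer is normally chosen in the Django settings module, e.g.
#
#   MEDUSA_RENDERER_CLASS = "django_medusa.renderers.DiskStaticSiteRenderer"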
| botify-labs/django-medusa | django_medusa/renderers/__init__.py | Python | mit | 911 |
def extractIsogashiineetoCom(item):
'''
Parser for 'isogashiineeto.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
| fake-name/ReadableWebProxy | WebMirror/management/rss_parser_funcs/feed_parse_extractIsogashiineetoCom.py | Python | bsd-3-clause | 549 |
#
# Copyright (c) 2009 Canonical
#
# Written by Gustavo Niemeyer <[email protected]>
#
# This file is part of Smart Package Manager.
#
# Smart Package Manager is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# Smart Package Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Smart Package Manager; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
from ConfigParser import ConfigParser
import os
from smart import sysconf
CLIENT_CONF_PATH = "/etc/landscape/client.conf"
def run():
if (sysconf.get("use-landscape-proxies", False) and
os.path.isfile(CLIENT_CONF_PATH)):
parser = ConfigParser()
parser.read(CLIENT_CONF_PATH)
for type in "http", "https", "ftp", "no":
option = "%s_proxy" % type
if parser.has_option("client", option) and option not in os.environ:
setting = parser.get("client", option)
sysconf.set(option.replace("_", "-"), setting, weak=True)
run()
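# Illustrative sketch of the /etc/landscape/client.conf layout this plugin
# reads (hostnames and ports are hypothetical placeholders):
#
#   [client]
#   http_proxy = http://proxy.example.com:3128
#   ftp_proxy = http://proxy.example.com:3128
#   no_proxy = localhost,127.0.0.1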
| blackPantherOS/packagemanagement | smartpm/smart/plugins/landscape.py | Python | apache-2.0 | 1,481 |
# Stores data about the sequence
# NEEDS TO BE KEPT IN SYNC WITH THE REST OF BIOPYTHON AND BIOPERL
# In particular, the SeqRecord and BioSQL.BioSeq.DBSeqRecord classes
# need to be in sync (this is the BioSQL "Database SeqRecord", see
# also BioSQL.BioSeq.DBSeq which is the "Database Seq" class)
class SeqRecord:
"""A SeqRecord object holds a sequence and information about it.
Main attributes:
id - Identifier such as a locus tag (string)
seq - The sequence itself (Seq object)
Additional attributes:
name - Sequence name, e.g. gene name (string)
description - Additional text (string)
dbxrefs - List of database cross references (list of strings)
features - Any (sub)features defined (list of SeqFeature objects)
annotations - Further information about the whole sequence (dictionary)
Most entries are lists of strings.
"""
def __init__(self, seq, id = "<unknown id>", name = "<unknown name>",
description = "<unknown description>", dbxrefs = None,
features = None):
"""Create a SeqRecord.
Arguments:
seq - Sequence, required (Seq object)
id - Sequence identifier, recommended (string)
        name - Sequence name, optional (string)
        description - Sequence description, optional (string)
dbxrefs - Database cross references, optional (list of strings)
features - Any (sub)features, optional (list of SeqFeature objects)
Note that while an id is optional, we strongly recommend you supply a
unique id string for each record. This is especially important
if you wish to write your sequences to a file.
        You can create a 'blank' SeqRecord object and then populate the
attributes later. Note that currently the annotations dictionary
cannot be specified when creating the SeqRecord."""
self.seq = seq
self.id = id
self.name = name
self.description = description
if dbxrefs is None:
dbxrefs = []
self.dbxrefs = dbxrefs
# annotations about the whole sequence
self.annotations = {}
# annotations about parts of the sequence
if features is None:
features = []
self.features = features
def __str__(self) :
"""A human readable summary of the record and its annotation."""
lines = []
if self.id : lines.append("ID: %s" % self.id)
if self.name : lines.append("Name: %s" % self.name)
        if self.description : lines.append("Description: %s" % self.description)
if self.dbxrefs : lines.append("Database cross-references: " \
+ ", ".join(self.dbxrefs))
for a in self.annotations:
lines.append("/%s=%s" % (a, str(self.annotations[a])))
#Don't want to include the entire sequence,
#and showing the alphabet is useful:
lines.append(repr(self.seq))
return "\n".join(lines)
def __repr__(self) :
"""A concise summary of the record for debugging."""
return self.__class__.__name__ \
+ "(seq=%s, id=%s, name=%s, description=%s, dbxrefs=%s)" \
% tuple(map(repr, (self.seq, self.id, self.name,
self.description, self.dbxrefs)))
if __name__ == "__main__" :
#The following is a very quick example of how to create a SeqRecord object
from Bio.Seq import Seq
from Bio.Alphabet import generic_protein
record = SeqRecord(Seq("MASRGVNKVILVGNLGQDPEVRYMPNGGAVANITLATSESWRDKAT" \
+"GEMKEQTEWHRVVLFGKLAEVASEYLRKGSQVYIEGQLRTRKWTDQ" \
+"SGQDRYTTEVVVNVGGTMQMLGGRQGGGAPAGGNIGGGQPQGGWGQ" \
+"PQQPQGGNQFSGGAQSRPQQSAPAAPSNEPPMDFDDDIPF",
generic_protein),
id="NP_418483.1", name="b4059",
description="ssDNA-binding protein",
dbxrefs=["ASAP:13298", "GI:16131885", "GeneID:948570"])
#Note that annotations must be added AFTER creating the record
record.annotations["note"] = "This annotation was added later"
print str(record)
print repr(record)
#One way to create a minimal record.
record2 = SeqRecord(Seq(""))
| dbmi-pitt/DIKB-Micropublication | scripts/mp-scripts/Bio/SeqRecord.py | Python | apache-2.0 | 4,392 |
# gcompris - hangman.py
# -*- coding: utf-8 -*-
#
# Copyright (C) 2003, 2008 Bruno Coudoin
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
#
# PythonTemplate Board module
import gtk
import gtk.gdk
import gcompris
import gcompris.utils
import gcompris.skin
import gcompris.bonus
import gcompris.score
import goocanvas
import pango
from gcompris import gcompris_gettext as _
fles = None
class Gcompris_hangman:
"""Empty gcompris python class"""
def __init__(self, gcomprisBoard):
# Save the gcomprisBoard, it defines everything we need
# to know from the core
self.gcomprisBoard = gcomprisBoard
# These are used to let us restart only after the bonus is displayed.
# When the bonus is displayed, it call us first with pause(1) and then with pause(0)
self.board_paused = 0;
self.gamewon = 0;
    # global parameter to access object structures from global functions
global fles
fles=self
# The current word to search
self.word = ""
    # The list of letters to find
self.letters = []
# The list of virtual keys displayed
self.keys = []
# the number of trials left to the user
self.trial = 0
# All the vowel of your language (keep empty if non applicable)
# Separate with /. You can use digraphs, trigraphs etc.
tempvowels = unicode(_("a/e/i/o/u/y"), encoding="utf8")
#tempvowels = "a/e/i/o/u/y"
self.vowels = tempvowels.split("/")
# All the consonants of your language (keep empty if non applicable)
# Separate with /. You can use digraphs, trigraphs etc. For example, both 's' and 'sch' could be in the list for German
tempconsonants = unicode(_("b/c/d/f/g/h/j/k/l/m/n/p/q/r/s/t/v/w/x/z"), encoding="utf8")
self.consonants = tempconsonants.split("/")
# Keys to letters equivalence for the hangman activity. It has the
# form of a space separated list like: "é=e/E è=e/E sch=S"
# Letters on the left of the = can be multigraphs and represent the letters on the buttons.
# Letters on the right are single letters pressed on the keyboard.
# If you wish to allow different key presses for the same letter, separate the letters
    # on the right with the character '/'. Keep the word NONE if not available in your language
keyequivs = unicode(_("a=a"), encoding="utf8")
if keyequivs == "a=a":
keyequivs = None
# Create equivs list
self.keyequivList = {}
if keyequivs:
for keyequiv in keyequivs.split(' '):
try:
(k, v) = keyequiv.split('=')
self.keyequivList[k]=v
except:
print ("ERROR: Bad key equivalence list '%s' for hangman: " %(keyequivs, ))
# Letters equivalence for the hangman activity. It has the
# form of a space separated list like: "e=éè a=àâ"
# Keep the word NONE if not available in your language
self.equivs = unicode(_("NONE"), encoding="utf8")
if self.equivs == "NONE":
self.equivs = ""
# Create equivs list
self.equivList = []
for equiv in self.equivs.split(' '):
try:
(k, v) = equiv.split('=')
self.equivList.append(k + v)
except:
print ("ERROR: Bad equivalence list '%s'" %(self.equivs, ))
def start(self):
# Create our rootitem. We put each canvas item in it so at the end we
    # only have to kill it. The canvas deletes all the items it contains automatically.
self.backitem = goocanvas.Group(parent =
self.gcomprisBoard.canvas.get_root_item())
svghandle = gcompris.utils.load_svg("hangman/back.svgz")
goocanvas.Svg(
parent = self.backitem,
svg_handle = svghandle,
pointer_events = goocanvas.EVENTS_NONE
)
# Create our rootitem. We put each canvas item in it so at the end we
    # only have to kill it. The canvas deletes all the items it contains automatically.
self.rootitem = goocanvas.Group(parent =
self.backitem)
# Get the name of the language for the current locale
self.wordlist = None
try:
self.language = gcompris.gcompris_gettext( gcompris.get_locale_name(gcompris.get_locale()) )
self.wordlist = gcompris.get_wordlist("hangman/default-$LOCALE.xml")
except:
pass
# Fallback to wordsgame list
if not self.wordlist:
try:
self.language = gcompris.gcompris_gettext( gcompris.get_locale_name(gcompris.get_locale()) )
self.wordlist = gcompris.get_wordlist("wordsgame/default-$LOCALE.xml")
except:
pass
# Fallback to english
if not self.wordlist:
try:
self.wordlist = gcompris.get_wordlist("hangman/default-en.xml")
self.language = _("English")
except:
pass
# Fallback to English wordsgame list
if not self.wordlist:
self.wordlist = gcompris.get_wordlist("wordsgame/default-en.xml")
self.language = _("English")
if not self.wordlist:
gcompris.utils.dialog(_("Could not find the list of words."),
stop_board)
return;
self.gcomprisBoard.level = 1
self.gcomprisBoard.maxlevel = self.wordlist.number_of_level * 3
self.gcomprisBoard.sublevel = 1
self.gcomprisBoard.number_of_sublevel = 10
# Set the buttons we want in the bar
gcompris.bar_set(gcompris.BAR_LEVEL)
gcompris.bar_location(gcompris.BOARD_WIDTH - 160, -1, 0.6)
# Set a background image
gcompris.set_default_background(self.gcomprisBoard.canvas.get_root_item())
self.display_level()
def end(self):
# Remove the root item removes all the others inside it
self.backitem.remove()
gcompris.score.end()
def ok(self):
print("hangman ok.")
def repeat(self):
print("hangman repeat.")
def config(self):
print("hangman config.")
def key_press(self, keyval, commit_str, preedit_str):
if not commit_str:
return
for k in self.keys:
if k.click(commit_str):
break
return True
def pause(self, pause):
self.board_paused = pause
# When the bonus is displayed, it call us first
# with pause(1) and then with pause(0)
# the game is won if 1, lost if 2
if(self.gamewon >= 1 and pause == 0):
if self.gamewon == 1:
self.increment_level()
self.gamewon = 0
self.next_level()
return
def set_level(self, level):
self.gcomprisBoard.level = level;
self.gcomprisBoard.sublevel = 1;
gcompris.bar_set_level(self.gcomprisBoard)
self.next_level()
#-------
def increment_level(self):
self.gcomprisBoard.sublevel += 1
if(self.gcomprisBoard.sublevel > self.gcomprisBoard.number_of_sublevel):
# Try the next level
self.gcomprisBoard.sublevel = 1
self.gcomprisBoard.level += 1
if(self.gcomprisBoard.level > self.gcomprisBoard.maxlevel):
self.gcomprisBoard.level = self.gcomprisBoard.maxlevel
def next_level(self):
gcompris.score.end()
self.rootitem.remove()
self.rootitem = goocanvas.Group(parent =
self.backitem)
self.display_level()
def display_letter_set(self, letter_set, w, h,
fill_color, stroke_color):
group = goocanvas.Group(parent = self.rootitem)
max_per_line = gcompris.BOARD_WIDTH / w - 2
x = (gcompris.BOARD_WIDTH - (max_per_line * w)) / 2
wc = 0
line = 0
for line in range(0, len(letter_set) / max_per_line + 1):
for i in range(0, max_per_line):
if wc < len(letter_set):
self.keys.append(
Key(self, group, x + i * w, line * h, w - 5, h - 5, letter_set[wc],
self.get_equiv(letter_set[wc]),
fill_color, stroke_color) )
wc += 1
return (group, (line + 1) * h)
def display_level(self):
w = 40
# Fewer trials when the level increases
self.trial = 12 - (self.gcomprisBoard.level - 1) / self.wordlist.number_of_level
gcompris.bar_set_level(self.gcomprisBoard)
gcompris.score.start(gcompris.score.STYLE_NOTE,
gcompris.BOARD_WIDTH / 2 - 100, 170, self.trial)
gcompris.score.set(self.trial)
# Display the word to find
self.word = self.get_next_word()
self.letters = []
self.keys = []
# get letters from letter list and parse according to multigraphs:
# append consonants and vowels into a new list for easy reference
multigraphlist = set(self.vowels+self.consonants)
# find out the length of the longest multigraph and store in constant
longestmultigraph = 1
for i,v in enumerate(multigraphlist):
if longestmultigraph < len(v):
longestmultigraph = len(v)
# chop up the word according to multigraphs, so the length of the word can be calculated
parsedword = []
graphlength = longestmultigraph
wordlength = len(self.word)
i = 0
while i < wordlength:
graphlength=longestmultigraph
# make sure the end of the multigraph does not get out of bounds
while (i+graphlength) > wordlength:
graphlength = graphlength-1
# compare substring with list of multigraphs and reduce length if not found
found = False
while (found == False):
# check if the multigraph is valid
if (self.word[i:i+graphlength] in multigraphlist):
parsedword.append(self.word[i:i+graphlength])
found = True
else:
# make sure we don't get an endless loop or empty letters
if (graphlength<= 1):
found = True
parsedword.append(self.word[i])
graphlength=1
# the next time, look for a shorter multigraph
graphlength = graphlength - 1
# skip to the next multigraph in the word
i = i+1+graphlength
# end parsing the word with multigraphs
# now construct and display the word for the game
x = (gcompris.BOARD_WIDTH - (len(parsedword) * w)) / 2
for i in range(0, len(parsedword)):
# dynamic width of last multigraph in the word
if(i>0):
if(len(parsedword[i-1])>1):
xshift=(len(parsedword[i-1])-1)*0.75/len(parsedword[i-1]) #todo factor
else:
xshift=0
self.letters.append(Letter(self, x + (i+xshift)*w, 70,
parsedword[i],
self.get_equiv(parsedword[i])))
# Display the language
goocanvas.Text(
parent = self.rootitem,
x = gcompris.BOARD_WIDTH / 2,
y = gcompris.BOARD_HEIGHT / 2 - 30,
text = self.language,
fill_color = "white",
font = gcompris.skin.get_font("gcompris/board/medium"),
anchor = gtk.ANCHOR_CENTER,
alignment = pango.ALIGN_CENTER
)
# Display the virtual keyboard
key_width = 60
key_height = 40
if len(self.vowels) + len(self.consonants) >= 30:
key_width = 50
key_height = 35
(group_vowels, y_vowels) = self.display_letter_set(self.vowels,
key_width, key_height,
0xFF6633AAL, 0xFF33CCBBL)
(group_cons, y_cons) = self.display_letter_set(self.consonants,
key_width, key_height,
0x66FF33AAL, 0xFF33CCBBL)
group_vowels.translate(0, gcompris.BOARD_HEIGHT - y_cons - y_vowels - key_height)
group_cons.translate(0, gcompris.BOARD_HEIGHT - y_cons - 20)
def get_next_word(self):
return unicode(gcompris.get_random_word(self.wordlist,
self.gcomprisBoard.level),
encoding="utf8")
def found_all_letters(self):
retval = True
for letter in self.letters:
if not letter.found:
retval = False
return retval
def check_letters(self, targetLetter):
retval = False
for letter in self.letters:
if letter.check(targetLetter):
retval = True
return retval
def hide_letters(self, status):
for letter in self.letters:
letter.hide(status)
def get_equiv(self, letter):
""" Get key equivalence for the given letter/multigraph """
letters = [letter]
if letter in self.keyequivList:
try:
tempequivs=self.keyequivList[letter].split('/')
for v in tempequivs:
letters.append(v)
except:
print("Error parsing multigraphs in equivalence list: "+letter)
""" Get equivalence for the given letter """
#print("----- getting equivalence for: "+letter)
if(len(letter)==1):
#print("grabbing from list")
for v in self.equivList:
if v.count(letter):
letters.append(v)
return letters
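# Illustrative example (added note; the sample value follows the "a=àâ" format
# documented above): with self.equivList == ["aàâ"], get_equiv("a") returns
# ["a", "aàâ"], i.e. the letter itself plus every equivalence group that
# contains it.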
def ok_event(self, widget, target, event=None):
if self.gamewon == 2:
gcompris.bonus.display(gcompris.bonus.LOOSE, gcompris.bonus.TUX)
else:
gcompris.bonus.display(gcompris.bonus.WIN, gcompris.bonus.TUX)
def display_ok(self):
# The OK Button
item = goocanvas.Svg(parent = self.rootitem,
svg_handle = gcompris.skin.svg_get(),
svg_id = "#OK"
)
zoom = 0.8
item.translate( (item.get_bounds().x1 * -1)
+ ( gcompris.BOARD_WIDTH - 300 ) / zoom,
(item.get_bounds().y1 * -1) + 190.0 / zoom)
item.scale(zoom, zoom)
item.connect("button_press_event", self.ok_event)
gcompris.utils.item_focus_init(item, None)
# A letter to find displayed on the screen
class Letter:
def __init__(self, hangman, x, y,
letter, letters):
self.found = False
parent = hangman.rootitem
fill_color = 0xFF3366AAL
stroke_color = 0xFF33CCAAL
# dynamic display width for multigraphs
if len(letter) == 1:
xshift = 1
elif len(letter) == 2:
xshift = len(letter) * 0.75
elif len(letter) == 3:
xshift = len(letter) * 0.5
else:
xshift = len(letter) * 0.25
w = 30*xshift
h = 30
self.letters = letters
self.rect = goocanvas.Rect(
parent = parent,
x = x,
y = y,
width = w,
height = h,
fill_color_rgba = fill_color,
stroke_color_rgba = stroke_color,
line_width = 1.0)
self.letterItem = goocanvas.Text(
parent = parent,
x = x + w/2,
y = y + h/2,
text = letter,
fill_color = "black",
font = gcompris.skin.get_font("gcompris/board/medium"),
anchor = gtk.ANCHOR_CENTER,
alignment = pango.ALIGN_CENTER
)
self.hide(True)
def hide(self, status):
if status:
self.letterItem.props.visibility = goocanvas.ITEM_INVISIBLE
else:
self.letterItem.props.visibility = goocanvas.ITEM_VISIBLE
def check(self, targetLetter):
for letter in self.letters:
if (letter.count(targetLetter) > 0):
self.rect.set_properties(fill_color_rgba = 0x66CC33AAL)
self.letterItem.props.visibility = goocanvas.ITEM_VISIBLE
self.found = True
return True
return False
# A virtual key on screen
class Key:
def __init__(self, hangman, parent, x, y, w, h,
letter, letters, fill_color, stroke_color):
self.hangman = hangman
self.letter = letter
self.letters = letters
self.disabled = False
self.rect = goocanvas.Rect(
parent = parent,
x = x,
y = y,
width = w,
height = h,
fill_color_rgba = fill_color,
stroke_color_rgba = stroke_color,
line_width = 1.0)
self.letterItem = goocanvas.Text(
parent = parent,
x = x + w/2,
y = y + h/2,
text = letter,
fill_color = "black",
font = gcompris.skin.get_font("gcompris/board/medium"),
anchor = gtk.ANCHOR_CENTER,
alignment = pango.ALIGN_CENTER
)
self.letterItem.connect("button_press_event",
self.letter_event, letter)
self.rect.connect("button_press_event",
self.letter_event, letter)
def click(self, letter):
if self.hangman.gamewon:
return False
# Check we are the correct key for the given letter
if not self.letters.count(letter):
return False
# Already done
if self.disabled:
return True
# disable this key, mark it as used
self.disabled = True
self.rect.set_properties(fill_color_rgba = 0xCCCCCCCCL,
line_width = 3.0)
self.rect.disconnect_by_func(self.letter_event)
self.letterItem.disconnect_by_func(self.letter_event)
# Decrease user lives and trigger the bonus if needed
if not self.hangman.check_letters(letter):
self.hangman.trial -= 1
gcompris.score.set(self.hangman.trial)
if self.hangman.trial == 0:
self.hangman.gamewon = 2
self.hangman.hide_letters(False)
self.hangman.display_ok()
elif self.hangman.found_all_letters():
self.hangman.gamewon = 1
self.hangman.display_ok()
return True
#
# Event on a letter
#
def letter_event(self, item, target, event, letter):
self.click(letter)
def stop_board():
global fles
fles.end()
gcompris.end_board()
| bdoin/GCompris | src/hangman-activity/hangman.py | Python | gpl-3.0 | 18,192 |
# Copyright 2013 Big Switch Networks, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
from neutron.common import exceptions as n_exception
from neutron.common import rpc as n_rpc
from neutron.common import topics
from neutron import context as neutron_context
from neutron.db.firewall import firewall_db
from neutron.extensions import firewall as fw_ext
from neutron.openstack.common import log as logging
from neutron.plugins.common import constants as const
LOG = logging.getLogger(__name__)
class FirewallCallbacks(n_rpc.RpcCallback):
RPC_API_VERSION = '1.0'
def __init__(self, plugin):
super(FirewallCallbacks, self).__init__()
self.plugin = plugin
def set_firewall_status(self, context, firewall_id, status, **kwargs):
"""Agent uses this to set a firewall's status."""
LOG.debug(_("set_firewall_status() called"))
with context.session.begin(subtransactions=True):
fw_db = self.plugin._get_firewall(context, firewall_id)
# Ignore the status change if the firewall is waiting to be deleted:
# that case means that while some pending operation was being
# performed on the backend, the neutron server received a delete
# request and changed the firewall status to const.PENDING_DELETE
if fw_db.status == const.PENDING_DELETE:
LOG.debug(_("Firewall %(fw_id)s in PENDING_DELETE state, "
"not changing to %(status)s"),
{'fw_id': firewall_id, 'status': status})
return False
if status in (const.ACTIVE, const.DOWN):
fw_db.status = status
return True
else:
fw_db.status = const.ERROR
return False
def firewall_deleted(self, context, firewall_id, **kwargs):
"""Agent uses this to indicate firewall is deleted."""
LOG.debug(_("firewall_deleted() called"))
with context.session.begin(subtransactions=True):
fw_db = self.plugin._get_firewall(context, firewall_id)
# allow to delete firewalls in ERROR state
if fw_db.status in (const.PENDING_DELETE, const.ERROR):
self.plugin.delete_db_firewall_object(context, firewall_id)
return True
else:
LOG.warn(_('Firewall %(fw)s unexpectedly deleted by agent, '
'status was %(status)s'),
{'fw': firewall_id, 'status': fw_db.status})
fw_db.status = const.ERROR
return False
def get_firewalls_for_tenant(self, context, **kwargs):
"""Agent uses this to get all firewalls and rules for a tenant."""
LOG.debug(_("get_firewalls_for_tenant() called"))
fw_list = [
self.plugin._make_firewall_dict_with_rules(context, fw['id'])
for fw in self.plugin.get_firewalls(context)
]
return fw_list
def get_firewalls_for_tenant_without_rules(self, context, **kwargs):
"""Agent uses this to get all firewalls for a tenant."""
LOG.debug(_("get_firewalls_for_tenant_without_rules() called"))
fw_list = [fw for fw in self.plugin.get_firewalls(context)]
return fw_list
def get_tenants_with_firewalls(self, context, **kwargs):
"""Agent uses this to get all tenants that have firewalls."""
LOG.debug(_("get_tenants_with_firewalls() called"))
ctx = neutron_context.get_admin_context()
fw_list = self.plugin.get_firewalls(ctx)
fw_tenant_list = list(set(fw['tenant_id'] for fw in fw_list))
return fw_tenant_list
class FirewallAgentApi(n_rpc.RpcProxy):
"""Plugin side of plugin to agent RPC API."""
API_VERSION = '1.0'
def __init__(self, topic, host):
super(FirewallAgentApi, self).__init__(topic, self.API_VERSION)
self.host = host
def create_firewall(self, context, firewall):
return self.fanout_cast(
context,
self.make_msg('create_firewall', firewall=firewall,
host=self.host)
)
def update_firewall(self, context, firewall):
return self.fanout_cast(
context,
self.make_msg('update_firewall', firewall=firewall,
host=self.host)
)
def delete_firewall(self, context, firewall):
return self.fanout_cast(
context,
self.make_msg('delete_firewall', firewall=firewall,
host=self.host)
)
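# Note (added): each call above is a fanout cast, i.e. the firewall dict is
# broadcast to every agent consuming the topic rather than sent to a single
# host; the agents are expected to expose matching create_firewall /
# update_firewall / delete_firewall endpoints (FirewallCallbacks above covers
# the reverse, agent-to-plugin direction).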
class FirewallCountExceeded(n_exception.Conflict):
"""Reference implementation specific exception for firewall count.
Only one firewall is supported per tenant. When an attempt is made
to create a second firewall, this exception is raised.
"""
message = _("Exceeded allowed count of firewalls for tenant "
"%(tenant_id)s. Only one firewall is supported per tenant.")
class FirewallPlugin(firewall_db.Firewall_db_mixin):
"""Implementation of the Neutron Firewall Service Plugin.
This class manages the workflow of FWaaS request/response.
Most DB related works are implemented in class
firewall_db.Firewall_db_mixin.
"""
supported_extension_aliases = ["fwaas"]
def __init__(self):
"""Do the initialization for the firewall service plugin here."""
self.endpoints = [FirewallCallbacks(self)]
self.conn = n_rpc.create_connection(new=True)
self.conn.create_consumer(
topics.FIREWALL_PLUGIN, self.endpoints, fanout=False)
self.conn.consume_in_threads()
self.agent_rpc = FirewallAgentApi(
topics.L3_AGENT,
cfg.CONF.host
)
def _make_firewall_dict_with_rules(self, context, firewall_id):
firewall = self.get_firewall(context, firewall_id)
fw_policy_id = firewall['firewall_policy_id']
if fw_policy_id:
fw_policy = self.get_firewall_policy(context, fw_policy_id)
fw_rules_list = [self.get_firewall_rule(
context, rule_id) for rule_id in fw_policy['firewall_rules']]
firewall['firewall_rule_list'] = fw_rules_list
else:
firewall['firewall_rule_list'] = []
# FIXME(Sumit): If the size of the firewall object we are creating
# here exceeds the largest message size supported by rabbit/qpid
# then we will have a problem.
return firewall
def _rpc_update_firewall(self, context, firewall_id):
status_update = {"firewall": {"status": const.PENDING_UPDATE}}
super(FirewallPlugin, self).update_firewall(context, firewall_id,
status_update)
fw_with_rules = self._make_firewall_dict_with_rules(context,
firewall_id)
self.agent_rpc.update_firewall(context, fw_with_rules)
def _rpc_update_firewall_policy(self, context, firewall_policy_id):
firewall_policy = self.get_firewall_policy(context, firewall_policy_id)
if firewall_policy:
for firewall_id in firewall_policy['firewall_list']:
self._rpc_update_firewall(context, firewall_id)
def _ensure_update_firewall(self, context, firewall_id):
fwall = self.get_firewall(context, firewall_id)
if fwall['status'] in [const.PENDING_CREATE,
const.PENDING_UPDATE,
const.PENDING_DELETE]:
raise fw_ext.FirewallInPendingState(firewall_id=firewall_id,
pending_state=fwall['status'])
def _ensure_update_firewall_policy(self, context, firewall_policy_id):
firewall_policy = self.get_firewall_policy(context, firewall_policy_id)
if firewall_policy and 'firewall_list' in firewall_policy:
for firewall_id in firewall_policy['firewall_list']:
self._ensure_update_firewall(context, firewall_id)
def _ensure_update_firewall_rule(self, context, firewall_rule_id):
fw_rule = self.get_firewall_rule(context, firewall_rule_id)
if 'firewall_policy_id' in fw_rule and fw_rule['firewall_policy_id']:
self._ensure_update_firewall_policy(context,
fw_rule['firewall_policy_id'])
def create_firewall(self, context, firewall):
LOG.debug(_("create_firewall() called"))
tenant_id = self._get_tenant_id_for_create(context,
firewall['firewall'])
fw_count = self.get_firewalls_count(context,
filters={'tenant_id': [tenant_id]})
if fw_count:
raise FirewallCountExceeded(tenant_id=tenant_id)
fw = super(FirewallPlugin, self).create_firewall(context, firewall)
fw_with_rules = (
self._make_firewall_dict_with_rules(context, fw['id']))
self.agent_rpc.create_firewall(context, fw_with_rules)
return fw
def update_firewall(self, context, id, firewall):
LOG.debug(_("update_firewall() called"))
self._ensure_update_firewall(context, id)
firewall['firewall']['status'] = const.PENDING_UPDATE
fw = super(FirewallPlugin, self).update_firewall(context, id, firewall)
fw_with_rules = (
self._make_firewall_dict_with_rules(context, fw['id']))
self.agent_rpc.update_firewall(context, fw_with_rules)
return fw
def delete_db_firewall_object(self, context, id):
firewall = self.get_firewall(context, id)
if firewall['status'] == const.PENDING_DELETE:
super(FirewallPlugin, self).delete_firewall(context, id)
def delete_firewall(self, context, id):
LOG.debug(_("delete_firewall() called"))
status_update = {"firewall": {"status": const.PENDING_DELETE}}
fw = super(FirewallPlugin, self).update_firewall(context, id,
status_update)
fw_with_rules = (
self._make_firewall_dict_with_rules(context, fw['id']))
self.agent_rpc.delete_firewall(context, fw_with_rules)
def update_firewall_policy(self, context, id, firewall_policy):
LOG.debug(_("update_firewall_policy() called"))
self._ensure_update_firewall_policy(context, id)
fwp = super(FirewallPlugin,
self).update_firewall_policy(context, id, firewall_policy)
self._rpc_update_firewall_policy(context, id)
return fwp
def update_firewall_rule(self, context, id, firewall_rule):
LOG.debug(_("update_firewall_rule() called"))
self._ensure_update_firewall_rule(context, id)
fwr = super(FirewallPlugin,
self).update_firewall_rule(context, id, firewall_rule)
firewall_policy_id = fwr['firewall_policy_id']
if firewall_policy_id:
self._rpc_update_firewall_policy(context, firewall_policy_id)
return fwr
def insert_rule(self, context, id, rule_info):
LOG.debug(_("insert_rule() called"))
self._ensure_update_firewall_policy(context, id)
fwp = super(FirewallPlugin,
self).insert_rule(context, id, rule_info)
self._rpc_update_firewall_policy(context, id)
return fwp
def remove_rule(self, context, id, rule_info):
LOG.debug(_("remove_rule() called"))
self._ensure_update_firewall_policy(context, id)
fwp = super(FirewallPlugin,
self).remove_rule(context, id, rule_info)
self._rpc_update_firewall_policy(context, id)
return fwp
| samsu/neutron | services/firewall/fwaas_plugin.py | Python | apache-2.0 | 12,362 |
#!/usr/bin/env python
from couchbase.bucket import Bucket
cb = Bucket('couchbase://localhost/default')
manager = cb.bucket_manager()
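# Note (added): creating a primary index is what makes ad-hoc N1QL queries on
# the bucket possible. A follow-up query might look like this sketch (assuming
# the Python SDK 2.x N1QLQuery API):
#
#   from couchbase.n1ql import N1QLQuery
#   for row in cb.n1ql_query(N1QLQuery('SELECT META().id FROM `default` LIMIT 5')):
#       print(row)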
manager.n1ql_index_create_primary(ignore_exists=True) | couchbaselabs/devguide-examples | python/n1ql-create-primary-index.py | Python | apache-2.0 | 188 |
import subprocess
from flask import Flask, render_template, request
app = Flask(__name__)
def outer(outer_arg):
outer_ret_val = outer_arg + 'hey'
return outer_ret_val
def inner(inner_arg):
inner_ret_val = inner_arg + 'hey'
return inner_ret_val
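# Note (added): this module appears to be an intentionally vulnerable fixture.
# User input flows request.form['suggestion'] -> inner() -> outer() ->
# subprocess.call(..., shell=True), i.e. a tainted value reaches a shell
# command through nested user-defined functions, which is the path a static
# analyser is expected to report.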
@app.route('/menu', methods=['POST'])
def menu():
req_param = request.form['suggestion']
result = outer(inner(req_param))
subprocess.call(result, shell=True)
with open('menu.txt','r') as f:
menu = f.read()
return render_template('command_injection.html', menu=menu)
| python-security/pyt | examples/nested_functions_code/sink_with_result_of_user_defined_nested.py | Python | gpl-2.0 | 560 |
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
CURRENT_DIR = os.path.dirname(__file__)
setup(name='datapot',
description='Library for automatic feature extraction from JSON-datasets',
long_description=open(os.path.join(CURRENT_DIR, 'README.rst')).read(),
version='0.1.3',
url='https://github.com/bashalex/datapot',
author='Alex Bash, Yuriy Mokriy, Nikita Saveyev, Michal Rozenwald, Peter Romov',
author_email='[email protected], [email protected], [email protected], [email protected], [email protected]',
license='GNU v3.0',
maintainer='Nikita Savelyev',
maintainer_email='[email protected]',
install_requires=[
'numpy >= 1.6.1',
'scipy >= 0.17.0',
'pandas >= 0.17.1',
'scikit-learn >= 0.17.1',
'iso-639 >= 0.4.5',
'langdetect >= 1.0.7',
'gensim >= 2.1.0',
'nltk >= 3.2.4',
'tsfresh >= 0.7.1',
'python-dateutil >= 2.6.0',
'fastnumbers >= 2.0.1',
'pystemmer >= 1.3.0',
],
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 2',
'Intended Audience :: Science/Research',
'Intended Audience :: Developers',
'Topic :: Scientific/Engineering',
'Topic :: Software Development',
],
packages=find_packages())
| bashalex/datapot | setup.py | Python | gpl-3.0 | 1,582 |
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from email.utils import parseaddr
def is_pretty_from_address(input):
name, email = parseaddr(input)
if email == 'webmaster@localhost':
# This is the default setting for DEFAULT_FROM_EMAIL, so if
# this is the email we've got, you should probably override
# DEFAULT_FROM_EMAIL in your settings file.
raise ValidationError("Enter a valid value.")
validate_email(email)
if name:
return True
else:
return False
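# Illustrative behaviour (added note, derived from the checks above):
#   is_pretty_from_address('Alice <alice@example.com>')  -> True
#   is_pretty_from_address('alice@example.com')          -> False
#   is_pretty_from_address('webmaster@localhost')        -> raises ValidationError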
| dominicrodger/djohno | djohno/utils.py | Python | bsd-2-clause | 581 |
# -*- coding: utf-8 -*-
"""Some utility functions."""
# Authors: Alexandre Gramfort <[email protected]>
#
# License: BSD (3-clause)
import contextlib
import inspect
from io import StringIO
import re
import sys
import logging
import os.path as op
import warnings
from ..externals.decorator import FunctionMaker
logger = logging.getLogger('mne') # one selection here used across mne-python
logger.propagate = False # don't propagate (in case of multiple imports)
# class to provide frame information (should be low overhead, just on logger
# calls)
class _FrameFilter(logging.Filter):
def __init__(self):
self.add_frames = 0
def filter(self, record):
record.frame_info = 'Unknown'
if self.add_frames:
# 5 is the offset necessary to get out of here and the logging
# module; the reversal puts the oldest frame at the top
frame_info = _frame_info(5 + self.add_frames)[5:][::-1]
if len(frame_info):
frame_info[-1] = (frame_info[-1] + ' :').ljust(30)
if len(frame_info) > 1:
frame_info[0] = '┌' + frame_info[0]
frame_info[-1] = '└' + frame_info[-1]
for ii, info in enumerate(frame_info[1:-1], 1):
frame_info[ii] = '├' + info
record.frame_info = '\n'.join(frame_info)
return True
_filter = _FrameFilter()
logger.addFilter(_filter)
def verbose(function):
"""Verbose decorator to allow functions to override log-level.
Parameters
----------
function : callable
Function to be decorated by setting the verbosity level.
Returns
-------
dec : callable
The decorated function.
See Also
--------
set_log_level
set_config
Notes
-----
This decorator is used to set the verbose level during a function or method
call, such as :func:`mne.compute_covariance`. The `verbose` keyword
argument can be 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL', True (an
alias for 'INFO'), or False (an alias for 'WARNING'). To set the global
verbosity level for all functions, use :func:`mne.set_log_level`.
This function also serves as a docstring filler.
Examples
--------
You can use the ``verbose`` argument to set the verbose level on the fly::
>>> import mne
>>> cov = mne.compute_raw_covariance(raw, verbose='WARNING') # doctest: +SKIP
>>> cov = mne.compute_raw_covariance(raw, verbose='INFO') # doctest: +SKIP
Using up to 49 segments
Number of samples used : 5880
[done]
""" # noqa: E501
# See https://decorator.readthedocs.io/en/latest/tests.documentation.html
# #dealing-with-third-party-decorators
from .docs import fill_doc
try:
fill_doc(function)
except TypeError: # nothing to add
pass
# Anything using verbose should either have `verbose=None` in the signature
# or have a `self.verbose` attribute (if in a method). This code path
# will raise an error if neither is the case.
body = """\
def %(name)s(%(signature)s):\n
try:
verbose
except UnboundLocalError:
try:
verbose = self.verbose
except NameError:
raise RuntimeError('Function %%s does not accept verbose parameter'
%% (_function_,))
except AttributeError:
raise RuntimeError('Method %%s class does not have self.verbose'
%% (_function_,))
else:
if verbose is None:
try:
verbose = self.verbose
except (NameError, AttributeError):
pass
if verbose is not None:
with _use_log_level_(verbose):
return _function_(%(shortsignature)s)
else:
return _function_(%(shortsignature)s)"""
evaldict = dict(
_use_log_level_=use_log_level, _function_=function)
fm = FunctionMaker(function, None, None, None, None, function.__module__)
attrs = dict(__wrapped__=function, __qualname__=function.__qualname__)
return fm.make(body, evaldict, addsource=True, **attrs)
class use_log_level(object):
"""Context handler for logging level.
Parameters
----------
level : int
The level to use.
add_frames : int | None
Number of stack frames to include.
"""
def __init__(self, level, add_frames=None): # noqa: D102
self.level = level
self.add_frames = add_frames
self.old_frames = _filter.add_frames
def __enter__(self): # noqa: D105
self.old_level = set_log_level(self.level, True, self.add_frames)
def __exit__(self, *args): # noqa: D105
add_frames = self.old_frames if self.add_frames is not None else None
set_log_level(self.old_level, add_frames=add_frames)
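# Usage sketch (added for illustration): temporarily change the verbosity for
# everything executed inside the block, restoring the previous level on exit.
#
#   with use_log_level('WARNING'):
#       ...  # only warnings and errors are logged here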
def set_log_level(verbose=None, return_old_level=False, add_frames=None):
"""Set the logging level.
Parameters
----------
verbose : bool, str, int, or None
The verbosity of messages to print. If a str, it can be either DEBUG,
INFO, WARNING, ERROR, or CRITICAL. Note that these are for
convenience and are equivalent to passing in logging.DEBUG, etc.
For bool, True is the same as 'INFO', False is the same as 'WARNING'.
If None, the environment variable MNE_LOGGING_LEVEL is read, and if
it doesn't exist, defaults to INFO.
return_old_level : bool
If True, return the old verbosity level.
add_frames : int | None
If int, enable (>=1) or disable (0) the printing of stack frame
information using formatting. Default (None) does not change the
formatting. This can add overhead so is meant only for debugging.
Returns
-------
old_level : int
The old level. Only returned if ``return_old_level`` is True.
"""
from .config import get_config
from .check import _check_option, _validate_type
_validate_type(verbose, (bool, str, int, None), 'verbose')
if verbose is None:
verbose = get_config('MNE_LOGGING_LEVEL', 'INFO')
elif isinstance(verbose, bool):
if verbose is True:
verbose = 'INFO'
else:
verbose = 'WARNING'
if isinstance(verbose, str):
verbose = verbose.upper()
logging_types = dict(DEBUG=logging.DEBUG, INFO=logging.INFO,
WARNING=logging.WARNING, ERROR=logging.ERROR,
CRITICAL=logging.CRITICAL)
_check_option('verbose', verbose, logging_types, '(when a string)')
verbose = logging_types[verbose]
old_verbose = logger.level
if verbose != old_verbose:
logger.setLevel(verbose)
if add_frames is not None:
_filter.add_frames = int(add_frames)
fmt = '%(frame_info)s ' if add_frames else ''
fmt += '%(message)s'
fmt = logging.Formatter(fmt)
for handler in logger.handlers:
handler.setFormatter(fmt)
return (old_verbose if return_old_level else None)
def set_log_file(fname=None, output_format='%(message)s', overwrite=None):
"""Set the log to print to a file.
Parameters
----------
fname : str, or None
Filename of the log to print to. If None, stdout is used.
To suppress log outputs, use set_log_level('WARNING').
output_format : str
Format of the output messages. See the following for examples:
https://docs.python.org/dev/howto/logging.html
e.g., "%(asctime)s - %(levelname)s - %(message)s".
overwrite : bool | None
Overwrite the log file (if it exists). Otherwise, statements
will be appended to the log (default). None is the same as False,
but additionally raises a warning to notify the user that log
entries will be appended.
"""
_remove_close_handlers(logger)
if fname is not None:
if op.isfile(fname) and overwrite is None:
# Don't use warn() here because we just want to
# emit a warnings.warn here (not logger.warn)
warnings.warn('Log entries will be appended to the file. Use '
'overwrite=False to avoid this message in the '
'future.', RuntimeWarning, stacklevel=2)
overwrite = False
mode = 'w' if overwrite else 'a'
lh = logging.FileHandler(fname, mode=mode)
else:
""" we should just be able to do:
lh = logging.StreamHandler(sys.stdout)
but because doctests uses some magic on stdout, we have to do this:
"""
lh = logging.StreamHandler(WrapStdOut())
lh.setFormatter(logging.Formatter(output_format))
# actually add the stream handler
logger.addHandler(lh)
def _remove_close_handlers(logger):
for h in list(logger.handlers):
# only remove our handlers (get along nicely with nose)
if isinstance(h, (logging.FileHandler, logging.StreamHandler)):
if isinstance(h, logging.FileHandler):
h.close()
logger.removeHandler(h)
class ClosingStringIO(StringIO):
"""StringIO that closes after getvalue()."""
def getvalue(self, close=True):
"""Get the value."""
out = super().getvalue()
if close:
self.close()
return out
class catch_logging(object):
"""Store logging.
This will remove all other logging handlers, and return the handler to
stdout when complete.
"""
def __enter__(self): # noqa: D105
self._data = ClosingStringIO()
self._lh = logging.StreamHandler(self._data)
self._lh.setFormatter(logging.Formatter('%(message)s'))
self._lh._mne_file_like = True # monkey patch for warn() use
_remove_close_handlers(logger)
logger.addHandler(self._lh)
return self._data
def __exit__(self, *args): # noqa: D105
logger.removeHandler(self._lh)
set_log_file(None)
class WrapStdOut(object):
"""Dynamically wrap to sys.stdout.
This makes packages that monkey-patch sys.stdout (e.g. doctest,
sphinx-gallery) work properly.
"""
def __getattr__(self, name): # noqa: D105
# Even more ridiculous than this class, this must be sys.stdout (not
# just stdout) in order for this to work (tested on OSX and Linux)
if hasattr(sys.stdout, name):
return getattr(sys.stdout, name)
else:
raise AttributeError("'file' object has not attribute '%s'" % name)
_verbose_dec_re = re.compile('^<decorator-gen-[0-9]+>$')
def warn(message, category=RuntimeWarning, module='mne'):
"""Emit a warning with trace outside the mne namespace.
This function takes arguments like warnings.warn, and sends messages
using both ``warnings.warn`` and ``logger.warn``. Warnings can be
generated deep within nested function calls. In order to provide a
more helpful warning, this function traverses the stack until it
reaches a frame outside the ``mne`` namespace that caused the error.
Parameters
----------
message : str
Warning message.
category : instance of Warning
The warning class. Defaults to ``RuntimeWarning``.
module : str
The name of the module emitting the warning.
"""
import mne
root_dir = op.dirname(mne.__file__)
frame = None
if logger.level <= logging.WARN:
frame = inspect.currentframe()
while frame:
fname = frame.f_code.co_filename
lineno = frame.f_lineno
# in verbose dec
if not _verbose_dec_re.search(fname):
# treat tests as scripts
# and don't capture unittest/case.py (assert_raises)
if not (fname.startswith(root_dir) or
('unittest' in fname and 'case' in fname)) or \
op.basename(op.dirname(fname)) == 'tests':
break
frame = frame.f_back
del frame
# We need to use this instead of warn(message, category, stacklevel)
# because we move out of the MNE stack, so warnings won't properly
# recognize the module name (and our warnings.simplefilter will fail)
warnings.warn_explicit(
message, category, fname, lineno, module,
globals().get('__warningregistry__', {}))
# To avoid a duplicate warning print, we only emit the logger.warning if
# one of the handlers is a FileHandler. See gh-5592
if any(isinstance(h, logging.FileHandler) or getattr(h, '_mne_file_like',
False)
for h in logger.handlers):
logger.warning(message)
def _get_call_line():
"""Get the call line from within a function."""
frame = inspect.currentframe().f_back.f_back
if _verbose_dec_re.search(frame.f_code.co_filename):
frame = frame.f_back
context = inspect.getframeinfo(frame).code_context
context = 'unknown' if context is None else context[0].strip()
return context
def filter_out_warnings(warn_record, category=None, match=None):
r"""Remove particular records from ``warn_record``.
This helper takes a list of :class:`warnings.WarningMessage` objects,
and remove those matching category and/or text.
Parameters
----------
category : WarningMessage type | None
Class of the message to filter out.
match : str | None
Text or regex that matches the error message to filter out.
Examples
--------
This can be used as::
>>> import pytest
>>> import warnings
>>> from mne.utils import filter_out_warnings
>>> with pytest.warns(None) as recwarn:
... warnings.warn("value must be 0 or None", UserWarning)
>>> filter_out_warnings(recwarn, match=".* 0 or None")
>>> assert len(recwarn.list) == 0
>>> with pytest.warns(None) as recwarn:
... warnings.warn("value must be 42", UserWarning)
>>> filter_out_warnings(recwarn, match=r'.* must be \d+$')
>>> assert len(recwarn.list) == 0
>>> with pytest.warns(None) as recwarn:
... warnings.warn("this is not here", UserWarning)
>>> filter_out_warnings(recwarn, match=r'.* must be \d+$')
>>> assert len(recwarn.list) == 1
"""
regexp = re.compile('.*' if match is None else match)
is_category = [w.category == category if category is not None else True
for w in warn_record._list]
is_match = [regexp.match(w.message.args[0]) is not None
for w in warn_record._list]
ind = [ind for ind, (c, m) in enumerate(zip(is_category, is_match))
if c and m]
for i in reversed(ind):
warn_record._list.pop(i)
class ETSContext(object):
"""Add more meaningful message to errors generated by ETS Toolkit."""
def __enter__(self): # noqa: D105
pass
def __exit__(self, type, value, traceback): # noqa: D105
if isinstance(value, SystemExit) and value.code.\
startswith("This program needs access to the screen"):
value.code += ("\nThis can probably be solved by setting "
"ETS_TOOLKIT=qt4. On bash, type\n\n $ export "
"ETS_TOOLKIT=qt4\n\nand run the command again.")
@contextlib.contextmanager
def wrapped_stdout(indent='', cull_newlines=False):
"""Wrap stdout writes to logger.info, with an optional indent prefix.
Parameters
----------
indent : str
The indentation to add.
cull_newlines : bool
If True, cull any new/blank lines at the end.
"""
orig_stdout = sys.stdout
my_out = ClosingStringIO()
sys.stdout = my_out
try:
yield
finally:
sys.stdout = orig_stdout
pending_newlines = 0
for line in my_out.getvalue().split('\n'):
if not line.strip() and cull_newlines:
pending_newlines += 1
continue
for _ in range(pending_newlines):
logger.info('\n')
logger.info(indent + line)
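# Usage sketch (added for illustration; the called function is hypothetical):
#
#   with wrapped_stdout(indent='    ', cull_newlines=True):
#       chatty_third_party_call()  # anything printed here goes to logger.info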
def _frame_info(n):
frame = inspect.currentframe()
try:
frame = frame.f_back
infos = list()
for _ in range(n):
try:
name = frame.f_globals['__name__']
except KeyError: # in our verbose dec
pass
else:
infos.append(f'{name.lstrip("mne.")}:{frame.f_lineno}')
frame = frame.f_back
if frame is None:
break
return infos
except Exception:
return ['unknown']
finally:
del frame
| olafhauk/mne-python | mne/utils/_logging.py | Python | bsd-3-clause | 16,826 |
"""Reads vehicle status from BMW connected drive portal."""
import asyncio
import logging
from bimmer_connected.account import ConnectedDriveAccount
from bimmer_connected.country_selector import get_region_from_name
import voluptuous as vol
from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_NAME,
CONF_PASSWORD,
CONF_USERNAME,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import track_utc_time_change
from homeassistant.util import slugify
import homeassistant.util.dt as dt_util
from .const import (
ATTRIBUTION,
CONF_ACCOUNT,
CONF_ALLOWED_REGIONS,
CONF_READ_ONLY,
CONF_REGION,
CONF_USE_LOCATION,
DATA_ENTRIES,
DATA_HASS_CONFIG,
)
_LOGGER = logging.getLogger(__name__)
DOMAIN = "bmw_connected_drive"
ATTR_VIN = "vin"
ACCOUNT_SCHEMA = vol.Schema(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_REGION): vol.In(CONF_ALLOWED_REGIONS),
vol.Optional(CONF_READ_ONLY): cv.boolean,
}
)
CONFIG_SCHEMA = vol.Schema({DOMAIN: {cv.string: ACCOUNT_SCHEMA}}, extra=vol.ALLOW_EXTRA)
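# Illustrative configuration.yaml entry matching CONFIG_SCHEMA (added sketch;
# the account name and region value are placeholders):
#
#   bmw_connected_drive:
#     my_bmw:
#       username: !secret bmw_username
#       password: !secret bmw_password
#       region: rest_of_world
#       read_only: false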
SERVICE_SCHEMA = vol.Schema({vol.Required(ATTR_VIN): cv.string})
DEFAULT_OPTIONS = {
CONF_READ_ONLY: False,
CONF_USE_LOCATION: False,
}
BMW_PLATFORMS = ["binary_sensor", "device_tracker", "lock", "notify", "sensor"]
UPDATE_INTERVAL = 5 # in minutes
SERVICE_UPDATE_STATE = "update_state"
_SERVICE_MAP = {
"light_flash": "trigger_remote_light_flash",
"sound_horn": "trigger_remote_horn",
"activate_air_conditioning": "trigger_remote_air_conditioning",
"find_vehicle": "trigger_remote_vehicle_finder",
}
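# Illustrative service call (added sketch; the VIN is a placeholder). Each key
# of _SERVICE_MAP is registered as a service that takes the vehicle VIN:
#
#   service: bmw_connected_drive.light_flash
#   data:
#     vin: WBA00000000000000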
UNDO_UPDATE_LISTENER = "undo_update_listener"
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the BMW Connected Drive component from configuration.yaml."""
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN][DATA_HASS_CONFIG] = config
if DOMAIN in config:
for entry_config in config[DOMAIN].values():
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=entry_config
)
)
return True
@callback
def _async_migrate_options_from_data_if_missing(hass, entry):
data = dict(entry.data)
options = dict(entry.options)
if CONF_READ_ONLY in data or list(options) != list(DEFAULT_OPTIONS):
options = dict(DEFAULT_OPTIONS, **options)
options[CONF_READ_ONLY] = data.pop(CONF_READ_ONLY, False)
hass.config_entries.async_update_entry(entry, data=data, options=options)
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up BMW Connected Drive from a config entry."""
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN].setdefault(DATA_ENTRIES, {})
_async_migrate_options_from_data_if_missing(hass, entry)
try:
account = await hass.async_add_executor_job(
setup_account, entry, hass, entry.data[CONF_USERNAME]
)
except OSError as ex:
raise ConfigEntryNotReady from ex
async def _async_update_all(service_call=None):
"""Update all BMW accounts."""
await hass.async_add_executor_job(_update_all)
def _update_all() -> None:
"""Update all BMW accounts."""
for entry in hass.data[DOMAIN][DATA_ENTRIES].values():
entry[CONF_ACCOUNT].update()
# Add update listener for config entry changes (options)
undo_listener = entry.add_update_listener(update_listener)
hass.data[DOMAIN][DATA_ENTRIES][entry.entry_id] = {
CONF_ACCOUNT: account,
UNDO_UPDATE_LISTENER: undo_listener,
}
# Service to manually trigger updates for all accounts.
hass.services.async_register(DOMAIN, SERVICE_UPDATE_STATE, _async_update_all)
await _async_update_all()
for platform in BMW_PLATFORMS:
if platform != NOTIFY_DOMAIN:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, platform)
)
# set up notify platform, no entry support for notify component yet,
# have to use discovery to load platform.
hass.async_create_task(
discovery.async_load_platform(
hass,
NOTIFY_DOMAIN,
DOMAIN,
{CONF_NAME: DOMAIN},
hass.data[DOMAIN][DATA_HASS_CONFIG],
)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in BMW_PLATFORMS
if component != NOTIFY_DOMAIN
]
)
)
# Only remove services if it is the last account and not read only
if (
len(hass.data[DOMAIN][DATA_ENTRIES]) == 1
and not hass.data[DOMAIN][DATA_ENTRIES][entry.entry_id][CONF_ACCOUNT].read_only
):
services = list(_SERVICE_MAP) + [SERVICE_UPDATE_STATE]
for service in services:
hass.services.async_remove(DOMAIN, service)
for vehicle in hass.data[DOMAIN][DATA_ENTRIES][entry.entry_id][
CONF_ACCOUNT
].account.vehicles:
hass.services.async_remove(NOTIFY_DOMAIN, slugify(f"{DOMAIN}_{vehicle.name}"))
if unload_ok:
hass.data[DOMAIN][DATA_ENTRIES][entry.entry_id][UNDO_UPDATE_LISTENER]()
hass.data[DOMAIN][DATA_ENTRIES].pop(entry.entry_id)
return unload_ok
async def update_listener(hass, config_entry):
"""Handle options update."""
await hass.config_entries.async_reload(config_entry.entry_id)
def setup_account(entry: ConfigEntry, hass, name: str) -> "BMWConnectedDriveAccount":
"""Set up a new BMWConnectedDriveAccount based on the config."""
username = entry.data[CONF_USERNAME]
password = entry.data[CONF_PASSWORD]
region = entry.data[CONF_REGION]
read_only = entry.options[CONF_READ_ONLY]
use_location = entry.options[CONF_USE_LOCATION]
_LOGGER.debug("Adding new account %s", name)
pos = (
(hass.config.latitude, hass.config.longitude) if use_location else (None, None)
)
cd_account = BMWConnectedDriveAccount(
username, password, region, name, read_only, *pos
)
def execute_service(call):
"""Execute a service for a vehicle."""
vin = call.data[ATTR_VIN]
vehicle = None
# Double check for read_only accounts as another account could create the services
for entry_data in [
e
for e in hass.data[DOMAIN][DATA_ENTRIES].values()
if not e[CONF_ACCOUNT].read_only
]:
vehicle = entry_data[CONF_ACCOUNT].account.get_vehicle(vin)
if vehicle:
break
if not vehicle:
_LOGGER.error("Could not find a vehicle for VIN %s", vin)
return
function_name = _SERVICE_MAP[call.service]
function_call = getattr(vehicle.remote_services, function_name)
function_call()
if not read_only:
# register the remote services
for service in _SERVICE_MAP:
hass.services.register(
DOMAIN, service, execute_service, schema=SERVICE_SCHEMA
)
# update every UPDATE_INTERVAL minutes, starting now
# this should even out the load on the servers
now = dt_util.utcnow()
track_utc_time_change(
hass,
cd_account.update,
minute=range(now.minute % UPDATE_INTERVAL, 60, UPDATE_INTERVAL),
second=now.second,
)
# Initialize
cd_account.update()
return cd_account
class BMWConnectedDriveAccount:
"""Representation of a BMW vehicle."""
def __init__(
self,
username: str,
password: str,
region_str: str,
name: str,
read_only: bool,
lat=None,
lon=None,
) -> None:
"""Initialize account."""
region = get_region_from_name(region_str)
self.read_only = read_only
self.account = ConnectedDriveAccount(username, password, region)
self.name = name
self._update_listeners = []
# Set observer position once for older cars to be in range for
# GPS position (pre-7/2014, <2km) and get new data from API
if lat and lon:
self.account.set_observer_position(lat, lon)
self.account.update_vehicle_states()
def update(self, *_):
"""Update the state of all vehicles.
Notify all listeners about the update.
"""
_LOGGER.debug(
"Updating vehicle state for account %s, notifying %d listeners",
self.name,
len(self._update_listeners),
)
try:
self.account.update_vehicle_states()
for listener in self._update_listeners:
listener()
except OSError as exception:
_LOGGER.error(
"Could not connect to the BMW Connected Drive portal. "
"The vehicle state could not be updated"
)
_LOGGER.exception(exception)
def add_update_listener(self, listener):
"""Add a listener for update notifications."""
self._update_listeners.append(listener)
class BMWConnectedDriveBaseEntity(Entity):
"""Common base for BMW entities."""
def __init__(self, account, vehicle):
"""Initialize sensor."""
self._account = account
self._vehicle = vehicle
self._attrs = {
"car": self._vehicle.name,
"vin": self._vehicle.vin,
ATTR_ATTRIBUTION: ATTRIBUTION,
}
@property
def device_info(self) -> dict:
"""Return info for device registry."""
return {
"identifiers": {(DOMAIN, self._vehicle.vin)},
"name": f'{self._vehicle.attributes.get("brand")} {self._vehicle.name}',
"model": self._vehicle.name,
"manufacturer": self._vehicle.attributes.get("brand"),
}
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
return self._attrs
@property
def should_poll(self):
"""Do not poll this class.
Updates are triggered from BMWConnectedDriveAccount.
"""
return False
def update_callback(self):
"""Schedule a state update."""
self.schedule_update_ha_state(True)
async def async_added_to_hass(self):
"""Add callback after being added to hass.
Show latest data after startup.
"""
self._account.add_update_listener(self.update_callback)
| tboyce021/home-assistant | homeassistant/components/bmw_connected_drive/__init__.py | Python | apache-2.0 | 11,147 |
#!/usr/bin/python3
import sys
import os
def printUsage():
sys.exit('Usage: %s server|client' % sys.argv[0])
if len(sys.argv) != 2 or sys.argv[1] not in ('client', 'server'):
printUsage()
print("Generating daemon script\n")
with open('dyndns.sh') as template:
fileContents = template.read()
fileContents = fileContents.replace('{DYNDNS_PATH}', os.getcwd())
fileContents = fileContents.replace('{VERSION}', sys.argv[1])
fileContents = fileContents.replace('{USER}', os.getlogin())
print("Writing daemon script in /etc/init.d\n")
daemonPath = '/etc/init.d/dyndns'
daemon = open(daemonPath, 'w')
daemon.write(fileContents)
daemon.close()
print('Changing permissions\n')
os.chmod(daemonPath, 0o755)
print('Installing the init script')
os.system('update-rc.d dyndns defaults')
print('done.\nYou can start the service by using:\nsudo service dyndns start') | MilkyWeb/dyndns | install.py | Python | mit | 881 |
import bpy
def proxy_sync_loc(self, context):
if context.object == None or context.object.proxy == None:
return
if context.object.arm_proxy_sync_loc:
sync_location(context.object)
def proxy_sync_rot(self, context):
if context.object == None or context.object.proxy == None:
return
if context.object.arm_proxy_sync_rot:
sync_rotation(context.object)
def proxy_sync_scale(self, context):
if context.object == None or context.object.proxy == None:
return
if context.object.arm_proxy_sync_scale:
sync_scale(context.object)
def proxy_sync_materials(self, context):
if context.object == None or context.object.proxy == None:
return
if context.object.arm_proxy_sync_materials:
sync_materials(context.object)
def proxy_sync_modifiers(self, context):
if context.object == None or context.object.proxy == None:
return
if context.object.arm_proxy_sync_modifiers:
sync_modifiers(context.object)
def proxy_sync_traits(self, context):
if context.object == None or context.object.proxy == None:
return
if context.object.arm_proxy_sync_traits:
sync_traits(context.object)
def make(obj):
traverse(obj, is_parent=True)
def traverse(obj, is_parent=False):
if obj == None or obj.library == None or obj.proxy != None:
return
# Make proxy for all linked children
for c in obj.children:
traverse(c)
override = bpy.context.copy()
override['object'] = obj
bpy.context.view_layer.objects.active = obj
bpy.ops.object.proxy_make(override)
# Reparent created proxies
for c in obj.children:
if c.proxy != None:
c.parent = bpy.context.view_layer.objects.active
c.matrix_parent_inverse = bpy.context.view_layer.objects.active.matrix_world.inverted()
active = bpy.context.view_layer.objects.active
sync_modifiers(active)
# No transform sync for parent
if is_parent:
active.arm_proxy_sync_loc = False
active.arm_proxy_sync_rot = False
active.arm_proxy_sync_scale = False
def sync_location(obj):
obj.location = obj.proxy.location
def sync_rotation(obj):
obj.rotation_euler = obj.proxy.rotation_euler
def sync_scale(obj):
obj.scale = obj.proxy.scale
# https://blender.stackexchange.com/questions/4878
def sync_modifiers(obj):
proxy = obj.proxy
obj.modifiers.clear()
for mSrc in obj.proxy.modifiers:
mDst = obj.modifiers.get(mSrc.name, None)
if not mDst:
mDst = obj.modifiers.new(mSrc.name, mSrc.type)
# collect names of writable properties
properties = [p.identifier for p in mSrc.bl_rna.properties
if not p.is_readonly]
# copy those properties
for prop in properties:
setattr(mDst, prop, getattr(mSrc, prop))
def sync_collection(cSrc, cDst):
cDst.clear()
for mSrc in cSrc:
mDst = cDst.get(mSrc.name, None)
if not mDst:
mDst = cDst.add()
# collect names of writable properties
properties = [p.identifier for p in mSrc.bl_rna.properties
if not p.is_readonly]
# copy those properties
for prop in properties:
setattr(mDst, prop, getattr(mSrc, prop))
def sync_traits(obj):
sync_collection(obj.proxy.arm_traitlist, obj.arm_traitlist)
for i in range(0, len(obj.arm_traitlist)):
sync_collection(obj.proxy.arm_traitlist[i].arm_traitpropslist, obj.arm_traitlist[i].arm_traitpropslist)
def sync_materials(obj):
# Blender likes to crash here:(
pass
# proxy_mats = []
# for slot in obj.proxy.material_slots:
# proxy_mats.append(slot.name)
# override = bpy.context.copy()
# override['object'] = obj
# obj.active_material_index = 0
# for i in range(len(obj.material_slots)):
# bpy.ops.object.material_slot_remove(override)
# for slot in proxy_mats:
# bpy.ops.object.material_slot_add(override)
# obj.material_slots[-1].material = bpy.data.materials[slot]
| luboslenco/cyclesgame | blender/arm/proxy.py | Python | lgpl-3.0 | 4,104 |
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.compute import base
from tempest.api import utils
from tempest.common.utils import data_utils
from tempest import config
from tempest import exceptions
from tempest import test
CONF = config.CONF
class ListServerFiltersTestJSON(base.BaseV2ComputeTest):
@classmethod
@test.safe_setup
def setUpClass(cls):
cls.set_network_resources(network=True, subnet=True, dhcp=True)
super(ListServerFiltersTestJSON, cls).setUpClass()
cls.client = cls.servers_client
# Check to see if the alternate image ref actually exists...
images_client = cls.images_client
resp, images = images_client.list_images()
if cls.image_ref != cls.image_ref_alt and \
any([image for image in images
if image['id'] == cls.image_ref_alt]):
cls.multiple_images = True
else:
cls.image_ref_alt = cls.image_ref
# Do some sanity checks here. If one of the images does
# not exist, fail early since the tests won't work...
try:
cls.images_client.get_image(cls.image_ref)
except exceptions.NotFound:
raise RuntimeError("Image %s (image_ref) was not found!" %
cls.image_ref)
try:
cls.images_client.get_image(cls.image_ref_alt)
except exceptions.NotFound:
raise RuntimeError("Image %s (image_ref_alt) was not found!" %
cls.image_ref_alt)
cls.s1_name = data_utils.rand_name(cls.__name__ + '-instance')
resp, cls.s1 = cls.create_test_server(name=cls.s1_name,
wait_until='ACTIVE')
cls.s2_name = data_utils.rand_name(cls.__name__ + '-instance')
resp, cls.s2 = cls.create_test_server(name=cls.s2_name,
image_id=cls.image_ref_alt,
wait_until='ACTIVE')
cls.s3_name = data_utils.rand_name(cls.__name__ + '-instance')
resp, cls.s3 = cls.create_test_server(name=cls.s3_name,
flavor=cls.flavor_ref_alt,
wait_until='ACTIVE')
if (CONF.service_available.neutron and
CONF.compute.allow_tenant_isolation):
network = cls.isolated_creds.get_primary_network()
cls.fixed_network_name = network['name']
else:
cls.fixed_network_name = CONF.compute.fixed_network_name
@utils.skip_unless_attr('multiple_images', 'Only one image found')
@test.attr(type='gate')
def test_list_servers_filter_by_image(self):
# Filter the list of servers by image
params = {'image': self.image_ref}
resp, body = self.client.list_servers(params)
servers = body['servers']
self.assertIn(self.s1['id'], map(lambda x: x['id'], servers))
self.assertNotIn(self.s2['id'], map(lambda x: x['id'], servers))
self.assertIn(self.s3['id'], map(lambda x: x['id'], servers))
@test.attr(type='gate')
def test_list_servers_filter_by_flavor(self):
# Filter the list of servers by flavor
params = {'flavor': self.flavor_ref_alt}
resp, body = self.client.list_servers(params)
servers = body['servers']
self.assertNotIn(self.s1['id'], map(lambda x: x['id'], servers))
self.assertNotIn(self.s2['id'], map(lambda x: x['id'], servers))
self.assertIn(self.s3['id'], map(lambda x: x['id'], servers))
@test.attr(type='gate')
def test_list_servers_filter_by_server_name(self):
# Filter the list of servers by server name
params = {'name': self.s1_name}
resp, body = self.client.list_servers(params)
servers = body['servers']
self.assertIn(self.s1_name, map(lambda x: x['name'], servers))
self.assertNotIn(self.s2_name, map(lambda x: x['name'], servers))
self.assertNotIn(self.s3_name, map(lambda x: x['name'], servers))
@test.attr(type='gate')
def test_list_servers_filter_by_server_status(self):
# Filter the list of servers by server status
params = {'status': 'active'}
resp, body = self.client.list_servers(params)
servers = body['servers']
self.assertIn(self.s1['id'], map(lambda x: x['id'], servers))
self.assertIn(self.s2['id'], map(lambda x: x['id'], servers))
self.assertIn(self.s3['id'], map(lambda x: x['id'], servers))
@test.attr(type='gate')
def test_list_servers_filter_by_shutoff_status(self):
# Filter the list of servers by server shutoff status
params = {'status': 'shutoff'}
self.client.stop(self.s1['id'])
self.client.wait_for_server_status(self.s1['id'],
'SHUTOFF')
resp, body = self.client.list_servers(params)
self.client.start(self.s1['id'])
self.client.wait_for_server_status(self.s1['id'],
'ACTIVE')
servers = body['servers']
self.assertIn(self.s1['id'], map(lambda x: x['id'], servers))
self.assertNotIn(self.s2['id'], map(lambda x: x['id'], servers))
self.assertNotIn(self.s3['id'], map(lambda x: x['id'], servers))
@test.attr(type='gate')
def test_list_servers_filter_by_limit(self):
# Verify only the expected number of servers are returned
params = {'limit': 1}
resp, servers = self.client.list_servers(params)
# when _interface='xml', one element for servers_links in servers
self.assertEqual(1, len([x for x in servers['servers'] if 'id' in x]))
@test.attr(type='gate')
def test_list_servers_filter_by_zero_limit(self):
# Verify only the expected number of servers are returned
params = {'limit': 0}
resp, servers = self.client.list_servers(params)
self.assertEqual(0, len(servers['servers']))
@test.attr(type='gate')
def test_list_servers_filter_by_exceed_limit(self):
# Verify only the expected number of servers are returned
params = {'limit': 100000}
resp, servers = self.client.list_servers(params)
resp, all_servers = self.client.list_servers()
self.assertEqual(len([x for x in all_servers['servers'] if 'id' in x]),
len([x for x in servers['servers'] if 'id' in x]))
@utils.skip_unless_attr('multiple_images', 'Only one image found')
@test.attr(type='gate')
def test_list_servers_detailed_filter_by_image(self):
# Filter the detailed list of servers by image
params = {'image': self.image_ref}
resp, body = self.client.list_servers_with_detail(params)
servers = body['servers']
self.assertIn(self.s1['id'], map(lambda x: x['id'], servers))
self.assertNotIn(self.s2['id'], map(lambda x: x['id'], servers))
self.assertIn(self.s3['id'], map(lambda x: x['id'], servers))
@test.attr(type='gate')
def test_list_servers_detailed_filter_by_flavor(self):
# Filter the detailed list of servers by flavor
params = {'flavor': self.flavor_ref_alt}
resp, body = self.client.list_servers_with_detail(params)
servers = body['servers']
self.assertNotIn(self.s1['id'], map(lambda x: x['id'], servers))
self.assertNotIn(self.s2['id'], map(lambda x: x['id'], servers))
self.assertIn(self.s3['id'], map(lambda x: x['id'], servers))
@test.attr(type='gate')
def test_list_servers_detailed_filter_by_server_name(self):
# Filter the detailed list of servers by server name
params = {'name': self.s1_name}
resp, body = self.client.list_servers_with_detail(params)
servers = body['servers']
self.assertIn(self.s1_name, map(lambda x: x['name'], servers))
self.assertNotIn(self.s2_name, map(lambda x: x['name'], servers))
self.assertNotIn(self.s3_name, map(lambda x: x['name'], servers))
@test.attr(type='gate')
def test_list_servers_detailed_filter_by_server_status(self):
# Filter the detailed list of servers by server status
params = {'status': 'active'}
resp, body = self.client.list_servers_with_detail(params)
servers = body['servers']
self.assertIn(self.s1['id'], map(lambda x: x['id'], servers))
self.assertIn(self.s2['id'], map(lambda x: x['id'], servers))
self.assertIn(self.s3['id'], map(lambda x: x['id'], servers))
self.assertEqual(['ACTIVE'] * 3, [x['status'] for x in servers])
@test.attr(type='gate')
def test_list_servers_filtered_by_name_wildcard(self):
        # List all servers that contain '-instance' in their name
params = {'name': '-instance'}
resp, body = self.client.list_servers(params)
servers = body['servers']
self.assertIn(self.s1_name, map(lambda x: x['name'], servers))
self.assertIn(self.s2_name, map(lambda x: x['name'], servers))
self.assertIn(self.s3_name, map(lambda x: x['name'], servers))
        # Take a random part of the name and search for it
part_name = self.s1_name[6:-1]
params = {'name': part_name}
resp, body = self.client.list_servers(params)
servers = body['servers']
self.assertIn(self.s1_name, map(lambda x: x['name'], servers))
self.assertNotIn(self.s2_name, map(lambda x: x['name'], servers))
self.assertNotIn(self.s3_name, map(lambda x: x['name'], servers))
@test.attr(type='gate')
def test_list_servers_filtered_by_ip(self):
# Filter servers by ip
        # Only one server should be listed here
resp, self.s1 = self.client.get_server(self.s1['id'])
ip = self.s1['addresses'][self.fixed_network_name][0]['addr']
params = {'ip': ip}
resp, body = self.client.list_servers(params)
servers = body['servers']
self.assertIn(self.s1_name, map(lambda x: x['name'], servers))
self.assertNotIn(self.s2_name, map(lambda x: x['name'], servers))
self.assertNotIn(self.s3_name, map(lambda x: x['name'], servers))
@test.skip_because(bug="1182883",
condition=CONF.service_available.neutron)
@test.attr(type='gate')
def test_list_servers_filtered_by_ip_regex(self):
# Filter servers by regex ip
# List all servers filtered by part of ip address.
        # All servers should be listed here
resp, self.s1 = self.client.get_server(self.s1['id'])
ip = self.s1['addresses'][self.fixed_network_name][0]['addr'][0:-3]
params = {'ip': ip}
resp, body = self.client.list_servers(params)
servers = body['servers']
self.assertIn(self.s1_name, map(lambda x: x['name'], servers))
self.assertIn(self.s2_name, map(lambda x: x['name'], servers))
self.assertIn(self.s3_name, map(lambda x: x['name'], servers))
@test.attr(type='gate')
def test_list_servers_detailed_limit_results(self):
# Verify only the expected number of detailed results are returned
params = {'limit': 1}
resp, servers = self.client.list_servers_with_detail(params)
self.assertEqual(1, len(servers['servers']))
class ListServerFiltersTestXML(ListServerFiltersTestJSON):
_interface = 'xml'
| vedujoshi/os_tempest | tempest/api/compute/servers/test_list_server_filters.py | Python | apache-2.0 | 12,020 |
import numpy as np
from astropy import units as u
from astropy.wcs import WCS, WCSSUB_SPECTRAL
from .wcs_utils import get_spectral_scale
def slice_wcs(wcs, spatial_scale):
"""
Slice a WCS header for a spectral cube to a Position-Velocity WCS, with
ctype "OFFSET" for the spatial offset direction
Parameters
----------
wcs : :class:`~astropy.wcs.WCS`
The WCS of the spectral cube. This should already be sanitized and
have the spectral axis along the third dimension.
spatial_scale: :class:`~astropy.units.Quantity`
The spatial scale of the position axis
Returns
-------
    wcs_slice : :class:`~astropy.wcs.WCS`
The resulting WCS slice
"""
# Extract spectral slice
wcs_slice = wcs.sub([0, WCSSUB_SPECTRAL])
# Set spatial parameters
wcs_slice.wcs.crpix[0] = 1.
wcs_slice.wcs.cdelt[0] = spatial_scale.to(u.degree).value
wcs_slice.wcs.crval[0] = 0.
wcs_slice.wcs.ctype[0] = "OFFSET"
wcs_slice.wcs.cunit[0] = 'deg'
return wcs_slice
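# A minimal usage sketch (not part of the original module): the cube WCS below
# is a hypothetical 3-axis WCS whose spectral axis is the third dimension, as
# slice_wcs() expects, and the 1-arcsec spatial scale is an assumed value.
#
#     from astropy import units as u
#     from astropy.wcs import WCS
#
#     cube_wcs = WCS(naxis=3)
#     cube_wcs.wcs.ctype = ['RA---TAN', 'DEC--TAN', 'VELO-LSR']
#     pv_wcs = slice_wcs(cube_wcs, spatial_scale=1. * u.arcsec)
#     # pv_wcs.wcs.ctype is now ['OFFSET', 'VELO-LSR']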
| bsipocz/glue | glue/external/pvextractor/utils/wcs_slicing.py | Python | bsd-3-clause | 1,042 |
# -*- coding: utf-8 -*-
#
# Copyright 2010-2013 The cygit2 contributors
#
# This file is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License, version 2,
# as published by the Free Software Foundation.
#
# In addition to the permissions in the GNU General Public License,
# the authors give you unlimited permission to link the compiled
# version of this file into combinations with other programs,
# and to distribute those combinations without any restriction
# coming from the use of this file. (The General Public License
# restrictions do apply in other respects; for example, they cover
# modification of the file, and distribution when not linked into
# a combined executable.)
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; see the file COPYING. If not, write to
# the Free Software Foundation, 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301, USA.
from .object import Object
class Reference(object):
def __init__(self, reference):
self._reference = reference
def __eq__(self, other):
return self._reference == other._reference
def __ne__(self, other):
return not (self == other)
def __gt__(self, other):
return self._reference > other._reference
def __ge__(self, other):
return not (self < other)
def __lt__(self, other):
return self._reference < other._reference
def __le__(self, other):
return not (self > other)
def get_object(self):
return Object.convert(self._reference.get_object())
def has_log(self):
return self._reference.has_log()
def logs(self):
for entry in self._reference.logs():
yield entry
def is_branch(self):
return self._reference.is_branch()
def is_remote(self):
return self._reference.is_remote()
def resolve(self):
ref = self._reference.resolve()
if ref is self._reference:
return self
return Reference(ref)
@property
def name(self):
return self._reference.name
@property
def target(self):
return self._reference.target
@property
def oid(self):
return self._reference.oid
@property
def hex(self):
return self._reference.hex
@property
def type(self):
return self._reference.type
| sjagoe/cygit2 | cygit2/pygit2/reference.py | Python | gpl-2.0 | 2,663 |
# -*- coding: utf-8 -*-
##
##
## This file is part of Indico.
## Copyright (C) 2002 - 2014 European Organization for Nuclear Research (CERN).
##
## Indico is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 3 of the
## License, or (at your option) any later version.
##
## Indico is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Indico;if not, see <http://www.gnu.org/licenses/>.
""" Simple script to parse and do some corrections HTML exported by the source OpenOffice documents
used to produce the Video Services guides.
It assumes you are using it from indico's bin directory in development mode.
If this isn't right, please change the 'ihelppath' variable and the end of this file.
"""
from HTMLParser import HTMLParser
import htmlentitydefs
import os
class MyHTMLParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
def process(self, target):
if not os.path.exists(target):
print 'Could not find file: ' + target
return
self.reset()
self._inStyleTag = False
outName = target + '.tmp'
self._out = file(outName, 'w')
self.feed(file(target).read())
self._out.close()
os.remove(target)
os.rename(outName, target)
self.close()
@classmethod
def _processAttrs(cls, tag, attrs):
attrs = dict(attrs)
if tag.lower() == 'img':
attrs.pop('height','')
attrs.pop('HEIGHT','')
attrs.pop('width','')
attrs.pop('WIDTH','')
if not 'style' in attrs or attrs['style'].find('text-align: center') == -1:
attrs['style'] = attrs.pop('style','') + ";text-align: center;"
if tag.lower() == 'p' and ('align' in attrs and attrs['align'].lower() == 'center' or 'ALIGN' in attrs and attrs['ALIGN'].lower() == 'center'):
attrs.pop('align','')
attrs.pop('ALIGN','')
if not 'style' in attrs or attrs['style'].find('text-align: center') == -1:
attrs['style'] = attrs.pop('style','') + ";text-align: center;"
return tag, attrs
def handle_starttag(self, tag, attrs):
if tag.lower() == 'style':
self._inStyleTag = True
tag, attrs = MyHTMLParser._processAttrs(tag, attrs)
strattrs = "".join([' %s="%s"' % (key, value) for key, value in attrs.iteritems()])
self._out.write("<%s%s>" % (tag, strattrs))
def handle_startendtag(self, tag, attrs):
tag, attrs = MyHTMLParser._processAttrs(tag, attrs)
strattrs = "".join([' %s="%s"' % (key, value) for key, value in attrs])
self._out.write("<%s%s />" % (tag, strattrs))
def handle_endtag(self, tag):
if tag.lower() == 'style':
self._inStyleTag = False
self._out.write("</%s>" % tag)
def handle_data(self, text):
if self._inStyleTag:
iPStyle1 = text.find("P {")
iPStyle2 = text.find("p {")
iPStyle3 = text.find("P{")
iPStyle4 = text.find("p{")
iPStyle = max(iPStyle1, iPStyle2, iPStyle3, iPStyle4)
endIPStyle = text.find('}', iPStyle)
self._out.write(text[:endIPStyle])
if not text[:endIPStyle].endswith(';margin: 0; padding: 0;'):
self._out.write(';margin: 0; padding: 0;')
self._out.write(text[endIPStyle:])
else:
self._out.write("%s" % text)
def handle_comment(self, comment):
self._out.write("<!-- %s -->\n" % comment)
def handle_entityref(self, ref):
self._out.write("&%s" % ref)
if htmlentitydefs.entitydefs.has_key(ref):
self._out.write(";")
def handle_charref(self, ref):
self._out.write("&#%s;" % ref)
def handle_pi(self, text):
self._out.write("<?%s>" % text)
def handle_decl(self, text):
self._out.write("<!%s>" % text)
if __name__ == "__main__":
p = MyHTMLParser()
ihelpPath = "../../indico/htdocs/ihelp/"
p.process(ihelpPath + "VideoServices/IndicoUserGuide_VS/index.html")
p.process(ihelpPath + "VideoServices/EventManagerUserGuide_VS/index.html")
p.process(ihelpPath + "VideoServices/ServerAdminUserGuide_VS/index.html")
p.process(ihelpPath + "VideoServices/VSAdminUserGuide_VS/index.html")
| pferreir/indico-backup | bin/utils/VSGuideHTMLFix.py | Python | gpl-3.0 | 4,708 |
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Rds20140815RevokeAccountPrivilegeRequest(RestApi):
def __init__(self,domain='rds.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.AccountName = None
self.DBInstanceId = None
self.DBName = None
def getapiname(self):
return 'rds.aliyuncs.com.RevokeAccountPrivilege.2014-08-15'
| wanghe4096/website | aliyun/api/rest/Rds20140815RevokeAccountPrivilegeRequest.py | Python | bsd-2-clause | 401 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-09-08 16:04
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("accounts", "0006_auto_20160713_0913"),
]
operations = [
migrations.AlterField(
model_name="legacyaccountprofile",
name="origin",
field=models.PositiveSmallIntegerField(
choices=[
(1, "Base de données Abonnements"),
(2, "Base de données Restrictions"),
(4, "Base de données Drupal"),
],
verbose_name="Origine",
),
),
]
| erudit/eruditorg | eruditorg/core/accounts/migrations/0007_auto_20170908_1104.py | Python | gpl-3.0 | 734 |
'''staagg: Python library to retrieve commission data from the Staagg web service
'''
__version__ = '1.0'
__author__ = 'Siddharth Saha ([email protected])'
import xml.etree.ElementTree as ET
import requests
class Staagg(object):
BASE_URL = 'http://www.staagg.com/webservices/v4'
key = ''
start_date = None # The start date from which commission data is required
    end_date = None # The end date up to which commission data is required
def __init__(self, key, start_date, end_date):
assert key and start_date and end_date, 'missing args'
self.key = key
self.start_date = start_date.strftime('%Y-%m-%dT%H:%M:%S')
self.end_date = end_date.strftime('%Y-%m-%dT%H:%M:%S')
def _get_networks(self):
# This function gets all the networks that have been configured
url = '%s/getNetworkAccounts/userWsKey/%s' %(self.BASE_URL, self.key)
r = requests.get(url = url)
if r.ok:
tree = ET.fromstring(r.content)
res = [] # Return list - [{'tag' : 'GAN', 'id' : <id>}]
networks = tree.find('items').findall('item')
for network in networks:
res.append({
'tag' : network.get('tag'),
'id' : network.get('id')})
return res
else:
raise Exception('Cannot connect to URL')
def _get_page_data(self, page_num, network_id):
url = '%s/getTransactions/userWsKey/%s/startDateTime/%s/endDateTime/%s/dateType/transactionDate/networkAccId/%s/page/%s' \
%(self.BASE_URL, self.key, self.start_date, self.end_date, network_id, page_num)
r = requests.get(url = url)
if r.ok:
tree = ET.fromstring(r.content)
data = {
'page' : int(tree.find('metadata').get('page')),
'total_pages' : int(tree.find('metadata').get('totalPages'))
}
items = tree.find('items').findall('item')
commission_data = {}
for item in items:
advertiser_id = item.get('advertiserId')
commission_amount = int(item.get('commissionAmount'))
advertiser_name = item.get('advertiserName')
if advertiser_id in commission_data:
commission_data[advertiser_id]['commission-amount'] = commission_data.get(advertiser_id).get('commission-amount') + \
commission_amount
else:
commission_data[advertiser_id] = {
'commission-amount' : commission_amount,
'advertiser-name' : advertiser_name
}
data['commission_data'] = commission_data
return data
else:
raise Exception('Cannot connect to URL')
def _get_network_commission_data(self, network):
# This function will get the commission data for a particular network that has been passed
        res = {} # Of the format {<advertiser-id> : <commission-amount>}
        # We have to get the data for all the pages
page_num = 0 # The page num whose data is to be retrieved
total_pages = 10
while page_num < total_pages:
data = self._get_page_data(page_num = page_num + 1, network_id = network.get('id'))
page_num = data.get('page')
total_pages = data.get('total_pages')
for advertiser_id in data.get('commission_data').keys():
if advertiser_id in res:
res[advertiser_id]['commission-amount'] = res.get(advertiser_id).get('commission-amount') + \
data.get('commission_data').get(advertiser_id).get('commission-amount')
else:
res[advertiser_id] = data.get('commission_data').get(advertiser_id)
return res
def get(self):
'''This method is to be called to get the commission data. The data will be returned
in the following format -
{
'<advertiser-id>' : {
'commission-amount': <amount-in-cents>,
'type' : <type-of-network>, (e.g. Commission Junction, Google Affiliate Network etc)
'advertiser-name' : <advertiser-name>
}
}
'''
res = {}
networks = self._get_networks()
for network in networks:
# Get data for all the networks one-by-one and then aggregate into res
commission_data = self._get_network_commission_data(network = network)
for advertiser_id in commission_data.keys():
res[advertiser_id] = {
'commission-amount' : commission_data.get(advertiser_id).get('commission-amount'),
'advertiser-name' : commission_data.get(advertiser_id).get('advertiser-name'),
'type' : network.get('tag')
}
return res
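# A minimal usage sketch (illustrative only; the web-service key below is a
# made-up placeholder and the date range is arbitrary):
#
#     from datetime import datetime
#
#     client = Staagg(key='YOUR_WS_KEY',
#                     start_date=datetime(2015, 1, 1),
#                     end_date=datetime(2015, 1, 31))
#     for advertiser_id, data in client.get().items():
#         print('%s: %d cents via %s' % (data['advertiser-name'],
#                                        data['commission-amount'],
#                                        data['type']))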
| sidchilling/staagg | staagg/staagg.py | Python | mit | 4,171 |
# REQUIRES: python-psutil
# Test per test timeout using external shell
# RUN: not %{lit} \
# RUN: %{inputs}/shtest-timeout/infinite_loop.py \
# RUN: %{inputs}/shtest-timeout/quick_then_slow.py \
# RUN: %{inputs}/shtest-timeout/short.py \
# RUN: %{inputs}/shtest-timeout/slow.py \
# RUN: -j 1 -v --debug --timeout 1 --param external=1 > %t.extsh.out 2> %t.extsh.err
# RUN: FileCheck --check-prefix=CHECK-OUT-COMMON < %t.extsh.out %s
# RUN: FileCheck --check-prefix=CHECK-EXTSH-ERR < %t.extsh.err %s
#
# CHECK-EXTSH-ERR: Using external shell
# Test per test timeout using internal shell
# RUN: not %{lit} \
# RUN: %{inputs}/shtest-timeout/infinite_loop.py \
# RUN: %{inputs}/shtest-timeout/quick_then_slow.py \
# RUN: %{inputs}/shtest-timeout/short.py \
# RUN: %{inputs}/shtest-timeout/slow.py \
# RUN: -j 1 -v --debug --timeout 1 --param external=0 > %t.intsh.out 2> %t.intsh.err
# RUN: FileCheck --check-prefix=CHECK-OUT-COMMON < %t.intsh.out %s
# RUN: FileCheck --check-prefix=CHECK-INTSH-OUT < %t.intsh.out %s
# RUN: FileCheck --check-prefix=CHECK-INTSH-ERR < %t.intsh.err %s
#
# CHECK-INTSH-OUT: TIMEOUT: per_test_timeout :: infinite_loop.py
# CHECK-INTSH-OUT: Command 0 Reached Timeout: True
# CHECK-INTSH-OUT: Command 0 Output:
# CHECK-INTSH-OUT-NEXT: Running infinite loop
# CHECK-INTSH-OUT: TIMEOUT: per_test_timeout :: quick_then_slow.py
# CHECK-INTSH-OUT: Timeout: Reached timeout of 1 seconds
# CHECK-INTSH-OUT: Command Output
# CHECK-INTSH-OUT: Command 0 Reached Timeout: False
# CHECK-INTSH-OUT: Command 0 Output:
# CHECK-INTSH-OUT-NEXT: Running in quick mode
# CHECK-INTSH-OUT: Command 1 Reached Timeout: True
# CHECK-INTSH-OUT: Command 1 Output:
# CHECK-INTSH-OUT-NEXT: Running in slow mode
# CHECK-INTSH-OUT: TIMEOUT: per_test_timeout :: slow.py
# CHECK-INTSH-OUT: Command 0 Reached Timeout: True
# CHECK-INTSH-OUT: Command 0 Output:
# CHECK-INTSH-OUT-NEXT: Running slow program
# CHECK-INTSH-ERR: Using internal shell
# Test per test timeout set via a config file rather than on the command line
# RUN: not %{lit} \
# RUN: %{inputs}/shtest-timeout/infinite_loop.py \
# RUN: %{inputs}/shtest-timeout/quick_then_slow.py \
# RUN: %{inputs}/shtest-timeout/short.py \
# RUN: %{inputs}/shtest-timeout/slow.py \
# RUN: -j 1 -v --debug --param external=0 \
# RUN: --param set_timeout=1 > %t.cfgset.out 2> %t.cfgset.err
# RUN: FileCheck --check-prefix=CHECK-OUT-COMMON < %t.cfgset.out %s
# RUN: FileCheck --check-prefix=CHECK-CFGSET-ERR < %t.cfgset.err %s
#
# CHECK-CFGSET-ERR: Using internal shell
# CHECK-OUT-COMMON: TIMEOUT: per_test_timeout :: infinite_loop.py
# CHECK-OUT-COMMON: Timeout: Reached timeout of 1 seconds
# CHECK-OUT-COMMON: Command {{([0-9]+ )?}}Output
# CHECK-OUT-COMMON: Running infinite loop
# CHECK-OUT-COMMON: TIMEOUT: per_test_timeout :: quick_then_slow.py
# CHECK-OUT-COMMON: Timeout: Reached timeout of 1 seconds
# CHECK-OUT-COMMON: Command {{([0-9]+ )?}}Output
# CHECK-OUT-COMMON: Running in quick mode
# CHECK-OUT-COMMON: Running in slow mode
# CHECK-OUT-COMMON: PASS: per_test_timeout :: short.py
# CHECK-OUT-COMMON: TIMEOUT: per_test_timeout :: slow.py
# CHECK-OUT-COMMON: Timeout: Reached timeout of 1 seconds
# CHECK-OUT-COMMON: Command {{([0-9]+ )?}}Output
# CHECK-OUT-COMMON: Running slow program
# CHECK-OUT-COMMON: Expected Passes{{ *}}: 1
# CHECK-OUT-COMMON: Individual Timeouts{{ *}}: 3
# Test per test timeout via a config file and on the command line.
# The value set on the command line should override the config file.
# RUN: not %{lit} \
# RUN: %{inputs}/shtest-timeout/infinite_loop.py \
# RUN: %{inputs}/shtest-timeout/quick_then_slow.py \
# RUN: %{inputs}/shtest-timeout/short.py \
# RUN: %{inputs}/shtest-timeout/slow.py \
# RUN: -j 1 -v --debug --param external=0 \
# RUN: --param set_timeout=1 --timeout=2 > %t.cmdover.out 2> %t.cmdover.err
# RUN: FileCheck --check-prefix=CHECK-CMDLINE-OVERRIDE-OUT < %t.cmdover.out %s
# RUN: FileCheck --check-prefix=CHECK-CMDLINE-OVERRIDE-ERR < %t.cmdover.err %s
# CHECK-CMDLINE-OVERRIDE-ERR: Forcing timeout to be 2 seconds
# CHECK-CMDLINE-OVERRIDE-OUT: TIMEOUT: per_test_timeout :: infinite_loop.py
# CHECK-CMDLINE-OVERRIDE-OUT: Timeout: Reached timeout of 2 seconds
# CHECK-CMDLINE-OVERRIDE-OUT: Command {{([0-9]+ )?}}Output
# CHECK-CMDLINE-OVERRIDE-OUT: Running infinite loop
# CHECK-CMDLINE-OVERRIDE-OUT: TIMEOUT: per_test_timeout :: quick_then_slow.py
# CHECK-CMDLINE-OVERRIDE-OUT: Timeout: Reached timeout of 2 seconds
# CHECK-CMDLINE-OVERRIDE-OUT: Command {{([0-9]+ )?}}Output
# CHECK-CMDLINE-OVERRIDE-OUT: Running in quick mode
# CHECK-CMDLINE-OVERRIDE-OUT: Running in slow mode
# CHECK-CMDLINE-OVERRIDE-OUT: PASS: per_test_timeout :: short.py
# CHECK-CMDLINE-OVERRIDE-OUT: TIMEOUT: per_test_timeout :: slow.py
# CHECK-CMDLINE-OVERRIDE-OUT: Timeout: Reached timeout of 2 seconds
# CHECK-CMDLINE-OVERRIDE-OUT: Command {{([0-9]+ )?}}Output
# CHECK-CMDLINE-OVERRIDE-OUT: Running slow program
# CHECK-CMDLINE-OVERRIDE-OUT: Expected Passes{{ *}}: 1
# CHECK-CMDLINE-OVERRIDE-OUT: Individual Timeouts{{ *}}: 3
| cd80/UtilizedLLVM | utils/lit/tests/shtest-timeout.py | Python | unlicense | 5,027 |
import os
from parglare import Grammar
this_folder = os.path.dirname(__file__)
def test_fqn_constructed_by_first_import_path():
g = Grammar.from_file(os.path.join(this_folder, 'A.pg'))
assert g.get_terminal('B.C.CTerm')
assert not g.get_terminal('C.CTerm')
assert g.get_nonterminal('B.C.CRule')
assert not g.get_nonterminal('C.CRule')
| igordejanovic/parglare | tests/func/import/fqn/test_fqn.py | Python | mit | 360 |
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import CreateMixin, DeleteMixin, ListMixin, ObjectDeleteMixin
__all__ = [
"GroupAccessToken",
"GroupAccessTokenManager",
]
class GroupAccessToken(ObjectDeleteMixin, RESTObject):
pass
class GroupAccessTokenManager(ListMixin, CreateMixin, DeleteMixin, RESTManager):
_path = "/groups/{group_id}/access_tokens"
_obj_cls = GroupAccessToken
_from_parent_attrs = {"group_id": "id"}
| python-gitlab/python-gitlab | gitlab/v4/objects/group_access_tokens.py | Python | lgpl-3.0 | 471 |
from coco.admin.forms import CollaborationGroupAdminForm, ShareAdminForm
from coco.core.models import *
from coco.core.management.commands import import_users
from django_admin_conf_vars.models import ConfigurationVariable
from django.conf.urls import patterns
from django.contrib import admin, messages
from django.contrib.auth.admin import GroupAdmin
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
class CoreAdminSite(admin.AdminSite):
"""
coco application admin site.
"""
site_header = 'coco Administration'
site_title = 'coco - Administration'
index_title = 'Management'
class BackendAdmin(admin.ModelAdmin):
"""
Admin model for the `Backend` model.
"""
list_display = ['module', 'klass', 'arguments']
list_filter = [
'kind'
]
fieldsets = [
('General Properties', {
'fields': ['kind', 'module', 'klass', 'arguments']
})
]
def get_readonly_fields(self, request, obj=None):
"""
:inherit.
"""
if obj:
return ['kind']
return []
class CollaborationGroupAdmin(GroupAdmin):
"""
Admin model for the `CollaborationGroup` model.
"""
list_display = ['name', 'is_public', 'is_single_user_group']
list_filter = ['creator', 'is_public', 'is_single_user_group']
form = CollaborationGroupAdminForm
fieldsets = [
('General Properties', {
'fields': ['name', 'creator']
}),
('Membership Options', {
'fields': ['admins', 'users', 'is_single_user_group']
}),
('Visibility Options', {
'fields': ['is_public']
})
]
filter_horizontal = ['admins']
def get_readonly_fields(self, request, obj=None):
"""
:inherit.
"""
if obj:
readonly = ['creator', 'name', 'is_single_user_group']
if obj.is_single_user_group:
readonly += ['admins', 'is_public'] # FIXME: 'users' -> fails
return readonly
return ['is_single_user_group']
class ConfigurationVariableAdmin(admin.ModelAdmin):
"""
Admin model for the `ConfigurationVariable` model.
"""
list_display = ['name']
fieldsets = [
('General Properties', {
'fields': ['name', 'description', 'value']
})
]
readonly_fields = ['description', 'name']
def has_add_permission(self, request):
"""
:inherit.
"""
return False
class ContainerAdmin(admin.ModelAdmin):
"""
Admin model for the `Container` model.
"""
actions = [
'restart_containers',
'resume_containers',
'start_containers',
'stop_containers',
'suspend_containers'
]
list_display = ['name', 'description', 'owner', 'is_clone', 'is_running', 'is_suspended']
list_filter = [
('clone_of', admin.RelatedOnlyFieldListFilter),
('image', admin.RelatedOnlyFieldListFilter),
('owner', admin.RelatedOnlyFieldListFilter),
]
fieldsets = [
('General Properties', {
'fields': ['name', 'description', 'owner']
}),
('Creation Properties', {
'fields': ['image', 'clone_of', 'server']
}),
('Backend Properties', {
'classes': ['collapse'],
'fields': ['backend_pk']
})
]
def get_readonly_fields(self, request, obj=None):
"""
:inherit.
"""
if obj:
return ['backend_pk', 'clone_of', 'image', 'name', 'owner', 'server']
return ['backend_pk']
def response_change(self, request, obj):
"""
:inherit.
"""
stay = False
if '_clone' in request.POST or '_commit' in request.POST or '_snapshot' in request.POST:
name = request.POST.get('_container-action-name')
if len(name) != 0:
try:
if '_clone' in request.POST:
ret = obj.clone(name)
url = reverse('admin:core_container_change', args=(ret.id,))
elif '_commit' in request.POST:
ret = obj.commit(name)
url = reverse('admin:core_containerimage_change', args=(ret.id,))
else:
ret = obj.create_snapshot(name)
url = reverse('admin:core_containersnapshot_change', args=(ret.id,))
self.message_user(request, "Container action completed successfully.")
return HttpResponseRedirect(url)
except Exception:
self.message_user(request, "Operation failed.", messages.ERROR)
else:
self.message_user(request, "The name field is required.", messages.WARNING)
stay = True
elif '_restart' in request.POST:
self.restart_containers(request, [obj])
stay = True
elif '_resume' in request.POST:
self.resume_containers(request, [obj])
stay = True
elif '_start' in request.POST:
self.start_containers(request, [obj])
stay = True
elif '_stop' in request.POST:
self.stop_containers(request, [obj])
stay = True
elif '_suspend' in request.POST:
self.suspend_containers(request, [obj])
stay = True
if stay:
return HttpResponseRedirect(reverse('admin:core_container_change', args=(obj.id,)))
return super(ContainerAdmin, self).response_change(request, obj)
def restart_containers(self, request, queryset):
"""
Restart all selected containers.
"""
failed = 0
restarted = 0
for container in queryset:
try:
container.restart()
restarted += 1
except Exception:
failed += 1
self.message_user(
request,
"Successfully restarted %i container(s). %i failed." % (restarted, failed)
)
restart_containers.short_description = "Restart selected containers"
def resume_containers(self, request, queryset):
"""
        Resume all selected containers.
"""
failed = 0
resumed = 0
for container in queryset:
try:
container.resume()
resumed += 1
except Exception:
failed += 1
self.message_user(
request,
"Successfully resumed %i container(s). %i failed." % (resumed, failed)
)
resume_containers.short_description = "Resume selected containers"
def start_containers(self, request, queryset):
"""
Start all selected containers.
"""
failed = 0
started = 0
for container in queryset:
try:
container.start()
started += 1
except Exception:
failed += 1
self.message_user(
request,
"Successfully started %i container(s). %i failed." % (started, failed)
)
start_containers.short_description = "Start selected containers"
def stop_containers(self, request, queryset):
"""
        Stop all selected containers.
"""
failed = 0
stopped = 0
for container in queryset:
try:
container.stop()
stopped += 1
except Exception:
failed += 1
self.message_user(
request,
"Successfully stopped %i container(s). %i failed." % (stopped, failed)
)
stop_containers.short_description = "Stop selected containers"
def suspend_containers(self, request, queryset):
"""
Suspend all selected containers.
"""
failed = 0
suspended = 0
for container in queryset:
try:
container.suspend()
suspended += 1
except Exception:
failed += 1
self.message_user(
request,
"Successfully suspended %i container(s). %i failed." % (suspended, failed)
)
suspend_containers.short_description = "Suspend selected containers"
class ContainerImageAdmin(admin.ModelAdmin):
"""
Admin model for the `ContainerImage` model.
"""
list_display = ['get_friendly_name', 'short_description', 'is_internal', 'is_public']
list_filter = [
'is_internal',
'is_public',
('owner', admin.RelatedOnlyFieldListFilter),
]
fieldsets = [
('General Properties', {
'fields': ['name', 'short_description', 'description', 'owner']
}),
('Backend Properties', {
'classes': ['collapse'],
'fields': ['backend_pk', 'command', 'protected_port', 'public_ports']
}),
('Visibility Options', {
'fields': ['is_public']
})
]
def get_friendly_name(self, obj):
"""
Get the container image's friendly name.
"""
return obj.get_friendly_name()
get_friendly_name.short_description = 'Friendly name'
def get_readonly_fields(self, request, obj=None):
"""
:inherit.
"""
if obj:
return ['backend_pk', 'command', 'protected_port', 'public_ports', 'owner']
return []
class ContainerSnapshotAdmin(admin.ModelAdmin):
"""
Admin model for the `ContainerSnapshot` model.
"""
actions = [
'restore_snapshots'
]
list_display = ['name', 'description', 'container']
list_filter = [
('container', admin.RelatedOnlyFieldListFilter),
]
fieldsets = [
('General Properties', {
'fields': ['name', 'description']
}),
('Creation Properties', {
'fields': ['container']
}),
('Backend Properties', {
'classes': ['collapse'],
'fields': ['backend_pk']
})
]
def get_readonly_fields(self, request, obj=None):
"""
:inherit.
"""
if obj:
return ['backend_pk', 'container', 'name']
return ['backend_pk']
def response_change(self, request, obj):
"""
:inherit.
"""
if '_restore' in request.POST:
self.restore_snapshots(request, [obj])
request.POST['_continue'] = True
return super(ContainerSnapshotAdmin, self).response_change(request, obj)
def restore_snapshots(self, request, queryset):
"""
        Restore all selected container snapshots.
"""
failed = 0
restored = 0
for snapshot in queryset:
try:
snapshot.restore()
restored += 1
except Exception:
failed += 1
self.message_user(
request,
"Successfully restored %i container snapshot(s). %i failed." % (restored, failed)
)
restore_snapshots.short_description = "Restore selected container snapshots"
class GroupAdmin(admin.ModelAdmin):
"""
Admin model for the `Group` model.
"""
fieldsets = [
('General Properties', {
'fields': ['name']
})
]
def get_readonly_fields(self, request, obj=None):
"""
:inherit.
"""
if obj:
return ['name']
return []
class NotificationAdmin(admin.ModelAdmin):
"""
Admin model for the `Notification` model.
"""
list_display = ['message', 'date', 'sender', 'has_related_object', 'is_system_notification']
list_filter = [
('sender', admin.RelatedOnlyFieldListFilter),
'notification_type',
('container', admin.RelatedOnlyFieldListFilter),
('container_image', admin.RelatedOnlyFieldListFilter),
('group', admin.RelatedOnlyFieldListFilter),
('share', admin.RelatedOnlyFieldListFilter),
]
fieldsets = [
('General Properties', {
'fields': ['notification_type', 'message', 'sender']
}),
('Related Objects', {
'classes': ['collapse'],
'fields': ['container', 'container_image', 'group', 'share']
}),
('Receivers', {
'fields': ['receiver_groups']
})
]
def get_readonly_fields(self, request, obj=None):
"""
:inherit.
"""
if obj:
return ['container', 'container_image', 'date', 'group', 'message',
'notification_type', 'receiver_groups', 'sender', 'share']
return []
class PortMappingAdmin(admin.ModelAdmin):
"""
Admin model for the `PortMapping` model.
"""
list_display = ['container', 'external_port', 'internal_port']
list_filter = [
('container', admin.RelatedOnlyFieldListFilter),
'external_port',
'internal_port',
('server', admin.RelatedOnlyFieldListFilter),
]
fieldsets = [
('Server Properties', {
'fields': ['server', 'external_port']
}),
('Container Properties', {
'fields': ['container', 'internal_port']
})
]
readonly_fields = ['container', 'external_port', 'internal_port', 'server']
def has_add_permission(self, request):
"""
:inherit.
"""
return False
class ServerAdmin(admin.ModelAdmin):
"""
Admin model for the `Server` model.
"""
list_display = ['name', 'internal_ip', 'external_ip', 'is_container_host']
list_filter = [
('container_backend', admin.RelatedOnlyFieldListFilter),
]
fieldsets = [
('General Properties', {
'fields': ['name', 'internal_ip', 'external_ip']
}),
('Container Backend Properties', {
'classes': ['collapse'],
'fields': ['container_backend', 'container_backend_args']
})
]
def get_readonly_fields(self, request, obj=None):
"""
:inherit.
"""
if obj:
# TODO: make only readonly if hosts containers
return ['container_backend', 'external_ip', 'internal_ip']
return []
class ShareAdmin(admin.ModelAdmin):
"""
Admin model for the `Share` model.
"""
list_display = ['name', 'description', 'owner']
list_filter = [
('owner', admin.RelatedOnlyFieldListFilter),
('tags', admin.RelatedOnlyFieldListFilter),
]
form = ShareAdminForm
fieldsets = [
('General Properties', {
'fields': ['name', 'description', 'tags', 'owner']
}),
('Access Control', {
'fields': ['access_groups']
})
]
filter_horizontal = ['access_groups', 'tags']
def get_readonly_fields(self, request, obj=None):
"""
:inherit.
"""
if obj:
return ['name', 'owner']
return []
class TagAdmin(admin.ModelAdmin):
"""
Admin model for the `Tag` model.
"""
fieldsets = [
('General Properties', {
'fields': ['label']
})
]
class UserAdmin(admin.ModelAdmin):
"""
Admin model for the `User` model.
"""
list_display = ['username', 'is_active', 'is_staff']
list_filter = ['is_active', 'is_staff']
class Media:
        # javascript to add custom button to User list
js = ('admin/js/user_import.js', )
def get_fieldsets(self, request, obj=None):
"""
:inherit.
"""
if obj is not None and hasattr(obj, 'backend_user'):
return [
('General Properties', {
'fields': ['username', 'is_active', 'is_staff']
}),
('Group Memberships', {
'classes': ['collapse'],
'fields': ['groups']
})
]
else:
return [
('General Properties', {
'fields': ['username', 'is_active', 'is_staff']
})
]
def get_readonly_fields(self, request, obj=None):
"""
:inherit.
"""
if obj is not None and hasattr(obj, 'backend_user'):
return ['groups', 'is_staff', 'username']
else:
return ['is_staff']
def get_urls(self):
"""
TODO.
"""
urls = super(UserAdmin, self).get_urls()
my_urls = patterns('', (r'^import_users/$', self.import_users))
return my_urls + urls
def has_add_permission(self, request):
"""
:inherit.
"""
return False
def import_users(self, request):
"""
TODO.
"""
# custom view which should return an HttpResponse
try:
# Todo: imports
new_users = import_users.import_users()
if len(new_users) == 0:
self.message_user(request, "All users already imported.", messages.INFO)
else:
self.message_user(request, "Successfully imported {} users: {}".format(len(new_users), ', '.join(new_users)))
except Exception:
self.message_user(request, "Operation failed.", messages.ERROR)
return HttpResponseRedirect(reverse('admin:auth_user_changelist'))
# register the model admins with the site
admin_site = CoreAdminSite(name='coco')
admin_site.register(Backend, BackendAdmin)
admin_site.register(CollaborationGroup, CollaborationGroupAdmin)
admin_site.register(ConfigurationVariable, ConfigurationVariableAdmin)
admin_site.register(Container, ContainerAdmin)
admin_site.register(ContainerImage, ContainerImageAdmin)
admin_site.register(ContainerSnapshot, ContainerSnapshotAdmin)
admin_site.register(Group, GroupAdmin)
admin_site.register(Notification, NotificationAdmin)
admin_site.register(PortMapping, PortMappingAdmin)
admin_site.register(Server, ServerAdmin)
admin_site.register(Share, ShareAdmin)
admin_site.register(Tag, TagAdmin)
admin_site.register(User, UserAdmin)
| coco-project/coco | coco/admin/admin.py | Python | bsd-3-clause | 18,284 |
import wandb
import keras
import numpy as np
import tensorflow as tf
from wandb.keras import WandbCallback
def main():
#wandb.init(project="tf2")
wandb.init()
model = tf.keras.models.Sequential()
model.add(tf.keras.layers.Conv2D(
3, 3, activation="relu", input_shape=(28, 28, 1)))
model.add(tf.keras.layers.Flatten())
model.add(tf.keras.layers.Dense(10, activation="softmax"))
model.compile(loss="sparse_categorical_crossentropy",
optimizer="sgd", metrics=["accuracy"])
model.fit(np.ones((10, 28, 28, 1)), np.ones((10,)), epochs=7,
validation_split=0.2, callbacks=[WandbCallback()])
if __name__ == '__main__':
main()
| wandb/client | standalone_tests/mixed_keras.py | Python | mit | 698 |
#!/usr/bin/python
import os
import glob
import numpy as np
import networkx as nx
from mcl_clustering import networkx_mcl
from os import path
from datetime import datetime, timedelta
from collections import namedtuple
DATA_DIR = os.getenv('DATA_DIR', r"C:\Users\Jon\Documents\UIUC\CS 538\project\data")
def load_loc_records_from_file(filepath):
records = []
with open(filepath) as f:
for l in f:
l = l.strip()
if not l:
continue
t, loc, macs = l.split()
macs = set(macs.split(','))
records.append((datetime.strptime(t, '%Y-%m-%dT%H:%M:%S'), loc, macs))
return records
def load_loc_records(user):
filepath = path.join(DATA_DIR, "uim_classified", user)
return load_loc_records_from_file(filepath)
# 2 hour buckets
def time_of_day(t):
return t.time().hour / 2
def weekday_type(t):
wd = t.date().weekday()
if wd >= 5:
return 'weekend'
else:
return 'weekday'
def load_graph(filepath):
g = nx.Graph()
with open(filepath, 'r') as f:
nodes_loaded = False
for l in f:
l = l.strip()
if not l:
continue
if not nodes_loaded:
nodes_loaded = True
nodes = l.split()
for n in nodes:
g.add_node(int(n))
else:
e0, e1, w = l.split()
e0, e1, w = int(e0), int(e1), int(w)
g.add_edge(e0, e1, weight=w)
return g
def save_graph(g, filepath):
with open(filepath, 'w') as f:
for node in g.nodes():
f.write('%d ' % node)
f.write('\n')
for e0, e1, data in g.edges(data=True):
f.write('%d %d %d\n' % (e0 ,e1, data['weight']))
def create_raw_graph(user, records, load=True):
filepath = user + '_raw.nxg'
if load:
try:
g = load_graph(filepath)
print('Loaded graph')
return g
except:
pass
print('Constructing graph')
g = nx.Graph()
#for i in range(len(records)):
# g.add_node(i)
for i in range(len(records)):
t0, _, macs0 = records[i]
td0 = time_of_day(t0)
wd0 = weekday_type(t0)
for j in range(i+1, len(records)):
t1, _, macs1 = records[j]
td1 = time_of_day(t1)
wd1 = weekday_type(t1)
w = len(macs0 & macs1)
if td0 == td1 and wd0 == wd1 and w > 1:
print('Adding edge', i, j, w)
g.add_edge(i,j, weight=w)
save_graph(g, filepath)
print('Created graph')
return g
def create_loc_graph(user, records, load=True):
filepath = user + '_loc.nxg'
if load:
try:
g = load_graph(filepath)
print('Loaded graph')
return g
except:
pass
print('Constructing graph')
g = nx.Graph()
#for i in range(len(records)):
# g.add_node(i)
for i in range(len(records)):
t0, loc0, macs0 = records[i]
td0 = time_of_day(t0)
wd0 = weekday_type(t0)
for j in range(i+1, len(records)):
t1, loc1, macs1 = records[j]
td1 = time_of_day(t1)
wd1 = weekday_type(t1)
w = len(macs0 & macs1)
if loc0 == loc1 and td0 == td1 and wd0 == wd1 and w > 1:
print('Adding edge', i, j, w)
g.add_edge(i,j, weight=w)
save_graph(g, filepath)
print('Created graph')
return g
def star_cluster(g):
    """Greedy star clustering: repeatedly take the highest-degree unmarked
    node as the next cluster center and group it with its unmarked neighbours."""
    vs = g.nodes()
es = g.edges()
degrees = []
for v in vs:
degree = 0
for e in es:
if v in e:
degree += 1
degrees.append((v, degree))
degrees = sorted(degrees, key=lambda x:x[1], reverse=True)
marked = {}
for v in vs:
marked[v] = False
locations = []
for v,d in degrees:
if marked[v]:
continue
location = set()
location.add(v)
for e in es:
if e[0] == v:
if not marked[e[1]]:
location.add(e[1])
marked[e[1]] = True
if e[1] == v:
if not marked[e[0]]:
location.add(e[0])
marked[e[0]] = True
locations.append(location)
return locations
if __name__ == '__main__':
user = 'User15'
records = load_loc_records(user)
print('Loaded %d records' % len(records))
raw_g = create_raw_graph(user, records, False)
loc_g = create_loc_graph(user, records, False)
raw_M, raw_clusters = networkx_mcl(
raw_g,
)
print("Raw clusters", len(raw_clusters))
print("Clusters:")
t = 0
for k, v in raw_clusters.items():
t += len(v)
print(k, len(v))
print(t)
| josting/CS538_Project | ideas.py | Python | mit | 4,897 |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Nadam for TensorFlow."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.keras.optimizer_v2 import adam
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.training import training_ops
class Nadam(adam.Adam):
r"""Optimizer that implements the NAdam algorithm.
Much like Adam is essentially RMSprop with momentum, Nadam is Adam with
Nesterov momentum.
Initialization:
$$m_0 := 0 \text{(Initialize initial 1st moment vector)}$$
$$v_0 := 0 \text{(Initialize initial 2nd moment vector)}$$
$$t := 0 \text{(Initialize timestep)}$$
Computes:
$$t := t + 1$$
$$lr_t := \text{learning\_rate} * \sqrt{1 - beta_2^t} / (1 - beta_1^t)$$
$$m_t := beta_1 * m_{t-1} + (1 - beta_1) * g$$
$$v_t := beta_2 * v_{t-1} + (1 - beta_2) * g * g$$
  $$m_bar_t := beta_1 * m_t + (1 - beta_1) * g$$
$$theta_t := theta_{t-1} - lr_t * m_bar_t / (\sqrt{v_t} + \epsilon)$$
gradient is evaluated at theta(t) + momentum * v(t), and the variables always
store theta + beta_1 * m / sqrt(v) instead of theta.
References
See [Dozat, T., 2015](http://cs229.stanford.edu/proj2015/054_report.pdf).
"""
def __init__(self,
learning_rate=0.001,
beta_1=0.9,
beta_2=0.999,
epsilon=1e-7,
name='Nadam',
**kwargs):
"""Construct a new Nadam optimizer.
Args:
learning_rate: A Tensor or a floating point value. The learning rate.
beta_1: A float value or a constant float tensor. The exponential decay
rate for the 1st moment estimates.
beta_2: A float value or a constant float tensor. The exponential decay
rate for the exponentially weighted infinity norm.
epsilon: A small constant for numerical stability.
name: Optional name for the operations created when applying gradients.
Defaults to "Adamax".
**kwargs: keyword arguments. Allowed to be {`decay`}
"""
    # Backwards compatibility with keras NAdam optimizer.
if 'schedule_decay' in kwargs:
kwargs['decay'] = kwargs.pop('schedule_decay')
# pylint: disable=useless-super-delegation
super(Nadam, self).__init__(
learning_rate=learning_rate,
beta_1=beta_1,
beta_2=beta_2,
epsilon=epsilon,
amsgrad=False,
name=name,
**kwargs)
# pylint: enable=useless-super-delegation
def _resource_apply_dense(self, grad, var):
var_dtype = var.dtype.base_dtype
lr_t = self._decayed_lr(var_dtype)
m = self.get_slot(var, 'm')
v = self.get_slot(var, 'v')
beta_1_t = self._get_hyper('beta_1', var_dtype)
beta_2_t = self._get_hyper('beta_2', var_dtype)
local_step = math_ops.cast(self.iterations + 1, var_dtype)
beta_1_power = math_ops.pow(beta_1_t, local_step)
beta_2_power = math_ops.pow(beta_2_t, local_step)
return training_ops.resource_apply_adam(
var.handle,
m.handle,
v.handle,
beta_1_power,
beta_2_power,
lr_t,
beta_1_t,
beta_2_t,
self._get_hyper('epsilon', var_dtype),
grad,
use_locking=self._use_locking,
use_nesterov=True)
def _resource_apply_sparse(self, grad, var, indices):
var_dtype = var.dtype.base_dtype
lr_t = self._decayed_lr(var_dtype)
beta_1_t = self._get_hyper('beta_1', var_dtype)
beta_2_t = self._get_hyper('beta_2', var_dtype)
local_step = math_ops.cast(self.iterations + 1, var_dtype)
beta_1_power = math_ops.pow(beta_1_t, local_step)
beta_2_power = math_ops.pow(beta_2_t, local_step)
epsilon_t = self._get_hyper('epsilon', var_dtype)
lr = (lr_t * math_ops.sqrt(1 - beta_2_power) / (1 - beta_1_power))
# m_t = beta1 * m + (1 - beta1) * g_t
m = self.get_slot(var, 'm')
m_scaled_g_values = grad * (1 - beta_1_t)
m_t = state_ops.assign(m, m * beta_1_t, use_locking=self._use_locking)
with ops.control_dependencies([m_t]):
m_t = self._resource_scatter_add(m, indices, m_scaled_g_values)
# m_bar = (1 - beta1) * g_t + beta1 * m_t
m_bar = m_scaled_g_values + beta_1_t * array_ops.gather(m_t, indices)
# v_t = beta2 * v + (1 - beta2) * (g_t * g_t)
v = self.get_slot(var, 'v')
v_scaled_g_values = (grad * grad) * (1 - beta_2_t)
v_t = state_ops.assign(v, v * beta_2_t, use_locking=self._use_locking)
with ops.control_dependencies([v_t]):
v_t = self._resource_scatter_add(v, indices, v_scaled_g_values)
v_t_slice = array_ops.gather(v_t, indices)
v_sqrt = math_ops.sqrt(v_t_slice)
var_update = self._resource_scatter_add(var, indices,
-lr * m_bar / (v_sqrt + epsilon_t))
return control_flow_ops.group(*[var_update, m_bar, v_t])
| asimshankar/tensorflow | tensorflow/python/keras/optimizer_v2/nadam.py | Python | apache-2.0 | 5,695 |
# Copyright (c) 2016 Matthew Earl
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
__all__ = (
'get_face_mask',
'LandmarkFinder',
'NoFaces',
'TooManyFaces',
)
import cv2
import dlib
import numpy
class TooManyFaces(Exception):
pass
class NoFaces(Exception):
pass
class LandmarkFinder(object):
def __init__(self, predictor_path):
self.detector = dlib.get_frontal_face_detector()
self.predictor = dlib.shape_predictor(str(predictor_path))
def get(self, im):
rects = self.detector(im, 1)
if len(rects) > 1:
raise TooManyFaces
if len(rects) == 0:
raise NoFaces
return numpy.matrix([[p.x, p.y]
for p in self.predictor(im, rects[0]).parts()])
def draw_convex_hull(im, points, color):
points = cv2.convexHull(points)
cv2.fillConvexPoly(im, points, color=color)
def get_face_mask(shape, landmarks):
im = numpy.zeros(shape[:2], dtype=numpy.float64)
draw_convex_hull(im,
landmarks,
color=1)
return im
| matthewearl/photo-a-day-aligner | pada/landmarks.py | Python | mit | 2,163 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2018-01-30 20:07
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('lots_admin', '0035_application_closing_invite_sent'),
]
operations = [
migrations.AddField(
model_name='application',
name='organization_confirmed',
field=models.BooleanField(default=False),
),
]
| datamade/large-lots | lots_admin/migrations/0036_application_organization_confirmed.py | Python | mit | 487 |
"""The test for the min/max sensor platform."""
from os import path
import statistics
import unittest
from homeassistant import config as hass_config
from homeassistant.components.min_max import DOMAIN
from homeassistant.const import (
ATTR_UNIT_OF_MEASUREMENT,
PERCENTAGE,
SERVICE_RELOAD,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
from homeassistant.setup import async_setup_component, setup_component
from tests.async_mock import patch
from tests.common import get_test_home_assistant
class TestMinMaxSensor(unittest.TestCase):
"""Test the min/max sensor."""
def setup_method(self, method):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.values = [17, 20, 15.3]
self.count = len(self.values)
self.min = min(self.values)
self.max = max(self.values)
self.mean = round(sum(self.values) / self.count, 2)
self.mean_1_digit = round(sum(self.values) / self.count, 1)
self.mean_4_digits = round(sum(self.values) / self.count, 4)
self.median = round(statistics.median(self.values), 2)
def teardown_method(self, method):
"""Stop everything that was started."""
self.hass.stop()
def test_min_sensor(self):
"""Test the min sensor."""
config = {
"sensor": {
"platform": "min_max",
"name": "test_min",
"type": "min",
"entity_ids": ["sensor.test_1", "sensor.test_2", "sensor.test_3"],
}
}
assert setup_component(self.hass, "sensor", config)
entity_ids = config["sensor"]["entity_ids"]
for entity_id, value in dict(zip(entity_ids, self.values)).items():
self.hass.states.set(entity_id, value)
self.hass.block_till_done()
state = self.hass.states.get("sensor.test_min")
assert str(float(self.min)) == state.state
assert entity_ids[2] == state.attributes.get("min_entity_id")
assert self.max == state.attributes.get("max_value")
assert entity_ids[1] == state.attributes.get("max_entity_id")
assert self.mean == state.attributes.get("mean")
assert self.median == state.attributes.get("median")
def test_max_sensor(self):
"""Test the max sensor."""
config = {
"sensor": {
"platform": "min_max",
"name": "test_max",
"type": "max",
"entity_ids": ["sensor.test_1", "sensor.test_2", "sensor.test_3"],
}
}
assert setup_component(self.hass, "sensor", config)
entity_ids = config["sensor"]["entity_ids"]
for entity_id, value in dict(zip(entity_ids, self.values)).items():
self.hass.states.set(entity_id, value)
self.hass.block_till_done()
state = self.hass.states.get("sensor.test_max")
assert str(float(self.max)) == state.state
assert entity_ids[2] == state.attributes.get("min_entity_id")
assert self.min == state.attributes.get("min_value")
assert entity_ids[1] == state.attributes.get("max_entity_id")
assert self.mean == state.attributes.get("mean")
assert self.median == state.attributes.get("median")
def test_mean_sensor(self):
"""Test the mean sensor."""
config = {
"sensor": {
"platform": "min_max",
"name": "test_mean",
"type": "mean",
"entity_ids": ["sensor.test_1", "sensor.test_2", "sensor.test_3"],
}
}
assert setup_component(self.hass, "sensor", config)
entity_ids = config["sensor"]["entity_ids"]
for entity_id, value in dict(zip(entity_ids, self.values)).items():
self.hass.states.set(entity_id, value)
self.hass.block_till_done()
state = self.hass.states.get("sensor.test_mean")
assert str(float(self.mean)) == state.state
assert self.min == state.attributes.get("min_value")
assert entity_ids[2] == state.attributes.get("min_entity_id")
assert self.max == state.attributes.get("max_value")
assert entity_ids[1] == state.attributes.get("max_entity_id")
assert self.median == state.attributes.get("median")
def test_mean_1_digit_sensor(self):
"""Test the mean with 1-digit precision sensor."""
config = {
"sensor": {
"platform": "min_max",
"name": "test_mean",
"type": "mean",
"round_digits": 1,
"entity_ids": ["sensor.test_1", "sensor.test_2", "sensor.test_3"],
}
}
assert setup_component(self.hass, "sensor", config)
entity_ids = config["sensor"]["entity_ids"]
for entity_id, value in dict(zip(entity_ids, self.values)).items():
self.hass.states.set(entity_id, value)
self.hass.block_till_done()
state = self.hass.states.get("sensor.test_mean")
assert str(float(self.mean_1_digit)) == state.state
assert self.min == state.attributes.get("min_value")
assert entity_ids[2] == state.attributes.get("min_entity_id")
assert self.max == state.attributes.get("max_value")
assert entity_ids[1] == state.attributes.get("max_entity_id")
assert self.median == state.attributes.get("median")
def test_mean_4_digit_sensor(self):
"""Test the mean with 1-digit precision sensor."""
config = {
"sensor": {
"platform": "min_max",
"name": "test_mean",
"type": "mean",
"round_digits": 4,
"entity_ids": ["sensor.test_1", "sensor.test_2", "sensor.test_3"],
}
}
assert setup_component(self.hass, "sensor", config)
entity_ids = config["sensor"]["entity_ids"]
for entity_id, value in dict(zip(entity_ids, self.values)).items():
self.hass.states.set(entity_id, value)
self.hass.block_till_done()
state = self.hass.states.get("sensor.test_mean")
assert str(float(self.mean_4_digits)) == state.state
assert self.min == state.attributes.get("min_value")
assert entity_ids[2] == state.attributes.get("min_entity_id")
assert self.max == state.attributes.get("max_value")
assert entity_ids[1] == state.attributes.get("max_entity_id")
assert self.median == state.attributes.get("median")
def test_median_sensor(self):
"""Test the median sensor."""
config = {
"sensor": {
"platform": "min_max",
"name": "test_median",
"type": "median",
"entity_ids": ["sensor.test_1", "sensor.test_2", "sensor.test_3"],
}
}
assert setup_component(self.hass, "sensor", config)
entity_ids = config["sensor"]["entity_ids"]
for entity_id, value in dict(zip(entity_ids, self.values)).items():
self.hass.states.set(entity_id, value)
self.hass.block_till_done()
state = self.hass.states.get("sensor.test_median")
assert str(float(self.median)) == state.state
assert self.min == state.attributes.get("min_value")
assert entity_ids[2] == state.attributes.get("min_entity_id")
assert self.max == state.attributes.get("max_value")
assert entity_ids[1] == state.attributes.get("max_entity_id")
assert self.mean == state.attributes.get("mean")
def test_not_enough_sensor_value(self):
"""Test that there is nothing done if not enough values available."""
config = {
"sensor": {
"platform": "min_max",
"name": "test_max",
"type": "max",
"entity_ids": ["sensor.test_1", "sensor.test_2", "sensor.test_3"],
}
}
assert setup_component(self.hass, "sensor", config)
entity_ids = config["sensor"]["entity_ids"]
self.hass.states.set(entity_ids[0], STATE_UNKNOWN)
self.hass.block_till_done()
state = self.hass.states.get("sensor.test_max")
assert STATE_UNKNOWN == state.state
assert state.attributes.get("min_entity_id") is None
assert state.attributes.get("min_value") is None
assert state.attributes.get("max_entity_id") is None
assert state.attributes.get("max_value") is None
assert state.attributes.get("median") is None
self.hass.states.set(entity_ids[1], self.values[1])
self.hass.block_till_done()
state = self.hass.states.get("sensor.test_max")
assert STATE_UNKNOWN != state.state
assert entity_ids[1] == state.attributes.get("min_entity_id")
assert self.values[1] == state.attributes.get("min_value")
assert entity_ids[1] == state.attributes.get("max_entity_id")
assert self.values[1] == state.attributes.get("max_value")
self.hass.states.set(entity_ids[2], STATE_UNKNOWN)
self.hass.block_till_done()
state = self.hass.states.get("sensor.test_max")
assert STATE_UNKNOWN != state.state
assert entity_ids[1] == state.attributes.get("min_entity_id")
assert self.values[1] == state.attributes.get("min_value")
assert entity_ids[1] == state.attributes.get("max_entity_id")
assert self.values[1] == state.attributes.get("max_value")
self.hass.states.set(entity_ids[1], STATE_UNAVAILABLE)
self.hass.block_till_done()
state = self.hass.states.get("sensor.test_max")
assert STATE_UNKNOWN == state.state
assert state.attributes.get("min_entity_id") is None
assert state.attributes.get("min_value") is None
assert state.attributes.get("max_entity_id") is None
assert state.attributes.get("max_value") is None
def test_different_unit_of_measurement(self):
"""Test for different unit of measurement."""
config = {
"sensor": {
"platform": "min_max",
"name": "test",
"type": "mean",
"entity_ids": ["sensor.test_1", "sensor.test_2", "sensor.test_3"],
}
}
assert setup_component(self.hass, "sensor", config)
entity_ids = config["sensor"]["entity_ids"]
self.hass.states.set(
entity_ids[0], self.values[0], {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}
)
self.hass.block_till_done()
state = self.hass.states.get("sensor.test")
assert str(float(self.values[0])) == state.state
assert state.attributes.get("unit_of_measurement") == TEMP_CELSIUS
self.hass.states.set(
entity_ids[1], self.values[1], {ATTR_UNIT_OF_MEASUREMENT: TEMP_FAHRENHEIT}
)
self.hass.block_till_done()
state = self.hass.states.get("sensor.test")
assert STATE_UNKNOWN == state.state
assert state.attributes.get("unit_of_measurement") == "ERR"
self.hass.states.set(
entity_ids[2], self.values[2], {ATTR_UNIT_OF_MEASUREMENT: PERCENTAGE}
)
self.hass.block_till_done()
state = self.hass.states.get("sensor.test")
assert STATE_UNKNOWN == state.state
assert state.attributes.get("unit_of_measurement") == "ERR"
def test_last_sensor(self):
"""Test the last sensor."""
config = {
"sensor": {
"platform": "min_max",
"name": "test_last",
"type": "last",
"entity_ids": ["sensor.test_1", "sensor.test_2", "sensor.test_3"],
}
}
assert setup_component(self.hass, "sensor", config)
entity_ids = config["sensor"]["entity_ids"]
state = self.hass.states.get("sensor.test_last")
for entity_id, value in dict(zip(entity_ids, self.values)).items():
self.hass.states.set(entity_id, value)
self.hass.block_till_done()
state = self.hass.states.get("sensor.test_last")
assert str(float(value)) == state.state
assert entity_id == state.attributes.get("last_entity_id")
assert self.min == state.attributes.get("min_value")
assert self.max == state.attributes.get("max_value")
assert self.mean == state.attributes.get("mean")
assert self.median == state.attributes.get("median")
async def test_reload(hass):
"""Verify we can reload filter sensors."""
hass.states.async_set("sensor.test_1", 12345)
hass.states.async_set("sensor.test_2", 45678)
await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": "min_max",
"name": "test",
"type": "mean",
"entity_ids": ["sensor.test_1", "sensor.test_2"],
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 3
assert hass.states.get("sensor.test")
yaml_path = path.join(
_get_fixtures_base_path(),
"fixtures",
"min_max/configuration.yaml",
)
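    # Reloading with the fixture YAML should drop "sensor.test" and create
    # "sensor.second_test" as defined in that configuration file.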
with patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
{},
blocking=True,
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 3
assert hass.states.get("sensor.test") is None
assert hass.states.get("sensor.second_test")
def _get_fixtures_base_path():
return path.dirname(path.dirname(path.dirname(__file__)))
| tchellomello/home-assistant | tests/components/min_max/test_sensor.py | Python | apache-2.0 | 13,832 |
"""NAMD molecular dynamics simulation datasets.
"""
from .access import load_tyr2ala
from .access import load_idws
from .access import load_restarted
from .access import load_restarted_reversed
| alchemistry/alchemtest | src/alchemtest/namd/__init__.py | Python | bsd-3-clause | 196 |
# -*- encoding: utf8 -*-
# A daemon to keep SSH forwarding connected
from __future__ import print_function, absolute_import
import os
import sys
import time
import socket
import logging
class Daemon(object):
def __init__(self):
self.heartbeat = 50
def run(self):
logging.basicConfig(filename='daemon.log')
logging.error('daemon started')
self.daemonize()
while True:
if not self.check_connection():
self.reconnect()
                logging.warning('reconnecting')
time.sleep(self.heartbeat)
def check_connection(self):
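        # Probe the locally forwarded port to decide whether the SSH tunnel
        # is still alive.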
c = socket.socket()
try:
c.connect(('localhost', 3366))
c.close()
return True
except socket.error:
return False
def daemonize(self):
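        # Single fork: the parent reaps the child non-blockingly and exits,
        # leaving the child running in the background as the daemon.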
pid = os.fork()
if pid:
os.waitpid(pid, os.WNOHANG)
sys.exit(0)
return
def reconnect(self):
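        # Fork and exec ssh to re-establish the local port forward
        # (3366 -> remote 127.0.0.1:3306); the parent waits for it to exit.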
pid = os.fork()
if pid == 0: # child
err = os.execlp('/usr/bin/ssh', 'ssh', '-i',
'/home/xu/.ssh/id_rsa', '-L',
'3366:127.0.0.1:3306', '-p', '42022', '[email protected]')
if err:
logging.error("error to execlp")
sys.exit(1)
elif pid > 0:
os.waitpid(pid, 0)
else:
logging.error('error to fork')
sys.exit(2)
if __name__ == '__main__':
Daemon().run()
| dlutxx/memo | python/daemon.py | Python | mit | 1,497 |
from fabric.api import task, local, run
from fabric.context_managers import lcd
import settings
@task(default=True)
def build():
"""
(Default) Build Sphinx HTML documentation
"""
with lcd('docs'):
local('make html')
@task()
def deploy():
"""
Upload docs to server
"""
build()
destination = '/usr/share/nginx/localhost/mysite/docs/build/html'
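    # Push the built HTML to the server's docroot over rsync+SSH; only the
    # SSH key and target host differ between the vagrant and CI environments.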
if settings.environment == 'vagrant':
local("rsync -avz --rsync-path='sudo rsync' -e 'ssh -p 2222 -i .vagrant/machines/web/virtualbox/private_key -o StrictHostKeyChecking=no' docs/build/html/ %s@%s:%s " % ('vagrant', 'localhost', destination))
elif settings.environment == 'ci':
local("rsync -avz --rsync-path='sudo rsync' -e 'ssh -p 2222 -i /var/go/id_rsa_web -o StrictHostKeyChecking=no' docs/build/html/ %s@%s:%s " % ('vagrant', '192.168.10.10', destination))
| brady-vitrano/full-stack-django-kit | fabfile/docs.py | Python | mit | 877 |
"""
helpers.py: Helper functions for testing
Copyright 2014-2015, Outernet Inc.
Some rights reserved.
This software is free software licensed under the terms of GPLv3. See COPYING
file that comes with the source code, or http://www.gnu.org/licenses/gpl.txt.
"""
def strip_wrappers(fn):
""" For decorated fn, return function with stirpped decorator """
if not hasattr(fn, 'func_closure') or not fn.func_closure:
return fn
for f in fn.func_closure:
f = f.cell_contents
if hasattr(f, '__call__'):
return strip_wrappers(f)
return fn
| karanisverma/feature_langpop | tests/helpers.py | Python | gpl-3.0 | 585 |
#!/usr/bin/python
#
# Copyright 2012 Red Hat, Inc.
# Portions Copyright (C) 2012,2013 Chris Lalancette <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import oz.TDL
import oz.GuestFactory
import oz.ozutil
import guestfs
# TODO: We've had to migrate to lxml here because of Oz changes
# see if we can't move the libvirt stuff as well
# For now we import both
import libxml2
import lxml.etree
import configparser
import tempfile
import base64
import os
import os.path
# Modules and exception classes referenced by data_from_type() and _init_oz()
# below (import paths assume the stock imgfac/oz/libvirt layout)
try:
    import StringIO
    import urlparse
except ImportError:  # Python 3
    import io as StringIO
    import urllib.parse as urlparse
from libvirt import libvirtError
from oz.OzException import OzException
from imgfac.ImageFactoryException import ImageFactoryException
from zope.interface import implementer
from imgfac.ApplicationConfiguration import ApplicationConfiguration
from imgfac.CloudDelegate import CloudDelegate
from imgfac.PersistentImageManager import PersistentImageManager
from imgfac.ReservationManager import ReservationManager
# This makes extensive use of parameters with some sensible defaults
# Try to keep an accurate list up here
# Parameter - Default -
# Description
# utility_image - <base_image_id>
#                 Description: UUID of the image that will be launched to do the modification of
#                 the base_image referenced in this target_image build. Note that the
# utility image should itself be a base image and can, if constructed properly,
# be the same as the base image that is being modified. The plugin makes a copy
# of the utility image before launching it, which allows safe modification during
# the target_image creation process.
# input_image_file - /input_image.raw (but only if input_image_device is not specified)
# Description: The name of the file on the working space disk where the base_image is presented
# input_image_device - None
# Description: The name of the device where the base_image is presented to the utility VM.
# (e.g. vdc)
# NOTE: You can specify one or the other of these options but not both. If neither are specified
# you will end up with the default value for input_image_file.
# utility_cpus - None
# Description: Number of CPUs in the utility VM - this can also be set in the global Oz config
# The lmc Live CD creation process benefits greatly from extra CPUs during the squashfs
# creation step. The performance improvement is almost perfectly O(n) w.r.t CPU.
# utility_customizations - None
# Description: A partial TDL document to drive the actions of the utility VM - only repos, packages,
# files and commands will be used - all other content is ignored
# results_location - /results/images/boot.iso
# Description: Location inside of the working space image from which to extract the results.
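#
# A hypothetical parameters payload illustrating the options above (the values
# shown are illustrative only, not taken from any real build request):
#
# {
#     "utility_image": "<UUID of the base image to boot as the utility VM>",
#     "input_image_device": "vdc",
#     "utility_cpus": 4,
#     "working_space_device": "vdb",
#     "utility_customizations": "<partial TDL XML driving the utility VM>",
#     "results_location": "/results/images/boot.iso"
# }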
# Borrowed from Oz by Chris Lalancette
def data_from_type(name, contenttype, content):
'''
A function to get data out of some content, possibly decoding it depending
on the content type. This function understands three types of content:
raw (where no decoding is necessary), base64 (where the data needs to be
base64 decoded), and url (where the data needs to be downloaded). Because
the data might be large, all data is sent to file handle, which is returned
from the function.
'''
out = tempfile.NamedTemporaryFile()
if contenttype == 'raw':
out.write(content)
elif contenttype == 'base64':
base64.decode(StringIO.StringIO(content), out)
elif contenttype == 'url':
url = urlparse.urlparse(content)
if url.scheme == "file":
with open(url.netloc + url.path) as f:
out.write("".join(f.readlines()))
else:
oz.ozutil.http_download_file(content, out.fileno(), False, None)
else:
raise oz.OzException.OzException("Type for %s must be 'raw', 'url' or 'base64'" % (name))
# make sure the data is flushed to disk for uses of the file through
# the name
out.flush()
out.seek(0)
return out
@implementer(CloudDelegate)
class IndirectionCloud(object):
def __init__(self):
super(IndirectionCloud, self).__init__()
self.app_config = ApplicationConfiguration().configuration
self.log = logging.getLogger('%s.%s' % (__name__, self.__class__.__name__))
self.pim = PersistentImageManager.default_manager()
self.res_mgr = ReservationManager()
def builder_should_create_target_image(self, builder, target, image_id, template, parameters):
# This plugin wants to be the only thing operating on the input image
# We do all our work here and then return False which stops any additional activity
self.working_space_image = None
self.utility_image_tmp = None
try:
self._builder_should_create_target_image(builder, target, image_id, template, parameters)
finally:
self.log.debug("Cleaning up temporary utility image and working space image")
for fname in [ self.working_space_image, self.utility_image_tmp ]:
if fname and os.path.isfile(fname):
os.unlink(fname)
return False
def _builder_should_create_target_image(self, builder, target, image_id, template, parameters):
# User may specify a utility image - if they do not we assume we can use the input image
utility_image_id = parameters.get('utility_image', image_id)
# The utility image is what we actually re-animate with Oz
# We borrow these variable names from code that is very similar to the Oz/TinMan OS plugin
self.active_image = self.pim.image_with_id(utility_image_id)
if not self.active_image:
raise Exception("Could not find utility image with ID (%s)" % (utility_image_id) )
self.tdlobj = oz.TDL.TDL(xmlstring=self.active_image.template)
# Later on, we will either copy in the base_image content as a file, or expose it as a device
# to the utility VM. We cannot do both. Detect invalid input here before doing any long running
# work
input_image_device = parameters.get('input_image_device', None)
input_image_file = parameters.get('input_image_filename', None)
if input_image_device and input_image_file:
raise Exception("You can specify either an input_image_device or an input_image_file but not both")
if (not input_image_device) and (not input_image_file):
input_image_file="/input_image.raw"
# We remove any packages, commands and files from the original TDL - these have already been
# installed/executed. We leave the repos in place, as it is possible that commands executed
# later may depend on them
self.tdlobj.packages = [ ]
self.tdlobj.commands = { }
self.tdlobj.files = { }
# This creates a new Oz object - replaces the auto-generated disk file location with
# a copy of the utility image we will make later, and prepares an initial libvirt_xml string
self._init_oz()
self.utility_image_tmp = self.app_config['imgdir'] + "/tmp-utility-image-" + str(builder.target_image.identifier)
self.guest.diskimage = self.utility_image_tmp
# Below we will create this file as a qcow2 image using the original utility image as
# a backing store - For the follow-on XML generation to work correctly, we need to force
# Oz to use qcow2 as the image type
self.guest.image_type = 'qcow2'
if 'utility_cpus' in parameters:
self.guest.install_cpus = int(parameters['utility_cpus'])
libvirt_xml = self.guest._generate_xml("hd", None)
libvirt_doc = libxml2.parseDoc(libvirt_xml)
# Now we create a second disk image as working/scratch space
# Hardcode at 30G
# TODO: Make configurable
# Make it, format it, copy in the base_image
self.working_space_image = self.app_config['imgdir'] + "/working-space-image-" + str(builder.target_image.identifier)
self.create_ext2_image(self.working_space_image)
# Modify the libvirt_xml used with Oz to contain a reference to a second "working space" disk image
working_space_device = parameters.get('working_space_device', 'vdb')
self.add_disk(libvirt_doc, self.working_space_image, working_space_device)
self.log.debug("Updated domain XML with working space image:\n%s" % (libvirt_xml))
# We expect to find a partial TDL document in this parameter - this is what drives the
# tasks performed by the utility image
if 'utility_customizations' in parameters:
self.oz_refresh_customizations(parameters['utility_customizations'])
else:
self.log.info('No additional repos, packages, files or commands specified for utility tasks')
# Create a qcow2 image using the original utility image file (which may be read-only) as a
# backing store.
self.log.debug("Creating temporary writeable qcow2 working copy of utlity image (%s) as (%s)" % (self.active_image.data, self.utility_image_tmp))
self.guest._internal_generate_diskimage(image_filename=self.utility_image_tmp, backing_filename=self.active_image.data)
if input_image_file:
            # Here we finally involve the actual Base Image content - it is made available for the utility image to modify
self.copy_content_to_image(builder.base_image.data, self.working_space_image, input_image_file)
else:
# Note that we know that one or the other of these are set because of code earlier
self.add_disk(libvirt_doc, builder.base_image.data, input_image_device)
# Run all commands, repo injection, etc specified
try:
self.log.debug("Launching utility image and running any customizations specified")
libvirt_xml = libvirt_doc.serialize(None, 1)
self.guest.customize(libvirt_xml)
self.log.debug("Utility image tasks complete")
finally:
self.log.debug("Cleaning up install artifacts")
self.guest.cleanup_install()
# After shutdown, extract the results
results_location = parameters.get('results_location', "/results/images/boot.iso")
self.copy_content_from_image(results_location, self.working_space_image, builder.target_image.data)
def add_disk(self, libvirt_doc, disk_image_file, device_name):
devices = libvirt_doc.xpathEval("/domain/devices")[0]
new_dev = devices.newChild(None, "disk", None)
new_dev.setProp("type", "file")
new_dev.setProp("device", "disk")
source = new_dev.newChild(None, "source", None)
source.setProp("file", disk_image_file)
target = new_dev.newChild(None, "target", None)
target.setProp("dev", device_name)
target.setProp("bus", self.guest.disk_bus)
def oz_refresh_customizations(self, partial_tdl):
# This takes our already created and well formed TDL object with already blank customizations
# and attempts to add in any additional customizations found in partial_tdl
# partial_tdl need not contain the <os>, <name> or <description> sections
# if it does they will be ignored
# TODO: Submit an Oz patch to make this shorter or a utility function within the TDL class
doc = lxml.etree.fromstring(partial_tdl)
self.tdlobj.doc = doc
packageslist = doc.xpath('/template/packages/package')
self.tdlobj._add_packages(packageslist)
for afile in doc.xpath('/template/files/file'):
name = afile.get('name')
if name is None:
raise Exception("File without a name was given")
contenttype = afile.get('type')
if contenttype is None:
contenttype = 'raw'
content = afile.text
if content:
content = content.strip()
else:
content = ''
self.tdlobj.files[name] = data_from_type(name, contenttype, content)
repositorieslist = doc.xpath('/template/repositories/repository')
self.tdlobj._add_repositories(repositorieslist)
try:
# oz ver < 0.13
self.tdlobj.commands = self.tdlobj._parse_commands()
except TypeError:
# oz ver >= 0.13
self.tdlobj.commands = self.tdlobj._parse_commands('/template/commands')
def _init_oz(self):
# populate a config object to pass to OZ; this allows us to specify our
# own output dir but inherit other Oz behavior
self.oz_config = configparser.SafeConfigParser()
if self.oz_config.read("/etc/oz/oz.cfg") != []:
self.oz_config.set('paths', 'output_dir', self.app_config["imgdir"])
if "oz_data_dir" in self.app_config:
self.oz_config.set('paths', 'data_dir', self.app_config["oz_data_dir"])
if "oz_screenshot_dir" in self.app_config:
self.oz_config.set('paths', 'screenshot_dir', self.app_config["oz_screenshot_dir"])
else:
raise ImageFactoryException("No Oz config file found. Can't continue.")
# Use the factory function from Oz directly
try:
# Force uniqueness by overriding the name in the TDL
self.tdlobj.name = "factory-build-" + self.active_image.identifier
self.guest = oz.GuestFactory.guest_factory(self.tdlobj, self.oz_config, None)
# Oz just selects a random port here - This could potentially collide if we are unlucky
self.guest.listen_port = self.res_mgr.get_next_listen_port()
except libvirtError as e:
raise ImageFactoryException("Cannot connect to libvirt. Make sure libvirt is running. [Original message: %s]" % e.message)
except OzException as e:
if "Unsupported" in e.message:
raise ImageFactoryException("TinMan plugin does not support distro (%s) update (%s) in TDL" % (self.tdlobj.distro, self.tdlobj.update) )
else:
raise e
def create_ext2_image(self, image_file, image_size=(1024*1024*1024*30)):
# Why ext2? Why not? There's no need for the overhead of journaling. This disk will be mounted once and thrown away.
self.log.debug("Creating disk image of size (%d) in file (%s) with single partition containint ext2 filesystem" % (image_size, image_file))
raw_fs_image=open(image_file,"w")
raw_fs_image.truncate(image_size)
raw_fs_image.close()
g = guestfs.GuestFS()
g.add_drive(image_file)
g.launch()
g.part_disk("/dev/sda","msdos")
g.part_set_mbr_id("/dev/sda",1,0x83)
g.mkfs("ext2", "/dev/sda1")
g.sync()
def copy_content_to_image(self, filename, target_image, target_filename):
self.log.debug("Copying file (%s) into disk image (%s)" % (filename, target_image))
g = guestfs.GuestFS()
g.add_drive(target_image)
g.launch()
g.mount_options ("", "/dev/sda1", "/")
g.upload(filename, target_filename)
g.sync()
def copy_content_from_image(self, filename, target_image, destination_file):
self.log.debug("Copying file (%s) out of disk image (%s) into (%s)" % (filename, target_image, destination_file))
g = guestfs.GuestFS()
g.add_drive(target_image)
g.launch()
g.mount_options ("", "/dev/sda1", "/")
g.download(filename,destination_file)
g.sync()
| redhat-imaging/imagefactory | imagefactory_plugins/IndirectionCloud/IndirectionCloud.py | Python | apache-2.0 | 16,020 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('gallery', '0003_auto_20150925_0642'),
]
operations = [
migrations.CreateModel(
name='Medium',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, verbose_name='ID', serialize=False)),
('medium_name', models.TextField()),
],
),
]
| mudbungie/carrieocoyle | gallery/migrations/0004_medium.py | Python | mit | 523 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "learning_site.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| davejlin/treehouse | python/django/learning_site_forms/manage.py | Python | unlicense | 256 |
# -*- coding: utf-8 -*-
# Copyright 2017 LasLabs Inc.
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl).
import logging
from odoo import api, models
from ..models.module_deprecated import PARAM_DEPRECATED
_logger = logging.getLogger(__name__)
class ModuleUpgrade(models.TransientModel):
_inherit = 'base.module.upgrade'
@api.model
def _autoupdate_deprecated(self):
"""Know if we should enable deprecated features."""
deprecated = (
self.env["ir.config_parameter"].get_param(PARAM_DEPRECATED))
if deprecated is False:
# Enable deprecated features if this is the 1st automated update
# after the version that deprecated them (X.Y.2.0.0)
own_module = self.env["ir.module.module"].search([
("name", "=", "module_auto_update"),
])
try:
if own_module.latest_version.split(".")[2] == "1":
deprecated = "1"
except AttributeError:
pass # 1st install, there's no latest_version
return deprecated == "1"
@api.model
def get_module_list(self):
"""Set modules to upgrade searching by their dir checksum."""
if self._autoupdate_deprecated():
Module = self.env["ir.module.module"]
installed_modules = Module.search([('state', '=', 'installed')])
upgradeable_modules = installed_modules.filtered(
lambda r: r.checksum_dir != r.checksum_installed,
)
upgradeable_modules.button_upgrade()
return super(ModuleUpgrade, self).get_module_list()
@api.multi
def upgrade_module(self):
"""Make a fully automated addon upgrade."""
if self._autoupdate_deprecated():
_logger.warning(
"You are possibly using an unsupported upgrade system; "
"set '%s' system parameter to '0' and start calling "
"`env['ir.module.module'].upgrade_changed_checksum()` from "
"now on to get rid of this message. See module's README's "
"Known Issues section for further information on the matter."
)
# Compute updates by checksum when called in @api.model fashion
self.env.cr.autocommit(True) # Avoid transaction lock
if not self:
self.get_module_list()
Module = self.env["ir.module.module"]
# Get every addon state before updating
pre_states = {addon["name"]: addon["state"] for addon
in Module.search_read([], ["name", "state"])}
# Perform upgrades, possibly in a limited graph that excludes me
result = super(ModuleUpgrade, self).upgrade_module()
if self._autoupdate_deprecated():
self.env.cr.autocommit(False)
# Reload environments, anything may have changed
self.env.clear()
# Update addons checksum if state changed and I wasn't uninstalled
own = Module.search_read(
[("name", "=", "module_auto_update")],
["state"],
limit=1)
if own and own[0]["state"] != "uninstalled":
for addon in Module.search([]):
if addon.state != pre_states.get(addon.name):
# Trigger the write hook that should have been
# triggered when the module was [un]installed/updated
# in the limited module graph inside above call to
# super(), and updates its dir checksum as needed
addon.latest_version = addon.latest_version
return result
| ovnicraft/server-tools | module_auto_update/wizards/module_upgrade_deprecated.py | Python | agpl-3.0 | 3,742 |
# -*- mode: python; indent-tabs-mode: nil; c-basic-offset: 4; tab-width: 4; -*-
# vim: set shiftwidth=4 softtabstop=4 expandtab:
"""Admin objects for ncharts django web app.
2014 Copyright University Corporation for Atmospheric Research
This file is part of the "django-ncharts" package.
The license and distribution terms for this file may be found in the
file LICENSE in this package.
"""
from django.contrib import admin
from ncharts import models as nc_models
class ProjectAdmin(admin.ModelAdmin):
pass
admin.site.register(nc_models.Project, ProjectAdmin)
class PlatformAdmin(admin.ModelAdmin):
pass
admin.site.register(nc_models.Platform, PlatformAdmin)
class FileDatasetAdmin(admin.ModelAdmin):
pass
admin.site.register(nc_models.FileDataset, FileDatasetAdmin)
class DBDatasetAdmin(admin.ModelAdmin):
pass
admin.site.register(nc_models.DBDataset, DBDatasetAdmin)
class ClientStateAdmin(admin.ModelAdmin):
pass
admin.site.register(nc_models.ClientState, ClientStateAdmin)
| ncareol/ncharts | ncharts/admin.py | Python | bsd-2-clause | 1,012 |
from __future__ import unicode_literals
import datetime
import sys
import unittest
from django.contrib.admin import (
AllValuesFieldListFilter, BooleanFieldListFilter, ModelAdmin,
RelatedOnlyFieldListFilter, SimpleListFilter, site,
)
from django.contrib.admin.views.main import ChangeList
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured
from django.test import RequestFactory, TestCase, override_settings
from django.utils import six
from django.utils.encoding import force_text
from .models import Book, Bookmark, Department, Employee, TaggedItem
def select_by(dictlist, key, value):
return [x for x in dictlist if x[key] == value][0]
class DecadeListFilter(SimpleListFilter):
def lookups(self, request, model_admin):
return (
('the 80s', "the 1980's"),
('the 90s', "the 1990's"),
('the 00s', "the 2000's"),
('other', "other decades"),
)
def queryset(self, request, queryset):
decade = self.value()
if decade == 'the 80s':
return queryset.filter(year__gte=1980, year__lte=1989)
if decade == 'the 90s':
return queryset.filter(year__gte=1990, year__lte=1999)
if decade == 'the 00s':
return queryset.filter(year__gte=2000, year__lte=2009)
class NotNinetiesListFilter(SimpleListFilter):
title = "Not nineties books"
parameter_name = "book_year"
def lookups(self, request, model_admin):
return (
('the 90s', "the 1990's"),
)
def queryset(self, request, queryset):
if self.value() == 'the 90s':
return queryset.filter(year__gte=1990, year__lte=1999)
else:
return queryset.exclude(year__gte=1990, year__lte=1999)
class DecadeListFilterWithTitleAndParameter(DecadeListFilter):
title = 'publication decade'
parameter_name = 'publication-decade'
class DecadeListFilterWithoutTitle(DecadeListFilter):
parameter_name = 'publication-decade'
class DecadeListFilterWithoutParameter(DecadeListFilter):
title = 'publication decade'
class DecadeListFilterWithNoneReturningLookups(DecadeListFilterWithTitleAndParameter):
def lookups(self, request, model_admin):
pass
class DecadeListFilterWithFailingQueryset(DecadeListFilterWithTitleAndParameter):
def queryset(self, request, queryset):
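        # Deliberately raise ZeroDivisionError so the test suite can verify
        # that exceptions from a filter's queryset() are not swallowed
        # (refs #17828).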
raise 1 / 0
class DecadeListFilterWithQuerysetBasedLookups(DecadeListFilterWithTitleAndParameter):
def lookups(self, request, model_admin):
qs = model_admin.get_queryset(request)
if qs.filter(year__gte=1980, year__lte=1989).exists():
yield ('the 80s', "the 1980's")
if qs.filter(year__gte=1990, year__lte=1999).exists():
yield ('the 90s', "the 1990's")
if qs.filter(year__gte=2000, year__lte=2009).exists():
yield ('the 00s', "the 2000's")
class DecadeListFilterParameterEndsWith__In(DecadeListFilter):
title = 'publication decade'
    parameter_name = 'decade__in'  # Ends with '__in'
class DecadeListFilterParameterEndsWith__Isnull(DecadeListFilter):
title = 'publication decade'
    parameter_name = 'decade__isnull'  # Ends with '__isnull'
class DepartmentListFilterLookupWithNonStringValue(SimpleListFilter):
title = 'department'
parameter_name = 'department'
def lookups(self, request, model_admin):
return sorted({
(employee.department.id, # Intentionally not a string (Refs #19318)
employee.department.code)
for employee in model_admin.get_queryset(request).all()
})
def queryset(self, request, queryset):
if self.value():
return queryset.filter(department__id=self.value())
class DepartmentListFilterLookupWithUnderscoredParameter(DepartmentListFilterLookupWithNonStringValue):
parameter_name = 'department__whatever'
class DepartmentListFilterLookupWithDynamicValue(DecadeListFilterWithTitleAndParameter):
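    # Lookup choices intentionally depend on the currently selected value.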
def lookups(self, request, model_admin):
if self.value() == 'the 80s':
return (('the 90s', "the 1990's"),)
elif self.value() == 'the 90s':
return (('the 80s', "the 1980's"),)
else:
return (('the 80s', "the 1980's"), ('the 90s', "the 1990's"),)
class CustomUserAdmin(UserAdmin):
list_filter = ('books_authored', 'books_contributed')
class BookAdmin(ModelAdmin):
list_filter = ('year', 'author', 'contributors', 'is_best_seller', 'date_registered', 'no')
ordering = ('-id',)
class BookAdminWithTupleBooleanFilter(BookAdmin):
list_filter = (
'year',
'author',
'contributors',
('is_best_seller', BooleanFieldListFilter),
'date_registered',
'no',
)
class BookAdminWithUnderscoreLookupAndTuple(BookAdmin):
list_filter = (
'year',
('author__email', AllValuesFieldListFilter),
'contributors',
'is_best_seller',
'date_registered',
'no',
)
class BookAdminWithCustomQueryset(ModelAdmin):
def __init__(self, user, *args, **kwargs):
self.user = user
super(BookAdminWithCustomQueryset, self).__init__(*args, **kwargs)
list_filter = ('year',)
def get_queryset(self, request):
return super(BookAdminWithCustomQueryset, self).get_queryset(request).filter(author=self.user)
class BookAdminRelatedOnlyFilter(ModelAdmin):
list_filter = (
'year', 'is_best_seller', 'date_registered', 'no',
('author', RelatedOnlyFieldListFilter),
('contributors', RelatedOnlyFieldListFilter),
('employee__department', RelatedOnlyFieldListFilter),
)
ordering = ('-id',)
class DecadeFilterBookAdmin(ModelAdmin):
list_filter = ('author', DecadeListFilterWithTitleAndParameter)
ordering = ('-id',)
class NotNinetiesListFilterAdmin(ModelAdmin):
list_filter = (NotNinetiesListFilter,)
class DecadeFilterBookAdminWithoutTitle(ModelAdmin):
list_filter = (DecadeListFilterWithoutTitle,)
class DecadeFilterBookAdminWithoutParameter(ModelAdmin):
list_filter = (DecadeListFilterWithoutParameter,)
class DecadeFilterBookAdminWithNoneReturningLookups(ModelAdmin):
list_filter = (DecadeListFilterWithNoneReturningLookups,)
class DecadeFilterBookAdminWithFailingQueryset(ModelAdmin):
list_filter = (DecadeListFilterWithFailingQueryset,)
class DecadeFilterBookAdminWithQuerysetBasedLookups(ModelAdmin):
list_filter = (DecadeListFilterWithQuerysetBasedLookups,)
class DecadeFilterBookAdminParameterEndsWith__In(ModelAdmin):
list_filter = (DecadeListFilterParameterEndsWith__In,)
class DecadeFilterBookAdminParameterEndsWith__Isnull(ModelAdmin):
list_filter = (DecadeListFilterParameterEndsWith__Isnull,)
class EmployeeAdmin(ModelAdmin):
list_display = ['name', 'department']
list_filter = ['department']
class DepartmentFilterEmployeeAdmin(EmployeeAdmin):
list_filter = [DepartmentListFilterLookupWithNonStringValue, ]
class DepartmentFilterUnderscoredEmployeeAdmin(EmployeeAdmin):
list_filter = [DepartmentListFilterLookupWithUnderscoredParameter, ]
class DepartmentFilterDynamicValueBookAdmin(EmployeeAdmin):
list_filter = [DepartmentListFilterLookupWithDynamicValue, ]
class BookmarkAdminGenericRelation(ModelAdmin):
list_filter = ['tags__tag']
class ListFiltersTests(TestCase):
def setUp(self):
self.today = datetime.date.today()
self.tomorrow = self.today + datetime.timedelta(days=1)
self.one_week_ago = self.today - datetime.timedelta(days=7)
if self.today.month == 12:
self.next_month = self.today.replace(year=self.today.year + 1, month=1, day=1)
else:
self.next_month = self.today.replace(month=self.today.month + 1, day=1)
self.next_year = self.today.replace(year=self.today.year + 1, month=1, day=1)
self.request_factory = RequestFactory()
# Users
self.alfred = User.objects.create_user('alfred', '[email protected]')
self.bob = User.objects.create_user('bob', '[email protected]')
self.lisa = User.objects.create_user('lisa', '[email protected]')
# Books
self.djangonaut_book = Book.objects.create(
title='Djangonaut: an art of living', year=2009,
author=self.alfred, is_best_seller=True, date_registered=self.today,
)
self.bio_book = Book.objects.create(
title='Django: a biography', year=1999, author=self.alfred,
is_best_seller=False, no=207,
)
self.django_book = Book.objects.create(
title='The Django Book', year=None, author=self.bob,
is_best_seller=None, date_registered=self.today, no=103,
)
self.gipsy_book = Book.objects.create(
title='Gipsy guitar for dummies', year=2002, is_best_seller=True,
date_registered=self.one_week_ago,
)
self.gipsy_book.contributors.set([self.bob, self.lisa])
# Departments
self.dev = Department.objects.create(code='DEV', description='Development')
self.design = Department.objects.create(code='DSN', description='Design')
# Employees
self.john = Employee.objects.create(name='John Blue', department=self.dev)
self.jack = Employee.objects.create(name='Jack Red', department=self.design)
def get_changelist(self, request, model, modeladmin):
return ChangeList(
request, model, modeladmin.list_display,
modeladmin.list_display_links, modeladmin.list_filter,
modeladmin.date_hierarchy, modeladmin.search_fields,
modeladmin.list_select_related, modeladmin.list_per_page,
modeladmin.list_max_show_all, modeladmin.list_editable, modeladmin,
)
def test_datefieldlistfilter(self):
modeladmin = BookAdmin(Book, site)
request = self.request_factory.get('/')
changelist = self.get_changelist(request, Book, modeladmin)
request = self.request_factory.get('/', {'date_registered__gte': self.today,
'date_registered__lt': self.tomorrow})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.django_book, self.djangonaut_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][4]
self.assertEqual(force_text(filterspec.title), 'date registered')
choice = select_by(filterspec.choices(changelist), "display", "Today")
self.assertEqual(choice['selected'], True)
self.assertEqual(
choice['query_string'],
'?date_registered__gte=%s&date_registered__lt=%s' % (
self.today,
self.tomorrow,
)
)
request = self.request_factory.get('/', {'date_registered__gte': self.today.replace(day=1),
'date_registered__lt': self.next_month})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
if (self.today.year, self.today.month) == (self.one_week_ago.year, self.one_week_ago.month):
# In case one week ago is in the same month.
self.assertEqual(list(queryset), [self.gipsy_book, self.django_book, self.djangonaut_book])
else:
self.assertEqual(list(queryset), [self.django_book, self.djangonaut_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][4]
self.assertEqual(force_text(filterspec.title), 'date registered')
choice = select_by(filterspec.choices(changelist), "display", "This month")
self.assertEqual(choice['selected'], True)
self.assertEqual(
choice['query_string'],
'?date_registered__gte=%s&date_registered__lt=%s' % (
self.today.replace(day=1),
self.next_month,
)
)
request = self.request_factory.get('/', {'date_registered__gte': self.today.replace(month=1, day=1),
'date_registered__lt': self.next_year})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
if self.today.year == self.one_week_ago.year:
# In case one week ago is in the same year.
self.assertEqual(list(queryset), [self.gipsy_book, self.django_book, self.djangonaut_book])
else:
self.assertEqual(list(queryset), [self.django_book, self.djangonaut_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][4]
self.assertEqual(force_text(filterspec.title), 'date registered')
choice = select_by(filterspec.choices(changelist), "display", "This year")
self.assertEqual(choice['selected'], True)
self.assertEqual(
choice['query_string'],
'?date_registered__gte=%s&date_registered__lt=%s' % (
self.today.replace(month=1, day=1),
self.next_year,
)
)
request = self.request_factory.get('/', {
'date_registered__gte': str(self.one_week_ago),
'date_registered__lt': str(self.tomorrow),
})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.gipsy_book, self.django_book, self.djangonaut_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][4]
self.assertEqual(force_text(filterspec.title), 'date registered')
choice = select_by(filterspec.choices(changelist), "display", "Past 7 days")
self.assertEqual(choice['selected'], True)
self.assertEqual(
choice['query_string'],
'?date_registered__gte=%s&date_registered__lt=%s' % (
str(self.one_week_ago),
str(self.tomorrow),
)
)
@unittest.skipIf(
sys.platform.startswith('win'),
"Windows doesn't support setting a timezone that differs from the "
"system timezone."
)
@override_settings(USE_TZ=True)
def test_datefieldlistfilter_with_time_zone_support(self):
# Regression for #17830
self.test_datefieldlistfilter()
def test_allvaluesfieldlistfilter(self):
modeladmin = BookAdmin(Book, site)
request = self.request_factory.get('/', {'year__isnull': 'True'})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.django_book])
# Make sure the last choice is None and is selected
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(force_text(filterspec.title), 'year')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[-1]['selected'], True)
self.assertEqual(choices[-1]['query_string'], '?year__isnull=True')
request = self.request_factory.get('/', {'year': '2002'})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(force_text(filterspec.title), 'year')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[2]['selected'], True)
self.assertEqual(choices[2]['query_string'], '?year=2002')
def test_allvaluesfieldlistfilter_custom_qs(self):
# Make sure that correct filters are returned with custom querysets
modeladmin = BookAdminWithCustomQueryset(self.alfred, Book, site)
request = self.request_factory.get('/')
changelist = self.get_changelist(request, Book, modeladmin)
filterspec = changelist.get_filters(request)[0][0]
choices = list(filterspec.choices(changelist))
# Should have 'All', 1999 and 2009 options i.e. the subset of years of
# books written by alfred (which is the filtering criteria set by
# BookAdminWithCustomQueryset.get_queryset())
self.assertEqual(3, len(choices))
self.assertEqual(choices[0]['query_string'], '?')
self.assertEqual(choices[1]['query_string'], '?year=1999')
self.assertEqual(choices[2]['query_string'], '?year=2009')
def test_relatedfieldlistfilter_foreignkey(self):
modeladmin = BookAdmin(Book, site)
request = self.request_factory.get('/')
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure that all users are present in the author's list filter
filterspec = changelist.get_filters(request)[0][1]
expected = [(self.alfred.pk, 'alfred'), (self.bob.pk, 'bob'), (self.lisa.pk, 'lisa')]
self.assertEqual(sorted(filterspec.lookup_choices), sorted(expected))
request = self.request_factory.get('/', {'author__isnull': 'True'})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.gipsy_book])
# Make sure the last choice is None and is selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(force_text(filterspec.title), 'Verbose Author')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[-1]['selected'], True)
self.assertEqual(choices[-1]['query_string'], '?author__isnull=True')
request = self.request_factory.get('/', {'author__id__exact': self.alfred.pk})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(force_text(filterspec.title), 'Verbose Author')
# order of choices depends on User model, which has no order
choice = select_by(filterspec.choices(changelist), "display", "alfred")
self.assertEqual(choice['selected'], True)
self.assertEqual(choice['query_string'], '?author__id__exact=%d' % self.alfred.pk)
def test_relatedfieldlistfilter_manytomany(self):
modeladmin = BookAdmin(Book, site)
request = self.request_factory.get('/')
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure that all users are present in the contrib's list filter
filterspec = changelist.get_filters(request)[0][2]
expected = [(self.alfred.pk, 'alfred'), (self.bob.pk, 'bob'), (self.lisa.pk, 'lisa')]
self.assertEqual(sorted(filterspec.lookup_choices), sorted(expected))
request = self.request_factory.get('/', {'contributors__isnull': 'True'})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.django_book, self.bio_book, self.djangonaut_book])
# Make sure the last choice is None and is selected
filterspec = changelist.get_filters(request)[0][2]
self.assertEqual(force_text(filterspec.title), 'Verbose Contributors')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[-1]['selected'], True)
self.assertEqual(choices[-1]['query_string'], '?contributors__isnull=True')
request = self.request_factory.get('/', {'contributors__id__exact': self.bob.pk})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][2]
self.assertEqual(force_text(filterspec.title), 'Verbose Contributors')
choice = select_by(filterspec.choices(changelist), "display", "bob")
self.assertEqual(choice['selected'], True)
self.assertEqual(choice['query_string'], '?contributors__id__exact=%d' % self.bob.pk)
def test_relatedfieldlistfilter_reverse_relationships(self):
modeladmin = CustomUserAdmin(User, site)
# FK relationship -----
request = self.request_factory.get('/', {'books_authored__isnull': 'True'})
changelist = self.get_changelist(request, User, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.lisa])
# Make sure the last choice is None and is selected
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(force_text(filterspec.title), 'book')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[-1]['selected'], True)
self.assertEqual(choices[-1]['query_string'], '?books_authored__isnull=True')
request = self.request_factory.get('/', {'books_authored__id__exact': self.bio_book.pk})
changelist = self.get_changelist(request, User, modeladmin)
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(force_text(filterspec.title), 'book')
choice = select_by(filterspec.choices(changelist), "display", self.bio_book.title)
self.assertEqual(choice['selected'], True)
self.assertEqual(choice['query_string'], '?books_authored__id__exact=%d' % self.bio_book.pk)
# M2M relationship -----
request = self.request_factory.get('/', {'books_contributed__isnull': 'True'})
changelist = self.get_changelist(request, User, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.alfred])
# Make sure the last choice is None and is selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(force_text(filterspec.title), 'book')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[-1]['selected'], True)
self.assertEqual(choices[-1]['query_string'], '?books_contributed__isnull=True')
request = self.request_factory.get('/', {'books_contributed__id__exact': self.django_book.pk})
changelist = self.get_changelist(request, User, modeladmin)
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(force_text(filterspec.title), 'book')
choice = select_by(filterspec.choices(changelist), "display", self.django_book.title)
self.assertEqual(choice['selected'], True)
self.assertEqual(choice['query_string'], '?books_contributed__id__exact=%d' % self.django_book.pk)
# With one book, the list filter should appear because there is also a
# (None) option.
Book.objects.exclude(pk=self.djangonaut_book.pk).delete()
filterspec = changelist.get_filters(request)[0]
self.assertEqual(len(filterspec), 2)
# With no books remaining, no list filters should appear.
Book.objects.all().delete()
filterspec = changelist.get_filters(request)[0]
self.assertEqual(len(filterspec), 0)
def test_relatedonlyfieldlistfilter_foreignkey(self):
modeladmin = BookAdminRelatedOnlyFilter(Book, site)
request = self.request_factory.get('/')
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure that only actual authors are present in author's list filter
filterspec = changelist.get_filters(request)[0][4]
expected = [(self.alfred.pk, 'alfred'), (self.bob.pk, 'bob')]
self.assertEqual(sorted(filterspec.lookup_choices), sorted(expected))
def test_relatedonlyfieldlistfilter_underscorelookup_foreignkey(self):
Department.objects.create(code='TEST', description='Testing')
self.djangonaut_book.employee = self.john
self.djangonaut_book.save()
self.bio_book.employee = self.jack
self.bio_book.save()
modeladmin = BookAdminRelatedOnlyFilter(Book, site)
request = self.request_factory.get('/')
changelist = self.get_changelist(request, Book, modeladmin)
# Only actual departments should be present in employee__department's
# list filter.
filterspec = changelist.get_filters(request)[0][6]
expected = [
(self.dev.code, str(self.dev)),
(self.design.code, str(self.design)),
]
self.assertEqual(sorted(filterspec.lookup_choices), sorted(expected))
def test_relatedonlyfieldlistfilter_manytomany(self):
modeladmin = BookAdminRelatedOnlyFilter(Book, site)
request = self.request_factory.get('/')
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure that only actual contributors are present in contrib's list filter
filterspec = changelist.get_filters(request)[0][5]
expected = [(self.bob.pk, 'bob'), (self.lisa.pk, 'lisa')]
self.assertEqual(sorted(filterspec.lookup_choices), sorted(expected))
def test_listfilter_genericrelation(self):
django_bookmark = Bookmark.objects.create(url='https://www.djangoproject.com/')
python_bookmark = Bookmark.objects.create(url='https://www.python.org/')
kernel_bookmark = Bookmark.objects.create(url='https://www.kernel.org/')
TaggedItem.objects.create(content_object=django_bookmark, tag='python')
TaggedItem.objects.create(content_object=python_bookmark, tag='python')
TaggedItem.objects.create(content_object=kernel_bookmark, tag='linux')
modeladmin = BookmarkAdminGenericRelation(Bookmark, site)
request = self.request_factory.get('/', {'tags__tag': 'python'})
changelist = self.get_changelist(request, Bookmark, modeladmin)
queryset = changelist.get_queryset(request)
expected = [python_bookmark, django_bookmark]
self.assertEqual(list(queryset), expected)
def test_booleanfieldlistfilter(self):
modeladmin = BookAdmin(Book, site)
self.verify_booleanfieldlistfilter(modeladmin)
def test_booleanfieldlistfilter_tuple(self):
modeladmin = BookAdminWithTupleBooleanFilter(Book, site)
self.verify_booleanfieldlistfilter(modeladmin)
def verify_booleanfieldlistfilter(self, modeladmin):
request = self.request_factory.get('/')
changelist = self.get_changelist(request, Book, modeladmin)
request = self.request_factory.get('/', {'is_best_seller__exact': 0})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.bio_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][3]
self.assertEqual(force_text(filterspec.title), 'is best seller')
choice = select_by(filterspec.choices(changelist), "display", "No")
self.assertEqual(choice['selected'], True)
self.assertEqual(choice['query_string'], '?is_best_seller__exact=0')
request = self.request_factory.get('/', {'is_best_seller__exact': 1})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.gipsy_book, self.djangonaut_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][3]
self.assertEqual(force_text(filterspec.title), 'is best seller')
choice = select_by(filterspec.choices(changelist), "display", "Yes")
self.assertEqual(choice['selected'], True)
self.assertEqual(choice['query_string'], '?is_best_seller__exact=1')
request = self.request_factory.get('/', {'is_best_seller__isnull': 'True'})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.django_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][3]
self.assertEqual(force_text(filterspec.title), 'is best seller')
choice = select_by(filterspec.choices(changelist), "display", "Unknown")
self.assertEqual(choice['selected'], True)
self.assertEqual(choice['query_string'], '?is_best_seller__isnull=True')
def test_fieldlistfilter_underscorelookup_tuple(self):
"""
Ensure ('fieldpath', ClassName ) lookups pass lookup_allowed checks
when fieldpath contains double underscore in value (#19182).
"""
modeladmin = BookAdminWithUnderscoreLookupAndTuple(Book, site)
request = self.request_factory.get('/')
changelist = self.get_changelist(request, Book, modeladmin)
request = self.request_factory.get('/', {'author__email': '[email protected]'})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.bio_book, self.djangonaut_book])
def test_simplelistfilter(self):
modeladmin = DecadeFilterBookAdmin(Book, site)
# Make sure that the first option is 'All' ---------------------------
request = self.request_factory.get('/', {})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), list(Book.objects.all().order_by('-id')))
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(force_text(filterspec.title), 'publication decade')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[0]['display'], 'All')
self.assertEqual(choices[0]['selected'], True)
self.assertEqual(choices[0]['query_string'], '?')
# Look for books in the 1980s ----------------------------------------
request = self.request_factory.get('/', {'publication-decade': 'the 80s'})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(force_text(filterspec.title), 'publication decade')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[1]['display'], 'the 1980\'s')
self.assertEqual(choices[1]['selected'], True)
self.assertEqual(choices[1]['query_string'], '?publication-decade=the+80s')
# Look for books in the 1990s ----------------------------------------
request = self.request_factory.get('/', {'publication-decade': 'the 90s'})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.bio_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(force_text(filterspec.title), 'publication decade')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[2]['display'], 'the 1990\'s')
self.assertEqual(choices[2]['selected'], True)
self.assertEqual(choices[2]['query_string'], '?publication-decade=the+90s')
# Look for books in the 2000s ----------------------------------------
request = self.request_factory.get('/', {'publication-decade': 'the 00s'})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.gipsy_book, self.djangonaut_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(force_text(filterspec.title), 'publication decade')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[3]['display'], 'the 2000\'s')
self.assertEqual(choices[3]['selected'], True)
self.assertEqual(choices[3]['query_string'], '?publication-decade=the+00s')
# Combine multiple filters -------------------------------------------
request = self.request_factory.get('/', {'publication-decade': 'the 00s', 'author__id__exact': self.alfred.pk})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.djangonaut_book])
# Make sure the correct choices are selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(force_text(filterspec.title), 'publication decade')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[3]['display'], 'the 2000\'s')
self.assertEqual(choices[3]['selected'], True)
self.assertEqual(
choices[3]['query_string'],
'?author__id__exact=%s&publication-decade=the+00s' % self.alfred.pk
)
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(force_text(filterspec.title), 'Verbose Author')
choice = select_by(filterspec.choices(changelist), "display", "alfred")
self.assertEqual(choice['selected'], True)
self.assertEqual(choice['query_string'], '?author__id__exact=%s&publication-decade=the+00s' % self.alfred.pk)
def test_listfilter_without_title(self):
"""
Any filter must define a title.
"""
modeladmin = DecadeFilterBookAdminWithoutTitle(Book, site)
request = self.request_factory.get('/', {})
six.assertRaisesRegex(self, ImproperlyConfigured,
"The list filter 'DecadeListFilterWithoutTitle' does not specify a 'title'.",
self.get_changelist, request, Book, modeladmin)
def test_simplelistfilter_without_parameter(self):
"""
Any SimpleListFilter must define a parameter_name.
"""
modeladmin = DecadeFilterBookAdminWithoutParameter(Book, site)
request = self.request_factory.get('/', {})
six.assertRaisesRegex(self, ImproperlyConfigured,
"The list filter 'DecadeListFilterWithoutParameter' does not specify a 'parameter_name'.",
self.get_changelist, request, Book, modeladmin)
def test_simplelistfilter_with_none_returning_lookups(self):
"""
A SimpleListFilter lookups method can return None but disables the
filter completely.
"""
modeladmin = DecadeFilterBookAdminWithNoneReturningLookups(Book, site)
request = self.request_factory.get('/', {})
changelist = self.get_changelist(request, Book, modeladmin)
filterspec = changelist.get_filters(request)[0]
self.assertEqual(len(filterspec), 0)
def test_filter_with_failing_queryset(self):
"""
Ensure that when a filter's queryset method fails, it fails loudly and
the corresponding exception doesn't get swallowed (#17828).
"""
modeladmin = DecadeFilterBookAdminWithFailingQueryset(Book, site)
request = self.request_factory.get('/', {})
self.assertRaises(ZeroDivisionError, self.get_changelist, request, Book, modeladmin)
def test_simplelistfilter_with_queryset_based_lookups(self):
modeladmin = DecadeFilterBookAdminWithQuerysetBasedLookups(Book, site)
request = self.request_factory.get('/', {})
changelist = self.get_changelist(request, Book, modeladmin)
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(force_text(filterspec.title), 'publication decade')
choices = list(filterspec.choices(changelist))
self.assertEqual(len(choices), 3)
self.assertEqual(choices[0]['display'], 'All')
self.assertEqual(choices[0]['selected'], True)
self.assertEqual(choices[0]['query_string'], '?')
self.assertEqual(choices[1]['display'], 'the 1990\'s')
self.assertEqual(choices[1]['selected'], False)
self.assertEqual(choices[1]['query_string'], '?publication-decade=the+90s')
self.assertEqual(choices[2]['display'], 'the 2000\'s')
self.assertEqual(choices[2]['selected'], False)
self.assertEqual(choices[2]['query_string'], '?publication-decade=the+00s')
def test_two_characters_long_field(self):
"""
list_filter works with two-characters long field names (#16080).
"""
modeladmin = BookAdmin(Book, site)
request = self.request_factory.get('/', {'no': '207'})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.bio_book])
filterspec = changelist.get_filters(request)[0][-1]
self.assertEqual(force_text(filterspec.title), 'number')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[2]['selected'], True)
self.assertEqual(choices[2]['query_string'], '?no=207')
def test_parameter_ends_with__in__or__isnull(self):
"""
Ensure that a SimpleListFilter's parameter name is not mistaken for a
model field if it ends with '__isnull' or '__in' (#17091).
"""
# When it ends with '__in' -----------------------------------------
modeladmin = DecadeFilterBookAdminParameterEndsWith__In(Book, site)
request = self.request_factory.get('/', {'decade__in': 'the 90s'})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.bio_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(force_text(filterspec.title), 'publication decade')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[2]['display'], 'the 1990\'s')
self.assertEqual(choices[2]['selected'], True)
self.assertEqual(choices[2]['query_string'], '?decade__in=the+90s')
# When it ends with '__isnull' ---------------------------------------
modeladmin = DecadeFilterBookAdminParameterEndsWith__Isnull(Book, site)
request = self.request_factory.get('/', {'decade__isnull': 'the 90s'})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.bio_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(force_text(filterspec.title), 'publication decade')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[2]['display'], 'the 1990\'s')
self.assertEqual(choices[2]['selected'], True)
self.assertEqual(choices[2]['query_string'], '?decade__isnull=the+90s')
def test_lookup_with_non_string_value(self):
"""
        Ensure choices are given the selected class when using non-string values
for lookups in SimpleListFilters (#19318).
"""
modeladmin = DepartmentFilterEmployeeAdmin(Employee, site)
request = self.request_factory.get('/', {'department': self.john.department.pk})
changelist = self.get_changelist(request, Employee, modeladmin)
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.john])
filterspec = changelist.get_filters(request)[0][-1]
self.assertEqual(force_text(filterspec.title), 'department')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[1]['display'], 'DEV')
self.assertEqual(choices[1]['selected'], True)
self.assertEqual(choices[1]['query_string'], '?department=%s' % self.john.department.pk)
def test_lookup_with_non_string_value_underscored(self):
"""
        Ensure SimpleListFilter lookups pass lookup_allowed checks when the
        parameter_name attribute contains a double-underscore value (#19182).
"""
modeladmin = DepartmentFilterUnderscoredEmployeeAdmin(Employee, site)
request = self.request_factory.get('/', {'department__whatever': self.john.department.pk})
changelist = self.get_changelist(request, Employee, modeladmin)
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.john])
filterspec = changelist.get_filters(request)[0][-1]
self.assertEqual(force_text(filterspec.title), 'department')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[1]['display'], 'DEV')
self.assertEqual(choices[1]['selected'], True)
self.assertEqual(choices[1]['query_string'], '?department__whatever=%s' % self.john.department.pk)
def test_fk_with_to_field(self):
"""
A filter on a FK respects the FK's to_field attribute (#17972).
"""
modeladmin = EmployeeAdmin(Employee, site)
request = self.request_factory.get('/', {})
changelist = self.get_changelist(request, Employee, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.jack, self.john])
filterspec = changelist.get_filters(request)[0][-1]
self.assertEqual(force_text(filterspec.title), 'department')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[0]['display'], 'All')
self.assertEqual(choices[0]['selected'], True)
self.assertEqual(choices[0]['query_string'], '?')
self.assertEqual(choices[1]['display'], 'Development')
self.assertEqual(choices[1]['selected'], False)
self.assertEqual(choices[1]['query_string'], '?department__code__exact=DEV')
self.assertEqual(choices[2]['display'], 'Design')
self.assertEqual(choices[2]['selected'], False)
self.assertEqual(choices[2]['query_string'], '?department__code__exact=DSN')
# Filter by Department=='Development' --------------------------------
request = self.request_factory.get('/', {'department__code__exact': 'DEV'})
changelist = self.get_changelist(request, Employee, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.john])
filterspec = changelist.get_filters(request)[0][-1]
self.assertEqual(force_text(filterspec.title), 'department')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[0]['display'], 'All')
self.assertEqual(choices[0]['selected'], False)
self.assertEqual(choices[0]['query_string'], '?')
self.assertEqual(choices[1]['display'], 'Development')
self.assertEqual(choices[1]['selected'], True)
self.assertEqual(choices[1]['query_string'], '?department__code__exact=DEV')
self.assertEqual(choices[2]['display'], 'Design')
self.assertEqual(choices[2]['selected'], False)
self.assertEqual(choices[2]['query_string'], '?department__code__exact=DSN')
def test_lookup_with_dynamic_value(self):
"""
Ensure SimpleListFilter can access self.value() inside the lookup.
"""
modeladmin = DepartmentFilterDynamicValueBookAdmin(Book, site)
def _test_choices(request, expected_displays):
changelist = self.get_changelist(request, Book, modeladmin)
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(force_text(filterspec.title), 'publication decade')
choices = tuple(c['display'] for c in filterspec.choices(changelist))
self.assertEqual(choices, expected_displays)
_test_choices(self.request_factory.get('/', {}),
("All", "the 1980's", "the 1990's"))
_test_choices(self.request_factory.get('/', {'publication-decade': 'the 80s'}),
("All", "the 1990's"))
_test_choices(self.request_factory.get('/', {'publication-decade': 'the 90s'}),
("All", "the 1980's"))
def test_list_filter_queryset_filtered_by_default(self):
"""
A list filter that filters the queryset by default gives the correct
full_result_count.
"""
modeladmin = NotNinetiesListFilterAdmin(Book, site)
request = self.request_factory.get('/', {})
changelist = self.get_changelist(request, Book, modeladmin)
changelist.get_results(request)
self.assertEqual(changelist.full_result_count, 4)
| benjaminjkraft/django | tests/admin_filters/tests.py | Python | bsd-3-clause | 46,487 |
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Assign User role to all existing users.
Revision ID: 5b33357784a
Revises: 1afc3824d35b
Create Date: 2013-09-20 15:42:01.558543
"""
# revision identifiers, used by Alembic.
revision = '5b33357784a'
down_revision = '1afc3824d35b'
import sqlalchemy as sa
from alembic import op
from datetime import datetime
from sqlalchemy.sql import table, column, select
person_table = table('people',
column('id', sa.Integer),
)
role_table = table('roles',
column('id', sa.Integer),
column('name', sa.String),
)
user_roles_table = table('user_roles',
column('id', sa.Integer),
column('role_id', sa.Integer),
column('person_id', sa.Integer),
column('context_id', sa.Integer),
column('modified_by_id', sa.Integer),
column('created_at', sa.DateTime),
column('updated_at', sa.DateTime),
)
def upgrade():
users = select([person_table.c.id])
object_editor = select([role_table.c.id])\
.where(role_table.c.name == 'ObjectEditor')\
.limit(1)
program_creator = select([role_table.c.id])\
.where(role_table.c.name == 'ProgramCreator')\
.limit(1)
#FIXME this could be done better in a more recent version of sqlalchemy
#once 0.8.3 is released
#op.execute(user_roles_table.insert()\
#.from_select(['user_id'], users)\
#.from_select(['role_id'], role)\
#.values(context_id=None,))
#FIXME workaround until we can do the proper static generation of the sql
#statement
connection = op.get_bind()
users = connection.execute(users).fetchall()
object_editor = connection.execute(object_editor).fetchone()
program_creator = connection.execute(program_creator).fetchone()
current_datetime = datetime.now()
for user in users:
op.execute(user_roles_table.insert().values(
person_id=user['id'],
role_id=object_editor['id'],
context_id=None,
created_at=current_datetime,
updated_at=current_datetime,
))
op.execute(user_roles_table.insert().values(
person_id=user['id'],
role_id=program_creator['id'],
context_id=None,
created_at=current_datetime,
updated_at=current_datetime,
))
def downgrade():
'''Intentionally does nothing as we can't distinguish between migration
added assignments and not.
'''
pass
| AleksNeStu/ggrc-core | src/ggrc_basic_permissions/migrations/versions/20130920154201_5b33357784a_assign_user_role_to_.py | Python | apache-2.0 | 2,399 |
# __init__.py
# Copyright (C) 2005, 2006, 2007, 2008 Michael Bayer [email protected]
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import inspect
from sqlalchemy.types import \
BLOB, BOOLEAN, CHAR, CLOB, DATE, DATETIME, DECIMAL, FLOAT, INT, \
NCHAR, NUMERIC, SMALLINT, TEXT, TIME, TIMESTAMP, VARCHAR, \
Binary, Boolean, Date, DateTime, Float, Integer, Interval, Numeric, \
PickleType, SmallInteger, String, Text, Time, Unicode, UnicodeText
from sqlalchemy.sql import \
func, modifier, text, literal, literal_column, null, alias, \
and_, or_, not_, \
select, subquery, union, union_all, insert, update, delete, \
join, outerjoin, \
bindparam, outparam, asc, desc, \
except_, except_all, exists, intersect, intersect_all, \
between, case, cast, distinct, extract
from sqlalchemy.schema import \
MetaData, ThreadLocalMetaData, Table, Column, ForeignKey, \
Sequence, Index, ForeignKeyConstraint, PrimaryKeyConstraint, \
CheckConstraint, UniqueConstraint, Constraint, \
PassiveDefault, ColumnDefault, DDL
from sqlalchemy.engine import create_engine, engine_from_config
__all__ = [ name for name, obj in locals().items()
if not (name.startswith('_') or inspect.ismodule(obj)) ]
__version__ = '0.4.5'
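# Illustrative usage sketch of the names re-exported above (not part of the
# original module; the in-memory SQLite URL and table layout are arbitrary
# examples):
#
#   engine = create_engine('sqlite:///:memory:')
#   metadata = MetaData()
#   users = Table('users', metadata,
#                 Column('id', Integer, primary_key=True),
#                 Column('name', String(50)))
#   metadata.create_all(engine)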
| santisiri/popego | envs/ALPHA-POPEGO/lib/python2.5/site-packages/SQLAlchemy-0.4.5-py2.5.egg/sqlalchemy/__init__.py | Python | bsd-3-clause | 1,363 |
# -*- coding: utf-8 -*-
"""
/***************************************************************************
QAD Quantum Aided Design plugin
 class for managing marker symbols
-------------------
begin : 2013-05-22
copyright : iiiii
email : hhhhh
developers : bbbbb aaaaa ggggg
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
from qgis.gui import *
from qad_msg import QadMsg
from qad_variables import QadVariables
#===============================================================================
# QadVertexmarkerIconTypeEnum class.
#===============================================================================
class QadVertexmarkerIconTypeEnum():
   NONE = 0 # none
   CROSS = 1 # cross
   X = 2 # an X
   BOX = 3 # a square
   TRIANGLE = 4 # equilateral triangle pointing up
   CIRCLE = 5 # circle
   CIRCLE_X = 6 # circle with an X at the center
   RHOMBUS = 7 # rhombus
   INFINITY_LINE = 8 # infinite line (------ . .)
   DOUBLE_BOX = 9 # two offset squares
   PERP = 10 # "perpendicular" symbol
   TANGENT = 11 # a circle with a tangent line above it
   DOUBLE_TRIANGLE = 12 # two triangles meeting at a central vertex (hourglass)
   BOX_X = 13 # square with an X at the center
   PARALLEL = 14 # two parallel lines at 45 degrees
   PROGRESS = 15 # line with an X and trailing dots (----X-- . .)
   X_INFINITY_LINE = 16 # X and trailing dots (X-- . .)
   PERP_DEFERRED = 17 # like perpendicular, with trailing dots
   TANGENT_DEFERRED = 18 # like tangent, with trailing dots
class QadVertexMarker(QgsMapCanvasItem):
"""
   Class that manages the vertex markers
"""
#============================================================================
# __init__
#============================================================================
def __init__(self, mapCanvas):
QgsMapCanvasItem.__init__(self, mapCanvas)
self.__canvas = mapCanvas
self.__iconType = QadVertexmarkerIconTypeEnum.X # icon to be shown
self.__iconSize = QadVariables.get(QadMsg.translate("Environment variables", "AUTOSNAPSIZE"))
self.__center = QgsPoint(0, 0) # coordinates of the point in the center
self.__color = QColor(255, 0, 0) # color of the marker
self.__penWidth = 2 # pen width
def __del__(self):
self.removeItem()
def removeItem(self):
self.__canvas.scene().removeItem(self)
def setCenter(self, point):
self.__center = point
pt = self.toCanvasCoordinates(self.__center)
self.setPos(pt)
def setIconType(self, iconType):
self.__iconType = iconType
def setIconSize(self, iconSize):
self.__iconSize = iconSize
def setColor(self, color):
self.__color = color
def setPenWidth(self, width):
self.__penWidth = width
def paint(self, painter, option, widget):
"""
      painter is a QPainter
"""
s = self.__iconSize
pen = QPen(self.__color)
pen.setWidth(self.__penWidth)
painter.setPen(pen)
if self.__iconType == QadVertexmarkerIconTypeEnum.NONE:
pass
elif self.__iconType == QadVertexmarkerIconTypeEnum.CROSS:
         # cross
painter.drawLine(QLineF(-s, 0, s, 0))
painter.drawLine(QLineF( 0, -s, 0, s))
elif self.__iconType == QadVertexmarkerIconTypeEnum.X:
         # an X
painter.drawLine(QLineF(-s, -s, s, s))
painter.drawLine(QLineF(-s, s, s, -s))
elif self.__iconType == QadVertexmarkerIconTypeEnum.BOX:
         # a square
painter.drawLine(QLineF(-s, -s, s, -s))
painter.drawLine(QLineF( s, -s, s, s))
painter.drawLine(QLineF( s, s, -s, s))
painter.drawLine(QLineF(-s, s, -s, -s))
elif self.__iconType == QadVertexmarkerIconTypeEnum.TRIANGLE:
         # equilateral triangle pointing up
painter.drawLine(QLineF(-s, s, s, s))
painter.drawLine(QLineF( s, s, 0, -s))
painter.drawLine(QLineF( 0, -s, -s, s))
elif self.__iconType == QadVertexmarkerIconTypeEnum.CIRCLE:
         # circle
         # the line is thinner
pen.setWidth(self.__penWidth / 2)
painter.setPen(pen)
painter.drawEllipse(QPointF(0, 0), s, s)
pen.setWidth(self.__penWidth)
painter.setPen(pen)
elif self.__iconType == QadVertexmarkerIconTypeEnum.CIRCLE_X:
         # circle with an X at the center
         # the line is thinner
pen.setWidth(self.__penWidth / 2)
painter.setPen(pen)
painter.drawEllipse(QPointF(0, 0), s, s)
painter.drawLine(QLineF(-s, -s, s, s))
painter.drawLine(QLineF(-s, s, s, -s))
pen.setWidth(self.__penWidth)
painter.setPen(pen)
elif self.__iconType == QadVertexmarkerIconTypeEnum.RHOMBUS:
         # rhombus
painter.drawLine(QLineF( 0, -s, -s, 0))
painter.drawLine(QLineF(-s, 0, 0, s))
painter.drawLine(QLineF( 0, s, s, 0))
painter.drawLine(QLineF( s, 0, 0, -s))
elif self.__iconType == QadVertexmarkerIconTypeEnum.INFINITY_LINE:
         # infinite line (------ . .)
l = self.__penWidth
painter.drawLine(QLineF(-s, 0, 0, 0))
painter.drawLine(QLineF(2 * l, 0, 2 * l, 0))
painter.drawLine(QLineF(4 * l, 0, 4 * l, 0))
elif self.__iconType == QadVertexmarkerIconTypeEnum.DOUBLE_BOX:
         # two offset squares
l = (s / 4)
painter.drawLine(QLineF(-s, -s, -s, l))
painter.drawLine(QLineF(-s, l, -l, l))
painter.drawLine(QLineF(-l, l, -l, s))
painter.drawLine(QLineF(-l, s, s, s))
painter.drawLine(QLineF( s, s, s, -l))
painter.drawLine(QLineF( s, -l, l, -l))
painter.drawLine(QLineF( l, -l, l, -s))
painter.drawLine(QLineF( l, -s, -s, -s))
elif self.__iconType == QadVertexmarkerIconTypeEnum.PERP:
# simbolo di "perpendicolare"
painter.drawLine(QLineF(-s, -s, -s, s))
painter.drawLine(QLineF(-s, s, s, s))
painter.drawLine(QLineF(-s, 0, 0, 0))
painter.drawLine(QLineF( 0, 0, 0, s))
elif self.__iconType == QadVertexmarkerIconTypeEnum.TANGENT:
         # a circle with a tangent line above it
         # the line is thinner
l = s - self.__penWidth
pen.setWidth(self.__penWidth / 2)
painter.setPen(pen)
painter.drawEllipse(QPointF(0, 0), l + 1, l + 1)
pen.setWidth(self.__penWidth)
painter.setPen(pen)
painter.drawLine(QLineF(-s, -s, s, -s))
elif self.__iconType == QadVertexmarkerIconTypeEnum.DOUBLE_TRIANGLE:
         # two triangles meeting at a central vertex (hourglass)
         # the oblique lines are thinner
pen.setWidth(self.__penWidth / 2)
painter.setPen(pen)
painter.drawLine(QLineF(-s, -s, s, s))
painter.drawLine(QLineF( s, -s, -s, s))
pen.setWidth(self.__penWidth)
painter.setPen(pen)
painter.drawLine(QLineF(-s, -s, s, -s))
painter.drawLine(QLineF(-s, s, s, s))
elif self.__iconType == QadVertexmarkerIconTypeEnum.BOX_X:
         # square with an X at the center
painter.drawLine(QLineF(-s, -s, s, -s))
painter.drawLine(QLineF( s, -s, s, s))
painter.drawLine(QLineF( s, s, -s, s))
painter.drawLine(QLineF(-s, s, -s, -s))
         # the oblique lines of the X are thinner
pen.setWidth(self.__penWidth / 2)
painter.setPen(pen)
painter.drawLine(QLineF(-s, -s, s, s))
painter.drawLine(QLineF(-s, s, s, -s))
pen.setWidth(self.__penWidth)
painter.setPen(pen)
elif self.__iconType == QadVertexmarkerIconTypeEnum.PARALLEL:
         # two parallel lines at 45 degrees
painter.drawLine(QLineF(-s, 0, 0, -s))
painter.drawLine(QLineF( 0, s, s, 0))
elif self.__iconType == QadVertexmarkerIconTypeEnum.PROGRESS:
         # line with an X and trailing dots (----X-- . .)
l = self.__penWidth
painter.drawLine(QLineF(-s, 0, 0, 0))
painter.drawLine(QLineF(2 * l, 0, 2 * l, 0))
painter.drawLine(QLineF(4 * l, 0, 4 * l, 0))
         # the oblique lines of the X are thinner
pen.setWidth(self.__penWidth / 2)
l = s / 2
painter.setPen(pen)
painter.drawLine(QLineF(-l, -l, l, l))
painter.drawLine(QLineF(-l, l, l, -l))
pen.setWidth(self.__penWidth)
painter.setPen(pen)
elif self.__iconType == QadVertexmarkerIconTypeEnum.X_INFINITY_LINE:
         # X and trailing dots (X-- . .)
l = self.__penWidth
painter.drawLine(QLineF(2 * l, 0, 2 * l, 0))
painter.drawLine(QLineF(4 * l, 0, 4 * l, 0))
         # the oblique lines of the X are thinner
pen.setWidth(self.__penWidth / 2)
l = s / 2
painter.setPen(pen)
painter.drawLine(QLineF(-l, -l, l, l))
painter.drawLine(QLineF(-l, l, l, -l))
pen.setWidth(self.__penWidth)
painter.setPen(pen)
elif self.__iconType == QadVertexmarkerIconTypeEnum.PERP_DEFERRED:
painter.drawLine(QLineF(-s, -s, -s, s))
painter.drawLine(QLineF(-s, s, s, s))
painter.drawLine(QLineF(-s, 0, 0, 0))
painter.drawLine(QLineF( 0, 0, 0, s))
# simbolo di "perpendicolare" con i puntini
l = s - self.__penWidth
l = l + (self.__penWidth * 2)
painter.drawLine(QLineF(l, 0, l, 0))
l = l + (self.__penWidth * 2)
painter.drawLine(QLineF(l, 0, l, 0))
elif self.__iconType == QadVertexmarkerIconTypeEnum.TANGENT_DEFERRED:
         # a circle with a tangent line above it
         # the line is thinner
l = s - self.__penWidth
pen.setWidth(self.__penWidth / 2)
painter.setPen(pen)
painter.drawEllipse(QPointF(0, 0), l + 1, l + 1)
pen.setWidth(self.__penWidth)
painter.setPen(pen)
painter.drawLine(QLineF(-s, -s, s, -s))
         # like tangent, with trailing dots
l = l + (self.__penWidth * 2)
painter.drawLine(QLineF(l, 0, l, 0))
l = l + (self.__penWidth * 2)
painter.drawLine(QLineF(l, 0, l, 0))
def boundingRect(self):
a = self.__iconSize / 2.0 + 1
width = 2 * a + self.__penWidth * 2
height = 2 * a
return QRectF(-a, -a, width, height)
def updatePosition(self):
self.setCenter(self.__center)
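# Illustrative usage sketch (not part of the original module): placing a marker
# on a QGIS map canvas; `canvas` is assumed to be an existing QgsMapCanvas.
#
#   marker = QadVertexMarker(canvas)
#   marker.setIconType(QadVertexmarkerIconTypeEnum.CROSS)
#   marker.setColor(QColor(0, 0, 255))
#   marker.setPenWidth(2)
#   marker.setCenter(QgsPoint(100.0, 200.0))
#   ...
#   marker.removeItem()   # detach from the canvas scene when finished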
| geosim/QAD | qad_vertexmarker.py | Python | gpl-3.0 | 11,640 |
from horizon.test import helpers as test
class VmTests(test.TestCase):
# Unit tests for vm.
def test_me(self):
self.assertTrue(1 + 1 == 2)
| jorik041/shmoocon_2014_talk | caravan/caravan/dashboards/infrastructure/vm/tests.py | Python | bsd-2-clause | 157 |
# encoding=utf-8
import unittest
import unicodeblock.lower
class UniversalLower(unittest.TestCase):
def test_lower_kana(self):
self.assertEqual(
'ミカサ・アッカーマン',
unicodeblock.lower.lower_kanas('ミカサ・アッカーマン'))
self.assertEqual(
'ノビ太ノクセニ',
unicodeblock.lower.lower_kanas('のび太のくせに'))
self.assertEqual(
'ノビ太ノクセニ',
unicodeblock.lower.lower_kanas('のび太のクセに'))
def test_fullwidth_letters(self):
self.assertEqual('the quick brown fox jumps over the lazy dog.',
unicodeblock.lower.lower_fullwidths(
'The quick brown fox ' +
'jumps over the lazy dog.'))
def test_ulower(self):
self.assertEqual(
'ノビ太ノクセニ',
unicodeblock.lower.ulower('のび太のクセに'))
self.assertEqual('the quick brown fox jumps over the lazy dog.',
unicodeblock.lower.ulower(
'The quick brown fox ' +
'jumps over the lazy dog.'))
self.assertEqual('ノビ太ノクセニ ' +
'the quick brown fox jumps over the lazy dog. ' +
"the browns' kitsune",
unicodeblock.lower.ulower(
'のび太のクセに ' +
'The quick brown fox ' +
'jumps over the lazy dog. ' +
"The Browns' kitsune"))
| neuront/pyunicodeblock | test/ulower.py | Python | mit | 1,892 |
import bz2
import sys
import numpy as np
from csv import DictReader
if __name__ == '__main__':
if len(sys.argv) < 2:
        print 'Usage: bid_percentile.py <campaign>'
exit(-1)
file_all_name = "../../make-ipinyou-data/" + str(sys.argv[1]) + "/train.log.txt"
arr = []
for t, line in enumerate(DictReader(open(file_all_name), delimiter='\t')):
arr.append(line['payprice'])
arr = np.array(arr, np.float)
per = [1, 10, 20, 30, 40, 50, 60, 70, 80, 90, 99]
qua = np.percentile(arr, per)
print("--- Percentiles For Campaign {0}---- ".format(sys.argv[1]))
print(per)
print("--- Values For Campaign {0} ----".format(sys.argv[1]))
print(qua)
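# Illustrative invocation (assumes the make-ipinyou-data layout referenced
# above, with a campaign directory such as "1458" containing train.log.txt):
#
#   python bid_percentile.py 1458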
| orenov/optimal-rtb | python/bid_percentile.py | Python | apache-2.0 | 692 |
import looker_sdk
from looker_sdk import models
from looker_sdk.rtl import transport
import configparser
import hashlib
import csv
config_file = "../looker.ini"
sdk = looker_sdk.init31()
def main():
"""Compare the output of content validator runs
in production and development mode. Additional
broken content in development mode will be
outputted to a csv file.
Use this script to test whether LookML changes
will result in new broken content."""
base_url = get_base_url()
space_data = get_space_data()
print("Checking for broken content in production.")
broken_content_prod = parse_broken_content(
base_url, get_broken_content(), space_data
)
checkout_dev_branch()
print("Checking for broken content in dev branch.")
broken_content_dev = parse_broken_content(
base_url, get_broken_content(), space_data
)
new_broken_content = compare_broken_content(broken_content_prod, broken_content_dev)
if new_broken_content:
write_broken_content_to_file(new_broken_content, "new_broken_content.csv")
else:
print("No new broken content in development branch.")
def get_base_url():
""" Pull base url from looker.ini, remove port"""
config = configparser.ConfigParser()
config.read(config_file)
full_base_url = config.get("Looker", "base_url")
    base_url = full_base_url[: full_base_url.index(":19999")]
return base_url
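# Illustrative looker.ini layout assumed by get_base_url() above (placeholder
# values, not part of the original script):
#
#   [Looker]
#   base_url=https://mycompany.looker.com:19999
#   client_id=YourClientID
#   client_secret=YourClientSecret
#   verify_ssl=True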
def get_space_data():
"""Collect all space information"""
space_data = sdk.all_spaces(fields="id, parent_id, name")
return space_data
def get_broken_content():
"""Collect broken content"""
broken_content = sdk.content_validation(
transport_options=transport.TransportSettings(timeout=600)
).content_with_errors
return broken_content
def parse_broken_content(base_url, broken_content, space_data):
"""Parse and return relevant data from content validator"""
output = []
for item in broken_content:
if item.dashboard:
content_type = "dashboard"
else:
content_type = "look"
item_content_type = getattr(item, content_type)
id = item_content_type.id
name = item_content_type.title
space_id = item_content_type.space.id
space_name = item_content_type.space.name
errors = item.errors
url = f"{base_url}/{content_type}s/{id}"
space_url = "{}/spaces/{}".format(base_url, space_id)
if content_type == "look":
element = None
else:
dashboard_element = item.dashboard_element
element = dashboard_element.title if dashboard_element else None
# Lookup additional space information
space = next(i for i in space_data if str(i.id) == str(space_id))
parent_space_id = space.parent_id
        # Old version of API has issue with None type for all_spaces() call
if parent_space_id is None or parent_space_id == "None":
parent_space_url = None
parent_space_name = None
else:
parent_space_url = "{}/spaces/{}".format(base_url, parent_space_id)
parent_space = next(
(i for i in space_data if str(i.id) == str(parent_space_id)), None
)
# Handling an edge case where space has no name. This can happen
# when users are improperly generated with the API
try:
parent_space_name = parent_space.name
except AttributeError:
parent_space_name = None
# Create a unique hash for each record. This is used to compare
# results across content validator runs
unique_id = hashlib.md5(
"-".join(
[str(id), str(element), str(name), str(errors), str(space_id)]
).encode()
).hexdigest()
data = {
"unique_id": unique_id,
"content_type": content_type,
"name": name,
"url": url,
"dashboard_element": element,
"space_name": space_name,
"space_url": space_url,
"parent_space_name": parent_space_name,
"parent_space_url": parent_space_url,
"errors": str(errors),
}
output.append(data)
return output
def compare_broken_content(broken_content_prod, broken_content_dev):
"""Compare output between 2 content_validation runs"""
unique_ids_prod = set([i["unique_id"] for i in broken_content_prod])
unique_ids_dev = set([i["unique_id"] for i in broken_content_dev])
new_broken_content_ids = unique_ids_dev.difference(unique_ids_prod)
new_broken_content = []
for item in broken_content_dev:
if item["unique_id"] in new_broken_content_ids:
new_broken_content.append(item)
return new_broken_content
def checkout_dev_branch():
"""Enter dev workspace"""
sdk.update_session(models.WriteApiSession(workspace_id="dev"))
def write_broken_content_to_file(broken_content, output_csv_name):
"""Export new content errors in dev branch to csv file"""
try:
with open(output_csv_name, "w") as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=list(broken_content[0].keys()))
writer.writeheader()
for data in broken_content:
writer.writerow(data)
print("Broken content information outputed to {}".format(output_csv_name))
except IOError:
print("I/O error")
main()
| looker-open-source/sdk-examples | python/content_validator_comparison.py | Python | mit | 5,525 |
""" BloomFilter and BloomFiter on Disk, python implementation
License: MIT
Author: Tyler Barrus ([email protected])
URL: https://github.com/barrust/bloom
"""
import math
import os
from array import array
from binascii import hexlify, unhexlify
from io import BytesIO, IOBase
from mmap import mmap
from numbers import Number
from pathlib import Path
from shutil import copyfile
from struct import Struct
from textwrap import wrap
from typing import ByteString, Tuple, Union
from ..exceptions import InitializationError, NotSupportedError
from ..hashes import HashFuncT, HashResultsT, KeyT, default_fnv_1a
from ..utilities import MMap, is_hex_string, is_valid_file
MISMATCH_MSG = "The parameter second must be of type BloomFilter or a BloomFilterOnDisk"
SimpleBloomT = Union["BloomFilter", "BloomFilterOnDisk"]
def _verify_not_type_mismatch(second: SimpleBloomT) -> bool:
"""verify that there is not a type mismatch"""
return isinstance(second, (BloomFilter, BloomFilterOnDisk))
class BloomFilter:
"""Simple Bloom Filter implementation for use in python; It can read and write the
same format as the c version (https://github.com/barrust/bloom)
Args:
est_elements (int): The number of estimated elements to be added
false_positive_rate (float): The desired false positive rate
filepath (str): Path to file to load
hex_string (str): Hex based representation to be loaded
hash_function (function): Hashing strategy function to use `hf(key, number)`
Returns:
BloomFilter: A Bloom Filter object
Note:
Initialization order of operations:
1) From file
2) From Hex String
3) From params
"""
__slots__ = [
"_on_disk",
"_type",
"_typecode",
"_bits_per_elm",
"_bloom",
"_est_elements",
"_fpr",
"_bloom_length",
"_hash_func",
"_els_added",
"_number_hashes",
"_num_bits",
]
def __init__(
self,
est_elements: Union[int, None] = None,
false_positive_rate: Union[float, None] = None,
filepath: Union[str, Path, None] = None,
hex_string: Union[str, None] = None,
hash_function: Union[HashFuncT, None] = None,
):
# set some things up
self._on_disk = False
self._type = "regular"
self._typecode = "B"
self._bits_per_elm = 8.0
if is_valid_file(filepath):
self._load(filepath, hash_function)
elif is_hex_string(hex_string):
self._load_hex(hex_string, hash_function)
else:
if est_elements is None or false_positive_rate is None:
raise InitializationError("Insufecient parameters to set up the Bloom Filter")
# calc values
fpr, n_hashes, n_bits = self._get_optimized_params(est_elements, false_positive_rate)
self._set_values(est_elements, fpr, n_hashes, n_bits, hash_function)
self._bloom = array(self._typecode, [0]) * self._bloom_length
# NOTE: these should be "FOOTERS" and not headers
_FOOTER_STRUCT = Struct("QQf")
_FOOTER_STRUCT_BE = Struct(">QQf")
_FPR_STRUCT = Struct("f")
_IMPT_STRUCT = Struct("B")
def __contains__(self, key: KeyT) -> Union[int, bool]:
"""setup the `in` keyword"""
return self.check(key)
def __str__(self) -> str:
"""output statistics of the bloom filter"""
on_disk = "no" if self.is_on_disk is False else "yes"
stats = (
"BloomFilter:\n"
"\tbits: {0}\n"
"\testimated elements: {1}\n"
"\tnumber hashes: {2}\n"
"\tmax false positive rate: {3:.6f}\n"
"\tbloom length (8 bits): {4}\n"
"\telements added: {5}\n"
"\testimated elements added: {6}\n"
"\tcurrent false positive rate: {7:.6f}\n"
"\texport size (bytes): {8}\n"
"\tnumber bits set: {9}\n"
"\tis on disk: {10}\n"
)
return stats.format(
self.number_bits,
self.estimated_elements,
self.number_hashes,
self.false_positive_rate,
self.bloom_length,
self.elements_added,
self.estimate_elements(),
self.current_false_positive_rate(),
self.export_size(),
self._cnt_number_bits_set(),
on_disk,
)
def __bytes__(self) -> bytes:
"""Export bloom filter to `bytes`"""
with BytesIO() as f:
self.export(f)
return f.getvalue()
# Some Properties
@property
def false_positive_rate(self) -> float:
"""float: The maximum desired false positive rate
Note:
Not settable"""
return self._fpr
@property
def estimated_elements(self) -> int:
"""int: The maximum number of elements estimated to be added at setup
Note:
Not settable"""
return self._est_elements
@property
def number_hashes(self) -> int:
"""int: The number of hashes required for the Bloom Filter hashing strategy
Note:
Not settable"""
return self._number_hashes
@property
def number_bits(self) -> int:
"""int: Number of bits in the Bloom Filter
Note:
Not settable"""
return self._num_bits
@property
def elements_added(self) -> int:
"""int: Number of elements added to the Bloom Filter
Note:
Changing this can cause the current false positive rate to be reported incorrectly"""
return self._els_added
@elements_added.setter
def elements_added(self, val: int):
"""set the els added"""
self._els_added = val
@property
def is_on_disk(self) -> bool:
"""bool: Is the Bloom Filter on Disk or not
Note:
Not settable"""
return self._on_disk
@property
def bloom_length(self) -> int:
"""int: Length of the Bloom Filter array
Note:
Not settable"""
return self._bloom_length
@property
def bloom(self) -> array:
"""list(int): The bit/int array"""
return self._bloom
@property
def hash_function(self) -> HashFuncT:
"""function: The hash function used
Note:
Not settable"""
return self._hash_func
# Working things
def clear(self) -> None:
"""Clear or reset the Counting Bloom Filter"""
self._els_added = 0
for idx in range(self._bloom_length):
self._bloom[idx] = 0
def hashes(self, key: KeyT, depth: Union[int, None] = None) -> HashResultsT:
"""Return the hashes based on the provided key
Args:
            key (str): The element for which to generate hashes
depth (int): Number of permutations of the hash to generate; if None, generate `number_hashes`
Returns:
List(int): A list of the hashes for the key in int form"""
tmp = depth if depth is not None else self._number_hashes
return self._hash_func(key, tmp)
def add(self, key: KeyT) -> None:
"""Add the key to the Bloom Filter
Args:
key (str): The element to be inserted"""
self.add_alt(self.hashes(key))
def add_alt(self, hashes: HashResultsT) -> None:
"""Add the element represented by hashes into the Bloom Filter
Args:
hashes (list): A list of integers representing the key to insert"""
for i in range(0, self._number_hashes):
k = hashes[i] % self._num_bits
idx = k // 8
self._bloom[idx] = self._bloom[idx] | (1 << (k % 8))
self._els_added += 1
def check(self, key: KeyT) -> bool:
"""Check if the key is likely in the Bloom Filter
Args:
key (str): The element to be checked
Returns:
            bool: True if likely encountered, False if definitely not"""
return self.check_alt(self.hashes(key))
def check_alt(self, hashes: HashResultsT) -> bool:
"""Check if the element represented by hashes is in the Bloom Filter
Args:
hashes (list): A list of integers representing the key to check
Returns:
            bool: True if likely encountered, False if definitely not"""
for i in range(self._number_hashes):
k = hashes[i] % self._num_bits
if (self._bloom[k // 8] & (1 << (k % 8))) == 0:
return False
return True
def export_hex(self) -> str:
"""Export the Bloom Filter as a hex string
Return:
str: Hex representation of the Bloom Filter"""
footer_bytes = self._FOOTER_STRUCT_BE.pack(
self.estimated_elements,
self.elements_added,
self.false_positive_rate,
)
bytes_string = hexlify(bytearray(self._bloom[: self.bloom_length])) + hexlify(footer_bytes)
return str(bytes_string, "utf-8")
def export(self, file: Union[Path, str, IOBase, mmap]) -> None:
"""Export the Bloom Filter to disk
Args:
filename (str): The filename to which the Bloom Filter will be written."""
if not isinstance(file, (IOBase, mmap)):
with open(file, "wb") as filepointer:
self.export(filepointer) # type: ignore
else:
self._bloom.tofile(file) # type: ignore
file.write(
self._FOOTER_STRUCT.pack(
self.estimated_elements,
self.elements_added,
self.false_positive_rate,
)
)
def export_c_header(self, filename: Union[str, Path]) -> None:
"""Export the Bloom Filter to disk as a C header file.
Args:
filename (str): The filename to which the Bloom Filter will be written."""
data = (
" " + line
for line in wrap(", ".join(("0x{:02x}".format(e) for e in bytearray.fromhex(self.export_hex()))), 80)
)
if self._type in ["regular", "regular-on-disk"]:
bloom_type = "standard BloomFilter"
else:
bloom_type = "CountingBloomFilter"
with open(filename, "w") as file:
print("/* BloomFilter Export of a {} */".format(bloom_type), file=file)
print("#include <inttypes.h>", file=file)
print("const uint64_t estimated_elements = ", self.estimated_elements, ";", sep="", file=file)
print("const uint64_t elements_added = ", self.elements_added, ";", sep="", file=file)
print("const float false_positive_rate = ", self.false_positive_rate, ";", sep="", file=file)
print("const uint64_t number_bits = ", self.number_bits, ";", sep="", file=file)
print("const unsigned int number_hashes = ", self.number_hashes, ";", sep="", file=file)
print("const unsigned char bloom[] = {", *data, "};", sep="\n", file=file)
@classmethod
def frombytes(cls, b: ByteString, hash_function: Union[HashFuncT, None] = None) -> "BloomFilter":
"""
Args:
b (ByteString): The bytes to load as a Bloom Filter
hash_function (function): Hashing strategy function to use `hf(key, number)`
Returns:
BloomFilter: A Bloom Filter object
"""
offset = cls._FOOTER_STRUCT.size
est_els, els_added, fpr, _, _ = cls._parse_footer(cls._FOOTER_STRUCT, bytes(b[-offset:]))
blm = BloomFilter(est_elements=est_els, false_positive_rate=fpr, hash_function=hash_function)
blm._load(b, hash_function=blm.hash_function)
blm._els_added = els_added
return blm
def estimate_elements(self) -> int:
"""Estimate the number of unique elements added
Returns:
int: Number of elements estimated to be inserted
Note:
Returns -1 if all bits in the Bloom filter are set"""
setbits = self._cnt_number_bits_set()
if setbits >= self.number_bits:
return -1 # not sure this is the "best", but it would signal something is wrong
log_n = math.log(1 - (float(setbits) / float(self.number_bits)))
tmp = float(self.number_bits) / float(self.number_hashes)
return int(-1 * tmp * log_n)
def export_size(self) -> int:
"""Calculate the size of the bloom on disk
Returns:
int: Size of the Bloom Filter when exported to disk"""
return (self.bloom_length * self._IMPT_STRUCT.size) + self._FOOTER_STRUCT.size
def current_false_positive_rate(self) -> float:
"""Calculate the current false positive rate based on elements added
Return:
float: The current false positive rate"""
num = self.number_hashes * -1 * self.elements_added
dbl = num / self.number_bits
exp = math.exp(dbl)
return math.pow((1 - exp), self.number_hashes)
def intersection(self, second) -> Union["BloomFilter", None]:
"""Return a new Bloom Filter that contains the intersection of the
two
Args:
second (BloomFilter): The Bloom Filter with which to take the intersection
Returns:
BloomFilter: The new Bloom Filter containing the intersection
Raises:
TypeError: When second is not either a :class:`BloomFilter` or :class:`BloomFilterOnDisk`
Note:
`second` may be a BloomFilterOnDisk object
Note:
If `second` is not of the same size (false_positive_rate and est_elements) then this will return `None`"""
if not _verify_not_type_mismatch(second):
raise TypeError(MISMATCH_MSG)
if self._verify_bloom_similarity(second) is False:
return None
res = BloomFilter(
self.estimated_elements,
self.false_positive_rate,
hash_function=self.hash_function,
)
for i in range(0, res.bloom_length):
res._bloom[i] = self._get_element(i) & second._get_element(i)
res.elements_added = res.estimate_elements()
return res
def union(self, second: SimpleBloomT) -> Union["BloomFilter", None]:
"""Return a new Bloom Filter that contains the union of the two
Args:
second (BloomFilter): The Bloom Filter with which to calculate the union
Returns:
BloomFilter: The new Bloom Filter containing the union
Raises:
TypeError: When second is not either a :class:`BloomFilter` or :class:`BloomFilterOnDisk`
Note:
`second` may be a BloomFilterOnDisk object
Note:
If `second` is not of the same size (false_positive_rate and est_elements) then this will return `None`"""
if not _verify_not_type_mismatch(second):
raise TypeError(MISMATCH_MSG)
if self._verify_bloom_similarity(second) is False:
return None
res = BloomFilter(
self.estimated_elements,
self.false_positive_rate,
hash_function=self.hash_function,
)
for i in range(self.bloom_length):
res._bloom[i] = self._get_element(i) | second._get_element(i)
res.elements_added = res.estimate_elements()
return res
def jaccard_index(self, second: SimpleBloomT) -> Union[float, None]:
"""Calculate the jaccard similarity score between two Bloom Filters
Args:
second (BloomFilter): The Bloom Filter to compare with
Returns:
float: A numeric value between 0 and 1 where 1 is identical and 0 means completely different
Raises:
TypeError: When second is not either a :class:`BloomFilter` or :class:`BloomFilterOnDisk`
Note:
`second` may be a BloomFilterOnDisk object
Note:
If `second` is not of the same size (false_positive_rate and est_elements) then this will return `None`"""
if not _verify_not_type_mismatch(second):
raise TypeError(MISMATCH_MSG)
if self._verify_bloom_similarity(second) is False:
return None
count_union = 0
count_int = 0
for i in range(0, self.bloom_length):
el1 = self._get_element(i)
el2 = second._get_element(i)
t_union = el1 | el2
t_intersection = el1 & el2
count_union += bin(t_union).count("1")
count_int += bin(t_intersection).count("1")
if count_union == 0:
return 1.0
return count_int / count_union
# More private functions
@classmethod
def _get_optimized_params(cls, estimated_elements: int, false_positive_rate: float) -> Tuple[float, int, int]:
valid_prms = isinstance(estimated_elements, Number) and estimated_elements > 0
if not valid_prms:
msg = "Bloom: estimated elements must be greater than 0"
raise InitializationError(msg)
valid_prms = isinstance(false_positive_rate, Number) and 0.0 <= false_positive_rate < 1.0
if not valid_prms:
msg = "Bloom: false positive rate must be between 0.0 and 1.0"
raise InitializationError(msg)
fpr = cls._FPR_STRUCT.pack(float(false_positive_rate))
t_fpr = float(cls._FPR_STRUCT.unpack(fpr)[0]) # to mimic the c version!
        # optimal calculations
m_bt = math.ceil((-estimated_elements * math.log(t_fpr)) / 0.4804530139182) # ln(2)^2
number_hashes = int(round(0.6931471805599453 * m_bt / estimated_elements)) # math.log(2.0)
if number_hashes == 0:
raise InitializationError("Bloom: Number hashes is zero; unusable parameters provided")
return t_fpr, number_hashes, m_bt
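    # Worked example (illustrative, not part of the original module): for
    # est_elements=1000 and false_positive_rate=0.05 the formulas above give
    # m = ceil(1000 * -ln(0.05) / ln(2)^2) = 6236 bits and
    # k = round(ln(2) * 6236 / 1000) = 4 hashes.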
def _set_values(
self, est_els: int, fpr: float, n_hashes: int, n_bits: int, hash_func: Union[HashFuncT, None]
) -> None:
self._est_elements = est_els
self._fpr = fpr
self._bloom_length = math.ceil(n_bits / self._bits_per_elm)
if hash_func is not None:
self._hash_func = hash_func
else:
self._hash_func = default_fnv_1a
self._els_added = 0
self._number_hashes = n_hashes
self._num_bits = n_bits
def _load_hex(self, hex_string: str, hash_function: Union[HashFuncT, None] = None) -> None:
"""placeholder for loading from hex string"""
offset = self._FOOTER_STRUCT_BE.size * 2
est_els, els_added, fpr, n_hashes, n_bits = self._parse_footer(
self._FOOTER_STRUCT_BE, unhexlify(hex_string[-offset:])
)
self._set_values(est_els, fpr, n_hashes, n_bits, hash_function)
self._bloom = array(self._typecode, unhexlify(hex_string[:-offset]))
self._els_added = els_added
def _load(
self,
file: Union[Path, str, IOBase, mmap, ByteString],
hash_function: Union[HashFuncT, None] = None,
) -> None:
"""load the Bloom Filter from file or bytes"""
if not isinstance(file, (IOBase, mmap, ByteString)):
file = Path(file)
with MMap(file) as filepointer:
self._load(filepointer, hash_function)
else:
offset = self._FOOTER_STRUCT.size
est_els, els_added, fpr, n_hashes, n_bits = self._parse_footer(
self._FOOTER_STRUCT, file[-offset:] # type: ignore
)
self._set_values(est_els, fpr, n_hashes, n_bits, hash_function)
# now read in the bit array!
self._parse_bloom_array(file, self._IMPT_STRUCT.size * self.bloom_length) # type: ignore
self._els_added = els_added
@classmethod
def _parse_footer(cls, stct: Struct, d: ByteString) -> Tuple[int, int, float, int, int]:
"""parse footer returning the data: estimated elements, elements added,
        false positive rate, number hashes, number bits"""
e_elms, e_added, fpr = stct.unpack_from(bytearray(d))
est_elements = e_elms
els_added = e_added
fpr = float(fpr)
fpr, n_hashes, n_bits = cls._get_optimized_params(est_elements, fpr)
return int(est_elements), int(els_added), float(fpr), int(n_hashes), int(n_bits)
def _parse_bloom_array(self, b: ByteString, offset: int) -> None:
"""parse bytes into the bloom array"""
self._bloom = array(self._typecode, bytes(b[:offset]))
def _cnt_number_bits_set(self) -> int:
"""calculate the total number of set bits in the bloom"""
setbits = 0
for i in range(0, self.bloom_length):
setbits += bin(self._bloom[i]).count("1")
return setbits
def _get_element(self, idx: int) -> int:
"""wrappper for getting an element from the Bloom Filter!"""
return self._bloom[idx]
def _verify_bloom_similarity(self, second: SimpleBloomT) -> bool:
"""can the blooms be used in intersection, union, or jaccard index"""
hash_match = self.number_hashes != second.number_hashes
same_bits = self.number_bits != second.number_bits
next_hash = self.hashes("test") != second.hashes("test")
if hash_match or same_bits or next_hash:
return False
return True
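# Illustrative usage sketch for BloomFilter (not part of the original module;
# the keys added below are arbitrary examples):
#
#   blm = BloomFilter(est_elements=1000, false_positive_rate=0.05)
#   blm.add("google.com")
#   "google.com" in blm       # True (subject to the false positive rate)
#   "example.org" in blm      # False means definitely never added
#   hex_repr = blm.export_hex()
#   blm2 = BloomFilter(hex_string=hex_repr)   # rebuild from the hex export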
class BloomFilterOnDisk(BloomFilter):
"""Simple Bloom Filter implementation directly on disk for use in python;
It can read and write the same format as the c version (https://github.com/barrust/bloom)
Args:
filepath (str): Path to file to load
est_elements (int): The number of estimated elements to be added
false_positive_rate (float): The desired false positive rate
hex_string (str): Hex based representation to be loaded
hash_function (function): Hashing strategy function to use \
`hf(key, number)`
Returns:
BloomFilterOnDisk: A Bloom Filter object
Raises:
NotSupportedError: Loading using a hex string is not supported
Note:
Initialization order of operations:
            1) Estimated elements and false positive rate
2) From Hex String
3) Only filepath provided
"""
__slots__ = ["_filepath", "__file_pointer"]
def __init__(
self,
filepath: Union[str, Path],
est_elements: Union[int, None] = None,
false_positive_rate: Union[float, None] = None,
hex_string: Union[str, None] = None,
hash_function: Union[HashFuncT, None] = None,
) -> None:
# set some things up
self._filepath = Path(filepath)
self.__file_pointer = None
self._type = "regular-on-disk"
self._typecode = "B"
self._bits_per_elm = 8.0
self._on_disk = True
if is_hex_string(hex_string):
msg = "Loading from hex_string is currently not supported by the on disk Bloom Filter"
raise NotSupportedError(msg)
if est_elements is not None and false_positive_rate is not None:
fpr, n_hashes, n_bits = self._get_optimized_params(est_elements, false_positive_rate)
self._set_values(est_elements, fpr, n_hashes, n_bits, hash_function)
with open(filepath, "wb") as filepointer:
(array(self._typecode, [0]) * self.bloom_length).tofile(filepointer)
filepointer.write(self._FOOTER_STRUCT.pack(est_elements, 0, false_positive_rate))
filepointer.flush()
self._load(filepath, hash_function)
elif is_valid_file(self._filepath):
self._load(self._filepath.name, hash_function) # need .name for python 3.5
else:
raise InitializationError("Insufecient parameters to set up the On Disk Bloom Filter")
def __del__(self) -> None:
"""handle if user doesn't close the on disk Bloom Filter"""
self.close()
def __bytes__(self) -> bytes:
return bytes(self._bloom)
def close(self) -> None:
"""Clean up the BloomFilterOnDisk object"""
if self.__file_pointer is not None and not self.__file_pointer.closed:
self.__update()
self._bloom.close()
self.__file_pointer.close()
self.__file_pointer = None
def export(self, filename: Union[str, Path]) -> None: # type: ignore
"""Export to disk if a different location
Args:
filename (str): The filename to which the Bloom Filter will be exported
Note:
Only exported if the filename is not the original filename"""
self.__update()
if filename and Path(filename) != self._filepath:
copyfile(self._filepath.name, str(filename))
# otherwise, nothing to do!
def _load(self, filepath: Union[str, Path], hash_function: Union[HashFuncT, None] = None): # type: ignore
"""load the Bloom Filter on disk"""
# read the file, set the optimal params
# mmap everything
with open(filepath, "r+b") as filepointer:
offset = self._FOOTER_STRUCT.size
filepointer.seek(offset * -1, os.SEEK_END)
est_els, _, fpr = self._FOOTER_STRUCT.unpack_from(filepointer.read(offset))
fpr, n_hashes, n_bits = self._get_optimized_params(est_els, fpr)
self._set_values(est_els, fpr, n_hashes, n_bits, hash_function)
# setup a few additional items
self.__file_pointer = open(filepath, "r+b") # type: ignore
self._bloom = mmap(self.__file_pointer.fileno(), 0) # type: ignore
self._on_disk = True
def add_alt(self, hashes: HashResultsT) -> None:
super().add_alt(hashes)
self.__update()
@classmethod
def frombytes(cls, b: ByteString, hash_function: Union[HashFuncT, None] = None) -> "BloomFilterOnDisk":
"""
Raises: NotSupportedError
"""
msg = "Loading from bytes is currently not supported by the on disk Bloom Filter"
raise NotSupportedError(msg)
_EXPECTED_ELM_STRUCT = Struct("Q")
_UPDATE_OFFSET = Struct("Qf")
def _get_element(self, idx: int) -> int:
"""wrappper to use similar functions always!"""
return int(self._IMPT_STRUCT.unpack(bytes([self._bloom[idx]]))[0])
def __update(self):
"""update the on disk Bloom Filter and ensure everything is out to disk"""
self._bloom.flush()
self.__file_pointer.seek(-self._UPDATE_OFFSET.size, os.SEEK_END)
self.__file_pointer.write(self._EXPECTED_ELM_STRUCT.pack(self.elements_added))
self.__file_pointer.flush()
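# Illustrative usage sketch for BloomFilterOnDisk (not part of the original
# module; "filter.blm" is an arbitrary example path):
#
#   blm = BloomFilterOnDisk("filter.blm", est_elements=1000, false_positive_rate=0.05)
#   blm.add("google.com")
#   blm.check("google.com")   # True
#   blm.close()               # flush and release the memory-mapped file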
| barrust/pyprobables | probables/blooms/bloom.py | Python | mit | 26,708 |
'''
A displayed box bounding the operand for feedback to user.
'''
'''
Copyright 2010, 2011 Lloyd Konneker
This file is part of Pensool.
Pensool is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
'''
import morph.morph
import config
from decorators import *
# A singleton bounding box is in scheme
class BoundingBox(morph.morph.RectMorph):
'''
A drawable primitive.
Bounding box is feedback only, part of highlighting the focus.
Not a control, does not get events.
Not in the model, does not get printed.
Pointer near bounding box does not open a handle menu.
A handle menu does slide along the bounding box,
but a handle menu only opens on a component of a composite that a bounding box represents.
Is in the scheme.
'''
def __init__(self):
super(BoundingBox, self).__init__()
self.style.color = (0, 40, 40) # greenish blue
self.activated = False
@view_altering
@dump_event
def activate(self, direction, rect=None):
'''
Activate: make visible at given rect in DCS.
Does not receive events.
Rect is given for direction == True.
'''
if direction:
# Special case: if rect is zero, do nothing.
# It wouldn't be visible and it gives assertion errors later.
# This happens if the model is empty.
if rect.width == 0 and rect.height == 0:
return
# Set transform to make the DCS rect bounding box passed in.
# TODO is this the correct call? Supposedly set_dimensions is only for testing.
self.set_dimensions(rect)
# While the bounding box is visible, user cannot change view
# so bounding box need not be a transformed drawable.
config.scheme.transformed_controls.append(self)
self.activated = True
elif self.activated:
# Deactivate
self.activated = False
config.scheme.transformed_controls.remove(self)
| bootchk/pensool | source/gui/boundingbox.py | Python | gpl-3.0 | 2,060 |