code
stringlengths 3
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 3
1.05M
|
---|---|---|---|---|---|
#-*- coding: utf-8 -*-
# Author: Matt Earnshaw <[email protected]>
from __future__ import absolute_import
import os
import sys
import sunpy
from PyQt4.QtGui import QApplication
from sunpy.gui.mainwindow import MainWindow
from sunpy.io.file_tools import UnrecognizedFileTypeError
class Plotman(object):
    """ Wraps a MainWindow so PlotMan instances can be created via the CLI.

    Examples
    --------
    from sunpy.gui import Plotman
    plots = Plotman("data/examples")
    plots.show()
    """

    def __init__(self, *paths):
        """ *paths: directories containing FITS paths
            or FITS paths to be opened in PlotMan """
        self.app = QApplication(sys.argv)
        self.main = MainWindow()
        self.open_files(paths)

    def open_files(self, inputs):
        """Open a tab for every supported file in *inputs*.

        Each input may be a file path or a directory; directories are
        scanned (non-recursively) for files with a supported extension.
        Raises IOError for a path that is neither a file nor a directory.
        Files the io layer cannot parse are silently skipped.
        """
        VALID_EXTENSIONS = [".jp2", ".fits", ".fts"]

        to_open = []
        # Determine files to process
        for input_ in inputs:
            if os.path.isfile(input_):
                to_open.append(input_)
            elif os.path.isdir(input_):
                for file_ in os.listdir(input_):
                    # Bug fix: os.listdir returns bare names, so they must
                    # be joined with the directory to form a usable path
                    # (the old code appended the bare name, which only
                    # worked if the CWD happened to be the directory).
                    to_open.append(os.path.join(input_, file_))
            else:
                raise IOError("Path " + input_ + " does not exist.")

        # Load files
        for filepath in to_open:
            name, ext = os.path.splitext(filepath) #pylint: disable=W0612
            if ext.lower() in VALID_EXTENSIONS:
                try:
                    self.main.add_tab(filepath, os.path.basename(filepath))
                except UnrecognizedFileTypeError:
                    # Extension looked right but the file is not parseable;
                    # skip it rather than abort the whole batch.
                    pass

    def show(self):
        """Show the main window and enter the Qt event loop (blocks)."""
        self.main.show()
        self.app.exec_()
if __name__=="__main__":
    # Demo entry point: open the bundled AIA 171 sample image in the
    # plot manager and block in the Qt event loop.
    from sunpy.gui import Plotman
    plots = Plotman(sunpy.AIA_171_IMAGE)
    plots.show()
| mjm159/sunpy | sunpy/gui/__init__.py | Python | bsd-2-clause | 1,821 |
import os
from ..cache import set_cache, get_cache
from ..show_error import show_error
from .vcs_upgrader import VcsUpgrader
class HgUpgrader(VcsUpgrader):

    """
    Allows upgrading a local mercurial-repository-based package
    """

    cli_name = 'hg'

    def retrieve_binary(self):
        """
        Returns the path to the hg executable

        :return: The string path to the executable or False on error
        """

        exe_name = 'hg'
        if os.name == 'nt':
            exe_name += '.exe'

        hg_path = self.find_binary(exe_name)
        if hg_path:
            return hg_path

        # Executable could not be located; tell the user how to configure it.
        show_error(
            u'''
            Unable to find %s.
            Please set the "hg_binary" setting by accessing the
            Preferences > Package Settings > Package Control > Settings
            \u2013 User menu entry.
            The Settings \u2013 Default entry can be used for reference,
            but changes to that will be overwritten upon next upgrade.
            ''',
            exe_name
        )
        return False

    def run(self):
        """
        Updates the repository with remote changes

        :return: False or error, or True on success
        """

        hg_path = self.retrieve_binary()
        if not hg_path:
            return False

        # e.g. "hg pull --update default" depending on the configured command
        command = [hg_path] + list(self.update_command) + ['default']
        self.execute(command, self.working_copy, meaningful_output=True)
        return True

    def incoming(self):
        """:return: bool if remote revisions are available"""

        cache_key = self.working_copy + '.incoming'
        cached = get_cache(cache_key)
        if cached is not None:
            return cached

        hg_path = self.retrieve_binary()
        if not hg_path:
            return False

        # "hg in -q" prints one line per pending changeset, nothing when
        # the working copy is up to date.
        output = self.execute(
            [hg_path, 'in', '-q', 'default'],
            self.working_copy,
            meaningful_output=True
        )
        if output is False:
            return False

        has_incoming = len(output) > 0
        set_cache(cache_key, has_incoming, self.cache_length)
        return has_incoming

    def latest_commit(self):
        """
        :return:
            The latest commit hash
        """

        hg_path = self.retrieve_binary()
        if not hg_path:
            return False

        revision = self.execute([hg_path, 'id', '-i'], self.working_copy)
        if revision is False:
            return False
        return revision.strip()
| herove/dotfiles | sublime/Packages/Package Control/package_control/upgraders/hg_upgrader.py | Python | mit | 2,518 |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for ImportAgent
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-dialogflow
# [START dialogflow_generated_dialogflow_v2beta1_Agents_ImportAgent_sync]
from google.cloud import dialogflow_v2beta1
def sample_import_agent():
    """Import an agent into Dialogflow (v2beta1) and wait for completion.

    Generated illustrative sample: "agent_uri_value" / "parent_value" are
    placeholders that must be replaced with a real GCS URI and project
    parent before this can run.  import_agent returns a long-running
    operation; operation.result() blocks until it finishes.
    """
    # Create a client
    client = dialogflow_v2beta1.AgentsClient()
    # Initialize request argument(s)
    request = dialogflow_v2beta1.ImportAgentRequest(
        agent_uri="agent_uri_value",
        parent="parent_value",
    )
    # Make the request
    operation = client.import_agent(request=request)
    print("Waiting for operation to complete...")
    # Blocks until the long-running operation resolves (or raises).
    response = operation.result()
    # Handle the response
    print(response)
# [END dialogflow_generated_dialogflow_v2beta1_Agents_ImportAgent_sync]
| googleapis/python-dialogflow | samples/generated_samples/dialogflow_generated_dialogflow_v2beta1_agents_import_agent_sync.py | Python | apache-2.0 | 1,600 |
# -*- coding: utf-8 -*-
class SSH(object):
    """ This class provide ssh-related methods to Bitbucket objects."""

    # Endpoint templates merged into the owning Bitbucket object's URL map.
    URLS = {
        # SSH keys
        'GET_SSH_KEYS': 'ssh-keys/',
        'GET_SSH_KEY': 'ssh-keys/%(key_id)s',
        'SET_SSH_KEY': 'ssh-keys/',
        'DELETE_SSH_KEY': 'ssh-keys/%(key_id)s',
    }

    def __init__(self, bitbucket):
        self.bitbucket = bitbucket
        self.bitbucket.URLS.update(self.URLS)

    def all(self):
        """Return every ssh key registered on the account."""
        endpoint = self.bitbucket.url('GET_SSH_KEYS')
        return self.bitbucket.dispatch('GET', endpoint, auth=self.bitbucket.auth)

    def get(self, key_id=None):
        """Return a single ssh key identified by *key_id*."""
        endpoint = self.bitbucket.url('GET_SSH_KEY', key_id=key_id)
        return self.bitbucket.dispatch('GET', endpoint, auth=self.bitbucket.auth)

    def create(self, key=None, label=None):
        """Register an ssh key on the account and return it."""
        # Coerce the key to a string before posting it.
        key = '%s' % key
        endpoint = self.bitbucket.url('SET_SSH_KEY')
        return self.bitbucket.dispatch(
            'POST', endpoint, auth=self.bitbucket.auth, key=key, label=label)

    def delete(self, key_id=None):
        """Remove an ssh key from the account.

        Please use with caution as there is NO confirmation and NO undo.
        """
        endpoint = self.bitbucket.url('DELETE_SSH_KEY', key_id=key_id)
        return self.bitbucket.dispatch('DELETE', endpoint, auth=self.bitbucket.auth)
| affinitic/BitBucket-api | bitbucket/ssh.py | Python | isc | 1,544 |
# fly ArduPlane in SIL
import util, pexpect, sys, time, math, shutil, os
from common import *
from pymavlink import mavutil
import random
# get location of scripts
testdir=os.path.dirname(os.path.realpath(__file__))

# Simulated home position: lat,lon,alt(m),heading(deg).
HOME_LOCATION='-35.362938,149.165085,585,354'
WIND="0,180,0.2" # speed,direction,variance

# Set by fly_ArduPlane() once a GPS fix is obtained; read by the flight tests.
homeloc = None
def takeoff(mavproxy, mav):
    '''Hand-fly a takeoff in FBWA and climb to ~150m above home.

    Returns True on success, False if the climb times out.
    (NOTE(review): the old docstring said 30m, but the wait_altitude
    target below is homeloc.alt+150..180.)
    '''
    # wait for EKF to settle
    wait_seconds(mav, 15)

    mavproxy.send('arm throttle\n')
    mavproxy.expect('ARMED')

    mavproxy.send('switch 4\n')
    wait_mode(mav, 'FBWA')

    # some rudder to counteract the prop torque
    mavproxy.send('rc 4 1700\n')

    # some up elevator to keep the tail down
    mavproxy.send('rc 2 1200\n')

    # get it moving a bit first
    mavproxy.send('rc 3 1300\n')
    mav.recv_match(condition='VFR_HUD.groundspeed>6', blocking=True)

    # a bit faster again, straighten rudder
    mavproxy.send('rc 3 1600\n')
    mavproxy.send('rc 4 1500\n')
    mav.recv_match(condition='VFR_HUD.groundspeed>12', blocking=True)

    # hit the gas harder now, and give it some more elevator
    mavproxy.send('rc 2 1100\n')
    mavproxy.send('rc 3 2000\n')

    # gain a bit of altitude
    if not wait_altitude(mav, homeloc.alt+150, homeloc.alt+180, timeout=30):
        return False

    # level off
    mavproxy.send('rc 2 1500\n')

    print("TAKEOFF COMPLETE")
    return True
def fly_left_circuit(mavproxy, mav):
    '''Fly a left-hand circuit: four 90-degree left turns with ~100m legs.

    Returns True on success.  (NOTE(review): the old docstring claimed
    200m sides; wait_distance below uses 100m.)
    '''
    mavproxy.send('switch 4\n')
    wait_mode(mav, 'FBWA')
    mavproxy.send('rc 3 2000\n')
    if not wait_level_flight(mavproxy, mav):
        return False

    print("Flying left circuit")
    # do 4 turns
    for i in range(0,4):
        # hard left
        print("Starting turn %u" % i)
        mavproxy.send('rc 1 1000\n')
        # headings step down 270, 180, 90, 0 for a left-hand pattern
        if not wait_heading(mav, 270 - (90*i), accuracy=10):
            return False
        mavproxy.send('rc 1 1500\n')
        print("Starting leg %u" % i)
        if not wait_distance(mav, 100, accuracy=20):
            return False
    print("Circuit complete")
    return True
def fly_RTL(mavproxy, mav):
    '''Switch to RTL and wait until the aircraft reaches home.

    Expects arrival within 120m horizontally and 20m of home alt + 100m,
    within 180 seconds.  Returns True on success.
    '''
    print("Flying home in RTL")
    mavproxy.send('switch 2\n')
    wait_mode(mav, 'RTL')
    if not wait_location(mav, homeloc, accuracy=120,
                         target_altitude=homeloc.alt+100, height_accuracy=20,
                         timeout=180):
        return False
    print("RTL Complete")
    return True
def fly_LOITER(mavproxy, mav, num_circles=4):
    '''Loiter in place for *num_circles* turns, checking altitude hold.

    A full circle is counted by seeing heading pass 0 then 180 degrees.
    Fails if altitude drifts more than 20m over the loiter.
    '''
    print("Testing LOITER for %u turns" % num_circles)
    mavproxy.send('loiter\n')
    wait_mode(mav, 'LOITER')

    m = mav.recv_match(type='VFR_HUD', blocking=True)
    initial_alt = m.alt
    print("Initial altitude %u\n" % initial_alt)

    while num_circles > 0:
        # one 0-crossing plus one 180-crossing == one full circle
        if not wait_heading(mav, 0, accuracy=10, timeout=60):
            return False
        if not wait_heading(mav, 180, accuracy=10, timeout=60):
            return False
        num_circles -= 1
        print("Loiter %u circles left" % num_circles)

    m = mav.recv_match(type='VFR_HUD', blocking=True)
    final_alt = m.alt
    print("Final altitude %u initial %u\n" % (final_alt, initial_alt))

    mavproxy.send('mode FBWA\n')
    wait_mode(mav, 'FBWA')

    if abs(final_alt - initial_alt) > 20:
        print("Failed to maintain altitude")
        return False

    print("Completed Loiter OK")
    return True
def fly_CIRCLE(mavproxy, mav, num_circles=1):
    '''Circle in place for *num_circles* turns, checking altitude hold.

    Same structure as fly_LOITER but exercises CIRCLE mode; fails if
    altitude drifts more than 20m.
    '''
    print("Testing CIRCLE for %u turns" % num_circles)
    mavproxy.send('mode CIRCLE\n')
    wait_mode(mav, 'CIRCLE')

    m = mav.recv_match(type='VFR_HUD', blocking=True)
    initial_alt = m.alt
    print("Initial altitude %u\n" % initial_alt)

    while num_circles > 0:
        # one 0-crossing plus one 180-crossing == one full circle
        if not wait_heading(mav, 0, accuracy=10, timeout=60):
            return False
        if not wait_heading(mav, 180, accuracy=10, timeout=60):
            return False
        num_circles -= 1
        print("CIRCLE %u circles left" % num_circles)

    m = mav.recv_match(type='VFR_HUD', blocking=True)
    final_alt = m.alt
    print("Final altitude %u initial %u\n" % (final_alt, initial_alt))

    mavproxy.send('mode FBWA\n')
    wait_mode(mav, 'FBWA')

    if abs(final_alt - initial_alt) > 20:
        print("Failed to maintain altitude")
        return False

    print("Completed CIRCLE OK")
    return True
def wait_level_flight(mavproxy, mav, accuracy=5, timeout=30):
    '''Centre the sticks and wait until roll and pitch are within
    *accuracy* degrees of level, or *timeout* sim-seconds elapse.

    Returns True once level, False on timeout.
    '''
    tstart = get_sim_time(mav)
    print("Waiting for level flight")
    # neutralise aileron, elevator and rudder
    mavproxy.send('rc 1 1500\n')
    mavproxy.send('rc 2 1500\n')
    mavproxy.send('rc 4 1500\n')
    while get_sim_time(mav) < tstart + timeout:
        m = mav.recv_match(type='ATTITUDE', blocking=True)
        roll = math.degrees(m.roll)
        pitch = math.degrees(m.pitch)
        print("Roll=%.1f Pitch=%.1f" % (roll, pitch))
        if math.fabs(roll) <= accuracy and math.fabs(pitch) <= accuracy:
            print("Attained level flight")
            return True
    print("Failed to attain level flight")
    return False
def change_altitude(mavproxy, mav, altitude, accuracy=30):
    '''Climb or descend in FBWA to *altitude* (+/- accuracy/2), then
    return via wait_level_flight.
    '''
    mavproxy.send('mode FBWA\n')
    wait_mode(mav, 'FBWA')
    alt_error = mav.messages['VFR_HUD'].alt - altitude
    # above target -> high ch2 PWM, below -> low ch2 PWM
    # (assumes ch2 high pushes the nose down -- TODO confirm radio setup)
    if alt_error > 0:
        mavproxy.send('rc 2 2000\n')
    else:
        mavproxy.send('rc 2 1000\n')
    if not wait_altitude(mav, altitude-accuracy/2, altitude+accuracy/2):
        return False
    # centre elevator once in the target band
    mavproxy.send('rc 2 1500\n')
    print("Reached target altitude at %u" % mav.messages['VFR_HUD'].alt)
    return wait_level_flight(mavproxy, mav)
def axial_left_roll(mavproxy, mav, count=1):
    '''Fly *count* left axial rolls in MANUAL mode, then recover in FBWA.'''
    # full throttle!
    mavproxy.send('rc 3 2000\n')
    if not change_altitude(mavproxy, mav, homeloc.alt+300):
        return False

    # fly the roll in manual
    mavproxy.send('switch 6\n')
    wait_mode(mav, 'MANUAL')

    while count > 0:
        print("Starting roll")
        mavproxy.send('rc 1 1000\n')
        # track roll angle through -150 -> +150 -> 0 to confirm a full roll
        if not wait_roll(mav, -150, accuracy=90):
            mavproxy.send('rc 1 1500\n')
            return False
        if not wait_roll(mav, 150, accuracy=90):
            mavproxy.send('rc 1 1500\n')
            return False
        if not wait_roll(mav, 0, accuracy=90):
            mavproxy.send('rc 1 1500\n')
            return False
        count -= 1

    # back to FBWA
    mavproxy.send('rc 1 1500\n')
    mavproxy.send('switch 4\n')
    wait_mode(mav, 'FBWA')
    mavproxy.send('rc 3 1700\n')
    return wait_level_flight(mavproxy, mav)
def inside_loop(mavproxy, mav, count=1):
    '''Fly *count* inside loops in MANUAL mode, then recover in FBWA.'''
    # full throttle!
    mavproxy.send('rc 3 2000\n')
    if not change_altitude(mavproxy, mav, homeloc.alt+300):
        return False

    # fly the loop in manual
    mavproxy.send('switch 6\n')
    wait_mode(mav, 'MANUAL')

    while count > 0:
        print("Starting loop")
        mavproxy.send('rc 2 1000\n')
        # pitch passes -60 then back through level as the loop completes
        if not wait_pitch(mav, -60, accuracy=20):
            return False
        if not wait_pitch(mav, 0, accuracy=20):
            return False
        count -= 1

    # back to FBWA
    mavproxy.send('rc 2 1500\n')
    mavproxy.send('switch 4\n')
    wait_mode(mav, 'FBWA')
    mavproxy.send('rc 3 1700\n')
    return wait_level_flight(mavproxy, mav)
def test_stabilize(mavproxy, mav, count=1):
    '''Fly a full roll in STABILIZE mode and check the aircraft recovers.

    NOTE(review): the *count* parameter is immediately overwritten with 1
    below, so passing a different value has no effect -- confirm intent.
    '''
    # full throttle!
    mavproxy.send('rc 3 2000\n')
    mavproxy.send('rc 2 1300\n')
    if not change_altitude(mavproxy, mav, homeloc.alt+300):
        return False
    mavproxy.send('rc 2 1500\n')

    mavproxy.send("mode STABILIZE\n")
    wait_mode(mav, 'STABILIZE')

    count = 1
    while count > 0:
        print("Starting roll")
        mavproxy.send('rc 1 2000\n')
        if not wait_roll(mav, -150, accuracy=90):
            return False
        if not wait_roll(mav, 150, accuracy=90):
            return False
        if not wait_roll(mav, 0, accuracy=90):
            return False
        count -= 1

    mavproxy.send('rc 1 1500\n')
    # STABILIZE should bring the wings level once the stick is centred
    if not wait_roll(mav, 0, accuracy=5):
        return False

    # back to FBWA
    mavproxy.send('mode FBWA\n')
    wait_mode(mav, 'FBWA')
    mavproxy.send('rc 3 1700\n')
    return wait_level_flight(mavproxy, mav)
def test_acro(mavproxy, mav, count=1):
    '''Fly a roll and two loops in ACRO mode, recovering in FBWA after each.

    NOTE(review): the *count* parameter is overwritten (1 for the roll,
    2 for the loops), so the argument has no effect -- confirm intent.
    '''
    # full throttle!
    mavproxy.send('rc 3 2000\n')
    mavproxy.send('rc 2 1300\n')
    if not change_altitude(mavproxy, mav, homeloc.alt+300):
        return False
    mavproxy.send('rc 2 1500\n')

    mavproxy.send("mode ACRO\n")
    wait_mode(mav, 'ACRO')

    count = 1
    while count > 0:
        print("Starting roll")
        mavproxy.send('rc 1 1000\n')
        if not wait_roll(mav, -150, accuracy=90):
            return False
        if not wait_roll(mav, 150, accuracy=90):
            return False
        if not wait_roll(mav, 0, accuracy=90):
            return False
        count -= 1
    mavproxy.send('rc 1 1500\n')

    # back to FBWA
    mavproxy.send('mode FBWA\n')
    wait_mode(mav, 'FBWA')

    wait_level_flight(mavproxy, mav)

    mavproxy.send("mode ACRO\n")
    wait_mode(mav, 'ACRO')

    count = 2
    while count > 0:
        print("Starting loop")
        mavproxy.send('rc 2 1000\n')
        if not wait_pitch(mav, -60, accuracy=20):
            return False
        if not wait_pitch(mav, 0, accuracy=20):
            return False
        count -= 1

    mavproxy.send('rc 2 1500\n')

    # back to FBWA
    mavproxy.send('mode FBWA\n')
    wait_mode(mav, 'FBWA')
    mavproxy.send('rc 3 1700\n')
    return wait_level_flight(mavproxy, mav)
def test_FBWB(mavproxy, mav, count=1, mode='FBWB'):
    '''Fly a right circuit with aileron and a left circuit with rudder in
    FBWB (or CRUISE via *mode*), verifying the mode holds altitude.
    '''
    mavproxy.send("mode %s\n" % mode)
    wait_mode(mav, mode)
    mavproxy.send('rc 3 1700\n')
    mavproxy.send('rc 2 1500\n')

    # lock in the altitude by asking for an altitude change then releasing
    mavproxy.send('rc 2 1000\n')
    wait_distance(mav, 50, accuracy=20)
    mavproxy.send('rc 2 1500\n')
    wait_distance(mav, 50, accuracy=20)

    m = mav.recv_match(type='VFR_HUD', blocking=True)
    initial_alt = m.alt
    print("Initial altitude %u\n" % initial_alt)

    print("Flying right circuit")
    # do 4 turns
    for i in range(0,4):
        # hard left
        print("Starting turn %u" % i)
        mavproxy.send('rc 1 1800\n')
        # right-hand circuit: headings step up 0, 90, 180, 270
        if not wait_heading(mav, 0 + (90*i), accuracy=20, timeout=60):
            mavproxy.send('rc 1 1500\n')
            return False
        mavproxy.send('rc 1 1500\n')
        print("Starting leg %u" % i)
        if not wait_distance(mav, 100, accuracy=20):
            return False
    print("Circuit complete")

    print("Flying rudder left circuit")
    # do 4 turns
    for i in range(0,4):
        # hard left
        print("Starting turn %u" % i)
        mavproxy.send('rc 4 1900\n')
        # left-hand circuit: headings step down 360, 270, 180, 90
        if not wait_heading(mav, 360 - (90*i), accuracy=20, timeout=60):
            mavproxy.send('rc 4 1500\n')
            return False
        mavproxy.send('rc 4 1500\n')
        print("Starting leg %u" % i)
        if not wait_distance(mav, 100, accuracy=20):
            return False
    print("Circuit complete")

    m = mav.recv_match(type='VFR_HUD', blocking=True)
    final_alt = m.alt
    print("Final altitude %u initial %u\n" % (final_alt, initial_alt))

    # back to FBWA
    mavproxy.send('mode FBWA\n')
    wait_mode(mav, 'FBWA')

    if abs(final_alt - initial_alt) > 20:
        print("Failed to maintain altitude")
        return False

    return wait_level_flight(mavproxy, mav)
def setup_rc(mavproxy):
    '''setup RC override control'''
    # Centre every flight control channel, then set throttle low and the
    # mode channel (8) high.
    neutral_channels = (1, 2, 4, 5, 6, 7)
    for channel in neutral_channels:
        mavproxy.send('rc %u 1500\n' % channel)
    mavproxy.send('rc 3 1000\n')  # throttle at idle
    mavproxy.send('rc 8 1800\n')  # mode switch position
def fly_mission(mavproxy, mav, filename, height_accuracy=-1, target_altitude=None):
    '''Load the waypoint file *filename* and fly it in AUTO mode.

    NOTE(review): height_accuracy and target_altitude are accepted but
    never used in this body -- confirm whether they should be passed to
    wait_waypoint.
    '''
    global homeloc
    print("Flying mission %s" % filename)
    mavproxy.send('wp load %s\n' % filename)
    mavproxy.expect('flight plan received')
    mavproxy.send('wp list\n')
    mavproxy.expect('Requesting [0-9]+ waypoints')
    mavproxy.send('switch 1\n') # auto mode
    wait_mode(mav, 'AUTO')
    if not wait_waypoint(mav, 1, 7, max_dist=60):
        return False
    # mission should end with the aircraft stopped (landed)
    if not wait_groundspeed(mav, 0, 0.5, timeout=60):
        return False
    print("Mission OK")
    return True
def fly_ArduPlane(viewerip=None, map=False):
'''fly ArduPlane in SIL
you can pass viewerip as an IP address to optionally send fg and
mavproxy packets too for local viewing of the flight in real time
'''
global homeloc
options = '--sitl=127.0.0.1:5501 --out=127.0.0.1:19550 --streamrate=10'
if viewerip:
options += " --out=%s:14550" % viewerip
if map:
options += ' --map'
cmd = util.reltopdir("Tools/autotest/jsb_sim/runsim.py")
cmd += " --speedup=100 --home=%s --wind=%s" % (HOME_LOCATION, WIND)
if viewerip:
cmd += " --fgout=%s:5503" % viewerip
runsim = pexpect.spawn(cmd, timeout=10)
runsim.delaybeforesend = 0
util.pexpect_autoclose(runsim)
runsim.expect('Simulator ready to fly')
sil = util.start_SIL('ArduPlane', wipe=True)
print("Starting MAVProxy")
mavproxy = util.start_MAVProxy_SIL('ArduPlane', options=options)
util.expect_setup_callback(mavproxy, expect_callback)
mavproxy.expect('Logging to (\S+)')
mavproxy.expect('Received [0-9]+ parameters',timeout=3000)
# setup test parameters
mavproxy.send("param load %s/ArduPlane.parm\n" % testdir)
mavproxy.expect('Loaded [0-9]+ parameters')
mavproxy.send("param fetch\n")
# restart with new parms
util.pexpect_close(mavproxy)
util.pexpect_close(sil)
util.pexpect_close(runsim)
runsim = pexpect.spawn(cmd, logfile=sys.stdout, timeout=10)
runsim.delaybeforesend = 0
util.pexpect_autoclose(runsim)
runsim.expect('Simulator ready to fly')
sil = util.start_SIL('ArduPlane')
mavproxy = util.start_MAVProxy_SIL('ArduPlane', options=options)
mavproxy.expect('Logging to (\S+)')
logfile = mavproxy.match.group(1)
print("LOGFILE %s" % logfile)
buildlog = util.reltopdir("../buildlogs/ArduPlane-test.tlog")
print("buildlog=%s" % buildlog)
if os.path.exists(buildlog):
os.unlink(buildlog)
try:
os.link(logfile, buildlog)
except Exception:
pass
util.expect_setup_callback(mavproxy, expect_callback)
mavproxy.expect('Received [0-9]+ parameters')
expect_list_clear()
expect_list_extend([runsim, sil, mavproxy])
print("Started simulator")
# get a mavlink connection going
try:
mav = mavutil.mavlink_connection('127.0.0.1:19550', robust_parsing=True)
except Exception, msg:
print("Failed to start mavlink connection on 127.0.0.1:19550" % msg)
raise
mav.message_hooks.append(message_hook)
mav.idle_hooks.append(idle_hook)
failed = False
e = 'None'
try:
print("Waiting for a heartbeat with mavlink protocol %s" % mav.WIRE_PROTOCOL_VERSION)
mav.wait_heartbeat()
print("Setting up RC parameters")
setup_rc(mavproxy)
print("Waiting for GPS fix")
mav.recv_match(condition='VFR_HUD.alt>10', blocking=True)
mav.wait_gps_fix()
while mav.location().alt < 10:
mav.wait_gps_fix()
homeloc = mav.location()
print("Home location: %s" % homeloc)
if not takeoff(mavproxy, mav):
print("Failed takeoff")
failed = True
if not fly_left_circuit(mavproxy, mav):
print("Failed left circuit")
failed = True
if not axial_left_roll(mavproxy, mav, 1):
print("Failed left roll")
failed = True
if not inside_loop(mavproxy, mav):
print("Failed inside loop")
failed = True
if not test_stabilize(mavproxy, mav):
print("Failed stabilize test")
failed = True
if not test_acro(mavproxy, mav):
print("Failed ACRO test")
failed = True
if not test_FBWB(mavproxy, mav):
print("Failed FBWB test")
failed = True
if not test_FBWB(mavproxy, mav, mode='CRUISE'):
print("Failed CRUISE test")
failed = True
if not fly_RTL(mavproxy, mav):
print("Failed RTL")
failed = True
if not fly_LOITER(mavproxy, mav):
print("Failed LOITER")
failed = True
if not fly_CIRCLE(mavproxy, mav):
print("Failed CIRCLE")
failed = True
if not fly_mission(mavproxy, mav, os.path.join(testdir, "ap1.txt"), height_accuracy = 10,
target_altitude=homeloc.alt+100):
print("Failed mission")
failed = True
if not log_download(mavproxy, mav, util.reltopdir("../buildlogs/ArduPlane-log.bin")):
print("Failed log download")
failed = True
except pexpect.TIMEOUT, e:
print("Failed with timeout")
failed = True
mav.close()
util.pexpect_close(mavproxy)
util.pexpect_close(sil)
util.pexpect_close(runsim)
if os.path.exists('ArduPlane-valgrind.log'):
os.chmod('ArduPlane-valgrind.log', 0644)
shutil.copy("ArduPlane-valgrind.log", util.reltopdir("../buildlogs/ArduPlane-valgrind.log"))
if failed:
print("FAILED: %s" % e)
return False
return True
| Yndal/ArduPilot-SensorPlatform | ardupilot/Tools/autotest/arduplane.py | Python | mit | 17,552 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Standard Django manage.py entry point (generated by startproject).
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ursa_rest_sqlserver.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        # Django is importable, so re-raise the original, unrelated error.
        raise
    execute_from_command_line(sys.argv)
| LowerSilesians/ursa-rest-sqlserver | ursa_rest_sqlserver/manage.py | Python | apache-2.0 | 817 |
import imaplib
import getpass
# Target IMAP server: cleartext IMAP on the standard port (143), not IMAPS.
IMAP_SERVER_ADDR = '127.0.0.1'
IMAP_SERVER_PORT = 143
def show_emails(server, port, username, password):
    """Log in over IMAP and print body + raw RFC822 text of every Inbox message.

    Python 2 only (print statements).  NOTE(review): imaplib raises
    imaplib.IMAP4.error on a failed login rather than returning a non-OK
    status, so the error branches below are best-effort.
    """
    mailbox = imaplib.IMAP4(server, port)
    resp, msg = mailbox.login(username, password)
    if resp != 'OK':
        print 'Auth error: %s' % msg[0]
        exit(1)
    resp, msg = mailbox.select('Inbox')
    if resp != 'OK':
        print 'Select mailbox error: %s' % msg[0]
        exit(1)
    # 'ALL' returns every message sequence number in the mailbox
    resp, data = mailbox.search(None, 'ALL')
    for num in data[0].split():
        # fetch the full message and, separately, just the body text
        resp, msg = mailbox.fetch(num, '(RFC822)')
        resp, text = mailbox.fetch(num, '(BODY[TEXT])')
        print 'Message no. %s\n' % (num,)
        print 'Message body:'
        print text[0][1]
        print 'Whole message:'
        print msg[0][1]
        print '---------------'
    mailbox.close()
    mailbox.logout()
if __name__ == '__main__':
    # Prompt for credentials interactively (Python 2: raw_input).
    username = raw_input("Login: ")
    password = getpass.getpass(prompt="Password:")
    show_emails(IMAP_SERVER_ADDR, IMAP_SERVER_PORT, username, password)

| damianrusinek/classes-pas | email/get_emails.py | Python | mit | 965 |
from app import db
class Carrera(db.Model):
    """A degree program ("carrera"), with its credit rules, courses and
    orientations attached as relationships."""
    __tablename__ = 'carrera'
    id = db.Column(db.Integer, primary_key=True)
    # 4-character program code, unique across programs.
    codigo = db.Column(db.String(4), nullable=False, unique=True, server_default='')
    nombre = db.Column(db.String(50), nullable=False, server_default='')
    # Curriculum plan identifier (e.g. a year); may be empty.
    plan = db.Column(db.String(4), nullable=False, server_default='')
    duracion_estimada_en_cuatrimestres = db.Column(db.Integer, nullable=False)
    requiere_prueba_suficiencia_de_idioma = db.Column('requiere_prueba_suficiencia_de_idioma', db.Boolean(), nullable=False)
    creditos = db.relationship('Creditos', backref='carrera', lazy='dynamic')
    materias = db.relationship('Materia', backref='carrera', lazy='dynamic')
    orientaciones = db.relationship('Orientacion', lazy='dynamic')

    def __str__(self):
        # "CODE - Name"
        return "{} - {}".format(self.codigo, self.nombre)

    def get_descripcion_carrera(self):
        """Return a display string; appends "(Plan X)" only when a plan is set."""
        if not self.plan:
            return "{} - {}".format(self.codigo, self.nombre)
        return "{} - {} (Plan {})".format(self.codigo, self.nombre, self.plan)
class Creditos(db.Model):
    """Credit requirements for a Carrera, broken down by category
    (mandatory, electives, orientation, thesis / professional project)."""
    __tablename__ = 'creditos'
    id = db.Column(db.Integer, primary_key=True)
    creditos_obligatorias = db.Column(db.Integer, nullable=False)
    creditos_electivas_general = db.Column(db.Integer, nullable=False)
    creditos_orientacion = db.Column(db.Integer, nullable=False)
    creditos_electivas_con_tp = db.Column(db.Integer, nullable=False)
    creditos_electivas_con_tesis = db.Column(db.Integer, nullable=False)
    creditos_tesis = db.Column(db.Integer, nullable=False)
    creditos_tp_profesional = db.Column(db.Integer, nullable=False)
    carrera_id = db.Column(db.Integer, db.ForeignKey('carrera.id'))
class TipoMateria(db.Model):
    """Lookup table for course categories (referenced by Materia)."""
    __tablename__ = 'tipo_materia'
    id = db.Column(db.Integer, primary_key=True)
    descripcion = db.Column(db.String(50), nullable=False, server_default='')
class Materia(db.Model):
    """A course within a Carrera, with its credit value and the minimum
    accumulated credits required to enrol."""
    __tablename__ = 'materia'
    id = db.Column(db.Integer, primary_key=True)
    codigo = db.Column(db.String(4), nullable=False, server_default='')
    nombre = db.Column(db.String(80), nullable=False, server_default='')
    objetivos = db.Column(db.String(250), nullable=True, server_default='')
    # Credits the student must have earned before taking this course.
    creditos_minimos_para_cursarla = db.Column(db.Integer, nullable=False)
    # Credits this course is worth.
    creditos = db.Column(db.Integer, nullable=False)
    tipo_materia_id = db.Column(db.Integer, db.ForeignKey('tipo_materia.id'))
    carrera_id = db.Column(db.Integer, db.ForeignKey('carrera.id'))

    def __str__(self):
        return "{} - {}".format(self.codigo, self.nombre)
class Correlativas(db.Model):
    """
    Si la materia C tiene como correlativas a A y B,
    significa que A y B deben hacerse antes que C

    (Prerequisite edge: materia_correlativa_id must be completed before
    materia_id.)
    """
    __tablename__ = 'correlativas'
    id = db.Column(db.Integer, primary_key=True)
    materia_id = db.Column(db.Integer, db.ForeignKey('materia.id'))
    materia_correlativa_id = db.Column(db.Integer, db.ForeignKey('materia.id'))

    def __str__(self):
        return "La materia {} tiene como correlativa a {}".format(self.materia_id, self.materia_correlativa_id)
class MateriasIncompatibles(db.Model):
    """Pair of courses that cannot both be credited (incompatibility edge)."""
    __tablename__ = 'materias_incompatibles'
    id = db.Column(db.Integer, primary_key=True)
    materia_id = db.Column(db.Integer, db.ForeignKey('materia.id'))
    materia_incompatible_id = db.Column(db.Integer, db.ForeignKey('materia.id'))
class Orientacion(db.Model):
    """Specialization track ("orientación") belonging to a Carrera."""
    __tablename__ = 'orientacion'
    id = db.Column(db.Integer, primary_key=True)
    descripcion = db.Column(db.String(125), nullable=False, server_default='')
    # Short key used to refer to the orientation programmatically.
    clave_reducida = db.Column(db.String(50), nullable=False, server_default='')
    carrera_id = db.Column(db.Integer, db.ForeignKey('carrera.id'))
| jennywoites/MUSSA | MUSSA_Flask/app/models/carreras_models.py | Python | gpl-3.0 | 3,752 |
# python3
# ==============================================================================
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Cloud function to create and update entities in Dialogflow.
This module is an example how to create and update entities for Dialogflow.
"""
# Standard library
import os
from typing import Dict, List, Optional

# Third-party
import dialogflow_v2
import flask
def entities_builder(request: flask.Request):
    """HTTP Cloud Function that create and update entities in Dialogflow.
    Args:
        request (flask.Request): The request object. More info:
        <http://flask.pocoo.org/docs/1.0/api/#flask.Request>

    Dispatch: inline entity list -> create one by one; inline batch ->
    synchronous batch update; neither -> asynchronous batch update from
    the GCS bucket URI, with a completion callback.
    """
    # NOTE(review): get_json(silent=True) returns None for a missing/invalid
    # body, and Arguments(**None) would raise TypeError -- confirm callers
    # always POST a JSON object.
    request_json = request.get_json(silent=True)
    arguments = Arguments(**request_json)
    project_id = arguments.project_id
    client = get_dialogflow_client()
    parent = get_agent(client, project_id)
    if request_json and arguments.entities:
        # Create entities one by one.
        create_entities_type(client, arguments.entities, parent)
        return
    elif request_json and arguments.entities_batch:
        # Create in batch using entity_type_batch_inline.
        arguments.pre_process_entities_batch_name()
        client.batch_update_entity_types(
            parent=parent, entity_type_batch_inline=arguments.entities_batch)
        return
    else:
        # Create in batch using entity_type_batch_uri.
        response = client.batch_update_entity_types(
            parent=parent, entity_type_batch_uri=arguments.bucket)

        def callback(operation_future):
            """Returns a callback.
            This example uses futures for long-running operations returned from Google Cloud APIs.
            These futures are used asynchronously using callbacks and Operation.add_done_callback
            More info: https://googleapis.dev/python/google-api-core/1.14.3/futures.html
            """
            operation_future.result()

        response.add_done_callback(callback)
def create_entities_type(client, entities, parent):
    """Creates each entity type in *entities* under the given agent.

    Args:
        client: dialogflow_v2.EntityTypesClient
        entities: list of EntityTypes to create
        parent: fully-qualified project_agent string
    """
    for new_entity_type in entities:
        client.create_entity_type(parent, new_entity_type)
def get_dialogflow_client():
    """Returns the dialogflow entity types client.

    Credentials are resolved by the client library from the environment
    (Application Default Credentials).
    """
    return dialogflow_v2.EntityTypesClient()
def get_agent(client: dialogflow_v2.EntityTypesClient, project_id: str) -> str:
    """Returns a fully-qualified project_agent string
    ("projects/<project_id>/agent")."""
    return client.project_agent_path(project_id)
class Arguments:
    """Returns the arguments pass to the cloud function or default values.
    Args:
        entities: a list of EntityType
        entities_batch: a dict of EntityTypeBatch
        project_id: id of a project in GCP
        bucket: a URI to a Google Cloud Storage file containing entity types to update or create.
    """

    def __init__(self,
                 entities: Optional[List] = None,
                 entities_batch: Optional[Dict] = None,
                 project_id: str = '<project-id>',
                 bucket: str = 'gs://dialog_entities/entities.json'):
        """Initialize the cloud function with the information pass in the call.

        Bug fix: the defaults were the literals [] and {}, which Python
        evaluates once at definition time, so every Arguments() instance
        shared (and mutated) the same list/dict.  None sentinels give each
        instance fresh containers while keeping the call signature
        backward-compatible.
        """
        self.project_id = project_id
        self.entities = entities if entities is not None else []
        self.entities_batch = entities_batch if entities_batch is not None else {}
        self.bucket = bucket

    def pre_process_entities_batch_name(self):
        """Returns a fully qualify name of the entities name.
        The format is projects/<project-id>/agent/entityTypes/<entity-id>

        Rewrites each named entity type in the batch in place; entries
        without a 'name' key are left untouched.
        """
        for entity in self.entities_batch['entity_types']:
            if 'name' in entity:
                entity['name'] = os.path.join('projects', self.project_id,
                                              'agent/entityTypes',
                                              entity['name'])
| CloudVLab/professional-services | examples/dialogflow-entities-example/main.py | Python | apache-2.0 | 4,374 |
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
# Sanitizer
# Example valid input:
# 3
# 0 1 1
# 1 0 2
# 0 0 1
# (size, then size*size grid)
from sys import exit, stdin
from sol_reference import solveGomoku
if __name__ == '__main__':
    # Read grid size
    try:
        size = int(stdin.readline().strip())
    except:
        print "Line 1 must be size (an int)."
        exit(1)

    # Read grid
    curLine = 0
    wasEmpty = False
    grid = []
    for l in stdin:
        # Stop on empty line
        # NOTE(review): a non-empty line after a single blank line is still
        # parsed; the "extra data" error only fires on a second blank line.
        if l.strip():
            curLine += 1
        elif wasEmpty:
            # Check the rest of the file is empty
            print "Unexpected extra data at the end of the test case."
            exit(1)
        else:
            wasEmpty = True
            continue
        # Try to read line
        try:
            gridLine = map(int, l.split())
        except:
            print "Couldn't parse grid line #%d: `%s`." % (curLine, l.strip())
            exit(1)
        # Check size and contents
        if len(gridLine) != size:
            print "Wrong size for grid line #%d: %d instead of %d." % (curLine, len(gridLine), size)
            exit(1)
        for (i, p) in enumerate(gridLine):
            if p not in [0, 1, 2]:
                print "Wrong player for line #%d, pawn #%d: %d (must be 0, 1 or 2)." % (curLine, i+1, p)
                exit(1)
        grid.append(gridLine)
    # Check number of lines
    if curLine != size:
        print "Wrong number of grid lines: got %d lines, expected %d." % (curLine, size)
        exit(1)

    # All tests passed, we check for multiple solutions
    # (a valid test case must have at most one winning alignment)
    allAligns = solveGomoku(size, grid, findAll=True)
    if len(allAligns) > 1:
        print "Found multiple sets of 5 aligned pawns:"
        for (row, col, player) in allAligns:
            print "winning set from (%d, %d) for player %d" % (row+1, col+1, player)
        exit(1)
    exit(0)
| France-ioi/taskgrader | examples/taskTestchecker/tests/gen/sanitizer.py | Python | mit | 1,917 |
import pytest
from pluggy import PluginManager, PluginValidationError, HookimplMarker, HookspecMarker
# Decorator markers used to declare hook specifications and hook
# implementations under the "example" project namespace in these tests.
hookspec = HookspecMarker("example")
hookimpl = HookimplMarker("example")
def test_argmismatch(pm: PluginManager) -> None:
    """Registering a hookimpl whose argument names do not match the
    hookspec raises PluginValidationError naming the bad argument."""
    class Api:
        @hookspec
        def hello(self, arg):
            "api hook 1"
    pm.add_hookspecs(Api)
    class Plugin:
        @hookimpl
        def hello(self, argwrong):
            pass
    with pytest.raises(PluginValidationError) as exc:
        pm.register(Plugin())
    # The offending argument name must appear in the error message.
    assert "argwrong" in str(exc.value)
def test_only_kwargs(pm: PluginManager) -> None:
    """Hook callers accept keyword arguments only; a positional call
    fails with TypeError."""
    class Api:
        @hookspec
        def hello(self, arg):
            "api hook 1"
    pm.add_hookspecs(Api)
    with pytest.raises(TypeError) as exc:
        pm.hook.hello(3)  # type: ignore[call-arg]
    message = "__call__() takes 1 positional argument but 2 were given"
    assert message in str(exc.value)
def test_opt_in_args(pm: PluginManager) -> None:
    """Verify that two hookimpls requesting mutually exclusive subsets
    of the spec's args can serve under the same spec.
    """
    class Api:
        @hookspec
        def hello(self, arg1, arg2, common_arg):
            "api hook 1"
    class Plugin1:
        @hookimpl
        def hello(self, arg1, common_arg):
            return arg1 + common_arg
    class Plugin2:
        @hookimpl
        def hello(self, arg2, common_arg):
            return arg2 + common_arg
    pm.add_hookspecs(Api)
    pm.register(Plugin1())
    pm.register(Plugin2())
    # LIFO call order: Plugin2 (arg2 + common_arg) produces the first result.
    results = pm.hook.hello(arg1=1, arg2=2, common_arg=0)
    assert results == [2, 1]
def test_call_order(pm: PluginManager) -> None:
    """Hookimpls run in LIFO registration order, and a hookwrapper
    observes the accumulated list result."""
    class Api:
        @hookspec
        def hello(self, arg):
            "api hook 1"
    pm.add_hookspecs(Api)
    class Plugin1:
        @hookimpl
        def hello(self, arg):
            return 1
    class Plugin2:
        @hookimpl
        def hello(self, arg):
            return 2
    class Plugin3:
        @hookimpl
        def hello(self, arg):
            return 3
    class Plugin4:
        @hookimpl(hookwrapper=True)
        def hello(self, arg):
            assert arg == 0
            outcome = yield
            # Results are collected last-registered-first.
            assert outcome.get_result() == [3, 2, 1]
    pm.register(Plugin1())
    pm.register(Plugin2())
    pm.register(Plugin3())
    pm.register(Plugin4())  # hookwrapper should get same list result
    res = pm.hook.hello(arg=0)
    assert res == [3, 2, 1]
def test_firstresult_definition(pm: PluginManager) -> None:
    """With firstresult=True the first non-None result (in LIFO call
    order) is returned as a scalar instead of a list."""
    class Api:
        @hookspec(firstresult=True)
        def hello(self, arg):
            "api hook 1"
    pm.add_hookspecs(Api)
    class Plugin1:
        @hookimpl
        def hello(self, arg):
            return arg + 1
    class Plugin2:
        @hookimpl
        def hello(self, arg):
            return arg - 1
    class Plugin3:
        @hookimpl
        def hello(self, arg):
            return None
    class Plugin4:
        @hookimpl(hookwrapper=True)
        def hello(self, arg):
            assert arg == 3
            outcome = yield
            assert outcome.get_result() == 2
    pm.register(Plugin1())  # discarded - not the last registered plugin
    pm.register(Plugin2())  # used as result
    pm.register(Plugin3())  # None result is ignored
    pm.register(Plugin4())  # hookwrapper should get same non-list result
    res = pm.hook.hello(arg=3)
    assert res == 2
def test_firstresult_force_result(pm: PluginManager) -> None:
    """Verify forcing a result in a wrapper."""
    class Api:
        @hookspec(firstresult=True)
        def hello(self, arg):
            "api hook 1"
    pm.add_hookspecs(Api)
    class Plugin1:
        @hookimpl
        def hello(self, arg):
            return arg + 1
    class Plugin2:
        @hookimpl(hookwrapper=True)
        def hello(self, arg):
            assert arg == 3
            outcome = yield
            assert outcome.get_result() == 4
            # Replace Plugin1's result (4) with a forced value.
            outcome.force_result(0)
    class Plugin3:
        @hookimpl
        def hello(self, arg):
            return None
    pm.register(Plugin1())
    pm.register(Plugin2())  # wrapper
    pm.register(Plugin3())  # ignored since returns None
    res = pm.hook.hello(arg=3)
    assert res == 0  # this result is forced and not a list
def test_firstresult_returns_none(pm: PluginManager) -> None:
    """If None results are returned by underlying implementations ensure
    the multi-call loop returns a None value.
    """
    class Api:
        @hookspec(firstresult=True)
        def hello(self, arg):
            "api hook 1"
    pm.add_hookspecs(Api)
    class Plugin1:
        @hookimpl
        def hello(self, arg):
            return None
    pm.register(Plugin1())
    # The lone implementation returned None, so the call yields None.
    res = pm.hook.hello(arg=3)
    assert res is None
def test_firstresult_no_plugin(pm: PluginManager) -> None:
    """If no implementations/plugins have been registered for a firstresult
    hook the multi-call loop should return a None value.
    """
    class Api:
        @hookspec(firstresult=True)
        def hello(self, arg):
            "api hook 1"
    pm.add_hookspecs(Api)
    # No plugins registered at all: the call still succeeds and yields None.
    res = pm.hook.hello(arg=3)
    assert res is None
def test_no_hookspec(pm: PluginManager) -> None:
    """A hook with hookimpls can still be called even if no hookspec
    was registered for it (and call_pending wasn't called to check
    against it).
    """
    class Plugin:
        @hookimpl
        def hello(self, arg):
            return "Plugin.hello"
    pm.register(Plugin())
    # With no spec to validate against, extra kwargs (extra=20) are accepted.
    assert pm.hook.hello(arg=10, extra=20) == ["Plugin.hello"]
| pytest-dev/pluggy | testing/test_invocations.py | Python | mit | 5,546 |
from . import Cl, conformalize
# Build the base 3D Euclidean algebra, then conformalize it (the
# conformal model adds two extra basis vectors).
layout_orig, blades_orig = Cl(3)
layout, blades, stuff = conformalize(layout_orig)
# Publish the blade objects and the conformal helpers returned by
# conformalize() as module attributes (module-level locals() is the
# module namespace, so update() is effective here).
locals().update(blades)
locals().update(stuff)
# for shorter reprs
layout.__name__ = 'layout'
layout.__module__ = __name__
| arsenovic/clifford | clifford/g3c.py | Python | bsd-3-clause | 240 |
# Copyright 2015-19 ForgeFlow S.L. -
# Jordi Ballester Alomar
# Copyright 2015-19 Serpent Consulting Services Pvt. Ltd. - Sudhir Arya
# Copyright 2018-19 ACSONE SA/NV
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import fields, models
class MisReportInstance(models.Model):
    """Add an operating-unit filter to MIS report instances."""
    _inherit = "mis.report.instance"
    operating_unit_ids = fields.Many2many(
        "operating.unit",
        string="Operating Unit",
    )
class MisReportInstancePeriod(models.Model):
    """Add an operating-unit filter to MIS report instance periods."""
    _inherit = "mis.report.instance.period"
    operating_unit_ids = fields.Many2many(
        "operating.unit",
        string="Operating Unit",
    )
    def _get_additional_move_line_filter(self):
        """Extend the account.move.line domain with the operating units
        configured on the report instance and/or on this period."""
        aml_domain = super(
            MisReportInstancePeriod, self
        )._get_additional_move_line_filter()
        # we need sudo because, imagine a user having access
        # to operating unit A, viewing a report with 3 columns
        # for OU A, B, C: in columns B and C, self.operating_unit_ids
        # would be empty for him, and the query on a.m.l would be only
        # restricted by the record rules (ie showing move lines
        # for OU A only). So the report would display values
        # for OU A in all 3 columns.
        sudoself = self.sudo()
        if sudoself.report_instance_id.operating_unit_ids:
            aml_domain.append(
                (
                    "operating_unit_id",
                    "in",
                    sudoself.report_instance_id.operating_unit_ids.ids,
                )
            )
        if sudoself.operating_unit_ids:
            aml_domain.append(
                ("operating_unit_id", "in", sudoself.operating_unit_ids.ids)
            )
        return aml_domain
| OCA/operating-unit | mis_builder_operating_unit/model/mis_builder.py | Python | agpl-3.0 | 1,748 |
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
    name='graphmap',
    version='0.0.6',
    description='Images on a quad graph. Create an infinite canvas',
    author='Abhishek Rao',
    author_email='[email protected]',
    url='https://github.com/abhishekraok/GraphMap',
    # NOTE(review): download_url still points at the v0.0.4 tag while
    # version above is 0.0.6 -- confirm the intended release tag.
    download_url='https://github.com/abhishekraok/GraphMap/archive/v0.0.4.tar.gz',
    packages=find_packages(exclude=('tests', 'docs'))
)
| abhishekraok/GraphMap | setup.py | Python | apache-2.0 | 456 |
# A set of regression tests for open issues
import cocotb
from cocotb.clock import Clock
from cocotb.triggers import RisingEdge, Timer, ReadOnly
from cocotb.result import TestFailure
from cocotb.binary import BinaryValue
@cocotb.test()
def issue_142_overflow_error(dut):
    """Transparently convert ints too long to pass
    through the GPI interface natively into BinaryValues"""
    cocotb.fork(Clock(dut.clk, 2500).start())
    def _compare(value):
        # Fail the test when the signal does not hold the expected value.
        if int(dut.stream_in_data_wide.value) != int(value):
            raise TestFailure("Expecting 0x%x but got 0x%x on %s" % (
                int(value), int(dut.stream_in_data_wide.value),
                str(dut.stream_in_data_wide)))
    # Wider values are transparently converted to BinaryValues
    for value in [0, 0x7FFFFFFF, 0x7FFFFFFFFFFF, BinaryValue(0x7FFFFFFFFFFFFF)]:
        # Assign through the overloaded '<=' operator ...
        dut.stream_in_data_wide <= value
        yield RisingEdge(dut.clk)
        _compare(value)
        # ... then through plain attribute assignment, expected to drive
        # the signal the same way.  NOTE(review): confirm '=' assigns the
        # signal rather than rebinding a Python attribute on dut.
        dut.stream_in_data_wide = value
        yield RisingEdge(dut.clk)
        _compare(value)
| mkreider/cocotb2 | tests/test_cases/issue_142/issue_142.py | Python | bsd-3-clause | 1,044 |
# -*- Mode: python; coding: utf-8; tab-width: 8; indent-tabs-mode: t; -*-
#
# Copyright (C) 2006 - Martin Szulecki
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# The Rhythmbox authors hereby grant permission for non-GPL compatible
# GStreamer plugins to be used and distributed together with GStreamer
# and Rhythmbox. This permission is above and beyond the permissions granted
# by the GPL license by which Rhythmbox is covered. If you modify this code
# you may extend this exception to your version of the code, but you are not
# obligated to do so. If you do not wish to do so, delete this exception
# statement from your version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
from gi.repository import RB
class PodcastCoverArtSearch (object):
    """Cover-art backend that resolves a podcast post's artwork from the
    image URL already stored on its feed entry (no external queries)."""

    def __init__(self):
        pass

    def search(self, db, entry, is_playing, on_search_completed_callback, *args):
        """Look up the feed image for *entry* and report it via the callback."""
        # Only podcast posts can carry feed artwork; report no result otherwise.
        podcast_post_type = db.entry_type_get_by_name("podcast-post")
        if entry.get_entry_type() != podcast_post_type:
            on_search_completed_callback(self, entry, None, *args)
            return
        # The post's SUBTITLE property holds the location of its feed entry.
        feed_location = entry.get_string(RB.RhythmDBPropType.SUBTITLE)
        feed_entry = db.entry_lookup_by_location(feed_location)
        # Hand the feed's IMAGE property straight back to the caller.
        image_url = feed_entry.get_string(RB.RhythmDBPropType.IMAGE)
        on_search_completed_callback(self, entry, image_url, *args)

    def search_next(self):
        # One lookup only; there is never a "next" result to fetch.
        return False

    def get_result_meta(self, search_results):
        return (None, None)

    def get_result_pixbuf(self, search_results):
        return None

    def get_best_match_urls(self, search_results):
        # search_results is the image URL collected by search().
        return [search_results]
| wangd/rhythmbox | plugins/artdisplay/PodcastCoverArtSearch.py | Python | gpl-2.0 | 2,278 |
from __future__ import print_function
import os
# NOTE(review): environment values are strings, so any non-empty DEBUG
# setting (even "0" or "false") makes this truthy -- confirm intended.
debug = os.environ.get("DEBUG", False)
# Default locations for reading inputs and writing outputs.
input_directory = "."
output_directory = "."
skip_shows = False
# How assertion failures are handled; "raise" presumably aborts --
# TODO confirm the other accepted values with this setting's consumers.
assertions = "raise"
show_timings = True
# Autodump/autodiff targets stay disabled until configured by the caller.
autodump = None
autodump_file = None
autodiff = None
| LouisePaulDelvaux/Til-Liam | src_liam/config.py | Python | gpl-3.0 | 247 |
# $Id: TestAll.py 1047 2009-01-15 14:48:58Z graham $
#
# Unit testing for WebBrick library functions (Functions.py)
# See http://pyunit.sourceforge.net/pyunit.html
#
import sys, unittest, logging, zipfile, re, StringIO, os, logging, cgi
from os.path import normpath, abspath
sys.path.append("..")
sys.path.append("../cgi-bin")
try:
# Running Python 2.5 with simplejson?
import simplejson as json
except ImportError:
import json as json
import DirectoryListingHandler, SubmitDatasetUtils, TestConfig
from MiscLib import TestUtils
logger = logging.getLogger("TestDirectoryListingHandler")
class TestDirectoryListingHandler(unittest.TestCase):
    """Unit tests for the CGI directory-listing handler (Python 2)."""
    def setUp(self):
        return
    def tearDown(self):
        return
    # Tests
    # Test that the Dataset handler returned a HTML page back to the client that requested it:
    def testDirectoryListingHandlerResponse(self):
        """The handler must emit a JSON content-type header followed by
        a JSON list of the sub-directories under the requested path."""
        outputStr = StringIO.StringIO()
        # Invoke dataset submission program, passing faked form submission parameters
        DirectoryListingHandler.processDirectoryListingRequest(TestConfig.DirPath, TestConfig.DatasetsBaseDir, outputStr)
        #logger.debug("Output String from output stream: "+outputStr.getvalue())
        # print "Output String from output stream: "+outputStr.getvalue()
        # Rewind the captured CGI output and check the header line first.
        outputStr.seek(0, os.SEEK_SET)
        firstLine = outputStr.readline()
        self.assertEqual( firstLine, "Content-type: application/JSON\n", "Expected directory list as application/JSON")
        # Check retrieving sub-directories
        directoryCollection = json.load(outputStr)
        logger.debug("Directory Collection = " + repr(directoryCollection))
        self.assertEquals(len(directoryCollection), 2, "Expected 2 directories to be returned")
        expectdirs = \
            [ "DatasetsTopDir/DatasetsEmptySubDir",
            "DatasetsTopDir/DatasetsSubDir"
            ]
        for d in expectdirs:
            self.failUnless(d in directoryCollection,
                "Expected directory %s in result (received %s)"%(d,repr(directoryCollection)))
        print repr(directoryCollection)
        return
def getTestSuite(select="unit"):
    """
    Get test suite
    select is one of the following:
        "unit"      return suite of unit tests only
        "component" return suite of unit and component tests
        "all"       return suite of unit, component and integration tests
        "pending"   return suite of pending tests
        name        a single named test to be run
    """
    # Map of category name -> list of test method names to run.
    testdict = {
        "unit":
            [ #"testUnits"
            "testDirectoryListingHandlerResponse"
            ],
        "component":
            [ #"testComponents"
            ],
        "integration":
            [ #"testIntegration"
            ],
        "pending":
            [ #"testPending"
            ]
        }
    return TestUtils.getTestSuite(TestDirectoryListingHandler, testdict, select=select)

if __name__ == "__main__":
    #logging.basicConfig(level=logging.DEBUG)
    TestConfig.setDatasetsBaseDir(".")
    TestUtils.runTests("TestDirectoryListingHandler.log", getTestSuite, sys.argv)
    #runner = unittest.TextTestRunner()
    #runner.run(getTestSuite())
# -*- coding: utf-8 -*-
# Kuulemma
# Copyright (C) 2014, Fast Monkeys Oy
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import pytest
from flask import url_for
from kuulemma.models import User
from kuulemma.serializers import account_activation_serializer
from tests.factories import UserFactory
@pytest.mark.usefixtures('database')
class ActivateAccountTest(object):
    """Shared fixtures: a user, an activation hash for that user's
    email, and the response to visiting the activation URL."""
    @pytest.fixture
    def user(self):
        return UserFactory()
    @pytest.fixture
    def activation_hash(self, user):
        return account_activation_serializer.dumps(user.email)
    @pytest.fixture
    def response(self, client, activation_hash):
        return client.get(
            url_for(
                'auth.activate_account',
                activation_hash=activation_hash
            ),
            follow_redirects=True,
        )
class TestActivateAccountWithCorrectLink(ActivateAccountTest):
    """Visiting a valid activation link activates the account."""
    def test_should_return_200(self, response):
        assert response.status_code == 200
    def test_should_correct_flash_message(self, response):
        # Finnish: "Your account has been activated".
        message = 'Tilisi on aktivoitu'
        assert message in response.data.decode('utf8')
    def test_should_activate_account(self, user, response):
        assert User.query.get(user.id).active
class TestActivateAccountWithAlreadyActivatedUser(ActivateAccountTest):
    """Re-visiting the link for an already-active user shows an error."""
    @pytest.fixture
    def user(self):
        return UserFactory(active=True)
    def test_should_return_200(self, response):
        assert response.status_code == 200
    def test_should_return_correct_error_flash(self, response):
        # Finnish: "You have already activated your account."
        message = 'Olet jo aktivoinut tilisi.'
        assert message in response.data.decode('utf8')
class TestActivateAccountWithWrongHash(ActivateAccountTest):
    """A bogus activation hash is rejected with an error flash."""
    @pytest.fixture
    def activation_hash(self):
        return 'random'
    def test_should_return_200(self, response):
        assert response.status_code == 200
    def test_should_return_correct_error_flash(self, response):
        # Finnish: "Check the address".
        message = 'Tarkista osoite'
        assert message in response.data.decode('utf8')
| fastmonkeys/kuulemma | tests/views/auth/test_activate_account.py | Python | agpl-3.0 | 2,656 |
from __future__ import division
from libtbx.path import walk_source_tree
from libtbx.str_utils import show_string
from libtbx.utils import Sorry
from libtbx.option_parser import option_parser
from fnmatch import fnmatch
import re
import sys, os
def read_lines_if_possible(file_path):
  """Return the lines of file_path as a list of strings.

  Returns [] when the file cannot be opened.  Errors raised while
  reading an already-open file still propagate, as before.
  """
  try:
    f = open(file_path, "r")
  except IOError:
    return []
  try:
    return f.read().splitlines()
  finally:
    # Close deterministically instead of relying on refcounting
    # (the original leaked the handle on the success path).
    f.close()
def run(args, command_name="libtbx.find_files"):
  """Find files under the --top directories (default: cwd) whose names
  match the given fnmatch patterns, optionally grepping their contents.

  args: command line argument strings; an empty list shows --help.
  """
  if (len(args) == 0): args = ["--help"]
  command_line = (option_parser(
    usage="%s [options] pattern ..." % command_name,
    description="Recursively finds all files matching patterns,\n"
      "excluding CVS and .svn directories and .pyc files.")
    .option("-t", "--top",
      action="append",
      type="string",
      metavar="PATH",
      help="top-level directory where search starts"
        " (default is current working directory)")
    .option("-g", "--grep",
      action="append",
      type="string",
      metavar="PATTERN",
      help="find regular expression pattern in each file (multiple"
        " -g/--grep options can be given)")
    .option("-i", "--ignore_case",
      action="store_true",
      default=False,
      help="with -g/--grep: case-insensitive match")
    .option("-f", "--file_names_only",
      action="store_true",
      default=False,
      help="with -g/--grep: show file names only, not the matching lines")
    .option("-q", "--quote",
      action="store_true",
      default=False,
      help="quote file names")
  ).process(args=args)
  fn_patterns = command_line.args
  co = command_line.options
  grep_flags = 0
  if (co.ignore_case):
    grep_flags |= re.IGNORECASE
  # No patterns given: match every file name.
  if (len(fn_patterns) == 0):
    fn_patterns = ["*"]
  tops = co.top
  if (tops is None):
    tops = ["."]
  for top in tops:
    if (not os.path.isdir(top)):
      raise Sorry("Not a directory: %s" % show_string(top))
    for file_path in walk_source_tree(top=top):
      file_name = os.path.basename(file_path)
      for fn_pattern in fn_patterns:
        if (fnmatch(file_name, fn_pattern)):
          if (co.quote): fp = show_string(file_path)
          else: fp = file_path
          if (co.grep is None):
            print fp
          else:
            # A NUL byte on any line marks the file as binary; with
            # -f/--file_names_only the distinction is irrelevant.
            is_binary_file = co.file_names_only
            for line in read_lines_if_possible(file_path=file_path):
              if (not is_binary_file):
                is_binary_file = "\0" in line
              # NOTE(review): closure over `line`, re-defined each
              # iteration -- could be hoisted; kept as-is here.
              def line_matches_all_grep_patterns():
                for grep_pattern in co.grep:
                  if (re.search(
                      pattern=grep_pattern,
                      string=line,
                      flags=grep_flags) is None):
                    return False
                return True
              if (line_matches_all_grep_patterns()):
                if (co.file_names_only):
                  print fp
                  break
                elif (is_binary_file):
                  print "%s: match in binary file" % fp
                  break
                else:
                  print "%s: %s" % (fp, line)

if (__name__ == "__main__"):
  run(sys.argv[1:])
| hickerson/bbn | fable/fable_sources/libtbx/command_line/find_files.py | Python | mit | 3,114 |
# -*- coding:utf-8 -*-
import test_core
import demjson
import datetime
# Banner: "Login test".
test_core.title("登录测试")
# Load the shared test configuration (base URL, token, ...).
f = open("testconfig.json", 'r')
lines = f.read()
f.close()
jsonfiledata = demjson.decode(lines)
if jsonfiledata["url"] == "":
    # Error message: "'testconfig.json' is not fully configured."
    test_core.terr("错误: 'testconfig.json' 配置不完全。")
    exit()
# POST fixed test credentials to the login endpoint.
uurl = jsonfiledata["url"]+"nyalogin.php"
udataarr = {
    'user':"[email protected]",
    'password':"testpassword",
    'ua':"test"
}
dataarr = test_core.postarray(uurl,udataarr,True)
# Write the returned token and a timestamp back into the config file.
jsonfiledata["update"] = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')
jsonfiledata["token"] = dataarr["token"]
lines = demjson.encode(jsonfiledata)
f = open("testconfig.json", 'w')
f.write(lines)
f.close()
#!/usr/bin/env python3
'''Test insecure<->secure delegation transitions with NSEC3PARAM changes.'''
import random
from dnstest.test import Test
t = Test()
master = t.server("knot")
zones = t.zone("example.")
t.link(zones, master)
# Sign the zone with NSEC3 and opt-out, starting at 1 iteration.
master.dnssec(zones[0]).enable = True
master.dnssec(zones[0]).nsec3 = True
master.dnssec(zones[0]).nsec3_opt_out = True
master.dnssec(zones[0]).nsec3_iters = 1
t.start()
master.zones_wait(zones)
# Bump NSEC3 iterations to force an NSEC3PARAM change at runtime.
master.dnssec(zones[0]).nsec3_iters = 2
master.gen_confile()
master.reload()
t.sleep(8)
# Insecure -> secure: add a DS for b.example.; the authority section of
# a subsequent query must then contain no NSEC3 records.
up = master.update(zones)
up.add("b.example.", 3600, "DS", "57855 5 1 B6DCD485719ADCA18E5F3D48A2331627FDD3636B")
up.send()
t.sleep(4)
resp = master.dig("b.example.", "NS", dnssec=True)
resp.check_count(0, rtype="NSEC3", section="authority")
# Randomly change NSEC3PARAM again to exercise both orderings.
if random.random() < 0.5:
    master.dnssec(zones[0]).nsec3_iters = 3
    master.gen_confile()
    master.reload()
    t.sleep(6)
# Secure -> insecure: delete the DS for a.example.; at least one NSEC3
# must now appear in the authority section.
up = master.update(zones)
up.delete("a.example.", "DS")
up.send()
t.sleep(4)
resp = master.dig("a.example.", "NS", dnssec=True)
if resp.count("NSEC3", section="authority") < 1:
    resp.check_count(1, rtype="NSEC3", section="authority") # correct is 1 or 2
t.end()
| CZ-NIC/knot | tests-extra/tests/ixfr/insec_sec_deleg/test.py | Python | gpl-3.0 | 1,164 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Isaku Yamahata <yamahata@valinux co jp>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
DEFAULT_ROOT_DEV_NAME = '/dev/sda1'


def properties_root_device_name(properties):
    """Return the root device name recorded in image metadata.

    The last 'root' entry in the 'mappings' list wins, but an explicit
    'root_device_name' property overrides it.  Returns None when
    neither is present.
    """
    from_mappings = None
    # NOTE(yamahata): see image_service.s3.s3create()
    for mapping in properties.get('mappings', []):
        if mapping['virtual'] == 'root':
            from_mappings = mapping['device']
    # NOTE(yamahata): register_image's command line can override
    # <machine>.manifest.xml
    return properties.get('root_device_name', from_mappings)
# Matches 'ephemeral0', 'ephemeral1', ... with no leading zeros
# ('ephemeral01' is rejected); group 1 captures the index.
_ephemeral = re.compile(r'^ephemeral(\d|[1-9]\d+)$')


def is_ephemeral(device_name):
    """Return a truthy regex match when device_name is 'ephemeralN'."""
    return _ephemeral.match(device_name)


def ephemeral_num(ephemeral_name):
    """Return the integer index N of an 'ephemeralN' device name."""
    match = is_ephemeral(ephemeral_name)
    assert match
    return int(match.group(1))


def is_swap_or_ephemeral(device_name):
    """Truthy for 'swap' or any 'ephemeralN' device name."""
    return device_name == 'swap' or is_ephemeral(device_name)


def mappings_prepend_dev(mappings):
    """Prepend '/dev/' to 'device' entry of swap/ephemeral virtual type.

    Mutates and returns the given mappings list.
    """
    for mapping in mappings:
        device = mapping['device']
        if device.startswith('/'):
            continue
        if is_swap_or_ephemeral(mapping['virtual']):
            mapping['device'] = '/dev/' + device
    return mappings
_dev = re.compile('^/dev/')


def strip_dev(device_name):
    """Return device_name with a single leading '/dev/' removed."""
    prefix = '/dev/'
    if device_name.startswith(prefix):
        return device_name[len(prefix):]
    return device_name
| sileht/deb-openstack-nova | nova/block_device.py | Python | apache-2.0 | 2,165 |
# Copyright 2022 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
"""
This class encapsulates the interactions with
the uwnetid subscription resource.
"""
import logging
from uw_uwnetid.password import get_uwnetid_password
from myuw.dao.term import get_comparison_datetime_with_tz
from myuw.dao import get_netid_of_current_user
logger = logging.getLogger(__name__)
def get_password_info(request):
    """Return the uw_netid.models.UwPassword for the current user.

    The result is cached on the request object so repeated calls within
    one request perform a single lookup.
    """
    if not hasattr(request, "myuw_netid_password"):
        netid = get_netid_of_current_user(request)
        request.myuw_netid_password = get_uwnetid_password(netid)
    return request.myuw_netid_password
def password_prefetch():
    """Return the list of prefetch callables for netid-password data.

    Each callable takes the request and primes the per-request
    UwPassword cache via get_password_info().
    """
    def _prefetch_password(request):
        get_password_info(request)
    return [_prefetch_password]
def get_pw_json(request):
    """
    return data attributes:
    {
     "uwnetid":
     "netid_status": list of strings
     "has_active_med_pw": boolean
     "last_change_med": date
     "days_after_last_med_pw_change": interger
     "expires_med": date
     "interval_med": seconds
     "med_pw_expired": boolean
     "last_change": date
     "days_after_last_pw_change": integer
     "interval": seconds or null
     "minimum_length": integer
     "time_stamp": date
     "kerb_status": string
    }
    """
    pw = get_password_info(request)
    if pw is None:
        return None
    now_dt = get_comparison_datetime_with_tz(request)
    json_data = pw.json_data()
    json_data["days_after_last_pw_change"] =\
        get_days_after(pw.last_change, now_dt)
    # The med-password keys below are only added when the user actually
    # has a medical-center password on record (last_change_med is set).
    json_data["has_active_med_pw"] = False
    if pw.last_change_med:
        json_data["has_active_med_pw"] = True
        json_data["days_after_last_med_pw_change"] =\
            get_days_after(pw.last_change_med, now_dt)
        # Compare the expiry against "now" to classify expired vs. the
        # number of days remaining.
        if pw.expires_med < now_dt:
            json_data["med_pw_expired"] = True
        else:
            json_data["med_pw_expired"] = False
            json_data["days_before_med_pw_expires"] =\
                get_days_before(pw.expires_med, now_dt)
            # Flag imminent expiry (30 days or less) for the UI.
            if json_data["days_before_med_pw_expires"] <= 30:
                json_data["expires_in_30_days_or_less"] = True
            else:
                json_data["expires_in_30_days_or_less"] = False
    return json_data
def get_days_after(last_change_dt, now_dt):
    """Return the number of whole days from last_change_dt to now_dt."""
    return (now_dt - last_change_dt).days
def get_days_before(expires_dt, now_dt):
    """Return the number of whole days from now_dt until expires_dt."""
    return (expires_dt - now_dt).days
| uw-it-aca/myuw | myuw/dao/password.py | Python | apache-2.0 | 2,488 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This bot applies to wikisource sites to upload text.
Text is uploaded to pages in Page ns, for a specified Index.
Text to be stored, if the page is not-existing, is preloaded from the file used
to create the Index page, making the upload feature independent from the format
of the file, as long as it is supported by the MW ProofreadPage extension.
As alternative, if '-ocr' option is selected, https://tools.wmflabs.org/phetools
OCR tool will be used to get text.
In this case, also already existing pages with quality value 'Not Proofread'
can be treated. '-force' will override existing page in this case.
The following parameters are supported:
# TODO: update params + handle quality level
-index:... name of the index page
-pages:<start>-<end>,...<start>-<end>,<start>-<end>
Page range to upload;
optional, start=1, end=djvu file number of images.
Page ranges can be specified as:
A-B -> pages A until B
A- -> pages A until number of images
A -> just page A
-B -> pages 1 until B
    -showdiff: show difference between current text and new text when
               saving the page
-ocr: use https://tools.wmflabs.org/phetools OCR tool to get text;
default is False, i.e. only not-(yet)-existing pages
in Page ns will be treated and text will be fetched via preload.
-force: overwrite existing pages;
default is False; valid only if '-ocr' is selected.
-summary: custom edit summary.
Use quotes if edit summary contains spaces.
-always don't bother asking to confirm any of the changes.
"""
#
# (C) Pywikibot team, 2016-2017
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
__version__ = '$Id$'
import itertools
import pywikibot
from pywikibot import i18n
from pywikibot.bot import SingleSiteBot
from pywikibot.proofreadpage import IndexPage, ProofreadPage
class UploadTextBot(SingleSiteBot):
    """
    A bot that uploads text-layer to Page:namespace.
    Text is fetched via preload as on Wikisource wikis, text can be preloaded
    only if a page does not exist, if an Index page is present.
    Works only on sites with Proofread Page extension installed.
    """
    def __init__(self, generator, **kwargs):
        """
        Constructor.
        @param generator: page generator
        @type generator: generator
        """
        # Bot-specific options merged into the generic bot option set.
        self.availableOptions.update({
            'showdiff': False,
            'force': False,
            'ocr': False,
            'summary': 'Bot: uploading text'
        })
        super(UploadTextBot, self).__init__(**kwargs)
        self.generator = generator
        # TODO: create i18n files
        # Get edit summary message if it's empty.
        if not self.getOption('summary'):
            self.options['summary'] = i18n.twtranslate(
                self.site, 'djvutext-creating')
    def treat(self, page):
        """Process one ProofreadPage page.
        Saves text only when the page does not exist yet, unless both
        '-ocr' and '-force' are active.
        @param page: page to be treated.
        @type page: ProofreadPage
        @raises: pywikibot.Error
        """
        if not isinstance(page, ProofreadPage):
            raise pywikibot.Error('Page %s must be a ProofreadPage object.'
                                  % page)
        summary = self.getOption('summary')
        if page.exists():
            old_text = page.text
        else:
            old_text = ''
        if self.getOption('ocr'):
            # NOTE(review): the OCR result is stored on page.body while
            # page.text is what gets saved -- confirm body feeds text.
            page.body = page.ocr()
        if (page.exists() and
                not (self.getOption('ocr') and self.getOption('force'))):
            pywikibot.output('Page %s already exists, not adding!' % page)
        else:
            self.userPut(page, old_text, page.text, summary=summary,
                         show_diff=self.getOption('showdiff'))
def main(*args):
    """
    Process command line arguments and invoke bot.
    If args is an empty list, sys.argv is used.
    @param args: command line arguments
    @type args: list of unicode
    """
    index = None
    pages = '1-'
    options = {}
    # Parse command line arguments.
    local_args = pywikibot.handle_args(args)
    for arg in local_args:
        arg, sep, value = arg.partition(':')
        if arg == '-index':
            index = value
        elif arg == '-pages':
            pages = value
        elif arg == '-showdiff':
            options['showdiff'] = True
        elif arg == '-summary':
            options['summary'] = value
        elif arg == '-ocr':
            options['ocr'] = True
        elif arg == '-force':
            options['force'] = True
        elif arg == '-always':
            options['always'] = True
        else:
            pywikibot.output('Unknown argument %s' % arg)
    # index is mandatory.
    if not index:
        pywikibot.bot.suggest_help(missing_parameters=['-index'])
        return False
    # '-force' can be used with '-ocr' only.
    if 'force' in options and 'ocr' not in options:
        pywikibot.error("'-force' can be used with '-ocr' option only.")
        return False
    site = pywikibot.Site()
    if not site.has_extension('ProofreadPage'):
        pywikibot.error('Site %s must have ProofreadPage extension.' % site)
        return False
    index = IndexPage(site, index)
    if not index.exists():
        pywikibot.error("Page %s doesn't exist." % index)
        return False
    # Parse pages param.
    # Create a list of (start, end) tuples.
    # Accepted forms: "A-B", "A-" (A..num_pages), "A" (just A), "-B" (1..B).
    pages = pages.split(',')
    for interval in range(len(pages)):
        start, sep, end = pages[interval].partition('-')
        start = 1 if not start else int(start)
        if not sep:
            end = start
        else:
            end = int(end) if end else index.num_pages
        pages[interval] = (start, end)
    # gen yields ProofreadPage objects.
    # filter_ql=[1]: only pages with quality level 'Not Proofread'.
    gen_list = []
    for start, end in sorted(pages):
        gen = index.page_gen(start=start, end=end,
                             filter_ql=[1], content=False)
        gen_list.append(gen)
    gen = itertools.chain(*gen_list)
    pywikibot.output('\nUploading text to %s\n' % index.title(asLink=True))
    bot = UploadTextBot(gen, site=index.site, **options)
    bot.run()
if __name__ == '__main__':
    try:
        main()
    except Exception:
        pywikibot.error('Fatal error:', exc_info=True)
| hasteur/g13bot_tools_new | scripts/wikisourcetext.py | Python | mit | 6,576 |
#!/usr/bin/env python
# Copyright 2015 Google, Inc.
# Routines for the dual battery charging
# Author: TaiWen Ko
# Date: 2015-1-13
import xpf6020
import bk8500
from datetime import datetime
from time import sleep
import tools.utils as tools
from tools import shell
import twk_utils
import sys
utils = twk_utils.Twk_utils()
from blessings import Terminal
t = Terminal()
# Ask the operator how many batteries are attached; only '1'-'4'
# confirmed with an explicit 'y' is accepted.
batt_num = None
while True:
  batt_num = raw_input('How many batteries would you like to charge? [ 1, 2, 3, or 4 ]: ')
  message = raw_input('Charging %s battery(s). Is this correct? [y/N]' % batt_num )
  if message == 'y' and (batt_num == '1' or batt_num == '2' or batt_num == '3' or batt_num == '4'):
    break;
# Make sure to use the correct cables
ps1_path = '/dev/serial/by-id/usb-FTDI_FT232R_USB_UART_AH028NK1-if00-port0'
ps2_path = '/dev/serial/by-id/usb-FTDI_FT232R_USB_UART_A703U4HD-if00-port0'
pfc1_path = '/dev/serial/by-id/usb-loon_onboard_half_stack_hv_pfc_1-if01-port0'
pfc2_path = '/dev/serial/by-id/usb-loon_onboard_half_stack_hv_pfc_2-if01-port0'
pfc3_path = '/dev/serial/by-id/usb-loon_onboard_half_stack_hv_pfc_3-if01-port0'
pfc4_path = '/dev/serial/by-id/usb-loon_onboard_half_stack_hv_pfc_4-if01-port0'
print "Accessing the XPF6020 Power Supply"
# Charge setpoints: 50.4 V pack voltage plus 0.3 V diode drop.
charge_v = 50.7 # 50.4 + 0.3 diode drop
charge_i = 7#10
# Reset both dual-channel supplies, then program identical voltage and
# current limits on every channel.
ps1 = xpf6020.Xpf6020(ps1_path)
ps1.reset_ps()
ps2 = xpf6020.Xpf6020(ps2_path)
ps2.reset_ps()
ps1.set_voltage(1, charge_v)
ps1.set_currentlimit(1, charge_i)
ps1.set_voltage(2, charge_v)
ps1.set_currentlimit(2, charge_i)
ps2.set_voltage(1, charge_v)
ps2.set_currentlimit(1, charge_i)
ps2.set_voltage(2, charge_v)
ps2.set_currentlimit(2, charge_i)
def batt_charging(target_volt, target_current, max_temp, min_temp, max_diff, min_diff, monitor_freq, batt_num):
is_it_done_yet = False
ps1.ind_output('1','on')
sleep(1)
if batt_num == '2':
ps1.ind_output('2','on')
sleep(1)
elif batt_num == '3':
ps1.ind_output('2','on')
sleep(1)
ps2.ind_output('1','on')
sleep(1)
elif batt_num == '4':
ps1.ind_output('2','on')
sleep(1)
ps2.ind_output('1','on')
sleep(1)
ps2.ind_output('2','on')
sleep(1)
else:
if batt_num != '1':
raise Exception, 'Unknown command.'
while True:
for i in range(int(batt_num)):
print i
if i == 1:
path = pfc1_path
elif i == 2:
path = pfc2_path
elif i == 3:
path = pfc3_path
elif i == 4:
path = pfc4_path
else:
raise Exception, 'Unknown range.'
tom = shell.Shell(path)
sb = shell.Scoreboard(tom,'battery')
state = check_batt_charge(target_volt, target_current, max_temp, min_temp, max_diff, min_diff, monitor_freq, batt_num)
if i == 1:
ch1_state = state
elif i == 2:
ch2_state = state
elif i == 3:
ch3_state = state
elif i == 4:
ch4_state = state
else:
raise Exception, 'Unknown range.'
tom.close()
if batt_num == '1':
if ch1_state == 1:
break
elif batt_num == '2':
if ch1_state == 1 and ch2_state == 1:
break
elif batt_num == '3':
if ch1_state == 1 and ch2_state == 1 and ch3_state == 1:
break
elif batt_num == '4':
if ch1_state == 1 and ch2_state == 1 and ch3_state == 1 and ch4_state == 1:
break
print "Checking measurement again in another %s seconds" % monitor_freq
sleep(monitor_freq)
def check_batt_temp(max_temp, min_temp, batt_num):
  """Read the three battery temperature sensors from the scoreboard and
  range-check them via overheat_check.

  Relies on the module-level scoreboard ``sb`` being opened by the caller.
  NOTE(review): check_batt_charge calls this with only two arguments even
  though batt_num is required here — confirm the intended call signature.
  """
  # Scoreboard replies look like quoted tuples; field 3 of the '-split is
  # the value string.
  temp1 = str(sb.query('battery0_temperature0'))
  current_temp1 = temp1.split("'")[3]
  temp2 = str(sb.query('battery0_temperature1'))
  current_temp2 = temp2.split("'")[3]
  temp3 = str(sb.query('battery0_temperature2'))
  current_temp3 = temp3.split("'")[3]
  print 'Temperature0 = %sC' % current_temp1
  print 'Temperature1 = %sC' % current_temp2
  print 'Temperature2 = %sC' % current_temp3
  overheat_check(current_temp1, current_temp2, current_temp3, max_temp, min_temp, batt_num)
def overheat_check(t1, t2, t3, max_temp, min_temp, batt_num):
  """Abort the run (via cleanup) if any of the three temperatures is above
  max_temp or below min_temp.  Temperatures arrive as strings."""
  if (float(t1) > float(max_temp)) or (float(t2) > float(max_temp)) or (float(t3) > float(max_temp)):
    print ''
    print 'ERROR: Battery%s temperature is over the max limit of %sC!!' % (batt_num, max_temp)
    print ''
    print '***Please let the batteries cool off before restarting the test.***'
    print ''
    cleanup()
  if (float(t1) < float(min_temp)) or (float(t2) < float(min_temp)) or (float(t3) < float(min_temp)):
    print ''
    print 'ERROR: Battery%s temperature is under the min limit of %sC!!' % (batt_num, min_temp)
    print ''
    print '***Please let the batteries warm up before restarting the test.***'
    print ''
    cleanup()
def check_cell_diff(max_diff, min_diff, batt_num):
min_cv = str(sb.query('battery0_min_cell_voltage'))
current_min_cv = min_cv.split("'")[3]
max_cv = str(sb.query('battery0_max_cell_voltage'))
current_max_cv = max_cv.split("'")[3]
print 'Min cell voltage = %s' % current_min_cv
print 'Max cell voltage = %s' % current_max_cv
diff_check(max_cv, min_cv, max_diff, min_diff, batt_num)
def print_status_errors():
sbvolt = str(sb.query('battery0_voltage'))
current_sbvolt = sbvolt.split("'")[3]
sbcurr = str(sb1.query('battery0_current'))
current_sbcurr = sbcurr.split("'")[3]
sbstatus = str(sb.query('battery0_status'))
current_sbstatus = sbstatus.split("'")[3]
sberror = str(sb1.query('battery0_error'))
current_sberror = sberror.split("'")[3]
sbalert = str(sb1.query('battery0_safety_alert'))
current_sbalert = sbalert.split("'")[3]
sbopstatus = str(sb.query('battery0_operation_status'))
current_sbopstatus = sbopstatus.split("'")[3]
print 'Battery Voltage = %s' % current_sbvolt
print 'Battery Current = %s' % curren_sbcurr
print 'Battery Status = %s' % current_sbstatus
print 'Error = %s' % current_sberror
print 'Safety Alert = %s' % current_sbalert
print 'Op Status = %s' % current_sbopstatus
def diff_check(max_reading, min_reading, max_limit, min_limit, batt_num):
  """Abort the run (via cleanup) if the max cell voltage exceeds max_limit
  or the min cell voltage is below min_limit.  Readings arrive as strings."""
  if (float(max_reading) > float(max_limit)):
    print ''
    print 'ERROR: Battery%s cell voltage is over the max limit of %sV!!' % (batt_num, max_limit)
    print ''
    print '***Enter solution here.***'
    print ''
    cleanup()
  if (float(min_reading) < float(min_limit)):
    print ''
    print 'ERROR: Battery%s cell voltage is under the min limit of %sV!!' % (batt_num, min_limit)
    print ''
    print '***Enter solution here.***'
    print ''
    cleanup()
def cleanup():
  """Shut off every output on both supplies and terminate the script."""
  ps1.all_output('off')
  ps2.all_output('off')
  sys.exit()
def check_batt_charge(target_volt, target_current, max_temp, min_temp, max_diff, min_diff, channel):
state = 0
result = t.bold_red('Charging')
if channel == 1:
[volt, curr] = ps1.measure('1')
elif channel == 2:
[volt, curr] = ps1.measure('2')
elif channel == 3:
[volt, curr] = ps2.measure('1')
elif channel == 4:
[volt, curr] = ps2.measure('2')
volt = volt.split("V")[0]
curr = curr.split("A")[0]
if float(volt) >= float(target_volt):
if float(curr) <= float(target_current):
state = 1
result = t.bold_green('Charged')
if channel == 1:
ps1.set_currentlimit(1, 0)
elif channel == 2:
ps1.set_currentlimit(2, 0)
elif channel == 3:
ps2.set_currentlimit(1, 0)
elif channel == 4:
ps2.set_currentlimit(2, 0)
print 'Battery state is %s ' % ch1_result
print 'PS_Reading: Voltage = %sV, Current = %sA' % (volt, curr)
soc = str(sb.query('battery0_soc_percent'))
current_soc = soc.split("'")[3]
print 'BatterySOC is %s %' % current_soc
check_batt_temp(max_temp, min_temp)
check_cell_diff(max_diff, min_diff)
print_status_errors()
# Charge-termination thresholds and safety limits.
# NOTE(review): max_temp/min_temp (323/283) look like Kelvin, but
# check_batt_temp prints the scoreboard readings with a 'C' suffix —
# confirm the units match before relying on the limits.
target_volt = 50.4
target_current = 0.2
monitor_freq = 60  # seconds between polls
max_temp = 323
min_temp = 283
max_diff = 4.25  # per-cell voltage ceiling
min_diff = 2.5   # per-cell voltage floor
ts = utils.get_timestamp()
message = '***Battery Charging Started @ %s***' % ts
print message
utils.send_email('Battery Charging', message)
batt_charging(target_volt, target_current, max_temp, min_temp, max_diff, min_diff, monitor_freq, batt_num)
ts = utils.get_timestamp()
message = '***Battery Charging Completed @ %s***' % ts
print message
utils.send_email('Battery Charging', message)
| taiwenko/python | battery/new_batt_charging_rev1.py | Python | mit | 8,336 |
# Copyright 2009-2013 Justin Riley
#
# This file is part of StarCluster.
#
# StarCluster is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# StarCluster is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with StarCluster. If not, see <http://www.gnu.org/licenses/>.
import sys
import time
from threading import Thread
class Spinner(Thread):
    """Daemon thread that animates a console 'spinner' character while a
    long-running operation is in progress.

    Usage: s = Spinner(); s.start(); ...work...; s.stop()
    """
    #Set the screen position of the spinner (chars from the left).
    spin_screen_pos = 1
    #Set the current index position in the spinner character list.
    char_index_pos = 0
    #Set the time between character changes in the spinner (seconds).
    sleep_time = 1
    #Set the spinner type: 0-3
    spin_type = 2

    def __init__(self, type=spin_type):
        # NOTE(review): the parameter name shadows the builtin ``type``.
        Thread.__init__(self)
        self.setDaemon(True)
        self.stop_spinner = False
        self.stopped = False
        # Pick the 5-frame animation for the requested spinner style.
        if type == 0:
            self.char = ['O', 'o', '-', 'o', '0']
        elif type == 1:
            self.char = ['.', 'o', 'O', 'o', '.']
        elif type == 2:
            self.char = ['|', '/', '-', '\\', '-']
        else:
            self.char = ['*', '#', '@', '%', '+']
        self.len = len(self.char)

    def Print(self, crnt):
        """Erase the previous frame, draw frame ``crnt``, sleep, and return
        the next frame index.  (The local ``str`` shadows the builtin.)"""
        str, crnt = self.curr(crnt)
        sys.stdout.write("\b \b%s" % str)
        sys.stdout.flush() # Flush stdout to get output before sleeping!
        time.sleep(self.sleep_time)
        return crnt

    def curr(self, crnt):
        """
        Return (frame character, next index), wrapping after the last frame.
        """
        if crnt == 4:
            return self.char[4], 0
        elif crnt == 0:
            return self.char[0], 1
        else:
            test = crnt
            crnt += 1
            return self.char[test], crnt

    def done(self):
        # Erase the spinner character and move to a fresh line.
        sys.stdout.write("\b \b\n")

    def stop(self):
        """Ask the run loop to stop and block until it acknowledges."""
        self.stop_spinner = True
        while not self.stopped:
            time.sleep(0.5)  # give time for run to get the message

    def run(self):
        # the comma keeps print from ending with a newline.
        print " " * self.spin_screen_pos,
        while True:
            if self.stop_spinner:
                self.done()
                self.stopped = True
                return
            self.char_index_pos = self.Print(self.char_index_pos)

    def test(self, sleep=3.4):
        """Demo helper: spin for ``sleep`` seconds around a fake 'process'."""
        print 'Waiting for process...',
        self.start()
        time.sleep(sleep)
        self.stop()
        print 'Process is finished...'
if __name__ == "__main__":
for i in range(0, 10):
s = Spinner()
s.test(sleep=float('3.' + str(i)))
| Courtagen/StarCluster | starcluster/spinner.py | Python | gpl-3.0 | 2,981 |
# -*- coding: utf-8 -*-
u"""
Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U.
This file is part of Toolium.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from behave import given, when, then
from toolium.driver_wrapper import DriverWrapper
from web_behave.pageobjects.login import LoginPageObject
@given('the home page is open in {browser}')
def step_impl(context, browser):
    """Open the login page in the named browser, creating a second driver
    for any browser other than 'browser1'.

    NOTE(review): context.current_page is only created in the 'browser1'
    branch — this step presumably must run for browser1 before browser2,
    otherwise the browser2 branch raises AttributeError; confirm.
    """
    if browser == 'browser1':
        # Use default driver
        context.current_page = {'browser1': LoginPageObject()}
    else:
        # Create a second driver
        second_wrapper = DriverWrapper()
        second_wrapper.connect()
        context.current_page['browser2'] = LoginPageObject(second_wrapper)
    # Open home page
    context.current_page[browser].open()
@when('the user logs in with username "{username}" and password "{password}" in {browser}')
def step_impl(context, username, password, browser):
    """Log in on the page currently open in the given browser and keep the
    page object returned by the login flow as the new current page."""
    credentials = {'username': username, 'password': password}
    page = context.current_page[browser]
    context.current_page[browser] = page.login(credentials)
@then('the message "{message}" is shown in {browser}')
def step_impl(context, message, browser):
    """Assert that the expected text appears in the page's message area."""
    shown_text = context.current_page[browser].message.get_message()
    assert message in shown_text
| Telefonica/toolium-examples | web_behave/steps/multiple_drivers.py | Python | apache-2.0 | 1,706 |
from tkinter import *
from tkinter.ttk import *
class NavBar(Menu):
    """
    Menu bar widget: builds the application's nav-bar tabs (currently only
    'File') and installs itself on the master frame's root window.
    """
    def __init__(self, master_frame):
        super().__init__(master_frame.root)
        self.parent = master_frame.root
        self.master = master_frame
        self.file_menu = None

        # Creates the File Tab
        self.create_tab("File", self.file_menu, self.get_file_menu_data())

        # displays the menus
        master_frame.root.config(menu=self)

    def create_tab(self, name, menu, options, tearoff=0):
        """
        Creates the tab for the Nav Bar and fills it with options
        :param name: str - name of the tab
        :param menu: unused — immediately overwritten below, so the caller's
            reference (e.g. self.file_menu) is never populated
        :param options: iterable of two item tuples (label, command)
        :param tearoff: int - sets the value for the tearoff
        :return: None
        """
        menu = Menu(self, tearoff=tearoff)

        # Creates the list of options
        for label, command in options:
            menu.add_command(label=label, command=command)

        # Adds this menu to the main Nav bar
        self.add_cascade(label=name, menu=menu)

    def get_file_menu_data(self):
        # (label, callback) pairs for the File tab, delegating to the
        # master frame's file-handling methods.
        return(
            ('New', self.master.new_file),
            ('Open', self.master.open_file),
            ('Save', self.master.save_existing_file),
            ('Save as', self.master.save_new_file),
            ('Close', self.close),
        )

    def close(self):
        # Quit the Tk main loop.
        self.parent.quit()
# -*- coding: utf-8 -*-
#
# Picard, the next-generation MusicBrainz tagger
#
# Copyright (C) 2011 Lukáš Lalinský
# Copyright (C) 2017-2018 Sambhav Kothari
# Copyright (C) 2018 Vishal Choudhary
# Copyright (C) 2018-2019 Laurent Monin
# Copyright (C) 2018-2020 Philipp Wolfer
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from collections import deque
from functools import partial
import json
from PyQt5 import QtCore
from picard import (
config,
log,
)
from picard.acoustid.json_helpers import parse_recording
from picard.const import FPCALC_NAMES
from picard.const.sys import IS_FROZEN
from picard.util import find_executable
def get_score(node):
    """Return the AcoustID result's 'score' field as a float.

    Falls back to 1.0 when the field is absent or cannot be converted.
    """
    raw = node.get('score', 1.0)
    try:
        value = float(raw)
    except (TypeError, ValueError):
        value = 1.0
    return value
class AcoustIDClient(QtCore.QObject):
    """Asynchronous AcoustID client.

    Runs the external ``fpcalc`` fingerprint calculator as QProcesses
    (at most ``_max_processes`` concurrently; the rest wait in a queue)
    and resolves fingerprints to recordings via the AcoustID web API.
    """

    def __init__(self, acoustid_api):
        super().__init__()
        # Pending (file, callback) fingerprint tasks and the count of
        # currently running fpcalc processes.
        self._queue = deque()
        self._running = 0
        self._max_processes = 2
        self._acoustid_api = acoustid_api
        # The second condition is checked because in case of a packaged build of picard
        # the temp directory that pyinstaller decompresses picard into changes on every
        # launch, thus we need to ignore the existing config values.
        if not config.setting["acoustid_fpcalc"] or IS_FROZEN:
            fpcalc_path = find_executable(*FPCALC_NAMES)
            if fpcalc_path:
                config.setting["acoustid_fpcalc"] = fpcalc_path

    def init(self):
        pass

    def done(self):
        pass

    def _on_lookup_finished(self, next_func, file, document, http, error):
        """Parse the AcoustID lookup response for *file* and call
        ``next_func(doc, http, error)`` with a doc holding a 'recordings'
        list (empty on failure)."""
        doc = {}
        if error:
            mparms = {
                'error': http.errorString(),
                'body': document,
                'filename': file.filename,
            }
            log.error(
                "AcoustID: Lookup network error for '%(filename)s': %(error)r, %(body)s" %
                mparms)
            self.tagger.window.set_statusbar_message(
                N_("AcoustID lookup network error for '%(filename)s'!"),
                mparms,
                echo=None
            )
        else:
            try:
                recording_list = doc['recordings'] = []
                status = document['status']
                if status == 'ok':
                    results = document.get('results') or []
                    for result in results:
                        recordings = result.get('recordings') or []
                        # Largest source count in this result, floored at 1
                        # to avoid division by zero below.
                        max_sources = max([r.get('sources', 1) for r in recordings] + [1])
                        result_score = get_score(result)
                        for recording in recordings:
                            parsed_recording = parse_recording(recording)
                            if parsed_recording is not None:
                                # Calculate a score based on result score and sources for this
                                # recording relative to other recordings in this result
                                score = recording.get('sources', 1) / max_sources * 100
                                parsed_recording['score'] = score * result_score
                                parsed_recording['acoustid'] = result['id']
                                recording_list.append(parsed_recording)
                    log.debug("AcoustID: Lookup successful for '%s'", file.filename)
                else:
                    mparms = {
                        'error': document['error']['message'],
                        'filename': file.filename
                    }
                    log.error(
                        "AcoustID: Lookup error for '%(filename)s': %(error)r" %
                        mparms)
                    self.tagger.window.set_statusbar_message(
                        N_("AcoustID lookup failed for '%(filename)s'!"),
                        mparms,
                        echo=None
                    )
            except (AttributeError, KeyError, TypeError) as e:
                log.error("AcoustID: Error reading response", exc_info=True)
                error = e
        next_func(doc, http, error)

    def _lookup_fingerprint(self, next_func, filename, result=None, error=None):
        """Send the fingerprint (or recording id) from *result* to the
        AcoustID API; *next_func* receives the parsed response."""
        try:
            file = self.tagger.files[filename]
        except KeyError:
            # The file has been removed. do nothing
            return
        mparms = {
            'filename': file.filename
        }
        if not result:
            log.debug(
                "AcoustID: lookup returned no result for file '%(filename)s'" %
                mparms
            )
            self.tagger.window.set_statusbar_message(
                N_("AcoustID lookup returned no result for file '%(filename)s'"),
                mparms,
                echo=None
            )
            file.clear_pending()
            return
        log.debug(
            "AcoustID: looking up the fingerprint for file '%(filename)s'" %
            mparms
        )
        self.tagger.window.set_statusbar_message(
            N_("Looking up the fingerprint for file '%(filename)s' ..."),
            mparms,
            echo=None
        )
        params = dict(meta='recordings releasegroups releases tracks compress sources')
        if result[0] == 'fingerprint':
            fp_type, fingerprint, length = result
            params['fingerprint'] = fingerprint
            params['duration'] = str(length)
        else:
            fp_type, recordingid = result
            params['recordingid'] = recordingid
        self._acoustid_api.query_acoustid(partial(self._on_lookup_finished, next_func, file), **params)

    def _on_fpcalc_finished(self, next_func, file, exit_code, exit_status):
        """QProcess 'finished' handler: parse fpcalc's JSON output into a
        ('fingerprint', fp, duration) tuple and hand it to *next_func*."""
        process = self.sender()
        # finished/error may both fire for one process; the property guards
        # against handling the same process twice.
        finished = process.property('picard_finished')
        if finished:
            return
        process.setProperty('picard_finished', True)
        result = None
        try:
            self._running -= 1
            self._run_next_task()
            if exit_code == 0 and exit_status == 0:
                output = bytes(process.readAllStandardOutput()).decode()
                jsondata = json.loads(output)
                # Use only integer part of duration, floats are not allowed in lookup
                # NOTE(review): int() raises an uncaught TypeError when
                # 'duration' is missing — presumably fpcalc always emits it;
                # confirm.
                duration = int(jsondata.get('duration'))
                fingerprint = jsondata.get('fingerprint')
                if fingerprint and duration:
                    result = 'fingerprint', fingerprint, duration
            else:
                log.error(
                    "Fingerprint calculator failed exit code = %r, exit status = %r, error = %s",
                    exit_code,
                    exit_status,
                    process.errorString())
        except (json.decoder.JSONDecodeError, UnicodeDecodeError, ValueError):
            log.error("Error reading fingerprint calculator output", exc_info=True)
        finally:
            if result and result[0] == 'fingerprint':
                fp_type, fingerprint, length = result
                # Cache the computed fingerprint on the file object.
                file.set_acoustid_fingerprint(fingerprint, length)
            next_func(result)

    def _on_fpcalc_error(self, next_func, filename, error):
        """QProcess 'error' handler: log the failure and report no result."""
        process = self.sender()
        finished = process.property('picard_finished')
        if finished:
            return
        process.setProperty('picard_finished', True)
        try:
            self._running -= 1
            self._run_next_task()
            log.error("Fingerprint calculator failed error = %s (%r)", process.errorString(), error)
        finally:
            next_func(None)

    def _run_next_task(self):
        """Start fpcalc for the next queued file, if any."""
        try:
            file, next_func = self._queue.popleft()
        except IndexError:
            return
        fpcalc = config.setting["acoustid_fpcalc"] or "fpcalc"
        self._running += 1
        process = QtCore.QProcess(self)
        process.setProperty('picard_finished', False)
        process.finished.connect(partial(self._on_fpcalc_finished, next_func, file))
        process.error.connect(partial(self._on_fpcalc_error, next_func, file))
        # -length 120: fingerprint only the first two minutes of audio.
        process.start(fpcalc, ["-json", "-length", "120", file.filename])
        log.debug("Starting fingerprint calculator %r %r", fpcalc, file.filename)

    def analyze(self, file, next_func):
        """Look up *file* on AcoustID, reusing any cached fingerprint and
        only invoking fpcalc when none is available."""
        fpcalc_next = partial(self._lookup_fingerprint, next_func, file.filename)
        fingerprint = file.acoustid_fingerprint
        if not fingerprint and not config.setting["ignore_existing_acoustid_fingerprints"]:
            # use cached fingerprint from file metadata
            fingerprints = file.metadata.getall('acoustid_fingerprint')
            if fingerprints:
                fingerprint = fingerprints[0]
                file.set_acoustid_fingerprint(fingerprint)
        # If the fingerprint already exists skip calling fpcalc
        if fingerprint:
            length = file.acoustid_length
            fpcalc_next(result=('fingerprint', fingerprint, length))
            return
        # calculate the fingerprint
        self.fingerprint(file, fpcalc_next)

    def fingerprint(self, file, next_func):
        """Queue a fingerprint calculation for *file*; *next_func* receives
        the result tuple (or None)."""
        task = (file, next_func)
        self._queue.append(task)
        if self._running < self._max_processes:
            self._run_next_task()

    def stop_analyze(self, file):
        """Drop any queued (not yet started) fingerprint task for *file*."""
        new_queue = deque()
        for task in self._queue:
            if task[0] != file:
                new_queue.appendleft(task)
        self._queue = new_queue
| Sophist-UK/Sophist_picard | picard/acoustid/__init__.py | Python | gpl-2.0 | 10,124 |
from optparse import OptionParser
import importlib, time
class CodeTester:
    """Loads a benchmark module from test_modules and runs its tests once
    per requested language, collecting per-test timings and averages."""

    def __init__(self, options, args):
        # One module instance per language in ``args``.
        self.objects = []
        self.options = options
        self.module = options.module
        self.args = args
        self.avgs = []
        self.moduleObj = None
        # List of (language, [(test_name, python_time, reported_time), ...]).
        self.tests_results = []

    def init(self):
        """Instantiate the benchmark module for every requested language."""
        self.print_verbose("==== Initialisating CodeTester modules ====")
        for arg in self.args:
            self.load_module(arg)

    def load_module(self, language):
        """Import test_modules.<module> once and create its '<module>Test'
        class for *language*.  Exits the program on any failure."""
        try:
            if self.moduleObj == None:
                self.print_verbose("Loading module %s" %self.module)
                self.moduleObj = importlib.import_module('test_modules.%s' %self.module)
            self.objects.append(getattr(self.moduleObj, self.module + "Test")(language))
        except Exception, e:
            print e
            exit()

    def print_verbose(self, msg):
        # Only print when -v was given.
        if self.options.verbose:
            print msg

    def execute_tests(self):
        """Run every test of every loaded module, timing each test both from
        Python and from the test's own return value."""
        start_time = time.time()
        self.print_verbose("==== Starting tests ====")
        for module in self.objects:
            module_results = []
            self.print_verbose("Executing module %s for %s langugage..." %(module.get_module_name(), module.language))
            self.print_verbose("Initialisating datas...")
            module.init()
            self.print_verbose("Executing tests...")
            test_total_time = 0
            test_num = 0
            func_call_time = time.time()
            while module.has_more_test():
                test_name = module.get_test_name()
                exec_time = time.time()
                ret = module.execute_next_test()
                exec_time = time.time() - exec_time
                self.print_verbose("Test '%s' executed in %s (python)/%s (test return) seconds." %(test_name, exec_time, ret))
                test_num += 1
                test_total_time += ret
                module_results.append((test_name, exec_time, ret))
            if test_num > 0:
                self.print_verbose("Tests total execution time: %s seconds (python method)" %(time.time() - func_call_time))
                self.print_verbose("Tests total execution time: %s seconds (ret method)" %test_total_time)
                average = test_total_time / test_num
                self.print_verbose("Average time: %s" %average)
                self.print_verbose("==========")
                self.avgs.append((module.get_module_name(), average))
            self.tests_results.append((module.language, module_results))
        self.print_verbose("==== Tests finished in %s seconds ====" %(time.time() - start_time))

    def show_results(self):
        """Print a table of python-measured times per language per test, plus
        a final row naming the fastest language for each test."""
        print "RESULTS:"
        test_str = ""
        diff = []
        # Header row (test names) and one accumulator slot per test.
        for test in self.tests_results[0][1]:
            test_str += "\t%s" %test[0]
            diff.append([])
        print test_str
        for module in self.tests_results:
            module_str = "%s" %module[0]
            i = 0
            for test in module[1]:
                module_str += "\t%s" %test[1]
                diff[i].append((module[0], test[1]))
                i += 1
            print module_str
        diffstr = "Best"
        for test in diff:
            diffstr += "\t%s" %self.find_best(test)
        print diffstr

    def find_best(self, tab):
        """Return the language name with the smallest time in *tab*, a list
        of (language, time) pairs.  100000 acts as an 'infinity' sentinel."""
        start = 100000
        best = "None"
        for val in tab:
            if val[1] < start:
                start = val[1]
                best = val[0]
        return best
if __name__ == "__main__":
parser = OptionParser(usage="usage: python %prog [options] languages_to_test...", version="%prog 0.0.1")
parser.add_option("--module", default='Allocation', type='string', action="store", dest='module', help="Module used to do the tests")
parser.add_option("-v", action='store_true', dest='verbose', help="Verbose option")
(options, args) = parser.parse_args()
if (len(args) < 1):
print "You need more than 1 argument."
exit()
codetester = CodeTester(options, args)
codetester.init()
codetester.execute_tests()
codetester.show_results()
| AlexMog/CodePerfComparator | src/compare.py | Python | mit | 4,159 |
from setuptools import setup, find_packages
requires = [
"pyramid",
"SQLAlchemy >= 1.4",
"transaction",
"pyramid_tm",
"zope.sqlalchemy >= 1.3",
"waitress",
"cryptography>=0.5.dev1",
"pyOpenSSL>=0.14",
"python-dateutil",
]
deplinks = []
setup(
name="caramel",
version="1.9.2",
python_requires=">=3.6",
description="caramel",
long_description="""
Caramel is a certificate management system that makes it easy to use client
certificates in web applications, mobile applications, embedded use and
other places. It solves the certificate signing and certificate
management headache, while attempting to be easy to deploy, maintain and
use in a secure manner.
Caramel makes it easier (it's never completely easy) to run your own
certificate authority and manage and maintain keys and signing periods.
Caramel focuses on reliably and continuously updating short-lived certificates
where clients (and embedded devices) continue to "phone home" and fetch
updated certificates. This means that we do not have to provide OCSP and
CRL endpoints to handle compromised certificates, but only have to stop
updating the certificate. This also means that expired certificates
should be considered broken.
""",
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author="D.S. Ljungmark",
author_email="[email protected]",
url="https://github.com/MyTemp/caramel",
keywords="web wsgi bfg pylons pyramid certificates x509 ca cert ssl tls",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
test_suite="tests",
install_requires=requires,
dependency_links=deplinks,
entry_points="""\
[paste.app_factory]
main = caramel:main
[console_scripts]
caramel_initialize_db = caramel.scripts.initializedb:main
caramel_tool = caramel.scripts.tool:main
caramel_ca = caramel.scripts.generate_ca:main
caramel_autosign = caramel.scripts.autosign:main
""",
)
| ModioAB/caramel | setup.py | Python | agpl-3.0 | 2,149 |
# Copyright 2013 Daniel Narvaez
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import fnmatch
import os
import multiprocessing
import shutil
import subprocess
import logging
from osbuild import command
from osbuild import config
from osbuild import state
from osbuild import utils
from osbuild import git
_builders = {}
_distributors = {}
def build_one(module_name):
    """Build the configured module named *module_name*.

    Returns the build result, or False when no such module is configured.
    """
    match = next((m for m in config.load_modules() if m.name == module_name),
                 None)
    if match is None:
        return False
    return _build_module(match)
def pull_one(module_name):
    """Pull sources for the configured module named *module_name*.

    Returns the pull result, or False when no such module is configured.
    """
    match = next((m for m in config.load_modules() if m.name == module_name),
                 None)
    if match is None:
        return False
    return _pull_module(match)
def pull(sources=None):
    """Pull sources for every configured module.

    *sources* maps a module name to a source override handed to
    _pull_module.  Modules flagged by state.pulled_module_should_clean()
    are git-cleaned and have their source tree removed before pulling.
    Returns False on the first clean or pull failure.
    """
    # Bug fix: avoid the shared mutable default argument; behaves exactly
    # as before when the caller passes nothing.
    if sources is None:
        sources = {}

    to_pull = config.load_modules()
    if to_pull:
        print("\n= Pulling =\n")

    for module in to_pull:
        if state.pulled_module_should_clean(module):
            source_dir = module.get_source_dir()
            if os.path.exists(source_dir):
                if not _clean_module(module):
                    print("! Could not clean module, pull failed.")
                    return False

            shutil.rmtree(source_dir, ignore_errors=True)

    for module in to_pull:
        source = sources.get(module.name, {})
        if not _pull_module(module, source):
            return False

    return True
def build():
    """Build every configured module whose sources changed since its last
    successful build.  Returns False as soon as one build fails."""
    pending = [m for m in config.load_modules()
               if not state.built_module_is_unchanged(m)]
    if not pending:
        return True

    print("\n= Building =\n")

    for module in pending:
        if not _build_module(module):
            return False

    return True
def _clean_module(module):
    """Git-clean *module*'s checkout; trivially succeed when its source
    directory does not exist."""
    print("* Cleaning %s" % module.name)

    git_module = git.get_module(module)
    if not os.path.exists(module.get_source_dir()):
        return True
    return git_module.clean()
def clean_one(module_name):
    """Clean the configured module named *module_name*.

    Returns the clean result, or False when no such module is configured.
    """
    match = next((m for m in config.load_modules() if m.name == module_name),
                 None)
    if match is None:
        return False
    return _clean_module(match)
def clean(continue_on_error=True):
    """Remove the install directory and git-clean every module.

    With continue_on_error (the default) individual clean failures are
    ignored; otherwise the first failure aborts and returns False.
    """
    print("* Removing install directory")
    shutil.rmtree(config.install_dir, ignore_errors=True)

    for module in config.load_modules():
        if not _clean_module(module) and not continue_on_error:
            return False

    return True
def _unlink_libtool_files():
    """Delete libtool archive (*.la) files under the install lib directory.

    Ported from os.path.walk (Python 2 only, removed in Python 3) to
    os.walk, which exists on both and visits the same directories.
    """
    for dirpath, _dirnames, filenames in os.walk(config.lib_dir):
        for fname in fnmatch.filter(filenames, "*.la"):
            os.unlink(os.path.join(dirpath, fname))
def _pull_module(module, source=None):
    """Update *module*'s git checkout (optionally from a source override).

    Returns False when git fails; on success logs the new HEAD and records
    the pull timestamp in state.
    """
    print("* Pulling %s" % module.name)

    git_module = git.get_module(module)
    try:
        git_module.update(source)
    except subprocess.CalledProcessError:
        return False

    logging.info("{0} HEAD: {1}".format(module.name, git_module.get_head()))

    state.pulled_module_touch(module)

    return True
def _eval_option(option):
    # Evaluate a module option with only `prefix` in scope.
    # NOTE(review): eval executes arbitrary code — options_evaluated entries
    # must come from trusted module configuration only.
    return eval(option, {"prefix": config.install_dir})
def _build_autotools(module):
    """Build an autotools module: configure (if needed), make, make install,
    then strip libtool .la files from the install tree."""
    # Workaround for aclocal 1.11 (fixed in 1.12)
    aclocal_path = os.path.join(config.share_dir, "aclocal")
    utils.ensure_dir(aclocal_path)

    makefile_path = os.path.join(module.get_source_dir(), module.makefile_name)

    # Only run configure when no Makefile exists yet; prefer autogen.sh
    # over a pre-generated configure script.
    if not os.path.exists(makefile_path):
        configure = os.path.join(module.get_source_dir(), "autogen.sh")
        if not os.path.exists(configure):
            configure = os.path.join(module.get_source_dir(), "configure")

        args = [configure, "--prefix", config.install_dir]
        if not module.no_libdir:
            args.extend(["--libdir", config.lib_dir])

        args.extend(module.options)

        for option in module.options_evaluated:
            args.append(_eval_option(option))

        command.run(args)

    # Parallel make with twice the CPU count.
    jobs = multiprocessing.cpu_count() * 2
    command.run(["make", "-j", "%d" % jobs])
    command.run(["make", "install"])

    _unlink_libtool_files()

_builders["autotools"] = _build_autotools
def _build_distutils(module):
    """Install a distutils module into the osbuild prefix (the python2.7
    site-packages directory must exist before setup.py runs)."""
    site_packages = os.path.join(config.install_dir, "lib", "python2.7",
                                 "site-packages")
    utils.ensure_dir(site_packages)

    setup = os.path.join(module.get_source_dir(), "setup.py")
    command.run(["python2.7", setup, "install", "--prefix",
                 config.install_dir])

_builders["distutils"] = _build_distutils
def _build_grunt(module):
    """Fetch a grunt module's JS dependencies with volo and npm; both are
    retried to ride out transient network failures."""
    command.run(["volo", "-nostamp", "-f", "add"], retry=10)
    command.run(["npm", "install"], retry=10)

_builders["grunt"] = _build_grunt
def _build_npm(module):
    """npm-install the module globally into the osbuild prefix."""
    command.run(["npm", "install", "-g", "--prefix", config.install_dir])

_builders["npm"] = _build_npm
def _build_module(module):
    """Build one module inside its source directory via the builder
    registered for its build system.

    Returns False when the sources are missing or the build command fails;
    on success records the build timestamp in state.
    """
    print("* Building %s" % module.name)

    source_dir = module.get_source_dir()

    if not os.path.exists(source_dir):
        print("Source directory does not exist. Please pull the sources "
              "before building.")
        return False

    # Builders assume the cwd is the module's source tree.
    os.chdir(source_dir)

    try:
        if module.build_system is not None:
            _builders[module.build_system](module)
    except subprocess.CalledProcessError:
        return False

    state.built_module_touch(module)

    return True
| dnarvaez/osbuild | osbuild/build.py | Python | apache-2.0 | 5,746 |
"""
A Simple server used to show deli images.
"""
import flask
from .flask_utils import open_app
# Set the project root directory as the static folder.
app = flask.Flask(__name__, static_url_path='')
@app.route('/static/flot/<path:filename>')
def send_flot_files(filename):
    """Serve the bundled flot JS assets from static/flot."""
    return flask.send_from_directory('static/flot', filename)
@app.route('/static/css/<path:filename>')
def send_css_files(filename):
    """Serve stylesheet assets from static/css."""
    return flask.send_from_directory('static/css', filename)
def create_plot(data, url='/', template='base.html'):
    """Create web page displaying the given data and route the given URL there.

    NOTE(review): the endpoint name defaults to the view function's name
    ('server'), so calling create_plot a second time presumably collides
    with the first registration — confirm before registering multiple plots.
    """
    def server():
        return flask.render_template(template, data=data)
    app.add_url_rule(url, view_func=server)
def show():
    """Launch the app in a browser via flask_utils.open_app (blocking)."""
    open_app(app)
| tonysyu/deli | deli/app/js/main.py | Python | bsd-3-clause | 772 |
from pyparsing import ParseException
class EDTFParseException(ParseException):
    """Raised when input cannot be parsed as an EDTF expression."""
    pass
| ixc/python-edtf | edtf/parser/edtf_exceptions.py | Python | mit | 90 |
from __future__ import unicode_literals
import itertools
import json
import re
from .common import InfoExtractor, SearchInfoExtractor
from ..utils import (
compat_urllib_parse,
compat_urlparse,
clean_html,
int_or_none,
)
class YahooIE(InfoExtractor):
    IE_DESC = 'Yahoo screen and movies'
    _VALID_URL = r'(?P<url>https?://(?:screen|movies)\.yahoo\.com/.*?-(?P<id>[0-9]+)(?:-[a-z]+)?\.html)'
    _TESTS = [
        {
            'url': 'http://screen.yahoo.com/julian-smith-travis-legg-watch-214727115.html',
            'md5': '4962b075c08be8690a922ee026d05e69',
            'info_dict': {
                'id': '2d25e626-2378-391f-ada0-ddaf1417e588',
                'ext': 'mp4',
                'title': 'Julian Smith & Travis Legg Watch Julian Smith',
                'description': 'Julian and Travis watch Julian Smith',
            },
        },
        {
            'url': 'http://screen.yahoo.com/wired/codefellas-s1-ep12-cougar-lies-103000935.html',
            'md5': 'd6e6fc6e1313c608f316ddad7b82b306',
            'info_dict': {
                'id': 'd1dedf8c-d58c-38c3-8963-e899929ae0a9',
                'ext': 'mp4',
                'title': 'Codefellas - The Cougar Lies with Spanish Moss',
                'description': 'Agent Topple\'s mustache does its dirty work, and Nicole brokers a deal for peace. But why is the NSA collecting millions of Instagram brunch photos? And if your waffles have nothing to hide, what are they so worried about?',
            },
        },
        {
            'url': 'https://movies.yahoo.com/video/world-loves-spider-man-190819223.html',
            'md5': '410b7104aa9893b765bc22787a22f3d9',
            'info_dict': {
                'id': '516ed8e2-2c4f-339f-a211-7a8b49d30845',
                'ext': 'mp4',
                'title': 'The World Loves Spider-Man',
                'description': '''People all over the world are celebrating the release of \"The Amazing Spider-Man 2.\" We're taking a look at the enthusiastic response Spider-Man has received from viewers all over the world.''',
            }
        },
        {
            'url': 'https://screen.yahoo.com/community/community-sizzle-reel-203225340.html?format=embed',
            'md5': '60e8ac193d8fb71997caa8fce54c6460',
            'info_dict': {
                'id': '4fe78544-8d48-39d8-97cd-13f205d9fcdb',
                'ext': 'mp4',
                'title': "Yahoo Saves 'Community'",
                'description': 'md5:4d4145af2fd3de00cbb6c1d664105053',
            }
        },
    ]

    def _real_extract(self, url):
        """Resolve the page's long content id (from the embedded mediaItems
        JSON or, failing that, known inline JS patterns) and delegate to
        _get_info."""
        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('id')
        url = mobj.group('url')
        webpage = self._download_webpage(url, video_id)

        items_json = self._search_regex(
            r'mediaItems: ({.*?})$', webpage, 'items', flags=re.MULTILINE,
            default=None)
        if items_json is None:
            CONTENT_ID_REGEXES = [
                r'YUI\.namespace\("Media"\)\.CONTENT_ID\s*=\s*"([^"]+)"',
                r'root\.App\.Cache\.context\.videoCache\.curVideo = \{"([^"]+)"'
            ]
            long_id = self._search_regex(CONTENT_ID_REGEXES, webpage, 'content ID')
            video_id = long_id
        else:
            items = json.loads(items_json)
            info = items['mediaItems']['query']['results']['mediaObj'][0]
            # The 'meta' field is not always in the video webpage, we request it
            # from another page
            long_id = info['id']
        return self._get_info(long_id, video_id, webpage)

    def _get_info(self, long_id, video_id, webpage):
        """Query Yahoo's YQL endpoint for the stream list of *long_id* and
        build the info dict (RTMP streams keep host/play_path split)."""
        query = ('SELECT * FROM yahoo.media.video.streams WHERE id="%s"'
                 ' AND plrs="86Gj0vCaSzV_Iuf6hNylf2" AND region="US"'
                 ' AND protocol="http"' % long_id)
        data = compat_urllib_parse.urlencode({
            'q': query,
            'env': 'prod',
            'format': 'json',
        })
        query_result = self._download_json(
            'http://video.query.yahoo.com/v1/public/yql?' + data,
            video_id, 'Downloading video info')

        info = query_result['query']['results']['mediaObj'][0]
        meta = info['meta']

        formats = []
        for s in info['streams']:
            format_info = {
                'width': int_or_none(s.get('width')),
                'height': int_or_none(s.get('height')),
                'tbr': int_or_none(s.get('bitrate')),
            }

            host = s['host']
            path = s['path']
            if host.startswith('rtmp'):
                format_info.update({
                    'url': host,
                    'play_path': path,
                    'ext': 'flv',
                })
            else:
                format_url = compat_urlparse.urljoin(host, path)
                format_info['url'] = format_url
            formats.append(format_info)

        self._sort_formats(formats)

        return {
            'id': video_id,
            'title': meta['title'],
            'formats': formats,
            'description': clean_html(meta['description']),
            'thumbnail': meta['thumbnail'] if meta.get('thumbnail') else self._og_search_thumbnail(webpage),
        }
class YahooNewsIE(YahooIE):
    """Extractor for news.yahoo.com video pages.

    Only the content-id discovery differs from YahooIE; stream resolution
    is inherited via _get_info."""
    IE_NAME = 'yahoo:news'
    _VALID_URL = r'http://news\.yahoo\.com/video/.*?-(?P<id>\d*?)\.html'
    _TESTS = [{
        'url': 'http://news.yahoo.com/video/china-moses-crazy-blues-104538833.html',
        'md5': '67010fdf3a08d290e060a4dd96baa07b',
        'info_dict': {
            'id': '104538833',
            'ext': 'mp4',
            'title': 'China Moses Is Crazy About the Blues',
            'description': 'md5:9900ab8cd5808175c7b3fe55b979bed0',
        },
    }]

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('id')
        webpage = self._download_webpage(url, video_id)
        # News pages expose the long content id in an inline 'contentId' JS var.
        long_id = self._search_regex(r'contentId: \'(.+?)\',', webpage, 'long id')
        return self._get_info(long_id, video_id, webpage)
class YahooSearchIE(SearchInfoExtractor):
    """Search extractor for Yahoo Screen (``yvsearchN:query``)."""
    IE_DESC = 'Yahoo screen search'
    _MAX_RESULTS = 1000
    IE_NAME = 'screen.yahoo:search'
    _SEARCH_KEY = 'yvsearch'

    def _get_n_results(self, query, n):
        """Get a specified number of results for a query"""
        entries = []
        for pagenum in itertools.count(0):
            # Each results page holds up to 30 entries, offset by ``b``.
            page_offset = pagenum * 30
            result_url = (
                'http://video.search.yahoo.com/search/?p=%s&fr=screen&o=js&gs=0&b=%d'
                % (compat_urllib_parse.quote_plus(query), page_offset))
            info = self._download_json(
                result_url, query,
                note='Downloading results page ' + str(pagenum + 1))
            m = info['m']
            for i, result_html in enumerate(info['results']):
                if page_offset + i >= n:
                    # Requested number of results reached; stop collecting.
                    break
                mobj = re.search(r'(?P<url>screen\.yahoo\.com/.*?-\d*?\.html)"', result_html)
                entries.append(
                    self.url_result('http://' + mobj.group('url'), 'Yahoo'))
            # Stop when enough results were gathered or the last page was seen.
            if (page_offset + i >= n) or (m['last'] >= (m['total'] - 1)):
                break
        return {
            '_type': 'playlist',
            'id': query,
            'entries': entries,
        }
| riking/youtube-dl | youtube_dl/extractor/yahoo.py | Python | unlicense | 7,264 |
#!/usr/bin/env python3
# Copyright (c) 2015-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Utilities for manipulating blocks and transactions."""
import struct
import time
import unittest
from .address import (
key_to_p2sh_p2wpkh,
key_to_p2wpkh,
script_to_p2sh_p2wsh,
script_to_p2wsh,
)
from .messages import (
CBlock,
COIN,
COutPoint,
CTransaction,
CTxIn,
CTxInWitness,
CTxOut,
hash256,
ser_uint256,
tx_from_hex,
uint256_from_str,
)
from .script import (
CScript,
CScriptNum,
CScriptOp,
OP_1,
OP_CHECKMULTISIG,
OP_CHECKSIG,
OP_RETURN,
OP_TRUE,
)
from .script_util import (
key_to_p2wpkh_script,
script_to_p2wsh_script,
)
from .util import assert_equal
# Weight units per virtual byte (BIP141).
WITNESS_SCALE_FACTOR = 4
MAX_BLOCK_SIGOPS = 20000
# Sigop limit expressed in weight units.
MAX_BLOCK_SIGOPS_WEIGHT = MAX_BLOCK_SIGOPS * WITNESS_SCALE_FACTOR
# Genesis block time (regtest)
TIME_GENESIS_BLOCK = 1296688602
# Coinbase transaction outputs can only be spent after this number of new blocks (network rule)
COINBASE_MATURITY = 100
# From BIP141
WITNESS_COMMITMENT_HEADER = b"\xaa\x21\xa9\xed"
# Default getblocktemplate request parameters (segwit rule is mandatory).
NORMAL_GBT_REQUEST_PARAMS = {"rules": ["segwit"]}
# Highest block nVersion used before BIP9-style versionbits signalling.
VERSIONBITS_LAST_OLD_BLOCK_VERSION = 4
def create_block(hashprev=None, coinbase=None, ntime=None, *, version=None, tmpl=None, txlist=None):
    """Create a block (with regtest difficulty).

    Args:
        hashprev: previous block hash (int); falls back to tmpl['previousblockhash'].
        coinbase: coinbase CTransaction; generated from tmpl['height'] if omitted.
        ntime: block time; falls back to tmpl['curtime'] or now + 600s.
        version: block nVersion; falls back to tmpl['version'] or the pre-versionbits default.
        tmpl: optional getblocktemplate-style dict supplying defaults.
        txlist: extra transactions (objects or hex strings) to include.

    Returns:
        A CBlock with the merkle root and hash computed.
    """
    block = CBlock()
    if tmpl is None:
        tmpl = {}
    block.nVersion = version or tmpl.get('version') or VERSIONBITS_LAST_OLD_BLOCK_VERSION
    block.nTime = ntime or tmpl.get('curtime') or int(time.time() + 600)
    block.hashPrevBlock = hashprev or int(tmpl['previousblockhash'], 16)
    # Idiom fix: 'tmpl and not x is None' simplified; an empty/missing tmpl
    # yields None from .get() either way.
    if tmpl.get('bits') is not None:
        block.nBits = struct.unpack('>I', bytes.fromhex(tmpl['bits']))[0]
    else:
        block.nBits = 0x207fffff  # difficulty retargeting is disabled in REGTEST chainparams
    if coinbase is None:
        coinbase = create_coinbase(height=tmpl['height'])
    block.vtx.append(coinbase)
    if txlist:
        for tx in txlist:
            # Accept both transaction objects and raw hex strings.
            if not hasattr(tx, 'calc_sha256'):
                tx = tx_from_hex(tx)
            block.vtx.append(tx)
    block.hashMerkleRoot = block.calc_merkle_root()
    block.calc_sha256()
    return block
def get_witness_script(witness_root, witness_nonce):
    """Return the OP_RETURN witness-commitment output script (BIP141)."""
    commitment_hash = hash256(ser_uint256(witness_root) + ser_uint256(witness_nonce))
    witness_commitment = uint256_from_str(commitment_hash)
    # Commitment output: 4-byte header followed by the commitment hash.
    return CScript([OP_RETURN, WITNESS_COMMITMENT_HEADER + ser_uint256(witness_commitment)])
def add_witness_commitment(block, nonce=0):
    """Add a witness commitment to the block's coinbase transaction.

    According to BIP141, blocks with witness rules active must commit to the
    hash of all in-block transactions including witness."""
    # First calculate the merkle root of the block's
    # transactions, with witnesses.
    witness_nonce = nonce
    witness_root = block.calc_witness_merkle_root()
    # witness_nonce should go to coinbase witness.
    block.vtx[0].wit.vtxinwit = [CTxInWitness()]
    block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ser_uint256(witness_nonce)]
    # witness commitment is the last OP_RETURN output in coinbase
    block.vtx[0].vout.append(CTxOut(0, get_witness_script(witness_root, witness_nonce)))
    block.vtx[0].rehash()
    # The coinbase changed, so the merkle root and block hash must be recomputed.
    block.hashMerkleRoot = block.calc_merkle_root()
    block.rehash()
def script_BIP34_coinbase_height(height):
    """Return a BIP34-compliant coinbase scriptSig pushing *height*."""
    if height > 16:
        return CScript([CScriptNum(height)])
    # Small heights encode as a single opcode; append OP_1 as a dummy so the
    # scriptSig exceeds 2 bytes (bad-cb-length consensus rule).
    return CScript([CScriptOp.encode_op_n(height), OP_1])
def create_coinbase(height, pubkey=None, extra_output_script=None, fees=0, nValue=50):
    """Create a coinbase transaction.

    If pubkey is passed in, the coinbase output will be a P2PK output;
    otherwise an anyone-can-spend output.

    If extra_output_script is given, make a 0-value output to that
    script. This is useful to pad block weight/sigops as needed. """
    coinbase = CTransaction()
    # Coinbase input: null prevout, BIP34 height encoded in the scriptSig.
    coinbase.vin.append(CTxIn(COutPoint(0, 0xffffffff), script_BIP34_coinbase_height(height), 0xffffffff))
    coinbaseoutput = CTxOut()
    coinbaseoutput.nValue = nValue * COIN
    if nValue == 50:
        # Default subsidy: apply the regtest halving schedule and add fees.
        halvings = int(height / 150)  # regtest
        coinbaseoutput.nValue >>= halvings
        coinbaseoutput.nValue += fees
    if pubkey is not None:
        coinbaseoutput.scriptPubKey = CScript([pubkey, OP_CHECKSIG])
    else:
        coinbaseoutput.scriptPubKey = CScript([OP_TRUE])
    coinbase.vout = [coinbaseoutput]
    if extra_output_script is not None:
        coinbaseoutput2 = CTxOut()
        coinbaseoutput2.nValue = 0
        coinbaseoutput2.scriptPubKey = extra_output_script
        coinbase.vout.append(coinbaseoutput2)
    coinbase.calc_sha256()
    return coinbase
def create_tx_with_script(prevtx, n, script_sig=b"", *, amount, script_pub_key=CScript()):
    """Return one-input, one-output transaction object
       spending the prevtx's n-th output with the given amount.

       Can optionally pass scriptPubKey and scriptSig, default is anyone-can-spend output.
    """
    assert n < len(prevtx.vout)
    spend_tx = CTransaction()
    spend_tx.vin = [CTxIn(COutPoint(prevtx.sha256, n), script_sig, 0xffffffff)]
    spend_tx.vout = [CTxOut(amount, script_pub_key)]
    spend_tx.calc_sha256()
    return spend_tx
def create_transaction(node, txid, to_address, *, amount):
    """ Return signed transaction spending the first output of the
        input txid. Note that the node must have a wallet that can
        sign for the output that is being spent.
    """
    # Build the raw hex via the sibling helper, then deserialize it.
    return tx_from_hex(create_raw_transaction(node, txid, to_address, amount=amount))
def create_raw_transaction(node, txid, to_address, *, amount):
    """ Return raw signed transaction spending the first output of the
        input txid. Note that the node must have a wallet that can sign
        for the output that is being spent.
    """
    psbt = node.createpsbt(inputs=[{"txid": txid, "vout": 0}], outputs={to_address: amount})
    # Two signing passes over every loaded wallet — presumably so setups where
    # one wallet's signature enables another's can converge; confirm against
    # upstream history before changing.
    for _ in range(2):
        for w in node.listwallets():
            wrpc = node.get_wallet_rpc(w)
            signed_psbt = wrpc.walletprocesspsbt(psbt)
            psbt = signed_psbt['psbt']
    final_psbt = node.finalizepsbt(psbt)
    assert_equal(final_psbt["complete"], True)
    return final_psbt['hex']
def get_legacy_sigopcount_block(block, accurate=True):
    """Return the legacy (pre-segwit) sigop count over all txs in *block*.

    *accurate* is forwarded to CScript.GetSigOpCount via
    get_legacy_sigopcount_tx."""
    # Idiom: sum() over a generator instead of a manual accumulator loop.
    return sum(get_legacy_sigopcount_tx(tx, accurate) for tx in block.vtx)
def get_legacy_sigopcount_tx(tx, accurate=True):
    """Return the legacy sigop count of a single transaction."""
    total = 0
    for txout in tx.vout:
        total += txout.scriptPubKey.GetSigOpCount(accurate)
    for txin in tx.vin:
        # scriptSig might be of type bytes, so convert to CScript for the moment
        total += CScript(txin.scriptSig).GetSigOpCount(accurate)
    return total
def witness_script(use_p2wsh, pubkey):
    """Create a scriptPubKey for a pay-to-witness TxOut.

    This is either a P2WPKH output for the given pubkey, or a P2WSH output of a
    1-of-1 multisig for the given pubkey. Returns the hex encoding of the
    scriptPubKey."""
    if not use_p2wsh:
        # P2WPKH instead
        pkscript = key_to_p2wpkh_script(pubkey)
    else:
        # 1-of-1 multisig. Local renamed from 'witness_script' so it no longer
        # shadows this function's own name.
        multisig_script = CScript([OP_1, bytes.fromhex(pubkey), OP_1, OP_CHECKMULTISIG])
        pkscript = script_to_p2wsh_script(multisig_script)
    return pkscript.hex()
def create_witness_tx(node, use_p2wsh, utxo, pubkey, encode_p2sh, amount):
    """Return a transaction (in hex) that spends the given utxo to a segwit output.

    Optionally wrap the segwit output using P2SH."""
    if use_p2wsh:
        # P2WSH of a 1-of-1 multisig for the given pubkey.
        program = CScript([OP_1, bytes.fromhex(pubkey), OP_1, OP_CHECKMULTISIG])
        addr = script_to_p2sh_p2wsh(program) if encode_p2sh else script_to_p2wsh(program)
    else:
        addr = key_to_p2sh_p2wpkh(pubkey) if encode_p2sh else key_to_p2wpkh(pubkey)
    if not encode_p2sh:
        # Sanity check: the node derives the same scriptPubKey as witness_script().
        assert_equal(node.getaddressinfo(addr)['scriptPubKey'], witness_script(use_p2wsh, pubkey))
    return node.createrawtransaction([utxo], {addr: amount})
def send_to_witness(use_p2wsh, node, utxo, pubkey, encode_p2sh, amount, sign=True, insert_redeem_script=""):
    """Create a transaction spending a given utxo to a segwit output.

    The output corresponds to the given pubkey: use_p2wsh determines whether to
    use P2WPKH or P2WSH; encode_p2sh determines whether to wrap in P2SH.
    sign=True will have the given node sign the transaction.
    insert_redeem_script will be added to the scriptSig, if given."""
    tx_to_witness = create_witness_tx(node, use_p2wsh, utxo, pubkey, encode_p2sh, amount)
    if sign:
        signed = node.signrawtransactionwithwallet(tx_to_witness)
        # Bug fix: the original checked len(["errors"]) — the length of a
        # literal list, always 1 — so the assert degenerated to
        # '"errors" not in signed'. Check the RPC's actual "errors" field.
        assert "errors" not in signed or len(signed["errors"]) == 0
        return node.sendrawtransaction(signed["hex"])
    else:
        if insert_redeem_script:
            tx = tx_from_hex(tx_to_witness)
            tx.vin[0].scriptSig += CScript([bytes.fromhex(insert_redeem_script)])
            tx_to_witness = tx.serialize().hex()
        return node.sendrawtransaction(tx_to_witness)
class TestFrameworkBlockTools(unittest.TestCase):
    """Unit tests for the helpers in this module."""
    def test_create_coinbase(self):
        # The BIP34 height pushed into the coinbase scriptSig must round-trip.
        height = 20
        coinbase_tx = create_coinbase(height=height)
        assert_equal(CScriptNum.decode(coinbase_tx.vin[0].scriptSig), height)
| instagibbs/bitcoin | test/functional/test_framework/blocktools.py | Python | mit | 9,570 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-01-19 18:14
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the mturk app.

    MTurkHIT mirrors a Daemo task published as an Amazon Mechanical Turk HIT;
    MTurkAssignment tracks one worker's assignment on such a HIT and links it
    back to the corresponding crowdsourcing.TaskWorker."""
    initial = True
    dependencies = [
        ('crowdsourcing', '0067_auto_20160112_2225'),
    ]
    operations = [
        migrations.CreateModel(
            name='MTurkAssignment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('assignment_id', models.TextField(max_length=128)),
                ('worker_id', models.TextField(max_length=128)),
                # Lifecycle of the assignment on the MTurk side.
                ('status', models.IntegerField(choices=[(1, b'In Progress'), (2, b'Submitted'), (3, b'Accepted'), (4, b'Rejected'), (5, b'Returned'), (6, b'Skipped')], default=1)),
                ('created_timestamp', models.DateTimeField(auto_now_add=True)),
                ('last_updated', models.DateTimeField(auto_now=True)),
            ],
        ),
        migrations.CreateModel(
            name='MTurkHIT',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('hit_id', models.TextField(max_length=256)),
                ('hit_type_id', models.TextField(default='', max_length=256)),
                ('hit_group_id', models.TextField(default='', max_length=128)),
                ('status', models.IntegerField(choices=[(1, 'Created'), (2, 'Completed'), (3, 'Done on Daemo'), (4, 'Forked')], default=1)),
                ('created_timestamp', models.DateTimeField(auto_now_add=True)),
                ('last_updated', models.DateTimeField(auto_now=True)),
                ('task', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='mturk_hits', to='crowdsourcing.Task')),
            ],
        ),
        migrations.AddField(
            model_name='mturkassignment',
            name='hit',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='mturk_assignments', to='mturk.MTurkHIT'),
        ),
        migrations.AddField(
            model_name='mturkassignment',
            name='task_worker',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='mturk_assignments', to='crowdsourcing.TaskWorker'),
        ),
        migrations.AlterUniqueTogether(
            name='mturkhit',
            unique_together=set([('task', 'status')]),
        ),
    ]
| aginzberg/crowdsource-platform | mturk/migrations/0001_initial.py | Python | mit | 2,578 |
# -*- coding: utf-8 -*-
from django.shortcuts import render
from django.http import (HttpResponse, HttpResponseRedirect)
from django.contrib.auth.views import (login, logout)
from django.contrib.auth.forms import UserCreationForm
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import (Group, Permission, User)
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
# Create your views here.
@login_required
def settings(request):
    """Render the settings page with a blank user-creation form.

    Fix: removed a leftover py2 debug ``print`` of the user's permission set,
    which wrote to stdout on every request."""
    form = UserCreationForm()
    return render(request, 'settings.html', {
        'form': form,
    })
| creamidea/Mushroom | NodeSite/NodeSite/mushroom/views.py | Python | mit | 713 |
# Copyright (C) 2013 Joshua L. Adelman
#
# This file is part of WESTPA.
#
# WESTPA is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# WESTPA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with WESTPA. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division; __metaclass__ = type
import logging
log = logging.getLogger(__name__)
import numpy as np
import types
import westpa, west
from westpa import extloader
from westpa.yamlcfg import check_bool, ConfigItemMissing
from westext.stringmethod import WESTStringMethod, DefaultStringMethod
from westpa.binning import VoronoiBinMapper
class StringDriver(object):
    """WESTPA plugin that adaptively updates a Voronoi string of images.

    At configured intervals it averages replica positions within each
    Voronoi cell, feeds the averages to a string method, and rebuilds the
    system's bin mapper from the updated string centers.

    Fix: the bare ``except:`` in get_initial_centers now catches only
    ``Exception`` so KeyboardInterrupt/SystemExit are no longer swallowed.
    """
    def __init__(self, sim_manager, plugin_config):
        super(StringDriver, self).__init__()
        if not sim_manager.work_manager.is_master:
            # Only the master process drives string updates.
            return
        self.sim_manager = sim_manager
        self.data_manager = sim_manager.data_manager
        self.system = sim_manager.system
        # Parameters from config file
        self.windowsize = plugin_config.get('windowsize', 10)
        self.update_interval = plugin_config.get('update_interval', 10)
        self.initial_update = plugin_config.get('initial_update', 20)
        self.priority = plugin_config.get('priority', 0)
        self.write_avg_pos = check_bool(plugin_config.get('write_avgpos', True))
        self.do_update = check_bool(plugin_config.get('do_update', True))
        self.init_from_data = check_bool(plugin_config.get('init_from_data', True))
        self.dfunc = self.get_dfunc_method(plugin_config)
        # Load method to calculate average position in a bin
        # If the method is defined in an external module, correctly bind it
        ap = self.get_avgpos_method(plugin_config)
        if hasattr(ap, 'im_class'):
            # Already a bound method (py2); use as-is.
            self.get_avgpos = ap
        else:
            self.get_avgpos = types.MethodType(ap, self)
        # Get initial set of string centers
        centers = self.get_initial_centers()
        try:
            sm_params = self.system.sm_params
        except AttributeError as e:
            log.error('String Driver Error: system does not define sm_params. \
This is required and should be added to the system definition; {}'.format(e))
            raise
        # Initialize the string
        str_method = self.get_string_method(plugin_config)
        try:
            self.strings = str_method(centers, **sm_params)
        except (TypeError, AssertionError) as e:
            log.error('String Driver Error: Failed during initialization of string method: {}'.format(e))
            raise
        # Update the BinMapper
        self.update_bin_mapper()
        # Register callback
        sim_manager.register_callback(sim_manager.prepare_new_iteration, self.prepare_new_iteration, self.priority)
        westpa.rc.pstatus('-westext.stringmethod -----------------\n')
        westpa.rc.pstatus('windowsize: {}\n'.format(self.windowsize))
        westpa.rc.pstatus('update interval: {}\n'.format(self.update_interval))
        westpa.rc.pstatus('initial update: {}\n'.format(self.initial_update))
        westpa.rc.pstatus('priority: {}\n'.format(self.priority))
        westpa.rc.pstatus('write average positions: {}\n'.format(self.write_avg_pos))
        westpa.rc.pstatus('do update: {}\n'.format(self.do_update))
        westpa.rc.pstatus('initialize from WE data: {}\n'.format(self.init_from_data))
        westpa.rc.pstatus('----------------------------------------\n')
        westpa.rc.pflush()

    def dfunc(self):
        """Placeholder; replaced in __init__ by the configured dfunc_method."""
        raise NotImplementedError

    def get_avgpos(self, n_iter):
        """Placeholder; replaced in __init__ by the configured avgpos_method."""
        raise NotImplementedError

    def get_dfunc_method(self, plugin_config):
        """Load the distance function named by 'dfunc_method' in the config."""
        try:
            methodname = plugin_config['dfunc_method']
        except KeyError:
            raise ConfigItemMissing('dfunc_method')
        dfunc_method = extloader.get_object(methodname)
        log.info('loaded stringmethod dfunc method {!r}'.format(dfunc_method))
        return dfunc_method

    def get_avgpos_method(self, plugin_config):
        """Load the bin-averaging function named by 'avgpos_method'."""
        try:
            methodname = plugin_config['avgpos_method']
        except KeyError:
            raise ConfigItemMissing('avgpos_method')
        if methodname.lower() == 'cartesian':
            avgpos_method = self.avgpos_cartesian
        else:
            avgpos_method = extloader.get_object(methodname)
        log.info('loaded stringmethod avgpos method {!r}'.format(avgpos_method))
        return avgpos_method

    def get_string_method(self, plugin_config):
        """Load the WESTStringMethod subclass named by 'string_method'."""
        try:
            methodname = plugin_config['string_method']
        except KeyError:
            raise ConfigItemMissing('string_method')
        if methodname.lower() == 'default':
            str_method = DefaultStringMethod
        else:
            str_method = extloader.get_object(methodname)
        assert issubclass(str_method, WESTStringMethod)
        log.debug('loaded stringmethod string method {!r}'.format(str_method))
        return str_method

    def get_initial_centers(self):
        """Return initial string centers, preferring those stored in the data file."""
        self.data_manager.open_backing()
        with self.data_manager.lock:
            n_iter = max(self.data_manager.current_iteration - 1, 1)
            iter_group = self.data_manager.get_iter_group(n_iter)
            # First attempt to initialize string from data rather than system
            centers = None
            if self.init_from_data:
                log.info('Attempting to initialize stringmethod from data')
                try:
                    binhash = iter_group.attrs['binhash']
                    bin_mapper = self.data_manager.get_bin_mapper(binhash)
                    centers = bin_mapper.centers
                except Exception:
                    # Was a bare except; narrowed so Ctrl-C still propagates.
                    log.warning('Initializing string centers from data failed; Using definition in system instead.')
                    centers = self.system.bin_mapper.centers
            else:
                log.info('Initializing string centers from system definition')
                centers = self.system.bin_mapper.centers
        self.data_manager.close_backing()
        return centers

    def update_bin_mapper(self):
        '''Update the bin_mapper using the current string'''
        westpa.rc.pstatus('westext.stringmethod: Updating bin mapper\n')
        westpa.rc.pflush()
        try:
            dfargs = getattr(self.system, 'dfargs', None)
            dfkwargs = getattr(self.system, 'dfkwargs', None)
            self.system.bin_mapper = VoronoiBinMapper(self.dfunc, self.strings.centers,
                                                      dfargs=dfargs,
                                                      dfkwargs=dfkwargs)
        except (ValueError, TypeError) as e:
            log.error('StringDriver Error: Failed updating the bin mapper: {}'.format(e))
            raise

    def avgpos_cartesian(self, n_iter):
        '''Get average position of replicas in each bin as of n_iter for the
        the user selected update interval'''
        nbins = self.system.bin_mapper.nbins
        ndim = self.system.pcoord_ndim
        avg_pos = np.zeros((nbins, ndim), dtype=self.system.pcoord_dtype)
        sum_bin_weight = np.zeros((nbins,), dtype=self.system.pcoord_dtype)
        start_iter = max(n_iter - min(self.windowsize, n_iter), 2)
        stop_iter = n_iter + 1
        for n in xrange(start_iter, stop_iter):
            with self.data_manager.lock:
                iter_group = self.data_manager.get_iter_group(n)
                seg_index = iter_group['seg_index'][...]
                pcoords = iter_group['pcoord'][:,-1,:]  # Only read final point
                bin_indices = self.system.bin_mapper.assign(pcoords)
                weights = seg_index['weight']
                # Weight each replica's position before accumulating per bin.
                pcoord_w = pcoords * weights[:,np.newaxis]
                uniq_indices = np.unique(bin_indices)
                for indx in uniq_indices:
                    avg_pos[indx,:] += pcoord_w[bin_indices == indx].sum(axis=0)
                sum_bin_weight += np.bincount(bin_indices.astype(np.int), weights=weights, minlength=nbins)
        # Some bins might have zero samples so exclude to avoid divide by zero
        occ_ind = np.nonzero(sum_bin_weight)
        avg_pos[occ_ind] /= sum_bin_weight[occ_ind][:,np.newaxis]
        return avg_pos, sum_bin_weight

    def prepare_new_iteration(self):
        """Callback: possibly update the string and bin mapper for this iteration."""
        n_iter = self.sim_manager.n_iter
        with self.data_manager.lock:
            iter_group = self.data_manager.get_iter_group(n_iter)
            try:
                del iter_group['stringmethod']
            except KeyError:
                pass
            sm_global_group = self.data_manager.we_h5file.require_group('stringmethod')
            last_update = long(sm_global_group.attrs.get('last_update', 0))
        # Skip unless enough iterations have elapsed and updates are enabled.
        if n_iter - last_update < self.update_interval or n_iter < self.initial_update or not self.do_update:
            log.debug('Not updating string this iteration')
            return
        else:
            log.debug('Updating string - n_iter: {}'.format(n_iter))
        westpa.rc.pstatus('-westext.stringmethod -----------------\n')
        westpa.rc.pstatus('westext.stringmethod: Calculating average position in string images\n')
        westpa.rc.pflush()
        avg_pos, sum_bin_weight = self.get_avgpos(n_iter)
        westpa.rc.pstatus('westext.stringmethod: Updating string\n')
        westpa.rc.pflush()
        self.strings.update_string_centers(avg_pos, sum_bin_weight)
        westpa.rc.pstatus('westext.stringmethod: String lengths: {}\n'.format(self.strings.length))
        westpa.rc.pflush()
        # Update the bin definitions
        self.update_bin_mapper()
        sm_global_group.attrs['last_update'] = n_iter
| KarlTDebiec/westpa | src/westext/stringmethod/string_driver.py | Python | gpl-3.0 | 10,149 |
#-*- coding: utf-8 -*-
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib import auth
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from oauth2app.models import Client, AccessRange
from .forms import SignupForm, LoginForm, CreateClientForm, ClientRemoveForm
@login_required
def clients(request):
    """List the current user's OAuth2 clients and handle create/remove POSTs.

    NOTE(review): after a POST, ``form`` is always re-created empty below,
    so validation errors from an invalid CreateClientForm are never shown
    to the user — confirm this is intentional.
    """
    if request.method == "POST":
        form = CreateClientForm(request.POST)
        remove_form = ClientRemoveForm(request.POST)
        if form.is_valid():
            Client.objects.create(
                name=form.cleaned_data["name"],
                user=request.user)
        elif remove_form.is_valid():
            # Same POST endpoint doubles as the remove handler.
            Client.objects.filter(
                id=remove_form.cleaned_data["client_id"]).delete()
        form = CreateClientForm()
    else:
        form = CreateClientForm()
    template = {
        "form":form,
        "clients":Client.objects.filter(user=request.user)}
    return render_to_response(
        'account/clients.html',
        template,
        RequestContext(request))
def login(request):
    """Render the login form; authenticate and redirect to '/' on success.

    Fix: ``auth.authenticate`` returns ``None`` for bad credentials, and the
    original passed that straight to ``auth.login``, crashing the view. Now
    the form is simply re-rendered when authentication fails."""
    if request.method == "POST":
        form = LoginForm(request.POST)
        if form.is_valid():
            user = auth.authenticate(
                username=form.cleaned_data["username"],
                password=form.cleaned_data["password"])
            if user is not None:
                auth.login(request, user)
                return HttpResponseRedirect("/")
    else:
        form = LoginForm()
    template = {"form":form}
    return render_to_response(
        'account/login.html',
        template,
        RequestContext(request))
@login_required
def logout(request):
    """Log the current user out and render the logged-out page."""
    auth.logout(request)
    context = RequestContext(request)
    return render_to_response('account/logout.html', {}, context)
def signup(request):
    """Render the signup form; on valid POST create the user, log them in,
    and redirect to the site root."""
    if request.method == "POST":
        form = SignupForm(request.POST)
        if form.is_valid():
            user = User.objects.create_user(
                form.cleaned_data["username"],
                form.cleaned_data["email"],
                form.cleaned_data["password1"],)
            # Re-authenticate with the just-created credentials so the auth
            # backend is attached to the user object before login.
            user = auth.authenticate(
                username=form.cleaned_data["username"],
                password=form.cleaned_data["password1"])
            auth.login(request, user)
            return HttpResponseRedirect("/")
    else:
        form = SignupForm()
    template = {"form":form}
    return render_to_response(
        'account/signup.html',
        template,
        RequestContext(request))
| hiidef/oauth2app | examples/mysite/apps/account/views.py | Python | mit | 2,659 |
#!/usr/bin/env python
'''
Project Euler
Problem 004 - Largest palindrome product
Haoliang Wang
01/27/2015
'''
'''
Description:
Find the largest palindrome made from the product of two 3-digit numbers.
Solution:
906609
'''
def is_palindrome(n):
    """Return True if n's decimal digits read the same forwards and backwards."""
    digits = str(n)
    return digits == ''.join(reversed(digits))
# Scan all 3-digit factor pairs in descending order, keeping the largest
# palindromic product seen so far in s.
s = 0
for i in reversed(xrange(100, 1000)):
    for j in reversed(xrange(100, 1000)):
        n = i * j
        if n < s:
            # Products only shrink as j decreases; abandon the inner loop.
            break
        elif is_palindrome(n):
            s = n if n > s else s
print s
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import rospy
from pnp_plugin_server.pnp_simple_plugin_server import PNPSimplePluginServer
from pnp_msgs.msg import ActionResult
from pepper_route_description.msg import RouteDescriptionAction, RouteDescriptionResult
from pymongo import MongoClient
from std_msgs.msg import String
import tf
from geometry_msgs.msg import PoseStamped
from nao_interaction_msgs.srv import TrackerPointAt, TrackerPointAtRequest
from nao_interaction_msgs.srv import TrackerLookAt, TrackerLookAtRequest
from nao_interaction_msgs.srv import SetBreathEnabled, SetBreathEnabledRequest
from nao_interaction_msgs.srv import Say, SayRequest
from nav_msgs.msg import Odometry
from move_base_msgs.msg import MoveBaseAction, MoveBaseGoal
from actionlib import SimpleActionClient
from geometry_msgs.msg import Twist
from pepper_move_base.msg import TrackPersonAction, TrackPersonGoal
from nao_interaction_msgs.srv import GoToPosture, GoToPostureRequest
from naoqi_bridge_msgs.msg import JointAnglesWithSpeed
import threading
class ServiceThread(threading.Thread):
    """Thread that calls a ROS service exactly once, retrying every second
    until the service becomes available (or ROS shuts down)."""
    def __init__(self, srv_name, srv_type, srv_req):
        super(ServiceThread, self).__init__()
        self.srv_name = srv_name  # fully-qualified service name
        self.srv_type = srv_type  # service class (request/response type)
        self.srv_req = srv_req    # request instance to send
    def run(self):
        while not rospy.is_shutdown():
            try:
                s = rospy.ServiceProxy(
                    self.srv_name,
                    self.srv_type
                )
                s.wait_for_service(timeout=1.)
            except rospy.ROSException:
                rospy.logwarn("Could not communicate with '%s' service. Retrying in 1 second." % self.srv_name)
                rospy.sleep(1.)
            else:
                # Service is up: issue the request once and end the thread.
                s(self.srv_req)
                break
class DescribeRoute(object):
BASE_LINK = "base_link"
def __init__(self, name):
rospy.loginfo("Starting %s ..." % name)
self.pub = rospy.Publisher("/cmd_vel", Twist, queue_size=10)
self._as = PNPSimplePluginServer(
name,
RouteDescriptionAction,
execute_cb=self.execute_cb,
auto_start=False
)
self.listener = tf.TransformListener()
client = MongoClient(
rospy.get_param("~db_host", "localhost"),
int(rospy.get_param("~db_port", 62345))
)
rospy.loginfo("Creating move_base client")
self.move_client = SimpleActionClient("/move_base", MoveBaseAction)
self.move_client.wait_for_server()
rospy.loginfo("Move_base client connected")
rospy.loginfo("Creating tracker client")
self.start_client = SimpleActionClient("/start_tracking_person", TrackPersonAction)
self.start_client.wait_for_server()
self.stop_client = SimpleActionClient("/stop_tracking_person", TrackPersonAction)
self.stop_client.wait_for_server()
rospy.loginfo("Tracker client connected")
self.db_name = rospy.get_param("~db_name", "semantic_map")
self.collection_name = rospy.get_param("~collection_name", "idea_park")
self.semantic_map_name = rospy.get_param("~semantic_map_name")
self._as.start()
self.db = client[self.db_name]
self.tts = rospy.Publisher("/speech", String, queue_size=1)
self.joints = rospy.Publisher("/joint_angles", JointAnglesWithSpeed, queue_size=10)
rospy.loginfo("... done")
def stand(self):
print "STANDING"
self.__call_service(
"/naoqi_driver/robot_posture/go_to_posture",
GoToPosture,
GoToPostureRequest(GoToPostureRequest.STAND_INIT, 0.5)
)
j = JointAnglesWithSpeed()
j.joint_names = ['HeadYaw', 'HeadPitch']
j.joint_angles = [0.,-.5]
j.speed = .05
self.joints.publish(j)
def __call_service(self, srv_name, srv_type, req):
while not rospy.is_shutdown():
try:
s = rospy.ServiceProxy(
srv_name,
srv_type
)
s.wait_for_service(timeout=1.)
except rospy.ROSException, rospy.ServiceException:
rospy.logwarn("Could not communicate with '%s' service. Retrying in 1 second." % srv_name)
rospy.sleep(1.)
else:
return s(req)
def execute_cb(self, goal):
self.stop_client.send_goal(TrackPersonGoal())
shop_id = goal.shop_id.split('_')[1]
result = self.db[self.collection_name].find_one(
{
"shop_id": shop_id,
"semantic_map_name": self.semantic_map_name
}
)
rospy.loginfo("Waiting for current pose from odometry.")
o = rospy.wait_for_message("/naoqi_driver_node/odom", Odometry)
current_pose = PoseStamped()
current_pose.header = o.header
current_pose.pose = o.pose.pose
rospy.loginfo("Received pose.")
loc = PoseStamped()
loc.header.frame_id = "semantic_map"
loc.pose.position.x = float(result["loc_x"])
loc.pose.position.y = float(result["loc_y"])
target = self.transform_pose(DescribeRoute.BASE_LINK, loc)
t = TrackerPointAtRequest()
t.effector = t.EFFECTOR_RARM
t.frame = t.FRAME_TORSO
t.max_speed_fraction = 0.5
t.target.x = target.pose.position.x
t.target.y = target.pose.position.y
s1 = ServiceThread("/naoqi_driver/tracker/point_at", TrackerPointAt, t)
self.set_breathing(False)
s1.start()
s1.join(timeout=1.0)
target = self.transform_pose(DescribeRoute.BASE_LINK, loc)
t = TrackerLookAtRequest()
t.use_whole_body = False
t.frame = t.FRAME_TORSO
t.max_speed_fraction = 0.6
t.target.x = target.pose.position.x
t.target.y = target.pose.position.y
s2 = ServiceThread("/naoqi_driver/tracker/look_at", TrackerLookAt, t)
s2.start()
s1.join()
s2.join()
self.pub.publish(Twist())
current_pose = self.transform_pose(DescribeRoute.BASE_LINK, current_pose)
current_pose.pose.position.x = 0.
current_pose.pose.position.y = 0.
current_pose.pose.position.z = 0.
self.pub.publish(Twist())
self.__call_service("/naoqi_driver/tts/say", Say, SayRequest(result["directions"]%result["shopName"]))
self.set_breathing(True)
g = MoveBaseGoal()
g.target_pose = current_pose
self.move_client.send_goal_and_wait(g)
self.stand()
self.start_client.send_goal(TrackPersonGoal())
res = RouteDescriptionResult()
res.result.append(ActionResult(cond="described_route__%s__%s" % (goal.shop_id,goal.waypoint), truth_value=True))
res.result.append(ActionResult(cond="finished_description__%s__%s" % (goal.shop_id,goal.waypoint), truth_value=False))
self._as.set_succeeded(res)
    def transform_pose(self, target_frame, pose):
        """Transform a stamped pose into ``target_frame`` using tf.

        Retries until the transform becomes available and returns the
        transformed ``PoseStamped``.  Implicitly returns ``None`` if the
        node shuts down or the action server is preempted before tf
        succeeds -- callers should be prepared for that.
        """
        while not rospy.is_shutdown() and not self._as.is_preempt_requested():
            try:
                # Block (up to 1s) until the transform is published, then
                # re-stamp the pose with the latest common time so the
                # lookup is not extrapolated into the future.
                self.listener.waitForTransform(target_frame, pose.header.frame_id, rospy.Time.now(), rospy.Duration(1.))
                t = self.listener.getLatestCommonTime(target_frame, pose.header.frame_id)
                pose.header.stamp = t
                return self.listener.transformPose(target_frame, pose)
            except (tf.Exception, tf.LookupException, tf.ConnectivityException) as ex:
                # Transform not available yet; log quietly and retry.
                rospy.logdebug(ex)
    def set_breathing(self, flag):
        """Enable or disable the robot's idle arm "breathing" animation.

        :param flag: ``True`` to enable breathing on the arms, ``False``
            to disable it (e.g. while pointing, so the arm holds still).
        """
        self.__call_service(
            "/naoqi_driver/motion/set_breath_enabled",
            SetBreathEnabled,
            SetBreathEnabledRequest(SetBreathEnabledRequest.ARMS, flag)
        )
if __name__ == "__main__":
    # Start the action-server node and process callbacks until shutdown.
    rospy.init_node("describe_route")
    DescribeRoute(rospy.get_name())
    rospy.spin()
| cdondrup/pepper_planning | pepper_route_description/scripts/describe_route.py | Python | mit | 7,996 |
# -*- coding: utf-8 -*-
#
# CommonsDownloader documentation build configuration file, created by
# sphinx-quickstart on Sat Jan 17 20:09:34 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
current_dir = os.path.abspath(os.path.dirname(__file__))
sys.path.extend([os.path.join(current_dir, '..'),
os.path.join(current_dir, '..', 'tests')])
# Pull the release number from the package itself; fall back to a
# placeholder when the package cannot be imported (e.g. docs-only builds).
try:
    import commonsdownloader
except ImportError:
    package_version = 'Undefined'
else:
    package_version = commonsdownloader.__version__
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.coverage',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'CommonsDownloader'
copyright = u'2013-2015, Jean-Frédéric'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = str(package_version)
# The full version, including alpha/beta/rc tags.
release = str(package_version)
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'CommonsDownloaderdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'CommonsDownloader.tex', u'CommonsDownloader Documentation',
u'Jean-Frédéric', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'commonsdownloader', u'CommonsDownloader Documentation',
[u'Jean-Frédéric'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'CommonsDownloader', u'CommonsDownloader Documentation',
u'Jean-Frédéric', 'CommonsDownloader', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| Commonists/CommonsDownloader | docs/conf.py | Python | mit | 8,607 |
# -*- coding: utf-8 -*-
# Copyright © 2017 Kevin Thibedeau
# Distributed under the terms of the MIT license
import os
import math
def rounded_corner(start, apex, end, rad):
    """Compute a rounded (fillet) corner for two segments meeting at apex.

    start, apex, end : (x, y) points; the corner being rounded is at apex.
    rad : requested fillet radius.

    Returns (center, start_p, end_p, radius) where center is the arc
    center, start_p/end_p are the tangent points on each leg, and radius
    is the possibly-reduced fillet radius.  When the two segments are
    nearly colinear the fillet is skipped and (apex, apex, apex, -1) is
    returned.
    """
    # Translate all points with apex at origin
    start = (start[0] - apex[0], start[1] - apex[1])
    end = (end[0] - apex[0], end[1] - apex[1])

    # Get angles of each line segment (normalized to [0, 360) in radians)
    enter_a = math.atan2(start[1], start[0]) % math.radians(360)
    leave_a = math.atan2(end[1], end[0]) % math.radians(360)
    #print('## enter, leave', math.degrees(enter_a), math.degrees(leave_a))

    # Determine bisector angle (smallest angle between the two legs)
    ea2 = abs(enter_a - leave_a)
    if ea2 > math.radians(180):
        ea2 = math.radians(360) - ea2
    bisect = ea2 / 2.0

    if bisect > math.radians(82): # Nearly colinear: Skip radius
        return (apex, apex, apex, -1)

    # q is the distance from the apex to each tangent point along a leg
    q = rad * math.sin(math.radians(90) - bisect) / math.sin(bisect)

    # Check that q is no more than half the shortest leg
    enter_leg = math.sqrt(start[0]**2 + start[1]**2)
    leave_leg = math.sqrt(end[0]**2 + end[1]**2)
    short_leg = min(enter_leg, leave_leg)
    if q > short_leg / 2:
        q = short_leg / 2
        # Compute new (reduced) radius to keep the tangent points on the legs
        rad = q * math.sin(bisect) / math.sin(math.radians(90) - bisect)

    # Distance from apex to the arc center along the bisector
    h = math.sqrt(q**2 + rad**2)

    # Center of circle:
    # determine which rotational direction reaches the leave point soonest,
    # then point the bisector into the corner accordingly.
    delta = (leave_a - enter_a) % math.radians(360)
    if delta < math.radians(180): # CW
        bisect = enter_a + bisect
    else: # CCW
        bisect = enter_a - bisect
    #print('## Bisect2', math.degrees(bisect))
    center = (h * math.cos(bisect) + apex[0], h * math.sin(bisect) + apex[1])

    # Find start and end point of arcs (tangent points, back in world coords)
    start_p = (q * math.cos(enter_a) + apex[0], q * math.sin(enter_a) + apex[1])
    end_p = (q * math.cos(leave_a) + apex[0], q * math.sin(leave_a) + apex[1])

    return (center, start_p, end_p, rad)
def rotate_bbox(box, a):
    """Rotate a bounding box 4-tuple by an angle in degrees and return the
    axis-aligned extent of the rotated corners."""
    x0, y0, x1, y1 = box
    theta = -math.radians(a)
    sin_t = math.sin(theta)
    cos_t = math.cos(theta)

    # Rotate each corner of the box about the origin.
    rotated = [
        (px * cos_t + py * sin_t, -px * sin_t + py * cos_t)
        for px, py in ((x0, y0), (x0, y1), (x1, y1), (x1, y0))
    ]

    # The new box is the min/max envelope of the rotated corners.
    xs, ys = zip(*rotated)
    #print('## RBB:', box, rotated)
    return (min(xs), min(ys), max(xs), max(ys))
class BaseSurface(object):
    """Abstract rendering target for a drawing.

    Concrete backends (e.g. Cairo, SVG) subclass this and implement
    render() and text_bbox().
    """

    def __init__(self, fname, def_styles, padding=0, scale=1.0):
        self.fname = fname
        self.def_styles = def_styles
        self.scale = scale
        self.padding = padding
        self.markers = {}
        self.shape_drawers = {}
        self.draw_bbox = False

    def add_shape_class(self, sclass, drawer):
        """Register a draw callback for a custom shape class."""
        self.shape_drawers[sclass] = drawer

    def render(self, canvas, transparent=False):
        """Render a canvas to the output; implemented by subclasses."""
        pass

    def text_bbox(self, text, font_params, spacing):
        """Compute a text bounding box; implemented by subclasses."""
        pass
#################################
## NuCANVAS objects
#################################
class DrawStyle(object):
    """Default drawing attributes used when a shape omits an option."""

    def __init__(self):
        # Stroke defaults
        self.weight = 1
        self.line_color = (0, 0, 255)
        self.line_cap = 'butt'
        # Fill default (no fill)
        self.fill = None
        # Text defaults
        self.text_color = (0, 0, 0)
        self.font = ('Helvetica', 12, 'normal')
        self.anchor = 'center'
class BaseShape(object):
def __init__(self, options, **kwargs):
self.options = {} if options is None else options
self.options.update(kwargs)
self._bbox = [0,0,1,1]
self.tags = set()
@property
def points(self):
return tuple(self._bbox)
@property
def bbox(self):
if 'weight' in self.options:
w = self.options['weight'] / 2.0
else:
w = 0
x0 = min(self._bbox[0], self._bbox[2])
x1 = max(self._bbox[0], self._bbox[2])
y0 = min(self._bbox[1], self._bbox[3])
y1 = max(self._bbox[1], self._bbox[3])
x0 -= w
x1 += w
y0 -= w
y1 += w
return (x0,y0,x1,y1)
@property
def width(self):
x0, _, x1, _ = self.bbox
return x1 - x0
@property
def height(self):
_, y0, _, y1 = self.bbox
return y1 - y0
@property
def size(self):
x0, y1, x1, y1 = self.bbox
return (x1-x0, y1-y0)
def param(self, name, def_styles=None):
if name in self.options:
return self.options[name]
elif def_styles is not None:
return getattr(def_styles, name)
else:
return None
def is_tagged(self, item):
return item in self.tags
def update_tags(self):
if 'tags' in self.options:
self.tags = self.tags.union(self.options['tags'])
del self.options['tags']
def move(self, dx, dy):
if self._bbox is not None:
self._bbox[0] += dx
self._bbox[1] += dy
self._bbox[2] += dx
self._bbox[3] += dy
def dtag(self, tag=None):
if tag is None:
self.tags.clear()
else:
self.tags.discard(tag)
def addtag(self, tag=None):
if tag is not None:
self.tags.add(tag)
def draw(self, c):
pass
def make_group(self):
'''Convert a shape into a group'''
parent = self.options['parent']
# Walk up the parent hierarchy until we find a GroupShape with a surface ref
p = parent
while not isinstance(p, GroupShape):
p = p.options['parent']
surf = p.surf
g = GroupShape(surf, 0,0, {'parent': parent})
# Add this shape as a child of the new group
g.shapes.append(self)
self.options['parent'] = g
# Replace this shape in the parent's child list
parent.shapes = [c if c is not self else g for c in parent.shapes]
return g
class GroupShape(BaseShape):
    """A container shape; children are positioned relative to the group and
    share its translation, optional 'scale', and optional 'angle'."""

    def __init__(self, surf, x0, y0, options, **kwargs):
        BaseShape.__init__(self, options, **kwargs)
        self._pos = (x0, y0)
        self._bbox = None  # Memoized; None means "recompute on next access"
        self.shapes = []
        self.surf = surf  # Needed for TextShape to get font metrics
        self.update_tags()

    def ungroup(self):
        """Dissolve this group, transferring its children to the parent."""
        # FIX: the original read self.parent, an attribute that is never
        # assigned (the assignment was commented out in __init__), so
        # ungroup() always raised AttributeError.  The parent is kept in
        # the options dict by create_group()/create_shape().
        parent = self.options.get('parent')
        if parent is None:
            return  # Can't ungroup top level canvas group

        x, y = self._pos
        for s in self.shapes:
            # Bake this group's translation into each child
            s.move(x, y)
            if isinstance(s, GroupShape):
                s.options['parent'] = parent

        # Transfer group children to our parent, removing this group
        pshapes = parent.shapes
        pos = pshapes.index(self)
        parent.shapes = pshapes[:pos] + self.shapes + pshapes[pos+1:]

    def ungroup_all(self):
        """Recursively dissolve all subgroups, then this group itself."""
        for s in self.shapes:
            if isinstance(s, GroupShape):
                s.ungroup_all()
        self.ungroup()

    def move(self, dx, dy):
        BaseShape.move(self, dx, dy)
        self._pos = (self._pos[0] + dx, self._pos[1] + dy)

    def create_shape(self, sclass, x0, y0, x1, y1, **options):
        """Instantiate sclass(x0, y0, x1, y1) as a child of this group."""
        options['parent'] = self
        shape = sclass(x0, y0, x1, y1, options)
        self.shapes.append(shape)
        self._bbox = None  # Invalidate memoized box
        return shape

    def create_group(self, x0, y0, **options):
        options['parent'] = self
        shape = GroupShape(self.surf, x0, y0, options)
        self.shapes.append(shape)
        self._bbox = None  # Invalidate memoized box
        return shape

    def create_group2(self, sclass, x0, y0, **options):
        """Like create_group() but with a caller-supplied group subclass."""
        options['parent'] = self
        shape = sclass(self.surf, x0, y0, options)
        self.shapes.append(shape)
        self._bbox = None  # Invalidate memoized box
        return shape

    def create_arc(self, x0, y0, x1, y1, **options):
        return self.create_shape(ArcShape, x0, y0, x1, y1, **options)

    def create_line(self, x0, y0, x1, y1, **options):
        return self.create_shape(LineShape, x0, y0, x1, y1, **options)

    def create_oval(self, x0, y0, x1, y1, **options):
        return self.create_shape(OvalShape, x0, y0, x1, y1, **options)

    def create_rectangle(self, x0, y0, x1, y1, **options):
        return self.create_shape(RectShape, x0, y0, x1, y1, **options)

    def create_text(self, x0, y0, **options):
        # Must set default font now so we can use its metrics to get bounding box
        if 'font' not in options:
            options['font'] = self.surf.def_styles.font

        shape = TextShape(x0, y0, self.surf, options)
        self.shapes.append(shape)
        self._bbox = None  # Invalidate memoized box

        # Add a unique tag to serve as an ID.
        # FIX: was str(TextShape.next_text_id); next_text_id is an instance
        # property, so class-level access returned the property object and
        # produced a garbage tag.  Instance access returns (and advances)
        # the numeric counter.
        id_tag = 'id' + str(shape.next_text_id)
        shape.tags.add(id_tag)
        return shape

    def create_path(self, nodes, **options):
        shape = PathShape(nodes, options)
        self.shapes.append(shape)
        self._bbox = None  # Invalidate memoized box
        return shape

    @property
    def bbox(self):
        """Memoized union of child boxes with scale/rotation/translation applied."""
        if self._bbox is None:
            bx0 = 0
            bx1 = 0
            by0 = 0
            by1 = 0

            boxes = [s.bbox for s in self.shapes]
            boxes = list(zip(*boxes))
            if len(boxes) > 0:
                bx0 = min(boxes[0])
                by0 = min(boxes[1])
                bx1 = max(boxes[2])
                by1 = max(boxes[3])

            if 'scale' in self.options:
                sx = sy = self.options['scale']
                bx0 *= sx
                by0 *= sy
                bx1 *= sx
                by1 *= sy

            if 'angle' in self.options:
                bx0, by0, bx1, by1 = rotate_bbox((bx0, by0, bx1, by1), self.options['angle'])

            tx, ty = self._pos
            self._bbox = [bx0+tx, by0+ty, bx1+tx, by1+ty]

        return self._bbox

    def dump_shapes(self, indent=0):
        """Debug helper: print the shape tree, one shape per line."""
        print('{}{}'.format(' '*indent, repr(self)))

        indent += 1
        for s in self.shapes:
            if isinstance(s, GroupShape):
                s.dump_shapes(indent)
            else:
                print('{}{}'.format(' '*indent, repr(s)))
class LineShape(BaseShape):
    """A straight line segment from (x0, y0) to (x1, y1)."""

    def __init__(self, x0, y0, x1, y1, options=None, **kwargs):
        super(LineShape, self).__init__(options, **kwargs)
        self._bbox = [x0, y0, x1, y1]
        self.update_tags()
class RectShape(BaseShape):
    """An axis-aligned rectangle spanning (x0, y0) to (x1, y1)."""

    def __init__(self, x0, y0, x1, y1, options=None, **kwargs):
        super(RectShape, self).__init__(options, **kwargs)
        self._bbox = [x0, y0, x1, y1]
        self.update_tags()
class OvalShape(BaseShape):
    """An ellipse inscribed in the box (x0, y0) to (x1, y1)."""

    def __init__(self, x0, y0, x1, y1, options=None, **kwargs):
        super(OvalShape, self).__init__(options, **kwargs)
        self._bbox = [x0, y0, x1, y1]
        self.update_tags()
class ArcShape(BaseShape):
    """An elliptical arc inscribed in the box (x0, y0)-(x1, y1).

    Requires 'start' and 'extent' options (degrees, CCW positive).
    The 'closed' option defaults to False (open arc).
    """

    def __init__(self, x0, y0, x1, y1, options=None, **kwargs):
        # FIX: guard against the default options=None, which previously
        # raised "TypeError: argument of type 'NoneType' is not iterable"
        # on the membership test below.
        if options is None:
            options = {}
        if 'closed' not in options:
            options['closed'] = False
        BaseShape.__init__(self, options, **kwargs)
        self._bbox = [x0, y0, x1, y1]
        self.update_tags()

    @property
    def bbox(self):
        """Tight bounding box of the arc segment, padded by stroke weight."""
        lw = self.param('weight')
        if lw is None:
            lw = 0
        lw /= 2.0

        # Calculate bounding box for arc segment
        x0, y0, x1, y1 = self.points
        xc = (x0 + x1) / 2.0
        yc = (y0 + y1) / 2.0
        hw = abs(x1 - x0) / 2.0
        hh = abs(y1 - y0) / 2.0

        start = self.options['start'] % 360
        extent = self.options['extent']
        stop = (start + extent) % 360

        if extent < 0:
            start, stop = stop, start  # Swap points so we can rotate CCW

        if stop < start:
            stop += 360  # Make stop greater than start

        angles = [start, stop]

        # Find the extrema of the circle included in the arc
        # (every multiple of 90 degrees swept by the arc)
        ortho = (start // 90) * 90 + 90
        while ortho < stop:
            angles.append(ortho)
            ortho += 90  # Rotate CCW

        # Convert all extrema points to cartesian (y negated: screen coords)
        points = [(hw * math.cos(math.radians(a)), -hh * math.sin(math.radians(a))) for a in angles]

        points = list(zip(*points))
        x0 = min(points[0]) + xc - lw
        y0 = min(points[1]) + yc - lw
        x1 = max(points[0]) + xc + lw
        y1 = max(points[1]) + yc + lw

        # NOTE(review): 'weight' is applied twice here -- once via lw above
        # and again below -- preserved as-is pending the existing FIXME.
        if 'weight' in self.options:
            w = self.options['weight'] / 2.0
            # FIXME: This doesn't properly compensate for the true extrema of the stroked outline
            x0 -= w
            x1 += w
            y0 -= w
            y1 += w

        #print('@@ ARC BB:', (bx0,by0,bx1,by1), hw, hh, angles, start, extent)
        return (x0, y0, x1, y1)
class PathShape(BaseShape):
    """An arbitrary path built from point, cubic-spline, and arc nodes."""

    def __init__(self, nodes, options=None, **kwargs):
        BaseShape.__init__(self, options, **kwargs)
        self.nodes = nodes
        self.update_tags()

    @property
    def bbox(self):
        """Bounding box over the candidate extreme points of every node.

        Node encoding by tuple length: 2 = point, 6 = cubic spline
        (three control pairs), 5 = arc (two point pairs + radius).
        """
        extrema = []
        for node in self.nodes:
            if len(node) == 2:
                extrema.append(node)
            elif len(node) == 6:  # FIXME: Compute tighter extrema of spline
                extrema.extend((node[0:2], node[2:4], node[4:6]))
            elif len(node) == 5:  # Arc
                extrema.extend((node[0:2], node[2:4]))

        xs, ys = zip(*extrema)
        x0, y0, x1, y1 = min(xs), min(ys), max(xs), max(ys)

        if 'weight' in self.options:
            half = self.options['weight'] / 2.0
            # FIXME: This doesn't properly compensate for the true extrema of the stroked outline
            x0 -= half
            y0 -= half
            x1 += half
            y1 += half

        return (x0, y0, x1, y1)
class TextShape(BaseShape):
    """A text label positioned by an anchor point.

    Requires a surface object (for font metrics) and a 'text' option;
    'font', 'spacing', and 'anchor' options control layout.
    """

    # Class-wide counter used to hand out unique numeric ids for text shapes.
    text_id = 1

    def __init__(self, x0, y0, surf, options=None, **kwargs):
        BaseShape.__init__(self, options, **kwargs)
        self._pos = (x0, y0)

        # Defaults for line spacing and anchor when not supplied.
        # NOTE(review): options=None here would raise TypeError on the
        # membership tests below; callers always pass a dict -- confirm.
        if 'spacing' not in options:
            options['spacing'] = -8
        if 'anchor' not in options:
            options['anchor'] = 'c'

        spacing = options['spacing']
        # Size the shape from the backend's font metrics.
        bx0, by0, bx1, by1, baseline = surf.text_bbox(options['text'], options['font'], spacing)
        w = bx1 - bx0
        h = by1 - by0
        self._baseline = baseline

        self._bbox = [x0, y0, x0+w, y0+h]
        # Cache the anchor offset computed from the initial geometry.
        self._anchor_off = self.anchor_offset

        self.update_tags()

    @property
    def bbox(self):
        """Bounding box shifted by the cached anchor offset."""
        x0, y0, x1, y1 = self._bbox
        ax, ay = self._anchor_off
        return (x0+ax, y0+ay, x1+ax, y1+ay)

    @property
    def anchor_decode(self):
        """Normalize the 'anchor' option into an (h, v) pair drawn from
        {'e','w','c'} x {'n','s','c'} (compass-style spellings accepted)."""
        anchor = self.param('anchor').lower()
        anchor = anchor.replace('center','c')
        anchor = anchor.replace('east','e')
        anchor = anchor.replace('west','w')

        if 'e' in anchor:
            anchorh = 'e'
        elif 'w' in anchor:
            anchorh = 'w'
        else:
            anchorh = 'c'

        if 'n' in anchor:
            anchorv = 'n'
        elif 's' in anchor:
            anchorv = 's'
        else:
            anchorv = 'c'

        return (anchorh, anchorv)

    @property
    def anchor_offset(self):
        """Offset from the anchor reference point to the box's upper-left
        corner, derived from the current (unshifted) bounding box."""
        x0, y0, x1, y1 = self._bbox
        w = abs(x1 - x0)
        h = abs(y1 - y0)
        hw = w / 2.0
        hh = h / 2.0
        spacing = self.param('spacing')
        anchorh, anchorv = self.anchor_decode

        ax = 0
        ay = 0
        if 'n' in anchorv:
            ay = hh + (spacing // 2)
        elif 's' in anchorv:
            ay = -hh - (spacing // 2)
        if 'e' in anchorh:
            ax = -hw
        elif 'w' in anchorh:
            ax = hw

        # Convert from center to upper-left corner
        return (ax - hw, ay - hh)

    @property
    def next_text_id(self):
        # Returns the current counter value and advances the class-wide
        # counter.  NOTE(review): as an instance property, accessing it on
        # the class itself (TextShape.next_text_id) yields the property
        # object, not a number -- callers must use an instance.
        rval = TextShape.text_id
        TextShape.text_id += 1
        return rval
class DoubleRectShape(BaseShape):
    """A rectangle drawn with a second, inset outline (see the matching
    cairo_draw_DoubleRectShape() drawer)."""

    def __init__(self, x0, y0, x1, y1, options=None, **kwargs):
        super(DoubleRectShape, self).__init__(options, **kwargs)
        self._bbox = [x0, y0, x1, y1]
        self.update_tags()
def cairo_draw_DoubleRectShape(shape, surf):
    """Draw a DoubleRectShape on a Cairo surface: a filled and/or stroked
    outer rectangle plus a second stroked rectangle inset by 4 units.

    NOTE(review): rgb_to_cairo is not defined in this module -- presumably
    provided by the Cairo backend module; verify at the import site.
    """
    c = surf.ctx
    x0, y0, x1, y1 = shape.points
    c.rectangle(x0,y0, x1-x0,y1-y0)

    stroke = True if shape.options['weight'] > 0 else False

    if 'fill' in shape.options:
        c.set_source_rgba(*rgb_to_cairo(shape.options['fill']))
        if stroke:
            # Keep the path alive for the stroke pass below
            c.fill_preserve()
        else:
            c.fill()

    if stroke:
        # FIXME c.set_source_rgba(*default_pen)
        c.set_source_rgba(*rgb_to_cairo((100,200,100)))
        c.stroke()
        # Inner rectangle, inset 4 units on every side
        c.rectangle(x0+4,y0+4, x1-x0-8,y1-y0-8)
        c.stroke()
| SymbiFlow/symbolator | nucanvas/shapes.py | Python | mit | 15,247 |
from kapal.algo import *
from kapal.world import *
from kapal.state import *
import kapal.tools
import time
# TODO: walk through example with comments
# TODO: post this example on wiki as a tutorial
n = 50    # width/height of world

# Random cost map with per-cell costs in [1, 3].
c = kapal.tools.rand_cost_map(n, n, min_val=1, max_val=3, flip=True)
w = World2d(c, state_type = State2dAStar)

# Plan a path from the top-left corner to the bottom-right corner.
astar = AStar(w, w.state(0,0), w.state(n-1, n-1))

# Time only the planning step (world construction excluded).
start_time = time.time()
path = astar.plan()
total_time = time.time() - start_time
print total_time, "seconds."    # NOTE: Python 2 print statement

# TODO: finish the example. show the output in a human-readable format.
# perhaps possible interface with Seaship.
| elben/kapal | examples/simple01.py | Python | mit | 634 |
#!/usr/bin/env python
# -*- coding: Latin-1 -*-
"""
This is the main module, the main interface classes and functions
are available in the top level hid package
"""
from __future__ import absolute_import
from __future__ import print_function
import sys
import ctypes
import threading
import collections
if sys.version_info >= (3,):
import winreg
else:
import _winreg as winreg
from ctypes import c_ubyte, c_ulong, c_ushort, c_wchar, byref, sizeof, \
create_unicode_buffer
from ctypes.wintypes import DWORD
#local modules
from . import helpers
HIDError = helpers.HIDError
from . import winapi
setup_api = winapi.setup_api
hid_dll = winapi.hid_dll
HidP_Input = winapi.HidP_Input
HidP_Output = winapi.HidP_Output
HidP_Feature = winapi.HidP_Feature
HidStatus = winapi.HidStatus
MAX_HID_STRING_LENGTH = 128
# Compatibility shim: expose the modern is_alive name on old interpreters.
if not hasattr(threading.Thread, "is_alive"):
    # in python <2.6 is_alive was called isAlive
    threading.Thread.is_alive = threading.Thread.isAlive

# A HID usage is a 16-bit unsigned value in the Windows HID API.
USAGE = c_ushort
INVALID_HANDLE_VALUE = ctypes.c_void_p(-1).value

# Usage event kinds, enumerated 0..6; the chained-assignment idiom binds
# both the HID_EVT_* names and the USAGE_EVENTS list in one statement.
USAGE_EVENTS = [
    HID_EVT_NONE,
    HID_EVT_ALL,
    HID_EVT_CHANGED,
    HID_EVT_PRESSED,
    HID_EVT_RELEASED,
    HID_EVT_SET,
    HID_EVT_CLEAR,
] = list(range(7))
def get_full_usage_id(page_id, usage_id):
    """Pack a 16-bit usage page id and a 16-bit usage id into a single
    composite 32-bit usage id (page in the high word)."""
    return usage_id | (page_id << 16)
def get_usage_page_id(full_usage_id):
    """Extract the 16-bit usage page id (bits 16..31) from a composite
    32-bit usage id."""
    return (full_usage_id & 0xffff0000) >> 16
def get_short_usage_id(full_usage_id):
    """Extract the 16-bit usage id (low word) from a composite 32-bit
    usage id."""
    return full_usage_id & 0xFFFF
def hid_device_path_exists(device_path, guid = None):
    """Test if required device_path is still valid
    (HID device connected to host).

    Enumerates the device interfaces for the given class GUID (HID class
    by default) and returns True when any of them matches device_path.
    """
    # expecting HID devices when no GUID is supplied
    if not guid:
        guid = winapi.GetHidGuid()

    info_data = winapi.SP_DEVINFO_DATA()
    info_data.cb_size = sizeof(winapi.SP_DEVINFO_DATA)

    with winapi.DeviceInterfaceSetInfo(guid) as h_info:
        for interface_data in winapi.enum_device_interfaces(h_info, guid):
            test_device_path = winapi.get_device_path(h_info,
                    interface_data,
                    byref(info_data))
            if test_device_path == device_path:
                return True
    # Not any device now with that path
    return False
def find_all_hid_devices():
    """Finds all HID devices connected to the system.

    Returns a list of HidDevice instances, one per accessible HID
    interface (devices whose attributes cannot be read -- vendor_id 0 --
    are filtered out).
    """
    #
    # From DDK documentation (finding and Opening HID collection):
    # After a user-mode application is loaded, it does the following sequence
    # of operations:
    #
    #   * Calls HidD_GetHidGuid to obtain the system-defined GUID for HIDClass
    #     devices.
    #
    #   * Calls SetupDiGetClassDevs to obtain a handle to an opaque device
    #     information set that describes the device interfaces supported by all
    #     the HID collections currently installed in the system. The
    #     application should specify DIGCF_PRESENT and DIGCF_INTERFACEDEVICE
    #     in the Flags parameter passed to SetupDiGetClassDevs.
    #
    #   * Calls SetupDiEnumDeviceInterfaces repeatedly to retrieve all the
    #     available interface information.
    #
    #   * Calls SetupDiGetDeviceInterfaceDetail to format interface information
    #     for each collection as a SP_INTERFACE_DEVICE_DETAIL_DATA structure.
    #     The device_path member of this structure contains the user-mode name
    #     that the application uses with the Win32 function CreateFile to
    #     obtain a file handle to a HID collection.
    #

    # get HID device class guid
    guid = winapi.GetHidGuid()

    # retrieve all the available interface information.
    results = []
    required_size = DWORD()
    info_data = winapi.SP_DEVINFO_DATA()
    info_data.cb_size = sizeof(winapi.SP_DEVINFO_DATA)

    with winapi.DeviceInterfaceSetInfo(guid) as h_info:
        for interface_data in winapi.enum_device_interfaces(h_info, guid):
            device_path = winapi.get_device_path(h_info,
                    interface_data,
                    byref(info_data))

            parent_device = c_ulong()
            # get parent instance id (so we can discriminate on port)
            if setup_api.CM_Get_Parent(byref(parent_device),
                    info_data.dev_inst, 0) != 0: #CR_SUCCESS = 0
                parent_device.value = 0 #null

            # get unique instance id string; first call sizes the buffer,
            # second call fills it
            required_size.value = 0
            winapi.SetupDiGetDeviceInstanceId(h_info, byref(info_data),
                    None, 0,
                    byref(required_size) )

            device_instance_id = create_unicode_buffer(required_size.value)
            if required_size.value > 0:
                winapi.SetupDiGetDeviceInstanceId(h_info, byref(info_data),
                        device_instance_id, required_size,
                        byref(required_size) )
                hid_device = HidDevice(device_path,
                        parent_device.value, device_instance_id.value )
            else:
                hid_device = HidDevice(device_path, parent_device.value )

            # add device to results, if not protected
            if hid_device.vendor_id:
                results.append(hid_device)

    return results
class HidDeviceFilter(object):
    """This class allows searching for HID devices currently connected to
    the system, it also allows to search for specific devices (by filtering).

    Filter parameters are given as keyword arguments.  A plain name
    (e.g. ``product_id=0x123``) is an equality test; ``<name>_mask``
    pairs the value with a bit mask; ``<name>_includes`` tests substring
    containment against the device attribute.
    """

    def __init__(self, **kwrds):
        """Initialize filter from named target parameters.
        I.e. product_id=0x0123
        """
        self.filter_params = kwrds

    def get_devices_by_parent(self, hid_filter=None):
        """Group devices returned from filter query in order \
        by device parent id.
        """
        all_devs = self.get_devices(hid_filter)
        dev_group = dict()
        for hid_device in all_devs:
            #keep a list of known devices matching parent device Ids
            parent_id = hid_device.get_parent_instance_id()
            device_set = dev_group.get(parent_id, [])
            device_set.append(hid_device)
            if parent_id not in dev_group:
                #add new
                dev_group[parent_id] = device_set
        return dev_group

    def get_devices(self, hid_filter = None):
        """Filter a HID device list by current object parameters. Devices
        must match all of the filtering parameters.

        With hid_filter=None the currently connected devices are queried;
        an explicit (non-empty) list is filtered in place of a query.
        """
        if not hid_filter: #empty list or called without any parameters
            if type(hid_filter) == type(None):
                #request to query connected devices
                hid_filter = find_all_hid_devices()
            else:
                return hid_filter

        #initially all accepted
        results = {}.fromkeys(hid_filter)

        #the filter parameters
        validating_attributes = list(self.filter_params.keys())

        #first filter out restricted access devices
        if not len(results):
            return {}

        for device in list(results.keys()):
            if not device.is_active():
                del results[device]

        if not len(results):
            return {}

        #filter out
        for item in validating_attributes:
            # Strip the modifier suffix to get the device attribute name;
            # skip the plain name when a _mask/_includes variant is present
            # (the modifier entry drives the comparison instead).
            if item.endswith("_includes"):
                item = item[:-len("_includes")]
            elif item.endswith("_mask"):
                item = item[:-len("_mask")]
            elif item +"_mask" in self.filter_params or item + "_includes" \
                    in self.filter_params:
                continue # value mask or string search is being queried
            elif item not in HidDevice.filter_attributes:
                continue # field does not exist sys.error.write(...)
            #start filtering out
            for device in list(results.keys()):
                if not hasattr(device, item):
                    del results[device]
                elif item + "_mask" in validating_attributes:
                    #masked value comparison: both sides ANDed with the mask
                    if getattr(device, item) & self.filter_params[item + \
                            "_mask"] != self.filter_params[item] \
                            & self.filter_params[item + "_mask"]:
                        del results[device]
                elif item + "_includes" in validating_attributes:
                    #subset item (substring / containment test)
                    if self.filter_params[item + "_includes"] not in \
                            getattr(device, item):
                        del results[device]
                else:
                    #plain comparison
                    if getattr(device, item) != self.filter_params[item]:
                        del results[device]
        #
        return list(results.keys())
MAX_DEVICE_ID_LEN = 200 + 1 #+EOL (just in case)
class HidDeviceBaseClass(object):
    """Utility parent class for the main HID device class."""

    # Lock shared by all instances to serialize access to raw report data.
    _raw_reports_lock = threading.Lock()

    def __init__(self):
        """Initializer (no per-instance state at this level)."""
        pass
class HidDevice(HidDeviceBaseClass):
"""This class is the main interface to physical HID devices"""
MAX_MANUFACTURER_STRING_LEN = 128 #it's actually 126 + 1 (null)
MAX_PRODUCT_STRING_LEN = 128 #it's actually 126 + 1 (null)
MAX_SERIAL_NUMBER_LEN = 64
filter_attributes = ["vendor_id", "product_id", "version_number",
"product_name", "vendor_name"]
    def get_parent_instance_id(self):
        """Retrieve the system instance id of the parent device
        (numerical value; 0 when the parent could not be resolved)."""
        return self.parent_instance_id
    def get_parent_device(self):
        """Retrieve the parent device string id.

        Returns an empty string when no parent instance id is known or
        when the configuration-manager lookup fails.
        """
        if not self.parent_instance_id:
            return ""
        dev_buffer_type = winapi.c_tchar * MAX_DEVICE_ID_LEN
        dev_buffer = dev_buffer_type()
        try:
            # CM_Get_Device_ID returns 0 (CR_SUCCESS) on success
            if winapi.CM_Get_Device_ID(self.parent_instance_id, byref(dev_buffer),
                    MAX_DEVICE_ID_LEN, 0) == 0: #success
                return dev_buffer.value
            return ""
        finally:
            # Drop the ctypes buffer and its generated array type promptly
            del dev_buffer
            del dev_buffer_type
    def __init__(self, device_path, parent_instance_id = 0, instance_id=""):
        """Interface for HID device as referenced by device_path parameter.

        The device handle is opened only temporarily here to read
        identification data (vendor/product ids, strings); call open()
        before reading or writing reports.  On any failure the instance
        keeps its default (zeroed/empty) identification attributes.
        """
        #allow safe access (and object browsing)
        self.__open_status = False
        self.__input_report_templates = dict()
        #initialize hardware related vars
        self.__button_caps_storage = list()
        self.report_set = dict()
        self.__evt_handlers = dict()
        self.__reading_thread = None
        self.__input_processing_thread = None
        self.__raw_handler = None
        self._input_report_queue = None
        self.hid_caps = None
        self.ptr_preparsed_data = None
        self.hid_handle = None
        self.usages_storage = dict()
        self.device_path = device_path
        self.instance_id = instance_id
        self.parent_instance_id = parent_instance_id
        self.product_name = ""
        self.vendor_name = ""
        self.serial_number = ""
        self.vendor_id = 0
        self.product_id = 0
        self.version_number = 0
        HidDeviceBaseClass.__init__(self)
        # HID device handle first (temporary, closed in the finally below)
        h_hid = INVALID_HANDLE_VALUE
        try:
            # NOTE(review): bare except silently hides CreateFile failures;
            # the INVALID_HANDLE_VALUE check below is the actual guard
            h_hid = int( winapi.CreateFile(device_path,
                winapi.GENERIC_READ | winapi.GENERIC_WRITE,
                winapi.FILE_SHARE_READ | winapi.FILE_SHARE_WRITE,
                None, winapi.OPEN_EXISTING, 0, 0))
        except:
            pass
        if h_hid == INVALID_HANDLE_VALUE:
            return
        try:
            # get device attributes (vendor/product/version ids)
            hidd_attributes = winapi.HIDD_ATTRIBUTES()
            hidd_attributes.cb_size = sizeof(hidd_attributes)
            if not hid_dll.HidD_GetAttributes(h_hid, byref(hidd_attributes)):
                del hidd_attributes
                return #can't read attributes
            #set local references
            self.vendor_id = hidd_attributes.vendor_id
            self.product_id = hidd_attributes.product_id
            self.version_number = hidd_attributes.version_number
            del hidd_attributes
            # manufacturer string
            vendor_string_type = c_wchar * self.MAX_MANUFACTURER_STRING_LEN
            vendor_name = vendor_string_type()
            if not hid_dll.HidD_GetManufacturerString(h_hid,
                    byref(vendor_name),
                    sizeof(vendor_name)) or not len(vendor_name.value):
                # would be any possibility to get a vendor id table?,
                # maybe not worth it
                self.vendor_name = "Unknown manufacturer"
            else:
                self.vendor_name = vendor_name.value
            del vendor_name
            del vendor_string_type
            # string buffer for product string
            product_name_type = c_wchar * self.MAX_PRODUCT_STRING_LEN
            product_name = product_name_type()
            if not hid_dll.HidD_GetProductString(h_hid,
                    byref(product_name),
                    sizeof(product_name)) or not len(product_name.value):
                # alternate method, refer to windows registry for product
                # information (device_path encodes the registry sub-path)
                path_parts = device_path[len("\\\\.\\"):].split("#")
                h_register = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE,
                    "SYSTEM\\CurrentControlSet\\Enum\\" + \
                    path_parts[0] + "\\" + \
                    path_parts[1] + "\\" + \
                    path_parts[2] )
                self.product_name, other = winreg.QueryValueEx(h_register,
                        "DeviceDesc")
                winreg.CloseKey(h_register)
            else:
                self.product_name = product_name.value
            del product_name
            del product_name_type
            # serial number string (optional; empty when unavailable)
            serial_number_string = c_wchar * self.MAX_SERIAL_NUMBER_LEN
            serial_number = serial_number_string()
            if not hid_dll.HidD_GetSerialNumberString(h_hid,
                    byref(serial_number),
                    sizeof(serial_number)) or not len(serial_number.value):
                self.serial_number = ""
            else:
                self.serial_number = serial_number.value
            del serial_number
            del serial_number_string
        finally:
            # clean up: identification handle is never kept open
            winapi.CloseHandle(h_hid)
def is_active(self):
"""Poll if device is still valid"""
if not self.vendor_id:
return False
return True
    def open(self, output_only = False, shared = True):
        """Open HID device and obtain 'Collection Information'.
        It effectively prepares the HidDevice object for reading and writing.

        Parameters:
            output_only  When True, skip creation of the input report
                         reading/processing threads.
            shared       When True (default), open the handle with
                         read/write sharing enabled.

        Raises HIDError if already opened, the handle can't be created,
        or pre-parsed data can't be obtained.
        """
        if self.is_opened():
            raise HIDError("Device already opened")
        sharing_flags = 0
        if shared:
            sharing_flags = winapi.FILE_SHARE_READ | winapi.FILE_SHARE_WRITE
        # overlapped flag: all reads/writes on this handle are asynchronous
        hid_handle = winapi.CreateFile(
            self.device_path,
            winapi.GENERIC_READ | winapi.GENERIC_WRITE,
            sharing_flags,
            None, # no security
            winapi.OPEN_EXISTING,
            winapi.FILE_ATTRIBUTE_NORMAL | winapi.FILE_FLAG_OVERLAPPED,
            0 )
        if not hid_handle or hid_handle == INVALID_HANDLE_VALUE:
            raise HIDError("Error opening HID device: %s\n"%self.product_name)
        #get pre parsed data
        ptr_preparsed_data = ctypes.c_void_p()
        if not hid_dll.HidD_GetPreparsedData(int(hid_handle),
                byref(ptr_preparsed_data)):
            winapi.CloseHandle(int(hid_handle))
            raise HIDError("Failure to get HID pre parsed data")
        self.ptr_preparsed_data = ptr_preparsed_data
        self.hid_handle = hid_handle
        #get top level capabilities
        self.hid_caps = winapi.HIDP_CAPS()
        HidStatus( hid_dll.HidP_GetCaps(ptr_preparsed_data,
            byref(self.hid_caps)) )
        #proceed with button capabilities
        caps_length = c_ulong()
        # (report kind, caps struct type, item count, fetch function) for
        # every button/value caps combination the device exposes
        all_items = [\
            (HidP_Input, winapi.HIDP_BUTTON_CAPS,
                self.hid_caps.number_input_button_caps,
                hid_dll.HidP_GetButtonCaps
            ),
            (HidP_Input, winapi.HIDP_VALUE_CAPS,
                self.hid_caps.number_input_value_caps,
                hid_dll.HidP_GetValueCaps
            ),
            (HidP_Output, winapi.HIDP_BUTTON_CAPS,
                self.hid_caps.number_output_button_caps,
                hid_dll.HidP_GetButtonCaps
            ),
            (HidP_Output, winapi.HIDP_VALUE_CAPS,
                self.hid_caps.number_output_value_caps,
                hid_dll.HidP_GetValueCaps
            ),
            (HidP_Feature, winapi.HIDP_BUTTON_CAPS,
                self.hid_caps.number_feature_button_caps,
                hid_dll.HidP_GetButtonCaps
            ),
            (HidP_Feature, winapi.HIDP_VALUE_CAPS,
                self.hid_caps.number_feature_value_caps,
                hid_dll.HidP_GetValueCaps
            ),
        ]
        for report_kind, struct_kind, max_items, get_control_caps in all_items:
            if not int(max_items):
                continue #nothing here
            #create storage for control/data
            ctrl_array_type = struct_kind * max_items
            ctrl_array_struct = ctrl_array_type()
            #target max size for API function
            caps_length.value = max_items
            HidStatus( get_control_caps(\
                report_kind,
                byref(ctrl_array_struct),
                byref(caps_length),
                ptr_preparsed_data) )
            #keep reference of usages
            for idx in range(caps_length.value):
                usage_item = HidPUsageCaps( ctrl_array_struct[idx] )
                #by report type
                if report_kind not in self.usages_storage:
                    self.usages_storage[report_kind] = list()
                self.usages_storage[report_kind].append( usage_item )
                #also add report_id to known reports set
                if report_kind not in self.report_set:
                    self.report_set[report_kind] = set()
                self.report_set[report_kind].add( usage_item.report_id )
            del ctrl_array_struct
            del ctrl_array_type
        # now is the time to consider the device opened, as report
        # handling threads enforce it
        self.__open_status = True
        #now prepare the input report handler
        self.__input_report_templates = dict()
        if not output_only and self.hid_caps.input_report_byte_length and \
                HidP_Input in self.report_set:
            #first make templates for easy parsing input reports
            for report_id in self.report_set[HidP_Input]:
                self.__input_report_templates[report_id] = \
                    HidReport( self, HidP_Input, report_id )
            #prepare input reports handlers
            self._input_report_queue = HidDevice.InputReportQueue( \
                self.max_input_queue_size,
                self.hid_caps.input_report_byte_length)
            self.__input_processing_thread = \
                HidDevice.InputReportProcessingThread(self)
            self.__reading_thread = HidDevice.InputReportReaderThread( \
                self, self.hid_caps.input_report_byte_length)
        # clean up
def get_physical_descriptor(self):
"""Returns physical HID device descriptor
"""
raw_data_type = c_ubyte * 1024
raw_data = raw_data_type()
if hid_dll.HidD_GetPhysicalDescriptor(self.hid_handle,
byref(raw_data), 1024 ):
return [x for x in raw_data]
return []
def send_output_report(self, data):
"""Send input/output/feature report ID = report_id, data should be a
c_ubyte object with included the required report data
"""
assert( self.is_opened() )
#make sure we have c_ubyte array storage
if not ( isinstance(data, ctypes.Array) and \
issubclass(data._type_, c_ubyte) ):
raw_data_type = c_ubyte * len(data)
raw_data = raw_data_type()
for index in range( len(data) ):
raw_data[index] = data[index]
else:
raw_data = data
#
# Adding a lock when writing (overlapped writes)
over_write = winapi.OVERLAPPED()
over_write.h_event = winapi.CreateEvent(None, 0, 0, None)
if over_write.h_event:
try:
overlapped_write = over_write
winapi.WriteFile(int(self.hid_handle), byref(raw_data), len(raw_data),
None, byref(overlapped_write)) #none overlapped
error = ctypes.GetLastError()
if error == winapi.ERROR_IO_PENDING:
# overlapped operation in progress
result = error
elif error == 1167:
raise HIDError("Error device disconnected before write")
else:
raise HIDError("Error %d when trying to write to HID "\
"device: %s"%(error, ctypes.FormatError(error)) )
result = winapi.WaitForSingleObject(overlapped_write.h_event, 10000 )
if result != winapi.WAIT_OBJECT_0:
# If the write times out make sure to
# cancel it, otherwise memory could
# get corrupted if the async write
# completes after this functions returns
winapi.CancelIo( int(self.hid_handle) )
raise HIDError("Write timed out")
finally:
# Make sure the event is closed so resources aren't leaked
winapi.CloseHandle(over_write.h_event)
else:
return winapi.WriteFile(int(self.hid_handle), byref(raw_data),
len(raw_data),
None, None) #none overlapped
return True #completed
def send_feature_report(self, data):
"""Send input/output/feature report ID = report_id, data should be a
c_byte object with included the required report data
"""
assert( self.is_opened() )
#make sure we have c_ubyte array storage
if not ( isinstance(data, ctypes.Array) and issubclass(data._type_,
c_ubyte) ):
raw_data_type = c_ubyte * len(data)
raw_data = raw_data_type()
for index in range( len(data) ):
raw_data[index] = data[index]
else:
raw_data = data
return hid_dll.HidD_SetFeature(int(self.hid_handle), byref(raw_data),
len(raw_data))
    def __reset_vars(self):
        """Reset internal state (used at init time and before GC on close)."""
        self.__button_caps_storage = list()
        self.usages_storage = dict()
        self.report_set = dict()
        self.ptr_preparsed_data = None
        self.hid_handle = None
        #don't clean up the report queue because the
        #consumer & producer threads might needed it
        self.__evt_handlers = dict()
        #other: thread references dropped; close() joins them beforehand
        self.__reading_thread = None
        self.__input_processing_thread = None
        self._input_report_queue = None
    #
    def is_plugged(self):
        """Check if device is still plugged to the USB host."""
        return self.device_path and hid_device_path_exists(self.device_path)
    def is_opened(self):
        """Check if the device path resource is currently open."""
        return self.__open_status
    def close(self):
        """Release system resources.

        Shutdown order matters: mark closed, abort the reader, unblock the
        queue, abort the processor, free pre-parsed data, join the reader
        before closing the handle, then join the processor.
        """
        # free parsed data
        if not self.is_opened():
            return
        self.__open_status = False
        # abort all running threads first
        if self.__reading_thread and self.__reading_thread.is_alive():
            self.__reading_thread.abort()
        #avoid posting new reports
        if self._input_report_queue:
            self._input_report_queue.release_events()
        if self.__input_processing_thread and \
                self.__input_processing_thread.is_alive():
            self.__input_processing_thread.abort()
        #properly close API handlers and pointers
        if self.ptr_preparsed_data:
            ptr_preparsed_data = self.ptr_preparsed_data
            self.ptr_preparsed_data = None
            hid_dll.HidD_FreePreparsedData(ptr_preparsed_data)
        # wait for the reading thread to complete before closing device handle
        if self.__reading_thread:
            self.__reading_thread.join()
        if self.hid_handle:
            winapi.CloseHandle(self.hid_handle)
        # make sure report procesing thread is closed
        if self.__input_processing_thread:
            self.__input_processing_thread.join()
        #reset vars (for GC)
        button_caps_storage = self.__button_caps_storage
        self.__reset_vars()
        while button_caps_storage:
            item = button_caps_storage.pop()
            del item
def __find_reports(self, report_type, usage_page, usage_id = 0):
"Find input report referencing HID usage control/data item"
if not self.is_opened():
raise HIDError("Device must be opened")
#
results = list()
if usage_page:
for report_id in self.report_set.get( report_type, set() ):
#build report object, gathering usages matching report_id
report_obj = HidReport(self, report_type, report_id)
if get_full_usage_id(usage_page, usage_id) in report_obj:
results.append( report_obj )
else:
#all (any one)
for report_id in self.report_set.get(report_type, set()):
report_obj = HidReport(self, report_type, report_id)
results.append( report_obj )
return results
    def count_all_feature_reports(self):
        """Retrieve total number of available feature reports."""
        return self.hid_caps.number_feature_button_caps + \
            self.hid_caps.number_feature_value_caps
    def find_input_reports(self, usage_page = 0, usage_id = 0):
        """Find input reports referencing HID usage item."""
        return self.__find_reports(HidP_Input, usage_page, usage_id)
    def find_output_reports(self, usage_page = 0, usage_id = 0):
        """Find output reports referencing HID usage control/data item."""
        return self.__find_reports(HidP_Output, usage_page, usage_id)
    def find_feature_reports(self, usage_page = 0, usage_id = 0):
        """Find feature reports referencing HID usage control/data item."""
        return self.__find_reports(HidP_Feature, usage_page, usage_id)
def find_any_reports(self, usage_page = 0, usage_id = 0):
"""Find any report type referencing HID usage control/data item.
Results are returned in a dictionary mapping report_type to usage
lists.
"""
items = [
(HidP_Input, self.find_input_reports(usage_page, usage_id)),
(HidP_Output, self.find_output_reports(usage_page, usage_id)),
(HidP_Feature, self.find_feature_reports(usage_page, usage_id)),
]
return dict([(t, r) for t, r in items if r])
max_input_queue_size = 20
evt_decision = {
#a=old_value, b=new_value
HID_EVT_NONE: lambda a,b: False,
HID_EVT_ALL: lambda a,b: True, #usage in report
HID_EVT_CHANGED: lambda a,b: a != b,
HID_EVT_PRESSED: lambda a,b: b and not a,
HID_EVT_RELEASED: lambda a,b: a and not b,
HID_EVT_SET: lambda a,b: bool(b),
HID_EVT_CLEAR: lambda a,b: not b,
}
    @helpers.synchronized(HidDeviceBaseClass._raw_reports_lock)
    def _process_raw_report(self, raw_report):
        """Default raw input report data handler.

        Called from the input processing thread; serialized across all
        devices by the class-level lock.  Dispatches either to the raw
        handler (if set) or to registered per-usage event handlers.
        """
        if not self.is_opened():
            return
        if not self.__evt_handlers and not self.__raw_handler:
            return
        if not raw_report[0] and \
                (raw_report[0] not in self.__input_report_templates):
            # windows sends an empty array when disconnecting
            # but, this might have a collision with report_id = 0
            if not hid_device_path_exists(self.device_path):
                #windows XP sends empty report when disconnecting
                self.__reading_thread.abort() #device disconnected
            return
        if self.__raw_handler:
            #this might slow down data throughput, but at the expense of safety
            self.__raw_handler(helpers.ReadOnlyList(raw_report))
            return
        # using pre-parsed report templates, by report id
        report_template = self.__input_report_templates[raw_report[0]]
        # old condition snapshot (full_usage_id -> value before this report)
        old_values = report_template.get_usages()
        # parse incoming data
        report_template.set_raw_data(raw_report)
        # and compare it
        event_applies = self.evt_decision
        evt_handlers = self.__evt_handlers
        for key in report_template.keys():
            if key not in evt_handlers:
                continue
            #check if event handler exist!
            for event_kind, handlers in evt_handlers[key].items():
                #key=event_kind, values=handler set
                new_value = report_template[key].value
                if not event_applies[event_kind](old_values[key], new_value):
                    continue
                #decision applies, call handlers
                for function_handler in handlers:
                    #check if the application wants some particular parameter
                    if handlers[function_handler]:
                        function_handler(new_value,
                                event_kind, handlers[function_handler])
                    else:
                        function_handler(new_value, event_kind)
    def set_raw_data_handler(self, funct):
        """Set external raw data handler; set to None to restore the
        default per-usage event dispatching."""
        self.__raw_handler = funct
def find_input_usage(self, full_usage_id):
"""Check if full usage Id included in input reports set
Parameters:
full_usage_id Full target usage, use get_full_usage_id
Returns:
Report ID as integer value, or None if report does not exist with
target usage. Nottice that report ID 0 is a valid report.
"""
for report_id, report_obj in self.__input_report_templates.items():
if full_usage_id in report_obj:
return report_id
return None #report_id might be 0
def add_event_handler(self, full_usage_id, handler_function,
event_kind = HID_EVT_ALL, aux_data = None):
"""Add event handler for usage value/button changes,
returns True if the handler function was updated"""
report_id = self.find_input_usage(full_usage_id)
if report_id != None:
# allow first zero to trigger changes and releases events
self.__input_report_templates[report_id][full_usage_id].__value = None
if report_id == None or not handler_function:
# do not add handler
return False
assert(isinstance(handler_function, collections.Callable)) # must be a function
# get dictionary for full usages
top_map_handler = self.__evt_handlers.get(full_usage_id, dict())
event_handler_set = top_map_handler.get(event_kind, dict())
# update handler
event_handler_set[handler_function] = aux_data
if event_kind not in top_map_handler:
top_map_handler[event_kind] = event_handler_set
if full_usage_id not in self.__evt_handlers:
self.__evt_handlers[full_usage_id] = top_map_handler
return True
    class InputReportQueue(object):
        """Multi-threaded queue. Allows to queue reports from reading thread.

        Recycles report buffers between the reader (producer) and the
        processing thread (consumer) to avoid re-allocating ctypes arrays
        for every incoming report.
        """
        def __init__(self, max_size, report_size):
            # NOTE(review): max_size is stored but never enforced below; the
            # fresh queue can grow unbounded if the consumer stalls -- confirm
            # whether bounding was intended.
            self.__locked_down = False
            self.max_size = max_size
            self.repport_buffer_type = c_ubyte * report_size
            self.used_queue = []
            self.fresh_queue = []
            self.used_lock = threading.Lock()
            self.fresh_lock = threading.Lock()
            self.posted_event = threading.Event()
        #@logging_decorator
        def get_new(self):
            """Allocate (or recycle) zeroed storage for one input report;
            returns None once the queue is locked down."""
            if self.__locked_down:
                return None
            self.used_lock.acquire()
            if len(self.used_queue):
                #we can reuse items
                empty_report = self.used_queue.pop(0)
                self.used_lock.release()
                ctypes.memset(empty_report, 0, sizeof(empty_report))
            else:
                self.used_lock.release()
                #create brand new storage
                #auto initialized to '0' by ctypes
                empty_report = self.repport_buffer_type()
            return empty_report
        def reuse(self, raw_report):
            """Return a consumed (or never posted) report buffer to the
            recycle pool."""
            if self.__locked_down:
                return
            if not raw_report:
                return
            self.used_lock.acquire()
            #we can reuse this item
            self.used_queue.append(raw_report)
            self.used_lock.release()
        #@logging_decorator
        def post(self, raw_report):
            """Used by reading thread to post a new input report."""
            if self.__locked_down:
                self.posted_event.set()
                return
            self.fresh_lock.acquire()
            self.fresh_queue.append( raw_report )
            self.posted_event.set()
            self.fresh_lock.release()
        #@logging_decorator
        def get(self):
            """Block until a report is available and return it; returns
            None when the queue has been locked down."""
            if self.__locked_down:
                return None
            #wait for data
            self.posted_event.wait()
            self.fresh_lock.acquire()
            if self.__locked_down:
                self.fresh_lock.release()
                return None
            item = self.fresh_queue.pop(0)
            if not self.fresh_queue:
                # emtpy
                self.posted_event.clear()
            self.fresh_lock.release()
            return item
        def release_events(self):
            """Release thread locks: permanently lock the queue down and
            wake any blocked consumer."""
            self.__locked_down = True
            self.posted_event.set()
    class InputReportProcessingThread(threading.Thread):
        """Input reports handler helper class.

        Daemon thread consuming queued raw reports and forwarding them to
        the owning HidDevice; starts itself on construction.
        """
        def __init__(self, hid_object):
            threading.Thread.__init__(self)
            self.__abort = False
            self.hid_object = hid_object
            self.daemon = True
            self.start()
        def abort(self):
            """Cancel processing."""
            self.__abort = True
        def run(self):
            """Start collecting input reports and post it to subscribed
            Hid device"""
            hid_object = self.hid_object
            report_queue = hid_object._input_report_queue
            while not self.__abort and hid_object.is_opened():
                # blocks until the reader posts a report (or lock-down)
                raw_report = report_queue.get()
                if not raw_report or self.__abort:
                    break
                hid_object._process_raw_report(raw_report)
                # reuse the report (avoid allocating new memory)
                report_queue.reuse(raw_report)
    class InputReportReaderThread(threading.Thread):
        """Helper to receive input reports.

        Daemon thread performing overlapped ReadFile calls on the device
        handle and posting filled buffers to the input report queue;
        starts itself on construction when reading is possible.
        """
        def __init__(self, hid_object, raw_report_size):
            threading.Thread.__init__(self)
            self.__abort = False
            self.__active = False
            self.hid_object = hid_object
            self.report_queue = hid_object._input_report_queue
            # NOTE(review): hid_handle below is only used for the sanity
            # check; the loop re-reads hid_object.hid_handle directly
            hid_handle = int( hid_object.hid_handle )
            self.raw_report_size = raw_report_size
            self.__h_read_event = None
            self.__abort_lock = threading.RLock()
            if hid_object and hid_handle and self.raw_report_size \
                    and self.report_queue:
                #only if input reports are available
                self.daemon = True
                self.start()
            else:
                hid_object.close()
        def abort(self):
            """Stop collectiong reports."""
            with self.__abort_lock:
                if not self.__abort and self.__h_read_event:
                    # force overlapped events competition
                    # The abort variable must be set to true
                    # before sending the event, otherwise
                    # the reader thread might skip
                    # CancelIo
                    self.__abort = True
                    winapi.SetEvent(self.__h_read_event)
        def is_active(self):
            """main reading loop is running (bool)"""
            return bool(self.__active)
        def run(self):
            """Overlapped read loop: read one report at a time and post it
            to the queue until aborted or the device disconnects."""
            if not self.raw_report_size:
                # don't raise any error as the hid object can still be used
                # for writing reports
                raise HIDError("Attempting to read input reports on non "\
                    "capable HID device")
            over_read = winapi.OVERLAPPED()
            self.__h_read_event = winapi.CreateEvent(None, 0, 0, None)
            over_read.h_event = self.__h_read_event
            if not over_read.h_event:
                raise HIDError("Error when create hid event resource")
            try:
                bytes_read = c_ulong()
                #
                hid_object = self.hid_object
                input_report_queue = self.report_queue
                report_len = self.raw_report_size
                #main loop active
                self.__active = True
                while not self.__abort:
                    #get storage
                    buf_report = input_report_queue.get_new()
                    if not buf_report or self.__abort:
                        break
                    bytes_read.value = 0
                    with self.__abort_lock:
                        # Call to ReadFile must only be done if
                        # abort isn't set.
                        if self.__abort:
                            break
                        # async read from device
                        result = winapi.ReadFile(hid_object.hid_handle,
                            byref(buf_report), report_len, byref(bytes_read),
                            byref(over_read) )
                    if not result:
                        error = ctypes.GetLastError()
                        if error == winapi.ERROR_IO_PENDING:
                            # overlapped operation in progress
                            result = error
                        elif error == 1167:
                            # device disconnected
                            break
                        else:
                            raise HIDError("Error %d when trying to read from HID "\
                                "device: %s"%(error, ctypes.FormatError(error)) )
                    if result == winapi.ERROR_IO_PENDING:
                        #wait for event
                        result = winapi.WaitForSingleObject( \
                            over_read.h_event,
                            winapi.INFINITE )
                        if result != winapi.WAIT_OBJECT_0 or self.__abort: #success
                            #Cancel the ReadFile call. The read must not be in
                            #progress when run() returns, since the buffers used
                            #in the call will go out of scope and get freed. If
                            #new data arrives (the read finishes) after these
                            #buffers have been freed then this can cause python
                            #to crash.
                            winapi.CancelIo( hid_object.hid_handle )
                            break #device has being disconnected
                    # signal raw data already read
                    input_report_queue.post( buf_report )
            finally:
                #clean up
                self.__active = False
                self.__abort = True
                self.__h_read_event = None #delete read event so it isn't be used by abort()
                winapi.CloseHandle(over_read.h_event)
                del over_read
    def __repr__(self):
        # identification summary; device_path uniquely identifies the device
        return "HID device (vID=0x%04x, pID=0x%04x, v=0x%04x); %s; %s, " \
            "Path: %s" % (self.vendor_id, self.product_id, self.version_number,\
            self.vendor_name, self.product_name, self.device_path)
class ReportItem(object):
    """Represents a single usage field in a report.

    Wraps one button, value or value-array usage described by a HID
    capabilities record, exposing it through the .value property and
    (for value arrays) item access by index.
    """
    def __init__(self, hid_report, caps_record, usage_id = 0):
        """Build the item from a capabilities record.

        Parameters:
            hid_report   Parent HidReport instance
            caps_record  Button/value capabilities record (HidPUsageCaps)
            usage_id     Specific usage when caps_record describes a range
        """
        # from here we can get the parent hid_object
        self.hid_report = hid_report
        self.__is_button = caps_record.is_button
        self.__is_value = caps_record.is_value
        self.__is_value_array = bool(self.__is_value and \
            caps_record.report_count > 1)
        self.__bit_size = 1
        self.__report_count = 1
        if not caps_record.is_range:
            self.usage_id = caps_record.usage
        else:
            self.usage_id = usage_id
        self.__report_id_value = caps_record.report_id
        self.page_id = caps_record.usage_page
        self.__value = 0
        if caps_record.is_range:
            #reference to usage within usage range
            offset = usage_id - caps_record.usage_min
            self.data_index = caps_record.data_index_min + offset
            self.string_index = caps_record.string_min + offset
            self.designator_index = caps_record.designator_min + offset
        else:
            #straight reference
            self.data_index = caps_record.data_index
            self.string_index = caps_record.string_index
            self.designator_index = caps_record.designator_index
        #verify it item is value array
        if self.__is_value:
            if self.__is_value_array:
                byte_size = int((caps_record.bit_size * caps_record.report_count)//8)
                if (caps_record.bit_size * caps_record.report_count) % 8:
                    # TODO: This seems not supported by Windows
                    byte_size += 1
                value_type = c_ubyte * byte_size
                self.__value = value_type()
            self.__bit_size = caps_record.bit_size
            self.__report_count = caps_record.report_count
    def __len__(self):
        # number of elements (1 unless this is a value array)
        return self.__report_count
    def __setitem__(self, index, value):
        """Allow to access value array by index."""
        if not self.__is_value_array:
            raise ValueError("Report item is not value usage array")
        if index < self.__report_count:
            byte_index = int( (index * self.__bit_size) // 8 )
            bit_index = (index * self.__bit_size) % 8
            bit_mask = ((1 << self.__bit_size) - 1)
            # clear the target bit field, then merge the masked value in
            self.__value[byte_index] &= ~(bit_mask << bit_index)
            self.__value[byte_index] |= (value & bit_mask) << bit_index
        else:
            raise IndexError
    def __getitem__(self, index):
        """Allow to access value array by index."""
        if not self.__is_value_array:
            raise ValueError("Report item is not value usage array")
        if index < self.__report_count:
            byte_index = int( (index * self.__bit_size) // 8 )
            bit_index = (index * self.__bit_size) % 8
            return ((self.__value[byte_index] >> bit_index) & \
                ((1 << self.__bit_size) - 1) )
        else:
            raise IndexError
    def set_value(self, value):
        """Set usage value within report (masked to the item's bit size;
        for value arrays, `value` must be a sequence of matching length)."""
        if self.__is_value_array:
            if len(value) == self.__report_count:
                for index, item in enumerate(value):
                    self.__setitem__(index, item)
            else:
                raise ValueError("Value size should match report item size "\
                    "length" )
        else:
            self.__value = value & ((1 << self.__bit_size) - 1) #valid bits only
    def get_value(self):
        """Retrieve usage value within report (a list for value arrays)."""
        if self.__is_value_array:
            if self.__bit_size == 8: #matching c_ubyte
                return list(self.__value)
            else:
                result = []
                for i in range(self.__report_count):
                    result.append(self.__getitem__(i))
                return result
        else:
            return self.__value
    #value property
    value = property(get_value, set_value)
    @property
    def value_array(self):
        """Retrieve usage value as the underlying c_ubyte array."""
        #read only property
        return self.__value
    def key(self):
        """Return the unique (usage page << 16 | usage id) key."""
        return (self.page_id << 16) | self.usage_id
    def is_value(self):
        """Validate if usage is value (not 'button')."""
        return self.__is_value
    def is_button(self):
        """Validate if usage is button (not value)."""
        return self.__is_button
    def is_value_array(self):
        """Validate if usage was described as value array."""
        return self.__is_value_array
    def get_usage_string(self):
        """Returns usage representation string (as embedded in HID device
        if available)
        """
        if self.string_index:
            usage_string_type = c_wchar * MAX_HID_STRING_LENGTH
            # 128 max string length
            abuffer = usage_string_type()
            hid_dll.HidD_GetIndexedString(
                self.hid_report.get_hid_object().hid_handle,
                self.string_index,
                byref(abuffer), MAX_HID_STRING_LENGTH-1 )
            return abuffer.value
        return ""
    #read only properties
    @property
    def report_id(self):
        """Retrieve Report Id numeric value."""
        return self.__report_id_value
    def __repr__(self):
        res = []
        if self.string_index:
            res.append( self.get_usage_string() )
        res.append( "page_id=%s"%hex(self.page_id) )
        res.append( "usage_id=%s"%hex(self.usage_id) )
        # identity test is the safe comparison here: __value may be a
        # ctypes array, for which == / != against None is ill-defined
        if self.__value is not None:
            res.append( "value=%s" % str(self.get_value()))
        else:
            # bug fix: literal previously contained a stray ')' which
            # unbalanced the repr output ("value=[None])")
            res.append( "value=[None]" )
        usage_type = ""
        if self.is_button():
            usage_type = "Button"
        elif self.is_value():
            usage_type = "Value"
        return usage_type + "Usage item, %s (" % hex(get_full_usage_id ( \
            self.page_id, self.usage_id)) + ', '.join(res) + ')'
# class ReportItem finishes ***********************
# class ReportItem finishes ***********************
class HidReport(object):
"""This class interfaces an actual HID physical report, providing a wrapper
that exposes specific usages (usage page and usage ID) as a usage_id value
map (dictionary).
Example: A HID device might have an output report ID = 0x01, with the
following usages; 0x20 as a boolean (button), and 0x21 as a 3 bit value,
then querying the HID object for the output report (by using
hid_object.get_output_report(0x01))
"""
#
def __init__(self, hid_object, report_type, report_id):
hid_caps = hid_object.hid_caps
if report_type == HidP_Input:
self.__raw_report_size = hid_caps.input_report_byte_length
elif report_type == HidP_Output:
self.__raw_report_size = hid_caps.output_report_byte_length
elif report_type == HidP_Feature:
self.__raw_report_size = hid_caps.feature_report_byte_length
else:
raise HIDError("Unsupported report type")
self.__report_kind = report_type #target report type
self.__value_array_items = list() #array of usages items
self.__hid_object = hid_object #parent hid object
self.__report_id = c_ubyte(report_id) #target report Id
self.__items = dict() #access items by 'full usage' key
self.__idx_items = dict() #access internal items by HID DLL usage index
self.__raw_data = None #buffer storage (if needed)
self.__usage_data_list = None #hid API HIDP_DATA array (if allocated)
#build report items list, browse parent hid object for report items
for item in hid_object.usages_storage.get(report_type, []):
if item.report_id == report_id:
if not item.is_range:
#regular 'single' usage
report_item = ReportItem(self, item)
self.__items[report_item.key()] = report_item
self.__idx_items[report_item.data_index] = report_item
#item is value array?
if report_item.is_value_array():
self.__value_array_items.append(report_item)
else:
for usage_id in range(item.usage_min,
item.usage_max):
report_item = ReportItem(self, item, usage_id)
self.__items[report_item.key()] = report_item
self.__idx_items[report_item.data_index] = report_item
#
#
__report_kind_dict = {
HidP_Input: "Input",
HidP_Output: "Output",
HidP_Feature: "Feature",
}
    #read only properties
    @property
    def report_id(self):
        """Retrieve associated report Id value."""
        return self.__report_id.value
    @property
    def report_type(self):
        """Retrieve report type as a display string (Input/Output/Feature)."""
        return self.__report_kind_dict[self.__report_kind]
    @property
    def hid_object(self):
        """Retrieve associated HID device instance."""
        return self.__hid_object
    def __repr__(self):
        # short summary: kind, id and number of contained usage items
        return "HID report object (%s report, id=0x%02x), %d items included" \
            % (self.report_type, self.__report_id.value, len(self.__items) )
    def __getitem__(self, key):
        """Fetch a ReportItem by full usage key (or by ReportItem)."""
        if isinstance(key, ReportItem):
            key = key.key()
        return self.__items[key]
    def __setitem__(self, key, value):
        """Set report item value."""
        item = self.__getitem__(key)
        item.value = value
    def __contains__(self, key):
        # membership by full usage key (a ReportItem is reduced to its key)
        if isinstance(key, ReportItem):
            key = key.key()
        return key in self.__items
    def __len__(self):
        return len(self.__items)
    def has_key(self, key):
        """Test for key (as standard dicts)."""
        return self.__contains__(key)
    def items(self):
        """Return key, value pairs (as standard dicts)."""
        return list(self.__items.items())
    def keys(self):
        """Return stored element keys (as standard dicts)."""
        return self.__items.keys()
    def values(self):
        """Return stored elements (as standard dicts)."""
        return self.__items.values()
    def get_hid_object(self):
        """Retrieve reference to parent HID device."""
        return self.__hid_object
    def get_usages(self):
        """Return a dictionary mapping full usage Ids to plain values."""
        result = dict()
        for key, usage in self.items():
            result[key] = usage.value
        return result
    def __alloc_raw_data(self, initial_values=None):
        """Pre-allocate reusable c_ubyte storage for the raw report and
        (optionally) copy `initial_values` into it."""
        #allocate c_ubyte storage
        if self.__raw_data == None: #first time only, create storage
            raw_data_type = c_ubyte * self.__raw_report_size
            self.__raw_data = raw_data_type()
        elif initial_values == self.__raw_data:
            # already our own buffer, nothing to copy
            # NOTE(review): '==' on ctypes arrays falls back to identity
            # semantics; presumably an 'is' check was intended -- confirm
            return
        else:
            #initialize (zero out previous content)
            ctypes.memset(self.__raw_data, 0, len(self.__raw_data))
            if initial_values:
                for index in range(len(initial_values)):
                    self.__raw_data[index] = initial_values[index]
    def set_raw_data(self, raw_data):
        """Set usage values based on given raw data, item[0] is report_id,
        length should match 'raw_data_length' value, best performance if
        raw_data is c_ubyte ctypes array object type
        """
        #pre-parsed data should exist
        assert(self.__hid_object.is_opened())
        #valid length
        if len(raw_data) != self.__raw_report_size:
            raise HIDError( "Report size has to be %d elements (bytes)" \
                % self.__raw_report_size )
        # copy to internal storage
        self.__alloc_raw_data(raw_data)
        if not self.__usage_data_list: # create HIDP_DATA buffer
            max_items = hid_dll.HidP_MaxDataListLength(self.__report_kind,
                self.__hid_object.ptr_preparsed_data)
            data_list_type = winapi.HIDP_DATA * max_items
            self.__usage_data_list = data_list_type()
        #reference HIDP_DATA buffer
        data_list = self.__usage_data_list
        data_len = c_ulong(len(data_list))
        #reset old values so usages absent from this report read as zero
        for item in self.values():
            if item.is_value_array():
                item.value = [0, ]*len(item)
            else:
                item.value = 0
        #ready, parse raw data
        HidStatus( hid_dll.HidP_GetData(self.__report_kind,
            byref(data_list), byref(data_len),
            self.__hid_object.ptr_preparsed_data,
            byref(self.__raw_data), len(self.__raw_data)) )
        #set values on internal report item objects
        for idx in range(data_len.value):
            value_item = data_list[idx]
            report_item = self.__idx_items.get(value_item.data_index)
            if not report_item:
                # This is not expected to happen
                continue
            if report_item.is_value():
                report_item.value = value_item.value.raw_value
            elif report_item.is_button():
                report_item.value = value_item.value.on
            else:
                pass # HID API should give us either, at least one of 'em
        #get values of array items
        for item in self.__value_array_items:
            #ask hid API to parse
            HidStatus( hid_dll.HidP_GetUsageValueArray(self.__report_kind,
                item.page_id,
                0, #link collection
                item.usage_id, #short usage
                byref(item.value_array), #output data (c_ubyte storage)
                len(item.value_array), self.__hid_object.ptr_preparsed_data,
                byref(self.__raw_data), len(self.__raw_data)) )
        #
    def __prepare_raw_data(self):
        """Format internal __raw_data storage according to usages setting.

        Serializes the currently assigned usage values (buttons, plain
        values and value arrays) into the reusable raw report buffer via
        the HidP_* APIs, ready to be sent to the device.

        Raises:
            HIDError: if pre-parsed report data is unavailable or a HID
                API call fails (via HidStatus).
        """
        #pre-parsed data should exist
        if not self.__hid_object.ptr_preparsed_data:
            raise HIDError("HID object close or unable to request pre parsed "\
                "report data")
        # make sure pre-memory allocation already done
        self.__alloc_raw_data()
        try:
            HidStatus( hid_dll.HidP_InitializeReportForID(self.__report_kind,
                self.__report_id, self.__hid_object.ptr_preparsed_data,
                byref(self.__raw_data), self.__raw_report_size) )
            #
        except HIDError:
            # fall back to manually stamping the report id in byte 0
            # NOTE(review): elsewhere __report_id is used as a ctypes value
            # (see send(), which reads .value) — confirm this assignment
            # really stores an int and not a ctypes object
            self.__raw_data[0] = self.__report_id
        #check if we have pre-allocated usage storage
        if not self.__usage_data_list: # create HIDP_DATA buffer
            max_items = hid_dll.HidP_MaxDataListLength(self.__report_kind,
                self.__hid_object.ptr_preparsed_data)
            if not max_items:
                raise HIDError("Internal error while requesting usage length")
            data_list_type = winapi.HIDP_DATA * max_items
            self.__usage_data_list = data_list_type()
        #reference HIDP_DATA buffer
        data_list = self.__usage_data_list
        #set buttons and values usages first
        n_total_usages = 0
        single_usage = USAGE()
        single_usage_len = c_ulong()
        for data_index, report_item in self.__idx_items.items():
            if (not report_item.is_value_array()) and \
                    report_item.value != None:
                #set by user, include in request
                if report_item.is_button() and report_item.value:
                    # windows just can't handle button arrays!, we just don't
                    # know if usage is button array or plain single usage, so
                    # we set all usages at once
                    single_usage.value = report_item.usage_id
                    single_usage_len.value = 1
                    HidStatus( hid_dll.HidP_SetUsages(self.__report_kind,
                        report_item.page_id, 0,
                        byref(single_usage), byref(single_usage_len),
                        self.__hid_object.ptr_preparsed_data,
                        byref(self.__raw_data), self.__raw_report_size) )
                    continue
                elif report_item.is_value() and \
                        not report_item.is_value_array():
                    data_list[n_total_usages].value.raw_value = report_item.value
                else:
                    continue #do nothing
                data_list[n_total_usages].reserved = 0 #reset
                data_list[n_total_usages].data_index = data_index #reference
                n_total_usages += 1
        #set data if any usage is not 'none' (and not any value array)
        if n_total_usages:
            #some usages set
            usage_len = c_ulong(n_total_usages)
            HidStatus( hid_dll.HidP_SetData(self.__report_kind,
                byref(data_list), byref(usage_len),
                self.__hid_object.ptr_preparsed_data,
                byref(self.__raw_data), self.__raw_report_size) )
        #set values based on value arrays
        # value arrays are serialized one by one; HidP_SetData cannot
        # handle them
        for report_item in self.__value_array_items:
            HidStatus( hid_dll.HidP_SetUsageValueArray(self.__report_kind,
                report_item.page_id,
                0, #all link collections
                report_item.usage_id,
                byref(report_item.value_array),
                len(report_item.value_array),
                self.__hid_object.ptr_preparsed_data, byref(self.__raw_data),
                len(self.__raw_data)) )
def get_raw_data(self):
"""Get raw HID report based on internal report item settings,
creates new c_ubytes storage
"""
if self.__report_kind != HidP_Output \
and self.__report_kind != HidP_Feature:
raise HIDError("Only for output or feature reports")
self.__prepare_raw_data()
#return read-only object for internal storage
return helpers.ReadOnlyList(self.__raw_data)
def send(self, raw_data = None):
"""Prepare HID raw report (unless raw_data is provided) and send
it to HID device
"""
if self.__report_kind != HidP_Output \
and self.__report_kind != HidP_Feature:
raise HIDError("Only for output or feature reports")
#valid length
if raw_data and (len(raw_data) != self.__raw_report_size):
raise HIDError("Report size has to be %d elements (bytes)" \
% self.__raw_report_size)
#should be valid report id
if raw_data and raw_data[0] != self.__report_id.value:
#hint, raw_data should be a plain list of integer values
raise HIDError("Not matching report id")
#
if self.__report_kind != HidP_Output and \
self.__report_kind != HidP_Feature:
raise HIDError("Can only send output or feature reports")
#
if not raw_data:
# we'll construct the raw report
self.__prepare_raw_data()
elif not ( isinstance(raw_data, ctypes.Array) and \
issubclass(raw_data._type_, c_ubyte) ):
# pre-memory allocation for performance
self.__alloc_raw_data(raw_data)
#reference proper object
raw_data = self.__raw_data
if self.__report_kind == HidP_Output:
return self.__hid_object.send_output_report(raw_data)
elif self.__report_kind == HidP_Feature:
return self.__hid_object.send_feature_report(raw_data)
else:
pass #can't get here (yet)
    def get(self, do_process_raw_report = True):
        """Read report from device.

        Only valid for input and feature reports. On success the raw
        bytes are parsed into the usage items and (optionally) forwarded
        to the owning device's raw-report handler.

        Args:
            do_process_raw_report: when True, also notify the parent HID
                object via _process_raw_report().

        Returns:
            Read-only list of raw report bytes, or an empty read-only
            list when the underlying read fails.
        """
        assert( self.__hid_object.is_opened() )
        if self.__report_kind != HidP_Input and \
                self.__report_kind != HidP_Feature:
            raise HIDError("Only for input or feature reports")
        # pre-alloc raw data
        self.__alloc_raw_data()
        # now use it
        raw_data = self.__raw_data
        # NOTE(review): byte 0 is stamped with __report_id, which send()
        # treats as a ctypes value (.value) — confirm an int is stored here
        raw_data[0] = self.__report_id
        read_function = None
        if self.__report_kind == HidP_Feature:
            read_function = hid_dll.HidD_GetFeature
        elif self.__report_kind == HidP_Input:
            read_function = hid_dll.HidD_GetInputReport
        if read_function and read_function( \
                int(self.__hid_object.hid_handle),
                byref(raw_data), len(raw_data) ):
            #success
            self.set_raw_data(raw_data)
            if do_process_raw_report:
                self.__hid_object._process_raw_report(raw_data)
            return helpers.ReadOnlyList(raw_data)
        # read failed: return an empty (read-only) result
        return helpers.ReadOnlyList([])
#class HIDReport finishes ***********************
class HidPUsageCaps(object):
    """Keep usage parameters (regardless of windows type) in a common class.

    Copies the scalar fields of a HIDP_BUTTON_CAPS / HIDP_VALUE_CAPS
    ctypes structure — plus the active member of its range union — into
    plain Python int attributes, and records whether the capability
    describes a button or a value.
    """
    def __init__(self, caps):
        """Flatten the given capability structure.

        Args:
            caps: a winapi.HIDP_BUTTON_CAPS or winapi.HIDP_VALUE_CAPS
                instance (any ctypes struct with compatible _fields_).
        """
        # keep pylint happy
        self.report_id = 0
        for fname, ftype in caps._fields_:
            if fname.startswith('reserved'):
                continue
            if fname == 'union':
                continue
            setattr(self, fname, int(getattr(caps, fname)))
        # pick whichever union member is valid for this capability
        if caps.is_range:
            range_struct = caps.union.range
        else:
            range_struct = caps.union.not_range
        for fname, ftype in range_struct._fields_:
            if fname.startswith('reserved'):
                continue
            if fname == 'union':
                continue
            setattr(self, fname, int(getattr(range_struct, fname)))
        self.is_value = False
        self.is_button = False
        if isinstance(caps, winapi.HIDP_BUTTON_CAPS):
            self.is_button = True
        elif isinstance(caps, winapi.HIDP_VALUE_CAPS):
            self.is_value = True
        else:
            pass
    def inspect(self):
        """Retrieve dictionary of 'Field: Value' attributes."""
        results = {}
        for fname in dir(self):
            if not fname.startswith('_'):
                value = getattr(self, fname)
                # use the callable() builtin: the old test against
                # collections.Callable was deprecated in Python 3.3 and
                # removed in 3.10 (moved to collections.abc.Callable)
                if callable(value):
                    continue
                results[fname] = value
        return results
def show_hids(target_vid = 0, target_pid = 0, output = None):
    """Check all HID class devices connected to PC hosts.

    Writes a short description plus full documentation for every
    matching device to the given stream.

    Args:
        target_vid: optional vendor id filter (0 = any vendor).
        target_pid: optional product id filter; only honored together
            with target_vid (0 = any product).
        output: writable text stream; defaults to sys.stdout.
    """
    # first be kind with local encodings
    if not output:
        # beware your script should manage encodings
        output = sys.stdout
    # then the big cheese...
    from . import tools
    all_hids = None
    if target_vid:
        if target_pid:
            # both vendor and product Id provided
            device_filter = HidDeviceFilter(vendor_id = target_vid,
                    product_id = target_pid)
        else:
            # only vendor id
            device_filter = HidDeviceFilter(vendor_id = target_vid)
        all_hids = device_filter.get_devices()
    else:
        all_hids = find_all_hid_devices()
    if all_hids:
        # fixed typo in the user-facing message ("writting" -> "writing")
        print("Found HID class devices!, writing details...")
        for dev in all_hids:
            device_name = str(dev)
            output.write(device_name)
            output.write('\n\n Path: %s\n' % dev.device_path)
            output.write('\n Instance: %s\n' % dev.instance_id)
            output.write('\n Port (ID): %s\n' % dev.get_parent_instance_id())
            output.write('\n Port (str):%s\n' % str(dev.get_parent_device()))
            #
            try:
                dev.open()
                tools.write_documentation(dev, output)
            finally:
                # always release the device handle, even on failure
                dev.close()
        print("done!")
    else:
        print("There's not any non system HID class device available")
#
| anonimoanemico/opentension | lib/pywinusb/pywinusb/hid/core.py | Python | gpl-3.0 | 64,798 |
#! /usr/bin/env python
import h5py
import scipy as sp
import numpy as np
import time
import pickle
import sys
import math
import cmath
from os import path
import os, sys
sys.path.append('../../')
from mycolor_fun import *
cwd = os.getcwd()
# pass/fail tolerances for the regression check below
abs_error_limit = 0.05;
rel_error_limit = 0.05;
# reference (verified) and freshly computed displacement datasets
# NOTE(review): benchmark_PC_coef is loaded but never read below — the
# reference values are hard-coded in check_component1/2 instead
benchmark_PC_coef = h5py.File('stochastic_result_verified.feioutput')['Model']['Nodes']['Generalized_Displacements']
test_PC_coef = h5py.File('stochastic_elastic_dynamic_analysis.h5.feioutput')['Model']['Nodes']['Generalized_Displacements']
# print benchmark_PC_coef.shape
# two spot-checked displacement components and their expected values
check_component1 = -1.38e-3;
component1 = test_PC_coef[100, 800];
check_component2 = 2.89e-3;
component2 = test_PC_coef[500, 1000];
abs_error_x = abs(component1-check_component1);
abs_error_z = abs(component2-check_component2);
rel_error_x = abs_error_x/abs(check_component1);
rel_error_z = abs_error_z/abs(check_component2);
# case_flag stays 1 only if both components pass both tolerances
case_flag = 1;
print headrun() , "Test directory:: ", cwd;
print headrun() , "-----------Testing results-----------------";
print headstep() ,'{0} {1} {2} {3}'.format('analytic_solution ','numeric_result ','absolute error ', 'relative error[%]');
if ((abs_error_x < abs_error_limit) and (rel_error_x < rel_error_limit)):
    print headOK() ,'{0:+e} {1:+e} {2:+2.3f} {3:+2.2f} '.format(check_component1, component1, abs_error_x, 100*rel_error_x);
else:
    print headFailed(), '{0:+e} {1:+e} {2:+2.3f} {3:+2.2f} '.format(check_component1, component1, abs_error_x, 100*rel_error_x);
    case_flag = 0;
if ((abs_error_z < abs_error_limit) and (rel_error_z < rel_error_limit)):
    print headOK() ,'{0:+e} {1:+e} {2:+2.3f} {3:+2.2f} '.format(check_component2, component2, abs_error_z, 100*rel_error_z);
else:
    print headFailed(), '{0:+e} {1:+e} {2:+2.3f} {3:+2.2f} '.format(check_component2, component2, abs_error_z, 100*rel_error_z);
    case_flag = 0;
if case_flag ==1 :
    print headOKCASE(),"-----------Done this case!-----------------"
| BorisJeremic/Real-ESSI-Examples | stochastic_FEM/stochastic_wave_propagation/elastic/comparison.py | Python | cc0-1.0 | 2,067 |
# -*- coding: utf-8 -*-
class ContainerModel(object):
    """Plain data holder describing a container as saved on disk.

    Carries no behavior: it simply stores the container's identity plus
    the user's synchronization preferences for it.

    Attributes:
        id (str): container identifier.
        name (unicode): human-readable container name.
        path (unicode or None): absolute path of the container folder on
            disk, when one exists.
        type (str): either 'teamshare' or 'my_bajoo'.
        do_not_sync (bool): True when the user opted out of syncing this
            container locally, even if ``path`` is set.
    """

    def __init__(self, id, name, path=None, container_type=None,
                 do_not_sync=False):
        """Store the container's identity and sync preferences.

        Args:
            id (str): container identifier.
            name (unicode): container's name.
            path (unicode, optional): absolute on-disk folder path.
            container_type (str, optional): 'teamshare' or 'my_bajoo'.
            do_not_sync (bool, optional): skip local sync when True.
        """
        self.id = id
        self.name = name
        self.path = path
        self.type = container_type
        self.do_not_sync = do_not_sync
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Module tests."""
from __future__ import absolute_import, print_function
from datetime import timedelta
from time import sleep
from flask import url_for
from flask_login import login_required
from flask_mail import Message
from flask_security import url_for_security
from invenio_accounts.models import SessionActivity
from invenio_accounts.tasks import clean_session_table, send_security_email
from invenio_accounts.testutils import create_test_user
def test_send_message_outbox(task_app):
    """A security email dispatched via the task lands in the outbox."""
    with task_app.app_context():
        mail = task_app.extensions['mail']
        with mail.record_messages() as outbox:
            message = Message('Test1',
                              sender='[email protected]',
                              recipients=['[email protected]'])
            send_security_email(message.__dict__)
            # exactly one message, delivered unmodified
            assert len(outbox) == 1
            delivered = outbox[0]
            assert delivered.subject == 'Test1'
            assert delivered.sender == '[email protected]'
            assert delivered.recipients == ['[email protected]']
def test_send_message_through_security(task_app):
    """The security extension delegates mail delivery to the task."""
    with task_app.app_context():
        mail = task_app.extensions['mail']
        security = task_app.extensions['security']
        with mail.record_messages() as outbox:
            message = Message('Test1',
                              sender='[email protected]',
                              recipients=['[email protected]'])
            security._send_mail_task(message)
            # exactly one message, delivered unmodified
            assert len(outbox) == 1
            delivered = outbox[0]
            assert delivered.subject == 'Test1'
            assert delivered.sender == '[email protected]'
            assert delivered.recipients == ['[email protected]']
def test_clean_session_table(task_app):
    """Test clean session table.

    Timeline (session lifetime = 20s):
      t=0   user1 logs in            -> 1 session row
      t=15  user2 logs in            -> 2 session rows
      t=25  cleanup: user1 expired   -> 1 row, user2 still logged in
      t=40  cleanup: user2 expired   -> 0 rows, protected page redirects
    """
    # set session lifetime
    task_app.permanent_session_lifetime = timedelta(seconds=20)
    with task_app.test_request_context():
        user1 = create_test_user(email='[email protected]')
        user2 = create_test_user(email='[email protected]')
        with task_app.test_client() as client:
            client.post(url_for_security('login'), data=dict(
                email=user1.email,
                password=user1.password_plaintext,
            ))
            assert len(SessionActivity.query.all()) == 1
        sleep(15)
        with task_app.test_client() as client:
            # protected page
            @task_app.route('/test', methods=['GET'])
            @login_required
            def test():
                return 'test'
            client.post(url_for_security('login'), data=dict(
                email=user2.email,
                password=user2.password_plaintext,
            ))
            assert len(SessionActivity.query.all()) == 2
            sleep(10)
            # user1's session (now >20s old) should be purged, user2's kept
            clean_session_table.s().apply()
            assert len(SessionActivity.query.all()) == 1
            protected_url = url_for('test')
            res = client.get(protected_url)
            assert res.status_code == 200
            sleep(15)
            # now user2's session has expired as well
            clean_session_table.s().apply()
            assert len(SessionActivity.query.all()) == 0
            res = client.get(protected_url)
            # check if the user is really logged out (redirect to login)
            assert res.status_code == 302
| tiborsimko/invenio-accounts | tests/test_tasks.py | Python | mit | 3,585 |
# (C) British Crown Copyright 2011 - 2018, Met Office
#
# This file is part of cartopy.
#
# cartopy is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# cartopy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with cartopy. If not, see <https://www.gnu.org/licenses/>.
"""
Provide shapely geometry <-> matplotlib path support.
See also `Shapely Geometric Objects <see_also_shapely>`_
and `Matplotlib Path API <http://matplotlib.org/api/path_api.html>`_.
.. see_also_shapely:
http://toblerity.org/shapely/manual.html#geometric-objects
"""
from __future__ import (absolute_import, division, print_function)
import numpy as np
import matplotlib
from matplotlib.path import Path
import shapely.geometry as sgeom
def geos_to_path(shape):
    """
    Create a list of :class:`matplotlib.path.Path` objects that describe
    a shape.

    Parameters
    ----------
    shape
        A list, tuple or single instance of any Shapely geometry type
        (Point, LineString, LinearRing, Polygon, the Multi*/collection
        variants), or any object providing an ``_as_mpl_path()`` method.

    Returns
    -------
    paths
        A list of :class:`matplotlib.path.Path` objects.
    """
    # A sequence of geometries: convert each element and flatten.
    if isinstance(shape, (list, tuple)):
        collected = []
        for geom in shape:
            collected.extend(geos_to_path(geom))
        return collected

    # NOTE: LinearRing subclasses LineString, so it must be tested first.
    if isinstance(shape, sgeom.LinearRing):
        return [Path(np.column_stack(shape.xy), closed=True)]

    if isinstance(shape, (sgeom.LineString, sgeom.Point)):
        return [Path(np.column_stack(shape.xy))]

    if isinstance(shape, sgeom.Polygon):
        def ring_codes(ring):
            # MOVETO at the start, LINETO along, CLOSEPOLY at the end.
            codes = np.ones(len(ring.xy[0])) * Path.LINETO
            codes[0] = Path.MOVETO
            codes[-1] = Path.CLOSEPOLY
            return codes

        if shape.is_empty:
            return []
        rings = [np.array(shape.exterior.xy)]
        rings.extend(np.array(ring.xy) for ring in shape.interiors)
        vertices = np.concatenate(rings, 1).T
        codes = np.concatenate([ring_codes(shape.exterior)] +
                               [ring_codes(ring) for ring in shape.interiors])
        return [Path(vertices, codes)]

    if isinstance(shape, (sgeom.MultiPolygon, sgeom.GeometryCollection,
                          sgeom.MultiLineString, sgeom.MultiPoint)):
        collected = []
        for geom in shape.geoms:
            collected.extend(geos_to_path(geom))
        return collected

    if hasattr(shape, '_as_mpl_path'):
        vertices, codes = shape._as_mpl_path()
        return [Path(vertices, codes)]

    raise ValueError('Unsupported shape type {}.'.format(type(shape)))
def path_segments(path, **kwargs):
    """
    Create an array of vertices and a corresponding array of codes from a
    :class:`matplotlib.path.Path`.

    Parameters
    ----------
    path
        A :class:`matplotlib.path.Path` instance.

    Other Parameters
    ----------------
    kwargs
        Forwarded to :func:`matplotlib.path.Path.cleaned`; see
        :func:`matplotlib.path.iter_segments` for details.

    Returns
    -------
    vertices, codes
        A (vertices, codes) tuple of numpy arrays; see
        :class:`matplotlib.path.Path` for the meaning of the codes.
    """
    cleaned = path.cleaned(**kwargs)
    # drop the trailing STOP vertex/code that Path.cleaned appends
    vertices = cleaned.vertices[:-1, :]
    codes = cleaned.codes[:-1]
    return vertices, codes
def path_to_geos(path, force_ccw=False):
    """
    Create a list of Shapely geometric objects from a
    :class:`matplotlib.path.Path`.

    Parameters
    ----------
    path
        A :class:`matplotlib.path.Path` instance.

    Other Parameters
    ----------------
    force_ccw
        Boolean flag determining whether the path can be inverted to enforce
        ccw. Defaults to False.

    Returns
    -------
    A list of instances of the following type(s):
        :class:`shapely.geometry.polygon.Polygon`,
        :class:`shapely.geometry.linestring.LineString` and/or
        :class:`shapely.geometry.multilinestring.MultiLineString`.

    """
    # Convert path into numpy array of vertices (and associated codes)
    path_verts, path_codes = path_segments(path, curves=False)

    # Split into subarrays such that each subarray consists of connected
    # line segments based on the start of each one being marked by a
    # matplotlib MOVETO code.
    verts_split_inds = np.where(path_codes == Path.MOVETO)[0]
    verts_split = np.split(path_verts, verts_split_inds)
    codes_split = np.split(path_codes, verts_split_inds)

    # Iterate through the vertices generating a list of
    # (external_geom, [internal_polygons]) tuples.
    other_result_geoms = []
    collection = []
    for path_verts, path_codes in zip(verts_split, codes_split):
        if len(path_verts) == 0:
            continue

        # A run of identical vertices degenerates to a single Point.
        verts_same_as_first = np.all(path_verts[0, :] == path_verts[1:, :],
                                     axis=1)
        if all(verts_same_as_first):
            geom = sgeom.Point(path_verts[0, :])
        elif path_verts.shape[0] > 4 and path_codes[-1] == Path.CLOSEPOLY:
            geom = sgeom.Polygon(path_verts[:-1, :])
        elif (matplotlib.__version__ < '2.2.0' and
                # XXX A path can be given which does not end with close poly,
                # in that situation, we have to guess?
                # NOTE(review): lexicographic version-string comparison —
                # breaks for e.g. '10.0'; verify against packaging.version.
                path_verts.shape[0] > 3 and verts_same_as_first[-1]):
            geom = sgeom.Polygon(path_verts)
        else:
            geom = sgeom.LineString(path_verts)

        # If geom is a Polygon and is contained within the last geom in
        # collection, add it to its list of internal polygons, otherwise
        # simply append it as a new external geom.
        if geom.is_empty:
            pass
        elif (len(collection) > 0 and
                isinstance(collection[-1][0], sgeom.Polygon) and
                isinstance(geom, sgeom.Polygon) and
                collection[-1][0].contains(geom.exterior)):
            collection[-1][1].append(geom.exterior)
        elif isinstance(geom, sgeom.Point):
            other_result_geoms.append(geom)
        else:
            collection.append((geom, []))

    # Convert each (external_geom, [internal_polygons]) pair into a
    # a shapely Polygon that encapsulates the internal polygons, if the
    # external geom is a LineString leave it alone.
    geom_collection = []
    for external_geom, internal_polys in collection:
        if internal_polys:
            # XXX worry about islands within lakes
            geom = sgeom.Polygon(external_geom.exterior, internal_polys)
        else:
            geom = external_geom

        # Correctly orientate the polygon (ccw)
        if isinstance(geom, sgeom.Polygon):
            if force_ccw and not geom.exterior.is_ccw:
                geom = sgeom.polygon.orient(geom)

        geom_collection.append(geom)

    # If the geom_collection only contains LineStrings combine them
    # into a single MultiLinestring.
    if geom_collection and all(isinstance(geom, sgeom.LineString) for
                               geom in geom_collection):
        geom_collection = [sgeom.MultiLineString(geom_collection)]

    # Remove any zero area Polygons
    # NOTE(review): relies on the private Shapely attribute `_is_empty`;
    # confirm it still exists in the targeted Shapely versions.
    def not_zero_poly(geom):
        return ((isinstance(geom, sgeom.Polygon) and not geom._is_empty and
                 geom.area != 0) or
                not isinstance(geom, sgeom.Polygon))

    result = list(filter(not_zero_poly, geom_collection))

    return result + other_result_geoms
| pelson/cartopy | lib/cartopy/mpl/patch.py | Python | lgpl-3.0 | 8,401 |
# Download the Python helper library from twilio.com/docs/python/install
from twilio.rest import Client

# Your Account Sid and Auth Token from twilio.com/user/account
# NOTE(review): never commit real credentials; prefer loading them from
# environment variables.
account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"
client = Client(account_sid, auth_token)

# Create a subaccount named "Submarine" under the main account and print
# the new subaccount's SID.
account = client.api.accounts.create(friendly_name="Submarine")

print(account.sid)
| teoreteetik/api-snippets | rest/subaccounts/creating-subaccounts-example-1/creating-subaccounts-example-1.6.x.py | Python | mit | 377 |
"""
TKinter based ground station
"""
import os
import time
import random
import threading
import Queue
import csv
from argparse import ArgumentParser
from pymavlink import mavutil
import simplekml
import Tkinter
import tkMessageBox
# list of message types that will be used
# (integer tags stored under the 'type' key of each dict placed on the
# inter-thread queue; the GUI dispatches on them in processIncoming)
MSG_TYPE_RADIO = 1
MSG_TYPE_HEARTBEAT = 2
MSG_TYPE_PRESSURE = 3
MSG_TYPE_GPS = 4
MSG_TYPE_TEMPERATURE = 5
MSG_TYPE_COMMAND_ACK = 6
class GuiPart:
    def __init__(self, master, queue, endCommand):
        """Build the ground-station window.

        Widget pack() order determines the on-screen layout, so the
        construction sequence below is significant.

        Args:
            master: Tk root (or parent) widget.
            queue: Queue carrying telemetry message dicts from the
                worker thread (consumed in processIncoming).
            endCommand: callable bound to the 'Done' button.
        """
        self.queue = queue
        self.main_container = Tkinter.Frame(master, background="bisque")
        self.main_container.pack(side="top", fill="both", expand=True)
        # remember the theme background so indicators can be reset later
        self.defaultbg = master.cget('bg')
        self.top_frame = Tkinter.Frame(self.main_container)
        self.main_frame = Tkinter.Frame(self.main_container, background="yellow")
        self.base_frame = Tkinter.Frame(self.main_container)
        self.top_frame.pack(side="top", fill="x", expand=False)
        self.base_frame.pack(side="bottom", fill="x", expand=False)
        self.main_frame.pack(side="bottom", fill="both", expand=True)
        # the button at the bottom
        console = Tkinter.Button(self.base_frame, text='Done', command=endCommand)
        console.pack()
        # unit toggle; unit_dist_mult/unit_text are applied to displayed
        # altitudes (1.0/"m" means raw meters)
        self.unit_button = Tkinter.Button(self.base_frame, text="Meters", command=self.change_unit)
        self.unit_dist_mult = 1.0
        self.unit_text = "m"
        self.unit_button.pack()
        self.cut_button = Tkinter.Button(
            self.base_frame, text="Cutdown", command=self.cutdown_button_fn)
        # cutdown command handshake flags (updated by processAck)
        self.cutdown_ack_received = False
        self.send_cutdown_command = False
        self.cut_button.pack()
        # top bar with the valid states
        self.state_ascent = Tkinter.Label(self.top_frame, text="Ascent", bg="green")
        self.state_ascent.pack(padx=5, pady=10, side="left")
        self.state_burst = Tkinter.Label(self.top_frame, text="Burst")
        self.state_burst.pack(padx=5, pady=10, side="left")
        self.state_free_fall = Tkinter.Label(self.top_frame, text="Free Fall")
        self.state_free_fall.pack(padx=5, pady=10, side="left")
        self.state_descent = Tkinter.Label(self.top_frame, text="Descent")
        self.state_descent.pack(padx=5, pady=10, side="left")
        # mission time
        left_mission_time = Tkinter.Frame(self.main_frame)
        left_mission_time.pack(side="top", fill="x", expand=True)
        mission_time_name = Tkinter.Label(left_mission_time, text="Mission Time: ")
        mission_time_name.pack(padx=5, pady=10, side="left")
        self.mission_time = Tkinter.Label(left_mission_time, text="0")
        self.mission_time.pack(padx=0, pady=10, side="left")
        # the heater information (indicators driven by processHeartbeat)
        left_heater = Tkinter.Frame(self.main_frame)
        left_heater.pack(side="top", fill="x", expand=True)
        heater_title = Tkinter.Label(left_heater, text="Heater Information")
        heater_title.pack(side="top")
        self.heater1 = Tkinter.Label(left_heater, text="h1")
        self.heater1.pack(padx=5, pady=10, side="left")
        self.heater2 = Tkinter.Label(left_heater, text="h2")
        self.heater2.pack(padx=5, pady=10, side="left")
        self.heater3 = Tkinter.Label(left_heater, text="h3")
        self.heater3.pack(padx=5, pady=10, side="left")
        self.heater4 = Tkinter.Label(left_heater, text="h4")
        self.heater4.pack(padx=5, pady=10, side="left")
        self.heater5 = Tkinter.Label(left_heater, text="h5")
        self.heater5.pack(padx=5, pady=10, side="left")
        self.heater6 = Tkinter.Label(left_heater, text="h6")
        self.heater6.pack(padx=5, pady=10, side="left")
        # temp data
        left_temp = Tkinter.Frame(self.main_frame)
        left_temp.pack(side="top", fill="x", expand=True)
        sensor_title = Tkinter.Label(left_temp, text="Sensor Info")
        sensor_title.pack(side="top")
        temp_name = Tkinter.Label(left_temp, text="Board Temperature: ")
        temp_name.pack(padx=5, pady=5, side="left")
        self.temp = Tkinter.Label(left_temp, text="0")
        self.temp.pack(padx=0, pady=5, side="left")
        # eight thermistor readouts, arranged as two columns of four
        thermistor_frame = Tkinter.Frame(self.main_frame)
        thermistor_frame.pack(side="top", fill="x", expand=True)
        thermistor_title = Tkinter.Label(thermistor_frame, text="Thermistors")
        thermistor_title.pack(side="top")
        thermistor_left_frame = Tkinter.Frame(thermistor_frame)
        thermistor_left_frame.pack(side="left", fill="x", expand=True)
        t1_frame = Tkinter.Frame(thermistor_left_frame)
        t1_frame.pack(side="top", fill="x", expand=True)
        t1 = Tkinter.Label(t1_frame, text="t1: ")
        t1.pack(padx=5, pady=10, side="left")
        self.temp1 = Tkinter.Label(t1_frame, text="0")
        self.temp1.pack(padx=0, pady=5, side="left")
        t2_frame = Tkinter.Frame(thermistor_left_frame)
        t2_frame.pack(side="top", fill="x", expand=True)
        t2 = Tkinter.Label(t2_frame, text="t2: ")
        t2.pack(padx=5, pady=10, side="left")
        self.temp2 = Tkinter.Label(t2_frame, text="0")
        self.temp2.pack(padx=0, pady=5, side="left")
        t3_frame = Tkinter.Frame(thermistor_left_frame)
        t3_frame.pack(side="top", fill="x", expand=True)
        t3 = Tkinter.Label(t3_frame, text="t3: ")
        t3.pack(padx=5, pady=10, side="left")
        self.temp3 = Tkinter.Label(t3_frame, text="0")
        self.temp3.pack(padx=0, pady=5, side="left")
        t4_frame = Tkinter.Frame(thermistor_left_frame)
        t4_frame.pack(side="top", fill="x", expand=True)
        t4 = Tkinter.Label(t4_frame, text="t4: ")
        t4.pack(padx=5, pady=10, side="left")
        self.temp4 = Tkinter.Label(t4_frame, text="0")
        self.temp4.pack(padx=0, pady=5, side="left")
        thermistor_right_frame = Tkinter.Frame(thermistor_frame)
        thermistor_right_frame.pack(side="left", fill="x", expand=True)
        t5_frame = Tkinter.Frame(thermistor_right_frame)
        t5_frame.pack(side="top", fill="x", expand=True)
        t5 = Tkinter.Label(t5_frame, text="t5: ")
        t5.pack(padx=5, pady=10, side="left")
        self.temp5 = Tkinter.Label(t5_frame, text="0")
        self.temp5.pack(padx=0, pady=5, side="left")
        t6_frame = Tkinter.Frame(thermistor_right_frame)
        t6_frame.pack(side="top", fill="x", expand=True)
        t6 = Tkinter.Label(t6_frame, text="t6: ")
        t6.pack(padx=5, pady=10, side="left")
        self.temp6 = Tkinter.Label(t6_frame, text="0")
        self.temp6.pack(padx=0, pady=5, side="left")
        t7_frame = Tkinter.Frame(thermistor_right_frame)
        t7_frame.pack(side="top", fill="x", expand=True)
        t7 = Tkinter.Label(t7_frame, text="t7: ")
        t7.pack(padx=5, pady=10, side="left")
        self.temp7 = Tkinter.Label(t7_frame, text="0")
        self.temp7.pack(padx=0, pady=5, side="left")
        t8_frame = Tkinter.Frame(thermistor_right_frame)
        t8_frame.pack(side="top", fill="x", expand=True)
        t8 = Tkinter.Label(t8_frame, text="t8: ")
        t8.pack(padx=5, pady=10, side="left")
        self.temp8 = Tkinter.Label(t8_frame, text="0")
        self.temp8.pack(padx=0, pady=5, side="left")
        # pressure data
        left_pressure = Tkinter.Frame(self.main_frame)
        left_pressure.pack(side="top", fill="x", expand=True)
        pressure_name = Tkinter.Label(left_pressure, text="Pressure: ")
        pressure_name.pack(padx=5, pady=5, side="left")
        self.pressure = Tkinter.Label(left_pressure, text="0")
        self.pressure.pack(padx=0, pady=5, side="left")
        # baro alt
        left_baroalt = Tkinter.Frame(self.main_frame)
        left_baroalt.pack(side="top", fill="x", expand=True)
        baroalt_name = Tkinter.Label(left_baroalt, text="Baro Alt: ")
        baroalt_name.pack(padx=5, pady=5, side="left")
        self.baro_alt = Tkinter.Label(left_baroalt, text="0")
        self.baro_alt.pack(padx=0, pady=5, side="left")
        # gps alt
        left_gpsalt = Tkinter.Frame(self.main_frame)
        left_gpsalt.pack(side="top", fill="x", expand=True)
        gpsalt_name = Tkinter.Label(left_gpsalt, text="GPS Alt: ")
        gpsalt_name.pack(padx=5, pady=5, side="left")
        self.gps_alt = Tkinter.Label(left_gpsalt, text="0")
        self.gps_alt.pack(padx=0, pady=5, side="left")
        # radio data (driven by processRadio)
        left_radio1 = Tkinter.Frame(self.main_frame)
        left_radio1.pack(side="top", fill="x", expand=True)
        radio_title = Tkinter.Label(left_radio1, text="Radio Info")
        radio_title.pack(side="top")
        rssi_name = Tkinter.Label(left_radio1, text="RSSI: ")
        rssi_name.pack(padx=5, pady=2, side="left")
        self.rssi = Tkinter.Label(left_radio1, text="0")
        self.rssi.pack(padx=0, pady=2, side="left")
        remrssi_name = Tkinter.Label(left_radio1, text="Remote RSSI: ")
        remrssi_name.pack(padx=5, pady=2, side="left")
        self.remote_rssi = Tkinter.Label(left_radio1, text="0")
        self.remote_rssi.pack(padx=0, pady=2, side="left")
        # radio noise
        left_radio2 = Tkinter.Frame(self.main_frame)
        left_radio2.pack(side="top", fill="x", expand=True)
        noise_name = Tkinter.Label(left_radio2, text="Noise: ")
        noise_name.pack(padx=5, pady=5, side="left")
        self.noise = Tkinter.Label(left_radio2, text="0")
        self.noise.pack(padx=0, pady=5, side="left")
        remnoise_name = Tkinter.Label(left_radio2, text="Remote Noise: ")
        remnoise_name.pack(padx=5, pady=5, side="left")
        self.remote_noise = Tkinter.Label(left_radio2, text="0")
        self.remote_noise.pack(padx=0, pady=5, side="left")
        # radio fade margin
        left_radio3 = Tkinter.Frame(self.main_frame)
        left_radio3.pack(side="top", fill="x", expand=True)
        fm_name = Tkinter.Label(left_radio3, text="Fade Margin: ")
        fm_name.pack(padx=5, pady=5, side="left")
        self.fademargin = Tkinter.Label(left_radio3, text="0")
        self.fademargin.pack(padx=0, pady=5, side="left")
        remfm_name = Tkinter.Label(left_radio3, text="Remote Fade Margin: ")
        remfm_name.pack(padx=5, pady=5, side="left")
        self.remote_fademargin = Tkinter.Label(left_radio3, text="0")
        self.remote_fademargin.pack(padx=0, pady=5, side="left")
        # radio distance multiplier
        left_radio4 = Tkinter.Frame(self.main_frame)
        left_radio4.pack(side="top", fill="x", expand=True)
        dist_mult_name = Tkinter.Label(left_radio4, text="Dist Mult: ")
        dist_mult_name.pack(padx=5, pady=5, side="left")
        self.dist_mult = Tkinter.Label(left_radio4, text="0")
        self.dist_mult.pack(padx=0, pady=5, side="left")
        # Add more GUI stuff here
def processHeartbeat(self, msg):
if msg['heater1']:
self.heater1.config(bg="green")
else:
self.heater1.config(bg=self.defaultbg)
if msg['heater2']:
self.heater2.config(bg="green")
else:
self.heater2.config(bg=self.defaultbg)
if msg['heater3']:
self.heater3.config(bg="green")
else:
self.heater3.config(bg=self.defaultbg)
if msg['heater4']:
self.heater4.config(bg="green")
else:
self.heater4.config(bg=self.defaultbg)
if msg['heater5']:
self.heater5.config(bg="green")
else:
self.heater5.config(bg=self.defaultbg)
if msg['heater6']:
self.heater6.config(bg="green")
else:
self.heater6.config(bg=self.defaultbg)
if msg['state'] == "Ascent":
self.state_ascent.config(bg="green")
self.state_burst.config(bg=self.defaultbg)
self.state_free_fall.config(bg=self.defaultbg)
self.state_descent.config(bg=self.defaultbg)
elif msg['state'] == "Burst":
self.state_ascent.config(bg=self.defaultbg)
self.state_burst.config(bg="green")
self.state_free_fall.config(bg=self.defaultbg)
self.state_descent.config(bg=self.defaultbg)
elif msg['state'] == "Free Fall":
self.state_ascent.config(bg=self.defaultbg)
self.state_burst.config(bg=self.defaultbg)
self.state_free_fall.config(bg="green")
self.state_descent.config(bg=self.defaultbg)
elif msg['state'] == "Descent":
self.state_ascent.config(bg=self.defaultbg)
self.state_burst.config(bg=self.defaultbg)
self.state_free_fall.config(bg=self.defaultbg)
self.state_descent.config(bg="green")
def processRadio(self, msg):
self.rssi.config(text='{:04.2f}'.format(msg['rssi']))
self.remote_rssi.config(text='{:04.2f}'.format(msg['remote_rssi']))
self.noise.config(text='{:04.2f}'.format(msg['noise']))
self.remote_noise.config(text='{:04.2f}'.format(msg['remote_noise']))
self.fademargin.config(text='{:04.2f}'.format(msg['fade_margin']))
self.remote_fademargin.config(text='{:04.2f}'.format(msg['remote_fade_margin']))
self.dist_mult.config(text='{:04.2f}'.format(msg['dist_mult']))
def processPressure(self, msg):
self.mission_time.config(text='{:04.2f}'.format(msg['mission_time']))
self.baro_alt.config(text='{:04.2f} {}'.format(
msg['baro_altitude'] * self.unit_dist_mult, self.unit_text))
self.pressure.config(text='{:04.2f} Pa'.format(msg['pressure']))
def processGPS(self, msg):
self.gps_alt.config(text='{:04.2f} {}'.format(
msg['gps_alt'] * self.unit_dist_mult, self.unit_text))
def processTemperature(self, msg):
self.temp.config(text='{:04.2f} C'.format(msg['board_temperature']))
self.temp1.config(text='{:04.3f} C'.format(msg['temperature_array'][0]))
self.temp2.config(text='{:04.3f} C'.format(msg['temperature_array'][1]))
self.temp3.config(text='{:04.3f} C'.format(msg['temperature_array'][2]))
self.temp4.config(text='{:04.3f} C'.format(msg['temperature_array'][3]))
self.temp5.config(text='{:04.3f} C'.format(msg['temperature_array'][4]))
self.temp6.config(text='{:04.3f} C'.format(msg['temperature_array'][5]))
self.temp7.config(text='{:04.3f} C'.format(msg['temperature_array'][6]))
self.temp8.config(text='{:04.3f} C'.format(msg['temperature_array'][7]))
    def processAck(self, msg):
        """Handle a command acknowledgement: stop re-sending the cutdown command."""
        # mark the command as having been successfully received (no longer need to
        # send the command)
        self.cutdown_ack_received = True
        self.send_cutdown_command = False
        print("cmd ack received")
def processIncoming(self):
"""
Handle all the messages currently in the queue (if any).
"""
while self.queue.qsize():
try:
msg = self.queue.get(0)
if msg['type'] == MSG_TYPE_HEARTBEAT: # heartbeat message
self.processHeartbeat(msg)
elif msg['type'] == MSG_TYPE_RADIO: # radio status
self.processRadio(msg)
elif msg['type'] == MSG_TYPE_PRESSURE: # temp pressure sensor
self.processPressure(msg)
elif msg['type'] == MSG_TYPE_GPS: # gps altitude
self.processGPS(msg)
elif msg['type'] == MSG_TYPE_TEMPERATURE:
self.processTemperature(msg)
elif msg['type'] == MSG_TYPE_COMMAND_ACK:
self.processAck(msg)
except Queue.Empty:
pass
def change_unit(self):
if self.unit_dist_mult == 1.0: # currently in meters
self.unit_dist_mult = 3.28 # feet in a meter
self.unit_text = "ft"
self.unit_button.config(text="Feet")
else:
self.unit_dist_mult = 1.0
self.unit_text = "m"
self.unit_button.config(text="Meters")
    def cutdown_button_fn(self):
        # Ask for confirmation, then flag the worker thread to transmit the
        # cutdown command; the flag is cleared again when an ack arrives.
        # mark as having to need to send the cutdown command
        # TODO: this needs to somehow actually continually try sending the command
        # need to see how to send data back to the thread command
        result = tkMessageBox.askquestion(
            "Cutdown", "you sure you want to do that?", icon='warning')
        if result == 'yes':
            print "you wanted it!"
            # Only re-arm the send flag if no ack was ever received.
            if not self.cutdown_ack_received:
                self.send_cutdown_command = True
        else:
            print "don't click the button then!"
class ThreadedClient:
    """
    Launch the main part of the GUI and the worker thread. periodicCall and
    endApplication could reside in the GUI part, but putting them here
    means that you have all the thread controls in a single place.
    """
    def __init__(self, master, device, baudrate, source_system):
        """
        Start the GUI and the asynchronous threads. We are in the main
        (original) thread of the application, which will later be used by
        the GUI. We spawn a new thread for the worker.

        master        -- the Tk root window
        device        -- serial device path/name for the MAVLink radio
        baudrate      -- serial baud rate for that device
        source_system -- MAVLink system id used by this ground station
        """
        self.master = master
        self.device = device
        self.baudrate = baudrate
        self.source_system = source_system
        # Create the queue
        self.queue = Queue.Queue()
        # Set up the GUI part
        self.gui = GuiPart(master, self.queue, self.endApplication)
        # Set up the thread to do asynchronous I/O
        # More can be made if necessary
        self.running = 1
        self.thread1 = threading.Thread(target=self.workerThread1)
        self.thread1.start()
        # Start the periodic call in the GUI to check if the queue contains
        # anything
        self.periodicCall()
    def periodicCall(self):
        """
        Check every 100 ms if there is something new in the queue.
        """
        self.gui.processIncoming()
        if not self.running:
            # This is the brutal stop of the system. You may want to do
            # some cleanup before actually shutting it down.
            import sys
            sys.exit(1)
        # Re-arm the timer; Tk will invoke this method again from its loop.
        self.master.after(100, self.periodicCall)
def get_next_msg(self, m):
# wait for the next message
msg = m.recv_match(blocking=True)
return msg
    def create_network_link(self):
        # create a network link file for the balloon; Google Earth reloads
        # the linked balloon.kml once per second so the view tracks new fixes
        linkKml = simplekml.Kml()
        networklink = linkKml.newnetworklink(name="Refresh Link")
        networklink.link.href = os.getcwd() + "/balloon.kml"
        networklink.link.refreshmode = simplekml.RefreshMode.oninterval
        networklink.link.refreshinterval = 1.0
        linkKml.save("balloon_link.kml")
def parseRadioStatus(self, status):
returnMsg = {}
localdBm = (status.rssi / 1.9) - 127
localNoisedBm = (status.noise / 1.9) - 127
remdBm = (status.remrssi / 1.9) - 127
remNoisedBm = (status.remnoise / 1.9) - 127
localFadeMargin = localdBm - localNoisedBm
remFadeMargin = remdBm - remNoisedBm
distMultipler = 2**(localFadeMargin / 6)
returnMsg['type'] = MSG_TYPE_RADIO
returnMsg['rssi'] = localdBm
returnMsg['remote_rssi'] = remdBm
returnMsg['noise'] = localNoisedBm
returnMsg['remote_noise'] = remNoisedBm
returnMsg['fade_margin'] = localFadeMargin
returnMsg['remote_fade_margin'] = remFadeMargin
returnMsg['dist_mult'] = distMultipler
print("rssi: %f dBm, remrssi: %f dBm, noise: %f dBm, remnoise: %f dBm, rxerrors: %f\n" %
(localdBm, remdBm, localNoisedBm, remNoisedBm, status.rxerrors))
print("local fade margin: %f dB, remote fade margin: %f dB, distance multiplier: %f\n" %
(localFadeMargin, remFadeMargin, distMultipler))
return returnMsg
def parseHeartbeat(self, hrt):
returnMsg = {}
# all of the flags
flightTermFlag = False
flightTermBurnStart = False
flightTermBurnEnd = False
parachuteArmed = False
parachuteDeployed = False
heatOn = False
heatPriority = False
# parse out the base mode elements
baseMode = hrt.base_mode
if baseMode & 1 > 0:
flightTermination = True
if baseMode & 2 > 0:
flightTermBurnStart = True
if baseMode & 4 > 0:
flightTermBurnEnd = True
if baseMode & 8 > 0:
parachuteArmed = True
if baseMode & 16 > 0:
parachuteDeployed = True
if baseMode & 32 > 0:
heatOn = True
if baseMode & 64 > 0:
heatPriority = True
# heater state
heater1on = False
heater2on = False
heater3on = False
heater4on = False
heater5on = False
heater6on = False
nichromeon = False
customMode = hrt.custom_mode
if customMode & 1 > 0:
heater1on = True
if customMode & 2 > 0:
heater2on = True
if customMode & 4 > 0:
heater3on = True
if customMode & 8 > 0:
heater4on = True
if customMode & 16 > 0:
heater5on = True
if customMode & 32 > 0:
heater6on = True
if customMode & 65536 > 0:
nichromeon = True
state = "unknown"
sysState = hrt.system_status
if sysState == 0:
state = "Ascent"
elif sysState == 1:
state = "Burst"
elif sysState == 2:
state = "Free Fall"
elif sysState == 3:
state = "Descent"
print("heartbeat received: base: %d, custom: %d, state: %d\n" %
(hrt.base_mode, hrt.custom_mode, hrt.system_status))
returnMsg['type'] = MSG_TYPE_HEARTBEAT
returnMsg['heater1'] = heater1on
returnMsg['heater2'] = heater2on
returnMsg['heater3'] = heater3on
returnMsg['heater4'] = heater4on
returnMsg['heater5'] = heater5on
returnMsg['heater6'] = heater6on
returnMsg['state'] = state
return returnMsg
def parseTemperature(self, msg):
returnMsg = {}
timestamp = msg.time_usec # I don't think this is really needed
tempArray = msg.temperature
boardTemp = msg.board_temperature
print("temp received: board temp: %f\n" % (boardTemp))
returnMsg['type'] = MSG_TYPE_TEMPERATURE
returnMsg['temperature_array'] = tempArray
returnMsg['board_temperature'] = boardTemp
return returnMsg
def parsePressure(self, msg):
returnMsg = {}
missionTime = msg.mission_time
baroAlt = msg.baro_altitude
pressure = msg.pressure
print("sensor received: mission time: %d, baro alt: %f, pressure: %f\n" %
(missionTime, baroAlt, pressure))
returnMsg['type'] = MSG_TYPE_PRESSURE
returnMsg['mission_time'] = float(missionTime) / 60 # convert milliseconds to minutes
returnMsg['baro_altitude'] = baroAlt
returnMsg['pressure'] = pressure
return returnMsg
def parseAck(self, msg):
returnMsg = {}
res = msg.result
print("received ack of the nichrome command\n")
returnMsg['type'] = MSG_TYPE_COMMAND_ACK
return returnMsg
def parseRadioMsg(self, msg):
returnMsg = {}
# get the radio status
if msg.get_type() == "RADIO_STATUS":
returnMsg = self.parseRadioStatus(msg)
# get the heartbeat (doing this to be able to send a heartbeat)
elif msg.get_type() == "HEARTBEAT":
returnMsg = self.parseHeartbeat(msg)
elif msg.get_type() == "TEMP_SENSORS":
returnMsg = self.parseTemperature(msg)
elif msg.get_type() == "PRESSURE_SENSOR":
returnMsg = self.parsePressure(msg)
elif msg.get_type() == "COMMAND_ACK":
returnMsg = self.parseAck(msg)
return returnMsg
    def workerThread1(self):
        """
        This is where we handle the asynchronous I/O. For example, it may be
        a 'select()'.
        One important thing to remember is that the thread has to yield
        control.

        Reads MAVLink messages in a loop: GPS fixes update the KML files for
        Google Earth; everything else is parsed into a dict and queued for
        the GUI thread.  A received heartbeat is answered with our own, and
        piggybacks the cutdown command when the GUI has requested one.
        """
        # create a mavlink serial instance
        mavutil.set_dialect('ncl_ground')
        master = mavutil.mavlink_connection(
            self.device, baud=self.baudrate, source_system=self.source_system)
        # create a network link file
        self.create_network_link()
        # style to be used
        styleGreen = simplekml.Style()
        styleGreen.linestyle.color = simplekml.Color.green
        styleGreen.polystyle.color = simplekml.Color.changealphaint(
            180, simplekml.Color.forestgreen)
        styleGreen.linestyle.width = 2
        # setup the ballon animation KML
        balloonKml = simplekml.Kml()
        balloonKml.document.name = "Balloon"
        balloonPt = balloonKml.newpoint(name="balloon")
        balloonPt.style.iconstyle.icon.href = "http://maps.google.com/mapfiles/kml/paddle/grn-blank.png"
        balloonPt.style.iconstyle.heading = 0.0
        balloonPt.altitudemode = simplekml.AltitudeMode.relativetoground
        balloonPt.extrude = 1
        balloonTrace = balloonKml.newlinestring(name="path")
        balloonTrace.style = styleGreen
        balloonTrace.altitudemode = simplekml.AltitudeMode.relativetoground
        balloonTrace.extrude = 1
        prevLocs = []
        # how many points to save in the trace (if set to -1, then will add all
        # points, this will generate a huge file)
        pathLength = 150
        # NOTE(review): haveFirstPos appears to be unused -- confirm.
        haveFirstPos = False
        # define all the variables here
        lat = 0.0
        lon = 0.0
        alt = 0.0
        while self.running:
            # get the next mavlink message (blocks until one arrives)
            msg = self.get_next_msg(master)
            # the raw gps values (not sure this is used...)
            if msg.get_type() == "GPS_RAW_INT":
                gps = msg
                # GPS_RAW_INT scales lat/lon by 1e7 and altitude by 1e3.
                lat = gps.lat / 10000000.
                lon = gps.lon / 10000000.
                alt = gps.alt / 1000.
                print("gps raw lat: %f, lon: %f, alt: %d\n" % (lat, lon, alt))
                # Keep a bounded trail of past positions for the trace line.
                if len(prevLocs) < pathLength:
                    prevLocs.append((lon, lat, alt))
                else:
                    prevLocs.pop(0)
                    prevLocs.append((lon, lat, alt))
                # update the plane kml file
                balloonPt.coords = [(lon, lat, alt)]
                balloonTrace.coords = prevLocs
                balloonKml.save("balloon.kml")
                toSend = {}
                toSend['type'] = MSG_TYPE_GPS
                toSend['gps_alt'] = alt
                self.queue.put(toSend)
            else:
                toSend = self.parseRadioMsg(msg)
                if toSend:
                    self.queue.put(toSend)
                if msg.get_type() == "HEARTBEAT":
                    # send a heartbeat if received one
                    target_system = 1
                    target_component = 2
                    master.mav.heartbeat_send(target_system, target_component, 2, 2, 2)
                    # send a cutdown command if needed
                    if self.gui.send_cutdown_command:
                        print("sending cutdown command")
                        target_system = 1
                        target_component = 2
                        command = 21  # MAV_CMD_NAV_LAND
                        confirmation = 0
                        param1 = 0
                        param2 = 0
                        master.mav.command_short_send(
                            target_system, target_component, command, confirmation, param1, param2)
    def endApplication(self):
        """Signal the worker loop and the GUI periodic call to stop."""
        self.running = 0
# set up the argument parser
parser = ArgumentParser(description=__doc__)
parser.add_argument("--baudrate", type=int,
                    help="antenna baud rate", default=57600)
parser.add_argument("--device", required=True, help="serial device")
parser.add_argument("--source-system", dest='SOURCE_SYSTEM', type=int,
                    default=250, help='MAVLink source system for this GCS')
args = parser.parse_args()
# NOTE(review): rand appears to be unused -- confirm before removing.
rand = random.Random()
# Hand the Tk root to the threaded client; mainloop() blocks until quit.
root = Tkinter.Tk()
client = ThreadedClient(root, args.device, args.baudrate, args.SOURCE_SYSTEM)
root.mainloop()
| adrnp/SSMGS | visual-groundstation.py | Python | mit | 28,310 |
# -*- coding: utf-8 -*-
from __future__ import annotations
import sys
from typing import TYPE_CHECKING
from PyQt5.QtGui import QIcon, QMovie, QPixmap
from PyQt5.QtWidgets import QSystemTrayIcon
if TYPE_CHECKING:
from gridsync.gui import Gui
from gridsync import resource, settings
from gridsync.gui.menu import Menu
from gridsync.gui.pixmap import BadgedPixmap
class SystemTrayIcon(QSystemTrayIcon):
    """Application tray icon.

    Animates while sync operations are in flight and badges the icon with
    the number of unread messages.
    """

    def __init__(self, gui: Gui) -> None:
        super().__init__()
        self.gui = gui
        # Operations currently in progress; the icon animates while non-empty.
        self._operations: set = set()
        tray_icon_path = resource(settings["application"]["tray_icon"])
        self.app_pixmap = QPixmap(tray_icon_path)
        self.app_icon = QIcon(tray_icon_path)
        self.setIcon(self.app_icon)

        self.menu = Menu(self.gui)
        self.setContextMenu(self.menu)
        self.activated.connect(self.on_click)

        self.messageClicked.connect(self.gui.show_main_window)

        self.animation = QMovie()
        self.animation.setFileName(
            resource(settings["application"]["tray_icon_sync"])
        )
        # Every new animation frame triggers a repaint of the tray icon.
        self.animation.updated.connect(self.update)
        self.animation.setCacheMode(True)

    def add_operation(self, operation: tuple) -> None:
        """Register an in-flight operation (starts the sync animation)."""
        self._operations.add(operation)

    def remove_operation(self, operation: tuple) -> None:
        """Unregister an operation; a no-op if it was never registered."""
        # set.discard() replaces the original try/except KeyError dance.
        self._operations.discard(operation)

    def update(self) -> None:
        """Redraw the tray icon to reflect sync activity and unread count.

        While operations are pending the sync animation runs; otherwise the
        static application icon is shown.  In both cases the icon is badged
        with the number of unread messages, if any.
        """
        if self._operations:
            self.animation.setPaused(False)
            pixmap = self.animation.currentPixmap()
            if self.gui.unread_messages:
                pixmap = BadgedPixmap(
                    pixmap, len(self.gui.unread_messages), 0.6
                )
            self.setIcon(QIcon(pixmap))
        else:
            self.animation.setPaused(True)
            if self.gui.unread_messages:
                self.setIcon(
                    QIcon(
                        BadgedPixmap(
                            self.app_pixmap, len(self.gui.unread_messages), 0.6
                        )
                    )
                )
            else:
                self.setIcon(self.app_icon)

    def on_click(self, value: int) -> None:
        """Show the main window on a plain click (macOS opens the menu)."""
        if value == QSystemTrayIcon.Trigger and sys.platform != "darwin":
            self.gui.show_main_window()
| gridsync/gridsync | gridsync/gui/systray.py | Python | gpl-3.0 | 2,343 |
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import datetime
from datetime import timedelta
from parameterized import parameterized
import mock
from twisted.internet import defer
from twisted.trial import unittest
from buildbot.configurators import janitor
from buildbot.configurators.janitor import JANITOR_NAME
from buildbot.configurators.janitor import BuildDataJanitor
from buildbot.configurators.janitor import JanitorConfigurator
from buildbot.configurators.janitor import LogChunksJanitor
from buildbot.process.results import SUCCESS
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.schedulers.timed import Nightly
from buildbot.test.reactor import TestReactorMixin
from buildbot.test.steps import TestBuildStepMixin
from buildbot.test.util import config as configmixin
from buildbot.test.util import configurators
from buildbot.util import datetime2epoch
from buildbot.worker.local import LocalWorker
class JanitorConfiguratorTests(configurators.ConfiguratorMixin, unittest.SynchronousTestCase):
    """Checks which worker, schedulers and builder steps JanitorConfigurator
    contributes to a master config for each horizon option."""
    ConfiguratorClass = JanitorConfigurator
    def test_nothing(self):
        # With no horizon configured, the configurator must leave the
        # master config untouched.
        self.setupConfigurator()
        self.assertEqual(self.config_dict, {
        })
    @parameterized.expand([
        ('logs', {'logHorizon': timedelta(weeks=1)}, [LogChunksJanitor]),
        ('build_data', {'build_data_horizon': timedelta(weeks=1)}, [BuildDataJanitor]),
        ('logs_build_data', {'build_data_horizon': timedelta(weeks=1),
                             'logHorizon': timedelta(weeks=1)},
         [LogChunksJanitor, BuildDataJanitor]),
    ])
    def test_steps(self, name, configuration, exp_steps):
        # Each horizon option must add its cleanup step(s) to the janitor
        # builder, alongside the local worker and the two schedulers.
        self.setupConfigurator(**configuration)
        self.expectWorker(JANITOR_NAME, LocalWorker)
        self.expectScheduler(JANITOR_NAME, Nightly)
        self.expectScheduler(JANITOR_NAME + "_force", ForceScheduler)
        self.expectBuilderHasSteps(JANITOR_NAME, exp_steps)
        self.expectNoConfigError()
class LogChunksJanitorTests(TestBuildStepMixin,
                            configmixin.ConfigErrorsMixin,
                            TestReactorMixin,
                            unittest.TestCase):
    """Exercises the LogChunksJanitor and BuildDataJanitor build steps
    against a mocked database, with "now" frozen to 2017-01-01."""
    @defer.inlineCallbacks
    def setUp(self):
        self.setup_test_reactor()
        yield self.setup_test_build_step()
        # Freeze the clock so the computed deletion horizon is deterministic.
        self.patch(janitor, "now", lambda: datetime.datetime(year=2017, month=1, day=1))
    def tearDown(self):
        return self.tear_down_test_build_step()
    @defer.inlineCallbacks
    def test_basic(self):
        self.setup_step(
            LogChunksJanitor(logHorizon=timedelta(weeks=1)))
        self.master.db.logs.deleteOldLogChunks = mock.Mock(return_value=3)
        self.expect_outcome(result=SUCCESS,
                            state_string="deleted 3 logchunks")
        yield self.run_step()
        # One week before the frozen "now".
        expected_timestamp = datetime2epoch(datetime.datetime(year=2016, month=12, day=25))
        self.master.db.logs.deleteOldLogChunks.assert_called_with(expected_timestamp)
    @defer.inlineCallbacks
    def test_build_data(self):
        self.setup_step(BuildDataJanitor(build_data_horizon=timedelta(weeks=1)))
        self.master.db.build_data.deleteOldBuildData = mock.Mock(return_value=4)
        self.expect_outcome(result=SUCCESS, state_string="deleted 4 build data key-value pairs")
        yield self.run_step()
        expected_timestamp = datetime2epoch(datetime.datetime(year=2016, month=12, day=25))
        self.master.db.build_data.deleteOldBuildData.assert_called_with(expected_timestamp)
| pmisik/buildbot | master/buildbot/test/unit/test_janitor_configurator.py | Python | gpl-2.0 | 4,167 |
# This script is actually for Cyber Security on Windows 7. Should mostly work
# for Windows 8 and 10 too. I just absolutely hate using Windows 8 and refuse
# to test it on any Windows 8 machine.
from __future__ import print_function
from subprocess import call
from subprocess import check_output
import os
############################# User Management #############################
# Get username
username = os.getenv('username')
# Make alphanumeric variable
alpha = 'abcdefghijklmnopqrstuvwxyz'
numbers = '1234567890'
alpha_numeric = alpha + alpha.upper() + numbers
registry_commands = open("commands.txt", "r")
# Initialize important variables
users = []
incoming_user = ''
times_through = 1
# Raw "net user" output.  NOTE(review): under Python 3 check_output()
# returns bytes, so str() would yield "b'...'"; this script targets
# Python 2 (it uses raw_input throughout).
temp_users = str(check_output('net user'))
# The original code looped over temp_users.replace(...) here and discarded
# every result (str.replace returns a new string), so those calls were
# no-ops.  They are removed rather than "fixed": the scanner below already
# treats every non-accepted character ('"', '\r', '\n', spaces, ...) as a
# token separator, which is what keeps usernames apart.
# " / \ [ ] : ; | = , + * ? < > are the characters not allowed in usernames
# Get a list of all users on the system
for character in temp_users:
    if character in alpha_numeric or character in "-#\'.!@$%^&()}{":
        incoming_user += character
    elif len(incoming_user) > 0:
        # The first 5 tokens come from the "net user" banner text,
        # not from account names, so they are skipped.
        if times_through > 5:
            users.append(incoming_user)
        incoming_user = ''
        times_through += 1
# Remove unnecessary stuff at end ("The command completed successfully.")
users = users[0:len(users)-4]
# Print all users
print('All the users currently on this computer are ' + str(users))
def user_management(users):
    """Interactively audit group membership for every account in *users*."""
    def should_be_admin(user):
        # Should the user be an admin
        should_be_admin = raw_input(user + " is an administrator. Should they be? y/n. ")
        if should_be_admin == 'y':
            return True
        if should_be_admin == 'n':
            return False
    def should_be_user(user):
        # Should the user be a user
        should_be_user = raw_input(user + " is a user. Should they be? y/n. ")
        if should_be_user == 'y':
            return True
        if should_be_user == 'n':
            return False
    for user in users:
        # Iterate through user list
        if user in check_output('net localgroup Administrators'):
            # If user is in the Administrators localgroup
            if not should_be_admin(user):
                print('Removing ' + user + ' from the Administrators group')
                os.system('net localgroup Administrators ' + user + ' /delete')
            else:
                print('OK. We are keeping ' + user + ' in the Administrators group.')
        else:
            should_be_user_answer = should_be_user(user)
            if not should_be_user_answer:
                print('Removing ' + user)
                os.system('net user ' + user + ' /delete')
            # NOTE(review): this prompts should_be_admin twice in a row and
            # may run right after the account above was deleted -- the
            # promotion flow looks buggy; confirm the intended behavior.
            if should_be_admin(user):
                if user not in check_output('net localgroup Administrators'):
                    if should_be_admin(user):
                        print('Adding ' + user + 'to the Administrators group')
                        os.system('net localgroup Administrators ' + user + ' /add')
# Ask if we should do user management stuff.
do_user_management = raw_input("Shall we manage users? y/n. ")
if do_user_management == 'y':
    user_management(users)
############################# Registry keys and such #############################
# Apply password policy, firewall, auditing and remote-access hardening via
# shell commands and registry writes.  All changes require admin rights.
if raw_input("Shall we change some registry stuff? y/n. ") == 'y':
    # Password policy automagic
    # NOTE(review): "Chaning" typo below is inside a runtime string; left as-is.
    print('Chaning password policies and such...')
    os.system('net accounts /FORCELOGOFF:30 /MINPWLEN:8 /MAXPWAGE:30 /MINPWAGE:10 /UNIQUEPW:5')
    # Clean DNS cache, cause why not
    print('Bro, I cleaned your DNS cache. Deal with it.')
    os.system('ipconfig /flushdns')
    # Disable built-in accounts
    print('I really hope you weren\'t the default Administrator account')
    os.system('net user Guest /active:NO')
    os.system('net user Administrator /active:NO')
    # Make auditing great again.
    print('Auditing now on! Yay!!!!')
    os.system('auditpol /set /category:* /success:enable')
    os.system('auditpol /set /category:* /failure:enable')
    # Enable firewall
    print('The firewall torch has been passed on to you')
    os.system('netsh advfirewall set allprofiles state on')
    os.system('echo You\'re going to have to type exit')
    #I have no idea what I was doing here....
    os.system('secedit /import /db secedit.sdb /cfg cyber.inf /overwrite /log MyLog.txt')
    # UAC-related DWORD values under Policies\System.
    reg_dir = '"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows\CurrentVersion\Policies\System\\ '
    for command in (('FilterAdministratorToken"','1'),('ConsentPromptBehaviorAdmin"','1'),('ConsentPromptBehaviorUser"','1'),('EnableInstallerDetection"','1'),('ValidateAdminCodeSignatures"','1'),('EnableLUA"','1'),('PromptOnSecureDesktop"','1'),('EnableVirtualization"','1'),):
        os.system('reg add ' + reg_dir + ' /v ' + command[0] + ' /t REG_DWORD /d ' + command[1] + ' /f')
    # Windows Update auto-install settings.
    reg_dir = '"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows\CurrentVersion\WindowsUpdate\Auto Update\\'
    for command in (('AUOptions"', '4'),('ElevateNonAdmins"', '1'),('IncludeRecommendedUpdates"', '1'),('ScheduledInstallTime"', '22')):
        os.system('reg add ' + reg_dir + ' /v ' + command[0] + ' /t REG_DWORD /d ' + command[1] + ' /f')
    # Disable Remote Desktop / Remote Assistance, require NLA and encryption.
    reg_dir = '"HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Control\Terminal Server\\'
    for command in (('fDenyTSConnections"', '1'),('AllowRemoteRPC"', '0')):
        os.system('reg add ' + reg_dir + ' /v ' + command[0] + ' /t REG_DWORD /d ' + command[1] + ' /f')
    reg_dir = '"HKEY_LOCAL_MACHINE\SYSTEM\ControlSet001\Control\Remote Assistance\\'
    for command in (('fAllowFullControl"','0'),('fAllowToGetHelp"','0')):
        os.system('reg add ' + reg_dir + ' /v ' + command[0] + ' /t REG_DWORD /d ' + command[1] + ' /f')
    reg_dir = '"HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Terminal Server\WinStations\RDP-Tcp\\'
    command = ('UserAuthentication"','1')
    os.system('reg add ' + reg_dir + ' /v ' + command[0] + ' /t REG_DWORD /d ' + command[1] + ' /f')
    reg_dir = '"HKEY_LOCAL_MACHINE\SYSTEM\ControlSet001\Control\Remote Assistance\\'
    command = ('CreateEncryptedOnlyTickets"','1')
    os.system('reg add ' + reg_dir + ' /v ' + command[0] + ' /t REG_DWORD /d ' + command[1] + ' /f')
    reg_dir = '"HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Terminal Server\WinStations\RDP-Tcp\\'
    command = ('fDisableEncryption"','0')
    os.system('reg add ' + reg_dir + ' /v ' + command[0] + ' /t REG_DWORD /d ' + command[1] + ' /f')
    # I have found additional commands. This one 'might' fix the host file.
    os.system('attrib -r -s C:\WINDOWS\system32\drivers\etc\hosts')
    os.system('echo > C:\Windows\System32\drivers\etc\hosts')
    # This isn't really appropriate for this option, but...
    os.system('net start > started_services.txt')
    # Remote registry
    os.system('net stop RemoteRegistry')
    os.system('sc config RemoteRegistry start=disabled')
    for service in ('RemoteAccess', 'Telephony', 'tlntsvr', 'p2pimsvc', 'simptcp', 'fax', 'msftpsvc'):
        os.system('net stop ' + service)
        # NOTE(review): 'start = disabled' (spaces) differs from the
        # 'start=disabled' form used above; sc expects "start= disabled".
        # Confirm which spelling actually works before changing it.
        os.system('sc config ' + service + ' start = disabled')
    # Replay the extra hardening commands loaded from commands.txt earlier.
    for command in registry_commands.readlines():
        os.system(command)
############################# Search for media files #############################
# Scan a directory tree for media/executable files and suspicious tool names,
# writing matches to media_files.txt, then offer a tiny command prompt.
if raw_input("Shall we search for media files? y/n. ") == 'y':
    # Ask for directory to be scanned.  NOTE(review): Python 2 input()
    # eval()s what the user types, which is why the prompt demands quotes.
    directory_to_scan = input('What directory would you like to scan for media files? Remember to enclose your directory in \'s or "s, and use two \s if your directory ends in a \. ')
    # Recursively list every file under the directory, then filter the list.
    # (A dead, commented-out os.walk() variant was removed here.)
    os.system('dir /s /b ' + directory_to_scan + ' > allfiles.txt')
    input_file = open('allfiles.txt', 'r')
    text_file = open('media_files.txt', 'w')
    # Pass 1: record files with common media/executable extensions.
    for line in input_file:
        for extension in ('.mp3', '.wav', '.png', 'wmv', '.jpg', '.jpeg', '.mp4', '.avi', '.mov', '.aif', '.iff', '.m3u', '.m4a', '.wma', '.m4v', '.mpg', '.bmp', '.gif', '.bat', '.txt', '.exe', '.zip', '.7z', '.php', '.html'):
            if line.endswith(extension + '\n'):
                text_file.write(line)
    # Pass 2: flag paths containing known hacking-tool names.
    # BUGFIX: pass 1 exhausted the file handle, so the original readlines()
    # always returned an empty list; rewind before re-reading.
    input_file.seek(0)
    for line in input_file.readlines():
        for bad_stuff in ['cain', 'able', 'nmap', 'keylogger', 'armitage', 'metasploit', 'shellter', 'clean']:
            if bad_stuff in line:
                text_file.write(line)
    text_file.close()
    input_file.close()
print('Available commands are addUser, passwords, and exit.')
command = raw_input('What would you like to do? ')
if command == 'addUser':
    username = raw_input('What is the desired username? ')
    os.system('net user ' + username + ' P@55w0rd /ADD')
if command == 'passwords':
    users_string = str(users).replace('[', '')
    # BUGFIX: the second replace() previously restarted from str(users),
    # throwing away the first substitution; chain off users_string instead.
    users_string = users_string.replace(']', '')
    username = raw_input('The current users on the machine are ' + users_string + '. Who\'s password would you like to change? ')
    new_password = raw_input('What shall the password be? ')
    # BUGFIX: the chosen password was ignored and 'P@55w0rd' was always set.
    os.system('net user ' + username + ' ' + new_password)
if command == 'exit':
    os.system('pause')
| road2ge/cyber-defense-scripts | main-for-windows.py | Python | gpl-3.0 | 9,651 |
# -*- coding: utf-8 -*-
#
#
# Author: Nicolas Bessi
# Copyright 2013-2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
{"name": "Base Comments Templates",
"summary": "Comments templates on documents",
"version": "1.0",
"depends": ["base"],
"author": "Camptocamp,Odoo Community Association (OCA)",
"data": ["comment_view.xml",
'security/ir.model.access.csv',
],
"category": "Sale",
"installable": True,
"active": False, }
| damdam-s/account-invoice-reporting | base_comment_template/__openerp__.py | Python | agpl-3.0 | 1,115 |
from httmock import HTTMock
import json
import unittest
from flask import url_for
from app import create_app, db
from app.models import User, Arc
from . import utils
class TestChex(unittest.TestCase):
    """End-to-end tests for the chex blueprint: login and arc logging."""
    def setUp(self):
        self.app = create_app('testing')
        self.app_context = self.app.app_context()
        self.app_context.push()
        db.create_all()
        self.client = self.app.test_client(use_cookies=True)
    def tearDown(self):
        db.session.remove()
        db.drop_all()
        self.app_context.pop()
    def _post_json(self, endpoint, payload):
        """POST *payload* as JSON to *endpoint* under the mocked Google auth."""
        with HTTMock(utils.google_auth):
            return self.client.post(url_for(endpoint),
                                    content_type='application/json',
                                    data=json.dumps(payload))
    def test_register_and_login_with_valid_token(self):
        assert User.query.count() == 0
        # register a new account
        self._post_json('chex.login', {'token': 'cat'})
        assert User.query.count() == 1
    def test_register_and_login_without_valid_token(self):
        assert User.query.count() == 0
        # an empty token must not create an account
        self._post_json('chex.login', {'token': ''})
        assert User.query.count() == 0
    def test_log_arc_with_valid_token(self):
        email = '[email protected]'
        User.get_or_create(email)
        assert Arc.query.count() == 0
        response = self._post_json('chex.log_arc', {
            'token': email,
            'tail_url': 'www.someurl.com/1',
            'head_url': 'www.someurl.com/2',
        })
        assert 'saved' in response.data
        assert Arc.query.count() == 1
    def test_log_arc_without_valid_token(self):
        assert Arc.query.count() == 0
        response = self._post_json('chex.log_arc', {
            'token': '',
            'tail': 'www.someurl.com/1',
            'head': 'www.someurl.com/2',
        })
        assert 'invalid' in response.data
        assert Arc.query.count() == 0
assert Arc.query.count() == 0 | kronosapiens/thena | thena/tests/test_chex.py | Python | gpl-2.0 | 2,452 |
from urllib.parse import urljoin
from django.contrib import messages
from django.db import models
from django.db.models.functions import (
Cast, Concat
)
from django.http import HttpResponseRedirect
from django.template.response import TemplateResponse
from django.views import View
from django.views.decorators.csrf import csrf_protect
from django.urls import reverse
from django.utils.decorators import method_decorator
from dal import autocomplete
from core.decorators import (
login_required, user_passes_test
)
from core.models import (
User, Batch, Election, CandidateParty, CandidatePosition,
UserType, Vote, VoterProfile
)
class CandidateUserAutoCompleteView(autocomplete.Select2QuerySetView):
    """Autocomplete for voters that can still be turned into candidates.

    Only voters of the forwarded election without an existing candidate
    profile are offered, searchable by "Last, First" prefix.  Non-admin
    requests get no suggestions.
    """
    def get_queryset(self):
        # Only admins should be able to access this view.
        if (not self.request.user.is_authenticated
                or self.request.user.type == UserType.VOTER):
            # An empty queryset (instead of a bare list) keeps the return
            # type consistent for downstream queryset operations.
            return User.objects.none()
        # No forwarded election yet means there is nothing to suggest.
        election = self.forwarded.get('election', None)
        if not election:
            return User.objects.none()
        qs = User.objects.filter(
            candidate__isnull=True,
            voter_profile__batch__election__id=election,
            type=UserType.VOTER
        )
        if self.q:
            # Match the typed prefix against "Last, First".
            qs = qs.annotate(
                name=Concat('last_name', models.Value(', '), 'first_name')
            )
            qs = qs.filter(name__istartswith=self.q)
        return qs
class CandidatePartyAutoCompleteView(autocomplete.Select2QuerySetView):
    """Autocomplete for candidate parties of a forwarded election.

    Non-admin requests and requests without a forwarded election get no
    suggestions.
    """
    def get_queryset(self):
        # Only admins should be able to access this view.
        if (not self.request.user.is_authenticated
                or self.request.user.type == UserType.VOTER):
            # Empty queryset (not a list) for a consistent return type.
            return CandidateParty.objects.none()
        election = self.forwarded.get('election', None)
        if not election:
            return CandidateParty.objects.none()
        qs = CandidateParty.objects.filter(election__id=election)
        if self.q:
            qs = qs.filter(party_name__istartswith=self.q)
        return qs
class CandidatePositionAutoCompleteView(autocomplete.Select2QuerySetView):
    """Autocomplete for candidate positions of a forwarded election.

    Non-admin requests and requests without a forwarded election get no
    suggestions.
    """
    def get_queryset(self):
        # Only admins should be able to access this view.
        if (not self.request.user.is_authenticated
                or self.request.user.type == UserType.VOTER):
            # Empty queryset (not a list) for a consistent return type.
            return CandidatePosition.objects.none()
        election = self.forwarded.get('election', None)
        if not election:
            return CandidatePosition.objects.none()
        qs = CandidatePosition.objects.filter(election__id=election)
        if self.q:
            qs = qs.filter(position_name__istartswith=self.q)
        return qs
class ElectionBatchesAutoCompleteView(autocomplete.Select2QuerySetView):
    """Autocomplete for batches belonging to a forwarded election.

    Batch years are matched as text so a partial query such as "20" matches
    2020, 2021, ...  Non-admin requests get no suggestions.
    """
    def get_queryset(self):
        # Only admins should be able to access this view.
        if (not self.request.user.is_authenticated
                or self.request.user.type == UserType.VOTER):
            # Empty queryset (not a list) for a consistent return type.
            return Batch.objects.none()
        election = self.forwarded.get('election', None)
        if not election:
            return Batch.objects.none()
        qs = Batch.objects.filter(election__id=election)
        if self.q:
            # Render the integer year as text so prefix matching works.
            qs = qs.annotate(
                year_as_char=Cast(
                    'year', output_field=models.CharField(max_length=32)
                )
            )
            # BUGFIX: the filter previously passed int(self.q), which raised
            # ValueError (an HTTP 500) whenever the query was not numeric.
            qs = qs.filter(year_as_char__istartswith=self.q)
        return qs
@method_decorator(csrf_protect, name='dispatch')
class ClearElectionConfirmationView(View):
    """Admin-only confirmation flow for clearing all votes in an election.

    GET renders a confirmation page. POST, when the 'clear_election'
    button was pressed, deletes the election's votes and resets the
    voters' ``has_voted`` flags, then redirects back to the election's
    admin change page.

    The auth check and election lookup were duplicated verbatim in both
    handlers; they are factored into private helpers below.
    """

    def _redirect_unauthorized(self, request):
        """Return a login redirect for anonymous users/voters, else None."""
        if request.user.is_anonymous or request.user.type == UserType.VOTER:
            index_url = urljoin(
                reverse('index'),
                '?next={}'.format(request.path)
            )
            return HttpResponseRedirect(index_url)
        return None

    def _fetch_election(self, request, election_id):
        """Return (election, None), or (None, redirect) if it is missing."""
        try:
            return Election.objects.get(id=election_id), None
        except Election.DoesNotExist:
            messages.error(
                request,
                'Attempted to clear votes in a non-existent election.'
            )
            return None, HttpResponseRedirect(
                reverse('admin:core_election_changelist')
            )

    def get(self, request, *args, **kwargs):
        redirect = self._redirect_unauthorized(request)
        if redirect is not None:
            return redirect

        # At this point, we can assume that the election ID parameter will
        # always be an integer. Django will complain if the user enters a
        # non-integer value.
        election_id = int(kwargs['election_id'])
        election, redirect = self._fetch_election(request, election_id)
        if redirect is not None:
            return redirect

        context = {
            'site_header': 'Botos Administration',
            'title': 'Are you sure?',
            'opts': Election._meta,
            'election': election
        }
        template_name = 'default/admin/clear_election_action_confirmation.html'
        return TemplateResponse(
            request,
            template_name,
            context
        )

    def post(self, request, *args, **kwargs):
        redirect = self._redirect_unauthorized(request)
        if redirect is not None:
            return redirect

        # See note in get() about the election ID always being an integer.
        election_id = int(kwargs['election_id'])
        election, redirect = self._fetch_election(request, election_id)
        if redirect is not None:
            return redirect

        if 'clear_election' in request.POST:
            Vote.objects.filter(election=election).delete()
            voter_profiles = VoterProfile.objects.filter(
                batch__election=election
            )
            voter_profiles.update(has_voted=False)
            messages.success(
                request,
                'Votes in \'{}\' were cleared successfully.'.format(
                    election.name
                )
            )
        return HttpResponseRedirect(
            reverse('admin:core_election_change', args=(election_id,))
        )
| seanballais/botos | core/views/admin/admin.py | Python | gpl-3.0 | 6,318 |
# -*- coding: UTF-8 -*-
# Copyright 2011-2017 Luc Saffre
# License: BSD (see file COPYING for details)
"""Model mixins for this plugin.
"""
from __future__ import unicode_literals
from builtins import object
import logging
logger = logging.getLogger(__name__)
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from lino.api import dd
from lino.core.exceptions import ChangedAPI
from lino.core import model
from lino.core import actions
from lino.core import dbtables
from lino.core.roles import SiteStaff
from lino.modlib.printing.mixins import Printable
from .roles import Helper, AuthorshipTaker
# class TimezoneHolder(models.Model):
# class Meta(object):
# abstract = True
# if settings.USE_TZ:
# timezone = models.CharField(_("Time zone"), max_length=15, blank=True)
# else:
# timezone = dd.DummyField()
# # @dd.chooser(simple_values=True)
# # def timezone_choices(cls, partner):
# # import pytz
# # if partner and partner.country:
# # return pytz.country_timezones[partner.country.isocode]
# # return pytz.common_timezones
# @dd.chooser(simple_values=True)
# def timezone_choices(cls):
# import pytz
# return pytz.common_timezones
class Authored(Printable):
    """Mixin for models that have an "author" (a responsible user).

    Subclasses must implement :meth:`get_author` semantics via the
    concrete storage (see :class:`UserAuthored`) and :meth:`set_author`.
    """
    class Meta(object):
        abstract = True
    # author_field_name = None
    # Roles whose members may edit objects authored by somebody else.
    manager_roles_required = dd.login_required(SiteStaff)
    def get_author(self):
        # NOTE(review): assumes the concrete model has a `user` attribute;
        # subclasses without one must override this.
        return self.user
        # return getattr(self, self.author_field_name)
    def set_author(self, user):
        # Concrete subclasses must store the author (see UserAuthored).
        raise NotImplementedError()
    def on_duplicate(self, ar, master):
        """The default behaviour after duplicating is to change the author to
        the user who requested the duplicate.
        """
        if ar.user is not None:
            self.set_author(ar.user)
        super(Authored, self).on_duplicate(ar, master)
    def get_row_permission(self, ar, state, ba):
        """Only "managers" can edit other users' work.
        See also :attr:`manager_roles_required`.
        """
        if not super(Authored, self).get_row_permission(ar, state, ba):
            return False
        if ba.action.select_rows:
            user = ar.get_user()
            author = self.get_author()
            # Non-managers acting on somebody else's object (neither as
            # themselves nor as the substituted user) get at most the
            # action's read-only permission.
            if author != ar.user \
               and (ar.subst_user is None or author != ar.subst_user) \
               and not user.user_type.has_required_roles(
                   self.manager_roles_required):
                return ba.action.readonly
        return True
    @classmethod
    def on_analyze(cls, site):
        # The old per-model `manager_level_field` API was replaced by
        # `manager_roles_required`; fail loudly if a subclass still uses it.
        if hasattr(cls, 'manager_level_field'):
            raise ChangedAPI("{0} has a manager_level_field".format(cls))
        super(Authored, cls).on_analyze(site)
    # no longer needed after 20170826
    # @classmethod
    # def setup_parameters(cls, **fields):
    #     """Adds the :attr:`user` filter parameter field."""
    #     fld = cls._meta.get_field('user')
    #     fields.setdefault(
    #         'user', models.ForeignKey(
    #             'users.User', verbose_name=fld.verbose_name,
    #             blank=True, null=True))
    #     return super(Authored, cls).setup_parameters(**fields)
    @classmethod
    def get_simple_parameters(cls):
        # Expose `user` as a simple filter parameter in addition to the
        # parameters contributed by base classes.
        for p in super(Authored, cls).get_simple_parameters():
            yield p
        yield 'user'  # cls.author_field_name)
    def get_print_language(self):
        # Print documents in the author's language when one is set.
        u = self.get_author()
        if u is None or not u.language:
            return super(Authored, self).get_print_language()
        return u.language
class UserAuthored(Authored):
    """Concrete :class:`Authored` mixin that stores the author in a
    nullable ``user`` foreign key.
    """
    class Meta(object):
        abstract = True

    workflow_owner_field = 'user'
    # author_field_name = 'user'

    # The author of this database object; empty means "nobody".
    user = dd.ForeignKey(
        'users.User',
        verbose_name=_("Author"),
        related_name="%(app_label)s_%(class)s_set_by_user",
        blank=True, null=True)

    def set_author(self, user):
        """Store *user* as the author of this object."""
        self.user = user
        # setattr(self, self.author_field_name, user)

    def on_create(self, ar):
        """
        Adds the requesting user to the `user` field.

        When acting as another user, the default implementation
        still inserts the real user, not subst_user.
        This is important for cal.Event.
        """
        if self.user_id is None:
            u = ar.user
            if u is not None:
                self.user = u
        super(UserAuthored, self).on_create(ar)

    def get_time_zone(self):
        """Return the author's timezone. Used by
        :class:`lino_xl.lib.cal.mixins.Started`.
        """
        # Bug fix: `rt` was referenced without being imported anywhere in
        # this module (only `dd` is imported from lino.api), so this method
        # raised NameError at runtime. Import it locally here.
        from lino.api import rt
        if self.user_id is None:
            # return settings.TIME_ZONE
            return rt.models.about.TimeZones.default
        return self.user.time_zone or rt.models.about.TimeZones.default
        # return self.user.timezone or settings.TIME_ZONE
AutoUser = UserAuthored # old name for backwards compatibility
class My(dbtables.Table):
    """Mixin for tables on :class:`Authored` which sets the requesting
    user as default value for the :attr:`author` filter parameter.
    If the table's model does *not* inherit from :class:`Authored`,
    then it must define a parameter field named 'user' and a model
    attribute `user`. This feature is used by
    :class:`lino_xl.lib.reception.models.MyWaitingVisitors`.
    Used by
    :mod:`lino_xl.lib.excerpts` and
    :mod:`lino_xl.lib.reception`.
    """
    # NOTE: these classmethods name their first argument `self` (Lino
    # convention); it is actually the actor class.
    @classmethod
    def get_actor_label(self):
        # Label the table "My <plural model name>" unless an explicit
        # label was given.
        if self.model is None:
            return self._label or self.__name__
        return self._label or \
            _("My %s") % self.model._meta.verbose_name_plural
    @classmethod
    def param_defaults(self, ar, **kw):
        # Default the `user` filter parameter to the requesting user.
        kw = super(My, self).param_defaults(ar, **kw)
        # kw.update(user=ar.get_user())
        # k = self.author_field_name or self.model.author_field_name
        # kw[k] = ar.get_user()
        # kw[self.model.author_field_name] = ar.get_user()
        kw['user'] = ar.get_user()
        return kw
# class ByUser(dbtables.Table):
# """Deprecated mixin for slave tables whose master is the requesting
# user.
# """
# master_key = 'user'
# #~ details_of_master_template = _("%(details)s of %(master)s")
# details_of_master_template = _("%(details)s")
# @classmethod
# def get_actor_label(self):
# if self.model is None:
# return self._label or self.__name__
# return self._label or \
# _("My %s") % self.model._meta.verbose_name_plural
# @classmethod
# def setup_request(self, ar):
# #~ logger.info("ByUser.setup_request")
# if ar.master_instance is None:
# u = ar.get_user()
# if not isinstance(u, AnonymousUser):
# ar.master_instance = u
# super(ByUser, self).setup_request(ar)
# @classmethod
# def get_view_permission(self, user_type):
# if not user_type.has_required_roles([SiteUser]):
# return False
# return super(ByUser, self).get_view_permission(user_type)
# if settings.SITE.user_model is None:
# # dummy Table for userless sites
# ByUser = dbtables.Table
# class AuthorAction(actions.Action):
# """Mixin for actions that are reserved to the author of the database
# object.
# """
# manager_roles_required = dd.login_required(SiteStaff)
# def get_action_permission(self, ar, obj, state):
# user = ar.get_user()
# if obj.user != user and \
# not user.user_type.has_required_roles(self.manager_roles_required):
# return self.readonly
# return super(
# AuthorAction, self).get_action_permission(ar, obj, state)
class AssignToMe(dd.Action):
    """Set yourself as assigned user.
    This will ask for confirmation and then set
    :attr:`Assignable.assigned_to`.
    """
    label = _("Assign to me")
    show_in_workflow = True
    show_in_bbar = False  # added 20180515 for noi. possible side
    # effects in welfare.
    # readonly = False
    required_roles = dd.login_required(Helper)
    # button_text = u"\u2698" # FLOWER (⚘)
    # button_text = u"\u26d1" # ⛑
    # button_text = u"\u261D" # ☝
    button_text = u"\u270B" # ✋
    # help_text = _("You become assigned to this.")
    # def get_action_permission(self, ar, obj, state):
    #     user = ar.get_user()
    #     if obj.assigned_to == user:
    #         return False
    #     if user == obj.get_author():
    #         return False
    #     return super(AssignToMe,
    #                  self).get_action_permission(ar, obj, state)
    def run_from_ui(self, ar, **kw):
        """Ask for confirmation, then store the requesting user in
        ``assigned_to`` and refresh the client view."""
        obj = ar.selected_rows[0]
        def ok(ar):
            # Runs only after the user confirmed.
            obj.assigned_to = ar.get_user()
            obj.save()
            ar.set_response(refresh=True)
        ar.confirm(ok, _("You become assigned to this."),
                   _("Are you sure?"))
class TakeAuthorship(dd.Action):
    """
    You declare to become the fully responsible user for this database
    object.
    Accordingly, this action is available only when you are not
    already fully responsible. You are fully responsible when (1)
    :attr:`Assignable.user` is set to *you* **and** (2)
    :attr:`Event.assigned_to` is *not empty*.
    Basically anybody can take any event, even if it is not assigned
    to them.
    New since 20160814 : I think that the Take action has never been
    really used. The original use case is when a user creates an
    apointment for their colleague: that colleague goes to assigned_to
    and is invited to "take" the appointment which has been agreed for
    him.
    """
    label = _("Take")
    show_in_workflow = True
    show_in_bbar = False
    # This action modifies the object, but we don't tell Lino about it
    # because we want that even non-manager users can run it on
    # objects authored by others.
    # readonly = False
    required_roles = dd.login_required(AuthorshipTaker)
    button_text = u"\u2691"
    # def get_action_permission(self, ar, obj, state):
    #     # new since 20160814
    #     if obj.get_author() == ar.get_user():
    #         return False
    #     # if obj.assigned_to != ar.get_user():
    #     #     return False
    #     # if obj.get_author() == ar.get_user():
    #     #     if obj.assigned_to is None:
    #     #         return False
    #     #     elif obj.assigned_to != ar.get_user():
    #     #         return False
    #     return super(TakeAuthorship,
    #                  self).get_action_permission(ar, obj, state)
    def run_from_ui(self, ar, **kw):
        """Ask for confirmation, then make the requesting user the author
        and clear ``assigned_to``."""
        obj = ar.selected_rows[0]
        # obj is an Assignable
        def ok(ar):
            # Runs only after the user confirmed.
            obj.set_author(ar.get_user())
            # obj.user = ar.get_user()
            obj.assigned_to = None
            #~ kw = super(TakeAuthorship,self).run(obj,ar,**kw)
            obj.save()
            ar.set_response(refresh=True)
        ar.confirm(ok,
                   _("You take responsibility for {}.").format(obj),
                   _("Are you sure?"))
class Assignable(Authored):
    """.. attribute:: assigned_to
    This field is usually empty. Setting it to another user means
    "I am not fully responsible for this item".
    This field is cleared when somebody calls
    :class:`TakeAuthorship` on the object.
    """
    class Meta(object):
        abstract = True
    # The user who was asked to take responsibility (may differ from
    # the author); empty means "nobody".
    assigned_to = dd.ForeignKey(
        settings.SITE.user_model,
        verbose_name=_("Assigned to"),
        related_name="%(app_label)s_%(class)s_set_assigned",
        blank=True, null=True)
    # Row-level actions contributed by this mixin.
    take = TakeAuthorship()
    assign_to_me = AssignToMe()
    disable_author_assign = True
    """
    Set this to False if you want that the author of an object can
    also assign themselves.
    In Lino Noi you can be author of a ticket and then assign it to
    yourself, but e.g. in group calendar management we don't want this
    behaviour.
    """
    def disabled_fields(self, ar):
        # Hide `assign_to_me` when already assigned to the requester, and
        # hide both actions for the author when author-assign is disabled.
        s = super(Assignable, self).disabled_fields(ar)
        user = ar.get_user()
        if self.assigned_to == user:
            s.add('assign_to_me')
        if self.disable_author_assign and user == self.get_author():
            s.add('assign_to_me')
            s.add('take')
        return s
    def on_create(self, ar):
        # 20130722 e.g. CreateClientEvent sets assigned_to it explicitly
        if self.assigned_to is None:
            self.assigned_to = ar.subst_user
        super(Assignable, self).on_create(ar)
| khchine5/lino | lino/modlib/users/mixins.py | Python | bsd-2-clause | 12,688 |
"""
Classes for char-to-int mapping and int-to-int mapping.
:Author: James Taylor ([email protected])
The char-to-int mapping can be used to translate a list of strings
over some alphabet to a single int array (example for encoding a multiple
sequence alignment).
The int-to-int mapping is particularly useful for creating partitions,
and provides methods to merge/split symbols in the output mapping.
The two forms of mapping can be combined, for example to encode a
multiple sequence alignment in a reduced alphabet defined by a partition
of alignment columns. Many of the helper methods provided are for
solving such alignment oriented problems.
This code was originally written for the `ESPERR`_ project which includes
software for searcing for alignment encodings that work well for specific
classification problems using various Markov chain classifiers over the
reduced encodings.
Most of the core implementation is in the pyrex/C extension
"_seqmapping.pyx" for performance reasons (specifically to avoid the
excessive bounds checking that would make a sequence/array lookup heavy
problem like this slow in pure python).
.. _ESPERR: http://www.bx.psu.edu/projects/esperr/
"""
from ._seqmapping import (
CharToIntArrayMapping,
IntToIntMapping,
)
# Char->Int mapping for DNA characters with missing data
DNA = CharToIntArrayMapping()
# Upper/lower case bases map to the same code; '-' (gap) and '*'
# (missing) get their own codes.
for _char, _code in (
    ("a", 0), ("A", 0),
    ("c", 1), ("C", 1),
    ("g", 2), ("G", 2),
    ("t", 3), ("T", 3),
    ("-", 4),
    ("*", 5),
):
    DNA.set_mapping(_char, _code)
del _char, _code
# Creating mappings
def alignment_mapping_from_file(f, char_mapping=DNA):
    """
    Create a mapping from a file of alignment columns.

    Each line of *f* holds an alignment column and an integer symbol,
    whitespace separated. Returns ``(align_count, mapping)`` where
    *align_count* is the number of sequences in the alignment.
    """
    pairs = [line.split() for line in f]
    columns = [column for column, _ in pairs]
    symbols = [int(symbol) for _, symbol in pairs]
    align_count = len(columns[0])
    # One input slot for every possible encoded column.
    mapping = IntToIntMapping(char_mapping.get_out_size() ** align_count)
    for column, symbol in zip(columns, symbols):
        index = char_mapping.translate_list(list(column))[0]
        mapping.set_mapping(index, symbol)
    return align_count, mapping
def second_mapping_from_file(f, first_mapping, char_mapping=DNA):
    """
    Create a mapping layered on top of *first_mapping* from a file of
    alignment columns (same format as `alignment_mapping_from_file`).

    Columns that *first_mapping* does not map (negative output) are skipped.
    """
    mapping = IntToIntMapping(first_mapping.get_out_size())
    for line in f:
        column, symbol = line.split()
        index = char_mapping.translate_list(list(column))[0]
        if first_mapping[index] >= 0:
            mapping.set_mapping(first_mapping[index], int(symbol))
    return mapping
def identity_mapping(size):
    """Return an `IntToIntMapping` mapping every symbol in range(size) to itself."""
    identity = IntToIntMapping(size)
    for symbol in range(size):
        identity.set_mapping(symbol, symbol)
    return identity
| bxlab/bx-python | lib/bx/seqmapping.py | Python | mit | 2,856 |
from __future__ import absolute_import
__all__ = ('Device', )
from sentry.interfaces.base import Interface, InterfaceValidationError
from sentry.utils.safe import trim, trim_dict
class Device(Interface):
    """
    An interface which describes the device.

    >>> {
    >>>     "name": "Windows",
    >>>     "version": "95",
    >>>     "build": "95.0.134.1651",
    >>>     "arbitrary": "data"
    >>> }
    """

    @classmethod
    def to_python(cls, data):
        """Build a Device from raw event data, validating required keys."""
        payload = data.copy()
        # Extra data either lives under a 'data' key or is whatever is
        # left of the payload after the known keys are popped below.
        extra = payload.pop('data', payload)
        if not isinstance(extra, dict):
            extra = {}

        try:
            name = trim(payload.pop('name'), 64)
        except KeyError:
            raise InterfaceValidationError("Missing or invalid value for 'name'")

        try:
            version = trim(payload.pop('version'), 64)
        except KeyError:
            raise InterfaceValidationError("Missing or invalid value for 'version'")

        return cls(
            name=name,
            version=version,
            build=trim(payload.pop('build', None), 64),
            data=trim_dict(extra),
        )

    def get_api_context(self, is_public=False):
        """Serialize this interface for API consumers."""
        return dict(
            name=self.name,
            version=self.version,
            build=self.build,
            data=self.data,
        )

    def get_path(self):
        # Storage path of this interface inside an event.
        return 'device'

    def get_hash(self):
        # Devices do not contribute to event grouping.
        return []
| jean/sentry | src/sentry/interfaces/device.py | Python | bsd-3-clause | 1,495 |
"""
Weighting functions
-------------------
Module which depends on density_assignation module and creates some util
functions for the computation of weighted values.
This function can act as a compute_characs function.
"""
from density_assignation import from_distance_to_weights, compute_measure_i
def create_weighted_function(f_weighs, params_w, f_dens, params_d):
    """Functions which acts in order to create a weighted function. You have to
    give the needed parameters and it returns to you a function.

    Parameters
    ----------
    f_weighs: functions, str
        function of weighs assignation. It transforms the distance to weights.
    params_w: dict
        parameters needed to apply f_weighs.
    f_dens: function, set_scale_surgauss
        function of density assignation.
    params_d: dict
        parameters needed to apply f_dens.

    Returns
    -------
    f: function
        a function which has as inputs the dists and values of the neighs
        points.
    """
    def weighted_measure(values, dists):
        # Distances -> weights, then weights + values -> measure.
        weights = from_distance_to_weights(dists, f_weighs, params_w)
        return compute_measure_i(weights, values, f_dens, params_d)
    return weighted_measure
| tgquintela/pySpatialTools | pySpatialTools/FeatureManagement/Interpolation_utils/weighting_functions.py | Python | mit | 1,198 |
#!/usr/bin/env python
# coding=utf-8
"""todotxt-machine
Usage:
todotxt-machine
todotxt-machine TODOFILE [DONEFILE]
todotxt-machine [--config FILE]
todotxt-machine (-h | --help)
todotxt-machine --version
todotxt-machine --show-default-bindings
Options:
-c FILE --config=FILE Path to your todotxt-machine configuraton file [default: ~/.todotxt-machinerc]
-h --help Show this screen.
--version Show version.
--show-default-bindings Show default keybindings in config parser format
Add this to your config file and edit to customize
"""
import sys
import os
import random
from collections import OrderedDict
# import ipdb; # ipdb.set_trace()
# import pprint
# pp = pprint.PrettyPrinter(indent=4).pprint
# Import the correct version of configparser
if sys.version_info[0] >= 3:
import configparser
config_parser_module = configparser
elif sys.version_info[0] < 3:
import ConfigParser
config_parser_module = ConfigParser
from docopt import docopt
import todotxt_machine
from todotxt_machine.todo import Todos
from todotxt_machine.urwid_ui import UrwidUI
from todotxt_machine.colorscheme import ColorScheme
from todotxt_machine.keys import KeyBindings
def exit_with_error(message):
    """Write *message* to stderr, print the usage section of the module
    docstring, and terminate with exit status 1."""
    stripped = message.strip(' \n')
    sys.stderr.write(stripped + '\n')
    # The second docstring paragraph is the "Usage:" section.
    usage = __doc__.split('\n\n')[1]
    print(usage)
    exit(1)
def get_real_path(filename, description):
    """Return the canonical path for *filename*, creating the file if needed.

    Expands environment variables and '~', resolves symlinks, and then:
    - exits with an error if the path is a directory,
    - creates an empty file if the parent directory exists but the file
      does not,
    - exits with an error if the parent directory does not exist.

    *description* is a human-readable label (e.g. 'todo.txt') used in
    error messages.
    """
    # expand enviroment variables and username, get canonical path
    file_path = os.path.realpath(os.path.expanduser(os.path.expandvars(filename)))
    if os.path.isdir(file_path):
        exit_with_error("ERROR: Specified {0} file is a directory.".format(description))
    if not os.path.exists(file_path):
        directory = os.path.dirname(file_path)
        if os.path.isdir(directory):
            # directory exists, but no todo.txt file - create an empty one
            open(file_path, 'a').close()
        else:
            # Bug fix: the second placeholder was '{0}' as well, so the
            # directory name was printed where the file description
            # belonged and the second format argument was ignored.
            exit_with_error("ERROR: The directory: '{0}' does not exist\n\nPlease create the directory or specify a different\n{1} file on the command line.".format(directory, description))
    return file_path
def main():
    """Entry point: parse CLI arguments and the config file, load the
    todo/done files, run the urwid UI, and save on exit."""
    random.seed()
    # Parse command line
    arguments = docopt(__doc__, version=todotxt_machine.version)
    # Validate readline editing mode option (docopt doesn't handle this)
    # if arguments['--readline-editing-mode'] not in ['vi', 'emacs']:
    #     exit_with_error("--readline-editing-mode must be set to either vi or emacs\n")
    # Parse config file
    cfg = config_parser_module.ConfigParser(allow_no_value=True)
    cfg.add_section('keys')
    if arguments['--show-default-bindings']:
        # Dump default keybindings in config-file format and quit.
        d = {k: ", ".join(v) for k,v in KeyBindings({}).key_bindings.items()}
        cfg._sections['keys'] = OrderedDict(sorted(d.items(), key=lambda t: t[0]))
        cfg.write(sys.stdout)
        exit(0)
    cfg.add_section('settings')
    cfg.read(os.path.expanduser(arguments['--config']))
    # Load keybindings specified in the [keys] section of the config file
    keyBindings = KeyBindings(dict( cfg.items('keys') ))
    # load the colorscheme defined in the user config, else load the default scheme
    colorscheme = ColorScheme(dict( cfg.items('settings') ).get('colorscheme', 'default'), cfg)
    # Load the todo.txt file specified in the [settings] section of the config file
    # a todo.txt file on the command line takes precedence
    todotxt_file = dict( cfg.items('settings') ).get('file', arguments['TODOFILE'])
    if arguments['TODOFILE']:
        todotxt_file = arguments['TODOFILE']
    if todotxt_file is None:
        exit_with_error("ERROR: No todo file specified. Either specify one as an argument on the command line or set it in your configuration file ({0}).".format(arguments['--config']))
    # Load the done.txt file specified in the [settings] section of the config file
    # a done.txt file on the command line takes precedence
    donetxt_file = dict( cfg.items('settings') ).get('archive', arguments['DONEFILE'])
    if arguments['DONEFILE']:
        donetxt_file = arguments['DONEFILE']
    todotxt_file_path = get_real_path(todotxt_file, 'todo.txt')
    if donetxt_file is not None:
        donetxt_file_path = get_real_path(donetxt_file, 'done.txt')
    else:
        donetxt_file_path = None
    try:
        with open(todotxt_file_path, "r") as todotxt_file:
            todos = Todos(todotxt_file.readlines(), todotxt_file_path, donetxt_file_path)
    # NOTE(review): bare `except:` also swallows KeyboardInterrupt and
    # SystemExit; it should be narrowed to EnvironmentError/OSError.
    except:
        # NOTE(review): this format string uses {0} twice, so the second
        # argument (the config path) is never shown; the second placeholder
        # should be {1}.
        exit_with_error("ERROR: unable to open {0}\n\nEither specify one as an argument on the command line or set it in your configuration file ({0}).".format(todotxt_file_path, arguments['--config']))
        # NOTE(review): unreachable — exit_with_error() calls exit(1) above.
        todos = Todos([], todotxt_file_path, donetxt_file_path)
    view = UrwidUI(todos, keyBindings, colorscheme)
    view.main()
    # print("Writing: {0}".format(todotxt_file_path))
    view.todos.save()
    exit(0)
if __name__ == '__main__':
    main()
| rpesche/todotxt-machine | todotxt_machine/cli.py | Python | gpl-3.0 | 5,012 |
import wandb
import multiprocessing
def mp_func():
    """This needs to be defined at the module level to be picklable and sendable to
    the spawned process via multiprocessing"""
    # Runs inside the child process; the printed line proves the spawn worked.
    print("hello from the other side")
def main():
    """Start a wandb run, spawn a child process running mp_func, wait for
    it to finish, then finish the run."""
    wandb.init()
    # The "spawn" start method pickles the target, which is why mp_func
    # must live at module level.
    context = multiprocessing.get_context("spawn")
    p = context.Process(target=mp_func)
    p.start()
    p.join()
    wandb.finish()
| wandb/client | tests/utils/test_mod.py | Python | mit | 391 |
from easypy.humanize import from_hexdump, hexdump, IndentableTextBuffer, format_table
_SAMPLE_DATA = b'J\x9c\xe8Z!\xc2\xe6\x8b\xa0\x01\xcb\xc3.x]\x11\x9bsC\x1c\xb2\xcd\xb3\x9eM\xf7\x13`\xc8\xce\xf8g1H&\xe2\x9b' \
b'\xd1\xa8\xfd\x14\x08U\x175\xc7\x03q\xac\xda\xe6)q}}T44\x9e\xb5;\xf1.\xf6*\x16\xba\xe0~m\x96o\xb8\xa4Tl\x96\x8a\xc7' \
b'\x9a\xc9\xc4\xf2\xb1\x9e\x13\x0b\xe2i\xc6\xd8\x92\xde\xfabn6\xea\xf5_y>\x15\xc5\xd5\xa0\x05\xbd\xea\xb8\xba\x80+P' \
b'\xa7\xd8\xad\xbf\x91<\xca\xc5\x94\xe6\xfc-\xab4ABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABAB' \
b'ABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABA' \
b'BABABABABABABABABABABABABABABABABABABABABCABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABAB' \
b'ABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABA' \
b'BABABABABABABABABABABABABABABABABABABABABAB\xdc)n/\x9aNy\x9f\x03\xc7j\x14\x08\x1a\x08\x91@\xad\xac\xa9(\x1a\x8b\x9f' \
b'\x81\xb0us\x87\x9e4\xf9\x991w39\xd0\x98XokH\xa6\xc9Rv\xbc\xac\x90;\xac\x83\xc8\xba`V\xa9\xc3u\xb2\xccV\x9d\x06\xb3' \
b'\xf0\x1e\xb4K\x10\x9c\x83\xdc\xe7\xcb\x0c\x9a\x8c\x80\x010\x8ca\xf85Z\x9c'
def test_hexdump_functions():
    """hexdump/from_hexdump round-trip under several width/grouping settings."""
    assert from_hexdump(hexdump(_SAMPLE_DATA)) == _SAMPLE_DATA
    assert from_hexdump(hexdump(_SAMPLE_DATA, 24, 2)) == _SAMPLE_DATA
    assert from_hexdump(hexdump(_SAMPLE_DATA, 16, 1, False)) == _SAMPLE_DATA
    assert from_hexdump(hexdump(_SAMPLE_DATA, 4, 4)) == _SAMPLE_DATA
    # The 'hexdump'/'hexdump_24_2' codecs mirror the two functions —
    # presumably registered on import by easypy.humanize (verify).
    assert _SAMPLE_DATA.decode("hexdump_24_2") == hexdump(_SAMPLE_DATA, 24, 2)
    assert hexdump(_SAMPLE_DATA, 24, 2).encode("hexdump") == _SAMPLE_DATA
def test_indentable_text_buffer():
    """Render an IndentableTextBuffer three ways and compare against
    checked-in golden files (tests/indentable_buffer{1,2,3}.txt)."""
    from io import StringIO
    buff = IndentableTextBuffer("Exc")
    buff.write("a")
    buff.write("b")
    with buff.indent("Header2"):
        buff.write(hexdump(_SAMPLE_DATA, 24, 8))
    buff.write("hello")
    buff.write("world")
    with buff.indent("Header2"):
        # buff.write(format_in_columns([str(i) for i in range(100)], 50))
        # Empty nested sections should disappear when prune=True.
        with buff.indent("This should be pruned away"):
            with buff.indent("This should be pruned away"):
                pass
        with buff.indent("Header3"):
            buff.write("text3")
        buff.write("text2")
    f = StringIO()
    buff.render(prune=True, textual=True, width=120, file=f)
    assert open("tests/indentable_buffer1.txt", "r").read() == f.getvalue()
    f = StringIO()
    buff.render(prune=True, textual=False, width=40, overflow="ignore", file=f)
    assert open("tests/indentable_buffer2.txt", "r").read() == f.getvalue()
    f = StringIO()
    buff.render(prune=True, textual=False, width=40, edges=False, file=f)
    assert open("tests/indentable_buffer3.txt", "r").read() == f.getvalue()
def test_format_table_with_titles():
    """By default the first row is rendered as titles with a separator line."""
    rows = [
        'abc',
        range(3),
        [None, True, False],
        [dict(x='x'), b'bytes', 'string'],
    ]
    expected = (
        "a         |b       |c     \n"
        "--------------------------\n"
        "         0|       1|     2\n"
        "None      |True    |False \n"
        "{'x': 'x'}|b'bytes'|string\n")
    assert format_table(rows) == expected
def test_format_table_without_titles():
    """With titles=False the first row is plain data — no separator line."""
    rows = [
        'abc',
        range(3),
        [None, True, False],
        [dict(x='x'), b'bytes', 'string'],
    ]
    expected = (
        "a         |b       |c     \n"
        "         0|       1|     2\n"
        "None      |True    |False \n"
        "{'x': 'x'}|b'bytes'|string\n")
    assert format_table(rows, titles=False) == expected
| weka-io/easypy | tests/test_humanize.py | Python | bsd-3-clause | 3,712 |
# Copyright 2017 the Isard-vdi project authors:
# Josep Maria Viñolas Auquer
# Alberto Larraz Dalmases
# License: AGPLv3
#!/usr/bin/env python
# coding=utf-8
class CategoryNotFound(Exception):
    """Raised when the requested category does not exist."""
    pass
class GroupNotFound(Exception):
    """Raised when the requested group does not exist."""
    pass
class UserTemplateNotFound(Exception):
    """Raised when a user's template cannot be found."""
    pass
class TemplateNotFound(Exception):
    """Raised when the requested template does not exist."""
    pass
class NewUserNotInserted(Exception):
    """Raised when inserting a new user into the database fails."""
    pass
class NewDesktopNotInserted(Exception):
    """Raised when inserting a new desktop into the database fails."""
    pass
class DesktopNotStarted(Exception):
    """Raised when a desktop fails to reach the started state."""
    pass
class DesktopFailed(Exception):
    """Raised when a desktop enters a failed state."""
    pass
class DomainNotFound(Exception):
    """Raised when the requested domain does not exist."""
    pass
class DomainNotStarted(Exception):
    """Raised when a domain is not running when it was expected to be."""
    pass
class HypervisorPoolNotFound(Exception):
    """Raised when the requested hypervisor pool does not exist."""
    pass
class DomainHypervisorSSLPortNotFound(Exception):
    """Raised when a domain's hypervisor SSL port cannot be determined."""
    pass
class DomainHypervisorPortNotFound(Exception):
    """Raised when a domain's hypervisor port cannot be determined."""
    pass
| isard-vdi/isard | webapp/webapp/webapp/lib/viewer_exc.py | Python | agpl-3.0 | 805 |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Relaxed OneHotCategorical distribution classes."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.distributions.python.ops import bijectors
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops.distributions import distribution
from tensorflow.python.ops.distributions import transformed_distribution
from tensorflow.python.ops.distributions import util as distribution_util
from tensorflow.python.ops.distributions import kullback_leibler
from tensorflow.python.ops.distributions import util as distribution_util
from tensorflow.python.framework import tensor_shape
class Bernoulli(distribution.Distribution):
"""Bernoulli distribution.
The Bernoulli distribution with `probs` parameter, i.e., the probability of a
`1` outcome (vs a `0` outcome).
"""
def __init__(self,
logits=None,
probs=None,
uniform_noise=None,
dtype=dtypes.int32,
validate_args=False,
allow_nan_stats=True,
name="Bernoulli"):
"""Construct Bernoulli distributions.
Args:
logits: An N-D `Tensor` representing the log-odds of a `1` event. Each
entry in the `Tensor` parametrizes an independent Bernoulli distribution
where the probability of an event is sigmoid(logits). Only one of
`logits` or `probs` should be passed in.
probs: An N-D `Tensor` representing the probability of a `1`
event. Each entry in the `Tensor` parameterizes an independent
Bernoulli distribution. Only one of `logits` or `probs` should be passed
in.
dtype: The type of the event samples. Default: `int32`.
validate_args: Python `bool`, default `False`. When `True` distribution
parameters are checked for validity despite possibly degrading runtime
performance. When `False` invalid inputs may silently render incorrect
outputs.
allow_nan_stats: Python `bool`, default `True`. When `True`,
statistics (e.g., mean, mode, variance) use the value "`NaN`" to
indicate the result is undefined. When `False`, an exception is raised
if one or more of the statistic's batch members are undefined.
name: Python `str` name prefixed to Ops created by this class.
Raises:
ValueError: If p and logits are passed, or if neither are passed.
"""
parameters = locals()
with ops.name_scope(name):
self._logits, self._probs = distribution_util.get_logits_and_probs(
logits=logits,
probs=probs,
validate_args=validate_args,
name=name)
self._noise = array_ops.identity(uniform_noise, name="uniform_noise")
super(Bernoulli, self).__init__(
dtype=dtype,
reparameterization_type=distribution.NOT_REPARAMETERIZED,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
parameters=parameters,
graph_parents=[self._logits, self._probs],
name=name)
@staticmethod
def _param_shapes(sample_shape):
return {"logits": ops.convert_to_tensor(sample_shape, dtype=dtypes.int32)}
@property
def logits(self):
"""Log-odds of a `1` outcome (vs `0`)."""
return self._logits
@property
def probs(self):
"""Probability of a `1` outcome (vs `0`)."""
return self._probs
@property
def noise(self):
return self._noise
def _batch_shape_tensor(self):
return array_ops.shape(self._logits)
def _batch_shape(self):
return self._logits.get_shape()
def _event_shape_tensor(self):
return array_ops.constant([], dtype=dtypes.int32)
def _event_shape(self):
return tensor_shape.scalar()
def _sample_n(self, n, seed=None):
new_shape = array_ops.concat([[n], self.batch_shape_tensor()], 0)
#uniform = random_ops.random_uniform(
# new_shape, seed=seed, dtype=self.probs.dtype)
sample = math_ops.less(self.noise, self.probs)
sample = math_ops.cast(sample, self.dtype)
ret = array_ops.reshape(sample, new_shape)
return ret
  def _log_prob(self, event):
    """Log-probability of `event` via sigmoid cross entropy on the logits."""
    if self.validate_args:
      event = distribution_util.embed_check_integer_casting_closed(
          event, target_dtype=dtypes.bool)
    # TODO(jaana): The current sigmoid_cross_entropy_with_logits has
    # inconsistent behavior for logits = inf/-inf.
    event = math_ops.cast(event, self.logits.dtype)
    logits = self.logits
    # sigmoid_cross_entropy_with_logits doesn't broadcast shape,
    # so we do this here.
    def _broadcast(logits, event):
      # Multiplying by ones_like of the other operand broadcasts both
      # tensors to their common shape.
      return (array_ops.ones_like(event) * logits,
              array_ops.ones_like(logits) * event)
    # First check static shape.
    if (event.get_shape().is_fully_defined() and
        logits.get_shape().is_fully_defined()):
      if event.get_shape() != logits.get_shape():
        logits, event = _broadcast(logits, event)
    else:
      # Shapes only known at run time: decide inside the graph.
      logits, event = control_flow_ops.cond(
          distribution_util.same_dynamic_shape(logits, event),
          lambda: (logits, event),
          lambda: _broadcast(logits, event))
    return -nn.sigmoid_cross_entropy_with_logits(labels=event, logits=logits)
  def _prob(self, event):
    # Probability is the exponential of the log-probability.
    return math_ops.exp(self._log_prob(event))
  def _entropy(self):
    # Binary entropy expressed in terms of logits:
    #   H = -logits * (sigmoid(logits) - 1) + softplus(-logits)
    return (-self.logits * (math_ops.sigmoid(self.logits) - 1) +
            nn.softplus(-self.logits))
  def _mean(self):
    # E[X] = p for a Bernoulli(p).
    return array_ops.identity(self.probs)
  def _variance(self):
    # Var[X] = p * (1 - p).
    return self._mean() * (1. - self.probs)
  def _mode(self):
    """Returns `1` if `prob > 0.5` and `0` otherwise."""
    # Strict comparison: a tie (p == 0.5) resolves to 0.
    return math_ops.cast(self.probs > 0.5, self.dtype)
class ExpRelaxedOneHotCategorical(distribution.Distribution):
  """ExpRelaxedOneHotCategorical distribution with temperature and logits.
  An ExpRelaxedOneHotCategorical distribution is a log-transformed
  RelaxedOneHotCategorical distribution. The RelaxedOneHotCategorical is a
  distribution over random probability vectors, vectors of positive real
  values that sum to one, which continuously approximates a OneHotCategorical.
  The degree of approximation is controlled by a temperature: as the temperature
  goes to 0 the RelaxedOneHotCategorical becomes discrete with a distribution
  described by the logits, as the temperature goes to infinity the
  RelaxedOneHotCategorical becomes the constant distribution that is identically
  the constant vector of (1/event_size, ..., 1/event_size).
  Because computing log-probabilities of the RelaxedOneHotCategorical can
  suffer from underflow issues, this class is one solution for loss
  functions that depend on log-probabilities, such as the KL Divergence found
  in the variational autoencoder loss. The KL divergence between two
  distributions is invariant under invertible transformations, so evaluating
  KL divergences of ExpRelaxedOneHotCategorical samples, which are always
  followed by a `tf.exp` op, is equivalent to evaluating KL divergences of
  RelaxedOneHotCategorical samples. See the appendix of Maddison et al., 2016
  for more mathematical details, where this distribution is called the
  ExpConcrete.
  NOTE: this variant takes a fixed, externally supplied `gumbel_noise`
  tensor instead of drawing fresh Gumbel noise in `_sample_n`.
  #### Examples
  Creates a continuous distribution, whose exp approximates a 3-class one-hot
  categorical distribution. The 2nd class is the most likely to be the
  largest component in samples drawn from this distribution. If those samples
  are followed by a `tf.exp` op, then they are distributed as a relaxed onehot
  categorical.
  ```python
  temperature = 0.5
  p = [0.1, 0.5, 0.4]
  dist = ExpRelaxedOneHotCategorical(temperature, probs=p)
  samples = dist.sample()
  exp_samples = tf.exp(samples)
  # exp_samples has the same distribution as samples from
  # RelaxedOneHotCategorical(temperature, probs=p)
  ```
  Creates a continuous distribution, whose exp approximates a 3-class one-hot
  categorical distribution. The 2nd class is the most likely to be the
  largest component in samples drawn from this distribution.
  ```python
  temperature = 0.5
  logits = [-2, 2, 0]
  dist = ExpRelaxedOneHotCategorical(temperature, logits=logits)
  samples = dist.sample()
  exp_samples = tf.exp(samples)
  # exp_samples has the same distribution as samples from
  # RelaxedOneHotCategorical(temperature, probs=p)
  ```
  Creates a continuous distribution, whose exp approximates a 3-class one-hot
  categorical distribution. Because the temperature is very low, samples from
  this distribution are almost discrete, with one component almost 0 and the
  others very negative. The 2nd class is the most likely to be the largest
  component in samples drawn from this distribution.
  ```python
  temperature = 1e-5
  logits = [-2, 2, 0]
  dist = ExpRelaxedOneHotCategorical(temperature, logits=logits)
  samples = dist.sample()
  exp_samples = tf.exp(samples)
  # exp_samples has the same distribution as samples from
  # RelaxedOneHotCategorical(temperature, probs=p)
  ```
  Creates a continuous distribution, whose exp approximates a 3-class one-hot
  categorical distribution. Because the temperature is very high, samples from
  this distribution are usually close to the (-log(3), -log(3), -log(3)) vector.
  The 2nd class is still the most likely to be the largest component
  in samples drawn from this distribution.
  ```python
  temperature = 10
  logits = [-2, 2, 0]
  dist = ExpRelaxedOneHotCategorical(temperature, logits=logits)
  samples = dist.sample()
  exp_samples = tf.exp(samples)
  # exp_samples has the same distribution as samples from
  # RelaxedOneHotCategorical(temperature, probs=p)
  ```
  Chris J. Maddison, Andriy Mnih, and Yee Whye Teh. The Concrete Distribution:
  A Continuous Relaxation of Discrete Random Variables. 2016.
  """
  def __init__(
      self,
      temperature,
      logits=None,
      probs=None,
      gumbel_noise=None,
      dtype=dtypes.float32,
      validate_args=False,
      allow_nan_stats=True,
      name="ExpRelaxedOneHotCategorical"):
    """Initialize ExpRelaxedOneHotCategorical using class log-probabilities.
    Args:
      temperature: An 0-D `Tensor`, representing the temperature
        of a set of ExpRelaxedCategorical distributions. The temperature should
        be positive.
      logits: An N-D `Tensor`, `N >= 1`, representing the log probabilities
        of a set of ExpRelaxedCategorical distributions. The first
        `N - 1` dimensions index into a batch of independent distributions and
        the last dimension represents a vector of logits for each class. Only
        one of `logits` or `probs` should be passed in.
      probs: An N-D `Tensor`, `N >= 1`, representing the probabilities
        of a set of ExpRelaxedCategorical distributions. The first
        `N - 1` dimensions index into a batch of independent distributions and
        the last dimension represents a vector of probabilities for each
        class. Only one of `logits` or `probs` should be passed in.
      gumbel_noise: Externally supplied Gumbel noise tensor added to the
        (flattened) logits in `_sample_n` instead of freshly drawn noise.
        NOTE(review): no `None` handling — `array_ops.identity(None)` below;
        confirm callers always provide it.
      dtype: The type of the event samples (default: float32).
      validate_args: Python `bool`, default `False`. When `True` distribution
        parameters are checked for validity despite possibly degrading runtime
        performance. When `False` invalid inputs may silently render incorrect
        outputs.
      allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
        (e.g., mean, mode, variance) use the value "`NaN`" to indicate the
        result is undefined. When `False`, an exception is raised if one or
        more of the statistic's batch members are undefined.
      name: Python `str` name prefixed to Ops created by this class.
    """
    parameters = locals()
    with ops.name_scope(name, values=[logits, probs, temperature]):
      with ops.control_dependencies([check_ops.assert_positive(temperature)]
                                    if validate_args else []):
        self._temperature = array_ops.identity(temperature, name="temperature")
        self._temperature_2d = array_ops.reshape(temperature, [-1, 1],
                                                 name="temperature_2d")
      self._logits, self._probs = distribution_util.get_logits_and_probs(
          name=name, logits=logits, probs=probs, validate_args=validate_args,
          multidimensional=True)
      self._gumbel_noise = array_ops.identity(gumbel_noise, name="gumbel_noise")
      logits_shape_static = self._logits.get_shape().with_rank_at_least(1)
      if logits_shape_static.ndims is not None:
        self._batch_rank = ops.convert_to_tensor(
            logits_shape_static.ndims - 1,
            dtype=dtypes.int32,
            name="batch_rank")
      else:
        with ops.name_scope(name="batch_rank"):
          self._batch_rank = array_ops.rank(self._logits) - 1
      with ops.name_scope(name="event_size"):
        self._event_size = array_ops.shape(self._logits)[-1]
    super(ExpRelaxedOneHotCategorical, self).__init__(
        dtype=dtype,
        reparameterization_type=distribution.FULLY_REPARAMETERIZED,
        validate_args=validate_args,
        allow_nan_stats=allow_nan_stats,
        parameters=parameters,
        graph_parents=[self._logits,
                       self._probs,
                       self._temperature],
        name=name)
  @property
  def event_size(self):
    """Scalar `int32` tensor: the number of classes."""
    return self._event_size
  @property
  def temperature(self):
    """Batchwise temperature tensor of a RelaxedCategorical."""
    return self._temperature
  @property
  def logits(self):
    """Vector of coordinatewise logits."""
    return self._logits
  @property
  def probs(self):
    """Vector of probabilities summing to one."""
    return self._probs
  @property
  def gumbel_noise(self):
    """Externally supplied Gumbel noise tensor used by `_sample_n`."""
    return self._gumbel_noise
  def _batch_shape_tensor(self):
    # All but the last (class) dimension of `logits` index the batch.
    return array_ops.shape(self._logits)[:-1]
  def _batch_shape(self):
    return self.logits.get_shape()[:-1]
  def _event_shape_tensor(self):
    return array_ops.shape(self.logits)[-1:]
  def _event_shape(self):
    return self.logits.get_shape().with_rank_at_least(1)[-1:]
  def _sample_n(self, n, seed=None):
    """Sample via log-softmax of (gumbel_noise + logits) / temperature."""
    sample_shape = array_ops.concat([[n], array_ops.shape(self.logits)], 0)
    logits = self.logits * array_ops.ones(sample_shape)
    logits_2d = array_ops.reshape(logits, [-1, self.event_size])
    # Uniform variates must be sampled from the open-interval `(0, 1)` rather
    # than `[0, 1)`. To do so, we use `np.finfo(self.dtype.as_numpy_dtype).tiny`
    # because it is the smallest, positive, "normal" number. A "normal" number
    # is such that the mantissa has an implicit leading 1. Normal, positive
    # numbers x, y have the reasonable property that, `x + y >= max(x, y)`. In
    # this case, a subnormal number (i.e., np.nextafter) can cause us to sample
    # 0.
    # NOTE(review): the triple-quoted string below is the original random
    # sampling, deliberately disabled in favor of `self.gumbel_noise`; it is
    # a no-op expression statement (dead code), not documentation.
    """uniform = random_ops.random_uniform(
        shape=array_ops.shape(logits_2d),
        minval=np.finfo(self.dtype.as_numpy_dtype).tiny,
        maxval=1.,
        dtype=self.dtype,
        seed=seed)
    print(uniform.shape)
    raise Exception()"""
    #gumbel = -math_ops.log(-math_ops.log(uniform))
    noisy_logits = math_ops.div(self.gumbel_noise + logits_2d, self._temperature_2d)
    samples = nn_ops.log_softmax(noisy_logits)
    ret = array_ops.reshape(samples, sample_shape)
    return ret
  def _log_prob(self, x):
    """ExpConcrete log-density of `x` (see Maddison et al., 2016)."""
    x = self._assert_valid_sample(x)
    # broadcast logits or x if need be.
    logits = self.logits
    if (not x.get_shape().is_fully_defined() or
        not logits.get_shape().is_fully_defined() or
        x.get_shape() != logits.get_shape()):
      logits = array_ops.ones_like(x, dtype=logits.dtype) * logits
      x = array_ops.ones_like(logits, dtype=x.dtype) * x
    logits_shape = array_ops.shape(math_ops.reduce_sum(logits, axis=[-1]))
    logits_2d = array_ops.reshape(logits, [-1, self.event_size])
    x_2d = array_ops.reshape(x, [-1, self.event_size])
    # compute the normalization constant
    k = math_ops.cast(self.event_size, x.dtype)
    log_norm_const = (math_ops.lgamma(k)
                      + (k - 1.)
                      * math_ops.log(self.temperature))
    # compute the unnormalized density
    log_softmax = nn_ops.log_softmax(logits_2d - x_2d * self._temperature_2d)
    log_unnorm_prob = math_ops.reduce_sum(log_softmax, [-1], keep_dims=False)
    # combine unnormalized density with normalization constant
    log_prob = log_norm_const + log_unnorm_prob
    # Reshapes log_prob to be consistent with shape of user-supplied logits
    ret = array_ops.reshape(log_prob, logits_shape)
    return ret
  def _prob(self, x):
    # Probability is the exponential of the log-probability.
    return math_ops.exp(self._log_prob(x))
  def _assert_valid_sample(self, x):
    # Valid samples are log-probability vectors: non-positive entries whose
    # logsumexp is 0 (i.e. exp(x) sums to one).
    if not self.validate_args:
      return x
    return control_flow_ops.with_dependencies([
        check_ops.assert_non_positive(x),
        distribution_util.assert_close(
            array_ops.zeros([], dtype=self.dtype),
            math_ops.reduce_logsumexp(x, axis=[-1])),
    ], x)
class RelaxedOneHotCategorical(
    transformed_distribution.TransformedDistribution):
  """RelaxedOneHotCategorical distribution with temperature and logits.
  The RelaxedOneHotCategorical is a distribution over random probability
  vectors, vectors of positive real values that sum to one, which continuously
  approximates a OneHotCategorical. The degree of approximation is controlled by
  a temperature: as the temperaturegoes to 0 the RelaxedOneHotCategorical
  becomes discrete with a distribution described by the `logits` or `probs`
  parameters, as the temperature goes to infinity the RelaxedOneHotCategorical
  becomes the constant distribution that is identically the constant vector of
  (1/event_size, ..., 1/event_size).
  The RelaxedOneHotCategorical distribution was concurrently introduced as the
  Gumbel-Softmax (Jang et al., 2016) and Concrete (Maddison et al., 2016)
  distributions for use as a reparameterized continuous approximation to the
  `Categorical` one-hot distribution. If you use this distribution, please cite
  both papers.
  #### Examples
  Creates a continuous distribution, which approximates a 3-class one-hot
  categorical distribution. The 2nd class is the most likely to be the
  largest component in samples drawn from this distribution.
  ```python
  temperature = 0.5
  p = [0.1, 0.5, 0.4]
  dist = RelaxedOneHotCategorical(temperature, probs=p)
  ```
  Creates a continuous distribution, which approximates a 3-class one-hot
  categorical distribution. The 2nd class is the most likely to be the
  largest component in samples drawn from this distribution.
  ```python
  temperature = 0.5
  logits = [-2, 2, 0]
  dist = RelaxedOneHotCategorical(temperature, logits=logits)
  ```
  Creates a continuous distribution, which approximates a 3-class one-hot
  categorical distribution. Because the temperature is very low, samples from
  this distribution are almost discrete, with one component almost 1 and the
  others nearly 0. The 2nd class is the most likely to be the largest component
  in samples drawn from this distribution.
  ```python
  temperature = 1e-5
  logits = [-2, 2, 0]
  dist = RelaxedOneHotCategorical(temperature, logits=logits)
  ```
  Creates a continuous distribution, which approximates a 3-class one-hot
  categorical distribution. Because the temperature is very high, samples from
  this distribution are usually close to the (1/3, 1/3, 1/3) vector. The 2nd
  class is still the most likely to be the largest component
  in samples drawn from this distribution.
  ```python
  temperature = 10
  logits = [-2, 2, 0]
  dist = RelaxedOneHotCategorical(temperature, logits=logits)
  ```
  Eric Jang, Shixiang Gu, and Ben Poole. Categorical Reparameterization with
  Gumbel-Softmax. 2016.
  Chris J. Maddison, Andriy Mnih, and Yee Whye Teh. The Concrete Distribution:
  A Continuous Relaxation of Discrete Random Variables. 2016.
  """
  def __init__(
      self,
      temperature,
      logits=None,
      probs=None,
      dtype=dtypes.float32,
      validate_args=False,
      allow_nan_stats=True,
      name="RelaxedOneHotCategorical"):
    """Initialize RelaxedOneHotCategorical using class log-probabilities.
    Args:
      temperature: An 0-D `Tensor`, representing the temperature
        of a set of RelaxedOneHotCategorical distributions. The temperature
        should be positive.
      logits: An N-D `Tensor`, `N >= 1`, representing the log probabilities
        of a set of RelaxedOneHotCategorical distributions. The first
        `N - 1` dimensions index into a batch of independent distributions and
        the last dimension represents a vector of logits for each class. Only
        one of `logits` or `probs` should be passed in.
      probs: An N-D `Tensor`, `N >= 1`, representing the probabilities
        of a set of RelaxedOneHotCategorical distributions. The first `N - 1`
        dimensions index into a batch of independent distributions and the last
        dimension represents a vector of probabilities for each class. Only one
        of `logits` or `probs` should be passed in.
      dtype: The type of the event samples (default: float32).
      validate_args: Unused in this distribution.
      allow_nan_stats: Python `bool`, default `True`. If `False`, raise an
        exception if a statistic (e.g. mean/mode/etc...) is undefined for any
        batch member. If `True`, batch members with valid parameters leading to
        undefined statistics will return NaN for this statistic.
      name: A name for this distribution (optional).
    """
    # NOTE(review): the ExpRelaxedOneHotCategorical defined in this file
    # expects an externally supplied `gumbel_noise`, which is not forwarded
    # here (it defaults to None) — confirm this wrapper path is exercised.
    dist = ExpRelaxedOneHotCategorical(temperature,
                                       logits=logits,
                                       probs=probs,
                                       dtype=dtype,
                                       validate_args=validate_args,
                                       allow_nan_stats=allow_nan_stats)
    # exp() of the ExpConcrete samples yields RelaxedOneHotCategorical samples.
    super(RelaxedOneHotCategorical, self).__init__(dist,
                                                   bijectors.Exp(event_ndims=1),
                                                   name=name)
| stefanwebb/tensorflow-models | tensorflow_models/relaxed_onehot_categorical_fixed_noise.py | Python | mit | 23,164 |
#
# fedora.py
#
# Copyright (C) 2007 Red Hat, Inc. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from pyanaconda.installclass import BaseInstallClass
from pyanaconda.product import productName
from pyanaconda import network
from pyanaconda import nm
class FedoraBaseInstallClass(BaseInstallClass):
    # Identity/priority used by anaconda's install-class discovery.
    name = "Fedora"
    sortPriority = 10000
    # Hide the Fedora class when building a Red Hat branded product.
    if productName.startswith("Red Hat "):
        hidden = True
    _l10n_domain = "anaconda"
    # Subdirectory name used on the EFI system partition.
    efi_dir = "fedora"
    help_placeholder = "FedoraPlaceholder.html"
    help_placeholder_with_links = "FedoraPlaceholderWithLinks.html"
    def configure(self, anaconda):
        """Apply base configuration and default partitioning to the installer."""
        BaseInstallClass.configure(self, anaconda)
        BaseInstallClass.setDefaultPartitioning(self, anaconda.storage)
    def setNetworkOnbootDefault(self, ksdata):
        """Enable ONBOOT for the first wired device that has a carrier.
        No-op when some wired device is already set to autoconnect.
        """
        if network.has_some_wired_autoconnect_device():
            return
        # choose first wired device having link
        for dev in nm.nm_devices():
            if nm.nm_device_type_is_wifi(dev):
                continue
            try:
                link_up = nm.nm_device_carrier(dev)
            except (nm.UnknownDeviceError, nm.PropertyNotFoundError):
                # Device vanished or has no carrier property: skip it.
                continue
            if link_up:
                network.update_onboot_value(dev, "yes", ksdata)
                break
    def __init__(self):
        BaseInstallClass.__init__(self)
| maxamillion/anaconda | pyanaconda/installclasses/fedora.py | Python | gpl-2.0 | 1,974 |
from ..helpers import assert_equal
from subprocess import Popen, PIPE
import json
def test_tagging():
    """Piping text through `logtag -t` should attach the tag to the record."""
    proc = Popen('echo hello | logtag --no-stamp -t handbags',
                 shell=True,
                 stdout=PIPE,
                 stdin=PIPE)
    raw_output, _ = proc.communicate()
    expected = {'@message': 'hello', '@tags': ['handbags']}
    assert_equal(expected, json.loads(raw_output.decode("utf-8")))
def test_single_field():
    """A single `-f key=value` option should populate @fields."""
    proc = Popen('echo hello | logtag --no-stamp -f handbags=great',
                 shell=True,
                 stdout=PIPE,
                 stdin=PIPE)
    raw_output, _ = proc.communicate()
    expected = {'@message': 'hello', '@fields': {'handbags': 'great'}}
    assert_equal(expected, json.loads(raw_output.decode("utf-8")))
def test_multiple_fields():
    """Several `-f` pairs should all land in @fields."""
    proc = Popen('echo hello | logtag --no-stamp -f handbags=great why=because',
                 shell=True,
                 stdout=PIPE,
                 stdin=PIPE)
    raw_output, _ = proc.communicate()
    expected = {'@message': 'hello',
                '@fields': {'handbags': 'great', 'why': 'because'}}
    assert_equal(expected, json.loads(raw_output.decode("utf-8")))
#!/usr/bin/env python
# coding=utf-8
# Threshold an image to black/white, run-length encode it, and stream the
# runs to a LightBlue Bean over serial (for an attached printer device).
# NOTE(review): Python 2 era code — iterating `bytes` and calling ord()/chr()
# on the elements, and `response += ser.read(...)`, break on Python 3;
# confirm the target runtime before porting.
import sys
import time
import serial
from PIL import Image
MAXWIDTH = 500
if len(sys.argv) != 2:
    print("Usage:\n\t./sendImage image")
    quit()
im = Image.open(sys.argv[1])
im = im.convert('RGB')
# resize to at most MAXWIDTH wide, preserving the aspect ratio
if im.width > MAXWIDTH:
    wpercent = (MAXWIDTH / float(im.width))
    hsize = int((float(im.height) * float(wpercent)))
    im = im.resize((MAXWIDTH, hsize), Image.ANTIALIAS)
b = im.tobytes()
b2 = ''
# Binarize every channel byte: values below 150 become 0 (black),
# everything else 255 (white).
for idx, byte in enumerate(b):
    if ord(byte) < 150:
        b2 += chr(0)
    else:
        b2 += chr(255)
im2 = Image.frombytes('RGB', (im.width, im2.height) if False else (im.width, im.height), b2)
im2.show()
# convert to run-length encoded stream
# Format for bytes: (number of bits low byte)(number of bits high byte)(char to print)
# char to print will normally be a period (.) or a space ( ) but will eventually
# include an underscore (_) for faster printing
runs = [(1, 0, 0)]
# r and c both step by 3 so each iteration lands on the first byte of an RGB
# triple; the index r * im2.width + c equals (row * width + col) * 3.
for r in range(0, im2.height * 3, 3):
    prevBit = ord(b2[r * im2.width])
    bitCount = 0
    lineBits = 0
    for c in range(0, im2.width * 3, 3):
        currentBit = ord(b2[r * im2.width + c])
        if currentBit != prevBit:
            # run ended: emit (count-low, count-high, glyph)
            # sys.stdout.write(str(bitCount))
            if prevBit == 0:
                # sys.stdout.write('.')
                runs.append((bitCount & 0xff, bitCount >> 8, ord('.')))
            else:
                # sys.stdout.write('x')
                runs.append((bitCount & 0xff, bitCount >> 8, ord(' ')))
            lineBits += bitCount
            bitCount = 0
            prevBit = currentBit
        bitCount += 1
    # print(ord(b2[r * im2.width + c]))
    # print(r,c)
    # if ord(b2[r * im2.width + c]) == 0:
    #    sys.stdout.write('.')
    #    bits.append(chr(1));
    # else:
    #    sys.stdout.write(' ')
    #    bits.append(chr(0))
    if prevBit == 0:  # 0 means black
        # don't bother printing a string of spaces at the end, just 1s
        # sys.stdout.write(str(bitCount)+'.')
        runs.append((bitCount & 0xff, bitCount >> 8, ord('.')))
        lineBits += bitCount
    # sys.stdout.write('\n')
    runs.append((lineBits & 0xff, lineBits >> 8, ord('\n')))
runs.append((0, 0, 0))  # signal to end the image printing
# print runs
# quit()
ser = serial.Serial('/dev/cu.LightBlue-Bean', 57600, timeout=0.1)
# wait a bit for the serial link to settle
time.sleep(0.5)
stringHeader = chr(0x01)
try:
    ser.write(stringHeader)
    # NOTE(review): the actual run write below is commented out — only the
    # header byte is sent; the loop just logs and waits for device replies.
    while len(runs) > 0:
        run = runs[0]
        runs = runs[1:]
        # ser.write(''.join([chr(x) for x in run]))
        print("Sent " + str(run))
        response = ""
        while True:
            response += ser.read(10)
            print(response)
            if len(response) > 0 and '\n' in response:
                # print("(bytes written:"+response.rstrip()+")")
                break
            time.sleep(0.01)
        if "timeout" in response or "done" in response:
            print(response)
            break
except KeyboardInterrupt:
    pass
ser.close()
# -*- coding: utf-8 -*-
# © 2014-2016 Oihane Crucelaegui - AvanzOSC
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from . import crm_claim
from . import project
| alfredoavanzosc/odoo-addons | project_claims/models/__init__.py | Python | agpl-3.0 | 181 |
import os
import boto3
import sys
from datetime import datetime
from dateutil.tz import tzutc
from ._version import __version__
from . import create_logger
logger = create_logger(__name__)
# Fail fast at import time when no AWS credentials are available.
if boto3.session.Session().get_credentials() is None:
    logger.info('Please provide AWS credentials.')
    sys.exit(-1)
# AWS account info
# Resolve the account number from the environment, falling back to STS.
AWS_ACCOUNT_NUMBER = os.environ.get('AWS_ACCOUNT_NUMBER', '')
if not AWS_ACCOUNT_NUMBER:
    try:
        AWS_ACCOUNT_NUMBER = boto3.client('sts').get_caller_identity().get('Account')
    except Exception as e:
        raise Exception("Cannot obtain AWS account number. Please provide AWS credentials")
# Region resolution order: TIBANNA_AWS_REGION env var, then the Lambda
# runtime's reserved AWS_REGION, then the local boto3 session.
AWS_REGION = os.environ.get('TIBANNA_AWS_REGION', '')
if not AWS_REGION:
    # I'm a lambda
    AWS_REGION = os.environ.get('AWS_REGION', '') # reserved variable in lambda
    # I'm a user
    if not AWS_REGION:
        try:
            AWS_REGION = boto3.session.Session().region_name # for a user
        except Exception as e:
            raise Exception("Cannot find AWS_REGION: %s" % e)
# Tibanna AMI info
# Override this mapping to use a custom AMI scheme
AMI_PER_REGION = {
    'us-east-1': 'ami-06e2266f85063aabc',  # latest as of Oct 25 2021
    'us-east-2': 'ami-03a4e3e84b6a1813d',
    'us-west-1': 'ami-0c5e8147be760a354',
    'us-west-2': 'ami-068589fed9c8d5950',
    'ap-south-1': 'ami-05ef59bc4f359c93b',
    'ap-northeast-2': 'ami-0d8618a76aece8a8e',
    'ap-southeast-1': 'ami-0c22dc3b05714bda1',
    'ap-southeast-2': 'ami-03dc109bbf412aac5',
    'ap-northeast-1': 'ami-0f4c520515c41ff46',
    'ca-central-1': 'ami-01af127710fadfe74',
    'eu-central-1': 'ami-0887bcb1c901c1769',
    'eu-west-1': 'ami-08db59692e4371ea6',
    'eu-west-2': 'ami-036d3ce7a21e07012',
    'eu-west-3': 'ami-0cad0ec4160a6b940',
    'eu-north-1': 'ami-00a6f0f9fee951aa0',
    'sa-east-1': 'ami-0b2164f9680f97099',
    'me-south-1': 'ami-03479b7a590f97945',
    'af-south-1': 'ami-080baa4ec59c456aa',
    'ap-east-1': 'ami-0a9056eb817bc3928',
    'eu-south-1': 'ami-0a72279e56849415e'
}
# Warn (not fail) when no public AMI exists for the detected region.
if AWS_REGION not in AMI_PER_REGION:
    logger.warning("Public Tibanna AMI for region %s is not available." % AWS_REGION)
AMI_ID = AMI_PER_REGION.get(AWS_REGION, '')
# Human-readable display name for each AWS region code.
AWS_REGION_NAMES = {
    'us-east-1': 'US East (N. Virginia)',
    'us-east-2': 'US East (Ohio)',
    'us-west-1': 'US West (N. California)',
    'us-west-2': 'US West (Oregon)',
    'ca-central-1': 'Canada (Central)',
    'eu-north-1': 'EU (Stockholm)',
    'eu-west-1': 'EU (Ireland)',
    'eu-central-1': 'EU (Frankfurt)',
    'eu-west-2': 'EU (London)',
    'eu-west-3': 'EU (Paris)',
    'ap-northeast-1': 'Asia Pacific (Tokyo)',
    'ap-northeast-2': 'Asia Pacific (Seoul)',
    'ap-northeast-3': 'Asia Pacific (Osaka-Local)',
    'ap-southeast-1': 'Asia Pacific (Singapore)',
    'ap-southeast-2': 'Asia Pacific (Sydney)',
    'ap-south-1': 'Asia Pacific (Mumbai)',
    'sa-east-1': 'South America (Sao Paulo)',  # intentionally no unicode,
    'us-gov-west-1': 'AWS GovCloud (US)',
    'us-gov-east-1': 'AWS GovCloud (US-East)'
}
# Tibanna repo from which awsf scripts are pulled
TIBANNA_REPO_NAME = os.environ.get('TIBANNA_REPO_NAME', '4dn-dcic/tibanna')
TIBANNA_REPO_BRANCH = os.environ.get('TIBANNA_REPO_BRANCH', 'master')
TIBANNA_AWSF_DIR = 'awsf3'
# Tibanna roles
AWS_S3_ROLE_NAME = os.environ.get('AWS_S3_ROLE_NAME', 'S3_access')
S3_ACCESS_ARN = 'arn:aws:iam::' + AWS_ACCOUNT_NUMBER + ':instance-profile/' + AWS_S3_ROLE_NAME
# Profile keys (optional) to use on AWSEM EC2
TIBANNA_PROFILE_ACCESS_KEY = os.environ.get('TIBANNA_PROFILE_ACCESS_KEY', '')
TIBANNA_PROFILE_SECRET_KEY = os.environ.get('TIBANNA_PROFILE_SECRET_KEY', '')
# default step function name
TIBANNA_DEFAULT_STEP_FUNCTION_NAME = os.environ.get('TIBANNA_DEFAULT_STEP_FUNCTION_NAME', 'tibanna_unicorn')
# S3_ENCRYPT_KEY_ID for Tibanna output buckets
# NOTE(review): the variable name is misspelled ('ENCRYT') but is part of
# the module's public surface — renaming would break importers.
S3_ENCRYT_KEY_ID = os.environ.get('S3_ENCRYPT_KEY_ID', None)
# dynamo table (optional) for fast searching
DYNAMODB_TABLE = 'tibanna-master'
DYNAMODB_KEYNAME = 'Job Id'
# field name reserved for Tibanna setting
_tibanna = '_tibanna'
# Awsem time stamp format
AWSEM_TIME_STAMP_FORMAT = '%Y%m%d-%H:%M:%S-UTC'
def PARSE_AWSEM_TIME(t_str):
    """Parse an Awsem time stamp (e.g. '20200101-12:34:56-UTC') into a
    timezone-aware UTC datetime.

    :param t_str: string matching AWSEM_TIME_STAMP_FORMAT
    :returns: datetime with an explicit UTC tzinfo
    """
    # Use the stdlib UTC tzinfo instead of dateutil's tzutc(); the two
    # compare equal and behave identically for arithmetic/comparison.
    from datetime import timezone
    t = datetime.strptime(t_str, AWSEM_TIME_STAMP_FORMAT)
    return t.replace(tzinfo=timezone.utc)
# EBS mount path for cloudwatch metric collection
EBS_MOUNT_POINT = '/mnt/data1'
# Default root EBS size
DEFAULT_ROOT_EBS_SIZE = 8
# Default awsf image (docker tag tracks this package's version)
DEFAULT_AWSF_IMAGE = '4dndcic/tibanna-awsf:' + __version__
SFN_TYPE = 'unicorn'
LAMBDA_TYPE = ''
RUN_TASK_LAMBDA_NAME = 'run_task_awsem'
CHECK_TASK_LAMBDA_NAME = 'check_task_awsem'
UPDATE_COST_LAMBDA_NAME = 'update_cost_awsem'
# step function and execution ARN generators
BASE_ARN = 'arn:aws:states:' + AWS_REGION + ':' + AWS_ACCOUNT_NUMBER + ':%s:%s'
BASE_EXEC_ARN = 'arn:aws:states:' + AWS_REGION + ':' + AWS_ACCOUNT_NUMBER + ':execution:%s:%s'
BASE_METRICS_URL = 'https://%s.s3.amazonaws.com/%s.metrics/metrics.html'
def STEP_FUNCTION_ARN(sfn=TIBANNA_DEFAULT_STEP_FUNCTION_NAME):
    """Return the AWS Step Functions state-machine ARN for *sfn*."""
    resource_type = 'stateMachine'
    return BASE_ARN % (resource_type, sfn)
def EXECUTION_ARN(exec_name, sfn=TIBANNA_DEFAULT_STEP_FUNCTION_NAME):
    """Return the execution ARN for *exec_name* on step function *sfn*."""
    arn_fields = (sfn, exec_name)
    return BASE_EXEC_ARN % arn_fields
def METRICS_URL(log_bucket, job_id):
    """Return the public S3 URL of the metrics report page for *job_id*."""
    url_fields = (log_bucket, job_id)
    return BASE_METRICS_URL % url_fields
| 4dn-dcic/tibanna | tibanna/vars.py | Python | mit | 5,249 |
from django.contrib import admin  # noqa
from .models import ScheduleItem
# Expose ScheduleItem in the Django admin with default ModelAdmin options.
admin.site.register(ScheduleItem)
#
# exception.py - general exception formatting and saving
#
# Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Red Hat, Inc.
# All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author(s): Matt Wilson <[email protected]>
# Erik Troan <[email protected]>
# Chris Lumens <[email protected]>
#
from meh.handler import *
from meh.dump import *
import isys
import sys
import os
import shutil
import signal
from flags import flags
import kickstart
import logging
log = logging.getLogger("anaconda")
class AnacondaExceptionHandler(ExceptionHandler):
    # NOTE(review): Python 2 only — the tuple parameters in the method
    # signatures below are a syntax error on Python 3.
    def postWriteHook(self, (ty, value, tb), anaconda):
        """Copy the saved exception dump onto the installed system and run
        kickstart traceback scripts; both steps are best-effort."""
        # See if /mnt/sysimage is present and put exception there as well
        if os.access("/mnt/sysimage/root", os.X_OK):
            try:
                dest = "/mnt/sysimage/root/%s" % os.path.basename(self.exnFile)
                shutil.copyfile(self.exnFile, dest)
            except:
                log.error("Failed to copy %s to /mnt/sysimage/root" % self.exnFile)
                pass
        # run kickstart traceback scripts (if necessary)
        try:
            if anaconda.isKickstart:
                kickstart.runTracebackScripts(anaconda)
        except:
            pass
    def runDebug(self, (ty, value, tb)):
        """Tear down the UI, kill stray VNC shells, reclaim the console and
        drop into pdb post-mortem; kills the process when pdb exits."""
        # vtActivate does not work on certain ppc64 machines, so just skip
        # that and continue with the rest of the debugger setup.
        try:
            isys.vtActivate(1)
        except SystemError:
            pass
        self.intf.__del__ ()
        pidfl = "/tmp/vncshell.pid"
        if os.path.exists(pidfl) and os.path.isfile(pidfl):
            pf = open(pidfl, "r")
            for pid in pf.readlines():
                if not int(pid) == os.getpid():
                    os.kill(int(pid), signal.SIGKILL)
            pf.close()
        os.open("/dev/console", os.O_RDWR) # reclaim stdin
        os.dup2(0, 1) # reclaim stdout
        os.dup2(0, 2) # reclaim stderr
        # ^
        # |
        # +------ dup2 is magic, I tells ya!
        # bring back the echo
        import termios
        si = sys.stdin.fileno()
        attr = termios.tcgetattr(si)
        attr[3] = attr[3] & termios.ECHO
        termios.tcsetattr(si, termios.TCSADRAIN, attr)
        print("\nEntering debugger...")
        import pdb
        pdb.post_mortem (tb)
        os.kill(os.getpid(), signal.SIGKILL)
def initExceptionHandling(anaconda):
    """Build the meh Config for anaconda crash dumps, install the exception
    handler on *anaconda*, and return the Config."""
    fileList = [ "/tmp/anaconda.log", "/tmp/lvmout", "/tmp/resize.out",
                 "/tmp/program.log", "/tmp/storage.log", "/tmp/ifcfg.log",
                 "/tmp/yum.log", anaconda.rootPath + "/root/install.log",
                 anaconda.rootPath + "/root/upgrade.log", "/proc/cmdline" ]
    if flags.livecdInstall:
        fileList.extend(["/var/log/dmesg"])
    else:
        fileList.extend(["/tmp/syslog"])
    # attrSkipList / localSkipList keep passwords and bulky objects out of
    # the saved traceback dump.
    conf = Config(programName="anaconda",
                  programVersion=isys.getAnacondaVersion(),
                  attrSkipList=[ "backend.ayum",
                                 "backend.dlpkgs",
                                 "id.accounts",
                                 "id.bootloader.password",
                                 "id.comps",
                                 "id.dispatch",
                                 "id.hdList",
                                 "id.ksdata",
                                 "id.instLanguage.font",
                                 "id.instLanguage.kbd",
                                 "id.instLanguage.info",
                                 "id.instLanguage.localeInfo",
                                 "id.instLanguage.nativeLangNames",
                                 "id.instLanguage.tz",
                                 "id.keyboard._mods._modelDict",
                                 "id.keyboard.modelDict",
                                 "id.storage.encryptionPassphrase",
                                 "id.rootPassword",
                                 "id.tmpData",
                                 "intf.icw.buff",
                                 "intf.icw.currentWindow.storage.encryptionPassphrase",
                                 "intf.icw.stockButtons",
                               ],
                  localSkipList=[ "passphrase", "password" ],
                  fileList=fileList)
    handler = AnacondaExceptionHandler(conf, anaconda.intf, ReverseExceptionDump)
    handler.install(anaconda)
    return conf
| icomfort/anaconda | exception.py | Python | gpl-2.0 | 5,107 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
In order to use this lib, you have to call the method 'get_shipping_label'
with the right arguments. To know which keys to send to this method read these lists :
- required fields
- fields
"""
from datetime import datetime
import re
from suds.client import Client, WebFault
from .label_helper import AbstractLabel
from .exception_helper import (
InvalidSequence,
InvalidWeight,
InvalidSize,
InvalidType,
InvalidMissingField,
InvalidZipCode,
InvalidCountry,
InvalidDate,
InvalidCode,
InvalidValue,
InvalidValueNotInList,
)
WEBSERVICE_URL = 'https://ws.chronopost.fr/shipping-cxf/ShippingServiceWS?wsdl'
# Field-validation models consumed by AbstractLabel.check_model(): each maps
# a field name to its constraints ('required', 'max_size', 'type', 'in').
ESD_MODEL = {
    "retrievalDateTime": {'max_size': 17},
    "closingDateTime": {'max_size': 17},
    "specificInstructions": {'max_size': 255},
    "height": {'required': True},
    "width": {'required': True},
    "length": {'required': True},
    "shipperCarriesCode": {'max_size': 38},
    "shipperBuildingFloor": {'max_size': 32},
    "shipperServiceDirection": {'max_size': 32},
}
HEADER_MODEL = {
    "accountNumber": {'required': True, 'max_size': 8},
    "subAccount": {'max_size': 3},
}
ADDRESS_MODEL = {
    "civility": {'in': ['E', 'L', 'M']},
    "name": {'required': True, 'max_size': 100},
    "name2": {'required': True, 'max_size': 100},
    "street": {'required': True, 'max_size': 38},
    "street2": {'max_size': 38},
    "zip": {'required': True, 'max_size': 9},
    "city": {'required': True, 'max_size': 50},
    "country_code": {'required': True, 'max_size': 2},
    "phone": {'max_size': 17},
    "mobile": {'max_size': 17},
    "email": {'max_size': 80},
    "alert": {}, #FIXME
}
REF_MODEL = {
    "shipperRef": {'required': True, 'max_size': 35},
    "recipientRef": {'required': True},
    "customerSkybillNumber": {'max_size': 38},
}
SKYBILL_MODEL = {
    "productCode": {'required': True},
    "shipDate": {'max_size': 38},
    "shipHour": {'required': True, 'max_size': 9},
    "weight": {'required': True, 'type': float},
    "weightUnit": {'required': True},
    "insuredValue": {'type': int},
    "insuredCurrency": {'max_size': 17},
    "codValue": {'type': int},
    "codCurrency": {'max_size': 80},
    "customsValue": {'type': int},
    "customsCurrency": {'max_size': 80},
    "service": {'max_size': 1},
    "objectType": {'max_size': 80},
    "content1": {'max_size': 80},
    "content2": {'max_size': 80},
    "content3": {'max_size': 80},
    "content4": {'max_size': 80},
    "content5": {'max_size': 80},
}
def is_digit(s):
    """Return True when *s* contains only ASCII digits (True for '')."""
    return not re.search("[^0-9]", s)
class Chronopost(AbstractLabel):
    """Client for the Chronopost 'shipping' SOAP web service.

    Plain dicts are validated against the module-level ``*_MODEL``
    constraint dicts via ``self.check_model`` (inherited from
    AbstractLabel), mapped onto the SOAP structures and sent to the
    service, which returns the shipping label.
    """

    _client = None

    def __init__(self):
        # The suds client downloads and parses the WSDL at construction time.
        self._client = Client(WEBSERVICE_URL)

    def _send_request(self, request, *args):
        """Wrapper for API requests.

        :param request: callback for the API request
        :param args: params forwarded to the callback
        :return: dict with 'success' and either 'value' (SOAP response)
                 or 'errors'
        """
        res = {}
        try:
            res['value'] = request(*args)
            res['success'] = True
        except WebFault as e:
            res['success'] = False
            res['errors'] = [e[0]]
        except Exception as e:
            # if authentification error
            #if isinstance(e[0], tuple) and e[0][0] == 401:
                #raise e[0][0]
            raise e
        return res

    def _prepare_skybillparams(self, mode):
        """Build a 'skybillParamsValue' for the requested label format.

        :raises InvalidValueNotInList: when *mode* is not supported.
        """
        skybillparams_obj = self._client.factory.create('skybillParamsValue')
        valid_values = ['PDF', 'PPR', 'SPD', 'THE', 'ZPL', 'XML']
        if mode in valid_values:
            skybillparams_obj['mode'] = mode
        else:
            raise InvalidValueNotInList(
                "The printing mode must be in %s" % valid_values)
        return skybillparams_obj

    def _check_password(self, password):
        """Validate the web service password: exactly 6 digit characters.

        :raises InvalidType: when *password* contains non-digit characters.
        :raises InvalidSize: when *password* is not 6 characters long.
        """
        if is_digit(password) is False:
            # Bug fix: the original interpolated the undefined name
            # 'account' here, raising NameError instead of InvalidType.
            raise InvalidType(
                "Only digit chars are authorised for 'password' '%s'"
                % password)
        if len(str(password)) != 6:
            raise InvalidSize(
                "The password have to contain 6 characters")
        return password

    def _prepare_skybill(self, info):
        """Validate and copy the parcel description ('skybillValue')."""
        self.check_model(info, SKYBILL_MODEL, 'skybill')
        # NOTE(review): the factory object is created then discarded; the
        # plain dict copy is what is actually sent.
        skybill_obj = self._client.factory.create('skybillValue')
        skybill_obj = info.copy()
        # 'DC' is the event code expected by the shipping service.
        skybill_obj['evtCode'] = 'DC'
        return skybill_obj

    def _prepare_ref(self, info):
        """Validate and copy the references ('refValue')."""
        self.check_model(info, REF_MODEL, 'ref')
        ref_obj = self._client.factory.create('refValue')
        ref_obj = info.copy()
        return ref_obj

    def _prepare_esd(self, info):
        """Validate and copy the pickup information ('esdValue')."""
        self.check_model(info, ESD_MODEL, 'esd')
        esd_obj = self._client.factory.create('esdValue')
        #esd_obj['retrievalDateTime'] = datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f")
        #esd_obj['closingDateTime'] = datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f")
        esd_obj = info.copy()
        return esd_obj

    def _prepare_customer_address(self, address):
        """Validate the 'customer' address and return the SOAP object plus
        the SOAP-field -> dict-key mapping."""
        customer_model = ADDRESS_MODEL.copy()
        customer_model['civility'] = {'in': ['E', 'L', 'M'], 'required': True}
        customer_model['print_as_sender'] = {'in': ['Y', 'N']}
        self.check_model(address, customer_model, 'address')
        elements = {
            'customerCivility': 'civility',
            'customerName': 'name',
            'customerName2': 'name2',
            'customerAdress1': 'street',
            'customerAdress2': 'street2',
            'customerZipCode': 'zip',
            'customerCity': 'city',
            'customerCountry': 'country_code',
            'customerCountryName': 'country_name',
            'customerContactName': 'contact_name',
            'customerEmail': 'email',
            'customerPhone': 'phone',
            'customerMobilePhone': 'mobile',
            'customerPreAlert': 'alert',
            'printAsSender': 'print_as_sender'
        }
        customer = self._client.factory.create('customerValue')
        return customer, elements

    def _prepare_shipper_address(self, address):
        """Validate the 'shipper' address and return the SOAP object plus
        the SOAP-field -> dict-key mapping."""
        shipper_model = ADDRESS_MODEL.copy()
        shipper_model['civility'] = {'in': ['E', 'L', 'M'], 'required': True}
        self.check_model(address, shipper_model, 'address')
        elements = {
            'shipperCivility': 'civility',
            'shipperName': 'name',
            'shipperName2': 'name2',
            'shipperAdress1': 'street',
            'shipperAdress2': 'street2',
            'shipperZipCode': 'zip',
            'shipperCity': 'city',
            'shipperCountry': 'country_code',
            'shipperCountryName': 'country_name',
            'shipperContactName': False,
            'shipperEmail': 'email',
            'shipperPhone': 'phone',
            'shipperMobilePhone': 'mobile',
            'shipperPreAlert': 'alert',
        }
        shipper = self._client.factory.create('shipperValue')
        return shipper, elements

    def _prepare_recipient_address(self, address):
        """Validate the 'recipient' address and return the SOAP object plus
        the SOAP-field -> dict-key mapping."""
        self.check_model(address, ADDRESS_MODEL, 'address')
        elements = {
            'recipientName': 'name',
            'recipientName2': 'name2',
            'recipientAdress1': 'street',
            'recipientAdress2': 'street2',
            'recipientZipCode': 'zip',
            'recipientCity': 'city',
            'recipientCountry': 'country_code',
            'recipientCountryName': 'country_name',
            'recipientContactName': 'contact_name',
            'recipientEmail': 'email',
            'recipientPhone': 'phone',
            'recipientMobilePhone': 'mobile',
            'recipientPreAlert': 'alert',
        }
        recipient = self._client.factory.create('recipientValue')
        return recipient, elements

    def _prepare_address(self, values, info_type):
        """Build and fill the SOAP address object for *info_type*.

        :param values: address dict (see ADDRESS_MODEL)
        :param info_type: one of 'recipient', 'shipper', 'customer'
        :raises InvalidValueNotInList: for an unknown *info_type*
        """
        obj = elements = None
        if info_type == 'recipient':
            obj, elements = self._prepare_recipient_address(values)
        if info_type == 'shipper':
            obj, elements = self._prepare_shipper_address(values)
        if info_type == 'customer':
            obj, elements = self._prepare_customer_address(values)
        if obj is None:
            # Bug fix: an unknown info_type previously raised NameError on
            # the unbound 'obj'; fail with an explicit error instead.
            raise InvalidValueNotInList(
                "info_type must be in %s" % ['recipient', 'shipper',
                                             'customer'])
        if obj and elements and values:
            for elm, elm_v in elements.items():
                # Missing keys are sent as empty strings.
                obj[elm] = ''
                if elm_v in values:
                    obj[elm] = values[elm_v]
        return obj

    def _check_account(self, account):
        """Validate that the account number only contains digits."""
        if is_digit(account) is False:
            raise InvalidType(
                "Only digit chars are authorised for 'account' '%s'"
                % account)
        return account

    def _prepare_header(self, vals):
        """Validate account data and build the 'headerValue' SOAP object."""
        self.check_model(vals, HEADER_MODEL, 'header')
        self._check_account(vals['accountNumber'])
        header = self._client.factory.create('headerValue')
        # 'CHRFR' identifies the emitter expected by Chronopost.
        header['idEmit'] = 'CHRFR'
        header['accountNumber'] = vals['accountNumber']
        if vals.get('subAccount', False):
            self._check_account(vals['subAccount'])
            header['subAccount'] = vals['subAccount']
        return header

    def get_shipping_label(self, recipient, shipper, header, ref, skybill,
                           password, esd=None, mode=False, customer=None):
        """
        Call Chronopost 'shipping' web service and return the label in binary.

        :param recipient: recipient address dict (see ADDRESS_MODEL)
        :param shipper: shipper address dict (see ADDRESS_MODEL)
        :param header: account dict (see HEADER_MODEL)
        :param ref: references dict (see REF_MODEL)
        :param skybill: parcel dict (see SKYBILL_MODEL)
        :param password: 6-digit web service password
        :param esd: optional pickup dict (see ESD_MODEL)
        :param mode: optional printing mode ('PDF', 'PPR', 'SPD', 'THE',
                     'ZPL' or 'XML')
        :param customer: optional customer address dict; defaults to a
                         copy of *shipper*
        :return: dict with 'success' and either 'value' or 'errors'
        """
        if not customer:
            customer = shipper.copy()
        header_obj = self._prepare_header(header.copy())
        recipient_obj = self._prepare_address(recipient.copy(), 'recipient')
        shipper_obj = self._prepare_address(shipper.copy(), 'shipper')
        customer_obj = self._prepare_address(customer.copy(), 'customer')
        if esd:
            esd_obj = self._prepare_esd(esd.copy())
        else:
            esd_obj = self._client.factory.create('esdValue')
        ref_obj = self._prepare_ref(ref.copy())
        skybill_obj = self._prepare_skybill(skybill.copy())
        password = self._check_password(password)
        if mode:
            skybillparams_obj = self._prepare_skybillparams(mode)
        else:
            skybillparams_obj = self._client.factory.create('skybillParamsValue')
        request = self._client.service.shipping
        response = self._send_request(request, esd_obj, header_obj, shipper_obj,
                                      customer_obj, recipient_obj, ref_obj,
                                      skybill_obj, skybillparams_obj, password)
        return response
| florian-dacosta/chronopost | chronopost_api/chronopost.py | Python | agpl-3.0 | 11,111 |
log_filename = "/tmp/fmbt.test.aal-python.mycounter.log"

# Truncate the log at import time so every run starts from an empty file.
# open() replaces the Python-2-only file() builtin, so this also works on
# Python 3.
open(log_filename, "w").close()


def log(msg):
    """Append *msg* plus a newline to the shared log file."""
    with open(log_filename, "a") as f:
        f.write(msg + "\n")


def foo():
    pass


def direction_changed(i):
    """Record a direction change at counter value *i*.

    NOTE(review): ``dec_called`` is not defined in this module — presumably
    injected by the AAL/fMBT model at runtime; confirm before refactoring.
    """
    log('change direction on value %s' % (i,))
    log(' dec called: %s' % (dec_called,))
| pyykkis/fMBT | test/aalpython/mycounter.py | Python | lgpl-2.1 | 289 |
# -*- coding: utf-8 -*-
class AutocompleteMeta:
    """
    Simple meta class to allow the model to define aspects of the autocomplete.

    :var name: used for the named url
    :var path: the path to autocomplete view
    :var follow_fks: when searching should ForeignKey fields be followed.
    :var fields: list of fields, if empty then all searchable fields are used
    :var permissions: bool, string or iter

        * if ``permissions`` ``False`` (default) no authentication is checked.
        * if ``permissions`` ``True`` then request.user must be authenticated.
        * if ``permissions`` ``string`` then request.user must have the permission defined by ``string``.
        * if ``permissions`` ``iter`` then request.user must have all the permissions defined in the ``iter``

    See :class:`django_autocomplete.views.AutocompleteView` for more clarification.

    For example as a simple object:

    >>> from django_autocomplete.meta import AutocompleteMeta
    >>> class TestModel(object):
    ...     autocomplete = AutocompleteMeta(
    ...         name='silly',
    ...         path='api/filter/silly',
    ...         )

    The model autocomplete configures the model for use:

    >>> m = TestModel()
    >>> m.autocomplete
    <django_autocomplete.meta.AutocompleteMeta object at 0x...>
    >>> m.autocomplete.path
    'api/filter/silly'
    >>> m.autocomplete.name
    'silly'
    >>> m.autocomplete.follow_fks
    True
    >>> m.autocomplete.fields
    []
    """
    name = ''
    path = ''
    fields = []
    permissions = None
    follow_fks = True

    def __init__(self, autocomplete=None, **kwargs):
        # Accept either a Meta-style object or plain keyword overrides;
        # only attributes declared on the class can be overridden.
        overrides = autocomplete.__dict__ if autocomplete else kwargs
        for attr in self.__class__.__dict__:
            if attr in overrides:
                self.__dict__[attr] = overrides[attr]
| darrylcousins/django-autocomplete | django_autocomplete/meta.py | Python | apache-2.0 | 1,979 |
from typing import Dict, Optional
from zerver.lib.url_preview.parsers.base import BaseParser
class GenericParser(BaseParser):
    """Fallback preview parser that guesses title/description/image from
    plain HTML when no dedicated metadata (e.g. Open Graph) is available."""

    def extract_data(self) -> Dict[str, Optional[str]]:
        """Collect the three preview fields from the parsed document."""
        return {
            'title': self._get_title(),
            'description': self._get_description(),
            'image': self._get_image(),
        }

    def _get_title(self) -> Optional[str]:
        """Prefer the <title> tag, then the first <h1>."""
        for candidate in (self._soup.title, self._soup.h1):
            if candidate and candidate.text != '':
                return candidate.text
        return None

    def _get_description(self) -> Optional[str]:
        """Try <meta name="description">, then the paragraph following the
        first <h1>, then the first paragraph anywhere."""
        soup = self._soup
        meta = soup.find('meta', attrs={'name': 'description'})
        if meta and meta.get('content', '') != '':
            return meta['content']
        heading = soup.find('h1')
        if heading:
            paragraph = heading.find_next('p')
            if paragraph and paragraph.text != '':
                return paragraph.text
        paragraph = soup.find('p')
        if paragraph and paragraph.text != '':
            return paragraph.text
        return None

    def _get_image(self) -> Optional[str]:
        """
        Finding a first image after the h1 header.
        Presumably it will be the main image.
        """
        heading = self._soup.find('h1')
        if heading:
            image = heading.find_next_sibling('img')
            if image and image['src'] != '':
                return image['src']
        return None
| tommyip/zulip | zerver/lib/url_preview/parsers/generic.py | Python | apache-2.0 | 1,605 |
#!/usr/bin/env python
from setuptools import setup, find_packages
# Version comes from VERSION.txt when present (release builds); source
# checkouts without one fall back to a dev version.
try:
    with open('VERSION.txt', 'r') as version_file:
        version = version_file.read().strip()
except FileNotFoundError:
    version = '0.0.0.dev0'

# DESCRIPTION is mandatory: let the error propagate if it is missing.
with open('DESCRIPTION', 'r') as description_file:
    long_description = description_file.read()

setup(
    name='tukio',
    description='An event-based workflow library built around asyncio',
    long_description=long_description,
    url='https://github.com/surycat/tukio',
    author='Enovacom Surycat',
    author_email='[email protected]',
    version=version,
    packages=find_packages(exclude=['tests']),
    license='Apache 2.0',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Framework :: AsyncIO',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: 3 :: Only',
    ],
)
| optiflows/tukio | setup.py | Python | apache-2.0 | 1,060 |
from django.db import models
class PrintFormat(models.Model):
    """Print format ("Форматы печати").

    ``denominator`` is the divisor applied to a conventional A1 sheet to
    obtain the number of pages of this format (per the field help_text).
    """
    name = models.CharField('Название', max_length=16)
    denominator = models.IntegerField(
        'Знаменатель',
        help_text='Знаменатель для получения количества полос из условного А1')

    def __str__(self):
        # Rendered as "<name>/<denominator>".
        return '{}/{}'.format(self.name, self.denominator)

    class Meta:
        ordering = ['denominator']
        verbose_name = 'Формат печати'
        verbose_name_plural = 'Форматы печати'
class CirculationLimit(models.Model):
    """Circulation threshold at which the waste-ratio norms change
    ("Пороги тиража при которых меняются нормы отходов")."""
    limit = models.IntegerField(unique=True)

    def __str__(self):
        # Describe the range up to the next higher threshold, e.g.
        # "от 100 до 500" / "менее 500" / "свыше 1000" (Russian range words).
        other = type(self).objects.filter(limit__gt=self.limit).first()
        if other:
            if self.limit:
                return 'от {} до {}'.format(self.limit, other.limit)
            else:
                return 'менее {}'.format(other.limit)
        else:
            return 'свыше {}'.format(self.limit)

    @classmethod
    def get_limit(cls, value):
        # Highest threshold strictly below *value* (relies on Meta.ordering).
        return cls.objects.filter(limit__lt=value).last()

    class Meta:
        ordering = ['limit']
        verbose_name = 'Ограничение тиража'
        verbose_name_plural = 'Ограничения тиража'
class InkFaceBack(models.Model):
    """Ink colour counts for the face and back of a sheet (shown as "4+1")."""
    face = models.IntegerField()
    back = models.IntegerField()
    # Waste norms per circulation threshold, via the WasteRatio through table.
    values = models.ManyToManyField(CirculationLimit,
                                    through='WasteRatio')

    def __str__(self):
        return '{}+{}'.format(self.face, self.back)

    def plates_count(self):
        # Total colour count over both sides.
        return self.face + self.back

    class Meta:
        ordering = ['face', 'back']
        verbose_name = 'Красочность'
        verbose_name_plural = 'Красочность'
class WasteRatio(models.Model):
    """Waste norm ("Норма отходов") for an ink configuration within a
    circulation range; the through model of InkFaceBack.values."""
    limit = models.ForeignKey('CirculationLimit')
    ink = models.ForeignKey('InkFaceBack')
    value = models.IntegerField('Норматив')

    def __str__(self):
        return '{} ({}) - {}'.format(self.limit, self.ink, self.value)

    class Meta:
        verbose_name = 'Норма отходов'
        verbose_name_plural = 'Нормы отходов'
class PubSquare(models.Model):
    """Publication area ("Площадь издания")."""
    value = models.IntegerField('Площадь издания')

    def __str__(self):
        return '[{}]'.format(self.value)
| caroten/omicalc | src/utils/models.py | Python | apache-2.0 | 2,556 |
""" Module containing all the possible exceptions that dogapi can raise.
It should be safe to do a `from dogapi.exceptions import *`
"""
import socket
__all__ = [
    'DatadogException',
    'UnknownDelivery',
    'ClientError',
    'HttpTimeout',
    'HttpBackoff',
    'ApiError',
    'timeout_exceptions',
]


class DatadogException(Exception):
    """Root of the dogapi exception hierarchy."""


class UnknownDelivery(DatadogException):
    """Raised for an unknown event delivery mechanism."""


class ClientError(DatadogException):
    """Raised for client-side errors."""


class HttpTimeout(DatadogException):
    """Raised when an HTTP request times out."""


class HttpBackoff(DatadogException):
    """Raised while the client is backing off from the API."""


class ApiError(DatadogException):
    """Raised when the API reports an error."""


# Low-level exceptions that indicate a network timeout; ssl.SSLError is
# appended only when the ssl module is available.
timeout_exceptions = (socket.timeout, )
try:
    import ssl
except ImportError:
    pass
else:
    timeout_exceptions += (ssl.SSLError, )
| DataDog/dogapi | src/dogapi/exceptions.py | Python | bsd-3-clause | 670 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South migration 0001: create the Reference_Domain table.

    Generated code — field definitions should mirror the model; edit via a
    new migration rather than by hand.
    """

    def forwards(self, orm):
        """Apply: create the django_reference_data_reference_domain table."""
        # Adding model 'Reference_Domain'
        db.create_table(u'django_reference_data_reference_domain', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('domain_name', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('domain_path', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('long_name', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            ('description', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('source', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('source_details', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('domain_type', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('is_news', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('is_multimedia', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('rank', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
            ('address', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('state', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('county', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('city', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('zip_code', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('create_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('last_update', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
        ))
        db.send_create_signal(u'django_reference_data', ['Reference_Domain'])

    def backwards(self, orm):
        """Revert: drop the table created in forwards()."""
        # Deleting model 'Reference_Domain'
        db.delete_table(u'django_reference_data_reference_domain')

    # Frozen ORM definition used by South when running this migration.
    models = {
        u'django_reference_data.reference_domain': {
            'Meta': {'object_name': 'Reference_Domain'},
            'address': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'county': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'create_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'domain_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'domain_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'domain_type': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_multimedia': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_news': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_update': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'long_name': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'rank': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'source': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'source_details': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'zip_code': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
        }
    }
    complete_apps = ['django_reference_data'] | jonathanmorgan/django_reference_data | migrations/0001_initial.py | Python | gpl-3.0 | 4,641 |
import webapp2
import jinja2
import os
import logging
from google.appengine.api import users
from apiclient.discovery import build
from oauth2client.contrib.appengine import OAuth2Decorator
# OAuth2 decorator protecting handlers that need Google+ profile access.
# NOTE(review): the client secret is committed in source control — it should
# be loaded from configuration/secret storage instead.
profileauthdecorator = OAuth2Decorator(
    client_id='314326393426-6bvmm9sds571kgnnd3k886i1sjq7co82.apps.googleusercontent.com',
    client_secret='QgKCMayAA5t2C1nmBbeg-Itn',
    scope='https://www.googleapis.com/auth/plus.login')
# Jinja2 environment rooted at this file's directory (templates live here).
jinja_environment = jinja2.Environment(
    loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
class _BaseHandler(webapp2.RequestHandler):
    """Request handler base that primes ``template_values`` with auth
    context: login link for anonymous visitors, user/logout info otherwise."""

    def initialize(self, request, response):
        super(_BaseHandler, self).initialize(request, response)
        self.user = users.get_current_user()
        if not self.user:
            # Anonymous visitor: offer a login link back to this page.
            self.template_values = {
                'login_url': users.create_login_url(self.request.url)}
        else:
            self.template_values = {
                'user': self.user,
                'is_admin': users.is_current_user_admin(),
                'logout_url': users.create_logout_url('/')}
class MainPage(_BaseHandler):
    """Renders the home page template."""

    def get(self):
        # NOTE(review): logged at ERROR level for a routine page view —
        # probably intended to be INFO like ProfilePage.
        logging.error('MainPage class requested')
        home_template = jinja_environment.get_template('home.template')
        self.response.out.write(home_template.render(self.template_values))
class ProfilePage(_BaseHandler):
    """Shows the signed-in user's Google+ profile image.

    ``@profileauthdecorator.oauth_required`` runs the OAuth2 consent flow
    before get() executes, so valid credentials are available here.
    """
    @profileauthdecorator.oauth_required
    def get(self):
        logging.info('ProfilePage class requested')
        # http object pre-authorized with the user's OAuth2 credentials.
        auth_http = profileauthdecorator.http()
        logging.info(auth_http)
        service = build('plus', 'v1', http=auth_http)
        people_resource = service.people()
        # 'me' resolves to the authenticated user.
        people_document = people_resource.get(userId='me').execute()
        self.template_values['cheever'] = {
            'imgUrl' : people_document['image']['url']
        }
        template = jinja_environment.get_template('profile.template')
        self.response.out.write(template.render(self.template_values))
# URL routes; the OAuth2 callback path must be registered so the
# decorator's authorization flow can complete.
app = webapp2.WSGIApplication([
    ('/admin', MainPage),
    ('/profile', ProfilePage),
    (profileauthdecorator.callback_path, profileauthdecorator.callback_handler()),
    ('/', MainPage)
], debug=True) | benfinkelcbt/CPD200 | CPD200-Lab06-Python/main.py | Python | gpl-3.0 | 2,260 |
import os
from setuptools import setup, find_packages
def read(*paths):
    """Build a file path from *paths* and return the contents."""
    joined = os.path.join(*paths)
    with open(joined, 'r') as handle:
        return handle.read()
# Distribution metadata; long_description is the README loaded via read().
setup(
    name='smashbenchmarking',
    version='1.0.1',
    packages=find_packages(exclude=["tests*","scripts*"]),
    description='Check the accuracy of one VCF callset against another',
    long_description=(read('README.md')),
    url='http://github.com/amplab/smash/',
    license='BSD',
    author='AMPlab, UC Berkeley',
    author_email='[email protected]',
    py_modules=['smashbenchmarking'],
    install_requires=["pyvcf","pyfasta","numpy"],
    include_package_data=True,
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Intended Audience :: Science/Research',
        'Natural Language :: English',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Topic :: Scientific/Engineering :: Bio-Informatics',
    ],
) | amplab/smash | setup.py | Python | bsd-2-clause | 1,354 |
# This code is so you can run the samples without installing the package
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
#
# Metadata strings read by the project's test harness — presumably
# screenshot/timing steps and tested-feature tags; confirm format there.
testinfo = "s, t 0.5, s, t 1.5, s, t 2.1, s, q"
tags = "Layer, RotateBy"
import summa
from summa.director import director
from summa.actions import RotateBy
from summa.layer import *
def main():
    """Show a scaled grey ColorLayer rotating a full turn over 2 seconds."""
    director.init()
    scene = summa.scene.Scene()
    layer = ColorLayer(64, 64, 64, 255)
    layer.scale = 0.75
    scene.add(layer)
    layer.do(RotateBy(360, 2))
    director.run(scene)


if __name__ == '__main__':
    main()
| shackra/thomas-aquinas | tests/test_layer_rotate.py | Python | bsd-3-clause | 630 |
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2021 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from typing import List
from django.core.exceptions import PermissionDenied
from base.models.education_group_publication_contact import EducationGroupPublicationContact
from base.models.education_group_year import EducationGroupYear
def can_postpone_publication_contact(education_group_year: 'EducationGroupYear') -> bool:
    """Postponement is only allowed from a current or future academic year."""
    academic_year = education_group_year.academic_year
    return not academic_year.is_past
def bulk_postpone_publication_contact(education_group_years_from: List['EducationGroupYear']) -> None:
    """Postpone the publication contacts of each given education group year."""
    for egy in education_group_years_from:
        postpone_publication_contact(egy)


def postpone_publication_contact(education_group_year_from: 'EducationGroupYear') -> None:
    """Copy the publication contacts of *education_group_year_from* onto all
    later years of the same education group, replacing their existing ones.

    :raises PermissionDenied: when the source academic year is in the past.
    """
    if not can_postpone_publication_contact(education_group_year_from):
        raise PermissionDenied
    next_qs = EducationGroupYear.objects.filter(
        education_group=education_group_year_from.education_group,
        academic_year__year__gt=education_group_year_from.academic_year.year
    )
    # Materialize now: the same instances are re-saved with pk=None below.
    publication_contact_to_postpone = list(EducationGroupPublicationContact.objects.filter(
        education_group_year=education_group_year_from
    ))
    for egy in next_qs:
        _purge_publication_contact(egy)
        _postpone_publication_contact(egy, publication_contact_to_postpone)


def _purge_publication_contact(education_group_year: 'EducationGroupYear') -> None:
    """Delete all publication contacts of *education_group_year*.

    Deletes one object at a time instead of qs.delete() — presumably to run
    per-instance delete logic/signals; confirm before changing.
    """
    qs = EducationGroupPublicationContact.objects.filter(education_group_year=education_group_year)
    for publication_contact in qs:
        publication_contact.delete()


def _postpone_publication_contact(
        education_group_year: 'EducationGroupYear',
        publication_contacts: List['EducationGroupPublicationContact']
) -> None:
    """Attach copies of *publication_contacts* to *education_group_year*.

    Clearing pk/id before save() makes Django insert a new row each time.
    """
    for contact in publication_contacts:
        contact.pk = None
        contact.id = None
        contact.education_group_year = education_group_year
        contact.save()
def bulk_postpone_publication_entity(education_group_years_from: List['EducationGroupYear']) -> None:
    """Postpone the publication entity of each given education group year."""
    for egy in education_group_years_from:
        postpone_publication_entity(egy)


def postpone_publication_entity(education_group_year_from: 'EducationGroupYear') -> None:
    """Copy the publication contact entity of *education_group_year_from*
    onto all later years of the same education group.

    Reuses can_postpone_publication_contact: the same "not a past year"
    rule applies.

    :raises PermissionDenied: when the source academic year is in the past.
    """
    if not can_postpone_publication_contact(education_group_year_from):
        raise PermissionDenied
    next_qs = EducationGroupYear.objects.filter(
        education_group=education_group_year_from.education_group,
        academic_year__year__gt=education_group_year_from.academic_year.year
    )
    publication_entity_to_postpone = education_group_year_from.publication_contact_entity
    for egy in next_qs:
        _postpone_publication_entity(egy, publication_entity_to_postpone)


def _postpone_publication_entity(egy: 'EducationGroupYear', publication_entity) -> None:
    """Set *publication_entity* on *egy* and persist it."""
    egy.publication_contact_entity = publication_entity
    egy.save()
| uclouvain/OSIS-Louvain | base/business/education_groups/publication_contact.py | Python | agpl-3.0 | 4,006 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from . import cbi
from . import account_bank_statement_import_cbi
| Comunitea/CMNT_004_15 | project-addons/account_bank_statement_import_cbi/wizard/__init__.py | Python | agpl-3.0 | 166 |
from datetime import datetime, timedelta
import sys
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
import django
from django.conf import settings
from django.contrib import auth
from django.contrib.auth.models import User
from django.contrib.sessions.backends.base import CreateError
from django.core.management import call_command
from django.core.urlresolvers import reverse
from django.db import IntegrityError
from django.test import TestCase
from django.test.utils import override_settings
from django.utils.timezone import now
from user_sessions.backends.db import SessionStore
from user_sessions.models import Session
from user_sessions.templatetags.user_sessions import location, device
from user_sessions.utils.tests import Client
# skipUnless lived in django.utils.unittest on Python < 2.7.
if sys.version_info[:2] < (2, 7):
    from django.utils.unittest.case import skipUnless
else:
    from unittest import skipUnless
# Probe once for a working GeoIP setup; tests depending on it can be
# skipped with geoip_msg as the reason when it is unavailable.
try:
    from django.contrib.gis.geoip import GeoIP
    geoip = GeoIP()
    geoip_msg = None
except Exception as e:
    geoip = None
    geoip_msg = str(e)
class MiddlewareTest(TestCase):
    """Integration tests for the session-tracking middleware."""

    def test_unmodified_session(self):
        """A request that never touches the session must not set a cookie."""
        self.client.get('/', HTTP_USER_AGENT='Python/2.7')
        self.assertNotIn(settings.SESSION_COOKIE_NAME, self.client.cookies)

    def test_modify_session(self):
        """Touching the session stores the user agent and IP on the record."""
        self.client.get('/modify_session/', HTTP_USER_AGENT='Python/2.7')
        self.assertIn(settings.SESSION_COOKIE_NAME, self.client.cookies)
        session = Session.objects.get(
            pk=self.client.cookies[settings.SESSION_COOKIE_NAME].value
        )
        self.assertEqual(session.user_agent, 'Python/2.7')
        self.assertEqual(session.ip, '127.0.0.1')

    def test_login(self):
        """Logging in links the stored session to the authenticated user."""
        # Django < 1.7 had no named admin login URL.
        if django.VERSION < (1, 7):
            admin_login_url = '/admin/'
        else:
            admin_login_url = reverse('admin:login')
        user = User.objects.create_superuser('bouke', '', 'secret')
        response = self.client.post(admin_login_url,
                                    data={
                                        'username': 'bouke',
                                        'password': 'secret',
                                        'this_is_the_login_form': '1',
                                        'next': '/admin/'},
                                    HTTP_USER_AGENT='Python/2.7')
        self.assertRedirects(response, '/admin/')
        session = Session.objects.get(
            pk=self.client.cookies[settings.SESSION_COOKIE_NAME].value
        )
        self.assertEqual(user, session.user)

    def test_long_ua(self):
        """A very long user agent must not break saving the session."""
        self.client.get('/modify_session/',
                        HTTP_USER_AGENT=''.join('a' for _ in range(400)))
class ViewsTest(TestCase):
    """Tests for the session list and delete views."""
    client_class = Client

    def setUp(self):
        # Logging in creates the first session for the user.
        self.user = User.objects.create_user('bouke', '', 'secret')
        assert self.client.login(username='bouke', password='secret')

    def test_list(self):
        response = self.client.get(reverse('user_sessions:session_list'))
        self.assertContains(response, 'Active Sessions')
        self.assertContains(response, 'End Session', 2)

    def test_delete(self):
        session_key = self.client.cookies[settings.SESSION_COOKIE_NAME].value
        response = self.client.post(reverse('user_sessions:session_delete',
                                            args=[session_key]))
        self.assertRedirects(response, reverse('user_sessions:session_list'))

    def test_delete_other(self):
        """'Delete other' removes every session except the current one."""
        self.user.session_set.create(ip='127.0.0.1', expire_date=datetime.now() + timedelta(days=1))
        self.assertEqual(self.user.session_set.count(), 2)
        response = self.client.post(reverse('user_sessions:session_delete_other'))
        self.assertRedirects(response, reverse('user_sessions:session_list'))
        self.assertEqual(self.user.session_set.count(), 1)
class AdminTest(TestCase):
    """Tests for the Session admin changelist, its search and filters."""
    client_class = Client

    def setUp(self):
        User.objects.create_superuser('bouke', '', 'secret')
        assert self.client.login(username='bouke', password='secret')
        # One expired session (a year in the past) and one active session.
        expired = SessionStore('Python/2.5', '20.13.1.1')
        expired.set_expiry(-365 * 86400)
        expired.save()
        unexpired = SessionStore('Python/2.7', '1.1.1.1')
        unexpired.save()
        self.admin_url = reverse('admin:user_sessions_session_changelist')

    def test_list(self):
        """The changelist shows all three sessions (login + two fixtures)."""
        response = self.client.get(self.admin_url)
        self.assertContains(response, 'Select session to change')
        self.assertContains(response, '127.0.0.1')
        self.assertContains(response, '20.13.1.1')
        self.assertContains(response, '1.1.1.1')

    def test_search(self):
        """Searching by username only matches that user's session."""
        response = self.client.get(self.admin_url, {'q': 'bouke'})
        self.assertContains(response, '127.0.0.1')
        self.assertNotContains(response, '20.13.1.1')
        self.assertNotContains(response, '1.1.1.1')

    def test_mine(self):
        """The 'owner=my' filter hides other users' sessions."""
        my_sessions = '%s?%s' % (self.admin_url, urlencode({'owner': 'my'}))
        response = self.client.get(my_sessions)
        self.assertContains(response, '127.0.0.1')
        self.assertNotContains(response, '1.1.1.1')

    def test_expired(self):
        response = self.client.get(expired)
        self.assertContains(response, '20.13.1.1')
        self.assertNotContains(response, '1.1.1.1')

    def test_unexpired(self):
        unexpired = '%s?%s' % (self.admin_url, urlencode({'active': '1'}))
        response = self.client.get(unexpired)
        self.assertContains(response, '1.1.1.1')
        self.assertNotContains(response, '20.13.1.1')
class SessionStoreTest(TestCase):
    """Unit tests for the database-backed SessionStore."""

    def setUp(self):
        self.store = SessionStore('Python/2.7', '127.0.0.1', None)

    def test_untouched_init(self):
        self.assertFalse(self.store.modified)
        self.assertFalse(self.store.accessed)

    def test_auth_session_key(self):
        """Reading the auth key marks the store accessed but not modified;
        writing it marks the store modified."""
        self.assertFalse(auth.SESSION_KEY in self.store)
        self.assertFalse(self.store.modified)
        self.assertTrue(self.store.accessed)
        self.store.get(auth.SESSION_KEY)
        self.assertFalse(self.store.modified)
        self.store[auth.SESSION_KEY] = 1
        self.assertTrue(self.store.modified)

    def test_save(self):
        """save() persists user agent, IP, user id and last activity."""
        self.store[auth.SESSION_KEY] = 1
        self.store.save()
        session = Session.objects.get(pk=self.store.session_key)
        self.assertEqual(session.user_agent, 'Python/2.7')
        self.assertEqual(session.ip, '127.0.0.1')
        self.assertEqual(session.user_id, 1)
        self.assertAlmostEqual(now(), session.last_activity,
                               delta=timedelta(seconds=5))

    def test_load_unmodified(self):
        """Loading with the same UA/IP leaves the store unmodified."""
        self.store[auth.SESSION_KEY] = 1
        self.store.save()
        store2 = SessionStore('Python/2.7', '127.0.0.1',
                              self.store.session_key)
        store2.load()
        self.assertEqual(store2.user_agent, 'Python/2.7')
        self.assertEqual(store2.ip, '127.0.0.1')
        self.assertEqual(store2.user_id, 1)
        self.assertEqual(store2.modified, False)

    def test_load_modified(self):
        """Loading with a different UA/IP flags the store as modified."""
        self.store[auth.SESSION_KEY] = 1
        self.store.save()
        store2 = SessionStore('Python/3.3', '8.8.8.8', self.store.session_key)
        store2.load()
        self.assertEqual(store2.user_agent, 'Python/3.3')
        self.assertEqual(store2.ip, '8.8.8.8')
        self.assertEqual(store2.user_id, 1)
        self.assertEqual(store2.modified, True)

    def test_duplicate_create(self):
        """create() generates a fresh key; must_create on a taken key fails."""
        s1 = SessionStore('Python/2.7', '127.0.0.1', 'DUPLICATE')
        s1.create()
        s2 = SessionStore('Python/2.7', '127.0.0.1', 'DUPLICATE')
        s2.create()
        self.assertNotEqual(s1.session_key, s2.session_key)
        s3 = SessionStore('Python/2.7', '127.0.0.1', s1.session_key)
        with self.assertRaises(CreateError):
            s3.save(must_create=True)

    def test_integrity(self):
        """user_agent is NOT NULL at the database level."""
        self.store.user_agent = None
        # NOTE(review): assertRaisesRegexp is the deprecated alias of
        # assertRaisesRegex; the regex covers two SQLite error wordings.
        with self.assertRaisesRegexp(
            IntegrityError,
            '(user_sessions_session.user_agent may not be NULL|'
            'NOT NULL constraint failed: user_sessions_session.user_agent)'
        ):
            self.store.save()

    def test_delete(self):
        # not persisted, should just return
        self.store.delete()
        # create, then delete
        self.store.create()
        session_key = self.store.session_key
        self.store.delete()
        # non-existing sessions, should not raise
        self.store.delete()
        self.store.delete(session_key)

    def test_clear(self):
        """
        Clearing the session should clear all non-browser information
        """
        self.store[auth.SESSION_KEY] = 1
        self.store.clear()
        self.store.save()
        session = Session.objects.get(pk=self.store.session_key)
        self.assertEqual(session.user_id, None)
class ModelTest(TestCase):
    """Tests for the Session model itself."""
    def test_get_decoded(self):
        # get_decoded() must round-trip the stored session payload.
        store = SessionStore('Python/2.7', '127.0.0.1', None)
        store[auth.SESSION_KEY] = 1
        store['foo'] = 'bar'
        store.save()
        session = Session.objects.get(pk=store.session_key)
        self.assertEqual(session.get_decoded(),
                         {'foo': 'bar', auth.SESSION_KEY: 1})
    def test_very_long_ua(self):
        # User agents longer than 200 characters are truncated on save.
        ua = 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; ' \
             'Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; ' \
             '.NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; ' \
             'InfoPath.3; ms-office; MSOffice 14)'
        store = SessionStore(ua, '127.0.0.1', None)
        store.save()
        session = Session.objects.get(pk=store.session_key)
        self.assertEqual(session.user_agent, ua[:200])
class ClientTest(TestCase):
    """Tests for the session-aware test Client."""
    def test_invalid_login(self):
        client = Client()
        self.assertFalse(client.login())
    def test_restore_session(self):
        # A session created up-front and keyed by the cookie must survive
        # a subsequent login().
        store = SessionStore('Python/2.7', '127.0.0.1', None)
        store['foo'] = 'bar'
        store.save()
        client = Client()
        client.cookies[settings.SESSION_COOKIE_NAME] = store.session_key
        User.objects.create_user('bouke', '', 'secret')
        # assertTrue replaces the bare `assert`, which is stripped under
        # `python -O` and yields no useful failure message.
        self.assertTrue(client.login(username='bouke', password='secret'))
        self.assertEqual(client.session['foo'], 'bar')
    def test_login_logout(self):
        # Logging in sets the session cookie; logging out removes it.
        client = Client()
        User.objects.create_user('bouke', '', 'secret')
        self.assertTrue(client.login(username='bouke', password='secret'))
        self.assertIn(settings.SESSION_COOKIE_NAME, client.cookies)
        client.logout()
        self.assertNotIn(settings.SESSION_COOKIE_NAME, client.cookies)
        # should not raise
        client.logout()
    @override_settings(INSTALLED_APPS=())
    def test_no_session(self):
        # Without installed apps there is no session backend available.
        self.assertIsNone(Client().session)
class LocationTemplateFilterTest(TestCase):
    """Tests for the GeoIP-backed ``location`` template filter."""
    @override_settings(GEOIP_PATH=None)
    def test_no_location(self):
        # With no GeoIP database configured the filter renders a placeholder.
        self.assertEqual(location('127.0.0.1'), '<i>unknown</i>')
    @skipUnless(geoip, geoip_msg)
    def test_locations(self):
        self.assertEqual(location('8.8.8.8'), 'United States')
        self.assertEqual(location('44.55.66.77'), 'San Diego, United States')
class DeviceTemplateFilterTest(TestCase):
    """Tests for the ``device`` template filter: each case feeds a raw
    user-agent string and checks the human-readable browser/OS label."""
    def test_ie(self):
        self.assertEqual(
            'Internet Explorer on Windows XP',
            device('Mozilla/4.0 (Windows; MSIE 6.0; Windows NT 5.1; SV1; '
                   '.NET CLR 2.0.50727)')
        )
        self.assertEqual(
            'Internet Explorer on Windows Vista',
            device('Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; '
                   'Trident/4.0; SLCC1; .NET CLR 2.0.50727; .NET CLR 1.1.4322;'
                   ' InfoPath.2; .NET CLR 3.5.21022; .NET CLR 3.5.30729; '
                   'MS-RTC LM 8; OfficeLiveConnector.1.4; OfficeLivePatch.1.3;'
                   ' .NET CLR 3.0.30729)')
        )
        self.assertEqual(
            'Internet Explorer on Windows 7',
            device('Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; '
                   'Trident/6.0)')
        )
        self.assertEqual(
            'Internet Explorer on Windows 8',
            device('Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; '
                   'Win64; x64; Trident/6.0)')
        )
        self.assertEqual(
            'Internet Explorer on Windows 8.1',
            device('Mozilla/5.0 (IE 11.0; Windows NT 6.3; Trident/7.0; '
                   '.NET4.0E; .NET4.0C; rv:11.0) like Gecko')
        )
    def test_apple(self):
        self.assertEqual(
            'Safari on iPad',
            device('Mozilla/5.0 (iPad; U; CPU OS 4_2_1 like Mac OS X; ja-jp) '
                   'AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 '
                   'Mobile/8C148 Safari/6533.18.5')
        )
        self.assertEqual(
            'Safari on iPhone',
            device('Mozilla/5.0 (iPhone; CPU iPhone OS 7_0 like Mac OS X) '
                   'AppleWebKit/537.51.1 (KHTML, like Gecko) Version/7.0 '
                   'Mobile/11A465 Safari/9537.53')
        )
        self.assertEqual(
            'Safari on OS X',
            device('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2) '
                   'AppleWebKit/536.26.17 (KHTML, like Gecko) Version/6.0.2 '
                   'Safari/536.26.17')
        )
    def test_android(self):
        # androids identify themselves as Safari to get the good stuff
        self.assertEqual(
            'Safari on Android',
            device('Mozilla/5.0 (Linux; U; Android 1.5; de-de; HTC Magic '
                   'Build/CRB17) AppleWebKit/528.5+ (KHTML, like Gecko) '
                   'Version/3.1.2 Mobile Safari/525.20.1')
        )
    def test_firefox(self):
        self.assertEqual(
            'Firefox on Windows 7',
            device('Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:22.0) '
                   'Gecko/20130328 Firefox/22.0')
        )
    def test_chrome(self):
        self.assertEqual(
            'Chrome on Windows 8.1',
            device('Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 ('
                   'KHTML, like Gecko) Chrome/30.0.1599.101 Safari/537.36')
        )
@skipUnless(django.VERSION >= (1, 5), "Django 1.5 and higher")
class ClearsessionsCommandTest(TestCase):
    """Tests for the ``clearsessions`` management command."""
    def test_can_call(self):
        # A session that expired yesterday must be purged by the command.
        Session.objects.create(expire_date=datetime.now() - timedelta(days=1),
                               ip='127.0.0.1')
        call_command('clearsessions')
        self.assertEqual(Session.objects.count(), 0)
| jmp0xf/django-user-sessions | tests/tests.py | Python | mit | 14,675 |
import os
from StringIO import StringIO
import pickle
import sqlite3
from python_zipcodes.exceptions import ImproperlyConfiguredError
from python_zipcodes.django_app.zipcodes.models import ZipCode
class DummyStorage(object):
    """In-memory zip-code storage backend.

    Subclasses persist the parsed data somewhere real by overriding
    ``read``/``save``/``drop``; this base class only keeps ``cached_data``
    in memory. The ``importer`` keyword argument is required and must
    provide ``cache_dir``, ``txt``, ``download()`` and ``parse(stream)``.
    """
    def __init__(self, *args, **kwargs):
        # Only the kwargs lookup belongs in the try block: previously
        # fill_cache() was inside it too, so any KeyError raised while
        # parsing was misreported as a configuration problem.
        try:
            self.importer = kwargs['importer']
        except KeyError:
            raise ImproperlyConfiguredError
        self.fill_cache()
    def fill_cache(self):
        """Populate the in-memory cache from the backing store."""
        self.cached_data = self.read()
    def drop(self):
        """Delete all the data"""
        self.cached_data = {}
    def update(self):
        """Re-download the source data and rebuild the store from scratch."""
        self.drop()
        content = self.importer.download()
        try:
            self.cached_data = self.importer.parse(content)
        finally:
            # Ensure the downloaded stream is released even if parsing fails.
            content.close()
        self.save(self.cached_data)
    def read(self):
        """Reads data from the storage format"""
        return self.compile()
    def save(self, zip_codes):
        # Base class has no persistent backend; subclasses override this.
        pass
    def compile(self):
        """Open the cached source file (downloading it on a cache miss),
        parse it into a dict and persist the result via save()."""
        try:
            content = open(os.path.join(self.importer.cache_dir, self.importer.txt), 'rb')
        except IOError:
            content = self.importer.download()
        try:
            content.seek(0)
            zip_codes = self.importer.parse(content)
            self.save(zip_codes)
        finally:
            # Previously the stream leaked whenever parse()/save() raised.
            content.close()
        return zip_codes
class PickleStorage(DummyStorage):
    """Storage backend that persists the zip-code dict in a pickle file
    inside the importer's cache directory."""
    pickled = 'zipcodes.pickled'
    def drop(self):
        os.remove(os.path.join(self.importer.cache_dir, self.pickled))
        super(PickleStorage, self).drop()
    def read(self):
        """Load the pickled dict, falling back to compile() on a miss."""
        try:
            # BUG FIX: 'rb' was previously passed to os.path.join() as a
            # path component instead of to open() as the file mode, so the
            # wrong path was opened (and in text mode).
            with open(os.path.join(self.importer.cache_dir, self.pickled), 'rb') as f:
                return pickle.load(f)
        except IOError:
            return self.compile()
    def save(self, zip_codes):
        # Context managers guarantee the handles are closed on error too.
        with open(os.path.join(self.importer.cache_dir, self.pickled), 'wb') as f:
            pickle.dump(zip_codes, f)
class SqliteStorage(DummyStorage):
    """Storage backend keeping zip codes in a local SQLite database file."""
    db = 'zipcodes.db'
    def fill_cache(self):
        # sqlite3.Row lets read() access columns by name.
        self.conn = sqlite3.connect(os.path.join(self.importer.cache_dir, self.db))
        self.conn.row_factory = sqlite3.Row
        try:
            super(SqliteStorage, self).fill_cache()
        except sqlite3.OperationalError:
            # The zipcodes table does not exist yet: build it from source.
            self.cached_data = self.compile()
            self.save(self.cached_data)
    def drop(self):
        c = self.conn.cursor()
        c.execute("""drop table if exists zipcodes""")
        c.execute("""create table zipcodes
            (zipcode text, city text, state text)""")
        self.conn.commit()
        # or we could just delete the file
        #os.remove(os.path.join(self.importer.cache_dir, self.db))
        c.close()
        super(SqliteStorage, self).drop()
    def read(self):
        # Stream all rows into a {zipcode: {city, state}} dict.
        c = self.conn.cursor()
        zipcodes = {}
        c.execute('select * from zipcodes')
        for r in c:
            zipcodes[r['zipcode']] = {'city':r['city'], 'state':r['state']}
        c.close()
        return zipcodes
    def save(self, zip_codes):
        # Parameterised inserts avoid quoting/injection issues.
        c = self.conn.cursor()
        for k, r in zip_codes.items():
            c.execute('insert into zipcodes values (?,?,?)', [k, r['city'], r['state']])
        self.conn.commit()
        c.close()
class DjangoStorage(DummyStorage):
    """Storage backend persisting zip codes through a Django model."""
    def __init__(self, *args, **kwargs):
        # The model class may be overridden via kwargs; defaults to ZipCode.
        self.model = kwargs.get('model', ZipCode)
        super(DjangoStorage, self).__init__(*args, **kwargs)
    def drop(self):
        # Only delete rows belonging to this importer's country.
        self.model.objects.filter(country=self.importer.country).delete()
        super(DjangoStorage, self).drop()
    def read(self):
        zipcodes = self.model.objects.filter(country=self.importer.country)
        zip_dict = {}
        for z in zipcodes:
            zip_dict[z.zipcode] = {'city':z.city, 'state':z.state}
        return zip_dict
    def save(self, zip_codes):
        for k, r in zip_codes.items():
            self.model.objects.create(zipcode=k, city=r['city'], state=r['state'], country=self.importer.country)
| fcurella/python_zipcodes | python_zipcodes/storages.py | Python | mit | 3,958 |
import random
import sys

# Inclusive bounds for a single die roll.
MIN = 1
MAX = 6

# raw_input() is Python 2 only; bind whichever input function exists so the
# script runs unchanged on both interpreters.
try:
    _read_input = raw_input  # Python 2
except NameError:
    _read_input = input  # Python 3

def roll():
    """Return a single die roll in the inclusive range [MIN, MAX]."""
    return random.randint(MIN, MAX)

def main():
    """Roll two dice repeatedly until the user declines another round."""
    again = 'y'
    while again in ('y', 'Y'):
        print('Rolling the dice ...')
        print('Their values are:')
        print(roll())
        print(roll())
        # str() around the input call was redundant; input already returns str.
        again = _read_input('Roll them again? (y - yes): ')

if __name__ == '__main__':
    main()
    # sys.exit(0) replaces os._exit(1): os._exit skips cleanup handlers, and
    # the hard-coded status 1 wrongly reported failure on a normal exit.
    sys.exit(0)
| bradyhouse/house | fiddles/python/template/script.py | Python | mit | 347 |
import gzip
import sys

# Alias kept so callers can swap in a different opener (e.g. bz2.open).
opener = gzip.open

def write_spaced(path, text):
    """Write *text* to a gzip file at *path*, space-separating its characters."""
    # The context manager guarantees the stream is flushed and closed even if
    # the write raises; the original left the handle open on error.
    with opener(path, "wt") as f:
        f.write(' '.join(text))

if __name__ == '__main__':
    # NOTE(review): sys.argv[1] is used both as the output path and as the
    # text to write, and ' '.join over a string spaces out its characters.
    # Possibly ' '.join(sys.argv[2:]) was intended -- confirm before changing.
    write_spaced(sys.argv[1], sys.argv[1])
| devak23/python | reader/compressed/gizpped.py | Python | mit | 150 |
# Copyright (c) 2016, Science and Technology Facilities Council
# This software is distributed under a BSD licence. See LICENSE.txt.
"""
Tests for mrcfile validation functions.
"""
# Import Python 3 features for future-proofing
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import io
import os
import shutil
import sys
import tempfile
import unittest
import warnings
import numpy as np
import mrcfile
from mrcfile.validator import validate_all
from . import helpers
class ValidationTest(helpers.AssertRaisesRegexMixin, unittest.TestCase):
"""Unit tests for MRC validation functions.
"""
    def setUp(self):
        """Create a temp output directory, resolve test-file paths, and
        capture stdout/stderr so tests can assert nothing leaks to them."""
        super(ValidationTest, self).setUp()
        # Set up test files and names to be used
        self.test_data = helpers.get_test_data_path()
        self.test_output = tempfile.mkdtemp()
        self.temp_mrc_name = os.path.join(self.test_output, 'test_mrcfile.mrc')
        self.example_mrc_name = os.path.join(self.test_data, 'EMD-3197.map')
        self.gzip_mrc_name = os.path.join(self.test_data, 'emd_3197.map.gz')
        self.bzip2_mrc_name = os.path.join(self.test_data, 'EMD-3197.map.bz2')
        self.ext_header_mrc_name = os.path.join(self.test_data, 'EMD-3001.map')
        self.fei1_ext_header_mrc_name = os.path.join(self.test_data, 'fei-extended.mrc')
        self.fei2_ext_header_mrc_name = os.path.join(self.test_data, 'epu2.9_example.mrc')
        # Set up stream to catch print output from validate()
        self.print_stream = io.StringIO()
        # Replace stdout and stderr to capture output for checking
        self.orig_stdout = sys.stdout
        self.orig_stderr = sys.stderr
        sys.stdout = io.StringIO()
        sys.stderr = io.StringIO()
    def tearDown(self):
        """Restore the real stdout/stderr and remove temporary output."""
        # Restore stdout and stderr
        sys.stdout = self.orig_stdout
        sys.stderr = self.orig_stderr
        self.print_stream.close()
        if os.path.exists(self.test_output):
            shutil.rmtree(self.test_output)
        super(ValidationTest, self).tearDown()
    def test_good_file(self):
        # A self-consistent, freshly written file must validate with no
        # output on the report stream, stdout or stderr. The remaining
        # tests in this class follow this same pattern with one header
        # field corrupted at a time.
        with mrcfile.new(self.temp_mrc_name) as mrc:
            mrc.set_data(np.arange(36, dtype=np.float32).reshape(3, 3, 4))
            mrc.voxel_size = 2.3
        result = mrcfile.validate(self.temp_mrc_name, self.print_stream)
        assert result == True
        print_output = self.print_stream.getvalue()
        assert len(print_output) == 0
        assert len(sys.stdout.getvalue()) == 0
        assert len(sys.stderr.getvalue()) == 0
def test_emdb_file(self):
result = mrcfile.validate(self.example_mrc_name, self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert print_output.strip() == ("File does not declare MRC format "
"version 20140: nversion = 0")
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_gzip_emdb_file(self):
result = mrcfile.validate(self.gzip_mrc_name, self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert print_output.strip() == ("File does not declare MRC format "
"version 20140: nversion = 0")
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_bzip2_emdb_file(self):
result = mrcfile.validate(self.bzip2_mrc_name, self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert print_output.strip() == ("File does not declare MRC format "
"version 20140: nversion = 0")
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_emdb_cryst_file(self):
result = mrcfile.validate(self.ext_header_mrc_name, self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert print_output.strip() == ("File does not declare MRC format "
"version 20140: nversion = 0\n"
"Extended header type is undefined or "
"unrecognised: exttyp = ''")
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
    def check_temp_mrc_invalid_with_warning(self, message):
        """Validate the temp file and assert that validation fails, the
        report mentions *message* (case-insensitively), and exactly one
        RuntimeWarning containing *message* was raised."""
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            result = mrcfile.validate(self.temp_mrc_name,
                                      print_file=self.print_stream)
            assert result == False
            print_output = self.print_stream.getvalue()
            assert message.lower() in print_output.lower()
            assert len(w) == 1
            assert issubclass(w[0].category, RuntimeWarning)
            assert message in str(w[0].message)
def test_incorrect_map_id(self):
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.header.map = b'fake'
self.check_temp_mrc_invalid_with_warning("Map ID string")
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_short_map_id(self):
"""This tests the case of files where the map ID is almost correct.
For example, MotionCor2 writes files with ID 'MAP\0', which is not
valid according to the MRC2014 spec on the CCP-EM website, but could
be considered valid according to the MRC2014 paper (which just
specifies 'MAP', i.e. without the final byte). We should read such
files without errors or warnings, but they should fail a strict
validation check."""
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.header.map = b'MAP\0'
result = mrcfile.validate(self.temp_mrc_name,
print_file=self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert "Map ID string is incorrect" in print_output
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_incorrect_machine_stamp(self):
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.header.machst = bytearray(b' ')
self.check_temp_mrc_invalid_with_warning("machine stamp")
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_invalid_mode(self):
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.set_data(np.arange(12, dtype=np.float32).reshape(1, 3, 4))
mrc.header.mode = 8
self.check_temp_mrc_invalid_with_warning("mode")
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_file_too_small(self):
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.set_data(np.arange(12, dtype=np.float32).reshape(1, 3, 4))
mrc.header.nz = 2
self.check_temp_mrc_invalid_with_warning("data block")
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_file_too_large(self):
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.set_data(np.arange(36, dtype=np.float32).reshape(3, 3, 4))
mrc.header.nz = 2
self.check_temp_mrc_invalid_with_warning("larger than expected")
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_negative_mx(self):
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.header.mx = -10
result = mrcfile.validate(self.temp_mrc_name,
print_file=self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert "Header field 'mx' is negative" in print_output
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_negative_my(self):
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.header.my = -10
result = mrcfile.validate(self.temp_mrc_name,
print_file=self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert "Header field 'my' is negative" in print_output
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_negative_mz(self):
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.header.mz = -10
result = mrcfile.validate(self.temp_mrc_name,
print_file=self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert "Header field 'mz' is negative" in print_output
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_negative_ispg(self):
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.header.ispg = -10
result = mrcfile.validate(self.temp_mrc_name,
print_file=self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert "Header field 'ispg' is negative" in print_output
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_negative_nlabl(self):
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.header.nlabl = -3
result = mrcfile.validate(self.temp_mrc_name,
print_file=self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert "Header field 'nlabl' is negative" in print_output
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_negative_cella_x(self):
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.header.cella.x = -10
result = mrcfile.validate(self.temp_mrc_name,
print_file=self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert "Cell dimension 'x' is negative" in print_output
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_invalid_axis_mapping(self):
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.header.mapc = 3
mrc.header.mapr = 4
mrc.header.maps = -200
result = mrcfile.validate(self.temp_mrc_name,
print_file=self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert "Invalid axis mapping: found [-200, 3, 4]" in print_output
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_mz_correct_for_volume_stack(self):
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.set_data(np.arange(120, dtype=np.float32).reshape(3, 2, 4, 5))
with warnings.catch_warnings(record=True):
result = mrcfile.validate(self.temp_mrc_name,
print_file=self.print_stream)
assert result == True
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_mz_incorrect_for_volume_stack(self):
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.set_data(np.arange(120, dtype=np.float32).reshape(3, 2, 4, 5))
mrc.header.mz = 5
with warnings.catch_warnings(record=True):
result = mrcfile.validate(self.temp_mrc_name,
print_file=self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert ("Error in dimensions for volume stack: nz should be "
"divisible by mz. Found nz = 6, mz = 5" in print_output)
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_nlabl_too_large(self):
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.header.label[1] = 'test label'
mrc.header.nlabl = 3
result = mrcfile.validate(self.temp_mrc_name,
print_file=self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert ("Error in header labels: "
"nlabl is 3 but 2 labels contain text" in print_output)
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_nlabl_too_small(self):
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.header.label[1] = 'test label'
mrc.header.nlabl = 1
result = mrcfile.validate(self.temp_mrc_name,
print_file=self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert ("Error in header labels: "
"nlabl is 1 but 2 labels contain text" in print_output)
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_empty_labels_in_list(self):
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.header.label[2] = 'test label'
mrc.header.nlabl = 2
result = mrcfile.validate(self.temp_mrc_name,
print_file=self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert ("Error in header labels: empty labels appear between "
"text-containing labels" in print_output)
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_incorrect_format_version(self):
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.header.nversion = 20139
result = mrcfile.validate(self.temp_mrc_name,
print_file=self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert "File does not declare MRC format version 20140" in print_output
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_missing_exttyp(self):
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.set_extended_header(np.arange(10))
result = mrcfile.validate(self.temp_mrc_name,
print_file=self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert "Extended header type is undefined or unrecognised" in print_output
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_unknown_exttyp(self):
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.set_extended_header(np.arange(10))
mrc.header.exttyp = 'Fake'
result = mrcfile.validate(self.temp_mrc_name,
print_file=self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert "Extended header type is undefined or unrecognised" in print_output
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_incorrect_rms(self):
data = np.arange(-10, 20, dtype=np.float32).reshape(2, 3, 5)
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.set_data(data)
mrc.header.rms = 9.0
result = mrcfile.validate(self.temp_mrc_name,
print_file=self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert ("Error in data statistics: RMS deviation is {0} but the value "
"in the header is 9.0".format(data.std()) in print_output)
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_rms_undetermined(self):
data = np.arange(-10, 20, dtype=np.float32).reshape(2, 3, 5)
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.set_data(data)
mrc.header.rms = -15
result = mrcfile.validate(self.temp_mrc_name,
print_file=self.print_stream)
assert result == True
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_incorrect_dmin(self):
data = np.arange(-10, 20, dtype=np.float32).reshape(2, 3, 5)
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.set_data(data)
mrc.header.dmin = -11
result = mrcfile.validate(self.temp_mrc_name,
print_file=self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert ("Error in data statistics: minimum is {0} but the value "
"in the header is -11".format(data.min()) in print_output)
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_incorrect_dmax(self):
data = np.arange(-10, 20, dtype=np.float32).reshape(2, 3, 5)
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.set_data(data)
mrc.header.dmax = 15
result = mrcfile.validate(self.temp_mrc_name,
print_file=self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert ("Error in data statistics: maximum is {0} but the value "
"in the header is 15".format(data.max()) in print_output)
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_min_and_max_undetermined(self):
data = np.arange(-10, 20, dtype=np.float32).reshape(2, 3, 5)
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.set_data(data)
mrc.header.dmin = 30.1
mrc.header.dmax = 30.0
result = mrcfile.validate(self.temp_mrc_name,
print_file=self.print_stream)
assert result == True
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_incorrect_dmean(self):
data = np.arange(-10, 20, dtype=np.float32).reshape(2, 3, 5)
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.set_data(data)
mrc.header.dmean = -2.5
result = mrcfile.validate(self.temp_mrc_name,
print_file=self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert ("Error in data statistics: mean is {0} but the value "
"in the header is -2.5".format(data.mean()) in print_output)
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_incorrect_dmean_with_undetermined_dmin_and_dmax(self):
data = np.arange(-10, 20, dtype=np.float32).reshape(2, 3, 5)
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.set_data(data)
mrc.header.dmin = 20
mrc.header.dmax = -30.1
mrc.header.dmean = -2.5
result = mrcfile.validate(self.temp_mrc_name,
print_file=self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert ("Error in data statistics: mean is {0} but the value "
"in the header is -2.5".format(data.mean()) in print_output)
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_mean_undetermined(self):
data = np.arange(-10, 20, dtype=np.float32).reshape(2, 3, 5)
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.set_data(data)
mrc.header.dmean = -11
result = mrcfile.validate(self.temp_mrc_name,
print_file=self.print_stream)
assert result == True
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_min_max_and_mean_undetermined(self):
data = np.arange(-10, 20, dtype=np.float32).reshape(2, 3, 5)
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.set_data(data)
mrc.header.dmin = 30.1
mrc.header.dmax = 30.0
mrc.header.dmean = 29.9
result = mrcfile.validate(self.temp_mrc_name,
print_file=self.print_stream)
assert result == True
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_many_problems_simultaneously(self):
data = np.arange(-10, 20, dtype=np.float32).reshape(3, 2, 5)
with mrcfile.new(self.temp_mrc_name) as mrc:
mrc.set_data(data)
mrc.set_extended_header(data)
mrc.header.nz = 2
mrc.header.my = -1000
mrc.header.mz = -5
mrc.header.cella.y = -12.1
mrc.header.mapc = 5
mrc.header.dmin = 10
mrc.header.dmax = 11
mrc.header.dmean = 19.0
mrc.header.ispg = -20
mrc.header.exttyp = 'fake'
mrc.header.nversion = 0
mrc.header.rms = 0.0
mrc.header.nlabl = 4
mrc.header.label[9] = 'test label'
with warnings.catch_warnings(record=True):
result = mrcfile.validate(self.temp_mrc_name,
print_file=self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert len(print_output.split('\n')) == 15
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
    def create_good_files(self):
        """Create some files known to be valid and return their names."""
        good_mrc_name_1 = os.path.join(self.test_output, 'good_file_1.mrc')
        good_mrc_name_2 = os.path.join(self.test_output, 'good_file_2.mrc')
        # Make good files which will pass validation
        with mrcfile.new(good_mrc_name_1) as mrc:
            mrc.set_data(np.arange(36, dtype=np.float32).reshape(3, 3, 4))
        with mrcfile.new(good_mrc_name_2) as mrc:
            mrc.set_data(np.arange(36, dtype=np.uint16).reshape(3, 3, 4))
        # NOTE(review): the two FEI examples presumably carry recognised
        # extended headers and so validate cleanly -- confirm in test data.
        return [
            good_mrc_name_1,
            good_mrc_name_2,
            self.fei1_ext_header_mrc_name,
            self.fei2_ext_header_mrc_name
        ]
def test_validate_good_files(self):
good_files = self.create_good_files()
result = validate_all(good_files, print_file=self.print_stream)
assert result == True
print_output = self.print_stream.getvalue()
assert len(print_output) == 0
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_validate_bad_files(self):
bad_files = [
self.example_mrc_name,
self.ext_header_mrc_name,
self.gzip_mrc_name
]
result = validate_all(bad_files, print_file=self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert len(print_output) > 0
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
def test_validate_good_and_bad_files(self):
files = self.create_good_files() + [
self.example_mrc_name,
self.ext_header_mrc_name,
self.gzip_mrc_name
]
result = validate_all(files, print_file=self.print_stream)
assert result == False
print_output = self.print_stream.getvalue()
assert len(print_output) > 0
assert len(sys.stdout.getvalue()) == 0
assert len(sys.stderr.getvalue()) == 0
# Allow running this test module directly with `python`.
if __name__ == '__main__':
    unittest.main()
| ccpem/mrcfile | tests/test_validation.py | Python | bsd-3-clause | 24,573 |
class Solution:
def buddyStrings(self, A: str, B: str) -> bool:
if len(A) != len(B):
return False
swap = 0
sa, sb = None, None
for a, b in zip(A, B):
if a == b:
continue
if swap > 1 or (sa and (sa != b or sb != a)):
return False
swap += 1
sa, sb = a, b
return bool(swap) or (len(set(A)) != len(A))
| pterodragon/programming | Python/leetcode/Buddy String/solution.py | Python | mit | 434 |
from os import environ
# if you set a property in SESSION_CONFIG_DEFAULTS, it will be inherited by all configs
# in SESSION_CONFIGS, except those that explicitly override it.
# the session config can be accessed from methods in your apps as self.session.config,
# e.g. self.session.config['participation_fee']
SESSION_CONFIG_DEFAULTS = {
    'real_world_currency_per_point': 1.00,
    'participation_fee': 0.00,
    'doc': "",
}
# Two session configs that differ only in 'treatment_order'.
SESSION_CONFIGS = [
    {
        'name': 'dill_resp_punish_first',
        'display_name': "Dilution de responsabilité, Punish First",
        'num_demo_participants': 12,
        'app_sequence': ['dill_resp'],
        'treatment_order': 'punish_first'
    },
    {
        'name': 'dill_resp_punish_last',
        'display_name': "Dilution de responsabilité, Punish Last",
        'num_demo_participants': 12,
        'app_sequence': ['dill_resp'],
        'treatment_order': 'punish_last'
    },
]
# ISO-639 code
# for example: de, fr, ja, ko, zh-hans
LANGUAGE_CODE = 'en'
# e.g. EUR, GBP, CNY, JPY
REAL_WORLD_CURRENCY_CODE = 'USD'
USE_POINTS = True
ROOMS = []
CHANNEL_ROUTING = 'redirect.routing.channel_routing'
# AUTH_LEVEL:
# this setting controls which parts of your site are freely accessible,
# and which are password protected:
# - If it's not set (the default), then the whole site is freely accessible.
# - If you are launching a study and want visitors to only be able to
# play your app if you provided them with a start link, set it to STUDY.
# - If you would like to put your site online in public demo mode where
# anybody can play a demo version of your game, but not access the rest
# of the admin interface, set it to DEMO.
# for flexibility, you can set it in the environment variable OTREE_AUTH_LEVEL
AUTH_LEVEL = environ.get('OTREE_AUTH_LEVEL')
ADMIN_USERNAME = 'admin'
# for security, best to set admin password in an environment variable
ADMIN_PASSWORD = environ.get('OTREE_ADMIN_PASSWORD')
# Consider '', None, and '0' to be empty/false
DEBUG = (environ.get('OTREE_PRODUCTION') in {None, '', '0'})
DEMO_PAGE_INTRO_HTML = """ """
# don't share this with anybody.
# NOTE(review): this SECRET_KEY is committed to source control; it should be
# rotated and loaded from an environment variable for any real deployment.
SECRET_KEY = '29*rluv^s95qdbcfe6&mql^2$-_^e7nvtxi_j7r%wl#8g27p(q'
# if an app is included in SESSION_CONFIGS, you don't need to list it here
INSTALLED_APPS = ['otree']
| anthropo-lab/XP | EPHEMER/dill_resp_project/settings.py | Python | gpl-3.0 | 2,310 |
import sys
import pkg_resources
import sphinx_rtd_theme
# Sphinx extensions used to build the docs.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.intersphinx',
    'sphinx.ext.viewcode',
    'sphinx.ext.napoleon'
]
templates_path = ['_templates']
master_doc = 'index'
project = 'bottom'
copyright = '2016, Joe Cross'
author = 'Joe Cross'
# Pull the release from the installed distribution so the docs can never
# disagree with the packaged version; abort with a hint if not installed.
try:
    release = pkg_resources.get_distribution('bottom').version
except pkg_resources.DistributionNotFound:
    print('To build the documentation, The distribution information of bottom')
    print('Has to be available. Either install the package into your')
    print('development environment or run "setup.py develop" to setup the')
    print('metadata. A virtualenv is recommended!')
    sys.exit(1)
del pkg_resources
# Short X.Y version derived from the full release string.
version = '.'.join(release.split('.')[:2])
language = 'en'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
# NOTE(review): html_use_smartypants is deprecated in newer Sphinx releases
# (replaced by smartquotes) -- confirm the Sphinx version used for builds.
html_use_smartypants = False
html_static_path = ["_static"]
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_context = {
    "favicon": "favicon-cog.ico",
    "show_sphinx": False
}
intersphinx_mapping = {
    'python': ('https://docs.python.org/3.6', None),
}
def setup(app):
app.add_stylesheet("bottom.css")
| numberoverzero/bottom | docs/conf.py | Python | mit | 1,217 |
import os
import platform
class TBBTarballVerificationError(Exception):
    """Raised when verification of a Tor Browser Bundle tarball fails."""
    pass
class TBBSigningKeyImportError(Exception):
    """Raised when the TBB signing key cannot be imported."""
    pass
class TBBGetRecommendedVersionError(Exception):
    """Raised when the recommended TBB version cannot be obtained."""
    pass
class DumpcapTimeoutError(Exception):
    """Raised when a dumpcap capture does not finish in time."""
    pass
env_vars = os.environ
# whether we're running on Travis CI or not
running_in_CI = "CONTINUOUS_INTEGRATION" in env_vars and "TRAVIS" in env_vars
# Detect 32- vs 64-bit so the right TBB tarball name can be built.
architecture = platform.architecture()
if '64' in architecture[0]:
    arch = '64'
    machine = 'x86_64'
elif '32' in architecture[0]:
    arch = '32'
    machine = 'i686'
else:
    raise RuntimeError('Architecture is not known: %s' % architecture)
# shortcuts
path = os.path
join = path.join
dirname = os.path.dirname
expanduser = os.path.expanduser
# timeouts and pauses
PAUSE_BETWEEN_SITES = 5 # pause before crawling a new site
WAIT_IN_SITE = 5 # time to wait after the page loads
PAUSE_BETWEEN_INSTANCES = 4 # pause before visiting the same site (instances)
SOFT_VISIT_TIMEOUT = 120 # timeout used by selenium and dumpcap
# signal based hard timeout in case soft timeout fails
HARD_VISIT_TIMEOUT = SOFT_VISIT_TIMEOUT + 10
# max dumpcap size in KB
MAX_DUMP_SIZE = 30000
# max filename length
MAX_FNAME_LENGTH = 200
DISABLE_RANDOMIZEDPIPELINENING = False # use with caution!
STREAM_CLOSE_TIMEOUT = 20 # wait 20 seconds before raising an alarm signal
# otherwise we had many cases where get_streams hanged
# Virtual framebuffer (Xvfb) dimensions for headless browsing.
XVFB_W = 1280
XVFB_H = 720
# Tor browser version suffixes
# The version used by Wang & Goldberg
TBB_V_2_4_7_A1 = "2.4.7-alpha-1"
TBB_WANG_AND_GOLDBERG = TBB_V_2_4_7_A1
TBB_V_3_5 = "3.5"
TBB_V_4_0_8 = "4.0.8"
TBB_V_6_5_2 = "6.5.2"
TBB_DEFAULT_VERSION = TBB_V_6_5_2
TBB_KNOWN_VERSIONS = [TBB_V_2_4_7_A1, TBB_V_3_5, TBB_V_4_0_8, TBB_V_6_5_2]
# Default paths
BASE_DIR = path.abspath(os.path.dirname(__file__))
DATASET_DIR = join(BASE_DIR, "datasets")
ALEXA_DIR = join(DATASET_DIR, "alexa")
TEST_DIR = join(BASE_DIR, 'test')
TEST_FILES_DIR = join(TEST_DIR, 'files')
DUMMY_TEST_DIR = join(TEST_FILES_DIR, 'dummy')
DUMMY_TEST_DIR_TARGZIPPED = DUMMY_TEST_DIR + ".tar.gz"
TBB_TEST_TARBALL = join(TEST_FILES_DIR,
                        'tor-browser-linux64-4.0.99_en-US.tar.xz')
TBB_TEST_TARBALL_EXTRACTED = join(TEST_FILES_DIR,
                                  'tor-browser-linux64-4.0.99_en-US')
RESULTS_DIR = join(BASE_DIR, 'results')
ETC_DIR = join(BASE_DIR, 'etc')
PERMISSIONS_DB = join(ETC_DIR, 'permissions.sqlite')
HOME_PATH = expanduser('~')
TBB_BASE_DIR = join(BASE_DIR, 'tbb')
# Top URLs localized (DE) to prevent the effect of localization
LOCALIZED_DATASET = join(ETC_DIR, "localized-urls-100-top.csv")
# Experiment type determines what to do during the visits
EXP_TYPE_WANG_AND_GOLDBERG = "wang_and_goldberg" # setting from WPES'13 paper
EXP_TYPE_MULTITAB_ALEXA = "multitab_alexa" # open Alexa sites in multiple tabs
# Tor ports
SOCKS_PORT = 9050
CONTROLLER_PORT = 9051
MAX_ENTRY_GUARDS = "1"
# defaults for batch and instance numbers
NUM_BATCHES = 2
NUM_INSTANCES = 4
MAX_SITES_PER_TOR_PROCESS = 100 # reset tor process after crawling 100 sites
# torrc dictionaries
TORRC_DEFAULT = {'SocksPort': str(SOCKS_PORT),
                 'ControlPort': str(CONTROLLER_PORT)}
TORRC_WANG_AND_GOLDBERG = {'SocksPort': str(SOCKS_PORT),
                           'ControlPort': str(CONTROLLER_PORT),
                           'MaxCircuitDirtiness': '600000',
                           'UseEntryGuards': '0'
                           }
# Directory structure and paths depend on TBB versions
# Path to Firefox binary in TBB dir
TBB_V2_FF_BIN_PATH = join('App', 'Firefox', 'firefox')
TBB_V3_FF_BIN_PATH = join('Browser', 'firefox')
TBB_V4_FF_BIN_PATH = join('Browser', 'firefox')
TBB_V6_FF_BIN_PATH = TBB_V4_FF_BIN_PATH
TBB_FF_BIN_PATH_DICT = {"2": TBB_V2_FF_BIN_PATH,
                        "3": TBB_V3_FF_BIN_PATH,
                        "4": TBB_V4_FF_BIN_PATH,
                        "6": TBB_V6_FF_BIN_PATH,
                        }
# Path to Firefox profile in TBB dir
TBB_V2_PROFILE_PATH = join('Data', 'profile')
TBB_V3_PROFILE_PATH = join('Data', 'Browser', 'profile.default')
TBB_V4_PROFILE_PATH = join('Browser', 'TorBrowser', 'Data',
                           'Browser', 'profile.default')
TBB_V6_PROFILE_PATH = TBB_V4_PROFILE_PATH
TBB_PROFILE_DIR_DICT = {"2": TBB_V2_PROFILE_PATH,
                        "3": TBB_V3_PROFILE_PATH,
                        "4": TBB_V4_PROFILE_PATH,
                        "6": TBB_V6_PROFILE_PATH,
                        }
# Path to Tor binary in TBB dir
TOR_V2_BINARY_PATH = join('App', 'tor')
TOR_V3_BINARY_PATH = join('Tor', 'tor')
TOR_V4_BINARY_PATH = join('Browser', 'TorBrowser', 'Tor', 'tor')
TOR_V6_BINARY_PATH = TOR_V4_BINARY_PATH
TOR_BINARY_PATH_DICT = {"2": TOR_V2_BINARY_PATH,
                        "3": TOR_V3_BINARY_PATH,
                        "4": TOR_V4_BINARY_PATH,
                        "6": TOR_V6_BINARY_PATH,
                        }
# Path to Tor data directory in TBB dir (comment fixed: was a copy-paste
# of the "Tor binary" comment above)
TOR_V2_DATA_DIR = join('Data', 'Tor')
TOR_V3_DATA_DIR = join('Data', 'Tor')
TOR_V4_DATA_DIR = join('Browser', 'TorBrowser', 'Data', 'Tor')
TOR_V6_DATA_DIR = join('Browser', 'TorBrowser', 'Data', 'Tor')
TOR_DATA_DIR_DICT = {"2": TOR_V2_DATA_DIR,
                     "3": TOR_V3_DATA_DIR,
                     "4": TOR_V4_DATA_DIR,
                     "6": TOR_V6_DATA_DIR,
                     }
def get_tbb_major_version(version):
    """Return the major component of a TBB version string, e.g. '6' for '6.5.2'."""
    major, _, _ = version.partition(".")
    return major
def get_tbb_dirname(version, os_name="linux", lang="en-US"):
    """Return the directory name of a TBB release for this host's architecture."""
    # `arch` is the module-level '32'/'64' string detected from the platform.
    return "tor-browser-{0}{1}-{2}_{3}".format(os_name, arch, version, lang)
def get_tbb_path(version, os_name="linux", lang="en-US"):
    """Return the extraction directory of the given TBB release."""
    # Avoids the original's local variable that shadowed the module-level
    # `dirname` alias for os.path.dirname.
    return join(TBB_BASE_DIR, get_tbb_dirname(version, os_name, lang))
def get_tb_bin_path(version, os_name="linux", lang="en-US"):
    """Return the path to the Tor Browser (Firefox) binary for `version`.

    Removed the stale commented-out fallback, which also used the
    `major is "3"` identity comparison anti-pattern.
    """
    major = get_tbb_major_version(version)
    bin_path = TBB_FF_BIN_PATH_DICT[major]
    dir_path = get_tbb_path(version, os_name, lang)
    return join(dir_path, bin_path)
def get_tor_bin_path(version, os_name="linux", lang="en-US"):
    """Return the path to the `tor` binary bundled with the given TBB."""
    bundle_dir = get_tbb_path(version, os_name, lang)
    rel_bin = TOR_BINARY_PATH_DICT[get_tbb_major_version(version)]
    return join(bundle_dir, rel_bin)
def get_tbb_profile_path(version, os_name="linux", lang="en-US"):
    """Return the Firefox profile directory inside the given TBB."""
    bundle_dir = get_tbb_path(version, os_name, lang)
    rel_profile = TBB_PROFILE_DIR_DICT[get_tbb_major_version(version)]
    return join(bundle_dir, rel_profile)
def get_tor_data_path(version, os_name="linux", lang="en-US"):
    """Return Tor's Data directory inside the given TBB."""
    bundle_dir = get_tbb_path(version, os_name, lang)
    rel_data = TOR_DATA_DIR_DICT[get_tbb_major_version(version)]
    return join(bundle_dir, rel_data)
| pankajb64/webfp-crawler-phantomjs | tor-browser-crawler-webfp-paper/common.py | Python | gpl-2.0 | 7,072 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Abhijeet Kasurde <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: vmware_host_config_manager
short_description: Manage advance configurations about an ESXi host
description:
- This module can be used to manage advance configuration information about an ESXi host when ESXi hostname or Cluster name is given.
version_added: '2.5'
author:
- Abhijeet Kasurde (@Akasurde)
notes:
- Tested on vSphere 6.5
requirements:
- python >= 2.6
- PyVmomi
options:
cluster_name:
description:
- Name of the cluster.
- Settings are applied to every ESXi host system in given cluster.
- If C(esxi_hostname) is not given, this parameter is required.
esxi_hostname:
description:
- ESXi hostname.
- Settings are applied to this ESXi host system.
- If C(cluster_name) is not given, this parameter is required.
options:
description:
- A dictionary of advance configuration parameters.
- Invalid options will cause module to error.
default: {}
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = r'''
- name: Manage Log level setting for all ESXi Host in given Cluster
vmware_host_config_manager:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
cluster_name: cluster_name
options:
'Config.HostAgent.log.level': 'info'
delegate_to: localhost
- name: Manage Log level setting for an ESXi Host
vmware_host_config_manager:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
esxi_hostname: '{{ esxi_hostname }}'
options:
'Config.HostAgent.log.level': 'verbose'
delegate_to: localhost
- name: Manage multiple settings for an ESXi Host
vmware_host_config_manager:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
esxi_hostname: '{{ esxi_hostname }}'
options:
'Config.HostAgent.log.level': 'verbose'
'Annotations.WelcomeMessage': 'Hello World'
'Config.HostAgent.plugins.solo.enableMob': false
delegate_to: localhost
'''
RETURN = r'''#
'''
try:
from pyVmomi import vim, vmodl, VmomiSupport
except ImportError:
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import vmware_argument_spec, PyVmomi
from ansible.module_utils._text import to_native
from ansible.module_utils.six import integer_types, string_types
class VmwareConfigManager(PyVmomi):
    """Apply advanced-option key/value settings to one or more ESXi hosts."""
    def __init__(self, module):
        """Resolve target hosts from either a cluster name or one ESXi hostname."""
        super(VmwareConfigManager, self).__init__(module)
        cluster_name = self.params.get('cluster_name', None)
        esxi_host_name = self.params.get('esxi_hostname', None)
        self.options = self.params.get('options', dict())
        self.hosts = self.get_all_host_objs(cluster_name=cluster_name, esxi_host_name=esxi_host_name)
    @staticmethod
    def is_integer(value, type_of='int'):
        """Return True if `value` converts cleanly to the given vmodl type."""
        try:
            VmomiSupport.vmodlTypes[type_of](value)
            return True
        except (TypeError, ValueError):
            return False
    @staticmethod
    def is_boolean(value):
        """Return True if `value` looks like a boolean-ish string or bool."""
        if str(value).lower() in ['true', 'on', 'yes', 'false', 'off', 'no']:
            return True
        return False
    @staticmethod
    def is_truthy(value):
        """Return the boolean meaning of a boolean-ish value (True variants only)."""
        if str(value).lower() in ['true', 'on', 'yes']:
            return True
        return False
    def set_host_configuration_facts(self):
        """Coerce requested option values to the host's expected vmodl types
        and push only the options whose values actually change."""
        changed = False
        for host in self.hosts:
            option_manager = host.configManager.advancedOption
            # Map option key -> current value (+ expected type when supported).
            host_facts = {}
            for option in option_manager.QueryOptions():
                host_facts[option.key] = dict(value=option.value)
            for s_option in option_manager.supportedOption:
                host_facts[s_option.key].update(
                    option_type=s_option.optionType,
                )
            change_option_list = []
            for option_key, option_value in self.options.items():
                if option_key in host_facts:
                    # Make sure option_type is defined some values do not have
                    # it defined and appear to be read only.
                    if 'option_type' in host_facts[option_key]:
                        # We handle all supported types here so we can give meaningful errors.
                        option_type = host_facts[option_key]['option_type']
                        if self.is_boolean(option_value) and isinstance(option_type, vim.option.BoolOption):
                            option_value = self.is_truthy(option_value)
                        elif (isinstance(option_value, integer_types) or self.is_integer(option_value))\
                                and isinstance(option_type, vim.option.IntOption):
                            option_value = VmomiSupport.vmodlTypes['int'](option_value)
                        elif (isinstance(option_value, integer_types) or self.is_integer(option_value, 'long'))\
                                and isinstance(option_type, vim.option.LongOption):
                            option_value = VmomiSupport.vmodlTypes['long'](option_value)
                        elif isinstance(option_value, float) and isinstance(option_type, vim.option.FloatOption):
                            pass
                        elif isinstance(option_value, string_types) and isinstance(option_type, (vim.option.StringOption, vim.option.ChoiceOption)):
                            pass
                        else:
                            self.module.fail_json(msg="Provided value is of type %s."
                                                      " Option %s expects: %s" % (type(option_value), option_key, type(option_type)))
                    else:
                        self.module.fail_json(msg="Cannot change read only option %s to %s." % (option_key, option_value))
                    # Only queue an update when the (coerced) value differs.
                    if option_value != host_facts[option_key]['value']:
                        change_option_list.append(vim.option.OptionValue(key=option_key, value=option_value))
                        changed = True
                else:  # Don't silently drop unknown options. This prevents typos from falling through the cracks.
                    self.module.fail_json(msg="Unknown option %s" % option_key)
            if changed:
                try:
                    option_manager.UpdateOptions(changedValue=change_option_list)
                except (vmodl.fault.SystemError, vmodl.fault.InvalidArgument) as e:
                    self.module.fail_json(msg="Failed to update option/s as one or more OptionValue "
                                              "contains an invalid value: %s" % to_native(e.msg))
                except vim.fault.InvalidName as e:
                    self.module.fail_json(msg="Failed to update option/s as one or more OptionValue "
                                              "objects refers to a non-existent option : %s" % to_native(e.msg))
        self.module.exit_json(changed=changed)
def main():
    """Module entry point: build the argument spec, then apply host options."""
    spec = vmware_argument_spec()
    spec.update(
        cluster_name=dict(type='str', required=False),
        esxi_hostname=dict(type='str', required=False),
        options=dict(type='dict', default=dict(), required=False),
    )
    # At least one way of selecting target hosts must be supplied.
    module = AnsibleModule(
        argument_spec=spec,
        required_one_of=[
            ['cluster_name', 'esxi_hostname'],
        ]
    )
    VmwareConfigManager(module).set_host_configuration_facts()
if __name__ == "__main__":
    main()
| caphrim007/ansible | lib/ansible/modules/cloud/vmware/vmware_host_config_manager.py | Python | gpl-3.0 | 7,992 |
#! /usr/bin/python3
# This class defines a "BeerEntry" object.
class BeerEntry:
    """One beer-log record: name, brewery, integer rating, and a GPS fix."""

    def __init__(self):
        self.__name = ""
        self.__brewery = ""
        self.__rating = 0
        self.__gps = [0.0, 0.0]

    def __str__(self):
        fields = ("Beer: " + self.getName(),
                  "Brewery: " + self.getBrewery(),
                  "Rating: " + str(self.getRating()),
                  "GPS: " + str(self.getGps()))
        return "\n".join(fields)

    ############################################################################
    # Accessors -- kept as get/set pairs so existing callers keep working.

    def getName(self):
        return self.__name

    def setName(self, str):
        # NOTE(review): the parameter shadows the builtin `str`; name kept
        # only to preserve the keyword-call interface.
        self.__name = str

    def getBrewery(self):
        return self.__brewery

    def setBrewery(self, str):
        self.__brewery = str

    def getRating(self):
        return self.__rating

    def setRating(self, rate):
        self.__rating = int(rate)

    def getGps(self):
        # Returns the internal list itself (callers may observe later updates).
        return self.__gps

    def setGps(self, lat, lon):
        # In-place slice assignment keeps any previously handed-out
        # reference from getGps() in sync, matching the original behavior.
        self.__gps[:] = (float(lat), float(lon))
| tjhughes1990/tjhughes1990.github.io | beergps/BeerEntry.py | Python | gpl-3.0 | 1,060 |
#
# Copyright (C) 2018 University of Oxford
#
# This file is part of msprime.
#
# msprime is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# msprime is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with msprime. If not, see <http://www.gnu.org/licenses/>.
#
"""
Tests for the recombination map functionality, mapping continuous physical
coordinates to discrete genetic loci and vice versa.
"""
from __future__ import print_function
from __future__ import division
import unittest
import random
import tempfile
import os
import gzip
import numpy as np
import msprime
class PythonRecombinationMap(object):
    """
    A Python implementation of the RecombinationMap interface.

    NOTE: the tests below compare some results to the C implementation
    with exact float equality, so the order of floating point operations
    here is significant.
    """
    def __init__(self, positions, rates, num_loci):
        # positions must be sorted, start at 0, and pair 1:1 with rates;
        # the last position defines the sequence length.
        assert len(positions) == len(rates)
        assert len(positions) >= 2
        assert sorted(positions) == positions
        assert positions[0] == 0
        self._positions = positions
        self._sequence_length = positions[-1]
        self._rates = rates
        self._num_loci = num_loci
    def get_total_recombination_rate(self):
        """
        Returns the effective recombination rate for this genetic map.
        This is the weighted mean of the rates across all intervals.
        """
        x = self._positions
        effective_rate = 0
        for j in range(len(x) - 1):
            length = (x[j + 1] - x[j])
            effective_rate += self._rates[j] * length
        return effective_rate
    def _physical_to_genetic_zero_rate(self, x):
        """
        If we have a zero recombination rate throughout, then we only have
        two possible values. Any value < L maps to 0, as this is the start
        of the interval. If x = L, then we map to num_loci.
        """
        ret = 0
        if x >= self._sequence_length:
            ret = self._num_loci
        return ret
    def _genetic_to_physical_zero_rate(self, v):
        """
        If we have a zero recombination rate throughout then everything except
        L maps to 0.
        """
        return self._sequence_length if v == self._num_loci else 0
    def physical_to_genetic(self, x):
        """Map physical coordinate x to a (continuous) genetic coordinate
        in [0, num_loci] by accumulating recombination mass up to x."""
        if self.get_total_recombination_rate() == 0:
            return self._physical_to_genetic_zero_rate(x)
        s = 0
        last_phys_x = 0
        j = 1
        while j < len(self._positions) - 1 and x > self._positions[j]:
            phys_x = self._positions[j]
            rate = self._rates[j - 1]
            s += (phys_x - last_phys_x) * rate
            j += 1
            last_phys_x = phys_x
        rate = self._rates[j - 1]
        s += (x - last_phys_x) * rate
        ret = s / self.get_total_recombination_rate()
        return ret * self._num_loci
    def physical_to_discrete_genetic(self, x):
        """Map physical coordinate x to the nearest integer locus."""
        return int(round(self.physical_to_genetic(x)))
    def genetic_to_physical(self, v):
        """Map genetic coordinate v in [0, num_loci] back to a physical
        coordinate by inverting the cumulative-mass mapping."""
        if self.get_total_recombination_rate() == 0:
            return self._genetic_to_physical_zero_rate(v)
        # v is expressed in [0, m]. Rescale it back into the range
        # (0, total_mass).
        u = (v / self._num_loci) * self.get_total_recombination_rate()
        s = 0
        last_phys_x = 0
        rate = self._rates[0]
        j = 1
        while j < len(self._positions) and s < u:
            phys_x = self._positions[j]
            rate = self._rates[j - 1]
            s += (phys_x - last_phys_x) * rate
            j += 1
            last_phys_x = phys_x
        y = last_phys_x
        if rate != 0:
            y = last_phys_x - (s - u) / rate
        return y
class TestCoordinateConversion(unittest.TestCase):
    """
    Tests that we convert coordinates correctly.

    Each case compares the C implementation (msprime.RecombinationMap)
    against the pure-Python reference implementation above.
    """
    def verify_coordinate_conversion(self, positions, rates, num_loci=10):
        """
        Verifies coordinate conversions by the specified RecombinationMap
        instance.
        """
        L = positions[-1]
        rm = msprime.RecombinationMap(positions, rates, num_loci)
        other_rm = PythonRecombinationMap(positions, rates, num_loci)
        if rm.get_size() == 2:
            # When we have very large numbers of loci, this calculations for
            # max distance is off by very small amounts, probably because of
            # machine precision. But if the expected diff is less than say, 10^-10
            # anyway, there's no point in worrying about it.
            max_discretisation_distance = max(1e-10, L / (2 * num_loci))
        else:
            # The above calculation works for a uniform map, but I couldn't
            # figure out how to generalise it. Cop out:
            max_discretisation_distance = L
        self.assertEqual(
            rm.get_total_recombination_rate(),
            other_rm.get_total_recombination_rate())
        # Probe a mix of random points, evenly spaced points, and the
        # breakpoints themselves.
        num_random_trials = 10
        num_systematic_trials = 10
        values = [L * random.random() for j in range(num_random_trials)]
        for j in range(num_systematic_trials):
            values.append(L * j * 1 / num_systematic_trials)
        values += positions
        for x in values:
            # x is a physical coordinate
            y = rm.physical_to_genetic(x)
            self.assertEqual(y, other_rm.physical_to_genetic(x))
            self.assertTrue(0 <= y <= num_loci)
            # Check if we can round trip approximately in real coordinates.
            xp = rm.genetic_to_physical(y)
            self.assertAlmostEqual(x, xp)
            # The different implementations might differ by very small amounts.
            self.assertAlmostEqual(xp, other_rm.genetic_to_physical(y))
            # Verify the discrete coordinate conversion.
            k = other_rm.physical_to_discrete_genetic(x)
            if y != 0.5:
                # Yuck. Glibc and Python seem to disagree on which way to round
                # when the argument is 1/2. Easiest just skip.
                self.assertEqual(rm.physical_to_discrete_genetic(x), k)
            self.assertTrue(0 <= k <= num_loci)
            x_hat = other_rm.genetic_to_physical(k)
            delta = abs(x - x_hat)
            self.assertGreaterEqual(max_discretisation_distance, delta)
    def test_zero_rate_two_intervals(self):
        # When we have a zero rate in some interval we no longer have a
        # bijective function, since all the physical coordinates in this
        # interval map to a single genetic coordinate.
        positions = [0, 0.25, 0.5, 0.75, 1]
        rates = [1, 0, 1, 0, 0]
        num_loci = 100
        maps = [
            msprime.RecombinationMap(positions, rates, num_loci),
            PythonRecombinationMap(positions, rates, num_loci)]
        for rm in maps:
            self.assertEqual(0.5, rm.get_total_recombination_rate())
            # Between 0 and 0.25 and 0.5 and 0.75 we should be able to map 1-1
            # in physical coordinates.
            for x in [0, 0.125, 0.25, 0.50001, 0.66, 0.75]:
                y = rm.physical_to_genetic(x)
                self.assertTrue(0 <= y <= num_loci)
                z = rm.genetic_to_physical(y)
                self.assertAlmostEqual(x, z)
            self.assertEqual(0, rm.physical_to_discrete_genetic(0))
            self.assertEqual(25, rm.physical_to_discrete_genetic(0.125))
            self.assertEqual(50, rm.physical_to_discrete_genetic(0.25))
            # Everything withinin the 0.25 to 0.5 interval should map to 50
            self.assertEqual(50, rm.physical_to_discrete_genetic(0.2500001))
            self.assertEqual(50, rm.physical_to_discrete_genetic(0.4))
            self.assertEqual(50, rm.physical_to_discrete_genetic(0.4999999))
            # The discretisation means that we can push values on one side of the
            # interval back to the other side.
            self.assertEqual(50, rm.physical_to_discrete_genetic(0.5000))
            self.assertEqual(51, rm.physical_to_discrete_genetic(0.505))
            # Anything above or equalto 0.75 should map to 100
            self.assertEqual(100, rm.physical_to_discrete_genetic(0.75))
            self.assertEqual(100, rm.physical_to_discrete_genetic(0.751))
            self.assertEqual(100, rm.physical_to_discrete_genetic(0.999))
            self.assertEqual(100, rm.physical_to_discrete_genetic(1.0))
            # All physical coordinates within the 0 region should map down to
            # the first point.
            for start, end in [(0.25, 0.5), (0.75, 1)]:
                for x in [start + delta for delta in [0, 0.01, 0.1]] + [end]:
                    y = rm.physical_to_genetic(x)
                    self.assertTrue(0 <= y <= num_loci)
                    z = rm.genetic_to_physical(y)
                    self.assertEqual(z, start)
                    # We should map exactly in discrete space.
                    k = rm.physical_to_discrete_genetic(x)
                    self.assertEqual(start, rm.genetic_to_physical(k))
    def test_zero_rate_start(self):
        positions = [0, 50, 100]
        rates = [0, 1, 0]
        num_loci = 50
        maps = [
            msprime.RecombinationMap(positions, rates, num_loci),
            PythonRecombinationMap(positions, rates, num_loci)]
        for rm in maps:
            # Anything <= 50 maps to 0
            for x in [0, 10, 49, 50]:
                self.assertEqual(0, rm.physical_to_genetic(x))
            self.assertEqual(0, rm.genetic_to_physical(0))
            # values > 50 should map to x - 50
            for x in [51, 55, 99, 100]:
                genetic_x = x - 50
                self.assertEqual(genetic_x, rm.physical_to_genetic(x))
                self.assertEqual(rm.genetic_to_physical(genetic_x), x)
    def test_zero_rate_end(self):
        positions = [0, 50, 100]
        rates = [1, 0, 0]
        num_loci = 50
        maps = [
            msprime.RecombinationMap(positions, rates, num_loci),
            PythonRecombinationMap(positions, rates, num_loci)]
        for rm in maps:
            # Anything <= 50 maps to x
            for x in [0, 10, 49, 50]:
                self.assertEqual(x, rm.physical_to_genetic(x))
                self.assertEqual(x, rm.genetic_to_physical(x))
            # values > 50 should map to 50
            for x in [51, 55, 99, 100]:
                self.assertEqual(50, rm.physical_to_genetic(x))
            self.assertEqual(50, rm.genetic_to_physical(50))
    def test_one_rate(self):
        # Uniform maps over a range of rates, lengths and locus counts.
        for num_loci in [1, 10, 1024, 2**31 - 1]:
            for rate in [0.1, 1.0, 10]:
                for L in [0.1, 1, 10, 1024, 1e6]:
                    positions = [0, L]
                    rates = [rate, 0]
                    rm = msprime.RecombinationMap(positions, rates, num_loci)
                    self.assertEqual(rate * L, rm.get_total_recombination_rate())
                    self.verify_coordinate_conversion(positions, rates, num_loci)
    def test_simple_map(self):
        for num_loci in [1, 10, 100, 1025, 2**32 - 1]:
            positions = [0, 0.25, 0.5, 0.75, 1]
            rates = [0.125, 0.25, 0.5, 0.75, 0]
            self.verify_coordinate_conversion(positions, rates, num_loci)
    def test_random_map(self):
        for size in [2, 3, 4, 100]:
            positions = [0] + sorted(
                random.random() for _ in range(size - 2)) + [1]
            rates = [random.random() for _ in range(size - 1)] + [0]
            self.verify_coordinate_conversion(positions, rates)
    def test_simple_examples(self):
        rm = msprime.RecombinationMap([0, 0.9, 1], [2, 1, 0], 10)
        self.assertAlmostEqual(rm.get_total_recombination_rate(), 1.9)
        rm = msprime.RecombinationMap([0, 0.5, 0.6, 1], [2, 1, 2, 0], 100)
        self.assertAlmostEqual(rm.get_total_recombination_rate(), 1.9)
    def test_integer_round_trip(self):
        # We should be able to round trip integer coordinates exactly using
        # a flat recombination map with the right number of loci.
        for L in [1, 10, 100]:
            for rate in [0.1, 1, 100]:
                maps = [
                    msprime.RecombinationMap.uniform_map(L, rate, num_loci=L),
                    PythonRecombinationMap([0, L], [rate, 0], L)]
                for rm in maps:
                    for x in range(L + 1):
                        self.assertEqual(x, rm.physical_to_discrete_genetic(x))
                        self.assertAlmostEqual(x, rm.genetic_to_physical(x))
    def test_single_locus(self):
        eps = 1e-14
        for L in [0.1, 0.99, 1.0, 2, 3.3333, 1e6]:
            for rate in [0.1, 1, 100]:
                maps = [
                    msprime.RecombinationMap.uniform_map(L, rate, num_loci=1),
                    PythonRecombinationMap([0, L], [rate, 0], 1)]
                for rm in maps:
                    self.assertEqual(0, rm.physical_to_discrete_genetic(0))
                    self.assertEqual(0, rm.physical_to_discrete_genetic(eps))
                    self.assertEqual(0, rm.physical_to_discrete_genetic(L / 4))
                    self.assertEqual(1, rm.physical_to_discrete_genetic(L))
                    self.assertEqual(1, rm.physical_to_discrete_genetic(L - eps))
                    self.assertEqual(1, rm.physical_to_discrete_genetic(L - L / 4))
    def test_zero_recombination_rate(self):
        eps = 1e-10
        for L in [0.1, 0.99, 1.0, 2, 3.3333, 1e6]:
            maps = [
                msprime.RecombinationMap.uniform_map(L, 0, num_loci=1),
                PythonRecombinationMap([0, L], [0, 0], 1)]
            for rm in maps:
                self.assertEqual(0, rm.physical_to_discrete_genetic(0))
                self.assertEqual(0, rm.physical_to_discrete_genetic(eps))
                self.assertEqual(0, rm.physical_to_discrete_genetic(L / 4))
                self.assertEqual(1, rm.physical_to_discrete_genetic(L))
                # Even things that are closer to L are mapped down to zero
                # because interval is empty. L only maps to 1 because the interval
                # is half-open and so L is outside of it.
                self.assertEqual(0, rm.physical_to_discrete_genetic(L - L / 4))
                self.assertEqual(0, rm.physical_to_discrete_genetic(L - eps))
    def test_zero_rate_many_loci(self):
        for L in [0.125, 1, 100]:
            positions = [0, L]
            rates = [0, 0]
            for m in [1, 10, 16]:
                maps = [
                    msprime.RecombinationMap(positions, rates, m),
                    PythonRecombinationMap(positions, rates, m)]
                for rm in maps:
                    self.assertEqual(0.0, rm.get_total_recombination_rate())
                    # Any physical value < L should map to 0
                    for x in np.array([0, 0.24, 0.33, 0.99]) * L:
                        self.assertEqual(rm.physical_to_genetic(x), 0)
                        self.assertEqual(rm.physical_to_discrete_genetic(x), 0)
                    self.assertEqual(rm.physical_to_discrete_genetic(L), m)
                    self.assertEqual(rm.physical_to_genetic(L), m)
                    # Any genetic value from 0 to L - 1 should map to 0 in physical
                    # coordinates
                    for y in range(0, m):
                        self.assertEqual(rm.genetic_to_physical(y), 0)
                    self.assertEqual(rm.genetic_to_physical(m), L)
class TestReadHapmap(unittest.TestCase):
    """
    Tests file reading code.

    Each case writes a small HapMap-format file (header line, then
    "chrom pos rate" rows) to a temp file and parses it back.
    """
    def setUp(self):
        # Create (and remember) a private temp file for each test.
        fd, self.temp_file = tempfile.mkstemp(suffix="msp_recomb_map")
        os.close(fd)
    def tearDown(self):
        # Best-effort cleanup; the file may already have been removed.
        try:
            os.unlink(self.temp_file)
        except Exception:
            pass
    def test_read_hapmap_simple(self):
        with open(self.temp_file, "w+") as f:
            print("HEADER", file=f)
            print("chr1 0 1", file=f)
            print("chr1 1 5 x", file=f)
            print("s    2 0 x x x", file=f)
        rm = msprime.RecombinationMap.read_hapmap(self.temp_file)
        self.assertEqual(rm.get_positions(), [0, 1, 2])
        # Rates in the file are cM/Mb; parsed values are per-bp (x 1e-8).
        self.assertEqual(rm.get_rates(), [1e-8, 5e-8, 0])
    def test_read_hapmap_nonzero_start(self):
        with open(self.temp_file, "w+") as f:
            print("HEADER", file=f)
            print("chr1 1 5 x", file=f)
            print("s    2 0 x x x", file=f)
        rm = msprime.RecombinationMap.read_hapmap(self.temp_file)
        self.assertEqual(rm.get_positions(), [0, 1, 2])
        self.assertEqual(rm.get_rates(), [0, 5e-8, 0])
    def test_read_hapmap_nonzero_end(self):
        # A non-zero rate on the final row is invalid and must raise.
        with open(self.temp_file, "w+") as f:
            print("HEADER", file=f)
            print("chr1 0 5 x", file=f)
            print("s    2 1 x x x", file=f)
        self.assertRaises(
            ValueError, msprime.RecombinationMap.read_hapmap, self.temp_file)
    def test_read_hapmap_gzipped(self):
        # read_hapmap must transparently handle gzip-compressed input.
        try:
            filename = self.temp_file + ".gz"
            with gzip.open(filename, "w+") as f:
                f.write(b"HEADER\n")
                f.write(b"chr1 0 1\n")
                f.write(b"chr1 1 5.5\n")
                f.write(b"s    2 0\n")
            rm = msprime.RecombinationMap.read_hapmap(filename)
            self.assertEqual(rm.get_positions(), [0, 1, 2])
            self.assertEqual(rm.get_rates(), [1e-8, 5.5e-8, 0])
        finally:
            os.unlink(filename)
| shajoezhu/msprime | tests/test_recombination_map.py | Python | gpl-3.0 | 17,689 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.