max_stars_repo_path (string, 3-269 chars) | max_stars_repo_name (string, 4-119 chars) | max_stars_count (int64, 0-191k) | id (string, 1-7 chars) | content (string, 6-1.05M chars) | score (float64, 0.23-5.13) | int_score (int64, 0-5)
---|---|---|---|---|---|---|
app/sub_app2/views.py | darklab8/darklab_fastapi | 0 | 12794351 | from fastapi import APIRouter
router = APIRouter()
@router.get("/items2/{item_id}")
async def read_item2(item_id: int):
return {"item_id": item_id}
| 2.578125 | 3 |
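The router above only does something once it is mounted on a FastAPI application. A minimal sketch of how that is typically done, assuming the file path app/sub_app2/views.py from the row metadata is importable as app.sub_app2.views:

from fastapi import FastAPI
from app.sub_app2.views import router as items2_router

app = FastAPI()
app.include_router(items2_router)
# GET /items2/42 now responds with {"item_id": 42}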
sim/script/plot_score.py | bx3/perigee-bandit | 0 | 12794352 | <filename>sim/script/plot_score.py
#!/usr/bin/env python
import sys
import matplotlib.pyplot as plt
import pandas as pd
from collections import defaultdict
if len(sys.argv) < 3:
print('require input_file, outfile')
sys.exit(1)
filename = sys.argv[1]
outname = sys.argv[2]
s_line = []
miners = []
with open(filename) as f:
for line in f:
if 'miners' in line:
s = line.index(':')+1
miners = line[s:].split()
else:
scores_str = line.split()
data = []
for score_str in scores_str:
m, conn, score = score_str.split(',')
data.append((int(m), int(conn), float(score)))
s_line.append(data)
print(miners)
print(s_line)
miners_scores = defaultdict(list)
indices = []
for line in s_line:
conns = []
for slot in line:
m, conn, score = slot
miners_scores[m].append(score)
conns.append(conn)
conns_str = [str(c) for c in conns]
indices.append(' '.join(conns_str))
num_epoch = len(s_line)
epochs = [str(i) for i in range(num_epoch)]
df = pd.DataFrame(data=miners_scores)
df.index = indices #['C1', 'C2', 'C3']
plt.style.use('ggplot')
ax = df.plot(stacked=True, kind='bar', figsize=(12, 8), rot='horizontal')
# miner_labels = [str(i) for i in miners]
# for i, scores in miners_scores.items():
# ax.bar(epochs, scores, label=str(i) )
plt.xticks(rotation=270)
ax.set_ylabel('')
# plt.xticks(epochs, indices, rotation=270)
ax.legend()
plt.savefig(outname)
| 2.65625 | 3 |
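For reference, the parser in plot_score.py implies an input file with one header line containing "miners" and a colon, followed by one line per epoch of space-separated miner,connection,score triples. A minimal sketch that writes such a file (the file name and values are made up):

sample = (
    "miners: 0 1 2\n"
    "0,5,0.81 1,3,0.42 2,7,0.10\n"
    "0,4,0.79 1,6,0.55 2,2,0.33\n"
)
with open("scores.txt", "w") as f:
    f.write(sample)
# then run: python plot_score.py scores.txt scores.png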
tests/tests_custom_indicator.py | JustalK/Indicators | 0 | 12794353 | from src.init import Init
import pytest
def test_file1_method1():
x=5
y=6
    assert x + 1 == y, "x + 1 should equal y"
def test_file1_method2():
x=5
y=6
    assert x + 1 == y, "x + 1 should equal y"
| 2.390625 | 2 |
piCode/rotary_class.py | aaravzen/Synthy | 1 | 12794354 | <reponame>aaravzen/Synthy
#!/usr/bin/env python3
#
# Raspberry Pi Rotary Encoder Class
# $Id: rotary_class.py,v 1.4 2021/04/23 08:15:57 bob Exp $
#
# Author : <NAME>
# Site : http://www.bobrathbone.com
#
# This class uses standard rotary encoder with push switch
#
#
import RPi.GPIO as GPIO
class RotaryEncoder:
CLOCKWISE=1
ANTICLOCKWISE=2
BUTTONDOWN=3
BUTTONUP=4
rotary_a = 0
rotary_b = 0
rotary_c = 0
last_state = 0
direction = 0
# Initialise rotary encoder object
def __init__(self,pinA,pinB,button,callback,name):
self.pinA = pinA
self.pinB = pinB
self.button = button
self.callback = callback
self.name = name
GPIO.setmode(GPIO.BCM)
# The following lines enable the internal pull-up resistors
# on version 2 (latest) boards
GPIO.setwarnings(False)
GPIO.setup(self.pinA, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(self.pinB, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(self.button, GPIO.IN, pull_up_down=GPIO.PUD_UP)
# For version 1 (old) boards comment out the above four lines
# and un-comment the following 3 lines
#GPIO.setup(self.pinA, GPIO.IN)
#GPIO.setup(self.pinB, GPIO.IN)
#GPIO.setup(self.button, GPIO.IN)
# Add event detection to the GPIO inputs
GPIO.add_event_detect(self.pinA, GPIO.BOTH, callback=self.switch_event)
GPIO.add_event_detect(self.pinB, GPIO.BOTH, callback=self.switch_event)
GPIO.add_event_detect(self.button, GPIO.BOTH, callback=self.button_event, bouncetime=200)
return
# Call back routine called by switch events
def switch_event(self,switch):
if GPIO.input(self.pinA):
self.rotary_a = 1
else:
self.rotary_a = 0
if GPIO.input(self.pinB):
self.rotary_b = 1
else:
self.rotary_b = 0
# print(str(self.rotary_a) + str(self.rotary_b))
self.rotary_c = self.rotary_a ^ self.rotary_b
new_state = self.rotary_a * 4 + self.rotary_b * 2 + self.rotary_c * 1
delta = (new_state - self.last_state) % 4
self.last_state = new_state
event = 0
if delta == 1:
if self.direction == self.CLOCKWISE:
# print "Clockwise"
event = self.direction
else:
self.direction = self.CLOCKWISE
elif delta == 3:
if self.direction == self.ANTICLOCKWISE:
# print "Anticlockwise"
event = self.direction
else:
self.direction = self.ANTICLOCKWISE
if event > 0:
self.callback(event, self.name)
return
# Push button up event
def button_event(self,button):
if GPIO.input(button):
event = self.BUTTONUP
else:
event = self.BUTTONDOWN
self.callback(event, self.name)
return
# Get a switch state
def getSwitchState(self, switch):
return GPIO.input(switch)
# End of RotaryEncoder class
| 2.921875 | 3 |
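A hedged usage sketch for the RotaryEncoder class above; the BCM pin numbers and the knob name are placeholders, and it assumes rotary_class is importable:

from rotary_class import RotaryEncoder

def on_knob_event(event, name):
    if event == RotaryEncoder.CLOCKWISE:
        print(name, "turned clockwise")
    elif event == RotaryEncoder.ANTICLOCKWISE:
        print(name, "turned anticlockwise")
    elif event == RotaryEncoder.BUTTONDOWN:
        print(name, "button pressed")

volume_knob = RotaryEncoder(pinA=17, pinB=27, button=22,
                            callback=on_knob_event, name="volume")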
Buzznauts/__init__.py | eduardojdiniz/Buzznauts | 2 | 12794355 | #!/usr/bin/env python
# coding=utf-8
from __future__ import absolute_import, division, print_function
from .version import __version__ # noqa
from .Buzznauts import * # noqa
| 1.117188 | 1 |
chapter_8/produce_permutations.py | prabal1997/uva_judge | 1 | 12794356 | <reponame>prabal1997/uva_judge
import sys
import math
#NOTE: this program individually generates permutations iteratively
#string to be permuted
input_string = "pineapple";
not_unique = (len(input_string) > len(set(input_string)));
#number of nested loops we need, their starting and ending ranges, and their state
loop_count = len(input_string);
loop_ranges = [ (0, len(input_string)-1-idx) for idx in range(len(input_string)) ];
loop_states = [ 0 for idx in range(len(input_string)) ];
#increment the loop
def increment_loop():
#constants for notational convenience
LAST_INDEX = -1;
START_VALUE, END_VALUE = 0, 1;
#incrementing the loop, starting from the deepest loop
update_loop = True;
for loop_depth in range(len(input_string)-1, -1, -1):
#increment counter, check if other loops need to be updated
if (update_loop):
loop_states[loop_depth] = (loop_states[loop_depth]+1)%(loop_ranges[loop_depth][END_VALUE]+1);
update_loop = (loop_states[loop_depth] == 0);
#return 'False' if the entire loop has ended
return (not update_loop);
#reads the loop state, prints a permutations accordingly
def give_string_perm():
#convert string to list for easier processing
str_list = list(input_string);
#create a word using the list of all chars from the original word
used_letters = 0;
word_permute = "";
for loop_depth in range(len(input_string)):
#swap letters
a, b = str_list[loop_states[loop_depth]], str_list[len(input_string)-1-used_letters];
str_list[loop_states[loop_depth]], str_list[len(input_string)-1-used_letters] = b, a;
#increment counter, update word
used_letters += 1;
word_permute = word_permute + a;
#return the word
return word_permute;
#print each permutation
full_list = [];
for idx in range(math.factorial(len(input_string))):
full_list.append(give_string_perm());
increment_loop();
#we find the unique permutations, print them
if (not_unique):
full_list.sort();
full_list = [permutation for index, permutation in enumerate(full_list) if permutation!=full_list[index-1]];
for element in full_list:
print(element); | 3.546875 | 4 |
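A quick cross-check of the iterative generator above against the standard library; it assumes it runs in the same module, so input_string and the de-duplicated full_list are in scope:

import itertools
expected = sorted({"".join(p) for p in itertools.permutations(input_string)})
assert sorted(set(full_list)) == expected, "iterative permutations disagree with itertools"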
Chapter 02/438_chapter_2_basic_syntax.py | bpbpublications/A-Journey-to-Core-Python | 0 | 12794357 | <reponame>bpbpublications/A-Journey-to-Core-Python
# -*- coding: utf-8 -*-
"""Chapter 2 - Basic Syntax
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1ZCgjPQh22xnQbcARKoosyaj5Ul10efpa
"""
#Interactive Mode Programming
#On Linux
# 1. $ python
# 2. Python 3.3.2 (default, Dec 10 2013, 11:35:01)
# 3. [GCC 4.6.3] on Linux
# 4. Type "help", "copyright", "credits", or "license" for more information.
# On Windows
# 1. Python 3.4.3 (v3.4.3:9b73f1c3e601, Feb 24 2015, 22:43:06) [MSC v.1600 32 bit (Intel)] on win32
# 2. Type "copyright", "credits" or "license()" for more information.
# Program Output and Print Statement
# 1. print ("Hello, Python!")
# 1. print("%s is number %d" %("python",1))
# Script Mode Programming
# 1. #!/usr/bin/python3
# 2. print ("Hello, Python!")
# Multi-line Statements
# 1. total = item_one + \
# 2. item_two + \
# 3. item_three
# Quotations used in Python
# 1. Name = 'XYZ'
# 2. Fathername = "<NAME>"
# 3. About = """I love Python.
# 4. I can study it the whole day."""
# 5. print(Name)
# 6. print(Fathername)
# Comments in Python
# 1. #a is initialized to hello
# 2. a="hello"
# 3. #we will print hello now
# 4. print(a)
| 3.109375 | 3 |
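The commented examples above can be collected into one short runnable snippet; the concrete values for item_one, item_two and item_three are my own:

item_one, item_two, item_three = 1, 2, 3
total = item_one + \
        item_two + \
        item_three
Name = 'XYZ'
About = """I love Python.
I can study it the whole day."""
print("Hello, Python!")
print("%s is number %d" % ("python", 1))
print(total, Name, About)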
ML/code/linear_model.py | DistributedML/Biscotti | 61 | 12794358 | from __future__ import division
import numpy as np
import utils
import pdb
lammy = 0.1
verbose = 1
maxEvals = 10000
X = 0
y = 0
iteration = 1
alpha = 1e-2
d = 0
hist_grad = 0
def init(dataset):
global X
X = utils.load_dataset(dataset)['X']
global y
y = utils.load_dataset(dataset)['y']
global d
d = X.shape[1]
global hist_grad
hist_grad = np.zeros(d)
return d
def funObj(ww, X, y):
xwy = (X.dot(ww) - y)
f = 0.5 * xwy.T.dot(xwy)
g = X.T.dot(xwy)
return f, g
def funObjL2(ww, X, y):
xwy = (X.dot(ww) - y)
    f = 0.5 * xwy.T.dot(xwy) + 0.5 * lammy * ww.T.dot(ww)
    g = X.T.dot(xwy) + lammy * ww
return f, g
# Reports the direct change to w, based on the given one.
# Batch size could be 1 for SGD, or 0 for full gradient.
def privateFun(theta, ww, batch_size=0):
global iteration
    print('python iteration ' + str(iteration) + ' starting')
ww = np.array(ww)
# Define constants and params
nn, dd = X.shape
threshold = int(d * theta)
if batch_size > 0 and batch_size < nn:
idx = np.random.choice(nn, batch_size, replace=False)
else:
# Just take the full range
idx = range(nn)
f, g = funObj(ww, X[idx, :], y[idx])
# AdaGrad
global hist_grad
hist_grad += g**2
ada_grad = g / (1e-6 + np.sqrt(hist_grad))
# Determine the actual step magnitude
delta = -alpha * ada_grad
# Weird way to get NON top k values
if theta < 1:
param_filter = np.argpartition(
abs(delta), -threshold)[:d - threshold]
delta[param_filter] = 0
w_new = ww + delta
f_new, g_new = funObj(w_new, X[idx, :], y[idx])
    print('python iteration ' + str(iteration) + ' ending')
iteration = iteration + 1
return delta
def privateFunL2(theta, ww, batch_size=0):
global iteration
    print('python iteration ' + str(iteration) + ' starting')
ww = np.array(ww)
# Define constants and params
nn, dd = X.shape
threshold = int(d * theta)
if batch_size > 0 and batch_size < nn:
idx = np.random.choice(nn, batch_size, replace=False)
else:
# Just take the full range
idx = range(nn)
f, g = funObjL2(ww, X[idx, :], y[idx])
# AdaGrad
global hist_grad
hist_grad += g**2
ada_grad = g / (1e-6 + np.sqrt(hist_grad))
# Determine the actual step magnitude
delta = -alpha * ada_grad
# Weird way to get NON top k values
if theta < 1:
param_filter = np.argpartition(
abs(delta), -threshold)[:d - threshold]
delta[param_filter] = 0
w_new = ww + delta
f_new, g_new = funObjL2(w_new, X[idx, :], y[idx])
    print('python iteration ' + str(iteration) + ' ending')
iteration = iteration + 1
return delta | 2.5625 | 3 |
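The "NON top k" filtering used in both privateFun and privateFunL2 can be illustrated in isolation; the vector and threshold below are arbitrary:

import numpy as np

delta = np.array([0.05, -0.90, 0.10, 0.70, -0.02])
threshold = 2  # keep the two largest-magnitude updates
param_filter = np.argpartition(np.abs(delta), -threshold)[:delta.size - threshold]
delta[param_filter] = 0
print(delta)  # only the two largest-magnitude entries remain non-zero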
evan/site/views/errors.py | eillarra/evan | 0 | 12794359 | <filename>evan/site/views/errors.py
from django.shortcuts import render
from django.views.decorators.csrf import requires_csrf_token
from sentry_sdk import last_event_id
@requires_csrf_token
def server_error(request):
return render(
request,
"errors/500.html",
{
"sentry_event_id": last_event_id(),
},
status=500,
)
| 1.984375 | 2 |
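For Django to serve this view on server errors it must be registered as handler500 in the project's root URLconf; the dotted path below is inferred from the file location and may differ in the real project:

# urls.py (project root) - sketch
handler500 = "evan.site.views.errors.server_error"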
DPGAnalysis/Skims/python/DoubleMuon_cfg.py | ckamtsikis/cmssw | 852 | 12794360 | <reponame>ckamtsikis/cmssw<gh_stars>100-1000
import FWCore.ParameterSet.Config as cms
process = cms.Process("TEST")
process.source = cms.Source("PoolSource",
fileNames = cms.untracked.vstring('file:/afs/cern.ch/cms/CAF/CMSCOMM/COMM_GLOBAL/CRUZET3/CMSSW_2_1_2/src/DPGAnalysis/Skims/python/reco_50908_210_CRZT210_V1P.root')
)
process.configurationMetadata = cms.untracked.PSet(
version = cms.untracked.string('$Revision: 1.5 $'),
name = cms.untracked.string('$Source: /cvs_server/repositories/CMSSW/CMSSW/DPGAnalysis/Skims/python/DoubleMuon_cfg.py,v $'),
annotation = cms.untracked.string('CRUZET4 DoubleMuon skim')
)
process.maxEvents = cms.untracked.PSet(input = cms.untracked.int32(-1))
process.options = cms.untracked.PSet(wantSummary = cms.untracked.bool(True))
process.load("Configuration.StandardSequences.MagneticField_cff")
process.load("Configuration.StandardSequences.Geometry_cff")
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")
process.GlobalTag.globaltag = 'CRZT210_V1::All'
process.prefer("GlobalTag")
process.load("Configuration.StandardSequences.ReconstructionCosmics_cff")
process.doubleMuonFilter = cms.EDFilter("TrackCountFilter",
src = cms.InputTag('cosmicMuonsBarrelOnly'),
minNumber = cms.uint32(2)
)
process.doubleMuonPath = cms.Path(process.doubleMuonFilter)
process.out = cms.OutputModule("PoolOutputModule",
SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('doubleMuonPath')),
dataset = cms.untracked.PSet(
dataTier = cms.untracked.string('RECO'),
filterName = cms.untracked.string('doubleMuonPath')),
fileName = cms.untracked.string('doubleMuon.root')
)
process.this_is_the_end = cms.EndPath(process.out)
| 1.53125 | 2 |
generate_password.py | smlerman/generate_password | 0 | 12794361 | <reponame>smlerman/generate_password
#!/usr/bin/python3
import argparse
import random
import string
import sys
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument("-l", "--length", dest="length", type=int, required=True)
arg_parser.add_argument("-s", "--symbols", dest="symbols", required=False, action="store_true")
arg_parser.add_argument("--all-symbols", dest="all_symbols", required=False, action="store_true")
args = arg_parser.parse_args()
symbols = '!@#$%^&*'
all_symbols = ''.join(chr(c) for c in range(ord('!'), ord('@') + 1))  # ASCII characters from '!' to '@', as a string so it can be concatenated below
chars = string.ascii_letters + string.digits
if args.symbols:
chars += symbols
if args.all_symbols:
chars += all_symbols
chars = list(chars)
password = random.choices(chars, k=args.length)
print("".join(password))
| 3.5 | 4 |
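Typical invocations, based on the argparse flags defined above (each run produces a different password):

# $ python3 generate_password.py --length 16
# $ python3 generate_password.py --length 24 --symbols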
edzapp/settings.py | jamesadney/edzapp-scraper | 1 | 12794362 | # Scrapy settings for edzapp project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/topics/settings.html
#
from edzapp import constants
BOT_NAME = 'edzapp'
BOT_VERSION = '1.0'
SPIDER_MODULES = ['edzapp.spiders']
NEWSPIDER_MODULE = 'edzapp.spiders'
USER_AGENT = 'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.1) Gecko/2008071615 Fedora/3.0.1-1.fc9 Firefox/3.0.1'
ITEM_PIPELINES = [
# 'edzapp.pipelines.DjangoJobPipeline',
]
DOWNLOAD_DELAY = 3
ROLE = constants.ROLES['TEACHER/CLASSIFIED']
PARSE_JOB_PAGES = True
import sys
import os
# Directory containing django project
PROJECT_ROOT = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(PROJECT_ROOT, 'django_edzapp'))
# Set the django settings environment variable
os.environ['DJANGO_SETTINGS_MODULE'] = 'django_edzapp.settings'
try:
from local_settings import *
except ImportError:
pass
| 1.773438 | 2 |
tests/_jobs/hmmsearch/steps/test_hmm.py | agonopol/pyspark-hmm-search | 0 | 12794363 | from jobs.hmmsearch.steps.hmm import parse
def test_parse_hmm():
stream = parse(open("/data/src/tara-ocean-analysis/data/Pfam-A.hmm"))
hmms = [hmm for hmm in stream]
assert len(hmms) > 0
assert hmms[0]['EFFN'] == '19.774048'
| 2.484375 | 2 |
clovek.py | SamoFMF/Five-in-a-row | 0 | 12794364 | ####################
## IGRALEC ČLOVEK ##
####################
class Clovek():
def __init__(self, gui):
self.gui = gui
def igraj(self):
# To metodo kliče GUI, ko je igralec na potezi.
# Ko je clovek na potezi, čakamo na uporabniški
# vmesnik, da sporoči, da je uporabnik kliknil na
# ploščo.
pass
def prekini(self):
# To metodo kliče GUI, če je treba prekiniti razmišljanje.
# Človek jo lahko ignorira.
pass
def klik(self, p):
# Povlečemo potezo. Če ni veljavna, se ne bo zgodilo nič.
self.gui.povleci_potezo(p) | 2.65625 | 3 |
characters/forms.py | Sult/evehub | 0 | 12794365 | <filename>characters/forms.py
from django import forms
from .models import CharacterJournal, RefType
class FilterJournalForm(forms.Form):
def __init__(self, *args, **kwargs):
self.characterapi = kwargs.pop("characterapi")
super(FilterJournalForm, self).__init__(*args, **kwargs)
self.fields['filter'] = forms.ChoiceField(
required=False,
choices=self.filter_choices(),
)
def filter_choices(self):
choices = []
reftypes = CharacterJournal.objects.filter(
characterapi=self.characterapi
).order_by().values_list("reftypeid", flat=True).distinct()
for reftypeid in reftypes:
try:
name = RefType.objects.get(reftypeid=reftypeid).reftypename
except RefType.DoesNotExist:
name = "Unknown"
choices.append([reftypeid, name])
            print(reftypeid, name)
        # sort alphabetically and add the catch-all "Everything" option first
choices = sorted(choices, key=lambda x: x[1])
choices.insert(0, [None, "Everything"])
return choices
| 2.15625 | 2 |
regression/fwiNET_classifier.py | JayanthMouli/fwi-NET | 2 | 12794366 | <filename>regression/fwiNET_classifier.py<gh_stars>1-10
#fwiNET Random Forest classifier
#created by <NAME> 2019
###########################################################################################################################################
import numpy as np
import pandas
from keras.layers import Dense, Activation
from keras.models import Sequential
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
import matplotlib.pyplot as plt
from sklearn.metrics import classification_report, confusion_matrix, accuracy_score
#read data
dataframe = pandas.read_csv("fwinormalizedwithclass.csv").dropna()#.astype(np.float32)
dataset = dataframe.values
X = dataset[:,2:7]
y = dataset[:,8] #FIRE_SIZE_CLASS
#split train, test data
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.08, random_state = 0)
#initialize regressor as RF classifier
regressor = RandomForestClassifier(n_estimators=20, random_state=0)
#train regressor
regressor.fit(X_train, y_train)
#predict test array
y_pred = regressor.predict(X_test)
#print accuracy metrics
print(confusion_matrix(y_test,y_pred))
print(classification_report(y_test,y_pred))
print(accuracy_score(y_test, y_pred))
#predict over the full dataset
finalpred = regressor.predict(X)
n1 = 0
n2 = 0
for x in range(0,6000): #test the first 6000 elements of the data (total data is near 6000)
if finalpred[x] == y[x]: #prediction is correct
n1 = n1 + 1
else: #prediction is incorrect
n2 = n2 + 1
#concatonate string
s = 'accuracy = ' + str(n1) + '/' + str(n1+n2)
#plot results as bar graph
plt.bar(['Correct Classification','Incorrect Classification'], [n1, n2])
plt.title('Random Forest Classification Results')
plt.text(0.5, 100, s , fontsize=12)
plt.show()
# plt.plot(y_test, color = 'red', label = 'Real data')
# plt.plot(y_pred, color = 'blue', label = 'Predicted data')
# plt.title('Prediction')
# plt.legend()
| 2.703125 | 3 |
2021/Day9/cave.py | dh256/adventofcode | 0 | 12794367 | <gh_stars>0
import re
class Cave:
def __init__(self,filename):
with open(filename,'r') as input_file:
lines = [line.strip('\n') for line in input_file]
self.locations = []
for line in lines:
self.locations.append([int(loc) for loc in re.findall(r'\d',line)])
self.max_y = len(lines)
self.max_x = len(self.locations[0])
def _find_low_points(self):
self.low_point_locations = [] # used in Part 2
for y in range(0,self.max_y):
for x in range(0,self.max_x):
north = 9
south = 9
east = 9
west = 9
curr_val = self.locations[y][x]
if y > 0:
north = self.locations[y-1][x]
if y < self.max_y-1:
south = self.locations[y+1][x]
if x < self.max_x-1:
east = self.locations[y][x+1]
if x > 0:
west = self.locations[y][x-1]
if (curr_val < north) and (curr_val < south) and (curr_val < east) and (curr_val < west):
self.low_point_locations.append((y,x))
def find_risk_level(self):
# find low points
self._find_low_points()
# calculate risk level
return sum([self.locations[loc[0]][loc[1]] for loc in self.low_point_locations]) + len(self.low_point_locations)
def find_largest_basins(self,number):
# find low points
self._find_low_points()
basin_counts = []
num_points = 0
for low_point_loc in self.low_point_locations:
self.basin_points_count = 1 # reset basin point count
self.locations_visited = [] # reset list of locations already visited
self._higher_points(low_point_loc) # find all the higher points for this low point
basin_counts.append(self.basin_points_count)
result = 1
for p in sorted(basin_counts,reverse=True)[0:number]:
result *= p
return result
def _higher_points(self,location):
# for each adjacent point (North, South, East, West
# check if higher (unless 9) and recurse if not already visited
self.locations_visited.append(location)
y = location[0]
x = location[1]
curr_val = self.locations[y][x]
# North
if not (y-1,x) in self.locations_visited and y > 0 and curr_val < self.locations[y-1][x] and self.locations[y-1][x] != 9:
self.basin_points_count += 1
self._higher_points((y-1,x))
# South
if not (y+1,x) in self.locations_visited and y < self.max_y-1 and curr_val < self.locations[y+1][x] and self.locations[y+1][x] != 9:
self.basin_points_count += 1
self._higher_points((y+1,x))
# East
if not (y,x+1) in self.locations_visited and x < self.max_x-1 and curr_val < self.locations[y][x+1] and self.locations[y][x+1] != 9:
self.basin_points_count += 1
self._higher_points((y,x+1))
# West
if not (y,x-1) in self.locations_visited and x > 0 and curr_val < self.locations[y][x-1] and self.locations[y][x-1] != 9:
self.basin_points_count += 1
self._higher_points((y,x-1))
| 3.1875 | 3 |
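A minimal driver sketch for the Cave class above; "input.txt" is a placeholder for the Advent of Code day 9 puzzle input:

if __name__ == "__main__":
    cave = Cave("input.txt")
    print("Part 1 - risk level:", cave.find_risk_level())
    print("Part 2 - product of the three largest basins:", cave.find_largest_basins(3))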
app.py | manisha-jaiswal/Artificial-Intelligence-and-COVID-19-Deep-Learning-Approaches-for-Diagnosis-and-Treatment | 0 | 12794368 | #!/usr/bin/env python
import os
import sys
from flask import Flask, request, jsonify, send_file, render_template
from io import BytesIO #used to help file-related input and output operations
from PIL import Image, ImageOps #Pillow is used for image processing
import base64 #used to encode binary file such as image with script
import urllib #used to read the url in python
import numpy as np #used for working with array
import scipy.misc #uses the Python Imaging Library to read images
from tensorflow.keras.preprocessing import image
from tensorflow.keras.models import load_model
import os
import tensorflow as tf #used to develop and train models in Python and deploy them to the cloud or a database
import numpy as np
from tensorflow import keras #used to provide the python interface for ANN
from skimage import io #used for input output processing
from tensorflow.keras.preprocessing import image
# Flask utils
from flask import Flask, redirect, url_for, request, render_template #used for web development
from werkzeug.utils import secure_filename #returns a secure version of a filename
from gevent.pywsgi import WSGIServer #used to handle many requests concurrently
from tensorflow.keras.models import load_model #used for loading image in webdesk
app = Flask(__name__)
MODEL_PATH ='model.h5'
# Load your trained model
model = load_model(MODEL_PATH)
def model_predict(img_path, model):
img = image.load_img(img_path, grayscale=False, target_size=(64, 64))
show_img = image.load_img(img_path, grayscale=False, target_size=(64, 64))
x = image.img_to_array(img) #Converts a PIL Image instance to a Numpy array.
x = np.expand_dims(x, axis=0)
x = np.array(x, 'float32') #converted the elements to float32
x /= 255
preds = model.predict(x)
return preds
@app.route("/")
@app.route('/first')
def first():
return render_template('first.html')
@app.route('/abstract')
def abstract():
return render_template('abstract.html')
@app.route('/future')
def future():
return render_template('future.html')
@app.route('/login')
def login():
return render_template('login.html')
@app.route('/pie')
def pie():
return render_template('pie.html')
@app.route('/chart')
def chart():
return render_template('chart.html')
@app.route("/index",methods=['GET'])
def index():
return render_template('index.html')
@app.route("/upload", methods=['POST'])
def upload_file():
print("Hello")
try:
img = Image.open(BytesIO(request.files['imagefile'].read())).convert('RGB')
img = ImageOps.fit(img, (224, 224), Image.ANTIALIAS)
except:
error_msg = "Please choose an image file!"
return render_template('index.html', **locals())
# Call Function to predict
args = {'input' : img}
out_pred, out_prob = predict(args)
out_prob = out_prob * 100
print(out_pred, out_prob)
danger = "danger"
if out_pred=="You Are Safe, But Do keep precaution":
danger = "success"
print(danger)
img_io = BytesIO()
img.save(img_io, 'PNG')
png_output = base64.b64encode(img_io.getvalue())
processed_file = urllib.parse.quote(png_output)
return render_template('result.html',**locals())
def predict(args):
img = np.array(args['input']) / 255.0
img = np.expand_dims(img, axis = 0)
model = 'covid_model_v4.h5'
# Load weights into the new model
model = load_model(model)
pred = model.predict(img)
if np.argmax(pred, axis=1)[0] == 1:
out_pred = 'You Are Safe, But Do keep precaution'
else:
out_pred = 'You may have Coronavirus'
return out_pred, float(np.max(pred))
@app.route('/second', methods=['GET'])
#used for ct scan
def second():
# Main page
return render_template('second.html')
@app.route('/predict', methods=['GET', 'POST'])
def upload():
if request.method == 'POST':
# Get the file from post request
f = request.files['file']
# Save the file to ./uploads
basepath = os.path.dirname(__file__)
file_path = os.path.join(
basepath, 'uploads', secure_filename(f.filename))
f.save(file_path)
# Make prediction
preds = model_predict(file_path, model)
print(preds[0])
# x = x.reshape([64, 64]);
disease_class = ['Covid-19','Non Covid-19']
a = preds[0]
ind=np.argmax(a)
print('Prediction:', disease_class[ind])
result=disease_class[ind]
return result
return None
if __name__ == '__main__':
app.run(debug=True)
| 2.515625 | 3 |
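A client-side sketch for the /predict endpoint above, assuming the app runs locally on the default Flask port and that a file named "ct_scan.png" exists; the route expects a multipart upload under the key "file":

import requests

with open("ct_scan.png", "rb") as fh:
    resp = requests.post("http://127.0.0.1:5000/predict", files={"file": fh})
print(resp.text)  # "Covid-19" or "Non Covid-19"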
main.py | AndrVLDZ/Cost_planning_program | 0 | 12794369 | <gh_stars>0
import SQLite_tools as sq
import sqlite3
from sqlite3 import Error
from dataclasses import dataclass
from typing import Any
from rich.console import Console
from rich.table import Table
import traceback
@dataclass(frozen=True)
class data:
db: str = 'Fare.db'
menu_1 = {
1: 'Открыть таблицу',
2: 'Добавить таблицу',
3: 'Удалить таблицу',
5: 'Выход',
}
menu_2 = {
1: 'Открыть',
2: 'Назад',
3: 'Изменить трату',
4: 'Удалить трату',
5: 'Выход',
}
menu_options = {
1: 'Показать таблицу',
2: 'Добавить',
3: 'Изменить',
4: 'Удалить',
5: 'Выход',
}
console: Any = Console()
def menu() -> None:
print('\n')
for key in data.menu_options.keys():
console.print(key, '--', data.menu_options[key])
print('\n')
def calculation(table) -> int:
with sqlite3.connect(data.db) as db:
c = db.cursor()
c.execute(f"SELECT price, value FROM {table}")
records = c.fetchall()
res = 0
for row in records:
res += row[0] * row[1]
return res
def table_print_rich(table: str, title: str, column_1: str, column_2: str, column_3: str):
rtable = Table(title=title, show_header=True, header_style='bold blue')
rtable.add_column(column_1)
rtable.add_column(column_2)
rtable.add_column(column_3)
rows = sq.rows_cnt(table)
for row in range(rows):
rtable.add_row(str(sq.read_data(table,row,1)), str(sq.read_data(table,row,2)), str(sq.read_data(table,row,3)), style='yellow')
rtable.add_row('Итого', str(calculation(table)), str(sq.rows_cnt(table)), style='bold blue')
global console
console.print(rtable)
def dialog():
try:
global console
option = int(input('Выберите пункт меню: '))
table = 'Tab_1'
if option == 1:
cnt = sq.rows_cnt(table)
if cnt == 0:
console.print('В таблице нет записей', style='bold red')
table_print_rich(table, 'Таблица доходов и расходов', 'Тип', 'Цена', 'Кол-во')
else:
table_print_rich(table, 'Таблица доходов и расходов', 'Тип', 'Цена', 'Кол-во')
menu()
dialog()
elif option == 2:
console.print('\nВведите данные')
answ: int = ''
row_data = []
while answ != 0:
type = str(input('Название: '))
price = int(input('Цена: '))
value = int(input('Кол-во: '))
row_data.append((type,price,value))
console.print('Запись добавлена', style='bold green')
try:
answ = int(input('\nЗакончить ввод - 0 \nПродолжить - 1\n -> '))
except:
console.print('Введите 0 или 1:\n', traceback.format_exc())
console.print("\nВсего записей добавлено: ", sq.insert_data(table, row_data, 'type, price, value'), style="bold green")
menu()
dialog()
elif option == 3:
console = Console()
console.print("Расходы: ", str(calculation(table)), style='bold red')
menu()
dialog()
elif option == 4:
answ: int = ''
row_data = []
while answ != 0:
type = str(input('\nВведите тип для удаления: '))
row_data.append(type)
console.print('Запись добавлена на удаление', style='red')
try:
answ = int(input('\nЗакончить ввод - 0 \nПродолжить - 1\n -> '))
except:
console.print('Введите 0 или 1:\n', traceback.format_exc())
try:
print(row_data)
console.print("\nВсего записей удалено: ", sq.remove_rows_by_names(table, 'type', row_data), style="bold red")
except:
console.print('Ошибка в одном из типов:\n', traceback.format_exc())
menu()
dialog()
return
elif option == 5:
console.print('Работа программы завершена')
return
else:
console.print('Такого пункта нет, введите целое число от 1 до 4')
menu()
dialog()
except:
console.print('Введите цифру:\n', traceback.format_exc())
menu()
dialog()
if __name__ == '__main__':
sq.db('Fare.db')
rows = ('id INTEGER PRIMARY KEY, type TEXT, price INTEGER, value INTEGER')
sq.create_table('Tab_1', rows)
# sq.remove_rows_by_id('Tab_1', [1])
menu()
dialog()
| 3.1875 | 3 |
Pacote download/PythonExercíciosHARDMODE/ex014HM.py | RodrigoMASRamos/Projects.py | 0 | 12794370 | #ex014: Escreva um programa que converta um a temperatura digitada em ºC e converta para ºF
| 2.53125 | 3 |
example/03_GenerateRt.py | jaedong27/calipy | 1 | 12794371 | import numpy as np
import cv2
import glob
import sys
sys.path.append("../")
import calipy
Rt_path = "./CameraData/Rt.json"
TVRt_path = "./Cameradata/TVRt.json"
Rt_back_to_front = calipy.Transform(Rt_path).inv()
Rt_TV_to_back = calipy.Transform(TVRt_path)
Rt_TV_to_front = Rt_back_to_front.dot(Rt_TV_to_back)
#origin = calipy.Transform()
#ren = calipy.vtkRenderer()
#ren.addCamera("front_cam", Rt_TV_to_front.inv().R, Rt_TV_to_front.inv().T, cs=0.3)
#ren.addCamera("back_cam", Rt_TV_to_back.inv().R, Rt_TV_to_back.inv().T, cs=0.5)
#TV_width = 1.70
#TV_height = 0.95
#objectPoints = np.array( [ [0,0,0],
# [TV_width, 0, 0],
# [0, TV_height,0],
# [TV_width, TV_height, 0] ] ).astype(np.float64)
#tvpoints_on_camera = np.transpose(objectPoints)
#ren.addLines("TV", np.transpose(tvpoints_on_camera), [0,1,3,2,0])
##ren.addCamera("TV_origin", TVRt.R, TVRt.T, cs=0.5)
#ren.render()
#exit()
origin = calipy.Transform()
ren = calipy.vtkRenderer()
ren.addCamera("front_cam", cs=0.5)
ren.addCamera("back_cam", Rt_back_to_front.R, Rt_back_to_front.T, cs=0.5)
TV_width = 1.70
TV_height = 0.95
objectPoints = np.array( [ [0,0,0],
[TV_width, 0, 0],
[0, TV_height,0],
[TV_width, TV_height, 0] ] ).astype(np.float64)
tvpoints_on_camera = Rt_TV_to_front.move(np.transpose(objectPoints))
ren.addLines("TV", np.transpose(tvpoints_on_camera), [0,1,3,2,0])
#ren.addCamera("TV_origin", TVRt.R, TVRt.T, cs=0.5)
ren.render()
Rt_back_to_front.saveJson("./CameraData/Rt_back_to_front.json")
Rt_TV_to_front.saveJson("./CameraData/Rt_TV_to_front.json") | 2.390625 | 2 |
array_api_tests/pytest_helpers.py | asmeurer/array-api-tests | 0 | 12794372 | <filename>array_api_tests/pytest_helpers.py<gh_stars>0
import math
from inspect import getfullargspec
from typing import Any, Dict, Optional, Sequence, Tuple, Union
from . import _array_module as xp
from . import dtype_helpers as dh
from . import shape_helpers as sh
from . import stubs
from .typing import Array, DataType, Scalar, ScalarType, Shape
__all__ = [
"raises",
"doesnt_raise",
"nargs",
"fmt_kw",
"is_pos_zero",
"is_neg_zero",
"assert_dtype",
"assert_kw_dtype",
"assert_default_float",
"assert_default_int",
"assert_default_index",
"assert_shape",
"assert_result_shape",
"assert_keepdimable_shape",
"assert_0d_equals",
"assert_fill",
"assert_array_elements",
]
def raises(exceptions, function, message=""):
"""
Like pytest.raises() except it allows custom error messages
"""
try:
function()
except exceptions:
return
except Exception as e:
if message:
raise AssertionError(
f"Unexpected exception {e!r} (expected {exceptions}): {message}"
)
raise AssertionError(f"Unexpected exception {e!r} (expected {exceptions})")
raise AssertionError(message)
def doesnt_raise(function, message=""):
"""
The inverse of raises().
Use doesnt_raise(function) to test that function() doesn't raise any
exceptions. Returns the result of calling function.
"""
if not callable(function):
raise ValueError("doesnt_raise should take a lambda")
try:
return function()
except Exception as e:
if message:
raise AssertionError(f"Unexpected exception {e!r}: {message}")
raise AssertionError(f"Unexpected exception {e!r}")
def nargs(func_name):
return len(getfullargspec(stubs.name_to_func[func_name]).args)
def fmt_kw(kw: Dict[str, Any]) -> str:
return ", ".join(f"{k}={v}" for k, v in kw.items())
def is_pos_zero(n: float) -> bool:
return n == 0 and math.copysign(1, n) == 1
def is_neg_zero(n: float) -> bool:
return n == 0 and math.copysign(1, n) == -1
def assert_dtype(
func_name: str,
in_dtype: Union[DataType, Sequence[DataType]],
out_dtype: DataType,
expected: Optional[DataType] = None,
*,
repr_name: str = "out.dtype",
):
"""
Assert the output dtype is as expected.
If expected=None, we infer the expected dtype as in_dtype, to test
out_dtype, e.g.
>>> x = xp.arange(5, dtype=xp.uint8)
>>> out = xp.abs(x)
>>> assert_dtype('abs', x.dtype, out.dtype)
is equivalent to
>>> assert out.dtype == xp.uint8
Or for multiple input dtypes, the expected dtype is inferred from their
resulting type promotion, e.g.
>>> x1 = xp.arange(5, dtype=xp.uint8)
>>> x2 = xp.arange(5, dtype=xp.uint16)
>>> out = xp.add(x1, x2)
>>> assert_dtype('add', [x1.dtype, x2.dtype], out.dtype)
is equivalent to
>>> assert out.dtype == xp.uint16
We can also specify the expected dtype ourselves, e.g.
>>> x = xp.arange(5, dtype=xp.int8)
>>> out = xp.sum(x)
>>> default_int = xp.asarray(0).dtype
    >>> assert_dtype('sum', x.dtype, out.dtype, default_int)
"""
in_dtypes = in_dtype if isinstance(in_dtype, Sequence) and not isinstance(in_dtype, str) else [in_dtype]
f_in_dtypes = dh.fmt_types(tuple(in_dtypes))
f_out_dtype = dh.dtype_to_name[out_dtype]
if expected is None:
expected = dh.result_type(*in_dtypes)
f_expected = dh.dtype_to_name[expected]
msg = (
f"{repr_name}={f_out_dtype}, but should be {f_expected} "
f"[{func_name}({f_in_dtypes})]"
)
assert out_dtype == expected, msg
def assert_kw_dtype(func_name: str, kw_dtype: DataType, out_dtype: DataType):
"""
Assert the output dtype is the passed keyword dtype, e.g.
>>> kw = {'dtype': xp.uint8}
>>> out = xp.ones(5, **kw)
>>> assert_kw_dtype('ones', kw['dtype'], out.dtype)
"""
f_kw_dtype = dh.dtype_to_name[kw_dtype]
f_out_dtype = dh.dtype_to_name[out_dtype]
msg = (
f"out.dtype={f_out_dtype}, but should be {f_kw_dtype} "
f"[{func_name}(dtype={f_kw_dtype})]"
)
assert out_dtype == kw_dtype, msg
def assert_default_float(func_name: str, out_dtype: DataType):
"""
Assert the output dtype is the default float, e.g.
>>> out = xp.ones(5)
>>> assert_default_float('ones', out.dtype)
"""
f_dtype = dh.dtype_to_name[out_dtype]
f_default = dh.dtype_to_name[dh.default_float]
msg = (
f"out.dtype={f_dtype}, should be default "
f"floating-point dtype {f_default} [{func_name}()]"
)
assert out_dtype == dh.default_float, msg
def assert_default_int(func_name: str, out_dtype: DataType):
"""
Assert the output dtype is the default int, e.g.
>>> out = xp.full(5, 42)
>>> assert_default_int('full', out.dtype)
"""
f_dtype = dh.dtype_to_name[out_dtype]
f_default = dh.dtype_to_name[dh.default_int]
msg = (
f"out.dtype={f_dtype}, should be default "
f"integer dtype {f_default} [{func_name}()]"
)
assert out_dtype == dh.default_int, msg
def assert_default_index(func_name: str, out_dtype: DataType, repr_name="out.dtype"):
"""
Assert the output dtype is the default index dtype, e.g.
>>> out = xp.argmax(xp.arange(5))
    >>> assert_default_index('argmax', out.dtype)
"""
f_dtype = dh.dtype_to_name[out_dtype]
msg = (
f"{repr_name}={f_dtype}, should be the default index dtype, "
f"which is either int32 or int64 [{func_name}()]"
)
assert out_dtype in (xp.int32, xp.int64), msg
def assert_shape(
func_name: str,
out_shape: Union[int, Shape],
expected: Union[int, Shape],
/,
repr_name="out.shape",
**kw,
):
"""
Assert the output shape is as expected, e.g.
>>> out = xp.ones((3, 3, 3))
>>> assert_shape('ones', out.shape, (3, 3, 3))
"""
if isinstance(out_shape, int):
out_shape = (out_shape,)
if isinstance(expected, int):
expected = (expected,)
msg = (
f"{repr_name}={out_shape}, but should be {expected} [{func_name}({fmt_kw(kw)})]"
)
assert out_shape == expected, msg
def assert_result_shape(
func_name: str,
in_shapes: Sequence[Shape],
out_shape: Shape,
/,
expected: Optional[Shape] = None,
*,
repr_name="out.shape",
**kw,
):
"""
Assert the output shape is as expected.
If expected=None, we infer the expected shape as the result of broadcasting
in_shapes, to test against out_shape, e.g.
>>> out = xp.add(xp.ones((3, 1)), xp.ones((1, 3)))
    >>> assert_result_shape('add', [(3, 1), (1, 3)], out.shape)
is equivalent to
>>> assert out.shape == (3, 3)
"""
if expected is None:
expected = sh.broadcast_shapes(*in_shapes)
f_in_shapes = " . ".join(str(s) for s in in_shapes)
f_sig = f" {f_in_shapes} "
if kw:
f_sig += f", {fmt_kw(kw)}"
msg = f"{repr_name}={out_shape}, but should be {expected} [{func_name}({f_sig})]"
assert out_shape == expected, msg
def assert_keepdimable_shape(
func_name: str,
in_shape: Shape,
out_shape: Shape,
axes: Tuple[int, ...],
keepdims: bool,
/,
**kw,
):
"""
Assert the output shape from a keepdimable function is as expected, e.g.
>>> x = xp.asarray([[0, 1, 2], [3, 4, 5], [6, 7, 8]])
>>> out1 = xp.max(x, keepdims=False)
>>> out2 = xp.max(x, keepdims=True)
>>> assert_keepdimable_shape('max', x.shape, out1.shape, (0, 1), False)
>>> assert_keepdimable_shape('max', x.shape, out2.shape, (0, 1), True)
is equivalent to
>>> assert out1.shape == ()
>>> assert out2.shape == (1, 1)
"""
if keepdims:
shape = tuple(1 if axis in axes else side for axis, side in enumerate(in_shape))
else:
shape = tuple(side for axis, side in enumerate(in_shape) if axis not in axes)
assert_shape(func_name, out_shape, shape, **kw)
def assert_0d_equals(
func_name: str, x_repr: str, x_val: Array, out_repr: str, out_val: Array, **kw
):
"""
Assert a 0d array is as expected, e.g.
>>> x = xp.asarray([0, 1, 2])
>>> res = xp.asarray(x, copy=True)
>>> res[0] = 42
>>> assert_0d_equals('asarray', 'x[0]', x[0], 'x[0]', res[0])
is equivalent to
>>> assert res[0] == x[0]
"""
msg = (
f"{out_repr}={out_val}, but should be {x_repr}={x_val} "
f"[{func_name}({fmt_kw(kw)})]"
)
if dh.is_float_dtype(out_val.dtype) and xp.isnan(out_val):
assert xp.isnan(x_val), msg
else:
assert x_val == out_val, msg
def assert_scalar_equals(
func_name: str,
type_: ScalarType,
idx: Shape,
out: Scalar,
expected: Scalar,
/,
repr_name: str = "out",
**kw,
):
"""
    Assert a 0d array, converted to a scalar, is as expected, e.g.
>>> x = xp.ones(5, dtype=xp.uint8)
>>> out = xp.sum(x)
>>> assert_scalar_equals('sum', int, (), int(out), 5)
is equivalent to
>>> assert int(out) == 5
"""
repr_name = repr_name if idx == () else f"{repr_name}[{idx}]"
f_func = f"{func_name}({fmt_kw(kw)})"
if type_ in [bool, int]:
msg = f"{repr_name}={out}, but should be {expected} [{f_func}]"
assert out == expected, msg
elif math.isnan(expected):
msg = f"{repr_name}={out}, but should be {expected} [{f_func}]"
assert math.isnan(out), msg
else:
msg = f"{repr_name}={out}, but should be roughly {expected} [{f_func}]"
assert math.isclose(out, expected, rel_tol=0.25, abs_tol=1), msg
def assert_fill(
func_name: str, fill_value: Scalar, dtype: DataType, out: Array, /, **kw
):
"""
Assert all elements of an array is as expected, e.g.
>>> out = xp.full(5, 42, dtype=xp.uint8)
>>> assert_fill('full', 42, xp.uint8, out, 5)
is equivalent to
>>> assert xp.all(out == 42)
"""
msg = f"out not filled with {fill_value} [{func_name}({fmt_kw(kw)})]\n{out=}"
if math.isnan(fill_value):
assert xp.all(xp.isnan(out)), msg
else:
assert xp.all(xp.equal(out, xp.asarray(fill_value, dtype=dtype))), msg
def assert_array_elements(
func_name: str, out: Array, expected: Array, /, *, out_repr: str = "out", **kw
):
"""
Assert array elements are (strictly) as expected, e.g.
>>> x = xp.arange(5)
>>> out = xp.asarray(x)
>>> assert_array_elements('asarray', out, x)
is equivalent to
>>> assert xp.all(out == x)
"""
dh.result_type(out.dtype, expected.dtype) # sanity check
assert_shape(func_name, out.shape, expected.shape, **kw) # sanity check
f_func = f"[{func_name}({fmt_kw(kw)})]"
if dh.is_float_dtype(out.dtype):
for idx in sh.ndindex(out.shape):
at_out = out[idx]
at_expected = expected[idx]
msg = (
f"{sh.fmt_idx(out_repr, idx)}={at_out}, should be {at_expected} "
f"{f_func}"
)
if xp.isnan(at_expected):
assert xp.isnan(at_out), msg
elif at_expected == 0.0 or at_expected == -0.0:
scalar_at_expected = float(at_expected)
scalar_at_out = float(at_out)
if is_pos_zero(scalar_at_expected):
assert is_pos_zero(scalar_at_out), msg
else:
assert is_neg_zero(scalar_at_expected) # sanity check
assert is_neg_zero(scalar_at_out), msg
else:
assert at_out == at_expected, msg
else:
assert xp.all(
out == expected
), f"{out_repr} not as expected {f_func}\n{out_repr}={out!r}\n{expected=}"
| 2.609375 | 3 |
pfpimgen/__init__.py | TheGuywithTheHat/phen-cogs | 0 | 12794373 | import json
from asyncio import create_task
from pathlib import Path
from redbot.core.bot import Red
from .pfpimgen import PfpImgen
with open(Path(__file__).parent / "info.json") as fp:
__red_end_user_data_statement__ = json.load(fp)["end_user_data_statement"]
# from https://github.com/phenom4n4n/Fixator10-Cogs/blob/V3/adminutils/__init__.py
async def setup_after_ready(bot):
await bot.wait_until_red_ready()
cog = PfpImgen(bot)
for name, command in cog.all_commands.items():
if not command.parent:
if bot.get_command(name):
command.name = f"i{command.name}"
for alias in command.aliases:
if bot.get_command(alias):
command.aliases[command.aliases.index(alias)] = f"i{alias}"
bot.add_cog(cog)
def setup(bot):
create_task(setup_after_ready(bot))
| 2.265625 | 2 |
examples/recognition/async_bot_lib.py | uhlive/python-sdk | 0 | 12794374 | <gh_stars>0
"""Mini Bot Framework (French), async version
This code is provided as-is for demonstration purposes only and is not
suitable for production. Use at your own risk.
"""
import asyncio
import base64
from typing import Dict
import sounddevice as sd # type: ignore
from aiohttp import ClientSession # type: ignore
from uhlive.stream.recognition import Closed
from uhlive.stream.recognition import CompletionCause as CC
from uhlive.stream.recognition import (
DefaultParams,
GrammarDefined,
Opened,
ParamsSet,
RecognitionComplete,
RecognitionInProgress,
Recognizer,
StartOfInput,
)
async def play_buffer(buffer, channels=1, samplerate=8000, dtype="int16", **kwargs):
loop = asyncio.get_event_loop()
event = asyncio.Event()
idx = 0
def callback(outdata, frame_count, time_info, status):
nonlocal idx
bcount = frame_count * 2
if status:
print(status)
remainder = len(buffer) - idx
if remainder == 0:
loop.call_soon_threadsafe(event.set)
raise sd.CallbackStop
valid_frames = bcount if remainder >= bcount else remainder
outdata[:valid_frames] = buffer[idx : idx + valid_frames]
idx += valid_frames
stream = sd.RawOutputStream(
callback=callback,
dtype=dtype,
samplerate=samplerate,
channels=channels,
**kwargs,
)
with stream:
await event.wait()
async def inputstream_generator(channels=1, samplerate=8000, dtype="int16", **kwargs):
"""Generator that yields blocks of input data as NumPy arrays."""
q_in = asyncio.Queue()
loop = asyncio.get_event_loop()
def callback(indata, frame_count, time_info, status):
loop.call_soon_threadsafe(q_in.put_nowait, bytes(indata))
stream = sd.RawInputStream(
callback=callback,
channels=channels,
samplerate=samplerate,
dtype=dtype,
**kwargs,
)
with stream:
while True:
indata = await q_in.get()
yield indata
class Bot:
TTF_CACHE: Dict[str, bytes] = {}
def __init__(self, google_ttf_key):
self.client = Recognizer()
self.session = None
self.socket = None
self.google_ttf_key = google_ttf_key
async def stream_mic(self):
try:
async for block in inputstream_generator(blocksize=960):
await self.socket.send_bytes(self.client.send_audio_chunk(block))
except asyncio.CancelledError:
pass
async def _ttf(self, text) -> bytes:
if text in self.TTF_CACHE:
return self.TTF_CACHE[text]
payload = {
"audioConfig": {"audioEncoding": "LINEAR16", "sampleRateHertz": 8000},
"input": {"text": text},
"voice": {"languageCode": "fr-FR", "name": "fr-FR-Wavenet-C"},
}
# url = "https://texttospeech.googleapis.com/v1/text:synthesize"
url = f"https://texttospeech.googleapis.com/v1beta1/text:synthesize?key={self.google_ttf_key}"
h = {"Content-Type": "application/json; charset=utf-8"}
async with self.session.post(url, headers=h, json=payload) as response:
json = await response.json()
audio = base64.b64decode(json["audioContent"])[44:]
self.TTF_CACHE[text] = audio
return audio
async def say(self, text):
audio = await self._ttf(text)
await play_buffer(audio)
async def expect(self, *event_classes, ignore=None):
while True:
msg = await self.socket.receive()
event = self.client.receive(msg.data)
if isinstance(event, event_classes):
return event
elif ignore is None or not isinstance(event, ignore):
raise AssertionError(f"Expected one of {event_classes}, got {event}")
async def ask_until_success(self, text, *args, **kwargs):
choice = None
while choice is None:
await self.say(text)
await self.socket.send_str(self.client.recognize(*args, **kwargs))
await self.expect(RecognitionInProgress)
resp = await self.expect(RecognitionComplete, ignore=(StartOfInput,))
if resp.completion_cause == CC.Success:
choice = resp.body.nlu
else:
if resp.body.asr:
await self.say("Je n'ai pas compris")
print("user said:", resp.body.asr.transcript)
else:
await self.say("Je n'ai rien entendu")
return choice
async def confirm(self, text: str) -> bool:
res = await self.ask_until_success(
text,
"builtin:speech/boolean",
recognition_mode="hotword",
hotword_max_duration=5000,
)
return res.value
async def run(self, uhlive_url: str, uhlive_token: str):
async with ClientSession() as session:
self.session = session
async with session.ws_connect(
uhlive_url, headers={"Authorization": f"bearer {uhlive_token}"}
) as socket:
self.socket = socket
await socket.send_str(self.client.open("deskbot"))
await self.expect(Opened)
streamer = asyncio.create_task(self.stream_mic())
try:
await self.scenario()
except Exception as e:
await self.say("Nous subissons une avarie. Rappelez plus tard.")
raise e
finally:
streamer.cancel()
await streamer
await socket.send_str(self.client.close())
await self.expect(Closed)
async def set_params(self, **kwargs):
await self.socket.send_str(self.client.set_params(**kwargs))
await self.expect(ParamsSet)
async def get_params(self):
await self.socket.send_str(self.client.get_params())
res = await self.expect(DefaultParams)
return res
async def define_grammar(self, builtin, alias):
await self.socket.send_str(self.client.define_grammar(builtin, alias))
await self.expect(GrammarDefined)
async def recognize(self, *args, **kwargs):
await self.socket.send_str(self.client.recognize(*args, **kwargs))
await self.expect(RecognitionInProgress)
async def scenario(self):
"""To be overridden in subclasses"""
raise NotImplementedError
| 2.046875 | 2 |
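A hedged example of a concrete bot built on the framework above, using only methods defined in Bot; the French prompts and the placeholder credentials are illustrative:

import asyncio

class HelloBot(Bot):
    async def scenario(self):
        if await self.confirm("Voulez-vous continuer ?"):
            await self.say("Très bien, continuons.")
        else:
            await self.say("Au revoir.")

# asyncio.run(HelloBot(GOOGLE_TTF_KEY).run(UHLIVE_URL, UHLIVE_TOKEN))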
wildlifelicensing/apps/applications/tests/test_conditions.py | jawaidm/wildlifelicensing | 0 | 12794375 | from datetime import date
from django.test import TestCase
from django.shortcuts import reverse
from django_dynamic_fixture import G
from wildlifelicensing.apps.main.tests.helpers import get_or_create_default_customer, is_login_page, \
get_or_create_default_assessor, add_assessor_to_assessor_group, SocialClient, get_or_create_default_officer, \
add_to_group, clear_mailbox, get_emails
from wildlifelicensing.apps.applications.tests import helpers as app_helpers
from wildlifelicensing.apps.applications.models import AssessmentCondition, Condition, Assessment
from wildlifelicensing.apps.main.helpers import is_assessor, get_user_assessor_groups
from wildlifelicensing.apps.main.models import AssessorGroup
from ledger.accounts.models import EmailUser
class TestViewAccess(TestCase):
fixtures = ['licences.json', 'conditions.json', 'returns.json']
def setUp(self):
self.client = SocialClient()
self.user = get_or_create_default_customer()
self.officer = get_or_create_default_officer()
self.application = app_helpers.create_and_lodge_application(self.user, **{
'data': {
'title': 'My Application'
}
})
self.assessment = app_helpers.get_or_create_assessment(self.application)
self.condition = Condition.objects.first()
self.assessment_condition = AssessmentCondition.objects.create(assessment=self.assessment,
condition=self.condition,
order=1)
self.urls_get = [
reverse('wl_applications:enter_conditions', args=[self.application.pk]),
reverse('wl_applications:search_conditions')
]
self.urls_post = [
{
'url': reverse('wl_applications:create_condition', args=[self.application.pk]),
'data': {
'code': '123488374',
'text': 'condition text'
}
},
{
'url': reverse('wl_applications:set_assessment_condition_state'),
'data': {
'assessmentConditionID': self.assessment_condition.pk,
'acceptanceStatus': 'accepted',
}
},
{
'url': reverse('wl_applications:enter_conditions', args=[self.application.pk]),
'data': {
'conditionID': [self.condition.pk],
}
},
]
def tearDown(self):
self.client.logout()
def test_customer_access(self):
"""
A Customer cannot access any URL
"""
# not logged-in
for url in self.urls_get:
response = self.client.get(url, follow=True)
self.assertTrue(is_login_page(response))
for url in self.urls_post:
response = self.client.post(url['url'], url['data'], follow=True)
self.assertTrue(is_login_page(response))
# logged-in. Should throw a 403 or redirect to login
self.client.login(self.user.email)
for url in self.urls_get:
response = self.client.get(url, follow=True)
if response.status_code != 403:
self.assertRedirects(response, reverse('wl_dashboard:tables_customer'), status_code=302,
target_status_code=200)
for url in self.urls_post:
response = self.client.post(url['url'], url['data'], follow=True)
if response.status_code != 403:
self.assertRedirects(response, reverse('wl_dashboard:tables_customer'), status_code=302,
target_status_code=200)
def test_officer_access(self):
"""
Officer should be able to access any views
"""
self.client.login(self.officer.email)
for url in self.urls_get:
response = self.client.get(url, follow=False)
self.assertEqual(200, response.status_code)
for url in self.urls_post:
response = self.client.post(url['url'], url['data'], follow=True)
self.assertEquals(200, response.status_code)
def test_assessor_access_limited(self):
"""
Test that an assessor cannot edit an assessment that doesn't belong to their group
All accessor can search conditions
"""
assessor = get_or_create_default_assessor()
self.client.login(assessor.email)
# This assessor doesn't belong to a group
self.assertTrue(is_assessor(assessor))
self.assertFalse(get_user_assessor_groups(assessor))
# forbidden
urls_get_forbidden = [
reverse('wl_applications:enter_conditions', args=[self.application.pk]),
reverse('wl_applications:enter_conditions_assessor', args=[self.application.pk, self.assessment.pk]),
]
urls_post_forbidden = [
{
'url': reverse('wl_applications:create_condition', args=[self.application.pk]),
'data': {
'code': '123488374',
'text': 'condition text'
}
},
{
'url': reverse('wl_applications:set_assessment_condition_state'),
'data': {
'assessmentConditionID': self.assessment_condition.pk,
'acceptanceStatus': 'accepted',
}
},
{
'url': reverse('wl_applications:enter_conditions', args=[self.application.pk]),
'data': {
'conditionID': [self.condition.pk],
}
},
{
'url': reverse('wl_applications:enter_conditions_assessor',
args=[self.application.pk, self.assessment.pk]),
'data': {
'conditionID': [self.condition.pk],
}
},
]
# Allowed
urls_get_allowed = [
reverse('wl_applications:search_conditions')
]
urls_post_allowed = [
]
for url in urls_get_forbidden:
response = self.client.get(url, follow=True)
if response.status_code != 403:
self.assertRedirects(response, reverse('wl_dashboard:tables_assessor'), status_code=302,
target_status_code=200)
for url in urls_post_forbidden:
response = self.client.post(url['url'], url['data'], follow=True)
if response.status_code != 403:
self.assertRedirects(response, reverse('wl_dashboard:tables_assessor'), status_code=302,
target_status_code=200)
for url in urls_get_allowed:
response = self.client.get(url, follow=True)
self.assertEqual(200, response.status_code)
for url in urls_post_allowed:
response = self.client.post(url['url'], url['data'], follow=True)
self.assertEqual(200, response.status_code)
def test_assessor_access_normal(self):
"""
Test that an assessor can edit an assessment that belongs to their group
"""
assessor = get_or_create_default_assessor()
self.client.login(assessor.email)
# This assessor doesn't belong to a group
self.assertTrue(is_assessor(assessor))
# add the assessor to the assessment group
self.assertTrue(Assessment.objects.filter(application=self.application).count() > 0)
for assessment in Assessment.objects.filter(application=self.application):
add_assessor_to_assessor_group(assessor, assessment.assessor_group)
# forbidden
urls_get_forbidden = [
reverse('wl_applications:enter_conditions', args=[self.application.pk]),
]
urls_post_forbidden = [
{
'url': reverse('wl_applications:create_condition', args=[self.application.pk]),
'data': {
'code': '123488374',
'text': 'condition text'
}
},
{
'url': reverse('wl_applications:set_assessment_condition_state'),
'data': {
'assessmentConditionID': self.assessment_condition.pk,
'acceptanceStatus': 'accepted',
}
},
{
'url': reverse('wl_applications:enter_conditions', args=[self.application.pk]),
'data': {
'conditionID': [self.condition.pk],
}
},
]
# Allowed
urls_get_allowed = [
reverse('wl_applications:search_conditions'),
reverse('wl_applications:enter_conditions_assessor', args=[self.application.pk, self.assessment.pk]),
]
urls_post_allowed = [
{
'url': reverse('wl_applications:enter_conditions_assessor',
args=[self.application.pk, self.assessment.pk]),
'data': {
'conditionID': [self.condition.pk],
}
},
]
for url in urls_get_forbidden:
response = self.client.get(url, follow=True)
if response.status_code != 403:
self.assertRedirects(response, reverse('wl_dashboard:tables_assessor'), status_code=302,
target_status_code=200)
for url in urls_post_forbidden:
response = self.client.post(url['url'], url['data'], follow=True)
if response.status_code != 403:
self.assertRedirects(response, reverse('wl_dashboard:tables_assessor'), status_code=302,
target_status_code=200)
for url in urls_get_allowed:
response = self.client.get(url, follow=True)
self.assertEqual(200, response.status_code)
for url in urls_post_allowed:
response = self.client.post(url['url'], url['data'], follow=True)
self.assertEqual(200, response.status_code)
class TestAssignAssessor(TestCase):
fixtures = ['licences.json', 'conditions.json']
def setUp(self):
self.client = SocialClient()
self.user = get_or_create_default_customer()
self.officer = get_or_create_default_officer()
self.application = app_helpers.create_and_lodge_application(self.user, **{
'data': {
'title': 'My Application'
}
})
self.assessor_group = G(AssessorGroup, name='District7', email='<EMAIL>')
self.assessor_1 = G(EmailUser, email='<EMAIL>', dob='1967-04-04')
add_to_group(self.assessor_1, 'Assessors')
add_to_group(self.assessor_1, self.assessor_group)
self.assessor_2 = G(EmailUser, email='<EMAIL>', dob='1968-04-04')
add_to_group(self.assessor_2, 'Assessors')
add_to_group(self.assessor_2, self.assessor_group)
def _issue_assessment(self, application, assessor_group):
self.client.login(self.officer.email)
url = reverse('wl_applications:send_for_assessment')
payload = {
'applicationID': application.pk,
'assGroupID': assessor_group.pk
}
resp = self.client.post(url, data=payload)
self.assertEqual(resp.status_code, 200)
self.client.logout()
clear_mailbox()
data = resp.json()
return Assessment.objects.filter(pk=data['assessment']['id']).first()
def test_email_sent_to_assessor_group(self):
"""
Test that when an officer issue an assessment an email is sent to the group email
"""
# officer issue assessment
self.client.login(self.officer.email)
url = reverse('wl_applications:send_for_assessment')
payload = {
'applicationID': self.application.pk,
'assGroupID': self.assessor_group.pk
}
resp = self.client.post(url, data=payload)
self.assertEqual(resp.status_code, 200)
# we should have one email sent to the assessor
emails = get_emails()
self.assertEqual(len(emails), 1)
email = emails[0]
recipients = email.to
self.assertEqual(len(recipients), 1)
expected_recipient = self.assessor_group.email
self.assertEqual(recipients[0], expected_recipient)
# the response is a json response. It should contain the assessment id
expected_content_type = 'application/json'
self.assertEqual(resp['content-type'], expected_content_type)
data = resp.json()
self.assertTrue('assessment' in data)
self.assertTrue('id' in data['assessment'])
assessment = Assessment.objects.filter(pk=data['assessment']['id']).first()
self.assertIsNotNone(assessment)
self.assertEqual(assessment.application, self.application)
expected_status = 'awaiting_assessment'
self.assertEqual(assessment.status, expected_status)
# check more data
self.assertEqual(assessment.assessor_group, self.assessor_group)
self.assertEqual(assessment.officer, self.officer)
self.assertEqual(assessment.date_last_reminded, date.today())
self.assertEqual(assessment.conditions.count(), 0)
self.assertEqual(assessment.comment, '')
self.assertEqual(assessment.purpose, '')
def test_assign_assessment_send_email(self):
"""
        Use case: assessor_1 assigns the assessment to assessor_2.
        Test that assessor_2 receives an email with a link.
        The email should also be logged in the communication log.
"""
assessment = self._issue_assessment(self.application, self.assessor_group)
previous_comm_log = app_helpers.get_communication_log(assessment.application)
previous_action_list = app_helpers.get_action_log(assessment.application)
url = reverse('wl_applications:assign_assessor')
self.client.login(self.assessor_1.email)
payload = {
'assessmentID': assessment.id,
'userID': self.assessor_2.id
}
resp = self.client.post(url, data=payload)
self.assertEqual(resp.status_code, 200)
# the response is a json response. It should contain the assessment id
expected_content_type = 'application/json'
self.assertEqual(resp['content-type'], expected_content_type)
# we should have one email sent to the assessor
emails = get_emails()
self.assertEqual(len(emails), 1)
email = emails[0]
recipients = email.to
self.assertEqual(len(recipients), 1)
expected_recipient = self.assessor_2.email
self.assertEqual(recipients[0], expected_recipient)
# the subject should contains 'assessment assigned'
self.assertTrue(email.subject.find('assessment assigned') > -1)
# the body should get a url to assess the application
expected_url = reverse('wl_applications:enter_conditions_assessor',
args=[assessment.application.pk, assessment.pk])
self.assertTrue(email.body.find(expected_url) > -1)
# test that the email has been logged.
new_comm_log = app_helpers.get_communication_log(assessment.application)
self.assertEqual(len(new_comm_log), len(previous_comm_log) + 1)
previous_recipients = [entry['to'] for entry in previous_comm_log]
self.assertNotIn(self.assessor_2.email, previous_recipients)
new_recipients = [entry['to'] for entry in new_comm_log]
self.assertIn(self.assessor_2.email, new_recipients)
# it should also be recorded in the action list
new_action_list = app_helpers.get_action_log(assessment.application)
self.assertEqual(len(new_action_list), len(previous_action_list) + 1)
def test_assign_to_me_no_email(self):
"""
        Use case: assessor_1 assigns the assessment to himself.
        Test that no email is sent.
"""
assessment = self._issue_assessment(self.application, self.assessor_group)
previous_comm_log = app_helpers.get_communication_log(assessment.application)
previous_action_list = app_helpers.get_action_log(assessment.application)
url = reverse('wl_applications:assign_assessor')
self.client.login(self.assessor_1.email)
payload = {
'assessmentID': assessment.id,
'userID': self.assessor_1.id
}
resp = self.client.post(url, data=payload)
# the response is a json response. It should contain the assessment id
expected_content_type = 'application/json'
self.assertEqual(resp['content-type'], expected_content_type)
        # no email should have been sent
emails = get_emails()
self.assertEqual(len(emails), 0)
# com log should be unchanged.
new_comm_log = app_helpers.get_communication_log(assessment.application)
self.assertEqual(new_comm_log, previous_comm_log)
# but should be recorded in the action list
new_action_list = app_helpers.get_action_log(assessment.application)
self.assertEqual(len(new_action_list), len(previous_action_list) + 1)
| 2.078125 | 2 |
examples/build_example5.py | berkeman/examples | 0 | 12794376 | def main(urd):
urd.build('example7')
| 1.078125 | 1 |
pruebas.py | hafid4827/bot-binomo-redesigned | 5 | 12794377 | from binomo import apiAlfaBinomo
if __name__ == '__main__':
    aApiAlfa = apiAlfaBinomo('', '', timeBotWait=120, loginError=True)  # timeBotWait 120s (time to wait until the captcha is solved), loginError True
    aApiAlfa.actionDV('EURUSD')  # currency pair
    aApiAlfa.buy("CALL")  # buy or sell (PUT)
    aApiAlfa.listOder()  # list of assets and their profit
    aApiAlfa.mount(3000)  # 3000,15000,30000,60000,150000,300000,600000,3000000 purchase amounts
    aApiAlfa.timeBuy(1)  # 1,2,3,4,5 minutes | 1.90625 | 2
src/icemac/addressbook/browser/person/export.py | icemac/icemac.addressbook | 1 | 12794378 | from icemac.addressbook.i18n import _
from .interfaces import IBirthDate
import icemac.addressbook.browser.base
import zope.component
class ExportList(icemac.addressbook.browser.base.BaseView):
"""List available export formats."""
title = _('Export person data')
def exporters(self):
"""Iterable of exporters having enough data so export something."""
        # XXX: This has no API; the exporters should be subscription adapters
        # which return None if they do not have enough data to export
        # anything and a dict consisting of title and URL otherwise.
birthdate_data = zope.component.getMultiAdapter(
(self.context, self.request), IBirthDate)
if birthdate_data.icalendar_event is not None:
yield dict(title=_('iCalendar export of birth date (.ics file)'),
url=self.url(self.context, 'iCalendar'))
def back_url(self):
return self.url(self.context)
| 2.3125 | 2 |
src/privas/bases.py | sunoru/splatoon-privates | 1 | 12794379 | import os
PKG_PATH = os.path.abspath(os.path.dirname(__file__))
class PrivaError(Exception):
def __init__(self, code, message):
super().__init__(code, message)
def __str__(self):
return ': '.join(map(str, self.args))
class BasePriva:
"""Base class for Priva (private battles)."""
@classmethod
def rules(cls, language='en'):
"""Return descriptions of the rules."""
return 'No rules for %s' % cls.__name__
def start(self, *args, **kwargs):
"""Start the Priva."""
raise PrivaError(-1, 'Priva unimplemented.')
def end(self, *args, **kwargs):
"""End the Priva."""
raise PrivaError(-1, 'Priva unimplemented.')
def start_battle(self, *args, **kwargs):
"""Start a battle."""
raise PrivaError(-1, 'Priva unimplemented.')
def end_battle(self, *args, **kwargs):
"""End a battle."""
raise PrivaError(-1, 'Priva unimplemented.')
| 3.03125 | 3 |
P2_ALF.py | manojkumar1053/P2_ALF | 0 | 12794380 | <reponame>manojkumar1053/P2_ALF<gh_stars>0
import glob
import os
import shutil
import time
import cv2
import matplotlib.pyplot as plt
import numpy as np
from moviepy.editor import VideoFileClip
########################################################################################################################
INPUT_DIR = "camera_cal/"
OUTPUT_DIR = "output_images/"
DIR_TEST_IMG = "test_images/"
TEST_IMAGES = glob.glob(DIR_TEST_IMG + "test*.jpg")
# Verify the existing directory and saving the images
# Calibration of images
########################################################################################################################
# Function to clean the output image directory
def clean_output_dir():
for file in os.listdir(OUTPUT_DIR):
file_path = os.path.join(OUTPUT_DIR, file)
# print(file_path)
try:
if os.path.isfile(file_path) or os.path.islink(file_path):
os.unlink(file_path)
elif os.path.isdir(file_path):
shutil.rmtree(file_path)
except Exception as e:
print('Failed to delete %s. Reason: %s' % (file_path, e))
clean_output_dir()
print("Output Image Directory Cleaned Successfully ! ... ")
########################################################################################################################
# Define List and Variables for corners and points
nx = 9
ny = 6
objp = np.zeros((ny * nx, 3), np.float32)
objp[:, :2] = np.mgrid[0:nx, 0:ny].T.reshape(-1, 2)
objpoints = [] # 3d points in real world space (x,y,z) co-ordinates
imgpoints = [] # 2d points in image plane.
img_err = [] # Images which were failed to open will be stored here
########################################################################################################################
def calibrate_camera(input_dir, output_dir, nx, ny):
# Clean the output directory
for id, name in enumerate(os.listdir(input_dir)):
img = cv2.imread(input_dir + name)
# print(id, name)
# Convert to gray scale
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        # cornerSubPix uses the termination criteria below to refine the detected corner locations
# https://opencv-python-tutroals.readthedocs.io/en/latest/py_tutorials/py_calib3d/py_calibration/py_calibration.html
criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001)
ret, corners = cv2.findChessboardCorners(gray, (nx, ny), None)
# Verify if the corners were returned
if ret == True:
objpoints.append(objp)
corners = cv2.cornerSubPix(gray, corners, (11, 11), (-1, -1), criteria)
imgpoints.append(corners)
# Draw the corners
cv2.drawChessboardCorners(img, (nx, ny), corners, ret)
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 8))
f.tight_layout()
ax1.imshow(cv2.cvtColor(cv2.imread(INPUT_DIR + "/" + name), cv2.COLOR_BGR2RGB))
ax1.set_title("Original:: " + name, fontsize=18)
ax2.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
ax2.set_title("Corners:: " + name, fontsize=18)
f.savefig(output_dir + "output_calibrate_camera_" + str(time.time()) + ".jpg")
else: # saving the failed to open images in the list
img_err.append(name)
calibrate_camera(INPUT_DIR, OUTPUT_DIR, nx, ny)
print("Camera Calibration Completed! ... ")
########################################################################################################################
# Distortion correction: takes an image path plus the object/image points and returns the undistorted image
def undistort(img_name, objpoints, imgpoints):
img = cv2.imread(img_name)
# print("UND_TEST", img)
ret, mtx, dist, rvecs, tvecs = cv2.calibrateCamera(objpoints, imgpoints, img.shape[1:], None, None)
undist = cv2.undistort(img, mtx, dist, None, mtx)
return undist
########################################################################################################################
# Undistort a single test image and save the comparison figure to the output directory
undist = undistort(INPUT_DIR + "calibration10.jpg", objpoints, imgpoints)
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 8))
f.tight_layout()
ax1.imshow(cv2.cvtColor(cv2.imread(INPUT_DIR + "/calibration10.jpg"), cv2.COLOR_BGR2RGB))
ax1.set_title("Original_Image:: calibration10.jpg", fontsize=18)
ax2.imshow(cv2.cvtColor(undist, cv2.COLOR_BGR2RGB))
ax2.set_title("Undistorted_Image:: calibration10.jpg", fontsize=18)
f.savefig(OUTPUT_DIR + "output_undistort_single_test_file_" + str(time.time()) + ".jpg")
########################################################################################################################
# Undistort all the images in the test folder
########################################################################################################################
def undisort_images(img, objpoints, imgpoints):
ret, mtx, dist, rvecs, tvecs = cv2.calibrateCamera(objpoints, imgpoints, img.shape[1:], None, None)
undist = cv2.undistort(img, mtx, dist, None, mtx)
return undist
for image in TEST_IMAGES:
undist = undistort(image, objpoints, imgpoints)
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 8))
f.tight_layout()
ax1.imshow(cv2.cvtColor(cv2.imread(image), cv2.COLOR_BGR2RGB))
ax1.set_title("Original:: " + image, fontsize=18)
ax2.imshow(cv2.cvtColor(undist, cv2.COLOR_BGR2RGB))
ax2.set_title("Undistorted:: " + image, fontsize=18)
f.savefig(OUTPUT_DIR + "output_undistort_" + str((time.time())) + ".jpg")
########################################################################################################################
# Perspective Transformation
def transform_image(img, offset=250, src=None, dst=None, lane_width=9):
image_dimension = (img.shape[1], img.shape[0])
# Copy the Image
out_img_orig = np.copy(img)
# Define the area
leftupper = (585, 460)
rightupper = (705, 460)
leftlower = (210, img.shape[0])
rightlower = (1080, img.shape[0])
warped_leftupper = (offset, 0)
warped_rightupper = (offset, img.shape[0])
warped_leftlower = (img.shape[1] - offset, 0)
warped_rightlower = (img.shape[1] - offset, img.shape[0])
# define the color to be drawn on the border of the image
color_red = [0, 0, 255]
color_cyan = [255, 255, 0]
    # Fall back to the default source/destination points only when none are supplied
    if src is None:
        src = np.float32([leftupper, leftlower, rightupper, rightlower])
    if dst is None:
        dst = np.float32([warped_leftupper, warped_rightupper, warped_leftlower, warped_rightlower])
cv2.line(out_img_orig, leftlower, leftupper, color_red, lane_width)
cv2.line(out_img_orig, leftlower, rightlower, color_red, lane_width * 2)
cv2.line(out_img_orig, rightupper, rightlower, color_red, lane_width)
cv2.line(out_img_orig, rightupper, leftupper, color_cyan, lane_width)
# calculate the perspective transform matrix
M = cv2.getPerspectiveTransform(src, dst)
minv = cv2.getPerspectiveTransform(dst, src)
# Warp the image
warped = cv2.warpPerspective(img, M, image_dimension, flags=cv2.WARP_FILL_OUTLIERS + cv2.INTER_CUBIC)
# (warped)
out_warped_img = np.copy(warped)
cv2.line(out_warped_img, warped_rightupper, warped_leftupper, color_red, lane_width)
cv2.line(out_warped_img, warped_rightupper, warped_rightlower, color_red, lane_width * 2)
cv2.line(out_warped_img, warped_leftlower, warped_rightlower, color_red, lane_width)
cv2.line(out_warped_img, warped_leftlower, warped_leftupper, color_cyan, lane_width)
return warped, M, minv, out_img_orig, out_warped_img
########################################################################################################################
# Run the function
for image in TEST_IMAGES:
img = cv2.imread(image)
warped, M, minv, out_img_orig, out_warped_img = transform_image(img) # Calling the transform Image Output
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 8))
f.tight_layout()
ax1.imshow(cv2.cvtColor(out_img_orig, cv2.COLOR_BGR2RGB))
ax1.set_title("Original:: " + image, fontsize=18)
ax2.imshow(cv2.cvtColor(out_warped_img, cv2.COLOR_BGR2RGB))
ax2.set_title("Warped:: " + image, fontsize=18)
f.savefig(OUTPUT_DIR + "output_transform_image_" + str(time.time()) + ".jpg")
########################################################################################################################
# Gradient and Color Transformation
# Apply the Sobel operator and return a thresholded binary image
def abs_sobel_thresh(img, sobel_kernel=3, mag_thresh=(0, 255), return_grad=False,
direction='x'): # Default Direction is X
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
gray = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
# Applying Sobel operator in X Direction
if direction.lower() == 'x':
grad = cv2.Sobel(gray, cv2.CV_64F, 1, 0, ksize=sobel_kernel) # Take the derivative in x
# Sobel in Y direction
else:
grad = cv2.Sobel(gray, cv2.CV_64F, 0, 1, ksize=sobel_kernel) # Take the derivative in y
if return_grad == True:
return grad
    abs_sobel = np.absolute(grad)  # Absolute value of the x/y derivative to accentuate lines away from horizontal
scaled_sobel = np.uint8(255 * abs_sobel / np.max(abs_sobel))
# Applying the threshold in the image and returning the output
grad_binary = np.zeros_like(scaled_sobel)
grad_binary[(scaled_sobel >= mag_thresh[0]) & (scaled_sobel < mag_thresh[1])] = 1
return grad_binary
########################################################################################################################
# Run the function on the first undistorted image and get the binary image in the X direction
img = undistort(TEST_IMAGES[0], objpoints, imgpoints)
combined_binary = abs_sobel_thresh(img, sobel_kernel=3, mag_thresh=(30, 100), direction='x')
warped, warp_matrix, unwarp_matrix, out_img_orig, out_warped_img = transform_image(combined_binary, offset=300)
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 8))
f.tight_layout()
ax1.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
ax1.set_title("Original:: " + image, fontsize=18)
ax2.imshow(warped, cmap='gray')
# plt.show()
ax2.set_title("Transformed:: " + image, fontsize=18)
# plt.show()
# Run the function on the first undistorted image and get the binary image in the Y direction
img = undistort(TEST_IMAGES[0], objpoints, imgpoints)
combined_binary = abs_sobel_thresh(img, sobel_kernel=3, mag_thresh=(30, 120), direction='y')
warped, warp_matrix, unwarp_matrix, out_img_orig, out_warped_img = transform_image(combined_binary, offset=300)
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 8))
f.tight_layout()
ax1.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
ax1.set_title("Original:: " + image, fontsize=18)
ax2.imshow(warped, cmap='gray')
ax2.set_title("Transformed:: " + image, fontsize=18)
############################################################################################################################
# Calculating the gradient magnitude threshold
def mag_threshold(img, sobel_kernel=3, mag_thresh=(0, 255)):
xgrad = abs_sobel_thresh(img, sobel_kernel=sobel_kernel, mag_thresh=mag_thresh, return_grad=True) # X Direction
ygrad = abs_sobel_thresh(img, sobel_kernel=sobel_kernel, mag_thresh=mag_thresh, return_grad=True,
direction='y') # In Y direction
magnitude = np.sqrt(np.square(xgrad) + np.square(ygrad)) # Calculating the magnitude of Gradient
abs_magnitude = np.absolute(magnitude)
scaled_magnitude = np.uint8(255 * abs_magnitude / np.max(abs_magnitude))
mag_binary = np.zeros_like(scaled_magnitude)
mag_binary[(scaled_magnitude >= mag_thresh[0]) & (scaled_magnitude < mag_thresh[1])] = 1
return mag_binary
# Run the function
img = undistort(TEST_IMAGES[0], objpoints, imgpoints)
combined_binary = mag_threshold(img, mag_thresh=(30, 100)) # Get Binary Image
warped, warp_matrix, unwarp_matrix, out_img_orig, out_warped_img = transform_image(combined_binary,
offset=300) # Transform the Image
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 8))
f.tight_layout()
ax1.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
ax1.set_title("Original:: " + image, fontsize=18)
ax2.imshow(warped, cmap='gray')
ax2.set_title("Transformed:: " + image, fontsize=18)
########################################################################################################################
# Calculating the Direction of the Gradient
def dir_threshold(img, sobel_kernel=3, thresh=(0, np.pi / 2)):
# Grayscale Images conversion
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
gray = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
# Calculating the absolute value X and Y gradients
# xgrad = cv2.Sobel(gray, cv2.CV_64F, 1, 0, ksize=sobel_kernel)
# ygrad = cv2.Sobel(gray, cv2.CV_64F, 0, 1, ksize=sobel_kernel)
xabs = np.absolute(cv2.Sobel(gray, cv2.CV_64F, 1, 0, ksize=sobel_kernel))
yabs = np.absolute(cv2.Sobel(gray, cv2.CV_64F, 0, 1, ksize=sobel_kernel))
grad_dir = np.arctan2(yabs, xabs)
# Applying the threshold and creating the binary image
binary_output = np.zeros_like(grad_dir).astype(np.uint8)
binary_output[(grad_dir >= thresh[0]) & (grad_dir < thresh[1])] = 1
return binary_output
########################################################################################################################
# Calculating the RGB threshold and returning the binary image
def calulate_RGB_threshold_image(img, channel='R', thresh=(0, 255)):
img1 = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
if channel == 'R':
bin_img = img1[:, :, 0]
if channel == 'G':
bin_img = img1[:, :, 1]
if channel == 'B':
bin_img = img1[:, :, 2]
binary_img = np.zeros_like(bin_img).astype(np.uint8)
binary_img[(bin_img >= thresh[0]) & (bin_img < thresh[1])] = 1
return binary_img
########################################################################################################################
# Calculate the RGB Threshold on the First Binary Image for R Channel
img = undistort(TEST_IMAGES[0], objpoints, imgpoints)
combined_binary = calulate_RGB_threshold_image(img, thresh=(230, 255))
warped, warp_matrix, unwarp_matrix, out_img_orig, out_warped_img = transform_image(combined_binary, offset=300)
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 8))
f.tight_layout()
ax1.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
ax1.set_title("Original:: " + image, fontsize=18)
ax2.imshow(warped, cmap='gray')
ax2.set_title("Transformed:: " + image, fontsize=18)
# ***************************************************************************************************************
# Calculate the RGB Threshold on the First Binary Image for G Channel
img = undistort(TEST_IMAGES[0], objpoints, imgpoints)
combined_binary = calulate_RGB_threshold_image(img, thresh=(200, 255), channel='G')
warped, warp_matrix, unwarp_matrix, out_img_orig, out_warped_img = transform_image(combined_binary, offset=300)
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 8))
f.tight_layout()
ax1.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
ax1.set_title("Original:: " + image, fontsize=18)
ax2.imshow(warped, cmap='gray')
ax2.set_title("Transformed:: " + image, fontsize=18)
# ***************************************************************************************************************
# Calculate the RGB Threshold on the First Binary Image for B Channel
img = undistort(TEST_IMAGES[0], objpoints, imgpoints)
combined_binary = calulate_RGB_threshold_image(img, thresh=(185, 255), channel='B')
warped, warp_matrix, unwarp_matrix, out_img_orig, out_warped_img = transform_image(combined_binary, offset=300)
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 8))
f.tight_layout()
ax1.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
ax1.set_title("Original:: " + image, fontsize=18)
ax2.imshow(warped, cmap='gray')
ax2.set_title("Transformed:: " + image, fontsize=18)
########################################################################################################################
##################### HLS Color Threshold #################################################
# Calculate the threshold for the H channel of HLS
def get_hls_hthresh_img(img, thresh=(0, 255)):
hls_img = cv2.cvtColor(img, cv2.COLOR_BGR2HLS)
h_channel = hls_img[:, :, 0]
# Matching the condition for the criteria
binary_output = np.zeros_like(h_channel).astype(np.uint8)
binary_output[(h_channel >= thresh[0]) & (h_channel < thresh[1])] = 1
# return the image output
return binary_output
########################################################################################################################
# Calculate the threshold for the L channel of HLS
def get_hls_lthresh_img(img, thresh=(0, 255)):
hls_img = cv2.cvtColor(img, cv2.COLOR_BGR2HLS)
l_channel = hls_img[:, :, 1]
# Matching the condition for the criteria
binary_output = np.zeros_like(l_channel).astype(np.uint8)
binary_output[(l_channel >= thresh[0]) & (l_channel < thresh[1])] = 1
# return the image output
return binary_output
########################################################################################################################
# Calculate the threshold for the S channel of HLS
def get_hls_sthresh_img(img, thresh=(0, 255)):
hls_img = cv2.cvtColor(img, cv2.COLOR_BGR2HLS)
s_channel = hls_img[:, :, 2]
# Matching the condition for the criteria
binary_output = np.zeros_like(s_channel).astype(np.uint8)
binary_output[(s_channel >= thresh[0]) & (s_channel < thresh[1])] = 1
# return the image output
return binary_output
# Run the function on the Test Image
img = undistort(TEST_IMAGES[0], objpoints, imgpoints)
# combined_binary = get_hls_hthresh_img(img, thresh=(201, 255))
combined_binary = get_hls_lthresh_img(img, thresh=(201, 255))
# combined_binary = get_hls_sthresh_img(img, thresh=(201, 255))
warped, warp_matrix, unwarp_matrix, out_img_orig, out_warped_img = transform_image(combined_binary, offset=300)
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 8))
f.tight_layout()
ax1.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
ax1.set_title("Original:: " + image, fontsize=18)
ax2.imshow(warped, cmap='gray')
ax2.set_title("Transformed:: " + image, fontsize=18)
########################################################################################################################
# Run the function
img = undistort(TEST_IMAGES[0], objpoints, imgpoints)
combined_binary = get_hls_sthresh_img(img, thresh=(150, 255))
warped, warp_matrix, unwarp_matrix, out_img_orig, out_warped_img = transform_image(combined_binary, offset=300)
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 8))
f.tight_layout()
ax1.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
ax1.set_title("Original:: " + image, fontsize=18)
ax2.imshow(warped, cmap='gray')
ax2.set_title("Transformed:: " + image, fontsize=18)
########################################################################################################################
# The Lab color space is quite different from the RGB color space. In RGB color
# space the color information is separated into three channels but the same
# three channels also encode brightness information
# On the other hand, in Lab color space, the L channel is independent of color information and encodes brightness only.
# The other two channels encode color.
# https://www.learnopencv.com/color-spaces-in-opencv-cpp-python/
########################################################################################################################
# calculate b channel in LAB color Space
def get_lab_bthresh_img(img, thresh=(0, 255)):
lab_img = cv2.cvtColor(img, cv2.COLOR_BGR2LAB)
b_channel = lab_img[:, :, 2]
# Matching the condition for the criteria
bin_op = np.zeros_like(b_channel).astype(np.uint8)
bin_op[(b_channel >= thresh[0]) & (b_channel < thresh[1])] = 1
return bin_op
# Test the Image
img = undistort(TEST_IMAGES[0], objpoints, imgpoints)
combined_binary = get_lab_bthresh_img(img, thresh=(147, 255))
warped, warp_matrix, unwarp_matrix, out_img_orig, out_warped_img = transform_image(combined_binary, offset=300)
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 8))
f.tight_layout()
ax1.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
ax1.set_title("Original:: " + image, fontsize=18)
ax2.imshow(warped, cmap='gray')
ax2.set_title("Transformed:: " + image, fontsize=18)
########################################################################################################################
def get_binary_image(img, kernel_size=3, sobel_dirn='X', sobel_thresh=(0, 255), r_thresh=(0, 255),
s_thresh=(0, 255), b_thresh=(0, 255), g_thresh=(0, 255)):
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
if sobel_dirn == 'X':
sobel = cv2.Sobel(gray, cv2.CV_64F, 1, 0, ksize=kernel_size)
else:
sobel = cv2.Sobel(gray, cv2.CV_64F, 0, 1, ksize=kernel_size)
abs_sobel = np.absolute(sobel)
scaled_sobel = np.uint8(255 * abs_sobel / np.max(abs_sobel))
sbinary = np.zeros_like(scaled_sobel)
sbinary[(scaled_sobel >= sobel_thresh[0]) & (scaled_sobel <= sobel_thresh[1])] = 1
combined = np.zeros_like(sbinary)
combined[(sbinary == 1)] = 1
# Threshold R color channel
r_binary = calulate_RGB_threshold_image(img, thresh=r_thresh)
# Threshold G color channel
g_binary = calulate_RGB_threshold_image(img, thresh=g_thresh, channel='G')
# Threshold B in LAB
b_binary = get_lab_bthresh_img(img, thresh=b_thresh)
# Threshold color channel
s_binary = get_hls_sthresh_img(img, thresh=s_thresh)
    # Activate a pixel in the combined binary image if any of the individual thresholds is active
combined_binary = np.zeros_like(combined)
combined_binary[(r_binary == 1) | (combined == 1) | (s_binary == 1) | (b_binary == 1) | (g_binary == 1)] = 1
# combined_binary[(r_binary == 1) | (combined == 1) | (s_binary == 1) | (g_binary == 1)] = 1
# Return the output Binary Image which matches the above criteria
return combined_binary
########################################################################################################################
# Testing the thresholding
# Defining threshold values for getting the binary images
kernel_size = 5 # Kernel Size
mag_thresh = (30, 100) # Magnitude of the threshold
r_thresh = (230, 255) # Threshold value of R in RGB Color space
s_thresh = (165, 255) # Threshold value of S in HLS Color space
b_thresh = (160, 255) # Threshold value of B in LAB Color space
g_thresh = (210, 255) # Threshold value of G in RGB Color space
for image_name in TEST_IMAGES:
img = undistort(image_name, objpoints, imgpoints)
combined_binary = get_binary_image(img, kernel_size=kernel_size, sobel_thresh=mag_thresh, r_thresh=r_thresh,
s_thresh=s_thresh, b_thresh=b_thresh, g_thresh=g_thresh)
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 8))
f.tight_layout()
ax1.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
ax1.set_title("Original:: " + image, fontsize=18)
ax2.imshow(combined_binary, cmap='gray')
ax2.set_title("Threshold Binary:: " + image, fontsize=18)
# Saving the output in the output directory
f.savefig(OUTPUT_DIR + "output_get_bin_images_" + str(time.time()) + ".jpg")
########################################################################################################################
for image in TEST_IMAGES:
img = cv2.imread(image)
warped, M, minv, out_img_orig, out_warped_img = transform_image(img)
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 8))
f.tight_layout()
ax1.imshow(cv2.cvtColor(out_img_orig, cv2.COLOR_BGR2RGB))
ax1.set_title("Original:: " + image, fontsize=18)
ax2.imshow(cv2.cvtColor(out_warped_img, cv2.COLOR_BGR2RGB))
ax2.set_title("Warped:: " + image, fontsize=18)
f.savefig(OUTPUT_DIR + "output_transform_image_" + str(time.time()) + ".jpg")
for image in TEST_IMAGES:
img = undistort(image, objpoints, imgpoints)
combined_binary = get_binary_image(img, kernel_size=kernel_size, sobel_thresh=mag_thresh,
r_thresh=r_thresh, s_thresh=s_thresh, b_thresh=b_thresh, g_thresh=g_thresh)
warped, warp_matrix, unwarp_matrix, out_img_orig, out_warped_img = transform_image(combined_binary)
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 8))
f.tight_layout()
ax1.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
ax1.set_title("Original:: " + image, fontsize=18)
ax2.imshow(warped, cmap='gray')
ax2.set_title("Transformed:: " + image, fontsize=18)
f.savefig(OUTPUT_DIR + "output_get_binary_image_" + str(time.time()) + ".jpg")
########################################################################################################################
########################### Lane Line Detection and Polynomial Fitting ################################################
def find_lines(warped_img, nwindows=9, margin=80, minpix=40):
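    """Sliding-window search: start from the histogram peaks of the warped binary image and collect left/right lane pixels window by window."""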
# Take a histogram of the bottom half of the image
histogram = np.sum(warped_img[warped_img.shape[0] // 2:, :], axis=0)
# Create an output image to draw on and visualize the result
out_img = np.dstack((warped_img, warped_img, warped_img)) * 255
# Find the peak of the left and right halves of the histogram
# These will be the starting point for the left and right lines
midpoint = np.int(histogram.shape[0] // 2)
leftx_base = np.argmax(histogram[:midpoint])
rightx_base = np.argmax(histogram[midpoint:]) + midpoint
# Set height of windows - based on nwindows above and image shape
window_height = np.int(warped_img.shape[0] // nwindows)
# Identify the x and y positions of all nonzero pixels in the image
nonzero = warped_img.nonzero()
nonzeroy = np.array(nonzero[0])
nonzerox = np.array(nonzero[1])
# Current positions to be updated later for each window in nwindows
leftx_current = leftx_base
rightx_current = rightx_base
# Create empty lists to receive left and right lane pixel indices
left_lane_inds = []
right_lane_inds = []
# Step through the windows one by one
for window in range(nwindows):
# Identify window boundaries in x and y (and right and left)
win_y_low = warped_img.shape[0] - (window + 1) * window_height
win_y_high = warped_img.shape[0] - window * window_height
### Find the four below boundaries of the window ###
win_xleft_low = leftx_current - margin
win_xleft_high = leftx_current + margin
win_xright_low = rightx_current - margin
win_xright_high = rightx_current + margin
# Draw the windows on the visualization image
cv2.rectangle(out_img, (win_xleft_low, win_y_low), (win_xleft_high, win_y_high), (0, 255, 0), 2)
cv2.rectangle(out_img, (win_xright_low, win_y_low), (win_xright_high, win_y_high), (0, 255, 0), 2)
### Identify the nonzero pixels in x and y within the window ###
good_left_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) & \
(nonzerox >= win_xleft_low) & (nonzerox < win_xleft_high)).nonzero()[0]
good_right_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) & \
(nonzerox >= win_xright_low) & (nonzerox < win_xright_high)).nonzero()[0]
# Append these indices to the lists
left_lane_inds.append(good_left_inds)
right_lane_inds.append(good_right_inds)
### If you found > minpix pixels, recenter next window ###
### (`right` or `leftx_current`) on their mean position ###
if len(good_left_inds) > minpix:
leftx_current = np.int(np.mean(nonzerox[good_left_inds]))
if len(good_right_inds) > minpix:
rightx_current = np.int(np.mean(nonzerox[good_right_inds]))
# Concatenate the arrays of indices (previously was a list of lists of pixels)
try:
left_lane_inds = np.concatenate(left_lane_inds)
right_lane_inds = np.concatenate(right_lane_inds)
except ValueError:
# Avoids an error if the above is not implemented fully
pass
# Extract left and right line pixel positions
leftx = nonzerox[left_lane_inds]
lefty = nonzeroy[left_lane_inds]
rightx = nonzerox[right_lane_inds]
righty = nonzeroy[right_lane_inds]
return leftx, lefty, rightx, righty, left_lane_inds, right_lane_inds, out_img
########################################################################################################################
# Fit a polynomial given the window count, margin, and minimum number of pixels per window
def fit_polynomial(binary_warped, nwindows=9, margin=100, minpix=50, show=True):
# Find our lane pixels first
leftx, lefty, rightx, righty, left_lane_inds, right_lane_inds, out_img \
= find_lines(binary_warped, nwindows=nwindows, margin=margin, minpix=minpix)
left_fit = np.polyfit(lefty, leftx, 2)
right_fit = np.polyfit(righty, rightx, 2)
# Generate x and y values for plotting
ploty = np.linspace(0, binary_warped.shape[0] - 1, binary_warped.shape[0])
try:
left_fitx = left_fit[0] * ploty ** 2 + left_fit[1] * ploty + left_fit[2]
right_fitx = right_fit[0] * ploty ** 2 + right_fit[1] * ploty + right_fit[2]
except TypeError:
        # fall back to a default curve when the left/right fits could not be computed
        print("Unable to fit a polynomial; using the default values")
left_fitx = 1 * ploty ** 2 + 1 * ploty
right_fitx = 1 * ploty ** 2 + 1 * ploty
# Colors in the left and right lane regions
out_img[lefty, leftx] = [255, 0, 0] # Filling RED Color on the lane region
out_img[righty, rightx] = [0, 0, 255] # Filling the BLUE Color in the Lane Region
# Plots the left and right polynomials on the lane lines
if show == True:
plt.plot(left_fitx, ploty, color='yellow')
plt.plot(right_fitx, ploty, color='yellow')
    # Return the values required for the polynomial fit
return left_fit, right_fit, left_fitx, right_fitx, left_lane_inds, right_lane_inds, out_img
########################################################################################################################
# Search for lane pixels within a margin around the previously fitted polynomials
def search_around_poly(binary_warped, left_fit, right_fit, ymtr_per_pixel, xmtr_per_pixel, margin=80):
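    """Search for lane pixels within +/- margin of the previous fits and refit second-order polynomials in pixel and real-world (metre) units."""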
# Getting the activated pixel
nonzero = binary_warped.nonzero() # Non Zero tuples indices for X and y
nonzeroy = np.array(nonzero[0]) # Non Zero Indices for X
nonzerox = np.array(nonzero[1]) # Non Zero Indices for Y
    left_lane_inds = ((nonzerox > (left_fit[0] * (nonzeroy ** 2) + left_fit[1] * nonzeroy + left_fit[2] - margin)) &
                      (nonzerox < (left_fit[0] * (nonzeroy ** 2) + left_fit[1] * nonzeroy + left_fit[2] + margin)))
    right_lane_inds = ((nonzerox > (right_fit[0] * (nonzeroy ** 2) + right_fit[1] * nonzeroy + right_fit[2] - margin)) &
                       (nonzerox < (right_fit[0] * (nonzeroy ** 2) + right_fit[1] * nonzeroy + right_fit[2] + margin)))
    # Extract the left and right lane pixel positions
leftx = nonzerox[left_lane_inds]
lefty = nonzeroy[left_lane_inds]
rightx = nonzerox[right_lane_inds]
righty = nonzeroy[right_lane_inds]
# Fitting a 2nd order polynomial for each pixel
left_fit = np.polyfit(lefty, leftx, 2)
right_fit = np.polyfit(righty, rightx, 2)
    # Fit a second order polynomial to the points in real-world (metre) coordinates
left_lane_indices = np.polyfit(lefty * ymtr_per_pixel, leftx * xmtr_per_pixel, 2)
right_lane_indices = np.polyfit(righty * ymtr_per_pixel, rightx * xmtr_per_pixel, 2)
return left_fit, right_fit, left_lane_indices, right_lane_indices
# -----------------------------------------------------------------------------------------------------------------------
left_fit, right_fit, left_fitx, right_fitx, left_lane_indices, right_lane_indices, out_img = fit_polynomial(warped,
nwindows=11)
plt.imshow(out_img)
plt.savefig(OUTPUT_DIR + "output_fit_polynomial_" + str(time.time()) + ".jpg")
########################################################################################################################
# Measuring Radius of the curvature
def radius_curvature(img, left_fit, right_fit, xmtr_per_pixel, ymtr_per_pixel):
# Generating the data to represent lane-line pixels
ploty = np.linspace(0, img.shape[0] - 1, img.shape[0])
left_fitx = left_fit[0] * ploty ** 2 + left_fit[1] * ploty + left_fit[2]
right_fitx = right_fit[0] * ploty ** 2 + right_fit[1] * ploty + right_fit[2]
y_eval = np.max(ploty)
left_fit_cr = np.polyfit(ploty * ymtr_per_pixel, left_fitx * xmtr_per_pixel, 2)
right_fit_cr = np.polyfit(ploty * ymtr_per_pixel, right_fitx * xmtr_per_pixel, 2)
# getting left and right curvature
    left_curverad = ((1 + (2 * left_fit_cr[0] * y_eval * ymtr_per_pixel + left_fit_cr[1]) ** 2) ** 1.5) / np.absolute(2 * left_fit_cr[0])
    right_curverad = ((1 + (2 * right_fit_cr[0] * y_eval * ymtr_per_pixel + right_fit_cr[1]) ** 2) ** 1.5) / np.absolute(2 * right_fit_cr[0])
# returning the left and right lane radius
return (left_curverad, right_curverad)
########################################################################################################################
# Finding distance from Center
def dist_from_center(img, left_fit, right_fit, xmtr_per_pixel, ymtr_per_pixel):
ymax = img.shape[0] * ymtr_per_pixel
center = img.shape[1] / 2
lineLeft = left_fit[0] * ymax ** 2 + left_fit[1] * ymax + left_fit[2]
lineRight = right_fit[0] * ymax ** 2 + right_fit[1] * ymax + right_fit[2]
# Finding the Mid Value and distance from the center
mid = lineLeft + (lineRight - lineLeft) / 2
dist = (mid - center) * xmtr_per_pixel
if dist >= 0.:
message = 'Vehicle location: {:.2f} m right'.format(dist)
else:
message = 'Vehicle location: {:.2f} m left'.format(abs(dist))
return message
########################################################################################################################
# Drawing lines in the region
def draw_lines(img, left_fit, right_fit, minv):
ploty = np.linspace(0, img.shape[0] - 1, img.shape[0])
color_warp = np.zeros_like(img).astype(np.uint8)
# Find left and right points.
left_fitx = left_fit[0] * ploty ** 2 + left_fit[1] * ploty + left_fit[2]
right_fitx = right_fit[0] * ploty ** 2 + right_fit[1] * ploty + right_fit[2]
# Recast the x and y points into usable format for cv2.fillPoly()
pts_left = np.array([np.transpose(np.vstack([left_fitx, ploty]))])
pts_right = np.array([np.flipud(np.transpose(np.vstack([right_fitx, ploty])))])
pts = np.hstack((pts_left, pts_right))
# Draw the Green value lane onto the warped blank image
cv2.fillPoly(color_warp, np.int_([pts]), (0, 255, 0))
# Warp the blank back to original image space using inverse perspective matrix
unwarp_img = cv2.warpPerspective(color_warp, minv, (img.shape[1], img.shape[0]),
flags=cv2.WARP_FILL_OUTLIERS + cv2.INTER_CUBIC)
return cv2.addWeighted(img, 1, unwarp_img, 0.3, 0)
########################################################################################################################
# Displaying the curvature in the output image
def show_curvatures(img, leftx, rightx, xmtr_per_pixel, ymtr_per_pixel):
(left_curvature, right_curvature) = radius_curvature(img, leftx, rightx, xmtr_per_pixel, ymtr_per_pixel)
dist_txt = dist_from_center(img, leftx, rightx, xmtr_per_pixel, ymtr_per_pixel)
out_img = np.copy(img)
avg_rad = round(np.mean([left_curvature, right_curvature]), 0)
cv2.putText(out_img, 'Average Lane Curvature: {:.2f} m'.format(avg_rad),
(50, 50), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2)
cv2.putText(out_img, dist_txt, (50, 100), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2)
return out_img
for image in TEST_IMAGES:
img = undistort(image, objpoints, imgpoints)
combined_binary = get_binary_image(img, kernel_size=kernel_size, sobel_thresh=mag_thresh, r_thresh=r_thresh,
s_thresh=s_thresh, b_thresh=b_thresh, g_thresh=g_thresh)
warped, warp_matrix, unwarp_matrix, out_img_orig, out_warped_img = transform_image(combined_binary)
xmtr_per_pixel = 3.7 / 800
ymtr_per_pixel = 30 / 720
left_fit, right_fit, left_fitx, right_fitx, left_lane_indices, right_lane_indices, out_img = fit_polynomial(warped,
nwindows=12,
show=False)
    lane_img = draw_lines(img, left_fit, right_fit, unwarp_matrix)
out_img = show_curvatures(lane_img, left_fit, right_fit, xmtr_per_pixel, ymtr_per_pixel)
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 8))
f.tight_layout()
ax1.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
ax1.set_title("Original:: " + image, fontsize=18)
ax2.imshow(cv2.cvtColor(out_img, cv2.COLOR_BGR2RGB))
ax2.set_title("Lane:: " + image, fontsize=18)
f.savefig(OUTPUT_DIR + "output_curvature_" + str(time.time()) + ".jpg")
########################################################################################################################################
# Defining a class and initializing and storing the values
class Video_Pipeline():
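    """Holds lane-fit state across video frames and smooths the fits by averaging a short history."""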
def __init__(self, max_counter):
self.current_fit_left = None
self.best_fit_left = None
self.history_left = [np.array([False])]
self.current_fit_right = None
self.best_fit_right = None
self.history_right = [np.array([False])]
self.counter = 0
        self.max_counter = max_counter
self.src = None
self.dst = None
def set_presp_indices(self, src, dst):
self.src = src
self.dst = dst
def reset(self):
self.current_fit_left = None
self.best_fit_left = None
self.history_left = [np.array([False])]
self.current_fit_right = None
self.best_fit_right = None
self.history_right = [np.array([False])]
self.counter = 0
def update_fit(self, left_fit, right_fit):
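        """Track the latest fits: reset after max_counter frames, otherwise append to the history, trim it and refresh the averaged best fits."""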
if self.counter > self.max_counter:
self.reset()
else:
self.current_fit_left = left_fit
self.current_fit_right = right_fit
self.history_left.append(left_fit)
self.history_right.append(right_fit)
self.history_left = self.history_left[-self.max_counter:] if len(
self.history_left) > self.max_counter else self.history_left
self.history_right = self.history_right[-self.max_counter:] if len(
self.history_right) > self.max_counter else self.history_right
self.best_fit_left = np.mean(self.history_left, axis=0)
self.best_fit_right = np.mean(self.history_right, axis=0)
def process_image(self, image):
img = undisort_images(image, objpoints, imgpoints)
combined_binary = get_binary_image(img, kernel_size=kernel_size, sobel_thresh=mag_thresh,
r_thresh=r_thresh, s_thresh=s_thresh, b_thresh=b_thresh, g_thresh=g_thresh)
if self.src is not None or self.dst is not None:
warped, warp_matrix, unwarp_matrix, out_img_orig, out_warped_img = transform_image(combined_binary,
src=self.src,
dst=self.dst)
else:
warped, warp_matrix, unwarp_matrix, out_img_orig, out_warped_img = transform_image(combined_binary)
xmtr_per_pixel = 3.7 / 800
ymtr_per_pixel = 30 / 720
if self.best_fit_left is None and self.best_fit_right is None:
left_fit, right_fit, left_fitx, right_fitx, left_lane_indices, right_lane_indices, out_img = fit_polynomial(
warped, nwindows=15, show=False)
else:
left_fit, right_fit, left_lane_indices, right_lane_indices = search_around_poly(warped, self.best_fit_left,
self.best_fit_right,
xmtr_per_pixel,
ymtr_per_pixel)
self.counter += 1
lane_img = draw_lines(img, left_fit, right_fit, unwarp_matrix)
out_img = show_curvatures(lane_img, left_fit, right_fit, xmtr_per_pixel, ymtr_per_pixel)
self.update_fit(left_fit, right_fit)
return out_img
########################################################################################################################
######################### INPUT Files for the Video ################################
PRJCT_VIDEO = "project_video.mp4"
CHLNG_VIDEO = "challenge_video.mp4"
HARDC_VIDEO = "harder_challenge_video.mp4"
VIDEO_OPDIR = "output_video/"
########################################################################################################################
########################## Function to create Video output ###################################
def create_video_output(input_video_file, output_dir=VIDEO_OPDIR):
    lane_lines = Video_Pipeline(max_counter=11)  # instantiate the pipeline that smooths lane fits across frames
leftupper = (585, 460)
rightupper = (705, 460)
leftlower = (210, img.shape[0])
rightlower = (1080, img.shape[0])
# Defining the area
warped_leftupper = (250, 0)
warped_rightupper = (250, img.shape[0])
warped_leftlower = (1050, 0)
warped_rightlower = (1050, img.shape[0])
# Defining the src and dst values for prespective transformation
src = np.float32([leftupper, leftlower, rightupper, rightlower])
dst = np.float32([warped_leftupper, warped_rightupper, warped_leftlower, warped_rightlower])
# Getting the video input frame and running the pipeline and saving to output directory
lane_lines.set_presp_indices(src, dst)
image_clip = VideoFileClip(input_video_file)
white_clip = image_clip.fl_image(lane_lines.process_image)
white_clip.write_videofile(output_dir + input_video_file, audio=False)
########################################################################################################################
# print("Output Video Generated Successfully ! ", create_video_output(PRJCT_VIDEO))
def choose_input_video():
choice = input("Enter a Number to run the pipeline for the video! \n"
"1 Run pipeline on Project Video\n"
"2 Run pipeline on Challenge Video\n"
"3 Run pipeline on Hard Challenge Video\n"
"4 Exit \n")
if choice == "1":
print("Output Video Generated Successfully ! ", create_video_output(PRJCT_VIDEO))
elif choice == "2":
print("Output Video Generated Successfully ! ", create_video_output(CHLNG_VIDEO))
elif choice == "3":
print("Output Video Generated Successfully ! ", create_video_output(HARDC_VIDEO))
    elif choice == "4":
print("Exiting the Code", exit(0))
else:
print("Exiting the Code", exit(0))
choose_input_video()
########################################################################################################################
| 2.671875 | 3 |
TeamX/TeamXapp/migrations/0043_auto_20190712_1449.py | rootfinlay/SageTeamX | 0 | 12794381 | <filename>TeamX/TeamXapp/migrations/0043_auto_20190712_1449.py
# Generated by Django 2.2.3 on 2019-07-12 13:49
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('TeamXapp', '0042_auto_20190712_1444'),
]
operations = [
migrations.AlterModelOptions(
name='jobrolegroup',
options={'verbose_name': 'Job Role Group', 'verbose_name_plural': 'Job Role Groups'},
),
migrations.AlterField(
model_name='allmembers',
name='scrum_team_name',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='TeamXapp.ScrumTeam', verbose_name='Scrum team: '),
),
migrations.AlterField(
model_name='allmembers',
name='scrum_team_roles',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='TeamXapp.ScrumTeamRole', verbose_name='Scrum Team Roles: '),
),
]
| 1.523438 | 2 |
server/User/serialize.py | adamA113/servize | 0 | 12794382 | <gh_stars>0
from rest_framework import serializers
from User.models import User,ProviderUser
class ProviderUserSerializer(serializers.ModelSerializer):
providerName = serializers.CharField(source='providerId.provider.name', read_only=True)
# providerName = serializers.CharField(source='providerId.provider.id', read_only=True)
userName=serializers.CharField(source='userId.user.name', read_only=True)
class Meta:
model=ProviderUser
fields=('providerName','userName')
class UserSerializer(serializers.ModelSerializer):
# name = serializers.CharField(source="user.name",read_only=True)
email = serializers.CharField(source="user",read_only=True)
providers = ProviderUserSerializer(many=True, read_only=True)
name = serializers.CharField(source="user.name",read_only=True)
class Meta:
model=User
fields=('id','pk','image','role','user','email','name','providers','phone')
| 2.234375 | 2 |
nodeeditor/dev_Surface.py | madhusenthilvel/NodeEditor | 53 | 12794383 | # implemenation of the compute methods for category
import numpy as np
import random
import time
import os.path
from os import path
import matplotlib.pyplot as plt
import scipy.interpolate
from nodeeditor.say import *
import nodeeditor.store as store
import nodeeditor.pfwrap as pfwrap
print ("reloaded: "+ __file__)
from nodeeditor.cointools import *
def run_FreeCAD_FillEdge(self, produce=False, **kwargs):
    return  # +# must be parametrized
wire=FreeCAD.ActiveDocument.BePlane.Shape.Wires[0]
#_=Part.makeFilledFace(Part.__sortEdges__([App.ActiveDocument.Shape004.Shape.Edge2, ]))
_=Part.makeFilledFace(wire.Edges)
Part.show(_)
| 2.046875 | 2 |
plugins/python/container/test/testdata.py | proglang/servercodetest | 0 | 12794384 | <reponame>proglang/servercodetest
user = """
#import os
def sort(lst):
for i in range(len(lst)):
for j in range(i+1, len(lst)):
if lst[i]>lst[j]:
tmp = lst[j]
lst[j] = lst[i]
lst[i] = tmp
return lst
def test_t1():
assert sort([])==[]
assert sort([1])==[1]
assert sort([1,1])==[1,1]
assert sort([1,2,3])==[1,2,3]
assert sort([4,2,3])==[2,3,4]
assert sort([]) == 1
"""
compare = """
set_function("sort")
def blackbox(lst):
lst = sorted(lst)
return lst
from hypothesis import given
import hypothesis.strategies as st
def sort(lst):
return blackbox(lst)
@test(1)
@given(st.lists(st.integers()))
def test_random(fn, lst):
assert sort(lst)==fn(lst)
assert False
@test(1)
def test_1(fn):
lst = [1,52,3]
assert sort(lst)==fn(lst)
@check_args(5, "blubber1")
def test_2(lst, *args, **kwargs):
print(3)
assert len(lst)==0
""" | 3.125 | 3 |
venv/lib/python3.6/site-packages/ansible_collections/community/mysql/plugins/module_utils/user.py | usegalaxy-no/usegalaxy | 1 | 12794385 | <reponame>usegalaxy-no/usegalaxy
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
import string
import re
from ansible.module_utils.six import iteritems
from ansible_collections.community.mysql.plugins.module_utils.mysql import (
mysql_driver,
)
EXTRA_PRIVS = ['ALL', 'ALL PRIVILEGES', 'GRANT', 'REQUIRESSL']
# This list is kept for backwards compatibility after release 2.3.0,
# see https://github.com/ansible-collections/community.mysql/issues/232 for details
VALID_PRIVS = [
'CREATE', 'DROP', 'GRANT', 'GRANT OPTION',
'LOCK TABLES', 'REFERENCES', 'EVENT', 'ALTER',
'DELETE', 'INDEX', 'INSERT', 'SELECT', 'UPDATE',
'CREATE TEMPORARY TABLES', 'TRIGGER', 'CREATE VIEW',
'SHOW VIEW', 'ALTER ROUTINE', 'CREATE ROUTINE',
'EXECUTE', 'FILE', 'CREATE TABLESPACE', 'CREATE USER',
'PROCESS', 'PROXY', 'RELOAD', 'REPLICATION CLIENT',
'REPLICATION SLAVE', 'SHOW DATABASES', 'SHUTDOWN',
'SUPER', 'ALL', 'ALL PRIVILEGES', 'USAGE',
'REQUIRESSL', # Deprecated, to be removed in version 3.0.0
'CREATE ROLE', 'DROP ROLE', 'APPLICATION_PASSWORD_ADMIN',
'AUDIT_ADMIN', 'BACKUP_ADMIN', 'BINLOG_ADMIN',
'BINLOG_ENCRYPTION_ADMIN', 'CLONE_ADMIN', 'CONNECTION_ADMIN',
'ENCRYPTION_KEY_ADMIN', 'FIREWALL_ADMIN', 'FIREWALL_USER',
'GROUP_REPLICATION_ADMIN', 'INNODB_REDO_LOG_ARCHIVE',
'NDB_STORED_USER', 'PERSIST_RO_VARIABLES_ADMIN',
'REPLICATION_APPLIER', 'REPLICATION_SLAVE_ADMIN',
'RESOURCE_GROUP_ADMIN', 'RESOURCE_GROUP_USER',
'ROLE_ADMIN', 'SESSION_VARIABLES_ADMIN', 'SET_USER_ID',
'SYSTEM_USER', 'SYSTEM_VARIABLES_ADMIN', 'SYSTEM_USER',
'TABLE_ENCRYPTION_ADMIN', 'VERSION_TOKEN_ADMIN',
'XA_RECOVER_ADMIN', 'LOAD FROM S3', 'SELECT INTO S3',
'INVOKE LAMBDA',
'ALTER ROUTINE',
'BINLOG ADMIN',
'BINLOG MONITOR',
'BINLOG REPLAY',
'CONNECTION ADMIN',
'READ_ONLY ADMIN',
'REPLICATION MASTER ADMIN',
'REPLICATION SLAVE ADMIN',
'SET USER',
'SHOW_ROUTINE',
'SLAVE MONITOR',
'REPLICA MONITOR',
]
class InvalidPrivsError(Exception):
pass
def get_mode(cursor):
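    """Return 'ANSI' if the server's global sql_mode contains ANSI, otherwise 'NOTANSI'."""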
cursor.execute('SELECT @@GLOBAL.sql_mode')
result = cursor.fetchone()
mode_str = result[0]
if 'ANSI' in mode_str:
mode = 'ANSI'
else:
mode = 'NOTANSI'
return mode
def user_exists(cursor, user, host, host_all):
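    """Return True when at least one row in mysql.user matches the user (any host when host_all is set)."""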
if host_all:
cursor.execute("SELECT count(*) FROM mysql.user WHERE user = %s", (user,))
else:
cursor.execute("SELECT count(*) FROM mysql.user WHERE user = %s AND host = %s", (user, host))
count = cursor.fetchone()
return count[0] > 0
def sanitize_requires(tls_requires):
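    """Normalise tls_requires: uppercase the keys, keep the dict when CIPHER/ISSUER/SUBJECT are given, otherwise collapse to 'X509' or 'SSL' (None if empty)."""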
sanitized_requires = {}
if tls_requires:
for key in tls_requires.keys():
sanitized_requires[key.upper()] = tls_requires[key]
if any(key in ["CIPHER", "ISSUER", "SUBJECT"] for key in sanitized_requires.keys()):
sanitized_requires.pop("SSL", None)
sanitized_requires.pop("X509", None)
return sanitized_requires
if "X509" in sanitized_requires.keys():
sanitized_requires = "X509"
else:
sanitized_requires = "SSL"
return sanitized_requires
return None
def mogrify_requires(query, params, tls_requires):
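    """When tls_requires is set, append a REQUIRE clause to the query (extending params for dict-style requirements)."""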
if tls_requires:
if isinstance(tls_requires, dict):
k, v = zip(*tls_requires.items())
requires_query = " AND ".join(("%s %%s" % key for key in k))
params += v
else:
requires_query = tls_requires
query = " REQUIRE ".join((query, requires_query))
return query, params
def do_not_mogrify_requires(query, params, tls_requires):
return query, params
def get_tls_requires(cursor, user, host):
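    """Parse SHOW CREATE USER / SHOW GRANTS output and return the user's REQUIRE options: a dict, 'SSL', 'X509' or None."""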
if user:
if not impl.use_old_user_mgmt(cursor):
query = "SHOW CREATE USER '%s'@'%s'" % (user, host)
else:
query = "SHOW GRANTS for '%s'@'%s'" % (user, host)
cursor.execute(query)
require_list = [tuple[0] for tuple in filter(lambda x: "REQUIRE" in x[0], cursor.fetchall())]
require_line = require_list[0] if require_list else ""
pattern = r"(?<=\bREQUIRE\b)(.*?)(?=(?:\bPASSWORD\b|$))"
requires_match = re.search(pattern, require_line)
requires = requires_match.group().strip() if requires_match else ""
if any((requires.startswith(req) for req in ('SSL', 'X509', 'NONE'))):
requires = requires.split()[0]
if requires == 'NONE':
requires = None
else:
import shlex
items = iter(shlex.split(requires))
requires = dict(zip(items, items))
return requires or None
def get_valid_privs(cursor):
cursor.execute("SHOW PRIVILEGES")
show_privs = [priv[0].upper() for priv in cursor.fetchall()]
# See the comment above VALID_PRIVS declaration
all_privs = show_privs + EXTRA_PRIVS + VALID_PRIVS
return frozenset(all_privs)
def get_grants(cursor, user, host):
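    """Return the list of global ('ON *.*') privileges currently granted to user@host."""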
cursor.execute("SHOW GRANTS FOR %s@%s", (user, host))
grants_line = list(filter(lambda x: "ON *.*" in x[0], cursor.fetchall()))[0]
pattern = r"(?<=\bGRANT\b)(.*?)(?=(?:\bON\b))"
grants = re.search(pattern, grants_line[0]).group().strip()
return grants.split(", ")
def user_add(cursor, user, host, host_all, password, encrypted,
plugin, plugin_hash_string, plugin_auth_string, new_priv,
tls_requires, check_mode):
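    """Create user@host with the requested password/auth plugin, apply new_priv grants and TLS requirements; returns False when host_all is set, True otherwise."""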
# we cannot create users without a proper hostname
if host_all:
return False
if check_mode:
return True
# Determine what user management method server uses
old_user_mgmt = impl.use_old_user_mgmt(cursor)
mogrify = do_not_mogrify_requires if old_user_mgmt else mogrify_requires
if password and encrypted:
if impl.supports_identified_by_password(cursor):
query_with_args = "CREATE USER %s@%s IDENTIFIED BY PASSWORD %s", (user, host, password)
else:
query_with_args = "CREATE USER %s@%s IDENTIFIED WITH mysql_native_password AS %s", (user, host, password)
elif password and not encrypted:
if old_user_mgmt:
query_with_args = "CREATE USER %s@%s IDENTIFIED BY %s", (user, host, password)
else:
cursor.execute("SELECT CONCAT('*', UCASE(SHA1(UNHEX(SHA1(%s)))))", (password,))
encrypted_password = cursor.fetchone()[0]
query_with_args = "CREATE USER %s@%s IDENTIFIED WITH mysql_native_password AS %s", (user, host, encrypted_password)
elif plugin and plugin_hash_string:
query_with_args = "CREATE USER %s@%s IDENTIFIED WITH %s AS %s", (user, host, plugin, plugin_hash_string)
elif plugin and plugin_auth_string:
query_with_args = "CREATE USER %s@%s IDENTIFIED WITH %s BY %s", (user, host, plugin, plugin_auth_string)
elif plugin:
query_with_args = "CREATE USER %s@%s IDENTIFIED WITH %s", (user, host, plugin)
else:
query_with_args = "CREATE USER %s@%s", (user, host)
query_with_args_and_tls_requires = query_with_args + (tls_requires,)
cursor.execute(*mogrify(*query_with_args_and_tls_requires))
if new_priv is not None:
for db_table, priv in iteritems(new_priv):
privileges_grant(cursor, user, host, db_table, priv, tls_requires)
if tls_requires is not None:
privileges_grant(cursor, user, host, "*.*", get_grants(cursor, user, host), tls_requires)
return True
def is_hash(password):
ishash = False
if len(password) == 41 and password[0] == '*':
if frozenset(password[1:]).issubset(string.hexdigits):
ishash = True
return ishash
def user_mod(cursor, user, host, host_all, password, encrypted,
plugin, plugin_hash_string, plugin_auth_string, new_priv,
append_privs, tls_requires, module, role=False, maria_role=False):
changed = False
msg = "User unchanged"
grant_option = False
# Determine what user management method server uses
old_user_mgmt = impl.use_old_user_mgmt(cursor)
if host_all and not role:
hostnames = user_get_hostnames(cursor, user)
else:
hostnames = [host]
for host in hostnames:
# Handle clear text and hashed passwords.
if not role:
if bool(password):
# Get a list of valid columns in mysql.user table to check if Password and/or authentication_string exist
cursor.execute("""
SELECT COLUMN_NAME FROM information_schema.COLUMNS
WHERE TABLE_SCHEMA = 'mysql' AND TABLE_NAME = 'user' AND COLUMN_NAME IN ('Password', 'authentication_string')
ORDER BY COLUMN_NAME DESC LIMIT 1
""")
colA = cursor.fetchone()
cursor.execute("""
SELECT COLUMN_NAME FROM information_schema.COLUMNS
WHERE TABLE_SCHEMA = 'mysql' AND TABLE_NAME = 'user' AND COLUMN_NAME IN ('Password', 'authentication_string')
ORDER BY COLUMN_NAME ASC LIMIT 1
""")
colB = cursor.fetchone()
# Select hash from either Password or authentication_string, depending which one exists and/or is filled
cursor.execute("""
SELECT COALESCE(
CASE WHEN %s = '' THEN NULL ELSE %s END,
CASE WHEN %s = '' THEN NULL ELSE %s END
)
FROM mysql.user WHERE user = %%s AND host = %%s
""" % (colA[0], colA[0], colB[0], colB[0]), (user, host))
current_pass_hash = cursor.fetchone()[0]
if isinstance(current_pass_hash, bytes):
current_pass_hash = current_pass_hash.decode('ascii')
if encrypted:
encrypted_password = password
if not is_hash(encrypted_password):
module.fail_json(msg="encrypted was specified however it does not appear to be a valid hash expecting: *SHA1(SHA1(your_password))")
else:
if old_user_mgmt:
cursor.execute("SELECT PASSWORD(%s)", (password,))
else:
cursor.execute("SELECT CONCAT('*', UCASE(SHA1(UNHEX(SHA1(%s)))))", (password,))
encrypted_password = cursor.fetchone()[0]
if current_pass_hash != encrypted_password:
msg = "Password updated"
if module.check_mode:
return (True, msg)
if old_user_mgmt:
cursor.execute("SET PASSWORD FOR %s@%s = %s", (user, host, encrypted_password))
msg = "Password updated (old style)"
else:
try:
cursor.execute("ALTER USER %s@%s IDENTIFIED WITH mysql_native_password AS %s", (user, host, encrypted_password))
msg = "Password updated (new style)"
except (mysql_driver.Error) as e:
# https://stackoverflow.com/questions/51600000/authentication-string-of-root-user-on-mysql
# Replacing empty root password with new authentication mechanisms fails with error 1396
if e.args[0] == 1396:
cursor.execute(
"UPDATE mysql.user SET plugin = %s, authentication_string = %s, Password = '' WHERE User = %s AND Host = %s",
('mysql_native_password', encrypted_password, user, host)
)
cursor.execute("FLUSH PRIVILEGES")
msg = "Password forced update"
else:
raise e
changed = True
# Handle plugin authentication
if plugin and not role:
cursor.execute("SELECT plugin, authentication_string FROM mysql.user "
"WHERE user = %s AND host = %s", (user, host))
current_plugin = cursor.fetchone()
update = False
if current_plugin[0] != plugin:
update = True
if plugin_hash_string and current_plugin[1] != plugin_hash_string:
update = True
if plugin_auth_string and current_plugin[1] != plugin_auth_string:
# this case can cause more updates than expected,
# as plugin can hash auth_string in any way it wants
# and there's no way to figure it out for
# a check, so I prefer to update more often than never
update = True
if update:
if plugin_hash_string:
query_with_args = "ALTER USER %s@%s IDENTIFIED WITH %s AS %s", (user, host, plugin, plugin_hash_string)
elif plugin_auth_string:
query_with_args = "ALTER USER %s@%s IDENTIFIED WITH %s BY %s", (user, host, plugin, plugin_auth_string)
else:
query_with_args = "ALTER USER %s@%s IDENTIFIED WITH %s", (user, host, plugin)
cursor.execute(*query_with_args)
changed = True
# Handle privileges
if new_priv is not None:
curr_priv = privileges_get(cursor, user, host, maria_role)
# If the user has privileges on a db.table that doesn't appear at all in
# the new specification, then revoke all privileges on it.
for db_table, priv in iteritems(curr_priv):
# If the user has the GRANT OPTION on a db.table, revoke it first.
if "GRANT" in priv:
grant_option = True
if db_table not in new_priv:
if user != "root" and "PROXY" not in priv and not append_privs:
msg = "Privileges updated"
if module.check_mode:
return (True, msg)
privileges_revoke(cursor, user, host, db_table, priv, grant_option, maria_role)
changed = True
# If the user doesn't currently have any privileges on a db.table, then
# we can perform a straight grant operation.
for db_table, priv in iteritems(new_priv):
if db_table not in curr_priv:
msg = "New privileges granted"
if module.check_mode:
return (True, msg)
privileges_grant(cursor, user, host, db_table, priv, tls_requires, maria_role)
changed = True
# If the db.table specification exists in both the user's current privileges
# and in the new privileges, then we need to see if there's a difference.
db_table_intersect = set(new_priv.keys()) & set(curr_priv.keys())
for db_table in db_table_intersect:
# If appending privileges, only the set difference between new privileges and current privileges matter.
# The symmetric difference isn't relevant for append because existing privileges will not be revoked.
if append_privs:
priv_diff = set(new_priv[db_table]) - set(curr_priv[db_table])
else:
priv_diff = set(new_priv[db_table]) ^ set(curr_priv[db_table])
if len(priv_diff) > 0:
msg = "Privileges updated"
if module.check_mode:
return (True, msg)
if not append_privs:
privileges_revoke(cursor, user, host, db_table, curr_priv[db_table], grant_option, maria_role)
privileges_grant(cursor, user, host, db_table, new_priv[db_table], tls_requires, maria_role)
changed = True
if role:
continue
# Handle TLS requirements
current_requires = get_tls_requires(cursor, user, host)
if current_requires != tls_requires:
msg = "TLS requires updated"
if module.check_mode:
return (True, msg)
if not old_user_mgmt:
pre_query = "ALTER USER"
else:
pre_query = "GRANT %s ON *.* TO" % ",".join(get_grants(cursor, user, host))
if tls_requires is not None:
query = " ".join((pre_query, "%s@%s"))
query_with_args = mogrify_requires(query, (user, host), tls_requires)
else:
query = " ".join((pre_query, "%s@%s REQUIRE NONE"))
query_with_args = query, (user, host)
cursor.execute(*query_with_args)
changed = True
return (changed, msg)
def user_delete(cursor, user, host, host_all, check_mode):
if check_mode:
return True
if host_all:
hostnames = user_get_hostnames(cursor, user)
else:
hostnames = [host]
for hostname in hostnames:
cursor.execute("DROP USER %s@%s", (user, hostname))
return True
def user_get_hostnames(cursor, user):
cursor.execute("SELECT Host FROM mysql.user WHERE user = %s", (user,))
hostnames_raw = cursor.fetchall()
hostnames = []
for hostname_raw in hostnames_raw:
hostnames.append(hostname_raw[0])
return hostnames
def privileges_get(cursor, user, host, maria_role=False):
""" MySQL doesn't have a better method of getting privileges aside from the
SHOW GRANTS query syntax, which requires us to then parse the returned string.
Here's an example of the string that is returned from MySQL:
GRANT USAGE ON *.* TO 'user'@'localhost' IDENTIFIED BY 'pass';
This function makes the query and returns a dictionary containing the results.
The dictionary format is the same as that returned by privileges_unpack() below.
"""
output = {}
if not maria_role:
cursor.execute("SHOW GRANTS FOR %s@%s", (user, host))
else:
cursor.execute("SHOW GRANTS FOR %s", (user))
grants = cursor.fetchall()
def pick(x):
if x == 'ALL PRIVILEGES':
return 'ALL'
else:
return x
for grant in grants:
if not maria_role:
res = re.match("""GRANT (.+) ON (.+) TO (['`"]).*\\3@(['`"]).*\\4( IDENTIFIED BY PASSWORD (['`"]).+\\6)? ?(.*)""", grant[0])
else:
res = re.match("""GRANT (.+) ON (.+) TO (['`"]).*\\3""", grant[0])
if res is None:
raise InvalidPrivsError('unable to parse the MySQL grant string: %s' % grant[0])
privileges = res.group(1).split(",")
privileges = [pick(x.strip()) for x in privileges]
# Handle cases when there's privs like GRANT SELECT (colA, ...) in privs.
        # At this point, the privileges list can look like
        # ['SELECT (`A`', '`B`)', 'INSERT'], which is incorrect (the SELECT entry has been split).
# Columns should also be sorted to compare it with desired privileges later.
# Determine if there's a case similar to the above:
privileges = normalize_col_grants(privileges)
if not maria_role:
if "WITH GRANT OPTION" in res.group(7):
privileges.append('GRANT')
db = res.group(2)
output.setdefault(db, []).extend(privileges)
return output
def normalize_col_grants(privileges):
"""Fix and sort grants on columns in privileges list
    Make ['SELECT (A, B)', 'INSERT (A, B)', 'DELETE']
from ['SELECT (A', 'B)', 'INSERT (B', 'A)', 'DELETE'].
See unit tests in tests/unit/plugins/modules/test_mysql_user.py
"""
for grant in ('SELECT', 'UPDATE', 'INSERT', 'REFERENCES'):
start, end = has_grant_on_col(privileges, grant)
# If not, either start and end will be None
if start is not None:
privileges = handle_grant_on_col(privileges, start, end)
return privileges
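# Illustration (hypothetical input): normalize_col_grants(['SELECT (A', 'B)', 'INSERT'])
# returns ['SELECT (A, B)', 'INSERT'].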
def has_grant_on_col(privileges, grant):
"""Check if there is a statement like SELECT (colA, colB)
in the privilege list.
Return (start index, end index).
"""
# Determine elements of privileges where
# columns are listed
start = None
end = None
for n, priv in enumerate(privileges):
if '%s (' % grant in priv:
# We found the start element
start = n
if start is not None and ')' in priv:
# We found the end element
end = n
break
    if start is not None and end is not None:
        # If the privileges list consists of, for example,
        # ['SELECT (A', 'B)', 'INSERT'], return the indexes of the related elements
        return start, end
    else:
        # The grant-on-columns expression was not found (or no closing
        # parenthesis was found), so there is nothing to handle
        return None, None
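# Illustration (hypothetical input):
#   has_grant_on_col(['SELECT (colA', 'colB)', 'INSERT'], 'SELECT')  ->  (0, 1)
#   has_grant_on_col(['INSERT', 'DELETE'], 'SELECT')                 ->  (None, None)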
def handle_grant_on_col(privileges, start, end):
"""Handle cases when the privs like SELECT (colA, ...) is in the privileges list."""
    # When the privileges list looks like ['SELECT (colA,', 'colB)']
    # (notice that the statement has been split across elements)
if start != end:
output = list(privileges[:start])
select_on_col = ', '.join(privileges[start:end + 1])
select_on_col = sort_column_order(select_on_col)
output.append(select_on_col)
output.extend(privileges[end + 1:])
    # When it already looks as it should, e.g. ['SELECT (colA, colB)'],
    # we still need to make sure the columns are sorted
else:
output = list(privileges)
output[start] = sort_column_order(output[start])
return output
def sort_column_order(statement):
"""Sort column order in grants like SELECT (colA, colB, ...).
MySQL changes columns order like below:
---------------------------------------
mysql> GRANT SELECT (testColA, testColB), INSERT ON `testDb`.`testTable` TO 'testUser'@'localhost';
Query OK, 0 rows affected (0.04 sec)
mysql> flush privileges;
Query OK, 0 rows affected (0.00 sec)
mysql> SHOW GRANTS FOR testUser@localhost;
+---------------------------------------------------------------------------------------------+
| Grants for testUser@localhost |
+---------------------------------------------------------------------------------------------+
| GRANT USAGE ON *.* TO 'testUser'@'localhost' |
| GRANT SELECT (testColB, testColA), INSERT ON `testDb`.`testTable` TO 'testUser'@'localhost' |
+---------------------------------------------------------------------------------------------+
We should sort columns in our statement, otherwise the module always will return
that the state has changed.
"""
# 1. Extract stuff inside ()
# 2. Split
# 3. Sort
# 4. Put between () and return
# "SELECT/UPDATE/.. (colA, colB) => "colA, colB"
tmp = statement.split('(')
priv_name = tmp[0]
columns = tmp[1].rstrip(')')
# "colA, colB" => ["colA", "colB"]
columns = columns.split(',')
for i, col in enumerate(columns):
col = col.strip()
columns[i] = col.strip('`')
columns.sort()
return '%s(%s)' % (priv_name, ', '.join(columns))
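# Illustration (hypothetical input):
#   sort_column_order("SELECT (colB, colA)")  ->  "SELECT (colA, colB)"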
def privileges_unpack(priv, mode, valid_privs):
""" Take a privileges string, typically passed as a parameter, and unserialize
it into a dictionary, the same format as privileges_get() above. We have this
custom format to avoid using YAML/JSON strings inside YAML playbooks. Example
of a privileges string:
mydb.*:INSERT,UPDATE/anotherdb.*:SELECT/yetanother.*:ALL
The privilege USAGE stands for no privileges, so we add that in on *.* if it's
not specified in the string, as MySQL will always provide this by default.
"""
if mode == 'ANSI':
quote = '"'
else:
quote = '`'
output = {}
privs = []
for item in priv.strip().split('/'):
pieces = item.strip().rsplit(':', 1)
dbpriv = pieces[0].rsplit(".", 1)
# Check for FUNCTION or PROCEDURE object types
parts = dbpriv[0].split(" ", 1)
object_type = ''
if len(parts) > 1 and (parts[0] == 'FUNCTION' or parts[0] == 'PROCEDURE'):
object_type = parts[0] + ' '
dbpriv[0] = parts[1]
# Do not escape if privilege is for database or table, i.e.
# neither quote *. nor .*
for i, side in enumerate(dbpriv):
if side.strip('`') != '*':
dbpriv[i] = '%s%s%s' % (quote, side.strip('`'), quote)
pieces[0] = object_type + '.'.join(dbpriv)
if '(' in pieces[1]:
output[pieces[0]] = re.split(r',\s*(?=[^)]*(?:\(|$))', pieces[1].upper())
for i in output[pieces[0]]:
privs.append(re.sub(r'\s*\(.*\)', '', i))
else:
output[pieces[0]] = pieces[1].upper().split(',')
privs = output[pieces[0]]
# Handle cases when there's privs like GRANT SELECT (colA, ...) in privs.
output[pieces[0]] = normalize_col_grants(output[pieces[0]])
new_privs = frozenset(privs)
if not new_privs.issubset(valid_privs):
raise InvalidPrivsError('Invalid privileges specified: %s' % new_privs.difference(valid_privs))
if '*.*' not in output:
output['*.*'] = ['USAGE']
return output
def privileges_revoke(cursor, user, host, db_table, priv, grant_option, maria_role=False):
# Escape '%' since mysql db.execute() uses a format string
db_table = db_table.replace('%', '%%')
if grant_option:
query = ["REVOKE GRANT OPTION ON %s" % db_table]
if not maria_role:
query.append("FROM %s@%s")
else:
query.append("FROM %s")
query = ' '.join(query)
cursor.execute(query, (user, host))
priv_string = ",".join([p for p in priv if p not in ('GRANT', )])
query = ["REVOKE %s ON %s" % (priv_string, db_table)]
if not maria_role:
query.append("FROM %s@%s")
params = (user, host)
else:
query.append("FROM %s")
        params = (user,)
query = ' '.join(query)
cursor.execute(query, params)
def privileges_grant(cursor, user, host, db_table, priv, tls_requires, maria_role=False):
# Escape '%' since mysql db.execute uses a format string and the
# specification of db and table often use a % (SQL wildcard)
db_table = db_table.replace('%', '%%')
priv_string = ",".join([p for p in priv if p not in ('GRANT', )])
query = ["GRANT %s ON %s" % (priv_string, db_table)]
if not maria_role:
query.append("TO %s@%s")
params = (user, host)
else:
query.append("TO %s")
        params = (user,)
if tls_requires and impl.use_old_user_mgmt(cursor):
query, params = mogrify_requires(" ".join(query), params, tls_requires)
query = [query]
if 'GRANT' in priv:
query.append("WITH GRANT OPTION")
query = ' '.join(query)
cursor.execute(query, params)
def convert_priv_dict_to_str(priv):
"""Converts privs dictionary to string of certain format.
Args:
priv (dict): Dict of privileges that needs to be converted to string.
Returns:
priv (str): String representation of input argument.
"""
priv_list = ['%s:%s' % (key, val) for key, val in iteritems(priv)]
return '/'.join(priv_list)
def handle_requiressl_in_priv_string(module, priv, tls_requires):
module.deprecate('The "REQUIRESSL" privilege is deprecated, use the "tls_requires" option instead.',
version='3.0.0', collection_name='community.mysql')
priv_groups = re.search(r"(.*?)(\*\.\*:)([^/]*)(.*)", priv)
if priv_groups.group(3) == "REQUIRESSL":
priv = priv_groups.group(1) + priv_groups.group(4) or None
else:
inner_priv_groups = re.search(r"(.*?),?REQUIRESSL,?(.*)", priv_groups.group(3))
priv = '{0}{1}{2}{3}'.format(
priv_groups.group(1),
priv_groups.group(2),
','.join(filter(None, (inner_priv_groups.group(1), inner_priv_groups.group(2)))),
priv_groups.group(4)
)
if not tls_requires:
tls_requires = "SSL"
else:
module.warn('Ignoring "REQUIRESSL" privilege as "tls_requires" is defined and it takes precedence.')
return priv, tls_requires
# Alter user is supported since MySQL 5.6 and MariaDB 10.2.0
def server_supports_alter_user(cursor):
"""Check if the server supports ALTER USER statement or doesn't.
Args:
cursor (cursor): DB driver cursor object.
Returns: True if supports, False otherwise.
"""
cursor.execute("SELECT VERSION()")
version_str = cursor.fetchone()[0]
version = version_str.split('.')
if 'mariadb' in version_str.lower():
# MariaDB 10.2 and later
if int(version[0]) * 1000 + int(version[1]) >= 10002:
return True
else:
return False
else:
# MySQL 5.6 and later
if int(version[0]) * 1000 + int(version[1]) >= 5006:
return True
else:
return False
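# Illustration of the version check (hypothetical version strings):
#   "5.7.30-log"        -> True   (MySQL >= 5.6)
#   "5.5.60"            -> False
#   "10.3.27-MariaDB"   -> True   (MariaDB >= 10.2)
#   "10.1.48-MariaDB"   -> False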
def get_resource_limits(cursor, user, host):
"""Get user resource limits.
Args:
cursor (cursor): DB driver cursor object.
user (str): User name.
host (str): User host name.
Returns: Dictionary containing current resource limits.
"""
query = ('SELECT max_questions AS MAX_QUERIES_PER_HOUR, '
'max_updates AS MAX_UPDATES_PER_HOUR, '
'max_connections AS MAX_CONNECTIONS_PER_HOUR, '
'max_user_connections AS MAX_USER_CONNECTIONS '
'FROM mysql.user WHERE User = %s AND Host = %s')
cursor.execute(query, (user, host))
res = cursor.fetchone()
if not res:
return None
current_limits = {
'MAX_QUERIES_PER_HOUR': res[0],
'MAX_UPDATES_PER_HOUR': res[1],
'MAX_CONNECTIONS_PER_HOUR': res[2],
'MAX_USER_CONNECTIONS': res[3],
}
return current_limits
def match_resource_limits(module, current, desired):
"""Check and match limits.
Args:
module (AnsibleModule): Ansible module object.
current (dict): Dictionary with current limits.
desired (dict): Dictionary with desired limits.
Returns: Dictionary containing parameters that need to change.
"""
if not current:
        # It means the user does not exist, so we need
        # to set all limits after its creation
return desired
needs_to_change = {}
for key, val in iteritems(desired):
if key not in current:
# Supported keys are listed in the documentation
# and must be determined in the get_resource_limits function
# (follow 'AS' keyword)
module.fail_json(msg="resource_limits: key '%s' is unsupported." % key)
try:
val = int(val)
except Exception:
module.fail_json(msg="Can't convert value '%s' to integer." % val)
if val != current.get(key):
needs_to_change[key] = val
return needs_to_change
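# Illustration (hypothetical limits):
#   current = {'MAX_QUERIES_PER_HOUR': 10, 'MAX_UPDATES_PER_HOUR': 0,
#              'MAX_CONNECTIONS_PER_HOUR': 0, 'MAX_USER_CONNECTIONS': 0}
#   desired = {'MAX_QUERIES_PER_HOUR': 10, 'MAX_UPDATES_PER_HOUR': 500}
#   match_resource_limits(module, current, desired)  ->  {'MAX_UPDATES_PER_HOUR': 500}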
def limit_resources(module, cursor, user, host, resource_limits, check_mode):
"""Limit user resources.
Args:
module (AnsibleModule): Ansible module object.
cursor (cursor): DB driver cursor object.
user (str): User name.
host (str): User host name.
resource_limit (dict): Dictionary with desired limits.
check_mode (bool): Run the function in check mode or not.
Returns: True, if changed, False otherwise.
"""
if not server_supports_alter_user(cursor):
module.fail_json(msg="The server version does not match the requirements "
"for resource_limits parameter. See module's documentation.")
current_limits = get_resource_limits(cursor, user, host)
needs_to_change = match_resource_limits(module, current_limits, resource_limits)
if not needs_to_change:
return False
if needs_to_change and check_mode:
return True
# If not check_mode
tmp = []
for key, val in iteritems(needs_to_change):
tmp.append('%s %s' % (key, val))
query = "ALTER USER %s@%s"
query += ' WITH %s' % ' '.join(tmp)
cursor.execute(query, (user, host))
return True
def get_impl(cursor):
global impl
cursor.execute("SELECT VERSION()")
if 'mariadb' in cursor.fetchone()[0].lower():
from ansible_collections.community.mysql.plugins.module_utils.implementations.mariadb import user as mariauser
impl = mariauser
else:
from ansible_collections.community.mysql.plugins.module_utils.implementations.mysql import user as mysqluser
impl = mysqluser
| 1.507813 | 2 |
engine.py | VETURISRIRAM/smart-search | 4 | 12794386 | """
@author: <NAME>
@title: SmartSearch - An Intelligent Search Engine.
@date: 05/06/2019
"""
import time
import argparse
from crawl_all_sites import crawl_for_sites
from generate_data import create_documents
from generate_data import create_data_directory
from clean_documents import remove_extra_lines_and_tabs
# Parse Arguments
parser = argparse.ArgumentParser(description="Crawler for Search Engine")
parser.add_argument(
"--initial_url",
type=str,
help="The initial URL to start the crawling process from. For example: 'https://www.cs.uic.edu/'"
)
parser.add_argument(
"--number_of_pages",
type=int,
help="The number of pages to crawl and create database from."
)
parser.add_argument(
"--domain",
type=str,
help="The domain in which crawling should happen. For example: 'uic.edu'"
)
args = parser.parse_args()
def crawler_driving_function():
"""
Driver Function to crawl for sites and create database.
"""
# Time to record the start time of the program execution.
db_creation_start_time = time.time()
# Time to record the start time of the crawling.
crawl_start_time = time.time()
print("################################################################################################")
print("Web Crawling startes now.\n\n")
# Initialize the user arguments.
main_url = args.initial_url
min_pages_to_crawl = args.number_of_pages
domain = args.domain
# Get the crawled sites and unknown sites.
sites_list, unknown_urls, broken_urls, parent_children_url_map = crawl_for_sites(main_url, min_pages_to_crawl, domain)
# Record crawl end time.
crawl_end_time = time.time()
print("\n\nWeb Crawling finished now.\n")
print("################################################################################################")
print("Total time to crawl the web: {} Minutes".format((crawl_end_time - crawl_start_time)/60))
# Check if there are any duplicate pages in the list.
if len(sites_list) == len(list(set(sites_list))):
print("No duplicate sites included.")
else:
print("Duplicates found. Removing Duplicates.")
sites_list = list(set(sites_list))
print("################################################################################################")
print("Now, extracting the text data from the crawled websites.")
print("################################################################################################")
if create_data_directory():
print("################################################################################################\n\n")
creation_flag = create_documents(sites_list, parent_children_url_map)
print("\n\nText extracted from the crawled pages.")
else:
raise Exception("DirectoryError: You do not have write privilege in the directory.")
print("################################################################################################")
print("Total time to create the database: {db_creation_time} Minutes.".format(db_creation_time=(time.time() - db_creation_start_time) / 60))
print("################################################################################################")
print("Unknown Achors Found:\n")
print(unknown_urls)
print("################################################################################################")
if broken_urls != []:
print("Broken / Unreachable URLs Found:\n")
print(broken_urls)
print("################################################################################################")
# Main function starts here.
if __name__ == "__main__":
crawler_driving_function()
| 3.25 | 3 |
test/quadrant.py | icyfankai/python | 0 | 12794387 | #!/usr/bin/python
def Quadrant(x, y):
    # Standard quadrant numbering: I (+,+), II (-,+), III (-,-), IV (+,-).
    # Points on an axis (x == 0 or y == 0) are treated as part of the
    # neighbouring quadrant for simplicity.
    if x >= 0:
        if y >= 0:
            print "1"
        else:
            print "4"
    else:
        if y >= 0:
            print "2"
        else:
            print "3"

x = input('Enter the x coordinate: ')
y = input('Enter the y coordinate: ')
Quadrant(x, y)
| 4.0625 | 4 |
project10/objects.py | panda0881/Beginning-Python | 2 | 12794388 | from random import randrange
import pygame
import project10.config
class SquishSprite(pygame.sprite.Sprite):
def __init__(self, image):
pygame.sprite.Sprite.__init__(self)
        self.image = pygame.image.load(image).convert()
self.rect = self.image.get_rect()
screen = pygame.display.get_surface()
shrink = -project10.config.margin * 2
self.area = screen.get_rect().inflate(shrink, shrink)
class Weight(SquishSprite):
def __init__(self, speed):
SquishSprite.__init__(self, project10.config.Weight_image)
self.speed = speed
self.reset()
self.landed = False
def reset(self):
x = randrange(self.area.left, self.area.right)
self.rect.midbottom = x, 0
def update(self):
self.rect.top += self.speed
self.landed = self.rect.top >= self.area.bottom
class Banana(SquishSprite):
def __init__(self):
SquishSprite.__init__(self, project10.config.Banana_image)
self.rect.bottom = self.area.bottom
self.pad_top = project10.config.Banana_pad_top
self.pad_side = project10.config.Banana_pad_side
def update(self):
self.rect.centerx = pygame.mouse.get_pos()[0]
self.rect = self.rect.clamp(self.area)
def touches(self, other):
bounds = self.rect.inflate(-self.pad_side, -self.pad_top)
bounds.bottom = self.rect.bottom
return bounds.colliderect(other.rect)
| 3.171875 | 3 |
tests/spot/sub_account/test_sub_account_api_get_ip_restriction.py | Banging12/binance-connector-python | 512 | 12794389 | import responses
import pytest
from binance.spot import Spot as Client
from tests.util import mock_http_response
from tests.util import random_str
from binance.lib.utils import encoded_string
from binance.error import ParameterRequiredError
mock_item = {"key_1": "value_1", "key_2": "value_2"}
key = random_str()
secret = random_str()
email = "<EMAIL>"
subAccountApiKey = random_str()
complete_params = {"email": email, "subAccountApiKey": subAccountApiKey}
parameterized_test_params = [
({"email": None, "subAccountApiKey": None}),
({"email": "", "subAccountApiKey": subAccountApiKey}),
({"email": email, "subAccountApiKey": ""}),
]
client = Client(key, secret)
@pytest.mark.parametrize("params", parameterized_test_params)
def test_sub_account_api_get_ip_restriction_with_missing_param(params):
    """Tests that getting IP Restriction for a sub-account API key fails when a required parameter is missing"""
client.sub_account_api_get_ip_restriction.when.called_with(**params).should.throw(
ParameterRequiredError
)
@mock_http_response(
responses.GET,
"/sapi/v1/sub-account/subAccountApi/ipRestriction\\?"
+ encoded_string(complete_params),
mock_item,
200,
)
def test_sub_account_api_get_ip_restriction():
"""Tests the API endpoint to get IP Restriction for a sub-account API key"""
client.sub_account_api_get_ip_restriction(**complete_params).should.equal(mock_item)
| 2.078125 | 2 |
models/connect.py | appkabob/adobe_connect_controller | 0 | 12794390 | <gh_stars>0
import urllib.request
import xml.etree.ElementTree as ET
import constants
class Connect:
cookie = None
@classmethod
def __init__(cls):
cls.cookie = None
with urllib.request.urlopen('{}common-info'.format(constants.CONNECT_BASE_URL)) as response:
xml = response.read()
root = ET.fromstring(xml)
status = root.find('status').attrib['code']
if status != 'ok':
raise IOError('ERROR RETRIEVING CONNECT COOKIE: {}'.format(status))
# cls.status('ERROR RETRIEVING CONNECT COOKIE: {}'.format(status))
else:
cls.status = root.find('status').attrib['code']
cls.cookie = root.find('common/cookie').text
cls.login()
@classmethod
def login(cls):
with urllib.request.urlopen(
'{}login&login={}&password={}&session={}'.format(constants.CONNECT_BASE_URL,
constants.CONNECT_LOGIN,
constants.CONNECT_PWD,
cls.cookie)) as response:
xml = response.read()
root = ET.fromstring(xml)
status = root.find('status').attrib['code']
if status != 'ok':
cls.status = 'ERROR LOGGING IN TO ADOBE CONNECT: {}'.format(status)
else:
cls.status = root.find('status').attrib['code']
@classmethod
def send_request(cls, action, conditions):
url = '{}{}&session={}'.format(constants.CONNECT_BASE_URL, action, cls.cookie)
if isinstance(conditions, list):
url += '&{}'.format('&'.join(conditions))
elif isinstance(conditions, str):
url += '&{}'.format(conditions)
else:
raise ValueError('conditions must be string or list')
# if isinstance(*args, str):
# url += '&{}'.format(*args)
# else:
# url += '&'.join(*args)
with urllib.request.urlopen(url) as response: # , timeout=600
xml = response.read()
root = ET.fromstring(xml)
status = root.find('status').attrib['code']
            if status != 'ok':
                cls.status = 'ERROR SENDING {} REQUEST TO ADOBE CONNECT: {}'.format(action, status)
                raise IOError(cls.status)
else:
cls.status = root.find('status').attrib['code']
if action == 'sco-contents':
xml_records = root.findall('scos/sco')
elif action == 'principal-list':
xml_records = root.findall('principal-list/principal')
elif action == 'principal-info' or action == 'principal-update':
xml_records = root.find('principal')
elif action == 'report-course-status':
xml_records = root.find('report-course-status')
elif action == 'sco-info':
xml_records = root.find('sco')
elif action == 'group-membership-update':
return True
else:
xml_records = root.findall('{}/row'.format(action))
return cls.convert_xml_to_object(xml_records)
@staticmethod
def convert_xml_to_object(xml):
output = []
if not isinstance(xml, list):
xml = [xml]
for row in xml:
cleaned_data = {}
data = row.attrib
for item in row:
data[item.tag] = item.text
for key in data: # replace all dashes in parameter names so that they can be recalled more easily in Python
cleaned_data[key.replace('-', '_')] = data[key]
output.append(cleaned_data)
return output
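    # Note: attribute and tag names have their dashes replaced, so a field such as
    # 'date-created' in the Connect XML comes back as 'date_created' in the returned
    # dicts (the field name here is a hypothetical example).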
| 2.546875 | 3 |
nfv/nfv-vim/nfv_vim/tables/_image_table.py | SidneyAn/nfv | 2 | 12794391 | #
# Copyright (c) 2015-2016 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
from nfv_vim import database
from nfv_vim.tables._table import Table
_image_table = None
class ImageTable(Table):
"""
Image Table
"""
def __init__(self):
super(ImageTable, self).__init__()
def _persist_value(self, value):
database.database_image_add(value)
def _unpersist_value(self, key):
database.database_image_delete(key)
def tables_get_image_table():
"""
Get the image table
"""
return _image_table
def image_table_initialize():
"""
Initialize the image table
"""
global _image_table
_image_table = ImageTable()
_image_table.persist = False
images = database.database_image_get_list()
for image in images:
_image_table[image.uuid] = image
_image_table.persist = True
def image_table_finalize():
"""
Finalize the image table
"""
global _image_table
del _image_table
| 2.203125 | 2 |
mcmandelbrot/html.py | edsuom/AsynQueue | 0 | 12794392 | #!/usr/bin/env python
#
# mcmandelbrot
#
# An example package for AsynQueue:
# Asynchronous task queueing based on the Twisted framework, with task
# prioritization and a powerful worker interface.
#
# Copyright (C) 2015 by <NAME>,
# http://edsuom.com/AsynQueue
#
# See edsuom.com for API documentation as well as information about
# Ed's background and other projects, software and otherwise.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS
# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language
# governing permissions and limitations under the License.
"""
A Twisted web C{Resource} that serves clickable, zoomable
Mandelbrot Set images.
"""
import sys
from twisted.application import internet, service
from twisted.internet import defer
from twisted.web import server, resource, static, util, http
from mcmandelbrot import vroot, image
MY_PORT = 8080
VERBOSE = True
HTML_FILE = "mcm.html"
HOWTO = """
Click anywhere in the image to zoom in 5x at that location. Try
exploring the edges of the black “lakes.”
"""
ABOUT = """
Images genera­ted by the <i>mcmandelbrot</i> demo package
bun­dled with my <a
href="http://edsuom.com/AsynQueue">AsynQueue</a> asyn­chronous
processing pack­age, which is freely available per the Apache
License. A link back to <a
href="http://mcm.edsuom.com"><b>mcm.edsuom.com</b></a> would be
apprec­iated.
"""
BYLINE = " —<NAME>"
MORE_INFO = """
CPU and bandwidth resources for this site were con­tributed by <a
href="http://tellectual.com">Tellectual Press</a>, publisher of my
book <em>Evolving out of Eden</em>.
"""
class ResourceBag(object):
blankImage = ("blank.jpg", 'image/jpeg')
children = {}
def __init__(self, descriptions):
self.children[''] = RootResource(self.blankImage[0])
with vroot.openPackageFile(self.blankImage[0]) as fh:
imageData = fh.read()
self.children[self.blankImage[0]] = static.Data(
imageData, self.blankImage[1])
        self.ir = ImageResource(descriptions)
        self.children['image.png'] = self.ir
def shutdown(self):
return self.ir.shutdown()
def putChildren(self, root):
for path, res in self.children.iteritems():
root.putChild(path, res)
class RootResource(resource.Resource):
defaultParams = {
'cr': "-0.630",
'ci': "+0.000",
'crpm': "1.40" }
defaultTitle = \
"Interactive Mandelbrot Set: Driven by Twisted and AsynQueue"
formItems = (
("Real:", "cr" ),
("Imag:", "ci" ),
("+/-", "crpm" ))
inputSize = 10
pxHD = 2048
def __init__(self, blankImage):
self.blankImage = blankImage
self.vr = self.vRoot()
resource.Resource.__init__(self)
def render_GET(self, request):
request.setHeader("content-type", 'text/html')
kw = {'permalink': request.uri}
kw.update(self.defaultParams)
if request.args:
for key, values in request.args.iteritems():
kw[key] = http.unquote(values[0])
kw['onload'] = None
kw['img'] = self.imageURL(kw)
kw['hd'] = self.imageURL(kw, N=self.pxHD)
else:
kw['onload'] = "updateImage()"
kw['img'] = self.blankImage
kw['hd'] = self.imageURL(self.defaultParams, N=self.pxHD)
return self.vr(**kw)
def imageURL(self, params, **kw):
"""
Returns a URL for obtaining a Mandelbrot Set image with the
parameters in the supplied dict I{params}.
"""
def addPart():
parts.append("{}={}".format(name, value))
parts = []
for name, value in params.iteritems():
if name in self.defaultParams:
addPart()
for name, value in kw.iteritems():
addPart()
return "/image.png?{}".format('&'.join(parts))
def vRoot(self):
"""
Populates my vroot I{vr} with an etree that renders into the HTML
page.
"""
def heading():
with v.context():
v.nc('div', 'heading')
v.nc('p', 'bigger')
v.textX("Interactive Mandelbrot Set")
v.nc('div', 'subheading')
v.nc('p', 'smaller')
v.text("Powered by ")
v.nc('a')
v.text("Twisted")
v.set('href', "http://twistedmatrix.com")
v.tailX(" and ")
v.ns('a')
v.text("AsynQueue")
v.set('href', "http://edsuom.com/AsynQueue")
v.tail(".")
vr = vroot.VRoot(self.defaultTitle)
with vr as v:
v.nc('body')
v.addToMap('onload', 'onload')
v.nc('div', 'container')
v.set('id', 'container')
v.nc('div', 'first_part')
#--------------------------------------------------------
with v.context():
heading()
v.ngc('div', 'clear').text = " "
with v.context():
v.nc('div')
with v.context():
v.nc('form')
v.nc('div', 'form')
v.set('name', "position")
v.set('action', "javascript:updateImage()")
for label, name in v.nci(
self.formItems, 'div', 'form_item'):
v.nc('span', 'form_item')
v.text(label)
v.ns('input', 'position')
v.addToMap(name, 'value')
v.set('type', "text")
v.set('size', str(self.inputSize))
v.set('id', name)
v.nc('div', 'form_item')
e = v.ngc('input')
e.set('type', "submit")
e.set('value', "Reload")
v.ns('div', 'form_item')
e = v.ngc('button')
e.set('type', "button")
e.set('onclick', "zoomOut()")
e.text = "Zoom Out"
with v.context():
v.nc('div', 'about')
v.textX(ABOUT)
v.nc('span', 'byline')
v.textX(BYLINE)
v.nc('div', 'about large_only')
v.textX(MORE_INFO)
v.ns('div', 'second_part')
#--------------------------------------------------------
with v.context():
v.nc('div', 'image')
v.set('id', 'image')
with v.context():
v.nc('img', 'mandelbrot')
v.addToMap('img', 'src')
v.set('id', 'mandelbrot')
v.set('onclick', "zoomIn(event)")
v.set('onmousemove', "hover(event)")
v.nc('div', 'footer')
v.nc('div', 'left')
v.set('id', 'hover')
v.textX(HOWTO)
v.ns('div', 'right')
v.nc('a', 'bold')
v.text("2048px")
v.tailX(" | ")
v.set('id', 'hd')
v.addToMap('hd', 'href')
v.ns('a', 'bold')
v.text("Permalink")
v.set('id', 'permalink')
v.addToMap('permalink', 'href')
v.ns('div', 'about small_only')
v.set('id', 'more_info')
v.textX(MORE_INFO)
return vr
class ImageResource(resource.Resource):
isLeaf = True
def __init__(self, descriptions):
self.imager = image.Imager(descriptions, verbose=VERBOSE)
resource.Resource.__init__(self)
def shutdown(self):
return self.imager.shutdown()
def render_GET(self, request):
request.setHeader("content-disposition", "image.png")
request.setHeader("content-type", 'image/png')
self.imager.renderImage(request)
return server.NOT_DONE_YET
class MandelbrotSite(server.Site):
def __init__(self):
self.rb = ResourceBag([None])
siteResource = resource.Resource()
self.rb.putChildren(siteResource)
server.Site.__init__(self, siteResource)
def stopFactory(self):
super(MandelbrotSite, self).stopFactory()
return self.rb.shutdown()
if '/twistd' in sys.argv[0]:
site = MandelbrotSite()
application = service.Application("Interactive Mandelbrot Set HTTP Server")
internet.TCPServer(MY_PORT, site).setServiceParent(application)
| 2.890625 | 3 |
recipes/VoxCeleb/SpeakerRecShards/voxceleb_create_webdatasets.py | nikvaessen/speechbrain-recipes | 3 | 12794393 | <filename>recipes/VoxCeleb/SpeakerRecShards/voxceleb_create_webdatasets.py
################################################################################
#
# Converts the unzipped wav/<SPEAKER_ID>/<YT_VIDEO_ID>/<UTT_NUM>.wav folder
# structure of voxceleb into a WebDataset format
#
# Author(s): <NAME>
################################################################################
import json
import pathlib
import argparse
import random
import subprocess
from collections import defaultdict
from typing import Tuple
import torch
import torchaudio
import webdataset as wds
import yaspin
from util import remove_directory
################################################################################
# methods for writing the shards
ID_SEPARATOR = "&"
def load_audio(audio_file_path: pathlib.Path) -> torch.Tensor:
t, sr = torchaudio.load(audio_file_path)
if sr != 16000:
raise ValueError("expected sampling rate of 16 kHz")
return t
def write_shards(
voxceleb_folder_path: pathlib.Path,
shards_path: pathlib.Path,
seed: int,
delete_voxceleb_folder: bool,
compress_in_place: bool,
samples_per_shard: int,
):
"""
Parameters
----------
voxceleb_folder_path: folder where extracted voxceleb data is located
shards_path: folder to write shards of data to
seed: random seed used to initially shuffle data into shards
delete_voxceleb_folder: boolean value determining whether the input folder
(voxceleb_folder_path) will be deleted after shards
have been written
compress_in_place: boolean value determining whether the shards will be
compressed with the `gpig` utility.
samples_per_shard: number of data samples to store in each shards.
"""
# make sure output folder exist
shards_path.mkdir(parents=True, exist_ok=True)
# find all audio files
audio_files = sorted([f for f in voxceleb_folder_path.rglob("*.wav")])
# create tuples (unique_sample_id, speaker_id, path_to_audio_file)
data_tuples = []
# track statistics on data
all_speaker_ids = set()
youtube_id_per_speaker = defaultdict(list)
sample_keys_per_speaker = defaultdict(list)
for f in audio_files:
# path should be
# $voxceleb_folder_path/wav/speaker_id/youtube_id/utterance_id.wav
speaker_id = f.parent.parent.name
youtube_id = f.parent.name
utterance_id = f.stem
# create a unique key for this sample
key = f"{speaker_id}{ID_SEPARATOR}{youtube_id}{ID_SEPARATOR}{utterance_id}"
# store statistics
all_speaker_ids.add(speaker_id)
youtube_id_per_speaker[speaker_id].append(youtube_id)
sample_keys_per_speaker[speaker_id].append(key)
t = (key, speaker_id, f)
data_tuples.append(t)
# determine a specific speaker_id label for each speaker_id
speaker_id_to_idx = {
speaker_id: idx
for idx, speaker_id in enumerate(sorted(all_speaker_ids))
}
# write a meta.json file which contains statistics on the data
# which will be written to shards
meta_dict = {
"speaker_ids": list(all_speaker_ids),
"speaker_id_to_idx": speaker_id_to_idx,
"youtube_id_per_speaker": youtube_id_per_speaker,
"sample_keys_per_speaker": sample_keys_per_speaker,
"num_data_samples": len(data_tuples)
}
with (shards_path / "meta.json").open("w") as f:
json.dump(meta_dict, f)
# swap the speaker id for the speaker_id index in each tuple
data_tuples = [
(key, speaker_id_to_idx[speaker_id], f)
for key, speaker_id, f in data_tuples
]
# shuffle the tuples so that each shard has a large variety in speakers
random.seed(seed)
random.shuffle(data_tuples)
# write shards
all_keys = set()
shards_path.mkdir(exist_ok=True, parents=True)
pattern = str(shards_path / "shard") + "-%06d.tar"
with wds.ShardWriter(pattern, maxcount=samples_per_shard) as sink:
for key, speaker_id_idx, f in data_tuples:
# load the audio tensor
tensor = load_audio(f)
# verify key is unique
assert key not in all_keys
all_keys.add(key)
# extract speaker_id, youtube_id and utterance_id from key
speaker_id, youtube_id, utterance_id = key.split(ID_SEPARATOR)
# create sample to write
sample = {
"__key__": key,
"wav.pyd": tensor,
"meta.json": {
"speaker_id": speaker_id,
"youtube_id": youtube_id,
"utterance_id": utterance_id,
"speaker_id_idx": speaker_id_idx,
},
}
# write sample to sink
sink.write(sample)
# optionally delete the (unsharded) input data
if delete_voxceleb_folder:
with yaspin.yaspin(f"deleting {voxceleb_folder_path}"):
remove_directory(voxceleb_folder_path)
# optionally compress the .tar shards
if compress_in_place:
with yaspin.yaspin(
text=f"compressing .tar files in {shards_path}"
) as spinner:
for p in sorted(shards_path.glob("*.tar")):
spinner.write(f"> compressing {p}")
subprocess.call(
["pigz", p.absolute()],
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
)
################################################################################
# define CLI
parser = argparse.ArgumentParser(
description="Convert VoxCeleb to WebDataset shards"
)
parser.add_argument(
"voxceleb_folder_path",
type=pathlib.Path,
help="directory containing the (unzipped) VoxCeleb dataset",
)
parser.add_argument(
"shards_path", type=pathlib.Path, help="directory to write shards to"
)
parser.add_argument(
"--seed",
type=int,
default=12345,
help="random seed used for shuffling data before writing to shard",
)
parser.add_argument(
"--delete_voxceleb_folder",
action="store_true",
default=False,
help="delete the voxceleb data folder after shards have been written",
)
parser.add_argument(
"--compress_in_place",
action="store_true",
default=False,
help="compress each .tar to .tar.gz, deleting the .tar file in the process",
)
parser.add_argument(
"--samples_per_shard",
type=int,
default=5000,
help="the maximum amount of samples placed in each shard. The last shard "
"will most likely contain fewer samples.",
)
################################################################################
# execute script
if __name__ == "__main__":
args = parser.parse_args()
write_shards(
args.voxceleb_folder_path,
args.shards_path,
args.seed,
args.delete_voxceleb_folder,
args.compress_in_place,
args.samples_per_shard,
)
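
# Rough sketch of reading the shards back (not part of the original script; the
# exact decoding handlers and shard paths depend on the installed webdataset
# version and on where the shards were written):
#
#   import webdataset as wds
#   dataset = wds.WebDataset("shards/shard-{000000..000009}.tar").decode()
#   for sample in dataset:
#       waveform = sample["wav.pyd"]   # the pickled torch.Tensor written above
#       meta = sample["meta.json"]     # speaker_id, youtube_id, utterance_id, speaker_id_idx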
| 2.03125 | 2 |
src/data/generate_n_steps.py | jeammimi/rnn_seg | 0 | 12794394 | from .generator_traj import generate_traj, EmptyError
from .motion_type import random_rot
from ..features.prePostTools import traj_to_dist
import numpy as np
def generate_n_steps(N, nstep, ndim, sub=False, noise_level=0.25):
add = 0
if ndim == 3:
add = 1
size = nstep
X_train = np.zeros((N, size, (5 + add)))
if sub:
Y_trains = np.zeros((N, size, 10))
Y_train_cat = np.zeros((N, 27))
else:
Y_trains = np.zeros((N, size, 7))
Y_train_cat = np.zeros((N, 12))
Y_train_traj = []
# 12
for i in range(N):
# for i in range(1000):
# if i % 1000 == 0:
# print i
sigma = max(np.random.normal(0.5, 1), 0.05)
step = max(np.random.normal(1, 1), 0.2)
tryagain = True
while tryagain:
try:
clean = 4
if size >= 50:
clean = 8
clean = False
"""
ModelN,Model_num,s,sc,real_traj,norm,Z = generate_traj(size,sub=True,
clean=clean,diff_sigma=2.0,
delta_sigma_directed=1.,ndim=ndim,
anisentropy=0.1,deltav=0.2,rho_fixed=False)
"""
clean = 4
ModelN, Model_num, s, sc, real_traj, norm, Z = generate_traj(size, sub=sub,
clean=clean, diff_sigma=2.0,
delta_sigma_directed=6., ndim=ndim,
anisentropy=0.1, deltav=.4, rho_fixed=False,
random_rotation=False)
mu = 2
Ra0 = [0, 1.]
alpharot = 2 * 3.14 * np.random.random()
dt = real_traj[1:] - real_traj[:-1]
std = np.mean(np.sum(dt**2, axis=1) / 3)**0.5
noise_l = noise_level * np.random.rand()
real_traj += np.random.normal(0, noise_l * std, real_traj.shape)
real_traj = random_rot(real_traj, alpharot, ndim=ndim)
# print real_traj.shape
alligned_traj, normed, alpha, _ = traj_to_dist(real_traj, ndim=ndim)
simple = True
if not simple:
real_traj1 = np.array([Propertie(real_traj[::, 0]).smooth(2),
Propertie(real_traj[::, 1]).smooth(2)])
alligned_traj1, normed1, alpha1, _ = traj_to_dist(real_traj1.T, ndim=ndim)
real_traj2 = np.array([Propertie(real_traj[::, 0]).smooth(5),
Propertie(real_traj[::, 1]).smooth(5)])
alligned_traj2, normed2, alpha2, _ = traj_to_dist(real_traj2.T, ndim=ndim)
normed = np.concatenate((normed[::, :4], normed1[::, :4], normed2), axis=1)
for zero in Z:
normed[zero, ::] = 0
tryagain = False
            except Exception:  # retry with new random parameters if trajectory generation fails
tryagain = True
Y_train_traj.append(real_traj)
X_train[i] = normed
Y_trains[i][range(size), np.array(sc, dtype=np.int)] = 1
Y_train_cat[i, Model_num] = 1
return X_train, Y_trains, Y_train_cat, Y_train_traj
# print np.sum(np.isnan(X_train))
| 2.046875 | 2 |
testfiles/matrix1n2.py | Lytiker/Clustering | 0 | 12794395 | <filename>testfiles/matrix1n2.py
"""two distance matrices from an imputed training dataset with 54 attributes, from two
different row measures"""
import pandas as pd
import numpy as np
from cluster_programme import distance_matrix, distance_matrix2
import time
#read dataset with new atttributes
new_df=pd.read_csv('../output/new_attr.csv', index_col=0)
#drop id and ground truth labels
new_df=new_df.drop(['ID', 'Worst_diagnosis', 'true_labels', 'true_labels2'], axis=1)
#create a list of categorical/continuoustypes
var_df= pd.read_excel('../datafiles/survey_vbl.xlsx')
var_types=var_df['VariableType'].tolist()
var_types.extend(['continuous','continuous','continuous','continuous',
'continuous','continuous','continuous','continuous','categorical'])
del var_types[0:2]
del var_types[2]
print new_df.columns.values
print var_types
print 'length of variable-type list and dataframe shape: %i, %s' % (len(var_types), new_df.shape)
#new_df=new_df[:100]
#create distance matrix
start= time.time()
print('start matrix1')
distance_matrix=distance_matrix(new_df, var_types)
print('making a file for the matrix1')
np.savetxt('../output/matrix1.csv', distance_matrix, delimiter=',')
end1= time.time()
print('matrix1 finished at time %f, start matrix2') % (end1-start)
distance_matrix2=distance_matrix2(new_df)
print('making a file for the matrix2')
np.savetxt('../output/matrix2.csv', distance_matrix2, delimiter=',')
end2= time.time()
print('matrix2 finished! time was %f') % (end2-end1)
| 2.921875 | 3 |
src/slack_events/migrations/0014_slackevent_has_files.py | stefdworschak/slack-to-xapi | 2 | 12794396 | <gh_stars>1-10
# Generated by Django 3.1.1 on 2020-10-04 22:37
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('slack_events', '0013_auto_20201004_1326'),
]
operations = [
migrations.AddField(
model_name='slackevent',
name='has_files',
field=models.BooleanField(default=False),
),
]
| 1.515625 | 2 |
packages/PIPS/pips/src/Passes/pyps/pipsmakerc2python.py | DVSR1966/par4all | 51 | 12794397 | <reponame>DVSR1966/par4all
#/usr/bin/env python
import sys
import re
usage= "usage: pipsmakerc2python.py rc-file.tex properties.rc pipsdep.rc [-loop|-module|-modules]"
if len(sys.argv) < 5:
print usage
exit(1)
texfile = sys.argv[1]
generator = sys.argv[4]
#Read propdep file and convert it into a map.
input = open(sys.argv[3],"r")
lines = input.readlines()
input.close()
pipsdep= dict()
for line in lines:
m = re.match(r"(.*?):\s*(.*)", line)
p = m.group(1)
deps = []
if m.lastindex == 2:
deps = re.split(" ", m.group(2))
deps = deps[0:len(deps)-1]
deps = map(lambda x: x.lower(), deps)
pipsdep[p] = deps
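# Illustration of the expected pipsdep.rc layout (the pass and property names below
# are hypothetical): a line such as
#   "loop_unroll: LOOP_UNROLL_RATE LOOP_LABEL \n"
# would yield pipsdep["loop_unroll"] = ["loop_unroll_rate", "loop_label"]
# (the slice above drops the empty element produced by the trailing space).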
#Read properties into a string
rcfile = sys.argv[2]
input = open(rcfile,"r")
lines = input.readlines()
input.close()
pipsprops = dict()
for line in lines:
m = re.match("\s*(.*?)\s+(.*)", line)
d = m.group(2)
if d == "TRUE": d = "True"
if d == "FALSE" : d = "False"
pipsprops[m.group(1)] = d
#Read input tex file into a string
input = open(texfile,"r")
lines = input.readlines()
rc = "".join(lines)
input.close()
def printPythonMethod(name,doc):
extraparamsetter = ""
extraparamresetter = ""
extraparams = ""
has_loop_label = False
if name in pipsdep and len(pipsdep[name]) > 0:
props = []
for prop in pipsdep[name]:
short_prop = re.sub(r'^' + name + '\_(.*)', r'\1', prop)
arg = short_prop + "=None" # + pipsprops[prop.upper()]
if prop == "loop_label":
has_loop_label = True;
extraparamsetter += '\tif self.workspace:self.workspace.cpypips.push_property("LOOP_LABEL",pypsutils.formatprop(self.label))\n'
extraparamresetter = '\t\tif self.workspace:self.workspace.cpypips.pop_property("LOOP_LABEL")\n' + extraparamresetter
else:
props.append(arg)
extraparamsetter += '\tif '+short_prop+' == None and self.workspace:'+short_prop+'=self.workspace.props.'+prop + '\n'
extraparamsetter += '\tif self.workspace:self.workspace.cpypips.push_property("%s",pypsutils.formatprop(%s))\n' % ( prop.upper(), short_prop)
extraparamresetter = '\t\tif self.workspace:self.workspace.cpypips.pop_property("%s")\n' % (prop.upper()) + extraparamresetter
if len(props) > 0:
extraparams = ",".join(props) + ","
    # Some regexps to filter the LaTeX source: sometimes they work, sometimes they don't,
    # and sometimes the result is worse than before, but they only act on the produced Python comments
doc = re.sub(r'(?ms)(\\begin\{.*?\})|(\\end\{.*?\})|(\\label\{.*?\})','',doc) #Remove any begin,end and label LaTeX command
doc = re.sub(r'(?ms)(\\(.*?)\{.*?\})', r'', doc)#, flags=re.M|re.S) #Remove any other LaTeX command
doc = doc.replace("\_","_") #Convert \_ occurences to _
doc = doc.replace("~"," ") #Convert ~ to spaces
doc = re.sub(r"\\verb\|(.*?)\|", r"\1", doc)#, flags=re.M|re.S) #Replace \verb|somefile| by somefile
doc = re.sub(r"\\verb\/(.*?)\/", r"\1", doc)#, flags=re.M|re.S) #Replace \verb/something/ by something
doc = re.sub(r"\\verb\+(.*?)\+", r"\1", doc)#, flags=re.M|re.S) #Replace \verb+something+ by something
doc = doc.replace("\PIPS{}","PIPS") #Convert \PIPS{} to PIPS
name = re.sub(r'\s',r'_',name)
mself = "self"
if has_loop_label and generator == "-loop":
mself = "self.module"
if (has_loop_label and generator == "-loop") or (not has_loop_label and generator != "-loop"):
if generator == "-modules":
extraparams = extraparams + " concurrent=False,"
print '\ndef '+name+'(self,'+extraparams+' **props):'
print '\t"""'+doc+'"""'
print extraparamsetter
print '\tif '+mself+'.workspace: old_props = pypsutils.set_properties(self.workspace,pypsutils.update_props("'+name.upper()+'",props))'
print '\ttry:'
if generator != "-modules":
print '\t\tpypsutils.apply('+mself+',\"'+name+'\")'
else:
print '\t\tif concurrent: pypsutils.capply(self,\"'+name+'\")'
print '\t\telse:'
print '\t\t\tfor m in self: pypsutils.apply(m,\"'+name+'\")'
print '\texcept:'
print '\t\traise'
print '\tfinally:'
print '\t\tif '+mself+'.workspace: pypsutils.set_properties('+mself+'.workspace,old_props)'
print '\n' + extraparamresetter
print generator[1:] + "." + name + "=" + name
#Print workspace properties
if generator == "-properties":
del pipsprops[""]
sys.stdout.write("workspace.Props.all=dict({")
sys.stdout.write(",".join(map(lambda (key,val) : "'"+key+"': "+val,pipsprops.iteritems())))
sys.stdout.write("})")
exit(0)
#Parse string documentation
doc_strings= re.findall(r'\\begin\{PipsPass\}(.*?)\\end\{PipsPass\}', rc, flags=re.M | re.S)
for dstr in doc_strings:
m = re.match(r'\{([^\}]+)\}[\n]+(.*)', dstr, flags = re.M | re.S)
printPythonMethod(m.group(1), m.group(2))
| 2.578125 | 3 |
inference_module.py | mpleung/ANI | 0 | 12794398 | import numpy as np, networkx as nx, math
from scipy.sparse.csgraph import dijkstra
from scipy.sparse import csr_matrix, identity
def make_Zs(Y,ind1,ind0,pscores1,pscores0,subsample=False):
"""Generates vector of Z_i's, used to construct HT estimator.
Parameters
----------
Y : numpy float array
n-dimensional outcome vector.
ind1 : numpy boolean array
n-dimensional vector of indicators for first exposure mapping.
ind0 : numpy boolean array
n-dimensional vector of indicators for second exposure mapping.
pscores1 : numpy float array
n-dimensional vector of probabilities of first exposure mapping for each unit.
pscores0 : numpy float array
n-dimensional vector of probabilities of second exposure mapping for each unit
subsample : numpy boolean array
When set to an object that's not a numpy array, the function will define subsample to be an n-dimensional array of ones, meaning it is assumed that all n units are included in the population. Otherwise, it must be an boolean array of the same dimension as Z where True components indicate population inclusion.
Returns
-------
n-dimensional numpy float array, where entries corresponding to the True entries of subsample are equal to the desired Z's, and entries corresponding to False subsample entries are set to -1000.
"""
if type(subsample) != np.ndarray: subsample = np.ones(Y.size, dtype=bool)
i1 = ind1[subsample]
i0 = ind0[subsample]
ps1 = pscores1[subsample]
ps0 = pscores0[subsample]
weight1 = i1.copy().astype('float')
weight0 = i0.copy().astype('float')
weight1[weight1 == 1] = i1[weight1 == 1] / ps1[weight1 == 1]
weight0[weight0 == 1] = i0[weight0 == 1] / ps0[weight0 == 1]
Z = np.ones(Y.size) * (-1000) # filler entries that won't be used
Z[subsample] = Y[subsample] * (weight1 - weight0)
return Z
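# Usage sketch (illustrative only; Y, ind1, ind0 and the propensity-score vectors
# are assumed to be numpy arrays of equal length):
#   Z = make_Zs(Y, ind1, ind0, pscores1, pscores0)
#   theta_hat = Z.mean()   # Horvitz-Thompson estimate of the exposure contrast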
def network_SE(Zs, A, subsample=False, K=0, exp_nbhd=True, disp=False, b=-1):
"""Network-dependence robust standard errors.
Returns our standard errors for the sample mean of each array in Zs.
Parameters
----------
Zs : a list of numpy float arrays
Each array is n-dimensional.
A : NetworkX undirected graph
Graph on n nodes. NOTE: Assumes nodes are labeled 0 through n-1, so that the data for node i is given by the ith component of each array in Zs.
subsample : numpy boolean array
When set to an object that's not a numpy array, the function will define subsample to be an n-dimensional array of ones, meaning it is assumed that all n units are included in the population. Otherwise, it must be an boolean array of the same dimension as each array in Zs where True components indicate population inclusion.
K : integer
K used to define the K-neighborhood exposure mapping.
exp_nbhd : boolean
Boolean for whether neighborhood growth is exponential (True) or polynomial (False). Used to determine recommended bandwidth.
b : float
User-specified bandwidth. If a negative value is specified, function will compute our recommended bandwidth choice.
disp : boolean
Boolean for whether to also return more than just the SE (see below).
Returns
-------
SE : float
List of network-dependence robust standard error, one for each array of Zs.
APL : float
Average path length of A.
b : int
Bandwidth.
PSD_failure : list of booleans
True if substitute PSD variance estimator needed to be used for that component of Zs.
"""
if type(Zs) == np.ndarray:
is_list = False
Z_list = [Zs] # handle case where Zs is just an array
else:
is_list = True
Z_list = Zs
if type(subsample) != np.ndarray:
subsample = np.ones(Z_list[0].size, dtype=bool) # handle case where subsample is False
n = subsample.sum()
SEs = []
PSD_failures = []
if b == 0:
for Z in Z_list:
SEs.append(Z[subsample].std() / math.sqrt(subsample.sum())) # iid SE
APL = 0
PSD_failures.append(False)
else:
# compute path distances
G = nx.to_scipy_sparse_matrix(A, nodelist=range(A.number_of_nodes()), format='csr')
dist_matrix = dijkstra(csgraph=G, directed=False, unweighted=True)
Gcc = [A.subgraph(c).copy() for c in sorted(nx.connected_components(A), key=len, reverse=True)]
giant = [i for i in Gcc[0]] # set of nodes in giant component
APL = dist_matrix[np.ix_(giant,giant)].sum() / len(giant) / (len(giant)-1) # average path length
# default bandwidth
if b < 0:
b = round(APL/2) if exp_nbhd else round(APL**(1/3)) # rec bandwidth
b = max(2*K,b)
weights = dist_matrix <= b # weight matrix
for Z in Z_list:
Zc = Z[subsample] - Z[subsample].mean() # demeaned data
# default variance estimator (not guaranteed PSD)
var_est = Zc.dot(weights[np.ix_(subsample,subsample)].dot(Zc[:,None])) / n
# PSD variance estimator from the older draft (Leung, 2019)
if var_est <= 0:
PSD_failures.append(True)
if b < 0: b = round(APL/4) if exp_nbhd else round(APL**(1/3)) # rec bandwidth
b = max(K,b)
b_neighbors = dist_matrix <= b
row_sums = np.squeeze(b_neighbors.dot(np.ones(Z.size)[:,None]))
b_norm = b_neighbors / np.sqrt(row_sums)[:,None]
weights = b_norm.dot(b_norm.T)
var_est = Zc.dot(weights[np.ix_(subsample,subsample)].dot(Zc[:,None])) / n
else:
PSD_failures.append(False)
SEs.append(math.sqrt(var_est / n))
if disp:
if is_list:
return SEs, APL, b, PSD_failures
else:
return SEs[0], APL, b, PSD_failures
else:
if is_list:
return SEs
else:
return SEs[0]
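# Minimal usage sketch (illustrative only): the synthetic network, outcomes and
# assignment probabilities below are hypothetical, not part of the original module.
if __name__ == "__main__":
    np.random.seed(0)
    n = 200
    A = nx.erdos_renyi_graph(n, 0.02, seed=0)   # network on nodes 0..n-1
    D = np.random.binomial(1, 0.5, size=n)      # hypothetical treatment assignment
    Y = np.random.normal(size=n) + D            # hypothetical outcomes
    ind1, ind0 = D == 1, D == 0                 # exposure-mapping indicators
    pscores1 = np.full(n, 0.5)                  # known assignment probabilities
    pscores0 = np.full(n, 0.5)
    Z = make_Zs(Y, ind1, ind0, pscores1, pscores0)
    SE = network_SE(Z, A, K=0)
    print('HT estimate:', Z.mean(), 'network-robust SE:', SE)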
| 3.125 | 3 |
test/test_basic.py | plotlyst/qt-handy | 0 | 12794399 | from qtpy.QtWidgets import QLabel
from qthandy import opaque
def test_opaque(qtbot):
widget = QLabel('Test')
qtbot.addWidget(widget)
widget.show()
opaque(widget)
assert widget.graphicsEffect()
| 2.234375 | 2 |
scripts/00_cyt_plot_and_stats.py | lorenzo-bioinfo/ms_data_analysis | 0 | 12794400 | import pandas as pd
import seaborn as sns
from matplotlib import pyplot as plt
import math
#getting a list of cytokines names/labels
cyt_list = 'IL1B,IL2,IL4,IL5,IL6,IL7,CXCL8,IL10,IL12B,IL13,IL17A,CSF3,CSF2,IFNG,CCL2,CCL4,TNF,IL1RN,IL9,IL15,CCL11,FGF2,CXCL10,PDGFB,CCL5,VEGFA,CCL3'.split(',')
#getting dataframe from csv previously exported
cyt_df = pd.read_excel('../database/db.xlsx', sheet_name = 'SM NM', usecols = 'F:AF,CB')
cyt_list.append('class')
cyt_df.columns = cyt_list
cyt_list.pop()
#cleaning df from NaN values
cyt_df.dropna(inplace = True) #done: 450 rows of 490 preserved ('no liquor' out too)
#getting cyt_df for each patients' class:
cyt_ctrl = cyt_df[cyt_df['class'] == 6]
cyt_rr = cyt_df[cyt_df['class'] == 3]
cyt_pp = cyt_df[cyt_df['class'] == 5]
cyt_sp = cyt_df[cyt_df['class'] == 4]
#Getting the distribution for each cytokine and
#superimposing it to the control cytokine distribution
sns.distributions._has_statsmodels = False #needed to avoid kde error coming from sns using statsmodel
#CTRL VS PP
for cyt in cyt_list:
plt.title('{} - PP vs CTRL\nN = {}'.format(cyt, len(cyt_pp)))
sns.distplot(cyt_ctrl[cyt], color = 'grey')
sns.distplot(cyt_pp[cyt], color = 'darkgreen')
plt.legend(['Control', 'PP'])
plt.xlabel('{} levels'.format(cyt))
plt.savefig('./../plots/ctrl_pp/{}.png'.format(cyt), dpi = 300)
print('Saved ctrl_pp/{}'.format(cyt))
plt.clf()
#CTRL VS SP
for cyt in cyt_list:
plt.title('{} - SP vs CTRL\nN = {}'.format(cyt, len(cyt_sp)))
sns.distplot(cyt_ctrl[cyt], color = 'grey')
sns.distplot(cyt_sp[cyt], color = 'darkgreen')
plt.legend(['Control', 'SP'])
plt.xlabel('{} levels'.format(cyt))
plt.savefig('./../plots/ctrl_sp/{}.png'.format(cyt), dpi = 300)
print('Saved ctrl_sp/{}'.format(cyt))
plt.clf()
#CTRL VS RR
for cyt in cyt_list:
plt.title('{} - RR vs CTRL\nN = {}'.format(cyt, len(cyt_rr)))
sns.distplot(cyt_ctrl[cyt], color = 'grey')
sns.distplot(cyt_rr[cyt], color = 'darkgreen')
plt.legend(['Control', 'RR'])
plt.xlabel('{} levels'.format(cyt))
plt.savefig('./../plots/ctrl_rr/{}.png'.format(cyt), dpi = 300)
print('Saved ctrl_rr/{}'.format(cyt))
plt.clf()
#creating dictionary for ctrl mean cytokine levels
ctrl_mean_list = []
for cyt in cyt_list:
mean = cyt_ctrl[cyt].astype(float).mean()
ctrl_mean_list.append(mean)
ctrl_mean_dict = dict(zip(cyt_list, ctrl_mean_list))
#getting a csv with more statistics:
cyt_lev_dfs = [cyt_ctrl, cyt_rr, cyt_pp, cyt_sp]
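# Note: the *_diff columns written below are standardized mean differences of the
# form (group_mean - ctrl_mean) / sqrt(group_std * ctrl_std), while rr_d is the
# raw (unstandardized) RR-vs-control difference.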
with open('data/cytokine_statistics/full_stats.tsv', 'w') as f:
	f.write('cytokine\tctrl_mean\tctrl_std\tpp_mean\tpp_std\tsp_mean\tsp_std\trr_mean\trr_std\tpp_diff\tsp_diff\trr_diff\trr_d\n')
for cyt in cyt_list:
ctrl_mean = ctrl_mean_dict[cyt]
ctrl_std = cyt_ctrl[cyt].astype(float).std()
pp_mean = cyt_pp[cyt].astype(float).mean()
pp_std = cyt_pp[cyt].astype(float).std()
pp_diff = (pp_mean - ctrl_mean)/math.sqrt(pp_std * ctrl_std) #define what to do with this value
sp_mean = cyt_sp[cyt].astype(float).mean()
sp_std = cyt_sp[cyt].astype(float).std()
sp_diff = (sp_mean - ctrl_mean)/math.sqrt(sp_std * ctrl_std)
rr_mean = cyt_rr[cyt].astype(float).mean()
rr_std = cyt_rr[cyt].astype(float).std()
rr_diff = (rr_mean - ctrl_mean)/math.sqrt(rr_std * ctrl_std)
rr_d = (rr_mean - ctrl_mean)
line = '{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\n'.format(cyt, ctrl_mean, ctrl_std, pp_mean, pp_std, sp_mean, sp_std, rr_mean, rr_std, pp_diff, sp_diff, rr_diff, rr_d)
f.write(line)
stats_df = pd.read_csv('data/cytokine_statistics/full_stats.tsv', sep='\t')
print(stats_df) | 2.6875 | 3 |
testimage.py | sophia2798/sock_sorting | 0 | 12794401 | # THE FOLLOWING CODE CAN BE USED IN YOUR SAGEMAKER NOTEBOOK TO TEST AN UPLOADED IMAGE TO YOUR S3 BUCKET AGAINST YOUR MODEL
import os
import urllib.request
import boto3
from IPython.display import Image
import cv2
import json
import numpy as np
# input the S3 bucket you are using for this project and the file path for a folder and file that contains your uploaded test image
test_image_bucket = 'deeplens-sagemaker-socksortingeast'
test_image_name = 'testimages/image0.jpeg'
tmp_file_name = 'tmp-test-image.jpg'
resized_file_name = 'resized-test-image.jpg'
s3 = boto3.client('s3')
with open(tmp_file_name, 'wb') as f:
s3.download_fileobj(test_image_bucket, test_image_name, f)
# width
W = 500
oriimg = cv2.imread(tmp_file_name)
height, width, depth = oriimg.shape
# scale the image
imgScale = W/width
newX,newY = oriimg.shape[1]*imgScale, oriimg.shape[0]*imgScale
newimg = cv2.resize(oriimg, (int(newX),int(newY)))
cv2.imwrite(resized_file_name, newimg)
with open(resized_file_name, 'rb') as f:
payload = f.read()
payload = bytearray(payload)
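# NOTE: `ic_classifier` (used below) and `ic` (used at the end) are assumed to be
# the deployed image-classification predictor and its estimator, created in
# earlier notebook cells; they are not defined in this snippet.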
result = json.loads(ic_classifier.predict(payload, initial_args={'ContentType': 'application/x-image'}))
# find the index of the class that matches the test image with the highest probability
index = np.argmax(result)
# input your own output categories
object_categories = ['BlueStripes', 'DarkGray', 'IronMan']
print("Result: label - " + object_categories[index] + ", probability - " + str(result[index]))
print()
print(result)
print(ic._current_job_name)
Image(resized_file_name) | 2.640625 | 3 |
src/hedera_proto/schedule_delete_pb2.py | HbarStudio/hedera-protobufs-python | 0 | 12794402 | <filename>src/hedera_proto/schedule_delete_pb2.py
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: schedule_delete.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
import basic_types_pb2 as basic__types__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='schedule_delete.proto',
package='proto',
syntax='proto3',
serialized_options=b'\n\"com.hederahashgraph.api.proto.javaP\001',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x15schedule_delete.proto\x12\x05proto\x1a\x11\x62\x61sic_types.proto\"F\n\x1dScheduleDeleteTransactionBody\x12%\n\nscheduleID\x18\x01 \x01(\x0b\x32\x11.proto.ScheduleIDB&\n\"com.hederahashgraph.api.proto.javaP\x01\x62\x06proto3'
,
dependencies=[basic__types__pb2.DESCRIPTOR,])
_SCHEDULEDELETETRANSACTIONBODY = _descriptor.Descriptor(
name='ScheduleDeleteTransactionBody',
full_name='proto.ScheduleDeleteTransactionBody',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='scheduleID', full_name='proto.ScheduleDeleteTransactionBody.scheduleID', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=51,
serialized_end=121,
)
_SCHEDULEDELETETRANSACTIONBODY.fields_by_name['scheduleID'].message_type = basic__types__pb2._SCHEDULEID
DESCRIPTOR.message_types_by_name['ScheduleDeleteTransactionBody'] = _SCHEDULEDELETETRANSACTIONBODY
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
ScheduleDeleteTransactionBody = _reflection.GeneratedProtocolMessageType('ScheduleDeleteTransactionBody', (_message.Message,), {
'DESCRIPTOR' : _SCHEDULEDELETETRANSACTIONBODY,
'__module__' : 'schedule_delete_pb2'
# @@protoc_insertion_point(class_scope:proto.ScheduleDeleteTransactionBody)
})
_sym_db.RegisterMessage(ScheduleDeleteTransactionBody)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| 1.445313 | 1 |
Howie-master/howie/core.py | Srinath-tr/Goferbot | 1 | 12794403 | import marshal
import os.path
import sys
import threading
import time
import traceback
# Howie-specific
import aiml
import configFile
import frontends
from frontends import *
class ActiveFrontEnd:
def __init__(self, inst, thread):
self._inst = inst
self._thread = thread
_frontends = {}
kernel = None
def _addFrontEnd(name, cls):
global _frontends
# verbose output
config = configFile.get()
if config['cla.verboseMode'] in ["yes", "y", "true"]:
print "Creating %s front-end using class %s" % (name, cls)
# Instantiate the frontend object
feInst = eval("%s.%s()" % (name, cls))
# Create a thread to run this frontend
feThread = threading.Thread(name=name, target=feInst.go)
feThread.setDaemon(True)
feThread.start()
_frontends[name] = ActiveFrontEnd(feInst, feThread)
def init():
    "Initialize the front-ends and back-ends."
    global kernel
# Fetch the configuration info
config = configFile.get()
# Initialize the AIML interpreter
print "Initializing AIML interpreter (please be patient)..."
kernel = aiml.Kernel()
#extract config options
try: verbose = config["general.verbose"] == "yes" or config["cla.verboseMode"] == "yes"
except: verbose = False
try: botName = config["general.botname"]
except: botName = "Nameless"
try: botMaster = config["general.botmaster"]
except: botMaster = "The Master"
try: sessionsPersist = config["general.sessionspersist"].lower() in ["yes", "y", "true"]
except: sessionsPersist = False
try: sessionsDir = config["general.sessionsdir"]
except: sessionsDir = "sessions"
# set up the kernel
kernel.verbose(verbose)
kernel.setPredicate("secure", "yes") # secure the global session
kernel.bootstrap(learnFiles="std-startup.xml", commands="bootstrap")
kernel.setPredicate("secure", "no") # and unsecure it.
# Initialize bot predicates
for k,v in config.items():
if k[:8] != "botinfo.":
continue
kernel.setBotPredicate(k[8:], v)
# Load persistent session data, if necessary
if sessionsPersist:
try:
for session in os.listdir(sessionsDir):
                # Session files are named "user@protocol.ses", where
# user@protocol is also the internal name of the session.
root, ext = os.path.splitext(session)
if ext != ".ses":
# This isn't a session file.
continue
# Load the contents of the session file (a single dictionary
# containing all the predicates for this session).
if verbose: print "Loading session:", root
f = file("%s/%s" %(sessionsDir, session), "rb")
d = marshal.load(f)
f.close()
# update the predicate values in the Kernel.
for k,v in d.items():
kernel.setPredicate(k,v,root)
except OSError:
print "WARNING: Error loading session data from", sessionsDir
# Handle local mode: only start the tty frontend
if config['cla.localMode'].lower() in ["yes", "y", "true"]:
try: _addFrontEnd("tty", "FrontEndTTY")
except:
print "ERROR initializing frontend class frontends.tty.FrontEndTTY"
traceback.print_tb(sys.exc_info()[2])
else:
# Initialize the front-ends. Pythonic black magic ensues...
# First we iterate over all frontend modules.
for fe in frontends.__all__:
# If this frontend isn't activated in the configuration file,
# ignore it.
try: isActive = (config["%s.active" % fe].lower() in ["yes", "y", "true"])
except KeyError:
print "WARNING: no 'active' entry found for module %s in configuration file." % fe
isActive = False
if not isActive:
if config['cla.verboseMode'] == 'yes':
print "Skipping inactive frontend: %s" % fe
continue
# Attempt to extract the name of the front-end class defined in this module.
# If no such class is defined, or if the class is not a subclass of IFrontEnd,
# skip this module.
try:
cls = eval("frontends.%s.frontEndClass" % fe)
if not issubclass(eval("frontends.%s.%s" % (fe, cls)), frontends.frontend.IFrontEnd):
continue
except AttributeError:
# no valid front-end class defined in this file.
print "WARNING: could not find valid front-end class in module %s" % fe
continue
# Create an instance of this class in the _frontends dictionary
try: _addFrontEnd(fe, cls)
except:
# raise # uncomment for details on error
print "ERROR initializing frontend class frontends.%s.%s" % (fe,cls)
traceback.print_tb(sys.exc_info()[2])
continue
def submit(input, session):
"Submits a statement to the back-end. Returns the response to the statement."
response = kernel.respond(input, session)
config = configFile.get()
# if logging is enabled, write the input and response to the log.
try:
if config["general.logging"].lower() in ["yes", "y", "true"]:
logdir = config["general.logdir"]
if not os.path.isdir(logdir): os.mkdir(logdir)
logfile = file("%s/%s.log" % (logdir, session), "a")
logfile.write(time.strftime("[%m/%d/%Y %H:%M:%S]\n"))
logfile.write("%s: %s\n" % (session, input))
logfile.write("%s: %s\n" % (kernel.getBotPredicate("name"), response))
logfile.close()
except KeyError:
pass
# If persistent sessions are enabled, store the session data.
try:
if config["general.sessionspersist"].lower() in ["yes", "y", "true"]:
sessionsdir = config["general.sessionsdir"]
if not os.path.isdir(sessionsdir): os.mkdir(sessionsdir)
sessionfile = file("%s/%s.ses" % (sessionsdir, session), "wb")
marshal.dump(kernel.getSessionData(session), sessionfile)
sessionfile.close()
except KeyError:
pass
return response
| 2.046875 | 2 |
scinoephile/ocr/segmentation/__init__.py | KarlTDebiec/scinoephile | 2 | 12794404 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# scinoephile.ocr.segmentation.__init__.py
#
# Copyright (C) 2017-2019 <NAME>
# All rights reserved.
#
# This software may be modified and distributed under the terms of the
# BSD license. See the LICENSE file for details.
################################### MODULES ###################################
################################## VARIABLES ##################################
################################## FUNCTIONS ##################################
################################### CLASSES ###################################
from scinoephile.ocr.segmentation.SegmentationDataset import \
SegmentationDataset
from scinoephile.ocr.segmentation.SegmentationTestDataset import \
SegmentationTestDataset
from scinoephile.ocr.segmentation.SegmentationTrainDataset import \
SegmentationTrainDataset
| 1.742188 | 2 |
menpo/visualize/widgets/tools.py | jacksoncsy/menpo | 0 | 12794405 | from collections import OrderedDict
from StringIO import StringIO
# Global variables to try and reduce overhead of loading the logo
MENPO_LOGO = None
MENPO_LOGO_SCALE = None
def logo(scale=0.3):
r"""
Creates a widget with Menpo Logo Image.
The structure of the widgets is the following:
logo.children = [logo_image_widget]
To fix the alignment within this widget please refer to `format_logo()`
function.
Parameters
----------
scale : `float`, optional
Defines the scale that will be applied to the logo image
(data/menpo_thumbnail.jpg).
"""
import IPython.html.widgets as ipywidgets
# Try only load the logo once
global MENPO_LOGO, MENPO_LOGO_SCALE
if MENPO_LOGO is None or scale != MENPO_LOGO_SCALE:
import menpo.io as mio
image = mio.import_builtin_asset.menpo_thumbnail_jpg()
MENPO_LOGO = image.rescale(scale)
MENPO_LOGO_SCALE = scale
logo_wid = ipywidgets.ImageWidget(value=_convert_image_to_bytes(MENPO_LOGO))
return ipywidgets.ContainerWidget(children=[logo_wid])
def format_logo(logo_wid, container_border='1px solid black',
border_visible=True):
r"""
    Function that adds an optional border line around the logo widget. Usage
example:
logo_wid = logo()
display(logo_wid)
format_logo(logo_wid)
Parameters
----------
logo_wid :
The widget object generated by the `logo()` function.
container_border : `str`, optional
The border around the widget, e.g. '1px solid black'
border_visible : `boolean`, optional
Defines whether to draw the border line around the widget.
"""
if border_visible:
logo_wid.set_css('border', container_border)
def index_selection_slider(index_selection_default, plot_function=None,
update_function=None, description='Image Number:'):
r"""
Creates a widget for selecting an index. Specifically, it has:
1) A slider.
The structure of the widget is the following:
index_wid = slider
The returned widget saves the selected values in the following dictionary:
index_wid.selected_values
To fix the alignment within this widget please refer to
`format_index_selection()` function.
Parameters
----------
index_selection_default : `dict`
The dictionary with the default options. For example:
index_selection_default = {'min':0,
'max':100,
'step':1,
'index':10}
plot_function : `function` or None, optional
The plot function that is executed when the index value changes.
If None, then nothing is assigned.
update_function : `function` or None, optional
The update function that is executed when the index value changes.
If None, then nothing is assigned.
description : `str`, optional
The title of the widget.
"""
import IPython.html.widgets as ipywidgets
# Create widget
index_wid = ipywidgets.IntSliderWidget(min=index_selection_default['min'],
max=index_selection_default['max'],
value=index_selection_default[
'index'],
step=index_selection_default['step'],
description=description)
# Assign output
index_wid.selected_values = index_selection_default
# Save index
def save_index(name, value):
index_wid.selected_values['index'] = value
index_wid.on_trait_change(save_index, 'value')
# assign given update_function
if update_function is not None:
index_wid.on_trait_change(update_function, 'value')
# assign given plot_function
if plot_function is not None:
index_wid.on_trait_change(plot_function, 'value')
return index_wid
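# Illustrative usage sketch (to be run in an IPython notebook; the option values
# below are hypothetical):
#     index_selection_default = {'min': 0, 'max': 100, 'step': 1, 'index': 10}
#     index_wid = index_selection_slider(index_selection_default)
#     display(index_wid)
#     format_index_selection(index_wid)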
def index_selection_buttons(index_selection_default, plot_function=None,
update_function=None, description='Image Number:',
minus_description='-', plus_description='+',
loop=True, text_editable=True):
r"""
Creates a widget for selecting an index. Specifically, it has:
1) Two buttons to increase and decrease the index.
        2) A text area with the selected index. It can either be editable or
not.
The structure of the widget is the following:
index_wid = [title, minus_button, text, plus_button]
The returned widget saves the selected values in the following dictionary:
index_wid.selected_values
To fix the alignment within this widget please refer to
`format_index_selection()` function.
Parameters
----------
index_selection_default : `dict`
The dictionary with the default options. For example:
index_selection_default = {'min':0,
'max':100,
'step':1,
'index':10}
plot_function : `function` or None, optional
The plot function that is executed when the index value changes.
If None, then nothing is assigned.
update_function : `function` or None, optional
The update function that is executed when the index value changes.
If None, then nothing is assigned.
description : `str`, optional
The title of the widget.
minus_description : `str`, optional
The title of the button that decreases the index.
plus_description : `str`, optional
The title of the button that increases the index.
loop : `boolean`, optional
If True, if by pressing the buttons we reach the minimum/maximum index
values, then the counting will continue from the end/beginning.
If False, the counting will stop at the minimum/maximum value.
text_editable : `boolean`, optional
Flag that determines whether the index text will be editable.
"""
import IPython.html.widgets as ipywidgets
# Create widgets
tlt = ipywidgets.LatexWidget(value=description)
but_minus = ipywidgets.ButtonWidget(description=minus_description)
but_plus = ipywidgets.ButtonWidget(description=plus_description)
val = ipywidgets.IntTextWidget(value=index_selection_default['index'],
disabled=not text_editable)
index_wid = ipywidgets.ContainerWidget(children=[tlt, but_minus, val,
but_plus])
# Assign output
index_wid.selected_values = index_selection_default
# plus button pressed
def change_value_plus(name):
tmp_val = int(val.value) + index_wid.selected_values['step']
if tmp_val > index_wid.selected_values['max']:
if loop:
val.value = str(index_wid.selected_values['min'])
else:
val.value = str(index_wid.selected_values['max'])
else:
val.value = str(tmp_val)
but_plus.on_click(change_value_plus)
# minus button pressed
def change_value_minus(name):
tmp_val = int(val.value) - index_wid.selected_values['step']
if tmp_val < index_wid.selected_values['min']:
if loop:
val.value = str(index_wid.selected_values['max'])
else:
val.value = str(index_wid.selected_values['min'])
else:
val.value = str(tmp_val)
but_minus.on_click(change_value_minus)
# Save index
def save_index(name, old_value, value):
tmp_val = int(value)
if (tmp_val > index_wid.selected_values['max'] or
tmp_val < index_wid.selected_values['min']):
val.value = int(old_value)
index_wid.selected_values['index'] = tmp_val
val.on_trait_change(save_index, 'value')
# assign given update_function
if update_function is not None:
val.on_trait_change(update_function, 'value')
# assign given plot_function
if plot_function is not None:
val.on_trait_change(plot_function, 'value')
return index_wid
def format_index_selection(index_wid, text_width='0.5cm'):
r"""
Function that corrects the align (style format) of a given index_selection
widget. It can be used with both `index_selection_buttons()` and
`index_selection_slider()` functions. Usage example:
index_wid = index_selection_buttons()
display(index_wid)
format_index_selection(index_wid)
Parameters
----------
index_wid :
The widget object generated by either the `index_selection_buttons()`
or the `index_selection_slider()` function.
text_width : `str`, optional
The width of the index text area in the case of
`index_selection_buttons()`.
"""
import IPython.html.widgets as ipywidgets
if not isinstance(index_wid, ipywidgets.IntSliderWidget):
# align all widgets
index_wid.remove_class('vbox')
index_wid.add_class('hbox')
index_wid.add_class('align-center')
# set text width
index_wid.children[2].set_css('width', text_width)
index_wid.children[2].add_class('center')
# set margins
index_wid.children[0].set_css('margin-right', '6px')
def update_index_selection(index_wid, index_selection_default,
plot_function=None, update_function=None):
r"""
Function that updates the state of a given index_selection widget if the
index bounds have changed. It can be used with both
`index_selection_buttons()` and `index_selection_slider()` functions. Usage
example:
index_selection_default = {'min':0,
'max':100,
'step':1,
'index':10}
index_wid = index_selection_buttons(index_selection_default)
display(index_wid)
format_index_selection(index_wid)
index_selection_default = {'min':0,
'max':10,
'step':5,
'index':5}
update_index_selection(index_wid, index_selection_default)
Parameters
----------
index_wid :
The widget object generated by either the `index_selection_buttons()`
or the `index_selection_slider()` function.
index_selection_default : `dict`
The dictionary with the default options. For example:
index_selection_default = {'min':0,
'max':100,
'step':1,
'index':10}
plot_function : `function` or None, optional
The plot function that is executed when the index value changes.
If None, then nothing is assigned.
update_function : `function` or None, optional
The update function that is executed when the index value changes.
If None, then nothing is assigned.
"""
import IPython.html.widgets as ipywidgets
# check if update is required
index_wid_selected = index_wid.selected_values
if not (index_selection_default['min'] == index_wid_selected['min'] and
index_selection_default['max'] == index_wid_selected['max'] and
index_selection_default['step'] == index_wid_selected['step'] and
index_selection_default['index'] == index_wid_selected['index']):
if isinstance(index_wid, ipywidgets.IntSliderWidget):
# created by `index_selection_slider()` function
index_wid.min = index_selection_default['min']
index_wid.max = index_selection_default['max']
index_wid.step = index_selection_default['step']
index_wid.value = index_selection_default['index']
# assign given update_function
if update_function is not None:
index_wid.on_trait_change(update_function, 'value')
# assign given plot_function
if plot_function is not None:
index_wid.on_trait_change(plot_function, 'value')
else:
# created by `index_selection_buttons()` function
index_wid.children[2].value = str(index_selection_default['index'])
# assign given update_function
if update_function is not None:
index_wid.children[2].on_trait_change(update_function, 'value')
# assign given plot_function
if plot_function is not None:
index_wid.children[2].on_trait_change(plot_function, 'value')
# Assign new options dict to selected_values
index_wid.selected_values = index_selection_default
def _decode_colour(colour):
r_val = g_val = b_val = 0.
if not isinstance(colour, str):
r_val = colour[0]
g_val = colour[1]
b_val = colour[2]
colour = 'custom'
return colour, r_val, g_val, b_val
def colour_selection(default_colour_list, plot_function=None, title='Colour',
labels=None):
r"""
Creates a widget with Colour Selection Options. Specifically, it has:
        1) A label selection if more than one colour is provided.
2) An 'apply to all labels' button.
3) A drop down menu with predefined colours and a 'custom' entry.
        4) If 'custom' is selected, then three float text boxes appear to enter
the desired RGB values.
The structure of the widgets is the following:
colour_selection_wid.children = [labels, drop_down_menu, rgb]
labels.children = [selection_dropdown, apply_to_all_button]
rgb.children = [r_value, g_value, b_value]
The returned widget saves the selected values in the following list:
colour_selection_wid.selected_values
To fix the alignment within this widget please refer to
`format_colour_selection()` function.
To update the state of this widget, please refer to
`update_colour_selection()` function.
Parameters
----------
default_colour_list : `list` of `str` or [`float`, `float`, `float`]
If `str`, it must be one of {'b', 'g', 'r', 'c', 'm', 'y', 'k', 'w'}.
If [`float`, `float`, `float`], it defines an RGB value and must have
length 3.
plot_function : `function` or None, optional
The plot function that is executed when a widgets' value changes.
If None, then nothing is assigned.
title : `str`, optional
The description of the drop down menu.
labels : `list`, optional
A list with the labels' names.
"""
import IPython.html.widgets as ipywidgets
# check if multiple mode should be enabled
n_labels = len(default_colour_list)
multiple = n_labels > 1
# colours dictionary
colour_dict = OrderedDict()
colour_dict['blue'] = 'b'
colour_dict['green'] = 'g'
colour_dict['red'] = 'r'
colour_dict['cyan'] = 'c'
colour_dict['magenta'] = 'm'
colour_dict['yellow'] = 'y'
colour_dict['black'] = 'k'
colour_dict['white'] = 'w'
colour_dict['custom'] = 'custom'
# Labels dropdown menu
labels_dict = OrderedDict()
if labels is None:
labels = []
for k in range(n_labels):
labels_dict["label {}".format(k)] = k
labels.append("label {}".format(k))
else:
for k, l in enumerate(labels):
labels_dict[l] = k
selection = ipywidgets.DropdownWidget(values=labels_dict, value=0)
apply_to_all = ipywidgets.ButtonWidget(description='apply to all labels')
labels_wid = ipywidgets.ContainerWidget(children=[selection, apply_to_all],
visible=multiple)
# find default values
default_colour, r_val, g_val, b_val = _decode_colour(default_colour_list[0])
# create widgets
r_wid = ipywidgets.BoundedFloatTextWidget(value=r_val, description='RGB',
min=0.0,
max=1.0)
g_wid = ipywidgets.BoundedFloatTextWidget(value=g_val, min=0.0, max=1.0)
b_wid = ipywidgets.BoundedFloatTextWidget(value=b_val, min=0.0, max=1.0)
menu = ipywidgets.DropdownWidget(values=colour_dict, value=default_colour,
description='')
rgb = ipywidgets.ContainerWidget(children=[r_wid, g_wid, b_wid])
if multiple:
selection.description = title
else:
menu.description = title
# Final widget
colour_selection_wid = ipywidgets.ContainerWidget(
children=[labels_wid, menu, rgb])
# Assign output
colour_selection_wid.selected_values = {'colour': default_colour_list,
'labels': labels}
# control visibility
def show_rgb(name, value):
if value == 'custom':
rgb.visible = True
else:
rgb.visible = False
show_rgb('', default_colour)
menu.on_trait_change(show_rgb, 'value')
# functions in case of multiple
def apply_to_all_function(name):
if menu.value == 'custom':
tmp = [r_wid.value, g_wid.value, b_wid.value]
else:
tmp = menu.value
for idx in range(len(colour_selection_wid.selected_values['colour'])):
colour_selection_wid.selected_values['colour'][idx] = tmp
if selection.value == 0:
selection.value = 1
else:
selection.value = 0
apply_to_all.on_click(apply_to_all_function)
def selection_function(name, value):
colour, r_val, g_val, b_val = _decode_colour(
colour_selection_wid.selected_values['colour'][value])
menu.value = colour
r_wid.value = r_val
g_wid.value = g_val
b_wid.value = b_val
selection.on_trait_change(selection_function, 'value')
# save colour
def get_colour(name, value):
idx = selection.value
if menu.value == 'custom':
colour_selection_wid.selected_values['colour'][idx] = [r_wid.value,
g_wid.value,
b_wid.value]
else:
colour_selection_wid.selected_values['colour'][idx] = menu.value
menu.on_trait_change(get_colour, 'value')
r_wid.on_trait_change(get_colour, 'value')
g_wid.on_trait_change(get_colour, 'value')
b_wid.on_trait_change(get_colour, 'value')
# assign plot function
if plot_function is not None:
menu.on_trait_change(plot_function, 'value')
r_wid.on_trait_change(plot_function, 'value')
g_wid.on_trait_change(plot_function, 'value')
b_wid.on_trait_change(plot_function, 'value')
def tmp_plot_function(name):
plot_function('', True)
apply_to_all.on_click(tmp_plot_function)
return colour_selection_wid
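# Illustrative usage sketch (to be run in an IPython notebook; the label names
# below are hypothetical):
#     colour_selection_wid = colour_selection(['r', 'b'], labels=['jaw', 'mouth'])
#     display(colour_selection_wid)
#     format_colour_selection(colour_selection_wid)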
def format_colour_selection(colour_selection_wid):
r"""
Function that corrects the align (style format) of a given colour_selection
widget. Usage example:
colour_selection_wid = colour_selection(['r'])
display(colour_selection_wid)
format_colour_selection(colour_selection_wid)
Parameters
----------
colour_selection_wid :
The widget object generated by the `colour_selection()` function.
"""
# align selection container and colour
colour_selection_wid.add_class('align-end')
# align r, g, b values
colour_selection_wid.children[2].remove_class('vbox')
colour_selection_wid.children[2].add_class('hbox')
colour_selection_wid.children[2].add_class('align-start')
# set width of r, g, b
colour_selection_wid.children[2].children[0].set_css('width', '0.5cm')
colour_selection_wid.children[2].children[1].set_css('width', '0.5cm')
colour_selection_wid.children[2].children[2].set_css('width', '0.5cm')
# align label selection and apply to all button
colour_selection_wid.children[0].add_class('align-end')
def update_colour_selection(colour_selection_wid, default_colour_list,
labels=None):
r"""
Function that updates the state of a given colour_selection widget. Usage
example:
colour_selection_wid = colour_selection(default_colour_list=['r', 'b'],
labels=['jaw', 'mouth'])
display(colour_selection_wid)
format_colour_selection(colour_selection_wid)
update_colour_selection(colour_selection_wid,
default_colour_list=[[0.5, 0.7, 1.0]],
labels=['all'])
Parameters
----------
colour_selection_wid :
The widget object generated by the `colour_selection()` function.
default_colour_list : `list` of `str` or [`float`, `float`, `float`]
If `str`, it must be one of {'b', 'g', 'r', 'c', 'm', 'y', 'k', 'w'}.
If [`float`, `float`, `float`], it defines an RGB value and must have
length 3.
labels : `list`, optional
A list with the labels' names.
"""
if labels is None:
labels = colour_selection_wid.selected_values['labels']
sel_colours = colour_selection_wid.selected_values['colour']
sel_labels = colour_selection_wid.selected_values['labels']
if (_lists_are_the_same(sel_colours, default_colour_list) and not
_lists_are_the_same(sel_labels, labels)):
# the provided colours are the same, but the labels changed, so update
# the labels
colour_selection_wid.selected_values['labels'] = labels
labels_dict = OrderedDict()
for k, l in enumerate(labels):
labels_dict[l] = k
colour_selection_wid.children[0].children[0].values = labels_dict
colour_selection_wid.children[0].children[0].value = 0
elif (not _lists_are_the_same(sel_colours, default_colour_list) and
_lists_are_the_same(sel_labels, labels)):
# the provided labels are the same, but the colours are different
# assign colour
colour_selection_wid.selected_values['colour'] = default_colour_list
k = colour_selection_wid.children[0].children[0].value
default_colour = default_colour_list[k]
if not isinstance(default_colour, str):
r_val = default_colour[0]
g_val = default_colour[1]
b_val = default_colour[2]
default_colour = 'custom'
colour_selection_wid.children[2].children[0].value = r_val
colour_selection_wid.children[2].children[1].value = g_val
colour_selection_wid.children[2].children[2].value = b_val
colour_selection_wid.children[1].value = default_colour
colour_selection_wid.children[0].children[0].value = 0
elif (not _lists_are_the_same(sel_colours, default_colour_list) and not
_lists_are_the_same(sel_labels, labels)):
# both the colours and the labels are different
# assign colour
if len(sel_labels) > 1 and len(labels) == 1:
colour_selection_wid.children[1].description = \
colour_selection_wid.children[0].children[0].description
colour_selection_wid.children[0].children[0].description = ''
elif len(sel_labels) == 1 and len(labels) > 1:
colour_selection_wid.children[0].children[0].description = \
colour_selection_wid.children[1].description
colour_selection_wid.children[1].description = ''
colour_selection_wid.children[0].visible = len(labels) > 1
colour_selection_wid.selected_values['colour'] = default_colour_list
colour_selection_wid.selected_values['labels'] = labels
labels_dict = OrderedDict()
for k, l in enumerate(labels):
labels_dict[l] = k
colour_selection_wid.children[0].children[0].values = labels_dict
colour_selection_wid.children[0].children[0].value = 0
k = 0
default_colour = default_colour_list[k]
if not isinstance(default_colour, str):
r_val = default_colour[0]
g_val = default_colour[1]
b_val = default_colour[2]
default_colour = 'custom'
colour_selection_wid.children[2].children[0].value = r_val
colour_selection_wid.children[2].children[1].value = g_val
colour_selection_wid.children[2].children[2].value = b_val
colour_selection_wid.children[1].value = default_colour
colour_selection_wid.children[0].children[0].value = 0
def image_options(image_options_default, plot_function=None,
toggle_show_visible=True, toggle_show_default=True,
toggle_title='Image Object'):
r"""
Creates a widget with Image Options. Specifically, it has:
1) A slider that controls the image's alpha (transparency).
2) A checkbox for interpolation.
3) A toggle button that controls the visibility of all the above, i.e.
the image options.
The structure of the widgets is the following:
image_options_wid.children = [toggle_button, options]
options.children = [alpha_slider, pixelated_checkbox]
The returned widget saves the selected values in the following dictionary:
image_options_wid.selected_values
To fix the alignment within this widget please refer to
`format_image_options()` function.
Parameters
----------
image_options_default : `dict`
The initial selected image options.
Example:
image_options={'alpha': 1.,
'interpolation': 'bilinear'}
plot_function : `function` or None, optional
The plot function that is executed when a widgets' value changes.
If None, then nothing is assigned.
toggle_show_default : `boolean`, optional
Defines whether the options will be visible upon construction.
toggle_show_visible : `boolean`, optional
The visibility of the toggle button.
toggle_title : `str`, optional
The title of the toggle button.
"""
import IPython.html.widgets as ipywidgets
# Create widgets
# toggle button
but = ipywidgets.ToggleButtonWidget(description=toggle_title,
value=toggle_show_default,
visible=toggle_show_visible)
# alpha, interpolation
interpolation = ipywidgets.CheckboxWidget(
description='Pixelated',
value=image_options_default['interpolation'] == 'none')
alpha = ipywidgets.FloatSliderWidget(description='Alpha',
value=image_options_default['alpha'],
min=0.0, max=1.0, step=0.05)
options_wid = ipywidgets.ContainerWidget(children=[interpolation, alpha])
# Final widget
image_options_wid = ipywidgets.ContainerWidget(children=[but, options_wid])
# Assign output
image_options_wid.selected_values = image_options_default
# get options functions
def save_interpolation(name, value):
if value:
image_options_wid.selected_values['interpolation'] = 'none'
else:
image_options_wid.selected_values['interpolation'] = 'bilinear'
interpolation.on_trait_change(save_interpolation, 'value')
def save_alpha(name, value):
image_options_wid.selected_values['alpha'] = value
alpha.on_trait_change(save_alpha, 'value')
# Toggle button function
def toggle_fun(name, value):
options_wid.visible = value
toggle_fun('', toggle_show_default)
but.on_trait_change(toggle_fun, 'value')
# assign plot_function
if plot_function is not None:
interpolation.on_trait_change(plot_function, 'value')
alpha.on_trait_change(plot_function, 'value')
return image_options_wid
def format_image_options(image_options_wid, container_padding='6px',
container_margin='6px',
container_border='1px solid black',
toggle_button_font_weight='bold', border_visible=True):
r"""
Function that corrects the align (style format) of a given image_options
widget. Usage example:
image_options_wid = image_options()
display(image_options_wid)
format_image_options(image_options_wid)
Parameters
----------
image_options_wid :
The widget object generated by the `image_options()` function.
container_padding : `str`, optional
The padding around the widget, e.g. '6px'
container_margin : `str`, optional
The margin around the widget, e.g. '6px'
container_border : `str`, optional
The border around the widget, e.g. '1px solid black'
toggle_button_font_weight : `str`
The font weight of the toggle button, e.g. 'bold'
border_visible : `boolean`, optional
Defines whether to draw the border line around the widget.
"""
# fix alpha slider width
image_options_wid.children[1].children[1].set_css('width', '3cm')
# set toggle button font bold
image_options_wid.children[0].set_css('font-weight',
toggle_button_font_weight)
# margin and border around container widget
image_options_wid.set_css('padding', container_padding)
image_options_wid.set_css('margin', container_margin)
if border_visible:
image_options_wid.set_css('border', container_border)
def update_image_options(image_options_wid, image_options_dict):
r"""
Function that updates the state of a given image_options widget. Usage
example:
default_image_options={'interpolation': 'bilinear',
'alpha': 0.2}
image_options_wid = image_options(default_image_options)
display(image_options_wid)
format_image_options(image_options_wid)
default_image_options={'interpolation': 'none',
'alpha': 0.4}
update_image_options(image_options_wid, default_image_options)
Parameters
----------
image_options_wid :
The widget object generated by the `image_options()` function.
image_options_dict : `dict`
The new set of options. For example:
image_options_dict = {'interpolation': 'bilinear',
'alpha': 1.0}
"""
# Assign new options dict to selected_values
image_options_wid.selected_values = image_options_dict
# update alpha slider
if 'alpha' in image_options_dict.keys():
image_options_wid.children[1].children[1].value = \
image_options_dict['alpha']
# update interpolation checkbox
if 'interpolation' in image_options_dict.keys():
image_options_wid.children[1].children[0].value = \
image_options_dict['interpolation'] == 'none'
def line_options(line_options_default, plot_function=None,
toggle_show_visible=True, toggle_show_default=True,
toggle_title='Line Object', show_checkbox_title='Render lines',
labels=None):
r"""
Creates a widget with Line Options. Specifically, it has:
1) A checkbox that controls line's visibility.
2) A dropdown menu for line style.
3) A bounded float text box for line width.
4) A colour_selection widget for line colour.
        5) A toggle button that controls the visibility of all the above, i.e.
the line options.
The structure of the widgets is the following:
line_options_wid.children = [toggle_button, options]
options.children = [render_lines_checkbox, other_options]
other_options.children = [line_style, line_width, line_colour]
The returned widget saves the selected values in the following dictionary:
line_options_wid.selected_values
To fix the alignment within this widget please refer to
`format_line_options()` function.
Parameters
----------
line_options_default : `dict`
The initial selected line options.
Example:
line_options={'render_lines': True,
'line_width': 1,
'line_colour': ['b'],
'line_style': '-'}
plot_function : `function` or None, optional
The plot function that is executed when a widgets' value changes.
If None, then nothing is assigned.
toggle_show_default : `boolean`, optional
Defines whether the options will be visible upon construction.
toggle_show_visible : `boolean`, optional
The visibility of the toggle button.
toggle_title : `str`, optional
The title of the toggle button.
show_checkbox_title : `str`, optional
The description of the show line checkbox.
"""
import IPython.html.widgets as ipywidgets
# Create widgets
# toggle button
but = ipywidgets.ToggleButtonWidget(description=toggle_title,
value=toggle_show_default,
visible=toggle_show_visible)
# line_style, line_width, line_colour
render_lines = ipywidgets.CheckboxWidget(description=show_checkbox_title,
value=line_options_default[
'render_lines'])
line_width = ipywidgets.BoundedFloatTextWidget(description='Width',
value=line_options_default[
'line_width'],
min=0.)
line_style_dict = OrderedDict()
line_style_dict['solid'] = '-'
line_style_dict['dashed'] = '--'
line_style_dict['dash-dot'] = '-.'
line_style_dict['dotted'] = ':'
line_style = ipywidgets.DropdownWidget(values=line_style_dict,
value=line_options_default[
'line_style'],
description='Style')
line_colour = colour_selection(line_options_default['line_colour'],
title='Colour', labels=labels,
plot_function=plot_function)
# Options widget
all_line_options = ipywidgets.ContainerWidget(
children=[line_style, line_width,
line_colour])
options_wid = ipywidgets.ContainerWidget(
children=[render_lines, all_line_options])
# Final widget
line_options_wid = ipywidgets.ContainerWidget(children=[but, options_wid])
# Assign output
line_options_wid.selected_values = line_options_default
# line options visibility
def options_visible(name, value):
line_style.disabled = not value
line_width.disabled = not value
line_colour.children[0].children[0].disabled = not value
line_colour.children[0].children[1].disabled = not value
line_colour.children[1].disabled = not value
line_colour.children[2].children[0].disabled = not value
line_colour.children[2].children[1].disabled = not value
line_colour.children[2].children[2].disabled = not value
options_visible('', line_options_default['render_lines'])
render_lines.on_trait_change(options_visible, 'value')
# get options functions
def save_render_lines(name, value):
line_options_wid.selected_values['render_lines'] = value
render_lines.on_trait_change(save_render_lines, 'value')
def save_line_width(name, value):
line_options_wid.selected_values['line_width'] = float(value)
line_width.on_trait_change(save_line_width, 'value')
def save_line_style(name, value):
line_options_wid.selected_values['line_style'] = value
line_style.on_trait_change(save_line_style, 'value')
line_options_wid.selected_values['line_colour'] = \
line_colour.selected_values['colour']
# Toggle button function
def toggle_fun(name, value):
options_wid.visible = value
toggle_fun('', toggle_show_default)
but.on_trait_change(toggle_fun, 'value')
# assign plot_function
if plot_function is not None:
render_lines.on_trait_change(plot_function, 'value')
line_style.on_trait_change(plot_function, 'value')
line_width.on_trait_change(plot_function, 'value')
return line_options_wid
def format_line_options(line_options_wid, container_padding='6px',
container_margin='6px',
container_border='1px solid black',
toggle_button_font_weight='bold', border_visible=True,
suboptions_border_visible=True):
r"""
Function that corrects the align (style format) of a given line_options
widget. Usage example:
line_options_wid = line_options()
display(line_options_wid)
format_line_options(line_options_wid)
Parameters
----------
line_options_wid :
The widget object generated by the `line_options()` function.
container_padding : `str`, optional
The padding around the widget, e.g. '6px'
container_margin : `str`, optional
The margin around the widget, e.g. '6px'
container_border : `str`, optional
The border around the widget, e.g. '1px solid black'
toggle_button_font_weight : `str`
The font weight of the toggle button, e.g. 'bold'
border_visible : `boolean`, optional
Defines whether to draw the border line around the widget.
suboptions_border_visible : `boolean`, optional
Defines whether to draw the border line around the line options, under
the show line checkbox.
"""
# align line options with checkbox
line_options_wid.children[1].add_class('align-end')
# set linewidth text box width
line_options_wid.children[1].children[1].children[1].set_css('width', '1cm')
# format colour options
format_colour_selection(line_options_wid.children[1].children[1].children[2])
# border around options
if suboptions_border_visible:
line_options_wid.children[1].children[1].set_css('border',
container_border)
# set toggle button font bold
line_options_wid.children[0].set_css('font-weight',
toggle_button_font_weight)
# margin and border around container widget
line_options_wid.set_css('padding', container_padding)
line_options_wid.set_css('margin', container_margin)
if border_visible:
line_options_wid.set_css('border', container_border)
def update_line_options(line_options_wid, line_options_dict, labels=None):
r"""
Function that updates the state of a given line_options widget. Usage
example:
default_line_options={'render_lines':True,
'line_width':2,
'line_colour':['r'],
'line_style':'-'}
line_options_wid = line_options(default_line_options)
display(line_options_wid)
format_line_options(line_options_wid)
default_line_options={'render_lines':False,
'line_width':4,
'line_colour':[[0.1, 0.2, 0.3]],
'line_style':'-'}
update_line_options(line_options_wid, default_line_options)
Parameters
----------
line_options_wid :
The widget object generated by the `line_options()` function.
line_options_dict : `dict`
The new set of options. For example:
line_options_dict={'render_lines':True,
'line_width':2,
'line_colour':['r'],
'line_style':'-'}
"""
# Assign new options dict to selected_values
line_options_wid.selected_values = line_options_dict
# update render lines checkbox
if 'render_lines' in line_options_dict.keys():
line_options_wid.children[1].children[0].value = \
line_options_dict['render_lines']
# update line_style dropdown menu
if 'line_style' in line_options_dict.keys():
line_options_wid.children[1].children[1].children[0].value = \
line_options_dict['line_style']
# update line_width text box
if 'line_width' in line_options_dict.keys():
line_options_wid.children[1].children[1].children[1].value = \
float(line_options_dict['line_width'])
# update line_colour
if 'line_colour' in line_options_dict.keys():
update_colour_selection(
line_options_wid.children[1].children[1].children[2],
line_options_dict['line_colour'], labels=labels)
def marker_options(marker_options_default, plot_function=None,
toggle_show_visible=True, toggle_show_default=True,
toggle_title='Marker Object',
show_checkbox_title='Show markers'):
r"""
Creates a widget with Marker Options. Specifically, it has:
1) A checkbox that controls marker's visibility.
2) A dropdown menu for marker style.
3) A bounded int text box for marker size.
4) A bounded float text box for marker edge width.
5) A colour_selection widget for face colour.
6) A colour_selection widget for edge colour.
7) A toggle button that controls the visibility of all the above, i.e.
the marker options.
The structure of the widgets is the following:
marker_options_wid.children = [toggle_button, options]
options.children = [render_markers_checkbox, other_options]
other_options.children = [marker_style, marker_size, marker_edge_width,
marker_face_colour, marker_edge_colour]
The returned widget saves the selected values in the following dictionary:
marker_options_wid.selected_values
To fix the alignment within this widget please refer to
`format_marker_options()` function.
Parameters
----------
marker_options_default : `dict`
The initial selected marker options.
Example:
marker_options_default={'render_markers':True,
'marker_size':20,
'marker_face_colour':['r'],
'marker_edge_colour':['k'],
'marker_style':'o',
'marker_edge_width':1}
plot_function : `function` or None, optional
The plot function that is executed when a widgets' value changes.
If None, then nothing is assigned.
toggle_show_default : `boolean`, optional
Defines whether the options will be visible upon construction.
toggle_show_visible : `boolean`, optional
The visibility of the toggle button.
toggle_title : `str`, optional
The title of the toggle button.
show_checkbox_title : `str`, optional
The description of the show marker checkbox.
"""
import IPython.html.widgets as ipywidgets
# Create widgets
# toggle button
but = ipywidgets.ToggleButtonWidget(description=toggle_title,
value=toggle_show_default,
visible=toggle_show_visible)
# marker_size, marker_edge_width, marker_style, marker_face_colour,
# marker_edge_colour
render_markers = ipywidgets.CheckboxWidget(
description=show_checkbox_title,
value=marker_options_default['render_markers'])
marker_size = ipywidgets.BoundedIntTextWidget(
description='Size', value=marker_options_default['marker_size'], min=0)
marker_edge_width = ipywidgets.BoundedFloatTextWidget(
description='Edge width',
value=marker_options_default['marker_edge_width'], min=0.)
marker_style_dict = OrderedDict()
marker_style_dict['point'] = '.'
marker_style_dict['pixel'] = ','
marker_style_dict['circle'] = 'o'
marker_style_dict['triangle down'] = 'v'
marker_style_dict['triangle up'] = '^'
marker_style_dict['triangle left'] = '<'
marker_style_dict['triangle right'] = '>'
marker_style_dict['tri down'] = '1'
marker_style_dict['tri up'] = '2'
marker_style_dict['tri left'] = '3'
marker_style_dict['tri right'] = '4'
marker_style_dict['octagon'] = '8'
marker_style_dict['square'] = 's'
marker_style_dict['pentagon'] = 'p'
marker_style_dict['star'] = '*'
marker_style_dict['hexagon 1'] = 'h'
marker_style_dict['hexagon 2'] = 'H'
marker_style_dict['plus'] = '+'
marker_style_dict['x'] = 'x'
marker_style_dict['diamond'] = 'D'
marker_style_dict['thin diamond'] = 'd'
marker_style = ipywidgets.DropdownWidget(values=marker_style_dict,
value=marker_options_default[
'marker_style'],
description='Style')
marker_face_colour = colour_selection(
marker_options_default['marker_face_colour'], title='Face Colour',
plot_function=plot_function)
marker_edge_colour = colour_selection(
marker_options_default['marker_edge_colour'], title='Edge Colour',
plot_function=plot_function)
# Options widget
all_marker_options = ipywidgets.ContainerWidget(
children=[marker_style, marker_size,
marker_edge_width,
marker_face_colour,
marker_edge_colour])
options_wid = ipywidgets.ContainerWidget(
children=[render_markers, all_marker_options])
# Final widget
marker_options_wid = ipywidgets.ContainerWidget(children=[but, options_wid])
# Assign output
marker_options_wid.selected_values = marker_options_default
# marker options visibility
def options_visible(name, value):
marker_style.disabled = not value
marker_size.disabled = not value
marker_edge_width.disabled = not value
marker_face_colour.children[0].children[0].disabled = not value
marker_face_colour.children[0].children[1].disabled = not value
marker_face_colour.children[1].disabled = not value
marker_face_colour.children[2].children[0].disabled = not value
marker_face_colour.children[2].children[1].disabled = not value
marker_face_colour.children[2].children[2].disabled = not value
marker_edge_colour.children[0].children[0].disabled = not value
marker_edge_colour.children[0].children[1].disabled = not value
marker_edge_colour.children[1].disabled = not value
marker_edge_colour.children[2].children[0].disabled = not value
marker_edge_colour.children[2].children[1].disabled = not value
marker_edge_colour.children[2].children[2].disabled = not value
options_visible('', marker_options_default['render_markers'])
render_markers.on_trait_change(options_visible, 'value')
# get options functions
def save_render_markers(name, value):
marker_options_wid.selected_values['render_markers'] = value
render_markers.on_trait_change(save_render_markers, 'value')
def save_markersize(name, value):
marker_options_wid.selected_values['marker_size'] = int(value)
marker_size.on_trait_change(save_markersize, 'value')
def save_markeredgewidth(name, value):
marker_options_wid.selected_values['marker_edge_width'] = float(value)
marker_edge_width.on_trait_change(save_markeredgewidth, 'value')
def save_markerstyle(name, value):
marker_options_wid.selected_values['marker_style'] = value
marker_style.on_trait_change(save_markerstyle, 'value')
marker_options_wid.selected_values['marker_edge_colour'] = \
marker_edge_colour.selected_values['colour']
marker_options_wid.selected_values['marker_face_colour'] = \
marker_face_colour.selected_values['colour']
# Toggle button function
def toggle_fun(name, value):
options_wid.visible = value
toggle_fun('', toggle_show_default)
but.on_trait_change(toggle_fun, 'value')
# assign plot_function
if plot_function is not None:
render_markers.on_trait_change(plot_function, 'value')
marker_style.on_trait_change(plot_function, 'value')
marker_edge_width.on_trait_change(plot_function, 'value')
marker_size.on_trait_change(plot_function, 'value')
return marker_options_wid
def format_marker_options(marker_options_wid, container_padding='6px',
container_margin='6px',
container_border='1px solid black',
toggle_button_font_weight='bold', border_visible=True,
suboptions_border_visible=True):
r"""
Function that corrects the align (style format) of a given marker_options
widget. Usage example:
marker_options_wid = marker_options()
display(marker_options_wid)
format_marker_options(marker_options_wid)
Parameters
----------
marker_options_wid :
The widget object generated by the `marker_options()` function.
container_padding : `str`, optional
The padding around the widget, e.g. '6px'
container_margin : `str`, optional
The margin around the widget, e.g. '6px'
container_border : `str`, optional
The border around the widget, e.g. '1px solid black'
toggle_button_font_weight : `str`
The font weight of the toggle button, e.g. 'bold'
border_visible : `boolean`, optional
Defines whether to draw the border line around the widget.
suboptions_border_visible : `boolean`, optional
Defines whether to draw the border line around the marker options, under
the show marker checkbox.
"""
# align marker options with checkbox
marker_options_wid.children[1].add_class('align-end')
# set text boxes width
marker_options_wid.children[1].children[1].children[1].set_css('width',
'1cm')
marker_options_wid.children[1].children[1].children[2].set_css('width',
'1cm')
# border around options
if suboptions_border_visible:
marker_options_wid.children[1].children[1].set_css('border',
container_border)
# format colour options
format_colour_selection(
marker_options_wid.children[1].children[1].children[3])
format_colour_selection(
marker_options_wid.children[1].children[1].children[4])
# set toggle button font bold
marker_options_wid.children[0].set_css('font-weight',
toggle_button_font_weight)
# margin and border around container widget
marker_options_wid.set_css('padding', container_padding)
marker_options_wid.set_css('margin', container_margin)
if border_visible:
marker_options_wid.set_css('border', container_border)
def update_marker_options(marker_options_wid, marker_options_dict):
r"""
Function that updates the state of a given marker_options widget. Usage
example:
default_marker_options={'render_markers':True,
'marker_size':20,
'marker_face_colour':['r'],
'marker_edge_colour':['k'],
'marker_style':'o',
'marker_edge_width':1}
marker_options_wid = marker_options(default_marker_options)
display(marker_options_wid)
format_marker_options(marker_options_wid)
default_marker_options={'render_markers':True,
'marker_size':40,
'marker_face_colour':[[0.1, 0.2, 0.3]],
'marker_edge_colour':['r'],
'marker_style':'d',
'marker_edge_width':1}
update_marker_options(marker_options_wid, default_marker_options)
Parameters
----------
marker_options_wid :
The widget object generated by the `marker_options()` function.
marker_options_dict : `dict`
The new set of options. For example:
marker_options_dict={'render_markers':True,
'marker_size':20,
'marker_face_colour':['r'],
'marker_edge_colour':['k'],
'marker_style':'o',
'marker_edge_width':1}
"""
# Assign new options dict to selected_values
marker_options_wid.selected_values = marker_options_dict
# update render marker checkbox
if 'render_markers' in marker_options_dict.keys():
marker_options_wid.children[1].children[0].value = \
marker_options_dict['render_markers']
# update marker_style dropdown menu
if 'marker_style' in marker_options_dict.keys():
marker_options_wid.children[1].children[1].children[0].value = \
marker_options_dict['marker_style']
# update marker_size text box
if 'marker_size' in marker_options_dict.keys():
marker_options_wid.children[1].children[1].children[1].value = \
int(marker_options_dict['marker_size'])
# update marker_edge_width text box
if 'marker_edge_width' in marker_options_dict.keys():
marker_options_wid.children[1].children[1].children[2].value = \
float(marker_options_dict['marker_edge_width'])
# update marker_face_colour
if 'marker_face_colour' in marker_options_dict.keys():
update_colour_selection(
marker_options_wid.children[1].children[1].children[3],
marker_options_dict['marker_face_colour'])
# update marker_edge_colour
if 'marker_edge_colour' in marker_options_dict.keys():
update_colour_selection(
marker_options_wid.children[1].children[1].children[4],
marker_options_dict['marker_edge_colour'])
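# A hedged sketch of the `plot_function` callback pattern used by the widgets
# in this module (the callback below is illustrative and not part of the
# library). The constructors attach the callback to their widgets via
# `on_trait_change`, so it must accept the `(name, value)` signature; it is
# assumed here that `marker_options` exposes the same `plot_function` keyword
# as `numbering_options` and `figure_options` below:
#
#     def redraw(name, value):
#         # re-render the figure using the latest selections, e.g. with
#         # matplotlib, reading them from marker_options_wid.selected_values
#         print(marker_options_wid.selected_values)
#
#     marker_options_wid = marker_options(default_marker_options,
#                                         plot_function=redraw)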
def numbering_options(numbers_options_default, plot_function=None,
toggle_show_visible=True, toggle_show_default=True,
toggle_title='Numbering Options',
show_checkbox_title='Render numbering'):
r"""
Creates a widget with Numbering Options. Specifically, it has:
        1) A checkbox that controls the numbering's visibility.
2) A dropdown menu for font name.
3) A bounded int text box for font size.
4) A dropdown menu for font style.
5) A dropdown menu for font weight.
6) A colour_selection widget for font colour.
7) A dropdown menu for horizontal alignment.
8) A dropdown menu for vertical alignment.
9) A toggle button that controls the visibility of all the above, i.e.
the numbering options.
The structure of the widgets is the following:
numbering_options_wid.children = [toggle_button, options]
options.children = [show_font_checkbox, other_options]
other_options.children = [font_name, font_size, font_style, font_weight,
font_colour, horizontal_align, vertical_align]
The returned widget saves the selected values in the following dictionary:
numbering_options_wid.selected_values
To fix the alignment within this widget please refer to
`format_numbering_options()` function.
Parameters
----------
numbers_options_default : `dict`
        The initial selected numbering options.
Example:
numbers_options_default = {'render_numbering': True,
'numbers_font_name': 'serif',
'numbers_font_size': 10,
'numbers_font_style': 'normal',
'numbers_font_weight': 'normal',
'numbers_font_colour': ['k'],
'numbers_horizontal_align': 'center',
'numbers_vertical_align': 'bottom'}
plot_function : `function` or None, optional
        The plot function that is executed when a widget's value changes.
If None, then nothing is assigned.
toggle_show_default : `boolean`, optional
Defines whether the options will be visible upon construction.
toggle_show_visible : `boolean`, optional
The visibility of the toggle button.
toggle_title : `str`, optional
The title of the toggle button.
show_checkbox_title : `str`, optional
        The description of the show numbering checkbox.
"""
import IPython.html.widgets as ipywidgets
# Create widgets
# toggle button
but = ipywidgets.ToggleButtonWidget(description=toggle_title,
value=toggle_show_default,
visible=toggle_show_visible)
# numbers_font_name, numbers_font_size, numbers_font_style,
# numbers_font_weight, numbers_font_colour
render_numbering = ipywidgets.CheckboxWidget(
description=show_checkbox_title,
value=numbers_options_default['render_numbering'])
numbers_font_name_dict = OrderedDict()
numbers_font_name_dict['serif'] = 'serif'
numbers_font_name_dict['sans-serif'] = 'sans-serif'
numbers_font_name_dict['cursive'] = 'cursive'
numbers_font_name_dict['fantasy'] = 'fantasy'
numbers_font_name_dict['monospace'] = 'monospace'
numbers_font_name = ipywidgets.DropdownWidget(
values=numbers_font_name_dict,
value=numbers_options_default['numbers_font_name'], description='Font')
numbers_font_size = ipywidgets.BoundedIntTextWidget(
description='Size', value=numbers_options_default['numbers_font_size'],
min=2)
numbers_font_style_dict = OrderedDict()
numbers_font_style_dict['normal'] = 'normal'
numbers_font_style_dict['italic'] = 'italic'
numbers_font_style_dict['oblique'] = 'oblique'
numbers_font_style = ipywidgets.DropdownWidget(
values=numbers_font_style_dict,
value=numbers_options_default['numbers_font_style'],
description='Style')
numbers_font_weight_dict = OrderedDict()
numbers_font_weight_dict['normal'] = 'normal'
numbers_font_weight_dict['ultralight'] = 'ultralight'
numbers_font_weight_dict['light'] = 'light'
numbers_font_weight_dict['regular'] = 'regular'
numbers_font_weight_dict['book'] = 'book'
numbers_font_weight_dict['medium'] = 'medium'
numbers_font_weight_dict['roman'] = 'roman'
numbers_font_weight_dict['semibold'] = 'semibold'
numbers_font_weight_dict['demibold'] = 'demibold'
numbers_font_weight_dict['demi'] = 'demi'
numbers_font_weight_dict['bold'] = 'bold'
numbers_font_weight_dict['heavy'] = 'heavy'
numbers_font_weight_dict['extra bold'] = 'extra bold'
numbers_font_weight_dict['black'] = 'black'
numbers_font_weight = ipywidgets.DropdownWidget(
values=numbers_font_weight_dict,
value=numbers_options_default['numbers_font_weight'],
description='Weight')
numbers_font_colour = colour_selection(
numbers_options_default['numbers_font_colour'], title='Colour',
plot_function=plot_function)
numbers_horizontal_align_dict = OrderedDict()
numbers_horizontal_align_dict['center'] = 'center'
numbers_horizontal_align_dict['right'] = 'right'
numbers_horizontal_align_dict['left'] = 'left'
numbers_horizontal_align = ipywidgets.DropdownWidget(
values=numbers_horizontal_align_dict,
value=numbers_options_default['numbers_horizontal_align'],
description='Align hor.')
numbers_vertical_align_dict = OrderedDict()
numbers_vertical_align_dict['center'] = 'center'
numbers_vertical_align_dict['top'] = 'top'
numbers_vertical_align_dict['bottom'] = 'bottom'
numbers_vertical_align_dict['baseline'] = 'baseline'
numbers_vertical_align = ipywidgets.DropdownWidget(
values=numbers_vertical_align_dict,
value=numbers_options_default['numbers_vertical_align'],
description='Align ver.')
# Options widget
all_font_options = ipywidgets.ContainerWidget(
children=[numbers_font_name, numbers_font_size, numbers_font_style,
numbers_font_weight, numbers_font_colour,
numbers_horizontal_align, numbers_vertical_align])
options_wid = ipywidgets.ContainerWidget(
children=[render_numbering, all_font_options])
# Final widget
numbering_options_wid = ipywidgets.ContainerWidget(
children=[but, options_wid])
# Assign output
numbering_options_wid.selected_values = numbers_options_default
# font options visibility
def options_visible(name, value):
numbers_font_name.disabled = not value
numbers_font_size.disabled = not value
numbers_font_style.disabled = not value
numbers_font_weight.disabled = not value
numbers_font_colour.children[0].children[0].disabled = not value
numbers_font_colour.children[0].children[1].disabled = not value
numbers_font_colour.children[1].disabled = not value
numbers_font_colour.children[2].children[0].disabled = not value
numbers_font_colour.children[2].children[1].disabled = not value
numbers_font_colour.children[2].children[2].disabled = not value
numbers_horizontal_align.disabled = not value
numbers_vertical_align.disabled = not value
options_visible('', numbers_options_default['render_numbering'])
render_numbering.on_trait_change(options_visible, 'value')
# get options functions
def save_render_numbering(name, value):
numbering_options_wid.selected_values['render_numbering'] = value
render_numbering.on_trait_change(save_render_numbering, 'value')
def save_numbers_font_name(name, value):
numbering_options_wid.selected_values['numbers_font_name'] = value
numbers_font_name.on_trait_change(save_numbers_font_name, 'value')
def save_numbers_font_size(name, value):
numbering_options_wid.selected_values['numbers_font_size'] = int(value)
numbers_font_size.on_trait_change(save_numbers_font_size, 'value')
def save_numbers_font_style(name, value):
numbering_options_wid.selected_values['numbers_font_style'] = value
numbers_font_style.on_trait_change(save_numbers_font_style, 'value')
def save_numbers_font_weight(name, value):
numbering_options_wid.selected_values['numbers_font_weight'] = value
numbers_font_weight.on_trait_change(save_numbers_font_weight, 'value')
def save_numbers_horizontal_align(name, value):
numbering_options_wid.selected_values['numbers_horizontal_align'] = \
value
numbers_horizontal_align.on_trait_change(save_numbers_horizontal_align,
'value')
def save_numbers_vertical_align(name, value):
numbering_options_wid.selected_values['numbers_vertical_align'] = value
numbers_vertical_align.on_trait_change(save_numbers_vertical_align, 'value')
numbering_options_wid.selected_values['numbers_font_colour'] = \
numbers_font_colour.selected_values['colour']
# Toggle button function
def toggle_fun(name, value):
options_wid.visible = value
toggle_fun('', toggle_show_default)
but.on_trait_change(toggle_fun, 'value')
# assign plot_function
if plot_function is not None:
render_numbering.on_trait_change(plot_function, 'value')
numbers_font_name.on_trait_change(plot_function, 'value')
numbers_font_style.on_trait_change(plot_function, 'value')
numbers_font_size.on_trait_change(plot_function, 'value')
numbers_font_weight.on_trait_change(plot_function, 'value')
numbers_horizontal_align.on_trait_change(plot_function, 'value')
numbers_vertical_align.on_trait_change(plot_function, 'value')
return numbering_options_wid
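# A minimal usage sketch for `numbering_options` (illustrative only, not
# executed here; it assumes an IPython notebook session with `display`
# imported from IPython.display). The defaults dict mirrors the example in the
# docstring above:
#
#     numbers_options_default = {'render_numbering': True,
#                                'numbers_font_name': 'serif',
#                                'numbers_font_size': 10,
#                                'numbers_font_style': 'normal',
#                                'numbers_font_weight': 'normal',
#                                'numbers_font_colour': ['k'],
#                                'numbers_horizontal_align': 'center',
#                                'numbers_vertical_align': 'bottom'}
#     numbering_options_wid = numbering_options(numbers_options_default)
#     display(numbering_options_wid)
#     format_numbering_options(numbering_options_wid)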
def format_numbering_options(numbering_options_wid, container_padding='6px',
container_margin='6px',
container_border='1px solid black',
toggle_button_font_weight='bold',
border_visible=True,
suboptions_border_visible=True):
r"""
    Function that corrects the alignment (style format) of a given
    numbering_options widget. Usage example:
numbering_options_wid = numbering_options()
display(numbering_options_wid)
format_numbering_options(numbering_options_wid)
Parameters
----------
numbering_options_wid :
The widget object generated by the `numbering_options()` function.
container_padding : `str`, optional
The padding around the widget, e.g. '6px'
container_margin : `str`, optional
The margin around the widget, e.g. '6px'
container_border : `str`, optional
The border around the widget, e.g. '1px solid black'
toggle_button_font_weight : `str`
The font weight of the toggle button, e.g. 'bold'
border_visible : `boolean`, optional
Defines whether to draw the border line around the widget.
suboptions_border_visible : `boolean`, optional
Defines whether to draw the border line around the font options, under
the show font checkbox.
"""
# align font options with checkbox
numbering_options_wid.children[1].add_class('align-end')
# set fontsize text box width
numbering_options_wid.children[1].children[1].children[1].set_css('width',
'1cm')
# format colour options
format_colour_selection(
numbering_options_wid.children[1].children[1].children[4])
# border around options
if suboptions_border_visible:
numbering_options_wid.children[1].children[1].set_css('border',
container_border)
# set toggle button font bold
numbering_options_wid.children[0].set_css('font-weight',
toggle_button_font_weight)
# margin and border around container widget
numbering_options_wid.set_css('padding', container_padding)
numbering_options_wid.set_css('margin', container_margin)
if border_visible:
numbering_options_wid.set_css('border', container_border)
def update_numbering_options(numbering_options_wid, numbering_options_dict):
r"""
Function that updates the state of a given numbering_options widget. Usage
example:
numbering_options_default = {'render_numbering': True,
'numbers_font_name': 'serif',
'numbers_font_size': 10,
'numbers_font_style': 'normal',
'numbers_font_weight': 'normal',
'numbers_font_colour': ['k'],
'numbers_horizontal_align': 'center',
'numbers_vertical_align': 'bottom'}
numbering_options_wid = numbering_options(numbering_options_default)
display(numbering_options_wid)
format_numbering_options(numbering_options_wid)
numbering_options_default = {'render_numbering': False,
'numbers_font_name': 'serif',
'numbers_font_size': 10,
'numbers_font_style': 'normal',
'numbers_font_weight': 'normal',
'numbers_font_colour': ['k'],
'numbers_horizontal_align': 'center',
'numbers_vertical_align': 'bottom'}
update_numbering_options(numbering_options_wid,
numbering_options_default)
Parameters
----------
numbering_options_wid :
The widget object generated by the `numbering_options()` function.
numbering_options_dict : `dict`
The new set of options. For example:
numbering_options_dict = {'render_numbering': True,
'numbers_font_name': 'serif',
'numbers_font_size': 10,
'numbers_font_style': 'normal',
'numbers_font_weight': 'normal',
'numbers_font_colour': ['k'],
'numbers_horizontal_align': 'center',
'numbers_vertical_align': 'bottom'}
"""
# Assign new options dict to selected_values
numbering_options_wid.selected_values = numbering_options_dict
# update render numbering checkbox
if 'render_numbering' in numbering_options_dict.keys():
numbering_options_wid.children[1].children[0].value = \
numbering_options_dict['render_numbering']
# update numbers_font_name dropdown menu
if 'numbers_font_name' in numbering_options_dict.keys():
numbering_options_wid.children[1].children[1].children[0].value = \
numbering_options_dict['numbers_font_name']
# update numbers_font_size text box
if 'numbers_font_size' in numbering_options_dict.keys():
numbering_options_wid.children[1].children[1].children[1].value = \
int(numbering_options_dict['numbers_font_size'])
# update numbers_font_style dropdown menu
if 'numbers_font_style' in numbering_options_dict.keys():
numbering_options_wid.children[1].children[1].children[2].value = \
numbering_options_dict['numbers_font_style']
# update numbers_font_weight dropdown menu
if 'numbers_font_weight' in numbering_options_dict.keys():
numbering_options_wid.children[1].children[1].children[3].value = \
numbering_options_dict['numbers_font_weight']
# update numbers_font_colour
if 'numbers_font_colour' in numbering_options_dict.keys():
update_colour_selection(
numbering_options_wid.children[1].children[1].children[4],
numbering_options_dict['numbers_font_colour'])
# update numbers_horizontal_align dropdown menu
if 'numbers_horizontal_align' in numbering_options_dict.keys():
numbering_options_wid.children[1].children[1].children[5].value = \
numbering_options_dict['numbers_horizontal_align']
# update numbers_vertical_align dropdown menu
if 'numbers_vertical_align' in numbering_options_dict.keys():
numbering_options_wid.children[1].children[1].children[6].value = \
numbering_options_dict['numbers_vertical_align']
def figure_options(figure_options_default, plot_function=None,
figure_scale_bounds=(0.1, 4), figure_scale_step=0.1,
figure_scale_visible=True, axes_visible=True,
toggle_show_default=True, toggle_show_visible=True):
r"""
Creates a widget with Figure Options. Specifically, it has:
1) A slider that controls the scaling of the figure.
2) A checkbox that controls the visibility of the figure's axes.
3) Font options for the axes.
4) A toggle button that controls the visibility of all the above, i.e.
the figure options.
The structure of the widgets is the following:
figure_options_wid.children = [toggle_button, figure_scale_slider,
show_axes_checkbox, axes_font_name,
axes_font_size, axes_font_style,
axes_font_weight, axes_x_limits,
axes_y_limits]
The returned widget saves the selected values in the following dictionary:
figure_options_wid.selected_values
To fix the alignment within this widget please refer to
`format_figure_options()` function.
Parameters
----------
figure_options_default : `dict`
The initial selected figure options.
Example:
figure_options_default = {'x_scale': 1.,
'y_scale': 1.,
'render_axes': True,
'axes_font_name': 'serif',
'axes_font_size': 10,
'axes_font_style': 'normal',
'axes_font_weight': 'normal',
'axes_x_limits': None,
'axes_y_limits': None}
plot_function : `function` or None, optional
        The plot function that is executed when a widget's value changes.
If None, then nothing is assigned.
figure_scale_bounds : (`float`, `float`), optional
The range of scales that can be optionally applied to the figure.
figure_scale_step : `float`, optional
The step of the scale sliders.
    figure_scale_visible : `boolean`, optional
        The visibility of the figure scale slider.
    axes_visible : `boolean`, optional
        The visibility of the render axes checkbox and the axes font options.
toggle_show_default : `boolean`, optional
Defines whether the options will be visible upon construction.
toggle_show_visible : `boolean`, optional
The visibility of the toggle button.
"""
import IPython.html.widgets as ipywidgets
# Create widgets
# toggle button
but = ipywidgets.ToggleButtonWidget(description='Figure Options',
value=toggle_show_default,
visible=toggle_show_visible)
# figure_scale, render_axes
figure_scale = ipywidgets.FloatSliderWidget(description='Figure scale:',
value=figure_options_default[
'x_scale'],
min=figure_scale_bounds[0],
max=figure_scale_bounds[1],
step=figure_scale_step,
visible=figure_scale_visible)
render_axes = ipywidgets.CheckboxWidget(description='Render axes',
value=figure_options_default[
'render_axes'],
visible=axes_visible)
axes_font_name_dict = OrderedDict()
axes_font_name_dict['serif'] = 'serif'
axes_font_name_dict['sans-serif'] = 'sans-serif'
axes_font_name_dict['cursive'] = 'cursive'
axes_font_name_dict['fantasy'] = 'fantasy'
axes_font_name_dict['monospace'] = 'monospace'
axes_font_name = ipywidgets.DropdownWidget(
values=axes_font_name_dict,
value=figure_options_default['axes_font_name'], description='Font',
visible=axes_visible)
axes_font_size = ipywidgets.BoundedIntTextWidget(
description='Size', value=figure_options_default['axes_font_size'],
min=0, visible=axes_visible)
axes_font_style_dict = OrderedDict()
axes_font_style_dict['normal'] = 'normal'
axes_font_style_dict['italic'] = 'italic'
axes_font_style_dict['oblique'] = 'oblique'
axes_font_style = ipywidgets.DropdownWidget(
values=axes_font_style_dict,
value=figure_options_default['axes_font_style'],
description='Style', visible=axes_visible)
axes_font_weight_dict = OrderedDict()
axes_font_weight_dict['normal'] = 'normal'
axes_font_weight_dict['ultralight'] = 'ultralight'
axes_font_weight_dict['light'] = 'light'
axes_font_weight_dict['regular'] = 'regular'
axes_font_weight_dict['book'] = 'book'
axes_font_weight_dict['medium'] = 'medium'
axes_font_weight_dict['roman'] = 'roman'
axes_font_weight_dict['semibold'] = 'semibold'
axes_font_weight_dict['demibold'] = 'demibold'
axes_font_weight_dict['demi'] = 'demi'
axes_font_weight_dict['bold'] = 'bold'
axes_font_weight_dict['heavy'] = 'heavy'
axes_font_weight_dict['extra bold'] = 'extra bold'
axes_font_weight_dict['black'] = 'black'
axes_font_weight = ipywidgets.DropdownWidget(
values=axes_font_weight_dict,
value=figure_options_default['axes_font_weight'],
description='Weight', visible=axes_visible)
if figure_options_default['axes_x_limits'] is None:
tmp1 = False
tmp2 = 0.
tmp3 = 100.
else:
tmp1 = True
tmp2 = figure_options_default['axes_x_limits'][0]
tmp3 = figure_options_default['axes_x_limits'][1]
axes_x_limits_enable = ipywidgets.CheckboxWidget(value=tmp1,
description='X limits')
axes_x_limits_from = ipywidgets.FloatTextWidget(value=tmp2, description='')
axes_x_limits_to = ipywidgets.FloatTextWidget(value=tmp3, description='')
axes_x_limits = ipywidgets.ContainerWidget(children=[axes_x_limits_enable,
axes_x_limits_from,
axes_x_limits_to])
if figure_options_default['axes_y_limits'] is None:
tmp1 = False
tmp2 = 0.
tmp3 = 100.
else:
tmp1 = True
tmp2 = figure_options_default['axes_y_limits'][0]
tmp3 = figure_options_default['axes_y_limits'][1]
axes_y_limits_enable = ipywidgets.CheckboxWidget(value=tmp1,
description='Y limits')
axes_y_limits_from = ipywidgets.FloatTextWidget(value=tmp2, description='')
axes_y_limits_to = ipywidgets.FloatTextWidget(value=tmp3, description='')
axes_y_limits = ipywidgets.ContainerWidget(children=[axes_y_limits_enable,
axes_y_limits_from,
axes_y_limits_to])
# Final widget
figure_options_wid = ipywidgets.ContainerWidget(children=[but, figure_scale,
render_axes,
axes_font_name,
axes_font_size,
axes_font_style,
axes_font_weight,
axes_x_limits,
axes_y_limits])
# Assign output
figure_options_wid.selected_values = figure_options_default
# font options visibility
def options_visible(name, value):
axes_font_name.disabled = not value
axes_font_size.disabled = not value
axes_font_style.disabled = not value
axes_font_weight.disabled = not value
axes_x_limits_enable.disabled = not value
axes_y_limits_enable.disabled = not value
if value:
axes_x_limits_from.disabled = not axes_x_limits_enable.value
axes_x_limits_to.disabled = not axes_x_limits_enable.value
axes_y_limits_from.disabled = not axes_y_limits_enable.value
axes_y_limits_to.disabled = not axes_y_limits_enable.value
else:
axes_x_limits_from.disabled = True
axes_x_limits_to.disabled = True
axes_y_limits_from.disabled = True
axes_y_limits_to.disabled = True
options_visible('', figure_options_default['render_axes'])
render_axes.on_trait_change(options_visible, 'value')
# get options functions
def save_render_axes(name, value):
figure_options_wid.selected_values['render_axes'] = value
render_axes.on_trait_change(save_render_axes, 'value')
def save_axes_font_name(name, value):
figure_options_wid.selected_values['axes_font_name'] = value
axes_font_name.on_trait_change(save_axes_font_name, 'value')
def save_axes_font_size(name, value):
figure_options_wid.selected_values['axes_font_size'] = int(value)
axes_font_size.on_trait_change(save_axes_font_size, 'value')
def save_axes_font_style(name, value):
figure_options_wid.selected_values['axes_font_style'] = value
axes_font_style.on_trait_change(save_axes_font_style, 'value')
def save_axes_font_weight(name, value):
figure_options_wid.selected_values['axes_font_weight'] = value
axes_font_weight.on_trait_change(save_axes_font_weight, 'value')
def axes_x_limits_disable(name, value):
axes_x_limits_from.disabled = not value
axes_x_limits_to.disabled = not value
axes_x_limits_disable('', axes_x_limits_enable.value)
axes_x_limits_enable.on_trait_change(axes_x_limits_disable, 'value')
def axes_y_limits_disable(name, value):
axes_y_limits_from.disabled = not value
axes_y_limits_to.disabled = not value
axes_y_limits_disable('', axes_y_limits_enable.value)
axes_y_limits_enable.on_trait_change(axes_y_limits_disable, 'value')
def save_axes_x_limits(name, value):
if axes_x_limits_enable.value:
figure_options_wid.selected_values['axes_x_limits'] = \
(axes_x_limits_from.value, axes_x_limits_to.value)
else:
figure_options_wid.selected_values['axes_x_limits'] = None
axes_x_limits_enable.on_trait_change(save_axes_x_limits, 'value')
axes_x_limits_from.on_trait_change(save_axes_x_limits, 'value')
axes_x_limits_to.on_trait_change(save_axes_x_limits, 'value')
def save_axes_y_limits(name, value):
if axes_y_limits_enable.value:
figure_options_wid.selected_values['axes_y_limits'] = \
(axes_y_limits_from.value, axes_y_limits_to.value)
else:
figure_options_wid.selected_values['axes_y_limits'] = None
axes_y_limits_enable.on_trait_change(save_axes_y_limits, 'value')
axes_y_limits_from.on_trait_change(save_axes_y_limits, 'value')
axes_y_limits_to.on_trait_change(save_axes_y_limits, 'value')
def save_scale(name, value):
figure_options_wid.selected_values['x_scale'] = value
figure_options_wid.selected_values['y_scale'] = value
figure_scale.on_trait_change(save_scale, 'value')
# Toggle button function
def toggle_fun(name, value):
figure_scale.visible = value
render_axes.visible = value
axes_font_name.visible = value
axes_font_size.visible = value
axes_font_style.visible = value
axes_font_weight.visible = value
axes_x_limits.visible = value
axes_y_limits.visible = value
toggle_fun('', toggle_show_default)
but.on_trait_change(toggle_fun, 'value')
# assign plot_function
if plot_function is not None:
figure_scale.on_trait_change(plot_function, 'value')
render_axes.on_trait_change(plot_function, 'value')
axes_font_name.on_trait_change(plot_function, 'value')
axes_font_size.on_trait_change(plot_function, 'value')
axes_font_style.on_trait_change(plot_function, 'value')
axes_font_weight.on_trait_change(plot_function, 'value')
axes_x_limits_from.on_trait_change(plot_function, 'value')
axes_x_limits_to.on_trait_change(plot_function, 'value')
axes_x_limits_enable.on_trait_change(plot_function, 'value')
axes_y_limits_from.on_trait_change(plot_function, 'value')
axes_y_limits_to.on_trait_change(plot_function, 'value')
axes_y_limits_enable.on_trait_change(plot_function, 'value')
return figure_options_wid
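# A minimal usage sketch for `figure_options` (illustrative only, not executed
# here; it assumes an IPython notebook session with `display` imported from
# IPython.display). The defaults dict mirrors the docstring example above:
#
#     figure_options_default = {'x_scale': 1.,
#                               'y_scale': 1.,
#                               'render_axes': True,
#                               'axes_font_name': 'serif',
#                               'axes_font_size': 10,
#                               'axes_font_style': 'normal',
#                               'axes_font_weight': 'normal',
#                               'axes_x_limits': None,
#                               'axes_y_limits': None}
#     figure_options_wid = figure_options(figure_options_default)
#     display(figure_options_wid)
#     format_figure_options(figure_options_wid)
#     # figure_options_wid.selected_values['x_scale'] tracks the scale slider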
def format_figure_options(figure_options_wid, container_padding='6px',
container_margin='6px',
container_border='1px solid black',
toggle_button_font_weight='bold',
border_visible=True):
r"""
    Function that corrects the alignment (style format) of a given
    figure_options widget. Usage example:
figure_options_wid = figure_options()
display(figure_options_wid)
format_figure_options(figure_options_wid)
Parameters
----------
figure_options_wid :
The widget object generated by the `figure_options()` function.
container_padding : `str`, optional
The padding around the widget, e.g. '6px'
container_margin : `str`, optional
The margin around the widget, e.g. '6px'
container_border : `str`, optional
The border around the widget, e.g. '1px solid black'
toggle_button_font_weight : `str`
The font weight of the toggle button, e.g. 'bold'
border_visible : `boolean`, optional
Defines whether to draw the border line around the widget.
"""
# fix figure scale slider width
figure_options_wid.children[1].set_css('width', '3cm')
# fix font size width
figure_options_wid.children[4].set_css('width', '1cm')
# align and set width of axes_x_limits
figure_options_wid.children[7].remove_class('vbox')
figure_options_wid.children[7].add_class('hbox')
figure_options_wid.children[7].children[1].set_css('width', '1cm')
figure_options_wid.children[7].children[2].set_css('width', '1cm')
# align and set width of axes_y_limits
figure_options_wid.children[8].remove_class('vbox')
figure_options_wid.children[8].add_class('hbox')
figure_options_wid.children[8].children[1].set_css('width', '1cm')
figure_options_wid.children[8].children[2].set_css('width', '1cm')
# set toggle button font bold
figure_options_wid.children[0].set_css('font-weight',
toggle_button_font_weight)
# margin and border around container widget
figure_options_wid.set_css('padding', container_padding)
figure_options_wid.set_css('margin', container_margin)
if border_visible:
figure_options_wid.set_css('border', container_border)
def update_figure_options(figure_options_wid, figure_options_dict):
r"""
Function that updates the state of a given figure_options widget. Usage
example:
figure_options_default = {'x_scale': 1.,
'y_scale': 1.,
'render_axes': True,
'axes_font_name': 'serif',
'axes_font_size': 10,
'axes_font_style': 'normal',
'axes_font_weight': 'normal',
'axes_x_limits': None,
'axes_y_limits': None}
figure_options_wid = figure_options(figure_options_default)
display(figure_options_wid)
format_figure_options(figure_options_wid)
figure_options_default = {'x_scale': 1.,
'y_scale': 1.,
'render_axes': True,
'axes_font_name': 'serif',
'axes_font_size': 10,
'axes_font_style': 'normal',
'axes_font_weight': 'normal',
'axes_x_limits': None,
'axes_y_limits': None}
update_figure_options(figure_options_wid, figure_options_default)
Parameters
----------
figure_options_wid :
The widget object generated by the `figure_options()` function.
figure_options_dict : `dict`
The new set of options. For example:
figure_options_dict = {'x_scale': 1.,
'y_scale': 1.,
'render_axes': True,
'axes_font_name': 'serif',
'axes_font_size': 10,
'axes_font_style': 'normal',
'axes_font_weight': 'normal',
'axes_x_limits': None,
'axes_y_limits': None}
"""
# Assign new options dict to selected_values
figure_options_wid.selected_values = figure_options_dict
# update scale slider
if 'x_scale' in figure_options_dict.keys():
figure_options_wid.children[1].value = figure_options_dict['x_scale']
elif 'y_scale' in figure_options_dict.keys():
figure_options_wid.children[1].value = figure_options_dict['y_scale']
# update render axes checkbox
if 'render_axes' in figure_options_dict.keys():
figure_options_wid.children[2].value = \
figure_options_dict['render_axes']
# update axes_font_name dropdown menu
if 'axes_font_name' in figure_options_dict.keys():
figure_options_wid.children[3].value = \
figure_options_dict['axes_font_name']
# update axes_font_size text box
if 'axes_font_size' in figure_options_dict.keys():
figure_options_wid.children[4].value = \
int(figure_options_dict['axes_font_size'])
# update axes_font_style dropdown menu
if 'axes_font_style' in figure_options_dict.keys():
figure_options_wid.children[5].value = \
figure_options_dict['axes_font_style']
# update axes_font_weight dropdown menu
if 'axes_font_weight' in figure_options_dict.keys():
figure_options_wid.children[6].value = \
figure_options_dict['axes_font_weight']
# update axes_x_limits
if 'axes_x_limits' in figure_options_dict.keys():
if figure_options_dict['axes_x_limits'] is None:
tmp1 = False
tmp2 = 0.
tmp3 = 100.
else:
tmp1 = True
tmp2 = figure_options_dict['axes_x_limits'][0]
tmp3 = figure_options_dict['axes_x_limits'][1]
figure_options_wid.children[7].children[0].value = tmp1
figure_options_wid.children[7].children[1].value = tmp2
figure_options_wid.children[7].children[2].value = tmp3
# update axes_y_limits
if 'axes_y_limits' in figure_options_dict.keys():
if figure_options_dict['axes_y_limits'] is None:
tmp1 = False
tmp2 = 0.
tmp3 = 100.
else:
tmp1 = True
tmp2 = figure_options_dict['axes_y_limits'][0]
tmp3 = figure_options_dict['axes_y_limits'][1]
figure_options_wid.children[8].children[0].value = tmp1
figure_options_wid.children[8].children[1].value = tmp2
figure_options_wid.children[8].children[2].value = tmp3
def figure_options_two_scales(figure_options_default, plot_function=None,
coupled_default=False,
figure_scales_bounds=(0.1, 4),
figure_scales_step=0.1,
figure_scales_visible=True,
axes_visible=True, toggle_show_default=True,
toggle_show_visible=True):
r"""
Creates a widget with Figure Options. Specifically, it has:
        1) Two sliders that control the X and Y scaling of the figure, plus a
           checkbox that couples them.
2) A checkbox that controls the visibility of the figure's axes.
3) Font options for the axes.
4) A toggle button that controls the visibility of all the above, i.e.
the figure options.
The structure of the widgets is the following:
figure_options_wid.children = [toggle_button, figure_scale_slider,
show_axes_checkbox, axes_font_name,
axes_font_size, axes_font_style,
axes_font_weight, axes_x_limits,
axes_y_limits]
The returned widget saves the selected values in the following dictionary:
figure_options_wid.selected_values
To fix the alignment within this widget please refer to
`format_figure_options()` function.
Parameters
----------
figure_options_default : `dict`
The initial selected figure options.
Example:
figure_options_default = {'x_scale': 1.,
'y_scale': 1.,
'render_axes': True,
'axes_font_name': 'serif',
'axes_font_size': 10,
'axes_font_style': 'normal',
'axes_font_weight': 'normal',
'axes_x_limits': None,
'axes_y_limits': None}
plot_function : `function` or None, optional
        The plot function that is executed when a widget's value changes.
        If None, then nothing is assigned.
    coupled_default : `boolean`, optional
        Defines whether the X and Y scale sliders are initially coupled, in
        which case the Y slider is disabled and follows the X slider.
    figure_scales_bounds : (`float`, `float`), optional
        The range of scales that can be optionally applied to the figure.
    figure_scales_step : `float`, optional
        The step of the scale sliders.
    figure_scales_visible : `boolean`, optional
        The visibility of the figure scales sliders.
    axes_visible : `boolean`, optional
        The visibility of the render axes checkbox and the axes font options.
toggle_show_default : `boolean`, optional
Defines whether the options will be visible upon construction.
toggle_show_visible : `boolean`, optional
The visibility of the toggle button.
"""
import IPython.html.widgets as ipywidgets
# Create widgets
# toggle button
but = ipywidgets.ToggleButtonWidget(description='Figure Options',
value=toggle_show_default,
visible=toggle_show_visible)
# figure_scale, render_axes
x_scale = ipywidgets.FloatSliderWidget(description='Figure size: X scale',
value=figure_options_default[
'x_scale'],
min=figure_scales_bounds[0],
max=figure_scales_bounds[1],
step=figure_scales_step)
y_scale = ipywidgets.FloatSliderWidget(description='Y scale',
value=figure_options_default[
'y_scale'],
min=figure_scales_bounds[0],
max=figure_scales_bounds[1],
step=figure_scales_step,
disabled=coupled_default)
coupled = ipywidgets.CheckboxWidget(description='Coupled',
value=coupled_default)
figure_scale = ipywidgets.ContainerWidget(
children=[x_scale, y_scale, coupled],
visible=figure_scales_visible)
render_axes = ipywidgets.CheckboxWidget(description='Render axes',
value=figure_options_default[
'render_axes'],
visible=axes_visible)
axes_font_name_dict = OrderedDict()
axes_font_name_dict['serif'] = 'serif'
axes_font_name_dict['sans-serif'] = 'sans-serif'
axes_font_name_dict['cursive'] = 'cursive'
axes_font_name_dict['fantasy'] = 'fantasy'
axes_font_name_dict['monospace'] = 'monospace'
axes_font_name = ipywidgets.DropdownWidget(
values=axes_font_name_dict,
value=figure_options_default['axes_font_name'], description='Font',
visible=axes_visible)
axes_font_size = ipywidgets.BoundedIntTextWidget(
description='Size', value=figure_options_default['axes_font_size'],
min=0, visible=axes_visible)
axes_font_style_dict = OrderedDict()
axes_font_style_dict['normal'] = 'normal'
axes_font_style_dict['italic'] = 'italic'
axes_font_style_dict['oblique'] = 'oblique'
axes_font_style = ipywidgets.DropdownWidget(
values=axes_font_style_dict,
value=figure_options_default['axes_font_style'],
description='Style', visible=axes_visible)
axes_font_weight_dict = OrderedDict()
axes_font_weight_dict['normal'] = 'normal'
axes_font_weight_dict['ultralight'] = 'ultralight'
axes_font_weight_dict['light'] = 'light'
axes_font_weight_dict['regular'] = 'regular'
axes_font_weight_dict['book'] = 'book'
axes_font_weight_dict['medium'] = 'medium'
axes_font_weight_dict['roman'] = 'roman'
axes_font_weight_dict['semibold'] = 'semibold'
axes_font_weight_dict['demibold'] = 'demibold'
axes_font_weight_dict['demi'] = 'demi'
axes_font_weight_dict['bold'] = 'bold'
axes_font_weight_dict['heavy'] = 'heavy'
axes_font_weight_dict['extra bold'] = 'extra bold'
axes_font_weight_dict['black'] = 'black'
axes_font_weight = ipywidgets.DropdownWidget(
values=axes_font_weight_dict,
value=figure_options_default['axes_font_weight'],
description='Weight', visible=axes_visible)
if figure_options_default['axes_x_limits'] is None:
tmp1 = False
tmp2 = 0.
tmp3 = 0.
else:
tmp1 = True
tmp2 = figure_options_default['axes_x_limits'][0]
tmp3 = figure_options_default['axes_x_limits'][1]
axes_x_limits_enable = ipywidgets.CheckboxWidget(value=tmp1,
description='X limits')
axes_x_limits_from = ipywidgets.FloatTextWidget(value=tmp2, description='')
axes_x_limits_to = ipywidgets.FloatTextWidget(value=tmp3, description='')
axes_x_limits = ipywidgets.ContainerWidget(children=[axes_x_limits_enable,
axes_x_limits_from,
axes_x_limits_to])
if figure_options_default['axes_y_limits'] is None:
tmp1 = False
tmp2 = 0.
tmp3 = 0.
else:
tmp1 = True
tmp2 = figure_options_default['axes_y_limits'][0]
tmp3 = figure_options_default['axes_y_limits'][1]
axes_y_limits_enable = ipywidgets.CheckboxWidget(value=tmp1,
description='Y limits')
axes_y_limits_from = ipywidgets.FloatTextWidget(value=tmp2, description='')
axes_y_limits_to = ipywidgets.FloatTextWidget(value=tmp3, description='')
axes_y_limits = ipywidgets.ContainerWidget(children=[axes_y_limits_enable,
axes_y_limits_from,
axes_y_limits_to])
# Final widget
figure_options_wid = ipywidgets.ContainerWidget(children=[but, figure_scale,
render_axes,
axes_font_name,
axes_font_size,
axes_font_style,
axes_font_weight,
axes_x_limits,
axes_y_limits])
# Assign output
figure_options_wid.selected_values = figure_options_default
# font options visibility
def options_visible(name, value):
axes_font_name.disabled = not value
axes_font_size.disabled = not value
axes_font_style.disabled = not value
axes_font_weight.disabled = not value
axes_x_limits.disabled = not value
axes_y_limits.disabled = not value
options_visible('', figure_options_default['render_axes'])
render_axes.on_trait_change(options_visible, 'value')
# Coupled sliders function
def coupled_sliders(name, value):
y_scale.disabled = value
coupled_sliders('', coupled_default)
coupled.on_trait_change(coupled_sliders, 'value')
# get options functions
def save_render_axes(name, value):
figure_options_wid.selected_values['render_axes'] = value
render_axes.on_trait_change(save_render_axes, 'value')
def save_axes_font_name(name, value):
figure_options_wid.selected_values['axes_font_name'] = value
axes_font_name.on_trait_change(save_axes_font_name, 'value')
def save_axes_font_size(name, value):
figure_options_wid.selected_values['axes_font_size'] = int(value)
axes_font_size.on_trait_change(save_axes_font_size, 'value')
def save_axes_font_style(name, value):
figure_options_wid.selected_values['axes_font_style'] = value
axes_font_style.on_trait_change(save_axes_font_style, 'value')
def save_axes_font_weight(name, value):
figure_options_wid.selected_values['axes_font_weight'] = value
axes_font_weight.on_trait_change(save_axes_font_weight, 'value')
def axes_x_limits_disable(name, value):
axes_x_limits_from.disabled = not value
axes_x_limits_to.disabled = not value
axes_x_limits_enable.on_trait_change(axes_x_limits_disable, 'value')
def axes_y_limits_disable(name, value):
axes_y_limits_from.disabled = not value
axes_y_limits_to.disabled = not value
axes_y_limits_enable.on_trait_change(axes_y_limits_disable, 'value')
def save_axes_x_limits(name, value):
if axes_x_limits_enable.value:
figure_options_wid.selected_values['axes_x_limits'] = \
(axes_x_limits_from.value, axes_x_limits_to.value)
else:
figure_options_wid.selected_values['axes_x_limits'] = None
axes_x_limits_enable.on_trait_change(save_axes_x_limits, 'value')
axes_x_limits_from.on_trait_change(save_axes_x_limits, 'value')
axes_x_limits_to.on_trait_change(save_axes_x_limits, 'value')
def save_axes_y_limits(name, value):
if axes_y_limits_enable.value:
figure_options_wid.selected_values['axes_y_limits'] = \
(axes_y_limits_from.value, axes_y_limits_to.value)
else:
figure_options_wid.selected_values['axes_y_limits'] = None
axes_y_limits_enable.on_trait_change(save_axes_y_limits, 'value')
axes_y_limits_from.on_trait_change(save_axes_y_limits, 'value')
axes_y_limits_to.on_trait_change(save_axes_y_limits, 'value')
def save_x_scale(name, old_value, value):
figure_options_wid.selected_values['x_scale'] = value
if coupled.value:
y_scale.value += value - old_value
x_scale.on_trait_change(save_x_scale, 'value')
def save_y_scale(name, value):
figure_options_wid.selected_values['y_scale'] = value
y_scale.on_trait_change(save_y_scale, 'value')
# Toggle button function
def toggle_fun(name, value):
figure_scale.visible = value
render_axes.visible = value
axes_font_name.visible = value
axes_font_size.visible = value
axes_font_style.visible = value
axes_font_weight.visible = value
axes_x_limits.visible = value
axes_y_limits.visible = value
toggle_fun('', toggle_show_default)
but.on_trait_change(toggle_fun, 'value')
# assign plot_function
if plot_function is not None:
x_scale.on_trait_change(plot_function, 'value')
y_scale.on_trait_change(plot_function, 'value')
coupled.on_trait_change(plot_function, 'value')
render_axes.on_trait_change(plot_function, 'value')
axes_font_name.on_trait_change(plot_function, 'value')
axes_font_size.on_trait_change(plot_function, 'value')
axes_font_style.on_trait_change(plot_function, 'value')
axes_font_weight.on_trait_change(plot_function, 'value')
axes_x_limits_from.on_trait_change(plot_function, 'value')
axes_x_limits_to.on_trait_change(plot_function, 'value')
axes_x_limits_enable.on_trait_change(plot_function, 'value')
axes_y_limits_from.on_trait_change(plot_function, 'value')
axes_y_limits_to.on_trait_change(plot_function, 'value')
axes_y_limits_enable.on_trait_change(plot_function, 'value')
return figure_options_wid
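# A minimal usage sketch for `figure_options_two_scales` (illustrative only,
# not executed here). It reuses a `figure_options_default` dict like the one
# in the sketch above; with `coupled_default=True` the Y scale slider starts
# disabled and follows changes made to the X scale slider:
#
#     figure_options_wid = figure_options_two_scales(figure_options_default,
#                                                    coupled_default=True)
#     display(figure_options_wid)
#     format_figure_options_two_scales(figure_options_wid)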
def format_figure_options_two_scales(figure_options_wid,
container_padding='6px',
container_margin='6px',
container_border='1px solid black',
toggle_button_font_weight='bold',
border_visible=True):
r"""
    Function that corrects the alignment (style format) of a given
    figure_options_two_scales widget. Usage example:
figure_options_wid = figure_options_two_scales()
display(figure_options_wid)
format_figure_options_two_scales(figure_options_wid)
Parameters
----------
figure_options_wid :
The widget object generated by the `figure_options_two_scales()`
function.
container_padding : `str`, optional
The padding around the widget, e.g. '6px'
container_margin : `str`, optional
The margin around the widget, e.g. '6px'
container_border : `str`, optional
The border around the widget, e.g. '1px solid black'
toggle_button_font_weight : `str`
The font weight of the toggle button, e.g. 'bold'
border_visible : `boolean`, optional
Defines whether to draw the border line around the widget.
"""
# align figure scale sliders and checkbox
# figure_options_wid.children[1].remove_class('vbox')
# figure_options_wid.children[1].add_class('hbox')
figure_options_wid.children[1].add_class('align-end')
# fix figure scale sliders width
figure_options_wid.children[1].children[0].set_css('width', '3cm')
figure_options_wid.children[1].children[1].set_css('width', '3cm')
# fix font size width
figure_options_wid.children[4].set_css('width', '1cm')
# align and set width of axes_x_limits
figure_options_wid.children[7].remove_class('vbox')
figure_options_wid.children[7].add_class('hbox')
figure_options_wid.children[7].children[1].set_css('width', '1cm')
figure_options_wid.children[7].children[2].set_css('width', '1cm')
# align and set width of axes_y_limits
figure_options_wid.children[8].remove_class('vbox')
figure_options_wid.children[8].add_class('hbox')
figure_options_wid.children[8].children[1].set_css('width', '1cm')
figure_options_wid.children[8].children[2].set_css('width', '1cm')
# set toggle button font bold
figure_options_wid.children[0].set_css('font-weight',
toggle_button_font_weight)
# margin and border around container widget
figure_options_wid.set_css('padding', container_padding)
figure_options_wid.set_css('margin', container_margin)
if border_visible:
figure_options_wid.set_css('border', container_border)
def update_figure_options_two_scales(figure_options_wid, figure_options_dict):
r"""
Function that updates the state of a given figure_options_two_scales widget.
Usage example:
figure_options_default = {'x_scale': 1.,
'y_scale': 1.,
'render_axes': True,
'axes_font_name': 'serif',
'axes_font_size': 10,
'axes_font_style': 'normal',
'axes_font_weight': 'normal',
'axes_x_limits': None,
'axes_y_limits': None}
        figure_options_wid = figure_options_two_scales(figure_options_default)
display(figure_options_wid)
format_figure_options_two_scales(figure_options_wid)
figure_options_default = {'x_scale': 1.,
'y_scale': 1.,
'render_axes': True,
'axes_font_name': 'serif',
'axes_font_size': 10,
'axes_font_style': 'normal',
'axes_font_weight': 'normal',
'axes_x_limits': None,
'axes_y_limits': None}
        update_figure_options_two_scales(figure_options_wid,
                                         figure_options_default)
Parameters
----------
figure_options_wid :
The widget object generated by the `figure_options_two_scales()`
function.
figure_options_dict : `dict`
The new set of options. For example:
figure_options_default = {'x_scale': 1.,
'y_scale': 1.,
'render_axes': True,
'axes_font_name': 'serif',
'axes_font_size': 10,
'axes_font_style': 'normal',
'axes_font_weight': 'normal',
'axes_x_limits': None,
'axes_y_limits': None}
"""
# Assign new options dict to selected_values
figure_options_wid.selected_values = figure_options_dict
# update scale slider
if ('x_scale' in figure_options_dict.keys() and
'y_scale' not in figure_options_dict.keys()):
figure_options_wid.children[1].children[0].value = \
figure_options_dict['x_scale']
figure_options_wid.children[1].children[2].value = False
elif ('x_scale' not in figure_options_dict.keys() and
'y_scale' in figure_options_dict.keys()):
figure_options_wid.children[1].children[1].value = \
figure_options_dict['y_scale']
figure_options_wid.children[1].children[2].value = False
elif ('x_scale' in figure_options_dict.keys() and
'y_scale' in figure_options_dict.keys()):
figure_options_wid.children[1].children[0].value = \
figure_options_dict['x_scale']
figure_options_wid.children[1].children[1].value = \
figure_options_dict['y_scale']
figure_options_wid.children[1].children[2].value = \
figure_options_dict['x_scale'] == figure_options_dict['y_scale']
# update render axes checkbox
if 'render_axes' in figure_options_dict.keys():
figure_options_wid.children[2].value = \
figure_options_dict['render_axes']
# update axes_font_name dropdown menu
if 'axes_font_name' in figure_options_dict.keys():
figure_options_wid.children[3].value = \
figure_options_dict['axes_font_name']
# update axes_font_size text box
if 'axes_font_size' in figure_options_dict.keys():
figure_options_wid.children[4].value = \
int(figure_options_dict['axes_font_size'])
# update axes_font_style dropdown menu
if 'axes_font_style' in figure_options_dict.keys():
figure_options_wid.children[5].value = \
figure_options_dict['axes_font_style']
# update axes_font_weight dropdown menu
if 'axes_font_weight' in figure_options_dict.keys():
figure_options_wid.children[6].value = \
figure_options_dict['axes_font_weight']
# update axes_x_limits
if 'axes_x_limits' in figure_options_dict.keys():
if figure_options_dict['axes_x_limits'] is None:
tmp1 = False
tmp2 = 0.
tmp3 = 0.
else:
tmp1 = True
tmp2 = figure_options_dict['axes_x_limits'][0]
tmp3 = figure_options_dict['axes_x_limits'][1]
figure_options_wid.children[7].children[0].value = tmp1
figure_options_wid.children[7].children[1].value = tmp2
figure_options_wid.children[7].children[2].value = tmp3
# update axes_y_limits
if 'axes_y_limits' in figure_options_dict.keys():
if figure_options_dict['axes_y_limits'] is None:
tmp1 = False
tmp2 = 0.
tmp3 = 0.
else:
tmp1 = True
tmp2 = figure_options_dict['axes_y_limits'][0]
tmp3 = figure_options_dict['axes_y_limits'][1]
figure_options_wid.children[8].children[0].value = tmp1
figure_options_wid.children[8].children[1].value = tmp2
figure_options_wid.children[8].children[2].value = tmp3
def legend_options(legend_options_default, plot_function=None,
toggle_show_visible=True, toggle_show_default=True,
toggle_title='Legend Options',
show_checkbox_title='Render legend'):
r"""
Creates a widget with Legend Options. Specifically, it has:
        1) A checkbox that controls the legend's visibility.
        2) A tab widget with legend_location, font and formatting options.
        3) A toggle button that controls the visibility of all the above, i.e.
           the legend options.
The structure of the widgets is the following:
legend_options_wid.children = [toggle_button, options]
options.children = [show_legend_checkbox, other_options]
other_options.children = [legend_location, font, formatting]
...
The returned widget saves the selected values in the following dictionary:
legend_options_wid.selected_values
To fix the alignment within this widget please refer to
`format_legend_options()` function.
Parameters
----------
legend_options_default : `dict`
        The initial selected legend options.
Example:
legend_options_default = {'render_legend':True,
'legend_title':'',
'legend_font_name':'serif',
'legend_font_style':'normal',
'legend_font_size':10,
'legend_font_weight':'normal',
'legend_marker_scale':1.,
'legend_location':2,
'legend_bbox_to_anchor':(1.05, 1.),
'legend_border_axes_pad':1.,
'legend_n_columns':1,
'legend_horizontal_spacing':1.,
'legend_vertical_spacing':1.,
'legend_border':True,
'legend_border_padding':0.5,
'legend_shadow':False,
'legend_rounded_corners':True}
plot_function : `function` or None, optional
        The plot function that is executed when a widget's value changes.
If None, then nothing is assigned.
toggle_show_default : `boolean`, optional
Defines whether the options will be visible upon construction.
toggle_show_visible : `boolean`, optional
The visibility of the toggle button.
toggle_title : `str`, optional
        The title of the toggle button.
show_checkbox_title : `str`, optional
        The description of the show legend checkbox.
"""
import IPython.html.widgets as ipywidgets
# Create widgets
# toggle button
but = ipywidgets.ToggleButtonWidget(description=toggle_title,
value=toggle_show_default,
visible=toggle_show_visible)
# render legend
render_legend = ipywidgets.CheckboxWidget(
description=show_checkbox_title,
value=legend_options_default['render_legend'])
# font-related
legend_font_name_dict = OrderedDict()
legend_font_name_dict['serif'] = 'serif'
legend_font_name_dict['sans-serif'] = 'sans-serif'
legend_font_name_dict['cursive'] = 'cursive'
legend_font_name_dict['fantasy'] = 'fantasy'
legend_font_name_dict['monospace'] = 'monospace'
legend_font_name = ipywidgets.DropdownWidget(
values=legend_font_name_dict,
value=legend_options_default['legend_font_name'], description='Font')
legend_font_size = ipywidgets.BoundedIntTextWidget(
description='Size', value=legend_options_default['legend_font_size'],
min=0)
legend_font_style_dict = OrderedDict()
legend_font_style_dict['normal'] = 'normal'
legend_font_style_dict['italic'] = 'italic'
legend_font_style_dict['oblique'] = 'oblique'
legend_font_style = ipywidgets.DropdownWidget(
values=legend_font_style_dict,
value=legend_options_default['legend_font_style'], description='Style')
legend_font_weight_dict = OrderedDict()
legend_font_weight_dict['normal'] = 'normal'
legend_font_weight_dict['ultralight'] = 'ultralight'
legend_font_weight_dict['light'] = 'light'
legend_font_weight_dict['regular'] = 'regular'
legend_font_weight_dict['book'] = 'book'
legend_font_weight_dict['medium'] = 'medium'
legend_font_weight_dict['roman'] = 'roman'
legend_font_weight_dict['semibold'] = 'semibold'
legend_font_weight_dict['demibold'] = 'demibold'
legend_font_weight_dict['demi'] = 'demi'
legend_font_weight_dict['bold'] = 'bold'
legend_font_weight_dict['heavy'] = 'heavy'
legend_font_weight_dict['extra bold'] = 'extra bold'
legend_font_weight_dict['black'] = 'black'
legend_font_weight = ipywidgets.DropdownWidget(
values=legend_font_weight_dict,
value=legend_options_default['legend_font_weight'],
description='Weight')
legend_title = ipywidgets.TextWidget(description='Title',
value=legend_options_default[
'legend_title'])
font_cont_tmp = ipywidgets.ContainerWidget(
children=[ipywidgets.ContainerWidget(children=[legend_font_name,
legend_font_size]),
ipywidgets.ContainerWidget(children=[legend_font_style,
legend_font_weight])])
font_cont = ipywidgets.ContainerWidget(
children=[legend_title, font_cont_tmp])
# legend_location-related
legend_location_dict = OrderedDict()
legend_location_dict['best'] = 0
legend_location_dict['upper right'] = 1
legend_location_dict['upper left'] = 2
legend_location_dict['lower left'] = 3
legend_location_dict['lower right'] = 4
legend_location_dict['right'] = 5
legend_location_dict['center left'] = 6
legend_location_dict['center right'] = 7
legend_location_dict['lower center'] = 8
legend_location_dict['upper center'] = 9
legend_location_dict['center'] = 10
legend_location = ipywidgets.DropdownWidget(
values=legend_location_dict,
value=legend_options_default['legend_location'],
description='Predefined location')
if legend_options_default['legend_bbox_to_anchor'] is None:
tmp1 = False
tmp2 = 0.
tmp3 = 0.
else:
tmp1 = True
tmp2 = legend_options_default['legend_bbox_to_anchor'][0]
tmp3 = legend_options_default['legend_bbox_to_anchor'][1]
bbox_to_anchor_enable = ipywidgets.CheckboxWidget(
value=tmp1, description='Arbitrary location')
bbox_to_anchor_x = ipywidgets.FloatTextWidget(value=tmp2, description='')
bbox_to_anchor_y = ipywidgets.FloatTextWidget(value=tmp3, description='')
legend_bbox_to_anchor = ipywidgets.ContainerWidget(
children=[bbox_to_anchor_enable,
bbox_to_anchor_x,
bbox_to_anchor_y])
legend_border_axes_pad = ipywidgets.BoundedFloatTextWidget(
value=legend_options_default['legend_border_axes_pad'],
description='Distance to axes', min=0.)
location_cont = ipywidgets.ContainerWidget(
children=[legend_location, legend_bbox_to_anchor,
legend_border_axes_pad])
# formatting-related
legend_n_columns = ipywidgets.BoundedIntTextWidget(
value=legend_options_default['legend_n_columns'], description='Columns',
min=0)
legend_marker_scale = ipywidgets.BoundedFloatTextWidget(
description='Marker scale',
value=legend_options_default['legend_marker_scale'], min=0.)
legend_horizontal_spacing = ipywidgets.BoundedFloatTextWidget(
value=legend_options_default['legend_horizontal_spacing'],
description='Horizontal space', min=0.)
legend_vertical_spacing = ipywidgets.BoundedFloatTextWidget(
value=legend_options_default['legend_vertical_spacing'],
description='Vertical space', min=0.)
spacing = ipywidgets.ContainerWidget(
children=[ipywidgets.ContainerWidget(children=[legend_n_columns,
legend_marker_scale]),
ipywidgets.ContainerWidget(
children=[legend_horizontal_spacing,
legend_vertical_spacing])])
legend_border = ipywidgets.CheckboxWidget(
description='Border',
value=legend_options_default['legend_border'])
legend_border_padding = ipywidgets.BoundedFloatTextWidget(
value=legend_options_default['legend_border_padding'],
description='Border pad', min=0.)
border = ipywidgets.ContainerWidget(
children=[legend_border, legend_border_padding])
legend_shadow = ipywidgets.CheckboxWidget(
description='Shadow', value=legend_options_default['legend_shadow'])
legend_rounded_corners = ipywidgets.CheckboxWidget(
description='Rounded corners',
value=legend_options_default['legend_rounded_corners'])
shadow_fancy = ipywidgets.ContainerWidget(children=[legend_shadow,
legend_rounded_corners])
formatting_cont = ipywidgets.ContainerWidget(
children=[spacing, border, shadow_fancy])
# Options widget
tab_options = ipywidgets.TabWidget(children=[location_cont, font_cont,
formatting_cont])
options_wid = ipywidgets.ContainerWidget(
children=[render_legend, tab_options])
# Final widget
legend_options_wid = ipywidgets.ContainerWidget(children=[but, options_wid])
# Assign output
legend_options_wid.selected_values = legend_options_default
# font options visibility
def options_visible(name, value):
legend_title.disabled = not value
legend_font_name.disabled = not value
legend_font_size.disabled = not value
legend_font_style.disabled = not value
legend_font_weight.disabled = not value
legend_location.disabled = not value
bbox_to_anchor_enable.disabled = not value
bbox_to_anchor_x.disabled = not value or not bbox_to_anchor_enable.value
bbox_to_anchor_y.disabled = not value or not bbox_to_anchor_enable.value
legend_border_axes_pad.disabled = not value
legend_n_columns.disabled = not value
legend_marker_scale.disabled = not value
legend_horizontal_spacing.disabled = not value
legend_vertical_spacing.disabled = not value
legend_border.disabled = not value
legend_border_padding.disabled = not value or not legend_border.value
legend_shadow.disabled = not value
legend_rounded_corners.disabled = not value
options_visible('', legend_options_default['render_legend'])
render_legend.on_trait_change(options_visible, 'value')
# get options functions
def border_pad_disable(name, value):
legend_border_padding.disabled = not value
legend_border.on_trait_change(border_pad_disable, 'value')
def bbox_to_anchor_disable(name, value):
bbox_to_anchor_x.disabled = not value
bbox_to_anchor_y.disabled = not value
bbox_to_anchor_enable.on_trait_change(bbox_to_anchor_disable, 'value')
def save_show_legend(name, value):
legend_options_wid.selected_values['render_legend'] = value
render_legend.on_trait_change(save_show_legend, 'value')
def save_title(name, value):
legend_options_wid.selected_values['legend_title'] = str(value)
legend_title.on_trait_change(save_title, 'value')
def save_fontname(name, value):
legend_options_wid.selected_values['legend_font_name'] = value
legend_font_name.on_trait_change(save_fontname, 'value')
def save_fontsize(name, value):
legend_options_wid.selected_values['legend_font_size'] = int(value)
legend_font_size.on_trait_change(save_fontsize, 'value')
def save_fontstyle(name, value):
legend_options_wid.selected_values['legend_font_style'] = value
legend_font_style.on_trait_change(save_fontstyle, 'value')
def save_fontweight(name, value):
legend_options_wid.selected_values['legend_font_weight'] = value
legend_font_weight.on_trait_change(save_fontweight, 'value')
def save_location(name, value):
legend_options_wid.selected_values['legend_location'] = value
legend_location.on_trait_change(save_location, 'value')
def save_bbox_to_anchor(name, value):
if bbox_to_anchor_enable.value:
legend_options_wid.selected_values['legend_bbox_to_anchor'] = \
(bbox_to_anchor_x.value, bbox_to_anchor_y.value)
else:
legend_options_wid.selected_values['legend_bbox_to_anchor'] = None
bbox_to_anchor_enable.on_trait_change(save_bbox_to_anchor, 'value')
bbox_to_anchor_x.on_trait_change(save_bbox_to_anchor, 'value')
bbox_to_anchor_y.on_trait_change(save_bbox_to_anchor, 'value')
def save_borderaxespad(name, value):
key = 'legend_border_axes_pad'
legend_options_wid.selected_values[key] = float(value)
legend_border_axes_pad.on_trait_change(save_borderaxespad, 'value')
def save_n_columns(name, value):
legend_options_wid.selected_values['legend_n_columns'] = int(value)
legend_n_columns.on_trait_change(save_n_columns, 'value')
def save_markerscale(name, value):
legend_options_wid.selected_values['legend_marker_scale'] = float(value)
legend_marker_scale.on_trait_change(save_markerscale, 'value')
def save_horizontal_spacing(name, value):
legend_options_wid.selected_values['legend_horizontal_spacing'] = \
float(value)
legend_horizontal_spacing.on_trait_change(save_horizontal_spacing, 'value')
def save_vertical_spacing(name, value):
legend_options_wid.selected_values['legend_vertical_spacing'] = \
float(value)
legend_vertical_spacing.on_trait_change(save_vertical_spacing, 'value')
def save_draw_border(name, value):
legend_options_wid.selected_values['legend_border'] = value
legend_border.on_trait_change(save_draw_border, 'value')
def save_border_padding(name, value):
legend_options_wid.selected_values['legend_border_padding'] = \
float(value)
legend_border_padding.on_trait_change(save_border_padding, 'value')
def save_draw_shadow(name, value):
legend_options_wid.selected_values['legend_shadow'] = value
legend_shadow.on_trait_change(save_draw_shadow, 'value')
def save_fancy_corners(name, value):
legend_options_wid.selected_values['legend_rounded_corners'] = value
legend_rounded_corners.on_trait_change(save_fancy_corners, 'value')
# Toggle button function
def toggle_fun(name, value):
options_wid.visible = value
toggle_fun('', toggle_show_default)
but.on_trait_change(toggle_fun, 'value')
# assign plot_function
if plot_function is not None:
render_legend.on_trait_change(plot_function, 'value')
legend_title.on_trait_change(plot_function, 'value')
legend_font_name.on_trait_change(plot_function, 'value')
legend_font_style.on_trait_change(plot_function, 'value')
legend_font_size.on_trait_change(plot_function, 'value')
legend_font_weight.on_trait_change(plot_function, 'value')
legend_location.on_trait_change(plot_function, 'value')
bbox_to_anchor_enable.on_trait_change(plot_function, 'value')
bbox_to_anchor_x.on_trait_change(plot_function, 'value')
bbox_to_anchor_y.on_trait_change(plot_function, 'value')
legend_border_axes_pad.on_trait_change(plot_function, 'value')
legend_n_columns.on_trait_change(plot_function, 'value')
legend_marker_scale.on_trait_change(plot_function, 'value')
legend_horizontal_spacing.on_trait_change(plot_function, 'value')
legend_vertical_spacing.on_trait_change(plot_function, 'value')
legend_border.on_trait_change(plot_function, 'value')
legend_border_padding.on_trait_change(plot_function, 'value')
legend_shadow.on_trait_change(plot_function, 'value')
legend_rounded_corners.on_trait_change(plot_function, 'value')
return legend_options_wid
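# Example (comment only; illustrative, not part of the original module):
#     def plot_function(name, value):  # signature used with on_trait_change above
#         print(legend_wid.selected_values['legend_location'])
#     legend_wid = legend_options(legend_options_default,
#                                 plot_function=plot_function)
#     display(legend_wid)
#     format_legend_options(legend_wid)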
def format_legend_options(legend_options_wid, container_padding='6px',
container_margin='6px',
container_border='1px solid black',
toggle_button_font_weight='bold',
border_visible=True, suboptions_border_visible=False):
r"""
Function that corrects the align (style format) of a given legend_options
widget. Usage example:
legend_options_wid = legend_options()
display(legend_options_wid)
format_legend_options(legend_options_wid)
Parameters
----------
legend_options_wid :
The widget object generated by the `legend_options()` function.
container_padding : `str`, optional
The padding around the widget, e.g. '6px'
container_margin : `str`, optional
The margin around the widget, e.g. '6px'
container_border : `str`, optional
The border around the widget, e.g. '1px solid black'
toggle_button_font_weight : `str`
The font weight of the toggle button, e.g. 'bold'
border_visible : `boolean`, optional
Defines whether to draw the border line around the widget.
suboptions_border_visible : `boolean`, optional
Defines whether to draw the border line around the legend options, under
the show legend checkbox.
"""
# set tab titles
tab_titles = ['Location', 'Font', 'Formatting']
for (k, tl) in enumerate(tab_titles):
legend_options_wid.children[1].children[1].set_title(k, tl)
# align font-related options
# legend_options_wid.children[1].children[1].children[1].children[1].\
# remove_class('vbox')
# legend_options_wid.children[1].children[1].children[1].children[1].\
# add_class('hbox')
# set fontsize and title text box width
legend_options_wid.children[1].children[1].children[1].children[1].children[
0].children[1].set_css('width', '1cm')
legend_options_wid.children[1].children[1].children[1].children[0]. \
set_css('width', '4cm')
# align and set width of bbox_to_anchor
legend_options_wid.children[1].children[1].children[0].children[1]. \
remove_class('vbox')
legend_options_wid.children[1].children[1].children[0].children[1]. \
add_class('hbox')
legend_options_wid.children[1].children[1].children[0].children[1].children[
1]. \
set_css('width', '1cm')
legend_options_wid.children[1].children[1].children[0].children[1].children[
2]. \
set_css('width', '1cm')
# set distance to axes (borderaxespad) text box width
legend_options_wid.children[1].children[1].children[0].children[2]. \
set_css('width', '1cm')
# align and set width of border options
legend_options_wid.children[1].children[1].children[2].children[1]. \
remove_class('vbox')
legend_options_wid.children[1].children[1].children[2].children[1]. \
add_class('hbox')
legend_options_wid.children[1].children[1].children[2].children[1].children[
0]. \
set_css('width', '1cm')
legend_options_wid.children[1].children[1].children[2].children[1].children[
1]. \
set_css('width', '1cm')
# align shadow and fancy checkboxes
legend_options_wid.children[1].children[1].children[2].children[2]. \
remove_class('vbox')
legend_options_wid.children[1].children[1].children[2].children[2]. \
add_class('hbox')
# align and set width of spacing options
legend_options_wid.children[1].children[1].children[2].children[0].children[
1]. \
add_class('align-end')
legend_options_wid.children[1].children[1].children[2].children[0].children[
1].children[0].set_css('width', '1cm')
legend_options_wid.children[1].children[1].children[2].children[0].children[
1].children[1].set_css('width', '1cm')
# set width of n_columns and markerspace
legend_options_wid.children[1].children[1].children[2].children[0].children[
0]. \
add_class('align-end')
legend_options_wid.children[1].children[1].children[2].children[0].children[
0].children[0].set_css('width', '1cm')
legend_options_wid.children[1].children[1].children[2].children[0].children[
0].children[1].set_css('width', '1cm')
# align n_columns with spacing
legend_options_wid.children[1].children[1].children[2].children[0]. \
remove_class('vbox')
legend_options_wid.children[1].children[1].children[2].children[0]. \
add_class('hbox')
# border around options
if suboptions_border_visible:
legend_options_wid.children[1].children[1].set_css('border',
container_border)
# set toggle button font bold
legend_options_wid.children[0].set_css('font-weight',
toggle_button_font_weight)
# margin and border around container widget
legend_options_wid.set_css('padding', container_padding)
legend_options_wid.set_css('margin', container_margin)
if border_visible:
legend_options_wid.set_css('border', container_border)
def update_legend_options(legend_options_wid, legend_options_dict):
r"""
Function that updates the state of a given font_options widget. Usage
example:
legend_options_default = {'render_legend':True,
'legend_title':'',
'legend_font_name':'serif',
'legend_font_style':'normal',
'legend_font_size':10,
'legend_font_weight':'normal',
'legend_marker_scale':1.,
'legend_location':2,
'legend_bbox_to_anchor':(1.05, 1.),
'legend_border_axes_pad':1.,
'legend_n_columns':1,
'legend_horizontal_spacing':1.,
'legend_vertical_spacing':1.,
'legend_border':True,
'legend_border_padding':0.5,
'legend_shadow':False,
'legend_rounded_corners':True}
legend_options_wid = legend_options(legend_options_default)
display(legend_options_wid)
format_legend_options(legend_options_wid)
legend_options_default = {'render_legend':True,
'legend_title':'',
'legend_font_name':'serif',
'legend_font_style':'normal',
'legend_font_size':10,
'legend_font_weight':'normal',
'legend_marker_scale':1.,
'legend_location':2,
'legend_bbox_to_anchor':(1.05, 1.),
'legend_border_axes_pad':1.,
'legend_n_columns':1,
'legend_horizontal_spacing':1.,
'legend_vertical_spacing':1.,
'legend_border':True,
'legend_border_padding':0.5,
'legend_shadow':False,
'legend_rounded_corners':True}
update_legend_options(legend_options_wid, legend_options_dict)
Parameters
----------
legend_options_wid :
The widget object generated by the `legend_options()` function.
legend_options_dict : `dict`
The new set of options. For example:
legend_options_dict = {'render_legend':True,
'legend_title':'',
'legend_font_name':'serif',
'legend_font_style':'normal',
'legend_font_size':10,
'legend_font_weight':'normal',
'legend_marker_scale':1.,
'legend_location':2,
'legend_bbox_to_anchor':(1.05, 1.),
'legend_border_axes_pad':1.,
'legend_n_columns':1,
'legend_horizontal_spacing':1.,
'legend_vertical_spacing':1.,
'legend_border':True,
'legend_border_padding':0.5,
'legend_shadow':False,
'legend_rounded_corners':True}
"""
# Assign new options dict to selected_values
legend_options_wid.selected_values = legend_options_dict
# update render legend checkbox
if 'render_legend' in legend_options_dict.keys():
legend_options_wid.children[1].children[0].value = \
legend_options_dict['render_legend']
# update legend_title
if 'legend_title' in legend_options_dict.keys():
legend_options_wid.children[1].children[1].children[1].children[0]. \
value = legend_options_dict['legend_title']
# update legend_font_name dropdown menu
if 'legend_font_name' in legend_options_dict.keys():
legend_options_wid.children[1].children[1].children[1].children[
1].children[0].children[0].value = \
legend_options_dict['legend_font_name']
# update legend_font_size text box
if 'legend_font_size' in legend_options_dict.keys():
legend_options_wid.children[1].children[1].children[1].children[
1].children[0].children[1].value = \
int(legend_options_dict['legend_font_size'])
# update legend_font_style dropdown menu
if 'legend_font_style' in legend_options_dict.keys():
legend_options_wid.children[1].children[1].children[1].children[
1].children[1].children[0].value = \
legend_options_dict['legend_font_style']
# update legend_font_weight dropdown menu
if 'legend_font_weight' in legend_options_dict.keys():
legend_options_wid.children[1].children[1].children[1].children[
1].children[1].children[1].value = \
legend_options_dict['legend_font_weight']
# update legend_location dropdown menu
if 'legend_location' in legend_options_dict.keys():
legend_options_wid.children[1].children[1].children[0].children[0]. \
value = legend_options_dict['legend_location']
# update legend_bbox_to_anchor
if 'legend_bbox_to_anchor' in legend_options_dict.keys():
if legend_options_dict['legend_bbox_to_anchor'] is None:
tmp1 = False
tmp2 = 0.
tmp3 = 0.
else:
tmp1 = True
tmp2 = legend_options_dict['legend_bbox_to_anchor'][0]
tmp3 = legend_options_dict['legend_bbox_to_anchor'][1]
legend_options_wid.children[1].children[1].children[0].children[
1].children[0].value = tmp1
legend_options_wid.children[1].children[1].children[0].children[
1].children[1].value = tmp2
legend_options_wid.children[1].children[1].children[0].children[
1].children[2].value = tmp3
# update legend_border_axes_pad
if 'legend_border_axes_pad' in legend_options_dict.keys():
legend_options_wid.children[1].children[1].children[0].children[
2].value = \
legend_options_dict['legend_border_axes_pad']
# update legend_n_columns text box
if 'legend_n_columns' in legend_options_dict.keys():
legend_options_wid.children[1].children[1].children[2].children[
0].children[0].children[0].value = \
int(legend_options_dict['legend_n_columns'])
# update legend_marker_scale text box
if 'legend_marker_scale' in legend_options_dict.keys():
legend_options_wid.children[1].children[1].children[2].children[
0].children[0].children[1].value = \
float(legend_options_dict['legend_marker_scale'])
# update legend_horizontal_spacing text box
if 'legend_horizontal_spacing' in legend_options_dict.keys():
legend_options_wid.children[1].children[1].children[2].children[
0].children[1].children[0].value = \
float(legend_options_dict['legend_horizontal_spacing'])
# update legend_vertical_spacing text box
if 'legend_vertical_spacing' in legend_options_dict.keys():
legend_options_wid.children[1].children[1].children[2].children[
0].children[1].children[1].value = \
float(legend_options_dict['legend_vertical_spacing'])
# update legend_border
if 'legend_border' in legend_options_dict.keys():
legend_options_wid.children[1].children[1].children[2].children[
1].children[0].value = \
legend_options_dict['legend_border']
# update legend_border_padding text box
if 'legend_border_padding' in legend_options_dict.keys():
legend_options_wid.children[1].children[1].children[2].children[
1].children[1].value = \
float(legend_options_dict['legend_border_padding'])
# update legend_shadow
if 'legend_shadow' in legend_options_dict.keys():
legend_options_wid.children[1].children[1].children[2].children[
2].children[0].value = \
legend_options_dict['legend_shadow']
# update legend_rounded_corners
if 'legend_rounded_corners' in legend_options_dict.keys():
legend_options_wid.children[1].children[1].children[2].children[
2].children[1].value = \
legend_options_dict['legend_rounded_corners']
def grid_options(grid_options_default, plot_function=None,
toggle_show_visible=True, toggle_show_default=True,
toggle_title='Grid Object', show_checkbox_title='Render grid'):
r"""
Creates a widget with Grid Options. Specifically, it has:
1) A checkbox that controls grid's visibility.
2) A dropdown menu for grid style.
3) A bounded float text box for line width.
        4) A toggle button that controls the visibility of all the above, i.e.
the grid options.
The structure of the widgets is the following:
grid_options_wid.children = [toggle_button, options]
options.children = [render_grid_checkbox, other_options]
other_options.children = [grid_style, grid_width]
The returned widget saves the selected values in the following dictionary:
grid_options_wid.selected_values
To fix the alignment within this widget please refer to
`format_grid_options()` function.
Parameters
----------
grid_options_default : `dict`
The initial selected grid options.
Example:
            grid_options={'render_grid': True,
'grid_line_width': 1,
'grid_line_style': '-'}
plot_function : `function` or None, optional
The plot function that is executed when a widgets' value changes.
If None, then nothing is assigned.
toggle_show_default : `boolean`, optional
Defines whether the options will be visible upon construction.
toggle_show_visible : `boolean`, optional
The visibility of the toggle button.
toggle_title : `str`, optional
The title of the toggle button.
show_checkbox_title : `str`, optional
        The description of the show grid checkbox.
"""
import IPython.html.widgets as ipywidgets
# Create widgets
# toggle button
but = ipywidgets.ToggleButtonWidget(description=toggle_title,
value=toggle_show_default,
visible=toggle_show_visible)
# grid_line_style, grid_line_width
render_grid = ipywidgets.CheckboxWidget(
description=show_checkbox_title,
value=grid_options_default['render_grid'])
grid_line_width = ipywidgets.BoundedFloatTextWidget(
description='Width', value=grid_options_default['grid_line_width'],
min=0.)
grid_line_style_dict = OrderedDict()
grid_line_style_dict['solid'] = '-'
grid_line_style_dict['dashed'] = '--'
grid_line_style_dict['dash-dot'] = '-.'
grid_line_style_dict['dotted'] = ':'
grid_line_style = ipywidgets.DropdownWidget(
values=grid_line_style_dict,
value=grid_options_default['grid_line_style'], description='Style')
# Options widget
all_grid_options = ipywidgets.ContainerWidget(
children=[grid_line_style, grid_line_width])
options_wid = ipywidgets.ContainerWidget(
children=[render_grid, all_grid_options])
# Final widget
grid_options_wid = ipywidgets.ContainerWidget(children=[but, options_wid])
# Assign output
grid_options_wid.selected_values = grid_options_default
# line options visibility
def options_visible(name, value):
grid_line_style.disabled = not value
grid_line_width.disabled = not value
options_visible('', grid_options_default['render_grid'])
render_grid.on_trait_change(options_visible, 'value')
# get options functions
def save_render_grid(name, value):
grid_options_wid.selected_values['render_grid'] = value
render_grid.on_trait_change(save_render_grid, 'value')
def save_grid_line_width(name, value):
grid_options_wid.selected_values['grid_line_width'] = float(value)
grid_line_width.on_trait_change(save_grid_line_width, 'value')
def save_grid_line_style(name, value):
grid_options_wid.selected_values['grid_line_style'] = value
grid_line_style.on_trait_change(save_grid_line_style, 'value')
# Toggle button function
def toggle_fun(name, value):
options_wid.visible = value
toggle_fun('', toggle_show_default)
but.on_trait_change(toggle_fun, 'value')
# assign plot_function
if plot_function is not None:
render_grid.on_trait_change(plot_function, 'value')
grid_line_style.on_trait_change(plot_function, 'value')
grid_line_width.on_trait_change(plot_function, 'value')
return grid_options_wid
def format_grid_options(grid_options_wid, container_padding='6px',
container_margin='6px',
container_border='1px solid black',
toggle_button_font_weight='bold', border_visible=True,
suboptions_border_visible=True):
r"""
Function that corrects the align (style format) of a given grid_options
widget. Usage example:
grid_options_wid = grid_options()
display(grid_options_wid)
format_grid_options(grid_options_wid)
Parameters
----------
grid_options_wid :
The widget object generated by the `grid_options()` function.
container_padding : `str`, optional
The padding around the widget, e.g. '6px'
container_margin : `str`, optional
The margin around the widget, e.g. '6px'
container_border : `str`, optional
The border around the widget, e.g. '1px solid black'
toggle_button_font_weight : `str`
The font weight of the toggle button, e.g. 'bold'
border_visible : `boolean`, optional
Defines whether to draw the border line around the widget.
suboptions_border_visible : `boolean`, optional
        Defines whether to draw the border line around the grid options, under
        the show grid checkbox.
"""
# align grid options with checkbox
grid_options_wid.children[1].add_class('align-end')
# set gridlinewidth text box width
grid_options_wid.children[1].children[1].children[1].set_css('width', '1cm')
# border around options
if suboptions_border_visible:
grid_options_wid.children[1].children[1].set_css('border',
container_border)
# set toggle button font bold
grid_options_wid.children[0].set_css('font-weight',
toggle_button_font_weight)
# margin and border around container widget
grid_options_wid.set_css('padding', container_padding)
grid_options_wid.set_css('margin', container_margin)
if border_visible:
grid_options_wid.set_css('border', container_border)
def update_grid_options(grid_options_wid, grid_options_dict):
r"""
Function that updates the state of a given grid_options widget. Usage
example:
default_grid_options={'render_grid':True,
'grid_line_width':2,
'grid_line_style':'-'}
grid_options_wid = grid_options(default_grid_options)
display(grid_options_wid)
format_grid_options(grid_options_wid)
default_grid_options={'render_grid':False,
'grid_line_width':4,
'grid_line_style':'-'}
update_grid_options(grid_options_wid, default_grid_options)
Parameters
----------
grid_options_wid :
The widget object generated by the `grid_options()` function.
grid_options_dict : `dict`
The new set of options. For example:
grid_options_dict={'render_grid':True,
'grid_line_width':2,
'grid_line_style':'-'}
"""
# Assign new options dict to selected_values
grid_options_wid.selected_values = grid_options_dict
# update render grid checkbox
if 'render_grid' in grid_options_dict.keys():
grid_options_wid.children[1].children[0].value = \
grid_options_dict['render_grid']
# update grid_line_style dropdown menu
if 'grid_line_style' in grid_options_dict.keys():
grid_options_wid.children[1].children[1].children[0].value = \
grid_options_dict['grid_line_style']
# update grid_line_width text box
if 'grid_line_width' in grid_options_dict.keys():
grid_options_wid.children[1].children[1].children[1].value = \
float(grid_options_dict['grid_line_width'])
def hog_options(toggle_show_default=True, toggle_show_visible=True):
r"""
Creates a widget with HOG Features Options.
The structure of the widgets is the following:
hog_options_wid.children = [toggle_button, options]
options.children = [window_wid, algorithm_wid]
window_wid.children = [mode_wid, window_opts_wid]
mode_wid.children = [mode_radiobuttons, padding_checkbox]
window_opts_wid.children = [window_size_wid, window_step_wid]
window_size_wid.children = [window_height, window_width,
window_size_unit]
window_step_wid.children = [window_vertical, window_horizontal,
window_step_unit]
algorithm_wid.children = [algorithm_radiobuttons, algorithm_options]
algorithm_options.children = [algorithm_sizes, algorithm_other]
algorithm_sizes.children = [cell_size, block_size, num_bins]
algorithm_other.children = [signed_gradient, l2_norm_clipping]
To fix the alignment within this widget please refer to
`format_hog_options()` function.
Parameters
----------
toggle_show_default : `boolean`, optional
Defines whether the options will be visible upon construction.
toggle_show_visible : `boolean`, optional
The visibility of the toggle button.
"""
import IPython.html.widgets as ipywidgets
# Toggle button that controls options' visibility
but = ipywidgets.ToggleButtonWidget(description='HOG Options',
value=toggle_show_default,
visible=toggle_show_visible)
# window related options
tmp = OrderedDict()
tmp['Dense'] = 'dense'
tmp['Sparse'] = 'sparse'
mode = ipywidgets.RadioButtonsWidget(values=tmp, description='Mode')
padding = ipywidgets.CheckboxWidget(value=True, description='Padding')
mode_wid = ipywidgets.ContainerWidget(children=[mode, padding])
window_height = ipywidgets.BoundedIntTextWidget(value='1',
description='Height', min=1)
window_width = ipywidgets.BoundedIntTextWidget(value='1',
description='Width', min=1)
tmp = OrderedDict()
tmp['Blocks'] = 'blocks'
tmp['Pixels'] = 'pixels'
window_size_unit = ipywidgets.RadioButtonsWidget(values=tmp,
description=' Size unit')
window_size_wid = ipywidgets.ContainerWidget(
children=[window_height, window_width,
window_size_unit])
window_vertical = ipywidgets.BoundedIntTextWidget(value='1',
description='Step Y',
min=1)
window_horizontal = ipywidgets.BoundedIntTextWidget(value='1',
description='Step X',
min=1)
tmp = OrderedDict()
tmp['Pixels'] = 'pixels'
tmp['Cells'] = 'cells'
window_step_unit = ipywidgets.RadioButtonsWidget(values=tmp,
description='Step unit')
window_step_wid = ipywidgets.ContainerWidget(children=[window_vertical,
window_horizontal,
window_step_unit])
window_wid = ipywidgets.ContainerWidget(
children=[window_size_wid, window_step_wid])
window_wid = ipywidgets.ContainerWidget(children=[mode_wid, window_wid])
# algorithm related options
tmp = OrderedDict()
    tmp['Dalal & Triggs'] = 'dalaltriggs'
tmp['Zhu & Ramanan'] = 'zhuramanan'
algorithm = ipywidgets.RadioButtonsWidget(values=tmp, value='dalaltriggs',
description='Algorithm')
cell_size = ipywidgets.BoundedIntTextWidget(
value='8', description='Cell size (in pixels)', min=1)
block_size = ipywidgets.BoundedIntTextWidget(
value='2', description='Block size (in cells)', min=1)
num_bins = ipywidgets.BoundedIntTextWidget(
value='9', description='Orientation bins', min=1)
algorithm_sizes = ipywidgets.ContainerWidget(
children=[cell_size, block_size,
num_bins])
signed_gradient = ipywidgets.CheckboxWidget(value=True,
description='Signed gradients')
l2_norm_clipping = ipywidgets.BoundedFloatTextWidget(
value='0.2', description='L2 norm clipping', min=0.)
algorithm_other = ipywidgets.ContainerWidget(children=[signed_gradient,
l2_norm_clipping])
algorithm_options = ipywidgets.ContainerWidget(children=[algorithm_sizes,
algorithm_other])
algorithm_wid = ipywidgets.ContainerWidget(
children=[algorithm, algorithm_options])
# options tab widget
all_options = ipywidgets.TabWidget(children=[window_wid, algorithm_wid])
# Widget container
hog_options_wid = ipywidgets.ContainerWidget(children=[but, all_options])
# Initialize output dictionary
hog_options_wid.options = {'mode': 'dense', 'algorithm': 'dalaltriggs',
'num_bins': 9, 'cell_size': 8, 'block_size': 2,
'signed_gradient': True, 'l2_norm_clip': 0.2,
'window_height': 1, 'window_width': 1,
'window_unit': 'blocks',
'window_step_vertical': 1,
'window_step_horizontal': 1,
'window_step_unit': 'pixels', 'padding': True,
'verbose': False}
# mode function
def window_mode(name, value):
window_horizontal.disabled = value == 'sparse'
window_vertical.disabled = value == 'sparse'
window_step_unit.disabled = value == 'sparse'
window_height.disabled = value == 'sparse'
window_width.disabled = value == 'sparse'
window_size_unit.disabled = value == 'sparse'
mode.on_trait_change(window_mode, 'value')
# algorithm function
def algorithm_mode(name, value):
l2_norm_clipping.disabled = value == 'zhuramanan'
signed_gradient.disabled = value == 'zhuramanan'
block_size.disabled = value == 'zhuramanan'
num_bins.disabled = value == 'zhuramanan'
algorithm.on_trait_change(algorithm_mode, 'value')
# get options
def get_mode(name, value):
hog_options_wid.options['mode'] = value
mode.on_trait_change(get_mode, 'value')
def get_padding(name, value):
hog_options_wid.options['padding'] = value
padding.on_trait_change(get_padding, 'value')
def get_window_height(name, value):
hog_options_wid.options['window_height'] = value
window_height.on_trait_change(get_window_height, 'value')
def get_window_width(name, value):
hog_options_wid.options['window_width'] = value
window_width.on_trait_change(get_window_width, 'value')
def get_window_size_unit(name, value):
hog_options_wid.options['window_unit'] = value
window_size_unit.on_trait_change(get_window_size_unit, 'value')
def get_window_step_vertical(name, value):
hog_options_wid.options['window_step_vertical'] = value
window_vertical.on_trait_change(get_window_step_vertical, 'value')
def get_window_step_horizontal(name, value):
hog_options_wid.options['window_step_horizontal'] = value
window_horizontal.on_trait_change(get_window_step_horizontal, 'value')
def get_window_step_unit(name, value):
hog_options_wid.options['window_step_unit'] = value
window_step_unit.on_trait_change(get_window_step_unit, 'value')
def get_algorithm(name, value):
hog_options_wid.options['algorithm'] = value
algorithm.on_trait_change(get_algorithm, 'value')
def get_num_bins(name, value):
hog_options_wid.options['num_bins'] = value
num_bins.on_trait_change(get_num_bins, 'value')
def get_cell_size(name, value):
hog_options_wid.options['cell_size'] = value
cell_size.on_trait_change(get_cell_size, 'value')
def get_block_size(name, value):
hog_options_wid.options['block_size'] = value
block_size.on_trait_change(get_block_size, 'value')
def get_signed_gradient(name, value):
hog_options_wid.options['signed_gradient'] = value
signed_gradient.on_trait_change(get_signed_gradient, 'value')
def get_l2_norm_clip(name, value):
hog_options_wid.options['l2_norm_clip'] = value
l2_norm_clipping.on_trait_change(get_l2_norm_clip, 'value')
# Toggle button function
def toggle_options(name, value):
all_options.visible = value
but.on_trait_change(toggle_options, 'value')
return hog_options_wid
def format_hog_options(hog_options_wid, container_padding='6px',
container_margin='6px',
container_border='1px solid black',
toggle_button_font_weight='bold',
border_visible=True):
r"""
Function that corrects the align (style format) of a given hog_options
widget. Usage example:
hog_options_wid = hog_options()
display(hog_options_wid)
format_hog_options(hog_options_wid)
Parameters
----------
hog_options_wid :
The widget object generated by the `hog_options()` function.
container_padding : `str`, optional
The padding around the widget, e.g. '6px'
container_margin : `str`, optional
The margin around the widget, e.g. '6px'
container_border : `str`, optional
The border around the widget, e.g. '1px solid black'
toggle_button_font_weight : `str`
The font weight of the toggle button, e.g. 'bold'
border_visible : `boolean`, optional
Defines whether to draw the border line around the widget.
"""
# align window options
hog_options_wid.children[1].children[0].children[1].remove_class('vbox')
hog_options_wid.children[1].children[0].children[1].add_class('hbox')
# set width of height, width, step x , step y textboxes
hog_options_wid.children[1].children[0].children[1].children[0].children[0]. \
set_css('width', '40px')
hog_options_wid.children[1].children[0].children[1].children[0].children[1]. \
set_css('width', '40px')
hog_options_wid.children[1].children[0].children[1].children[1].children[0]. \
set_css('width', '40px')
hog_options_wid.children[1].children[0].children[1].children[1].children[1]. \
set_css('width', '40px')
# set margin and border around the window size and step options
hog_options_wid.children[1].children[0].children[1].children[0].set_css(
'margin', container_margin)
hog_options_wid.children[1].children[0].children[1].children[1].set_css(
'margin', container_margin)
hog_options_wid.children[1].children[0].children[1].children[0].set_css(
'border', '1px solid gray')
hog_options_wid.children[1].children[0].children[1].children[1].set_css(
'border', '1px solid gray')
# align mode and padding
hog_options_wid.children[1].children[0].children[0].remove_class('vbox')
hog_options_wid.children[1].children[0].children[0].add_class('hbox')
# set width of algorithm textboxes
hog_options_wid.children[1].children[1].children[1].children[0].children[0]. \
set_css('width', '40px')
hog_options_wid.children[1].children[1].children[1].children[0].children[1]. \
set_css('width', '40px')
hog_options_wid.children[1].children[1].children[1].children[0].children[2]. \
set_css('width', '40px')
hog_options_wid.children[1].children[1].children[1].children[1].children[1]. \
set_css('width', '40px')
# align algorithm options
hog_options_wid.children[1].children[1].children[1].remove_class('vbox')
hog_options_wid.children[1].children[1].children[1].add_class('hbox')
# set margin and border around the algorithm options
hog_options_wid.children[1].children[1].children[1].set_css(
'margin', container_margin)
hog_options_wid.children[1].children[1].children[1].set_css(
'border', '1px solid gray')
hog_options_wid.children[1].set_css('margin-top', '6px')
hog_options_wid.children[1].children[0].add_class('align-center')
hog_options_wid.children[1].children[1].add_class('align-center')
# set final tab titles
tab_titles = ['Window', 'Algorithm']
for (k, tl) in enumerate(tab_titles):
hog_options_wid.children[1].set_title(k, tl)
# set toggle button font bold
hog_options_wid.children[0].set_css('font-weight',
toggle_button_font_weight)
# margin and border around container widget
hog_options_wid.set_css('padding', container_padding)
hog_options_wid.set_css('margin', container_margin)
if border_visible:
hog_options_wid.set_css('border', container_border)
def daisy_options(toggle_show_default=True, toggle_show_visible=True):
r"""
Creates a widget with Daisy Features Options.
The structure of the widgets is the following:
daisy_options_wid.children = [toggle_button, options]
options.children = [options1, options2]
options1.children = [step_int, radius_int, rings_int, histograms_int]
options2.children = [orientations_int, normalization_dropdown,
sigmas_list, ring_radii_list]
To fix the alignment within this widget please refer to
`format_daisy_options()` function.
Parameters
----------
toggle_show_default : `boolean`, optional
Defines whether the options will be visible upon construction.
toggle_show_visible : `boolean`, optional
The visibility of the toggle button.
"""
import IPython.html.widgets as ipywidgets
# Toggle button that controls options' visibility
but = ipywidgets.ToggleButtonWidget(description='Daisy Options',
value=toggle_show_default,
visible=toggle_show_visible)
# options widgets
step = ipywidgets.BoundedIntTextWidget(value='1', description='Step', min=1)
radius = ipywidgets.BoundedIntTextWidget(value='15', description='Radius',
min=1)
rings = ipywidgets.BoundedIntTextWidget(value='2', description='Rings',
min=1)
histograms = ipywidgets.BoundedIntTextWidget(value='2',
description='Histograms',
min=1)
orientations = ipywidgets.BoundedIntTextWidget(value='8',
description='Orientations',
min=1)
tmp = OrderedDict()
tmp['L1'] = 'l1'
tmp['L2'] = 'l2'
tmp['Daisy'] = 'daisy'
tmp['None'] = None
normalization = ipywidgets.DropdownWidget(value='l1', values=tmp,
description='Normalization')
sigmas = ipywidgets.TextWidget(description='Sigmas')
ring_radii = ipywidgets.TextWidget(description='Ring radii')
# group widgets
cont1 = ipywidgets.ContainerWidget(
children=[step, radius, rings, histograms])
cont2 = ipywidgets.ContainerWidget(
children=[orientations, normalization, sigmas,
ring_radii])
options = ipywidgets.ContainerWidget(children=[cont1, cont2])
# Widget container
daisy_options_wid = ipywidgets.ContainerWidget(children=[but, options])
# Initialize output dictionary
daisy_options_wid.options = {'step': 1, 'radius': 15,
'rings': 2, 'histograms': 2,
'orientations': 8,
'normalization': 'l1',
'sigmas': None,
'ring_radii': None}
# get options
def get_step(name, value):
daisy_options_wid.options['step'] = value
step.on_trait_change(get_step, 'value')
def get_radius(name, value):
daisy_options_wid.options['radius'] = value
radius.on_trait_change(get_radius, 'value')
def get_rings(name, value):
daisy_options_wid.options['rings'] = value
rings.on_trait_change(get_rings, 'value')
def get_histograms(name, value):
daisy_options_wid.options['histograms'] = value
histograms.on_trait_change(get_histograms, 'value')
def get_orientations(name, value):
daisy_options_wid.options['orientations'] = value
orientations.on_trait_change(get_orientations, 'value')
def get_normalization(name, value):
daisy_options_wid.options['normalization'] = value
normalization.on_trait_change(get_normalization, 'value')
def get_sigmas(name, value):
daisy_options_wid.options['sigmas'] = _convert_str_to_list_int(
str(value))
sigmas.on_trait_change(get_sigmas, 'value')
def get_ring_radii(name, value):
daisy_options_wid.options['ring_radii'] = _convert_str_to_list_float(
str(value))
ring_radii.on_trait_change(get_ring_radii, 'value')
# Toggle button function
def toggle_options(name, value):
options.visible = value
but.on_trait_change(toggle_options, 'value')
return daisy_options_wid
def format_daisy_options(daisy_options_wid, container_padding='6px',
container_margin='6px',
container_border='1px solid black',
toggle_button_font_weight='bold',
border_visible=True):
r"""
Function that corrects the align (style format) of a given daisy_options
widget. Usage example:
daisy_options_wid = daisy_options()
display(daisy_options_wid)
format_daisy_options(daisy_options_wid)
Parameters
----------
daisy_options_wid :
The widget object generated by the `daisy_options()` function.
container_padding : `str`, optional
The padding around the widget, e.g. '6px'
container_margin : `str`, optional
The margin around the widget, e.g. '6px'
container_border : `str`, optional
The border around the widget, e.g. '1px solid black'
toggle_button_font_weight : `str`
The font weight of the toggle button, e.g. 'bold'
border_visible : `boolean`, optional
Defines whether to draw the border line around the widget.
"""
# align window options
daisy_options_wid.children[1].remove_class('vbox')
daisy_options_wid.children[1].add_class('hbox')
# set textboxes length
daisy_options_wid.children[1].children[0].children[0].set_css('width',
'40px')
daisy_options_wid.children[1].children[0].children[1].set_css('width',
'40px')
daisy_options_wid.children[1].children[0].children[2].set_css('width',
'40px')
daisy_options_wid.children[1].children[0].children[3].set_css('width',
'40px')
daisy_options_wid.children[1].children[1].children[0].set_css('width',
'40px')
daisy_options_wid.children[1].children[1].children[2].set_css('width',
'80px')
daisy_options_wid.children[1].children[1].children[3].set_css('width',
'80px')
# set toggle button font bold
daisy_options_wid.children[0].set_css('font-weight',
toggle_button_font_weight)
# margin and border around container widget
daisy_options_wid.set_css('padding', container_padding)
daisy_options_wid.set_css('margin', container_margin)
if border_visible:
daisy_options_wid.set_css('border', container_border)
def lbp_options(toggle_show_default=True, toggle_show_visible=True):
r"""
Creates a widget with LBP Features Options.
The structure of the widgets is the following:
lbp_options_wid.children = [toggle_button, options]
options.children = [window_wid, algorithm_wid]
window_wid.children = [window_vertical, window_horizontal,
window_step_unit, padding]
algorithm_wid.children = [mapping_type, radius, samples]
To fix the alignment within this widget please refer to
`format_lbp_options()` function.
Parameters
----------
toggle_show_default : `boolean`, optional
Defines whether the options will be visible upon construction.
toggle_show_visible : `boolean`, optional
The visibility of the toggle button.
"""
import IPython.html.widgets as ipywidgets
# Toggle button that controls options' visibility
but = ipywidgets.ToggleButtonWidget(description='LBP Options',
value=toggle_show_default,
visible=toggle_show_visible)
# method related options
tmp = OrderedDict()
tmp['Uniform-2'] = 'u2'
tmp['Rotation-Invariant'] = 'ri'
tmp['Both'] = 'riu2'
tmp['None'] = 'none'
mapping_type = ipywidgets.DropdownWidget(value='u2', values=tmp,
description='Mapping')
radius = ipywidgets.TextWidget(value='1, 2, 3, 4', description='Radius')
samples = ipywidgets.TextWidget(value='8, 8, 8, 8', description='Samples')
algorithm_wid = ipywidgets.ContainerWidget(children=[radius,
samples,
mapping_type])
# window related options
window_vertical = ipywidgets.BoundedIntTextWidget(value='1',
description='Step Y',
min=1)
window_horizontal = ipywidgets.BoundedIntTextWidget(value='1',
description='Step X',
min=1)
tmp = OrderedDict()
tmp['Pixels'] = 'pixels'
tmp['Windows'] = 'cells'
window_step_unit = ipywidgets.RadioButtonsWidget(values=tmp,
description='Step unit')
padding = ipywidgets.CheckboxWidget(value=True, description='Padding')
window_wid = ipywidgets.ContainerWidget(children=[window_vertical,
window_horizontal,
window_step_unit,
padding])
# options widget
options = ipywidgets.ContainerWidget(children=[window_wid, algorithm_wid])
# Widget container
lbp_options_wid = ipywidgets.ContainerWidget(children=[but, options])
# Initialize output dictionary
lbp_options_wid.options = {'radius': range(1, 5), 'samples': [8] * 4,
'mapping_type': 'u2',
'window_step_vertical': 1,
'window_step_horizontal': 1,
'window_step_unit': 'pixels', 'padding': True,
'verbose': False, 'skip_checks': False}
# get options
def get_mapping_type(name, value):
lbp_options_wid.options['mapping_type'] = value
mapping_type.on_trait_change(get_mapping_type, 'value')
def get_window_vertical(name, value):
lbp_options_wid.options['window_step_vertical'] = value
window_vertical.on_trait_change(get_window_vertical, 'value')
def get_window_horizontal(name, value):
lbp_options_wid.options['window_step_horizontal'] = value
window_horizontal.on_trait_change(get_window_horizontal, 'value')
def get_window_step_unit(name, value):
lbp_options_wid.options['window_step_unit'] = value
window_step_unit.on_trait_change(get_window_step_unit, 'value')
def get_padding(name, value):
lbp_options_wid.options['padding'] = value
padding.on_trait_change(get_padding, 'value')
def get_radius(name, value):
lbp_options_wid.options['radius'] = _convert_str_to_list_int(str(value))
radius.on_trait_change(get_radius, 'value')
def get_samples(name, value):
str_val = _convert_str_to_list_int(str(value))
lbp_options_wid.options['samples'] = str_val
samples.on_trait_change(get_samples, 'value')
# Toggle button function
def toggle_options(name, value):
options.visible = value
but.on_trait_change(toggle_options, 'value')
return lbp_options_wid
def format_lbp_options(lbp_options_wid, container_padding='6px',
container_margin='6px',
container_border='1px solid black',
toggle_button_font_weight='bold',
border_visible=True):
r"""
Function that corrects the align (style format) of a given lbp_options
widget. Usage example:
lbp_options_wid = lbp_options()
display(lbp_options_wid)
format_lbp_options(lbp_options_wid)
Parameters
----------
lbp_options_wid :
The widget object generated by the `lbp_options()` function.
container_padding : `str`, optional
The padding around the widget, e.g. '6px'
container_margin : `str`, optional
The margin around the widget, e.g. '6px'
container_border : `str`, optional
The border around the widget, e.g. '1px solid black'
toggle_button_font_weight : `str`
The font weight of the toggle button, e.g. 'bold'
border_visible : `boolean`, optional
Defines whether to draw the border line around the widget.
"""
# align window options
lbp_options_wid.children[1].remove_class('vbox')
lbp_options_wid.children[1].add_class('hbox')
# set textboxes length
lbp_options_wid.children[1].children[0].children[0].set_css('width',
'40px')
lbp_options_wid.children[1].children[0].children[1].set_css('width',
'40px')
lbp_options_wid.children[1].children[1].children[0].set_css('width',
'80px')
lbp_options_wid.children[1].children[1].children[1].set_css('width',
'80px')
# set toggle button font bold
lbp_options_wid.children[0].set_css('font-weight',
toggle_button_font_weight)
# margin and border around container widget
lbp_options_wid.set_css('padding', container_padding)
lbp_options_wid.set_css('margin', container_margin)
if border_visible:
lbp_options_wid.set_css('border', container_border)
def igo_options(toggle_show_default=True, toggle_show_visible=True):
r"""
Creates a widget with IGO Features Options.
The structure of the widgets is the following:
igo_options_wid.children = [toggle_button, double_angles_checkbox]
To fix the alignment within this widget please refer to
`format_igo_options()` function.
Parameters
----------
toggle_show_default : `boolean`, optional
Defines whether the options will be visible upon construction.
toggle_show_visible : `boolean`, optional
The visibility of the toggle button.
"""
import IPython.html.widgets as ipywidgets
# Toggle button that controls options' visibility
but = ipywidgets.ToggleButtonWidget(description='IGO Options',
value=toggle_show_default,
visible=toggle_show_visible)
# options widget
double_angles = ipywidgets.CheckboxWidget(value=False,
description='Double angles')
# Widget container
igo_options_wid = ipywidgets.ContainerWidget(children=[but, double_angles])
# Initialize output dictionary
igo_options_wid.options = {'double_angles': False}
# get double_angles
def get_double_angles(name, value):
igo_options_wid.options['double_angles'] = value
double_angles.on_trait_change(get_double_angles, 'value')
# Toggle button function
def toggle_options(name, value):
double_angles.visible = value
but.on_trait_change(toggle_options, 'value')
return igo_options_wid
def format_igo_options(igo_options_wid, container_padding='6px',
container_margin='6px',
container_border='1px solid black',
toggle_button_font_weight='bold',
border_visible=True):
r"""
Function that corrects the align (style format) of a given igo_options
widget. Usage example:
igo_options_wid = igo_options()
display(igo_options_wid)
format_igo_options(igo_options_wid)
Parameters
----------
igo_options_wid :
The widget object generated by the `igo_options()` function.
container_padding : `str`, optional
The padding around the widget, e.g. '6px'
container_margin : `str`, optional
The margin around the widget, e.g. '6px'
container_border : `str`, optional
The border around the widget, e.g. '1px solid black'
toggle_button_font_weight : `str`
The font weight of the toggle button, e.g. 'bold'
border_visible : `boolean`, optional
Defines whether to draw the border line around the widget.
"""
# set toggle button font bold
igo_options_wid.children[0].set_css('font-weight',
toggle_button_font_weight)
# margin and border around container widget
igo_options_wid.set_css('padding', container_padding)
igo_options_wid.set_css('margin', container_margin)
if border_visible:
igo_options_wid.set_css('border', container_border)
def function_definition(default_function='def my_function():\n pass',
toggle_show_default=True, toggle_show_visible=True):
r"""
Creates a widget for Function Definition.
The structure of the widgets is the following:
function_definition_wid.children = [toggle_button, options]
options.children = [code_textarea, define]
define.children = [message_text, define_button]
To fix the alignment within this widget please refer to
`format_function_definition()` function.
Parameters
----------
toggle_show_default : `boolean`, optional
Defines whether the options will be visible upon construction.
toggle_show_visible : `boolean`, optional
The visibility of the toggle button.
"""
import IPython.html.widgets as ipywidgets
# Toggle button that controls options' visibility
but = ipywidgets.ToggleButtonWidget(description='Features Options',
value=toggle_show_default,
visible=toggle_show_visible)
# code widget
code = ipywidgets.TextareaWidget(value=default_function)
define_but = ipywidgets.ButtonWidget(description='Define')
msg_wid = ipywidgets.LatexWidget(value='')
define_wid = ipywidgets.ContainerWidget(children=[msg_wid, define_but])
# options widget
all_options = ipywidgets.ContainerWidget(children=[code, define_wid])
# Widget container
function_definition_wid = ipywidgets.ContainerWidget(
children=[but, all_options])
# Initialize output dictionary
f, msg = _get_function_handle_from_string(default_function)
function_definition_wid.function = f
# get code
def get_code(name):
function_handle, msg = _get_function_handle_from_string(code.value)
if function_handle is not None:
function_definition_wid.function = function_handle
msg_wid.value = ''
else:
f, _ = _get_function_handle_from_string(default_function)
function_definition_wid.function = f
msg_wid.value = msg
define_but.on_click(get_code)
# Toggle button function
def toggle_options(name, value):
all_options.visible = value
but.on_trait_change(toggle_options, 'value')
return function_definition_wid
def format_function_definition(function_definition_wid, container_padding='6px',
container_margin='6px',
container_border='1px solid black',
toggle_button_font_weight='bold',
border_visible=True):
r"""
Function that corrects the align (style format) of a given features_options
widget. Usage example:
function_definition_wid = function_definition()
display(function_definition_wid)
format_function_definition(function_definition_wid)
Parameters
----------
function_definition_wid :
The widget object generated by the `function_definition()` function.
container_padding : `str`, optional
The padding around the widget, e.g. '6px'
container_margin : `str`, optional
The margin around the widget, e.g. '6px'
container_border : `str`, optional
The border around the widget, e.g. '1px solid black'
toggle_button_font_weight : `str`
The font weight of the toggle button, e.g. 'bold'
border_visible : `boolean`, optional
Defines whether to draw the border line around the widget.
"""
# align message text and button horizontally
function_definition_wid.children[1].children[1].remove_class('vbox')
function_definition_wid.children[1].children[1].add_class('hbox')
# set margin between message and button
function_definition_wid.children[1].children[1].children[0].set_css(
'margin-right', '0.5cm')
# align code textarea and button to the right
function_definition_wid.children[1].add_class('align-end')
# set error message background to red
function_definition_wid.children[1].children[1].children[0].set_css(
'background', 'red')
# set toggle button font bold
function_definition_wid.children[0].set_css('font-weight',
toggle_button_font_weight)
# margin and border around container widget
function_definition_wid.set_css('padding', container_padding)
function_definition_wid.set_css('margin', container_margin)
if border_visible:
function_definition_wid.set_css('border', container_border)
class IntListTextWidget():
r"""
Basic widget that returns a `list` of `int` numbers. It uses
`IPython.html.widgets.TextWidget()` and converts its value to a `list` of
`int`.
Parameters
----------
value : `str` or `list` of `int`, Optional
The initial value of the widget.
description : `str`, Optional
The description of the widget.
Raises
------
ValueError
value must be str or list
"""
def __init__(self, value='', description=''):
import IPython.html.widgets as ipywidgets
if isinstance(value, list):
val = _convert_list_to_str(value)
elif isinstance(value, str):
val = value
else:
raise ValueError("value must be str or list")
self.text_wid = ipywidgets.TextWidget(value=val,
description=description)
@property
def value(self):
r"""
        The value of the widget.
"""
return _convert_str_to_list_int(str(self.text_wid.value))
@property
def description(self):
r"""
The description of the widget.
"""
return self.text_wid.description
@property
def model_id(self):
r"""
The id of the widget.
"""
return self.text_wid.model_id
class FloatListTextWidget(IntListTextWidget):
r"""
Basic widget that returns a `list` of `float` numbers. It uses
`IPython.html.widgets.TextWidget()` and converts its value to a `list` of
`float`.
Parameters
----------
value : `str` or `list` of `int`, Optional
The initial value of the widget.
description : `str`, Optional
The description of the widget.
Raises
------
ValueError
value must be str or list
"""
@property
def value(self):
r"""
        The value of the widget.
"""
return _convert_str_to_list_float(str(self.text_wid.value))
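# Example (comment only; illustrative, not part of the original module):
#     wid = IntListTextWidget(value=[1, 2, 3], description='ints')
#     wid.value                                    # -> [1, 2, 3]
#     FloatListTextWidget(value='0.5, 1.5').value  # -> [0.5, 1.5]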
def _convert_list_to_str(l):
r"""
Function that converts a given list of numbers to a string. For example:
        _convert_list_to_str([1, 2, 3]) returns '1, 2, 3'
"""
if isinstance(l, list):
return str(l)[1:-1]
else:
return ''
def _convert_str_to_list_int(s):
r"""
Function that converts a given string to a list of int numbers. For example:
_convert_str_to_list_int('1, 2, 3') returns [1, 2, 3]
"""
if isinstance(s, str):
return [int(i[:-1]) if i[-1] == ',' else int(i) for i in s.split()]
else:
return []
def _convert_str_to_list_float(s):
r"""
Function that converts a given string to a list of float numbers.
For example:
_convert_str_to_list_float('1, 2, 3') returns [1.0, 2.0, 3.0]
"""
if isinstance(s, str):
return [float(i[:-1]) if i[-1] == ',' else float(i) for i in s.split()]
else:
return []
def _get_function_handle_from_string(s):
r"""
Function that returns a function handle given the function code as a string.
"""
try:
exec s
function_name = s[4:s.find('(')]
return eval(function_name), None
except:
return None, 'Invalid syntax!'
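# Example (comment only; illustrative, not part of the original module). Under
# Python 2, which the `exec s` statement above targets:
#     f, msg = _get_function_handle_from_string('def my_function():\n    return 1')
#     f()                                           # -> 1, with msg == None
#     _get_function_handle_from_string('garbage(')  # -> (None, 'Invalid syntax!')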
def _convert_image_to_bytes(image):
r"""
Function that given a menpo.Image object, it converts it to the correct
bytes format that can be used by IPython.html.widgets.ImageWidget().
"""
fp = StringIO()
image.as_PILImage().save(fp, format='png')
fp.seek(0)
return fp.read()
def _lists_are_the_same(a, b):
if len(a) == len(b):
for i, j in zip(a, b):
if i != j:
return False
return True
else:
return False
| 2.875 | 3 |
zella-graphics/animation/main.py | whitmans-max/python-examples | 140 | 12794406 | #!/usr/bin/env python3
# date: 2020.05.29
# It use normal loop to animate point and checkMouse to close program on click
from graphics import * # PEP8: `import *` is not preferred
import random
import time
# --- main ---
win = GraphWin("My Window",500,500)
win.setBackground(color_rgb(0,0,0))
pt = Point(250, 250)
pt.setOutline(color_rgb(255,255,0))
pt.draw(win)
while True:
if win.checkMouse():
break
dx = random.randint(-10, 10)
dy = random.randint(-10, 10)
pt.move(dx, dy)
time.sleep(0.1)
win.close()
| 3.703125 | 4 |
bloombox_tests/schema_tests.py | Bloombox/Python | 4 | 12794407 | # -*- coding: utf-8 -*-
"""
bloombox testsuite: schema tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) Momentum Ideas Co., 2018
:license: This software makes use of the Apache License v2.
A copy of this license is included as ``LICENSE.md`` in
the root of the project.
"""
import unittest
class LibrarySchemaTests(unittest.TestCase):
""" Schema object tests. """
def test_schemas_import(self):
""" Schemas: 'schema.base' objects should be importable. """
from bloombox.schema.base import ProductKey_pb2
from bloombox.schema.base import ProductKind_pb2
from bloombox.schema.base import ProductType_pb2
def test_products_import(self):
""" Schemas: 'schema.products' objects should be importable. """
from bloombox.schema.products import Flower_pb2
| 1.835938 | 2 |
atcoder/abc/29/C.py | utgw/programming-contest | 0 | 12794408 | <filename>atcoder/abc/29/C.py
import itertools
N = int(input())
A = list('abc')
for i in itertools.product(A, repeat=N):
    print(''.join(i))
 | 3.328125 | 3 |
jobcontroller/app.py | nsabine/openshift-batch-demo | 0 | 12794409 | import os
import argparse
from redis import StrictRedis
from rq import Queue
# use the kubernetes service environment variables to
# connect to the redis queue
REDIS_HOST = os.environ['REDIS_MASTER_SERVICE_HOST']
REDIS_PORT = os.environ['REDIS_MASTER_SERVICE_PORT']
# REDIS_URL = 'redis://' + REDIS_HOST + ':' + REDIS_PORT + '/0'
# QUEUES = ['default']
redis_conn = StrictRedis(host=REDIS_HOST, port=int(REDIS_PORT), db=0)
# NOTE: the queue name should match the --name argument parsed in __main__;
# rq's 'default' queue is used here only so the module imports without a NameError.
redis_queue = Queue('default', connection=redis_conn)
def populate_queue():
# todo
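    # Illustrative sketch (comment only; an assumption, not part of the original
    # file): enqueue one rq job per unit of work, e.g.
    #     for item in work_items:
    #         redis_queue.enqueue('worker.process_item', item)
    # where `work_items` and 'worker.process_item' are hypothetical placeholders.
    pass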
def create_workers(name, image):
# Accessing the API using the pod's service account
# $ TOKEN="$(cat /var/run/secrets/kubernetes.io/serviceaccount/token)"
# $ curl --cacert /var/run/secrets/kubernetes.io/serviceaccount/ca.crt \
# "https://openshift.default.svc.cluster.local/oapi/v1/users/~" \
# -H "Authorization: Bearer $TOKEN"
# todo:
# - create job for each message in the queue
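    # Illustrative sketch (an assumption, not part of the original file): create
    # one Kubernetes Job per queued message through the REST API, authenticating
    # with the pod's service-account token as described in the comments above.
    # The `requests` dependency, API host, namespace and Job spec are assumptions.
    import requests
    with open('/var/run/secrets/kubernetes.io/serviceaccount/token') as token_file:
        token = token_file.read()
    api_url = ('https://openshift.default.svc.cluster.local'
               '/apis/batch/v1/namespaces/default/jobs')
    job_template = {
        'apiVersion': 'batch/v1',
        'kind': 'Job',
        'metadata': {'generateName': name + '-worker-'},
        'spec': {'template': {'spec': {
            'containers': [{'name': 'worker', 'image': image}],
            'restartPolicy': 'Never'}}},
    }
    for _ in range(redis_queue.count):
        requests.post(api_url, json=job_template,
                      headers={'Authorization': 'Bearer ' + token},
                      verify='/var/run/secrets/kubernetes.io/serviceaccount/ca.crt')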
def monitor_batch():
# todo
# - monitor for failures? retry?
    pass
def gather_result():
# todo
    pass
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Manage Batch Job')
parser.add_argument('--name', help='unique job name (used for queue name)')
parser.add_argument('--image', help='worker image')
parser.add_argument('--dir', help='working directory')
parser.add_argument('--exe', help='executable')
args = parser.parse_args()
populate_queue()
create_workers(args.name, args.image)
monitor_batch()
gather_result()
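# --- Hypothetical sketch (not part of the original skeleton) -----------------
# One way populate_queue() could be filled in is to push one RQ job per work
# item onto the Redis-backed queue declared above.  The dotted path
# 'worker.process_item' and the work_items list are placeholder assumptions:
#
#     def populate_queue(work_items):
#         for item in work_items:
#             redis_queue.enqueue('worker.process_item', item)
#
# create_workers() would then submit one Job object per queued message to the
# cluster API, authenticating with the service-account token shown in the
# comments above.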
| 2.28125 | 2 |
gaphor/ui/tests/test_diagrampage.py | 987Frogh/project-makehuman | 1 | 12794410 | import unittest
from gaphas.examples import Box
from gaphor import UML
from gaphor.application import Application
from gaphor.diagram.general.comment import CommentItem
from gaphor.ui.mainwindow import DiagramPage
class DiagramPageTestCase(unittest.TestCase):
def setUp(self):
Application.init(
services=[
"event_manager",
"component_registry",
"element_factory",
"main_window",
"properties",
"namespace",
"diagrams",
"toolbox",
"elementeditor",
"export_menu",
"tools_menu",
]
)
main_window = Application.get_service("main_window")
main_window.open()
self.element_factory = Application.get_service("element_factory")
self.diagram = self.element_factory.create(UML.Diagram)
self.page = DiagramPage(
self.diagram,
Application.get_service("event_manager"),
self.element_factory,
Application.get_service("properties"),
)
self.page.construct()
assert self.page.diagram == self.diagram
assert self.page.view.canvas == self.diagram.canvas
assert len(self.element_factory.lselect()) == 1
def tearDown(self):
self.page.close()
del self.page
self.diagram.unlink()
del self.diagram
Application.shutdown()
assert len(self.element_factory.lselect()) == 0
def test_creation(self):
pass
def test_placement(self):
box = Box()
self.diagram.canvas.add(box)
self.diagram.canvas.update_now()
self.page.view.request_update([box])
self.diagram.create(
CommentItem, subject=self.element_factory.create(UML.Comment)
)
assert len(self.element_factory.lselect()) == 2
| 2.53125 | 3 |
scripts/slim_recommender.py | inpefess/recommender-systems-course | 0 | 12794411 | <filename>scripts/slim_recommender.py
# Copyright 2021-2022 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
SLIM recommender
================
https://github.com/KarypisLab/SLIM
https://github.com/MaurizioFD/RecSys2019_DeepLearning_Evaluation
"""
from rs_datasets import MovieLens
from rs_metrics import hitrate
# pylint: disable=import-error
from SLIM import SLIM, SLIMatrix
from rs_course.utils import movielens_split
def slim_recommender(dataset_size: str) -> None:
"""
>>> slim_recommender("small")
Learning takes...
0.55
:param dataset_size: a size of MovieLens dataset to use
"""
train, test, _ = movielens_split(
MovieLens(dataset_size).ratings, 0.95, True
)
trainmat = SLIMatrix(train)
model = SLIM()
model.train({}, trainmat)
model.save_model(modelfname="slim_model.csr", mapfname="slim_map.csr")
testmat = SLIMatrix(train, model)
slim_pred = model.predict(testmat, outfile="slim_recommendations.txt")
pred = {int(k): list(map(int, v)) for k, v in slim_pred.items()}
print(hitrate(test, pred))
| 2.375 | 2 |
fsleyes/views/canvaspanel.py | pauldmccarthy/fsleyes | 12 | 12794412 | <filename>fsleyes/views/canvaspanel.py
#!/usr/bin/env python
#
# canvaspanel.py - Base class for all panels that display overlay data.
#
# Author: <NAME> <<EMAIL>>
#
"""This module provides the :class:`CanvasPanel` class, which is the base
class for all panels which display overlays using ``OpenGL``.
"""
import logging
import wx
import fsl.utils.idle as idle
import fsleyes_props as props
import fsleyes.actions as actions
import fsleyes.displaycontext as displayctx
from . import colourbarpanel
from . import viewpanel
log = logging.getLogger(__name__)
class CanvasPanel(viewpanel.ViewPanel):
"""The ``CanvasPanel`` class is a :class:`.ViewPanel` which is the base
class for all panels which display overlays using ``OpenGL``
(e.g. the :class:`.OrthoPanel` and the :class:`.LightBoxPanel`). A
``CanvasPanel`` instance uses a :class:`.SceneOpts` instance to control
much of its functionality. The ``SceneOpts`` instance used by a
``CanvasPanel`` can be accessed via the :meth:`sceneOpts` property.
The ``CanvasPanel`` class contains settings and functionality common to
all sub-classes, including *movie mode* (see :attr:`movieMode`), the
ability to show a colour bar (a :class:`.ColourBarPanel`; see
:attr:`.SceneOpts.showColourBar`), and a number of actions.
**Sub-class implementations**
Sub-classes of the ``CanvasPanel`` must do the following:
1. Add their content to the panel that is accessible via the
:meth:`contentPanel` property (see the note on
:ref:`adding content <canvaspanel-adding-content>`).
2. Override the :meth:`getGLCanvases` method.
3. Call the :meth:`centrePanelLayout` method in their ``__init__``
method.
4. Override the :meth:`centrePanelLayout` method if any custom layout is
necessary.
5. Call :meth:`.ViewPanel.initProfile` with the initial interaction
       profile
    A minimal, commented example sub-class sketch is included at the end of
    this module.
**Actions**
The following actions are available through a ``CanvasPanel`` (see
the :mod:`.actions` module):
.. autosummary::
:nosignatures:
screenshot
movieGif
showCommandLineArgs
toggleMovieMode
toggleDisplaySync
toggleVolumeSync
.. _canvaspanel-adding-content:
**Adding content**
To support colour bar and screenshot functionality, the ``CanvasPanel``
uses a hierarchy of ``wx.Panel`` instances, depicted in the following
containment hierarchy diagram:
.. graphviz::
digraph canvasPanel {
graph [size=""];
node [style="filled",
shape="box",
fillcolor="#ddffdd",
fontname="sans"];
rankdir="BT";
1 [label="CanvasPanel"];
2 [label="Centre panel"];
3 [label="Custom content (for complex layouts)"];
4 [label="Container panel"];
5 [label="ColourBarPanel"];
6 [label="Content panel"];
7 [label="Content added by sub-classes"];
2 -> 1;
3 -> 2;
4 -> 2;
5 -> 4;
6 -> 4;
7 -> 6;
}
As depicted in the diagram, sub-classes need to add their content to the
*content panel*. This panel is accessible via the :meth:`contentPanel`
property.
The *centre panel* is the :meth:`.ViewPanel.centrePanel`. The *container
panel* is also available, via :meth:`containerPanel`. Everything in
the container panel will appear in screenshots (see the :meth:`screenshot`
method).
The :meth:`centrePanelLayout` method lays out the centre panel, using the
:meth:`layoutContainerPanel` method to lay out the colour bar and the
content panel. The ``centrePanelLayout`` method simply adds the canvas
container directly to the centre panel. Sub-classes which have more
advanced layout requirements (e.g. the :class:`.LightBoxPanel` needs a
scrollbar) may override the :meth:`centrePanelLayout` method to implement
their own layout. These sub-class implementations must:
1. Call the :meth:`layoutContainerPanel` method.
2. Add the container panel (accessed via :meth:`containerPanel`)
to the centre panel (accessed via :meth:`centrePanel`).
3. Add any other custom content to the centre panel.
"""
syncLocation = props.Boolean(default=True)
"""If ``True`` (the default), the :attr:`.DisplayContext.location` for
this ``CanvasPanel`` is linked to the master ``DisplayContext`` location.
"""
syncOverlayOrder = props.Boolean(default=True)
"""If ``True`` (the default), the :attr:`.DisplayContext.overlayOrder`
for this ``CanvasPanel`` is linked to the master ``DisplayContext``
overlay order.
"""
syncOverlayDisplay = props.Boolean(default=True)
"""If ``True`` (the default), the properties of the :class:`.Display`
and :class:`.DisplayOpts` instances for every overlay, as managed
by the :attr:`.DisplayContext` for this ``CanvasPanel``, are linked to
the properties of all ``Display`` and ``DisplayOpts`` instances managed
by the master ``DisplayContext`` instance.
"""
syncOverlayVolume = props.Boolean(default=True)
"""If ``True`` (the default), the volume/timepoint properties of the
:class:`.DisplayOpts` instances for every overlay, as managed by the
:attr:`.DisplayContext` for this ``CanvasPanel``, are linked to the
properties of all ``DisplayOpts`` instances managed by the master
``DisplayContext`` instance.
"""
movieMode = props.Boolean(default=False)
"""If ``True``, and the currently selected overlay (see
:attr:`.DisplayContext.selectedOverlay`) is a :class:`.Image` instance
with its display managed by a :class:`.VolumeOpts` instance, the displayed
volume is changed periodically, according to the :attr:`movieRate`
property.
The update is performed on the main application thread via
``wx.CallLater``.
"""
movieRate = props.Int(minval=10, maxval=500, default=400, clamped=True)
"""The movie update rate in milliseconds. The value of this property is
inverted so that a high value corresponds to a fast rate, which makes
more sense when displayed as an option to the user.
"""
movieAxis = props.Choice((0, 1, 2, 3), default=3)
"""Axis along which the movie should be played, relative to the
currently selected :class:`.Image`.
"""
movieSyncRefresh = props.Boolean(default=True)
"""Whether, when in movie mode, to synchronise the refresh for GL
canvases. This is not possible in some platforms/environments.
"""
def __init__(self, parent, overlayList, displayCtx, frame, sceneOpts):
"""Create a ``CanvasPanel``.
:arg parent: The :mod:`wx` parent object.
:arg overlayList: The :class:`.OverlayList` instance.
:arg displayCtx: The :class:`.DisplayContext` instance.
        :arg frame:      The :class:`.FSLeyesFrame` instance.
:arg sceneOpts: A :class:`.SceneOpts` instance for this
``CanvasPanel`` - must be created by
sub-classes.
"""
viewpanel.ViewPanel.__init__(
self, parent, overlayList, displayCtx, frame)
self.__opts = sceneOpts
# Use this name for listener registration,
# in case subclasses use the FSLeyesPanel.name
self.__name = 'CanvasPanel_{}'.format(self.name)
# Bind the sync* properties of this
# CanvasPanel to the corresponding
# properties on the DisplayContext
# instance.
if displayCtx.getParent() is not None:
self.bindProps('syncLocation',
displayCtx,
displayCtx.getSyncPropertyName('worldLocation'))
self.bindProps('syncOverlayOrder',
displayCtx,
displayCtx.getSyncPropertyName('overlayOrder'))
self.bindProps('syncOverlayDisplay', displayCtx)
self.bindProps('syncOverlayVolume', displayCtx)
# If the displayCtx instance does not
# have a parent, this means that it is
# a top level instance
else:
self.disableProperty('syncLocation')
self.disableProperty('syncOverlayOrder')
self.disableProperty('syncOverlayDisplay')
self.disableProperty('syncOverlayVolume')
import fsleyes.actions.moviegif as moviegif
self.centrePanel = wx.Panel(self)
self.__containerPanel = wx.Panel(self.centrePanel)
self.__contentPanel = wx.Panel(self.__containerPanel)
self.__movieGifAction = moviegif.MovieGifAction(
overlayList, displayCtx, self)
self.bindProps('movieSyncRefresh', sceneOpts)
self.toggleMovieMode .bindProps('toggled', self, 'movieMode')
self.toggleDisplaySync.bindProps('toggled', self, 'syncOverlayDisplay')
self.toggleVolumeSync .bindProps('toggled', self, 'syncOverlayVolume')
self.movieGif .bindProps('enabled', self.__movieGifAction)
# the __movieModeChanged method is called
# when movieMode changes, but also when
# the movie axis, overlay list, or selected
# overlay changes. This is because, if movie
# mode is on, but no overlay, or an
# incompatible overlay, is selected, the
# movie loop stops. So it needs to be
# re-started if/when a compatible overlay is
# selected.
self.__movieRunning = False
self .addListener('movieMode',
self.__name,
self.__movieModeChanged)
self .addListener('movieAxis',
self.__name,
self.__movieModeChanged)
self.overlayList.addListener('overlays',
self.__name,
self.__movieModeChanged)
self.displayCtx .addListener('selectedOverlay',
self.__name,
self.__movieModeChanged)
# Canvas/colour bar layout is managed
# in the layoutContainerPanel method
self.__colourBar = None
self.__opts.addListener('colourBarLocation',
self.__name,
self.__colourBarPropsChanged)
self.__opts.addListener('showColourBar',
self.__name,
self.__colourBarPropsChanged)
self.__opts.addListener('bgColour',
self.__name,
self.__bgfgColourChanged)
self.__opts.addListener('fgColour',
self.__name,
self.__bgfgColourChanged)
self.__opts.addListener('labelSize',
self.__name,
self.__labelSizeChanged)
idle.idle(self.__bgfgColourChanged)
def destroy(self):
"""Makes sure that any remaining control panels are destroyed
cleanly, and calls :meth:`.ViewPanel.destroy`.
"""
if self.__colourBar is not None:
self.__colourBar.destroy()
self .removeListener('movieMode', self.__name)
self .removeListener('movieAxis', self.__name)
self.overlayList.removeListener('overlays', self.__name)
self.displayCtx .removeListener('selectedOverlay', self.__name)
self.sceneOpts .removeListener('colourBarLocation', self.__name)
self.sceneOpts .removeListener('showColourBar', self.__name)
self.sceneOpts .removeListener('bgColour', self.__name)
self.sceneOpts .removeListener('fgColour', self.__name)
self.sceneOpts .removeListener('labelSize', self.__name)
self.__movieGifAction.destroy()
self.__opts = None
self.__movieGifAction = None
viewpanel.ViewPanel.destroy(self)
@actions.action
def screenshot(self):
"""Takes a screenshot of the currently displayed scene on this
``CanvasPanel``.
See the :class:`.ScreenshotAction`.
"""
from fsleyes.actions.screenshot import ScreenshotAction
ScreenshotAction(self.overlayList, self.displayCtx, self)()
@actions.action
def movieGif(self):
"""Generates an animated GIF of the currently displayed scene and
movie mode settings on this ``CanvasPanel``.
See the :class:`.MovieGifAction`.
"""
self.__movieGifAction()
@actions.action
def showCommandLineArgs(self):
"""Shows the command line arguments which can be used to re-create
the currently displayed scene. See the :class:`.ShowCommandLineAction`
class.
"""
from fsleyes.actions.showcommandline import ShowCommandLineAction
ShowCommandLineAction(self.overlayList, self.displayCtx, self)()
@actions.action
def applyCommandLineArgs(self):
"""Shows the command line arguments which can be used to re-create
the currently displayed scene. See the :class:`.ApplyCommandLineAction`
class.
"""
from fsleyes.actions.applycommandline import ApplyCommandLineAction
ApplyCommandLineAction(self.overlayList, self.displayCtx, self)()
@actions.toggleAction
def toggleMovieMode(self):
"""Toggles the value of :attr:`movieMode`. """
# The state of this action gets bound to
# the movieMode attribute in __init__
pass
@actions.toggleAction
def toggleDisplaySync(self):
"""Toggles the value of :attr:`syncOverlayDisplay`. """
# The state of this action gets bound to
# the syncOverlayDisplay attribute in __init__
pass
@actions.toggleAction
def toggleVolumeSync(self):
"""Toggles the value of :attr:`syncOverlayVolume`. """
# The state of this action gets bound to
# the syncOverlayVolume attribute in __init__
pass
@property
def sceneOpts(self):
"""Returns the :class:`.SceneOpts` instance used by this
``CanvasPanel``.
"""
return self.__opts
@property
def contentPanel(self):
"""Returns the ``wx.Panel`` to which sub-classes must add their content.
See the note on :ref:`adding content <canvaspanel-adding-content>`.
"""
return self.__contentPanel
@property
def containerPanel(self):
"""Returns the ``wx.Panel`` which contains the
:class:`.ColourBarPanel` if it is being displayed, and the content
panel. See the note on
:ref:`adding content <canvaspanel-adding-content>`.
"""
return self.__containerPanel
@property
def colourBarCanvas(self):
"""If a colour bar is being displayed, this method returns
the :class:`.ColourBarCanvas` instance which is used by the
:class:`.ColourBarPanel` to render the colour bar.
Otherwise, ``None`` is returned.
"""
if self.__colourBar is not None:
return self.__colourBar.getCanvas()
return None
def getGLCanvases(self):
"""This method must be overridden by subclasses, and must return a
list containing all :class:`.SliceCanvas` instances which are being
displayed.
"""
raise NotImplementedError(
'getGLCanvases has not been implemented '
'by {}'.format(type(self).__name__))
def centrePanelLayout(self):
"""Lays out the centre panel. This method may be overridden by
sub-classes which need more advanced layout logic. See the note on
:ref:`adding content <canvaspanel-adding-content>`
"""
self.layoutContainerPanel()
sizer = self.centrePanel.GetSizer()
if sizer is not None:
sizer.Clear()
sizer = wx.BoxSizer(wx.HORIZONTAL)
sizer.Add(self.__containerPanel, flag=wx.EXPAND, proportion=1)
self.centrePanel.SetSizer(sizer)
self.PostSizeEvent()
def layoutContainerPanel(self):
"""Creates a ``wx.Sizer``, and uses it to lay out the colour bar panel
and canvas panel. The sizer object is returned.
This method is used by the default :meth:`centrePanelLayout` method,
and is available for custom sub-class implementations to use.
"""
sizer = self.__containerPanel.GetSizer()
if sizer is not None:
sizer.Clear()
sizer = None
sopts = self.sceneOpts
if not sopts.showColourBar:
if self.__colourBar is not None:
sopts.unbindProps('colourBarLabelSide',
self.__colourBar.colourBar,
'labelSide')
sopts.unbindProps('colourBarSize',
self.__colourBar.canvas,
'barSize')
sopts.unbindProps('highDpi', self.__colourBar.canvas)
self.__colourBar.destroy()
self.__colourBar.Destroy()
self.__colourBar = None
sizer = wx.BoxSizer(wx.HORIZONTAL)
sizer.Add(self.__contentPanel, flag=wx.EXPAND, proportion=1)
self.__containerPanel.SetSizer(sizer)
return
if self.__colourBar is None:
self.__colourBar = colourbarpanel.ColourBarPanel(
self.__containerPanel,
self.overlayList,
self.displayCtx,
self.frame)
bg = sopts.bgColour
fg = sopts.fgColour
fs = sopts.labelSize
self.__colourBar.colourBar.textColour = fg
self.__colourBar.colourBar.bgColour = bg
self.__colourBar.colourBar.fontSize = fs
sopts.bindProps('colourBarLabelSide',
self.__colourBar.colourBar,
'labelSide')
sopts.bindProps('colourBarSize',
self.__colourBar.canvas,
'barSize')
sopts.bindProps('highDpi', self.__colourBar.canvas)
if sopts.colourBarLocation in ('top', 'bottom'):
self.__colourBar.colourBar.orientation = 'horizontal'
elif sopts.colourBarLocation in ('left', 'right'):
self.__colourBar.colourBar.orientation = 'vertical'
if sopts.colourBarLocation in ('top', 'bottom'):
sizer = wx.BoxSizer(wx.VERTICAL)
else:
sizer = wx.BoxSizer(wx.HORIZONTAL)
if sopts.colourBarLocation in ('top', 'left'):
sizer.Add(self.__colourBar, flag=wx.EXPAND)
sizer.Add(self.__contentPanel, flag=wx.EXPAND, proportion=1)
else:
sizer.Add(self.__contentPanel, flag=wx.EXPAND, proportion=1)
sizer.Add(self.__colourBar, flag=wx.EXPAND)
self.__containerPanel.SetSizer(sizer)
def __colourBarPropsChanged(self, *a):
"""Called when any colour bar display properties are changed (see
:class:`.SceneOpts`). Calls :meth:`canvasPanelLayout`.
"""
self.centrePanelLayout()
def __labelSizeChanged(self, *a, **kwa):
"""Called when the :class:`.SceneOpts.lablSize` changes. If a colour
bar is being displayed, it is updated, and the panel layout is
refreshed.
"""
sopts = self.sceneOpts
if self.__colourBar is not None:
self.__colourBar.canvas.colourBar.fontSize = sopts.labelSize
wx.CallAfter(self.Layout)
def __bgfgColourChanged(self, *a, **kwa):
"""Called when the :class:`.SceneOpts.bgColour` or
:class:`.SceneOpts.fgColour` properties change. Updates
background/foreground colours.
The :attr:`.SliceCanvasOpts.bgColour` properties are bound to
        ``SceneOpts.bgColour`` (see :meth:`.HasProperties.bindProps`), so we
don't need to manually update them.
:arg refresh: Must be passed as a keyword argument. If ``True`` (the
                      default), this ``CanvasPanel`` is refreshed.
"""
if self.destroyed:
return
refresh = kwa.pop('refresh', True)
sceneOpts = self.sceneOpts
cpanel = self.contentPanel
canvases = self.getGLCanvases()
bg = sceneOpts.bgColour
fg = sceneOpts.fgColour
cpanel.SetBackgroundColour([round(c * 255) for c in bg])
cpanel.SetForegroundColour([round(c * 255) for c in fg])
if self.__colourBar is not None:
canvas = self.__colourBar.canvas
cbar = self.__colourBar.colourBar
cbar.textColour = fg
cbar.bgColour = bg
canvases.append(canvas)
if refresh:
self.Refresh()
self.Update()
def __movieModeChanged(self, *a):
"""Called when the :attr:`movieMode` property changes. If it has been
enabled, calls :meth:`__movieUpdate`, to start the movie loop.
"""
# The fsl.utils.idle idle loop timeout
# defaults to 200 milliseconds, which can
# cause delays in frame updates. So when
# movie mode is on, we bump up the rate.
def startMovie():
idle.idleLoop.callRate = 10
if not self.__movieLoop(startLoop=True):
idle.idleLoop.callRate = None
# The __movieModeChanged method is called
# on the props event queue. Here we make
# sure that __movieLoop() is called *off*
# the props event queue, by calling it from
# the idle loop.
if self.movieMode: idle.idle(startMovie)
else: idle.idleLoop.callRate = None
def __movieLoop(self, startLoop=False):
"""Manages the triggering of the next movie frame. This method is
called by :meth:`__movieModeChanged` when :attr:`movieMode` changes
and when the selected overlay changes, and also by
:meth:`__syncMovieRefresh` and :meth:`__unsyncMovieRefresh` while
the movie loop is running, to trigger the next frame.
:arg startLoop: This is set to ``True`` when called from
:meth:`__movieModeChanged`. If ``True``, and the movie
loop is already running, this method does nothing.
"""
# Movie loop is already running, nothing to do.
if startLoop and self.__movieRunning:
return True
# Attempt to show the next frame -
# __movieFrame returns True if the
# movie is continuing, False if it
# has ended.
self.__movieRunning = self.__movieFrame()
return self.__movieRunning
def canRunMovie(self, overlay, opts):
"""Returns ``True`` or ``False``, depending on whether movie mode
        is possible with the given ``overlay`` and ``opts``.
"""
import fsl.data.image as fslimage
import fsl.data.mesh as fslmesh
axis = self.movieAxis
# 3D movies are good for all overlays
if axis < 3:
return True
# 4D Nifti images are all good
if isinstance(overlay, fslimage.Nifti) and \
len(overlay.shape) > 3 and \
overlay.shape[3] > 1 and \
isinstance(opts, displayctx.VolumeOpts):
return True
# Mesh surfaces with N-D
# vertex data are all good
if isinstance(overlay, fslmesh.Mesh) and \
opts.vertexDataLen() > 1:
return True
return False
def getMovieFrame(self, overlay, opts):
"""Returns the current movie frame for the given overlay.
A movie frame is typically a sequentially increasing number in
some minimum/maximum range, e.g. a voxel or volume index.
This method may be overridden by sub-classes for custom behaviour
(e.g. the :class:`.Scene3DPanel`).
"""
axis = self.movieAxis
def nifti():
if axis < 3: return opts.getVoxel(vround=False)[axis]
else: return opts.volume
def mesh():
if axis < 3: return other()
else: return opts.vertexDataIndex
def other():
return self.displayCtx.location.getPos(axis)
import fsl.data.image as fslimage
import fsl.data.mesh as fslmesh
if isinstance(overlay, fslimage.Nifti): return nifti()
elif isinstance(overlay, fslmesh.Mesh): return mesh()
else: return other()
def doMovieUpdate(self, overlay, opts):
"""Called by :meth:`__movieFrame`. Updates the properties on the
given ``opts`` instance to move forward one frame in the movie.
This method may be overridden by sub-classes for custom behaviour
(e.g. the :class:`.Scene3DPanel`).
:returns: A value which identifies the current movie frame. This may
be a volume or voxel index, or a world coordinate location
on one axis.
"""
axis = self.movieAxis
def nifti():
limit = overlay.shape[axis]
# This method has been called off the props
# event queue (see __movieModeChanged).
# Therefore, all listeners on the opts.volume
# or DisplayContext.location properties
# should be called immediately, in these
# assignments.
#
# When the movie axis == 3 (time), this means
# that image texture refreshes should be
# triggered and, after the opts.volume
# assignment, all affected GLObjects should
# return ready() == False.
if axis == 3:
if opts.volume >= limit - 1: opts.volume = 0
else: opts.volume += 1
frame = opts.volume
else:
voxel = opts.getVoxel()
if voxel[axis] >= limit - 1: voxel[axis] = 0
else: voxel[axis] += 1
self.displayCtx.location = opts.transformCoords(
voxel, 'voxel', 'display')
frame = voxel[axis]
return frame
def mesh():
if axis == 3:
limit = opts.vertexDataLen()
val = opts.vertexDataIndex
if val >= limit - 1: val = 0
else: val += 1
opts.vertexDataIndex = val
return val
else:
return other()
def other():
bmin, bmax = opts.bounds.getRange(axis)
delta = (bmax - bmin) / 75.0
pos = self.displayCtx.location.getPos(axis)
if pos >= bmax: pos = bmin
else: pos = pos + delta
self.displayCtx.location.setPos(axis, pos)
return pos
import fsl.data.image as fslimage
import fsl.data.mesh as fslmesh
if isinstance(overlay, fslimage.Nifti): frame = nifti()
elif isinstance(overlay, fslmesh.Mesh): frame = mesh()
else: frame = other()
return frame
def __movieFrame(self):
"""Called by :meth:`__movieLoop`.
If the currently selected overlay (see
:attr:`.DisplayContext.selectedOverlay`) is a 4D :class:`.Image` being
displayed as a ``volume`` (see the :class:`.VolumeOpts` class), the
:attr:`.NiftiOpts.volume` property is incremented and all
GL canvases in this ``CanvasPanel`` are refreshed.
:returns: ``True`` if the movie loop was started, ``False`` otherwise.
"""
from . import scene3dpanel
if self.destroyed: return False
if not self.movieMode: return False
overlay = self.displayCtx.getSelectedOverlay()
canvases = self.getGLCanvases()
if overlay is None:
return False
opts = self.displayCtx.getOpts(overlay)
if not self.canRunMovie(overlay, opts):
return False
# We want the canvas refreshes to be
# synchronised. So we 'freeze' them
# while changing the image volume, and
# then refresh them all afterwards.
for c in canvases:
c.FreezeDraw()
c.FreezeSwapBuffers()
self.doMovieUpdate(overlay, opts)
# Now we get refs to *all* GLObjects managed
# by every canvas - we have to wait until
# they are all ready to be drawn before we
# can refresh the canvases. Note that this
# is only necessary when the movie axis == 3
globjs = [c.getGLObject(o)
for c in canvases
for o in self.overlayList]
globjs = [g for g in globjs if g is not None]
def allReady():
return all([g.ready() for g in globjs])
# Figure out the movie rate - the
# number of seconds to wait until
# triggering the next frame.
rate = self.movieRate
rateMin = self.getAttribute('movieRate', 'minval')
rateMax = self.getAttribute('movieRate', 'maxval')
# Special case/hack - if this is a Scene3DPanel,
# and the movie axis is X/Y/Z, we always
# use a fast rate. Instead, the Scene3dPanel
# will increase/decrease the rotation angle
# to speed up/slow down the movie instead.
if isinstance(self, scene3dpanel.Scene3DPanel) and self.movieAxis < 3:
rate = rateMax
rate = (rateMin + (rateMax - rate)) / 1000.0
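        # Worked example with the defaults: movieRate=400, minval=10 and
        # maxval=500 give (10 + (500 - 400)) / 1000 = 0.11 seconds between
        # frames; movieRate=500 (fastest) gives 0.01s, movieRate=10 gives 0.5s.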
def update():
self.movieSync()
idle.idle(self.__movieLoop, after=rate)
# Refresh the canvases when all
# GLObjects are ready to be drawn.
idle.idleWhen(update, allReady, pollTime=rate / 10)
return True
def movieSync(self):
"""Called by :meth:`__movieUpdate`. Updates all GL canvases, attempting
to refresh them in a synchronised manner.
Ideally all canvases should be drawn off-screen (i.e. rendered to the
back buffer), and then all refreshed together (back and front buffers
swapped). Unfortunately some OpenGL drivers seem to have trouble with
this approach, and require drawing and front/back buffer swaps to be
done at the same time.
This method will refresh the GL canvases in either a synchronised or
unsynchronised manner, depending upon the value of the
:attr:`movieSyncRefresh` property.
"""
canvases = self.getGLCanvases()
if self.movieSyncRefresh:
for c in canvases:
c.ThawDraw()
c.Refresh()
for c in canvases:
c.ThawSwapBuffers()
c.SwapBuffers()
else:
for c in canvases:
c.ThawDraw()
c.ThawSwapBuffers()
c.Refresh()
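# -----------------------------------------------------------------------------
# Hypothetical example - a minimal ``CanvasPanel`` sub-class sketch, showing the
# steps listed in the class documentation (add content to ``contentPanel``,
# override ``getGLCanvases``, call ``centrePanelLayout`` from ``__init__``).
# The ``SceneOpts`` constructor call and the ``initProfile`` argument are
# placeholder assumptions, not verified FSLeyes API usage.
#
# class ExamplePanel(CanvasPanel):
#
#     def __init__(self, parent, overlayList, displayCtx, frame):
#         sceneOpts = displayctx.SceneOpts(self)
#         CanvasPanel.__init__(
#             self, parent, overlayList, displayCtx, frame, sceneOpts)
#
#         # 1. Add content to the content panel
#         canvas = wx.Panel(self.contentPanel)
#         sizer  = wx.BoxSizer(wx.HORIZONTAL)
#         sizer.Add(canvas, flag=wx.EXPAND, proportion=1)
#         self.contentPanel.SetSizer(sizer)
#
#         # 3. Lay out the centre panel
#         self.centrePanelLayout()
#
#         # 5. Initialise the interaction profile
#         self.initProfile('view')
#
#     def getGLCanvases(self):
#         # 2. Return all SliceCanvas instances being displayed
#         return []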
| 2.359375 | 2 |
voiceEmotions/voice assistance.py | Yuni0217/Face_the_Facts | 1 | 12794413 | <filename>voiceEmotions/voice assistance.py
#I can't install PyAudio on PyCharm Linux
import pyttsx3
import datetime
import speech_recognition as sr
import wikipedia
def speak (audio):
engine.say(audio)
engine.runAndWait()
engine = pyttsx3.init('espeak')
voices = engine.getProperty('voices')
if __name__=="__main__":
speak("Hello world!")
def wishme ():
hour = int(datetime.datetime.now().hour)
if hour >=0 and hour<12:
speak("Good Morning!")
elif hour >=12 and hour <18:
speak("Good Afternoon")
else:
speak("Good evening")
speak("Welcome!")
def takeCommand():
r = sr.Recognizer()
with sr.Microphone() as source:
print("Listening....")
r.pause_threshold = 1
audio = r.listen(source)
try:
print("Recognising....")
query = r.recognize_google(audio, language='en-in')
print(f"User said: {query}\n")
except Exception as e:
print("say that again please")
            return ""  # empty string keeps the .lower() call in the main loop safe
return query
if __name__ == "__main__":
wishme()
while True:
query = takeCommand().lower()
if 'wikipedia' in query:
speak("Searching wikipedia!")
query = query.replace("wikipedia","")
results = wikipedia.summary(query, sentences = 2)
speak(results)
print(results)
elif 'the time' in query:
strTime = datetime.datetime.now().strftime("%H:%M:%S")
speak(f"Ma ' am, time is {strTime}")
| 3.421875 | 3 |
scripts/upgrade_dev_files.py | Chaoste/d-matcher | 0 | 12794414 | import pandas as pd
files = [
'students_wt_15.csv',
'students_st_16.csv',
'students_wt_16.csv',
'students_st_17.csv',
]
for filename in files:
path = f'input/{filename}'
students = pd.read_csv(path, index_col=0)
print('From:', students.columns)
students = students[['hash', 'Sex', 'Nationality', 'Discipline']]
print('To:', students.columns, '\n')
students.to_csv(path)
| 3.40625 | 3 |
paderbox/testing/testfile_fetcher.py | JanekEbb/paderbox | 0 | 12794415 | import urllib.request as url
from paderbox.io.cache_dir import get_cache_dir
def fetch_file_from_url(fpath, file=None):
"""
    Checks whether the local cache directory already contains a file named
    <file>. If not, the data is downloaded from <fpath> and stored under
    that name.
    Args:
        fpath: url of the file to download
        file: name for the cached copy (derived from the url if omitted)
    Returns: Path to the local file
"""
path = get_cache_dir()
if file is None:
# remove difficult letters
file = fpath.replace(':', '_').replace('/', '_')
if not (path / file).exists():
datapath = url.urlopen(fpath)
data = datapath.read()
with open(path / file, "wb") as f:
f.write(data)
return path / file
def get_file_path(file_name):
"""
    Looks up a test audio file by name and returns the path to the local copy.
    Args:
        file_name: name of the audio file needed for the test
Returns: Path to audio test file
"""
_pesq = "https://github.com/ludlows/python-pesq/raw/master/audio/"
_pb_bss = "https://github.com/fgnt/pb_test_data/raw/master/bss_data/" \
"low_reverberation/"
url_ = {
'sample.wav': _pb_bss + "speech_source_0.wav",
'observation.wav': _pb_bss+"observation.wav", # multi channel
'speech_source_0.wav': _pb_bss+"speech_source_0.wav",
'speech_source_1.wav': _pb_bss+"speech_source_1.wav",
'speech_image_0.wav': _pb_bss+"speech_image_0.wav", # multi channel
'speech_image_1.wav': _pb_bss+"speech_image_1.wav", # multi channel
'noise_image.wav': _pb_bss+"noise_image.wav", # multi channel
'speech.wav': _pesq + "speech.wav",
"speech_bab_0dB.wav": _pesq + "speech_bab_0dB.wav",
# pylint: disable=line-too-long
# Found on https://www.isip.piconepress.com/projects/speech/software/tutorials/production/fundamentals/v1.0/section_02/s02_01_p04.html
'speech.sph': 'https://www.isip.piconepress.com/projects/speech/software/tutorials/production/fundamentals/v1.0/section_02/data/speech.sph',
'123_1pcbe_shn.sph': 'https://github.com/robd003/sph2pipe/raw/master/test/123_1pcbe_shn.sph',
'123_1pcle_shn.sph': 'https://github.com/robd003/sph2pipe/raw/master/test/123_1pcle_shn.sph',
'123_1ulaw_shn.sph': 'https://github.com/robd003/sph2pipe/raw/master/test/123_1ulaw_shn.sph',
'123_2alaw.sph': 'https://github.com/robd003/sph2pipe/raw/master/test/123_2alaw.sph',
}[file_name]
return fetch_file_from_url(url_, file_name)
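# Example usage (hypothetical): the file is downloaded on the first call and
# the cached copy is reused afterwards.
if __name__ == '__main__':
    wav_path = get_file_path('speech.wav')
    print(wav_path)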
| 3.296875 | 3 |
welib/weio/fast_summary_file.py | moonieann/welib | 24 | 12794416 | <filename>welib/weio/fast_summary_file.py
import numpy as np
import pandas as pd
from io import open
import os
# Local
from .mini_yaml import yaml_read
try:
from .file import File, EmptyFileError
except:
EmptyFileError = type('EmptyFileError', (Exception,),{})
File=dict
# --------------------------------------------------------------------------------}
# --- Main Class
# --------------------------------------------------------------------------------{
class FASTSummaryFile(File):
"""
Read an OpenFAST summary file (.sum, .yaml). The object behaves as a dictionary.
    NOTE: only the new SubDyn summary file format is supported.
Main methods
------------
- read, toDataFrame
Examples
--------
# read a subdyn summary file
sum = FASTSummaryFile('5MW.SD.sum.yaml')
print(sum['module']) # SubDyn
M = sum['M'] # Mass matrix
K = sum['K'] # stiffness matrix
"""
@staticmethod
def defaultExtensions():
return ['.sum','.yaml']
@staticmethod
def formatName():
return 'FAST summary file'
def __init__(self,filename=None, **kwargs):
self.filename = None
if filename:
self.read(filename, **kwargs)
def read(self, filename=None, header_only=False):
""" """
if filename:
self.filename = filename
if not self.filename:
raise Exception('No filename provided')
if not os.path.isfile(self.filename):
raise OSError(2,'File not found:',self.filename)
if os.stat(self.filename).st_size == 0:
raise EmptyFileError('File is empty:',self.filename)
with open(self.filename, 'r', errors="surrogateescape") as fid:
header= readFirstLines(fid, 4)
if any(['subdyn' in s.lower() for s in header]):
self['module']='SubDyn'
readSubDynSum(self)
else:
raise NotImplementedError('This summary file format is not yet supported')
def toDataFrame(self):
if 'module' not in self.keys():
            raise Exception('Summary file has not been read, or module type could not be detected')
        if self['module']=='SubDyn':
            # For SubDyn files, readSubDynSum binds subDynToDataFrame to the class,
            # so this generic implementation should never be reached.
            raise Exception('This should not happen since class was added to subdyn object')
        raise NotImplementedError('toDataFrame is not implemented for module {}'.format(self['module']))
def toGraph(self):
from .fast_input_file_graph import fastToGraph
return fastToGraph(self)
# --------------------------------------------------------------------------------}
# --- Helper functions
# --------------------------------------------------------------------------------{
def readFirstLines(fid, nLines):
lines=[]
for i, line in enumerate(fid):
lines.append(line.strip())
if i==nLines:
break
return lines
# --------------------------------------------------------------------------------}
# --- Sub-reader/class for SubDyn summary files
# --------------------------------------------------------------------------------{
def readSubDynSum(self):
# Read data
#T=yaml.load(fid, Loader=yaml.SafeLoader)
yaml_read(self.filename, self)
# --- Treatement of useful data
if self['DOF2Nodes'].shape[1]==3:
self['DOF2Nodes']=np.column_stack((np.arange(self['DOF2Nodes'].shape[0])+1,self['DOF2Nodes']))
# NOTE: DOFs are reindexed to start at 0
self['DOF2Nodes'][:,0]-=1
self['DOF___L'] -=1 # internal DOFs
self['DOF___B'] -=1 # internal
self['DOF___F'] -=1 # fixed DOFs
self['CB_frequencies']=self['CB_frequencies'].ravel()
self['X'] = self['Nodes'][:,1].astype(float)
self['Y'] = self['Nodes'][:,2].astype(float)
self['Z'] = self['Nodes'][:,3].astype(float)
# --- Useful methods that will be added to the class
def NodesDisp(self, IDOF, UDOF, maxDisp=None, sortDim=None):
DOF2Nodes = self['DOF2Nodes']
# NOTE: SubDyn nodes in the summary files are sorted
# so the position we give are for all Nodes
INodes = list(np.sort(np.unique(DOF2Nodes[IDOF,1]))) # Sort
nShapes = UDOF.shape[1]
disp=np.empty((len(INodes),3,nShapes)); disp.fill(np.nan)
pos=np.empty((len(INodes),3)) ; pos.fill(np.nan)
# TODO
# handle T_red for rigid and joints
for i,iDOF in enumerate(IDOF):
iNode = DOF2Nodes[iDOF,1]
nDOFPerNode = DOF2Nodes[iDOF,2]
nodeDOF = DOF2Nodes[iDOF,3]
iiNode = INodes.index(iNode)
if nodeDOF<=3:
pos[iiNode, 0]=self['X'][iNode-1]
pos[iiNode, 1]=self['Y'][iNode-1]
pos[iiNode, 2]=self['Z'][iNode-1]
for iShape in np.arange(nShapes):
disp[iiNode, nodeDOF-1, iShape] = UDOF[i, iShape]
# Scaling
if maxDisp is not None:
for iShape in np.arange(nShapes):
mD=np.nanmax(np.abs(disp[:, :, iShape]))
if mD>1e-5:
disp[:, :, iShape] *= maxDisp/mD
# Sorting according to a dimension
if sortDim is not None:
I=np.argsort(pos[:,sortDim])
INodes = np.array(INodes)[I]
disp = disp[I,:,:]
pos = pos[I,:]
return disp, pos, INodes
def getModes(data, maxDisp=None, sortDim=2):
""" return Guyan and CB modes"""
if maxDisp is None:
#compute max disp such as it's 10% of maxdimension
            dx = np.max(data['X'])-np.min(data['X'])
            dy = np.max(data['Y'])-np.min(data['Y'])
            dz = np.max(data['Z'])-np.min(data['Z'])
maxDisp = np.max([dx,dy,dz])*0.1
# NOTE: DOF have been reindexed -1
DOF_B = data['DOF___B'].ravel()
DOF_F = data['DOF___F'].ravel()
DOF_K = (np.concatenate((DOF_B,data['DOF___L'].ravel(), DOF_F))).astype(int)
# CB modes
PhiM = data['PhiM']
Phi_CB = np.vstack((np.zeros((len(DOF_B),PhiM.shape[1])),PhiM, np.zeros((len(DOF_F),PhiM.shape[1]))))
dispCB, posCB, INodesCB = data.NodesDisp(DOF_K, Phi_CB, maxDisp=maxDisp, sortDim=sortDim)
# Guyan modes
PhiR = data['PhiR']
Phi_Guyan = np.vstack((np.eye(len(DOF_B)),PhiR, np.zeros((len(DOF_F),PhiR.shape[1]))))
dispGy, posGy, INodesGy = data.NodesDisp(DOF_K, Phi_Guyan, maxDisp=maxDisp, sortDim=sortDim)
return dispGy, posGy, INodesGy, dispCB, posCB, INodesCB
def subDynToJson(data, outfile=None):
""" Convert to a "JSON" format """
dispGy, posGy, _, dispCB, posCB, _ = data.getModes()
        Nodes = data['Nodes']
        Elements = data['Elements'].copy()  # copy so repeated calls do not keep decrementing the stored indices
Elements[:,0]-=1
Elements[:,1]-=1
Elements[:,2]-=1
CB_freq = data['CB_frequencies'].ravel()
d=dict();
d['Connectivity']=Elements[:,[1,2]].astype(int).tolist();
d['Nodes']=Nodes[:,[1,2,3]].tolist()
d['ElemProps']=[{'shape':'cylinder','type':int(Elements[iElem,5]),'Diam':np.sqrt(Elements[iElem,7]/np.pi)*4} for iElem in range(len(Elements))] # NOTE: diameter is cranked up
# disp[iiNode, nodeDOF-1, iShape] = UDOF[i, iShape]
d['Modes']=[
{
'name':'GY{:d}'.format(iMode+1),
'omega':1,
'Displ':dispGy[:,:,iMode].tolist()
} for iMode in range(dispGy.shape[2]) ]
d['Modes']+=[
{
'name':'CB{:d}'.format(iMode+1),
'omega':CB_freq[iMode]*2*np.pi, #in [rad/s]
'Displ':dispCB[:,:,iMode].tolist()
} for iMode in range(dispCB.shape[2]) ]
d['groundLevel']=np.min(data['Z']) # TODO
if outfile is not None:
import json
with open(outfile, 'w', encoding='utf-8') as f:
try:
f.write(unicode(json.dumps(d, ensure_ascii=False))) #, indent=2)
except:
json.dump(d, f, indent=2)
return d
def subDynToDataFrame(data):
""" Convert to DataFrame containing nodal displacements """
def toDF(pos,disp,preffix=''):
disp[np.isnan(disp)]=0
disptot=disp.copy()
columns=[]
for ishape in np.arange(disp.shape[2]):
disptot[:,:,ishape]= pos + disp[:,:,ishape]
sMode=preffix+'Mode{:d}'.format(ishape+1)
columns+=[sMode+'x_[m]',sMode+'y_[m]',sMode+'z_[m]']
disptot= np.moveaxis(disptot,2,1).reshape(disptot.shape[0],disptot.shape[1]*disptot.shape[2])
disp = np.moveaxis(disp,2,1).reshape(disp.shape[0],disp.shape[1]*disp.shape[2])
df= pd.DataFrame(data = disptot ,columns = columns)
# remove zero
dfDisp= pd.DataFrame(data = disp ,columns = columns)
df = df.loc[:, (dfDisp != 0).any(axis=0)]
dfDisp = dfDisp.loc[:, (dfDisp != 0).any(axis=0)]
dfDisp.columns = [c.replace('Mode','Disp') for c in dfDisp.columns.values]
return df, dfDisp
dispGy, posGy, _, dispCB, posCB, _ = data.getModes()
columns = ['z_[m]','x_[m]','y_[m]']
dataZXY = np.column_stack((posGy[:,2],posGy[:,0],posGy[:,1]))
dfZXY = pd.DataFrame(data = dataZXY, columns=columns)
df1, df1d = toDF(posGy, dispGy,'Guyan')
df2, df2d = toDF(posCB, dispCB,'CB')
df = pd.concat((dfZXY, df1, df2, df1d, df2d), axis=1)
return df
# adding method to class dynamically to give it a "SubDyn Summary flavor"
setattr(FASTSummaryFile, 'NodesDisp' , NodesDisp)
setattr(FASTSummaryFile, 'toDataFrame', subDynToDataFrame)
setattr(FASTSummaryFile, 'toJSON' , subDynToJson)
setattr(FASTSummaryFile, 'getModes' , getModes)
return self
if __name__=='__main__':
T=FASTSummaryFile('../Pendulum.SD.sum.yaml')
df=T.toDataFrame()
print(df)
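    # Hypothetical further usage (sketch): for SubDyn summary files the
    # instance also exposes getModes() and toJSON(), bound in readSubDynSum.
    # dispGy, posGy, _, dispCB, posCB, _ = T.getModes()
    # T.toJSON('Pendulum.SD.modes.json')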
| 2.640625 | 3 |
users/tests.py | GilbertTan19/Empire_of_Movies-deploy | 0 | 12794417 | from django.test import Client, TestCase
from django.contrib.auth import get_user_model
from django.urls import reverse, resolve
from .views import ProfileUpdateView
from .forms import CustomUserCreationForm
# Create your tests here.
class CustomUserTests(TestCase):
def test_create_user(self):
User = get_user_model()
user = User.objects.create_user(
username='test123',
email='<EMAIL>',
password = '<PASSWORD>',
)
self.assertEqual(user.username,'test123')
self.assertEqual(user.email, '<EMAIL>')
self.assertTrue(user.is_active)
self.assertFalse(user.is_staff)
self.assertFalse(user.is_superuser)
def test_create_superuser(self):
User = get_user_model()
user = User.objects.create_superuser(
username='test123',
email='<EMAIL>',
password = '<PASSWORD>',
)
self.assertEqual(user.username,'test123')
self.assertEqual(user.email, '<EMAIL>')
self.assertTrue(user.is_active)
self.assertTrue(user.is_staff)
self.assertTrue(user.is_superuser)
class SignupTests(TestCase):
username = 'newuser'
email = '<EMAIL>'
def setUp(self):
url = reverse('account_signup')
self.response = self.client.get(url)
def test_signup_template(self):
self.assertEqual(self.response.status_code, 200)
self.assertTemplateUsed(self.response, 'account/signup.html')
self.assertContains(self.response, 'Sign Up')
self.assertNotContains(
self.response, 'Hi there! I should not be on the page.')
def test_signup_form(self):
new_user = get_user_model().objects.create_user(
self.username, self.email)
self.assertEqual(get_user_model().objects.all().count(), 1)
self.assertEqual(get_user_model().objects.all()
[0].username, self.username)
self.assertEqual(get_user_model().objects.all()
[0].email, self.email)
class UpdateProfileTest(TestCase):
def setUp(self):
self.user = get_user_model().objects.create_user(
username='testuser',
email='<EMAIL>',
password='<PASSWORD>'
)
self.client.login(username='testuser', password='<PASSWORD>')
self.response = self.client.get(reverse('update_profile'))
def test_update_profile_template(self):
self.assertEqual(self.response.status_code, 200)
self.assertTemplateUsed(self.response, 'account/profile_update.html')
self.assertContains(self.response, 'Update Profile')
self.assertNotContains(
self.response, 'Hi there! I should not be on the page.') | 2.4375 | 2 |
communicator/memcached_comm.py | DS3Lab/LambdaML | 23 | 12794418 | <filename>communicator/memcached_comm.py
from storage import MemcachedStorage
from communicator import Communicator
from communicator import memcached_primitive, memcached_primitive_nn
class MemcachedCommunicator(Communicator):
def __init__(self, _storage, _tmp_bucket, _merged_bucket, _num_workers, _worker_index):
super(MemcachedCommunicator, self).__init__(_storage)
assert isinstance(self.storage, MemcachedStorage)
self.tmp_bucket = _tmp_bucket
self.merged_bucket = _merged_bucket
self.num_workers = _num_workers
self.worker_index = _worker_index
def reduce_batch(self, vector, cur_epoch=0, cur_batch=0):
return memcached_primitive.reduce_batch(self.storage, vector, self.tmp_bucket, self.merged_bucket,
self.num_workers, self.worker_index, cur_epoch, cur_batch)
def reduce_batch_nn(self, data_bytes, cur_epoch=0, cur_batch=0):
return memcached_primitive_nn.reduce_batch(self.storage, data_bytes, self.tmp_bucket, self.merged_bucket,
self.num_workers, self.worker_index, cur_epoch, cur_batch)
def reduce_epoch(self, vector, cur_epoch=0):
return memcached_primitive.reduce_epoch(self.storage, vector, self.tmp_bucket, self.merged_bucket,
self.num_workers, self.worker_index, cur_epoch)
def reduce_epoch_nn(self, data_bytes, cur_epoch=0):
return memcached_primitive_nn.reduce_epoch(self.storage, data_bytes, self.tmp_bucket, self.merged_bucket,
self.num_workers, self.worker_index, cur_epoch)
def reduce_scatter_batch(self, vector, cur_epoch=0, cur_batch=0):
return memcached_primitive.reduce_scatter_batch(self.storage, vector, self.tmp_bucket, self.merged_bucket,
self.num_workers, self.worker_index, cur_epoch, cur_batch)
def reduce_scatter_epoch(self, vector, cur_epoch=0):
return memcached_primitive.reduce_scatter_epoch(self.storage, vector, self.tmp_bucket, self.merged_bucket,
self.num_workers, self.worker_index, cur_epoch)
def delete_expired_batch(self, cur_epoch, cur_batch):
return memcached_primitive.delete_expired_batch(self.storage, self.merged_bucket, cur_epoch, cur_batch)
def delete_expired_epoch(self, cur_epoch):
return memcached_primitive.delete_expired_epoch(self.storage, self.merged_bucket, cur_epoch)
def async_reduce(self, vector, object_name=""):
return memcached_primitive.async_reduce(self.storage, vector, self.merged_bucket, object_name)
def async_reduce_nn(self, data_bytes, object_name=""):
return memcached_primitive_nn.async_reduce(self.storage, data_bytes, self.merged_bucket, object_name)
| 2.421875 | 2 |
tests/unittests/test_exc.py | Cottonwood-Technology/ValidX | 19 | 12794419 | import pickle
from collections import deque
from datetime import datetime
from textwrap import dedent
import pytest
from dateutil.parser import isoparse
from pytz import UTC
from validx import exc
def test_validation_error():
te = exc.InvalidTypeError(expected=int, actual=str)
assert te.context == deque([])
assert te.format_context() == ""
assert te.format_error() == "InvalidTypeError(expected=%r, actual=%r)" % (int, str)
assert repr(te) == "<InvalidTypeError(expected=%r, actual=%r)>" % (int, str)
te.add_context("x")
assert te.context == deque(["x"])
assert te.format_context() == "x"
assert repr(te) == "<x: InvalidTypeError(expected=%r, actual=%r)>" % (int, str)
te.add_context(1)
assert te.context == deque([1, "x"])
assert te.format_context() == "1.x"
assert repr(te) == "<1.x: InvalidTypeError(expected=%r, actual=%r)>" % (int, str)
te.add_context("a.b")
assert te.format_context() == "[a.b].1.x"
assert te.context == deque(["a.b", 1, "x"])
assert repr(te) == "<[a.b].1.x: InvalidTypeError(expected=%r, actual=%r)>" % (
int,
str,
)
assert repr(te) == str(te)
assert list(te) == [te]
assert len(te) == 1
assert te[0] == te
assert te == exc.InvalidTypeError(te.context, expected=int, actual=str)
assert te != exc.ConditionError(te.context, expected=int, actual=str)
assert te != exc.InvalidTypeError(te.context, expected=int, actual=float)
with pytest.raises(IndexError):
te[1]
te.sort()
assert list(te) == [te]
te.sort(reverse=True)
assert list(te) == [te]
assert pickle.loads(pickle.dumps(te)) == te
def test_mapping_key_error():
mke = exc.MissingKeyError("x")
fke = exc.ForbiddenKeyError("y")
assert mke.context == deque(["x"])
assert fke.context == deque(["y"])
assert repr(mke) == "<x: MissingKeyError()>"
assert repr(fke) == "<y: ForbiddenKeyError()>"
assert mke == exc.MissingKeyError(key="x")
assert mke == exc.MissingKeyError(deque(["x"]))
assert pickle.loads(pickle.dumps(mke)) == mke
assert pickle.loads(pickle.dumps(fke)) == fke
def test_schema_error():
mve_1 = exc.MaxValueError(context=deque(["y"]), expected=100, actual=200)
mve_2 = exc.MaxValueError(context=deque(["x"]), expected=100, actual=300)
se = exc.SchemaError(errors=[mve_1, mve_2])
assert se.context == deque([])
assert se == exc.SchemaError([mve_1, mve_2])
assert repr(se) == (
dedent(
"""
<SchemaError(errors=[
<y: MaxValueError(expected=100, actual=200)>,
<x: MaxValueError(expected=100, actual=300)>
])>
"""
).strip()
)
se.add_context("a")
assert se.context == deque([])
assert mve_1.context == deque(["a", "y"])
assert mve_2.context == deque(["a", "x"])
assert repr(se) == (
dedent(
"""
<SchemaError(errors=[
<a.y: MaxValueError(expected=100, actual=200)>,
<a.x: MaxValueError(expected=100, actual=300)>
])>
"""
).strip()
)
assert repr(se) == str(se)
assert list(se) == [mve_1, mve_2]
assert len(se) == 2
assert se[0] == mve_1
assert se[1] == mve_2
with pytest.raises(IndexError):
se[2]
se.sort()
assert list(se) == [mve_2, mve_1]
se.sort(reverse=True)
assert list(se) == [mve_1, mve_2]
assert pickle.loads(pickle.dumps(se)) == se
def test_extra():
assert exc.EXTRA_KEY == exc.Extra("KEY")
assert exc.EXTRA_VALUE == exc.Extra("VALUE")
assert exc.EXTRA_KEY != exc.EXTRA_VALUE
assert repr(exc.EXTRA_KEY) == "@KEY"
assert repr(exc.EXTRA_VALUE) == "@VALUE"
assert str(exc.EXTRA_KEY) == repr(exc.EXTRA_KEY)
assert pickle.loads(pickle.dumps(exc.EXTRA_KEY)) == exc.EXTRA_KEY
assert pickle.loads(pickle.dumps(exc.EXTRA_VALUE)) == exc.EXTRA_VALUE
def test_step():
step_1 = exc.Step(1)
step_2 = exc.Step(2)
assert step_1 != step_2
assert step_1 == exc.Step(1)
assert repr(step_1) == "#1"
assert repr(step_2) == "#2"
assert str(step_1) == repr(step_1)
assert pickle.loads(pickle.dumps(step_1)) == step_1
assert pickle.loads(pickle.dumps(step_2)) == step_2
def test_format_error():
assert exc.format_error(exc.InvalidTypeError(expected=int, actual=type(None))) == [
("", "Value should not be null.")
]
assert exc.format_error(exc.InvalidTypeError(expected=int, actual=str)) == [
("", "Expected type “int”, got “str”.")
]
assert exc.format_error(exc.OptionsError(expected=[1], actual=2)) == [
("", "Expected 1, got 2.")
]
assert exc.format_error(exc.OptionsError(expected=[1, 2, 3], actual=4)) == [
("", "Expected one of [1, 2, 3], got 4.")
]
assert exc.format_error(exc.MinValueError(expected=10, actual=5)) == [
("", "Expected value ≥ 10, got 5.")
]
assert exc.format_error(exc.MaxValueError(expected=10, actual=15)) == [
("", "Expected value ≤ 10, got 15.")
]
assert exc.format_error(
exc.FloatValueError(expected="finite", actual=float("-inf"))
) == [("", "Expected finite number, got -∞.")]
assert exc.format_error(
exc.FloatValueError(expected="finite", actual=float("+inf"))
) == [("", "Expected finite number, got +∞.")]
assert exc.format_error(
exc.FloatValueError(expected="number", actual=float("nan"))
) == [("", "Expected number, got NaN.")]
assert exc.format_error(exc.StrDecodeError(expected="utf-8", actual=b"\xFF")) == [
("", "Cannot decode value using “utf-8” encoding.")
]
assert exc.format_error(exc.MinLengthError(expected=10, actual=5)) == [
("", "Expected value length ≥ 10, got 5.")
]
assert exc.format_error(exc.MaxLengthError(expected=10, actual=15)) == [
("", "Expected value length ≤ 10, got 15.")
]
assert exc.format_error(exc.TupleLengthError(expected=1, actual=2)) == [
("", "Expected exactly 1 element, got 2.")
]
assert exc.format_error(exc.TupleLengthError(expected=3, actual=2)) == [
("", "Expected exactly 3 elements, got 2.")
]
assert exc.format_error(
exc.PatternMatchError(expected="^[0-9]+$", actual="xyz")
) == [("", "Cannot match “xyz” using “^[0-9]+$”.")]
assert exc.format_error(
exc.DatetimeParseError(expected="%Y-%m-%d", actual="08/18/2018")
) == [
("", "Cannot parse date/time value from “08/18/2018” using “%Y-%m-%d” format.")
]
assert exc.format_error(
exc.DatetimeParseError(expected=isoparse, actual="08/18/2018")
) == [("", "Cannot parse date/time value from “08/18/2018”.")]
assert exc.format_error(
exc.DatetimeTypeError(expected="naive", actual=datetime.now(UTC))
) == [("", "Naive date/time object is expected.")]
assert exc.format_error(
exc.DatetimeTypeError(expected="tzaware", actual=datetime.now())
) == [("", "Timezone-aware date/time object is expected.")]
assert exc.format_error(exc.RecursionMaxDepthError(expected=2, actual=3)) == [
("", "Too many nested structures, limit is 2.")
]
assert exc.format_error(exc.ForbiddenKeyError("x")) == [
("x", "Key is not allowed.")
]
assert exc.format_error(exc.MissingKeyError("x")) == [
("x", "Required key is not provided.")
]
# Test fallback
assert exc.format_error(exc.ConditionError(expected=1, actual=2)) == [
("", "ConditionError(expected=1, actual=2)")
]
assert exc.format_error(exc.FloatValueError(expected="something", actual=0.0)) == [
("", "FloatValueError(expected='something', actual=0.0)")
]
assert exc.format_error(
exc.DatetimeTypeError(expected="something", actual=datetime(2018, 12, 5))
) == [
(
"",
"DatetimeTypeError(expected='something', actual=datetime.datetime(2018, 12, 5, 0, 0))",
)
]
| 2.5625 | 3 |
jaseci_core/jaseci/actions/vector.py | Gim3l/jaseci | 0 | 12794420 | <filename>jaseci_core/jaseci/actions/vector.py
"""Built in actions for Jaseci"""
from .module.vector_actions import * # noqa
| 1.15625 | 1 |
dataModules/comSystem.py | wordyallen/drfms | 1 | 12794421 | <filename>dataModules/comSystem.py
import numpy as np
from nvd3Format import nvd3Format
def comSystem(tx_msg, control_index):
tx_bs = []
for c in tx_msg:
byte = np.fromiter(format(ord(c),'b'), dtype=int)
if len(byte) == 6:
pad = np.concatenate(([0,0], byte))
else:
pad = np.concatenate(([0], byte))
tx_bs = np.concatenate((tx_bs, pad ))
SPB = 20
tx_wave = []
for n in range(len(tx_bs)):
bit = tx_bs[n]
if bit == 1:
tx_wave = np.concatenate(( tx_wave, np.ones(SPB)))
else:
tx_wave = np.concatenate(( tx_wave, np.zeros(SPB)))
runs = []
run_len = 0
val = 0
for i in tx_wave:
if i == val:
run_len = run_len + 1
else:
val = i
runs = np.concatenate((runs, [run_len]))
run_len = 1
runs = np.concatenate((runs, [run_len]))
    # IMPLEMENT STEP RESPONSE HERE
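    # The channel is modelled as a first-order (RC-style) step response with
    # gain k and pole a: rising edges follow k*(1 - a**(n+1)) and falling
    # edges the complementary decay, so long runs settle towards 1 or 0.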
control = np.linspace(0, 0.5, 11)
k= 1
a = .9
#intitial Conditions
n = np.arange(runs[0]+runs[1]) -runs[0]
rx_wave = k*(1-a**(n+1))
rx_wave[rx_wave < 0] = 0
for i in range(len(runs)-2):
if i%2 == 0:
n = np.arange(runs[i+2])
s = - k*(1-a**(n+1)) + k
else:
n = np.arange(runs[i+2])
s = k*(1-a**(n+1))
rx_wave = np.concatenate((rx_wave, s))
noise = np.random.uniform(-control[control_index], control[control_index], len(rx_wave))
rx_wave = rx_wave + noise
threshold = (max(rx_wave)+ min(rx_wave))/2
    sample_ind = np.arange(len(rx_wave))[0:len(rx_wave):SPB] + SPB // 2  # integer offset so the array can be used as an index
rx_bs = rx_wave[sample_ind]>threshold
rx_bs = rx_bs.astype(float)
num_chars = len(tx_msg)
rx_msg = ''
for i in range(num_chars):
byte = rx_bs[ i*8 : (i*8 + 8)]
data = 0
for j in range(8):
data = data + byte[j]*2**(8-(j+1))
data = data.astype(int)
        rx_msg = rx_msg + chr(data)  # chr() replaces Python 2's unichr()
return rx_msg, nvd3Format(tx_wave), nvd3Format(rx_wave)
# Example usage:
# a = comSystem('Hello and Goodbye', 10)
# print(a[0])
# import matplotlib.pyplot as plt
# plt.plot(a[1])
# plt.plot(a[2])
# plt.show()
| 2.28125 | 2 |
tools/wix/_get_wix.py | psryland/rylogic_code | 2 | 12794422 | <gh_stars>1-10
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Notes:
# - This script downloads the WiX toolset binaries into this directory if they are not already present.
# - It cannot make use of scripts in 'repo/script' because the UserVars.py file may not exist yet in a clean build.
import os, sys, urllib.request, zipfile
wix_dir = os.path.abspath(os.path.join(os.path.dirname(__file__)))
wix_url = "https://github.com/wixtoolset/wix3/releases/download/wix3112rtm/wix311-binaries.zip"
# Download the WiX tools if not already there
def GetWiX():
# Assume already installed
if os.path.exists(os.path.join(wix_dir, "candle.exe")):
return
# Download WiX zip
wix_zip = os.path.join(wix_dir, "wix.zip")
urllib.request.urlretrieve(wix_url, wix_zip)
# Extract to 'wix_dir'
with zipfile.ZipFile(wix_zip, 'r') as zf:
zf.extractall(wix_dir)
return
# Entry point
if __name__ == "__main__":
try:
GetWiX()
except Exception as ex:
print(f"ERROR: {str(ex)}")
sys.exit(-1)
| 2.390625 | 2 |
anuvaad-etl/anuvaad-extractor/sentence/etl-tokeniser/errors/errors_exception.py | ManavTriesStuff/anuvaad | 15 | 12794423 | class FormatError(Exception):
def __init__(self, code, message):
self._code = code
self._message = message
@property
def code(self):
return self._code
@property
def message(self):
return self._message
def __str__(self):
return self.__class__.__name__ + ': ' + self.message
class WorkflowkeyError(Exception):
def __init__(self, code, message):
self._code = code
self._message = message
@property
def code(self):
return self._code
@property
def message(self):
return self._message
def __str__(self):
return self.__class__.__name__ + ': ' + self.message
class FileErrors(Exception):
def __init__(self, code, message):
self._code = code
self._message = message
@property
def code(self):
return self._code
@property
def message(self):
return self._message
def __repr__(self):
return { "code" : self.code, "message" : self.__class__.__name__ + ': ' + self.message }
class FileEncodingError(Exception):
def __init__(self, code, message):
self._code = code
self._message = message
@property
def code(self):
return self._code
@property
def message(self):
return self._message
def __str__(self):
return self.__class__.__name__ + ': ' + self.message
class ServiceError(Exception):
def __init__(self, code, message):
self._code = code
self._message = message
@property
def code(self):
return self._code
@property
def message(self):
return self._message
def __str__(self):
return self.__class__.__name__ + ': ' + self.message
class KafkaConsumerError(Exception):
def __init__(self, code, message):
self._code = code
self._message = message
@property
def code(self):
return self._code
@property
def message(self):
return self._message
def __str__(self):
return self.__class__.__name__ + ': ' + self.message
class KafkaProducerError(Exception):
def __init__(self, code, message):
self._code = code
self._message = message
@property
def code(self):
return self._code
@property
def message(self):
return self._message
def __repr__(self):
return { "code" : self.code, "message" : self.__class__.__name__ + ': ' + self.message } | 2.875 | 3 |
alura/elasticsearch_001/example01.py | flaviogf/Cursos | 2 | 12794424 | <filename>alura/elasticsearch_001/example01.py
from requests import put, delete
response = delete('http://localhost:9200/customer')
print(response.json())
response = put('http://localhost:9200/customer?pretty')
print(response.json())
| 2.25 | 2 |
searx/engines/pubmed.py | xu1991/open | 4 | 12794425 | #!/usr/bin/env python
"""
PubMed (Scholar publications)
@website https://www.ncbi.nlm.nih.gov/pubmed/
@provide-api yes (https://www.ncbi.nlm.nih.gov/home/develop/api/)
@using-api yes
@results XML
@stable yes
@parse url, title, publishedDate, content
More info on api: https://www.ncbi.nlm.nih.gov/books/NBK25501/
"""
from flask_babel import gettext
from lxml import etree
from datetime import datetime
from searx.url_utils import urlencode
from searx.poolrequests import get
categories = ['science']
base_url = 'https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi'\
+ '?db=pubmed&{query}&retstart={offset}&retmax={hits}'
# engine dependent config
number_of_results = 10
pubmed_url = 'https://www.ncbi.nlm.nih.gov/pubmed/'
def request(query, params):
# basic search
offset = (params['pageno'] - 1) * number_of_results
string_args = dict(query=urlencode({'term': query}),
offset=offset,
hits=number_of_results)
params['url'] = base_url.format(**string_args)
return params
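# Illustrative example (not part of the engine): for the query "aspirin" on
# page 1, request() formats base_url into roughly
#   https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term=aspirin&retstart=0&retmax=10
# where the URL-encoded term comes from urlencode() above.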
def response(resp):
results = []
# First retrieve notice of each result
pubmed_retrieve_api_url = 'https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?'\
+ 'db=pubmed&retmode=xml&id={pmids_string}'
pmids_results = etree.XML(resp.content)
pmids = pmids_results.xpath('//eSearchResult/IdList/Id')
pmids_string = ''
for item in pmids:
pmids_string += item.text + ','
retrieve_notice_args = dict(pmids_string=pmids_string)
retrieve_url_encoded = pubmed_retrieve_api_url.format(**retrieve_notice_args)
search_results_xml = get(retrieve_url_encoded).content
search_results = etree.XML(search_results_xml).xpath('//PubmedArticleSet/PubmedArticle/MedlineCitation')
for entry in search_results:
title = entry.xpath('.//Article/ArticleTitle')[0].text
pmid = entry.xpath('.//PMID')[0].text
url = pubmed_url + pmid
try:
content = entry.xpath('.//Abstract/AbstractText')[0].text
except:
content = gettext('No abstract is available for this publication.')
        # If a doi is available, add it to the snippet
try:
doi = entry.xpath('.//ELocationID[@EIdType="doi"]')[0].text
content = 'DOI: {doi} Abstract: {content}'.format(doi=doi, content=content)
except:
pass
if len(content) > 300:
content = content[0:300] + "..."
# TODO: center snippet on query term
res_dict = {'url': url,
'title': title,
'content': content}
try:
publishedDate = datetime.strptime(entry.xpath('.//DateCreated/Year')[0].text
+ '-' + entry.xpath('.//DateCreated/Month')[0].text
+ '-' + entry.xpath('.//DateCreated/Day')[0].text, '%Y-%m-%d')
res_dict['publishedDate'] = publishedDate
except:
pass
results.append(res_dict)
return results
| 2.34375 | 2 |
nad_logging_service/tests/logger/exception_test.py | KaiPrince/NAD-Logging-Service | 0 | 12794426 | """
* Project Name: NAD-Logging-Service
* File Name: exception_test.py
* Programmer: <NAME>
* Date: Sun, Nov 15, 2020
* Description: This file contains exception tests for the Logger app.
"""
import pytest
from .sample_data import exception_logs as sample_logs
@pytest.mark.parametrize("data", sample_logs)
def test_all_bad_tests_fail(client, data):
""" All these tests should fail """
# Arrange
# Act
response = client.post(
"/logger/log",
content_type="application/json",
json=data,
headers={"x-access-token": data["authToken"]},
)
# Assert
assert response.status_code != 200
| 2.25 | 2 |
tests/bugs/core_2230_test.py | FirebirdSQL/firebird-qa | 1 | 12794427 | <gh_stars>1-10
#coding:utf-8
#
# id: bugs.core_2230
# title: Implement domain check of input parameters of execute block
# description:
# tracker_id: CORE-2230
# min_versions: []
# versions: 3.0
# qmid: None
import pytest
from firebird.qa import db_factory, python_act, Action
from firebird.driver import DatabaseError
# version: 3.0
# resources: None
substitutions_1 = []
init_script_1 = """CREATE DOMAIN DOM1 AS INTEGER NOT NULL CHECK (value in (0, 1));
"""
db_1 = db_factory(page_size=4096, sql_dialect=3, init=init_script_1)
# test_script_1
#---
# c = db_conn.cursor()
# cmd = c.prep('execute block (x DOM1 = ?) returns (y integer) as begin y = x; suspend; end')
#
# c.execute(cmd,[1])
# printData(c)
#
# try:
# c.execute(cmd,[10])
# printData(c)
# except kdb.DatabaseError,e:
# print (e[0])
# else:
# print ('Test Failed')
#---
act_1 = python_act('db_1', substitutions=substitutions_1)
expected_stdout_1 = """Y
-----------
1
"""
@pytest.mark.version('>=3.0')
def test_1(act_1: Action, capsys):
with act_1.db.connect() as con:
c = con.cursor()
cmd = c.prepare('execute block (x DOM1 = ?) returns (y integer) as begin y = x; suspend; end')
c.execute(cmd, [1])
act_1.print_data(c)
act_1.expected_stdout = expected_stdout_1
act_1.stdout = capsys.readouterr().out
assert act_1.clean_stdout == act_1.clean_expected_stdout
with pytest.raises(Exception, match='.*validation error for variable X, value "10"'):
c.execute(cmd, [10])
act_1.print_data(c)
| 2.015625 | 2 |
venv/Lib/site-packages/xlsxwriter/__init__.py | IFFP-Team-IT/ScrapingApec | 1 | 12794428 | <gh_stars>1-10
__version__ = '1.4.3'
__VERSION__ = __version__
from .workbook import Workbook
| 0.964844 | 1 |
pythonapm/contrib/django/wrapper.py | nextapm/pythonapm | 0 | 12794429 |
from importlib import import_module
from pythonapm.instrumentation import instrument_method
from pythonapm.logger import agentlogger
from pythonapm import constants
methods = [
'process_request',
'process_view',
'process_exception',
'process_template_response',
'process_response'
]
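# These are the standard Django middleware hook names; for every configured
# middleware class, each hook found on it is wrapped by instrument_method()
# in instrument_middlewares() below.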
def instrument_middlewares():
try:
from django.conf import settings
middleware = getattr(settings, "MIDDLEWARE", None) or \
getattr(settings, "MIDDLEWARE_CLASSES", None)
if middleware is None:
return
for each in middleware:
module_path, class_name = each.rsplit('.', 1)
act_module = import_module(module_path)
for each_method in methods:
method_info = {
constants.class_str : class_name,
constants.method_str : each_method,
}
instrument_method(module_path, act_module, method_info)
except Exception as exc:
agentlogger('django middleware instrumentation error', exc)
| 2.171875 | 2 |
cloudnetpy/metadata.py | griesche/cloudnetpy-1 | 1 | 12794430 | """Initial Metadata of Cloudnet variables for NetCDF file writing."""
from typing import NamedTuple, Optional
class MetaData(NamedTuple):
long_name: Optional[str] = None
standard_name: Optional[str] = None
units: Optional[str] = None
comment: Optional[str] = None
definition: Optional[str] = None
references: Optional[str] = None
ancillary_variables: Optional[str] = None
positive: Optional[str] = None
axis: Optional[str] = None
calendar: Optional[str] = None
source: Optional[str] = None
COMMON_ATTRIBUTES = {
"time": MetaData(long_name="Time UTC", axis="T", standard_name="time", calendar="standard"),
"height": MetaData(
long_name="Height above mean sea level",
standard_name="height_above_mean_sea_level",
units="m",
),
"range": MetaData(
long_name="Range from instrument",
axis="Z",
units="m",
comment="Distance from instrument to centre of each range bin.",
),
"latitude": MetaData(
long_name="Latitude of site", units="degree_north", standard_name="latitude"
),
"longitude": MetaData(
long_name="Longitude of site", units="degree_east", standard_name="longitude"
),
"altitude": MetaData(long_name="Altitude of site", standard_name="altitude", units="m"),
"Zh": MetaData(
long_name="Radar reflectivity factor",
units="dBZ",
comment="Calibrated reflectivity. Calibration convention: in the absence of attenuation,\n"
"a cloud at 273 K containing one million 100-micron droplets per cubic metre will\n"
"have a reflectivity of 0 dBZ at all frequencies.",
),
"width": MetaData(
long_name="Spectral width",
units="m s-1",
comment=(
"This parameter is the standard deviation of the reflectivity-weighted\n"
"velocities in the radar pulse volume."
),
),
"v": MetaData(
long_name="Doppler velocity",
units="m s-1",
comment=(
"This parameter is the radial component of the velocity, with positive\n"
"velocities are away from the radar."
),
),
"ldr": MetaData(
long_name="Linear depolarisation ratio",
units="dB",
comment="This parameter is the ratio of cross-polar to co-polar reflectivity.",
),
"lwp": MetaData(
long_name="Liquid water path",
units="g m-2",
standard_name="atmosphere_cloud_liquid_water_content",
),
"kurtosis": MetaData(
long_name="Kurtosis of spectra",
units="1",
),
"nyquist_velocity": MetaData(long_name="Nyquist velocity", units="m s-1"),
"radar_frequency": MetaData(long_name="Radar transmit frequency", units="GHz"),
"beta": MetaData(
long_name="Attenuated backscatter coefficient",
units="sr-1 m-1",
comment="SNR-screened attenuated backscatter coefficient.",
),
"beta_raw": MetaData(
long_name="Attenuated backscatter coefficient",
units="sr-1 m-1",
comment="Non-screened attenuated backscatter coefficient.",
),
"beta_smooth": MetaData(
long_name="Attenuated backscatter coefficient",
units="sr-1 m-1",
comment="SNR-screened attenuated backscatter coefficient.\n"
"Weak background smoothed using Gaussian 2D-kernel.",
),
"wavelength": MetaData(
long_name="Laser wavelength",
units="nm",
),
"zenith_angle": MetaData(
long_name="Zenith angle",
units="degree",
standard_name="zenith_angle",
comment="Angle to the local vertical. A value of zero is directly overhead.",
),
"azimuth_angle": MetaData(
long_name="Azimuth angle",
standard_name="solar_azimuth_angle",
units="degree",
),
"temperature": MetaData(
long_name="Temperature",
units="K",
),
"pressure": MetaData(
long_name="Pressure",
units="Pa",
),
}
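# Minimal usage sketch (an assumption for illustration; the real attribute
# writer lives elsewhere in cloudnetpy): copy the non-None MetaData fields of
# a variable onto an arbitrary target object such as a netCDF4 variable.
def _example_copy_attributes(target, key: str) -> None:
    meta = COMMON_ATTRIBUTES[key]
    for field, value in meta._asdict().items():
        if value is not None:
            setattr(target, field, value)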
| 2.5 | 2 |
nipype/algorithms/rapidart.py | satra/NiPypeold | 0 | 12794431 | <filename>nipype/algorithms/rapidart.py
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
The rapidart module provides routines for artifact detection and region of
interest analysis.
These functions include:
* ArtifactDetect: performs artifact detection on functional images
* StimulusCorrelation: determines correlation between stimuli
schedule and movement/intensity parameters
Change directory to provide relative paths for doctests
>>> import os
>>> filepath = os.path.dirname( os.path.realpath( __file__ ) )
>>> datadir = os.path.realpath(os.path.join(filepath, '../testing/data'))
>>> os.chdir(datadir)
"""
import os
from glob import glob
from copy import deepcopy
import numpy as np
from scipy import signal
import scipy.io as sio
from nipype.interfaces.base import (Bunch, InterfaceResult, BaseInterface,
traits, InputMultiPath, OutputMultiPath,
TraitedSpec, File)
from nipype.externals.pynifti import load, funcs
from nipype.utils.filemanip import filename_to_list, list_to_filename
from nipype.utils.misc import find_indices
#import matplotlib as mpl
#import matplotlib.pyplot as plt
#import traceback
class ArtifactDetectInputSpec(TraitedSpec):
realigned_files = InputMultiPath(File(exists=True), desc="Names of realigned functional data files", mandatory=True)
realignment_parameters = InputMultiPath(File(exists=True), mandatory=True,
desc=("Names of realignment parameters"
"corresponding to the functional data files"))
    parameter_source = traits.Enum("SPM", "FSL", "Siemens", desc="Are the movement parameters from SPM or FSL or from " \
"Siemens PACE data. Options: SPM, FSL or Siemens", mandatory=True)
use_differences = traits.ListBool([True, True], minlen = 2, maxlen = 2, usedefault=True,
desc="Use differences between successive motion (first element)" \
"and intensity paramter (second element) estimates in order" \
"to determine outliers. (default is [True, True])")
use_norm = traits.Bool(True, desc = "Uses a composite of the motion parameters in order to determine" \
"outliers. Requires ``norm_threshold`` to be set. (default is" \
"True) ", usedefault=True)
norm_threshold = traits.Float(desc="Threshold to use to detect motion-related outliers when" \
"composite motion is being used (see ``use_norm``)", mandatory=True,
xor=['rotation_threshold','translation_threshold'])
rotation_threshold = traits.Float(desc="Threshold (in radians) to use to detect rotation-related outliers",
mandatory=True, xor=['norm_threshold'])
translation_threshold = traits.Float(desc="Threshold (in mm) to use to detect translation-related outliers",
mandatory=True, xor=['norm_threshold'])
zintensity_threshold = traits.Float(desc="Intensity Z-threshold use to detection images that deviate from the" \
"mean", mandatory=True)
mask_type = traits.Enum('spm_global', 'file', 'thresh', desc="Type of mask that should be used to mask the functional data." \
"*spm_global* uses an spm_global like calculation to determine the" \
"brain mask. *file* specifies a brain mask file (should be an image" \
"file consisting of 0s and 1s). *thresh* specifies a threshold to" \
"use. By default all voxels are used, unless one of these mask" \
"types are defined.")
mask_file = File(exists=True, desc="Mask file to be used if mask_type is 'file'.")
mask_threshold = traits.Float(desc="Mask threshold to be used if mask_type is 'thresh'.")
intersect_mask = traits.Bool(True, desc = "Intersect the masks when computed from spm_global. (default is" \
"True)")
class ArtifactDetectOutputSpec(TraitedSpec):
outlier_files = OutputMultiPath(File(exists=True),desc="One file for each functional run containing a list of 0-based" \
"indices corresponding to outlier volumes")
intensity_files = OutputMultiPath(File(exists=True),desc="One file for each functional run containing the global intensity" \
"values determined from the brainmask")
statistic_files = OutputMultiPath(File(exists=True),desc="One file for each functional run containing information about the" \
"different types of artifacts and if design info is provided then" \
"details of stimulus correlated motion and a listing or artifacts by" \
"event type.")
#mask_file = File(exists=True,
# desc='generated or provided mask file')
class ArtifactDetect(BaseInterface):
"""Detects outliers in a functional imaging series
Uses intensity and motion parameters to infer outliers. If `use_norm` is
True, it computes the movement of the center of each face a cuboid centered
around the head and returns the maximal movement across the centers.
Examples
--------
>>> ad = ArtifactDetect()
>>> ad.inputs.realigned_files = 'functional.nii'
>>> ad.inputs.realignment_parameters = 'functional.par'
>>> ad.inputs.parameter_source = 'FSL'
>>> ad.inputs.norm_threshold = 1
>>> ad.inputs.use_differences = [True, False]
>>> ad.inputs.zintensity_threshold = 3
>>> ad.run() # doctest: +SKIP
"""
input_spec = ArtifactDetectInputSpec
output_spec = ArtifactDetectOutputSpec
def _get_output_filenames(self,motionfile,output_dir):
"""Generate output files based on motion filenames
Parameters
----------
motionfile: file/string
Filename for motion parameter file
output_dir: string
output directory in which the files will be generated
"""
if isinstance(motionfile,str):
infile = motionfile
elif isinstance(motionfile,list):
infile = motionfile[0]
else:
raise Exception("Unknown type of file")
(filepath,filename) = os.path.split(infile)
(filename,ext) = os.path.splitext(filename)
artifactfile = os.path.join(output_dir,''.join(('art.',filename,'_outliers.txt')))
intensityfile = os.path.join(output_dir,''.join(('global_intensity.',filename,'.txt')))
statsfile = os.path.join(output_dir,''.join(('stats.',filename,'.txt')))
normfile = os.path.join(output_dir,''.join(('norm.',filename,'.txt')))
return artifactfile,intensityfile,statsfile,normfile
def _list_outputs(self):
outputs = self._outputs().get()
outputs['outlier_files'] = []
outputs['intensity_files'] = []
outputs['statistic_files'] = []
for i,f in enumerate(filename_to_list(self.inputs.realigned_files)):
outlierfile,intensityfile,statsfile, _ = self._get_output_filenames(f,os.getcwd())
outputs['outlier_files'].insert(i,outlierfile)
outputs['intensity_files'].insert(i,intensityfile)
outputs['statistic_files'].insert(i,statsfile)
outputs['outlier_files'] = list_to_filename(outputs['outlier_files'])
outputs['intensity_files'] = list_to_filename(outputs['intensity_files'])
outputs['statistic_files'] = list_to_filename(outputs['statistic_files'])
return outputs
def _get_affine_matrix(self,params):
"""Returns an affine matrix given a set of parameters
params : np.array (upto 12 long)
[translation (3), rotation (3,xyz, radians), scaling (3),
shear/affine (3)]
"""
rotfunc = lambda x : np.array([[np.cos(x), np.sin(x)],[-np.sin(x),np.cos(x)]])
q = np.array([0,0,0,0,0,0,1,1,1,0,0,0])
if len(params)<12:
params=np.hstack((params,q[len(params):]))
params.shape = (len(params),)
# Translation
T = np.eye(4)
T[0:3,-1] = params[0:3] #np.vstack((np.hstack((np.eye(3),params[0:3,])),np.array([0,0,0,1])))
# Rotation
Rx = np.eye(4)
Rx[1:3,1:3] = rotfunc(params[3])
Ry = np.eye(4)
Ry[(0,0,2,2),(0,2,0,2)] = rotfunc(params[4]).ravel()
Rz = np.eye(4)
Rz[0:2,0:2] = rotfunc(params[5])
# Scaling
S = np.eye(4)
S[0:3,0:3] = np.diag(params[6:9])
# Shear
Sh = np.eye(4)
Sh[(0,0,1),(1,2,2)] = params[9:12]
return np.dot(T,np.dot(Rx,np.dot(Ry,np.dot(Rz,np.dot(S,Sh)))))
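    # Worked example (illustrative, not from the original source): for a pure
    # 2 mm x-translation, params = [2, 0, 0, 0, 0, 0], the returned matrix is
    # the 4x4 identity with element [0, 3] == 2, so applying it to a
    # homogeneous point [x, y, z, 1] shifts the point by 2 mm along x.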
def _calc_norm(self,mc,use_differences):
"""Calculates the maximum overall displacement of the midpoints
of the faces of a cube due to translation and rotation.
Parameters
----------
mc : motion parameter estimates
[3 translation, 3 rotation (radians)]
use_differences : boolean
Returns
-------
norm : at each time point
"""
        respos = np.diag([70, 70, 75])
        resneg = np.diag([-70, -110, -45])
# respos=np.diag([50,50,50]);resneg=np.diag([-50,-50,-50]);
# XXX - SG why not the above box
cube_pts = np.vstack((np.hstack((respos,resneg)),np.ones((1,6))))
newpos = np.zeros((mc.shape[0],18))
for i in range(mc.shape[0]):
newpos[i,:] = np.dot(self._get_affine_matrix(mc[i,:]),cube_pts)[0:3,:].ravel()
normdata = np.zeros(mc.shape[0])
if use_differences:
newpos = np.concatenate((np.zeros((1,18)),np.diff(newpos,n=1,axis=0)),axis=0)
for i in range(newpos.shape[0]):
normdata[i] = np.max(np.sqrt(np.sum(np.reshape(np.power(np.abs(newpos[i,:]),2),(3,6)),axis=0)))
else:
#if not registered to mean we may want to use this
#mc_sum = np.sum(np.abs(mc),axis=1)
#ref_idx = find_indices(mc_sum == np.min(mc_sum))
#ref_idx = ref_idx[0]
#newpos = np.abs(newpos-np.kron(np.ones((newpos.shape[0],1)),newpos[ref_idx,:]))
newpos = np.abs(signal.detrend(newpos,axis=0,type='constant'))
normdata = np.sqrt(np.mean(np.power(newpos,2),axis=1))
return normdata
def _nanmean(self, a, axis=None):
if axis:
return np.nansum(a, axis)/np.sum(1-np.isnan(a),axis)
else:
return np.nansum(a)/np.sum(1-np.isnan(a))
def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None):
"""
Core routine for detecting outliers
"""
if not cwd:
cwd = os.getcwd()
# read in motion parameters
mc_in = np.loadtxt(motionfile)
mc = deepcopy(mc_in)
if self.inputs.parameter_source == 'SPM':
pass
elif self.inputs.parameter_source == 'FSL':
mc = mc[:,[3,4,5,0,1,2]]
elif self.inputs.parameter_source == 'Siemens':
Exception("Siemens PACE format not implemented yet")
else:
Exception("Unknown source for movement parameters")
if self.inputs.use_norm:
# calculate the norm of the motion parameters
normval = self._calc_norm(mc,self.inputs.use_differences[0])
tidx = find_indices(normval>self.inputs.norm_threshold)
ridx = find_indices(normval<0)
else:
if self.inputs.use_differences[0]:
mc = np.concatenate( (np.zeros((1,6)),np.diff(mc_in,n=1,axis=0)) , axis=0)
traval = mc[:,0:3] # translation parameters (mm)
rotval = mc[:,3:6] # rotation parameters (rad)
tidx = find_indices(np.sum(abs(traval)>self.inputs.translation_threshold,1)>0)
ridx = find_indices(np.sum(abs(rotval)>self.inputs.rotation_threshold,1)>0)
# read in functional image
if isinstance(imgfile,str):
nim = load(imgfile)
elif isinstance(imgfile,list):
if len(imgfile) == 1:
nim = load(imgfile[0])
else:
images = [load(f) for f in imgfile]
nim = funcs.concat_images(images)
# compute global intensity signal
(x,y,z,timepoints) = nim.get_shape()
data = nim.get_data()
g = np.zeros((timepoints,1))
masktype = self.inputs.mask_type
if masktype == 'spm_global': # spm_global like calculation
intersect_mask = self.inputs.intersect_mask
if intersect_mask:
mask = np.ones((x,y,z),dtype=bool)
for t0 in range(timepoints):
vol = data[:,:,:,t0]
mask = mask*(vol>(self._nanmean(vol)/8))
for t0 in range(timepoints):
vol = data[:,:,:,t0]
g[t0] = self._nanmean(vol[mask])
if len(find_indices(mask))<(np.prod((x,y,z))/10):
intersect_mask = False
g = np.zeros((timepoints,1))
if not intersect_mask:
for t0 in range(timepoints):
vol = data[:,:,:,t0]
mask = vol>(self._nanmean(vol)/8)
g[t0] = self._nanmean(vol[mask])
elif masktype == 'file': # uses a mask image to determine intensity
mask = load(self.inputs.mask_file).get_data()
mask = mask>0.5
for t0 in range(timepoints):
vol = data[:,:,:,t0]
g[t0] = self._nanmean(vol[mask])
elif masktype == 'thresh': # uses a fixed signal threshold
for t0 in range(timepoints):
vol = data[:,:,:,t0]
mask = vol>self.inputs.mask_threshold
g[t0] = self._nanmean(vol[mask])
else:
mask = np.ones((x,y,z))
g = self._nanmean(data[mask>0,:],1)
# compute normalized intensity values
gz = signal.detrend(g,axis=0) # detrend the signal
if self.inputs.use_differences[1]:
gz = np.concatenate( (np.zeros((1,1)),np.diff(gz,n=1,axis=0)) , axis=0)
gz = (gz-np.mean(gz))/np.std(gz) # normalize the detrended signal
iidx = find_indices(abs(gz)>self.inputs.zintensity_threshold)
outliers = np.unique(np.union1d(iidx,np.union1d(tidx,ridx)))
artifactfile,intensityfile,statsfile,normfile = self._get_output_filenames(imgfile,cwd)
# write output to outputfile
np.savetxt(artifactfile, outliers, fmt='%d', delimiter=' ')
np.savetxt(intensityfile, g, fmt='%.2f', delimiter=' ')
if self.inputs.use_norm:
np.savetxt(normfile, normval, fmt='%.4f', delimiter=' ')
file = open(statsfile,'w')
file.write("Stats for:\n")
file.write("Motion file: %s\n" % motionfile)
file.write("Functional file: %s\n" % imgfile)
file.write("Motion:\n")
file.write("Number of Motion Outliers: %d\n"%len(np.union1d(tidx,ridx)))
file.write("Motion (original):\n")
file.write( ''.join(('mean: ',str(np.mean(mc_in,axis=0)),'\n')))
file.write( ''.join(('min: ',str(np.min(mc_in,axis=0)),'\n')))
file.write( ''.join(('max: ',str(np.max(mc_in,axis=0)),'\n')))
file.write( ''.join(('std: ',str(np.std(mc_in,axis=0)),'\n')))
if self.inputs.use_norm:
if self.inputs.use_differences[0]:
file.write("Motion (norm-differences):\n")
else:
file.write("Motion (norm):\n")
file.write( ''.join(('mean: ',str(np.mean(normval,axis=0)),'\n')))
file.write( ''.join(('min: ',str(np.min(normval,axis=0)),'\n')))
file.write( ''.join(('max: ',str(np.max(normval,axis=0)),'\n')))
file.write( ''.join(('std: ',str(np.std(normval,axis=0)),'\n')))
elif self.inputs.use_differences[0]:
file.write("Motion (differences):\n")
file.write( ''.join(('mean: ',str(np.mean(mc,axis=0)),'\n')))
file.write( ''.join(('min: ',str(np.min(mc,axis=0)),'\n')))
file.write( ''.join(('max: ',str(np.max(mc,axis=0)),'\n')))
file.write( ''.join(('std: ',str(np.std(mc,axis=0)),'\n')))
if self.inputs.use_differences[1]:
file.write("Normalized intensity:\n")
else:
file.write("Intensity:\n")
file.write("Number of Intensity Outliers: %d\n"%len(iidx))
file.write( ''.join(('min: ',str(np.min(gz,axis=0)),'\n')))
file.write( ''.join(('max: ',str(np.max(gz,axis=0)),'\n')))
file.write( ''.join(('mean: ',str(np.mean(gz,axis=0)),'\n')))
file.write( ''.join(('std: ',str(np.std(gz,axis=0)),'\n')))
file.close()
def _run_interface(self, runtime):
"""Execute this module.
"""
funcfilelist = filename_to_list(self.inputs.realigned_files)
motparamlist = filename_to_list(self.inputs.realignment_parameters)
for i,imgf in enumerate(funcfilelist):
self._detect_outliers_core(imgf ,motparamlist[i], i, os.getcwd())
runtime.returncode = 0
return runtime
class StimCorrInputSpec(TraitedSpec):
realignment_parameters = InputMultiPath(File(exists=True), mandatory=True,
desc='Names of realignment parameters corresponding to the functional data files')
intensity_values = InputMultiPath(File(exists=True), mandatory=True,
desc='Name of file containing intensity values')
spm_mat_file = File(exists=True, mandatory=True,
desc='SPM mat file (use pre-estimate SPM.mat file)')
concatenated_design = traits.Bool(mandatory=True,
desc='state if the design matrix contains concatenated sessions')
class StimCorrOutputSpec(TraitedSpec):
stimcorr_files = OutputMultiPath(File(exists=True),
desc='List of files containing correlation values')
class StimulusCorrelation(BaseInterface):
"""Determines if stimuli are correlated with motion or intensity
parameters.
Currently this class supports an SPM generated design matrix and
requires intensity parameters. This implies that one must run
ArtifactDetect and :class:`nipype.interfaces.spm.Level1Design`
prior to running this or provide an SPM.mat file and intensity
parameters through some other means.
Examples
--------
>>> sc = StimulusCorrelation()
>>> sc.inputs.realignment_parameters = 'functional.par'
>>> sc.inputs.intensity_values = 'functional.rms'
>>> sc.inputs.spm_mat_file = 'SPM.mat'
>>> sc.inputs.concatenated_design = False
>>> sc.run() # doctest: +SKIP
"""
input_spec = StimCorrInputSpec
output_spec = StimCorrOutputSpec
def _get_output_filenames(self, motionfile, output_dir):
"""Generate output files based on motion filenames
Parameters
----------
motionfile: file/string
Filename for motion parameter file
output_dir: string
output directory in which the files will be generated
"""
(filepath,filename) = os.path.split(motionfile)
(filename,ext) = os.path.splitext(filename)
corrfile = os.path.join(output_dir,''.join(('qa.',filename,'_stimcorr.txt')))
return corrfile
def _stimcorr_core(self,motionfile,intensityfile,designmatrix,cwd=None):
"""
Core routine for determining stimulus correlation
"""
if not cwd:
cwd = os.getcwd()
# read in motion parameters
mc_in = np.loadtxt(motionfile)
g_in = np.loadtxt(intensityfile)
g_in.shape = g_in.shape[0],1
dcol = designmatrix.shape[1]
mccol= mc_in.shape[1]
concat_matrix = np.hstack((np.hstack((designmatrix,mc_in)),g_in))
cm = np.corrcoef(concat_matrix,rowvar=0)
corrfile = self._get_output_filenames(motionfile, cwd)
# write output to outputfile
file = open(corrfile,'w')
file.write("Stats for:\n")
file.write("Stimulus correlated motion:\n%s\n" % motionfile)
for i in range(dcol):
file.write("SCM.%d:"%i)
for v in cm[i,dcol+np.arange(mccol)]:
file.write(" %.2f"%v)
file.write('\n')
file.write("Stimulus correlated intensity:\n%s\n" % intensityfile)
for i in range(dcol):
file.write("SCI.%d: %.2f\n"%(i,cm[i,-1]))
file.close()
def _get_spm_submatrix(self,spmmat,sessidx,rows=None):
"""
Parameters
----------
spmmat: scipy matlab object
full SPM.mat file loaded into a scipy object
sessidx: int
index to session that needs to be extracted.
"""
designmatrix = spmmat['SPM'][0][0].xX[0][0].X
U = spmmat['SPM'][0][0].Sess[0][sessidx].U[0]
if rows is None:
rows = spmmat['SPM'][0][0].Sess[0][sessidx].row[0]-1
cols = spmmat['SPM'][0][0].Sess[0][sessidx].col[0][range(len(U))]-1
outmatrix = designmatrix.take(rows.tolist(),axis=0).take(cols.tolist(),axis=1)
return outmatrix
def _run_interface(self, runtime):
"""Execute this module.
"""
motparamlist = self.inputs.realignment_parameters
intensityfiles = self.inputs.intensity_values
spmmat = sio.loadmat(self.inputs.spm_mat_file)
nrows = []
for i,imgf in enumerate(motparamlist):
sessidx = i
rows=None
if self.inputs.concatenated_design:
sessidx = 0
mc_in = np.loadtxt(motparamlist[i])
rows = np.sum(nrows)+np.arange(mc_in.shape[0])
nrows.append(mc_in.shape[0])
matrix = self._get_spm_submatrix(spmmat,sessidx,rows)
self._stimcorr_core(motparamlist[i],intensityfiles[i],
matrix, os.getcwd())
runtime.returncode=0
return runtime
def _list_outputs(self):
outputs = self._outputs().get()
files = []
for i,f in enumerate(self.inputs.realignment_parameters):
files.insert(i, self._get_output_filenames(f, os.getcwd()))
if files:
outputs['stimcorr_files'] = files
return outputs
| 2.296875 | 2 |
sendsecure/client.py | xmedius/sendsecure-python | 0 | 12794432 | <gh_stars>0
import json
from .utils import *
from .helpers import *
from .exceptions import *
from .json_client import *
class Client:
"""
Gets an API Token for a specific user within a SendSecure enterprise account.
@param enterprise_account:
The SendSecure enterprise account
@param username:
The username of a SendSecure user of the current enterprise account
@param password:
The <PASSWORD> <PASSWORD>
@param device_id:
The unique ID of the device used to get the Token
@param device_name:
The name of the device used to get the Token
@param application_type:
The type/name of the application used to get the Token ("SendSecure Python" will be used by default if empty)
@param otp:
The one-time password of this user (if any)
@param endpoint:
The URL to the SendSecure service ("https://portal.xmedius.com" will be used by default if empty)
@return: (API Token, user ID) to be used for the specified user
"""
@staticmethod
def get_user_token(enterprise_account, username, password, device_id, device_name,
application_type='SendSecure Python', endpoint='https://portal.xmedius.com', one_time_password=''):
url = urljoin([endpoint, 'services', enterprise_account, 'portal/host'])
(status_code, status_line, response_body) = http_get(url, 'text/plain')
if status_code >= 400:
raise SendSecureException(status_code, status_line, response_body)
url = urljoin([response_body, 'api/user_token'])
post_params = {
'permalink': enterprise_account,
'username': username,
'password': password,
'application_type': application_type,
'device_id': device_id,
'device_name': device_name,
'otp': one_time_password
}
(status_code, status_line, response_body) = http_post(url, 'application/json', json.dumps(post_params), 'application/json')
if status_code >= 400:
error_code = status_code
error_message = status_line
try:
j = json.loads(response_body)
error_code = j['code']
error_message = j['message']
except:
pass
raise SendSecureException(error_code, error_message, response_body)
try:
return json.loads(response_body)
except:
raise UnexpectedServerResponseException(500, 'Unexpected Error', '')
"""
Client object constructor.
@param api_token:
The API Token to be used for authentication with the SendSecure service
@param user_id:
The user id of the current user
@param enterprise_account:
The SendSecure enterprise account
@param endpoint:
The URL to the SendSecure service ("https://portal.xmedius.com" will be used by default if empty)
@param locale:
The locale in which the server errors will be returned ("en" will be used by default if empty)
"""
def __init__(self, options):
self.json_client = JsonClient(options)
"""
Retrieves all the current enterprise account's settings specific to a SendSecure Account
@return: All values/properties of the enterprise account's settings specific to SendSecure.
"""
def get_enterprise_settings(self):
result = self.json_client.get_enterprise_settings()
return EnterpriseSettings(result)
"""
Retrieves all available security profiles of the enterprise account for a specific user.
@param user_email:
The email address of a SendSecure user of the current enterprise account
@return: The list of all security profiles of the enterprise account, with all their setting values/properties.
"""
def get_security_profiles(self, user_email):
result = self.json_client.get_security_profiles(user_email)
j = json.loads(result)
return [SecurityProfile(elem) for elem in j['security_profiles']]
"""
Retrieves the default security profile of the enterprise account for a specific user.
A default security profile must have been set in the enterprise account, otherwise
the method will return nothing.
@param user_email:
The email address of a SendSecure user of the current enterprise account
@return: Default security profile of the enterprise, with all its setting values/properties.
"""
def get_default_security_profile(self, user_email):
enterprise_settings = self.get_enterprise_settings()
security_profiles = self.get_security_profiles(user_email)
for p in security_profiles:
if p.id == enterprise_settings.default_security_profile_id:
return p
return None
"""
Pre-creates a SafeBox on the SendSecure system and initializes the Safebox object accordingly.
@param safebox:
A SafeBox object to be finalized by the SendSecure system
@return: The updated SafeBox object with the necessary system parameters (GUID, public encryption key, upload URL)
filled out. Raise SendSecureException if the safebox is already initialize.
"""
def initialize_safebox(self, safebox):
result = self.json_client.new_safebox(safebox.user_email)
return safebox.update_attributes(result)
"""
Uploads the specified file as an Attachment of the specified SafeBox.
@param safebox:
An initialized Safebox object
@param attachment:
An Attachment object - the file to upload to the SendSecure system
@return: The updated Attachment object with the GUID parameter filled out.
"""
def upload_attachment(self, safebox, attachment):
result = self.json_client.upload_file(safebox.upload_url,
attachment.source, attachment.content_type, attachment.filename, attachment.size)
j = json.loads(result)
attachment.guid = j['temporary_document']['document_guid']
return attachment
"""
This actually "Sends" the SafeBox with all content and contact info previously specified.
@param safebox:
A Safebox object already finalized, with security profile, recipient(s),
subject and message already defined, and attachments already uploaded.
@return: Updated Safebox
"""
def commit_safebox(self, safebox):
if safebox.guid is None:
raise SendSecureException(0, 'SafeBox GUID cannot be null', '')
if not safebox.participants:
raise SendSecureException(0, 'Participants cannot be empty', '')
if safebox.security_profile_id is None:
raise SendSecureException(0, 'No Security Profile configured', '')
json_result = self.json_client.commit_safebox(safebox.to_json())
result = json.loads(json_result)
result['is_creation'] = True
return safebox.update_attributes(result)
"""
This method is a high-level combo that initializes the SafeBox, uploads all attachments and commits the SafeBox.
@param safebox:
A non-initialized Safebox object with security profile, recipient(s), subject,
message and attachments (not yet uploaded) already defined.
@return: Updated Safebox
"""
def submit_safebox(self, safebox):
self.initialize_safebox(safebox)
for attachment in safebox.attachments:
self.upload_attachment(safebox, attachment)
if safebox.security_profile_id is None:
safebox.security_profile_id = self.get_default_security_profile(safebox.user_email).id
return self.commit_safebox(safebox)
"""
Retrieves all the current user account's settings specific to SendSecure Account
@return: All values/properties of the user account's settings specific to SendSecure.
"""
def get_user_settings(self):
result = self.json_client.get_user_settings()
return UserSettings(result)
"""
Retrieves all favorites associated to a specific user.
@return: The list of all favorites of the user account, with all their properties.
"""
def get_favorites(self):
json_result = self.json_client.get_favorites()
result = json.loads(json_result)
return [Favorite(params=favorite_params) for favorite_params in result['favorites']]
"""
Create a new favorite associated to a specific user.
@param favorite:
A Favorite object
@return: The updated Favorite
"""
def create_favorite(self, favorite):
if favorite.email is None:
raise SendSecureException(0, 'Favorite email cannot be null', '')
result = self.json_client.create_favorite(favorite.to_json())
return favorite.update_attributes(result)
"""
Update an existing favorite associated to a specific user.
@param favorite:
A Favorite object
@return: The updated Favorite
"""
def update_favorite(self, favorite):
if favorite.id is None:
raise SendSecureException(0, 'Favorite id cannot be null', '')
result = self.json_client.update_favorite(favorite.id, favorite.to_json())
return favorite.update_attributes(result)
"""
Delete contact methods of an existing favorite associated to a specific user.
@param favorite:
A Favorite object
@param contact_method_ids:
An array of contact methods ids
@return: The updated Favorite
"""
def delete_favorite_contact_methods(self, favorite, contact_method_ids):
if favorite.id is None:
raise SendSecureException(0, 'Favorite id cannot be null', '')
favorite.prepare_to_destroy_contact(contact_method_ids)
result = self.json_client.update_favorite(favorite.id, favorite.to_json())
return favorite.update_attributes(result)
"""
Delete an existing favorite associated to a specific user.
@param favorite:
The favorite to be deleted
@return: Nothing
"""
def delete_favorite(self, favorite):
self.json_client.delete_favorite(favorite.id)
"""
Create a new participant for a specific safebox associated to the current user's account,
and add the new participant to the Safebox object.
@param safebox:
A Safebox object
@param participant:
A Participant object
@return: The updated Participant
"""
def create_participant(self, safebox, participant):
if safebox.guid is None:
raise SendSecureException(0, 'SafeBox GUID cannot be null', '')
if participant.email is None:
raise SendSecureException(0, 'Participant email cannot be null', '')
result = self.json_client.create_participant(safebox.guid, participant.to_json())
participant.update_attributes(result)
safebox.participants.append(participant)
return participant
"""
Update an existing participant of a specific safebox associated to the current user's account.
@param safebox:
A Safebox object
@param participant:
A Participant object
@return: The updated Participant
"""
def update_participant(self, safebox, participant):
if safebox.guid is None:
raise SendSecureException(0, 'SafeBox GUID cannot be null', '')
if participant.id is None:
raise SendSecureException(0, 'Participant id cannot be null', '')
result = self.json_client.update_participant(safebox.guid, participant.id, participant.to_json())
return participant.update_attributes(result)
"""
Delete contact methods of an existing participant of a specific safebox associated to the current user's account.
@param safebox:
A Safebox object
@param participant:
A Participant object
@param contact_method_ids:
An array of contact method id
@return: The updated Participant
"""
def delete_participant_contact_methods(self, safebox, participant, contact_method_ids):
if safebox.guid is None:
raise SendSecureException(0, 'SafeBox GUID cannot be null', '')
if participant.id is None:
raise SendSecureException(0, 'Participant id cannot be null', '')
participant.prepare_to_destroy_contact(contact_method_ids)
result = self.json_client.update_participant(safebox.guid, participant.id, participant.to_json())
return participant.update_attributes(result)
"""
Search the recipients for a SafeBox
@param term:
A Search term
@return: The list of recipients that matches the search term
"""
def search_recipient(self, term):
return json.loads(self.json_client.search_recipient(term))
"""
Reply to a specific safebox associated to the current user's account.
@param safebox:
A Safebox object
@param reply:
A reply object
@return: An object containing the request result
"""
def reply(self, safebox, reply):
if safebox.guid is None:
raise SendSecureException(0, 'SafeBox GUID cannot be null', '')
for attachment in reply.attachments:
file_params = safebox._temporary_document(os.path.getsize(attachment.source))
temporary_file = json.loads(self.json_client.new_file(safebox.guid, json.dumps(file_params)))
uploaded_file = json.loads(self.json_client.upload_file(temporary_file['upload_url'], attachment.source,
attachment.content_type, attachment.filename, attachment.size))
reply.document_ids.append(uploaded_file['temporary_document']['document_guid'])
result = self.json_client.reply(safebox.guid, reply.to_json())
return json.loads(result)
"""
Add time to the expiration date of a specific safebox associated to the current user's account.
@param safebox:
A Safebox object
@param value:
Time value to be added to the expiration date
@param unit:
Time unit to be added to the expiration date
@return: An object containing the request result
"""
def add_time(self, safebox, value, unit):
if safebox.guid is None:
raise SendSecureException(0, 'SafeBox GUID cannot be null', '')
add_time_params = { 'safebox': { 'add_time_value': value, 'add_time_unit': unit }}
result = self.json_client.add_time(safebox.guid, json.dumps(add_time_params))
return json.loads(result)
"""
Close a specific safebox associated to the current user's account.
@param safebox:
A Safebox object
@return: An object containing the request result
"""
def close_safebox(self, safebox):
if safebox.guid is None:
raise SendSecureException(0, 'SafeBox GUID cannot be null', '')
result = self.json_client.close_safebox(safebox.guid)
return json.loads(result)
"""
Delete content of a specific safebox associated to the current user's account.
@param safebox:
A Safebox object
@return: An object containing the request result
"""
def delete_safebox_content(self, safebox):
if safebox.guid is None:
raise SendSecureException(0, 'SafeBox GUID cannot be null', '')
result = self.json_client.delete_safebox_content(safebox.guid)
return json.loads(result)
"""
Mark all messages as read of a specific safebox associated to the current user's account.
@param safebox:
A Safebox object
@return: An object containing the request result
"""
def mark_as_read(self, safebox):
if safebox.guid is None:
raise SendSecureException(0, 'SafeBox GUID cannot be null', '')
result = self.json_client.mark_as_read(safebox.guid)
return json.loads(result)
"""
Mark all messages as unread of a specific safebox associated to the current user's account.
@param safebox:
A Safebox object
@return: An object containing the request result
"""
def mark_as_unread(self, safebox):
if safebox.guid is None:
raise SendSecureException(0, 'SafeBox GUID cannot be null', '')
result = self.json_client.mark_as_unread(safebox.guid)
return json.loads(result)
"""
Mark a message as read of a specific safebox associated to the current user's account.
@param safebox:
A Safebox object
@param message:
A Message object
@return: An object containing the request result
"""
def mark_as_read_message(self, safebox, message):
if safebox.guid is None:
raise SendSecureException(0, 'SafeBox GUID cannot be null', '')
json_result = self.json_client.mark_as_read_message(safebox.guid, message.id)
return json.loads(json_result)
"""
Mark a message as unread of a specific safebox associated to the current user's account.
@param safebox:
A Safebox object
@param message:
A Message object
@return: An object containing the request result
"""
def mark_as_unread_message(self, safebox, message):
if safebox.guid is None:
            raise SendSecureException(0, 'SafeBox GUID cannot be null', '')
json_result = self.json_client.mark_as_unread_message(safebox.guid, message.id)
return json.loads(json_result)
"""
Retrieve a specific file url of a specific safebox associated to the current user's account.
@param safebox:
A Safebox object
@param document:
An Attachment object
@return: The file url
"""
def get_file_url(self, safebox, document):
if safebox.guid is None:
raise SendSecureException(0, 'SafeBox GUID cannot be null', '')
if document.guid is None:
raise SendSecureException(0, 'Document GUID cannot be null', '')
json_result = self.json_client.get_file_url(safebox.guid, document.guid, safebox.user_email)
result = json.loads(json_result)
return result['url']
"""
Retrieve the audit record url of a specific safebox associated to the current user's account.
@param safebox:
A Safebox object
@return: The audit record url
"""
def get_audit_record_url(self, safebox):
if safebox.guid is None:
raise SendSecureException(0, 'SafeBox GUID cannot be null', '')
json_result = self.json_client.get_audit_record_url(safebox.guid)
result = json.loads(json_result)
return result['url']
"""
Retrieve the audit record pdf of a specific safebox associated to the current user's account.
@param safebox:
A Safebox object
@return: The audit record pdf stream
"""
def get_audit_record_pdf(self, safebox):
if safebox.guid is None:
raise SendSecureException(0, 'SafeBox GUID cannot be null', '')
url = self.get_audit_record_url(safebox)
return self.json_client.get_audit_record_pdf(url)
"""
Retrieve a filtered list of safeboxes for the current user account.
@param url:
The search url (optional)
@param params:
optional filtering parameters { status: <in_progress, closed, content_deleted or unread>,
search_term: <search_term>, per_page: < ]0, 1000] default = 100>, page: <page to return> }
@return: An object containing the count of found safeboxes, previous page url, the next page url and a list of Safebox objects
"""
def get_safeboxes(self, url=None, search_params={}):
json_result = self.json_client.get_safeboxes(url, search_params)
result = json.loads(json_result)
result['safeboxes'] = [Safebox(params=safebox_params['safebox']) for safebox_params in result['safeboxes']]
return result
"""
Retrieve a specific Safebox by its guid.
@param safebox_guid:
Safebox GUID
@return: A Safebox object
"""
def get_safebox(self, safebox_guid):
safeboxes = self.get_safeboxes()['safeboxes']
for safebox in safeboxes:
if safebox.guid == safebox_guid:
return safebox
"""
Retrieve all info of an existing safebox for the current user account.
@param safebox:
A Safebox object
@param sections:
A string containing the list of sections to be retrieved
@return: The updated Safebox
"""
def get_safebox_info(self, safebox, sections=[]):
if safebox.guid is None:
raise SendSecureException(0, 'SafeBox GUID cannot be null', '')
json_result = self.json_client.get_safebox_info(safebox.guid, ','.join(sections))
result = json.loads(json_result)
return safebox.update_attributes(result['safebox'])
"""
Retrieve all participants info of an existing safebox for the current user account.
@param safebox:
A Safebox object
@return: The list of all participants of the safebox, with all their properties
"""
def get_safebox_participants(self, safebox):
if safebox.guid is None:
raise SendSecureException(0, 'SafeBox GUID cannot be null', '')
json_result = self.json_client.get_safebox_participants(safebox.guid)
result = json.loads(json_result)
return [Participant(params=participant_params) for participant_params in result['participants']]
"""
Retrieve all messages info of an existing safebox for the current user account.
@param safebox:
A Safebox object
@return: The list of all messages of the safebox, with all their properties
"""
def get_safebox_messages(self, safebox):
if safebox.guid is None:
raise SendSecureException(0, 'SafeBox GUID cannot be null', '')
json_result = self.json_client.get_safebox_messages(safebox.guid)
result = json.loads(json_result)
return [Message(message_params) for message_params in result['messages']]
"""
Retrieve all the security options of an existing safebox for the current user account.
@param safebox:
A Safebox object
@return: All values/properties of the security options
"""
def get_safebox_security_options(self, safebox):
if safebox.guid is None:
raise SendSecureException(0, 'SafeBox GUID cannot be null', '')
json_result = self.json_client.get_safebox_security_options(safebox.guid)
result = json.loads(json_result)
return SecurityOptions(params=result['security_options'])
"""
Retrieve all the download activity info of an existing safebox for the current user account.
@param safebox:
A Safebox object
@return: All values/properties of the download activity
"""
def get_safebox_download_activity(self, safebox):
if safebox.guid is None:
raise SendSecureException(0, 'SafeBox GUID cannot be null', '')
json_result = self.json_client.get_safebox_download_activity(safebox.guid)
result = json.loads(json_result)
return DownloadActivity(result['download_activity'])
"""
Retrieve all the event history info of an existing safebox for the current user account.
@param safebox:
A Safebox object
@return: The list of all event history of the safebox, with all their properties
"""
def get_safebox_event_history(self, safebox):
if safebox.guid is None:
raise SendSecureException(0, 'SafeBox GUID cannot be null', '')
json_result = self.json_client.get_safebox_event_history(safebox.guid)
result = json.loads(json_result)
return [EventHistory(event_history_params) for event_history_params in result['event_history']]
"""
Archive a specific safebox.
@param safebox:
The safebox to be archived
@param user_email:
The current user email
@return: An object containing the request result
"""
def archive_safebox(self, safebox, user_email):
if safebox.guid is None:
raise SendSecureException(0, 'SafeBox GUID cannot be null', '')
user_email_json = json.dumps({ 'user_email': user_email })
json_result = self.json_client.archive_safebox(safebox.guid, user_email_json)
return json.loads(json_result)
"""
Remove the tag "archive" from a specific safebox.
@param safebox:
The safebox to be archived
@param user_email:
The current user email
@return: An object containing the request result
"""
def unarchive_safebox(self, safebox, user_email):
if safebox.guid is None:
raise SendSecureException(0, 'SafeBox GUID cannot be null', '')
user_email_json = json.dumps({ 'user_email': user_email })
json_result = self.json_client.unarchive_safebox(safebox.guid, user_email_json)
return json.loads(json_result)
"""
Call to get the list of all the localized messages of a consent group.
@param consent_group_id:
The id of the consent group
@return: The list of all the localized messages
"""
def get_consent_group_messages(self, consent_group_id):
json_result = self.json_client.get_consent_group_messages(consent_group_id)
result = json.loads(json_result)
return ConsentMessageGroup(result["consent_message_group"])
"""
Call to unfollow the SafeBox. By default, all new Safeboxes are "followed"
@param safebox:
A Safebox object
@return: An object containing the request result
"""
def unfollow(self, safebox):
json_result = self.json_client.unfollow(safebox.guid)
result = json.loads(json_result)
return result
"""
Call to follow the SafeBox (opposite of the unfollow call).
@param safebox:
A Safebox object
@return: An object containing the request result
"""
def follow(self, safebox):
json_result = self.json_client.follow(safebox.guid)
result = json.loads(json_result)
return result
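# Illustrative end-to-end sketch (not part of the library; the account values,
# dictionary keys and helper constructors below are assumptions based on the
# helpers module imported above):
#     token_info = Client.get_user_token('acme', 'jdoe', 'password',
#                                        'device-id', 'My Device')
#     client = Client({'api_token': token_info.get('token'),
#                      'user_id': token_info.get('user_id'),
#                      'enterprise_account': 'acme'})
#     safebox = Safebox(user_email='jdoe@acme.example')
#     safebox.subject = 'Quarterly report'
#     safebox.attachments.append(Attachment(source='report.pdf'))
#     safebox = client.submit_safebox(safebox)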
| 2.828125 | 3 |
website/app/api/views.py | yqh231/cloudmusic_api | 4 | 12794433 | import re
from flask import request
from website.app.api import api
from spider.database import *
from website.app.util import JsonSuccess, JsonError, ParamCheck, Param, error
@api.route('/popular_songs_list', endpoint='get_popular_song_list')
@error
@ParamCheck({'type': Param(int),
'offset': Param(int, optional=True),
'limit': Param(int, optional=True)})
def get_popular_song_list(params):
type_ = params['type']
offset = params.get('offset')
limit = params.get('limit')
if not offset:
offset = 0
if not limit:
limit = 20
filters = {
'type': int(type_)
}
result = search_song_list_by_filter(filters, int(offset), int(limit))
data = [{'song_id': item['_id'], 'name': item['song_name'],
'comment_id': item['comment_id'], 'source_url': item['source_url']} for item in result]
return JsonSuccess(data)
@api.route('/popular_song_comments', endpoint='get_popular_song_comments')
@error
@ParamCheck({'comment_id': Param(int)})
def get_popular_song_comments(params):
comment_id = params['comment_id']
filter = {
'_id': comment_id
}
result = search_by_comment_id(filter)
return JsonSuccess(result[0])
@api.route('/songs_list', endpoint='get_chinese_songs_list')
@error
@ParamCheck({'name': Param(str),
'type': Param(int),
'offset': Param(int, optional=True),
'limit': Param(int, optional=True)})
def get_chinese_songs_list(params):
list_name = params['name']
offset = params.get('offset')
limit = params.get('limit')
type_ = int(params['type'])
if not offset:
offset = 0
if not limit:
limit = 20
filters = {
'title': {'$regex': re.compile(re.escape(list_name)), '$options': 'i'}
}
if 1 == type_:
result = search_chinese_lists_by_filter(filters, int(offset), int(limit))
elif 2 == type_:
result = search_janpanese_lists_by_filter(filters, int(offset), int(limit))
else:
        raise Exception('This type value is not supported yet')
data = [{'song_id': item['_id'], 'name': item['song_name'],
'comment_id': item['comment_id'], 'source_url': item['source_url']} for item in result]
return JsonSuccess(data) | 2.578125 | 3 |
src/olympia/stats/tests/test_models.py | Osmose/olympia | 0 | 12794434 | <filename>src/olympia/stats/tests/test_models.py
# -*- coding: utf-8 -*-
import json
from django.core import mail
from olympia.amo.tests import TestCase
from olympia.addons.models import Addon
from olympia.stats.models import Contribution
from olympia.stats.db import StatsDictField
from olympia.users.models import UserProfile
class TestStatsDictField(TestCase):
def test_to_python_none(self):
assert StatsDictField().to_python(None) is None
def test_to_python_dict(self):
assert StatsDictField().to_python({'a': 1}) == {'a': 1}
def test_to_python_json(self):
val = {'a': 1}
assert StatsDictField().to_python(json.dumps(val)) == val
class TestEmail(TestCase):
fixtures = ['base/users', 'base/addon_3615']
def setUp(self):
super(TestEmail, self).setUp()
self.addon = Addon.objects.get(pk=3615)
self.user = UserProfile.objects.get(pk=999)
def make_contribution(self, amount, locale):
return Contribution.objects.create(addon=self.addon, amount=amount,
source_locale=locale)
def test_thankyou_note(self):
self.addon.enable_thankyou = True
self.addon.thankyou_note = u'Thank "quoted". <script>'
self.addon.name = u'Test'
self.addon.save()
cont = self.make_contribution('10', 'en-US')
cont.update(transaction_id='yo',
post_data={'payer_email': '<EMAIL>'})
cont.mail_thankyou()
assert len(mail.outbox) == 1
email = mail.outbox[0]
assert email.to == ['<EMAIL>']
assert '"' not in email.body
assert u'Thank "quoted".' in email.body
assert '<script>' not in email.body
assert '<script>' not in email.body
| 2 | 2 |
lsh_random_projecion.py | alexunder193/Machine-Learning | 0 | 12794435 | <gh_stars>0
import numpy as np
import pandas as pd
from numpy import dot
from numpy import random
import os
import os.path
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.pipeline import make_pipeline
from sklearn.decomposition import TruncatedSVD
from sklearn.metrics.pairwise import cosine_similarity
import time
class HashTable:
def __init__(self, hash_size, inp_dimensions):
self.hash_size = hash_size
self.inp_dimensions = inp_dimensions
self.hash_table = dict()
self.projections = np.random.randn(self.hash_size, inp_dimensions)
def generate_hash(self, inp_vector):
bools = (np.dot(inp_vector, self.projections.T) > 0).astype('int')
return ''.join(bools.astype('str'))
def __setitem__(self, inp_vec, label):
hash_value = self.generate_hash(inp_vec)
self.hash_table[hash_value] = self.hash_table\
.get(hash_value, list()) + [label]
def __getitem__(self, inp_vec):
hash_value = self.generate_hash(inp_vec)
return self.hash_table.get(hash_value, [])
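# Illustrative note (not part of the original script): with hash_size=5 each
# vector is mapped to a 5-character '0'/'1' string formed from the signs of
# its dot products with 5 random hyperplanes, e.g.
#     ht = HashTable(hash_size=5, inp_dimensions=3)
#     bucket_key = ht.generate_hash(np.array([1.0, 0.0, -1.0]))  # e.g. '01101'
# Vectors that are close in angle tend to share buckets, which is what makes
# the candidate lookup in main() cheap.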
def main():
train = pd.read_csv('datasets/q2a/corpusTrain.csv')
train = train[0:50000]
ids_train = train['Id']
contents_train = train['Content']
test = pd.read_csv('datasets/q2a/corpusTest.csv')
test = test[0:1000]
contents_test = test['Content']
vectorizer = TfidfVectorizer(stop_words='english')
vector = vectorizer.fit_transform(contents_train)
array_vector_train = vector.toarray()
components = array_vector_train.shape[1]
vector = vectorizer.transform(contents_test)
array_vector_test = vector.toarray()
hash_table = HashTable(hash_size=5, inp_dimensions=components)
t0 = time.time()
counter = 0
for id in ids_train:
hash_table.__setitem__(array_vector_train[counter], id)
counter = counter + 1
t1 = time.time()
print('Time to build LSH forest for train is: {} '.format(t1-t0))
t2 = time.time()
match_for_every_test = []
for i in range(0, len(array_vector_test)):
key = hash_table.generate_hash(array_vector_test[i])
        # .get() avoids a KeyError when a query vector hashes to an empty bucket.
        keys = list(hash_table.hash_table.get(key, []))
match = 0
for key in keys:
distance = cosine_similarity([array_vector_train[key]], [array_vector_test[i]])
if distance > 0.8:
match = match + 1
match_for_every_test.append(match)
t3 = time.time()
print('Time to query test set is: {} '.format(t3-t2))
count = 0
for item in match_for_every_test:
count = count + item
print('Similar items from test set to train set is : {}'.format(count))
print('Total time is : {}'.format(t3-t0))
if __name__ == "__main__":
main()
| 2.53125 | 3 |
tests/linkedin_login.py | fdjlss/linkedin_hunting | 0 | 12794436 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# linkedin_login.py
from selenium import webdriver
from webdriver_manager.chrome import ChromeDriverManager
import parameters
# install webdrive when needed
driver = webdriver.Chrome(ChromeDriverManager().install())
# driver.get method() will navigate to a page given by the URL address
driver.get('https://www.linkedin.com/login')
# locate email form by element_by_id
username = driver.find_element_by_id('username')
# send_keys() to simulate key strokes
username.send_keys('<EMAIL>')
# locate password form by_class_name
password = driver.find_element_by_id('password')
# send_keys() to simulate key strokes
password.send_keys('<<PASSWORD>>')
# locate submit button by_class_name
log_in_button = driver.find_element_by_class_name('btn__primary--large')
# locate submit button by_xpath
log_in_button = driver.find_element_by_xpath('//*[@type="submit"]')
# .click() to mimic button click
log_in_button.click()
| 3.03125 | 3 |
tasklist/views.py | marcbperez/django-tasklist | 1 | 12794437 | from .models import Task
from django.http import HttpResponse
def index(request):
collection = Task.objects.all()
return HttpResponse(collection)
| 1.609375 | 2 |
parsifal/apps/activities/migrations/0003_auto_20210906_0158.py | ShivamPytho/parsifal | 342 | 12794438 | <reponame>ShivamPytho/parsifal
# Generated by Django 3.2.6 on 2021-09-06 01:58
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('reviews', '0035_auto_20210829_0005'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('activities', '0002_alter_activity_id'),
]
operations = [
migrations.AlterField(
model_name='activity',
name='activity_type',
field=models.CharField(choices=[('F', 'Follow'), ('C', 'Comment'), ('S', 'Star')], max_length=1, verbose_name='type'),
),
migrations.AlterField(
model_name='activity',
name='content',
field=models.CharField(blank=True, max_length=500, verbose_name='content'),
),
migrations.AlterField(
model_name='activity',
name='date',
field=models.DateTimeField(auto_now_add=True, verbose_name='date'),
),
migrations.AlterField(
model_name='activity',
name='from_user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='from user'),
),
migrations.AlterField(
model_name='activity',
name='review',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='reviews.review', verbose_name='review'),
),
migrations.AlterField(
model_name='activity',
name='to_user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL, verbose_name='to user'),
),
]
| 1.6875 | 2 |
py-data/peewee/problems/api-related/1/correct-usages/SqliteDatabase.py | ualberta-smr/NFBugs | 3 | 12794439 | <gh_stars>1-10
import datetime
import decimal
import hashlib
import logging
import operator
import re
import sys
import threading
import uuid
from bisect import bisect_left
from bisect import bisect_right
from collections import deque
from collections import namedtuple
import sqlite3
try:
from collections import OrderedDict
except ImportError:
OrderedDict = dict
from copy import deepcopy
from functools import wraps
from inspect import isclass
class SqliteDatabase(Database):
def _connect(self, database, **kwargs):
conn = sqlite3.connect(database, **kwargs)
try:
            # ...
            pass
except:
conn.close()
raise
return conn
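    # Illustrative sketch, not from the peewee source: the point of this "correct
    # usage" excerpt is to close the connection and re-raise if anything after
    # connect() fails. A version with real post-connect setup inside the try block
    # would look roughly like:
    #
    #     def _connect(self, database, **kwargs):
    #         conn = sqlite3.connect(database, **kwargs)
    #         try:
    #             conn.execute('PRAGMA foreign_keys = ON')
    #         except:
    #             conn.close()
    #             raise
    #         return conn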
| 1.921875 | 2 |
urizen/generators/locations/lfd_worm_simple.py | misagai/urizen | 0 | 12794440 | #!/usr/bin/python3
import random
from urizen.core.map import Map
NORTH = 'N'
SOUTH = 'S'
EAST = 'E'
WEST = 'W'
class LFD_WormSimpleFactory(object):
def generate(self, w, h, length=None, turn_chance=0.4):
if not length:
length = int(w*h/2)
return self._gen_main(w, h, length, turn_chance)
def _gen_main(self, xsize, ysize, length, turn_chance=0.4):
M = Map(xsize, ysize, fill_symbol='#')
worm_x = random.randint(int(xsize * 0.3), int(xsize * 0.6))
worm_y = random.randint(int(ysize * 0.3), int(ysize * 0.6))
move = random.choice([NORTH, SOUTH, EAST, WEST])
for _ in range(length):
worm_x, worm_y, move = self._move_worm(M, worm_x, worm_y, move, turn_chance)
return M
def _move_worm(self, M, x, y, move, turn_chance):
self._dig_cell(M, x, y)
if random.random() > turn_chance:
move = random.choice([NORTH, SOUTH, EAST, WEST])
xsize, ysize = M.get_size()
if x == xsize - 2 and move == EAST:
move = WEST
elif x == 1 and move == WEST:
move = EAST
elif y == ysize - 2 and move == SOUTH:
move = NORTH
elif y == 1 and move == NORTH:
move = SOUTH
if move == NORTH:
new_state = [x, y - 1]
elif move == SOUTH:
new_state = [x, y + 1]
elif move == EAST:
new_state = [x + 1, y]
else:
new_state = [x - 1, y]
new_state.append(move)
return new_state
def _dig_cell(self, M, x, y):
try:
M.cells[y][x].symbol = '.'
except IndexError:
pass
LFD_WormSimple = LFD_WormSimpleFactory() | 3.21875 | 3 |
powerline_owmweather/__init__.py | suoto/powerline-owmweather | 0 | 12794441 | from .weather import weather
| 1.195313 | 1 |
01/script_2.py | FiniteSingularity/aoc-2021 | 0 | 12794442 | with open('./input', encoding='utf8') as file:
measurements = [int(value) for value in file]
last = 99999
increasing = 0
for i, measurement in enumerate(measurements):
if i > 1:
sliding_sum = sum(measurements[i-2:i+1])
if sliding_sum > last:
increasing += 1
last = sliding_sum
print(increasing)
| 3.5 | 4 |
ch29/globalnonlocal.py | eroicaleo/LearningPython | 1 | 12794443 | X = 11
def g1():
print(X)
def g2():
global X
X = 22
def h1():
X = 33
def nested():
print(X)
nested()
def h2():
X = 33
def nested():
nonlocal X
X = 44
nested()
print(X)
if __name__ == '__main__':
g1()
g2()
g1()
h1()
h2()
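    # Running this module should print 11, 22, 33, 44, one value per line:
    # g1() reads the module-global X (11), g2() rebinds that global to 22,
    # h1()'s nested() closes over the enclosing local X (33), and h2()'s
    # nonlocal assignment rebinds its enclosing local to 44 before printing.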
| 3.390625 | 3 |
osg_configure/configure_modules/bosco.py | brianhlin/osg-configure | 1 | 12794444 | <gh_stars>1-10
"""
Module to handle attributes related to the bosco jobmanager
configuration
"""
import errno
import os
import logging
import subprocess
import pwd
import shutil
import stat
import re
from osg_configure.modules import utilities
from osg_configure.modules import configfile
from osg_configure.modules import validation
from osg_configure.modules.jobmanagerconfiguration import JobManagerConfiguration
__all__ = ['BoscoConfiguration']
class BoscoConfiguration(JobManagerConfiguration):
"""Class to handle attributes related to Bosco job manager configuration"""
SSH_CONFIG_SECTION_BEGIN = "### THIS SECTION MANAGED BY OSG-CONFIGURE\n"
SSH_CONFIG_SECTION_END = "### END OF SECTION MANAGED BY OSG-CONFIGURE\n"
def __init__(self, *args, **kwargs):
# pylint: disable-msg=W0142
super().__init__(*args, **kwargs)
self.logger = logging.getLogger(__name__)
self.log('BoscoConfiguration.__init__ started')
# dictionary to hold information about options
self.options = {'endpoint':
configfile.Option(name='endpoint',
                                              required=configfile.Option.MANDATORY),
'batch':
configfile.Option(name='batch',
                                              required=configfile.Option.MANDATORY),
'users':
configfile.Option(name='users',
                                              required=configfile.Option.MANDATORY),
'ssh_key':
configfile.Option(name='ssh_key',
                                              required=configfile.Option.MANDATORY),
'install_cluster':
configfile.Option(name='install_cluster',
required=configfile.Option.OPTIONAL,
default_value="if_needed"),
'max_jobs':
configfile.Option(name='max_jobs',
                                              required=configfile.Option.OPTIONAL,
default_value=1000),
'edit_ssh_config':
configfile.Option(name='edit_ssh_config',
required=configfile.Option.OPTIONAL,
opt_type=bool,
default_value=True),
'override_dir':
configfile.Option(name='override_dir',
required=configfile.Option.OPTIONAL,
default_value='')}
self.config_section = "BOSCO"
self.log("BoscoConfiguration.__init__ completed")
def parse_configuration(self, configuration):
"""Try to get configuration information from ConfigParser or SafeConfigParser object given
by configuration and write recognized settings to attributes dict
"""
super().parse_configuration(configuration)
self.log('BoscoConfiguration.parse_configuration started')
self.check_config(configuration)
if not configuration.has_section(self.config_section):
self.log('Bosco section not found in config file')
self.log('BoscoConfiguration.parse_configuration completed')
return
if not self.set_status(configuration):
self.log('BoscoConfiguration.parse_configuration completed')
return True
self.get_options(configuration, ignore_options=['enabled'])
# pylint: disable-msg=W0613
def check_attributes(self, attributes):
"""Check attributes currently stored and make sure that they are consistent"""
self.log('BoscoConfiguration.check_attributes started')
attributes_ok = True
if not self.enabled:
self.log('Bosco not enabled, returning True')
self.log('BoscoConfiguration.check_attributes completed')
return attributes_ok
if self.ignored:
self.log('Ignored, returning True')
self.log('BoscoConfiguration.check_attributes completed')
return attributes_ok
if self.options['batch'].value not in ['pbs', 'lsf', 'sge', 'condor', 'slurm']:
attributes_ok = False
self.log("Batch attribute is not valid: %s" %
(self.options['batch'].value),
option='batch',
section=self.config_section,
level=logging.ERROR)
# TODO: check if the ssh_key has the correct permissions!
if not validation.valid_file(self.options['ssh_key'].value):
attributes_ok = False
self.log("ssh_key given is not a file: %s" %
(self.options['ssh_key'].value),
option='ssh_key',
section=self.config_section,
level=logging.ERROR)
if not validation.valid_integer(self.options['max_jobs'].value):
attributes_ok = False
self.log("max_jobs is not an integer: %s" %
(self.options['max_jobs'].value),
option='max_jobs',
section=self.config_section,
level=logging.ERROR)
        # Split the users, comma separated
split_users = self.options['users'].value.split(',')
for user in split_users:
if not validation.valid_user(user.strip()):
attributes_ok = False
self.log("%s is not a valid user" %
(user.strip()),
option='users',
section=self.config_section,
level=logging.ERROR)
# TODO: validate list of usernames
endpoint = self.options['endpoint'].value
if len(endpoint.split('@')) != 2:
attributes_ok = False
self.log("endpoint not in user@host format: %s" %
endpoint,
option='endpoint',
section=self.config_section,
level=logging.ERROR)
if self.opt_val("install_cluster") not in ["always", "never", "if_needed"]:
self.log("install_cluster attribute is not valid: %s" %
self.opt_val("install_cluster"),
option="install_cluster",
section=self.config_section,
level=logging.ERROR)
self.log('BoscoConfiguration.check_attributes completed')
return attributes_ok
def configure(self, attributes):
"""Configure installation using attributes"""
self.log('BoscoConfiguration.configure started')
if not self.enabled:
self.log('Bosco not enabled, returning True')
self.log('BoscoConfiguration.configure completed')
return True
if self.ignored:
self.log("%s configuration ignored" % self.config_section,
level=logging.WARNING)
self.log('BoscoConfiguration.configure completed')
return True
# Do all the things here!
# For each user, install bosco.
for username in self.options['users'].value.split(","):
username = username.strip()
if not self._installBosco(username):
self.log('Installation of Bosco failed', level=logging.ERROR)
return False
# Step 3. Configure the routes so the default route will go to the Bosco
# installed remote cluster.
self._write_route_config_vars()
if self.htcondor_gateway_enabled:
self.write_htcondor_ce_sentinel()
self.log('BoscoConfiguration.configure completed')
return True
def _installBosco(self, username):
"""
Install Bosco on the remote cluster for a given username
"""
# First, get the uid of the username so we can seteuid
try:
user_info = pwd.getpwnam(username)
except KeyError as e:
self.log("Error finding username: %s on system." % username, level=logging.ERROR)
return False
user_name = user_info.pw_name
user_home = user_info.pw_dir
user_uid = user_info.pw_uid
user_gid = user_info.pw_gid
# Copy the ssh key to the user's .ssh directory
ssh_key = self.options["ssh_key"].value
ssh_key_loc = os.path.join(user_home, ".ssh", "bosco_ssh_key")
try:
os.mkdir(os.path.join(user_home, ".ssh"))
except OSError as err:
if err.errno != errno.EEXIST:
raise
try:
if not os.path.exists(ssh_key_loc) or not os.path.samefile(ssh_key, ssh_key_loc):
shutil.copy(ssh_key, ssh_key_loc)
except OSError as err:
self.log("Error copying SSH key to %s: %s" % (ssh_key_loc, err), level=logging.ERROR)
return False
os.chmod(ssh_key_loc, stat.S_IRUSR | stat.S_IWUSR)
if self.opt_val("edit_ssh_config"):
self.edit_ssh_config(ssh_key_loc, user_home, user_name)
# Change the ownership of everything to the user
# https://stackoverflow.com/questions/2853723/whats-the-python-way-for-recursively-setting-file-permissions
path = os.path.join(user_home, ".ssh")
for root, dirs, files in os.walk(path):
for momo in dirs:
os.chown(os.path.join(root, momo), user_uid, user_gid)
for momo in files:
os.chown(os.path.join(root, momo), user_uid, user_gid)
os.chown(path, user_uid, user_gid)
if self.opt_val("install_cluster") == "never":
return True
return self._run_bosco_cluster(user_gid, user_home, user_name, user_uid)
def _run_bosco_cluster(self, user_gid, user_home, user_name, user_uid):
# Function to demote to a specified uid and gid
def demote(uid, gid):
def result():
os.setgid(gid)
os.setuid(uid)
return result
try:
# Set the user home directory
env = os.environ.copy()
env['HOME'] = user_home
env['LOGNAME'] = user_name
env['USER'] = user_name
endpoint = self.opt_val("endpoint")
batch = self.opt_val("batch")
if self.opt_val("install_cluster") == "if_needed":
# Only install if it's not in the clusterlist
cmd = ["bosco_cluster", "-l"]
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
preexec_fn=demote(user_uid, user_gid), env=env,
encoding="latin-1")
stdout, stderr = process.communicate()
returncode = process.returncode
if returncode == 2:
self.log("Bosco clusterlist empty", level=logging.DEBUG)
elif returncode == 0:
self.log("Bosco clusterlist:\n%s" % stdout, level=logging.DEBUG)
# Looking for a line like "<EMAIL>/pbs"
pattern = re.compile(r"^%s/%s" % (re.escape(endpoint),
re.escape(batch)), re.MULTILINE)
if pattern.search(stdout):
self.log("Entry found in clusterlist", level=logging.DEBUG)
return True
else:
self.log("bosco_cluster -l failed with unexpected exit code %d" % returncode, level=logging.ERROR)
self.log("stdout:\n%s" % stdout, level=logging.ERROR)
self.log("stderr:\n%s" % stderr, level=logging.ERROR)
return False
# Run bosco cluster to install the remote cluster
install_cmd = ["bosco_cluster"]
override_dir = self.opt_val('override_dir')
if override_dir:
install_cmd += ['-o', override_dir]
install_cmd += ["-a", endpoint, batch]
self.log("Bosco command to execute: %s" % install_cmd, level=logging.DEBUG)
process = subprocess.Popen(install_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
preexec_fn=demote(user_uid, user_gid), env=env,
encoding="latin-1")
stdout, stderr = process.communicate()
returncode = process.returncode
if returncode:
self.log("Bosco installation command failed with exit code %i" % returncode, level=logging.ERROR)
self.log("stdout:\n%s" % stdout, level=logging.ERROR)
self.log("stderr:\n%s" % stderr, level=logging.ERROR)
return False
else:
self.log("Bosco installation successful", level=logging.DEBUG)
self.log("stdout:\n%s" % stdout, level=logging.DEBUG)
self.log("stderr:\n%s" % stderr, level=logging.DEBUG)
except Exception as e:
self.log("Error in bosco installation: %s" % e, level=logging.ERROR)
return False
return True
def edit_ssh_config(self, ssh_key_loc, local_user_home, local_user_name):
# Add a section to .ssh/config for this host
config_path = os.path.join(local_user_home, ".ssh", "config")
# Split the entry point by the "@"
endpoint_user_name, endpoint_host = self.options["endpoint"].value.split('@')
host_config = """
Host %(endpoint_host)s
HostName %(endpoint_host)s
User %(endpoint_user_name)s
IdentityFile %(ssh_key_loc)s
""" % locals()
text_to_add = "%s%s%s" % (self.SSH_CONFIG_SECTION_BEGIN, host_config, self.SSH_CONFIG_SECTION_END)
if not os.path.exists(config_path):
utilities.atomic_write(config_path, text_to_add)
return
config_contents = ""
with open(config_path, "r", encoding="latin-1") as f:
config_contents = f.read()
section_re = re.compile(r"%s.+?%s" % (re.escape(self.SSH_CONFIG_SECTION_BEGIN), re.escape(self.SSH_CONFIG_SECTION_END)),
re.MULTILINE | re.DOTALL)
host_re = re.compile(r"^\s*Host\s+%s\s*$" % re.escape(endpoint_host), re.MULTILINE)
if section_re.search(config_contents):
config_contents = section_re.sub(text_to_add, config_contents)
self.logger.debug("osg-configure section found in %s", config_path)
elif host_re.search(config_contents):
self.logger.info("Host %s already found in %s but not in an osg-configure section. Not modifying it.", endpoint_host, config_path)
return
else:
config_contents += "\n" + text_to_add
utilities.atomic_write(config_path, config_contents)
def _write_route_config_vars(self):
"""
Write condor-ce config attributes for the bosco job route. Sets values for:
- BOSCO_RMS
- BOSCO_ENDPOINT
"""
contents = utilities.read_file(self.HTCONDOR_CE_CONFIG_FILE,
default="# This file is managed by osg-configure\n")
contents = utilities.add_or_replace_setting(contents, "BOSCO_RMS", self.options['batch'].value,
quote_value=False)
contents = utilities.add_or_replace_setting(contents, "BOSCO_ENDPOINT", self.options['endpoint'].value,
quote_value=False)
utilities.atomic_write(self.HTCONDOR_CE_CONFIG_FILE, contents)
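    # Illustrative example, not from the original module: with batch = "pbs" and
    # endpoint = "user@cluster.example.edu" (hypothetical values), the two calls
    # above are expected to leave lines roughly like these in the HTCondor-CE
    # config file:
    #
    #     BOSCO_RMS = pbs
    #     BOSCO_ENDPOINT = user@cluster.example.edu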
def _search_config(self, host, config_path):
"""
Search the ssh config file for exactly this host
Returns: true - if host section found
false - if host section not found
"""
if not os.path.exists(config_path):
return False
        host_re = re.compile(r"^\s*Host\s+%s\s*$" % re.escape(host))
with open(config_path, "r", encoding="latin-1") as f:
for line in f:
if host_re.search(line):
return True
return False
| 2.09375 | 2 |
tlidb/TLiDB/datasets/TLiDB_dataset.py | alon-albalak/TLiDB | 0 | 12794445 | import os
import json
from urllib.request import urlopen
from io import BytesIO
from zipfile import ZipFile
import numpy as np
from torch.utils.data import Dataset
def download_and_unzip(url, extract_to='.'):
print(f"Waiting for response from {url}")
http_response = urlopen(url)
print(f"Downloading data from {url}")
zipfile = ZipFile(BytesIO(http_response.read()))
zipfile.extractall(path=extract_to)
def load_dataset_local(name, dataset_folder):
ds = {}
for root, dirs, files in os.walk(f"{dataset_folder}/TLiDB_{name}"):
for file in files:
if file.endswith(".json") and file!="sample_format.json":
ds[file[:-5]] = json.load(open(f"{root}/{file}"))
if len(ds.keys()) == 1:
ds = ds[list(ds.keys())[0]]
return ds
def load_dataset(name, dataset_folder, url):
# download and unzip dataset if needed
if f"TLiDB_{name}" not in os.listdir(dataset_folder):
assert(url is not None), "Must provide a url to download from"
download_and_unzip(url, dataset_folder)
print(f"Extracted files to {os.path.join(dataset_folder,name)}")
ds = load_dataset_local(name, dataset_folder)
return ds
def load_split_ids(name, dataset_folder, split, few_shot_percent=None):
if f"TLiDB_{name}" not in os.listdir(dataset_folder):
raise ValueError("Dataset not found")
if few_shot_percent and split!="test":
ids_file = f"{dataset_folder}/TLiDB_{name}/TTiDB_{few_shot_percent}_percent_few_shot_{split}_ids.txt"
else:
ids_file = f"{dataset_folder}/TLiDB_{name}/TTiDB_{split}_ids.txt"
with open(ids_file) as f:
ids = f.read().splitlines()
return ids
class TLiDB_Dataset(Dataset):
"""
Abstract dataset class for all TLiDB datasets
"""
def __init__(self, dataset_name, task, model_type, max_dialogue_length, dataset_folder):
super().__init__()
self.dataset = load_dataset(dataset_name, dataset_folder, self.url)
self._task = task
task_metadata = self.dataset['metadata']['task_metadata']
self.task_labels = []
self._max_dialogue_length = max_dialogue_length
self._model_type = model_type
if task in task_metadata and 'labels' in task_metadata[task]:
self.task_labels = task_metadata[task]['labels']
if task == "response_generation":
self.metrics = ['token_f1', 'bleu', 'bert_score', 'distinct_ngrams']
self.metric_kwargs = {
"bleu": [{"ngram_order": 1}, {"ngram_order": 2}, {"ngram_order": 3}, {"ngram_order": 4}],
"distinct_ngrams": [{"ngram_order": 1}, {"ngram_order": 2}, {"ngram_order": 3}]
}
self._collate = self._collate_response_generation
else:
self.metrics = task_metadata[task]['metrics']
self.metric_kwargs = task_metadata[task].get("metric_kwargs", dict())
if model_type == "Encoder":
self._collate = self._collate_encoder
elif model_type == "Decoder":
self._collate = self._collate_decoder
elif model_type == "EncoderDecoder":
self._collate = self._collate_encoderdecoder
else:
raise ValueError(f"{model_type} is not a valid algorithm type")
@property
def dataset_name(self):
return self._dataset_name
@property
def tasks(self):
return self._tasks
@property
def task(self):
return self._task
@property
def task_metadata(self):
return self._task_metadata
@property
def url(self):
return self._url
@property
def max_dialogue_length(self):
return self._max_dialogue_length
@property
def model_type(self):
return self._model_type
@property
def collate(self):
"""
Returns collate function to be used with dataloader
By default returns None -> uses default torch collate function
"""
return getattr(self, "_collate", None)
@property
def y_array(self):
"""
Targets for the model to predict, can be labels for classification or string for generation tasks
"""
return self._y_array
@property
def y_size(self):
"""
Number of elements in the target
For standard classification and text generation, y_size = 1
For multi-class or multi-task prediction tasks, y_size > 1
"""
return self._y_size
@property
def num_classes(self):
"""
Returns number of classes in the dataset
"""
return getattr(self, "_num_classes", None)
@property
def metadata_fields(self):
"""
Returns the fields that are stored in the metadata
Metadata should always contain the domains
If it is a classification task, then metadata should also contain the classes
"""
return self._metadata_fields
@property
def metadata_array(self):
"""
Returns the metadata array
"""
return self._metadata_array
def get_metadata_field(self, field):
return self.metadata_array[self.metadata_fields.index(field)]
def __getitem__(self, idx):
"""
Returns a single sample from the dataset
"""
x = self.get_input(idx)
y = self.get_target(idx)
m = self.get_metadata(idx)
return x, y, m
def get_input(self, idx):
return self._input_array[idx]
def get_target(self, idx):
return self.y_array[idx]
def get_metadata(self, idx):
return {}
def _collate(self, batch):
return NotImplementedError
def _collate_encoder(self, batch):
return NotImplementedError
def _collate_decoder(self, batch):
return NotImplementedError
def _collate_encoderdecoder(self, batch):
return NotImplementedError
def __len__(self):
"""
Returns the length of the dataset
"""
return len(self.y_array)
def _truncate_dialogue(self, input):
"""
Truncates the dialogue to the max dialogue length
"""
if self.max_dialogue_length:
dialogue = self._convert_dialogue_to_string(input)
while len(dialogue.split()) > self.max_dialogue_length:
input = input[1:]
dialogue = self._convert_dialogue_to_string(input)
return input
def _convert_dialogue_to_string(self, input):
dialogue = ""
for (speaker, utt) in input:
if speaker:
dialogue += f"{speaker}: "
dialogue += f"{utt} "
return dialogue[:-1]
def _join_strings(self, *args):
return " ".join(args)
def _load_response_generation_task(self, task, split_ids):
for datum in self.dataset['data']:
if datum['dialogue_id'] in split_ids:
dialogue = []
for turn in datum['dialogue']:
truncated_dialogue = self._truncate_dialogue(dialogue)
if turn['speakers']:
str_dialogue = self._convert_dialogue_to_string(truncated_dialogue)
str_dialogue += f" {' '.join(turn['speakers'])}: "
str_dialogue = str_dialogue.lstrip()
self._input_array.append(str_dialogue)
self._y_array.append(turn['utterance'])
dialogue.append([" ".join(turn['speakers']), turn['utterance']])
def _collate_response_generation(self, batch):
X, y, metadata = [], [], {}
for item in batch:
X.append(item[0])
y.append(item[1])
for k, v in item[2].items():
if k not in metadata:
                    metadata[k] = []
metadata[k].append(v)
return X, y, metadata
def random_subsample(self, frac=1.0):
"""
Subsamples the dataset
Args:
- frac (float): Fraction of the dataset to keep
"""
if frac < 1.0:
num_to_retain = int(self.y_size * frac)
if num_to_retain == 0:
return
idxs_to_retain = np.sort(np.random.permutation(len(self))[:num_to_retain]).tolist()
subsampled_input_array, subsampled_y_array, subsampled_metadata_array = [], [], []
for idx in idxs_to_retain:
input_item, y_item, metadata_item = self.__getitem__(idx)
subsampled_input_array.append(input_item)
subsampled_y_array.append(y_item)
subsampled_metadata_array.append(metadata_item)
self._input_array = subsampled_input_array
self._y_array = subsampled_y_array
metadata_iterated = list(metadata_item.keys())
metadata_not_iterated = [metadata_field for metadata_field in self.metadata_fields if metadata_field not in metadata_iterated]
subsampled_metadata_array = [subsampled_metadata_array]
for metadata_field in metadata_not_iterated:
subsampled_metadata_array.append(self.get_metadata_field(metadata_field))
self._metadata_array = subsampled_metadata_array
self._metadata_fields = metadata_iterated+metadata_not_iterated
self._y_size = num_to_retain
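# Illustrative sketch, not part of the original file: a concrete dataset would
# subclass TLiDB_Dataset, define the class-level metadata read by the properties
# above (_dataset_name, _tasks, _url), and populate _input_array / _y_array.
# Every name below ("my_dialogue", the URL, the split) is made up.
#
#     class MyDialogueDataset(TLiDB_Dataset):
#         _dataset_name = "my_dialogue"
#         _tasks = ["response_generation"]
#         _url = "https://example.com/TLiDB_my_dialogue.zip"
#
#         def __init__(self, task, model_type, max_dialogue_length, dataset_folder, split="train"):
#             super().__init__(self._dataset_name, task, model_type,
#                              max_dialogue_length, dataset_folder)
#             self._input_array, self._y_array = [], []
#             self._metadata_fields, self._metadata_array = [], []
#             split_ids = load_split_ids(self._dataset_name, dataset_folder, split)
#             self._load_response_generation_task(task, split_ids)
#             self._y_size = len(self._y_array)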
| 2.6875 | 3 |
sqlalchemy_django/middleware.py | jayvdb/sqlalchemy_django | 0 | 12794446 | <gh_stars>0
# -*- coding: utf-8 -*-
'''
Created on 2017-6-22
@author: hshl.ltd
'''
# https://blndxp.wordpress.com/2016/03/04/django-get-current-user-anywhere-in-your-code-using-a-middleware/
from __future__ import absolute_import, division, print_function, unicode_literals
try:
from threading import local
except ImportError:
from django.utils._threading_local import local
_thread_locals = local()
def get_current_request():
""" returns the request object for this thread """
return getattr(_thread_locals, "request", None)
def get_current_user():
""" returns the current user, if exist, otherwise returns None """
request = get_current_request()
if request:
return getattr(request, "user", None)
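# Illustrative sketch, not part of the original module: with SqlAlchemyMiddleware
# enabled in the project's middleware settings, the helpers above let code with no
# access to the request (signals, model methods, plain functions) read the current
# user. The model below is entirely hypothetical.
#
#     from sqlalchemy_django.middleware import get_current_user
#
#     class Note(models.Model):
#         created_by = models.ForeignKey(settings.AUTH_USER_MODEL, null=True,
#                                        on_delete=models.SET_NULL)
#
#         def save(self, *args, **kwargs):
#             if self.created_by_id is None:
#                 user = get_current_user()   # None outside a request cycle
#                 if user is not None:
#                     self.created_by = user
#             super().save(*args, **kwargs)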
class SqlAlchemyMiddleware(object):
""" Simple middleware that adds the request object in thread local storage."""
def process_request(self, request):
_thread_locals.request = request
def process_response(self, request, response):
return response
"""
if hasattr(_thread_locals, 'request'):
del _thread_locals.request
return response
"""
| 2.34375 | 2 |
lockbot/controllers/ping.py | preyneyv/iot-door-opener | 0 | 12794447 | from starlette.responses import PlainTextResponse
def ping(_):
return PlainTextResponse('')
| 1.382813 | 1 |
fuzzinator/igalia/fuzzinator/call/__init__.py | pmatos/jsc32-fuzz | 0 | 12794448 | <gh_stars>0
# Copyright (c) 2021 <NAME>, Igalia S.L.
# Copyright (c) 2020 <NAME>, Igalia S.L.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.
from .remotefile_writer_decorator import RemoteFileWriterDecorator
from .subprocess_remotecall import SubprocessRemoteCall
from .subprocess_jsccall import SubprocessJSCCall
from .jsc_gdb_backtrace_decorator import JSCGdbBacktraceDecorator
try:
from .test_runner_subprocess_remotecall import TestRunnerSubprocessRemoteCall
except ImportError:
pass
| 1.109375 | 1 |
download_summaries.py | charlesjlee/WikiNovelPlots | 0 | 12794449 | <filename>download_summaries.py
import pandas as pd
import requests
import sys
from tenacity import retry, wait_exponential, stop_after_attempt
from tqdm import tqdm
tqdm.pandas()
pd.options.display.max_columns = None
def looks_like_plot(s):
# effective for novels according to https://github.com/markriedl/WikiPlots/issues/1
return any(x in s.lower() for x in ('plot', 'summary', 'synopsis'))
@retry(wait=wait_exponential(multiplier=1, min=1, max=60), stop=stop_after_attempt(10))
def get_plot_from_pageid(row):
api_route_prefix = f'https://en.wikipedia.org/w/api.php?action=parse&pageid={row.pageid}&format=json&maxlag=5'
request = requests.get(f'{api_route_prefix}&prop=sections').json()
if 'error' in request:
if request['error']['code'] == 'nosuchpageid':
return ''
# find first `line` that looks like a plot
for section in request['parse']['sections']:
if looks_like_plot(section['line']):
# get that section's text
            data = requests.get(f"{api_route_prefix}&prop=text&section={section['index']}").json()
return data['parse']['text']['*']
return ''
if __name__ == "__main__":
petscan_file_name = sys.argv[1] if len(sys.argv) == 2 else 'petscan_psid_21520280_20220223.csv'
df = pd.read_csv(petscan_file_name, escapechar='\\')
df = df[['title', 'pageid']]
df['summary'] = df.progress_apply(get_plot_from_pageid, axis=1)
df.to_pickle('summaries.pkl', compression='xz')
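    # Illustrative follow-up, not in the original script: the MediaWiki parse API
    # returns the matched section as rendered HTML, so the 'summary' column holds
    # markup. If plain text is wanted, one option (assuming BeautifulSoup is
    # installed) is to strip tags before pickling:
    #
    #     from bs4 import BeautifulSoup
    #     df['summary'] = df['summary'].apply(
    #         lambda html: BeautifulSoup(html, 'html.parser').get_text() if html else '')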
| 2.96875 | 3 |
kader/migrations/0004_auto_20180210_2104.py | hanbei/kdmanager | 0 | 12794450 | <reponame>hanbei/kdmanager<gh_stars>0
# Generated by Django 2.0.1 on 2018-02-10 21:04
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('kader', '0003_fight_tournament'),
]
operations = [
migrations.CreateModel(
name='Training',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateField()),
('attended', models.ManyToManyField(to='kader.Member')),
('fights', models.ManyToManyField(to='kader.Fight')),
],
),
migrations.RemoveField(
model_name='attendance',
name='attended',
),
migrations.RemoveField(
model_name='tournament',
name='fights',
),
migrations.DeleteModel(
name='Attendance',
),
migrations.DeleteModel(
name='Tournament',
),
]
| 1.765625 | 2 |