max_stars_repo_path (string, len 3-269) | max_stars_repo_name (string, len 4-119) | max_stars_count (int64, 0-191k) | id (string, len 1-7) | content (string, len 6-1.05M) | score (float64, 0.23-5.13) | int_score (int64, 0-5)
---|---|---|---|---|---|---
app/util/auth2.py | FSU-ACM/Contest-Server | 8 | 6400 | """ util.auth2: Authentication tools
This module is based off of util.auth, except with the action
paradigm removed.
"""
from flask import session
from app.models import Account
from app.util import course as course_util
# Session keys
SESSION_EMAIL = 'email'
def create_account(email: str, password: str, first_name: str,
                   last_name: str, fsuid: str, course_list: list = None):
"""
Creates an account for a single user.
:email: Required, the email address of the user.
:password: Required, user's chosen password.
:first_name: Required, user's first name.
:last_name: Required, user's last name.
:fsuid: Optional, user's FSUID.
:course_list: Optional, courses being taken by user
:return: Account object.
"""
account = Account(
email=email,
first_name=first_name,
last_name=last_name,
fsuid=fsuid,
is_admin=False
)
    # Set user's extra credit courses (course_list defaults to None to avoid
    # the shared-mutable-default-argument pitfall)
    course_util.set_courses(account, course_list or [])
account.set_password(password)
account.save()
return account
def get_account(email: str = None):
    """
    Retrieves an account via email, defaulting to the email stored in
    the session when none is given.
    :email: Optional email string; if not provided, session['email'] is used.
    :return: Account if one can be resolved, None otherwise.
    """
    try:
        email = email or session[SESSION_EMAIL]
        return Account.objects.get_or_404(email=email)
    except Exception:
        return None
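# Illustrative usage (a sketch, not part of the original module; all field
# values below are made up):
#
#   account = create_account(
#       email='ada@example.com',
#       password='correct horse battery staple',
#       first_name='Ada',
#       last_name='Lovelace',
#       fsuid='al19b',
#       course_list=['COP3014'],
#   )
#   current = get_account()  # falls back to session['email']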
| 3.078125 | 3 |
FeView/pstaticwidget.py | motiurce/FeView | 10 | 6401 | from PyQt5.QtWidgets import QWidget, QVBoxLayout
from matplotlib.backends.backend_qt5agg import FigureCanvas
from matplotlib.figure import Figure
from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar
class PstaticWidget(QWidget):
def __init__(self, parent=None):
QWidget.__init__(self, parent)
self.fig_pstatic = Figure()
self.fig_pstatic.set_facecolor('#ffffff')
self.canvas_pstatic = FigureCanvas(self.fig_pstatic)
vertical_layout = QVBoxLayout()
vertical_layout.addWidget(self.canvas_pstatic)
self.canvas_pstatic.axes_pstatic = self.canvas_pstatic.figure.add_subplot(111)
self.setLayout(vertical_layout)
self.canvas_pstatic.axes_pstatic.set_xticks([])
self.canvas_pstatic.axes_pstatic.set_yticks([])
self.canvas_pstatic.axes_pstatic.axis('off')
self.fig_pstatic.subplots_adjust(left=0.12, bottom=0.15, right=0.985, top=0.95)
self.toolbar = NavigationToolbar(self.canvas_pstatic, self)
self.toolbar.setFixedHeight(25)
        vertical_layout.addWidget(self.toolbar)
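# Minimal usage sketch (illustrative only, not part of FeView itself):
#
#   import sys
#   from PyQt5.QtWidgets import QApplication
#   app = QApplication(sys.argv)
#   w = PstaticWidget()
#   w.canvas_pstatic.axes_pstatic.plot([0, 1, 2], [0, 1, 4])
#   w.show()
#   sys.exit(app.exec_())
| 2.515625 | 3 |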
pyallocation/solvers/exhaustive.py | julesy89/pyallocation | 0 | 6402 |
import numpy as np
from pymoo.core.algorithm import Algorithm
from pymoo.core.population import Population
from pymoo.util.termination.no_termination import NoTermination
from pyallocation.allocation import FastAllocation
from pyallocation.problem import AllocationProblem
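# Depth-first enumeration of all feasible assignments: rec_exhaustively fixes
# one decision variable per recursion level, prunes infeasible partial
# allocations early, and keeps only the ~100 best solutions ranked by
# (constraint violation, weighted objective).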
def exhaustively(problem):
alloc = FastAllocation(problem, debug=False)
k = 0
sols = []
rec_exhaustively(problem, alloc, k, sols)
sols.sort(key=lambda x: (x[1], x[2]))
return sols[:100]
def rec_exhaustively(problem, alloc, k, sols):
if not alloc.feas:
return
if k == problem.n_var:
x, cv, f = np.copy(alloc.x), alloc.CV, (alloc.F * problem.w).sum()
sols.append((x, cv, f))
if len(sols) > 1000:
sols.sort(key=lambda x: (x[1], x[2]))
while len(sols) > 100:
sols.pop()
else:
for val in range(problem.xl[k], problem.xu[k] + 1):
alloc.set(k, val)
rec_exhaustively(problem, alloc, k + 1, sols)
alloc.set(k, -1)
class ExhaustiveAlgorithm(Algorithm):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.default_termination = NoTermination()
def setup(self, problem, **kwargs):
super().setup(problem, **kwargs)
assert isinstance(problem, AllocationProblem)
return self
def _initialize(self):
self._next()
def _next(self):
solutions = exhaustively(self.problem)
self.pop = Population.new(X=np.array([x for x, _, _ in solutions]))
self.evaluator.eval(self.problem, self.pop)
for ind in self.pop:
print(ind.F[0], ind.X)
self.termination.force_termination = True
| 2.28125 | 2 |
config.py | yasminbraga/ufopa-reports | 0 | 6403 | import os
class Config:
CSRF_ENABLED = True
SECRET_KEY = 'your-very-very-secret-key'
SQLALCHEMY_DATABASE_URI = 'postgresql:///flask_template_dev'
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_ECHO = True
class Development(Config):
ENV = 'development'
DEBUG = True
TESTING = False
class Production(Config):
ENV = 'production'
DEBUG = False
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL', 'postgres://firhokdcdnfygz:93231d3f2ae1156cabfc40f7e4ba08587a77f68a5e2072fbcbbdb30150ba4bcb@ec2-107-22-253-158.compute-1.amazonaws.com:5432/df9c5vvl0s21da')
| 1.765625 | 2 |
heat/api/openstack/v1/views/stacks_view.py | noironetworks/heat | 265 | 6404 |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import itertools
from heat.api.openstack.v1 import util
from heat.api.openstack.v1.views import views_common
from heat.rpc import api as rpc_api
_collection_name = 'stacks'
basic_keys = (
rpc_api.STACK_ID,
rpc_api.STACK_NAME,
rpc_api.STACK_DESCRIPTION,
rpc_api.STACK_STATUS,
rpc_api.STACK_STATUS_DATA,
rpc_api.STACK_CREATION_TIME,
rpc_api.STACK_DELETION_TIME,
rpc_api.STACK_UPDATED_TIME,
rpc_api.STACK_OWNER,
rpc_api.STACK_PARENT,
rpc_api.STACK_USER_PROJECT_ID,
rpc_api.STACK_TAGS,
)
def format_stack(req, stack, keys=None, include_project=False):
def transform(key, value):
if keys and key not in keys:
return
if key == rpc_api.STACK_ID:
yield ('id', value['stack_id'])
yield ('links', [util.make_link(req, value)])
if include_project:
yield ('project', value['tenant'])
elif key == rpc_api.STACK_ACTION:
return
elif (key == rpc_api.STACK_STATUS and
rpc_api.STACK_ACTION in stack):
# To avoid breaking API compatibility, we join RES_ACTION
# and RES_STATUS, so the API format doesn't expose the
# internal split of state into action/status
yield (key, '_'.join((stack[rpc_api.STACK_ACTION], value)))
else:
# TODO(zaneb): ensure parameters can be formatted for XML
# elif key == rpc_api.STACK_PARAMETERS:
# return key, json.dumps(value)
yield (key, value)
return dict(itertools.chain.from_iterable(
transform(k, v) for k, v in stack.items()))
def collection(req, stacks, count=None, include_project=False):
keys = basic_keys
formatted_stacks = [format_stack(req, s, keys, include_project)
for s in stacks]
result = {'stacks': formatted_stacks}
links = views_common.get_collection_links(req, formatted_stacks)
if links:
result['links'] = links
if count is not None:
result['count'] = count
return result
| 1.835938 | 2 |
pykrev/formula/find_intersections.py | Kzra/pykrev | 4 | 6405 | import itertools
import numpy as np
import pandas as pd
def find_intersections(formula_lists,group_labels,exclusive = True):
"""
Docstring for function pyKrev.find_intersections
====================
This function compares n lists of molecular formula and outputs a dictionary containing the intersections between each list.
Use
----
find_intersections([list_1,..,list_n],['group_1',...,'group_n'])
Returns a dictionary in which each key corresponds to a combination of group labels
and the corresponding value is a set containing the intersections between the groups in that combination.
Parameters
----------
formula_lists: a list containing n lists of molecular formula. Each item in the sub list should be a formula string.
group_labels: a list containing n strings of corresponding group labels.
exclusive: True or False, depending on whether you want the intersections to contain only unique values.
"""
if len(formula_lists) != len(group_labels):
        raise ValueError('formula_lists and group_labels must be of equal length')
combinations = [seq for i in range(0,len(group_labels)+1) for seq in itertools.combinations(group_labels,i) if len(seq) > 0]
combinations = sorted(combinations,key = lambda c : len(c),reverse = True) # sort combinations by length
    if exclusive:
assigned_formula = set() #create a set that will hold all the formula already assigned to a group
amb = pd.DataFrame(data = formula_lists).T
amb.columns = group_labels
intersections = dict()
for combo in combinations:
queries = []
for c in combo:
formula = list(filter(None,amb[c])) #Remove None entries introduced by dataframe
queries.append(set(formula))
if len(queries) == 1: #if there is only one query find the unique elements in it
q_set = frozenset(queries[0]) #qset is a frozen set, so it will not be mutated by changes to queries[0]
for f_list in formula_lists: #cycle all formula in formula_lists
set_f = frozenset(f_list) #convert f_list to sets, must be frozen so type matches q_set
if set_f == q_set: # ignore the set that corresponds to the query
pass
else:
queries[0] = queries[0] - set_f #delete any repeated elements in fset
intersections[combo] = queries[0]
elif len(queries) > 1:
            if exclusive:
q_intersect = intersect(queries)
intersections[combo] = q_intersect - assigned_formula #remove any elements from q_intersect that have already been assigned
assigned_formula.update(q_intersect) #update the assigned_set with q_intersect
else:
intersections[combo] = intersect(queries)
return intersections
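# Illustrative usage (a sketch with made-up formula strings):
#
#   groups = find_intersections([['C6H12O6', 'C2H4O2'], ['C6H12O6']],
#                               ['soil', 'water'])
#   groups[('soil', 'water')]  # -> {'C6H12O6'}
#   groups[('soil',)]          # -> {'C2H4O2'}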
def intersect(samples,counter=0):
""" This command uses recursion to find the intersections between a variable number of sets given in samples.
Where samples = [set_1,set_2,...,set_n] """
if len(samples) == 1:
return samples[0]
a = samples[counter]
b = samples[counter+1::]
if len(b) == 1: #check to see whether the recursion has reached the final element
return a & b[0]
else:
counter += 1
        return a & intersect(samples, counter)
| 3.53125 | 4 |
Create Playlist.py | j4ck64/PlaylistDirectories | 0 | 6406 | import os
import glob
import shutil
from tinytag import TinyTag
""" root = 'C:/'
copy_to = '/copy to/folder'
tag = TinyTag.get('C:/Users/jchap/OneDrive/Pictures/(VERYRAREBOYZ) (feat. $ki Mask The Slump God and Drugz).mp3')
print(tag.artist)
print('song duration: '+str(tag.duration))
"""
f = []
f=glob.glob('C:/Users/jchap/OneDrive/*.mp3')
print(f)
musicDirectory=[]
musicFiles =[]
# tag = TinyTag.get(f[0])
# print(tag.artist)
# for root, dirs, files in os.walk("C:/Users/jchap/OneDrive/"):
for root, dirs, files in os.walk("C:/"):
for file in files:
if file.endswith(".mp3"):
musicFiles.append(file)
musicDirectory.append(os.path.join(root, file))
#print(os.path.join(root, file))
print('files'+str(musicFiles))
tag = TinyTag.get(musicDirectory[0])
print('Artist',tag.artist)
print('Album Artist',tag.albumartist)
print('Title',tag.title)
print('Biterate',tag.bitrate)
print('music directory'+str(musicDirectory))
print(len(musicDirectory))
currentDirectory =os.path.dirname(__file__)
with open(currentDirectory+'/The_Krabby_Patty Formula_.m3u', "r") as f:
content_list = [word.strip() for word in f]
""" my_file = open(currentDirectory+'/The_Krabby_Patty Formula_.m3u', "r")
content_list = my_file. readlines() """
# print('playlist contents')
# print(content_list)
musicDirectory
musicWithoutDuplicates = []
duplicatesList = []
count =0
# check for tags equal to none
#remove tracks without albumn artist or title
for track in reversed(range(len(musicDirectory))):
try:
trackTag = TinyTag.get(musicDirectory[track])
if str(trackTag.albumartist)== 'None' or str(trackTag.title)=='None':
print('albumArtist = none',musicDirectory[track])
print('removing track and adding to log file')
musicDirectory.remove(musicDirectory[track])
except IndexError:
break
#check for duplicates
for j in range(len(musicDirectory)):
musicDtag = TinyTag.get(musicDirectory[j])
duplicateL=[]
duplicateLBiterate=[]
for duplicate in range(len(musicDirectory)):
duplicateTag = TinyTag.get(musicDirectory[duplicate])
musicWithoutDuplicates.append(musicDirectory[j])
if duplicateTag.albumartist == musicDtag.albumartist or duplicateTag.albumartist in musicDtag.albumartist:
if duplicateTag.title == musicDtag.title or duplicateTag.title in musicDtag.title :
#check if last iteration
if duplicate>=len(musicDirectory)-1:
print("found a duplicate!",musicDirectory[duplicate],duplicateTag.albumartist,duplicateTag.title)
                    if len(duplicateLBiterate) == 1:  # NOTE: this condition may need revisiting
print('biterate')
#[x for x in duplicateL if TinyTag.get(musicDirectory[x]).bitrate > musicDirectory[x]]
print("Current duplicate Bite rate", duplicateLBiterate)
                        for x in range(len(duplicateL)):
                            if TinyTag.get(duplicateL[x]).bitrate == max(duplicateLBiterate):
                                # remove the best-bitrate copy from the delete list so it is
                                # kept, then break to avoid mutating the list mid-iteration
                                duplicateL.remove(duplicateL[x])
                                break
print('duplicate list',duplicateL)
#Add
duplicatesList = duplicatesList + duplicateL
else:
print("found a duplicate!",musicDirectory[duplicate],duplicateTag.albumartist,duplicateTag.title)
duplicateL.append(musicDirectory[duplicate])
duplicateLBiterate.append(duplicateTag.bitrate)
print('dup ',duplicatesList)
#remove duplicates from list
for u in range(len(duplicatesList)):
for i in range(len(musicDirectory)):
if duplicatesList[u]==musicDirectory[i]:
musicDirectory.remove(musicDirectory[i])
print('music ',musicDirectory)
#create playlist
newPlaylist = open("Test.m3u", "w")
#add file path to the respective track in the new playlist
for trackNumber, line in enumerate(content_list):
    # split each "Artist - Title" playlist entry into its two parts
    trackArray = str(line).split('-')
    albumArtist = trackArray[0].strip()
    title = trackArray[1].strip()
    print('title:', title)
    print('albumArtist:', albumArtist)
    for trackDirectory in range(len(musicDirectory)):
        trackTag = TinyTag.get(musicDirectory[trackDirectory])
        if trackTag.albumartist == albumArtist or trackTag.albumartist in albumArtist:
            if trackTag.title == title or trackTag.title in title:
                # write the resolved file path for this entry (m3u entries are file paths)
                newPlaylist.write(musicDirectory[trackDirectory] + "\n")
                break
newPlaylist.close()
| 2.765625 | 3 |
PyBank/main.py | Alexis-Kepano/python_challenge | 0 | 6407 | #import modules
import os
import csv
#input
csvpath = os.path.join('Resources', 'budget_data.csv')
#output
outfile = os.path.join('Analysis', 'pybankstatements.txt')
#declare variables
months = []
total_m = 1
net_total = 0
total_change = 0
monthly_changes = []
greatest_inc = ['', 0]
greatest_dec = ['', 0]
#open & read csv
with open(csvpath) as csvfile:
csvreader = csv.reader(csvfile, delimiter=',')
header = next(csvreader)
first_row = next(csvreader)
previous_row = int(first_row[1])
net_total = int(first_row[1])
#loop
for row in csvreader:
net_total += int(row[1])
total_m = total_m+1
current_value = int(row[1])
change_value = int(current_value-previous_row)
monthly_changes.append(change_value)
months.append(row[0])
previous_row = int(row[1])
total_change = total_change + change_value
if change_value > greatest_inc[1]:
greatest_inc[0] = str(row[0])
greatest_inc[1] = change_value
if change_value < greatest_dec[1]:
greatest_dec[0] = str(row[0])
greatest_dec[1] = change_value
avg_change = total_change/len(months)
output = (
f"\n Financial Analysis \n"
f"------------------------------\n"
f"Total Months: {total_m}\n"
f"Total: ${net_total}\n"
f"Average Change: ${avg_change:.2f}\n"
f"Greatest Increase in Profits: {greatest_inc[0]} (${greatest_inc[1]})\n"
f"Greatest Decrease in Profits: {greatest_dec[0]} (${greatest_dec[1]})\n")
with open(outfile, "w") as txt_file:
txt_file.write(output)
| 3.265625 | 3 |
bot/constants/messages.py | aasw0ng/thornode-telegram-bot | 15 | 6408 |
from enum import Enum
from constants.globals import HEALTH_EMOJIS
NETWORK_ERROR = '😱 There was an error while getting data 😱\nAn API endpoint is down!'
HEALTH_LEGEND = f'\n*Node health*:\n{HEALTH_EMOJIS[True]} - *healthy*\n{HEALTH_EMOJIS[False]} - *unhealthy*\n' \
f'{HEALTH_EMOJIS[None]} - *unknown*\n'
class NetworkHealthStatus(Enum):
INEFFICIENT = "Inefficient"
OVERBONDED = "Overbonded"
OPTIMAL = "Optimal"
    UNDERBONDED = "Underbonded"
INSECURE = "Insecure"
NETWORK_HEALTHY_AGAIN = "The network is safe and efficient again! ✅"
def get_network_health_warning(network_health_status: NetworkHealthStatus) -> str:
severity = "🤒"
if network_health_status is NetworkHealthStatus.INSECURE:
severity = "💀"
elif network_health_status is NetworkHealthStatus.INEFFICIENT:
severity = "🦥"
return f"Network health is not optimal: {network_health_status.value} {severity}"
def get_node_healthy_again_message(node_data) -> str:
return f"⚕️Node is healthy again⚕️\nAddress: {node_data['node_address']}\nIP: {node_data['ip_address']}\n" \
def get_node_health_warning_message(node_data) -> str:
return "⚠️ ️⚠ ️ ️⚠️ ️ ⚠ ️⚠ ⚠️ ️⚠ ️⚠ ⚠️ ️⚠ ️ ️⚠️ ️ ⚠ ️⚠ ⚠️ \n" \
f"Node is *not responding*!\nAddress: {node_data['node_address']}\nIP: {node_data['ip_address']}\n" \
"\nCheck it's health immediately\n" \
"⚠️ ️⚠ ️ ️⚠️ ️ ⚠ ️⚠ ⚠️ ️⚠ ️⚠ ⚠️ ️⚠ ️ ️⚠️ ️ ⚠ ️⚠ ⚠️"
| 2.75 | 3 |
src/interactive_conditional_samples.py | RanHerOver/cometaai | 0 | 6409 |
import random
import fire
import json
import os
import numpy as np
import tensorflow as tf
import pytumblr
import mysql.connector
import datetime
import model, sample, encoder
def interact_model(
model_name='1558M',
seed=None,
nsamples=1,
batch_size=1,
length=None,
temperature=.7,
top_k=10,
top_p=1,
models_dir='models',
):
    # Authentication
client = pytumblr.TumblrRestClient(
'',
'',
'',
''
)
    # To keep my account secure, the two key pairs used to connect to Tumblr
    # have been removed from this file.
    # Connect to the database
mydb = mysql.connector.connect(
host="localhost",
user="root",
password="",
database="cometa"
)
print(mydb)
cursor = mydb.cursor()
    # Build and run the query
    print("before running the query")
    cursor.execute("SELECT testo FROM prompts ORDER BY RAND() LIMIT 1")
    print("after running the query")
for (testo) in cursor:
print("{}".format(testo))
    # Format the prompt
testoBuono = "{}".format(testo)
testoBuono=testoBuono.replace("(","")
testoBuono=testoBuono.replace(")","")
testoBuono=testoBuono.replace("'","")
testoBuono=testoBuono.replace(",","")
print(testoBuono)
    client.info()  # Fetch and retain the profile information
blogName='unlikelycrownkitty'
models_dir = os.path.expanduser(os.path.expandvars(models_dir))
if batch_size is None:
batch_size = 1
assert nsamples % batch_size == 0
    # Load the model from the models directory
enc = encoder.get_encoder(model_name, models_dir)
hparams = model.default_hparams()
with open(os.path.join(models_dir, model_name, 'hparams.json')) as f:
hparams.override_from_dict(json.load(f))
    # Check that the requested sample length is not excessive
if length is None:
length = hparams.n_ctx // 2
elif length > hparams.n_ctx:
raise ValueError("Can't get samples longer than window size: %s" % hparams.n_ctx)
    # Start the model with the given parameters
with tf.Session(graph=tf.Graph()) as sess:
context = tf.placeholder(tf.int32, [batch_size, None])
np.random.seed(seed)
tf.set_random_seed(seed)
output = sample.sample_sequence(
hparams=hparams, length=length,
context=context,
batch_size=batch_size,
temperature=temperature, top_k=top_k, top_p=top_p
)
continua=True
        # Begin text generation
saver = tf.train.Saver()
ckpt = tf.train.latest_checkpoint(os.path.join(models_dir, model_name))
saver.restore(sess, ckpt)
while continua:
raw_text = testoBuono
# raw_text = f.read()
while not raw_text:
print('The file is empty! Write something yourself.')
raw_text = input("Model prompt >>> ")
context_tokens = enc.encode(raw_text)
generated = 0
for _ in range(nsamples // batch_size):
out = sess.run(output, feed_dict={
context: [context_tokens for _ in range(batch_size)]
})[:, len(context_tokens):]
for i in range(batch_size):
generated += 1
text = enc.decode(out[i])
print("=" * 40 + " SAMPLE " + str(generated) + " " + "=" * 40)
print(text)
print("=" * 80)
                    # Publish the generated text
client.create_text(blogName, state="published", slug="testing-text-posts",title=raw_text, body=text)
print('Continue? y/n')
risposta=input()
if risposta.lower() in ['y', 'yes']:
continua=True
else:
continua=False
exit()
if __name__ == '__main__':
    fire.Fire(interact_model)
| 2.203125 | 2 |
desktop/core/ext-py/pyasn1-0.1.8/pyasn1/compat/iterfunc.py | kokosing/hue | 422 | 6410 |
from sys import version_info
if version_info[0] <= 2 and version_info[1] <= 4:
def all(iterable):
for element in iterable:
if not element:
return False
return True
else:
all = all
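# Quick illustration (not in the original file): either branch yields the
# builtin semantics.
#
#   assert all([1, True, 'x'])
#   assert not all([1, 0, 'x'])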
| 2.453125 | 2 |
src/cms/carousels/serializers.py | UniversitaDellaCalabria/uniCMS | 6 | 6411 | from rest_framework import serializers
from cms.api.serializers import UniCMSContentTypeClass, UniCMSCreateUpdateSerializer
from cms.medias.serializers import MediaSerializer
from . models import Carousel, CarouselItem, CarouselItemLink, CarouselItemLinkLocalization, CarouselItemLocalization
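# The PrimaryKeyRelatedField subclasses below restrict their querysets to the
# ids captured in the URL kwargs (carousel_id, carousel_item_id,
# carousel_item_link_id), so nested endpoints can only reference objects that
# belong to the parent resource in the request path.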
class CarouselForeignKey(serializers.PrimaryKeyRelatedField):
def get_queryset(self):
request = self.context.get('request', None)
if request:
carousel_id = self.context['request'].parser_context['kwargs']['carousel_id']
return Carousel.objects.filter(pk=carousel_id)
return None # pragma: no cover
class CarouselItemForeignKey(serializers.PrimaryKeyRelatedField):
def get_queryset(self):
request = self.context.get('request', None)
if request:
carousel_id = self.context['request'].parser_context['kwargs']['carousel_id']
item_id = self.context['request'].parser_context['kwargs']['carousel_item_id']
return CarouselItem.objects.filter(pk=item_id,
carousel__pk=carousel_id)
return None # pragma: no cover
class CarouselItemLinkForeignKey(serializers.PrimaryKeyRelatedField):
def get_queryset(self):
request = self.context.get('request', None)
if request:
carousel_id = self.context['request'].parser_context['kwargs']['carousel_id']
item_id = self.context['request'].parser_context['kwargs']['carousel_item_id']
link_id = self.context['request'].parser_context['kwargs']['carousel_item_link_id']
return CarouselItemLink.objects.filter(pk=link_id,
carousel_item__pk=item_id,
carousel_item__carousel__pk=carousel_id)
return None # pragma: no cover
class CarouselSerializer(UniCMSCreateUpdateSerializer,
UniCMSContentTypeClass):
class Meta:
model = Carousel
fields = '__all__'
read_only_fields = ('created_by', 'modified_by')
class CarouselItemSerializer(UniCMSCreateUpdateSerializer,
UniCMSContentTypeClass):
carousel = CarouselForeignKey()
def to_representation(self, instance):
data = super().to_representation(instance)
image = MediaSerializer(instance.image)
data['image'] = image.data
return data
class Meta:
model = CarouselItem
fields = '__all__'
read_only_fields = ('created_by', 'modified_by')
class CarouselItemLocalizationSerializer(UniCMSCreateUpdateSerializer,
UniCMSContentTypeClass):
carousel_item = CarouselItemForeignKey()
class Meta:
model = CarouselItemLocalization
fields = '__all__'
read_only_fields = ('created_by', 'modified_by')
class CarouselItemLinkSerializer(UniCMSCreateUpdateSerializer,
UniCMSContentTypeClass):
carousel_item = CarouselItemForeignKey()
class Meta:
model = CarouselItemLink
fields = '__all__'
class CarouselItemLinkLocalizationSerializer(UniCMSCreateUpdateSerializer,
UniCMSContentTypeClass):
carousel_item_link = CarouselItemLinkForeignKey()
class Meta:
model = CarouselItemLinkLocalization
fields = '__all__'
read_only_fields = ('created_by', 'modified_by')
class CarouselSelectOptionsSerializer(serializers.ModelSerializer):
def to_representation(self, instance):
data = super().to_representation(instance)
data['value'] = instance.pk
data['text'] = instance.name
return data
class Meta:
model = Carousel
fields = ()
| 2.046875 | 2 |
demos/colorization_demo/python/colorization_demo.py | mzegla/open_model_zoo | 0 | 6412 | #!/usr/bin/env python3
"""
Copyright (c) 2018-2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from openvino.runtime import Core, get_version
import cv2 as cv
import numpy as np
import logging as log
from time import perf_counter
import sys
from argparse import ArgumentParser, SUPPRESS
from pathlib import Path
sys.path.append(str(Path(__file__).resolve().parents[2] / 'common/python'))
sys.path.append(str(Path(__file__).resolve().parents[2] / 'common/python/openvino/model_zoo'))
import monitors
from images_capture import open_images_capture
from model_api.performance_metrics import PerformanceMetrics
log.basicConfig(format='[ %(levelname)s ] %(message)s', level=log.DEBUG, stream=sys.stdout)
def build_arg():
parser = ArgumentParser(add_help=False)
in_args = parser.add_argument_group('Options')
in_args.add_argument('-h', '--help', action='help', default=SUPPRESS, help='Help with the script.')
in_args.add_argument("-m", "--model", help="Required. Path to .xml file with pre-trained model.",
required=True, type=Path)
in_args.add_argument("-d", "--device",
help="Optional. Specify target device for infer: CPU, GPU, HDDL or MYRIAD. "
"Default: CPU",
default="CPU", type=str)
in_args.add_argument('-i', "--input", required=True,
help='Required. An input to process. The input must be a single image, '
'a folder of images, video file or camera id.')
in_args.add_argument('--loop', default=False, action='store_true',
help='Optional. Enable reading the input in a loop.')
in_args.add_argument('-o', '--output', required=False,
help='Optional. Name of the output file(s) to save.')
in_args.add_argument('-limit', '--output_limit', required=False, default=1000, type=int,
help='Optional. Number of frames to store in output. '
'If 0 is set, all frames are stored.')
in_args.add_argument("--no_show", help="Optional. Don't show output.",
action='store_true', default=False)
in_args.add_argument("-u", "--utilization_monitors", default="", type=str,
help="Optional. List of monitors to show initially.")
return parser
def main(args):
cap = open_images_capture(args.input, args.loop)
log.info('OpenVINO Inference Engine')
log.info('\tbuild: {}'.format(get_version()))
core = Core()
log.info('Reading model {}'.format(args.model))
model = core.read_model(args.model, args.model.with_suffix(".bin"))
input_tensor_name = 'data_l'
input_shape = model.input(input_tensor_name).shape
assert input_shape[1] == 1, "Expected model input shape with 1 channel"
inputs = {}
for input in model.inputs:
inputs[input.get_any_name()] = np.zeros(input.shape)
assert len(model.outputs) == 1, "Expected number of outputs is equal 1"
compiled_model = core.compile_model(model, device_name=args.device)
infer_request = compiled_model.create_infer_request()
log.info('The model {} is loaded to {}'.format(args.model, args.device))
_, _, h_in, w_in = input_shape
frames_processed = 0
imshow_size = (640, 480)
graph_size = (imshow_size[0] // 2, imshow_size[1] // 4)
presenter = monitors.Presenter(args.utilization_monitors, imshow_size[1] * 2 - graph_size[1], graph_size)
metrics = PerformanceMetrics()
video_writer = cv.VideoWriter()
if args.output and not video_writer.open(args.output, cv.VideoWriter_fourcc(*'MJPG'),
cap.fps(), (imshow_size[0] * 2, imshow_size[1] * 2)):
raise RuntimeError("Can't open video writer")
start_time = perf_counter()
original_frame = cap.read()
if original_frame is None:
raise RuntimeError("Can't read an image from the input")
while original_frame is not None:
(h_orig, w_orig) = original_frame.shape[:2]
if original_frame.shape[2] > 1:
frame = cv.cvtColor(cv.cvtColor(original_frame, cv.COLOR_BGR2GRAY), cv.COLOR_GRAY2RGB)
else:
frame = cv.cvtColor(original_frame, cv.COLOR_GRAY2RGB)
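        # Colorization core: resize the L (lightness) channel to the network's
        # input size, infer the a/b chrominance channels, then recombine them
        # with the full-resolution L channel and convert Lab back to BGR.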
img_rgb = frame.astype(np.float32) / 255
img_lab = cv.cvtColor(img_rgb, cv.COLOR_RGB2Lab)
img_l_rs = cv.resize(img_lab.copy(), (w_in, h_in))[:, :, 0]
inputs[input_tensor_name] = np.expand_dims(img_l_rs, axis=[0, 1])
res = next(iter(infer_request.infer(inputs).values()))
update_res = np.squeeze(res)
out = update_res.transpose((1, 2, 0))
out = cv.resize(out, (w_orig, h_orig))
img_lab_out = np.concatenate((img_lab[:, :, 0][:, :, np.newaxis], out), axis=2)
img_bgr_out = np.clip(cv.cvtColor(img_lab_out, cv.COLOR_Lab2BGR), 0, 1)
original_image = cv.resize(original_frame, imshow_size)
grayscale_image = cv.resize(frame, imshow_size)
colorize_image = (cv.resize(img_bgr_out, imshow_size) * 255).astype(np.uint8)
lab_image = cv.resize(img_lab_out, imshow_size).astype(np.uint8)
original_image = cv.putText(original_image, 'Original', (25, 50),
cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA)
grayscale_image = cv.putText(grayscale_image, 'Grayscale', (25, 50),
cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA)
colorize_image = cv.putText(colorize_image, 'Colorize', (25, 50),
cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA)
lab_image = cv.putText(lab_image, 'LAB interpretation', (25, 50),
cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA)
ir_image = [cv.hconcat([original_image, grayscale_image]),
cv.hconcat([lab_image, colorize_image])]
final_image = cv.vconcat(ir_image)
metrics.update(start_time, final_image)
frames_processed += 1
if video_writer.isOpened() and (args.output_limit <= 0 or frames_processed <= args.output_limit):
video_writer.write(final_image)
presenter.drawGraphs(final_image)
if not args.no_show:
cv.imshow('Colorization Demo', final_image)
key = cv.waitKey(1)
if key in {ord("q"), ord("Q"), 27}:
break
presenter.handleKey(key)
start_time = perf_counter()
original_frame = cap.read()
metrics.log_total()
for rep in presenter.reportMeans():
log.info(rep)
if __name__ == "__main__":
args = build_arg().parse_args()
sys.exit(main(args) or 0)
| 1.804688 | 2 |
swagger_client/models/transfer.py | chbndrhnns/ahoi-client | 0 | 6413 | # coding: utf-8
"""
[AHOI cookbook](/ahoi/docs/cookbook/index.html) [Data Privacy](/sandboxmanager/#/privacy) [Terms of Service](/sandboxmanager/#/terms) [Imprint](https://sparkassen-hub.com/impressum/) © 2016‐2017 Starfinanz - Ein Unternehmen der Finanz Informatik # noqa: E501
OpenAPI spec version: 2.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from swagger_client.models.amount import Amount # noqa: F401,E501
class Transfer(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'iban': 'str',
'bic': 'str',
'name': 'str',
'amount': 'Amount',
'purpose': 'str',
'tan_media_id': 'str',
'tan_scheme': 'str'
}
attribute_map = {
'iban': 'iban',
'bic': 'bic',
'name': 'name',
'amount': 'amount',
'purpose': 'purpose',
'tan_media_id': 'tanMediaId',
'tan_scheme': 'tanScheme'
}
def __init__(self, iban=None, bic=None, name=None, amount=None, purpose=None, tan_media_id=None, tan_scheme=None): # noqa: E501
"""Transfer - a model defined in Swagger""" # noqa: E501
self._iban = None
self._bic = None
self._name = None
self._amount = None
self._purpose = None
self._tan_media_id = None
self._tan_scheme = None
self.discriminator = None
self.iban = iban
if bic is not None:
self.bic = bic
self.name = name
self.amount = amount
if purpose is not None:
self.purpose = purpose
self.tan_media_id = tan_media_id
self.tan_scheme = tan_scheme
@property
def iban(self):
"""Gets the iban of this Transfer. # noqa: E501
IBAN - International Bank Account Number (defined in ISO 13616-1) # noqa: E501
:return: The iban of this Transfer. # noqa: E501
:rtype: str
"""
return self._iban
@iban.setter
def iban(self, iban):
"""Sets the iban of this Transfer.
IBAN - International Bank Account Number (defined in ISO 13616-1) # noqa: E501
:param iban: The iban of this Transfer. # noqa: E501
:type: str
"""
if iban is None:
raise ValueError("Invalid value for `iban`, must not be `None`") # noqa: E501
self._iban = iban
@property
def bic(self):
"""Gets the bic of this Transfer. # noqa: E501
BIC - Business Identifier Code (defined in ISO-9362) # noqa: E501
:return: The bic of this Transfer. # noqa: E501
:rtype: str
"""
return self._bic
@bic.setter
def bic(self, bic):
"""Sets the bic of this Transfer.
BIC - Business Identifier Code (defined in ISO-9362) # noqa: E501
:param bic: The bic of this Transfer. # noqa: E501
:type: str
"""
self._bic = bic
@property
def name(self):
"""Gets the name of this Transfer. # noqa: E501
Name - Name of the creditor # noqa: E501
:return: The name of this Transfer. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this Transfer.
Name - Name of the creditor # noqa: E501
:param name: The name of this Transfer. # noqa: E501
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501
self._name = name
@property
def amount(self):
"""Gets the amount of this Transfer. # noqa: E501
Amount to be transfered # noqa: E501
:return: The amount of this Transfer. # noqa: E501
:rtype: Amount
"""
return self._amount
@amount.setter
def amount(self, amount):
"""Sets the amount of this Transfer.
Amount to be transfered # noqa: E501
:param amount: The amount of this Transfer. # noqa: E501
:type: Amount
"""
if amount is None:
raise ValueError("Invalid value for `amount`, must not be `None`") # noqa: E501
self._amount = amount
@property
def purpose(self):
"""Gets the purpose of this Transfer. # noqa: E501
Purpose # noqa: E501
:return: The purpose of this Transfer. # noqa: E501
:rtype: str
"""
return self._purpose
@purpose.setter
def purpose(self, purpose):
"""Sets the purpose of this Transfer.
Purpose # noqa: E501
:param purpose: The purpose of this Transfer. # noqa: E501
:type: str
"""
self._purpose = purpose
@property
def tan_media_id(self):
"""Gets the tan_media_id of this Transfer. # noqa: E501
TANMediaId - The identifying ID of the TANMedia. # noqa: E501
:return: The tan_media_id of this Transfer. # noqa: E501
:rtype: str
"""
return self._tan_media_id
@tan_media_id.setter
def tan_media_id(self, tan_media_id):
"""Sets the tan_media_id of this Transfer.
TANMediaId - The identifying ID of the TANMedia. # noqa: E501
:param tan_media_id: The tan_media_id of this Transfer. # noqa: E501
:type: str
"""
if tan_media_id is None:
raise ValueError("Invalid value for `tan_media_id`, must not be `None`") # noqa: E501
self._tan_media_id = tan_media_id
@property
def tan_scheme(self):
"""Gets the tan_scheme of this Transfer. # noqa: E501
TANScheme - The scheme **id** that is used to verify this payment (e.g. \"901\") # noqa: E501
:return: The tan_scheme of this Transfer. # noqa: E501
:rtype: str
"""
return self._tan_scheme
@tan_scheme.setter
def tan_scheme(self, tan_scheme):
"""Sets the tan_scheme of this Transfer.
TANScheme - The scheme **id** that is used to verify this payment (e.g. \"901\") # noqa: E501
:param tan_scheme: The tan_scheme of this Transfer. # noqa: E501
:type: str
"""
if tan_scheme is None:
raise ValueError("Invalid value for `tan_scheme`, must not be `None`") # noqa: E501
self._tan_scheme = tan_scheme
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Transfer):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 1.984375 | 2 |
external/trappy/tests/test_caching.py | vdonnefort/lisa | 1 | 6414 | # Copyright 2015-2017 ARM Limited, Google and contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import unicode_literals
from __future__ import division
from __future__ import print_function
from builtins import chr
import os
import json
import shutil
import sys
import unittest
import utils_tests
import trappy
from trappy.ftrace import GenericFTrace
from trappy.systrace import SysTrace
class TestCaching(utils_tests.SetupDirectory):
def __init__(self, *args, **kwargs):
super(TestCaching, self).__init__(
[("trace_sched.txt", "trace.txt"),
("trace_sched.txt", "trace.raw.txt"),
("trace_systrace.html", "trace.html")],
*args,
**kwargs)
def test_cache_created(self):
"""Test cache creation when enabled"""
GenericFTrace.disable_cache = False
traces = (trappy.FTrace(), trappy.SysTrace(path='./trace.html'))
for trace in traces:
trace_path = os.path.abspath(trace.trace_path)
trace_dir = os.path.dirname(trace_path)
trace_file = os.path.basename(trace_path)
cache_dir = '.' + trace_file + '.cache'
self.assertTrue(cache_dir in os.listdir(trace_dir))
def test_cache_not_created(self):
"""Test that cache should not be created when disabled """
GenericFTrace.disable_cache = True
traces = (trappy.FTrace(), trappy.SysTrace(path='./trace.html'))
for trace in traces:
trace_path = os.path.abspath(trace.trace_path)
trace_dir = os.path.dirname(trace_path)
trace_file = os.path.basename(trace_path)
cache_dir = '.' + trace_file + '.cache'
self.assertFalse(cache_dir in os.listdir(trace_dir))
def test_compare_cached_vs_uncached(self):
""" Test that the cached and uncached traces are same """
# Build the cache, but the actual trace will be parsed
# fresh since this is a first time parse
GenericFTrace.disable_cache = False
uncached_trace = trappy.FTrace()
uncached_dfr = uncached_trace.sched_wakeup.data_frame
# Now read from previously parsed cache by reusing the path
cached_trace = trappy.FTrace(uncached_trace.trace_path)
cached_dfr = cached_trace.sched_wakeup.data_frame
# By default, the str to float conversion done when reading from csv is
# different from the one used when reading from the trace.txt file.
#
# Here's an example:
# - trace.txt string timestamps:
# [76.402065, 80.402065, 80.001337]
# - parsed dataframe timestamps:
# [76.402065000000007, 80.402065000000007, 82.001337000000007]
#
# - csv string timestamps:
# [76.402065, 80.402065, 80.001337]
# - cached dataframe timestamps:
# [76.402064999999993, 80.402064999999993, 82.001337000000007]
#
# To fix this, the timestamps read from the cache are converted using
# the same conversion method as the trace.txt parser, which results in
# cache-read timestamps being identical to trace-read timestamps.
#
# This test ensures that this stays true.
cached_times = [r[0] for r in cached_dfr.iterrows()]
uncached_times = [r[0] for r in uncached_dfr.iterrows()]
self.assertTrue(cached_times == uncached_times)
# compare other columns as well
self.assertTrue([r[1].pid for r in cached_dfr.iterrows()] ==
[r[1].pid for r in uncached_dfr.iterrows()])
self.assertTrue([r[1].comm for r in cached_dfr.iterrows()] ==
[r[1].comm for r in uncached_dfr.iterrows()])
self.assertTrue([r[1].prio for r in cached_dfr.iterrows()] ==
[r[1].prio for r in uncached_dfr.iterrows()])
def test_invalid_cache_overwritten(self):
"""Test a cache with a bad checksum is overwritten"""
# This is a directory so we can't use the files_to_copy arg of
# SetUpDirectory, just do it ourselves.
cache_path = ".trace.txt.cache"
src = os.path.join(utils_tests.TESTS_DIRECTORY, "trace_sched.txt.cache")
shutil.copytree(src, cache_path)
metadata_path = os.path.join(cache_path, "metadata.json")
def read_metadata():
with open(metadata_path, "r") as f:
return json.load(f)
def write_md5(md5):
metadata = read_metadata()
metadata["md5sum"] = md5
with open(metadata_path, "w") as f:
json.dump(metadata, f)
# Change 1 character of the stored checksum
md5sum = read_metadata()["md5sum"]
md5sum_inc = md5sum[:-1] + chr(ord(md5sum[-1]) + 1)
write_md5(md5sum_inc)
# Parse a trace, this should delete and overwrite the invalidated cache
GenericFTrace.disable_cache = False
trace = trappy.FTrace()
# Check that the modified md5sum was overwritten
self.assertNotEqual(read_metadata()["md5sum"], md5sum_inc,
"The invalid ftrace cache wasn't overwritten")
def test_cache_dynamic_events(self):
"""Test that caching works if new event parsers have been registered"""
# Parse the trace to create a cache
GenericFTrace.disable_cache = False
trace1 = trappy.FTrace()
# Check we're actually testing what we think we are
if hasattr(trace1, 'dynamic_event'):
raise RuntimeError('Test bug: found unexpected event in trace')
# Now register a new event type, call the constructor again, and check
# that the newly added event (which is not present in the cache) is
# parsed.
parse_class = trappy.register_dynamic_ftrace("DynamicEvent", "dynamic_test_key")
trace2 = trappy.FTrace()
self.assertTrue(len(trace2.dynamic_event.data_frame) == 1)
trappy.unregister_dynamic_ftrace(parse_class)
def test_cache_normalize_time(self):
"""Test that caching doesn't break normalize_time"""
GenericFTrace.disable_cache = False
# Times in trace_sched.txt
start_time = 6550.018511
first_freq_event_time = 6550.056870
# Parse without normalizing time
trace1 = trappy.FTrace(events=['cpu_frequency', 'sched_wakeup'],
normalize_time=False)
self.assertEqual(trace1.cpu_frequency.data_frame.index[0],
first_freq_event_time)
# Parse with normalized time
trace2 = trappy.FTrace(events=['cpu_frequency', 'sched_wakeup'],
normalize_time=True)
self.assertEqual(trace2.cpu_frequency.data_frame.index[0],
first_freq_event_time - start_time)
def test_cache_window_broad(self):
"""Test that caching doesn't break the 'window' parameter"""
GenericFTrace.disable_cache = False
trace1 = trappy.FTrace(
events=['sched_wakeup'],
window=(0, 1))
# Check that we're testing what we think we're testing The trace
# contains 2 sched_wakeup events; this window should get rid of one of
# them.
if len(trace1.sched_wakeup.data_frame) != 1:
raise RuntimeError('Test bug: bad sched_wakeup event count')
# Parse again without the window
trace1 = trappy.FTrace(
events=['sched_wakeup'],
window=(0, None))
self.assertEqual(len(trace1.sched_wakeup.data_frame), 2)
def test_cache_window_narrow(self):
"""
Test that applying a window to a cached trace returns EXACTLY what is expected
"""
# As described in test_compare_cache_vs_uncached, reading from cache
# results in slightly different timestamps
#
# This test verifies that applying windows results in identical
# dataframes whether cache is used or not.
GenericFTrace.disable_cache = False
uncached_trace = trappy.FTrace()
trace = trappy.FTrace(uncached_trace.trace_path,
normalize_time=False,
abs_window=(6550.100000, 6552.000002))
        self.assertAlmostEqual(trace.get_duration(), 1.900002)
self.assertEqual(len(trace.sched_wakeup.data_frame), 2)
self.assertEqual(len(trace.sched_wakeup_new.data_frame), 1)
def test_ftrace_metadata(self):
"""Test that caching keeps trace metadata"""
GenericFTrace.disable_cache = False
self.test_cache_created()
trace = trappy.FTrace()
version = int(trace._version)
cpus = int(trace._cpus)
self.assertEqual(version, 6)
self.assertEqual(cpus, 6)
def test_cache_delete_single(self):
GenericFTrace.disable_cache = False
trace = trappy.FTrace()
trace_path = os.path.abspath(trace.trace_path)
trace_dir = os.path.dirname(trace_path)
trace_file = os.path.basename(trace_path)
cache_dir = '.' + trace_file + '.cache'
number_of_trace_categories = 31
self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories)
os.remove(os.path.join(cache_dir, 'SchedWakeup.csv'))
self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories - 1)
# Generate trace again, should regenerate only the missing item
trace = trappy.FTrace()
self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories)
for c in trace.trace_classes:
if isinstance(c, trace.class_definitions['sched_wakeup']):
self.assertEqual(c.cached, False)
continue
self.assertEqual(c.cached, True)
| 1.875 | 2 |
src/mf_horizon_client/client/pipelines/blueprints.py | MF-HORIZON/mf-horizon-python-client | 0 | 6415 | from enum import Enum
class BlueprintType(Enum):
"""
A blueprint is a pipeline template in horizon, and must be specified when creating a new pipeline
Nonlinear
===============================================================================================================
A nonlinear pipeline combines nonlinear feature generation and selection with a nonlinear regressor to generate
forecasts that are at a specific target in the future.
A number of different regressor types are available here:
1. Mondrian Forest. An adaptation of the probabilistic Mondrian Forest algorithm - https://arxiv.org/abs/1406.2673
Provides Bayesian-esque error bounds, and is our recommended nonlinear regressor of choice.
2. XG Boost
3. Random Forest.
The stages of a nonlinear pipeline are as follows:
A. Forecast Specification
B. Stationarization
C. Feature Generation
D. Feature Filtering
E. Feature Refinement
F. Nonlinear Backtesting
G. Nonlinear Prediction
Linear
===============================================================================================================
    A linear pipeline combines nonlinear feature generation with a linear regressor to generate
    forecasts that are at a specific target in the future.
The regressor used is a Variational Bayesian Linear Regressor
The stages of a linear pipeline are as follows:
A. Forecast Specification
B. Stationarization
C. Nonlinear Feature Generation
D. Feature Filtering
E. Feature Refinement
F. Linear Backtesting
G. Linear Prediction
Fast Forecasting
===============================================================================================================
The fast forecasting pipeline is intended to be used as a quick assessment of a dataset's predictive performance
It is identical to the linear pipeline, but does not include Feature Refinement.
    The stages of a fast forecasting pipeline are as follows:
A. Forecast Specification
B. Stationarization
C. Nonlinear Feature Generation
D. Feature Filtering
E. Linear Backtesting
F. Linear Prediction
Feature Selection
===============================================================================================================
The feature selection pipeline assumes that the input data set already encodes information about a signal's
past, such that a horizontal observation vector may be used in a traditional regression sense to map to a target
value at a point in the future.
Feat1 | Feat2 | Feat3 | .... | FeatP
Obs1 ------------------------------------- t
Obs2 ------------------------------------- t-1
Obs3 ------------------------------------- t-2
... .....................................
... .....................................
ObsN ------------------------------------- t-N
Two stages of feature selection are then used in order to maximize predictive performance of the feature set
on specified future points for a given target
    The stages of a feature selection pipeline are as follows:
    A. Forecast Specification
    B. Feature Filtering
    C. Feature Refinement
Feature Discovery
===============================================================================================================
The feature discovery pipeline discovers features to maximize performance for a particular forecast target,
at a specified point in the future. Unlike the feature selection pipeline, it does not assume that the signal
set has already encoded historical information about the original data's past.
The stages of a feature discovery pipeline are as follows:
A. Forecast Specification
B. Feature Generation
C. Feature Filtering
D. Feature Refinement
Signal Encoding
===============================================================================================================
One of Horizon's feature generation methods is to encode signals in the frequency domain, extracting historic
lags that will efficiently represent the information contained within them.
The signal encoding pipeline allows for this functionality to be isolated, where the output is a feature
set that has encoded past information about a signal that can be exported from the platform
The stages of a signal encoding pipeline are as follows:
A. Forecast Specification
B. Feature Generation
C. Feature Filtering
Stationarization
===============================================================================================================
    Stationarize a signal set and specified target using Augmented Dickey-Fuller analysis, and a detrending method
for the specified target.
The stages of a stationarization pipeline are as follows:
A. Forecast Specification
B. Stationarization
Time-Series Regression
===============================================================================================================
Run Horizon's regression algorithms on a pre-encoded signal set.
Small Data Forecasting
===============================================================================================================
Time-series pipeline for small data. Does not contain any backtesting, and uses all the data for model training.
A. Forecast Specification
B. Stationarization
C. Linear Feature Generation
D. Feature Filtering
E. Feature Refinement
    F. Linear Prediction
Variational Forecasting
===============================================================================================================
Creates a stacked lag-embedding matrix by combining a two-stage feature generation and selection process, with
lag-only feature generation.
A. Forecast Specification
B. Stationarization
C. Linear Feature Generation
D. Feature Filtering
E. Linear Feature Generation
F. Feature Filtering
G. Linear Backtesting
H. Linear Prediction
Custom
===============================================================================================================
Advanced: Contains only a forecast specification stage for adding stages manually.
N.B. There is no validation on stage addition.
"""
nonlinear = "nonlinear"
linear = "linear"
fast_forecasting = "fast_forecast"
feature_selection = "feature_selection"
feature_discovery = "feature_discovery"
signal_encoding = "signal_encoding"
stationarisation = "stationarisation"
time_series_regression = "regression"
variational_forecasting = "variational_forecasting"
custom = "custom"
small_data = "small_data"
| 2.640625 | 3 |
pyChess/olaf/views.py | An-Alone-Cow/pyChess | 0 | 6416 | from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.shortcuts import render
from django.urls import reverse
from django.http import HttpResponseRedirect, HttpResponse
from django.utils import timezone
from olaf.models import *
from olaf.forms import *
from olaf.utility import usertools
from olaf.chess.controller import proccess_move
def index ( request ):
args = {}
message = request.session.pop ( 'message', default = None )
if ( message is not None ):
args [ 'message' ] = message
if ( request.user.is_authenticated ):
if ( request.method == 'POST' ):
if ( request.POST.get ( 'game_id' ) is not None ):
game_id = request.POST.get ( 'game_id' )
if ( game_id == '-1' ):
game_id = usertools.new_game ( request )
request.session [ 'game_id' ] = game_id
else:
request.session.pop ( 'game_id', default = None )
f = lambda a : str ( a.date () ) + " - " + str ( a.hour ) + ":" + str ( a.minute ) + ":" + str ( a.second )
args [ 'game_list' ] = list ([str ( game.id ), f ( game.creation_time )] for game in request.user.userdata.game_history.filter ( result = 0 ).order_by ( '-creation_time' ) )
if ( request.session.get ( 'game_id' ) is not None ):
args [ 'game_board' ] = usertools.get_translated_game_board ( request )
else:
args [ 'game_board' ] = None
return render ( request, 'olaf/index_logged_in.html', args )
else:
args [ 'login_form' ] = LoginForm ()
args [ 'register_form' ] = RegisterForm ()
args [ 'score' ] = list ( [user.master.username, user.wins, user.loses, user.ties] for user in UserData.objects.filter ( is_active = True ) )
return render ( request, 'olaf/index_not_logged_in.html', args )
form_operation_dict = {
'login' : (
usertools.login_user,
LoginForm,
'olaf/login.html',
{},
'index',
{ 'message' : "You're logged in. :)"}
),
'register' : (
usertools.register_user,
RegisterForm,
'olaf/register.html',
{},
'index',
{ 'message' : "An activation email has been sent to you" }
),
'password_reset_request' : (
usertools.init_pass_reset_token,
ForgotPasswordUsernameOrEmailForm,
'olaf/password_reset_request.html',
{},
'index',
{ 'message' : "An email containing the password reset link will be sent to your email"}
),
'reset_password' : (
usertools.reset_password_action,
PasswordChangeForm,
'olaf/reset_password.html',
{},
'olaf:login',
{ 'message' : "Password successfully changed, you can login now" }
),
'resend_activation_email' : (
usertools.resend_activation_email,
ResendActivationUsernameOrEmailForm,
'olaf/resend_activation_email.html',
{},
'index',
{ 'message' : "Activation email successfully sent to your email" }
),
}
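# Each entry maps an operation name to a 6-tuple:
# (handler, FormClass, failure template, extra failure-template args,
#  success redirect URL name, session args set on success).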
def form_operation ( request, oper, *args ):
func, FORM, fail_template, fail_args, success_url, success_args = form_operation_dict [ oper ]
if ( request.method == 'POST' ):
form = FORM ( request.POST )
if ( form.is_valid () ):
func ( request, form, *args )
for key in success_args:
request.session [ key ] = success_args [ key ]
return HttpResponseRedirect ( reverse ( success_url ) )
else:
form = FORM ()
message = request.session.pop ( 'message', default = None )
if ( message is not None ):
fail_args [ 'message' ] = message
fail_args [ 'form' ] = form
return render ( request, fail_template, fail_args )
#view functions
def login_user ( request ):
if ( request.user.is_authenticated ):
return HttpResponseRedirect ( reverse ( 'index' ) )
return form_operation ( request, 'login' )
def register_user ( request ):
if ( request.user.is_authenticated ):
return HttpResponseRedirect ( reverse ( 'index' ) )
return form_operation ( request, 'register' )
def password_reset_request ( request ):
if ( request.user.is_authenticated ):
return HttpResponseRedirect ( reverse ( 'index' ) )
return form_operation ( request, 'password_reset_request' )
def reset_password_action ( request, token ):
if ( request.user.is_authenticated ):
return HttpResponseRedirect ( reverse ( 'index' ) )
tk = ExpirableTokenField.objects.filter ( token = token ).first ()
if ( tk is None ):
request.session [ 'message' ] = "Broken link"
return HttpResponseRedirect ( reverse ( 'index' ) )
else:
if ( timezone.now () <= tk.expiration_time ):
return form_operation ( request, 'reset_password', token )
else:
request.session [ 'message' ] = "Link expired, try getting a new one"
return HttpResponseRedirect ( reverse ( 'olaf:reset_password' ) )
def activate_account ( request, token ):
if ( request.user.is_authenticated ):
return HttpResponseRedirect ( reverse ( 'index' ) )
tk = ExpirableTokenField.objects.filter ( token = token ).first ()
if ( tk is None ):
request.session [ 'message' ] = "Broken link"
return HttpResponseRedirect ( reverse ( 'index' ) )
else:
if ( timezone.now () <= tk.expiration_time ):
if ( tk.user.is_active ):
request.session [ 'message' ] = "Account already active"
return HttpResponseRedirect ( reverse ( 'index' ) )
else:
userdata = tk.user
userdata.is_active = True
userdata.save ()
request.session [ 'message' ] = "Your account has been activated successfully"
return HttpResponseRedirect ( reverse ( 'olaf:login' ) )
else:
request.session [ 'message' ] = "Link expired, try getting a new one"
return HttpResponseRedirect ( reverse ( 'olaf:resend_activation_email' ) )
def resend_activation_email ( request ):
if ( request.user.is_authenticated ):
return HttpResponseRedirect ( reverse ( 'index' ) )
return form_operation ( request, 'resend_activation_email' )
def logout_user ( request ):
usertools.logout_user ( request )
request.session [ 'message' ] = "Goodbye :)"
return HttpResponseRedirect ( reverse ( 'index' ) )
def scoreboard ( request ):
if ( request.method == 'POST' ):
username = request.POST.get ( 'username' )
user = User.objects.filter ( username = username ).first ()
if ( user is None ):
request.session [ 'message' ] = "User not found"
return HttpResponseRedirect ( reverse ( 'olaf:scoreboard' ) )
else:
return HttpResponseRedirect ( reverse ( 'olaf:user_profile', args = (username, ) ) )
else:
args = {}
message = request.session.pop ( 'message', default = None )
if ( message is not None ):
args [ 'message' ] = message
lst = [ (user.master.username, user.wins, user.loses, user.ties) for user in UserData.objects.filter ( is_active = True ) ]
args [ 'lst' ] = lst
if ( request.user.is_authenticated ):
args [ 'logged_in' ] = True
return render ( request, 'olaf/scoreboard.html', args )
def move ( request ):
proccess_move ( request )
return HttpResponseRedirect ( reverse ( 'index' ) ) | 2.015625 | 2 |
ce_vae_test/main_cetrainer.py | fgitmichael/SelfSupevisedSkillDiscovery | 0 | 6417 | <reponame>fgitmichael/SelfSupevisedSkillDiscovery<gh_stars>0
from __future__ import print_function
import argparse
import torch
import torch.utils.data
import matplotlib.pyplot as plt
from torch import nn, optim
from torch.nn import functional as F
from torchvision import datasets, transforms
from torchvision.utils import save_image
from torch.utils.tensorboard import SummaryWriter
from ce_vae_test.networks.min_vae import MinVae
from ce_vae_test.trainer.ce_trainer import CeVaeTrainer
from ce_vae_test.sampler.dataset_sampler import SamplerDatasetWithReplacement
parser = argparse.ArgumentParser(description='VAE MNIST Example')
parser.add_argument('--batch-size', type=int, default=128, metavar='N',
help='input batch size for training (default: 128)')
parser.add_argument('--epochs', type=int, default=10, metavar='N',
help='number of epochs to train (default: 10)')
parser.add_argument('--no-cuda', action='store_true', default=False,
help='enables CUDA training')
parser.add_argument('--seed', type=int, default=1, metavar='S',
help='random seed (default: 1)')
parser.add_argument('--log-interval', type=int, default=10, metavar='N',
help='how many batches to wait before logging training status')
args = parser.parse_args()
args.cuda = not args.no_cuda and torch.cuda.is_available()
torch.manual_seed(args.seed)
device = torch.device("cuda" if args.cuda else "cpu")
writer = SummaryWriter()
kwargs = {'num_workers': 1, 'pin_memory': True} if args.cuda else {}
train_sampler = SamplerDatasetWithReplacement(
dataset=datasets.MNIST('../data',
train=True,
download=True,
transform=transforms.ToTensor()),
batch_size=args.batch_size
)
test_sampler = SamplerDatasetWithReplacement(
dataset=datasets.MNIST('../data',
train=False,
transform=transforms.ToTensor()),
batch_size=args.batch_size * 10
)
cevae = MinVae(
input_size=28 * 28,
output_size=10,
latent_dim=2,
hidden_sizes_dec=[5],
device=device
).to(device)
trainer = CeVaeTrainer(
vae=cevae,
num_epochs=300,
train_loader=train_sampler,
test_loader=test_sampler,
writer=writer,
device=device,
alpha=0.90,
lamda=0.22
)
trainer.run()
| 1.992188 | 2 |
appr/commands/logout.py | sergeyberezansky/appr | 31 | 6418 | <filename>appr/commands/logout.py
from __future__ import absolute_import, division, print_function
from appr.auth import ApprAuth
from appr.commands.command_base import CommandBase, PackageSplit
class LogoutCmd(CommandBase):
name = 'logout'
help_message = "logout"
def __init__(self, options):
super(LogoutCmd, self).__init__(options)
self.status = None
self.registry_host = options.registry_host
self.package_parts = options.package_parts
pname = self.package_parts.get('package', None)
namespace = self.package_parts.get('namespace', None)
self.package = None
if pname:
self.package = "%s/%s" % (namespace, pname)
elif namespace:
self.package = namespace
@classmethod
def _add_arguments(cls, parser):
cls._add_registryhost_option(parser)
parser.add_argument('registry', nargs='?', default=None, action=PackageSplit,
help="registry url: quay.io[/namespace][/repo]\n" +
"If namespace and/or repo are passed, creds only logout for them")
def _call(self):
client = self.RegistryClient(self.registry_host)
ApprAuth().delete_token(client.host, scope=self.package)
self.status = "Logout complete"
if self.registry_host != '*':
self.status += " from %s" % self.registry_host
def _render_dict(self):
return {"status": self.status, 'host': self.registry_host, "scope": self.package}
def _render_console(self):
return " >>> %s" % self.status
| 2.515625 | 3 |
musica/apps.py | webnowone/albumMusical | 1 | 6419 | <reponame>webnowone/albumMusical<gh_stars>1-10
from django.apps import AppConfig
class MusicaConfig(AppConfig):
name = 'musica'
| 1.296875 | 1 |
tzwhere/tzwhere.py | tuxiqae/pytzwhere | 115 | 6420 | <reponame>tuxiqae/pytzwhere
#!/usr/bin/env python
'''tzwhere.py - time zone computation from latitude/longitude.
Ordinarily this is loaded as a module and instances of the tzwhere
class are instantiated and queried directly
'''
import collections
try:
import ujson as json # loads 2 seconds faster than normal json
except:
try:
import json
except ImportError:
import simplejson as json
import math
import gzip
import os
import shapely.geometry as geometry
import shapely.prepared as prepared
# We can save about 222MB of RAM by turning our polygon lists into
# numpy arrays rather than tuples, if numpy is installed.
try:
import numpy
WRAP = numpy.asarray
COLLECTION_TYPE = numpy.ndarray
except ImportError:
WRAP = tuple
COLLECTION_TYPE = tuple
# for navigation and pulling values/files
this_dir, this_filename = os.path.split(__file__)
BASE_DIR = os.path.dirname(this_dir)
class tzwhere(object):
SHORTCUT_DEGREES_LATITUDE = 1.0
SHORTCUT_DEGREES_LONGITUDE = 1.0
# By default, use the data file in our package directory
DEFAULT_SHORTCUTS = os.path.join(os.path.dirname(__file__),
'tz_world_shortcuts.json')
DEFAULT_POLYGONS = os.path.join(os.path.dirname(__file__),
'tz_world.json.gz')
def __init__(self, forceTZ=False):
'''
Initializes the tzwhere class.
        @forceTZ: If you want to force the lookup method to return a
        timezone even if the point you are looking up is slightly outside its
        bounds, you need to specify this during initialization already
'''
featureCollection = read_tzworld(tzwhere.DEFAULT_POLYGONS)
pgen = feature_collection_polygons(featureCollection)
self.timezoneNamesToPolygons = collections.defaultdict(list)
self.unprepTimezoneNamesToPolygons = collections.defaultdict(list)
for tzname, poly in pgen:
self.timezoneNamesToPolygons[tzname].append(poly)
for tzname, polys in self.timezoneNamesToPolygons.items():
self.timezoneNamesToPolygons[tzname] = WRAP(polys)
if forceTZ:
self.unprepTimezoneNamesToPolygons[tzname] = WRAP(polys)
with open(tzwhere.DEFAULT_SHORTCUTS, 'r') as f:
self.timezoneLongitudeShortcuts, self.timezoneLatitudeShortcuts = json.load(f)
self.forceTZ = forceTZ
        # Convert things to tuples to save memory
for degree in self.timezoneLatitudeShortcuts:
for tzname in self.timezoneLatitudeShortcuts[degree].keys():
self.timezoneLatitudeShortcuts[degree][tzname] = \
tuple(self.timezoneLatitudeShortcuts[degree][tzname])
for degree in self.timezoneLongitudeShortcuts.keys():
for tzname in self.timezoneLongitudeShortcuts[degree].keys():
self.timezoneLongitudeShortcuts[degree][tzname] = \
tuple(self.timezoneLongitudeShortcuts[degree][tzname])
def tzNameAt(self, latitude, longitude, forceTZ=False):
        '''
        Looks up the appropriate timezone for a given latitude and longitude.

        @latitude: latitude
        @longitude: longitude
        @forceTZ: If forceTZ is true and no valid timezone is found, return
        the closest timezone found instead. This only works if the point falls
        in the same integer-degree grid cell as the timezone.
        '''
if forceTZ:
assert self.forceTZ, 'You need to initialize tzwhere with forceTZ'
latTzOptions = self.timezoneLatitudeShortcuts[str(
(math.floor(latitude / self.SHORTCUT_DEGREES_LATITUDE) *
self.SHORTCUT_DEGREES_LATITUDE)
)]
latSet = set(latTzOptions.keys())
lngTzOptions = self.timezoneLongitudeShortcuts[str(
(math.floor(longitude / self.SHORTCUT_DEGREES_LONGITUDE) *
self.SHORTCUT_DEGREES_LONGITUDE)
)]
lngSet = set(lngTzOptions.keys())
possibleTimezones = lngSet.intersection(latSet)
queryPoint = geometry.Point(longitude, latitude)
if possibleTimezones:
for tzname in possibleTimezones:
if isinstance(self.timezoneNamesToPolygons[tzname], COLLECTION_TYPE):
self.timezoneNamesToPolygons[tzname] = list(
map(lambda p: prepared.prep(
geometry.Polygon(p[0], p[1])
), self.timezoneNamesToPolygons[tzname]))
polyIndices = set(latTzOptions[tzname]).intersection(set(
lngTzOptions[tzname]
))
for polyIndex in polyIndices:
poly = self.timezoneNamesToPolygons[tzname][polyIndex]
if poly.contains_properly(queryPoint):
return tzname
if forceTZ:
return self.__forceTZ__(possibleTimezones, latTzOptions,
lngTzOptions, queryPoint)
def __forceTZ__(self, possibleTimezones, latTzOptions,
lngTzOptions, queryPoint):
distances = []
if possibleTimezones:
if len(possibleTimezones) == 1:
return possibleTimezones.pop()
else:
for tzname in possibleTimezones:
if isinstance(self.unprepTimezoneNamesToPolygons[tzname],
COLLECTION_TYPE):
self.unprepTimezoneNamesToPolygons[tzname] = list(
map(lambda p: p.context if isinstance(p, prepared.PreparedGeometry) else geometry.Polygon(p[0], p[1]),
self.timezoneNamesToPolygons[tzname]))
polyIndices = set(latTzOptions[tzname]).intersection(
set(lngTzOptions[tzname]))
for polyIndex in polyIndices:
poly = self.unprepTimezoneNamesToPolygons[
tzname][polyIndex]
d = poly.distance(queryPoint)
distances.append((d, tzname))
if len(distances) > 0:
return sorted(distances, key=lambda x: x[0])[0][1]
class prepareMap(object):
def __init__(self):
DEFAULT_SHORTCUTS = os.path.join(os.path.dirname(__file__),
'tz_world_shortcuts.json')
DEFAULT_POLYGONS = os.path.join(os.path.dirname(__file__),
'tz_world.json.gz')
featureCollection = read_tzworld(DEFAULT_POLYGONS)
pgen = feature_collection_polygons(featureCollection)
tzNamesToPolygons = collections.defaultdict(list)
for tzname, poly in pgen:
tzNamesToPolygons[tzname].append(poly)
for tzname, polys in tzNamesToPolygons.items():
tzNamesToPolygons[tzname] = \
WRAP(tzNamesToPolygons[tzname])
timezoneLongitudeShortcuts,\
timezoneLatitudeShortcuts = self.construct_shortcuts(
tzNamesToPolygons, tzwhere.SHORTCUT_DEGREES_LONGITUDE,
tzwhere.SHORTCUT_DEGREES_LATITUDE)
with open(DEFAULT_SHORTCUTS, 'w') as f:
json.dump(
(timezoneLongitudeShortcuts, timezoneLatitudeShortcuts), f)
@staticmethod
def construct_shortcuts(timezoneNamesToPolygons,
shortcut_long, shortcut_lat):
''' Construct our shortcuts for looking up polygons. Much faster
than using an r-tree '''
def find_min_max(ls, gridSize):
minLs = (math.floor(min(ls) / gridSize) *
gridSize)
maxLs = (math.floor(max(ls) / gridSize) *
gridSize)
return minLs, maxLs
timezoneLongitudeShortcuts = {}
timezoneLatitudeShortcuts = {}
for tzname in timezoneNamesToPolygons:
tzLngs = []
tzLats = []
for polyIndex, poly in enumerate(timezoneNamesToPolygons[tzname]):
lngs = [x[0] for x in poly[0]]
lats = [x[1] for x in poly[0]]
tzLngs.extend(lngs)
tzLats.extend(lats)
minLng, maxLng = find_min_max(
lngs, shortcut_long)
minLat, maxLat = find_min_max(
lats, shortcut_lat)
degree = minLng
while degree <= maxLng:
if degree not in timezoneLongitudeShortcuts:
timezoneLongitudeShortcuts[degree] =\
collections.defaultdict(list)
timezoneLongitudeShortcuts[degree][tzname].append(polyIndex)
degree = degree + shortcut_long
degree = minLat
while degree <= maxLat:
if degree not in timezoneLatitudeShortcuts:
timezoneLatitudeShortcuts[degree] =\
collections.defaultdict(list)
timezoneLatitudeShortcuts[degree][tzname].append(polyIndex)
degree = degree + shortcut_lat
return timezoneLongitudeShortcuts, timezoneLatitudeShortcuts
def read_tzworld(path):
reader = read_json
return reader(path)
def read_json(path):
with gzip.open(path, "rb") as f:
featureCollection = json.loads(f.read().decode("utf-8"))
return featureCollection
def feature_collection_polygons(featureCollection):
"""Turn a feature collection
into an iterator over polygons.
Given a featureCollection of the kind loaded from the json
input, unpack it to an iterator which produces a series of
(tzname, polygon) pairs, one for every polygon in the
featureCollection. Here tzname is a string and polygon is a
list of floats.
"""
for feature in featureCollection['features']:
tzname = feature['properties']['TZID']
if feature['geometry']['type'] == 'Polygon':
exterior = feature['geometry']['coordinates'][0]
interior = feature['geometry']['coordinates'][1:]
yield (tzname, (exterior, interior))
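# Illustrative usage (assumes the bundled tz_world data files are present;
# the coordinates below are an example point near Memphis, TN):
#
#   tz = tzwhere()
#   tz.tzNameAt(35.29, -89.66)                # -> 'America/Chicago'
#   tz = tzwhere(forceTZ=True)
#   tz.tzNameAt(35.29, -89.66, forceTZ=True)  # resolves near misses to the closest zone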
if __name__ == "__main__":
prepareMap()
| 2.796875 | 3 |
tests/home_assistant/custom_features.py | jre21/mindmeld | 1 | 6421 | <filename>tests/home_assistant/custom_features.py<gh_stars>1-10
from mindmeld.models.helpers import register_query_feature
@register_query_feature(feature_name='average-token-length')
def extract_average_token_length(**args):
"""
    Example query feature that gets the average length of normalized tokens in the query.
Returns:
(function) A feature extraction function that takes a query and
returns the average normalized token length
"""
# pylint: disable=locally-disabled,unused-argument
def _extractor(query, resources):
tokens = query.normalized_tokens
average_token_length = sum([len(t) for t in tokens]) / len(tokens)
return {'average_token_length': average_token_length}
return _extractor
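# Minimal sketch (not part of the original file): the factory returns the
# inner extractor, which accepts any query object exposing `normalized_tokens`;
# the fake query here is purely an illustrative assumption.
#
#   extract = extract_average_token_length()
#   class FakeQuery:
#       normalized_tokens = ['turn', 'on', 'the', 'lights']
#   extract(FakeQuery(), resources=None)  # -> {'average_token_length': 3.75}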
| 2.84375 | 3 |
source/statuscodes.py | woody2371/fishbowl-api | 6 | 6422 | #!/usr/bin/python
# -*- coding: utf-8 -*-
def getstatus(code):
if code == "1000":
value = "Success!"
elif code == "1001":
value = "Unknown Message Received"
elif code == "1002":
value = "Connection to Fishbowl Server was lost"
elif code == "1003":
value = "Some Requests had errors -- now isn't that helpful..."
elif code == "1004":
value = "There was an error with the database."
elif code == "1009":
value = "Fishbowl Server has been shut down."
elif code == "1010":
value = "You have been logged off the server by an administrator."
elif code == "1012":
value = "Unknown request function."
elif code == "1100":
value = "Unknown login error occurred."
elif code == "1110":
value = "A new Integrated Application has been added to Fishbowl Inventory. Please contact your Fishbowl Inventory Administrator to approve this Integrated Application."
elif code == "1111":
value = "This Integrated Application registration key does not match."
elif code == "1112":
value = "This Integrated Application has not been approved by the Fishbowl Inventory Administrator."
elif code == "1120":
value = "Invalid Username or Password."
elif code == "1130":
value = "Invalid Ticket passed to Fishbowl Inventory Server."
elif code == "1131":
value = "Invalid Key value."
elif code == "1140":
value = "Initialization token is not correct type."
elif code == "1150":
value = "Request was invalid"
elif code == "1160":
value = "Response was invalid."
elif code == "1162":
value = "The login limit has been reached for the server's key."
elif code == "1200":
value = "Custom Field is invalid."
elif code == "1500":
value = "The import was not properly formed."
elif code == "1501":
value = "That import type is not supported"
elif code == "1502":
value = "File not found."
elif code == "1503":
value = "That export type is not supported."
elif code == "1504":
value = "File could not be written to."
elif code == "1505":
value = "The import data was of the wrong type."
elif code == "2000":
value = "Was not able to find the Part {0}."
elif code == "2001":
value = "The part was invalid."
elif code == "2100":
value = "Was not able to find the Product {0}."
elif code == "2101":
value = "The product was invalid."
elif code == "2200":
value = "The yield failed."
elif code == "2201":
value = "Commit failed."
elif code == "2202":
value = "Add initial inventory failed."
elif code == "2203":
value = "Can not adjust committed inventory."
elif code == "2300":
value = "Was not able to find the Tag number {0}."
elif code == "2301":
value = "The tag is invalid."
elif code == "2302":
value = "The tag move failed."
elif code == "2303":
value = "Was not able to save Tag number {0}."
elif code == "2304":
value = "Not enough available inventory in Tagnumber {0}."
elif code == "2305":
value = "Tag number {0} is a location."
elif code == "2400":
value = "Invalid UOM."
elif code == "2401":
value = "UOM {0} not found."
elif code == "2402":
value = "Integer UOM {0} cannot have non-integer quantity."
elif code == "2500":
value = "The Tracking is not valid."
elif code == "2510":
value = "Serial number is missing."
elif code == "2511":
value = "Serial number is null."
elif code == "2512":
value = "Serial number is duplicate."
elif code == "2513":
value = "Serial number is not valid."
elif code == "2600":
value = "Location not found."
elif code == "2601":
value = "Invalid location."
elif code == "2602":
value = "Location Group {0} not found."
elif code == "3000":
value = "Customer {0} not found."
elif code == "3001":
value = "Customer is invalid."
elif code == "3100":
value = "Vendor {0} not found."
elif code == "3101":
value = "Vendor is invalid."
elif code == "4000":
value = "There was an error load PO {0}."
elif code == "4001":
value = "Unknow status {0}."
elif code == "4002":
value = "Unknown carrier {0}."
elif code == "4003":
value = "Unknown QuickBooks class {0}."
elif code == "4004":
value = "PO does not have a PO number. Please turn on the auto-assign PO number option in the purchase order module options."
else:
value = 'Unknown status'
return value
| 2.21875 | 2 |
app/src/server/hoge/hoge_api.py | jacob327/docker-flask-nginx-uwsgi-mysql | 0 | 6423 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# [Import start]
from flask import Blueprint, jsonify
# [Import end]
app = Blueprint(
'hoge',
__name__,
url_prefix='/hoge'
)
@app.route('/test')
def hoge():
return "\nhogehoge"
| 2.21875 | 2 |
preinstall_setup/makedeb-11.0.1-1-stable/src/makedeb/utils/missing_apt_dependencies.py | chipbuster/Energy-Languages-Setup | 0 | 6424 | #!/usr/bin/env python3
import apt_pkg
import sys
from apt_pkg import CURSTATE_INSTALLED, version_compare
from operator import lt, le, eq, ge, gt
# Function mappings for relationship operators.
relation_operators = {"<<": lt, "<=": le, "=": eq, ">=": ge, ">>": gt}
# Set up APT cache.
apt_pkg.init()
cache = apt_pkg.Cache(None)
missing_packages = []
for i in sys.argv[1:]:
# Build the package relationship string for use by 'apt-get satisfy'.
relationship_operator = None
for j in ["<=", ">=", "<", ">", "="]:
if j in i:
relationship_operator = j
break
if relationship_operator is not None:
if relationship_operator in ["<", ">"]:
relationship_operator_formatted = j + j
else:
relationship_operator_formatted = j
package = i.split(relationship_operator)
pkgname = package[0]
pkgver = package[1]
package_string = f"{pkgname} ({relationship_operator_formatted} {pkgver})"
else:
pkgname = i
pkgver = None
package_string = pkgname
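    # For example (illustrative): the argument "foo>=1.2" is split into
    # pkgname "foo" and pkgver "1.2", giving the 'apt-get satisfy' style
    # string "foo (>= 1.2)".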
# Check if the package is in the cache.
try:
pkg = cache[pkgname]
except KeyError:
missing_packages += [package_string]
continue
# Get the list of installed and provided packages that are currently installed.
installed_pkg_versions = []
if pkg.current_state == CURSTATE_INSTALLED:
installed_pkg_versions += [pkg]
    for provided in pkg.provides_list:
        parent_pkg = provided[2].parent_pkg
if parent_pkg.current_state == CURSTATE_INSTALLED:
installed_pkg_versions += [parent_pkg]
# If an installed package was found and no relationship operators were used, the dependency has been satisfied.
if (len(installed_pkg_versions) != 0) and (relationship_operator is None):
continue
# Otherwise, check all matching installed packages and see if any of them fit the specified relationship operator.
matched_pkg = False
for i in installed_pkg_versions:
installed_version = i.current_ver.ver_str
version_result = version_compare(installed_version, pkgver)
if relation_operators[relationship_operator_formatted](version_result, 0):
matched_pkg = True
if not matched_pkg:
missing_packages += [package_string]
for i in missing_packages:
print(i)
sys.exit(0)
| 2.515625 | 3 |
cohorts_proj/datasets/migrations/0009_auto_20200824_0617.py | zferic/harmonization-website | 1 | 6425 | # Generated by Django 3.0.7 on 2020-08-24 06:17
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('datasets', '0008_auto_20200821_1427'),
]
operations = [
migrations.AddField(
model_name='rawdar',
name='AsB',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='AsB_BDL',
field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan', 'invalid')], default=0, max_length=3),
preserve_default=False,
),
migrations.AddField(
model_name='rawdar',
name='AsB_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='Ba',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='Ba_BDL',
field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan', 'invalid')], default=0, max_length=3),
preserve_default=False,
),
migrations.AddField(
model_name='rawdar',
name='Ba_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='Cs',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='Cs_BDL',
field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan', 'invalid')], default=0, max_length=3),
preserve_default=False,
),
migrations.AddField(
model_name='rawdar',
name='Cs_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='DMA',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='DMA_BDL',
field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan', 'invalid')], default=0, max_length=3),
preserve_default=False,
),
migrations.AddField(
model_name='rawdar',
name='DMA_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='MMA',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='MMA_BDL',
field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan', 'invalid')], default=0, max_length=3),
preserve_default=False,
),
migrations.AddField(
model_name='rawdar',
name='MMA_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='Sr',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='Sr_BDL',
field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan', 'invalid')], default=0, max_length=3),
preserve_default=False,
),
migrations.AddField(
model_name='rawdar',
name='Sr_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='iAs',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='iAs_BDL',
field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan', 'invalid')], default=0, max_length=3),
preserve_default=False,
),
migrations.AddField(
model_name='rawdar',
name='iAs_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Ag',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Ag_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Al',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Al_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='As',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='As_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Be',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Be_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Cd',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Cd_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Co',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Co_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Cr',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Cr_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Cu',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Cu_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Fe',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Fe_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Hg',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Hg_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Mn',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Mn_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Mo',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Mo_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Ni',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Ni_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Pb',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Pb_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Sb',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Sb_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Se',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Se_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Sn',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Sn_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Tl',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Tl_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='U',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='U_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='V',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='V_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='W',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='W_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Zn',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Zn_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='urine_specific_gravity',
field=models.FloatField(blank=True, null=True),
),
]
| 1.726563 | 2 |
test_hello.py | skvel/pynet_testx | 0 | 6426 | <gh_stars>0
print "Hello World!"
print "Trying my hand at Git!"
print "Something else"
for i in range(10):
print i
| 2.828125 | 3 |
tasks/views.py | TheDim0n/ProjectManager | 0 | 6427 | <reponame>TheDim0n/ProjectManager<filename>tasks/views.py<gh_stars>0
from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.views.generic import DetailView, ListView
from projects.models import Project
from status.models import Status
from .models import Task
from .forms import TaskForm, FilterForm
def _get_projects(user):
projects = [("All", "All"), ('---', '---')]
for item in Project.objects.filter(created_by=user):
projects.append((item.name, item.name))
return projects
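# For two projects named "Alpha" and "Beta" this returns (illustrative):
#   [("All", "All"), ("---", "---"), ("Alpha", "Alpha"), ("Beta", "Beta")]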
def _get_statuses():
statuses = [("All", "All")]
for item in Status.objects.all():
statuses.append((item.text, item.text))
return statuses
class TaskListView(LoginRequiredMixin, ListView):
login_url = '/users/register'
model = Task
context_object_name = 'tasks'
template_name = 'tasks/index.html'
ordering = ['finish_date']
def get_queryset(self):
queryset = super().get_queryset()
for obj in queryset:
obj.check_expired()
return queryset
def get_context_data(self, *args, **kwargs):
try:
project_name = self.request.GET['project']
except KeyError:
project_name = ''
try:
status_name = self.request.GET['status']
except KeyError:
status_name = ''
if self.request.user.is_authenticated:
tasks = Task.objects.filter(created_by=self.request.user)
if project_name and project_name != "All":
if project_name == '---':
tasks = tasks.filter(level=None)
else:
tasks = tasks.filter(level__project__name=project_name)
if status_name and status_name != "All":
tasks = tasks.filter(status__text=status_name)
status_list = Status.objects.all()
last_initial = {
'status': status_name,
'project': project_name,
}
form = FilterForm(initial=last_initial)
form.fields['project'].choices = _get_projects(user=self.request.user)
form.fields['status'].choices = _get_statuses()
context = super(TaskListView, self).get_context_data(*args, **kwargs)
context['status_list'] = status_list
context['tasks'] = tasks
context['filter_form'] = form
context['task_form'] = TaskForm
return context
class TaskDetailView(DetailView):
model = Task
template_name = 'tasks/details.html'
def get_object(self):
obj = super().get_object()
obj.check_expired()
return obj
def get_context_data(self, *args, **kwargs):
initial_content = {
'name': self.object.name,
'start_date': self.object.start_date,
'finish_date': self.object.finish_date,
'status': self.object.status,
'description': self.object.description,
}
context = super(TaskDetailView, self).get_context_data(*args, **kwargs)
context['task_form'] = TaskForm(initial=initial_content)
return context
class TaskCreateView(LoginRequiredMixin, CreateView):
login_url = '/users/register'
model = Task
form_class = TaskForm
template_name = 'tasks/index.html'
def form_valid(self, form):
form.instance.created_by = self.request.user
return super().form_valid(form)
class TaskUpdateView(LoginRequiredMixin, UpdateView):
login_url = '/users/register'
model = Task
form_class = TaskForm
template_name = "tasks/update_task.html"
def form_valid(self, form):
self.object.check_expired()
return super().form_valid(form)
class TaskDeleteView(DeleteView):
model = Task
template_name = "tasks/delete_task.html"
| 2.203125 | 2 |
smoke/noaa/get_smokeplume_counts.py | minnieteng/smoke_project | 0 | 6428 | import os
import math
import time
import geohash
import geojson
from geojson import MultiLineString
from shapely import geometry
import shapefile
import numpy
import datetime as dt
import pandas as pd
import logging
logger = logging.getLogger(__name__)
source_shape_file_path = "C:/temp/2018/"
threshold = 60*60
cols = ['start', 'end','start_epoch_round','end_epoch_round','start_epoch_round_dt','end_epoch_round_dt']
times = []
for root,dirs,files in os.walk(source_shape_file_path):
for file in files:
        with open(os.path.join(root, file), "rb") as auto:  # binary mode: .shp and friends are not text
if file.endswith(".shp"):
try:
filename = file.replace(".shp","")
shape=shapefile.Reader(source_shape_file_path+filename+"/"+file)
for r in shape.iterRecords():
start_time = dt.datetime.strptime(r[1], '%Y%j %H%M')
end_time = dt.datetime.strptime(r[2], '%Y%j %H%M')
epoch_s = dt.datetime.timestamp(dt.datetime.strptime(r[1], '%Y%j %H%M'))
epoch_e = dt.datetime.timestamp(dt.datetime.strptime(r[2], '%Y%j %H%M'))
# sometimes start is later than end time, we'll assume the earlier time is start
epoch_end_round = round(max(epoch_s,epoch_e) / threshold) * threshold
epoch_start_round = round(min(epoch_s,epoch_e) / threshold) * threshold
epoch_end_round_dt = dt.datetime.utcfromtimestamp(3600 * ((max(epoch_s,epoch_e) + 1800) // 3600))
epoch_start_round_dt = dt.datetime.utcfromtimestamp(3600 * ((min(epoch_s,epoch_e) + 1800) // 3600))
times.append([start_time,end_time,epoch_start_round,epoch_end_round,epoch_start_round_dt,epoch_end_round_dt])
break
                except:
                    logger.error('failed to parse file: ' + os.path.join(root, file))
                    continue
df = pd.DataFrame(times, columns=cols)
df.to_csv('noaa_times.csv')
| 2.3125 | 2 |
notes/OOBall/OOBall/main-demo.py | KRHS-GameProgramming-2015/Manpac | 0 | 6429 | import pygame_sdl2
pygame_sdl2.import_as_pygame()
import pygame
import os
import random
import math
from Ball import Ball
def save_state(balls):
"""
Saves the game state.
"""
stateString = ""
with open("state.txt", "w") as f:
for ball in balls:
stateString += "{} {} {} {} {}".format(ball.imageFile,
ball.speedx,
ball.speedy,
ball.rect.centerx,
ball.rect.centery)
stateString += '\n'
f.write(stateString)
def load_state():
    try:
        objects = []
        with open("state.txt", "r") as f:
            for line in f.read().splitlines():
                img, sx, sy, x, y = line.split()
                objects += [Ball(img, [int(sx), int(sy)], [int(x), int(y)])]
        return objects
    except:
        return None
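# State file format (as written by save_state above): one ball per line with
# five space-separated fields,
#   <image file> <speed x> <speed y> <center x> <center y>
# e.g. "ball.png 3 -2 640 360".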
def delete_state():
if os.path.exists("state.txt"):
os.unlink("state.txt")
def main():
pygame.init()
clock = pygame.time.Clock()
infoObject = pygame.display.Info()
#print infoObject.current_w
width = infoObject.current_w
height = infoObject.current_h
size = width, height
bgColor = r,g,b = 0, 0, 0
screen = pygame.display.set_mode(size)
pygame.display.set_mode()
balls = load_state()
delete_state()
if balls == None:
balls = []
ballTimer = 0
ballTimerMax = .75 * 60
done = False
sleeping = False
font = pygame.font.Font("DejaVuSans.ttf", 124)
text = font.render("Start", True, (255, 255, 255, 255))
textRect = text.get_rect(center = (width/2, height/2))
while not done:
for event in pygame.event.get():
text = font.render(str(event.type), True, (255, 255, 255, 255))
if event.type == pygame.QUIT:
done = True
elif event.type == pygame.KEYDOWN and event.key == pygame.K_AC_BACK:
done = True
elif event.type == pygame.APP_WILLENTERBACKGROUND:
# The app is about to go to sleep. It should save state, cancel
# any timers, and stop drawing the screen until an APP_DIDENTERFOREGROUND
# event shows up.
save_state(balls)
sleeping = True
elif event.type == pygame.APP_DIDENTERFOREGROUND:
# The app woke back up. Delete the saved state (we don't need it),
# restore any times, and start drawing the screen again.
delete_state()
sleeping = False
# For now, we have to re-open the window when entering the
# foreground.
screen = pygame.display.set_mode((1280, 720))
if not sleeping:
ballTimer += 1
if ballTimer >= ballTimerMax:
ballTimer = 0
ballSpeed = [random.randint(-5, 5),
random.randint(-5, 5)]
ballPos = [random.randint(100, width-100),
random.randint(100, height-100)]
balls += [Ball("ball.png",ballSpeed,ballPos)]
save_state(balls)
for ball in balls:
ball.move()
ball.collideScreen(size)
for first in balls:
for second in balls:
if first != second:
first.collideBall(second)
bgColor = r,g,b
screen.fill(bgColor)
for ball in balls:
screen.blit(ball.image, ball.rect)
screen.blit(text, textRect)
pygame.display.flip()
clock.tick(60)
if done:
break
if __name__ == "__main__":
main()
| 3.140625 | 3 |
sprt.py | vdbergh/pentanomial | 3 | 6430 | from __future__ import division
import math, copy
import argparse
from brownian import Brownian
import scipy
import LLRcalc
class sprt:
def __init__(self, alpha=0.05, beta=0.05, elo0=0, elo1=5, elo_model="logistic"):
assert elo_model in ("logistic", "normalized")
self.elo_model = elo_model
self.a = math.log(beta / (1 - alpha))
self.b = math.log((1 - beta) / alpha)
self.elo0 = elo0
self.elo1 = elo1
self.clamped = False
self.LLR_drift_variance = LLRcalc.LLR_drift_variance_alt2
def elo_to_score(self, elo):
"""
"elo" is expressed in our current elo_model.
"""
if self.elo_model == "normalized":
nt = elo / LLRcalc.nelo_divided_by_nt
return nt * self.sigma_pg + 0.5
else:
return LLRcalc.L_(elo)
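    # Illustrative (assumption): for the logistic model, LLRcalc.L_ is taken to
    # be the usual Elo expected-score curve L(elo) = 1 / (1 + 10 ** (-elo / 400)),
    # so elo_to_score(0) == 0.5 and elo_to_score(200) is roughly 0.76.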
def lelo_to_elo(self, lelo):
"""
For external use. "elo" is expressed in our current elo_model.
"lelo" is logistic.
"""
if self.elo_model == "logistic":
return lelo
score = LLRcalc.L_(lelo)
nt = (score - 0.5) / self.sigma_pg
return nt * LLRcalc.nelo_divided_by_nt
def set_state(self, results):
N, self.pdf = LLRcalc.results_to_pdf(results)
if self.elo_model == "normalized":
mu, var = LLRcalc.stats(self.pdf) # code duplication with LLRcalc
if len(results) == 5:
self.sigma_pg = (2 * var) ** 0.5
elif len(results) == 3:
self.sigma_pg = var ** 0.5
else:
assert False
self.s0, self.s1 = [self.elo_to_score(elo) for elo in (self.elo0, self.elo1)]
mu_LLR, var_LLR = self.LLR_drift_variance(self.pdf, self.s0, self.s1, None)
# llr estimate
self.llr = N * mu_LLR
self.T = N
# now normalize llr (if llr is not legal then the implications
# of this are unclear)
slope = self.llr / N
if self.llr > 1.03 * self.b or self.llr < 1.03 * self.a:
self.clamped = True
if self.llr < self.a:
self.T = self.a / slope
self.llr = self.a
elif self.llr > self.b:
self.T = self.b / slope
self.llr = self.b
def outcome_prob(self, elo):
"""
        The probability that a test with the given elo has a worse outcome
        (faster fail, slower pass, or a pass changed into a fail).
"""
s = LLRcalc.L_(elo)
mu_LLR, var_LLR = self.LLR_drift_variance(self.pdf, self.s0, self.s1, s)
sigma_LLR = math.sqrt(var_LLR)
return Brownian(a=self.a, b=self.b, mu=mu_LLR, sigma=sigma_LLR).outcome_cdf(
T=self.T, y=self.llr
)
def lower_cb(self, p):
"""
Maximal elo value such that the observed outcome of the test has probability
less than p.
"""
avg_elo = (self.elo0 + self.elo1) / 2
delta = self.elo1 - self.elo0
N = 30
# Various error conditions must be handled better here!
while True:
elo0 = max(avg_elo - N * delta, -1000)
elo1 = min(avg_elo + N * delta, 1000)
try:
sol, res = scipy.optimize.brentq(
lambda elo: self.outcome_prob(elo) - (1 - p),
elo0,
elo1,
full_output=True,
disp=False,
)
except ValueError:
if elo0 > -1000 or elo1 < 1000:
N *= 2
continue
else:
if self.outcome_prob(elo0) - (1 - p) > 0:
return elo1
else:
return elo0
assert res.converged
break
return sol
def analytics(self, p=0.05):
ret = {}
ret["clamped"] = self.clamped
ret["a"] = self.a
ret["b"] = self.b
ret["elo"] = self.lower_cb(0.5)
ret["ci"] = [self.lower_cb(p / 2), self.lower_cb(1 - p / 2)]
ret["LOS"] = self.outcome_prob(0)
ret["LLR"] = self.llr
return ret
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"--alpha", help="probability of a false positve", type=float, default=0.05
)
parser.add_argument(
"--beta", help="probability of a false negative", type=float, default=0.05
)
parser.add_argument(
"--elo0", help="H0 (expressed in LogisticElo)", type=float, default=0.0
)
parser.add_argument(
"--elo1", help="H1 (expressed in LogisticElo)", type=float, default=5.0
)
parser.add_argument("--level", help="confidence level", type=float, default=0.95)
parser.add_argument(
"--elo-model",
help="logistic or normalized",
choices=['logistic', 'normalized'],
default='logistic',
)
parser.add_argument(
"--results",
help="trinomial of pentanomial frequencies, low to high",
nargs="*",
type=int,
required=True,
)
args = parser.parse_args()
results = args.results
if len(results) != 3 and len(results) != 5:
parser.error("argument --results: expected 3 or 5 arguments")
alpha = args.alpha
beta = args.beta
elo0 = args.elo0
elo1 = args.elo1
elo_model = args.elo_model
p = 1 - args.level
s = sprt(alpha=alpha, beta=beta, elo0=elo0, elo1=elo1, elo_model=elo_model)
s.set_state(results)
a = s.analytics(p)
print("Design parameters")
print("=================")
print("False positives : %4.2f%%" % (100 * alpha,))
print("False negatives : %4.2f%%" % (100 * beta,))
print("[Elo0,Elo1] : [%.2f,%.2f]" % (elo0, elo1))
print("Confidence level : %4.2f%%" % (100 * (1 - p),))
print("Elo model : %s" % elo_model)
print("Estimates")
print("=========")
print("Elo : %.2f" % a["elo"])
print(
"Confidence interval : [%.2f,%.2f] (%4.2f%%)"
% (a["ci"][0], a["ci"][1], 100 * (1 - p))
)
print("LOS : %4.2f%%" % (100 * a["LOS"],))
print("Context")
print("=======")
print(
"LLR [u,l] : %.2f %s [%.2f,%.2f]"
% (a["LLR"], "(clamped)" if a["clamped"] else "", a["a"], a["b"])
)
| 2.734375 | 3 |
tools/hci_throughput/hci.py | t3zeng/mynewt-nimble | 0 | 6431 | <reponame>t3zeng/mynewt-nimble<filename>tools/hci_throughput/hci.py
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from dataclasses import dataclass
import struct
from binascii import unhexlify
import random
############
# DEFINES
############
AF_BLUETOOTH = 31
HCI_CHANNEL_USER = 1
HCI_COMMAND_PACKET = 0x01
HCI_ACL_DATA_PACKET = 0x02
HCI_EVENT_PACKET = 0x04
HCI_EV_CODE_DISCONN_CMP = 0x05
HCI_EV_CODE_CMD_CMP = 0x0e
HCI_EV_CODE_CMD_STATUS = 0x0f
HCI_EV_CODE_LE_META_EVENT = 0x3e
HCI_SUBEV_CODE_LE_ENHANCED_CONN_CMP = 0x0a
HCI_SUBEV_CODE_LE_DATA_LEN_CHANGE = 0x07
HCI_SUBEV_CODE_LE_PHY_UPDATE_CMP = 0x0c
HCI_SUBEV_CODE_LE_CHAN_SEL_ALG = 0x14
HCI_EV_NUM_COMP_PKTS = 0x13
CONN_FAILED_TO_BE_ESTABLISHED = 0x3e
CONN_TIMEOUT = 0x08
OGF_HOST_CTL = 0x03
OCF_SET_EVENT_MASK = 0x0001
OCF_RESET = 0X0003
OGF_INFO_PARAM = 0x04
OCF_READ_LOCAL_COMMANDS = 0x0002
OCF_READ_BD_ADDR = 0x0009
OGF_LE_CTL = 0x08
OCF_LE_SET_EVENT_MASK = 0x0001
OCF_LE_READ_BUFFER_SIZE_V1 = 0x0002
OCF_LE_READ_BUFFER_SIZE_V2 = 0x0060
OCF_LE_SET_RANDOM_ADDRESS = 0x0005
OCF_LE_SET_ADVERTISING_PARAMETERS = 0x0006
OCF_LE_SET_ADVERTISE_ENABLE = 0x000a
OCF_LE_SET_SCAN_PARAMETERS = 0x000b
OCF_LE_SET_SCAN_ENABLE = 0x000c
OCF_LE_CREATE_CONN = 0x000d
OCF_LE_SET_DATA_LEN = 0x0022
OCF_LE_READ_SUGGESTED_DFLT_DATA_LEN = 0x0023
OCF_LE_READ_MAX_DATA_LEN = 0x002f
OCF_LE_READ_PHY = 0x0030
OCF_LE_SET_DFLT_PHY = 0x0031
OCF_LE_SET_PHY = 0x0032
OGF_VENDOR_SPECIFIC = 0x003f
BLE_HCI_OCF_VS_RD_STATIC_ADDR = 0x0001
PUBLIC_ADDRESS_TYPE = 0
STATIC_RANDOM_ADDRESS_TYPE = 1
WAIT_FOR_EVENT_TIMEOUT = 5
WAIT_FOR_EVENT_CONN_TIMEOUT = 25
############
# GLOBAL VAR
############
num_of_bytes_to_send = None # based on supported_max_tx_octets
num_of_packets_to_send = None
events_list = []
bdaddr = '00:00:00:00:00:00'
static_addr = '00:00:00:00:00:00'
le_read_buffer_size = None
conn_handle = 0
requested_tx_octets = 1
requested_tx_time = 1
suggested_dflt_data_len = None
max_data_len = None
phy = None
ev_num_comp_pkts = None
num_of_completed_packets_cnt = 0
num_of_completed_packets_time = 0
############
# FUNCTIONS
############
def get_opcode(ogf: int, ocf: int):
return ((ocf & 0x03ff)|(ogf << 10))
def get_ogf_ocf(opcode: int):
ogf = opcode >> 10
ocf = opcode & 0x03ff
return ogf, ocf
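# Illustrative round trip: get_opcode(OGF_LE_CTL, OCF_LE_SET_EVENT_MASK) packs
# OGF 0x08 / OCF 0x0001 into opcode 0x2001, and get_ogf_ocf(0x2001) recovers
# (0x08, 0x0001).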
def cmd_addr_to_ba(addr_str: str):
return unhexlify("".join(addr_str.split(':')))[::-1]
def ba_addr_to_str(addr_ba: bytearray):
addr_str = addr_ba.hex().upper()
return ':'.join(addr_str[i:i+2] for i in range(len(addr_str), -2, -2))[1:]
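# Illustrative round trip: cmd_addr_to_ba('AA:BB:CC:DD:EE:FF') yields the
# little-endian bytes ff ee dd cc bb aa (as transmitted on the wire), and
# ba_addr_to_str on such a bytearray returns 'AA:BB:CC:DD:EE:FF' again.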
def gen_static_rand_addr():
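    # A BLE static random address must have its two most significant bits set
    # to 1, and the remaining 46 bits must be neither all zeros nor all ones;
    # the loop below resamples until such a bit pattern is found.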
while True:
x = [random.randint(0,1) for _ in range(0,48)]
if 0 in x[:-2] and 1 in x[:-2]:
x[0] = 1
x[1] = 1
break
addr_int = int("".join([str(x[i]) for i in range(0,len(x))]), 2)
addr_hex = "{0:0{1}x}".format(addr_int, 12)
addr = ":".join(addr_hex[i:i+2] for i in range(0, len(addr_hex), 2))
return addr.upper()
############
# GLOBAL VAR CLASSES
############
@dataclass
class Suggested_Dflt_Data_Length():
status: int
suggested_max_tx_octets: int
suggested_max_tx_time: int
def __init__(self):
self.set()
def set(self, status=0, suggested_max_tx_octets=0, suggested_max_tx_time=0):
self.status = status
self.suggested_max_tx_octets = suggested_max_tx_octets
self.suggested_max_tx_time = suggested_max_tx_time
@dataclass
class Max_Data_Length():
status: int
supported_max_tx_octets: int
supported_max_tx_time: int
supported_max_rx_octets: int
supported_max_rx_time: int
def __init__(self):
self.set()
def set(self, status=0, supported_max_tx_octets=0, supported_max_tx_time=0,
supported_max_rx_octets=0, supported_max_rx_time=0):
self.status = status
self.supported_max_tx_octets = supported_max_tx_octets
self.supported_max_tx_time = supported_max_tx_time
self.supported_max_rx_octets = supported_max_rx_octets
self.supported_max_rx_time = supported_max_rx_time
@dataclass
class LE_Read_Buffer_Size:
status: int
le_acl_data_packet_length: int
total_num_le_acl_data_packets: int
iso_data_packet_len: int
total_num_iso_data_packets: int
def __init__(self):
self.set()
def set(self, status=0, le_acl_data_packet_length=0,
total_num_le_acl_data_packets=0, iso_data_packet_len=0,
total_num_iso_data_packets=0):
self.status = status
self.le_acl_data_packet_length = le_acl_data_packet_length
self.total_num_le_acl_data_packets = total_num_le_acl_data_packets
self.iso_data_packet_len = iso_data_packet_len
self.total_num_iso_data_packets = total_num_iso_data_packets
@dataclass
class LE_Read_PHY:
status: int
connection_handle: int
tx_phy: int
rx_phy: int
def __init__(self):
self.set()
def set(self, status=0, connection_handle=0, tx_phy=0, rx_phy=0):
self.status = status
self.connection_handle = connection_handle
self.tx_phy = tx_phy
self.rx_phy = rx_phy
############
# EVENTS
############
@dataclass
class HCI_Ev_Disconn_Complete:
status: int
connection_handle: int
reason: int
def __init__(self):
self.set()
def set(self, status=0, connection_handle=0, reason=0):
self.status = status
self.connection_handle = connection_handle
self.reason = reason
@dataclass
class HCI_Ev_Cmd_Complete:
num_hci_command_packets: int
opcode: int
return_parameters: int
def __init__(self):
self.set()
def set(self, num_hci_cmd_packets=0, opcode=0, return_parameters=b''):
self.num_hci_command_packets = num_hci_cmd_packets
self.opcode = opcode
self.return_parameters = return_parameters
@dataclass
class HCI_Ev_Cmd_Status:
status: int
num_hci_command_packets: int
opcode: int
def __init__(self):
self.set()
def set(self, status = 0, num_hci_cmd_packets=0, opcode=0):
self.status = status
self.num_hci_command_packets = num_hci_cmd_packets
self.opcode = opcode
@dataclass
class HCI_Ev_LE_Meta:
subevent_code: int
def __init__(self):
self.set()
def set(self, subevent_code=0):
self.subevent_code = subevent_code
@dataclass
class HCI_Ev_LE_Enhanced_Connection_Complete(HCI_Ev_LE_Meta):
status: int
connection_handle: int
role: int
peer_address_type: int
peer_address: str
local_resolvable_private_address: int
peer_resolvable_private_address: int
connection_interval: int
peripheral_latency: int
supervision_timeout: int
central_clock_accuracy: int
def __init__(self):
self.set()
def set(self, subevent_code=0, status=0, connection_handle=0, role=0,
peer_address_type=0, peer_address='00:00:00:00:00:00',
local_resolvable_private_address='00:00:00:00:00:00',
peer_resolvable_private_address='00:00:00:00:00:00',
connection_interval=0, peripheral_latency=0, supervision_timeout=0,
central_clock_accuracy=0):
super().set(subevent_code)
self.status = status
self.connection_handle = connection_handle
self.role = role
self.peer_address_type = peer_address_type
self.peer_address = peer_address
self.local_resolvable_private_address = local_resolvable_private_address
self.peer_resolvable_private_address = peer_resolvable_private_address
self.connection_interval = connection_interval
self.peripheral_latency = peripheral_latency
self.supervision_timeout = supervision_timeout
self.central_clock_accuracy = central_clock_accuracy
@dataclass
class HCI_Ev_LE_Data_Length_Change(HCI_Ev_LE_Meta):
conn_handle: int
max_tx_octets: int
max_tx_time: int
max_rx_octets: int
max_rx_time: int
triggered: int
def __init__(self):
self.set()
def set(self, subevent_code=0, conn_handle=0, max_tx_octets=0,
max_tx_time=0, max_rx_octets=0, max_rx_time=0, triggered=0):
super().set(subevent_code)
self.conn_handle = conn_handle
self.max_tx_octets = max_tx_octets
self.max_tx_time = max_tx_time
self.max_rx_octets = max_rx_octets
self.max_rx_time = max_rx_time
self.triggered = triggered
@dataclass
class HCI_Ev_LE_PHY_Update_Complete(HCI_Ev_LE_Meta):
status: int
connection_handle: int
tx_phy: int
rx_phy: int
def __init__(self):
self.set()
def set(self, subevent_code=0, status=0, connection_handle=0,
tx_phy=0, rx_phy=0):
super().set(subevent_code)
self.status = status
self.connection_handle = connection_handle
self.tx_phy = tx_phy
self.rx_phy = rx_phy
@dataclass
class HCI_Number_Of_Completed_Packets:
num_handles: int
connection_handle: int
num_completed_packets: int
def __init__(self):
self.set()
def set(self, num_handles=0, connection_handle=0, num_completed_packets=0):
self.num_handles = num_handles
self.connection_handle = connection_handle
self.num_completed_packets = num_completed_packets
@dataclass
class HCI_Ev_LE_Chan_Sel_Alg(HCI_Ev_LE_Meta):
connection_handle: int
algorithm: int
def __init__(self):
self.set()
def set(self, subevent_code=0, connection_handle=0, algorithm=0):
super().set(subevent_code)
self.connection_handle = connection_handle
self.algorithm = algorithm
############
# PARAMETERS
############
@dataclass
class HCI_Advertising:
advertising_interval_min: int
advertising_interval_max: int
advertising_type: int
own_address_type: int
peer_address_type: int
peer_address: str
advertising_channel_map: int
advertising_filter_policy: int
ba_full_message: bytearray
def __init__(self):
self.set()
def set(self, advertising_interval_min=0, advertising_interval_max=0, \
advertising_type=0, own_address_type=0, peer_address_type=0, \
peer_address='00:00:00:00:00:00', advertising_channel_map=0, \
advertising_filter_policy=0):
self.advertising_interval_min = advertising_interval_min
self.advertising_interval_max = advertising_interval_max
self.advertising_type = advertising_type
self.own_address_type = own_address_type
self.peer_address_type = peer_address_type
self.peer_address = peer_address
self.advertising_channel_map = advertising_channel_map
self.advertising_filter_policy = advertising_filter_policy
self.ba_full_message = bytearray(struct.pack('<HHBBBBB',
advertising_interval_min, advertising_interval_max,
advertising_type, own_address_type, peer_address_type,
advertising_channel_map, advertising_filter_policy))
peer_addr_ba = cmd_addr_to_ba(peer_address)
self.ba_full_message[7:7] = peer_addr_ba
@dataclass
class HCI_Scan:
le_scan_type: int
le_scan_interval: int
le_scan_window: int
own_address_type: int
scanning_filter_policy: int
ba_full_message: bytearray
def __init__(self):
self.set()
def set(self, le_scan_type=0, le_scan_interval=0, le_scan_window=0,
own_address_type=0, scanning_filter_policy=0):
self.le_scan_type = le_scan_type
self.le_scan_interval = le_scan_interval
self.le_scan_window = le_scan_window
self.own_address_type = own_address_type
self.scanning_filter_policy = scanning_filter_policy
self.ba_full_message = bytearray(struct.pack('<BHHBB',le_scan_type,
le_scan_interval, le_scan_window, own_address_type,
scanning_filter_policy))
@dataclass
class HCI_Connect:
le_scan_interval: int
le_scan_window: int
initiator_filter_policy: int
peer_address_type: int
peer_address: str
own_address_type: int
connection_interval_min: int
connection_interval_max: int
max_latency: int
supervision_timeout: int
min_ce_length: int
max_ce_length: int
ba_full_message: bytearray
def __init__(self):
self.set()
def set(self, le_scan_interval=0, le_scan_window=0, \
initiator_filter_policy=0, peer_address_type=0, \
peer_address='00:00:00:00:00:00', own_address_type=0, \
connection_interval_min=0, connection_interval_max=0, \
max_latency=0, supervision_timeout=0, min_ce_length=0, \
max_ce_length=0):
self.le_scan_interval = le_scan_interval
self.le_scan_window = le_scan_window
self.initiator_filter_policy = initiator_filter_policy
self.peer_address_type = peer_address_type
self.peer_address = peer_address
self.own_address_type = own_address_type
self.connection_interval_min = connection_interval_min
self.connection_interval_max = connection_interval_max
self.max_latency = max_latency
self.supervision_timeout = supervision_timeout
self.min_ce_length = min_ce_length
self.max_ce_length = max_ce_length
self.ba_full_message = bytearray(struct.pack('<HHBBBHHHHHH',
le_scan_interval, le_scan_window, initiator_filter_policy,
peer_address_type, own_address_type, connection_interval_min,
connection_interval_max, max_latency,supervision_timeout,
min_ce_length, max_ce_length))
peer_addr_ba = cmd_addr_to_ba(peer_address)
self.ba_full_message[6:6] = peer_addr_ba
############
# RX / TX
############
@dataclass
class HCI_Receive:
packet_type: int
def __init__(self):
self.set()
def set(self,packet_type=0):
self.packet_type = packet_type
@dataclass
class HCI_Recv_Event_Packet(HCI_Receive):
ev_code: int
packet_len: int
recv_data: bytearray
current_event: None
def __init__(self):
self.set()
def set(self,packet_type=0, ev_code=0, packet_len=0,
recv_data=bytearray(256)):
super().set(packet_type)
self.ev_code = ev_code
self.packet_len = packet_len
self.recv_data = recv_data
self.recv_data = recv_data[:packet_len]
@dataclass
class HCI_Recv_ACL_Data_Packet(HCI_Receive):
connection_handle: int
pb_flag: int
bc_flag: int
data_total_len: int
data: bytearray
def __init__(self):
self.set()
def set(self, packet_type=0, connection_handle=0,
pb_flag=0, bc_flag=0, total_data_len=0, data=b''):
super().set(packet_type)
self.connection_handle = connection_handle
self.pb_flag = pb_flag
self.bc_flag = bc_flag
self.data_total_len = total_data_len
self.data = data
@dataclass
class HCI_Recv_L2CAP_Data:
pdu_length: int
channel_id: int
data: bytearray
def __init__(self):
self.set()
def set(self, pdu_length=0, channel_id=0, data=b''):
self.pdu_length = pdu_length
self.channel_id = channel_id
self.data = data
@dataclass
class HCI_Cmd_Send:
packet_type: int
ogf: int
ocf: int
packet_len: int
data: bytearray
ba_full_message: bytearray
def __init__(self):
self.set()
def set(self, ogf=0, ocf=0, data=b''):
self.packet_type = HCI_COMMAND_PACKET
self.ogf = ogf
self.ocf = ocf
self.opcode = get_opcode(ogf, ocf)
self.packet_len = len(data)
self.data = data
self.ba_full_message = bytearray(struct.pack('<BHB',
self.packet_type, self.opcode, self.packet_len))
self.ba_full_message.extend(self.data)
@dataclass
class HCI_ACL_Data_Send:
packet_type: int
connection_handle: int
pb_flag: int
bc_flag: int
data_total_length: int
data: bytearray
ba_full_message: bytearray
def __init__(self):
self.set()
def set(self, connection_handle=0, pb_flag=0b00, bc_flag=0b00, data=b''):
self.packet_type = HCI_ACL_DATA_PACKET
self.connection_handle = connection_handle
self.pb_flag = pb_flag
self.bc_flag = bc_flag
self.data_total_length = len(data)
self.data = data
self.ba_full_message = bytearray(struct.pack('<BHH',
self.packet_type,
                                         ((self.connection_handle & 0x0fff) |  # 12-bit connection handle mask
(self.pb_flag << 12) |
(self.bc_flag << 14)),
self.data_total_length))
self.ba_full_message.extend(self.data)
@dataclass
class L2CAP_Data_Send:
pdu_length: int
channel_id: int
data: bytearray
ba_full_message: bytearray
def __init__(self):
self.set()
def set(self, pdu_length=0, channel_id=0, data=b''):
if not pdu_length:
self.pdu_length = len(data)
else:
self.pdu_length = pdu_length
self.channel_id = channel_id
self.data = data
fmt_conf = "<HH"
self.ba_full_message = bytearray(struct.pack(fmt_conf,
self.pdu_length, self.channel_id))
self.ba_full_message.extend(data)
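# ---------------------------------------------------------------------------
# Hedged usage sketch (added for illustration, not part of the original
# module): an L2CAP payload is framed by L2CAP_Data_Send, then wrapped in an
# ACL packet whose ba_full_message is what would be written to the HCI
# transport. The channel id (0x0004, ATT) and the connection handle are
# illustrative assumptions.
def _example_build_acl_packet():
    l2cap = L2CAP_Data_Send()
    l2cap.set(channel_id=0x0004, data=b'\x02\x00\x02')  # hypothetical ATT payload
    acl = HCI_ACL_Data_Send()
    acl.set(connection_handle=0x0040, data=l2cap.ba_full_message)
    return acl.ba_full_message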
| 1.53125 | 2 |
examples/dataproc/query.py | populationgenomics/analysis-runner | 0 | 6432 | """Simple Hail query example."""
import click
import hail as hl
from bokeh.io.export import get_screenshot_as_png
from analysis_runner import output_path
GNOMAD_HGDP_1KG_MT = (
'gs://gcp-public-data--gnomad/release/3.1/mt/genomes/'
'gnomad.genomes.v3.1.hgdp_1kg_subset_dense.mt'
)
@click.command()
@click.option('--rerun', help='Whether to overwrite cached files', is_flag=True)
def query(rerun):
"""Query script entry point."""
hl.init(default_reference='GRCh38')
sample_qc_path = output_path('sample_qc.mt')
if rerun or not hl.hadoop_exists(sample_qc_path):
mt = hl.read_matrix_table(GNOMAD_HGDP_1KG_MT)
mt = mt.head(100, n_cols=100)
mt_qc = hl.sample_qc(mt)
mt_qc.write(sample_qc_path)
mt_qc = hl.read_matrix_table(sample_qc_path)
plot_filename = output_path('call_rate_plot.png', 'web')
if rerun or not hl.hadoop_exists(plot_filename):
call_rate_plot = hl.plot.histogram(
mt_qc.sample_qc.call_rate, range=(0, 1), legend='Call rate'
)
with hl.hadoop_open(plot_filename, 'wb') as f:
get_screenshot_as_png(call_rate_plot).save(f, format='PNG')
if __name__ == '__main__':
query() # pylint: disable=no-value-for-parameter
| 2.515625 | 3 |
ptpip/ptpip.py | darkarnium/ptpip | 0 | 6433 | <filename>ptpip/ptpip.py
import uuid
import time
import socket
import struct
class PtpIpConnection(object):
"""docstring for PtpIP"""
def __init__(self):
super(PtpIpConnection, self).__init__()
self.session = None
self.session_events = None
self.session_id = None
self.cmd_queue = []
self.event_queue = []
self.object_queue = []
def open(self, host='192.168.1.1', port=15740):
        # Open both sessions: the first for commands, the second for events
self.session = self.connect(host=host, port=port)
self.send_recieve_ptpip_packet(PtpIpInitCmdReq(), self.session)
self.session_events = self.connect(host=host, port=port)
self.send_recieve_ptpip_packet(PtpIpEventReq(), self.session_events)
# 0x1002 OpenSession
        ptip_cmd = PtpIpCmdRequest(cmd=0x1002, param1=struct.unpack('<I', self.session_id)[0])
self.send_recieve_ptpip_packet(ptip_cmd, self.session)
def communication_thread(self):
while True:
if len(self.cmd_queue) == 0:
                # do a ping and receive a pong (same packet as the ping) to keep the
                # connection alive; since no reply to a proper PtpIpPing packet could
                # be obtained, the device status is queried instead
ptpip_packet_reply = self.send_recieve_ptpip_packet(PtpIpCmdRequest(cmd=0x90C8),
self.session)
if isinstance(ptpip_packet_reply, PtpIpCmdResponse):
time.sleep(1)
continue
            else:
                # get the next command from the command queue (FIFO order)
                ptip_cmd = self.cmd_queue.pop(0)
                ptpip_packet_reply = self.send_recieve_ptpip_packet(ptip_cmd, self.session)
                if (ptpip_packet_reply.ptp_response_code == 0x2001 or
                        ptpip_packet_reply.ptp_response_code == 0x2019):
                    print("Cmd sent successfully")
                else:
                    print(f"cmd reply is: {ptpip_packet_reply.ptp_response_code}")
            # wait 1 second before new packets are processed/sent to the camera
time.sleep(1)
pass
def send_ptpip_cmd(self, ptpip_packet):
self.cmd_queue.append(ptpip_packet)
def connect(self, host='192.168.1.1', port=15740):
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
s.connect((host, port))
except socket.error as message:
if s:
s.close()
print(f"Could not open socket: {message}")
return s
def send_recieve_ptpip_packet(self, ptpip_packet, session):
if isinstance(ptpip_packet, PtpIpInitCmdReq):
self.send_data(ptpip_packet.data(), session)
# set the session id of the object if the reply is of type PtpIpInitCmdAck
ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))
if isinstance(ptpip_packet_reply, PtpIpInitCmdAck):
self.session_id = ptpip_packet_reply.session_id
elif isinstance(ptpip_packet, PtpIpEventReq):
self.send_ptpip_event_req(ptpip_packet, session)
ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))
elif isinstance(ptpip_packet, PtpIpCmdRequest) and ptpip_packet.ptp_cmd == 0x90C7:
self.send_data(ptpip_packet.data(), session)
ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))
if isinstance(ptpip_packet_reply, PtpIpStartDataPacket):
data_length = struct.unpack('I', ptpip_packet_reply.length)[0]
ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))
data = ptpip_packet_reply.data
while isinstance(ptpip_packet_reply, PtpIpDataPacket):
data = data + ptpip_packet_reply.data
ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))
if data_length == len(data):
events = PtpIpEventFactory(data).get_events()
for event in events:
self.event_queue.append(event)
ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))
elif isinstance(ptpip_packet, PtpIpCmdRequest) and ptpip_packet.ptp_cmd == 0x1009:
self.send_data(ptpip_packet.data(), session)
ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))
if isinstance(ptpip_packet_reply, PtpIpStartDataPacket):
data_length = struct.unpack('I', ptpip_packet_reply.length)[0]
ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))
data = ptpip_packet_reply.data
while isinstance(ptpip_packet_reply, PtpIpDataPacket):
data = data + ptpip_packet_reply.data
ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))
if data_length == len(data):
self.object_queue.append(PtpIpDataObject(ptpip_packet.param1, data))
ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))
else:
self.send_data(ptpip_packet.data(), session)
ptpip_packet_reply = PtpIpPacket().factory(data=self.recieve_data(session))
return ptpip_packet_reply
def send_ptpip_event_req(self, ptpip_packet, session):
# add the session id of the object itself if it is not specified in the package
if ptpip_packet.session_id is None:
ptpip_packet.session_id = self.session_id
self.send_data(ptpip_packet.data(), session)
def send_data(self, data, session):
session.send(struct.pack('I', len(data) + 4) + data)
def recieve_data(self, session):
data = session.recv(4)
(data_length,) = struct.unpack('I', data)
print(f"Packet length: {data_length}")
while (data_length) > len(data):
data += session.recv(data_length - len(data))
return data[4:]
class PtpIpPacket(object):
"""docstring for PtpIpCmd"""
def __init__(self):
super(PtpIpPacket, self).__init__()
def factory(self, data=None):
if data is None:
self.cmdtype = None
else:
print(f"Cmd Type: {struct.unpack('I', data[0:4])[0]}")
self.cmdtype = struct.unpack('I', data[0:4])[0]
if self.cmdtype == 1:
return PtpIpInitCmdReq(data[4:])
elif self.cmdtype == 2:
return PtpIpInitCmdAck(data[4:])
elif self.cmdtype == 3:
return PtpIpEventReq(data[4:])
elif self.cmdtype == 4:
return PtpIpEventAck(data[4:])
elif self.cmdtype == 5:
return PtpIpInitFail(data[4:])
elif self.cmdtype == 6:
return PtpIpCmdRequest(data[4:])
elif self.cmdtype == 7:
return PtpIpCmdResponse(data[4:])
elif self.cmdtype == 9:
return PtpIpStartDataPacket(data[4:])
elif self.cmdtype == 10:
return PtpIpDataPacket(data[4:])
elif self.cmdtype == 12:
return PtpIpEndDataPacket(data[4:])
elif self.cmdtype == 13:
return PtpIpPing(data[4:])
def data(self):
pass
class PtpIpInitCmdReq(PtpIpPacket):
"""docstring for PtpIpInitCmd"""
def __init__(self, data=None):
super(PtpIpInitCmdReq, self).__init__()
self.cmdtype = struct.pack('I', 0x01)
self.version = struct.pack('>I', 0x0100)
if data is None:
guid = uuid.uuid4()
self.guid = guid.bytes
self.hostname = socket.gethostname() + '\x00'
self.hostname = self.hostname.encode('utf-16-le')
else:
            self.guid = data[0:16]
            self.hostname = data[16:]
def data(self):
return self.cmdtype + self.guid + self.hostname + self.version
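# Hedged usage sketch (added for illustration, not in the original module):
# serialise an init request and feed the raw bytes back through the factory
# dispatch above, which returns a parsed PtpIpInitCmdReq instance.
def _example_factory_roundtrip():
    raw = PtpIpInitCmdReq().data()
    return PtpIpPacket().factory(data=raw)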
class PtpIpInitCmdAck(PtpIpPacket):
"""docstring for PtpIpInitCmd"""
def __init__(self, data=None):
super(PtpIpInitCmdAck, self).__init__()
self.cmdtype = struct.pack('I', 0x02)
if data is not None:
self.session_id = data[0:4]
self.guid = data[4:20]
self.hostname = data[20:]
class PtpIpEventReq(PtpIpPacket):
"""docstring for PtpIpInitCmd"""
def __init__(self, data=None, session_id=None):
super(PtpIpEventReq, self).__init__()
self.cmdtype = struct.pack('I', 0x03)
self.session_id = None
if data is not None:
self.session_id = data[0:4]
elif session_id is not None:
self.session_id = session_id
def data(self):
if self.session_id:
return self.cmdtype + self.session_id
return self.cmdtype
class PtpIpEventAck(PtpIpPacket):
"""docstring for PtpIpInitCmd"""
def __init__(self, data=None):
super(PtpIpEventAck, self).__init__()
self.cmdtype = struct.pack('I', 0x04)
class PtpIpInitFail(PtpIpPacket):
"""docstring for PtpIpInitCmd"""
def __init__(self, data=None):
super(PtpIpInitFail, self).__init__()
self.cmdtype = struct.pack('I', 0x05)
class PtpIpCmdRequest(PtpIpPacket):
"""
Operation Code Description
0x1001 GetDeviceInfo
0x1002 OpenSession
0x1003 CloseSession
0x1004 GetStorageIDs
0x1005 GetStorageInfo
0x1006 GetNumObjects
0x1007 GetObjectHandles
0x1008 GetObjectInfo
0x1009 GetObject
0x100A GetThumb
0x100B DeleteObject
0x100C SendObjectInfo
0x100D SendObject
0x100E InitiateCapture
0x100F FormatStore
0x1014 GetDevicePropDesc
0x1015 GetDevicePropValue
0x1016 SetDevicePropValue
0x101B GetPartialObject
0x90C0 InitiateCaptureRecInSdram
0x90C1 AfDrive
0x90C2 ChangeCameraMode
0x90C3 DeleteImagesInSdram
0x90C4 GetLargeThumb
0x90C7 GetEvent
0x90C8 DeviceReady
0x90C9 SetPreWbData
0x90CA GetVendorPropCodes
0x90CB AfAndCaptureRecInSdram
0x90CC GetPicCtrlData
0x90CD SetPicCtrlData
0x90CE DeleteCustomPicCtrl
0x90CF GetPicCtrlCapability
0x9201 StartLiveView
0x9202 EndLiveView
0x9203 GetLiveViewImage
0x9204 MfDrive
0x9205 ChangeAfArea
0x9206 AfDriveCancel
0x9207 InitiateCaptureRecInMedia
0x9209 GetVendorStorageIDs
0x920A StartMovieRecInCard
0x920B EndMovieRec
0x920C TerminateCapture
0x9400 GetPartialObjectHighSpeed
0x9407 SetTransferListLock
0x9408 GetTransferList
0x9409 NotifyFileAcquisitionStart
0x940A NotifyFileAcquisitionEnd
0x940B GetSpecificSizeObject
0x9801 GetObjectPropsSupported
0x9802 GetObjectPropDesc
0x9803 GetObjectPropValue
0x9805 GetObjectPropList
"""
def __init__(self, data=None, cmd=None, param1=None, param2=None, param3=None, param4=None,
param5=None):
super(PtpIpCmdRequest, self).__init__()
self.cmdtype = struct.pack('I', 0x06)
        self.unknown = struct.pack('I', 0x01)
self.ptp_cmd = cmd
self.param1 = param1
self.param2 = param2
self.param3 = param3
self.param4 = param4
self.param5 = param5
        # TODO: generate a proper transaction ID instead of this fixed value
        self.transaction_id = struct.pack('I', 0x06)
        self.args = b''  # must be bytes, not str, so the packed params can be appended
        if self.param1 is not None:
            self.args = self.args + struct.pack('<I', self.param1)  # '<I': 4-byte little-endian (native 'L' may be 8 bytes)
        if self.param2 is not None:
            self.args = self.args + struct.pack('<I', self.param2)
        if self.param3 is not None:
            self.args = self.args + struct.pack('<I', self.param3)
        if self.param4 is not None:
            self.args = self.args + struct.pack('<I', self.param4)
        if self.param5 is not None:
            self.args = self.args + struct.pack('<I', self.param5)
def data(self):
        return self.cmdtype + self.unknown + struct.pack('H', self.ptp_cmd) + \
self.transaction_id + self.args
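# Hedged usage sketch (added for illustration, not in the original module):
# building requests from the operation-code table documented above. The
# session id value is an illustrative assumption.
def _example_build_requests():
    open_session = PtpIpCmdRequest(cmd=0x1002, param1=1)  # OpenSession, session id 1
    device_ready = PtpIpCmdRequest(cmd=0x90C8)            # DeviceReady, no parameters
    return open_session.data(), device_ready.data()      # raw bytes for send_data()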
class PtpIpCmdResponse(PtpIpPacket):
"""
ResponseCode Description
0x2000 Undefined
0x2001 OK
0x2002 General Error
0x2003 Session Not Open
0x2004 Invalid TransactionID
0x2005 Operation Not Supported
0x2006 Parameter Not Supported
0x2007 Incomplete Transfer
0x2008 Invalid StorageID
0x2009 Invalid ObjectHandle
0x200A DeviceProp Not Supported
0x200B Invalid ObjectFormatCode
0x200C Store Full
0x200D Object WriteProtected
0x200E Store Read-Only
0x200F Access Denied
0x2010 No Thumbnail Present
0x2011 SelfTest Failed
0x2012 Partial Deletion
0x2013 Store Not Available
0x2014 Specification By Format Unsupported
0x2015 No Valid ObjectInfo
0x2016 Invalid Code Format
0x2017 Unknown Vendor Code
0x2018 Capture Already Terminated
0x2019 Device Busy
0x201A Invalid ParentObject
0x201B Invalid DeviceProp Format
0x201C Invalid DeviceProp Value
0x201D Invalid Parameter
0x201E Session Already Open
0x201F Transaction Cancelled
0x2020 Specification of Destination Unsupported
"""
def __init__(self, data=None):
super(PtpIpCmdResponse, self).__init__()
self.cmdtype = struct.pack('I', 0x07)
if data is not None:
self.ptp_response_code = struct.unpack('H', data[0:2])[0]
self.transaction_id = data[2:6]
self.args = data[6:]
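# Hedged helper sketch (added for illustration, not in the original module):
# interpreting the response-code table above the way communication_thread
# does -- 0x2001 (OK) and 0x2019 (Device Busy) both mean the command reached
# the device.
def _cmd_reached_device(response):
    return response.ptp_response_code in (0x2001, 0x2019)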
class PtpIpStartDataPacket(PtpIpPacket):
"""docstring for Start_Data_Packet"""
def __init__(self, data=None):
self.cmdtype = struct.pack('I', 0x09)
super(PtpIpStartDataPacket, self).__init__()
if data is not None:
self.transaction_id = data[0:4]
self.length = data[4:8]
class PtpIpDataPacket(PtpIpPacket):
"""docstring for Start_Data_Packet"""
def __init__(self, data=None):
        self.cmdtype = struct.pack('I', 0x0A)  # packet type 10 (Data Packet), matching the factory dispatch
super(PtpIpDataPacket, self).__init__()
if data is not None:
self.transaction_id = data[0:4]
self.data = data[4:]
class PtpIpCancelTransaction(PtpIpPacket):
"""docstring for Start_Data_Packet"""
def __init__(self, data=None):
        self.cmdtype = struct.pack('I', 0x0B)  # packet type 11 (Cancel), following the decimal numbering used by the factory
super(PtpIpCancelTransaction, self).__init__()
if data is not None:
self.transaction_id = data[0:4]
class PtpIpEndDataPacket(PtpIpPacket):
"""docstring for Start_Data_Packet"""
def __init__(self, data=None):
        self.cmdtype = struct.pack('I', 0x0C)  # packet type 12 (End Data), matching the factory dispatch
super(PtpIpEndDataPacket, self).__init__()
if data is not None:
self.transaction_id = data[0:4]
print(f"transaction_id: {struct.unpack('I', self.transaction_id)[0]}")
self.data = data[4:]
class PtpIpPing(PtpIpPacket):
"""docstring for Start_Data_Packet"""
def __init__(self, data=None):
        self.cmdtype = struct.pack('I', 0x0D)  # packet type 13 (Ping), matching the factory dispatch
super(PtpIpPing, self).__init__()
        # a ping carries no payload; assigning self.data here would shadow the
        # data() method below, so there is nothing to parse
def data(self):
return self.cmdtype
class PtpIpEvent(object):
"""
EventCode Description
0x4001 CancelTransaction
0x4002 ObjectAdded
0x4003 ObjectRemoved
0x4004 StoreAdded
0x4005 StoreRemoved
0x4006 DevicePropChanged
0x4007 ObjectInfoChanged
0x4008 DeviceInfoChanged
0x4009 RequestObjectTransfer
0x400A StoreFull
0x400C StorageInfoChanged
0x400D CaptureComplete
0xC101 ObjectAddedInSdram
0xC102 CaptureCompleteRecInSdram
0xC105 RecordingInterrupted
"""
def __init__(self, event_code, event_parameter):
super(PtpIpEvent, self).__init__()
self.event_code = int(event_code)
self.event_parameter = int(event_parameter)
class PtpIpEventFactory(object):
"""
This is a factory to produce an array of PtpIpEvent objects if it got passd a data reply
from a GetEvent request 0x90C7
"""
def __init__(self, data):
super(PtpIpEventFactory, self).__init__()
# create an empty array for the PtpIpEvent object which will be replied
self.events = []
# get the amount of events passed from the data passed to the factory
amount_of_events = struct.unpack('H', data[0:2])[0]
# set an counter and an offset of 2 as the first two bytes are already processed
counter = 1
offset = 2
while counter <= amount_of_events:
# get the event_code which consists of two bytes
event_code = str(struct.unpack('H', data[offset:offset+2])[0])
# get the event_parameter which consists of 4 bytes
event_parameter = str(struct.unpack('I', data[offset+2:offset+6])[0])
self.events.append(PtpIpEvent(event_code, event_parameter))
# increase the offset by 6 to get to the next event_code and event_parameter pair
offset = offset + 6
counter = counter + 1
def get_events(self):
return self.events
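# Hedged usage sketch (added for illustration, not in the original module):
# parsing a synthetic GetEvent payload containing two events -- ObjectAdded
# (0x4002) and CaptureComplete (0x400D). The object handle 0x1234 is an
# illustrative assumption.
def _example_parse_events():
    payload = struct.pack('<HHIHI', 2, 0x4002, 0x1234, 0x400D, 0)
    events = PtpIpEventFactory(payload).get_events()
    return [(e.event_code, e.event_parameter) for e in events]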
class PtpIpDataObject(object):
"""docstring for PtpIpDataObject"""
def __init__(self, object_handle, data):
super(PtpIpDataObject, self).__init__()
self.object_handle = object_handle
self.data = data
| 2.890625 | 3 |
examples/morpho.py | jaideep-seth/PyOpenWorm | 0 | 6434 | <gh_stars>0
"""
How to load morphologies of certain cells from the database.
"""
#this is an expected failure right now, as morphology is not implemented
from __future__ import absolute_import
from __future__ import print_function
import PyOpenWorm as P
from PyOpenWorm.context import Context
from PyOpenWorm.worm import Worm
from six import StringIO
#Connect to database.
with P.connect('default.conf') as conn:
ctx = Context(ident="http://openworm.org/data", conf=conn.conf).stored
#Create a new Cell object to work with.
aval = ctx(Worm)().get_neuron_network().aneuron('AVAL')
#Get the morphology associated with the Cell. Returns a neuroml.Morphology object.
morph = aval._morphology()
    out = StringIO()
    morph.export(out, 0)  # we're printing it here, but we would normally do something else with the morphology object.
    out.seek(0)  # rewind the buffer, otherwise read() returns an empty string
    print(out.read())
| 2.515625 | 3 |
corehq/apps/app_manager/tests/test_form_workflow.py | kkrampa/commcare-hq | 1 | 6435 | <reponame>kkrampa/commcare-hq
from __future__ import absolute_import
from __future__ import unicode_literals
from django.test import SimpleTestCase
from corehq.apps.app_manager.const import (
AUTO_SELECT_RAW,
AUTO_SELECT_CASE,
WORKFLOW_FORM,
WORKFLOW_MODULE,
WORKFLOW_PREVIOUS,
WORKFLOW_ROOT,
WORKFLOW_PARENT_MODULE,
)
from corehq.apps.app_manager.models import FormDatum, FormLink
from corehq.apps.app_manager.suite_xml.post_process.workflow import _replace_session_references_in_stack, CommandId
from corehq.apps.app_manager.suite_xml.xml_models import StackDatum
from corehq.apps.app_manager.tests.app_factory import AppFactory
from corehq.apps.app_manager.tests.util import TestXmlMixin
from corehq.apps.app_manager.xpath import session_var
class TestFormWorkflow(SimpleTestCase, TestXmlMixin):
file_path = ('data', 'form_workflow')
def test_basic(self):
factory = AppFactory(build_version='2.9.0')
m0, m0f0 = factory.new_basic_module('m0', 'frog')
m1, m1f0 = factory.new_basic_module('m1', 'frog')
m0f0.post_form_workflow = WORKFLOW_FORM
m0f0.form_links = [
FormLink(xpath="(today() - dob) < 7", form_id=m1f0.unique_id)
]
self.assertXmlPartialEqual(self.get_xml('form_link_basic'), factory.app.create_suite(), "./entry[1]")
def test_with_case_management_both_update(self):
factory = AppFactory(build_version='2.9.0')
m0, m0f0 = factory.new_basic_module('m0', 'frog')
factory.form_requires_case(m0f0)
m1, m1f0 = factory.new_basic_module('m1', 'frog')
factory.form_requires_case(m1f0)
m0f0.post_form_workflow = WORKFLOW_FORM
m0f0.form_links = [
FormLink(xpath="(today() - dob) > 7", form_id=m1f0.unique_id)
]
self.assertXmlPartialEqual(self.get_xml('form_link_update_case'), factory.app.create_suite(), "./entry[1]")
def test_with_case_management_create_update(self):
factory = AppFactory(build_version='2.9.0')
m0, m0f0 = factory.new_basic_module('m0', 'frog')
factory.form_opens_case(m0f0)
m1, m1f0 = factory.new_basic_module('m1', 'frog')
factory.form_requires_case(m1f0)
m0f0.post_form_workflow = WORKFLOW_FORM
m0f0.form_links = [
FormLink(xpath='true()', form_id=m1f0.unique_id)
]
self.assertXmlPartialEqual(self.get_xml('form_link_create_update_case'), factory.app.create_suite(), "./entry[1]")
def test_with_case_management_multiple_links(self):
factory = AppFactory(build_version='2.9.0')
m0, m0f0 = factory.new_basic_module('m0', 'frog')
factory.form_opens_case(m0f0)
m1, m1f0 = factory.new_basic_module('m1', 'frog')
factory.form_requires_case(m1f0)
m1f1 = factory.new_form(m1)
factory.form_opens_case(m1f1)
m0f0.post_form_workflow = WORKFLOW_FORM
m0f0.form_links = [
FormLink(xpath="a = 1", form_id=m1f0.unique_id),
FormLink(xpath="a = 2", form_id=m1f1.unique_id)
]
self.assertXmlPartialEqual(self.get_xml('form_link_multiple'), factory.app.create_suite(), "./entry[1]")
def test_link_to_child_module(self):
factory = AppFactory(build_version='2.9.0')
m0, m0f0 = factory.new_basic_module('enroll child', 'child')
factory.form_opens_case(m0f0)
m1, m1f0 = factory.new_basic_module('child visit', 'child')
factory.form_requires_case(m1f0)
factory.form_opens_case(m1f0, case_type='visit', is_subcase=True)
m2, m2f0 = factory.new_advanced_module('visit history', 'visit', parent_module=m1)
factory.form_requires_case(m2f0, 'child')
factory.form_requires_case(m2f0, 'visit', parent_case_type='child')
m0f0.post_form_workflow = WORKFLOW_FORM
m0f0.form_links = [
FormLink(xpath="true()", form_id=m1f0.unique_id),
]
m1f0.post_form_workflow = WORKFLOW_FORM
m1f0.form_links = [
FormLink(xpath="true()", form_id=m2f0.unique_id),
]
self.assertXmlPartialEqual(self.get_xml('form_link_tdh'), factory.app.create_suite(), "./entry")
def test_manual_form_link(self):
factory = AppFactory(build_version='2.9.0')
m0, m0f0 = factory.new_basic_module('enroll child', 'child')
factory.form_opens_case(m0f0)
m1, m1f0 = factory.new_basic_module('child visit', 'child')
factory.form_requires_case(m1f0)
factory.form_opens_case(m1f0, case_type='visit', is_subcase=True)
m2, m2f0 = factory.new_advanced_module('visit history', 'visit', parent_module=m1)
factory.form_requires_case(m2f0, 'child')
factory.form_requires_case(m2f0, 'visit', parent_case_type='child')
m0f0.post_form_workflow = WORKFLOW_FORM
m0f0.form_links = [
FormLink(xpath="true()", form_id=m1f0.unique_id, datums=[
FormDatum(name='case_id', xpath="instance('commcaresession')/session/data/case_id_new_child_0")
]),
]
m1f0.post_form_workflow = WORKFLOW_FORM
m1f0.form_links = [
FormLink(xpath="true()", form_id=m2f0.unique_id, datums=[
FormDatum(name='case_id', xpath="instance('commcaresession')/session/data/case_id"),
FormDatum(name='case_id_load_visit_0', xpath="instance('commcaresession')/session/data/case_id_new_visit_0"),
]),
]
self.assertXmlPartialEqual(self.get_xml('form_link_tdh'), factory.app.create_suite(), "./entry")
def test_manual_form_link_with_fallback(self):
factory = AppFactory(build_version='2.9.0')
m0, m0f0 = factory.new_basic_module('enroll child', 'child')
factory.form_opens_case(m0f0)
m1, m1f0 = factory.new_basic_module('child visit', 'child')
factory.form_requires_case(m1f0)
factory.form_opens_case(m1f0, case_type='visit', is_subcase=True)
m2, m2f0 = factory.new_advanced_module('visit history', 'visit', parent_module=m1)
factory.form_requires_case(m2f0, 'child')
factory.form_requires_case(m2f0, 'visit', parent_case_type='child')
m0f0.post_form_workflow = WORKFLOW_FORM
m0f0.form_links = [
FormLink(xpath="true()", form_id=m1f0.unique_id, datums=[
FormDatum(name='case_id', xpath="instance('commcaresession')/session/data/case_id_new_child_0")
]),
]
m1f0.post_form_workflow = WORKFLOW_FORM
condition_for_xpath = "instance('casedb')/casedb/case[@case_id = " \
"instance('commcaresession')/session/data/case_id]/prop = 'value'"
m1f0.form_links = [
FormLink(xpath="true()", form_id=m2f0.unique_id, datums=[
FormDatum(name='case_id', xpath="instance('commcaresession')/session/data/case_id"),
FormDatum(name='case_id_load_visit_0',
xpath="instance('commcaresession')/session/data/case_id_new_visit_0"),
]),
FormLink(xpath=condition_for_xpath, form_id=m2f0.unique_id, datums=[
FormDatum(name='case_id', xpath="instance('commcaresession')/session/data/case_id"),
FormDatum(name='case_id_load_visit_0',
xpath="instance('commcaresession')/session/data/case_id_new_visit_0"),
]),
]
m1f0.post_form_workflow_fallback = WORKFLOW_PREVIOUS
self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_previous'),
factory.app.create_suite(), "./entry")
m1f0.post_form_workflow_fallback = WORKFLOW_MODULE
self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_module'),
factory.app.create_suite(), "./entry")
m1f0.post_form_workflow_fallback = WORKFLOW_ROOT
self.assertXmlPartialEqual(self.get_xml('form_link_tdh_with_fallback_root'),
factory.app.create_suite(), "./entry")
def test_reference_to_missing_session_variable_in_stack(self):
# http://manage.dimagi.com/default.asp?236750
#
# Stack create blocks do not update the session after each datum
# so items put into the session in one step aren't available later steps
#
# <datum id="case_id_A" value="instance('commcaresession')/session/data/case_id_new_A"/>
# - <datum id="case_id_B" value="instance('casedb')/casedb/case[@case_id=instance('commcaresession')/session/data/case_id_A]/index/host"/>
# + <datum id="case_id_B" value="instance('casedb')/casedb/case[@case_id=instance('commcaresession')/session/data/case_id_new_A]/index/host"/>
#
# in the above example ``case_id_A`` is being added to the session and then
# later referenced. However since the session doesn't get updated
# the value isn't available in the session.
#
# To fix this we need to replace any references to previous variables with the full xpath which
# that session variable references.
#
# See corehq.apps.app_manager.suite_xml.post_process.workflow._replace_session_references_in_stack
factory = AppFactory(build_version='2.9.0')
m0, m0f0 = factory.new_basic_module('person registration', 'person')
factory.form_opens_case(m0f0)
m1, m1f0 = factory.new_advanced_module('episode registration', 'episode')
factory.form_requires_case(m1f0, case_type='person')
factory.form_opens_case(m1f0, case_type='episode', is_subcase=True, is_extension=True)
m2, m2f0 = factory.new_advanced_module('tests', 'episode')
factory.form_requires_case(m2f0, 'episode')
factory.advanced_form_autoloads(m2f0, AUTO_SELECT_CASE, 'host', 'load_episode_0')
m1f0.post_form_workflow = WORKFLOW_FORM
m1f0.form_links = [
FormLink(xpath="true()", form_id=m2f0.unique_id, datums=[
FormDatum(name='case_id_load_episode_0', xpath="instance('commcaresession')/session/data/case_id_new_episode_0")
]),
]
self.assertXmlPartialEqual(self.get_xml('form_link_enikshay'), factory.app.create_suite(), "./entry")
def test_return_to_parent_module(self):
factory = AppFactory(build_version='2.9.0')
m0, m0f0 = factory.new_basic_module('enroll child', 'child')
factory.form_opens_case(m0f0)
m1, m1f0 = factory.new_basic_module('child visit', 'child')
factory.form_requires_case(m1f0)
factory.form_opens_case(m1f0, case_type='visit', is_subcase=True)
m2, m2f0 = factory.new_advanced_module('visit history', 'visit', parent_module=m1)
factory.form_requires_case(m2f0, 'child')
factory.form_requires_case(m2f0, 'visit', parent_case_type='child')
m2f0.post_form_workflow = WORKFLOW_PARENT_MODULE
expected = """
<partial>
<stack>
<create>
<command value="'m1'"/>
<datum id="case_id" value="instance('commcaresession')/session/data/case_id"/>
<datum id="case_id_new_visit_0" value="uuid()"/>
</create>
</stack>
</partial>
"""
self.assertXmlPartialEqual(expected, factory.app.create_suite(), "./entry[3]/stack")
def test_return_to_child_module(self):
factory = AppFactory(build_version='2.9.0')
m0, m0f0 = factory.new_basic_module('enroll child', 'child')
factory.form_opens_case(m0f0)
m1, m1f0 = factory.new_basic_module('child visit', 'child')
factory.form_requires_case(m1f0)
factory.form_opens_case(m1f0, case_type='visit', is_subcase=True)
m2, m2f0 = factory.new_advanced_module('visit history', 'visit', parent_module=m1)
factory.form_requires_case(m2f0, 'child')
factory.form_requires_case(m2f0, 'visit', parent_case_type='child')
m2f0.post_form_workflow = WORKFLOW_MODULE
expected = """
<partial>
<stack>
<create>
<command value="'m1'"/>
<datum id="case_id" value="instance('commcaresession')/session/data/case_id"/>
<datum id="case_id_new_visit_0" value="uuid()"/>
<command value="'m2'"/>
</create>
</stack>
</partial>
"""
self.assertXmlPartialEqual(expected, factory.app.create_suite(), "./entry[3]/stack")
def test_link_to_form_in_parent_module(self):
factory = AppFactory(build_version='2.9.0')
m0, m0f0 = factory.new_basic_module('enroll child', 'child')
factory.form_opens_case(m0f0)
m1, m1f0 = factory.new_basic_module('child visit', 'child')
factory.form_requires_case(m1f0)
m2, m2f0 = factory.new_advanced_module('visit history', 'visit', parent_module=m1)
factory.form_requires_case(m2f0, 'child')
# link to child -> edit child
m2f0.post_form_workflow = WORKFLOW_FORM
m2f0.form_links = [
FormLink(xpath="true()", form_id=m1f0.unique_id),
]
self.assertXmlPartialEqual(self.get_xml('form_link_child_modules'), factory.app.create_suite(), "./entry[3]")
def test_form_links_submodule(self):
# Test that when linking between two forms in a submodule we match up the
# session variables between the source and target form correctly
factory = AppFactory(build_version='2.9.0')
m0, m0f0 = factory.new_basic_module('child visit', 'child')
factory.form_requires_case(m0f0)
factory.form_opens_case(m0f0, 'visit', is_subcase=True)
m1, m1f0 = factory.new_advanced_module('visit histroy', 'visit', parent_module=m0)
factory.form_requires_case(m1f0, 'child')
factory.form_requires_case(m1f0, 'visit', parent_case_type='child')
m1f1 = factory.new_form(m1)
factory.form_requires_case(m1f1, 'child')
factory.form_requires_case(m1f1, 'visit', parent_case_type='child')
m1f0.post_form_workflow = WORKFLOW_FORM
m1f0.form_links = [
FormLink(xpath="true()", form_id=m1f1.unique_id),
]
self.assertXmlPartialEqual(self.get_xml('form_link_submodule'), factory.app.create_suite(), "./entry")
def _build_workflow_app(self, mode):
factory = AppFactory(build_version='2.9.0')
m0, m0f0 = factory.new_basic_module('m0', '')
factory.new_form(m0)
m1, m1f0 = factory.new_basic_module('m1', 'patient')
m1f1 = factory.new_form(m1)
factory.form_opens_case(m1f0)
factory.form_requires_case(m1f1)
m2, m2f0 = factory.new_basic_module('m2', 'patient')
m2f1 = factory.new_form(m2)
factory.form_requires_case(m2f0)
factory.form_requires_case(m2f1)
m3, m3f0 = factory.new_basic_module('m3', 'child')
m3f1 = factory.new_form(m3)
factory.form_requires_case(m3f0, parent_case_type='patient')
factory.form_requires_case(m3f1)
m4, m4f0 = factory.new_advanced_module('m4', 'patient')
factory.form_requires_case(m4f0, case_type='patient')
factory.form_requires_case(m4f0, case_type='patient')
m4f1 = factory.new_form(m4)
factory.form_requires_case(m4f1, case_type='patient')
factory.form_requires_case(m4f1, case_type='patient')
factory.form_requires_case(m4f1, case_type='patient')
m4f2 = factory.new_form(m4)
factory.form_requires_case(m4f2, case_type='patient')
factory.form_requires_case(m4f2, case_type='patient')
factory.advanced_form_autoloads(m4f2, AUTO_SELECT_RAW, 'case_id')
m5, m5f0 = factory.new_basic_module('m5', 'patient', parent_module=m1)
factory.form_requires_case(m5f0)
for module in factory.app.get_modules():
for form in module.get_forms():
form.post_form_workflow = mode
return factory.app
def test_form_workflow_previous(self):
app = self._build_workflow_app(WORKFLOW_PREVIOUS)
self.assertXmlPartialEqual(self.get_xml('suite-workflow-previous'), app.create_suite(), "./entry")
def test_form_workflow_module(self):
app = self._build_workflow_app(WORKFLOW_MODULE)
self.assertXmlPartialEqual(self.get_xml('suite-workflow-module'), app.create_suite(), "./entry")
def test_form_workflow_module_in_root(self):
app = self._build_workflow_app(WORKFLOW_PREVIOUS)
for m in [1, 2]:
module = app.get_module(m)
module.put_in_root = True
self.assertXmlPartialEqual(self.get_xml('suite-workflow-module-in-root'), app.create_suite(), "./entry")
def test_form_workflow_root(self):
app = self._build_workflow_app(WORKFLOW_ROOT)
self.assertXmlPartialEqual(self.get_xml('suite-workflow-root'), app.create_suite(), "./entry")
class TestReplaceSessionRefs(SimpleTestCase):
def test_replace_session_references_in_stack(self):
children = [
CommandId('m0'),
StackDatum(id='a', value=session_var('new_a')),
StackDatum(id='b', value=session_var('new_b')),
StackDatum(id='c', value="instance('casedb')/case/[@case_id = {a}]/index/parent".format(a=session_var('a'))),
StackDatum(id='d', value="if({c}, {c}, {a}]".format(a=session_var('a'), c=session_var('c')))
]
clean = _replace_session_references_in_stack(children)
clean_raw = []
for child in clean:
if isinstance(child, CommandId):
clean_raw.append(child.id)
else:
clean_raw.append((child.id, child.value))
new_c = "instance('casedb')/case/[@case_id = {a}]/index/parent".format(a=session_var('new_a'))
self.assertEqual(clean_raw, [
'm0',
('a', session_var('new_a')),
('b', session_var('new_b')),
('c', new_c),
('d', "if({c}, {c}, {a}]".format(a=session_var('new_a'), c=new_c))
])
| 1.890625 | 2 |
tensorflow/python/compiler/tensorrt/model_tests/model_handler.py | sboshin/tensorflow | 0 | 6436 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Loads, converts, and runs sample models."""
import abc
import collections
import functools
import tempfile
import time
from typing import Callable, Iterable, List, Mapping, Optional, Sequence, Union
from absl import logging
import numpy as np
from tensorflow.core.framework import graph_pb2
from tensorflow.core.framework import tensor_shape_pb2
from tensorflow.core.protobuf import config_pb2
from tensorflow.core.protobuf import meta_graph_pb2
from tensorflow.python.client import session
from tensorflow.python.compiler.tensorrt import trt_convert as trt
from tensorflow.python.framework import convert_to_constants
from tensorflow.python.framework import dtypes as tf_dtypes
from tensorflow.python.framework import importer
from tensorflow.python.framework import ops as framework_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.saved_model import load as saved_model_load
from tensorflow.python.saved_model import loader as saved_model_loader
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.saved_model import tag_constants
# pylint: disable=bad-whitespace
### Helper Functions
def _get_concrete_tensor_shape(
tensor_shape: tensor_shape_pb2.TensorShapeProto,
batch_size: Optional[int] = None) -> Sequence[int]:
"""Gets a concrete tensor shape without dynamic dimensions."""
if tensor_shape.unknown_rank:
raise ValueError("Cannot generates random tensors for unknown rank!")
shape = [dim.size for dim in tensor_shape.dim]
if not shape:
raise ValueError("The tensor cannot have a rank of 0!")
if shape[0] < 0:
if batch_size is None or batch_size <= 0:
raise ValueError("Must provide a valid batch size "
"as the tensor has a dynamic batch size!")
shape[0] = batch_size
if any(filter(lambda x: x < 0, shape)):
raise ValueError("Cannot have dynamic dimensions except for batch size!")
return shape
def _generate_random_tensor_v1(tensor_info: meta_graph_pb2.TensorInfo,
batch_size: Optional[int] = None) -> np.ndarray:
"""Generates a random tensor based on the data type and tensor shape."""
dtype = tf_dtypes.as_dtype(tensor_info.dtype)
shape = _get_concrete_tensor_shape(tensor_info.tensor_shape, batch_size)
with session.Session():
return random_ops.random_uniform(
shape=shape, dtype=dtype, name=tensor_info.name.split(":")[0]).eval()
def _generate_random_tensor_v2(
tensor: framework_ops.Tensor,
batch_size: Optional[int] = None) -> framework_ops.Tensor:
"""Generates a random tensor based on the data type and tensor shape."""
shape = _get_concrete_tensor_shape(tensor.shape.as_proto(), batch_size)
return random_ops.random_uniform(
shape=shape, dtype=tensor.dtype, name=tensor.name)
# Models are repeatedly loaded for different TensorRT conversion settings.
# Using cache can reduce I/O.
@functools.lru_cache()
def load_meta_graph(
saved_model_dir: str, saved_model_tags: str,
saved_model_signature_key: str) -> meta_graph_pb2.MetaGraphDef:
"""Loads a `tf.MetaGraphDef` in TF1."""
with session.Session() as sess:
meta_graph = saved_model_loader.load(
sess=sess,
export_dir=saved_model_dir,
tags=saved_model_tags,
)
output_node_names = [
tensor.name.split(":")[0] for tensor in
meta_graph.signature_def[saved_model_signature_key].outputs.values()
]
graph_def = (
convert_to_constants.convert_variables_to_constants_from_session_graph(
sess, meta_graph.graph_def, output_node_names))
meta_graph.graph_def.CopyFrom(graph_def)
return meta_graph
@functools.lru_cache()
def load_graph_func(saved_model_dir: str, saved_model_tags: str,
saved_model_signature_key: str):
"""Loads a graph function in TF2."""
imported = saved_model_load.load(
export_dir=saved_model_dir, tags=saved_model_tags)
graph_func = imported.signatures[saved_model_signature_key]
return convert_to_constants.convert_variables_to_constants_v2(graph_func)
### Test Classes
class TestResult(
collections.namedtuple("TestResult",
["outputs", "latency", "trt_convert_params"])):
def __new__(cls,
outputs: Mapping[str, np.ndarray],
latency: List[float],
trt_convert_params: trt.TrtConversionParams = None):
return super(TestResult, cls).__new__(cls, outputs, latency,
trt_convert_params)
class ModelConfig(
collections.namedtuple("ModelConfig", [
"saved_model_dir", "saved_model_tags", "saved_model_signature_key",
"default_batch_size"
])):
"""Configurations for test models."""
def __new__(cls,
saved_model_dir: str,
saved_model_tags: Sequence[str] = (tag_constants.SERVING,),
saved_model_signature_key: str = (
signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY),
default_batch_size: int = 1):
return super(ModelConfig,
cls).__new__(cls, saved_model_dir, saved_model_tags,
saved_model_signature_key, default_batch_size)
class TestResultCollection(
collections.namedtuple("TestResultCollection", ["results", "config"])):
  def __new__(cls, config: ModelConfig,
              results: Sequence[TestResult] = tuple()):
    # pass the fields in declaration order (results, config)
    return super(TestResultCollection, cls).__new__(cls, results, config)
class _ModelHandlerBase(metaclass=abc.ABCMeta):
"""Base class for running a model."""
def __init__(self, model_config: ModelConfig):
self._model_config = model_config
def __str__(self) -> str:
return str(self._model_config)
def __repr__(self) -> str:
return "{}({})".format(self.__class__.__name__, str(self))
@property
def model_config(self) -> ModelConfig:
return self._model_config
@property
  def input_tensor_names(self) -> Sequence[str]:
    """Names of input tensors."""
@property
def output_tensor_names(self) -> Sequence[str]:
"""Names of output tensors."""
@abc.abstractmethod
def generate_random_inputs(
self,
batch_size: Optional[int] = None
) -> Mapping[str, Union[np.ndarray, framework_ops.Tensor]]:
"""Generates mapping from names to input tensors."""
@abc.abstractmethod
def run(self,
inputs=None,
warmup_iterations: int = 10,
benchmark_iterations: int = 100,
allow_to_use_gpu: bool = False) -> TestResult:
"""Runs the model with provided or randomly generated input tensors.
Args:
inputs: Mapping from names to input ndarrays in TF1, or a sequence of
        tensors in TF2. If `None`, randomly generated inputs will be used
instead.
warmup_iterations: Number of inferences to warm up the runtime.
benchmark_iterations: Number of inferences to measure the latency.
allow_to_use_gpu: Whether it is allowed to use GPU or not.
Returns:
`TestResult` summarizing timing and numerics information.
"""
class ModelHandlerV1(_ModelHandlerBase):
"""Runs a model in TF1."""
@property
def meta_graph(self) -> meta_graph_pb2.MetaGraphDef:
return load_meta_graph(
saved_model_dir=self.model_config.saved_model_dir,
saved_model_tags=self.model_config.saved_model_tags,
saved_model_signature_key=self.model_config.saved_model_signature_key)
@property
def input_tensor_info(self) -> Mapping[str, meta_graph_pb2.TensorInfo]:
return self.meta_graph.signature_def[
self.model_config.saved_model_signature_key].inputs
@property
def output_tensor_info(self) -> Mapping[str, meta_graph_pb2.TensorInfo]:
return self.meta_graph.signature_def[
self.model_config.saved_model_signature_key].outputs
@property
  def input_tensor_names(self) -> Sequence[str]:
return [info.name for info in self.input_tensor_info.values()]
@property
def output_tensor_names(self) -> Sequence[str]:
return [info.name for info in self.output_tensor_info.values()]
def generate_random_inputs(self,
batch_size: Optional[int] = None
) -> Mapping[str, np.ndarray]:
batch_size = batch_size or self.model_config.default_batch_size
return {
tensor_info.name: _generate_random_tensor_v1(tensor_info, batch_size)
for tensor_info in self.input_tensor_info.values()
}
def run(self,
inputs: Optional[Mapping[str, np.ndarray]] = None,
warmup_iterations=10,
benchmark_iterations=100,
allow_to_use_gpu=False) -> TestResult:
inputs = inputs or self.generate_random_inputs()
config_proto = None
if not allow_to_use_gpu:
config_proto = config_pb2.ConfigProto(device_count={"CPU": 1, "GPU": 0})
with session.Session(config=config_proto) as sess:
importer.import_graph_def(self.meta_graph.graph_def)
try:
for _ in range(warmup_iterations):
sess.run(fetches=self.output_tensor_names, feed_dict=inputs)
latency = []
for _ in range(benchmark_iterations):
before = time.time()
outputs = sess.run(fetches=self.output_tensor_names, feed_dict=inputs)
latency.append(time.time() - before)
except Exception as exc:
raise RuntimeError("Failed to run model inference! "
"Model information: {}".format(str(self))) from exc
outputs = dict(zip(self.output_tensor_names, outputs))
return TestResult(latency=latency, outputs=outputs if inputs else None)
class ModelHandlerV2(_ModelHandlerBase):
"""Runs a model in TF2."""
@property
def graph_func(self):
graph_func = load_graph_func(
saved_model_dir=self.model_config.saved_model_dir,
saved_model_tags=self.model_config.saved_model_tags,
saved_model_signature_key=self.model_config.saved_model_signature_key)
return convert_to_constants.convert_variables_to_constants_v2(graph_func)
@property
def input_tensor_names(self):
return [tensor.name for tensor in self.graph_func.inputs]
@property
def output_tensor_names(self):
return [tensor.name for tensor in self.graph_func.outputs]
def generate_random_inputs(self,
batch_size: Optional[int] = None
) -> Sequence[framework_ops.Tensor]:
batch_size = batch_size or self.model_config.default_batch_size
return [
_generate_random_tensor_v2(tensor, batch_size)
for tensor in self.graph_func.inputs
]
def run(self,
inputs: Optional[Sequence[framework_ops.Tensor]] = None,
warmup_iterations=10,
benchmark_iterations=100,
allow_to_use_gpu=False) -> TestResult:
inputs = inputs or self.generate_random_inputs()
try:
device = "/device:gpu:0" if allow_to_use_gpu else "/device:cpu:0"
with framework_ops.device(device):
for _ in range(warmup_iterations):
self.graph_func(*inputs)
latency = []
for _ in range(benchmark_iterations):
before = time.time()
outputs = self.graph_func(*inputs)
latency.append(time.time() - before)
except Exception as exc:
raise RuntimeError("Failed to run model inference! "
"Model information: {}".format(str(self))) from exc
outputs = dict(zip(self.output_tensor_names, outputs))
return TestResult(latency=latency, outputs=outputs if inputs else None)
class _TrtModelHandlerBase(_ModelHandlerBase):
"""Base class for converting and running a model."""
def __init__(
self,
model_config: ModelConfig,
trt_convert_params: trt.TrtConversionParams,
):
super(_TrtModelHandlerBase, self).__init__(model_config)
self._trt_convert_params = trt_convert_params
self._converter = self._create_converter(trt_convert_params)
logging.info("Converting to TensorRT!")
self._check_conversion(self._converter.convert())
self._conversion_is_saved = False
@abc.abstractmethod
def _create_converter(self, trt_convert_params: trt.TrtConversionParams):
"""Creates a converter for the corresponding TF version."""
@abc.abstractmethod
def _check_conversion(self, conversion_output):
"""Checks if conversion output has any TensorRT engines."""
def _check_contains_trt_engine(self, graph_def: graph_pb2.GraphDef):
if "TRTEngineOp" not in [node.op for node in graph_def.node]:
raise RuntimeError("Failed to convert to TensorRT! "
"Model Information: {}".format(str(self)))
def __str__(self) -> str:
base = super(_TrtModelHandlerBase, self).__str__()
return "{}, TrtConversionParams: {}".format(base,
str(self._trt_convert_params))
@property
def trt_convert_params(self) -> trt.TrtConversionParams:
return self._trt_convert_params
def save(self,
output_saved_model_dir: Optional[str] = None,
overwrite=True) -> None:
"""Saves a TensorRT converted model."""
if self._conversion_is_saved and not overwrite:
return
output_saved_model_dir = output_saved_model_dir or tempfile.mkdtemp()
logging.info("Saving TensorRT model to %s!", output_saved_model_dir)
self._converter.save(output_saved_model_dir)
self._model_config = self.model_config._replace(
saved_model_dir=output_saved_model_dir)
self._conversion_is_saved = True
class TrtModelHandlerV1(_TrtModelHandlerBase, ModelHandlerV1):
"""Converts a TF1 model with TensorRT and runs the converted model."""
def _create_converter(self, trt_convert_params: trt.TrtConversionParams):
conversion_nodes_denylist = self.output_tensor_names
return trt.TrtGraphConverter(
input_saved_model_dir=self.model_config.saved_model_dir,
input_saved_model_tags=self.model_config.saved_model_tags,
input_saved_model_signature_key=(
self.model_config.saved_model_signature_key),
nodes_denylist=conversion_nodes_denylist,
max_batch_size=trt_convert_params.max_batch_size,
max_workspace_size_bytes=trt_convert_params.max_workspace_size_bytes,
precision_mode=trt_convert_params.precision_mode,
minimum_segment_size=trt_convert_params.minimum_segment_size,
is_dynamic_op=trt_convert_params.is_dynamic_op,
maximum_cached_engines=trt_convert_params.maximum_cached_engines,
use_calibration=trt_convert_params.use_calibration,
)
_check_conversion = _TrtModelHandlerBase._check_contains_trt_engine
def run(self,
inputs: Optional[Mapping[str, np.ndarray]] = None,
warmup_iterations=10,
benchmark_iterations=100) -> TestResult:
self.save(overwrite=False)
logging.info("Running with TensorRT!")
test_result = ModelHandlerV1.run(
self,
inputs,
warmup_iterations,
benchmark_iterations,
allow_to_use_gpu=True)
return test_result._replace(trt_convert_params=self._trt_convert_params)
class TrtModelHandlerV2(_TrtModelHandlerBase, ModelHandlerV2):
"""Converts a TF2 model with TensorRT and runs the converted model."""
def _create_converter(self, trt_convert_params: trt.TrtConversionParams):
return trt.TrtGraphConverterV2(
input_saved_model_dir=self.model_config.saved_model_dir,
input_saved_model_tags=self.model_config.saved_model_tags,
input_saved_model_signature_key=(
self.model_config.saved_model_signature_key),
conversion_params=trt_convert_params)
def _check_conversion(self, graph_func):
graph_def = graph_func.graph.as_graph_def()
self._check_contains_trt_engine(graph_def)
def run(self,
inputs: Optional[Sequence[framework_ops.Tensor]] = None,
warmup_iterations=10,
benchmark_iterations=100) -> TestResult:
self.save(overwrite=False)
logging.info("Running with TensorRT!")
test_result = ModelHandlerV2.run(
self,
inputs,
warmup_iterations,
benchmark_iterations,
allow_to_use_gpu=True)
return test_result._replace(trt_convert_params=self._trt_convert_params)
class _ModelHandlerManagerBase(metaclass=abc.ABCMeta):
"""Manages a series of ModelHandlers for aggregrated testing/benchmarking."""
def __init__(
self, model_config: ModelConfig,
default_trt_convert_params: trt.TrtConversionParams,
trt_convert_params_updater: Callable[[trt.TrtConversionParams],
Iterable[trt.TrtConversionParams]]):
self._ori_model = self.model_handler_cls(model_config)
self._trt_models = []
for trt_convert_params in trt_convert_params_updater(
default_trt_convert_params):
trt_model = self.trt_model_handler_cls(
model_config, trt_convert_params=trt_convert_params)
self._trt_models.append(trt_model)
self._result_collection = TestResultCollection(
results=[], config=model_config)
def __str__(self) -> str:
return "Input Model: {}".format(str(self._ori_model))
def __repr__(self) -> str:
return "{}({})".format(self.__class__.__name__, str(self))
@property
@classmethod
@abc.abstractmethod
def model_handler_cls(cls):
"""The modle handler class. ModelHandleV1/ModelHandlerV2."""
@property
@classmethod
@abc.abstractmethod
def trt_model_handler_cls(cls):
"""The TensorRTmodle handler class. TrtModelHandleV1/TrtModelHandlerV2."""
@property
def model_config(self):
return self._ori_model.model_config
def generate_random_inputs(self, batch_size: Optional[int] = None):
return self._ori_model.generate_random_inputs(batch_size)
def run(self,
inputs=None,
warmup_iterations: int = 10,
benchmark_iterations: int = 100) -> TestResultCollection:
"""Runs model inference with provided or randomly generated input tensors.
Args:
inputs: Mapping from names to input ndarrays in TF1. Or a sequence of
        tensors in TF2. If `None`, randomly generated input tensors will be used
instead.
warmup_iterations: Number of inferences to warm up the runtime.
benchmark_iterations: Number of inferences to measure the latency.
Returns:
`TestResultCollection` summarizing timing and numerics information for
different TensorRT conversion settings.
"""
inputs = inputs or self.generate_random_inputs()
results = [
model.run(inputs, warmup_iterations, benchmark_iterations)
for model in [self._ori_model] + self._trt_models
]
return self._result_collection._replace(results=results)
class ModelHandlerManagerV1(_ModelHandlerManagerBase):
"""Manages a series of ModelHandlers for aggregrated testing/benchmarking in TF1."""
model_handler_cls = ModelHandlerV1
trt_model_handler_cls = TrtModelHandlerV1
class ModelHandlerManagerV2(_ModelHandlerManagerBase):
"""Manages a series of ModelHandlers for aggregrated testing/benchmarking in TF2."""
model_handler_cls = ModelHandlerV2
trt_model_handler_cls = TrtModelHandlerV2
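# Hedged usage sketch (added for illustration, not part of the original
# module): wiring a ModelHandlerManagerV2 with an updater that sweeps
# precision modes. The saved-model path is a hypothetical placeholder, and
# DEFAULT_TRT_CONVERSION_PARAMS/_replace assume TrtConversionParams is the
# namedtuple exposed by trt_convert at the time this file was written.
def _example_benchmark_v2():
  config = ModelConfig(saved_model_dir="/tmp/my_saved_model")  # hypothetical path
  def updater(params):
    for precision in ("FP32", "FP16"):
      yield params._replace(precision_mode=precision)
  manager = ModelHandlerManagerV2(config, trt.DEFAULT_TRT_CONVERSION_PARAMS,
                                  updater)
  return manager.run(warmup_iterations=5, benchmark_iterations=20)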
| 1.53125 | 2 |
Python/Python Evaluation/solution.py | arpitran/HackerRank_solutions | 0 | 6437 | <filename>Python/Python Evaluation/solution.py
eval(input("Enter an expression "))
kpca_iris.py | syamkakarla98/Kernel-PCA-Using-Different-Kernels-With-Classification | 10 | 6438 | <gh_stars>1-10
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
# load dataset into Pandas DataFrame
df = pd.read_csv(r"D:\Python_programs\ML\Iris Data\KPCA\iris.csv")  # raw string so the backslashes are kept literally
#df.to_csv('iris.csv')
from sklearn.preprocessing import StandardScaler
features = ['sepal length', 'sepal width', 'petal length', 'petal width']
# Separating out the features
x = df.loc[:, features].values
# Separating out the target
y = df.loc[:,['target']].values
# Standardizing the features
x = StandardScaler().fit_transform(x)
from sklearn.decomposition import KernelPCA
## Finding the principal components
# KERNELS: linear, rbf, poly
def Kernel_Pca(ker):
kpca = KernelPCA(n_components=4, kernel=ker, gamma=15)
    kpca_transform = kpca.fit_transform(x)  # fit once and reuse the transformed data
explained_variance = np.var(kpca_transform, axis=0)
ev = explained_variance / np.sum(explained_variance)
#--------- Bar Graph for Explained Variance Ratio ------------
plt.bar([1,2,3,4],list(ev*100),label='Principal Components',color='b')
plt.legend()
plt.xlabel('Principal Components ')
#----------------------
n=list(ev*100)
pc=[]
for i in range(len(n)):
n[i]=round(n[i],4)
pc.append('PC-'+str(i+1)+'('+str(n[i])+')')
#----------------------
plt.xticks([1,2,3,4],pc, fontsize=7, rotation=30)
plt.ylabel('Variance Ratio')
plt.title('Variance Ratio of IRIS Dataset using kernel:'+str(ker))
plt.show()
#---------------------------------------------------
    # Since the initial 2 principal components have high variance,
    # we select PC-1 and PC-2.
#---------------------------------------------------
kpca = KernelPCA(n_components=2, kernel=ker, gamma=15)
    principalComponents = kpca.fit_transform(x)
principalDf = pd.DataFrame(data = principalComponents
, columns = ['PC-1', 'PC-2'])
# Adding lables
finalDf = pd.concat([principalDf, df[['target']]], axis = 1)
# Plotting pc1 & pc2
fig = plt.figure(figsize = (8,8))
ax = fig.add_subplot(1,1,1)
ax.set_xlabel('PC-1', fontsize = 15)
ax.set_ylabel('PC-2', fontsize = 15)
ax.set_title('KPCA on IRIS Dataset using kernel:'+str(ker), fontsize = 20)
targets = ['Iris-setosa', 'Iris-versicolor', 'Iris-virginica']
colors = ['r', 'g', 'b']
for target, color in zip(targets,colors):
indicesToKeep = finalDf['target'] == target
ax.scatter(finalDf.loc[indicesToKeep, 'PC-1']
, finalDf.loc[indicesToKeep, 'PC-2']
, c = color
, s = 30)
ax.legend(targets)
ax.grid()
plt.show() # FOR SHOWING THE PLOT
#------------------- SAVING DATA INTO CSV FILE ------------
finalDf.to_csv('iris_after_KPCA_using_'+str(ker)+'.csv')
#------------------------------------------------------
k=['linear','rbf','poly']
for i in k:
Kernel_Pca(i)
| 3.171875 | 3 |
Python/libraries/recognizers-date-time/recognizers_date_time/date_time/italian/dateperiod_extractor_config.py | felaray/Recognizers-Text | 0 | 6439 | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from typing import List, Pattern
from recognizers_text.utilities import RegExpUtility
from recognizers_number.number import BaseNumberParser
from recognizers_number.number.italian.extractors import ItalianIntegerExtractor, ItalianCardinalExtractor
from recognizers_number.number.italian.parsers import ItalianNumberParserConfiguration
from ...resources.base_date_time import BaseDateTime
from ...resources.italian_date_time import ItalianDateTime
from ..extractors import DateTimeExtractor
from ..base_duration import BaseDurationExtractor
from ..base_date import BaseDateExtractor
from ..base_dateperiod import DatePeriodExtractorConfiguration, MatchedIndex
from .duration_extractor_config import ItalianDurationExtractorConfiguration
from .date_extractor_config import ItalianDateExtractorConfiguration
from recognizers_text.extractor import Extractor
from recognizers_number import ItalianOrdinalExtractor, BaseNumberExtractor, ItalianCardinalExtractor
class ItalianDatePeriodExtractorConfiguration(DatePeriodExtractorConfiguration):
@property
def previous_prefix_regex(self) -> Pattern:
return self._previous_prefix_regex
@property
def check_both_before_after(self) -> bool:
return self._check_both_before_after
@property
def simple_cases_regexes(self) -> List[Pattern]:
return self._simple_cases_regexes
@property
def illegal_year_regex(self) -> Pattern:
return self._illegal_year_regex
@property
def year_regex(self) -> Pattern:
return self._year_regex
@property
def till_regex(self) -> Pattern:
return self._till_regex
@property
def followed_unit(self) -> Pattern:
return self._followed_unit
@property
def number_combined_with_unit(self) -> Pattern:
return self._number_combined_with_unit
@property
def past_regex(self) -> Pattern:
return self._past_regex
@property
def decade_with_century_regex(self) -> Pattern:
return self._decade_with_century_regex
@property
def future_regex(self) -> Pattern:
return self._future_regex
@property
def week_of_regex(self) -> Pattern:
return self._week_of_regex
@property
def month_of_regex(self) -> Pattern:
return self._month_of_regex
@property
def date_unit_regex(self) -> Pattern:
return self._date_unit_regex
@property
def in_connector_regex(self) -> Pattern:
return self._in_connector_regex
@property
def range_unit_regex(self) -> Pattern:
return self._range_unit_regex
@property
def date_point_extractor(self) -> DateTimeExtractor:
return self._date_point_extractor
@property
def integer_extractor(self) -> BaseNumberExtractor:
return self._integer_extractor
@property
def number_parser(self) -> BaseNumberParser:
return self._number_parser
@property
def duration_extractor(self) -> DateTimeExtractor:
return self._duration_extractor
@property
def now_regex(self) -> Pattern:
return self._now_regex
@property
def future_suffix_regex(self) -> Pattern:
return self._future_suffix_regex
@property
def ago_regex(self) -> Pattern:
return self._ago_regex
@property
def later_regex(self) -> Pattern:
return self._later_regex
@property
def less_than_regex(self) -> Pattern:
return self._less_than_regex
@property
def more_than_regex(self) -> Pattern:
return self._more_than_regex
@property
def duration_date_restrictions(self) -> [str]:
return self._duration_date_restrictions
@property
def year_period_regex(self) -> Pattern:
return self._year_period_regex
@property
def month_num_regex(self) -> Pattern:
return self._month_num_regex
@property
def century_suffix_regex(self) -> Pattern:
return self._century_suffix_regex
@property
def ordinal_extractor(self) -> BaseNumberExtractor:
return self._ordinal_extractor
@property
def cardinal_extractor(self) -> Extractor:
return self._cardinal_extractor
@property
def time_unit_regex(self) -> Pattern:
return self._time_unit_regex
@property
def within_next_prefix_regex(self) -> Pattern:
return self._within_next_prefix_regex
@property
def range_connector_regex(self) -> Pattern:
return self._range_connector_regex
@property
def day_regex(self) -> Pattern:
return self._day_regex
@property
def week_day_regex(self) -> Pattern:
return self._week_day_regex
@property
def relative_month_regex(self) -> Pattern:
return self._relative_month_regex
@property
def month_suffix_regex(self) -> Pattern:
return self._month_suffix_regex
@property
def past_prefix_regex(self) -> Pattern:
return self._past_prefix_regex
@property
def next_prefix_regex(self) -> Pattern:
return self._next_prefix_regex
@property
def this_prefix_regex(self) -> Pattern:
return self._this_prefix_regex
@property
def which_week_regex(self) -> Pattern:
return self._which_week_regex
@property
def rest_of_date_regex(self) -> Pattern:
return self._rest_of_date_regex
@property
def complex_date_period_regex(self) -> Pattern:
return self._complex_date_period_regex
@property
def week_day_of_month_regex(self) -> Pattern:
return self._week_day_of_month_regex
@property
def all_half_year_regex(self) -> Pattern:
return self._all_half_year_regex
def __init__(self):
self._all_half_year_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.AllHalfYearRegex)
self._week_day_of_month_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayOfMonthRegex)
self._complex_date_period_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.ComplexDatePeriodRegex)
self._rest_of_date_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RestOfDateRegex)
self._which_week_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WhichWeekRegex)
self._this_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.ThisPrefixRegex)
self._next_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.NextSuffixRegex)
self._past_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.PastSuffixRegex)
self._month_suffix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthSuffixRegex)
self._relative_month_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RelativeMonthRegex)
self._week_day_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayRegex)
self._day_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.DayRegex)
        self._range_connector_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RangeConnectorRegex)
        self._decade_with_century_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.DecadeWithCenturyRegex)
self._time_unit_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.TimeUnitRegex)
        self._check_both_before_after = ItalianDateTime.CheckBothBeforeAfter
self._simple_cases_regexes = [
RegExpUtility.get_safe_reg_exp(ItalianDateTime.SimpleCasesRegex),
RegExpUtility.get_safe_reg_exp(ItalianDateTime.BetweenRegex),
RegExpUtility.get_safe_reg_exp(ItalianDateTime.OneWordPeriodRegex),
RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthWithYear),
RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthNumWithYear),
RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearRegex),
RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearPeriodRegex),
RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekOfYearRegex),
RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayOfMonthRegex),
RegExpUtility.get_safe_reg_exp(
ItalianDateTime.MonthFrontBetweenRegex),
RegExpUtility.get_safe_reg_exp(
ItalianDateTime.MonthFrontSimpleCasesRegex),
RegExpUtility.get_safe_reg_exp(ItalianDateTime.QuarterRegex),
RegExpUtility.get_safe_reg_exp(
ItalianDateTime.QuarterRegexYearFront),
RegExpUtility.get_safe_reg_exp(ItalianDateTime.SeasonRegex),
RegExpUtility.get_safe_reg_exp(
ItalianDateTime.LaterEarlyPeriodRegex),
RegExpUtility.get_safe_reg_exp(
ItalianDateTime.WeekWithWeekDayRangeRegex),
RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearPlusNumberRegex),
RegExpUtility.get_safe_reg_exp(ItalianDateTime.DecadeWithCenturyRegex),
RegExpUtility.get_safe_reg_exp(ItalianDateTime.RelativeDecadeRegex)
]
self._illegal_year_regex = RegExpUtility.get_safe_reg_exp(
BaseDateTime.IllegalYearRegex)
self._year_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.YearRegex)
self._till_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.TillRegex)
self._followed_unit = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.FollowedDateUnit)
self._number_combined_with_unit = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.NumberCombinedWithDateUnit)
self._past_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.PastSuffixRegex)
self._future_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.NextSuffixRegex)
self._week_of_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.WeekOfRegex)
self._month_of_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.MonthOfRegex)
self._date_unit_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.DateUnitRegex)
self._within_next_prefix_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.WithinNextPrefixRegex)
self._in_connector_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.InConnectorRegex)
self._range_unit_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.RangeUnitRegex)
self.from_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.FromRegex)
self.connector_and_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.ConnectorAndRegex)
self.before_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.BeforeRegex2)
self._date_point_extractor = BaseDateExtractor(
ItalianDateExtractorConfiguration())
self._integer_extractor = ItalianIntegerExtractor()
self._number_parser = BaseNumberParser(
ItalianNumberParserConfiguration())
self._duration_extractor = BaseDurationExtractor(
ItalianDurationExtractorConfiguration())
self._now_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.NowRegex)
self._future_suffix_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.FutureSuffixRegex
)
self._ago_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.AgoRegex
)
self._later_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.LaterRegex
)
self._less_than_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.LessThanRegex
)
self._more_than_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.MoreThanRegex
)
self._duration_date_restrictions = ItalianDateTime.DurationDateRestrictions
self._year_period_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.YearPeriodRegex
)
self._month_num_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.MonthNumRegex
)
self._century_suffix_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.CenturySuffixRegex
)
self._ordinal_extractor = ItalianOrdinalExtractor()
self._cardinal_extractor = ItalianCardinalExtractor()
self._previous_prefix_regex = RegExpUtility.get_safe_reg_exp(
ItalianDateTime.PreviousPrefixRegex
)
def get_from_token_index(self, source: str) -> MatchedIndex:
match = self.from_regex.search(source)
if match:
return MatchedIndex(True, match.start())
return MatchedIndex(False, -1)
def get_between_token_index(self, source: str) -> MatchedIndex:
match = self.before_regex.search(source)
if match:
return MatchedIndex(True, match.start())
return MatchedIndex(False, -1)
def has_connector_token(self, source: str) -> bool:
        return self.connector_and_regex.search(source) is not None
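if __name__ == '__main__':
    # Usage sketch (not part of the original module): build the configuration
    # and probe one of its token-index helpers. The sample phrase is an
    # assumption; whether it matches depends on the Italian resource regexes.
    config = ItalianDatePeriodExtractorConfiguration()
    print(config.get_from_token_index('dal 5 gennaio al 10 gennaio'))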
| 2.296875 | 2 |
pydbrepo/drivers/sqlite.py | danteay/pydbrepo | 2 | 6440 | <filename>pydbrepo/drivers/sqlite.py
"""SQLite Driver implementation."""
# pylint: disable=R0201
import os
import sqlite3
from typing import Any, AnyStr, List, NoReturn, Optional, Tuple
from pydbrepo.drivers.driver import Driver
class SQLite(Driver):
"""SQLite Driver connection class.
Environment variables:
        DATABASE_URL: Database file url on the system. If it's an in-memory database the url
            should be None or the `:memory:` string.
        DATABASE_COMMIT: default('false') Auto commit transaction flag.
    :param url: Database connection url.
    :param autocommit: Auto commit transactions.
"""
def __init__(
self,
url: Optional[AnyStr] = None,
autocommit: Optional[bool] = None,
):
super().__init__()
self.__build_connection(url, autocommit)
def __build_connection(
self,
url: Optional[AnyStr] = None,
autocommit: Optional[bool] = None,
) -> NoReturn:
"""Start real driver connection from parameters.
:param url: Database connection url
:param autocommit: Auto commit transactions
"""
if url is None:
url = ':memory:'
if autocommit is None:
autocommit = False
if os.getenv('DATABASE_URL', None) is not None:
url = os.getenv('DATABASE_URL')
if os.getenv('DATABASE_COMMIT', None) is not None:
autocommit = os.getenv('DATABASE_COMMIT').lower() == "true"
self.__url = url
self.__conn = sqlite3.connect(url)
self.__commit = autocommit
@staticmethod
def __execute(cursor, sql: AnyStr, *args) -> Any:
"""Execute query and attempt to replace with arguments.
:param cursor: Connection cursor statement
:param sql: Raw query to be executed
:param args: List of arguments passed to be replaced in query
"""
if not args:
return cursor.execute(sql)
return cursor.execute(sql, tuple(args))
def query(self, **kwargs) -> List[Tuple]:
"""Execute a query and return all values.
:param kwargs: Parameters to execute query statement.
sql: AnyStr -> SQL query statement
args: Optional[Iterable[Any]] -> Object with query replacement values
:return List[Tuple]: List of tuple records found by query
"""
self._validate_params({'sql'}, set(kwargs.keys()))
cursor = self.__conn.cursor()
_ = self.__execute(cursor, kwargs['sql'], *kwargs.get('args', []))
self.__commit_transaction()
res = cursor.fetchall()
cursor.close()
return res
def query_one(self, **kwargs) -> Tuple[Any, ...]:
"""Execute a query and do not return any result value.
:param kwargs: Parameters to execute query statement.
sql: AnyStr -> SQL query statement
args: Optional[Iterable[Any]] -> Object with query replacement values
:return Tuple: Found record
"""
self._validate_params({'sql'}, set(kwargs.keys()))
cursor = self.__conn.cursor()
_ = self.__execute(cursor, kwargs['sql'], *kwargs.get('args', []))
self.__commit_transaction()
res = cursor.fetchone()
cursor.close()
return res
def query_none(self, **kwargs) -> NoReturn:
"""Execute a query and do not return any result value.
:param kwargs: Parameters to execute query statement.
sql: AnyStr -> SQL query statement
args: Optional[Iterable[Any]] -> Object with query replacement values
"""
self._validate_params({'sql'}, set(kwargs.keys()))
cursor = self.__conn.cursor()
_ = self.__execute(cursor, kwargs['sql'], *kwargs.get('args', []))
self.__commit_transaction()
cursor.close()
def commit(self) -> NoReturn:
"""Commit transaction."""
self.__conn.commit()
    def rollback(self) -> NoReturn:
        """Rollback current transaction."""
        self.__conn.rollback()
def close(self) -> NoReturn:
"""Close current connection."""
self.__conn.close()
def get_real_driver(self) -> Any:
"""Return real mysql driver connection."""
return self.__conn
def placeholder(self, **kwargs) -> AnyStr:
"""Return query place holder."""
return '?'
def reset_placeholder(self) -> NoReturn:
"""Reset place holder status (do nothing)"""
def __repr__(self):
"""Mysql driver representation."""
return f"SQLite({self.__url})"
def __commit_transaction(self):
"""Execute commit operation if the __commit flag is True."""
if self.__commit:
self.commit()
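if __name__ == '__main__':
    # Usage sketch (not part of the original module): exercise the driver
    # against an in-memory database. Note that a DATABASE_URL environment
    # variable, if set, would override the url passed here.
    db = SQLite(':memory:', autocommit=True)
    db.query_none(sql='CREATE TABLE t (id INTEGER PRIMARY KEY, name TEXT)')
    db.query_none(sql='INSERT INTO t (name) VALUES (?)', args=['alice'])
    print(db.query(sql='SELECT * FROM t'))  # expected: [(1, 'alice')]
    db.close()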
| 3.109375 | 3 |
Modules/BatchNormND.py | EmilPi/PuzzleLib | 52 | 6441 | <reponame>EmilPi/PuzzleLib
import numpy as np
from PuzzleLib import Config
from PuzzleLib.Backend import gpuarray, Blas
from PuzzleLib.Backend.Dnn import batchNormNd, batchNormNdBackward
from PuzzleLib.Variable import Variable
from PuzzleLib.Modules.Module import ModuleError, Module
class BatchNormND(Module):
def __init__(self, nd, maps, epsilon=1e-5, initFactor=1.0, minFactor=0.1, sscale=0.01, affine=True, name=None,
empty=False, inplace=False):
super().__init__(name)
self.inplace = inplace
if inplace and Config.showWarnings:
Config.getLogger().info("Warning: %s is using inplace flag", self)
self.maps = maps
self.epsilon = epsilon
self.initFactor = initFactor
self.minFactor = minFactor
self.numOfProps = 0
self.affine = affine
self.scale, self.bias, self.mean, self.var = None, None, None, None
self.savemean, self.saveinvvar, self.scalegrad, self.biasgrad = None, None, None, None
if empty:
return
shape = (1, maps) + self.repeat(1, nd)
scale = np.random.normal(1.0, sscale if affine else 0.0, shape).astype(self.calctype)
var = np.ones(shape, dtype=self.calctype)
self.setVar("scale", Variable(gpuarray.to_gpu(scale)))
self.setVar("bias", Variable(gpuarray.zeros(shape, dtype=self.calctype)))
self.setAttr("mean", gpuarray.zeros(shape, dtype=self.calctype))
self.setAttr("var", gpuarray.to_gpu(var))
def updateData(self, data):
if self.train:
if self.inplace:
raise ModuleError("%s: using inplace flag in train mode is prohibited" % self)
self.numOfProps += 1
factor = max(self.initFactor / self.numOfProps, self.minFactor)
self.data, self.savemean, self.saveinvvar = batchNormNd(
data, self.scale, self.bias, self.mean, self.var, self.epsilon, factor, False
)
else:
self.data = batchNormNd(
data, self.scale, self.bias, self.mean, self.var, self.epsilon, 0, True,
out=data if self.inplace else None
)
def updateGrad(self, grad):
tup = batchNormNdBackward(self.inData, grad, self.scale, self.savemean, self.saveinvvar, self.epsilon)
if self.affine:
self.grad, self.scalegrad, self.biasgrad = tup
else:
self.grad, _, _ = tup
def accGradParams(self, grad, scale=1.0, momentum=0.0):
if self.affine:
Blas.addVectorToVector(
self.scalegrad.ravel(), self.vars["scale"].grad.ravel(), out=self.vars["scale"].grad.ravel(),
alpha=scale, beta=momentum
)
Blas.addVectorToVector(
self.biasgrad.ravel(), self.vars["bias"].grad.ravel(), out=self.vars["bias"].grad.ravel(),
alpha=scale, beta=momentum
)
def dataShapeFrom(self, shape):
return shape
def gradShapeFrom(self, shape):
return shape
def reset(self):
super().reset()
self.savemean, self.saveinvvar = None, None
if self.affine:
self.scalegrad, self.biasgrad = None, None
def calcMode(self, T):
if Config.backend == Config.Backend.cuda:
if T not in {np.float16, np.float32}:
raise ModuleError("Unsupported dtype %s" % T)
elif T != np.float32:
raise ModuleError("Unsupported dtype %s" % T)
self.calctype = T
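if __name__ == "__main__":
    # Usage sketch (not part of the original module): one forward pass through
    # a 2D batch-norm. Shapes here are illustrative assumptions, and a working
    # PuzzleLib GPU backend must be initialized for gpuarray to function.
    bn = BatchNormND(nd=2, maps=16)
    data = gpuarray.to_gpu(np.random.randn(4, 16, 8, 8).astype(np.float32))
    bn.updateData(data)
    print(bn.data.shape)  # same shape as the input: (4, 16, 8, 8)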
| 2.203125 | 2 |
python/testData/editing/enterInIncompleteTupleLiteral.after.py | jnthn/intellij-community | 2 | 6442 | <filename>python/testData/editing/enterInIncompleteTupleLiteral.after.py
xs = ('foo', 'bar',
      'baz'<caret>
| 1.34375 | 1 |
model/server/server.py | waltzofpearls/reckon | 8 | 6443 | <reponame>waltzofpearls/reckon<filename>model/server/server.py
from concurrent import futures
from forecaster.prophet import Forecaster as ProphetForecaster
from multiprocessing import Event, Process, cpu_count
from pythonjsonlogger import jsonlogger
import contextlib
import grpc
import logging
import model.api.forecast_pb2_grpc as grpc_lib
import os
import signal
import socket
import sys
import time
class ForecastServicer(ProphetForecaster):
def __init__(self, logger):
self.logger = logger
def pretty_timedelta(self, seconds):
seconds = int(seconds)
days, seconds = divmod(seconds, 86400)
hours, seconds = divmod(seconds, 3600)
minutes, seconds = divmod(seconds, 60)
if days > 0:
return '{:d}d{:d}h{:d}m{:d}s'.format(days, hours, minutes, seconds)
elif hours > 0:
return '{:d}h{:d}m{:d}s'.format(hours, minutes, seconds)
elif minutes > 0:
return '{:d}m{:d}s'.format(minutes, seconds)
else:
return '{:d}s'.format(seconds)
class GracefulShutdown:
def __init__(self, logger):
self.logger = logger
self.event = Event()
signal.signal(signal.SIGINT, self.handler('SIGINT'))
signal.signal(signal.SIGTERM, self.handler('SIGTERM'))
signal.signal(signal.SIGHUP, self.handler('SIGHUP'))
def handler(self, signal_name):
def fn(signal_received, frame):
self.logger.info('signal received', extra={'signal': signal_name})
self.event.set()
return fn
class Config(object):
def __init__(self):
self.grpc_server_address = os.getenv('GRPC_SERVER_ADDRESS', '')
self.grpc_server_key = str.encode(os.getenv('GRPC_SERVER_KEY', ''))
self.grpc_server_cert = str.encode(os.getenv('GRPC_SERVER_CERT', ''))
self.grpc_root_ca = str.encode(os.getenv('GRPC_ROOT_CA', ''))
self.gprc_server_process_num = int(os.getenv('GPRC_SERVER_PROCESS_NUM', cpu_count()))
self.grpc_server_thread_num = int(os.getenv('GRPC_SERVER_THREAD_NUM', 1))
self.grpc_server_grace_period_in_secs = int(os.getenv('GRPC_SERVER_GRACE_PERIOD_IN_SECS', 2))
self.grpc_server_kill_period_in_secs = int(os.getenv('GRPC_SERVER_KILL_PERIOD_IN_SECS', 5))
class Server(object):
def __init__(self, config, logger):
self.config = config
self.logger = logger
@contextlib.contextmanager
def _reserve_port(self):
"""Find and reserve a port for all subprocesses to use"""
sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 0:
raise RuntimeError('failed to set SO_REUSEPORT.')
_, port = self.config.grpc_server_address.split(':')
sock.bind(('', int(port)))
try:
yield sock.getsockname()[1]
finally:
sock.close()
def _run_server(self, shutdown_event):
server_credentials = grpc.ssl_server_credentials(
[(self.config.grpc_server_key, self.config.grpc_server_cert)],
root_certificates=self.config.grpc_root_ca,
require_client_auth=True
)
server = grpc.server(
futures.ThreadPoolExecutor(max_workers=self.config.grpc_server_thread_num),
options=[
("grpc.so_reuseport", 1),
("grpc.use_local_subchannel_pool", 1),
],
)
grpc_lib.add_ForecastServicer_to_server(ForecastServicer(self.logger), server)
server.add_secure_port(self.config.grpc_server_address, server_credentials)
self.logger.info('starting python gRPC server...')
server.start()
while not shutdown_event.is_set():
time.sleep(1)
server.stop(5).wait()
self.logger.info('python gRPC server stopped')
def serve(self):
with self._reserve_port():
procs = []
shutdown = GracefulShutdown(self.logger)
for _ in range(self.config.gprc_server_process_num):
proc = Process(target=self._run_server, args=(shutdown.event,))
procs.append(proc)
proc.start()
while not shutdown.event.is_set():
time.sleep(1)
t = time.time()
grace_period = self.config.grpc_server_grace_period_in_secs
kill_period = self.config.grpc_server_kill_period_in_secs
while True:
                # Send SIGTERM if a process doesn't exit quickly enough, and kill it as a last resort
# .is_alive() also implicitly joins the process (good practice in linux)
alive_procs = [proc for proc in procs if proc.is_alive()]
if len(alive_procs) == 0:
break
elapsed = time.time() - t
if elapsed >= grace_period and elapsed < kill_period:
for proc in alive_procs:
proc.terminate()
self.logger.info("sending SIGTERM to subprocess", extra={'proc': proc})
elif elapsed >= kill_period:
for proc in alive_procs:
self.logger.warning("sending SIGKILL to subprocess", extra={'proc': proc})
# Queues and other inter-process communication primitives can break when
# process is killed, but we don't care here
proc.kill()
time.sleep(1)
time.sleep(1)
for proc in procs:
self.logger.info("subprocess terminated", extra={'proc': proc})
def json_logger():
logger = logging.getLogger()
log_handler = logging.StreamHandler(sys.stdout)
formatter = jsonlogger.JsonFormatter(fmt='%(asctime)s %(name)s %(levelname)s %(message)s')
log_handler.setFormatter(formatter)
log_handler.flush = sys.stdout.flush
logger.setLevel(logging.INFO)
logger.addHandler(log_handler)
return logger
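if __name__ == '__main__':
    # Entry-point sketch (an assumption; the original repo may wire this up
    # elsewhere): read the config from environment variables and serve.
    Server(Config(), json_logger()).serve()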
| 2.1875 | 2 |
test/test_setupcall.py | jhgoebbert/jupyter-libertem-proxy | 0 | 6444 | def test_setupcall():
"""
Test the call of the setup function
"""
import jupyter_libertem_proxy as jx
print("\nRunning test_setupcall...")
print(jx.setup_libertem())
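if __name__ == "__main__":
    # Allow running this check directly (an assumption; it is normally
    # collected and run by pytest).
    test_setupcall()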
| 2.03125 | 2 |
launchpad/launch/worker_manager.py | LaudateCorpus1/launchpad | 0 | 6445 | <gh_stars>0
# Copyright 2020 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""WorkerManager handles thread and process-based runtimes."""
import atexit
import collections
from concurrent import futures
import ctypes
import os
import signal
import subprocess
import threading
import time
from typing import Optional, Sequence, Text
from absl import flags
from absl import logging
from absl.testing import absltest
from launchpad import flags as lp_flags
import psutil
import termcolor
FLAGS = flags.FLAGS
ThreadWorker = collections.namedtuple('ThreadWorker', ['thread', 'future'])
_WORKER_MANAGERS = threading.local()
_HAS_MAIN_MANAGER = False
def get_worker_manager():
manager = getattr(_WORKER_MANAGERS, 'manager', None)
assert manager, 'Worker manager is not available in the current thread'
return manager
def register_signal_handler(sig, handler):
"""Registers a signal handler."""
return signal.signal(sig, handler)
def remove_signal_handler(sig, handler):
  """Restores a previously saved handler for the given signal."""
  return signal.signal(sig, handler)
def wait_for_stop():
"""Blocks until termination of the node's program is requested.
Can be used to perform cleanup at the end of the run, for example:
start_server()
lp.wait_for_stop()
stop_server()
checkpoint()
"""
get_worker_manager().wait_for_stop()
class WorkerManager:
"""Encapsulates running threads and processes of a Launchpad Program."""
def __init__(
self,
stop_main_thread=False,
kill_main_thread=True,
register_in_thread=False,
register_signals=True):
"""Initializes a WorkerManager.
Args:
stop_main_thread: Should main thread be notified about termination.
kill_main_thread: When set to false try not to kill the launcher while
killing workers. This is not possible when thread workers run in the
same process.
      register_in_thread: Whether to register this manager as the current
        thread's worker manager, making it available via get_worker_manager().
register_signals: Whether or not to register signal handlers.
"""
self._mutex = threading.Lock()
self._termination_notice_secs = -1
handle_user_stop = False
global _HAS_MAIN_MANAGER
# Make the first created worker manager the main manager, which handles
# signals.
if not _HAS_MAIN_MANAGER:
self._termination_notice_secs = FLAGS.lp_termination_notice_secs
handle_user_stop = True
_HAS_MAIN_MANAGER = True
self._active_workers = collections.defaultdict(list)
self._workers_count = collections.defaultdict(lambda: 0)
self._first_failure = None
self._stop_counter = 0
self._alarm_enabled = False
self._kill_main_thread = kill_main_thread
self._stop_event = threading.Event()
self._main_thread = threading.current_thread().ident
self._sigterm_handler = None
self._sigquit_handler = None
self._sigalrm_handler = None
if register_signals:
self._sigterm_handler = register_signal_handler(signal.SIGTERM,
self._sigterm)
self._sigquit_handler = register_signal_handler(signal.SIGQUIT,
self._sigquit)
if handle_user_stop:
register_signal_handler(
signal.SIGINT, lambda sig=None, frame=None: self._stop_by_user())
self._stop_main_thread = stop_main_thread
if register_in_thread:
_WORKER_MANAGERS.manager = self
def _disable_signals(self):
self._disable_alarm()
if self._sigterm_handler is not None:
remove_signal_handler(signal.SIGTERM, self._sigterm_handler)
self._sigterm_handler = None
if self._sigquit_handler is not None:
remove_signal_handler(signal.SIGQUIT, self._sigquit_handler)
self._sigquit_handler = None
def _sigterm(self, sig=None, frame=None):
"""Handles SIGTERM by stopping the workers."""
if callable(self._sigterm_handler):
self._sigterm_handler(sig, frame)
self._stop()
def _sigquit(self, sig=None, frame=None):
if callable(self._sigquit_handler):
self._sigquit_handler(sig, frame)
self._kill()
def wait_for_stop(self):
"""Blocks until managed runtime is being terminated."""
self._stop_event.wait()
def thread_worker(self, name, function):
"""Registers and start a new thread worker.
Args:
name: Name of the worker group.
function: Entrypoint function to execute in a worker.
"""
with self._mutex:
future = futures.Future()
def run_inner(f=function, future=future, manager=self):
_WORKER_MANAGERS.manager = manager
try:
future.set_result(f())
except BaseException as e:
future.set_exception(e)
      thread = threading.Thread(target=run_inner, name=name, daemon=True)
      thread.start()
self._workers_count[name] += 1
worker = ThreadWorker(thread=thread, future=future)
self._active_workers[name].append(worker)
if self._stop_event.is_set():
# Runtime is terminating, so notify the worker.
self._send_exception(worker)
def process_worker(self, name, command, env=None, **kwargs):
"""Adds process worker to the runtime.
Args:
name: Name of the worker's group.
command: Command to execute in the worker.
env: Environment variables to set for the worker.
**kwargs: Other parameters to be passed to `subprocess.Popen`.
"""
with self._mutex:
process = subprocess.Popen(command, env=env or {}, **kwargs)
self._workers_count[name] += 1
self._active_workers[name].append(process)
def register_existing_process(self, name: str, pid: int):
"""Registers already started worker process.
Args:
name: Name of the workers' group.
pid: Pid of the process to monitor.
"""
with self._mutex:
self._workers_count[name] += 1
self._active_workers[name].append(psutil.Process(pid))
def _stop_by_user(self):
"""Handles stopping of the runtime by a user."""
if self._termination_notice_secs != 0:
print(
termcolor.colored(
'User-requested termination. Asking workers to stop.', 'blue'))
print(termcolor.colored('Press CTRL+C to terminate immediately.', 'blue'))
signal.signal(signal.SIGINT, lambda sig, frame: self._kill())
self._stop()
def _kill_process_tree(self, pid):
"""Kills all child processes of the current process."""
parent = psutil.Process(pid)
for process in parent.children(recursive=True):
try:
process.send_signal(signal.SIGKILL)
except psutil.NoSuchProcess:
pass
parent.send_signal(signal.SIGKILL)
def _kill(self):
"""Kills all workers (and main thread/process if needed)."""
print(termcolor.colored('\nKilling entire runtime.', 'blue'))
kill_self = self._kill_main_thread
for workers in self._active_workers.values():
for worker in workers:
if isinstance(worker, ThreadWorker):
# Not possible to kill a thread without killing the process.
kill_self = True
else:
self._kill_process_tree(worker.pid)
if kill_self:
self._kill_process_tree(os.getpid())
def _send_exception(self, worker):
res = ctypes.pythonapi.PyThreadState_SetAsyncExc(
ctypes.c_long(worker.thread.ident),
ctypes.py_object(SystemExit))
assert res < 2, 'Exception raise failure'
def _stop_or_kill(self):
"""Stops all workers; kills them if they don't stop on time."""
pending_secs = self._termination_notice_secs - self._stop_counter
if pending_secs == 0:
if self._termination_notice_secs > 0:
still_running = [
label for label in self._active_workers
if self._active_workers[label]
]
print(
termcolor.colored(
f'Worker groups that did not terminate in time: {still_running}',
'red'))
self._kill()
return
if pending_secs >= 0:
print(
termcolor.colored(f'Waiting for workers to stop for {pending_secs}s.',
'blue'),
end='\r')
self._stop_counter += 1
for workers in self._active_workers.values():
for worker in workers:
if isinstance(worker, ThreadWorker):
if self._stop_counter == 1:
self._send_exception(worker)
elif isinstance(worker, subprocess.Popen):
worker.send_signal(signal.SIGTERM)
else:
# Notify all workers running under a proxy process.
children = worker.children(recursive=True)
worker_found = False
for process in children:
if process.name() != 'bash' and 'envelope_' not in process.name():
try:
worker_found = True
process.send_signal(signal.SIGTERM)
except psutil.NoSuchProcess:
pass
if not worker_found:
# No more workers running, so we can kill the proxy itself.
try:
worker.send_signal(signal.SIGKILL)
except psutil.NoSuchProcess:
pass
if self._stop_main_thread:
res = ctypes.pythonapi.PyThreadState_SetAsyncExc(
ctypes.c_long(threading.main_thread().ident),
ctypes.py_object(SystemExit))
assert res < 2, 'Exception raise failure'
if pending_secs >= 0:
signal.alarm(1)
def _stop(self):
"""Requests all workers to stop and schedule delayed termination."""
if not self._stop_event.is_set():
self._stop_event.set()
try:
if self._termination_notice_secs > 0:
self._alarm_enabled = True
self._sigalrm_handler = register_signal_handler(
signal.SIGALRM, lambda sig=None, frame=None: self._stop_or_kill())
except ValueError:
# This happens when we attempt to register a signal handler but not in the
# main thread. Send a SIGTERM to redirect to the main thread.
psutil.Process(os.getpid()).send_signal(signal.SIGTERM)
return
self._stop_or_kill()
def _disable_alarm(self):
if self._alarm_enabled:
self._alarm_enabled = False
signal.alarm(0)
remove_signal_handler(signal.SIGALRM, self._sigalrm_handler)
def stop_and_wait(self):
"""Requests stopping all workers and wait for termination."""
with self._mutex:
self._stop()
self.wait(raise_error=False)
def join(self):
self.wait()
def wait(self,
labels_to_wait_for: Optional[Sequence[Text]] = None,
raise_error=True,
return_on_first_completed=False):
"""Waits for workers to finish.
Args:
labels_to_wait_for: If supplied, only wait for these groups' workers to
finish. Wait for all workers otherwise.
raise_error: Raise an exception upon any worker failure.
return_on_first_completed: Whether to return upon the first completed (or
failed) worker.
Raises:
RuntimeError: if any worker raises an exception.
"""
while True:
try:
active_workers = True
while active_workers:
with self._mutex:
self._check_workers()
active_workers = False
if self._first_failure and raise_error:
failure = self._first_failure
self._first_failure = None
raise failure
for label in labels_to_wait_for or self._active_workers.keys():
if self._active_workers[label]:
active_workers = True
if (return_on_first_completed and len(self._active_workers[label])
< self._workers_count[label]):
return
time.sleep(0.1)
return
except SystemExit:
self._stop()
def cleanup_after_test(self, test_case: absltest.TestCase):
"""Cleanups runtime after a test."""
with self._mutex:
self._check_workers()
self._stop()
self._disable_signals()
self.wait(raise_error=False)
with self._mutex:
if self._first_failure:
raise self._first_failure
def _check_workers(self):
"""Checks status of running workers, terminate runtime in case of errors."""
has_workers = False
for label in self._active_workers:
still_active = []
for worker in self._active_workers[label]:
active = True
if isinstance(worker, ThreadWorker):
if not worker.thread.is_alive():
worker.thread.join()
if not self._stop_counter:
try:
worker.future.result()
except BaseException as e:
if not self._first_failure and not self._stop_counter:
self._first_failure = e
active = False
elif isinstance(worker, subprocess.Popen):
try:
res = worker.wait(0)
active = False
if res and not self._first_failure and not self._stop_counter:
self._first_failure = RuntimeError('One of the workers failed.')
except subprocess.TimeoutExpired:
pass
else:
try:
# We can't obtain return code of external process, so clean
# termination is assumed.
res = worker.wait(0)
active = False
except psutil.TimeoutExpired:
pass
if active:
has_workers = True
still_active.append(worker)
self._active_workers[label] = still_active
if has_workers and self._first_failure and not self._stop_counter:
self._stop()
elif not has_workers:
self._disable_alarm()
def __del__(self):
self._disable_signals()
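if __name__ == '__main__':
  # Usage sketch (not part of the original module): run a single thread worker
  # under a manager. absl flags must be parsed before WorkerManager reads
  # them, hence app.run.
  from absl import app

  def _demo(_):
    manager = WorkerManager(register_in_thread=True)
    manager.thread_worker('example', lambda: print('hello from worker'))
    manager.wait(['example'])

  app.run(_demo)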
| 2.21875 | 2 |
mmdeploy/backend/tensorrt/init_plugins.py | hanrui1sensetime/mmdeploy | 1 | 6446 | <gh_stars>1-10
# Copyright (c) OpenMMLab. All rights reserved.
import ctypes
import glob
import logging
import os
def get_ops_path() -> str:
"""Get path of the TensorRT plugin library.
Returns:
str: A path of the TensorRT plugin library.
"""
wildcard = os.path.abspath(
os.path.join(
os.path.dirname(__file__),
'../../../build/lib/libmmdeploy_tensorrt_ops.so'))
paths = glob.glob(wildcard)
lib_path = paths[0] if len(paths) > 0 else ''
return lib_path
def load_tensorrt_plugin() -> bool:
"""Load TensorRT plugins library.
Returns:
bool: True if TensorRT plugin library is successfully loaded.
"""
lib_path = get_ops_path()
success = False
if os.path.exists(lib_path):
ctypes.CDLL(lib_path)
logging.info(f'Successfully loaded tensorrt plugins from {lib_path}')
success = True
else:
logging.warning(f'Could not load the library of tensorrt plugins. \
Because the file does not exist: {lib_path}')
return success
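if __name__ == '__main__':
    # Usage sketch (not part of the original module): report the resolved
    # plugin path and attempt to load it.
    print(f'plugin path: {get_ops_path() or "<not found>"}')
    load_tensorrt_plugin()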
| 2.296875 | 2 |
reagent/test/world_model/test_seq2reward.py | dmitryvinn/ReAgent | 0 | 6447 | <reponame>dmitryvinn/ReAgent
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
import logging
import os
import random
import unittest
from typing import Optional
import numpy as np
import pytorch_lightning as pl
import torch
import torch.nn as nn
from parameterized import parameterized
from reagent.core import types as rlt
from reagent.core.parameters import (
NormalizationData,
NormalizationParameters,
ProblemDomain,
Seq2RewardTrainerParameters,
)
from reagent.gym.envs import Gym
from reagent.gym.utils import create_df_from_replay_buffer
from reagent.models.seq2reward_model import Seq2RewardNetwork
from reagent.net_builder.value.fully_connected import FullyConnected
from reagent.prediction.predictor_wrapper import (
Seq2RewardWithPreprocessor,
Seq2RewardPlanShortSeqWithPreprocessor,
FAKE_STATE_ID_LIST_FEATURES,
FAKE_STATE_ID_SCORE_LIST_FEATURES,
)
from reagent.preprocessing.identify_types import DO_NOT_PREPROCESS
from reagent.preprocessing.preprocessor import Preprocessor
from reagent.training.utils import gen_permutations
from reagent.training.world_model.compress_model_trainer import CompressModelTrainer
from reagent.training.world_model.seq2reward_trainer import get_Q, Seq2RewardTrainer
from torch.utils.data import DataLoader
logger = logging.getLogger(__name__)
SEED = 0
STRING_GAME_TESTS = [(False,), (True,)]
class FakeStepPredictionNetwork(nn.Module):
def __init__(self, look_ahead_steps):
super().__init__()
self.look_ahead_steps = look_ahead_steps
def forward(self, state: torch.Tensor):
"""
Given the current state, predict the probability of
experiencing next n steps (1 <=n <= look_ahead_steps)
For the test purpose, it outputs fixed fake numbers
"""
batch_size, _ = state.shape
return torch.ones(batch_size, self.look_ahead_steps).float()
class FakeSeq2RewardNetwork(nn.Module):
def forward(
self,
state: rlt.FeatureData,
action: rlt.FeatureData,
valid_reward_len: Optional[torch.Tensor] = None,
):
"""
Mimic I/O of Seq2RewardNetwork but return fake reward
Reward is the concatenation of action indices, independent
of state.
For example, when seq_len = 3, batch_size = 1, action_num = 2,
acc_reward = tensor(
[[ 0.],
[ 1.],
[ 10.],
[ 11.],
[100.],
[101.],
[110.],
[111.]]
)
Input action shape: seq_len, batch_size, num_action
Output acc_reward shape: batch_size, 1
"""
# pyre-fixme[9]: action has type `FeatureData`; used as `Tensor`.
action = action.float_features.transpose(0, 1)
action_indices = torch.argmax(action, dim=2).tolist()
acc_reward = torch.tensor(
list(map(lambda x: float("".join(map(str, x))), action_indices))
).reshape(-1, 1)
logger.info(f"acc_reward: {acc_reward}")
return rlt.Seq2RewardOutput(acc_reward=acc_reward)
def create_string_game_data(
dataset_size=10000, training_data_ratio=0.9, filter_short_sequence=False
):
SEQ_LEN = 6
NUM_ACTION = 2
NUM_MDP_PER_BATCH = 5
env = Gym(env_name="StringGame-v0", set_max_steps=SEQ_LEN)
df = create_df_from_replay_buffer(
env=env,
problem_domain=ProblemDomain.DISCRETE_ACTION,
desired_size=dataset_size,
multi_steps=None,
ds="2020-10-10",
)
if filter_short_sequence:
batch_size = NUM_MDP_PER_BATCH
time_diff = torch.ones(SEQ_LEN, batch_size)
valid_step = SEQ_LEN * torch.ones(batch_size, dtype=torch.int64)[:, None]
not_terminal = torch.Tensor(
[0 if i == SEQ_LEN - 1 else 1 for i in range(SEQ_LEN)]
)
not_terminal = torch.transpose(not_terminal.tile(NUM_MDP_PER_BATCH, 1), 0, 1)
else:
batch_size = NUM_MDP_PER_BATCH * SEQ_LEN
time_diff = torch.ones(SEQ_LEN, batch_size)
valid_step = torch.arange(SEQ_LEN, 0, -1).tile(NUM_MDP_PER_BATCH)[:, None]
not_terminal = torch.transpose(
torch.tril(torch.ones(SEQ_LEN, SEQ_LEN), diagonal=-1).tile(
NUM_MDP_PER_BATCH, 1
),
0,
1,
)
num_batches = int(dataset_size / SEQ_LEN / NUM_MDP_PER_BATCH)
batches = [None for _ in range(num_batches)]
batch_count, batch_seq_count = 0, 0
batch_reward = torch.zeros(SEQ_LEN, batch_size)
batch_action = torch.zeros(SEQ_LEN, batch_size, NUM_ACTION)
batch_state = torch.zeros(SEQ_LEN, batch_size, NUM_ACTION)
for mdp_id in sorted(set(df.mdp_id)):
mdp = df[df["mdp_id"] == mdp_id].sort_values("sequence_number", ascending=True)
if len(mdp) != SEQ_LEN:
continue
all_step_reward = torch.Tensor(list(mdp["reward"]))
all_step_state = torch.Tensor([list(s.values()) for s in mdp["state_features"]])
all_step_action = torch.zeros_like(all_step_state)
all_step_action[torch.arange(SEQ_LEN), [int(a) for a in mdp["action"]]] = 1.0
for j in range(SEQ_LEN):
if filter_short_sequence and j > 0:
break
reward = torch.zeros_like(all_step_reward)
reward[: SEQ_LEN - j] = all_step_reward[-(SEQ_LEN - j) :]
batch_reward[:, batch_seq_count] = reward
state = torch.zeros_like(all_step_state)
state[: SEQ_LEN - j] = all_step_state[-(SEQ_LEN - j) :]
batch_state[:, batch_seq_count] = state
action = torch.zeros_like(all_step_action)
action[: SEQ_LEN - j] = all_step_action[-(SEQ_LEN - j) :]
batch_action[:, batch_seq_count] = action
batch_seq_count += 1
if batch_seq_count == batch_size:
batches[batch_count] = rlt.MemoryNetworkInput(
reward=batch_reward,
action=rlt.FeatureData(float_features=batch_action),
state=rlt.FeatureData(float_features=batch_state),
next_state=rlt.FeatureData(
float_features=torch.zeros_like(batch_state)
), # fake, not used anyway
not_terminal=not_terminal,
time_diff=time_diff,
valid_step=valid_step,
step=None,
)
batch_count += 1
batch_seq_count = 0
batch_reward = torch.zeros_like(batch_reward)
batch_action = torch.zeros_like(batch_action)
batch_state = torch.zeros_like(batch_state)
assert batch_count == num_batches
num_training_batches = int(training_data_ratio * num_batches)
training_data = DataLoader(
batches[:num_training_batches], collate_fn=lambda x: x[0]
)
eval_data = DataLoader(batches[num_training_batches:], collate_fn=lambda x: x[0])
return training_data, eval_data
def train_seq2reward_model(training_data, learning_rate=0.01, num_epochs=5):
SEQ_LEN, batch_size, NUM_ACTION = next(
iter(training_data)
).action.float_features.shape
assert SEQ_LEN == 6 and NUM_ACTION == 2
seq2reward_network = Seq2RewardNetwork(
state_dim=NUM_ACTION,
action_dim=NUM_ACTION,
num_hiddens=64,
num_hidden_layers=2,
)
trainer_param = Seq2RewardTrainerParameters(
learning_rate=learning_rate,
multi_steps=SEQ_LEN,
action_names=["0", "1"],
gamma=1.0,
view_q_value=True,
)
trainer = Seq2RewardTrainer(
seq2reward_network=seq2reward_network, params=trainer_param
)
pl.seed_everything(SEED)
pl_trainer = pl.Trainer(max_epochs=num_epochs, deterministic=True)
pl_trainer.fit(trainer, training_data)
return trainer
def eval_seq2reward_model(eval_data, seq2reward_trainer):
SEQ_LEN, batch_size, NUM_ACTION = next(iter(eval_data)).action.float_features.shape
initial_state = torch.Tensor([[0, 0]])
initial_state_q_values = torch.squeeze(
get_Q(
seq2reward_trainer.seq2reward_network,
initial_state,
seq2reward_trainer.all_permut,
)
)
total_mse_loss = 0
total_q_values = torch.zeros(NUM_ACTION)
total_action_distribution = torch.zeros(NUM_ACTION)
for idx, batch in enumerate(eval_data):
(
mse_loss,
_,
q_values,
action_distribution,
) = seq2reward_trainer.validation_step(batch, idx)
total_mse_loss += mse_loss
total_q_values += torch.tensor(q_values)
total_action_distribution += torch.tensor(action_distribution)
N_eval = len(eval_data)
eval_mse_loss = total_mse_loss / N_eval
eval_q_values = total_q_values / N_eval
eval_action_distribution = total_action_distribution / N_eval
return (
initial_state_q_values,
eval_mse_loss,
eval_q_values,
eval_action_distribution,
)
def train_seq2reward_compress_model(
training_data, seq2reward_network, learning_rate=0.1, num_epochs=5
):
SEQ_LEN, batch_size, NUM_ACTION = next(
iter(training_data)
).action.float_features.shape
assert SEQ_LEN == 6 and NUM_ACTION == 2
compress_net_builder = FullyConnected(sizes=[8, 8])
state_normalization_data = NormalizationData(
dense_normalization_parameters={
0: NormalizationParameters(feature_type=DO_NOT_PREPROCESS),
1: NormalizationParameters(feature_type=DO_NOT_PREPROCESS),
}
)
compress_model_network = compress_net_builder.build_value_network(
state_normalization_data,
output_dim=NUM_ACTION,
)
trainer_param = Seq2RewardTrainerParameters(
learning_rate=0.0,
multi_steps=SEQ_LEN,
action_names=["0", "1"],
compress_model_learning_rate=learning_rate,
gamma=1.0,
view_q_value=True,
)
trainer = CompressModelTrainer(
compress_model_network=compress_model_network,
seq2reward_network=seq2reward_network,
params=trainer_param,
)
pl.seed_everything(SEED)
pl_trainer = pl.Trainer(max_epochs=num_epochs, deterministic=True)
pl_trainer.fit(trainer, training_data)
return trainer
def eval_seq2reward_compress_model(eval_data, compress_model_trainer):
SEQ_LEN, batch_size, NUM_ACTION = next(iter(eval_data)).action.float_features.shape
total_mse_loss = 0
total_q_values = torch.zeros(NUM_ACTION)
total_action_distribution = torch.zeros(NUM_ACTION)
for idx, batch in enumerate(eval_data):
(
mse_loss,
q_values,
action_distribution,
_,
) = compress_model_trainer.validation_step(batch, idx)
total_mse_loss += mse_loss
total_q_values += torch.tensor(q_values)
total_action_distribution += torch.tensor(action_distribution)
N_eval = len(eval_data)
eval_mse_loss = total_mse_loss / N_eval
eval_q_values = total_q_values / N_eval
eval_action_distribution = total_action_distribution / N_eval
return eval_mse_loss, eval_q_values, eval_action_distribution
class TestSeq2Reward(unittest.TestCase):
def test_seq2reward_with_preprocessor_plan_short_sequence(self):
self._test_seq2reward_with_preprocessor(plan_short_sequence=True)
def test_seq2reward_with_preprocessor_plan_full_sequence(self):
self._test_seq2reward_with_preprocessor(plan_short_sequence=False)
def _test_seq2reward_with_preprocessor(self, plan_short_sequence):
state_dim = 4
action_dim = 2
seq_len = 3
model = FakeSeq2RewardNetwork()
state_normalization_parameters = {
i: NormalizationParameters(
feature_type=DO_NOT_PREPROCESS, mean=0.0, stddev=1.0
)
for i in range(1, state_dim)
}
state_preprocessor = Preprocessor(state_normalization_parameters, False)
if plan_short_sequence:
step_prediction_model = FakeStepPredictionNetwork(seq_len)
model_with_preprocessor = Seq2RewardPlanShortSeqWithPreprocessor(
model,
step_prediction_model,
state_preprocessor,
seq_len,
action_dim,
)
else:
model_with_preprocessor = Seq2RewardWithPreprocessor(
model,
state_preprocessor,
seq_len,
action_dim,
)
input_prototype = rlt.ServingFeatureData(
float_features_with_presence=state_preprocessor.input_prototype(),
id_list_features=FAKE_STATE_ID_LIST_FEATURES,
id_score_list_features=FAKE_STATE_ID_SCORE_LIST_FEATURES,
)
q_values = model_with_preprocessor(input_prototype)
if plan_short_sequence:
# When planning for 1, 2, and 3 steps ahead,
# the expected q values are respectively:
# [0, 1], [1, 11], [11, 111]
# Weighting the expected q values by predicted step
# probabilities [0.33, 0.33, 0.33], we have [4, 41]
expected_q_values = torch.tensor([[4.0, 41.0]])
else:
expected_q_values = torch.tensor([[11.0, 111.0]])
assert torch.all(expected_q_values == q_values)
def test_get_Q(self):
NUM_ACTION = 2
MULTI_STEPS = 3
BATCH_SIZE = 2
STATE_DIM = 4
all_permut = gen_permutations(MULTI_STEPS, NUM_ACTION)
seq2reward_network = FakeSeq2RewardNetwork()
state = torch.zeros(BATCH_SIZE, STATE_DIM)
q_values = get_Q(seq2reward_network, state, all_permut)
expected_q_values = torch.tensor([[11.0, 111.0], [11.0, 111.0]])
logger.info(f"q_values: {q_values}")
assert torch.all(expected_q_values == q_values)
def test_gen_permutations_seq_len_1_action_6(self):
SEQ_LEN = 1
NUM_ACTION = 6
expected_outcome = torch.tensor([[0], [1], [2], [3], [4], [5]])
self._test_gen_permutations(SEQ_LEN, NUM_ACTION, expected_outcome)
def test_gen_permutations_seq_len_3_num_action_2(self):
SEQ_LEN = 3
NUM_ACTION = 2
expected_outcome = torch.tensor(
[
[0, 0, 0],
[0, 0, 1],
[0, 1, 0],
[0, 1, 1],
[1, 0, 0],
[1, 0, 1],
[1, 1, 0],
[1, 1, 1],
]
)
self._test_gen_permutations(SEQ_LEN, NUM_ACTION, expected_outcome)
def _test_gen_permutations(self, SEQ_LEN, NUM_ACTION, expected_outcome):
# expected shape: SEQ_LEN, PERM_NUM, ACTION_DIM
result = gen_permutations(SEQ_LEN, NUM_ACTION)
assert result.shape == (SEQ_LEN, NUM_ACTION ** SEQ_LEN, NUM_ACTION)
outcome = torch.argmax(result.transpose(0, 1), dim=-1)
assert torch.all(outcome == expected_outcome)
@parameterized.expand(STRING_GAME_TESTS)
@unittest.skipIf("SANDCASTLE" in os.environ, "Skipping long test on sandcastle.")
def test_seq2reward_on_string_game_v0(self, filter_short_sequence):
np.random.seed(SEED)
random.seed(SEED)
torch.manual_seed(SEED)
training_data, eval_data = create_string_game_data(
filter_short_sequence=filter_short_sequence
)
seq2reward_trainer = train_seq2reward_model(training_data)
(
initial_state_q_values,
eval_mse_loss,
eval_q_values,
eval_action_distribution,
) = eval_seq2reward_model(eval_data, seq2reward_trainer)
assert abs(initial_state_q_values[0].item() - 10) < 1.0
assert abs(initial_state_q_values[1].item() - 5) < 1.0
if filter_short_sequence:
assert eval_mse_loss < 0.1
else:
# Same short sequences may have different total rewards due to the missing
# states and actions in previous steps, so the trained network is not able
# to reduce the mse loss to values close to zero.
assert eval_mse_loss < 10
compress_model_trainer = train_seq2reward_compress_model(
training_data, seq2reward_trainer.seq2reward_network
)
(
compress_eval_mse_loss,
compress_eval_q_values,
compress_eval_action_distribution,
) = eval_seq2reward_compress_model(eval_data, compress_model_trainer)
assert compress_eval_mse_loss < 1e-5
assert torch.all(eval_q_values - compress_eval_q_values < 1e-5)
assert torch.all(
eval_action_distribution - compress_eval_action_distribution < 1e-5
)
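if __name__ == "__main__":
    # Convenience entry point (an assumption, mirroring common unittest
    # usage): allows running these tests directly with python.
    unittest.main()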
| 1.867188 | 2 |
models_SHOT_convex/syn30m03hfsg.py | grossmann-group/pyomo-MINLP-benchmarking | 0 | 6448 | <gh_stars>0
# MINLP written by GAMS Convert at 01/15/21 11:37:33
#
# Equation counts
# Total E G L N X C B
# 1486 571 111 804 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 865 685 180 0 0 0 0 0
# FX 0 0 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 3373 3193 180 0
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *
model = m = ConcreteModel()
m.x2 = Var(within=Reals,bounds=(0,40),initialize=0)
m.x3 = Var(within=Reals,bounds=(0,40),initialize=0)
m.x4 = Var(within=Reals,bounds=(0,40),initialize=0)
m.x5 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x6 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x7 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x8 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x9 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x10 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x11 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x12 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x13 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x14 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x15 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x16 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x17 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x18 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x19 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x20 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x21 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x22 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x23 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x24 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x25 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x26 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x27 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x28 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x29 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x30 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x31 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x32 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x33 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x34 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x35 = Var(within=Reals,bounds=(0,30),initialize=0)
m.x36 = Var(within=Reals,bounds=(0,30),initialize=0)
m.x37 = Var(within=Reals,bounds=(0,30),initialize=0)
m.x38 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x39 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x40 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x41 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x42 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x43 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x44 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x45 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x46 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x47 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x48 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x49 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x50 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x51 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x52 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x53 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x54 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x55 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x56 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x57 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x58 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x59 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x60 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x61 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x62 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x63 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x64 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x65 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x66 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x67 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x68 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x69 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x70 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x71 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x72 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x73 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x74 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x75 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x76 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x77 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x78 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x79 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x80 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x81 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x82 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x83 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x84 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x85 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x86 = Var(within=Reals,bounds=(0,20),initialize=0)
m.x87 = Var(within=Reals,bounds=(0,20),initialize=0)
m.x88 = Var(within=Reals,bounds=(0,20),initialize=0)
m.x89 = Var(within=Reals,bounds=(0,20),initialize=0)
m.x90 = Var(within=Reals,bounds=(0,20),initialize=0)
m.x91 = Var(within=Reals,bounds=(0,20),initialize=0)
m.x92 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x93 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x94 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x95 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x96 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x97 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x98 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x99 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x100 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x101 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x102 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x103 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x104 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x105 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x106 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x107 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x108 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x109 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x110 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x111 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x112 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x113 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x114 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x115 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x116 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x117 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x118 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x119 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x120 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x121 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x122 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x123 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x124 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x125 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x126 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x127 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x128 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x129 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x130 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x131 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x132 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x133 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x134 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x135 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x136 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x137 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x138 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x139 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x140 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x141 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x142 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x143 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x144 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x145 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x146 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x147 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x148 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x149 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x150 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x151 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x152 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x153 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x154 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x155 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x156 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x157 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x158 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x159 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x160 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x161 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x162 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x163 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x164 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x165 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x166 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x167 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x168 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x169 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x170 = Var(within=Reals,bounds=(0,30),initialize=0)
m.x171 = Var(within=Reals,bounds=(0,30),initialize=0)
m.x172 = Var(within=Reals,bounds=(0,30),initialize=0)
m.x173 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x174 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x175 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x176 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x177 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x178 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x179 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x180 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x181 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x182 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x183 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x184 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x185 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x186 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x187 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x188 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x189 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x190 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x191 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x192 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x193 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x194 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x195 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x196 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x197 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x198 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x199 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x200 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x201 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x202 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x203 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x204 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x205 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x206 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x207 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x208 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x209 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x210 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x211 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x212 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x213 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x214 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x215 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x216 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x217 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x218 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x219 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x220 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x221 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x222 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x223 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x224 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x225 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x226 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x227 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x228 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x229 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x230 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x231 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x232 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x233 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x234 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x235 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x236 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x237 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x238 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x239 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x240 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x241 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x242 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x243 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x244 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x245 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x246 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x247 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x248 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x249 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x250 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x251 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x252 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x253 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x254 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x255 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x256 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x257 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x258 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x259 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x260 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x261 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x262 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x263 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x264 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x265 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x266 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x267 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x268 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x269 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x270 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x271 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x272 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x273 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x274 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x275 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x276 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x277 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x278 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x279 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x280 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x281 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x282 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x283 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x284 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x285 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x286 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x287 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x288 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x289 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x290 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x291 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x292 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x293 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x294 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x295 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x296 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x297 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x298 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x299 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x300 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x301 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x302 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x303 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x304 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x305 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x306 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x307 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x308 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x309 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x310 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x311 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x312 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x313 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x314 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x315 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x316 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x317 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x318 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x319 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x320 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x321 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x322 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x323 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x324 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x325 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x326 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x327 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x328 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x329 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x330 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x331 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x332 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x333 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x334 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x335 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x336 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x337 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x338 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x339 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x340 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x341 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x342 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x343 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x344 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x345 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x346 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x347 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x348 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x349 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x350 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x351 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x352 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x353 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x354 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x355 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x356 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x357 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x358 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x359 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x360 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x361 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x362 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x363 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x364 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x365 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x366 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x367 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x368 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x369 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x370 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x371 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x372 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x373 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x374 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x375 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x376 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x377 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x378 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x379 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x380 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x381 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x382 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x383 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x384 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x385 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x386 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x387 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x388 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x389 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x390 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x391 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x392 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x393 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x394 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x395 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x396 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x397 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x398 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x399 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x400 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x401 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x402 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x403 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x404 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x405 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x406 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x407 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x408 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x409 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x410 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x411 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x412 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x413 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x414 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x415 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x416 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x417 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x418 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x419 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x420 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x421 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x422 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x423 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x424 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x425 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x426 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x427 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x428 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x429 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x430 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x431 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x432 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x433 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x434 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x435 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x436 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x437 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x438 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x439 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x440 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x441 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x442 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x443 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x444 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x445 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x446 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x447 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x448 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x449 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x450 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x451 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x452 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x453 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x454 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x455 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x456 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x457 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x458 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x459 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x460 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x461 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x462 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x463 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x464 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x465 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x466 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x467 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x468 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x469 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x470 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x471 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x472 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x473 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x474 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x475 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x476 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x477 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x478 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x479 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x480 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x481 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x482 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x483 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x484 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x485 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x486 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x487 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x488 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x489 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x490 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x491 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x492 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x493 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x494 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x495 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x496 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x497 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x498 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x499 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x500 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x501 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x502 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x503 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x504 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x505 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x506 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x507 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x508 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x509 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x510 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x511 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x512 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x513 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x514 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x515 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x516 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x517 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x518 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x519 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x520 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x521 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x522 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x523 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x524 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x525 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x526 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x527 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x528 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x529 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x530 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x531 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x532 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x533 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x534 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x535 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x536 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x537 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x538 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x539 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x540 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x541 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x542 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x543 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x544 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x545 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x546 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x547 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x548 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x549 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x550 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x551 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x552 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x553 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x554 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x555 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x556 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x557 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x558 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x559 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x560 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x561 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x562 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x563 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x564 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x565 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x566 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x567 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x568 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x569 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x570 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x571 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x572 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x573 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x574 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x575 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x576 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x577 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x578 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x579 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x580 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x581 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x582 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x583 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x584 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x585 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x586 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x587 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x588 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x589 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x590 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x591 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x592 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x593 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x594 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x595 = Var(within=Reals,bounds=(0,None),initialize=0)
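# Binary selection variables: b596..b775 act as on/off switches for the
# process units; the big-M constraints further below drive the associated
# flow variables to zero whenever the matching binary is 0.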
m.b596 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b597 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b598 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b599 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b600 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b601 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b602 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b603 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b604 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b605 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b606 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b607 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b608 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b609 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b610 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b611 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b612 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b613 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b614 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b615 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b616 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b617 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b618 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b619 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b620 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b621 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b622 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b623 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b624 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b625 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b626 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b627 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b628 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b629 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b630 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b631 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b632 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b633 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b634 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b635 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b636 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b637 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b638 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b639 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b640 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b641 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b642 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b643 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b644 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b645 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b646 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b647 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b648 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b649 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b650 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b651 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b652 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b653 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b654 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b655 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b656 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b657 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b658 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b659 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b660 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b661 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b662 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b663 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b664 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b665 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b666 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b667 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b668 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b669 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b670 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b671 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b672 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b673 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b674 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b675 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b676 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b677 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b678 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b679 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b680 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b681 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b682 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b683 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b684 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b685 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b686 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b687 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b688 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b689 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b690 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b691 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b692 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b693 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b694 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b695 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b696 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b697 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b698 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b699 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b700 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b701 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b702 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b703 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b704 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b705 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b706 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b707 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b708 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b709 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b710 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b711 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b712 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b713 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b714 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b715 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b716 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b717 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b718 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b719 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b720 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b721 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b722 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b723 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b724 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b725 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b726 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b727 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b728 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b729 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b730 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b731 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b732 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b733 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b734 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b735 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b736 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b737 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b738 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b739 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b740 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b741 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b742 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b743 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b744 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b745 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b746 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b747 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b748 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b749 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b750 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b751 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b752 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b753 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b754 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b755 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b756 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b757 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b758 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b759 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b760 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b761 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b762 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b763 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b764 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b765 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b766 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b767 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b768 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b769 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b770 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b771 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b772 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b773 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b774 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b775 = Var(within=Binary,bounds=(0,1),initialize=0)
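# Free auxiliary variables: x776..x865 are unbounded reals.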
m.x776 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x777 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x778 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x779 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x780 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x781 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x782 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x783 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x784 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x785 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x786 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x787 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x788 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x789 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x790 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x791 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x792 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x793 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x794 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x795 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x796 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x797 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x798 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x799 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x800 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x801 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x802 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x803 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x804 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x805 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x806 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x807 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x808 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x809 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x810 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x811 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x812 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x813 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x814 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x815 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x816 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x817 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x818 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x819 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x820 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x821 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x822 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x823 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x824 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x825 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x826 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x827 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x828 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x829 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x830 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x831 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x832 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x833 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x834 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x835 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x836 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x837 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x838 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x839 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x840 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x841 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x842 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x843 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x844 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x845 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x846 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x847 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x848 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x849 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x850 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x851 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x852 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x853 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x854 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x855 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x856 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x857 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x858 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x859 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x860 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x861 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x862 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x863 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x864 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x865 = Var(within=Reals,bounds=(None,None),initialize=0)
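# Objective: maximize a linear profit expression, revenue/cost coefficients on
# the stream variables minus fixed-charge terms on the unit binaries (the
# usual reading for process-synthesis MINLPs of this form).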
m.obj = Objective(expr= - m.x2 - m.x3 - m.x4 + 5*m.x20 + 10*m.x21 + 5*m.x22 - 2*m.x35 - m.x36 - 2*m.x37 - 10*m.x86
- 5*m.x87 - 5*m.x88 - 5*m.x89 - 5*m.x90 - 5*m.x91 + 40*m.x110 + 30*m.x111 + 15*m.x112
+ 15*m.x113 + 20*m.x114 + 25*m.x115 + 10*m.x116 + 30*m.x117 + 40*m.x118 + 30*m.x119 + 20*m.x120
+ 20*m.x121 + 35*m.x122 + 50*m.x123 + 20*m.x124 + 20*m.x125 + 30*m.x126 + 35*m.x127 + 25*m.x128
+ 50*m.x129 + 10*m.x130 + 15*m.x131 + 20*m.x132 + 20*m.x133 + 30*m.x155 + 40*m.x156 + 40*m.x157
- m.x170 - m.x171 - m.x172 + 80*m.x194 + 90*m.x195 + 120*m.x196 + 285*m.x197 + 390*m.x198
+ 350*m.x199 + 290*m.x200 + 405*m.x201 + 190*m.x202 + 280*m.x203 + 400*m.x204 + 430*m.x205
+ 290*m.x206 + 300*m.x207 + 240*m.x208 + 350*m.x209 + 250*m.x210 + 300*m.x211 - 5*m.b686
- 4*m.b687 - 6*m.b688 - 8*m.b689 - 7*m.b690 - 6*m.b691 - 6*m.b692 - 9*m.b693 - 4*m.b694
- 10*m.b695 - 9*m.b696 - 5*m.b697 - 6*m.b698 - 10*m.b699 - 6*m.b700 - 7*m.b701 - 7*m.b702
- 4*m.b703 - 4*m.b704 - 3*m.b705 - 2*m.b706 - 5*m.b707 - 6*m.b708 - 7*m.b709 - 2*m.b710
- 5*m.b711 - 2*m.b712 - 4*m.b713 - 7*m.b714 - 4*m.b715 - 3*m.b716 - 9*m.b717 - 3*m.b718
- 7*m.b719 - 2*m.b720 - 9*m.b721 - 3*m.b722 - m.b723 - 9*m.b724 - 2*m.b725 - 6*m.b726 - 3*m.b727
- 4*m.b728 - 8*m.b729 - m.b730 - 2*m.b731 - 5*m.b732 - 2*m.b733 - 3*m.b734 - 4*m.b735 - 3*m.b736
- 5*m.b737 - 7*m.b738 - 6*m.b739 - 2*m.b740 - 8*m.b741 - 4*m.b742 - m.b743 - 4*m.b744 - m.b745
- 2*m.b746 - 5*m.b747 - 2*m.b748 - 9*m.b749 - 2*m.b750 - 9*m.b751 - 5*m.b752 - 8*m.b753
- 4*m.b754 - 2*m.b755 - 3*m.b756 - 8*m.b757 - 10*m.b758 - 6*m.b759 - 3*m.b760 - 4*m.b761
- 8*m.b762 - 7*m.b763 - 7*m.b764 - 3*m.b765 - 9*m.b766 - 4*m.b767 - 8*m.b768 - 6*m.b769
- 2*m.b770 - m.b771 - 3*m.b772 - 8*m.b773 - 3*m.b774 - 4*m.b775, sense=maximize)
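# c2..c52: linear splitter/mixer balances linking the stream variables. Each
# balance appears three times, which suggests three parallel copies of the
# flowsheet (e.g. periods or scenarios).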
m.c2 = Constraint(expr= m.x2 - m.x5 - m.x8 == 0)
m.c3 = Constraint(expr= m.x3 - m.x6 - m.x9 == 0)
m.c4 = Constraint(expr= m.x4 - m.x7 - m.x10 == 0)
m.c5 = Constraint(expr= - m.x11 - m.x14 + m.x17 == 0)
m.c6 = Constraint(expr= - m.x12 - m.x15 + m.x18 == 0)
m.c7 = Constraint(expr= - m.x13 - m.x16 + m.x19 == 0)
m.c8 = Constraint(expr= m.x17 - m.x20 - m.x23 == 0)
m.c9 = Constraint(expr= m.x18 - m.x21 - m.x24 == 0)
m.c10 = Constraint(expr= m.x19 - m.x22 - m.x25 == 0)
m.c11 = Constraint(expr= m.x23 - m.x26 - m.x29 - m.x32 == 0)
m.c12 = Constraint(expr= m.x24 - m.x27 - m.x30 - m.x33 == 0)
m.c13 = Constraint(expr= m.x25 - m.x28 - m.x31 - m.x34 == 0)
m.c14 = Constraint(expr= m.x38 - m.x47 - m.x50 == 0)
m.c15 = Constraint(expr= m.x39 - m.x48 - m.x51 == 0)
m.c16 = Constraint(expr= m.x40 - m.x49 - m.x52 == 0)
m.c17 = Constraint(expr= m.x44 - m.x53 - m.x56 - m.x59 == 0)
m.c18 = Constraint(expr= m.x45 - m.x54 - m.x57 - m.x60 == 0)
m.c19 = Constraint(expr= m.x46 - m.x55 - m.x58 - m.x61 == 0)
m.c20 = Constraint(expr= m.x68 - m.x80 - m.x83 == 0)
m.c21 = Constraint(expr= m.x69 - m.x81 - m.x84 == 0)
m.c22 = Constraint(expr= m.x70 - m.x82 - m.x85 == 0)
m.c23 = Constraint(expr= - m.x71 - m.x89 + m.x92 == 0)
m.c24 = Constraint(expr= - m.x72 - m.x90 + m.x93 == 0)
m.c25 = Constraint(expr= - m.x73 - m.x91 + m.x94 == 0)
m.c26 = Constraint(expr= m.x74 - m.x95 - m.x98 == 0)
m.c27 = Constraint(expr= m.x75 - m.x96 - m.x99 == 0)
m.c28 = Constraint(expr= m.x76 - m.x97 - m.x100 == 0)
m.c29 = Constraint(expr= m.x77 - m.x101 - m.x104 - m.x107 == 0)
m.c30 = Constraint(expr= m.x78 - m.x102 - m.x105 - m.x108 == 0)
m.c31 = Constraint(expr= m.x79 - m.x103 - m.x106 - m.x109 == 0)
m.c32 = Constraint(expr= m.x134 - m.x137 == 0)
m.c33 = Constraint(expr= m.x135 - m.x138 == 0)
m.c34 = Constraint(expr= m.x136 - m.x139 == 0)
m.c35 = Constraint(expr= m.x137 - m.x140 - m.x143 == 0)
m.c36 = Constraint(expr= m.x138 - m.x141 - m.x144 == 0)
m.c37 = Constraint(expr= m.x139 - m.x142 - m.x145 == 0)
m.c38 = Constraint(expr= - m.x146 - m.x149 + m.x152 == 0)
m.c39 = Constraint(expr= - m.x147 - m.x150 + m.x153 == 0)
m.c40 = Constraint(expr= - m.x148 - m.x151 + m.x154 == 0)
m.c41 = Constraint(expr= m.x152 - m.x155 - m.x158 == 0)
m.c42 = Constraint(expr= m.x153 - m.x156 - m.x159 == 0)
m.c43 = Constraint(expr= m.x154 - m.x157 - m.x160 == 0)
m.c44 = Constraint(expr= m.x158 - m.x161 - m.x164 - m.x167 == 0)
m.c45 = Constraint(expr= m.x159 - m.x162 - m.x165 - m.x168 == 0)
m.c46 = Constraint(expr= m.x160 - m.x163 - m.x166 - m.x169 == 0)
m.c47 = Constraint(expr= m.x173 - m.x182 - m.x185 == 0)
m.c48 = Constraint(expr= m.x174 - m.x183 - m.x186 == 0)
m.c49 = Constraint(expr= m.x175 - m.x184 - m.x187 == 0)
m.c50 = Constraint(expr= m.x179 - m.x188 - m.x191 - m.x194 == 0)
m.c51 = Constraint(expr= m.x180 - m.x189 - m.x192 - m.x195 == 0)
m.c52 = Constraint(expr= m.x181 - m.x190 - m.x193 - m.x196 == 0)
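# c53 onward: unit performance models. Concave yields y <= a*log(1 + x) are
# written in an epsilon-scaled perspective form,
#   (y/(0.001 + 0.999*b) - a*log(1 + x/(0.001 + 0.999*b))) * (0.001 + 0.999*b) <= 0,
# so the constraint degenerates gracefully when the unit binary b is 0, and
# the accompanying big-M rows (e.g. c68..c79) force the disaggregated flows
# to zero for the inactive branch.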
m.c53 = Constraint(expr=(m.x224/(0.001 + 0.999*m.b596) - log(1 + m.x212/(0.001 + 0.999*m.b596)))*(0.001 + 0.999*m.b596)
<= 0)
m.c54 = Constraint(expr=(m.x225/(0.001 + 0.999*m.b597) - log(1 + m.x213/(0.001 + 0.999*m.b597)))*(0.001 + 0.999*m.b597)
<= 0)
m.c55 = Constraint(expr=(m.x226/(0.001 + 0.999*m.b598) - log(1 + m.x214/(0.001 + 0.999*m.b598)))*(0.001 + 0.999*m.b598)
<= 0)
m.c56 = Constraint(expr= m.x215 == 0)
m.c57 = Constraint(expr= m.x216 == 0)
m.c58 = Constraint(expr= m.x217 == 0)
m.c59 = Constraint(expr= m.x227 == 0)
m.c60 = Constraint(expr= m.x228 == 0)
m.c61 = Constraint(expr= m.x229 == 0)
m.c62 = Constraint(expr= m.x5 - m.x212 - m.x215 == 0)
m.c63 = Constraint(expr= m.x6 - m.x213 - m.x216 == 0)
m.c64 = Constraint(expr= m.x7 - m.x214 - m.x217 == 0)
m.c65 = Constraint(expr= m.x11 - m.x224 - m.x227 == 0)
m.c66 = Constraint(expr= m.x12 - m.x225 - m.x228 == 0)
m.c67 = Constraint(expr= m.x13 - m.x226 - m.x229 == 0)
m.c68 = Constraint(expr= m.x212 - 40*m.b596 <= 0)
m.c69 = Constraint(expr= m.x213 - 40*m.b597 <= 0)
m.c70 = Constraint(expr= m.x214 - 40*m.b598 <= 0)
m.c71 = Constraint(expr= m.x215 + 40*m.b596 <= 40)
m.c72 = Constraint(expr= m.x216 + 40*m.b597 <= 40)
m.c73 = Constraint(expr= m.x217 + 40*m.b598 <= 40)
m.c74 = Constraint(expr= m.x224 - 3.71357206670431*m.b596 <= 0)
m.c75 = Constraint(expr= m.x225 - 3.71357206670431*m.b597 <= 0)
m.c76 = Constraint(expr= m.x226 - 3.71357206670431*m.b598 <= 0)
m.c77 = Constraint(expr= m.x227 + 3.71357206670431*m.b596 <= 3.71357206670431)
m.c78 = Constraint(expr= m.x228 + 3.71357206670431*m.b597 <= 3.71357206670431)
m.c79 = Constraint(expr= m.x229 + 3.71357206670431*m.b598 <= 3.71357206670431)
m.c80 = Constraint(expr=(m.x230/(0.001 + 0.999*m.b599) - 1.2*log(1 + m.x218/(0.001 + 0.999*m.b599)))*(0.001 + 0.999*
m.b599) <= 0)
m.c81 = Constraint(expr=(m.x231/(0.001 + 0.999*m.b600) - 1.2*log(1 + m.x219/(0.001 + 0.999*m.b600)))*(0.001 + 0.999*
m.b600) <= 0)
m.c82 = Constraint(expr=(m.x232/(0.001 + 0.999*m.b601) - 1.2*log(1 + m.x220/(0.001 + 0.999*m.b601)))*(0.001 + 0.999*
m.b601) <= 0)
m.c83 = Constraint(expr= m.x221 == 0)
m.c84 = Constraint(expr= m.x222 == 0)
m.c85 = Constraint(expr= m.x223 == 0)
m.c86 = Constraint(expr= m.x233 == 0)
m.c87 = Constraint(expr= m.x234 == 0)
m.c88 = Constraint(expr= m.x235 == 0)
m.c89 = Constraint(expr= m.x8 - m.x218 - m.x221 == 0)
m.c90 = Constraint(expr= m.x9 - m.x219 - m.x222 == 0)
m.c91 = Constraint(expr= m.x10 - m.x220 - m.x223 == 0)
m.c92 = Constraint(expr= m.x14 - m.x230 - m.x233 == 0)
m.c93 = Constraint(expr= m.x15 - m.x231 - m.x234 == 0)
m.c94 = Constraint(expr= m.x16 - m.x232 - m.x235 == 0)
m.c95 = Constraint(expr= m.x218 - 40*m.b599 <= 0)
m.c96 = Constraint(expr= m.x219 - 40*m.b600 <= 0)
m.c97 = Constraint(expr= m.x220 - 40*m.b601 <= 0)
m.c98 = Constraint(expr= m.x221 + 40*m.b599 <= 40)
m.c99 = Constraint(expr= m.x222 + 40*m.b600 <= 40)
m.c100 = Constraint(expr= m.x223 + 40*m.b601 <= 40)
m.c101 = Constraint(expr= m.x230 - 4.45628648004517*m.b599 <= 0)
m.c102 = Constraint(expr= m.x231 - 4.45628648004517*m.b600 <= 0)
m.c103 = Constraint(expr= m.x232 - 4.45628648004517*m.b601 <= 0)
m.c104 = Constraint(expr= m.x233 + 4.45628648004517*m.b599 <= 4.45628648004517)
m.c105 = Constraint(expr= m.x234 + 4.45628648004517*m.b600 <= 4.45628648004517)
m.c106 = Constraint(expr= m.x235 + 4.45628648004517*m.b601 <= 4.45628648004517)
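# c107 onward: fixed-yield linear units (e.g. x260 = 0.75*x236), gated by the
# same big-M pattern on b602..b604.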
m.c107 = Constraint(expr= - 0.75*m.x236 + m.x260 == 0)
m.c108 = Constraint(expr= - 0.75*m.x237 + m.x261 == 0)
m.c109 = Constraint(expr= - 0.75*m.x238 + m.x262 == 0)
m.c110 = Constraint(expr= m.x239 == 0)
m.c111 = Constraint(expr= m.x240 == 0)
m.c112 = Constraint(expr= m.x241 == 0)
m.c113 = Constraint(expr= m.x263 == 0)
m.c114 = Constraint(expr= m.x264 == 0)
m.c115 = Constraint(expr= m.x265 == 0)
m.c116 = Constraint(expr= m.x26 - m.x236 - m.x239 == 0)
m.c117 = Constraint(expr= m.x27 - m.x237 - m.x240 == 0)
m.c118 = Constraint(expr= m.x28 - m.x238 - m.x241 == 0)
m.c119 = Constraint(expr= m.x38 - m.x260 - m.x263 == 0)
m.c120 = Constraint(expr= m.x39 - m.x261 - m.x264 == 0)
m.c121 = Constraint(expr= m.x40 - m.x262 - m.x265 == 0)
m.c122 = Constraint(expr= m.x236 - 4.45628648004517*m.b602 <= 0)
m.c123 = Constraint(expr= m.x237 - 4.45628648004517*m.b603 <= 0)
m.c124 = Constraint(expr= m.x238 - 4.45628648004517*m.b604 <= 0)
m.c125 = Constraint(expr= m.x239 + 4.45628648004517*m.b602 <= 4.45628648004517)
m.c126 = Constraint(expr= m.x240 + 4.45628648004517*m.b603 <= 4.45628648004517)
m.c127 = Constraint(expr= m.x241 + 4.45628648004517*m.b604 <= 4.45628648004517)
m.c128 = Constraint(expr= m.x260 - 3.34221486003388*m.b602 <= 0)
m.c129 = Constraint(expr= m.x261 - 3.34221486003388*m.b603 <= 0)
m.c130 = Constraint(expr= m.x262 - 3.34221486003388*m.b604 <= 0)
m.c131 = Constraint(expr= m.x263 + 3.34221486003388*m.b602 <= 3.34221486003388)
m.c132 = Constraint(expr= m.x264 + 3.34221486003388*m.b603 <= 3.34221486003388)
m.c133 = Constraint(expr= m.x265 + 3.34221486003388*m.b604 <= 3.34221486003388)
m.c134 = Constraint(expr=(m.x266/(0.001 + 0.999*m.b605) - 1.5*log(1 + m.x242/(0.001 + 0.999*m.b605)))*(0.001 + 0.999*
m.b605) <= 0)
m.c135 = Constraint(expr=(m.x267/(0.001 + 0.999*m.b606) - 1.5*log(1 + m.x243/(0.001 + 0.999*m.b606)))*(0.001 + 0.999*
m.b606) <= 0)
m.c136 = Constraint(expr=(m.x268/(0.001 + 0.999*m.b607) - 1.5*log(1 + m.x244/(0.001 + 0.999*m.b607)))*(0.001 + 0.999*
m.b607) <= 0)
m.c137 = Constraint(expr= m.x245 == 0)
m.c138 = Constraint(expr= m.x246 == 0)
m.c139 = Constraint(expr= m.x247 == 0)
m.c140 = Constraint(expr= m.x272 == 0)
m.c141 = Constraint(expr= m.x273 == 0)
m.c142 = Constraint(expr= m.x274 == 0)
m.c143 = Constraint(expr= m.x29 - m.x242 - m.x245 == 0)
m.c144 = Constraint(expr= m.x30 - m.x243 - m.x246 == 0)
m.c145 = Constraint(expr= m.x31 - m.x244 - m.x247 == 0)
m.c146 = Constraint(expr= m.x41 - m.x266 - m.x272 == 0)
m.c147 = Constraint(expr= m.x42 - m.x267 - m.x273 == 0)
m.c148 = Constraint(expr= m.x43 - m.x268 - m.x274 == 0)
m.c149 = Constraint(expr= m.x242 - 4.45628648004517*m.b605 <= 0)
m.c150 = Constraint(expr= m.x243 - 4.45628648004517*m.b606 <= 0)
m.c151 = Constraint(expr= m.x244 - 4.45628648004517*m.b607 <= 0)
m.c152 = Constraint(expr= m.x245 + 4.45628648004517*m.b605 <= 4.45628648004517)
m.c153 = Constraint(expr= m.x246 + 4.45628648004517*m.b606 <= 4.45628648004517)
m.c154 = Constraint(expr= m.x247 + 4.45628648004517*m.b607 <= 4.45628648004517)
m.c155 = Constraint(expr= m.x266 - 2.54515263975353*m.b605 <= 0)
m.c156 = Constraint(expr= m.x267 - 2.54515263975353*m.b606 <= 0)
m.c157 = Constraint(expr= m.x268 - 2.54515263975353*m.b607 <= 0)
m.c158 = Constraint(expr= m.x272 + 2.54515263975353*m.b605 <= 2.54515263975353)
m.c159 = Constraint(expr= m.x273 + 2.54515263975353*m.b606 <= 2.54515263975353)
m.c160 = Constraint(expr= m.x274 + 2.54515263975353*m.b607 <= 2.54515263975353)
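# c161..c202: a unit imposing two simultaneous linear relations on its
# disaggregated streams (x278 = x248 and x278 = 0.5*x254 when b608 = 1).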
m.c161 = Constraint(expr= - m.x248 + m.x278 == 0)
m.c162 = Constraint(expr= - m.x249 + m.x279 == 0)
m.c163 = Constraint(expr= - m.x250 + m.x280 == 0)
m.c164 = Constraint(expr= - 0.5*m.x254 + m.x278 == 0)
m.c165 = Constraint(expr= - 0.5*m.x255 + m.x279 == 0)
m.c166 = Constraint(expr= - 0.5*m.x256 + m.x280 == 0)
m.c167 = Constraint(expr= m.x251 == 0)
m.c168 = Constraint(expr= m.x252 == 0)
m.c169 = Constraint(expr= m.x253 == 0)
m.c170 = Constraint(expr= m.x257 == 0)
m.c171 = Constraint(expr= m.x258 == 0)
m.c172 = Constraint(expr= m.x259 == 0)
m.c173 = Constraint(expr= m.x281 == 0)
m.c174 = Constraint(expr= m.x282 == 0)
m.c175 = Constraint(expr= m.x283 == 0)
m.c176 = Constraint(expr= m.x32 - m.x248 - m.x251 == 0)
m.c177 = Constraint(expr= m.x33 - m.x249 - m.x252 == 0)
m.c178 = Constraint(expr= m.x34 - m.x250 - m.x253 == 0)
m.c179 = Constraint(expr= m.x35 - m.x254 - m.x257 == 0)
m.c180 = Constraint(expr= m.x36 - m.x255 - m.x258 == 0)
m.c181 = Constraint(expr= m.x37 - m.x256 - m.x259 == 0)
m.c182 = Constraint(expr= m.x44 - m.x278 - m.x281 == 0)
m.c183 = Constraint(expr= m.x45 - m.x279 - m.x282 == 0)
m.c184 = Constraint(expr= m.x46 - m.x280 - m.x283 == 0)
m.c185 = Constraint(expr= m.x248 - 4.45628648004517*m.b608 <= 0)
m.c186 = Constraint(expr= m.x249 - 4.45628648004517*m.b609 <= 0)
m.c187 = Constraint(expr= m.x250 - 4.45628648004517*m.b610 <= 0)
m.c188 = Constraint(expr= m.x251 + 4.45628648004517*m.b608 <= 4.45628648004517)
m.c189 = Constraint(expr= m.x252 + 4.45628648004517*m.b609 <= 4.45628648004517)
m.c190 = Constraint(expr= m.x253 + 4.45628648004517*m.b610 <= 4.45628648004517)
m.c191 = Constraint(expr= m.x254 - 30*m.b608 <= 0)
m.c192 = Constraint(expr= m.x255 - 30*m.b609 <= 0)
m.c193 = Constraint(expr= m.x256 - 30*m.b610 <= 0)
m.c194 = Constraint(expr= m.x257 + 30*m.b608 <= 30)
m.c195 = Constraint(expr= m.x258 + 30*m.b609 <= 30)
m.c196 = Constraint(expr= m.x259 + 30*m.b610 <= 30)
m.c197 = Constraint(expr= m.x278 - 15*m.b608 <= 0)
m.c198 = Constraint(expr= m.x279 - 15*m.b609 <= 0)
m.c199 = Constraint(expr= m.x280 - 15*m.b610 <= 0)
m.c200 = Constraint(expr= m.x281 + 15*m.b608 <= 15)
m.c201 = Constraint(expr= m.x282 + 15*m.b609 <= 15)
m.c202 = Constraint(expr= m.x283 + 15*m.b610 <= 15)
m.c203 = Constraint(expr=(m.x314/(0.001 + 0.999*m.b611) - 1.25*log(1 + m.x284/(0.001 + 0.999*m.b611)))*(0.001 + 0.999*
m.b611) <= 0)
m.c204 = Constraint(expr=(m.x315/(0.001 + 0.999*m.b612) - 1.25*log(1 + m.x285/(0.001 + 0.999*m.b612)))*(0.001 + 0.999*
m.b612) <= 0)
m.c205 = Constraint(expr=(m.x316/(0.001 + 0.999*m.b613) - 1.25*log(1 + m.x286/(0.001 + 0.999*m.b613)))*(0.001 + 0.999*
m.b613) <= 0)
m.c206 = Constraint(expr= m.x287 == 0)
m.c207 = Constraint(expr= m.x288 == 0)
m.c208 = Constraint(expr= m.x289 == 0)
m.c209 = Constraint(expr= m.x320 == 0)
m.c210 = Constraint(expr= m.x321 == 0)
m.c211 = Constraint(expr= m.x322 == 0)
m.c212 = Constraint(expr= m.x47 - m.x284 - m.x287 == 0)
m.c213 = Constraint(expr= m.x48 - m.x285 - m.x288 == 0)
m.c214 = Constraint(expr= m.x49 - m.x286 - m.x289 == 0)
m.c215 = Constraint(expr= m.x62 - m.x314 - m.x320 == 0)
m.c216 = Constraint(expr= m.x63 - m.x315 - m.x321 == 0)
m.c217 = Constraint(expr= m.x64 - m.x316 - m.x322 == 0)
m.c218 = Constraint(expr= m.x284 - 3.34221486003388*m.b611 <= 0)
m.c219 = Constraint(expr= m.x285 - 3.34221486003388*m.b612 <= 0)
m.c220 = Constraint(expr= m.x286 - 3.34221486003388*m.b613 <= 0)
m.c221 = Constraint(expr= m.x287 + 3.34221486003388*m.b611 <= 3.34221486003388)
m.c222 = Constraint(expr= m.x288 + 3.34221486003388*m.b612 <= 3.34221486003388)
m.c223 = Constraint(expr= m.x289 + 3.34221486003388*m.b613 <= 3.34221486003388)
m.c224 = Constraint(expr= m.x314 - 1.83548069293539*m.b611 <= 0)
m.c225 = Constraint(expr= m.x315 - 1.83548069293539*m.b612 <= 0)
m.c226 = Constraint(expr= m.x316 - 1.83548069293539*m.b613 <= 0)
m.c227 = Constraint(expr= m.x320 + 1.83548069293539*m.b611 <= 1.83548069293539)
m.c228 = Constraint(expr= m.x321 + 1.83548069293539*m.b612 <= 1.83548069293539)
m.c229 = Constraint(expr= m.x322 + 1.83548069293539*m.b613 <= 1.83548069293539)
m.c230 = Constraint(expr=(m.x326/(0.001 + 0.999*m.b614) - 0.9*log(1 + m.x290/(0.001 + 0.999*m.b614)))*(0.001 + 0.999*
m.b614) <= 0)
m.c231 = Constraint(expr=(m.x327/(0.001 + 0.999*m.b615) - 0.9*log(1 + m.x291/(0.001 + 0.999*m.b615)))*(0.001 + 0.999*
m.b615) <= 0)
m.c232 = Constraint(expr=(m.x328/(0.001 + 0.999*m.b616) - 0.9*log(1 + m.x292/(0.001 + 0.999*m.b616)))*(0.001 + 0.999*
m.b616) <= 0)
m.c233 = Constraint(expr= m.x293 == 0)
m.c234 = Constraint(expr= m.x294 == 0)
m.c235 = Constraint(expr= m.x295 == 0)
m.c236 = Constraint(expr= m.x332 == 0)
m.c237 = Constraint(expr= m.x333 == 0)
m.c238 = Constraint(expr= m.x334 == 0)
m.c239 = Constraint(expr= m.x50 - m.x290 - m.x293 == 0)
m.c240 = Constraint(expr= m.x51 - m.x291 - m.x294 == 0)
m.c241 = Constraint(expr= m.x52 - m.x292 - m.x295 == 0)
m.c242 = Constraint(expr= m.x65 - m.x326 - m.x332 == 0)
m.c243 = Constraint(expr= m.x66 - m.x327 - m.x333 == 0)
m.c244 = Constraint(expr= m.x67 - m.x328 - m.x334 == 0)
m.c245 = Constraint(expr= m.x290 - 3.34221486003388*m.b614 <= 0)
m.c246 = Constraint(expr= m.x291 - 3.34221486003388*m.b615 <= 0)
m.c247 = Constraint(expr= m.x292 - 3.34221486003388*m.b616 <= 0)
m.c248 = Constraint(expr= m.x293 + 3.34221486003388*m.b614 <= 3.34221486003388)
m.c249 = Constraint(expr= m.x294 + 3.34221486003388*m.b615 <= 3.34221486003388)
m.c250 = Constraint(expr= m.x295 + 3.34221486003388*m.b616 <= 3.34221486003388)
m.c251 = Constraint(expr= m.x326 - 1.32154609891348*m.b614 <= 0)
m.c252 = Constraint(expr= m.x327 - 1.32154609891348*m.b615 <= 0)
m.c253 = Constraint(expr= m.x328 - 1.32154609891348*m.b616 <= 0)
m.c254 = Constraint(expr= m.x332 + 1.32154609891348*m.b614 <= 1.32154609891348)
m.c255 = Constraint(expr= m.x333 + 1.32154609891348*m.b615 <= 1.32154609891348)
m.c256 = Constraint(expr= m.x334 + 1.32154609891348*m.b616 <= 1.32154609891348)
m.c257 = Constraint(expr=(m.x338/(0.001 + 0.999*m.b617) - log(1 + m.x269/(0.001 + 0.999*m.b617)))*(0.001 + 0.999*m.b617)
<= 0)
m.c258 = Constraint(expr=(m.x339/(0.001 + 0.999*m.b618) - log(1 + m.x270/(0.001 + 0.999*m.b618)))*(0.001 + 0.999*m.b618)
<= 0)
m.c259 = Constraint(expr=(m.x340/(0.001 + 0.999*m.b619) - log(1 + m.x271/(0.001 + 0.999*m.b619)))*(0.001 + 0.999*m.b619)
<= 0)
m.c260 = Constraint(expr= m.x275 == 0)
m.c261 = Constraint(expr= m.x276 == 0)
m.c262 = Constraint(expr= m.x277 == 0)
m.c263 = Constraint(expr= m.x341 == 0)
m.c264 = Constraint(expr= m.x342 == 0)
m.c265 = Constraint(expr= m.x343 == 0)
m.c266 = Constraint(expr= m.x41 - m.x269 - m.x275 == 0)
m.c267 = Constraint(expr= m.x42 - m.x270 - m.x276 == 0)
m.c268 = Constraint(expr= m.x43 - m.x271 - m.x277 == 0)
m.c269 = Constraint(expr= m.x68 - m.x338 - m.x341 == 0)
m.c270 = Constraint(expr= m.x69 - m.x339 - m.x342 == 0)
m.c271 = Constraint(expr= m.x70 - m.x340 - m.x343 == 0)
m.c272 = Constraint(expr= m.x269 - 2.54515263975353*m.b617 <= 0)
m.c273 = Constraint(expr= m.x270 - 2.54515263975353*m.b618 <= 0)
m.c274 = Constraint(expr= m.x271 - 2.54515263975353*m.b619 <= 0)
m.c275 = Constraint(expr= m.x275 + 2.54515263975353*m.b617 <= 2.54515263975353)
m.c276 = Constraint(expr= m.x276 + 2.54515263975353*m.b618 <= 2.54515263975353)
m.c277 = Constraint(expr= m.x277 + 2.54515263975353*m.b619 <= 2.54515263975353)
m.c278 = Constraint(expr= m.x338 - 1.26558121681553*m.b617 <= 0)
m.c279 = Constraint(expr= m.x339 - 1.26558121681553*m.b618 <= 0)
m.c280 = Constraint(expr= m.x340 - 1.26558121681553*m.b619 <= 0)
m.c281 = Constraint(expr= m.x341 + 1.26558121681553*m.b617 <= 1.26558121681553)
m.c282 = Constraint(expr= m.x342 + 1.26558121681553*m.b618 <= 1.26558121681553)
m.c283 = Constraint(expr= m.x343 + 1.26558121681553*m.b619 <= 1.26558121681553)
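# Units with linear yield use the same disaggregation pattern but with an
# algebraic relation in place of the log term: in the next block the active
# outlet copy is 90% of the active inlet copy (x344 = 0.9*x296), with zero
# flows on the inactive branch and the usual big-M bounds.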
m.c284 = Constraint(expr= - 0.9*m.x296 + m.x344 == 0)
m.c285 = Constraint(expr= - 0.9*m.x297 + m.x345 == 0)
m.c286 = Constraint(expr= - 0.9*m.x298 + m.x346 == 0)
m.c287 = Constraint(expr= m.x299 == 0)
m.c288 = Constraint(expr= m.x300 == 0)
m.c289 = Constraint(expr= m.x301 == 0)
m.c290 = Constraint(expr= m.x347 == 0)
m.c291 = Constraint(expr= m.x348 == 0)
m.c292 = Constraint(expr= m.x349 == 0)
m.c293 = Constraint(expr= m.x53 - m.x296 - m.x299 == 0)
m.c294 = Constraint(expr= m.x54 - m.x297 - m.x300 == 0)
m.c295 = Constraint(expr= m.x55 - m.x298 - m.x301 == 0)
m.c296 = Constraint(expr= m.x71 - m.x344 - m.x347 == 0)
m.c297 = Constraint(expr= m.x72 - m.x345 - m.x348 == 0)
m.c298 = Constraint(expr= m.x73 - m.x346 - m.x349 == 0)
m.c299 = Constraint(expr= m.x296 - 15*m.b620 <= 0)
m.c300 = Constraint(expr= m.x297 - 15*m.b621 <= 0)
m.c301 = Constraint(expr= m.x298 - 15*m.b622 <= 0)
m.c302 = Constraint(expr= m.x299 + 15*m.b620 <= 15)
m.c303 = Constraint(expr= m.x300 + 15*m.b621 <= 15)
m.c304 = Constraint(expr= m.x301 + 15*m.b622 <= 15)
m.c305 = Constraint(expr= m.x344 - 13.5*m.b620 <= 0)
m.c306 = Constraint(expr= m.x345 - 13.5*m.b621 <= 0)
m.c307 = Constraint(expr= m.x346 - 13.5*m.b622 <= 0)
m.c308 = Constraint(expr= m.x347 + 13.5*m.b620 <= 13.5)
m.c309 = Constraint(expr= m.x348 + 13.5*m.b621 <= 13.5)
m.c310 = Constraint(expr= m.x349 + 13.5*m.b622 <= 13.5)
m.c311 = Constraint(expr= - 0.6*m.x302 + m.x350 == 0)
m.c312 = Constraint(expr= - 0.6*m.x303 + m.x351 == 0)
m.c313 = Constraint(expr= - 0.6*m.x304 + m.x352 == 0)
m.c314 = Constraint(expr= m.x305 == 0)
m.c315 = Constraint(expr= m.x306 == 0)
m.c316 = Constraint(expr= m.x307 == 0)
m.c317 = Constraint(expr= m.x353 == 0)
m.c318 = Constraint(expr= m.x354 == 0)
m.c319 = Constraint(expr= m.x355 == 0)
m.c320 = Constraint(expr= m.x56 - m.x302 - m.x305 == 0)
m.c321 = Constraint(expr= m.x57 - m.x303 - m.x306 == 0)
m.c322 = Constraint(expr= m.x58 - m.x304 - m.x307 == 0)
m.c323 = Constraint(expr= m.x74 - m.x350 - m.x353 == 0)
m.c324 = Constraint(expr= m.x75 - m.x351 - m.x354 == 0)
m.c325 = Constraint(expr= m.x76 - m.x352 - m.x355 == 0)
m.c326 = Constraint(expr= m.x302 - 15*m.b623 <= 0)
m.c327 = Constraint(expr= m.x303 - 15*m.b624 <= 0)
m.c328 = Constraint(expr= m.x304 - 15*m.b625 <= 0)
m.c329 = Constraint(expr= m.x305 + 15*m.b623 <= 15)
m.c330 = Constraint(expr= m.x306 + 15*m.b624 <= 15)
m.c331 = Constraint(expr= m.x307 + 15*m.b625 <= 15)
m.c332 = Constraint(expr= m.x350 - 9*m.b623 <= 0)
m.c333 = Constraint(expr= m.x351 - 9*m.b624 <= 0)
m.c334 = Constraint(expr= m.x352 - 9*m.b625 <= 0)
m.c335 = Constraint(expr= m.x353 + 9*m.b623 <= 9)
m.c336 = Constraint(expr= m.x354 + 9*m.b624 <= 9)
m.c337 = Constraint(expr= m.x355 + 9*m.b625 <= 9)
m.c338 = Constraint(expr=(m.x356/(0.001 + 0.999*m.b626) - 1.1*log(1 + m.x308/(0.001 + 0.999*m.b626)))*(0.001 + 0.999*
m.b626) <= 0)
m.c339 = Constraint(expr=(m.x357/(0.001 + 0.999*m.b627) - 1.1*log(1 + m.x309/(0.001 + 0.999*m.b627)))*(0.001 + 0.999*
m.b627) <= 0)
m.c340 = Constraint(expr=(m.x358/(0.001 + 0.999*m.b628) - 1.1*log(1 + m.x310/(0.001 + 0.999*m.b628)))*(0.001 + 0.999*
m.b628) <= 0)
m.c341 = Constraint(expr= m.x311 == 0)
m.c342 = Constraint(expr= m.x312 == 0)
m.c343 = Constraint(expr= m.x313 == 0)
m.c344 = Constraint(expr= m.x359 == 0)
m.c345 = Constraint(expr= m.x360 == 0)
m.c346 = Constraint(expr= m.x361 == 0)
m.c347 = Constraint(expr= m.x59 - m.x308 - m.x311 == 0)
m.c348 = Constraint(expr= m.x60 - m.x309 - m.x312 == 0)
m.c349 = Constraint(expr= m.x61 - m.x310 - m.x313 == 0)
m.c350 = Constraint(expr= m.x77 - m.x356 - m.x359 == 0)
m.c351 = Constraint(expr= m.x78 - m.x357 - m.x360 == 0)
m.c352 = Constraint(expr= m.x79 - m.x358 - m.x361 == 0)
m.c353 = Constraint(expr= m.x308 - 15*m.b626 <= 0)
m.c354 = Constraint(expr= m.x309 - 15*m.b627 <= 0)
m.c355 = Constraint(expr= m.x310 - 15*m.b628 <= 0)
m.c356 = Constraint(expr= m.x311 + 15*m.b626 <= 15)
m.c357 = Constraint(expr= m.x312 + 15*m.b627 <= 15)
m.c358 = Constraint(expr= m.x313 + 15*m.b628 <= 15)
m.c359 = Constraint(expr= m.x356 - 3.04984759446376*m.b626 <= 0)
m.c360 = Constraint(expr= m.x357 - 3.04984759446376*m.b627 <= 0)
m.c361 = Constraint(expr= m.x358 - 3.04984759446376*m.b628 <= 0)
m.c362 = Constraint(expr= m.x359 + 3.04984759446376*m.b626 <= 3.04984759446376)
m.c363 = Constraint(expr= m.x360 + 3.04984759446376*m.b627 <= 3.04984759446376)
m.c364 = Constraint(expr= m.x361 + 3.04984759446376*m.b628 <= 3.04984759446376)
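# Some units couple several streams at once: the next block ties one outlet
# (x416, x417, x418) both to a converted inlet (0.9*x317, ...) and to a second
# pass-through stream (x374, ...), with matching big-M bounds on every
# disaggregated copy.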
m.c365 = Constraint(expr= - 0.9*m.x317 + m.x416 == 0)
m.c366 = Constraint(expr= - 0.9*m.x318 + m.x417 == 0)
m.c367 = Constraint(expr= - 0.9*m.x319 + m.x418 == 0)
m.c368 = Constraint(expr= - m.x374 + m.x416 == 0)
m.c369 = Constraint(expr= - m.x375 + m.x417 == 0)
m.c370 = Constraint(expr= - m.x376 + m.x418 == 0)
m.c371 = Constraint(expr= m.x323 == 0)
m.c372 = Constraint(expr= m.x324 == 0)
m.c373 = Constraint(expr= m.x325 == 0)
m.c374 = Constraint(expr= m.x377 == 0)
m.c375 = Constraint(expr= m.x378 == 0)
m.c376 = Constraint(expr= m.x379 == 0)
m.c377 = Constraint(expr= m.x419 == 0)
m.c378 = Constraint(expr= m.x420 == 0)
m.c379 = Constraint(expr= m.x421 == 0)
m.c380 = Constraint(expr= m.x62 - m.x317 - m.x323 == 0)
m.c381 = Constraint(expr= m.x63 - m.x318 - m.x324 == 0)
m.c382 = Constraint(expr= m.x64 - m.x319 - m.x325 == 0)
m.c383 = Constraint(expr= m.x86 - m.x374 - m.x377 == 0)
m.c384 = Constraint(expr= m.x87 - m.x375 - m.x378 == 0)
m.c385 = Constraint(expr= m.x88 - m.x376 - m.x379 == 0)
m.c386 = Constraint(expr= m.x110 - m.x416 - m.x419 == 0)
m.c387 = Constraint(expr= m.x111 - m.x417 - m.x420 == 0)
m.c388 = Constraint(expr= m.x112 - m.x418 - m.x421 == 0)
m.c389 = Constraint(expr= m.x317 - 1.83548069293539*m.b629 <= 0)
m.c390 = Constraint(expr= m.x318 - 1.83548069293539*m.b630 <= 0)
m.c391 = Constraint(expr= m.x319 - 1.83548069293539*m.b631 <= 0)
m.c392 = Constraint(expr= m.x323 + 1.83548069293539*m.b629 <= 1.83548069293539)
m.c393 = Constraint(expr= m.x324 + 1.83548069293539*m.b630 <= 1.83548069293539)
m.c394 = Constraint(expr= m.x325 + 1.83548069293539*m.b631 <= 1.83548069293539)
m.c395 = Constraint(expr= m.x374 - 20*m.b629 <= 0)
m.c396 = Constraint(expr= m.x375 - 20*m.b630 <= 0)
m.c397 = Constraint(expr= m.x376 - 20*m.b631 <= 0)
m.c398 = Constraint(expr= m.x377 + 20*m.b629 <= 20)
m.c399 = Constraint(expr= m.x378 + 20*m.b630 <= 20)
m.c400 = Constraint(expr= m.x379 + 20*m.b631 <= 20)
m.c401 = Constraint(expr= m.x416 - 20*m.b629 <= 0)
m.c402 = Constraint(expr= m.x417 - 20*m.b630 <= 0)
m.c403 = Constraint(expr= m.x418 - 20*m.b631 <= 0)
m.c404 = Constraint(expr= m.x419 + 20*m.b629 <= 20)
m.c405 = Constraint(expr= m.x420 + 20*m.b630 <= 20)
m.c406 = Constraint(expr= m.x421 + 20*m.b631 <= 20)
m.c407 = Constraint(expr=(m.x422/(0.001 + 0.999*m.b632) - log(1 + m.x329/(0.001 + 0.999*m.b632)))*(0.001 + 0.999*m.b632)
<= 0)
m.c408 = Constraint(expr=(m.x423/(0.001 + 0.999*m.b633) - log(1 + m.x330/(0.001 + 0.999*m.b633)))*(0.001 + 0.999*m.b633)
<= 0)
m.c409 = Constraint(expr=(m.x424/(0.001 + 0.999*m.b634) - log(1 + m.x331/(0.001 + 0.999*m.b634)))*(0.001 + 0.999*m.b634)
<= 0)
m.c410 = Constraint(expr= m.x335 == 0)
m.c411 = Constraint(expr= m.x336 == 0)
m.c412 = Constraint(expr= m.x337 == 0)
m.c413 = Constraint(expr= m.x425 == 0)
m.c414 = Constraint(expr= m.x426 == 0)
m.c415 = Constraint(expr= m.x427 == 0)
m.c416 = Constraint(expr= m.x65 - m.x329 - m.x335 == 0)
m.c417 = Constraint(expr= m.x66 - m.x330 - m.x336 == 0)
m.c418 = Constraint(expr= m.x67 - m.x331 - m.x337 == 0)
m.c419 = Constraint(expr= m.x113 - m.x422 - m.x425 == 0)
m.c420 = Constraint(expr= m.x114 - m.x423 - m.x426 == 0)
m.c421 = Constraint(expr= m.x115 - m.x424 - m.x427 == 0)
m.c422 = Constraint(expr= m.x329 - 1.32154609891348*m.b632 <= 0)
m.c423 = Constraint(expr= m.x330 - 1.32154609891348*m.b633 <= 0)
m.c424 = Constraint(expr= m.x331 - 1.32154609891348*m.b634 <= 0)
m.c425 = Constraint(expr= m.x335 + 1.32154609891348*m.b632 <= 1.32154609891348)
m.c426 = Constraint(expr= m.x336 + 1.32154609891348*m.b633 <= 1.32154609891348)
m.c427 = Constraint(expr= m.x337 + 1.32154609891348*m.b634 <= 1.32154609891348)
m.c428 = Constraint(expr= m.x422 - 0.842233385663186*m.b632 <= 0)
m.c429 = Constraint(expr= m.x423 - 0.842233385663186*m.b633 <= 0)
m.c430 = Constraint(expr= m.x424 - 0.842233385663186*m.b634 <= 0)
m.c431 = Constraint(expr= m.x425 + 0.842233385663186*m.b632 <= 0.842233385663186)
m.c432 = Constraint(expr= m.x426 + 0.842233385663186*m.b633 <= 0.842233385663186)
m.c433 = Constraint(expr= m.x427 + 0.842233385663186*m.b634 <= 0.842233385663186)
m.c434 = Constraint(expr=(m.x428/(0.001 + 0.999*m.b635) - 0.7*log(1 + m.x362/(0.001 + 0.999*m.b635)))*(0.001 + 0.999*
m.b635) <= 0)
m.c435 = Constraint(expr=(m.x429/(0.001 + 0.999*m.b636) - 0.7*log(1 + m.x363/(0.001 + 0.999*m.b636)))*(0.001 + 0.999*
m.b636) <= 0)
m.c436 = Constraint(expr=(m.x430/(0.001 + 0.999*m.b637) - 0.7*log(1 + m.x364/(0.001 + 0.999*m.b637)))*(0.001 + 0.999*
m.b637) <= 0)
m.c437 = Constraint(expr= m.x365 == 0)
m.c438 = Constraint(expr= m.x366 == 0)
m.c439 = Constraint(expr= m.x367 == 0)
m.c440 = Constraint(expr= m.x431 == 0)
m.c441 = Constraint(expr= m.x432 == 0)
m.c442 = Constraint(expr= m.x433 == 0)
m.c443 = Constraint(expr= m.x80 - m.x362 - m.x365 == 0)
m.c444 = Constraint(expr= m.x81 - m.x363 - m.x366 == 0)
m.c445 = Constraint(expr= m.x82 - m.x364 - m.x367 == 0)
m.c446 = Constraint(expr= m.x116 - m.x428 - m.x431 == 0)
m.c447 = Constraint(expr= m.x117 - m.x429 - m.x432 == 0)
m.c448 = Constraint(expr= m.x118 - m.x430 - m.x433 == 0)
m.c449 = Constraint(expr= m.x362 - 1.26558121681553*m.b635 <= 0)
m.c450 = Constraint(expr= m.x363 - 1.26558121681553*m.b636 <= 0)
m.c451 = Constraint(expr= m.x364 - 1.26558121681553*m.b637 <= 0)
m.c452 = Constraint(expr= m.x365 + 1.26558121681553*m.b635 <= 1.26558121681553)
m.c453 = Constraint(expr= m.x366 + 1.26558121681553*m.b636 <= 1.26558121681553)
m.c454 = Constraint(expr= m.x367 + 1.26558121681553*m.b637 <= 1.26558121681553)
m.c455 = Constraint(expr= m.x428 - 0.572481933717686*m.b635 <= 0)
m.c456 = Constraint(expr= m.x429 - 0.572481933717686*m.b636 <= 0)
m.c457 = Constraint(expr= m.x430 - 0.572481933717686*m.b637 <= 0)
m.c458 = Constraint(expr= m.x431 + 0.572481933717686*m.b635 <= 0.572481933717686)
m.c459 = Constraint(expr= m.x432 + 0.572481933717686*m.b636 <= 0.572481933717686)
m.c460 = Constraint(expr= m.x433 + 0.572481933717686*m.b637 <= 0.572481933717686)
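# For units with two feeds, the nonlinear outlet constraint is repeated once
# per feed (c461-c463 against x368..., c464-c466 against x380...), so the
# outlet is bounded by the log of each feed separately rather than by their
# sum.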
m.c461 = Constraint(expr=(m.x434/(0.001 + 0.999*m.b638) - 0.65*log(1 + m.x368/(0.001 + 0.999*m.b638)))*(0.001 + 0.999*
m.b638) <= 0)
m.c462 = Constraint(expr=(m.x435/(0.001 + 0.999*m.b639) - 0.65*log(1 + m.x369/(0.001 + 0.999*m.b639)))*(0.001 + 0.999*
m.b639) <= 0)
m.c463 = Constraint(expr=(m.x436/(0.001 + 0.999*m.b640) - 0.65*log(1 + m.x370/(0.001 + 0.999*m.b640)))*(0.001 + 0.999*
m.b640) <= 0)
m.c464 = Constraint(expr=(m.x434/(0.001 + 0.999*m.b638) - 0.65*log(1 + m.x380/(0.001 + 0.999*m.b638)))*(0.001 + 0.999*
m.b638) <= 0)
m.c465 = Constraint(expr=(m.x435/(0.001 + 0.999*m.b639) - 0.65*log(1 + m.x381/(0.001 + 0.999*m.b639)))*(0.001 + 0.999*
m.b639) <= 0)
m.c466 = Constraint(expr=(m.x436/(0.001 + 0.999*m.b640) - 0.65*log(1 + m.x382/(0.001 + 0.999*m.b640)))*(0.001 + 0.999*
m.b640) <= 0)
m.c467 = Constraint(expr= m.x371 == 0)
m.c468 = Constraint(expr= m.x372 == 0)
m.c469 = Constraint(expr= m.x373 == 0)
m.c470 = Constraint(expr= m.x383 == 0)
m.c471 = Constraint(expr= m.x384 == 0)
m.c472 = Constraint(expr= m.x385 == 0)
m.c473 = Constraint(expr= m.x437 == 0)
m.c474 = Constraint(expr= m.x438 == 0)
m.c475 = Constraint(expr= m.x439 == 0)
m.c476 = Constraint(expr= m.x83 - m.x368 - m.x371 == 0)
m.c477 = Constraint(expr= m.x84 - m.x369 - m.x372 == 0)
m.c478 = Constraint(expr= m.x85 - m.x370 - m.x373 == 0)
m.c479 = Constraint(expr= m.x92 - m.x380 - m.x383 == 0)
m.c480 = Constraint(expr= m.x93 - m.x381 - m.x384 == 0)
m.c481 = Constraint(expr= m.x94 - m.x382 - m.x385 == 0)
m.c482 = Constraint(expr= m.x119 - m.x434 - m.x437 == 0)
m.c483 = Constraint(expr= m.x120 - m.x435 - m.x438 == 0)
m.c484 = Constraint(expr= m.x121 - m.x436 - m.x439 == 0)
m.c485 = Constraint(expr= m.x368 - 1.26558121681553*m.b638 <= 0)
m.c486 = Constraint(expr= m.x369 - 1.26558121681553*m.b639 <= 0)
m.c487 = Constraint(expr= m.x370 - 1.26558121681553*m.b640 <= 0)
m.c488 = Constraint(expr= m.x371 + 1.26558121681553*m.b638 <= 1.26558121681553)
m.c489 = Constraint(expr= m.x372 + 1.26558121681553*m.b639 <= 1.26558121681553)
m.c490 = Constraint(expr= m.x373 + 1.26558121681553*m.b640 <= 1.26558121681553)
m.c491 = Constraint(expr= m.x380 - 33.5*m.b638 <= 0)
m.c492 = Constraint(expr= m.x381 - 33.5*m.b639 <= 0)
m.c493 = Constraint(expr= m.x382 - 33.5*m.b640 <= 0)
m.c494 = Constraint(expr= m.x383 + 33.5*m.b638 <= 33.5)
m.c495 = Constraint(expr= m.x384 + 33.5*m.b639 <= 33.5)
m.c496 = Constraint(expr= m.x385 + 33.5*m.b640 <= 33.5)
m.c497 = Constraint(expr= m.x434 - 2.30162356062425*m.b638 <= 0)
m.c498 = Constraint(expr= m.x435 - 2.30162356062425*m.b639 <= 0)
m.c499 = Constraint(expr= m.x436 - 2.30162356062425*m.b640 <= 0)
m.c500 = Constraint(expr= m.x437 + 2.30162356062425*m.b638 <= 2.30162356062425)
m.c501 = Constraint(expr= m.x438 + 2.30162356062425*m.b639 <= 2.30162356062425)
m.c502 = Constraint(expr= m.x439 + 2.30162356062425*m.b640 <= 2.30162356062425)
m.c503 = Constraint(expr= - m.x386 + m.x440 == 0)
m.c504 = Constraint(expr= - m.x387 + m.x441 == 0)
m.c505 = Constraint(expr= - m.x388 + m.x442 == 0)
m.c506 = Constraint(expr= m.x389 == 0)
m.c507 = Constraint(expr= m.x390 == 0)
m.c508 = Constraint(expr= m.x391 == 0)
m.c509 = Constraint(expr= m.x443 == 0)
m.c510 = Constraint(expr= m.x444 == 0)
m.c511 = Constraint(expr= m.x445 == 0)
m.c512 = Constraint(expr= m.x95 - m.x386 - m.x389 == 0)
m.c513 = Constraint(expr= m.x96 - m.x387 - m.x390 == 0)
m.c514 = Constraint(expr= m.x97 - m.x388 - m.x391 == 0)
m.c515 = Constraint(expr= m.x122 - m.x440 - m.x443 == 0)
m.c516 = Constraint(expr= m.x123 - m.x441 - m.x444 == 0)
m.c517 = Constraint(expr= m.x124 - m.x442 - m.x445 == 0)
m.c518 = Constraint(expr= m.x386 - 9*m.b641 <= 0)
m.c519 = Constraint(expr= m.x387 - 9*m.b642 <= 0)
m.c520 = Constraint(expr= m.x388 - 9*m.b643 <= 0)
m.c521 = Constraint(expr= m.x389 + 9*m.b641 <= 9)
m.c522 = Constraint(expr= m.x390 + 9*m.b642 <= 9)
m.c523 = Constraint(expr= m.x391 + 9*m.b643 <= 9)
m.c524 = Constraint(expr= m.x440 - 9*m.b641 <= 0)
m.c525 = Constraint(expr= m.x441 - 9*m.b642 <= 0)
m.c526 = Constraint(expr= m.x442 - 9*m.b643 <= 0)
m.c527 = Constraint(expr= m.x443 + 9*m.b641 <= 9)
m.c528 = Constraint(expr= m.x444 + 9*m.b642 <= 9)
m.c529 = Constraint(expr= m.x445 + 9*m.b643 <= 9)
m.c530 = Constraint(expr= - m.x392 + m.x446 == 0)
m.c531 = Constraint(expr= - m.x393 + m.x447 == 0)
m.c532 = Constraint(expr= - m.x394 + m.x448 == 0)
m.c533 = Constraint(expr= m.x395 == 0)
m.c534 = Constraint(expr= m.x396 == 0)
m.c535 = Constraint(expr= m.x397 == 0)
m.c536 = Constraint(expr= m.x449 == 0)
m.c537 = Constraint(expr= m.x450 == 0)
m.c538 = Constraint(expr= m.x451 == 0)
m.c539 = Constraint(expr= m.x98 - m.x392 - m.x395 == 0)
m.c540 = Constraint(expr= m.x99 - m.x393 - m.x396 == 0)
m.c541 = Constraint(expr= m.x100 - m.x394 - m.x397 == 0)
m.c542 = Constraint(expr= m.x125 - m.x446 - m.x449 == 0)
m.c543 = Constraint(expr= m.x126 - m.x447 - m.x450 == 0)
m.c544 = Constraint(expr= m.x127 - m.x448 - m.x451 == 0)
m.c545 = Constraint(expr= m.x392 - 9*m.b644 <= 0)
m.c546 = Constraint(expr= m.x393 - 9*m.b645 <= 0)
m.c547 = Constraint(expr= m.x394 - 9*m.b646 <= 0)
m.c548 = Constraint(expr= m.x395 + 9*m.b644 <= 9)
m.c549 = Constraint(expr= m.x396 + 9*m.b645 <= 9)
m.c550 = Constraint(expr= m.x397 + 9*m.b646 <= 9)
m.c551 = Constraint(expr= m.x446 - 9*m.b644 <= 0)
m.c552 = Constraint(expr= m.x447 - 9*m.b645 <= 0)
m.c553 = Constraint(expr= m.x448 - 9*m.b646 <= 0)
m.c554 = Constraint(expr= m.x449 + 9*m.b644 <= 9)
m.c555 = Constraint(expr= m.x450 + 9*m.b645 <= 9)
m.c556 = Constraint(expr= m.x451 + 9*m.b646 <= 9)
m.c557 = Constraint(expr=(m.x452/(0.001 + 0.999*m.b647) - 0.75*log(1 + m.x398/(0.001 + 0.999*m.b647)))*(0.001 + 0.999*
m.b647) <= 0)
m.c558 = Constraint(expr=(m.x453/(0.001 + 0.999*m.b648) - 0.75*log(1 + m.x399/(0.001 + 0.999*m.b648)))*(0.001 + 0.999*
m.b648) <= 0)
m.c559 = Constraint(expr=(m.x454/(0.001 + 0.999*m.b649) - 0.75*log(1 + m.x400/(0.001 + 0.999*m.b649)))*(0.001 + 0.999*
m.b649) <= 0)
m.c560 = Constraint(expr= m.x401 == 0)
m.c561 = Constraint(expr= m.x402 == 0)
m.c562 = Constraint(expr= m.x403 == 0)
m.c563 = Constraint(expr= m.x455 == 0)
m.c564 = Constraint(expr= m.x456 == 0)
m.c565 = Constraint(expr= m.x457 == 0)
m.c566 = Constraint(expr= m.x101 - m.x398 - m.x401 == 0)
m.c567 = Constraint(expr= m.x102 - m.x399 - m.x402 == 0)
m.c568 = Constraint(expr= m.x103 - m.x400 - m.x403 == 0)
m.c569 = Constraint(expr= m.x128 - m.x452 - m.x455 == 0)
m.c570 = Constraint(expr= m.x129 - m.x453 - m.x456 == 0)
m.c571 = Constraint(expr= m.x130 - m.x454 - m.x457 == 0)
m.c572 = Constraint(expr= m.x398 - 3.04984759446376*m.b647 <= 0)
m.c573 = Constraint(expr= m.x399 - 3.04984759446376*m.b648 <= 0)
m.c574 = Constraint(expr= m.x400 - 3.04984759446376*m.b649 <= 0)
m.c575 = Constraint(expr= m.x401 + 3.04984759446376*m.b647 <= 3.04984759446376)
m.c576 = Constraint(expr= m.x402 + 3.04984759446376*m.b648 <= 3.04984759446376)
m.c577 = Constraint(expr= m.x403 + 3.04984759446376*m.b649 <= 3.04984759446376)
m.c578 = Constraint(expr= m.x452 - 1.04900943706034*m.b647 <= 0)
m.c579 = Constraint(expr= m.x453 - 1.04900943706034*m.b648 <= 0)
m.c580 = Constraint(expr= m.x454 - 1.04900943706034*m.b649 <= 0)
m.c581 = Constraint(expr= m.x455 + 1.04900943706034*m.b647 <= 1.04900943706034)
m.c582 = Constraint(expr= m.x456 + 1.04900943706034*m.b648 <= 1.04900943706034)
m.c583 = Constraint(expr= m.x457 + 1.04900943706034*m.b649 <= 1.04900943706034)
m.c584 = Constraint(expr=(m.x458/(0.001 + 0.999*m.b650) - 0.8*log(1 + m.x404/(0.001 + 0.999*m.b650)))*(0.001 + 0.999*
m.b650) <= 0)
m.c585 = Constraint(expr=(m.x459/(0.001 + 0.999*m.b651) - 0.8*log(1 + m.x405/(0.001 + 0.999*m.b651)))*(0.001 + 0.999*
m.b651) <= 0)
m.c586 = Constraint(expr=(m.x460/(0.001 + 0.999*m.b652) - 0.8*log(1 + m.x406/(0.001 + 0.999*m.b652)))*(0.001 + 0.999*
m.b652) <= 0)
m.c587 = Constraint(expr= m.x407 == 0)
m.c588 = Constraint(expr= m.x408 == 0)
m.c589 = Constraint(expr= m.x409 == 0)
m.c590 = Constraint(expr= m.x461 == 0)
m.c591 = Constraint(expr= m.x462 == 0)
m.c592 = Constraint(expr= m.x463 == 0)
m.c593 = Constraint(expr= m.x104 - m.x404 - m.x407 == 0)
m.c594 = Constraint(expr= m.x105 - m.x405 - m.x408 == 0)
m.c595 = Constraint(expr= m.x106 - m.x406 - m.x409 == 0)
m.c596 = Constraint(expr= m.x131 - m.x458 - m.x461 == 0)
m.c597 = Constraint(expr= m.x132 - m.x459 - m.x462 == 0)
m.c598 = Constraint(expr= m.x133 - m.x460 - m.x463 == 0)
m.c599 = Constraint(expr= m.x404 - 3.04984759446376*m.b650 <= 0)
m.c600 = Constraint(expr= m.x405 - 3.04984759446376*m.b651 <= 0)
m.c601 = Constraint(expr= m.x406 - 3.04984759446376*m.b652 <= 0)
m.c602 = Constraint(expr= m.x407 + 3.04984759446376*m.b650 <= 3.04984759446376)
m.c603 = Constraint(expr= m.x408 + 3.04984759446376*m.b651 <= 3.04984759446376)
m.c604 = Constraint(expr= m.x409 + 3.04984759446376*m.b652 <= 3.04984759446376)
m.c605 = Constraint(expr= m.x458 - 1.11894339953103*m.b650 <= 0)
m.c606 = Constraint(expr= m.x459 - 1.11894339953103*m.b651 <= 0)
m.c607 = Constraint(expr= m.x460 - 1.11894339953103*m.b652 <= 0)
m.c608 = Constraint(expr= m.x461 + 1.11894339953103*m.b650 <= 1.11894339953103)
m.c609 = Constraint(expr= m.x462 + 1.11894339953103*m.b651 <= 1.11894339953103)
m.c610 = Constraint(expr= m.x463 + 1.11894339953103*m.b652 <= 1.11894339953103)
m.c611 = Constraint(expr=(m.x464/(0.001 + 0.999*m.b653) - 0.85*log(1 + m.x410/(0.001 + 0.999*m.b653)))*(0.001 + 0.999*
m.b653) <= 0)
m.c612 = Constraint(expr=(m.x465/(0.001 + 0.999*m.b654) - 0.85*log(1 + m.x411/(0.001 + 0.999*m.b654)))*(0.001 + 0.999*
m.b654) <= 0)
m.c613 = Constraint(expr=(m.x466/(0.001 + 0.999*m.b655) - 0.85*log(1 + m.x412/(0.001 + 0.999*m.b655)))*(0.001 + 0.999*
m.b655) <= 0)
m.c614 = Constraint(expr= m.x413 == 0)
m.c615 = Constraint(expr= m.x414 == 0)
m.c616 = Constraint(expr= m.x415 == 0)
m.c617 = Constraint(expr= m.x467 == 0)
m.c618 = Constraint(expr= m.x468 == 0)
m.c619 = Constraint(expr= m.x469 == 0)
m.c620 = Constraint(expr= m.x107 - m.x410 - m.x413 == 0)
m.c621 = Constraint(expr= m.x108 - m.x411 - m.x414 == 0)
m.c622 = Constraint(expr= m.x109 - m.x412 - m.x415 == 0)
m.c623 = Constraint(expr= m.x134 - m.x464 - m.x467 == 0)
m.c624 = Constraint(expr= m.x135 - m.x465 - m.x468 == 0)
m.c625 = Constraint(expr= m.x136 - m.x466 - m.x469 == 0)
m.c626 = Constraint(expr= m.x410 - 3.04984759446376*m.b653 <= 0)
m.c627 = Constraint(expr= m.x411 - 3.04984759446376*m.b654 <= 0)
m.c628 = Constraint(expr= m.x412 - 3.04984759446376*m.b655 <= 0)
m.c629 = Constraint(expr= m.x413 + 3.04984759446376*m.b653 <= 3.04984759446376)
m.c630 = Constraint(expr= m.x414 + 3.04984759446376*m.b654 <= 3.04984759446376)
m.c631 = Constraint(expr= m.x415 + 3.04984759446376*m.b655 <= 3.04984759446376)
m.c632 = Constraint(expr= m.x464 - 1.18887736200171*m.b653 <= 0)
m.c633 = Constraint(expr= m.x465 - 1.18887736200171*m.b654 <= 0)
m.c634 = Constraint(expr= m.x466 - 1.18887736200171*m.b655 <= 0)
m.c635 = Constraint(expr= m.x467 + 1.18887736200171*m.b653 <= 1.18887736200171)
m.c636 = Constraint(expr= m.x468 + 1.18887736200171*m.b654 <= 1.18887736200171)
m.c637 = Constraint(expr= m.x469 + 1.18887736200171*m.b655 <= 1.18887736200171)
m.c638 = Constraint(expr=(m.x482/(0.001 + 0.999*m.b656) - log(1 + m.x470/(0.001 + 0.999*m.b656)))*(0.001 + 0.999*m.b656)
<= 0)
m.c639 = Constraint(expr=(m.x483/(0.001 + 0.999*m.b657) - log(1 + m.x471/(0.001 + 0.999*m.b657)))*(0.001 + 0.999*m.b657)
<= 0)
m.c640 = Constraint(expr=(m.x484/(0.001 + 0.999*m.b658) - log(1 + m.x472/(0.001 + 0.999*m.b658)))*(0.001 + 0.999*m.b658)
<= 0)
m.c641 = Constraint(expr= m.x473 == 0)
m.c642 = Constraint(expr= m.x474 == 0)
m.c643 = Constraint(expr= m.x475 == 0)
m.c644 = Constraint(expr= m.x485 == 0)
m.c645 = Constraint(expr= m.x486 == 0)
m.c646 = Constraint(expr= m.x487 == 0)
m.c647 = Constraint(expr= m.x140 - m.x470 - m.x473 == 0)
m.c648 = Constraint(expr= m.x141 - m.x471 - m.x474 == 0)
m.c649 = Constraint(expr= m.x142 - m.x472 - m.x475 == 0)
m.c650 = Constraint(expr= m.x146 - m.x482 - m.x485 == 0)
m.c651 = Constraint(expr= m.x147 - m.x483 - m.x486 == 0)
m.c652 = Constraint(expr= m.x148 - m.x484 - m.x487 == 0)
m.c653 = Constraint(expr= m.x470 - 1.18887736200171*m.b656 <= 0)
m.c654 = Constraint(expr= m.x471 - 1.18887736200171*m.b657 <= 0)
m.c655 = Constraint(expr= m.x472 - 1.18887736200171*m.b658 <= 0)
m.c656 = Constraint(expr= m.x473 + 1.18887736200171*m.b656 <= 1.18887736200171)
m.c657 = Constraint(expr= m.x474 + 1.18887736200171*m.b657 <= 1.18887736200171)
m.c658 = Constraint(expr= m.x475 + 1.18887736200171*m.b658 <= 1.18887736200171)
m.c659 = Constraint(expr= m.x482 - 0.78338879230327*m.b656 <= 0)
m.c660 = Constraint(expr= m.x483 - 0.78338879230327*m.b657 <= 0)
m.c661 = Constraint(expr= m.x484 - 0.78338879230327*m.b658 <= 0)
m.c662 = Constraint(expr= m.x485 + 0.78338879230327*m.b656 <= 0.78338879230327)
m.c663 = Constraint(expr= m.x486 + 0.78338879230327*m.b657 <= 0.78338879230327)
m.c664 = Constraint(expr= m.x487 + 0.78338879230327*m.b658 <= 0.78338879230327)
m.c665 = Constraint(expr=(m.x488/(0.001 + 0.999*m.b659) - 1.2*log(1 + m.x476/(0.001 + 0.999*m.b659)))*(0.001 + 0.999*
m.b659) <= 0)
m.c666 = Constraint(expr=(m.x489/(0.001 + 0.999*m.b660) - 1.2*log(1 + m.x477/(0.001 + 0.999*m.b660)))*(0.001 + 0.999*
m.b660) <= 0)
m.c667 = Constraint(expr=(m.x490/(0.001 + 0.999*m.b661) - 1.2*log(1 + m.x478/(0.001 + 0.999*m.b661)))*(0.001 + 0.999*
m.b661) <= 0)
m.c668 = Constraint(expr= m.x479 == 0)
m.c669 = Constraint(expr= m.x480 == 0)
m.c670 = Constraint(expr= m.x481 == 0)
m.c671 = Constraint(expr= m.x491 == 0)
m.c672 = Constraint(expr= m.x492 == 0)
m.c673 = Constraint(expr= m.x493 == 0)
m.c674 = Constraint(expr= m.x143 - m.x476 - m.x479 == 0)
m.c675 = Constraint(expr= m.x144 - m.x477 - m.x480 == 0)
m.c676 = Constraint(expr= m.x145 - m.x478 - m.x481 == 0)
m.c677 = Constraint(expr= m.x149 - m.x488 - m.x491 == 0)
m.c678 = Constraint(expr= m.x150 - m.x489 - m.x492 == 0)
m.c679 = Constraint(expr= m.x151 - m.x490 - m.x493 == 0)
m.c680 = Constraint(expr= m.x476 - 1.18887736200171*m.b659 <= 0)
m.c681 = Constraint(expr= m.x477 - 1.18887736200171*m.b660 <= 0)
m.c682 = Constraint(expr= m.x478 - 1.18887736200171*m.b661 <= 0)
m.c683 = Constraint(expr= m.x479 + 1.18887736200171*m.b659 <= 1.18887736200171)
m.c684 = Constraint(expr= m.x480 + 1.18887736200171*m.b660 <= 1.18887736200171)
m.c685 = Constraint(expr= m.x481 + 1.18887736200171*m.b661 <= 1.18887736200171)
m.c686 = Constraint(expr= m.x488 - 0.940066550763924*m.b659 <= 0)
m.c687 = Constraint(expr= m.x489 - 0.940066550763924*m.b660 <= 0)
m.c688 = Constraint(expr= m.x490 - 0.940066550763924*m.b661 <= 0)
m.c689 = Constraint(expr= m.x491 + 0.940066550763924*m.b659 <= 0.940066550763924)
m.c690 = Constraint(expr= m.x492 + 0.940066550763924*m.b660 <= 0.940066550763924)
m.c691 = Constraint(expr= m.x493 + 0.940066550763924*m.b661 <= 0.940066550763924)
m.c692 = Constraint(expr= - 0.75*m.x494 + m.x518 == 0)
m.c693 = Constraint(expr= - 0.75*m.x495 + m.x519 == 0)
m.c694 = Constraint(expr= - 0.75*m.x496 + m.x520 == 0)
m.c695 = Constraint(expr= m.x497 == 0)
m.c696 = Constraint(expr= m.x498 == 0)
m.c697 = Constraint(expr= m.x499 == 0)
m.c698 = Constraint(expr= m.x521 == 0)
m.c699 = Constraint(expr= m.x522 == 0)
m.c700 = Constraint(expr= m.x523 == 0)
m.c701 = Constraint(expr= m.x161 - m.x494 - m.x497 == 0)
m.c702 = Constraint(expr= m.x162 - m.x495 - m.x498 == 0)
m.c703 = Constraint(expr= m.x163 - m.x496 - m.x499 == 0)
m.c704 = Constraint(expr= m.x173 - m.x518 - m.x521 == 0)
m.c705 = Constraint(expr= m.x174 - m.x519 - m.x522 == 0)
m.c706 = Constraint(expr= m.x175 - m.x520 - m.x523 == 0)
m.c707 = Constraint(expr= m.x494 - 0.940066550763924*m.b662 <= 0)
m.c708 = Constraint(expr= m.x495 - 0.940066550763924*m.b663 <= 0)
m.c709 = Constraint(expr= m.x496 - 0.940066550763924*m.b664 <= 0)
m.c710 = Constraint(expr= m.x497 + 0.940066550763924*m.b662 <= 0.940066550763924)
m.c711 = Constraint(expr= m.x498 + 0.940066550763924*m.b663 <= 0.940066550763924)
m.c712 = Constraint(expr= m.x499 + 0.940066550763924*m.b664 <= 0.940066550763924)
m.c713 = Constraint(expr= m.x518 - 0.705049913072943*m.b662 <= 0)
m.c714 = Constraint(expr= m.x519 - 0.705049913072943*m.b663 <= 0)
m.c715 = Constraint(expr= m.x520 - 0.705049913072943*m.b664 <= 0)
m.c716 = Constraint(expr= m.x521 + 0.705049913072943*m.b662 <= 0.705049913072943)
m.c717 = Constraint(expr= m.x522 + 0.705049913072943*m.b663 <= 0.705049913072943)
m.c718 = Constraint(expr= m.x523 + 0.705049913072943*m.b664 <= 0.705049913072943)
m.c719 = Constraint(expr=(m.x524/(0.001 + 0.999*m.b665) - 1.5*log(1 + m.x500/(0.001 + 0.999*m.b665)))*(0.001 + 0.999*
m.b665) <= 0)
m.c720 = Constraint(expr=(m.x525/(0.001 + 0.999*m.b666) - 1.5*log(1 + m.x501/(0.001 + 0.999*m.b666)))*(0.001 + 0.999*
m.b666) <= 0)
m.c721 = Constraint(expr=(m.x526/(0.001 + 0.999*m.b667) - 1.5*log(1 + m.x502/(0.001 + 0.999*m.b667)))*(0.001 + 0.999*
m.b667) <= 0)
m.c722 = Constraint(expr= m.x503 == 0)
m.c723 = Constraint(expr= m.x504 == 0)
m.c724 = Constraint(expr= m.x505 == 0)
m.c725 = Constraint(expr= m.x530 == 0)
m.c726 = Constraint(expr= m.x531 == 0)
m.c727 = Constraint(expr= m.x532 == 0)
m.c728 = Constraint(expr= m.x164 - m.x500 - m.x503 == 0)
m.c729 = Constraint(expr= m.x165 - m.x501 - m.x504 == 0)
m.c730 = Constraint(expr= m.x166 - m.x502 - m.x505 == 0)
m.c731 = Constraint(expr= m.x176 - m.x524 - m.x530 == 0)
m.c732 = Constraint(expr= m.x177 - m.x525 - m.x531 == 0)
m.c733 = Constraint(expr= m.x178 - m.x526 - m.x532 == 0)
m.c734 = Constraint(expr= m.x500 - 0.940066550763924*m.b665 <= 0)
m.c735 = Constraint(expr= m.x501 - 0.940066550763924*m.b666 <= 0)
m.c736 = Constraint(expr= m.x502 - 0.940066550763924*m.b667 <= 0)
m.c737 = Constraint(expr= m.x503 + 0.940066550763924*m.b665 <= 0.940066550763924)
m.c738 = Constraint(expr= m.x504 + 0.940066550763924*m.b666 <= 0.940066550763924)
m.c739 = Constraint(expr= m.x505 + 0.940066550763924*m.b667 <= 0.940066550763924)
m.c740 = Constraint(expr= m.x524 - 0.994083415506506*m.b665 <= 0)
m.c741 = Constraint(expr= m.x525 - 0.994083415506506*m.b666 <= 0)
m.c742 = Constraint(expr= m.x526 - 0.994083415506506*m.b667 <= 0)
m.c743 = Constraint(expr= m.x530 + 0.994083415506506*m.b665 <= 0.994083415506506)
m.c744 = Constraint(expr= m.x531 + 0.994083415506506*m.b666 <= 0.994083415506506)
m.c745 = Constraint(expr= m.x532 + 0.994083415506506*m.b667 <= 0.994083415506506)
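# Linear two-feed variant: in the next block the outlet (x536...) equals the
# first feed (x506...) and half of the second (0.5*x512...), which locks the
# two feeds into a fixed 1:2 ratio whenever the unit is selected.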
m.c746 = Constraint(expr= - m.x506 + m.x536 == 0)
m.c747 = Constraint(expr= - m.x507 + m.x537 == 0)
m.c748 = Constraint(expr= - m.x508 + m.x538 == 0)
m.c749 = Constraint(expr= - 0.5*m.x512 + m.x536 == 0)
m.c750 = Constraint(expr= - 0.5*m.x513 + m.x537 == 0)
m.c751 = Constraint(expr= - 0.5*m.x514 + m.x538 == 0)
m.c752 = Constraint(expr= m.x509 == 0)
m.c753 = Constraint(expr= m.x510 == 0)
m.c754 = Constraint(expr= m.x511 == 0)
m.c755 = Constraint(expr= m.x515 == 0)
m.c756 = Constraint(expr= m.x516 == 0)
m.c757 = Constraint(expr= m.x517 == 0)
m.c758 = Constraint(expr= m.x539 == 0)
m.c759 = Constraint(expr= m.x540 == 0)
m.c760 = Constraint(expr= m.x541 == 0)
m.c761 = Constraint(expr= m.x167 - m.x506 - m.x509 == 0)
m.c762 = Constraint(expr= m.x168 - m.x507 - m.x510 == 0)
m.c763 = Constraint(expr= m.x169 - m.x508 - m.x511 == 0)
m.c764 = Constraint(expr= m.x170 - m.x512 - m.x515 == 0)
m.c765 = Constraint(expr= m.x171 - m.x513 - m.x516 == 0)
m.c766 = Constraint(expr= m.x172 - m.x514 - m.x517 == 0)
m.c767 = Constraint(expr= m.x179 - m.x536 - m.x539 == 0)
m.c768 = Constraint(expr= m.x180 - m.x537 - m.x540 == 0)
m.c769 = Constraint(expr= m.x181 - m.x538 - m.x541 == 0)
m.c770 = Constraint(expr= m.x506 - 0.940066550763924*m.b668 <= 0)
m.c771 = Constraint(expr= m.x507 - 0.940066550763924*m.b669 <= 0)
m.c772 = Constraint(expr= m.x508 - 0.940066550763924*m.b670 <= 0)
m.c773 = Constraint(expr= m.x509 + 0.940066550763924*m.b668 <= 0.940066550763924)
m.c774 = Constraint(expr= m.x510 + 0.940066550763924*m.b669 <= 0.940066550763924)
m.c775 = Constraint(expr= m.x511 + 0.940066550763924*m.b670 <= 0.940066550763924)
m.c776 = Constraint(expr= m.x512 - 30*m.b668 <= 0)
m.c777 = Constraint(expr= m.x513 - 30*m.b669 <= 0)
m.c778 = Constraint(expr= m.x514 - 30*m.b670 <= 0)
m.c779 = Constraint(expr= m.x515 + 30*m.b668 <= 30)
m.c780 = Constraint(expr= m.x516 + 30*m.b669 <= 30)
m.c781 = Constraint(expr= m.x517 + 30*m.b670 <= 30)
m.c782 = Constraint(expr= m.x536 - 15*m.b668 <= 0)
m.c783 = Constraint(expr= m.x537 - 15*m.b669 <= 0)
m.c784 = Constraint(expr= m.x538 - 15*m.b670 <= 0)
m.c785 = Constraint(expr= m.x539 + 15*m.b668 <= 15)
m.c786 = Constraint(expr= m.x540 + 15*m.b669 <= 15)
m.c787 = Constraint(expr= m.x541 + 15*m.b670 <= 15)
m.c788 = Constraint(expr=(m.x566/(0.001 + 0.999*m.b671) - 1.25*log(1 + m.x542/(0.001 + 0.999*m.b671)))*(0.001 + 0.999*
m.b671) <= 0)
m.c789 = Constraint(expr=(m.x567/(0.001 + 0.999*m.b672) - 1.25*log(1 + m.x543/(0.001 + 0.999*m.b672)))*(0.001 + 0.999*
m.b672) <= 0)
m.c790 = Constraint(expr=(m.x568/(0.001 + 0.999*m.b673) - 1.25*log(1 + m.x544/(0.001 + 0.999*m.b673)))*(0.001 + 0.999*
m.b673) <= 0)
m.c791 = Constraint(expr= m.x545 == 0)
m.c792 = Constraint(expr= m.x546 == 0)
m.c793 = Constraint(expr= m.x547 == 0)
m.c794 = Constraint(expr= m.x569 == 0)
m.c795 = Constraint(expr= m.x570 == 0)
m.c796 = Constraint(expr= m.x571 == 0)
m.c797 = Constraint(expr= m.x182 - m.x542 - m.x545 == 0)
m.c798 = Constraint(expr= m.x183 - m.x543 - m.x546 == 0)
m.c799 = Constraint(expr= m.x184 - m.x544 - m.x547 == 0)
m.c800 = Constraint(expr= m.x197 - m.x566 - m.x569 == 0)
m.c801 = Constraint(expr= m.x198 - m.x567 - m.x570 == 0)
m.c802 = Constraint(expr= m.x199 - m.x568 - m.x571 == 0)
m.c803 = Constraint(expr= m.x542 - 0.705049913072943*m.b671 <= 0)
m.c804 = Constraint(expr= m.x543 - 0.705049913072943*m.b672 <= 0)
m.c805 = Constraint(expr= m.x544 - 0.705049913072943*m.b673 <= 0)
m.c806 = Constraint(expr= m.x545 + 0.705049913072943*m.b671 <= 0.705049913072943)
m.c807 = Constraint(expr= m.x546 + 0.705049913072943*m.b672 <= 0.705049913072943)
m.c808 = Constraint(expr= m.x547 + 0.705049913072943*m.b673 <= 0.705049913072943)
m.c809 = Constraint(expr= m.x566 - 0.666992981045719*m.b671 <= 0)
m.c810 = Constraint(expr= m.x567 - 0.666992981045719*m.b672 <= 0)
m.c811 = Constraint(expr= m.x568 - 0.666992981045719*m.b673 <= 0)
m.c812 = Constraint(expr= m.x569 + 0.666992981045719*m.b671 <= 0.666992981045719)
m.c813 = Constraint(expr= m.x570 + 0.666992981045719*m.b672 <= 0.666992981045719)
m.c814 = Constraint(expr= m.x571 + 0.666992981045719*m.b673 <= 0.666992981045719)
m.c815 = Constraint(expr=(m.x572/(0.001 + 0.999*m.b674) - 0.9*log(1 + m.x548/(0.001 + 0.999*m.b674)))*(0.001 + 0.999*
m.b674) <= 0)
m.c816 = Constraint(expr=(m.x573/(0.001 + 0.999*m.b675) - 0.9*log(1 + m.x549/(0.001 + 0.999*m.b675)))*(0.001 + 0.999*
m.b675) <= 0)
m.c817 = Constraint(expr=(m.x574/(0.001 + 0.999*m.b676) - 0.9*log(1 + m.x550/(0.001 + 0.999*m.b676)))*(0.001 + 0.999*
m.b676) <= 0)
m.c818 = Constraint(expr= m.x551 == 0)
m.c819 = Constraint(expr= m.x552 == 0)
m.c820 = Constraint(expr= m.x553 == 0)
m.c821 = Constraint(expr= m.x575 == 0)
m.c822 = Constraint(expr= m.x576 == 0)
m.c823 = Constraint(expr= m.x577 == 0)
m.c824 = Constraint(expr= m.x185 - m.x548 - m.x551 == 0)
m.c825 = Constraint(expr= m.x186 - m.x549 - m.x552 == 0)
m.c826 = Constraint(expr= m.x187 - m.x550 - m.x553 == 0)
m.c827 = Constraint(expr= m.x200 - m.x572 - m.x575 == 0)
m.c828 = Constraint(expr= m.x201 - m.x573 - m.x576 == 0)
m.c829 = Constraint(expr= m.x202 - m.x574 - m.x577 == 0)
m.c830 = Constraint(expr= m.x548 - 0.705049913072943*m.b674 <= 0)
m.c831 = Constraint(expr= m.x549 - 0.705049913072943*m.b675 <= 0)
m.c832 = Constraint(expr= m.x550 - 0.705049913072943*m.b676 <= 0)
m.c833 = Constraint(expr= m.x551 + 0.705049913072943*m.b674 <= 0.705049913072943)
m.c834 = Constraint(expr= m.x552 + 0.705049913072943*m.b675 <= 0.705049913072943)
m.c835 = Constraint(expr= m.x553 + 0.705049913072943*m.b676 <= 0.705049913072943)
m.c836 = Constraint(expr= m.x572 - 0.480234946352917*m.b674 <= 0)
m.c837 = Constraint(expr= m.x573 - 0.480234946352917*m.b675 <= 0)
m.c838 = Constraint(expr= m.x574 - 0.480234946352917*m.b676 <= 0)
m.c839 = Constraint(expr= m.x575 + 0.480234946352917*m.b674 <= 0.480234946352917)
m.c840 = Constraint(expr= m.x576 + 0.480234946352917*m.b675 <= 0.480234946352917)
m.c841 = Constraint(expr= m.x577 + 0.480234946352917*m.b676 <= 0.480234946352917)
m.c842 = Constraint(expr=(m.x578/(0.001 + 0.999*m.b677) - log(1 + m.x527/(0.001 + 0.999*m.b677)))*(0.001 + 0.999*m.b677)
<= 0)
m.c843 = Constraint(expr=(m.x579/(0.001 + 0.999*m.b678) - log(1 + m.x528/(0.001 + 0.999*m.b678)))*(0.001 + 0.999*m.b678)
<= 0)
m.c844 = Constraint(expr=(m.x580/(0.001 + 0.999*m.b679) - log(1 + m.x529/(0.001 + 0.999*m.b679)))*(0.001 + 0.999*m.b679)
<= 0)
m.c845 = Constraint(expr= m.x533 == 0)
m.c846 = Constraint(expr= m.x534 == 0)
m.c847 = Constraint(expr= m.x535 == 0)
m.c848 = Constraint(expr= m.x581 == 0)
m.c849 = Constraint(expr= m.x582 == 0)
m.c850 = Constraint(expr= m.x583 == 0)
m.c851 = Constraint(expr= m.x176 - m.x527 - m.x533 == 0)
m.c852 = Constraint(expr= m.x177 - m.x528 - m.x534 == 0)
m.c853 = Constraint(expr= m.x178 - m.x529 - m.x535 == 0)
m.c854 = Constraint(expr= m.x203 - m.x578 - m.x581 == 0)
m.c855 = Constraint(expr= m.x204 - m.x579 - m.x582 == 0)
m.c856 = Constraint(expr= m.x205 - m.x580 - m.x583 == 0)
m.c857 = Constraint(expr= m.x527 - 0.994083415506506*m.b677 <= 0)
m.c858 = Constraint(expr= m.x528 - 0.994083415506506*m.b678 <= 0)
m.c859 = Constraint(expr= m.x529 - 0.994083415506506*m.b679 <= 0)
m.c860 = Constraint(expr= m.x533 + 0.994083415506506*m.b677 <= 0.994083415506506)
m.c861 = Constraint(expr= m.x534 + 0.994083415506506*m.b678 <= 0.994083415506506)
m.c862 = Constraint(expr= m.x535 + 0.994083415506506*m.b679 <= 0.994083415506506)
m.c863 = Constraint(expr= m.x578 - 0.690184503917672*m.b677 <= 0)
m.c864 = Constraint(expr= m.x579 - 0.690184503917672*m.b678 <= 0)
m.c865 = Constraint(expr= m.x580 - 0.690184503917672*m.b679 <= 0)
m.c866 = Constraint(expr= m.x581 + 0.690184503917672*m.b677 <= 0.690184503917672)
m.c867 = Constraint(expr= m.x582 + 0.690184503917672*m.b678 <= 0.690184503917672)
m.c868 = Constraint(expr= m.x583 + 0.690184503917672*m.b679 <= 0.690184503917672)
m.c869 = Constraint(expr= - 0.9*m.x554 + m.x584 == 0)
m.c870 = Constraint(expr= - 0.9*m.x555 + m.x585 == 0)
m.c871 = Constraint(expr= - 0.9*m.x556 + m.x586 == 0)
m.c872 = Constraint(expr= m.x557 == 0)
m.c873 = Constraint(expr= m.x558 == 0)
m.c874 = Constraint(expr= m.x559 == 0)
m.c875 = Constraint(expr= m.x587 == 0)
m.c876 = Constraint(expr= m.x588 == 0)
m.c877 = Constraint(expr= m.x589 == 0)
m.c878 = Constraint(expr= m.x188 - m.x554 - m.x557 == 0)
m.c879 = Constraint(expr= m.x189 - m.x555 - m.x558 == 0)
m.c880 = Constraint(expr= m.x190 - m.x556 - m.x559 == 0)
m.c881 = Constraint(expr= m.x206 - m.x584 - m.x587 == 0)
m.c882 = Constraint(expr= m.x207 - m.x585 - m.x588 == 0)
m.c883 = Constraint(expr= m.x208 - m.x586 - m.x589 == 0)
m.c884 = Constraint(expr= m.x554 - 15*m.b680 <= 0)
m.c885 = Constraint(expr= m.x555 - 15*m.b681 <= 0)
m.c886 = Constraint(expr= m.x556 - 15*m.b682 <= 0)
m.c887 = Constraint(expr= m.x557 + 15*m.b680 <= 15)
m.c888 = Constraint(expr= m.x558 + 15*m.b681 <= 15)
m.c889 = Constraint(expr= m.x559 + 15*m.b682 <= 15)
m.c890 = Constraint(expr= m.x584 - 13.5*m.b680 <= 0)
m.c891 = Constraint(expr= m.x585 - 13.5*m.b681 <= 0)
m.c892 = Constraint(expr= m.x586 - 13.5*m.b682 <= 0)
m.c893 = Constraint(expr= m.x587 + 13.5*m.b680 <= 13.5)
m.c894 = Constraint(expr= m.x588 + 13.5*m.b681 <= 13.5)
m.c895 = Constraint(expr= m.x589 + 13.5*m.b682 <= 13.5)
m.c896 = Constraint(expr= - 0.6*m.x560 + m.x590 == 0)
m.c897 = Constraint(expr= - 0.6*m.x561 + m.x591 == 0)
m.c898 = Constraint(expr= - 0.6*m.x562 + m.x592 == 0)
m.c899 = Constraint(expr= m.x563 == 0)
m.c900 = Constraint(expr= m.x564 == 0)
m.c901 = Constraint(expr= m.x565 == 0)
m.c902 = Constraint(expr= m.x593 == 0)
m.c903 = Constraint(expr= m.x594 == 0)
m.c904 = Constraint(expr= m.x595 == 0)
m.c905 = Constraint(expr= m.x191 - m.x560 - m.x563 == 0)
m.c906 = Constraint(expr= m.x192 - m.x561 - m.x564 == 0)
m.c907 = Constraint(expr= m.x193 - m.x562 - m.x565 == 0)
m.c908 = Constraint(expr= m.x209 - m.x590 - m.x593 == 0)
m.c909 = Constraint(expr= m.x210 - m.x591 - m.x594 == 0)
m.c910 = Constraint(expr= m.x211 - m.x592 - m.x595 == 0)
m.c911 = Constraint(expr= m.x560 - 15*m.b683 <= 0)
m.c912 = Constraint(expr= m.x561 - 15*m.b684 <= 0)
m.c913 = Constraint(expr= m.x562 - 15*m.b685 <= 0)
m.c914 = Constraint(expr= m.x563 + 15*m.b683 <= 15)
m.c915 = Constraint(expr= m.x564 + 15*m.b684 <= 15)
m.c916 = Constraint(expr= m.x565 + 15*m.b685 <= 15)
m.c917 = Constraint(expr= m.x590 - 9*m.b683 <= 0)
m.c918 = Constraint(expr= m.x591 - 9*m.b684 <= 0)
m.c919 = Constraint(expr= m.x592 - 9*m.b685 <= 0)
m.c920 = Constraint(expr= m.x593 + 9*m.b683 <= 9)
m.c921 = Constraint(expr= m.x594 + 9*m.b684 <= 9)
m.c922 = Constraint(expr= m.x595 + 9*m.b685 <= 9)
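# Fixed-charge definitions: each of x776..x865 is pinned to minus a cost
# coefficient times its selection binary (x = -c*b). These variables
# presumably carry the investment charges into the objective defined earlier
# in the file; that reading is an assumption based on the sign convention.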
m.c923 = Constraint(expr= 5*m.b686 + m.x776 == 0)
m.c924 = Constraint(expr= 4*m.b687 + m.x777 == 0)
m.c925 = Constraint(expr= 6*m.b688 + m.x778 == 0)
m.c926 = Constraint(expr= 8*m.b689 + m.x779 == 0)
m.c927 = Constraint(expr= 7*m.b690 + m.x780 == 0)
m.c928 = Constraint(expr= 6*m.b691 + m.x781 == 0)
m.c929 = Constraint(expr= 6*m.b692 + m.x782 == 0)
m.c930 = Constraint(expr= 9*m.b693 + m.x783 == 0)
m.c931 = Constraint(expr= 4*m.b694 + m.x784 == 0)
m.c932 = Constraint(expr= 10*m.b695 + m.x785 == 0)
m.c933 = Constraint(expr= 9*m.b696 + m.x786 == 0)
m.c934 = Constraint(expr= 5*m.b697 + m.x787 == 0)
m.c935 = Constraint(expr= 6*m.b698 + m.x788 == 0)
m.c936 = Constraint(expr= 10*m.b699 + m.x789 == 0)
m.c937 = Constraint(expr= 6*m.b700 + m.x790 == 0)
m.c938 = Constraint(expr= 7*m.b701 + m.x791 == 0)
m.c939 = Constraint(expr= 7*m.b702 + m.x792 == 0)
m.c940 = Constraint(expr= 4*m.b703 + m.x793 == 0)
m.c941 = Constraint(expr= 4*m.b704 + m.x794 == 0)
m.c942 = Constraint(expr= 3*m.b705 + m.x795 == 0)
m.c943 = Constraint(expr= 2*m.b706 + m.x796 == 0)
m.c944 = Constraint(expr= 5*m.b707 + m.x797 == 0)
m.c945 = Constraint(expr= 6*m.b708 + m.x798 == 0)
m.c946 = Constraint(expr= 7*m.b709 + m.x799 == 0)
m.c947 = Constraint(expr= 2*m.b710 + m.x800 == 0)
m.c948 = Constraint(expr= 5*m.b711 + m.x801 == 0)
m.c949 = Constraint(expr= 2*m.b712 + m.x802 == 0)
m.c950 = Constraint(expr= 4*m.b713 + m.x803 == 0)
m.c951 = Constraint(expr= 7*m.b714 + m.x804 == 0)
m.c952 = Constraint(expr= 4*m.b715 + m.x805 == 0)
m.c953 = Constraint(expr= 3*m.b716 + m.x806 == 0)
m.c954 = Constraint(expr= 9*m.b717 + m.x807 == 0)
m.c955 = Constraint(expr= 3*m.b718 + m.x808 == 0)
m.c956 = Constraint(expr= 7*m.b719 + m.x809 == 0)
m.c957 = Constraint(expr= 2*m.b720 + m.x810 == 0)
m.c958 = Constraint(expr= 9*m.b721 + m.x811 == 0)
m.c959 = Constraint(expr= 3*m.b722 + m.x812 == 0)
m.c960 = Constraint(expr= m.b723 + m.x813 == 0)
m.c961 = Constraint(expr= 9*m.b724 + m.x814 == 0)
m.c962 = Constraint(expr= 2*m.b725 + m.x815 == 0)
m.c963 = Constraint(expr= 6*m.b726 + m.x816 == 0)
m.c964 = Constraint(expr= 3*m.b727 + m.x817 == 0)
m.c965 = Constraint(expr= 4*m.b728 + m.x818 == 0)
m.c966 = Constraint(expr= 8*m.b729 + m.x819 == 0)
m.c967 = Constraint(expr= m.b730 + m.x820 == 0)
m.c968 = Constraint(expr= 2*m.b731 + m.x821 == 0)
m.c969 = Constraint(expr= 5*m.b732 + m.x822 == 0)
m.c970 = Constraint(expr= 2*m.b733 + m.x823 == 0)
m.c971 = Constraint(expr= 3*m.b734 + m.x824 == 0)
m.c972 = Constraint(expr= 4*m.b735 + m.x825 == 0)
m.c973 = Constraint(expr= 3*m.b736 + m.x826 == 0)
m.c974 = Constraint(expr= 5*m.b737 + m.x827 == 0)
m.c975 = Constraint(expr= 7*m.b738 + m.x828 == 0)
m.c976 = Constraint(expr= 6*m.b739 + m.x829 == 0)
m.c977 = Constraint(expr= 2*m.b740 + m.x830 == 0)
m.c978 = Constraint(expr= 8*m.b741 + m.x831 == 0)
m.c979 = Constraint(expr= 4*m.b742 + m.x832 == 0)
m.c980 = Constraint(expr= m.b743 + m.x833 == 0)
m.c981 = Constraint(expr= 4*m.b744 + m.x834 == 0)
m.c982 = Constraint(expr= m.b745 + m.x835 == 0)
m.c983 = Constraint(expr= 2*m.b746 + m.x836 == 0)
m.c984 = Constraint(expr= 5*m.b747 + m.x837 == 0)
m.c985 = Constraint(expr= 2*m.b748 + m.x838 == 0)
m.c986 = Constraint(expr= 9*m.b749 + m.x839 == 0)
m.c987 = Constraint(expr= 2*m.b750 + m.x840 == 0)
m.c988 = Constraint(expr= 9*m.b751 + m.x841 == 0)
m.c989 = Constraint(expr= 5*m.b752 + m.x842 == 0)
m.c990 = Constraint(expr= 8*m.b753 + m.x843 == 0)
m.c991 = Constraint(expr= 4*m.b754 + m.x844 == 0)
m.c992 = Constraint(expr= 2*m.b755 + m.x845 == 0)
m.c993 = Constraint(expr= 3*m.b756 + m.x846 == 0)
m.c994 = Constraint(expr= 8*m.b757 + m.x847 == 0)
m.c995 = Constraint(expr= 10*m.b758 + m.x848 == 0)
m.c996 = Constraint(expr= 6*m.b759 + m.x849 == 0)
m.c997 = Constraint(expr= 3*m.b760 + m.x850 == 0)
m.c998 = Constraint(expr= 4*m.b761 + m.x851 == 0)
m.c999 = Constraint(expr= 8*m.b762 + m.x852 == 0)
m.c1000 = Constraint(expr= 7*m.b763 + m.x853 == 0)
m.c1001 = Constraint(expr= 7*m.b764 + m.x854 == 0)
m.c1002 = Constraint(expr= 3*m.b765 + m.x855 == 0)
m.c1003 = Constraint(expr= 9*m.b766 + m.x856 == 0)
m.c1004 = Constraint(expr= 4*m.b767 + m.x857 == 0)
m.c1005 = Constraint(expr= 8*m.b768 + m.x858 == 0)
m.c1006 = Constraint(expr= 6*m.b769 + m.x859 == 0)
m.c1007 = Constraint(expr= 2*m.b770 + m.x860 == 0)
m.c1008 = Constraint(expr= m.b771 + m.x861 == 0)
m.c1009 = Constraint(expr= 3*m.b772 + m.x862 == 0)
m.c1010 = Constraint(expr= 8*m.b773 + m.x863 == 0)
m.c1011 = Constraint(expr= 3*m.b774 + m.x864 == 0)
m.c1012 = Constraint(expr= 4*m.b775 + m.x865 == 0)
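# Ordering constraints over each unit's per-period binaries: b_t - b_{t+1} <= 0
# reads b_t <= b_{t+1}, i.e. once a unit's binary switches on it stays on in
# all later periods (a monotone installation profile).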
m.c1013 = Constraint(expr= m.b596 - m.b597 <= 0)
m.c1014 = Constraint(expr= m.b596 - m.b598 <= 0)
m.c1015 = Constraint(expr= m.b597 - m.b598 <= 0)
m.c1016 = Constraint(expr= m.b599 - m.b600 <= 0)
m.c1017 = Constraint(expr= m.b599 - m.b601 <= 0)
m.c1018 = Constraint(expr= m.b600 - m.b601 <= 0)
m.c1019 = Constraint(expr= m.b602 - m.b603 <= 0)
m.c1020 = Constraint(expr= m.b602 - m.b604 <= 0)
m.c1021 = Constraint(expr= m.b603 - m.b604 <= 0)
m.c1022 = Constraint(expr= m.b605 - m.b606 <= 0)
m.c1023 = Constraint(expr= m.b605 - m.b607 <= 0)
m.c1024 = Constraint(expr= m.b606 - m.b607 <= 0)
m.c1025 = Constraint(expr= m.b608 - m.b609 <= 0)
m.c1026 = Constraint(expr= m.b608 - m.b610 <= 0)
m.c1027 = Constraint(expr= m.b609 - m.b610 <= 0)
m.c1028 = Constraint(expr= m.b611 - m.b612 <= 0)
m.c1029 = Constraint(expr= m.b611 - m.b613 <= 0)
m.c1030 = Constraint(expr= m.b612 - m.b613 <= 0)
m.c1031 = Constraint(expr= m.b614 - m.b615 <= 0)
m.c1032 = Constraint(expr= m.b614 - m.b616 <= 0)
m.c1033 = Constraint(expr= m.b615 - m.b616 <= 0)
m.c1034 = Constraint(expr= m.b617 - m.b618 <= 0)
m.c1035 = Constraint(expr= m.b617 - m.b619 <= 0)
m.c1036 = Constraint(expr= m.b618 - m.b619 <= 0)
m.c1037 = Constraint(expr= m.b620 - m.b621 <= 0)
m.c1038 = Constraint(expr= m.b620 - m.b622 <= 0)
m.c1039 = Constraint(expr= m.b621 - m.b622 <= 0)
m.c1040 = Constraint(expr= m.b623 - m.b624 <= 0)
m.c1041 = Constraint(expr= m.b623 - m.b625 <= 0)
m.c1042 = Constraint(expr= m.b624 - m.b625 <= 0)
m.c1043 = Constraint(expr= m.b626 - m.b627 <= 0)
m.c1044 = Constraint(expr= m.b626 - m.b628 <= 0)
m.c1045 = Constraint(expr= m.b627 - m.b628 <= 0)
m.c1046 = Constraint(expr= m.b629 - m.b630 <= 0)
m.c1047 = Constraint(expr= m.b629 - m.b631 <= 0)
m.c1048 = Constraint(expr= m.b630 - m.b631 <= 0)
m.c1049 = Constraint(expr= m.b632 - m.b633 <= 0)
m.c1050 = Constraint(expr= m.b632 - m.b634 <= 0)
m.c1051 = Constraint(expr= m.b633 - m.b634 <= 0)
m.c1052 = Constraint(expr= m.b635 - m.b636 <= 0)
m.c1053 = Constraint(expr= m.b635 - m.b637 <= 0)
m.c1054 = Constraint(expr= m.b636 - m.b637 <= 0)
m.c1055 = Constraint(expr= m.b638 - m.b639 <= 0)
m.c1056 = Constraint(expr= m.b638 - m.b640 <= 0)
m.c1057 = Constraint(expr= m.b639 - m.b640 <= 0)
m.c1058 = Constraint(expr= m.b641 - m.b642 <= 0)
m.c1059 = Constraint(expr= m.b641 - m.b643 <= 0)
m.c1060 = Constraint(expr= m.b642 - m.b643 <= 0)
m.c1061 = Constraint(expr= m.b644 - m.b645 <= 0)
m.c1062 = Constraint(expr= m.b644 - m.b646 <= 0)
m.c1063 = Constraint(expr= m.b645 - m.b646 <= 0)
m.c1064 = Constraint(expr= m.b647 - m.b648 <= 0)
m.c1065 = Constraint(expr= m.b647 - m.b649 <= 0)
m.c1066 = Constraint(expr= m.b648 - m.b649 <= 0)
m.c1067 = Constraint(expr= m.b650 - m.b651 <= 0)
m.c1068 = Constraint(expr= m.b650 - m.b652 <= 0)
m.c1069 = Constraint(expr= m.b651 - m.b652 <= 0)
m.c1070 = Constraint(expr= m.b653 - m.b654 <= 0)
m.c1071 = Constraint(expr= m.b653 - m.b655 <= 0)
m.c1072 = Constraint(expr= m.b654 - m.b655 <= 0)
m.c1073 = Constraint(expr= m.b656 - m.b657 <= 0)
m.c1074 = Constraint(expr= m.b656 - m.b658 <= 0)
m.c1075 = Constraint(expr= m.b657 - m.b658 <= 0)
m.c1076 = Constraint(expr= m.b659 - m.b660 <= 0)
m.c1077 = Constraint(expr= m.b659 - m.b661 <= 0)
m.c1078 = Constraint(expr= m.b660 - m.b661 <= 0)
m.c1079 = Constraint(expr= m.b662 - m.b663 <= 0)
m.c1080 = Constraint(expr= m.b662 - m.b664 <= 0)
m.c1081 = Constraint(expr= m.b663 - m.b664 <= 0)
m.c1082 = Constraint(expr= m.b665 - m.b666 <= 0)
m.c1083 = Constraint(expr= m.b665 - m.b667 <= 0)
m.c1084 = Constraint(expr= m.b666 - m.b667 <= 0)
m.c1085 = Constraint(expr= m.b668 - m.b669 <= 0)
m.c1086 = Constraint(expr= m.b668 - m.b670 <= 0)
m.c1087 = Constraint(expr= m.b669 - m.b670 <= 0)
m.c1088 = Constraint(expr= m.b671 - m.b672 <= 0)
m.c1089 = Constraint(expr= m.b671 - m.b673 <= 0)
m.c1090 = Constraint(expr= m.b672 - m.b673 <= 0)
m.c1091 = Constraint(expr= m.b674 - m.b675 <= 0)
m.c1092 = Constraint(expr= m.b674 - m.b676 <= 0)
m.c1093 = Constraint(expr= m.b675 - m.b676 <= 0)
m.c1094 = Constraint(expr= m.b677 - m.b678 <= 0)
m.c1095 = Constraint(expr= m.b677 - m.b679 <= 0)
m.c1096 = Constraint(expr= m.b678 - m.b679 <= 0)
m.c1097 = Constraint(expr= m.b680 - m.b681 <= 0)
m.c1098 = Constraint(expr= m.b680 - m.b682 <= 0)
m.c1099 = Constraint(expr= m.b681 - m.b682 <= 0)
m.c1100 = Constraint(expr= m.b683 - m.b684 <= 0)
m.c1101 = Constraint(expr= m.b683 - m.b685 <= 0)
m.c1102 = Constraint(expr= m.b684 - m.b685 <= 0)
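# At-most-one selection within each group of three candidate binaries. The
# generator emits every ordered pair, so each unordered pair appears twice
# (e.g. c1103 and c1105 are identical constraints); the duplicates are
# redundant but harmless, and they are kept to preserve the model exactly as
# generated.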
m.c1103 = Constraint(expr= m.b686 + m.b687 <= 1)
m.c1104 = Constraint(expr= m.b686 + m.b688 <= 1)
m.c1105 = Constraint(expr= m.b686 + m.b687 <= 1)
m.c1106 = Constraint(expr= m.b687 + m.b688 <= 1)
m.c1107 = Constraint(expr= m.b686 + m.b688 <= 1)
m.c1108 = Constraint(expr= m.b687 + m.b688 <= 1)
m.c1109 = Constraint(expr= m.b689 + m.b690 <= 1)
m.c1110 = Constraint(expr= m.b689 + m.b691 <= 1)
m.c1111 = Constraint(expr= m.b689 + m.b690 <= 1)
m.c1112 = Constraint(expr= m.b690 + m.b691 <= 1)
m.c1113 = Constraint(expr= m.b689 + m.b691 <= 1)
m.c1114 = Constraint(expr= m.b690 + m.b691 <= 1)
m.c1115 = Constraint(expr= m.b692 + m.b693 <= 1)
m.c1116 = Constraint(expr= m.b692 + m.b694 <= 1)
m.c1117 = Constraint(expr= m.b692 + m.b693 <= 1)
m.c1118 = Constraint(expr= m.b693 + m.b694 <= 1)
m.c1119 = Constraint(expr= m.b692 + m.b694 <= 1)
m.c1120 = Constraint(expr= m.b693 + m.b694 <= 1)
m.c1121 = Constraint(expr= m.b695 + m.b696 <= 1)
m.c1122 = Constraint(expr= m.b695 + m.b697 <= 1)
m.c1123 = Constraint(expr= m.b695 + m.b696 <= 1)
m.c1124 = Constraint(expr= m.b696 + m.b697 <= 1)
m.c1125 = Constraint(expr= m.b695 + m.b697 <= 1)
m.c1126 = Constraint(expr= m.b696 + m.b697 <= 1)
m.c1127 = Constraint(expr= m.b698 + m.b699 <= 1)
m.c1128 = Constraint(expr= m.b698 + m.b700 <= 1)
m.c1129 = Constraint(expr= m.b698 + m.b699 <= 1)
m.c1130 = Constraint(expr= m.b699 + m.b700 <= 1)
m.c1131 = Constraint(expr= m.b698 + m.b700 <= 1)
m.c1132 = Constraint(expr= m.b699 + m.b700 <= 1)
m.c1133 = Constraint(expr= m.b701 + m.b702 <= 1)
m.c1134 = Constraint(expr= m.b701 + m.b703 <= 1)
m.c1135 = Constraint(expr= m.b701 + m.b702 <= 1)
m.c1136 = Constraint(expr= m.b702 + m.b703 <= 1)
m.c1137 = Constraint(expr= m.b701 + m.b703 <= 1)
m.c1138 = Constraint(expr= m.b702 + m.b703 <= 1)
m.c1139 = Constraint(expr= m.b704 + m.b705 <= 1)
m.c1140 = Constraint(expr= m.b704 + m.b706 <= 1)
m.c1141 = Constraint(expr= m.b704 + m.b705 <= 1)
m.c1142 = Constraint(expr= m.b705 + m.b706 <= 1)
m.c1143 = Constraint(expr= m.b704 + m.b706 <= 1)
m.c1144 = Constraint(expr= m.b705 + m.b706 <= 1)
m.c1145 = Constraint(expr= m.b707 + m.b708 <= 1)
m.c1146 = Constraint(expr= m.b707 + m.b709 <= 1)
m.c1147 = Constraint(expr= m.b707 + m.b708 <= 1)
m.c1148 = Constraint(expr= m.b708 + m.b709 <= 1)
m.c1149 = Constraint(expr= m.b707 + m.b709 <= 1)
m.c1150 = Constraint(expr= m.b708 + m.b709 <= 1)
m.c1151 = Constraint(expr= m.b710 + m.b711 <= 1)
m.c1152 = Constraint(expr= m.b710 + m.b712 <= 1)
m.c1153 = Constraint(expr= m.b710 + m.b711 <= 1)
m.c1154 = Constraint(expr= m.b711 + m.b712 <= 1)
m.c1155 = Constraint(expr= m.b710 + m.b712 <= 1)
m.c1156 = Constraint(expr= m.b711 + m.b712 <= 1)
m.c1157 = Constraint(expr= m.b713 + m.b714 <= 1)
m.c1158 = Constraint(expr= m.b713 + m.b715 <= 1)
m.c1159 = Constraint(expr= m.b713 + m.b714 <= 1)
m.c1160 = Constraint(expr= m.b714 + m.b715 <= 1)
m.c1161 = Constraint(expr= m.b713 + m.b715 <= 1)
m.c1162 = Constraint(expr= m.b714 + m.b715 <= 1)
m.c1163 = Constraint(expr= m.b716 + m.b717 <= 1)
m.c1164 = Constraint(expr= m.b716 + m.b718 <= 1)
m.c1165 = Constraint(expr= m.b716 + m.b717 <= 1)
m.c1166 = Constraint(expr= m.b717 + m.b718 <= 1)
m.c1167 = Constraint(expr= m.b716 + m.b718 <= 1)
m.c1168 = Constraint(expr= m.b717 + m.b718 <= 1)
m.c1169 = Constraint(expr= m.b719 + m.b720 <= 1)
m.c1170 = Constraint(expr= m.b719 + m.b721 <= 1)
m.c1171 = Constraint(expr= m.b719 + m.b720 <= 1)
m.c1172 = Constraint(expr= m.b720 + m.b721 <= 1)
m.c1173 = Constraint(expr= m.b719 + m.b721 <= 1)
m.c1174 = Constraint(expr= m.b720 + m.b721 <= 1)
m.c1175 = Constraint(expr= m.b722 + m.b723 <= 1)
m.c1176 = Constraint(expr= m.b722 + m.b724 <= 1)
m.c1177 = Constraint(expr= m.b722 + m.b723 <= 1)
m.c1178 = Constraint(expr= m.b723 + m.b724 <= 1)
m.c1179 = Constraint(expr= m.b722 + m.b724 <= 1)
m.c1180 = Constraint(expr= m.b723 + m.b724 <= 1)
m.c1181 = Constraint(expr= m.b725 + m.b726 <= 1)
m.c1182 = Constraint(expr= m.b725 + m.b727 <= 1)
m.c1183 = Constraint(expr= m.b725 + m.b726 <= 1)
m.c1184 = Constraint(expr= m.b726 + m.b727 <= 1)
m.c1185 = Constraint(expr= m.b725 + m.b727 <= 1)
m.c1186 = Constraint(expr= m.b726 + m.b727 <= 1)
m.c1187 = Constraint(expr= m.b728 + m.b729 <= 1)
m.c1188 = Constraint(expr= m.b728 + m.b730 <= 1)
m.c1189 = Constraint(expr= m.b728 + m.b729 <= 1)
m.c1190 = Constraint(expr= m.b729 + m.b730 <= 1)
m.c1191 = Constraint(expr= m.b728 + m.b730 <= 1)
m.c1192 = Constraint(expr= m.b729 + m.b730 <= 1)
m.c1193 = Constraint(expr= m.b731 + m.b732 <= 1)
m.c1194 = Constraint(expr= m.b731 + m.b733 <= 1)
m.c1195 = Constraint(expr= m.b731 + m.b732 <= 1)
m.c1196 = Constraint(expr= m.b732 + m.b733 <= 1)
m.c1197 = Constraint(expr= m.b731 + m.b733 <= 1)
m.c1198 = Constraint(expr= m.b732 + m.b733 <= 1)
m.c1199 = Constraint(expr= m.b734 + m.b735 <= 1)
m.c1200 = Constraint(expr= m.b734 + m.b736 <= 1)
m.c1201 = Constraint(expr= m.b734 + m.b735 <= 1)
m.c1202 = Constraint(expr= m.b735 + m.b736 <= 1)
m.c1203 = Constraint(expr= m.b734 + m.b736 <= 1)
m.c1204 = Constraint(expr= m.b735 + m.b736 <= 1)
m.c1205 = Constraint(expr= m.b737 + m.b738 <= 1)
m.c1206 = Constraint(expr= m.b737 + m.b739 <= 1)
m.c1207 = Constraint(expr= m.b737 + m.b738 <= 1)
m.c1208 = Constraint(expr= m.b738 + m.b739 <= 1)
m.c1209 = Constraint(expr= m.b737 + m.b739 <= 1)
m.c1210 = Constraint(expr= m.b738 + m.b739 <= 1)
m.c1211 = Constraint(expr= m.b740 + m.b741 <= 1)
m.c1212 = Constraint(expr= m.b740 + m.b742 <= 1)
m.c1213 = Constraint(expr= m.b740 + m.b741 <= 1)
m.c1214 = Constraint(expr= m.b741 + m.b742 <= 1)
m.c1215 = Constraint(expr= m.b740 + m.b742 <= 1)
m.c1216 = Constraint(expr= m.b741 + m.b742 <= 1)
m.c1217 = Constraint(expr= m.b743 + m.b744 <= 1)
m.c1218 = Constraint(expr= m.b743 + m.b745 <= 1)
m.c1219 = Constraint(expr= m.b743 + m.b744 <= 1)
m.c1220 = Constraint(expr= m.b744 + m.b745 <= 1)
m.c1221 = Constraint(expr= m.b743 + m.b745 <= 1)
m.c1222 = Constraint(expr= m.b744 + m.b745 <= 1)
m.c1223 = Constraint(expr= m.b746 + m.b747 <= 1)
m.c1224 = Constraint(expr= m.b746 + m.b748 <= 1)
m.c1225 = Constraint(expr= m.b746 + m.b747 <= 1)
m.c1226 = Constraint(expr= m.b747 + m.b748 <= 1)
m.c1227 = Constraint(expr= m.b746 + m.b748 <= 1)
m.c1228 = Constraint(expr= m.b747 + m.b748 <= 1)
m.c1229 = Constraint(expr= m.b749 + m.b750 <= 1)
m.c1230 = Constraint(expr= m.b749 + m.b751 <= 1)
m.c1231 = Constraint(expr= m.b749 + m.b750 <= 1)
m.c1232 = Constraint(expr= m.b750 + m.b751 <= 1)
m.c1233 = Constraint(expr= m.b749 + m.b751 <= 1)
m.c1234 = Constraint(expr= m.b750 + m.b751 <= 1)
m.c1235 = Constraint(expr= m.b752 + m.b753 <= 1)
m.c1236 = Constraint(expr= m.b752 + m.b754 <= 1)
m.c1237 = Constraint(expr= m.b752 + m.b753 <= 1)
m.c1238 = Constraint(expr= m.b753 + m.b754 <= 1)
m.c1239 = Constraint(expr= m.b752 + m.b754 <= 1)
m.c1240 = Constraint(expr= m.b753 + m.b754 <= 1)
m.c1241 = Constraint(expr= m.b755 + m.b756 <= 1)
m.c1242 = Constraint(expr= m.b755 + m.b757 <= 1)
m.c1243 = Constraint(expr= m.b755 + m.b756 <= 1)
m.c1244 = Constraint(expr= m.b756 + m.b757 <= 1)
m.c1245 = Constraint(expr= m.b755 + m.b757 <= 1)
m.c1246 = Constraint(expr= m.b756 + m.b757 <= 1)
m.c1247 = Constraint(expr= m.b758 + m.b759 <= 1)
m.c1248 = Constraint(expr= m.b758 + m.b760 <= 1)
m.c1249 = Constraint(expr= m.b758 + m.b759 <= 1)
m.c1250 = Constraint(expr= m.b759 + m.b760 <= 1)
m.c1251 = Constraint(expr= m.b758 + m.b760 <= 1)
m.c1252 = Constraint(expr= m.b759 + m.b760 <= 1)
m.c1253 = Constraint(expr= m.b761 + m.b762 <= 1)
m.c1254 = Constraint(expr= m.b761 + m.b763 <= 1)
m.c1255 = Constraint(expr= m.b761 + m.b762 <= 1)
m.c1256 = Constraint(expr= m.b762 + m.b763 <= 1)
m.c1257 = Constraint(expr= m.b761 + m.b763 <= 1)
m.c1258 = Constraint(expr= m.b762 + m.b763 <= 1)
m.c1259 = Constraint(expr= m.b764 + m.b765 <= 1)
m.c1260 = Constraint(expr= m.b764 + m.b766 <= 1)
m.c1261 = Constraint(expr= m.b764 + m.b765 <= 1)
m.c1262 = Constraint(expr= m.b765 + m.b766 <= 1)
m.c1263 = Constraint(expr= m.b764 + m.b766 <= 1)
m.c1264 = Constraint(expr= m.b765 + m.b766 <= 1)
m.c1265 = Constraint(expr= m.b767 + m.b768 <= 1)
m.c1266 = Constraint(expr= m.b767 + m.b769 <= 1)
m.c1267 = Constraint(expr= m.b767 + m.b768 <= 1)
m.c1268 = Constraint(expr= m.b768 + m.b769 <= 1)
m.c1269 = Constraint(expr= m.b767 + m.b769 <= 1)
m.c1270 = Constraint(expr= m.b768 + m.b769 <= 1)
m.c1271 = Constraint(expr= m.b770 + m.b771 <= 1)
m.c1272 = Constraint(expr= m.b770 + m.b772 <= 1)
m.c1273 = Constraint(expr= m.b770 + m.b771 <= 1)
m.c1274 = Constraint(expr= m.b771 + m.b772 <= 1)
m.c1275 = Constraint(expr= m.b770 + m.b772 <= 1)
m.c1276 = Constraint(expr= m.b771 + m.b772 <= 1)
m.c1277 = Constraint(expr= m.b773 + m.b774 <= 1)
m.c1278 = Constraint(expr= m.b773 + m.b775 <= 1)
m.c1279 = Constraint(expr= m.b773 + m.b774 <= 1)
m.c1280 = Constraint(expr= m.b774 + m.b775 <= 1)
m.c1281 = Constraint(expr= m.b773 + m.b775 <= 1)
m.c1282 = Constraint(expr= m.b774 + m.b775 <= 1)
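# The pairwise b_i + b_j <= 1 constraints above make each group of three
# binaries mutually exclusive; the mixed-sign constraints below couple the
# b596..b685 block to the b686..b775 block. Auto-generated model; this
# structure is inferred from the expressions, not from the generator.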
m.c1283 = Constraint(expr= m.b596 - m.b686 <= 0)
m.c1284 = Constraint(expr= - m.b596 + m.b597 - m.b687 <= 0)
m.c1285 = Constraint(expr= - m.b596 - m.b597 + m.b598 - m.b688 <= 0)
m.c1286 = Constraint(expr= m.b599 - m.b689 <= 0)
m.c1287 = Constraint(expr= - m.b599 + m.b600 - m.b690 <= 0)
m.c1288 = Constraint(expr= - m.b599 - m.b600 + m.b601 - m.b691 <= 0)
m.c1289 = Constraint(expr= m.b602 - m.b692 <= 0)
m.c1290 = Constraint(expr= - m.b602 + m.b603 - m.b693 <= 0)
m.c1291 = Constraint(expr= - m.b602 - m.b603 + m.b604 - m.b694 <= 0)
m.c1292 = Constraint(expr= m.b605 - m.b695 <= 0)
m.c1293 = Constraint(expr= - m.b605 + m.b606 - m.b696 <= 0)
m.c1294 = Constraint(expr= - m.b605 - m.b606 + m.b607 - m.b697 <= 0)
m.c1295 = Constraint(expr= m.b608 - m.b698 <= 0)
m.c1296 = Constraint(expr= - m.b608 + m.b609 - m.b699 <= 0)
m.c1297 = Constraint(expr= - m.b608 - m.b609 + m.b610 - m.b700 <= 0)
m.c1298 = Constraint(expr= m.b611 - m.b701 <= 0)
m.c1299 = Constraint(expr= - m.b611 + m.b612 - m.b702 <= 0)
m.c1300 = Constraint(expr= - m.b611 - m.b612 + m.b613 - m.b703 <= 0)
m.c1301 = Constraint(expr= m.b614 - m.b704 <= 0)
m.c1302 = Constraint(expr= - m.b614 + m.b615 - m.b705 <= 0)
m.c1303 = Constraint(expr= - m.b614 - m.b615 + m.b616 - m.b706 <= 0)
m.c1304 = Constraint(expr= m.b617 - m.b707 <= 0)
m.c1305 = Constraint(expr= - m.b617 + m.b618 - m.b708 <= 0)
m.c1306 = Constraint(expr= - m.b617 - m.b618 + m.b619 - m.b709 <= 0)
m.c1307 = Constraint(expr= m.b620 - m.b710 <= 0)
m.c1308 = Constraint(expr= - m.b620 + m.b621 - m.b711 <= 0)
m.c1309 = Constraint(expr= - m.b620 - m.b621 + m.b622 - m.b712 <= 0)
m.c1310 = Constraint(expr= m.b623 - m.b713 <= 0)
m.c1311 = Constraint(expr= - m.b623 + m.b624 - m.b714 <= 0)
m.c1312 = Constraint(expr= - m.b623 - m.b624 + m.b625 - m.b715 <= 0)
m.c1313 = Constraint(expr= m.b626 - m.b716 <= 0)
m.c1314 = Constraint(expr= - m.b626 + m.b627 - m.b717 <= 0)
m.c1315 = Constraint(expr= - m.b626 - m.b627 + m.b628 - m.b718 <= 0)
m.c1316 = Constraint(expr= m.b629 - m.b719 <= 0)
m.c1317 = Constraint(expr= - m.b629 + m.b630 - m.b720 <= 0)
m.c1318 = Constraint(expr= - m.b629 - m.b630 + m.b631 - m.b721 <= 0)
m.c1319 = Constraint(expr= m.b632 - m.b722 <= 0)
m.c1320 = Constraint(expr= - m.b632 + m.b633 - m.b723 <= 0)
m.c1321 = Constraint(expr= - m.b632 - m.b633 + m.b634 - m.b724 <= 0)
m.c1322 = Constraint(expr= m.b635 - m.b725 <= 0)
m.c1323 = Constraint(expr= - m.b635 + m.b636 - m.b726 <= 0)
m.c1324 = Constraint(expr= - m.b635 - m.b636 + m.b637 - m.b727 <= 0)
m.c1325 = Constraint(expr= m.b638 - m.b728 <= 0)
m.c1326 = Constraint(expr= - m.b638 + m.b639 - m.b729 <= 0)
m.c1327 = Constraint(expr= - m.b638 - m.b639 + m.b640 - m.b730 <= 0)
m.c1328 = Constraint(expr= m.b641 - m.b731 <= 0)
m.c1329 = Constraint(expr= - m.b641 + m.b642 - m.b732 <= 0)
m.c1330 = Constraint(expr= - m.b641 - m.b642 + m.b643 - m.b733 <= 0)
m.c1331 = Constraint(expr= m.b644 - m.b734 <= 0)
m.c1332 = Constraint(expr= - m.b644 + m.b645 - m.b735 <= 0)
m.c1333 = Constraint(expr= - m.b644 - m.b645 + m.b646 - m.b736 <= 0)
m.c1334 = Constraint(expr= m.b647 - m.b737 <= 0)
m.c1335 = Constraint(expr= - m.b647 + m.b648 - m.b738 <= 0)
m.c1336 = Constraint(expr= - m.b647 - m.b648 + m.b649 - m.b739 <= 0)
m.c1337 = Constraint(expr= m.b650 - m.b740 <= 0)
m.c1338 = Constraint(expr= - m.b650 + m.b651 - m.b741 <= 0)
m.c1339 = Constraint(expr= - m.b650 - m.b651 + m.b652 - m.b742 <= 0)
m.c1340 = Constraint(expr= m.b653 - m.b743 <= 0)
m.c1341 = Constraint(expr= - m.b653 + m.b654 - m.b744 <= 0)
m.c1342 = Constraint(expr= - m.b653 - m.b654 + m.b655 - m.b745 <= 0)
m.c1343 = Constraint(expr= m.b656 - m.b746 <= 0)
m.c1344 = Constraint(expr= - m.b656 + m.b657 - m.b747 <= 0)
m.c1345 = Constraint(expr= - m.b656 - m.b657 + m.b658 - m.b748 <= 0)
m.c1346 = Constraint(expr= m.b659 - m.b749 <= 0)
m.c1347 = Constraint(expr= - m.b659 + m.b660 - m.b750 <= 0)
m.c1348 = Constraint(expr= - m.b659 - m.b660 + m.b661 - m.b751 <= 0)
m.c1349 = Constraint(expr= m.b662 - m.b752 <= 0)
m.c1350 = Constraint(expr= - m.b662 + m.b663 - m.b753 <= 0)
m.c1351 = Constraint(expr= - m.b662 - m.b663 + m.b664 - m.b754 <= 0)
m.c1352 = Constraint(expr= m.b665 - m.b755 <= 0)
m.c1353 = Constraint(expr= - m.b665 + m.b666 - m.b756 <= 0)
m.c1354 = Constraint(expr= - m.b665 - m.b666 + m.b667 - m.b757 <= 0)
m.c1355 = Constraint(expr= m.b668 - m.b758 <= 0)
m.c1356 = Constraint(expr= - m.b668 + m.b669 - m.b759 <= 0)
m.c1357 = Constraint(expr= - m.b668 - m.b669 + m.b670 - m.b760 <= 0)
m.c1358 = Constraint(expr= m.b671 - m.b761 <= 0)
m.c1359 = Constraint(expr= - m.b671 + m.b672 - m.b762 <= 0)
m.c1360 = Constraint(expr= - m.b671 - m.b672 + m.b673 - m.b763 <= 0)
m.c1361 = Constraint(expr= m.b674 - m.b764 <= 0)
m.c1362 = Constraint(expr= - m.b674 + m.b675 - m.b765 <= 0)
m.c1363 = Constraint(expr= - m.b674 - m.b675 + m.b676 - m.b766 <= 0)
m.c1364 = Constraint(expr= m.b677 - m.b767 <= 0)
m.c1365 = Constraint(expr= - m.b677 + m.b678 - m.b768 <= 0)
m.c1366 = Constraint(expr= - m.b677 - m.b678 + m.b679 - m.b769 <= 0)
m.c1367 = Constraint(expr= m.b680 - m.b770 <= 0)
m.c1368 = Constraint(expr= - m.b680 + m.b681 - m.b771 <= 0)
m.c1369 = Constraint(expr= - m.b680 - m.b681 + m.b682 - m.b772 <= 0)
m.c1370 = Constraint(expr= m.b683 - m.b773 <= 0)
m.c1371 = Constraint(expr= - m.b683 + m.b684 - m.b774 <= 0)
m.c1372 = Constraint(expr= - m.b683 - m.b684 + m.b685 - m.b775 <= 0)
m.c1373 = Constraint(expr= m.b596 + m.b599 == 1)
m.c1374 = Constraint(expr= m.b597 + m.b600 == 1)
m.c1375 = Constraint(expr= m.b598 + m.b601 == 1)
m.c1376 = Constraint(expr= - m.b602 + m.b611 + m.b614 >= 0)
m.c1377 = Constraint(expr= - m.b603 + m.b612 + m.b615 >= 0)
m.c1378 = Constraint(expr= - m.b604 + m.b613 + m.b616 >= 0)
m.c1379 = Constraint(expr= - m.b611 + m.b629 >= 0)
m.c1380 = Constraint(expr= - m.b612 + m.b630 >= 0)
m.c1381 = Constraint(expr= - m.b613 + m.b631 >= 0)
m.c1382 = Constraint(expr= - m.b614 + m.b632 >= 0)
m.c1383 = Constraint(expr= - m.b615 + m.b633 >= 0)
m.c1384 = Constraint(expr= - m.b616 + m.b634 >= 0)
m.c1385 = Constraint(expr= - m.b605 + m.b617 >= 0)
m.c1386 = Constraint(expr= - m.b606 + m.b618 >= 0)
m.c1387 = Constraint(expr= - m.b607 + m.b619 >= 0)
m.c1388 = Constraint(expr= - m.b617 + m.b635 + m.b638 >= 0)
m.c1389 = Constraint(expr= - m.b618 + m.b636 + m.b639 >= 0)
m.c1390 = Constraint(expr= - m.b619 + m.b637 + m.b640 >= 0)
m.c1391 = Constraint(expr= - m.b608 + m.b620 + m.b623 + m.b626 >= 0)
m.c1392 = Constraint(expr= - m.b609 + m.b621 + m.b624 + m.b627 >= 0)
m.c1393 = Constraint(expr= - m.b610 + m.b622 + m.b625 + m.b628 >= 0)
m.c1394 = Constraint(expr= - m.b620 + m.b638 >= 0)
m.c1395 = Constraint(expr= - m.b621 + m.b639 >= 0)
m.c1396 = Constraint(expr= - m.b622 + m.b640 >= 0)
m.c1397 = Constraint(expr= - m.b623 + m.b641 + m.b644 >= 0)
m.c1398 = Constraint(expr= - m.b624 + m.b642 + m.b645 >= 0)
m.c1399 = Constraint(expr= - m.b625 + m.b643 + m.b646 >= 0)
m.c1400 = Constraint(expr= - m.b626 + m.b647 + m.b650 + m.b653 >= 0)
m.c1401 = Constraint(expr= - m.b627 + m.b648 + m.b651 + m.b654 >= 0)
m.c1402 = Constraint(expr= - m.b628 + m.b649 + m.b652 + m.b655 >= 0)
m.c1403 = Constraint(expr= m.b596 + m.b599 - m.b602 >= 0)
m.c1404 = Constraint(expr= m.b597 + m.b600 - m.b603 >= 0)
m.c1405 = Constraint(expr= m.b598 + m.b601 - m.b604 >= 0)
m.c1406 = Constraint(expr= m.b596 + m.b599 - m.b605 >= 0)
m.c1407 = Constraint(expr= m.b597 + m.b600 - m.b606 >= 0)
m.c1408 = Constraint(expr= m.b598 + m.b601 - m.b607 >= 0)
m.c1409 = Constraint(expr= m.b596 + m.b599 - m.b608 >= 0)
m.c1410 = Constraint(expr= m.b597 + m.b600 - m.b609 >= 0)
m.c1411 = Constraint(expr= m.b598 + m.b601 - m.b610 >= 0)
m.c1412 = Constraint(expr= m.b602 - m.b611 >= 0)
m.c1413 = Constraint(expr= m.b603 - m.b612 >= 0)
m.c1414 = Constraint(expr= m.b604 - m.b613 >= 0)
m.c1415 = Constraint(expr= m.b602 - m.b614 >= 0)
m.c1416 = Constraint(expr= m.b603 - m.b615 >= 0)
m.c1417 = Constraint(expr= m.b604 - m.b616 >= 0)
m.c1418 = Constraint(expr= m.b605 - m.b617 >= 0)
m.c1419 = Constraint(expr= m.b606 - m.b618 >= 0)
m.c1420 = Constraint(expr= m.b607 - m.b619 >= 0)
m.c1421 = Constraint(expr= m.b608 - m.b620 >= 0)
m.c1422 = Constraint(expr= m.b609 - m.b621 >= 0)
m.c1423 = Constraint(expr= m.b610 - m.b622 >= 0)
m.c1424 = Constraint(expr= m.b608 - m.b623 >= 0)
m.c1425 = Constraint(expr= m.b609 - m.b624 >= 0)
m.c1426 = Constraint(expr= m.b610 - m.b625 >= 0)
m.c1427 = Constraint(expr= m.b608 - m.b626 >= 0)
m.c1428 = Constraint(expr= m.b609 - m.b627 >= 0)
m.c1429 = Constraint(expr= m.b610 - m.b628 >= 0)
m.c1430 = Constraint(expr= m.b611 - m.b629 >= 0)
m.c1431 = Constraint(expr= m.b612 - m.b630 >= 0)
m.c1432 = Constraint(expr= m.b613 - m.b631 >= 0)
m.c1433 = Constraint(expr= m.b614 - m.b632 >= 0)
m.c1434 = Constraint(expr= m.b615 - m.b633 >= 0)
m.c1435 = Constraint(expr= m.b616 - m.b634 >= 0)
m.c1436 = Constraint(expr= m.b617 - m.b635 >= 0)
m.c1437 = Constraint(expr= m.b618 - m.b636 >= 0)
m.c1438 = Constraint(expr= m.b619 - m.b637 >= 0)
m.c1439 = Constraint(expr= m.b617 - m.b638 >= 0)
m.c1440 = Constraint(expr= m.b618 - m.b639 >= 0)
m.c1441 = Constraint(expr= m.b619 - m.b640 >= 0)
m.c1442 = Constraint(expr= m.b623 - m.b641 >= 0)
m.c1443 = Constraint(expr= m.b624 - m.b642 >= 0)
m.c1444 = Constraint(expr= m.b625 - m.b643 >= 0)
m.c1445 = Constraint(expr= m.b623 - m.b644 >= 0)
m.c1446 = Constraint(expr= m.b624 - m.b645 >= 0)
m.c1447 = Constraint(expr= m.b625 - m.b646 >= 0)
m.c1448 = Constraint(expr= m.b626 - m.b647 >= 0)
m.c1449 = Constraint(expr= m.b627 - m.b648 >= 0)
m.c1450 = Constraint(expr= m.b628 - m.b649 >= 0)
m.c1451 = Constraint(expr= m.b626 - m.b650 >= 0)
m.c1452 = Constraint(expr= m.b627 - m.b651 >= 0)
m.c1453 = Constraint(expr= m.b628 - m.b652 >= 0)
m.c1454 = Constraint(expr= m.b626 - m.b653 >= 0)
m.c1455 = Constraint(expr= m.b627 - m.b654 >= 0)
m.c1456 = Constraint(expr= m.b628 - m.b655 >= 0)
m.c1457 = Constraint(expr= - m.b653 + m.b656 + m.b659 >= 0)
m.c1458 = Constraint(expr= - m.b654 + m.b657 + m.b660 >= 0)
m.c1459 = Constraint(expr= - m.b655 + m.b658 + m.b661 >= 0)
m.c1460 = Constraint(expr= - m.b662 + m.b671 + m.b674 >= 0)
m.c1461 = Constraint(expr= - m.b663 + m.b672 + m.b675 >= 0)
m.c1462 = Constraint(expr= - m.b664 + m.b673 + m.b676 >= 0)
m.c1463 = Constraint(expr= - m.b665 + m.b677 >= 0)
m.c1464 = Constraint(expr= - m.b666 + m.b678 >= 0)
m.c1465 = Constraint(expr= - m.b667 + m.b679 >= 0)
m.c1466 = Constraint(expr= m.b653 - m.b656 >= 0)
m.c1467 = Constraint(expr= m.b654 - m.b657 >= 0)
m.c1468 = Constraint(expr= m.b655 - m.b658 >= 0)
m.c1469 = Constraint(expr= m.b653 - m.b659 >= 0)
m.c1470 = Constraint(expr= m.b654 - m.b660 >= 0)
m.c1471 = Constraint(expr= m.b655 - m.b661 >= 0)
m.c1472 = Constraint(expr= m.b662 - m.b671 >= 0)
m.c1473 = Constraint(expr= m.b663 - m.b672 >= 0)
m.c1474 = Constraint(expr= m.b664 - m.b673 >= 0)
m.c1475 = Constraint(expr= m.b662 - m.b674 >= 0)
m.c1476 = Constraint(expr= m.b663 - m.b675 >= 0)
m.c1477 = Constraint(expr= m.b664 - m.b676 >= 0)
m.c1478 = Constraint(expr= m.b665 - m.b677 >= 0)
m.c1479 = Constraint(expr= m.b666 - m.b678 >= 0)
m.c1480 = Constraint(expr= m.b667 - m.b679 >= 0)
m.c1481 = Constraint(expr= m.b668 - m.b680 >= 0)
m.c1482 = Constraint(expr= m.b669 - m.b681 >= 0)
m.c1483 = Constraint(expr= m.b670 - m.b682 >= 0)
m.c1484 = Constraint(expr= m.b668 - m.b683 >= 0)
m.c1485 = Constraint(expr= m.b669 - m.b684 >= 0)
m.c1486 = Constraint(expr= m.b670 - m.b685 >= 0)
| 1.507813 | 2 |
backend/tests/test_resources.py | sartography/star-drive | 0 | 6449 | <reponame>sartography/star-drive
import unittest
from flask import json
from tests.base_test import BaseTest
from app import db, elastic_index
from app.model.resource import Resource
from app.model.resource_category import ResourceCategory
from app.model.resource_change_log import ResourceChangeLog
from app.model.user import Role
class TestResources(BaseTest, unittest.TestCase):
def test_resource_basics(self):
self.construct_resource()
r = db.session.query(Resource).first()
self.assertIsNotNone(r)
r_id = r.id
rv = self.app.get('/api/resource/%i' % r_id,
follow_redirects=True,
content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(response["id"], r_id)
self.assertEqual(response["title"], 'A+ Resource')
self.assertEqual(response["description"], 'A delightful Resource destined to create rejoicing')
def test_modify_resource_basics(self):
self.construct_resource()
r = db.session.query(Resource).first()
self.assertIsNotNone(r)
r_id = r.id
rv = self.app.get('/api/resource/%i' % r_id, content_type="application/json")
response = json.loads(rv.get_data(as_text=True))
response['title'] = 'Edwarardos Lemonade and Oil Change'
response['description'] = 'Better fluids for you and your car.'
response['website'] = 'http://sartography.com'
orig_date = response['last_updated']
rv = self.app.put('/api/resource/%i' % r_id, data=self.jsonify(response), content_type="application/json",
follow_redirects=True, headers=self.logged_in_headers())
self.assert_success(rv)
rv = self.app.get('/api/resource/%i' % r_id, content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(response['title'], 'Edwarardos Lemonade and Oil Change')
self.assertEqual(response['description'], 'Better fluids for you and your car.')
self.assertEqual(response['website'], 'http://sartography.com')
self.assertNotEqual(orig_date, response['last_updated'])
def test_delete_resource(self):
r = self.construct_resource()
r_id = r.id
rv = self.app.get('api/resource/%i' % r_id, content_type="application/json")
self.assert_success(rv)
rv = self.app.delete('api/resource/%i' % r_id, content_type="application/json", headers=self.logged_in_headers())
self.assert_success(rv)
rv = self.app.get('api/resource/%i' % r_id, content_type="application/json")
self.assertEqual(404, rv.status_code)
def test_delete_resource_with_admin_note_and_no_elastic_record(self):
r = self.construct_resource()
r_id = r.id
rv = self.app.get('api/resource/%i' % r_id, content_type="application/json")
self.assert_success(rv)
self.construct_admin_note(user=self.construct_user(), resource=r)
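        # drop the search-index entry first, so the delete endpoint must cope
        # with a resource whose elastic record is already gone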
elastic_index.remove_document(r, 'Resource')
rv = self.app.delete('api/resource/%i' % r_id, content_type="application/json", headers=self.logged_in_headers())
self.assert_success(rv)
rv = self.app.get('api/resource/%i' % r_id, content_type="application/json")
self.assertEqual(404, rv.status_code)
def test_create_resource(self):
resource = {'title': "Resource of Resources", 'description': "You need this resource in your life.",
'organization_name': "Resource Org"}
rv = self.app.post('api/resource', data=self.jsonify(resource), content_type="application/json",
follow_redirects=True, headers=self.logged_in_headers())
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(response['title'], 'Resource of Resources')
self.assertEqual(response['description'], 'You need this resource in your life.')
self.assertIsNotNone(response['id'])
def test_get_resource_by_category(self):
c = self.construct_category()
r = self.construct_resource()
cr = ResourceCategory(resource=r, category=c, type='resource')
db.session.add(cr)
db.session.commit()
rv = self.app.get(
'/api/category/%i/resource' % c.id,
content_type="application/json",
headers=self.logged_in_headers())
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(1, len(response))
self.assertEqual(r.id, response[0]["resource_id"])
self.assertEqual(r.description, response[0]["resource"]["description"])
def test_get_resource_by_category_includes_category_details(self):
c = self.construct_category(name="c1")
c2 = self.construct_category(name="c2")
r = self.construct_resource()
cr = ResourceCategory(resource=r, category=c, type='resource')
cr2 = ResourceCategory(resource=r, category=c2, type='resource')
db.session.add_all([cr, cr2])
db.session.commit()
rv = self.app.get(
'/api/category/%i/resource' % c.id,
content_type="application/json",
headers=self.logged_in_headers())
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(r.id, response[0]["resource_id"])
self.assertEqual(2,
len(response[0]["resource"]["resource_categories"]))
self.assertEqual(
"c1", response[0]["resource"]["resource_categories"][0]["category"]
["name"])
def test_category_resource_count(self):
c = self.construct_category()
r = self.construct_resource()
cr = ResourceCategory(resource=r, category=c, type='resource')
db.session.add(cr)
db.session.commit()
rv = self.app.get(
'/api/category/%i' % c.id, content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(1, response["resource_count"])
def test_get_category_by_resource(self):
c = self.construct_category()
r = self.construct_resource()
cr = ResourceCategory(resource=r, category=c, type='resource')
db.session.add(cr)
db.session.commit()
rv = self.app.get(
'/api/resource/%i/category' % r.id,
content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(1, len(response))
self.assertEqual(c.id, response[0]["id"])
self.assertEqual(c.name, response[0]["category"]["name"])
def test_add_category_to_resource(self):
c = self.construct_category()
r = self.construct_resource()
rc_data = {"resource_id": r.id, "category_id": c.id}
rv = self.app.post(
'/api/resource_category',
data=self.jsonify(rc_data),
content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(c.id, response["category_id"])
self.assertEqual(r.id, response["resource_id"])
def test_set_all_categories_on_resource(self):
c1 = self.construct_category(name="c1")
c2 = self.construct_category(name="c2")
c3 = self.construct_category(name="c3")
r = self.construct_resource()
rc_data = [
{
"category_id": c1.id
},
{
"category_id": c2.id
},
{
"category_id": c3.id
},
]
rv = self.app.post(
'/api/resource/%i/category' % r.id,
data=self.jsonify(rc_data),
content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(3, len(response))
rc_data = [{"category_id": c1.id}]
rv = self.app.post(
'/api/resource/%i/category' % r.id,
data=self.jsonify(rc_data),
content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(1, len(response))
def test_remove_category_from_resource(self):
self.test_add_category_to_resource()
rv = self.app.delete('/api/resource_category/%i' % 1)
self.assert_success(rv)
rv = self.app.get(
'/api/resource/%i/category' % 1, content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(0, len(response))
def test_resource_change_log_types(self):
u = self.construct_user(email="<EMAIL>", role=Role.admin)
r = {'id': 258, 'title': "A Resource that is Super and Great", 'description': "You need this resource in your life."}
rv = self.app.post('api/resource', data=self.jsonify(r), content_type="application/json",
follow_redirects=True, headers=self.logged_in_headers())
self.assert_success(rv)
logs = ResourceChangeLog.query.all()
self.assertIsNotNone(logs[-1].resource_id)
self.assertIsNotNone(logs[-1].user_id)
self.assertEqual(logs[-1].type, 'create')
rv = self.app.get('api/resource/%i' % r['id'], content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
response['title'] = 'Super Great Resource'
rv = self.app.put('/api/resource/%i' % r['id'], data=self.jsonify(response), content_type="application/json",
follow_redirects=True, headers=self.logged_in_headers(user=u))
self.assert_success(rv)
rv = self.app.get('/api/resource/%i' % r['id'], content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(response['title'], 'Super Great Resource')
logs = ResourceChangeLog.query.all()
self.assertIsNotNone(logs[-1].resource_id)
self.assertIsNotNone(logs[-1].user_id)
self.assertEqual(logs[-1].type, 'edit')
rv = self.app.delete('api/resource/%i' % r['id'], content_type="application/json",
headers=self.logged_in_headers())
self.assert_success(rv)
logs = ResourceChangeLog.query.all()
self.assertIsNotNone(logs[-1].resource_id)
self.assertIsNotNone(logs[-1].user_id)
self.assertEqual(logs[-1].type, 'delete')
def test_get_resource_change_log_by_resource(self):
r = self.construct_resource()
u = self.construct_user(email="<EMAIL>", role=Role.admin)
rv = self.app.get('api/resource/%i' % r.id, content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
response['title'] = 'Super Great Resource'
rv = self.app.put('/api/resource/%i' % r.id, data=self.jsonify(response), content_type="application/json",
follow_redirects=True, headers=self.logged_in_headers(user=u))
self.assert_success(rv)
rv = self.app.get('/api/resource/%i/change_log' % r.id, content_type="application/json", headers=self.logged_in_headers())
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(response[-1]['user_id'], u.id)
def test_get_resource_change_log_by_user(self):
r = self.construct_resource()
u = self.construct_user(email="<EMAIL>", role=Role.admin)
rv = self.app.get('api/resource/%i' % r.id, content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
response['title'] = 'Super Great Resource'
rv = self.app.put('/api/resource/%i' % r.id, data=self.jsonify(response), content_type="application/json",
follow_redirects=True, headers=self.logged_in_headers(user=u))
self.assert_success(rv)
rv = self.app.get('/api/user/%i/resource_change_log' % u.id, content_type="application/json", headers=self.logged_in_headers())
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(response[-1]['resource_id'], r.id)
def test_covid19_resource_lists(self):
self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Free_educational_resources'])
self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Free_educational_resources'])
self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Supports_with_Living'])
self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Visual_Aids'])
self.construct_resource(covid19_categories=['COVID-19_for_Autism', 'Edu-tainment', 'Health_and_Telehealth'])
rv = self.app.get('api/resource/covid19/COVID-19_for_Autism', content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(len(response), 5)
rv = self.app.get('api/resource/covid19/Edu-tainment', content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(len(response), 4)
rv = self.app.get('api/resource/covid19/Free_educational_resources', content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(len(response), 2)
rv = self.app.get('api/resource/covid19/Supports_with_Living', content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(len(response), 1)
rv = self.app.get('api/resource/covid19/Visual_Aids', content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(len(response), 1)
rv = self.app.get('api/resource/covid19/Health_and_Telehealth', content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(len(response), 1)
def test_is_uva_education_content(self):
self.construct_resource(is_draft=True, title='Autism at UVA', is_uva_education_content=True)
self.construct_resource(is_draft=False, title='Healthy Eating', is_uva_education_content=True)
self.construct_resource(is_draft=True, title='Autism and the Arts', is_uva_education_content=False)
self.construct_resource(is_draft=False, title='Autism One', is_uva_education_content=True)
self.construct_resource(is_draft=False, title='Two', is_uva_education_content=False)
rv = self.app.get('api/resource/education', content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(len(response), 2)
rv = self.app.get('api/resource', content_type="application/json")
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(len(response), 5)
| 2.328125 | 2 |
kolibri/core/auth/management/commands/sync.py | reubenjacob/kolibri | 0 | 6450 | <filename>kolibri/core/auth/management/commands/sync.py
import json
import logging
import math
import re
from contextlib import contextmanager
from django.core.management import call_command
from django.core.management.base import CommandError
from morango.models import Filter
from morango.models import InstanceIDModel
from morango.models import ScopeDefinition
from morango.sync.controller import MorangoProfileController
from ..utils import create_superuser_and_provision_device
from ..utils import get_baseurl
from ..utils import get_client_and_server_certs
from ..utils import get_dataset_id
from ..utils import get_single_user_sync_filter
from ..utils import provision_single_user_device
from kolibri.core.auth.constants.morango_sync import PROFILE_FACILITY_DATA
from kolibri.core.auth.constants.morango_sync import ScopeDefinitions
from kolibri.core.auth.constants.morango_sync import State
from kolibri.core.auth.management.utils import get_facility
from kolibri.core.auth.management.utils import run_once
from kolibri.core.auth.models import dataset_cache
from kolibri.core.logger.utils.data import bytes_for_humans
from kolibri.core.tasks.exceptions import UserCancelledError
from kolibri.core.tasks.management.commands.base import AsyncCommand
from kolibri.core.utils.lock import db_lock
from kolibri.utils import conf
DATA_PORTAL_SYNCING_BASE_URL = conf.OPTIONS["Urls"]["DATA_PORTAL_SYNCING_BASE_URL"]
TRANSFER_MESSAGE = "{records_transferred}/{records_total}, {transfer_total}"
logger = logging.getLogger(__name__)
class Command(AsyncCommand):
help = "Allow the syncing of facility data with Kolibri Data Portal or another Kolibri device."
def add_arguments(self, parser):
parser.add_argument(
"--facility", action="store", type=str, help="ID of facility to sync"
)
parser.add_argument(
"--baseurl", type=str, default=DATA_PORTAL_SYNCING_BASE_URL, dest="baseurl"
)
parser.add_argument("--noninteractive", action="store_true")
parser.add_argument(
"--chunk-size",
type=int,
default=500,
help="Chunk size of records to send/retrieve per request",
)
parser.add_argument(
"--no-push", action="store_true", help="Do not push data to the server"
)
parser.add_argument(
"--no-pull", action="store_true", help="Do not pull data from the server"
)
parser.add_argument(
"--username",
type=str,
help="username of superuser or facility admin on server we are syncing with",
)
parser.add_argument(
"--password",
type=str,
help="password of superuser or facility admin on server we are syncing with",
)
parser.add_argument(
"--user",
type=str,
help="for single-user syncing, the user ID of the account to be synced",
)
parser.add_argument(
"--no-provision",
action="store_true",
help="do not create a facility and temporary superuser",
)
# parser.add_argument("--scope-id", type=str, default=FULL_FACILITY)
def handle_async(self, *args, **options): # noqa C901
(
baseurl,
facility_id,
chunk_size,
username,
password,
user_id,
no_push,
no_pull,
noninteractive,
no_provision,
) = (
options["baseurl"],
options["facility"],
options["chunk_size"],
options["username"],
options["password"],
options["user"],
options["no_push"],
options["no_pull"],
options["noninteractive"],
options["no_provision"],
)
PORTAL_SYNC = baseurl == DATA_PORTAL_SYNCING_BASE_URL
# validate url that is passed in
if not PORTAL_SYNC:
baseurl = get_baseurl(baseurl)
# call this in case user directly syncs without migrating database
if not ScopeDefinition.objects.filter():
call_command("loaddata", "scopedefinitions")
dataset_cache.clear()
dataset_cache.activate()
# try to connect to server
controller = MorangoProfileController(PROFILE_FACILITY_DATA)
network_connection = controller.create_network_connection(baseurl)
# if instance_ids are equal, this means device is trying to sync with itself, which we don't allow
if (
InstanceIDModel.get_or_create_current_instance()[0].id
== network_connection.server_info["instance_id"]
):
raise CommandError(
"Device can not sync with itself. Please recheck base URL and try again."
)
if user_id: # it's a single-user sync
if not facility_id:
raise CommandError(
"Facility ID must be specified in order to do single-user syncing"
)
if not re.match("[a-f0-9]{32}", user_id):
raise CommandError("User ID must be a 32-character UUID (no dashes)")
dataset_id = get_dataset_id(
baseurl, identifier=facility_id, noninteractive=True
)
client_cert, server_cert, username = get_client_and_server_certs(
username,
password,
dataset_id,
network_connection,
user_id=user_id,
noninteractive=noninteractive,
)
scopes = [client_cert.scope_definition_id, server_cert.scope_definition_id]
if len(set(scopes)) != 2:
raise CommandError(
"To do a single-user sync, one device must have a single-user certificate, and the other a full-facility certificate."
)
elif PORTAL_SYNC: # do portal sync setup
facility = get_facility(
facility_id=facility_id, noninteractive=noninteractive
)
# check for the certs we own for the specific facility
client_cert = (
facility.dataset.get_owned_certificates()
.filter(scope_definition_id=ScopeDefinitions.FULL_FACILITY)
.first()
)
if not client_cert:
raise CommandError(
"This device does not own a certificate for Facility: {}".format(
facility.name
)
)
# get primary partition
scope_params = json.loads(client_cert.scope_params)
dataset_id = scope_params["dataset_id"]
# check if the server already has a cert for this facility
server_certs = network_connection.get_remote_certificates(
dataset_id, scope_def_id=ScopeDefinitions.FULL_FACILITY
)
# if necessary, push a cert up to the server
server_cert = (
server_certs[0]
if server_certs
else network_connection.push_signed_client_certificate_chain(
local_parent_cert=client_cert,
scope_definition_id=ScopeDefinitions.FULL_FACILITY,
scope_params=scope_params,
)
)
else: # do P2P setup
dataset_id = get_dataset_id(
baseurl, identifier=facility_id, noninteractive=noninteractive
)
client_cert, server_cert, username = get_client_and_server_certs(
username,
password,
dataset_id,
network_connection,
noninteractive=noninteractive,
)
logger.info("Syncing has been initiated (this may take a while)...")
sync_session_client = network_connection.create_sync_session(
client_cert, server_cert, chunk_size=chunk_size
)
try:
# pull from server
if not no_pull:
self._handle_pull(
sync_session_client,
noninteractive,
dataset_id,
client_cert,
server_cert,
user_id=user_id,
)
# and push our own data to server
if not no_push:
self._handle_push(
sync_session_client,
noninteractive,
dataset_id,
client_cert,
server_cert,
user_id=user_id,
)
if not no_provision:
with self._lock():
if user_id:
provision_single_user_device(user_id)
else:
create_superuser_and_provision_device(
username, dataset_id, noninteractive=noninteractive
)
except UserCancelledError:
if self.job:
self.job.extra_metadata.update(sync_state=State.CANCELLED)
self.job.save_meta()
logger.info("Syncing has been cancelled.")
return
network_connection.close()
if self.job:
self.job.extra_metadata.update(sync_state=State.COMPLETED)
self.job.save_meta()
dataset_cache.deactivate()
logger.info("Syncing has been completed.")
@contextmanager
def _lock(self):
cancellable = False
# job can't be cancelled while locked
if self.job:
cancellable = self.job.cancellable
self.job.save_as_cancellable(cancellable=False)
with db_lock():
yield
if self.job:
self.job.save_as_cancellable(cancellable=cancellable)
def _raise_cancel(self, *args, **kwargs):
if self.is_cancelled() and (not self.job or self.job.cancellable):
raise UserCancelledError()
def _handle_pull(
self,
sync_session_client,
noninteractive,
dataset_id,
client_cert,
server_cert,
user_id,
):
"""
:type sync_session_client: morango.sync.syncsession.SyncSessionClient
:type noninteractive: bool
:type dataset_id: str
"""
sync_client = sync_session_client.get_pull_client()
sync_client.signals.queuing.connect(self._raise_cancel)
sync_client.signals.transferring.connect(self._raise_cancel)
self._queueing_tracker_adapter(
sync_client.signals.queuing,
"Remotely preparing data",
State.REMOTE_QUEUING,
noninteractive,
)
self._transfer_tracker_adapter(
sync_client.signals.transferring,
"Receiving data ({})".format(TRANSFER_MESSAGE),
State.PULLING,
noninteractive,
)
self._queueing_tracker_adapter(
sync_client.signals.dequeuing,
"Locally integrating received data",
State.LOCAL_DEQUEUING,
noninteractive,
)
self._session_tracker_adapter(
sync_client.signals.session,
"Creating pull transfer session",
"Completed pull transfer session",
)
if not user_id:
# full-facility sync
sync_client.initialize(Filter(dataset_id))
else:
# single-user sync
client_is_single_user = (
client_cert.scope_definition_id == ScopeDefinitions.SINGLE_USER
)
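            # for the pull direction the filter is read-scoped when this side
            # holds the single-user cert; the push path below flips is_read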
filt = get_single_user_sync_filter(
dataset_id, user_id, is_read=client_is_single_user
)
sync_client.initialize(Filter(filt))
sync_client.run()
with self._lock():
sync_client.finalize()
def _handle_push(
self,
sync_session_client,
noninteractive,
dataset_id,
client_cert,
server_cert,
user_id,
):
"""
:type sync_session_client: morango.sync.syncsession.SyncSessionClient
:type noninteractive: bool
:type dataset_id: str
"""
sync_client = sync_session_client.get_push_client()
sync_client.signals.transferring.connect(self._raise_cancel)
self._queueing_tracker_adapter(
sync_client.signals.queuing,
"Locally preparing data to send",
State.LOCAL_QUEUING,
noninteractive,
)
self._transfer_tracker_adapter(
sync_client.signals.transferring,
"Sending data ({})".format(TRANSFER_MESSAGE),
State.PUSHING,
noninteractive,
)
self._queueing_tracker_adapter(
sync_client.signals.dequeuing,
"Remotely integrating data",
State.REMOTE_DEQUEUING,
noninteractive,
)
self._session_tracker_adapter(
sync_client.signals.session,
"Creating push transfer session",
"Completed push transfer session",
)
with self._lock():
if not user_id:
# full-facility sync
sync_client.initialize(Filter(dataset_id))
else:
# single-user sync
client_is_single_user = (
client_cert.scope_definition_id == ScopeDefinitions.SINGLE_USER
)
filt = get_single_user_sync_filter(
dataset_id, user_id, is_read=not client_is_single_user
)
sync_client.initialize(Filter(filt))
sync_client.run()
# we can't cancel remotely integrating data
if self.job:
self.job.save_as_cancellable(cancellable=False)
# allow server timeout since remotely integrating data can take a while and the request
# could timeout. In that case, we'll assume everything is good.
sync_client.finalize(allow_server_timeout=True)
def _update_all_progress(self, progress_fraction, progress):
"""
Override parent progress update callback to report from the progress tracker we're sent
"""
if self.job:
self.job.update_progress(progress_fraction, 1.0)
self.job.extra_metadata.update(progress.extra_data)
self.job.save_meta()
def _session_tracker_adapter(self, signal_group, started_msg, completed_msg):
"""
Attaches a signal handler to session creation signals
:type signal_group: morango.sync.syncsession.SyncSignalGroup
:type started_msg: str
:type completed_msg: str
"""
@run_once
def session_creation(transfer_session):
"""
A session is created individually for pushing and pulling
"""
logger.info(started_msg)
if self.job:
self.job.extra_metadata.update(sync_state=State.SESSION_CREATION)
@run_once
def session_destruction(transfer_session):
if transfer_session.records_total == 0:
logger.info("There are no records to transfer")
logger.info(completed_msg)
signal_group.started.connect(session_creation)
signal_group.completed.connect(session_destruction)
def _transfer_tracker_adapter(
self, signal_group, message, sync_state, noninteractive
):
"""
Attaches a signal handler to pushing/pulling signals
:type signal_group: morango.sync.syncsession.SyncSignalGroup
:type message: str
:type sync_state: str
:type noninteractive: bool
"""
tracker = self.start_progress(total=100)
def stats_msg(transfer_session):
transfer_total = (
transfer_session.bytes_sent + transfer_session.bytes_received
)
return message.format(
records_transferred=transfer_session.records_transferred,
records_total=transfer_session.records_total,
transfer_total=bytes_for_humans(transfer_total),
)
def stats(transfer_session):
logger.info(stats_msg(transfer_session))
def handler(transfer_session):
"""
:type transfer_session: morango.models.core.TransferSession
"""
progress = (
100
* transfer_session.records_transferred
/ float(transfer_session.records_total)
)
tracker.update_progress(
increment=math.ceil(progress - tracker.progress),
message=stats_msg(transfer_session),
extra_data=dict(
bytes_sent=transfer_session.bytes_sent,
bytes_received=transfer_session.bytes_received,
sync_state=sync_state,
),
)
if noninteractive or tracker.progressbar is None:
signal_group.started.connect(stats)
signal_group.in_progress.connect(stats)
signal_group.connect(handler)
# log one more time at end to capture in logging output
signal_group.completed.connect(stats)
def _queueing_tracker_adapter(
self, signal_group, message, sync_state, noninteractive
):
"""
Attaches a signal handler to queuing/dequeuing signals
:type signal_group: morango.sync.syncsession.SyncSignalGroup
:type message: str
:type sync_state: str
:type noninteractive: bool
"""
tracker = self.start_progress(total=2)
def started(transfer_session):
dataset_cache.clear()
if noninteractive or tracker.progressbar is None:
logger.info(message)
def handler(transfer_session):
tracker.update_progress(
message=message, extra_data=dict(sync_state=sync_state)
)
        signal_group.started.connect(started)
signal_group.started.connect(handler)
signal_group.completed.connect(handler)
| 1.726563 | 2 |
warp.py | RezaFirouzii/fum-delta-vision | 0 | 6451 | import math
import imageio
import cv2 as cv
import numpy as np
import transformer
def fix_rotation(img):
img_copy = img.copy()
img = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
rows, cols = img.shape
img = cv.adaptiveThreshold(img, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY_INV, 15, 9)
kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3))
img = cv.morphologyEx(img, cv.MORPH_OPEN, kernel)
img = cv.medianBlur(img, 3)
contours, hierarchy = cv.findContours(img, cv.RETR_LIST, cv.CHAIN_APPROX_NONE)
roi = max(contours, key=cv.contourArea)
x, y, w, h = cv.boundingRect(roi)
corners = [[x, y], [x + w, y], [x, y + h], [x + w, y + h]]
src = np.float32(corners)
# src = np.reshape(src, (len(src), 1, 2))
# perimeter = cv.arcLength(src, True)
# corners = cv.approxPolyDP(src, perimeter // 10, True)
# corners = np.vstack(corners)
dst = np.float32([[0, 0], [cols, 0], [0, rows], [cols, rows]])
matrix = cv.getPerspectiveTransform(src, dst)
rotated_img = cv.warpPerspective(img_copy, matrix, (cols, rows))
cv.imshow('', rotated_img)
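# radial distances (mm) of the three hole circles on the delta plate,
# matched against math.hypot(X, Y) in the main loop below (assumed calibration)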
D1 = 105
D2 = 175
D3 = 275
if __name__ == "__main__":
cap = cv.VideoCapture('samples/delta.mp4')
if not cap.isOpened():
raise IOError("Video was not opened!")
mse = 0
count = 0
reader = imageio.get_reader('samples/delta.mp4')
fps = reader.get_meta_data()['fps']
writer = imageio.get_writer('samples/result.mp4', fps=fps)
while True:
res, frame = cap.read()
if not res:
break
mean_error = 0
holes_count = 0
img = frame.copy()
cv.imshow('dfa', img)
frame = cv.cvtColor(frame, cv.COLOR_BGR2GRAY)
frame_copy = frame.copy()
# frame = cv.adaptiveThreshold(frame, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY_INV, 15, 9)
# kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3))
# frame = cv.morphologyEx(frame, cv.MORPH_OPEN, kernel)
# frame = cv.medianBlur(frame, 3)
# contours, hierarchy = cv.findContours(frame, cv.RETR_LIST, cv.CHAIN_APPROX_NONE)
# roi = max(contours, key=cv.contourArea)
# x, y, w, h = cv.boundingRect(roi)
x, y, w, h = 115, 0, 445, 360
img = img[y: y+h, x: x+w]
img = transformer.rotate_along_axis(img, theta=40)
frame_copy = frame_copy[y: y+h, x: x+w]
frame_copy = transformer.rotate_along_axis(frame_copy, theta=40)
# cv.imshow('', frame_copy)
# cv.rectangle(frame_copy, (x, y), (x + w, y + h), (0, 255, 0), 2)
# cv.drawContours(frame_copy, roi, -1, (0, 0, 255), 2)
# res, mask = cv.threshold(frame_copy, 0, 255, cv.THRESH_BINARY)
# frame_copy = cv.bitwise_and(frame_copy, frame_copy, mask=mask)
# corners = cv.goodFeaturesToTrack(frame_copy, 1000, 0.0001, 1)
# corners = list(sorted(corners, key=lambda x: x[0][1]))
# print(corners[-1], corners[-2])
# print()
# corners = np.array([[38, 293], [407, 293]])
# for item in corners:
# # x, y = map(int, item.ravel())
# x, y = item
# cv.circle(img, (x, y), 5, (0, 0, 255), -1)
src = np.float32([[0, 0], [w, 0], [38, 293], [407, 293]])
dst = np.float32([[0, 0], [w, 0], [30, h], [w - 30, h]])
matrix = cv.getPerspectiveTransform(src, dst)
img = cv.warpPerspective(img, matrix, (w, h))
cv.imshow('', img)
img_copy = img.copy()
img = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
img = cv.adaptiveThreshold(img, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY_INV, 15, 9)
kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3))
img = cv.morphologyEx(img, cv.MORPH_OPEN, kernel)
img = cv.medianBlur(img, 3)
origin = (w // 2 + 4, h // 2 + 2)
o1, o2 = origin
r = w // 2 + 1
ORIGIN = (0, 0)
R = 300 # mm
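        # pixel -> mm conversion below scales offsets from `origin` by R / r,
        # i.e. the physical plate radius over the image radius in pixels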
contours, hierarchy = cv.findContours(img, cv.RETR_LIST, cv.CHAIN_APPROX_NONE)
contours = list(filter(lambda x: 50 < cv.contourArea(x) < 175, contours))
factor = 0.1
smooth_contours = []
for i in range(len(contours)):
epsilon = factor * cv.arcLength(contours[i], True)
approx = cv.approxPolyDP(contours[i], epsilon, True)
x, y, width, height = cv.boundingRect(approx)
area = width*height
if len(approx) == 4 and 75 < area < 200:
smooth_contours.append(contours[i])
center, radius = cv.minEnclosingCircle(approx)
radius = int(radius)
center = tuple(map(int, center))
x, y = center
X = ((x - o1) * R) / r
Y = ((y - o2) * R) / r
X, Y = round(X, 2), round(Y, 2)
cv.circle(img_copy, center, radius, (0, 255, 0), 2)
cv.putText(img_copy, str((X, Y)), center, cv.FONT_HERSHEY_SIMPLEX, 0.3, (255, 0, 255, 255), 1, cv.LINE_AA)
e1, e2, e3 = map(lambda d: abs(math.hypot(X, Y) - d), [D1, D2, D3])
error = min(e1, e2, e3)
if error < 10:
mean_error += error ** 2
holes_count += 1
cv.circle(img_copy, origin, 4, (0, 0, 255), -1)
# cv.line(img_copy, origin, (origin[0], origin[1]), (255, 0, 255), 2)
        if holes_count:  # guard against frames where no holes were detected
            mean_error /= holes_count
            mse += mean_error
            count += 1
cv.imshow("Final", img_copy)
writer.append_data(img_copy)
# cv.imshow("Chg", img)
if cv.waitKey(30) == 27:
break
print("E:", mse / count, "N:", count)
writer.close()
cap.release()
cv.destroyAllWindows() | 2.40625 | 2 |
sdssobstools/boss_data.py | sdss/ObserverTools | 0 | 6452 | <reponame>sdss/ObserverTools<filename>sdssobstools/boss_data.py
#!/usr/bin/env python3
"""
A tool to grab a single BOSS image and pull a few items from its header. It is
used in bin/sloan_log.py, but it could be used directly as well.
"""
import argparse
from pathlib import Path
from astropy.time import Time
import fitsio
class BOSSRaw:
"""A class to parse raw data from APOGEE. The purpose of collecting this
raw data is to future-proof things that need these ouptuts in case
things like autoschedulers change, which many libraries depend on. This
will hopefully help SDSS-V logging"""
def __init__(self, fil):
self.fil = fil
header = fitsio.read_header(fil)
self.dither = header['MGDPOS']
if not self.dither: # This key started working instead during SDSS-V
self.dither = header['POINTING'][0]
self.exp_time = int(header['EXPTIME'])
self.isot = Time(header['DATE-OBS']) # UTC
self.plate_id = header['PLATEID']
self.cart_id = header['CARTID']
self.exp_id = int(str(fil).split('-')[-1].split('.')[0])
self.lead = header['PLATETYP']
if 'Closed' in header['HARTMANN']:
self.hartmann = 'Closed'
self.flavor = header['FLAVOR'].capitalize()
elif 'Out' in header['HARTMANN']:
self.hartmann = 'Open'
self.flavor = header['FLAVOR'].capitalize()
self.hart_resids = []
else:
self.hartmann = header['HARTMANN']
self.flavor = 'Hart'
# self.seeing = header['SEEING']
# self.img_type = header['IMAGETYP']
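# Minimal usage sketch (hypothetical path; any raw sdR frame carrying the
# headers read above would work):
#   exp = BOSSRaw('/data/spectro/59999/sdR-b1-00123456.fit.gz')
#   print(exp.isot, exp.flavor, exp.hartmann)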
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-t', '--today', action='store_true')
    parser.add_argument('-m', '--mjd',
                        help='If not today (-t), the mjd to search')
    parser.add_argument('-v', '--verbose', action='count', default=1,
                        help='Show details, can be stacked')
    args = parser.parse_args()  # parse only after all arguments are registered
if args.today:
mjd_today = int(Time.now().sjd)
data_dir = '/data/spectro/{}/'.format(mjd_today)
elif args.mjd:
data_dir = '/data/spectro/{}/'.format(args.mjd)
else:
raise Exception('No date specified')
for path in Path(data_dir).rglob('sdR*.fit.gz'):
print(path)
if __name__ == '__main__':
main()
| 2.203125 | 2 |
capitulo-08/ex13b.py | bryan-lima/exercicios-livro-introd-prog-python-3ed | 3 | 6453 | # Modify Program 8.20 so that the user has three chances to guess the number
# The program ends when the user guesses right or misses three times
# Program 8.20 from the book, page 184
# Program 8.20 - Guessing the number
#
# import random
#
# n = random.randint(1, 10)
# x = int(input('Pick a number between 1 and 10: '))
# if x == n:
#     print('You got it!')
# else:
#     print('You missed.')
import random
numberRandom = random.randint(1, 10)
counter = 0
while True:
    chosenNumber = int(input('\nPick a number between 1 and 10: '))
    counter += 1
    if chosenNumber == numberRandom:
        print(f'Congratulations! You got it on attempt {counter} of 3!')
        break
    else:
        print('You missed!')
        if counter < 3:
            print(f'{3 - counter} attempt(s) remaining.')
        else:
            print('Your attempts are over! Better luck next time.')
            print(f'The number drawn was {numberRandom}.')
            break
| 4.09375 | 4 |
slogviz/config.py | mariusfrinken/slogviz | 1 | 6454 | <reponame>mariusfrinken/slogviz
# -*- coding: utf-8 -*-
"""This sub module provides a global variable to check for checking if the non-interactive argument was set
Exported variable:
interactive -- False, if the main the non-interactive argument was set, True, if it was not set
"""
global interactive
interactive = True; | 1.5 | 2 |
setup.py | shb84/ATM76 | 0 | 6455 | import setuptools
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setuptools.setup(
name="atm76",
version="0.1.0",
author="<NAME>",
author_email="<EMAIL>",
description="Differentiable 1976 Atmosphere",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/shb84/ATM76.git",
packages=setuptools.find_packages(),
package_data={},
install_requires=["numpy>=1.16", "genn"],
include_package_data=True,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.7',
)
| 1.640625 | 2 |
agent/check_plugins/download_speed.py | indigos33k3r/god-eye | 1 | 6456 | import logging
import asyncio
from agent.check_plugins import AbstractCheckPlugin
# The original author was unsure how to use the asyncio library here, so the
# synchronous Python requests library is used instead
import requests
import sys
import time
from datetime import datetime
logger = logging.getLogger(__name__)
class Download(AbstractCheckPlugin):
@asyncio.coroutine
def __call__(self, client, dnode):
logger.info('Test download speed : running...')
start = time.clock()
r = requests.get('http://{}'.format(dnode), stream=True)
        total_length = r.headers.get('content-length')
        if total_length is None:
            logger.error("Empty file!")
        else:
            total_length = int(total_length)
            array_speed = []
start_chunk = time.clock()
            for chunk in r.iter_content(1024):  # chunk size: 1 kB = 1024 bytes (1 MB = 1048576)
end_chunk = time.clock()
delta = end_chunk - start_chunk
start_chunk = end_chunk
if delta <= 0:
break
else:
array_speed.append(1//delta) # kB / s
end = time.clock()
yield from self._queue.put(self.get_result(dnode, start, end, total_length, array_speed))
@asyncio.coroutine
def get_result(self, url, start, end, total_length, array_speed):
"""Download and processing data.
Args:
url (str): url file download.
start (float): It's time which started download.
end (float): It's time which finished download.
total_length (int): size of file download (Byte)
array_speed (list): list download speeds for each 1024 Byte (kB/s)
Returns:
list with item 0 : json format for influxdb
"""
download_speed = total_length // (time.clock() - start)
accelerationS = self.acceleration(array_speed)
mean_deviationS = self.mean_deviation(array_speed, download_speed)
logger.info("Test download speed done!")
        # TODO: the "time" field is left out below, to check whether the db records the data at all
return [self.output([self._snode, url, datetime.now(), download_speed, mean_deviationS, accelerationS])]
def acceleration(self, array_speed):
"""Caculate acceleration.
By get the highest speed in the first cycle.
Args:
array_speed (list): list download times for each 1024 Byte
Returns:
acceleration (kB/s) : the deviation between highest speed and first byte speed
"""
if len(array_speed) == 0:
return 0
speed_before = array_speed[0]
for speed in array_speed:
if speed < speed_before:
break
else:
speed_before = speed
return speed_before - array_speed[0]
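    # Worked example (hypothetical speeds, kB/s): [100, 150, 180, 120] ramps up
    # to 180 before the first drop, so acceleration = 180 - 100 = 80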
def mean_deviation(self, array_speed, download_speed):
"""The mean deviation each downloads with download_speed.
Args:
array_speed (list): list download speeds for each kB.
download_speed (kB/s): mean download speed.
Returns:
mean_deviation (kB/s)
"""
if len(array_speed) == 0:
return 0
sum = 0
for speed in array_speed:
sum += abs(speed - download_speed)
return sum//len(array_speed)
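    # Worked example (hypothetical): speeds [100, 200, 300] with mean 200 give
    # deviations 100, 0, 100, so the result is 200 // 3 = 66 kB/s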
def output(self, my_array):
"""Reformat my_array for inserting into influxdb.
Args:
my_array (list): [self._snode, url, str(datetime.now()), download_speed, mean_deviationS, accelerationS]
Returns:
json format for influxdb
"""
return {
"measurement": "download_speed",
"tags": {
"snode": "{}".format(my_array[0]),
"dnode": "{}".format(my_array[1])
},
# "time": "{}".format(my_array[2]),
"fields": {
"speed": my_array[3],
"mean_deviation": my_array[4],
"acceleration": my_array[5]
}
}
| 2.375 | 2 |
Setup Rich Text Editor/mysite/main/urls.py | AyemunHossain/Django | 2 | 6457 | <reponame>AyemunHossain/Django
from django.urls import path
from . import views
app_name = "main"
urlpatterns = [
path("",views.homepage,name="homepage")
] | 1.8125 | 2 |
GA/train.py | jcordell/keras-optimization | 1 | 6458 | <gh_stars>1-10
"""
Utility used by the Network class to actually train.
Based on:
https://github.com/fchollet/keras/blob/master/examples/mnist_mlp.py
"""
from keras.datasets import mnist, cifar10
from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras.utils.np_utils import to_categorical
from keras.callbacks import EarlyStopping
import data_parser
import numpy as np
from keras.optimizers import Adadelta, Adam, rmsprop
from sklearn.metrics import mean_squared_error
# Helper: Early stopping.
early_stopper = EarlyStopping(patience=5)
def get_cifar10():
"""Retrieve the CIFAR dataset and process the data."""
# Set defaults.
nb_classes = 10
batch_size = 64
input_shape = (3072,)
# Get the data.
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
x_train = x_train.reshape(50000, 3072)
x_test = x_test.reshape(10000, 3072)
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
# convert class vectors to binary class matrices
y_train = to_categorical(y_train, nb_classes)
y_test = to_categorical(y_test, nb_classes)
return (nb_classes, batch_size, input_shape, x_train, x_test, y_train, y_test)
def get_mnist():
"""Retrieve the MNIST dataset and process the data."""
# Set defaults.
nb_classes = 10
batch_size = 128
input_shape = (784,)
# Get the data.
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(60000, 784)
x_test = x_test.reshape(10000, 784)
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
# convert class vectors to binary class matrices
y_train = to_categorical(y_train, nb_classes)
y_test = to_categorical(y_test, nb_classes)
return (nb_classes, batch_size, input_shape, x_train, x_test, y_train, y_test)
def get_dbtt():
data = data_parser.parse("DBTT_Data22.csv")
data_lwr = data_parser.parse("CD_LWR_clean8.csv")
X = ["N_log(eff fl p =.05)", "N_log(eff fl p =.4)", "N_log(eff fl p =.5)", "N(Cu)", "N(Ni)", "N(Mn)", "N(P)",
"N(Si)", "N( C )", "N_log(eff fl p =.1)", "N_log(eff fl p =.2)", "N_log(eff fl p =.3)", "N(Temp)"]
Y = "CD delta sigma"
data.set_x_features(X)
data.set_y_feature(Y)
data_lwr.set_y_feature(Y)
data_lwr.set_x_features(X)
data.add_exclusive_filter("Alloy", '=', 29)
data.add_exclusive_filter("Alloy", '=', 8)
data.add_exclusive_filter("Alloy", '=', 1)
data.add_exclusive_filter("Alloy", '=', 2)
data.add_exclusive_filter("Alloy", '=', 14)
data_lwr.add_exclusive_filter("Alloy", '=', 29)
data_lwr.add_exclusive_filter("Alloy", '=', 14)
x_test = np.array(data_lwr.get_x_data())
y_test = np.array(data_lwr.get_y_data())
x_train = np.array(data.get_x_data())
y_train = np.array(data.get_y_data())
#print("Training with", np.shape(y_train)[0], "data points")
nb_classes = -1
batch_size = np.shape(y_train)[0]
input_shape = (13,)
# normalize y columns
y_train = y_train/758.92
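    # 758.92 matches the constant used in train_and_score below to rescale
    # predictions back to the original units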
return (nb_classes, batch_size, input_shape, x_train, x_test, y_train, y_test)
def compile_model(network, nb_classes, input_shape):
"""Compile a sequential model.
Args:
network (dict): the parameters of the network
Returns:
a compiled network.
"""
# Get our network parameters.
nb_layers = network['nb_layers']
nb_neurons = network['nb_neurons']
activation = network['activation']
optimizer = network['optimizer']
learning_rate = network['learning_rate']
model = Sequential()
# Add each layer.
for i in range(nb_layers):
# Need input shape for first layer.
if i == 0:
print(nb_neurons)
model.add(Dense(units=nb_neurons, activation=activation, input_shape=input_shape))
else:
print(nb_neurons)
model.add(Dense(nb_neurons, activation=activation))
model.add(Dropout(0.2)) # hard-coded dropout
# Output layer.
if(nb_classes == -1):
model.add(Dense(1, activation='linear'))
ADAM = Adam(lr=learning_rate)
model.compile(loss='mean_squared_error', metrics=['accuracy'], optimizer=ADAM)
else:
model.add(Dense(nb_classes, activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer=optimizer,
metrics=['accuracy'])
return model
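# Example of the network dict compile_model expects (hypothetical values;
# the keys are exactly the ones read above):
#   network = {'nb_layers': 2, 'nb_neurons': 64, 'activation': 'relu',
#              'optimizer': 'adam', 'learning_rate': 1e-3}
#   model = compile_model(network, nb_classes=-1, input_shape=(13,))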
def train_and_score(network, dataset):
"""Train the model, return test loss.
Args:
network (dict): the parameters of the network
dataset (str): Dataset to use for training/evaluating
"""
if dataset == 'cifar10':
nb_classes, batch_size, input_shape, x_train, \
x_test, y_train, y_test = get_cifar10()
elif dataset == 'mnist':
nb_classes, batch_size, input_shape, x_train, \
x_test, y_train, y_test = get_mnist()
elif dataset == 'dbtt':
nb_classes, batch_size, input_shape, x_train, \
x_test, y_train, y_test = get_dbtt()
model = compile_model(network, nb_classes, input_shape)
if dataset == 'dbtt':
model.fit(x_train, y_train, epochs=10, batch_size=1406, verbose=0)
y_predict = model.predict(x_test) * 758.92 # todo way to not hardcode this?
rms = np.sqrt(mean_squared_error(y_test, y_predict))
print(rms)
return rms
else:
model.fit(x_train, y_train,
batch_size=batch_size,
epochs=10000, # using early stopping, so no real limit
verbose=0,
validation_data=(x_test, y_test),
callbacks=[early_stopper])
score = model.evaluate(x_test, y_test, verbose=0)
return score[1] # 1 is accuracy. 0 is loss.
| 3.234375 | 3 |
tests/integration/agenda/test_models.py | rolandgeider/OpenSlides | 0 | 6459 | <reponame>rolandgeider/OpenSlides
from openslides.agenda.models import Item
from openslides.core.models import CustomSlide
from openslides.utils.test import TestCase
class TestItemManager(TestCase):
def test_get_root_and_children_db_queries(self):
"""
Test that get_root_and_children needs only one db query.
"""
for i in range(10):
CustomSlide.objects.create(title='item{}'.format(i))
with self.assertNumQueries(1):
Item.objects.get_root_and_children()
| 2.21875 | 2 |
ssl_context_builder/http_impl/requests_wrapper/secure_session.py | mbjahnoon/ssl_context_builder | 1 | 6460 | import weakref
import os
import requests
import ssl
from ssl import SSLContext
import logging
from ssl_context_builder.builder.builder import SslContextBuilder
from ssl_context_builder.http_impl.requests_wrapper.ssl_adapter import SslAdapter
class RequestsSecureSession:
def __init__(self, ssl_context: SSLContext):
"""
        This class creates a wrapper around the requests.Session object.
        It does the following:
        1. Disables consumption of session environment variables
        2. Loads the certificates provided with the ssl_context
        3. Accepts an ssl_context to control the TLS communication
@param ssl_context: SSLContext
"""
self.cert_file_path = self._create_cert_file(ssl_context) # see note inside the function why not using tempfile
self._ssl_context = ssl_context
self.session = requests.Session()
self.session.trust_env = False
self.session.verify = self.cert_file_path
self.session.mount('https://', SslAdapter(ssl_context))
self._finalizer = weakref.finalize(
self, self._cleanup, self.cert_file_path, self.session,
warn_message="Implicitly cleaning up {!r}".format(self))
def __enter__(self):
return self
def __exit__(self, exc, value, tb):
self.cleanup()
    def cleanup(self):  # non-throwing: never raises
"""
Delete the cert file and close the session
@return:
"""
if self._finalizer.detach():
try:
os.remove(self.cert_file_path)
except:
logging.warning(f"Couldn't delete certs file {self.cert_file_path}")
try:
self.session.close()
except:
logging.warning("Couldn't close session")
@staticmethod
def _cleanup(name, session, warn_message):
try:
os.remove(name)
except:
logging.warning(f"Couldn't delete certs file {name}")
try:
session.close()
except:
logging.warning("Couldn't close session")
logging.warning(warn_message)
@classmethod
def _create_cert_file(cls, ssl_context: SSLContext):
"""
        This creates a CA bundle file extracted from the ssl_context.
        The reason we create a real file and delete it ourselves is that the file is opened later on
        in the requests flow, so it has to be closed before it is used; a tempfile
        would be destroyed as soon as it was closed.
@param ssl_context: ssl_context
@return: path to the created ca_bundle file
"""
path = "certs.pem"
if os.path.exists(path):
path = cls._generate_cert_file_path("certs")
with open(path, mode="a+") as certs_file:
certs = ""
for der in ssl_context.get_ca_certs(True):
certs += f"{ssl.DER_cert_to_PEM_cert(der)}\n"
certs_file.write(certs)
return path
@classmethod
def _generate_cert_file_path(cls, file_name: str, num=1):
file_name_candidate = f"{file_name}({num}).pem"
if os.path.exists(file_name_candidate):
return cls._generate_cert_file_path(file_name, num + 1)
return file_name_candidate
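# Minimal usage sketch. The context would normally come from SslContextBuilder;
# ssl.create_default_context() stands in here purely for illustration:
#
#   ctx = ssl.create_default_context()
#   with RequestsSecureSession(ctx) as secure:
#       resp = secure.session.get("https://example.com")
#   # on exit the temporary CA bundle file is removed and the session is closed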
| 2.484375 | 2 |
tiny_scripts/select_cifar_10.py | jiaqiangwjq/python_workhouse | 0 | 6461 | <reponame>jiaqiangwjq/python_workhouse
'''
Select labels from the CIFAR-10 test batch and write them to a .csv file with the format:
class_index,data_index
3,0
8,1
8,2
...
'''
import pickle
import pandas as pd
file = r'E:\pycharm\LEARN\data\cifar-10\cifar-10-batches-py\test_batch'
with open(file, 'rb') as f:
    batch = pickle.load(f, encoding='bytes')  # keys are bytes, e.g. b'labels'
batch_label = batch[b'batch_label']
labels = batch[b'labels']
data = batch[b'data']
filenames = batch[b'filenames']
length = len(labels)
data_index = [i for i in range(length)]
class_index = labels
csv_dict = {'class_index': class_index, 'data_index': data_index}
df = pd.DataFrame(csv_dict)
df.to_csv('selected_cifar10.csv', index=False) | 3.125 | 3 |
codebox/scripts/fixture.py | disqus/codebox | 5 | 6462 | # Ghetto Fixtures
from codebox import app
from codebox.apps.auth.models import User
from codebox.apps.snippets.models import Snippet
from codebox.apps.organizations.models import Organization, OrganizationMember
from flask import g
client = app.test_client()
_ctx = app.test_request_context()
_ctx.push()
app.preprocess_request()
g.redis.flushdb()
User.objects.create(pk=1, name='zeeg')
Organization.objects.create(pk='disqus', name='DISQUS')
OrganizationMember.objects.create(org='disqus', user=1)
# Create sample snippets
# plaintext
Snippet.objects.create(org='disqus', user=1, lang='text', text = "Hello World!")
# python
Snippet.objects.create(org='disqus', user=1, lang='python', text = "print 'Disqus was here'")
# html
Snippet.objects.create(org='disqus', user=1, lang='html', text = '<h1>Look its HTML!</h1>')
# javascript
Snippet.objects.create(org='disqus', user=1, lang='javascript', text = "document.write('Di-squs')")
| 2.234375 | 2 |
corehq/apps/linked_domain/tests/test_views.py | akashkj/commcare-hq | 0 | 6463 | <filename>corehq/apps/linked_domain/tests/test_views.py
from unittest.mock import Mock, patch
from django.test import SimpleTestCase
from corehq.apps.domain.exceptions import DomainDoesNotExist
from corehq.apps.linked_domain.exceptions import (
DomainLinkAlreadyExists,
DomainLinkError,
DomainLinkNotAllowed,
)
from corehq.apps.linked_domain.views import link_domains
class LinkDomainsTests(SimpleTestCase):
@classmethod
def setUpClass(cls):
super(LinkDomainsTests, cls).setUpClass()
cls.upstream_domain = 'upstream'
cls.downstream_domain = 'downstream'
def test_exception_raised_if_domain_does_not_exist(self):
def mock_handler(domain):
return domain != self.downstream_domain
with patch('corehq.apps.linked_domain.views.domain_exists') as mock_domainexists,\
self.assertRaises(DomainDoesNotExist):
mock_domainexists.side_effect = mock_handler
link_domains(Mock(), self.upstream_domain, self.downstream_domain)
def test_exception_raised_if_domain_link_already_exists(self):
with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\
patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=Mock()),\
self.assertRaises(DomainLinkAlreadyExists):
link_domains(Mock(), self.upstream_domain, self.downstream_domain)
def test_exception_raised_if_domain_link_error_raised(self):
def mock_handler(downstream, upstream):
raise DomainLinkError
with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\
patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\
patch('corehq.apps.linked_domain.views.DomainLink.link_domains') as mock_linkdomains,\
self.assertRaises(DomainLinkError):
mock_linkdomains.side_effect = mock_handler
link_domains(Mock(), self.upstream_domain, self.downstream_domain)
def test_exception_raised_if_user_is_not_admin_in_both_domains(self):
with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\
patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\
patch('corehq.apps.linked_domain.views.user_has_admin_access_in_all_domains', return_value=False),\
self.assertRaises(DomainLinkNotAllowed):
link_domains(Mock(), self.upstream_domain, self.downstream_domain)
def test_successful(self):
with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\
patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\
patch('corehq.apps.linked_domain.views.DomainLink.link_domains', return_value=True),\
patch('corehq.apps.linked_domain.views.user_has_admin_access_in_all_domains', return_value=True):
domain_link = link_domains(Mock(), self.upstream_domain, self.downstream_domain)
self.assertIsNotNone(domain_link)
| 2.328125 | 2 |
LanguageBasics/functions/import_eg.py | Vamsi-TM/jubilant-train | 0 | 6464 | import function_exercise_01 as st
st.sandwich_toppings('meatballs', 'salad')
| 1.0625 | 1 |
pyingest/parsers/zenodo.py | golnazads/adsabs-pyingest | 1 | 6465 | <reponame>golnazads/adsabs-pyingest
#!/usr/bin/python
#
#
from __future__ import absolute_import
import json
import re
import logging
from .datacite import DataCiteParser
class WrongPublisherException(Exception):
pass
class ZenodoParser(DataCiteParser):
def get_references(self, r):
# as of version 3.1 of datacite schema, "References" is not an
# allowed description type so Lars is shoving the references
# in a section labeled as "Other" as a json structure
references = []
for s in self._array(r.get('descriptions', {}).get('description', [])):
t = s.get('@descriptionType')
c = self._text(s)
if t == 'References':
# XXX not supported yet, but one can only hope...
references = c.split('\n')
elif t == 'Other':
try:
j = json.loads(c)
references = j.get('references', [])
except ValueError:
logging.warning(u'Ignoring unparsable "Other" description element: %s\n' % c)
return references
def get_abstract(self, r):
abs = super(ZenodoParser, self).get_abstract(r)
abs = re.sub(r'\s*<p>', '', abs)
abs = re.sub(r'</p>\s*$', '', abs)
return abs
def parse(self, fp, **kwargs):
"""Parses Zenodo's flavor of DataCite 3.1 schema, returns ADS tagged format"""
        doc = super(ZenodoParser, self).parse(fp, **kwargs)
        # r = self._resource
# publisher
pub = doc.get('source')
if pub != 'Zenodo' and pub != 'ZENODO':
raise WrongPublisherException("Found publisher field of \"%s\" rather than Zenodo" % pub)
else:
doc['source'] = 'ZENODO'
return doc
#
# if __name__ == "__main__":
#
# # allows program to print utf-8 encoded output sensibly
# import codecs
# sys.stdout = codecs.getwriter('utf-8')(sys.stdout)
# sys.stderr = codecs.getwriter('utf-8')(sys.stderr)
#
# parser = ZenodoParser()
# for file in sys.argv[1:]:
# d = None
# with open(file, 'r') as fp:
# d = parser.parse(fp)
# print json.dumps(d, indent=2)
| 1.953125 | 2 |
src/fullnode.py | AmeyaDaddikar/vjtichain | 1 | 6466 | import json
import time
from functools import lru_cache
from multiprocessing import Pool, Process
from threading import Thread, Timer
from typing import Any, Dict, List
from datetime import datetime
import hashlib
import inspect
import requests
import waitress
from bottle import BaseTemplate, Bottle, request, response, static_file, template, error
import utils.constants as consts
from core import Block, BlockChain, SingleOutput, Transaction, TxIn, TxOut, genesis_block
from authority import Authority
from utils.logger import logger, iplogger
from utils.storage import get_block_from_db, get_wallet_from_db, read_header_list_from_db
from utils.utils import compress, decompress, dhash
from wallet import Wallet
app = Bottle()
BaseTemplate.defaults["get_url"] = app.get_url
LINE_PROFILING = False
BLOCKCHAIN = BlockChain()
PEER_LIST: List[Dict[str, Any]] = []
MY_WALLET = Wallet()
miner = Authority()
def mining_thread_task():
while True:
if not miner.is_mining() and not consts.NO_MINING:
miner.start_mining(BLOCKCHAIN.mempool, BLOCKCHAIN.active_chain, MY_WALLET)
time.sleep(consts.MINING_INTERVAL_THRESHOLD // 2)
def send_to_all_peers(url, data):
def request_task(peers, url, data):
for peer in peers:
try:
requests.post(get_peer_url(peer) + url, data=data, timeout=(5, 1))
except Exception as e:
logger.debug("Server: Requests: Error while sending data in process" + str(peer))
Process(target=request_task, args=(PEER_LIST, url, data), daemon=True).start()
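# Design note: the broadcast runs in a separate daemon Process so one slow or
# unreachable peer cannot block the request handler; timeout=(5, 1) bounds the
# connect and read time spent on each peer.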
def start_mining_thread():
time.sleep(5)
Thread(target=mining_thread_task, name="Miner", daemon=True).start()
def fetch_peer_list() -> List[Dict[str, Any]]:
try:
r = requests.post(consts.SEED_SERVER_URL, data={"port": consts.MINER_SERVER_PORT})
peer_list = json.loads(r.text)
return peer_list
except Exception as e:
logger.error("Could not connect to DNS Seed")
return []
def get_peer_url(peer: Dict[str, Any]) -> str:
return "http://" + str(peer["ip"]) + ":" + str(peer["port"])
def greet_peer(peer: Dict[str, Any]) -> bool:
try:
url = get_peer_url(peer)
data = {"port": consts.MINER_SERVER_PORT, "version": consts.MINER_VERSION, "blockheight": BLOCKCHAIN.active_chain.length}
# Send a POST request to the peer
r = requests.post(url + "/greetpeer", data=data)
data = json.loads(r.text)
# Update the peer data in the peer list with the new data received from the peer.
if data.get("blockheight", None):
peer.update(data)
else:
logger.debug("Main: Peer data does not have Block Height")
return False
return True
except Exception as e:
logger.debug("Main: Could not greet peer" + str(e))
return False
def receive_block_from_peer(peer: Dict[str, Any], header_hash) -> Block:
r = requests.post(get_peer_url(peer) + "/getblock", data={"headerhash": header_hash})
return Block.from_json(decompress(r.text)).object()
def check_block_with_peer(peer, hhash):
r = requests.post(get_peer_url(peer) + "/checkblock", data={"headerhash": hhash})
result = json.loads(r.text)
if result:
return True
return False
def get_block_header_hash(height):
return dhash(BLOCKCHAIN.active_chain.header_list[height])
def sync(max_peer):
fork_height = BLOCKCHAIN.active_chain.length
r = requests.post(get_peer_url(max_peer) + "/getblockhashes", data={"myheight": fork_height})
hash_list = json.loads(decompress(r.text.encode()))
for hhash in hash_list:
block = receive_block_from_peer(max_peer, hhash)
if not BLOCKCHAIN.add_block(block):
logger.error("Sync: Block received is invalid, Cannot Sync")
break
return
# Periodically sync with all the peers
def sync_with_peers():
    global PEER_LIST  # rebound below, so the global declaration is required
    try:
        PEER_LIST = fetch_peer_list()
new_peer_list = []
for peer in PEER_LIST:
if greet_peer(peer):
new_peer_list.append(peer)
PEER_LIST = new_peer_list
if PEER_LIST:
max_peer = max(PEER_LIST, key=lambda k: k["blockheight"])
logger.debug(f"Sync: Syncing with {get_peer_url(max_peer)}, he seems to have height {max_peer['blockheight']}")
sync(max_peer)
except Exception as e:
logger.error("Sync: Error: " + str(e))
Timer(consts.MINING_INTERVAL_THRESHOLD * 2, sync_with_peers).start()
def check_balance(pub_key: str) -> int:
current_balance = 0
for x, utxo_list in BLOCKCHAIN.active_chain.utxo.utxo.items():
tx_out = utxo_list[0]
if tx_out.address == pub_key:
current_balance += int(tx_out.amount)
return int(current_balance)
def send_bounty(receiver_public_keys: List[str], amounts: List[int]):
current_balance = check_balance(MY_WALLET.public_key)
for key in receiver_public_keys:
if len(key) < consts.PUBLIC_KEY_LENGTH:
logger.debug("Invalid Public Key Length")
return False
total_amount = sum(amounts)
if current_balance < total_amount:
logger.debug("Insuficient balance")
elif MY_WALLET.public_key in receiver_public_keys:
logger.debug("Cannot send to myself")
else:
transaction = create_transaction(receiver_public_keys, amounts, MY_WALLET.public_key, message="Authority: Faucet Money")
transaction.sign(MY_WALLET)
logger.info("Wallet: Attempting to Send Transaction")
try:
r = requests.post(
"http://0.0.0.0:" + str(consts.MINER_SERVER_PORT) + "/newtransaction",
data=compress(transaction.to_json()),
timeout=(5, 1),
)
if r.status_code == 400:
logger.info("Wallet: Could not Send Transaction. Invalid Transaction")
else:
logger.info("Wallet: Transaction Sent, Wait for it to be Mined")
return True
except Exception as e:
logger.error("Wallet: Could not Send Transaction. Try Again." + str(e))
return False
def create_transaction(receiver_public_keys: List[str], amounts: List[int], sender_public_key, message="") -> Transaction:
vout = {}
vin = {}
current_amount = 0
total_amount = sum(amounts)
i = 0
for so, utxo_list in BLOCKCHAIN.active_chain.utxo.utxo.items():
tx_out = utxo_list[0]
if current_amount >= total_amount:
break
if tx_out.address == sender_public_key:
current_amount += tx_out.amount
vin[i] = TxIn(payout=SingleOutput.from_json(so), pub_key=sender_public_key, sig="")
i += 1
for i, address in enumerate(receiver_public_keys):
vout[i] = TxOut(amount=amounts[i], address=address)
change = (current_amount - total_amount)
if change > 0:
vout[i + 1] = TxOut(amount=change, address=sender_public_key)
tx = Transaction(version=consts.MINER_VERSION, locktime=0, timestamp=int(time.time()), vin=vin, vout=vout, message=message)
return tx
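# Worked example (illustrative): if the sender owns 150 coins of UTXOs and a
# single receiver amount of 100 is requested, the transaction carries one
# TxOut of 100 to the receiver plus a change TxOut of 50 back to the sender.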
def get_ip(request):
return request.environ.get("HTTP_X_FORWARDED_FOR") or request.environ.get("REMOTE_ADDR")
def log_ip(request, fname):
client_ip = get_ip(request)
iplogger.info(f"{client_ip} : Called function {fname}")
@app.post("/checkBalance")
def checkingbalance():
log_ip(request, inspect.stack()[0][3])
data = request.json
public_key = data["public_key"]
logger.debug(public_key)
current_balance = check_balance(public_key)
return str(current_balance)
@app.post("/makeTransaction")
def make_transaction():
log_ip(request, inspect.stack()[0][3])
data = request.json
bounty = int(data["bounty"])
receiver_public_key = data["receiver_public_key"]
sender_public_key = data["sender_public_key"]
message = "No Message"
if "message" in data:
message = data["message"]
if len(receiver_public_key) < consts.PUBLIC_KEY_LENGTH:
logger.debug("Invalid Receiver Public Key")
response.status = 400
return "Invalid Receiver Public Key"
current_balance = check_balance(sender_public_key)
if current_balance < bounty:
logger.debug("Insufficient Balance to make Transaction")
response.status = 400
return "Insufficient Balance to make Transaction, need more " + str(bounty - current_balance)
elif sender_public_key == receiver_public_key:
logger.debug("Someone trying to send money to himself")
response.status = 400
return "Cannot send money to youself"
else:
transaction = create_transaction([receiver_public_key], [bounty], sender_public_key, message=message)
data = {}
data["send_this"] = transaction.to_json()
transaction.vin = {}
data["sign_this"] = transaction.to_json()
return json.dumps(data)
@app.post("/sendTransaction")
def send_transaction():
log_ip(request, inspect.stack()[0][3])
data = request.json
transaction = Transaction.from_json(data["transaction"]).object()
sig = data["signature"]
transaction.add_sign(sig)
logger.debug(transaction)
logger.info("Wallet: Attempting to Send Transaction")
try:
r = requests.post(
"http://0.0.0.0:" + str(consts.MINER_SERVER_PORT) + "/newtransaction",
data=compress(transaction.to_json()),
timeout=(5, 1),
)
if r.status_code == 400:
response.status = 400
logger.error("Wallet: Could not Send Transaction. Invalid transaction")
return "Try Again"
except Exception as e:
response.status = 400
logger.error("Wallet: Could not Send Transaction. Try Again." + str(e))
return "Try Again"
else:
logger.info("Wallet: Transaction Sent, Wait for it to be Mined")
return "Done"
@app.post("/transactionHistory")
def transaction_history():
log_ip(request, inspect.stack()[0][3])
data = request.json
public_key = data["public_key"]
tx_hist = BLOCKCHAIN.active_chain.transaction_history.get(public_key)
return json.dumps(tx_hist)
@app.post("/greetpeer")
def greet_peer_f():
log_ip(request, inspect.stack()[0][3])
try:
peer = {}
peer["port"] = request.forms.get("port")
peer["ip"] = request.remote_addr
peer["time"] = time.time()
peer["version"] = request.forms.get("version")
peer["blockheight"] = request.forms.get("blockheight")
ADD_ENTRY = True
for entry in PEER_LIST:
ip = entry["ip"]
port = entry["port"]
if ip == peer["ip"] and port == peer["port"]:
ADD_ENTRY = False
if ADD_ENTRY:
PEER_LIST.append(peer)
logger.debug("Server: Greet, A new peer joined, Adding to List")
except Exception as e:
logger.debug("Server: Greet Error: " + str(e))
pass
data = {"version": consts.MINER_VERSION, "blockheight": BLOCKCHAIN.active_chain.length}
response.content_type = "application/json"
return json.dumps(data)
@lru_cache(maxsize=128)
def cached_get_block(headerhash: str) -> str:
if headerhash:
db_block = get_block_from_db(headerhash)
if db_block:
return compress(db_block)
else:
logger.error("ERROR CALLED GETBLOCK FOR NON EXISTENT BLOCK")
return "Invalid Hash"
@app.post("/getblock")
def getblock():
log_ip(request, inspect.stack()[0][3])
hhash = request.forms.get("headerhash")
return cached_get_block(hhash)
@app.post("/checkblock")
def checkblock():
log_ip(request, inspect.stack()[0][3])
headerhash = request.forms.get("headerhash")
if get_block_from_db(headerhash):
return json.dumps(True)
return json.dumps(False)
@app.post("/getblockhashes")
def send_block_hashes():
log_ip(request, inspect.stack()[0][3])
peer_height = int(request.forms.get("myheight"))
hash_list = []
for i in range(peer_height, BLOCKCHAIN.active_chain.length):
hash_list.append(dhash(BLOCKCHAIN.active_chain.header_list[i]))
return compress(json.dumps(hash_list)).decode()
@lru_cache(maxsize=16)
def process_new_block(request_data: bytes) -> str:
global BLOCKCHAIN
block_json = decompress(request_data)
if block_json:
try:
block = Block.from_json(block_json).object()
# Check if block already exists
if get_block_from_db(dhash(block.header)):
logger.info("Server: Received block exists, doing nothing")
return "Block already Received Before"
if BLOCKCHAIN.add_block(block):
logger.info("Server: Received a New Valid Block, Adding to Chain")
logger.debug("Server: Sending new block to peers")
# Broadcast block to other peers
send_to_all_peers("/newblock", request_data)
# TODO Make new chain/ orphan set for Block that is not added
except Exception as e:
logger.error("Server: New Block: invalid block received " + str(e))
return "Invalid Block Received"
# Kill Miner
t = Timer(1, miner.stop_mining)
t.start()
return "Block Received"
logger.error("Server: Invalid Block Received")
return "Invalid Block"
@app.post("/newblock")
def received_new_block():
log_ip(request, inspect.stack()[0][3])
return process_new_block(request.body.read())
@lru_cache(maxsize=16)
def process_new_transaction(request_data: bytes) -> str:
global BLOCKCHAIN
transaction_json = decompress(request_data)
if transaction_json:
try:
tx = Transaction.from_json(transaction_json).object()
# Add transaction to Mempool
if tx not in BLOCKCHAIN.mempool:
if BLOCKCHAIN.active_chain.is_transaction_valid(tx):
logger.debug("Valid Transaction received, Adding to Mempool")
BLOCKCHAIN.mempool.add(tx)
# Broadcast block to other peers
send_to_all_peers("/newtransaction", request_data)
else:
logger.debug("The transation is not valid, not added to Mempool")
return False, "Not Valid Transaction"
else:
return True, "Transaction Already received"
except Exception as e:
logger.error("Server: New Transaction: Invalid tx received: " + str(e))
return False, "Not Valid Transaction"
return True, "Done"
# Transactions for all active chains
@app.post("/newtransaction")
def received_new_transaction():
log_ip(request, inspect.stack()[0][3])
result, message = process_new_transaction(request.body.read())
if result:
response.status = 200
else:
response.status = 400
return message
question = '''What is greater than God,
more evil than the devil,
the poor have it,
the rich need it,
and if you eat it, you'll die?'''
actual_answer = "nothing"
@app.get("/")
def home():
log_ip(request, inspect.stack()[0][3])
message = ""
message_type = "info"
return template("index.html", message=message, message_type=message_type, question=question)
with open('uuids.json', 'r') as file:
uuid_json = file.read()
valid_ids = set(json.loads(uuid_json))
@app.post("/")
def puzzle():
log_ip(request, inspect.stack()[0][3])
message = ""
message_type = "info"
uuid = request.forms.get("uuid")
pubkey = request.forms.get("pubkey")
amounts = [300]
if uuid in valid_ids:
logger.debug("Valid Answer, Rewarding " + pubkey)
message = "Well Done!"
if check_balance(MY_WALLET.public_key) >= sum(amounts):
result = send_bounty([pubkey], amounts)
if result:
message = "Your reward is being sent, please wait for it to be mined!"
valid_ids.remove(uuid)
else:
message = "Some Error Occured, Contact Admin."
message_type = "warning"
else:
message = "Invalid Unique ID!"
message_type = "danger"
return template("index.html", message=message, message_type=message_type, question=question)
@app.get('/about')
def about():
return template("about.html")
# @app.get("/wallet")
# def wallet():
# log_ip(request, inspect.stack()[0][3])
# return template("wallet.html", message="", message_type="", pubkey=MY_WALLET.public_key)
# @app.post("/wallet")
# def wallet_post():
# log_ip(request, inspect.stack()[0][3])
# number = int(request.forms.get("number"))
# message = ""
# message_type = "info"
# try:
# receivers = []
# amounts = []
# total_amount = 0
# for i in range(0, number):
# receiver = str(request.forms.get("port" + str(i)))
# bounty = int(request.forms.get("amount" + str(i)))
# publickey = ""
# if len(receiver) < 10:
# wallet = get_wallet_from_db(receiver)
# if wallet is not None:
# publickey = wallet[1]
# else:
# message = "Error with the Receiver Port ID, try again."
# message_type = "danger"
# return template("wallet.html", message=message, message_type=message_type, pubkey=MY_WALLET.public_key)
# else:
# publickey = receiver
# total_amount += bounty
# receivers.append(publickey)
# amounts.append(bounty)
# if check_balance(MY_WALLET.public_key) >= total_amount:
# result = send_bounty(receivers, amounts)
# if result:
# message = "Your transaction is sent, please wait for it to be mined!"
# else:
# message = "Some Error Occured, Contact Admin."
# message_type = "warning"
# else:
# message = "You have Insufficient Balance!"
# message_type = "warning"
# return template("wallet.html", message=message, message_type=message_type, pubkey=MY_WALLET.public_key)
# except Exception as e:
# logger.error(e)
# message = "Some Error Occured. Please try again later."
# message_type = "danger"
# return template("wallet.html", message=message, message_type=message_type, pubkey=MY_WALLET.public_key)
@app.get("/checkmybalance")
def checkblance():
log_ip(request, inspect.stack()[0][3])
return str(check_balance(MY_WALLET.public_key))
@app.route("/static/<filename:path>", name="static")
def serve_static(filename):
log_ip(request, inspect.stack()[0][3])
return static_file(filename, root="static")
@app.get("/favicon.ico")
def get_favicon():
log_ip(request, inspect.stack()[0][3])
return static_file("favicon.ico", root="static")
@app.get("/info")
def sendinfo():
log_ip(request, inspect.stack()[0][3])
s = (
"No. of Blocks: "
+ str(BLOCKCHAIN.active_chain.length)
+ "<br>"
+ dhash(BLOCKCHAIN.active_chain.header_list[-1])
+ "<br>"
+ "Balance "
+ str(check_balance(MY_WALLET.public_key))
+ "<br>Public Key: <br>"
+ str(get_wallet_from_db(consts.MINER_SERVER_PORT)[1])
)
return s
def render_block_header(hdr):
html = "<table>"
html += "<tr><th>" + "Height" + "</th>"
html += "<td>" + str(hdr.height) + "</td></tr>"
html += "<tr><th>" + "Block Hash" + "</th>"
html += "<td>" + dhash(hdr) + "</td></tr>"
html += "<tr><th>" + "Prev Block Hash" + "</th>"
html += "<td>" + str(hdr.prev_block_hash) + "</td></tr>"
html += "<tr><th>" + "Merkle Root" + "</th>"
html += "<td>" + str(hdr.merkle_root) + "</td></tr>"
html += "<tr><th>" + "Timestamp" + "</th>"
html += (
"<td>"
+ str(datetime.fromtimestamp(hdr.timestamp).strftime("%d-%m-%Y %H:%M:%S"))
+ " ("
+ str(hdr.timestamp)
+ ")</td></tr>"
)
# get block
block = Block.from_json(get_block_from_db(dhash(hdr))).object()
html += "<tr><th>" + "Transactions" + "</th>"
html += "<td>" + str(len(block.transactions)) + "</td></tr>"
# for i, transaction in enumerate(block.transactions):
# s = "coinbase: " + str(transaction.is_coinbase) + ", fees: " + str(transaction.fees)
# html += "<tr><th>Transaction " + str(i) + "</th><td>" + str(s) + "</td></tr>"
html += "</table>"
return str(html)
@app.get("/chains")
def visualize_chain():
log_ip(request, inspect.stack()[0][3])
data = []
start = BLOCKCHAIN.active_chain.length - 10 if BLOCKCHAIN.active_chain.length > 10 else 0
headers = []
hdr_list = BLOCKCHAIN.active_chain.header_list
if len(hdr_list) > 200:
hdr_list = BLOCKCHAIN.active_chain.header_list[:100] + BLOCKCHAIN.active_chain.header_list[-100:]
for hdr in hdr_list:
d = {}
d["hash"] = dhash(hdr)[-5:]
d["time"] = hdr.timestamp
d["data"] = render_block_header(hdr)
headers.append(d)
data.append(headers)
return template("chains.html", data=data, start=start)
@app.get("/explorer")
def explorer():
log_ip(request, inspect.stack()[0][3])
prev = int(request.query.prev or 0)
if prev < 0:
prev = 0
hdr_list = list(reversed(BLOCKCHAIN.active_chain.header_list))
indexes = [i for i in range(prev * 8, (prev + 1) * 8) if i < len(hdr_list)]
blocks = [Block.from_json(get_block_from_db(dhash(hdr_list[i]))).object() for i in indexes]
transactions = list(BLOCKCHAIN.mempool)
return template("explorer.html", blocks=blocks, transactions=transactions, prev=prev)
@app.route("/block/<blockhash>", name="transaction")
def block(blockhash):
log_ip(request, inspect.stack()[0][3])
try:
block = Block.from_json(get_block_from_db(blockhash)).object()
except Exception as e:
logger.debug("BLOCK/blockhash: " + str(e))
return template("error.html")
return template("block.html", block=block)
@app.route("/transaction/<blockhash>/<txhash>", name="transaction")
def transaction(blockhash, txhash):
log_ip(request, inspect.stack()[0][3])
try:
block = Block.from_json(get_block_from_db(blockhash)).object()
tx = None
        for t in block.transactions:
            if t.hash() == txhash:
                tx = t
                break
except Exception as e:
logger.debug("Transaction/bhash/tx: " + str(e))
return template("error.html")
return template("transaction.html", tx=tx, block=block)
@app.route("/address/<pubkey:re:.+>", name="account")
def account(pubkey):
log_ip(request, inspect.stack()[0][3])
balance = check_balance(pubkey)
tx_hist = BLOCKCHAIN.active_chain.transaction_history.get(pubkey)
return template("account.html", tx_hist=tx_hist, balance=balance, pubkey=pubkey)
@app.post("/mining")
def mining():
log_ip(request, inspect.stack()[0][3])
password = request.body.read().decode("utf-8")
hashed = b"\x11`\x1e\xdd\xd1\xb6\x80\x0f\xd4\xb0t\x90\x9b\xd3]\xa0\xcc\x1d\x04$\x8b\xb1\x19J\xaa!T5-\x9eJ\xfcI5\xc0\xbb\xf5\xb1\x9d\xba\xbef@\xa1)\xcf\x9b]c(R\x91\x0e\x9dMM\xb6\x94\xa9\xe2\x94il\x15"
dk = hashlib.pbkdf2_hmac("sha512", password.encode("utf-8"), b"<PASSWORD>", 200000)
if hashed == dk:
consts.NO_MINING = not consts.NO_MINING
logger.info("Mining: " + str(not consts.NO_MINING))
return "Mining Toggled, " + "NOT MINING" if consts.NO_MINING else "MINING"
else:
return "Password Mismatch," + "NOT MINING" if consts.NO_MINING else "MINING"
@app.route("/<url:re:.+>")
@error(403)
@error(404)
@error(505)
def error_handle(url="url", error="404"):
log_ip(request, inspect.stack()[0][3])
return template("error.html")
if __name__ == "__main__":
try:
if consts.NEW_BLOCKCHAIN:
logger.info("FullNode: Starting New Chain from Genesis")
BLOCKCHAIN.add_block(genesis_block)
else:
# Restore Blockchain
logger.info("FullNode: Restoring Existing Chain")
header_list = read_header_list_from_db()
BLOCKCHAIN.build_from_header_list(header_list)
# Sync with all my peers
sync_with_peers()
# Start mining Thread
Thread(target=start_mining_thread, daemon=True).start()
if consts.NO_MINING:
logger.info("FullNode: Not Mining")
# Start server
if LINE_PROFILING:
from wsgi_lineprof.middleware import LineProfilerMiddleware
with open("lineprof" + str(consts.MINER_SERVER_PORT) + ".log", "w") as f:
app = LineProfilerMiddleware(app, stream=f, async_stream=True)
waitress.serve(app, host="0.0.0.0", threads=16, port=consts.MINER_SERVER_PORT)
else:
waitress.serve(app, host="0.0.0.0", threads=16, port=consts.MINER_SERVER_PORT)
except KeyboardInterrupt:
miner.stop_mining()
| 2.078125 | 2 |
deepexplain/tf/v1_x/main.py | alexus37/MasterThesisCode | 1 | 6467 | <filename>deepexplain/tf/v1_x/main.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.python.framework import ops
from collections import OrderedDict
import warnings, logging
from deepexplain.tf.v1_x import constants
from deepexplain.tf.v1_x.baseClasses import GradientBasedMethod
from deepexplain.tf.v1_x.methods import DeepLIFTRescale, EpsilonLRP
from deepexplain.tf.v1_x.utils import original_grad
from deepexplain.tf.v1_x.methods import DummyZero, Saliency, GradientXInput, IntegratedGradients, EpsilonLRP, DeepLIFTRescale, Occlusion, ShapleySampling
attribution_methods = OrderedDict({
'zero': (DummyZero, 0),
'saliency': (Saliency, 1),
'grad*input': (GradientXInput, 2),
'intgrad': (IntegratedGradients, 3),
'elrp': (EpsilonLRP, 4),
'deeplift': (DeepLIFTRescale, 5),
'occlusion': (Occlusion, 6),
'shapley_sampling': (ShapleySampling, 7)
})
print(f'Using tf version = {tf.__version__}')
@ops.RegisterGradient("DeepExplainGrad")
def deepexplain_grad(op, grad):
# constants._ENABLED_METHOD_CLASS, _GRAD_OVERRIDE_CHECKFLAG
constants._GRAD_OVERRIDE_CHECKFLAG = 1
if constants._ENABLED_METHOD_CLASS is not None \
and issubclass(constants._ENABLED_METHOD_CLASS, GradientBasedMethod):
return constants._ENABLED_METHOD_CLASS.nonlinearity_grad_override(op, grad)
else:
return original_grad(op, grad)
class DeepExplain(object):
    def __init__(self, graph=None, session=None):
        # Resolve the default session at construction time; as a default
        # argument it would be evaluated only once, at import time.
        if session is None:
            session = tf.compat.v1.get_default_session()
        self.method = None
self.batch_size = None
self.session = session
self.graph = session.graph if graph is None else graph
self.graph_context = self.graph.as_default()
self.override_context = self.graph.gradient_override_map(self.get_override_map())
self.keras_phase_placeholder = None
self.context_on = False
if self.session is None:
raise RuntimeError('DeepExplain: could not retrieve a session. Use DeepExplain(session=your_session).')
def __enter__(self):
# Override gradient of all ops created in context
self.graph_context.__enter__()
self.override_context.__enter__()
self.context_on = True
return self
def __exit__(self, type, value, traceback):
self.graph_context.__exit__(type, value, traceback)
self.override_context.__exit__(type, value, traceback)
self.context_on = False
def get_explainer(self, method, T, X, **kwargs):
if not self.context_on:
raise RuntimeError('Explain can be called only within a DeepExplain context.')
# global constants._ENABLED_METHOD_CLASS, _GRAD_OVERRIDE_CHECKFLAG
self.method = method
if self.method in attribution_methods:
method_class, method_flag = attribution_methods[self.method]
else:
raise RuntimeError('Method must be in %s' % list(attribution_methods.keys()))
if isinstance(X, list):
for x in X:
if 'tensor' not in str(type(x)).lower():
raise RuntimeError('If a list, X must contain only Tensorflow Tensor objects')
else:
if 'tensor' not in str(type(X)).lower():
raise RuntimeError('X must be a Tensorflow Tensor object or a list of them')
if 'tensor' not in str(type(T)).lower():
raise RuntimeError('T must be a Tensorflow Tensor object')
# logging.info('DeepExplain: running "%s" explanation method (%d)' % (self.method, method_flag))
self._check_ops()
constants._GRAD_OVERRIDE_CHECKFLAG = 0
constants._ENABLED_METHOD_CLASS = method_class
method = constants._ENABLED_METHOD_CLASS(T, X,
self.session,
keras_learning_phase=self.keras_phase_placeholder,
**kwargs)
if (issubclass(constants._ENABLED_METHOD_CLASS, DeepLIFTRescale) or issubclass(constants._ENABLED_METHOD_CLASS, EpsilonLRP)) \
and constants._GRAD_OVERRIDE_CHECKFLAG == 0:
            warnings.warn('DeepExplain detected you are trying to use an attribution method that requires '
                          'gradient override, but the original gradient was used instead. You might have forgotten to '
                          '(re)create your graph within the DeepExplain context. Results are not reliable!')
constants._ENABLED_METHOD_CLASS = None
constants._GRAD_OVERRIDE_CHECKFLAG = 0
self.keras_phase_placeholder = None
return method
def explain(self, method, T, X, xs, ys=None, batch_size=None, **kwargs):
explainer = self.get_explainer(method, T, X, **kwargs)
return explainer.run(xs, ys, batch_size)
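    # Typical usage (sketch): the graph must be (re)built inside the context so
    # the gradient override applies; T is the target tensor, X the input tensor
    # and xs the concrete input values.
    #
    #   with DeepExplain(session=sess) as de:
    #       attributions = de.explain('grad*input', T, X, xs)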
@staticmethod
def get_override_map():
return dict((a, 'DeepExplainGrad') for a in constants.SUPPORTED_ACTIVATIONS)
def _check_ops(self):
"""
Heuristically check if any op is in the list of unsupported activation functions.
This does not cover all cases where explanation methods would fail, and must be improved in the future.
Also, check if the placeholder named 'keras_learning_phase' exists in the graph. This is used by Keras
and needs to be passed in feed_dict.
:return:
"""
g = tf.compat.v1.get_default_graph()
for op in g.get_operations():
if len(op.inputs) > 0 and not op.name.startswith('gradients'):
if op.type in constants.UNSUPPORTED_ACTIVATIONS:
warnings.warn('Detected unsupported activation (%s). '
'This might lead to unexpected or wrong results.' % op.type)
elif 'keras_learning_phase' in op.name:
self.keras_phase_placeholder = op.outputs[0] | 2.0625 | 2 |
util/mem_usage.py | robinupham/cnn_lensing | 0 | 6468 | <gh_stars>0
"""
Get the memory usage of a Keras model.
From https://stackoverflow.com/a/46216013.
"""
def get_model_memory_usage(batch_size, model):
"""
Get the memory usage of a Keras model in GB.
From https://stackoverflow.com/a/46216013.
"""
import numpy as np
try:
from keras import backend as K
except ImportError:
from tensorflow.keras import backend as K
shapes_mem_count = 0
internal_model_mem_count = 0
for l in model.layers:
layer_type = l.__class__.__name__
if layer_type == 'Model':
internal_model_mem_count += get_model_memory_usage(batch_size, l)
single_layer_mem = 1
out_shape = l.output_shape
if isinstance(out_shape, list):
out_shape = out_shape[0]
for s in out_shape:
if s is None:
continue
single_layer_mem *= s
shapes_mem_count += single_layer_mem
trainable_count = np.sum([K.count_params(p) for p in model.trainable_weights])
non_trainable_count = np.sum([K.count_params(p) for p in model.non_trainable_weights])
number_size = 4.0
if K.floatx() == 'float16':
number_size = 2.0
if K.floatx() == 'float64':
number_size = 8.0
total_memory = number_size * (batch_size * shapes_mem_count + trainable_count + non_trainable_count)
gbytes = np.round(total_memory / (1024.0 ** 3), 3) + internal_model_mem_count
return gbytes
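# Usage sketch with a toy model (the layer sizes are arbitrary assumptions):
#
#   from tensorflow.keras import Sequential
#   from tensorflow.keras.layers import Dense
#   model = Sequential([Dense(64, activation='relu', input_shape=(100,)),
#                       Dense(10)])
#   print(get_model_memory_usage(batch_size=32, model=model), "GB")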
| 2.84375 | 3 |
hexrd/distortion/distortionabc.py | glemaitre/hexrd | 27 | 6469 | import abc
class DistortionABC(metaclass=abc.ABCMeta):
maptype = None
@abc.abstractmethod
def apply(self, xy_in):
"""Apply distortion mapping"""
pass
@abc.abstractmethod
def apply_inverse(self, xy_in):
"""Apply inverse distortion mapping"""
pass
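# Minimal concrete subclass sketch (a hypothetical identity mapping, shown only
# to illustrate the required interface):
#
# class NullDistortion(DistortionABC):
#     maptype = "null"
#
#     def apply(self, xy_in):
#         return xy_in  # no distortion applied
#
#     def apply_inverse(self, xy_in):
#         return xy_in  # the inverse of the identity is the identity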
| 3.09375 | 3 |
setup.py | statisticianinstilettos/recommender_metrics | 0 | 6470 | import io
import os
from setuptools import setup
def read(file_name):
"""Read a text file and return the content as a string."""
with io.open(os.path.join(os.path.dirname(__file__), file_name),
encoding='utf-8') as f:
return f.read()
setup(
name='recmetrics',
url='https://github.com/statisticianinstilettos/recommender_metrics',
author='<NAME>',
author_email='<EMAIL>',
packages=['recmetrics'],
install_requires=['funcsigs',
'numpy',
'pandas',
'plotly',
'scikit-learn',
'seaborn'],
license='MIT',
version='0.1.4',
description='Evaluation metrics for recommender systems',
long_description=read("README.md"),
long_description_content_type="text/markdown",
)
| 2.640625 | 3 |
run_classifier.py | wj-Mcat/model-getting-started | 0 | 6471 | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""BERT finetuning runner."""
from __future__ import annotations, absolute_import
import os
from typing import Dict, List
from transformers import (
AutoTokenizer, BertTokenizer,
BertForSequenceClassification, BertConfig,
Trainer, TrainingArguments,
PreTrainedTokenizer
)
from transformers.configuration_utils import PretrainedConfig
from src.schema import (
InputExample, InputFeatures, Config
)
from src.data_process import (
AgNewsDataProcessor
)
from config import create_logger
logger = create_logger()
def convert_single_example(
example_index: int, example: InputExample, label2id: Dict[str, int], max_seq_length: int, tokenizer: BertTokenizer
) -> InputFeatures:
"""Converts a single `InputExample` into a single `InputFeatures`.
    example_index: used to log the first few examples for inspection
"""
    parameters = {
        "text": example.text_a,
        "add_special_tokens": True,
        "padding": "max_length",  # pad every example to max_seq_length
        "truncation": True,
        "max_length": max_seq_length,
        "return_attention_mask": True,
        "return_token_type_ids": True,
        "return_length": True,
        "verbose": True
    }
if example.text_b:
parameters['text_pair'] = example.text_b
feature = tokenizer(**parameters)
input_feature = InputFeatures(
        input_ids=feature['input_ids'],
attention_mask=feature['attention_mask'],
segment_ids=feature['token_type_ids'],
label_id=label2id[example.label],
is_real_example=True
)
if example_index < 5:
logger.info(f'*************************** Example {example_index} ***************************')
logger.info(example)
logger.info(input_feature)
logger.info('*************************** Example End ***************************')
return input_feature
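# Note: the BatchEncoding returned by tokenizer(**parameters) above exposes
# 'input_ids', 'attention_mask' and 'token_type_ids' (plus 'length', because
# return_length=True). A quick illustrative check, assuming any BERT-style
# tokenizer:
#
#   enc = tokenizer("hello world", padding="max_length", max_length=8,
#                   truncation=True, return_token_type_ids=True)
#   assert len(enc["input_ids"]) == 8  # [CLS] hello world [SEP] + padding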
def create_bert_for_sequence_classification_model(config: Config):
bert_config: BertConfig = BertConfig.from_pretrained(config.pretrained_model_name)
bert_config.num_labels = config.num_labels
model = BertForSequenceClassification(bert_config)
return model
def create_model(config: Config):
"""Creates a classification model."""
models = {
"bert-for-sequence-classification": create_bert_for_sequence_classification_model,
}
return models[config.model_name](config)
def convert_examples_to_features(
examples, label_list: List[str],
max_seq_length: int, tokenizer: PreTrainedTokenizer
):
"""Convert a set of `InputExample`s to a list of `InputFeatures`."""
label2id = {label: index for index, label in enumerate(label_list)}
features = []
for (ex_index, example) in enumerate(examples):
if ex_index % 200 == 0:
logger.info("Writing example %d of %d" % (ex_index, len(examples)))
feature = convert_single_example(ex_index, example, label2id,
max_seq_length, tokenizer)
features.append(feature)
return features
class SequenceClassificationTrainer(Trainer):
def compute_loss(self, model, inputs, return_outputs=False):
        # Keep the labels in the forward pass so the model computes its own loss
        outputs = model(**inputs)
        loss = outputs.loss
        return (loss, outputs) if return_outputs else loss
def main():
# processors need to be updated
processors = {
'agnews-processor': AgNewsDataProcessor,
}
config: Config = Config.instance()
if not config.do_train and not config.do_eval and not config.do_predict:
raise ValueError(
"At least one of `do_train`, `do_eval` or `do_predict' must be True.")
bert_config = PretrainedConfig.from_pretrained(config.pretrained_model_name)
    # Pick the data processor that matches the requested task
task_name = config.task_name.lower()
if task_name not in processors:
raise ValueError("Task not found: %s" % (task_name))
processor = processors[task_name]()
label_list = processor.get_labels()
tokenizer = AutoTokenizer.from_pretrained(config.pretrained_model_name)
train_examples = None
num_train_steps = None
num_warmup_steps = None
if config.do_train:
train_examples: List[InputExample] = processor.get_train_examples(config.data_dir)
        # TODO: build a training Dataset/DataLoader from these features
        train_features = convert_examples_to_features(
            train_examples, label_list, config.max_seq_length, tokenizer)
num_train_steps = int(
len(train_examples) / config.train_batch_size * config.epochs
)
num_warmup_steps = int(num_train_steps * config.warmup_proportion)
model = create_model(config=config)
training_arguments = TrainingArguments(
output_dir=config.output_dir,
overwrite_output_dir=True,
)
trainer = SequenceClassificationTrainer(
model=model,
)
# If TPU is not available, this will fall back to normal Estimator on CPU
# or GPUs
if config.do_train:
train_file = os.path.join(config.output_dir, "train.tf_record")
file_based_convert_examples_to_features(
train_examples, label_list, config.max_seq_length, tokenizer, train_file)
tf.logging.info("***** Running training *****")
tf.logging.info(" Num examples = %d", len(train_examples))
tf.logging.info(" Batch size = %d", config.train_batch_size)
tf.logging.info(" Num steps = %d", num_train_steps)
train_input_fn = file_based_input_fn_builder(
input_file=train_file,
seq_length=config.max_seq_length,
is_training=True,
drop_remainder=True)
estimator.train(input_fn=train_input_fn, max_steps=num_train_steps)
if config.do_eval:
eval_examples = processor.get_dev_examples(config.data_dir)
num_actual_eval_examples = len(eval_examples)
if config.use_tpu:
# TPU requires a fixed batch size for all batches, therefore the number
# of examples must be a multiple of the batch size, or else examples
# will get dropped. So we pad with fake examples which are ignored
# later on. These do NOT count towards the metric (all tf.metrics
# support a per-instance weight, and these get a weight of 0.0).
while len(eval_examples) % config.eval_batch_size != 0:
eval_examples.append(PaddingInputExample())
eval_file = os.path.join(config.output_dir, "eval.tf_record")
file_based_convert_examples_to_features(
eval_examples, label_list, config.max_seq_length, tokenizer, eval_file)
tf.logging.info("***** Running evaluation *****")
tf.logging.info(" Num examples = %d (%d actual, %d padding)",
len(eval_examples), num_actual_eval_examples,
len(eval_examples) - num_actual_eval_examples)
tf.logging.info(" Batch size = %d", config.eval_batch_size)
# This tells the estimator to run through the entire set.
eval_steps = None
# However, if running eval on the TPU, you will need to specify the
# number of steps.
if config.use_tpu:
assert len(eval_examples) % config.eval_batch_size == 0
eval_steps = int(len(eval_examples) // config.eval_batch_size)
eval_drop_remainder = True if config.use_tpu else False
eval_input_fn = file_based_input_fn_builder(
input_file=eval_file,
seq_length=config.max_seq_length,
is_training=False,
drop_remainder=eval_drop_remainder)
result = estimator.evaluate(input_fn=eval_input_fn, steps=eval_steps)
output_eval_file = os.path.join(config.output_dir, "eval_results.txt")
with tf.gfile.GFile(output_eval_file, "w") as writer:
tf.logging.info("***** Eval results *****")
for key in sorted(result.keys()):
tf.logging.info(" %s = %s", key, str(result[key]))
writer.write("%s = %s\n" % (key, str(result[key])))
if config.do_predict:
predict_examples = processor.get_test_examples(config.data_dir)
num_actual_predict_examples = len(predict_examples)
if config.use_tpu:
# TPU requires a fixed batch size for all batches, therefore the number
# of examples must be a multiple of the batch size, or else examples
# will get dropped. So we pad with fake examples which are ignored
# later on.
while len(predict_examples) % config.predict_batch_size != 0:
predict_examples.append(PaddingInputExample())
predict_file = os.path.join(config.output_dir, "predict.tf_record")
file_based_convert_examples_to_features(predict_examples, label_list,
config.max_seq_length, tokenizer,
predict_file)
tf.logging.info("***** Running prediction*****")
tf.logging.info(" Num examples = %d (%d actual, %d padding)",
len(predict_examples), num_actual_predict_examples,
len(predict_examples) - num_actual_predict_examples)
tf.logging.info(" Batch size = %d", config.predict_batch_size)
predict_drop_remainder = True if config.use_tpu else False
predict_input_fn = file_based_input_fn_builder(
input_file=predict_file,
seq_length=config.max_seq_length,
is_training=False,
drop_remainder=predict_drop_remainder)
result = estimator.predict(input_fn=predict_input_fn)
output_predict_file = os.path.join(config.output_dir, "test_results.tsv")
with tf.gfile.GFile(output_predict_file, "w") as writer:
num_written_lines = 0
tf.logging.info("***** Predict results *****")
for (i, prediction) in enumerate(result):
probabilities = prediction["probabilities"]
if i >= num_actual_predict_examples:
break
output_line = "\t".join(
str(class_probability)
for class_probability in probabilities) + "\n"
writer.write(output_line)
num_written_lines += 1
assert num_written_lines == num_actual_predict_examples
if __name__ == "__main__":
main()
| 2 | 2 |
module2-sql-for-analysis/rpg_db.py | TobyChen320/DS-Unit-3-Sprint-2-SQL-and-Databases | 0 | 6472 | <reponame>TobyChen320/DS-Unit-3-Sprint-2-SQL-and-Databases
import sqlite3
import os
import psycopg2
from dotenv import load_dotenv
load_dotenv()
DB_NAME2 = os.getenv("DB_NAME3")
DB_USER2 = os.getenv("DB_USER3")
DB_PASS2 = os.getenv("DB_PASS3")
DB_HOST2 = os.getenv("DB_HOST3")
conn = psycopg2.connect(dbname=DB_NAME2,
user=DB_USER2,
                        password=DB_PASS2,
host=DB_HOST2)
cursor = conn.cursor()
sl_conn = sqlite3.connect("rpg_db.sqlite3")
sl_cursor = sl_conn.cursor()
characters = sl_cursor.execute('SELECT * FROM charactercreator_character LIMIT 10').fetchall()
print(characters)
create_character_table_query = '''
CREATE TABLE IF NOT EXISTS rpg_characters (
character_id SERIAL PRIMARY KEY,
name VARCHAR(30),
level INT,
exp INT,
hp INT,
strength INT,
intelligence INT,
dexterity INT,
wisdom INT
)
'''
cursor.execute(create_character_table_query)
conn.commit()
insert_query = '''INSERT INTO rpg_characters
    (character_id, name, level, exp, hp, strength, intelligence, dexterity, wisdom)
    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)'''
for character in characters:
    # Parameterized execution lets psycopg2 handle quoting, unlike raw
    # f-string interpolation of the tuple
    cursor.execute(insert_query, character)
    conn.commit()
cursor.close()
conn.close()
| 3.171875 | 3 |
sws_comp_wiki_gen.py | moff-wildfire/sws-battlefy | 1 | 6473 | <reponame>moff-wildfire/sws-battlefy
import battlefy_data
import battlefy_wiki_linkings
from datetime import datetime
from operator import itemgetter
from pathlib import Path
import calcup_roster_tracking
def create_sidebar(data, wiki_name):
sidebar = '{{Infobox league' + '\n'
sidebar += '|liquipediatier=' + '\n'
sidebar += '|name=' + data['name'] + '\n'
sidebar += '|shortname=' + data['name'] + '\n'
sidebar += '|tickername=' + data['name'] + '\n'
sidebar += '|image=' + '\n'
sidebar += '|icon=' + '\n'
sidebar += '|series=' + '\n'
sidebar += '|organizer=' + data['organization']['name'] + '\n'
sidebar += '|organizer-link=' + '\n'
sidebar += '|sponsor=' + '\n'
sidebar += '|localcurrency=' + '\n'
sidebar += '|prizepool=' + data['prizes'] + '\n'
sidebar += '|type=Online' + '\n'
sidebar += '|platform=' + data['platform'] + '\n'
sidebar += '|country=' + '\n'
sidebar += '|format=' + '\n'
sidebar += '|patch=' + '\n'
sidebar += '|sdate=' + datetime.strptime(data['checkInStartTime'], '%Y-%m-%dT%H:%M:%S.%fZ').strftime(
'%Y-%m-%d') + '\n'
try:
sidebar += '|edate=' + datetime.strptime(data['lastCompletedMatchAt'], '%Y-%m-%dT%H:%M:%S.%fZ').strftime(
'%Y-%m-%d') + '\n'
except KeyError:
sidebar += '|edate=\n'
sidebar += '|web=' + '\n'
sidebar += '|bracket=https://battlefy.com/' + data['organization']['slug'] + '/' + data['slug'] + '/' \
+ data['_id'] + '/bracket-list' + '\n'
sidebar += '|rulebook=' + '\n'
sidebar += '|twitter=' + '\n'
sidebar += '|twitch=' + '\n'
sidebar += '|instagram=' + '\n'
sidebar += '|discord=' + '\n'
sidebar += '|map1=' + '\n'
sidebar += '|map2=' + '\n'
sidebar += '|map3=' + '\n'
sidebar += '|map4=' + '\n'
sidebar += '|map5=' + '\n'
sidebar += '|team_number=' + str(len(data['teams'])) + '\n'
sidebar += '|previous=' + '\n'
sidebar += '|next=' + '\n'
sidebar += '}}\n'
sidebar += '{{Upcoming matches tournament|' + wiki_name + '}}\n'
return sidebar
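# The returned string is the body of a Liquipedia {{Infobox league}} template;
# the trailing {{Upcoming matches tournament|...}} transclusion lets the wiki
# page embed the live match list for this event.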
def create_event_format(data):
event_format = ''
for stage in data['stages']:
event_format += '* ' + stage['name'] + '\n'
if stage['bracket']['type'] == "swiss":
event_format += '** ' + str(stage['bracket']['roundsCount']) + '-round ' + stage['bracket']['type'] + '\n'
elif stage['bracket']['type'] == "elimination":
numGames = 0
rounds = 0
for match in stage['bracket']['series']:
if match['numGames'] != numGames:
if rounds:
event_format += '** ' + str(rounds) + '-round ' \
+ stage['bracket']['seriesStyle'] + str(numGames) + '\n'
rounds = 1
numGames = match['numGames']
else:
rounds += 1
if rounds:
event_format += '** ' + str(rounds) + '-round ' \
+ stage['bracket']['seriesStyle'] + str(numGames) + '\n'
return event_format
def rank_teams(data, bw_teams, sort_place=True, break_ties=False):
for stage in data['stages']:
for place, standing in enumerate(stage['standings']):
if 'place' in standing:
if 'place' not in data['teams'][standing['team']['_id']]:
data['teams'][standing['team']['_id']]['place'] = len(stage['standings']) + place
else:
if break_ties:
data['teams'][standing['team']['_id']]['place'] = \
standing['place'] + (1 - 1 / data['teams'][standing['team']['_id']]['place'])
else:
data['teams'][standing['team']['_id']]['place'] = standing['place']
else:
data['teams'][standing['team']['_id']]['place'] = len(stage['standings']) + place
teams = list()
for team_id in data['teams']:
if 'place' in data['teams'][team_id]:
place = data['teams'][team_id]['place']
else:
place = 0
team_info = bw_teams.get_team_info(data['teams'][team_id]['persistentTeamID'], data['teams'][team_id]['name'])
teams.append((team_id,
data['teams'][team_id]['name'],
place,
data['teams'][team_id]['persistentTeamID'],
team_info['name']
))
if sort_place:
teams = sorted(teams, key=itemgetter(2, 4, 0))
else:
teams = sorted(teams, key=itemgetter(4, 0))
return teams
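# Tie-break example (illustrative): two teams tied at place 5 keep their
# earlier-stage order, because 5 + (1 - 1/2) = 5.5 sorts ahead of
# 5 + (1 - 1/4) = 5.75, i.e. the team that previously placed 2nd stays ahead
# of the one that previously placed 4th.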
def create_participants(data, bw_players, bw_teams, dynamic=[], sort_place=True):
header = '{{TeamCardToggleButton}}\n'
teams_ordered = ''
# Use prior rounds as a tiebreaker for when multiple teams have the same place at the end
teams = rank_teams(data, bw_teams, sort_place)
dynamic_idx = 0
if dynamic:
header += '{{tabs dynamic\n'
header += '|name' + str(dynamic_idx+1) + '=' + dynamic[dynamic_idx]['tab_name'] + '\n'
header += '|This=1\n'
header += '|content' + str(dynamic_idx+1) + '=' + '\n'
header += '{{TeamCard columns start|cols=5|height=250}}\n'
for team_num, team in enumerate(teams):
if dynamic:
if team_num == dynamic[dynamic_idx]['count']:
teams_ordered += '{{TeamCard columns end}}\n'
dynamic_idx += 1
teams_ordered += '|name' + str(dynamic_idx + 1) + '=' + dynamic[dynamic_idx]['tab_name'] + '\n'
teams_ordered += '|content' + str(dynamic_idx+1) + '=' + '\n'
teams_ordered += '{{TeamCard columns start|cols=5|height=250}}\n'
else:
if team_num == 0:
teams_ordered += '{{TeamCard columns start|cols=5|height=250}}\n'
teams_table = '{{TeamCard\n'
team_info = bw_teams.get_team_info(team[3], team[1])
teams_table += '|team=' + team_info['name'] + '\n'
teams_table += '|image=' + team_info['image'] + '\n'
for idx, player in enumerate(data['teams'][team[0]]['players']):
player_tag = 'p' + str(idx + 1)
if player['_id'] in calcup_roster_tracking.eventid_to_missing_userid:
player['userID'] = calcup_roster_tracking.eventid_to_missing_userid[player['_id']]
player_info = bw_players.get_player_info(player['userID'], player['inGameName'])
teams_table += '|' + player_tag + '=' + player_info['name'] \
+ ' |' + player_tag + 'flag=' + player_info['flag']
if player_info['link']:
teams_table += ' |' + player_tag + 'link=' + player_info['link']
teams_table += '\n'
# teams_table += '|c= |cflag=\n'
# teams_table += '|qualifier=\n'
teams_table += '}}\n'
teams_ordered += teams_table
footer = '{{TeamCard columns end}}\n'
if dynamic:
footer += '}}\n'
return header + teams_ordered + footer
def create_swiss_table(stage, bw_teams):
dropped_style = 'drop'
swiss_table = '{{SwissTableLeague|rounds=' + str(stage['bracket']['roundsCount']) + '|diff=false\n'
for i in range(stage['bracket']['teamsCount']):
swiss_table += '|pbg' + str(i + 1) + '=down'
if (i + 1) % 8 == 0:
swiss_table += '\n'
if '\n' not in swiss_table[-1]:
swiss_table += '\n'
for rank, record in enumerate(stage['standings']):
if record['disqualified']:
swiss_table += '|bg' + str(rank + 1) + '=' + dropped_style + ''
else:
swiss_table += '|bg' + str(rank + 1) + '=down'
team_info = bw_teams.get_team_info(record['team']['persistentTeamID'], record['team']['name'])
swiss_table += '|team' + str(rank + 1) + '=' + team_info['teamteamplate']
swiss_table += '|temp_tie' + str(rank+1) + '=' + "{:7.3f}".format(record['opponentsMatchWinPercentage']) + '\n'
swiss_table += '}}\n'
return swiss_table
def create_swiss_matches(matches, teams, bw_teams):
swiss_match_table = ''
rounds = dict()
for match in matches:
match_line = create_match_maps(match, teams, bw_teams)
if not match_line:
continue
try:
rounds[str(match['roundNumber'])].append(match_line)
except KeyError:
rounds[str(match['roundNumber'])] = list()
rounds[str(match['roundNumber'])].append(match_line)
for i in range(1, len(rounds) + 1):
if i == 1:
swiss_match_table += '{{box|start|padding=2em}}\n'
else:
swiss_match_table += '{{box|break|padding=2em}}\n'
swiss_match_table += '====={{HiddenSort|Round ' + str(i) + '}}=====\n'
swiss_match_table += '{{MatchListStart|width=450px|title=Round ' + str(i) + ' Matches|matchsection=Round ' \
+ str(i) + '|hide=false}}\n'
for match in rounds[str(i)]:
swiss_match_table += match
swiss_match_table += '{{MatchListEnd}}\n'
swiss_match_table += '{{box|end}}\n'
return swiss_match_table
def create_elim_bracket(stage, teams, bw_teams):
if stage['bracket']['style'] == 'single':
bracket = '{{' + str(stage['bracket']['teamsCount']) + 'SETeamBracket\n'
elif stage['bracket']['style'] == 'double':
bracket = '{{' + str(stage['bracket']['teamsCount']) + 'DETeamBracket\n'
else:
print('Unknown stage style: ' + stage['bracket']['style'])
return
# todo handle double elimination brackets
# set up team number trackers
team_previous_round = dict()
# set up round-match count trackers
round_max_win_match_count = [1] * (len(stage['bracket']['series']) + 1)
round_max_win_match_count[0] = 0
round_max_loss_match_count = [1] * (len(stage['bracket']['series']) + 1)
round_max_loss_match_count[0] = 0
# matches = sorted(stage['matches'], key=itemgetter('matchNumber'))
matches = stage['matches']
for match in matches:
# TODO: this will need to get updated for non SE16 templates
# In DE brackets D means the team dropped down from the previous round
# In DE brackest W means the team won the previous round
# So there are rounds where D vs L happen such as R2D1 vs R2W5 and R2D2 vs R2W6
# Might want to key off match['inConsolationBracket']
# May also just need to keep track of match['next'] and build up the D and W that way instead
# Default first round to D and then future bracket type is defined by match['next']
# Not exactly sure how to address round_team_number, in a 8 team DE the third winners bracket round is
# called the 4th round and in a 16 team DE the 4th winners bracket round is called the 6th round
# https://liquipedia.net/rainbowsix/Template:4DETeamBracket/doc
# https://liquipedia.net/rainbowsix/Template:8DETeamBracket/doc
# https://liquipedia.net/rainbowsix/Template:16DETeamBracket/doc
# if match['matchType'] == 'winner':
# round_max_win_match_count[match['roundNumber']] = max(match['matchNumber'],
# round_max_win_match_count[match['roundNumber']])
# elif match['matchType'] == 'loser':
# round_max_loss_match_count[match['roundNumber']] = max(match['matchNumber'],
# round_max_loss_match_count[match['roundNumber']])
        if 'teamID' not in match['top']:
continue
if match['top']['teamID'] in team_previous_round:
if team_previous_round[match['top']['teamID']]:
bracket_type = 'W'
else:
bracket_type = 'D'
else:
bracket_type = 'D'
if match['matchType'] == 'winner':
round_match_offset = -2 * round_max_win_match_count[match['roundNumber'] - 1]
else:
round_match_offset = -2 * round_max_loss_match_count[match['roundNumber'] - 1] \
+ (round_max_win_match_count[match['roundNumber']]
- round_max_win_match_count[match['roundNumber'] - 1]) * 2
# Increment for next time
if match['matchType'] == 'winner':
round_max_win_match_count[match['roundNumber']] = max(match['matchNumber'],
round_max_win_match_count[match['roundNumber']])
elif match['matchType'] == 'loser':
round_max_loss_match_count[match['roundNumber']] = max(match['matchNumber'],
round_max_loss_match_count[match['roundNumber']])
bracket_indicator = '|R' + str(match['roundNumber']) + bracket_type \
+ str(match['matchNumber'] * 2 - 1 + round_match_offset)
if 'teamID' in match['top']:
team_name = bw_teams.get_team_info(teams[match['top']['teamID']]['persistentTeamID'],
teams[match['top']['teamID']]['name'])['teamteamplate']
bracket += bracket_indicator + 'team=' + team_name + ' '
else:
bracket += bracket_indicator + 'literal=BYE '
if 'score' in match['top']:
bracket += bracket_indicator + 'score=' + str(match['top']['score']) + ' '
if 'winner' in match['top'] and match['top']['winner']:
bracket += bracket_indicator + 'win=1 '
team_previous_round[match['top']['teamID']] = True
else:
team_previous_round[match['top']['teamID']] = False
bracket += '\n'
if 'teamID' in match['bottom']:
if match['bottom']['teamID'] in team_previous_round:
if team_previous_round[match['bottom']['teamID']]:
bracket_type = 'W'
else:
bracket_type = 'D'
else:
bracket_type = 'D'
else:
bracket_type = 'D'
bracket_indicator = '|R' + str(match['roundNumber']) + bracket_type \
+ str(match['matchNumber'] * 2 + round_match_offset)
if 'teamID' in match['bottom']:
team_name = bw_teams.get_team_info(teams[match['bottom']['teamID']]['persistentTeamID'],
teams[match['bottom']['teamID']]['name'])['teamteamplate']
bracket += bracket_indicator + 'team=' + team_name + ' '
else:
bracket += bracket_indicator + 'literal=BYE '
if 'score' in match['bottom']:
bracket += bracket_indicator + 'score=' + str(match['bottom']['score']) + ' '
if 'winner' in match['bottom'] and match['bottom']['winner']:
bracket += bracket_indicator + 'win=2 '
team_previous_round[match['bottom']['teamID']] = True
elif 'teamID' in match['bottom']:
team_previous_round[match['bottom']['teamID']] = False
bracket += '\n'
bracket += '}}\n'
return bracket
def create_match_maps(match, teams, bw_teams):
match_line = ''
if not match['isComplete']:
return match_line
match_line = '{{MatchMaps\n'
match_line += '|date=\n'
if 'teamID' in match['top']:
team_top = bw_teams.get_team_info(teams[match['top']['teamID']]['persistentTeamID'],
teams[match['top']['teamID']]['name'])
elif match['isBye']:
team_top = bw_teams.get_team_info('0', 'BYE')
if 'teamID' in match['bottom']:
team_bot = bw_teams.get_team_info(teams[match['bottom']['teamID']]['persistentTeamID'],
teams[match['bottom']['teamID']]['name'])
elif match['isBye']:
team_bot = bw_teams.get_team_info('0', 'BYE')
match_line += '|team1=' + team_top['teamteamplate']
match_line += '|team2=' + team_bot['teamteamplate']
if 'isTie' in match and match['isTie']:
match_line += '|winner=0\n'
elif 'winner' in match['top'] and match['top']['winner']:
match_line += '|winner=1\n'
elif 'winner' in match['bottom'] and match['bottom']['winner']:
match_line += '|winner=2\n'
else:
match_line += '|winner=0\n'
if match['isBye']:
match_line += '|walkover=1'
match_line += '|games1='
if match['top']['winner']:
match_line += 'W'
else:
match_line += 'FF'
match_line += '|games2='
if 'winner' in match['bottom'] and match['bottom']['winner']:
match_line += 'W'
else:
match_line += 'FF'
else:
match_line += '|games1=' + str(match['top']['score'])
match_line += '|games2=' + str(match['bottom']['score']) + '\n'
match_line += '|details={{BracketMatchSummary\n'
match_line += '|date=|finished=true\n'
match_line += '|twitch= |youtube=\n'
match_line += '|vod=\n'
match_line += '}}\n'
match_line += '}}\n'
return match_line
def create_round_robin_tables(stage, teams, bw_teams, wiki_name, include_matches=True):
tables = ''
for idx, group in enumerate(stage['groups']):
if idx == 1:
tables += '{{box|start|padding=2em}}\n'
else:
tables += '{{box|break|padding=2em}}\n'
tables += '===={{HiddenSort|Group ' + group['name'] + '}}====\n'
tables += '{{GroupTableLeague|title=Group ' + group['name'] + '|width=450px|show_p=false|date=|ties=true\n'
tables += '|tournament=' + wiki_name + '\n'
group_header = ''
group_table = ''
for pos, standing_id in enumerate(group['standingIDs']):
group_header += '|pbg' + str(pos + 1) + '=down'
for standing in stage['standings']:
if standing_id == standing['_id']:
# if standing['disqualified']:
# has_drop = True
team_info = bw_teams.get_team_info(teams[standing['team']['_id']]['persistentTeamID'],
teams[standing['team']['_id']]['name'])
group_table += '|bg' + str(pos + 1) + '=down|team' + str(pos + 1) + "=" \
+ team_info['teamteamplate'] + '\n'
group_header += '|tiebreaker1=series\n'
tables += group_header
tables += group_table
tables += "}}\n"
if include_matches:
match_table = '{{MatchListStart|title=Group ' + group['name'] + ' Matches|width=450px|hide=true}}\n'
for match in group['matches']:
match_line = create_match_maps(match, teams, bw_teams)
match_table += match_line
tables += match_table
tables += '{{MatchListEnd}}\n'
tables += '{{box|end}}\n'
return tables
def create_prize_pool(prize):
prize_pool = prize + '\n'
prize_pool += '{{prize pool start}}\n'
prize_pool += '{{prize pool slot |place=1 |usdprize=0 |tbd |lastvs1= |lastscore1= |lastvsscore1=}}\n'
prize_pool += '{{prize pool slot |place=2 |usdprize=0 |tbd |lastvs1= |lastscore1= |lastvsscore1=}}\n'
prize_pool += '{{prize pool slot |place=3-4 |usdprize=0\n'
prize_pool += '|tbd |lastvs1= |lastscore1= |lastvsscore1=\n'
prize_pool += '|tbd |lastvs2= |lastscore2= |lastvsscore2=\n'
prize_pool += '}}\n'
prize_pool += '{{prize pool slot |place=5-8 |usdprize=0\n'
prize_pool += '|tbd |lastvs1= |lastscore1= |lastvsscore1=\n'
prize_pool += '|tbd |lastvs2= |lastscore2= |lastvsscore2=\n'
prize_pool += '|tbd |lastvs3= |lastscore3= |lastvsscore3=\n'
prize_pool += '|tbd |lastvs4= |lastscore4= |lastvsscore4=\n'
prize_pool += '}}\n'
prize_pool += '{{Prize pool end}}\n'
return prize_pool
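
# Example (illustrative): create_prize_pool('$500 USD') returns the prize text
# followed by an empty four-slot Liquipedia prize pool template skeleton that
# can be filled in by hand after the event.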
def main():
ccs_winter_minor_id = '5ff3354193edb53839d44d55'
ccs_winter_minor_wiki = 'Calrissian_Cup/Winter/Minor'
ccs_winter_major_id = '60019f8ebcc5ed46373408a1'
ccs_winter_major_wiki = 'Calrissian_Cup/Winter/Major'
ccs_spring_minor_id = '603c00fbfe4fb811b3168f5b'
ccs_spring_minor_wiki = 'Calrissian_Cup/Spring/Minor'
ccs_spring_major_id = '6061b764f68d8733c8455fcf'
ccs_spring_major_wiki = 'Calrissian_Cup/Spring/Major'
ccs_summer_minor_id = '60b41961d35b1411a7b31d64'
ccs_summer_minor_wiki = 'Calrissian_Cup/Summer/Minor'
ccs_summer_major_id = '60dd319012cb9c33c2f63868'
ccs_summer_major_wiki = 'Calrissian_Cup/Summer/Major'
ccs_fall_minor_id = '60fa26043ba15d73719669bd'
ccs_fall_minor_wiki = 'Calrissian_Cup/Fall/Minor'
ccs_fall_major_id = '61314505635fe17a14eafe03'
ccs_fall_major_wiki = 'Calrissian_Cup/Fall/Major'
ccs_championship_id = '6150dd2b0dd060282bebb0eb'
ccs_championship_wiki = 'Calrissian_Cup/Championship'
world_cup_id = '611dac6ecb6f6260d5f30b6e'
world_cup_wiki = 'World_Cup'
twin_suns_tourny_id = '60806876938bed74f6edea9e'
twin_suns_wiki = 'Twin_Suns_Tournament'
gsl_s1_id = '5ff4b388fd124e11b18e185d'
gsl_s1_wiki = 'Global_Squadrons_League/2021/Season_1'
tournament_id = world_cup_id
wiki_name = world_cup_wiki
participant_tabs = [
# {'tab_name': 'Top 16',
# 'count': 16},
# {'tab_name': 'Top 32',
# 'count': 32},
# {'tab_name': 'Other Notable Participants',
# 'count': -1},
]
bw_teams = battlefy_wiki_linkings.BattlefyWikiTeamLinkings()
bw_players = battlefy_wiki_linkings.BattlefyWikiPlayerLinkings()
event_data = battlefy_data.BattlefyData(tournament_id)
event_data.load_tournament_data()
# FORCE REDUCE TEAMS
event_data.reduce_teams()
event_path = event_data.get_tournament_data_path()
event_path.mkdir(parents=True, exist_ok=True)
filename = Path.joinpath(event_path, event_data.tournament_data['name'] + '.wiki')
with open(filename, 'w+', newline='\n', encoding='utf-8') as f:
display = '{{DISPLAYTITLE:' + event_data.tournament_data['name'] + '}}\n'
f.write(display)
sidebar = create_sidebar(event_data.tournament_data, wiki_name)
f.write(sidebar)
f.write('==About==\n')
f.write('===Format===\n')
event_format = create_event_format(event_data.tournament_data)
f.write(event_format)
f.write('===Broadcast Talent===\n')
f.write('===Prize Pool===\n')
prize_pool = create_prize_pool(event_data.tournament_data['prizes'])
f.write(prize_pool)
f.write('==Participants==\n')
teams = create_participants(event_data.tournament_data, bw_players, bw_teams,
dynamic=participant_tabs, sort_place=True)
f.write(teams)
f.write('==Results==\n')
for stage in event_data.tournament_data['stages']:
if stage['bracket']['type'] == 'swiss':
f.write('===Swiss Stage===\n')
f.write('====Swiss Standings====\n')
swiss_table = create_swiss_table(stage, bw_teams)
f.write(swiss_table)
f.write('====Swiss Match Results====\n')
swiss_matches = create_swiss_matches(stage['matches'], event_data.tournament_data['teams'], bw_teams)
f.write(swiss_matches)
elif stage['bracket']['type'] == 'elimination':
f.write('===Playoffs===\n')
bracket = create_elim_bracket(stage, event_data.tournament_data['teams'], bw_teams)
f.write(bracket)
elif stage['bracket']['type'] == 'roundrobin':
f.write('===' + stage['name'] + '===\n')
round_robin_tables = create_round_robin_tables(stage, event_data.tournament_data['teams'], bw_teams,
wiki_name, include_matches=True)
f.write(round_robin_tables)
else:
print('Unsupported bracket type of: ' + stage['bracket']['type'])
if __name__ == '__main__':
main()
| 2.203125 | 2 |
utilidades/texto.py | DeadZombie14/chillMagicCarPygame | 0 | 6474 | import pygame
class Texto:
def __init__(self, screen, text, x, y, text_size = 20, fuente = 'Calibri', italic = False, bold= False, subrayado= False, color = (250, 240, 230), bg = [] ):
self.screen = screen
fg = color
self.coord = x, y
#load font, prepare values
font = pygame.font.Font(None, 80)
size = font.size(text)
# Font
a_sys_font = pygame.font.SysFont(fuente, text_size)
        # Italic
        if italic:
            a_sys_font.set_italic(1)
        # Bold
        if bold:
            a_sys_font.set_bold(1)
        # Underline
        if subrayado:
            a_sys_font.set_underline(1)
        # Build the text surface
        if len(bg) > 1: # if a text background color was given
            ren = a_sys_font.render(text, 1, fg, bg)
        else: # otherwise render with a transparent background
            ren = a_sys_font.render(text, 1, fg)
# self.size = x+size[0], y
self.text_rect = ren.get_rect()
self.text_rect.center = (x,y)
self.image = ren, (x,y)
screen.blit(ren, (x, y))
        # Italic
        if italic:
            a_sys_font.set_italic(0)
        # Bold
        if bold:
            a_sys_font.set_bold(0)
        # Underline
        if subrayado:
            a_sys_font.set_underline(0)
# self.image.blit(ren, self.text_rect)
# self.text_rect = (x, y),ren.get_size()
# text = str(self.counter)
# label = self.myfont.render(text, 1, (255,0,0))
# text_rect = label.get_rect()
# text_rect.center = (50,50)
# self.image.blit(label, text_rect)
pass
def getProperties(self):
return self.text_rect
def redraw(self):
self.screen.blit(self.image[0], self.image[1])
pass
##################### USAGE EXAMPLE ##############################
# texto1 = Texto(screen, 'Hello', 10, 10)
class TextArea():
def __init__(self, screen, text, x, y, fuente='Calibri', text_size = 20, color=pygame.Color('black')):
self.coord = x, y
font = pygame.font.SysFont(fuente, text_size)
words = [word.split(' ') for word in text.splitlines()] # 2D array where each row is a list of words.
space = font.size(' ')[0] # The width of a space.
max_width, max_height = screen.get_size()
pos = x,y
for line in words:
for word in line:
word_surface = font.render(word, 0, color)
word_width, word_height = word_surface.get_size()
if x + word_width >= max_width:
x = pos[0] # Reset the x.
y += word_height # Start on new row.
screen.blit(word_surface, (x, y))
x += word_width + space
x = pos[0] # Reset the x.
y += word_height # Start on new row.
self.size = word_width, word_height
pass
def getProperties(self):
return self.size, self.coord
##################### USAGE EXAMPLE ##############################
# textarea1 = TextArea(screen, 'Hello world, how are you today') | 3.171875 | 3 |
training_xgboost_model.py | MighTy-Weaver/Inefficient-AC-detection | 2 | 6475 | # This is the code to train the xgboost model with cross-validation for each unique room in the dataset.
# Models are dumped into ./models and results are dumped into two csv files in the current work directory.
import argparse
import json
import math
import os
import pickle
import warnings
from typing import Tuple
import numpy as np
import pandas as pd
import xgboost as xgb
from hyperopt import fmin, tpe, hp, STATUS_OK, Trials
from imblearn.over_sampling import SMOTE
from numpy.random import RandomState
from sklearn.metrics import r2_score, mean_squared_error
from sklearn.model_selection import train_test_split
from sklearn.utils import compute_sample_weight
from tqdm import tqdm
from xgboost import DMatrix, cv
# Set up an argument parser to decide the metric function
parser = argparse.ArgumentParser()
parser.add_argument("--metric", choices=['R2', 'RMSE'], type=str, required=False, default='R2',
help="The evaluation metric you want to use to train the XGBoost model")
parser.add_argument("--log", choices=[0, 1, 100], type=int, required=False, default=0,
help="Whether to print out the training progress")
parser.add_argument("--SMOTE", choices=[0, 1], type=int, required=False, default=1, help="Whether use the SMOTE or not")
parser.add_argument("--SMOGN", choices=[0, 1], type=int, required=False, default=0, help="Whether use the SMOGN or not")
parser.add_argument("--SampleWeight", choices=[0, 1], type=int, required=False, default=0,
help="Whether use the sample weight")
args = parser.parse_args()
# Ignore all the warnings and set pandas to display every column and row everytime we print a dataframe
warnings.filterwarnings('ignore')
pd.set_option('display.max_columns', None)
pd.set_option('display.max_rows', None)
assert args.SMOTE != args.SMOGN, "Can't use SMOTE and SMOGN at the same time!"
# Load the data with a positive AC electricity consumption value, and drop the time data as we don't need them
data = pd.read_csv("summer_data_compiled.csv", index_col=0)
data = data[data.AC > 0].drop(['Time', 'Date', 'Hour'], axis=1).reset_index(drop=True)
# Create some directory to store the models and future analysis figures.
# log_folder_name = "Test_{}_{}".format(args.metric, datetime.now().strftime("%Y_%m_%d_%H_%M_%S"))
log_folder_name = "Test_R2_HYPEROPT"
log_folder_name = log_folder_name + "_SMOTE" if args.SMOTE else log_folder_name
log_folder_name = log_folder_name + "_SMOGN" if args.SMOGN else log_folder_name
log_folder_name = log_folder_name + "_SW" if args.SampleWeight else log_folder_name
previous_parameter_folder = "Test_R2_HYPEROPT"
assert log_folder_name != previous_parameter_folder, "Previous folder name exists"
if not os.path.exists('./{}/'.format(log_folder_name)):
os.mkdir('./{}'.format(log_folder_name))
os.mkdir('./{}/models/'.format(log_folder_name))
os.mkdir('./{}/trntst_models/'.format(log_folder_name))
# Define our evaluation functions
def RMSE(predt: np.ndarray, dtrain: DMatrix) -> Tuple[str, float]:
truth_value = dtrain.get_label()
    root_squared_error = math.sqrt(mean_squared_error(truth_value, predt))
    return "RMSE", root_squared_error
def R2(predt: np.ndarray, dtrain: DMatrix) -> Tuple[str, float]:
truth_value = dtrain.get_label()
r2_value = r2_score(truth_value, predt)
return "R2", r2_value
def fobjective(space):
param_dict_tunning = {'max_depth': int(space['max_depth']),
'learning_rate': space['learning_rate'],
'colsample_bytree': space['colsample_bytree'],
'min_child_weight': int(space['min_child_weight']),
'reg_alpha': int(space['reg_alpha']),
'reg_lambda': space['reg_lambda'],
'subsample': space['subsample'],
'min_split_loss': space['min_split_loss'],
'objective': 'reg:squarederror'}
xgb_cv_result = xgb.cv(dtrain=data_matrix, params=param_dict_tunning, nfold=5,
early_stopping_rounds=30, as_pandas=True, num_boost_round=200,
seed=seed, metrics='rmse', maximize=False, shuffle=True)
return {"loss": (xgb_cv_result["test-rmse-mean"]).tail(1).iloc[0], "status": STATUS_OK}
eval_dict = {'RMSE': RMSE, 'R2': R2}
print("Start Training The Models")
# Create two dataframes to store the result during the training and after the training.
error_csv = pd.DataFrame(
columns=['room', 'train-{}-mean'.format(args.metric), 'train-{}-std'.format(args.metric), 'train-rmse-mean',
'train-rmse-std', 'test-{}-mean'.format(args.metric), 'test-{}-std'.format(args.metric), 'test-rmse-mean',
'test-rmse-std'])
prediction_csv = pd.DataFrame(columns=['room', 'observation', 'prediction'])
room_list = data['Location'].unique()
# ranging through all the rooms and do the training and cross-validation for each room.
for room in tqdm(room_list):
seed = 2030 + room
    # Five rooms have low quality data and we delete them manually
if room == 309 or room == 312 or room == 826 or room == 917 or room == 1001:
continue
# We extract the data of particular room and run the SMOTE algorithm on it.
room_data = data[data.Location == room].drop(['Location'], axis=1).reset_index(drop=True)
if args.SMOTE:
# Label all the AC data by 0.75, all AC above 0.75 will be marked as 1, otherwise 0. Split into X and y
room_data['SMOTE_split'] = (room_data['AC'] > 0.75).astype('int')
X = room_data.drop(['SMOTE_split'], axis=1)
y = room_data['SMOTE_split']
# Run the SMOTE algorithm and retrieve the result.
model_smote = SMOTE(random_state=621, k_neighbors=3)
room_data_smote, smote_split = model_smote.fit_resample(X, y)
# concat the result from SMOTE and split the result into X and y for training.
room_data_smote = pd.concat([room_data_smote, smote_split], axis=1)
y = room_data_smote['AC']
X = room_data_smote.drop(['AC', 'SMOTE_split'], axis=1)
elif args.SMOGN:
if len(room_data) < 500:
room_data['SMOTE_split'] = (room_data['AC'] > 0.75).astype('int')
X = room_data.drop(['SMOTE_split'], axis=1)
y = room_data['SMOTE_split']
# Run the SMOTE algorithm and retrieve the result.
model_smote = SMOTE(random_state=621, k_neighbors=3)
room_data_smote, smote_split = model_smote.fit_resample(X, y)
# concat the result from SMOTE and split the result into X and y for training.
room_data_smote = pd.concat([room_data_smote, smote_split], axis=1)
y = room_data_smote['AC']
X = room_data_smote.drop(['AC', 'SMOTE_split'], axis=1)
else:
room_data = pd.read_csv('./SMOGN_processed/{}.csv'.format(room), index_col=0)
y = room_data['AC']
X = room_data.drop(['AC'], axis=1)
else:
y = pd.DataFrame(room_data['AC'].fillna(method='pad'))
X = room_data.drop(['AC'], axis=1).fillna(method='pad')
if args.SampleWeight:
class_sample = pd.cut(y, bins=15)
weight = compute_sample_weight(class_weight="balanced", y=class_sample)
X = X.to_numpy()
# Build another full data matrix for the built-in cross validation function to work.
data_matrix = DMatrix(data=X, label=y, weight=weight) if args.SampleWeight else DMatrix(data=X, label=y)
# Cross_validation with hyper-parameter tuning
space = {'max_depth': hp.quniform("max_depth", 3, 10, 1),
'learning_rate': hp.uniform("learning_rate", 0.1, 3),
'colsample_bytree': hp.uniform("colsample_bytree", 0.5, 1),
'min_child_weight': hp.quniform("min_child_weight", 1, 20, 1),
'reg_alpha': hp.quniform("reg_alpha", 0, 100, 1),
'reg_lambda': hp.uniform("reg_lambda", 0, 2),
'subsample': hp.uniform("subsample", 0.5, 1),
'min_split_loss': hp.uniform("min_split_loss", 0, 9)}
if os.path.exists('./{}/models/{}_parameter.npy'.format(previous_parameter_folder, room)):
best_param_dict = np.load('./{}/models/{}_parameter.npy'.format(previous_parameter_folder, room),
allow_pickle=True).item()
np.save('./{}/models/{}_parameter.npy'.format(log_folder_name, room), best_param_dict)
else:
trials = Trials()
best_hyperparams = fmin(fn=fobjective, space=space, algo=tpe.suggest, max_evals=400, trials=trials,
rstate=RandomState(seed))
# setup our training parameters and a model variable as model checkpoint
best_param_dict = {'objective': 'reg:squarederror', 'max_depth': int(best_hyperparams['max_depth']),
'reg_alpha': best_hyperparams['reg_alpha'], 'reg_lambda': best_hyperparams['reg_lambda'],
'min_child_weight': best_hyperparams['min_child_weight'],
'colsample_bytree': best_hyperparams['colsample_bytree'],
'learning_rate': best_hyperparams['learning_rate'],
'subsample': best_hyperparams['subsample'],
'min_split_loss': best_hyperparams['min_split_loss']}
np.save('./{}/models/{}_parameter.npy'.format(log_folder_name, room), best_param_dict)
# Use the built-in cv function to do the cross validation, still with ten folds, this will return us the results.
xgb_cv_result = cv(dtrain=data_matrix, params=best_param_dict, nfold=5,
early_stopping_rounds=30, as_pandas=True, num_boost_round=200,
seed=seed, shuffle=True, feval=eval_dict[args.metric], maximize=True)
xgb_cv_result['room'] = room
error_csv.loc[len(error_csv)] = xgb_cv_result.loc[len(xgb_cv_result) - 1]
# Use one training_testing for ploting, and save both ground truth and prediction value into the dataframe
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=seed)
d_train = DMatrix(X_train, label=y_train)
d_test = DMatrix(X_test, label=y_test)
watchlist = [(d_test, 'eval'), (d_train, 'train')]
xgb_model_train_test = xgb.train(params=best_param_dict, dtrain=d_train, num_boost_round=200, evals=watchlist,
verbose_eval=args.log, xgb_model=None, feval=eval_dict[args.metric], maximize=True)
prediction = np.array(xgb_model_train_test.predict(d_test)).tolist()
real = np.array(y_test).tolist()
prediction_csv.loc[len(prediction_csv)] = {'room': room, 'observation': json.dumps(real),
'prediction': json.dumps(prediction)}
# Dump the error dataframes into csv files.
error_csv.to_csv('./{}/error.csv'.format(log_folder_name), index=False)
prediction_csv.to_csv('./{}/prediction.csv'.format(log_folder_name), index=False)
    # Develop a model using the whole original dataset, and save the model
xgb_model_full = xgb.train(params=best_param_dict, dtrain=data_matrix, num_boost_round=200, evals=watchlist,
verbose_eval=args.log, xgb_model=None, feval=eval_dict[args.metric], maximize=True)
# Save all the models we trained for future use
pickle.dump(xgb_model_train_test, open('./{}/trntst_models/{}.pickle.bat'.format(log_folder_name, room), 'wb'))
pickle.dump(xgb_model_full, open('./{}/models/{}.pickle.bat'.format(log_folder_name, room), 'wb'))
print("Training finished!")
| 2.734375 | 3 |
setup.py | editorconfig/editorconfig-core-py | 70 | 6476 | import os
from setuptools import setup
# Read the version
g = {}
with open(os.path.join("editorconfig", "version.py"), "rt") as fp:
exec(fp.read(), g)
v = g['VERSION']
version = ".".join(str(x) for x in v[:3])
if v[3] != "final":
version += "-" + v[3]
setup(
name='EditorConfig',
version=version,
author='EditorConfig Team',
packages=['editorconfig'],
url='http://editorconfig.org/',
license='python',
description='EditorConfig File Locator and Interpreter for Python',
long_description=open('README.rst').read(),
entry_points = {
'console_scripts': [
'editorconfig = editorconfig.__main__:main',
]
},
classifiers=[
'License :: OSI Approved :: Python Software Foundation License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: Implementation :: PyPy',
],
)
| 1.78125 | 2 |
vaping/config.py | josephburnett/vaping | 0 | 6477 | <filename>vaping/config.py<gh_stars>0
import re
import munge
def parse_interval(val):
"""
converts a string to float of seconds
.5 = 500ms
90 = 1m30s
**Arguments**
- val (`str`)
"""
re_intv = re.compile(r"([\d\.]+)([a-zA-Z]+)")
val = val.strip()
total = 0.0
for match in re_intv.findall(val):
unit = match[1]
count = float(match[0])
if unit == "s":
total += count
elif unit == "m":
total += count * 60
elif unit == "ms":
total += count / 1000
elif unit == "h":
total += count * 3600
elif unit == "d":
total += count * 86400
else:
raise ValueError("unknown unit from interval string '%s'" % val)
return total
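
# Illustrative examples of the unit handling above:
#   parse_interval("1m30s") -> 90.0
#   parse_interval(".5s")   -> 0.5
#   parse_interval("2h10m") -> 7800.0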
class Config(munge.Config):
"""
Vaping config manager
"""
defaults = {
"config": {
"vaping": {"home_dir": None, "pidfile": "vaping.pid", "plugin_path": [],},
},
"config_dir": "~/.vaping",
"codec": "yaml",
}
| 2.890625 | 3 |
sktime/annotation/tests/test_all_annotators.py | Rubiel1/sktime | 1 | 6478 | # -*- coding: utf-8 -*-
"""Tests for sktime annotators."""
import pandas as pd
import pytest
from sktime.registry import all_estimators
from sktime.utils._testing.estimator_checks import _make_args
ALL_ANNOTATORS = all_estimators(estimator_types="series-annotator", return_names=False)
@pytest.mark.parametrize("Estimator", ALL_ANNOTATORS)
def test_output_type(Estimator):
"""Test annotator output type."""
estimator = Estimator.create_test_instance()
args = _make_args(estimator, "fit")
estimator.fit(*args)
args = _make_args(estimator, "predict")
y_pred = estimator.predict(*args)
assert isinstance(y_pred, pd.Series)
| 2.375 | 2 |
raspberry-pi-camera/cam.py | AlexMassin/mlh-react-vr-website | 1 | 6479 | <gh_stars>1-10
from picamera import PiCamera
from time import sleep
import boto3
import os.path
import subprocess
s3 = boto3.client('s3')
bucket = 'cambucket21'
camera = PiCamera()
# camera.resolution = (1920, 1080)
x = 0
camerafile = x
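# x cycles through 1..6 below, so at most the six most recent clips are kept,
# with older recordings overwritten in place.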
while True:
if (x == 6):
x = 1
else:
x = x + 1
camera.start_preview()
camera.start_recording('/home/pi/' + str(x) + '.h264')
sleep(2)
camera.stop_recording()
camera.stop_preview()
subprocess.Popen("MP4Box -add " + str(x) + ".h264 " + str(x) +".mp4", shell=True)
sleep(1)
s3.upload_file('/home/pi/' + str(x) + '.mp4',bucket,'/home/pi/' + str(x) + '.mp4')
| 2.125 | 2 |
Part_3_advanced/m04_datetime_and_timedelta/datetime_formats/example_1.py | Mikma03/InfoShareacademy_Python_Courses | 0 | 6480 | <reponame>Mikma03/InfoShareacademy_Python_Courses<filename>Part_3_advanced/m04_datetime_and_timedelta/datetime_formats/example_1.py<gh_stars>0
from datetime import datetime
def run_example():
moment_in_time = datetime.fromordinal(256)
print(moment_in_time)
print(moment_in_time.toordinal())
print(moment_in_time.weekday())
print(moment_in_time.isoweekday())
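    # In the proleptic Gregorian calendar, ordinal 256 is 0001-09-13, so the
    # two calls above print 3 (Thursday) for weekday() and 4 for isoweekday().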
other_moment = datetime.fromtimestamp(16_000_000)
print(other_moment)
print(other_moment.timestamp())
print(other_moment.isocalendar())
if __name__ == "__main__":
run_example()
| 3.1875 | 3 |
examples/scripts/segmentation/nnet3-segmenter.py | mxmpl/pykaldi | 916 | 6481 | #!/usr/bin/env python
from __future__ import print_function
from kaldi.segmentation import NnetSAD, SegmentationProcessor
from kaldi.nnet3 import NnetSimpleComputationOptions
from kaldi.util.table import SequentialMatrixReader
# Construct SAD
model = NnetSAD.read_model("final.raw")
post = NnetSAD.read_average_posteriors("post_output.vec")
transform = NnetSAD.make_sad_transform(post)
graph = NnetSAD.make_sad_graph()
decodable_opts = NnetSimpleComputationOptions()
decodable_opts.extra_left_context = 79
decodable_opts.extra_right_context = 21
decodable_opts.extra_left_context_initial = 0
decodable_opts.extra_right_context_final = 0
decodable_opts.frames_per_chunk = 150
decodable_opts.acoustic_scale = 0.3
sad = NnetSAD(model, transform, graph, decodable_opts=decodable_opts)
seg = SegmentationProcessor(target_labels=[2])
# Define feature pipeline as a Kaldi rspecifier
feats_rspec = "ark:compute-mfcc-feats --config=mfcc.conf scp:wav.scp ark:- |"
# Segment
with SequentialMatrixReader(feats_rspec) as f, open("segments", "w") as s:
for key, feats in f:
out = sad.segment(feats)
segments, stats = seg.process(out["alignment"])
seg.write(key, segments, s)
print("segments:", segments, flush=True)
print("stats:", stats, flush=True)
print("global stats:", seg.stats, flush=True)
| 2.203125 | 2 |
src/dataset.py | HeegyuKim/CurseFilter | 0 | 6482 | <reponame>HeegyuKim/CurseFilter
import pandas as pd
import numpy as np
from tokenizers import Tokenizer
import torch
from torch.utils.data import Dataset, DataLoader
from typing import Dict, Any, Tuple
from datasets import load_dataset
class DataFrameDataset(Dataset):
def __init__(self,
tokenizer: Tokenizer,
df: pd.DataFrame,
text_column: str,
label_column: str,
max_length: int = 256,
padding: str = "max_length") -> None:
super().__init__()
inputs = tokenizer(df[text_column].to_list(), padding=padding, max_length=max_length, truncation=True, return_tensors="pt")
self.input_ids = inputs["input_ids"]
self.attention_masks = inputs["attention_mask"]
dtype = np.int64 if len(df[label_column].unique()) > 2 else np.float32
self.labels = torch.from_numpy(df[label_column].values.astype(dtype))
def __len__(self):
return self.input_ids.shape[0]
def __getitem__(self, index: Any) -> Dict:
return self.input_ids[index], self.attention_masks[index], self.labels[index]
def dataloader(self, **kwargs) -> DataLoader:
return DataLoader(self, **kwargs)
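
# Illustrative usage (df, tokenizer and column names are hypothetical):
#   ds = DataFrameDataset(tokenizer, df, text_column="text", label_column="label")
#   loader = ds.dataloader(batch_size=32, shuffle=True)
#   input_ids, attention_mask, labels = next(iter(loader))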
class DataFrameStudentDataset(DataFrameDataset):
def __init__(self,
teacher_model: torch.nn.Module,
teacher_tokenizer: Tokenizer,
student_tokenizer: Tokenizer,
df: pd.DataFrame,
text_column: str,
label_column: str,
max_length: int = 256,
padding: str = "max_length",
device: str = 'cuda') -> None:
super().__init__(student_tokenizer, df, text_column, label_column, max_length, padding)
teacher_ds = DataFrameDataset(
teacher_tokenizer,
df,
text_column,
label_column,
max_length,
padding
)
teacher_model = teacher_model.to(device)
with torch.no_grad():
soft_labels = [self._get_soft_label(teacher_model, teacher_ds, i, device)
for i in range(len(self))]
self.soft_labels = torch.stack(soft_labels)
def __getitem__(self, index: Any) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]:
return *super().__getitem__(index), self.soft_labels[index]
def _get_soft_label(self, model, teacher_ds, index, device):
ids, mask, _ = teacher_ds[index]
ids = ids.unsqueeze(0).to(device)
mask = mask.unsqueeze(0).to(device)
return model(ids, mask).cpu().squeeze(0)
class ApeachDataset(Dataset):
def __init__(self,
split: str,
tokenizer: Tokenizer,
max_length: int = 256,
padding: str = "max_length") -> None:
super().__init__()
dataset = load_dataset("jason9693/APEACH")
texts = dataset[split]['text']
inputs = tokenizer(texts, padding=padding, max_length=max_length, truncation=True, return_tensors="pt")
self.input_ids = inputs["input_ids"]
self.attention_masks = inputs["attention_mask"]
labels = dataset[split]['class']
self.labels = torch.tensor(labels, dtype=torch.float32)
def __len__(self):
return self.input_ids.shape[0]
def __getitem__(self, index: Any) -> Dict:
return self.input_ids[index], self.attention_masks[index], self.labels[index]
def dataloader(self, **kwargs) -> DataLoader:
return DataLoader(self, **kwargs)
class ApeachStudentDataset(ApeachDataset):
def __init__(self,
teacher_model: torch.nn.Module,
split: str,
teacher_tokenizer: Tokenizer,
student_tokenizer: Tokenizer,
max_length: int = 256,
padding: str = "max_length",
device: str="cuda") -> None:
super().__init__(split, student_tokenizer, max_length, padding)
teacher_ds = ApeachDataset(split, teacher_tokenizer, max_length, padding)
teacher_model = teacher_model.to(device)
with torch.no_grad():
soft_labels = [self._get_soft_label(teacher_model, teacher_ds, i, device)
for i in range(len(self))]
self.soft_labels = torch.stack(soft_labels)
def __getitem__(self, index: Any) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]:
return *super().__getitem__(index), self.soft_labels[index]
def _get_soft_label(self, model, teacher_ds, index, device):
ids, mask, _ = teacher_ds[index]
ids = ids.unsqueeze(0).to(device)
mask = mask.unsqueeze(0).to(device)
return model(ids, mask).cpu().squeeze(0) | 2.453125 | 2 |
helper_tools/raspi_OMX-Player_Howto_demo.py | stko/Schnipsl | 0 | 6483 | <gh_stars>0
#!/usr/bin/python
# mp4museum.org by <NAME> 2019
import os
import sys
import glob
from subprocess import Popen, PIPE
import RPi.GPIO as GPIO
FNULL = open(os.devnull, "w")
# setup GPIO pin
GPIO.setmode(GPIO.BOARD)
GPIO.setup(11, GPIO.IN, pull_up_down = GPIO.PUD_DOWN)
GPIO.setup(13, GPIO.IN, pull_up_down = GPIO.PUD_DOWN)
# functions to be called by event listener
def buttonPause(channel):
player.stdin.write("p")
def buttonNext(channel):
player.stdin.write("q")
# add event listener
GPIO.add_event_detect(11, GPIO.FALLING, callback = buttonPause, bouncetime = 234)
GPIO.add_event_detect(13, GPIO.FALLING, callback = buttonNext, bouncetime = 1234)
# please do not remove my logo screen
player = Popen(['omxplayer', '--adev', 'both', '/home/pi/mp4museum.mp4'],stdin=PIPE,stdout=FNULL)
player.wait()
# the loop
while(1):
for files in sorted(glob.glob(r'/media/*/*.mp4')):
player = Popen(['omxplayer','--adev', 'both',files],stdin=PIPE,stdout=FNULL)
player.wait()
| 2.609375 | 3 |
dash_app/compare_alg.py | zeyu2001/ICT1002-Python | 1 | 6484 | """
Comparison between the efficiency of the Boyer-Moore algorithm and the naive substring search algorithm.
The runtimes for both algorithms are plotted on the same axes.
"""
import matplotlib.pyplot as plt
import numpy as np
import string
import time
import random
from bm_alg import boyer_moore_match, naive_match
# number of test cases for each iteration
TEST_CASES = 100
# test cases generated based on this pattern (vary_n)
PATTERN = 'ICT1002 is a really great module!'
# test cases generated based on this text (vary_m)
TEXT = PATTERN * 50
def generate_test_cases(pattern, length, k):
"""
Generates <k> test cases with text of length <length> containing <pattern>
Args:
pattern (str): A pattern within the text.
length (int): The length of the pattern
k (int): The number of test cases
Returns:
A list of test cases, i.e. strings that contain <pattern>
"""
result = []
for _ in range(k):
text = pattern
while len(text) < length:
direction = random.choice((0, 1))
# 0 --> Left
if direction == 0:
text = random.choice(string.ascii_lowercase) + text
# 1 --> Right
else:
text = text + random.choice(string.ascii_lowercase)
result.append(text)
return result
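
# For example (illustrative), generate_test_cases("ab", 5, 2) returns two random
# strings of length 5 that each contain "ab", e.g. ["xqabz", "abzzy"].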
def vary_n(max_n):
x = [n for n in range(1, max_n + 1)]
y_bm = []
y_naive = []
for n in x:
print('n =', n)
bm_result = []
naive_result = []
if n >= len(PATTERN):
# generate test cases of length n, which contain PATTERN
test_cases = generate_test_cases(PATTERN, n, TEST_CASES)
else:
# generate test cases of length n, which do not (and can not possibly) contain PATTERN
test_cases = generate_test_cases('', n, TEST_CASES)
for test_case in test_cases:
start = time.time()
naive_match(test_case, PATTERN)
naive_result.append(time.time() - start)
start = time.time()
boyer_moore_match(test_case, PATTERN)
bm_result.append(time.time() - start)
# obtain median runtime (mean is affected by outliers)
y_naive.append(sorted(naive_result)[TEST_CASES // 2])
y_bm.append(sorted(bm_result)[TEST_CASES // 2])
plt.plot(x, y_naive, label="Naive Algorithm")
plt.plot(x, y_bm, label="Boyer-Moore Algorithm")
plt.xlabel("n")
plt.ylabel("Runtime")
plt.title("Substring Search Algorithm Efficiency")
plt.legend()
plt.show()
def vary_m(max_m):
x = [m for m in range(1, max_m + 1)]
y_bm = []
y_naive = []
for m in x:
print('m =', m)
bm_result = []
naive_result = []
# generate test cases of length n
test_cases = generate_test_cases('', m, TEST_CASES)
for test_case in test_cases:
start = time.time()
naive_match(TEXT, test_case)
naive_result.append(time.time() - start)
start = time.time()
boyer_moore_match(TEXT, test_case)
bm_result.append(time.time() - start)
# obtain median runtime (mean is affected by outliers)
y_naive.append(sorted(naive_result)[TEST_CASES // 2])
y_bm.append(sorted(bm_result)[TEST_CASES // 2])
plt.plot(x, y_naive, label="Naive Algorithm")
plt.plot(x, y_bm, label="Boyer-Moore Algorithm")
plt.xlabel("m")
plt.ylabel("Runtime")
plt.title("Substring Search Algorithm Efficiency")
plt.legend()
plt.show()
def main():
done = False
print("m = Length of pattern\nn = Length of text\n")
print("1. Constant m, vary n")
print("2. Constant n, vary m")
print("3. Quit\n")
while not done:
choice = input("Your choice: ")
if choice == '1':
max_n = input("Upper limit of n: ")
while not (max_n.isnumeric() and int(max_n) > 1):
print("That is not a valid number.")
max_n = input("Upper limit of n: ")
vary_n(int(max_n))
elif choice == '2':
max_m = input("Upper limit of m: ")
while not (max_m.isnumeric() and int(max_m) > 1):
print("That is not a valid number.")
max_m = input("Upper limit of m: ")
vary_m(int(max_m))
elif choice == '3':
done = True
else:
print("That is not a valid option.")
if __name__ == '__main__':
main()
| 3.671875 | 4 |
TSIS_3/3774.py | GMKanat/PP2_spring | 0 | 6485 | ans = dict()
pairs = dict()
def create_tree(p):
if p in ans:
return ans[p]
else:
try:
res = 0
if p in pairs:
for ch in pairs[p]:
res += create_tree(ch) + 1
ans[p] = res
return res
except:
pass
n = int(input())
for i in range(0, n-1):
child, parent = input().split()
if parent in pairs:
pairs[parent].append(child)
else:
pairs[parent] = [child]
if n > 0:
for k in pairs:
create_tree(k)
for key in sorted(ans.keys()):
print(key, ans[key]) | 3.53125 | 4 |
italicizer.py | Dorijan-Cirkveni/Miniprojects | 0 | 6486 | def italicize(s):
b = False
res = ''
for e in s:
if e == '"':
if b:
res += '{\\i}' + e
else:
res += e + '{i}'
b=not b
else:
res += e
return res
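
# Example (illustrative): italicize('say "hi" now') returns 'say "{i}hi{\i}" now' -
# an {i} tag is emitted after each opening quote and {\i} before each closing one.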
def main():
F=open('test_in.txt','r')
X=F.read()
F.close()
print(italicize(X))
return
if __name__ == "__main__":
main()
| 3.859375 | 4 |
maps/views.py | WPRDC/neighborhood-simulacrum | 0 | 6487 | import json
from typing import Type, TYPE_CHECKING
from django.core.exceptions import ObjectDoesNotExist
from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_page
from rest_framework import viewsets, filters, renderers
from rest_framework.exceptions import NotFound
from rest_framework.negotiation import BaseContentNegotiation
from rest_framework.permissions import IsAuthenticatedOrReadOnly, AllowAny
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from indicators.models import Variable, DataViz
from indicators.utils import get_geog_model
from maps.models import DataLayer
from maps.serializers import DataLayerSerializer, DataLayerDetailsSerializer
from profiles.settings import VIEW_CACHE_TTL
if TYPE_CHECKING:
from geo.models import AdminRegion
from indicators.models.viz import MiniMap
class DataLayerViewSet(viewsets.ModelViewSet):
queryset = DataLayer.objects.all()
serializer_class = DataLayerSerializer
permission_classes = [IsAuthenticatedOrReadOnly, ]
filter_backends = [filters.SearchFilter, ]
def get_serializer_class(self):
if self.action == 'list':
return DataLayerSerializer
return DataLayerDetailsSerializer


class GeoJSONRenderer(renderers.BaseRenderer):
    """Renders a dict as a raw GeoJSON string."""
    media_type = 'application/geo+json'
    format = 'geojson'

    def render(self, data, media_type=None, renderer_context=None):
        return json.dumps(data)
class GeoJSONContentNegotiation(BaseContentNegotiation):
"""
Custom content negotiation scheme for GeoJSON files.
`GeoJSONRenderer` is used for downloading geojson files
`JSONRenderer` is used for ajax calls.
"""
def select_parser(self, request, parsers):
return super(GeoJSONContentNegotiation, self).select_parser(request, parsers)
def select_renderer(self, request: Request, renderers, format_suffix=None):
renderer = renderers[0]
if request.query_params.get('download', False):
renderer = GeoJSONRenderer()
return renderer, renderer.media_type
class GeoJSONDataLayerView(APIView):
permission_classes = [AllowAny, ]
content_negotiation_class = GeoJSONContentNegotiation
@method_decorator(cache_page(VIEW_CACHE_TTL))
def get(self, request: Request, map_slug=None):
try:
data_layer: DataLayer = DataLayer.objects.get(slug=map_slug)
geojson = data_layer.as_geojson()
except KeyError as e:
# when the geog is wrong todo: make 400 malformed with info on available geo types
raise NotFound
except ObjectDoesNotExist as e:
raise NotFound
if request.query_params.get('download', False):
headers = {
'Content-Disposition': f'attachment; filename="{map_slug}.geojson"'
}
return Response(geojson, headers=headers, content_type='application/geo+json')
return Response(geojson)
| 1.851563 | 2 |
magma/operators.py | Kuree/magma | 0 | 6488 | from magma import _BitType, BitType, BitsType, UIntType, SIntType
class MantleImportError(RuntimeError):
pass
class UndefinedOperatorError(RuntimeError):
pass
def raise_mantle_import_error_unary(self):
raise MantleImportError(
"Operators are not defined until mantle has been imported")
def raise_mantle_import_error_binary(self, other):
raise MantleImportError(
"Operators are not defined until mantle has been imported")
def define_raise_undefined_operator_error(type_str, operator, type_):
if type_ == "unary":
def wrapped(self):
raise UndefinedOperatorError(
f"{operator} is undefined for {type_str}")
else:
assert type_ == "binary"
def wrapped(self, other):
raise UndefinedOperatorError(
f"{operator} is undefined for {type_str}")
return wrapped
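
# For example (illustrative): after the assignments below run at import time,
# `a & b` on two BitType values raises MantleImportError until mantle has been
# imported, while `a + b` on BitsType values raises UndefinedOperatorError.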
for op in ("__eq__", "__ne__"):
setattr(_BitType, op, raise_mantle_import_error_binary)
for op in (
"__and__",
"__or__",
"__xor__",
"__invert__",
"__add__",
"__sub__",
"__mul__",
"__div__",
"__lt__",
# __le__ skipped because it's used for assignment on inputs
# "__le__",
"__gt__",
"__ge__"
):
if op == "__invert__":
setattr(_BitType, op,
define_raise_undefined_operator_error("_BitType", op, "unary"))
else:
setattr(
_BitType, op,
define_raise_undefined_operator_error("_BitType", op, "binary"))
for op in ("__and__",
"__or__",
"__xor__",
"__invert__"
):
if op == "__invert__":
setattr(BitType, op, raise_mantle_import_error_unary)
else:
setattr(BitType, op, raise_mantle_import_error_binary)
for op in ("__and__",
"__or__",
"__xor__",
"__invert__",
"__lshift__",
"__rshift__",
):
if op == "__invert__":
setattr(BitsType, op, raise_mantle_import_error_unary)
else:
setattr(BitsType, op, raise_mantle_import_error_binary)
for op in ("__add__",
"__sub__",
"__mul__",
"__div__",
"__lt__",
# __le__ skipped because it's used for assignment on inputs
# "__le__",
"__gt__",
"__ge__"
):
setattr(BitsType, op,
define_raise_undefined_operator_error("BitsType", op, "binary"))
for op in ("__add__",
"__sub__",
"__mul__",
"__div__",
"__lt__",
# __le__ skipped because it's used for assignment on inputs
# "__le__",
"__gt__",
"__ge__"
):
setattr(SIntType, op, raise_mantle_import_error_binary)
setattr(UIntType, op, raise_mantle_import_error_binary)
| 2.53125 | 3 |
src/sultan/result.py | bquantump/sultan | 0 | 6489 | <gh_stars>0
import subprocess
import sys
import time
import traceback
from queue import Queue
from sultan.core import Base
from sultan.echo import Echo
from threading import Thread
class Result(Base):
"""
Class that encompasses the result of a POpen command.
"""
def __init__(self, process, commands, context, streaming=False, exception=None, halt_on_nonzero=False):
super(Result, self).__init__()
self._process = process
self._commands = commands
self._context = context
self._exception = exception
self.__echo = Echo()
self._streaming = streaming
self.rc = None
        self._halt_on_nonzero = halt_on_nonzero
if process and streaming:
self.is_complete = False
self.__stdout = Queue()
self.__stderr = Queue()
self.__stdin = Queue()
self._stdout_t = Thread(target=self.read_output, args=(process.stdout, self.__stdout))
self._stderr_t = Thread(target=self.read_output, args=(process.stderr, self.__stderr))
self._stdin_t = Thread(target=self.write_input)
self._wait_t = Thread(target=self.wait_on_process)
for t in (self._stdout_t, self._stderr_t, self._stdin_t, self._wait_t):
t.daemon = True
t.start()
else:
self.is_complete = True
try:
stdout, stderr = process.communicate()
except:
stdout, stderr = None, None
try:
self.rc = process.returncode
except:
pass
self.__stdout = stdout.strip().splitlines() if stdout else []
self.__stderr = stderr.strip().splitlines() if stderr else []
if self._halt_on_nonzero and self.rc != 0:
print(self.stderr)
raise subprocess.CalledProcessError(self.rc, ''.join(self._commands), self.stderr)
# self.dump_exception()
def read_output(self, pipe, q):
for line in iter(pipe.readline, b''):
if line:
q.put(line.strip())
elif self.is_complete:
break
else:
time.sleep(0.1)
pipe.close()
def write_input(self):
for line in iter(self.__stdin.get, None):
if line.endswith("\n"):
self._process.stdin.write(line)
else:
self._process.stdin.write(line + "\n")
def wait_on_process(self):
self.rc = self._process.wait()
self.__stdin.put(None)
self.is_complete = True
for t in (self._stdout_t, self._stderr_t, self._stdin_t):
t.join()
if self._halt_on_nonzero and self.rc != 0:
self.dump_exception()
sys.exit()
def dump_exception(self):
if not self._exception:
try:
raise subprocess.CalledProcessError(self.rc, ''.join(self._commands), self.stderr)
except subprocess.CalledProcessError as e:
self._exception = e
self.__echo.critical("Unable to run '%s'" % self._commands)
# traceback
self.print_traceback()
# standard out
self.print_stdout()
# standard error
self.print_stderr()
# print debug information
self.__display_exception_debug_information()
if self._halt_on_nonzero:
raise self._exception
def __display_exception_debug_information(self):
def echo_debug_info(key):
if self._context and len(self._context) > 0:
self.__echo.warn("\t - %s: %s" % (key, self._context[0].get(key, 'N/A')))
self.__echo.warn("The following are additional information that can be used to debug this exception.")
self.__echo.warn("The following is the context used to run:")
echo_debug_info('cwd')
echo_debug_info('sudo')
echo_debug_info('user')
echo_debug_info('hostname')
echo_debug_info('env')
echo_debug_info('logging')
echo_debug_info('executable')
echo_debug_info('ssh_config')
echo_debug_info('src')
def __str__(self):
return '\n'.join(self.stdout)
def __format_line(self, msg):
return '| %s' % msg
def __format_lines_error(self, lines):
for line in lines:
self.__echo.critical(self.__format_line(line))
def __format_lines_info(self, lines):
for line in lines:
self.__echo.info(self.__format_line(line))
@property
def stdout(self):
"""
Converts stdout string to a list.
"""
if self._streaming:
stdout = []
while not self.__stdout.empty():
try:
line = self.__stdout.get_nowait()
stdout.append(line)
except:
pass
else:
stdout = self.__stdout
return stdout
@property
def stderr(self):
"""
Converts stderr string to a list.
"""
if self._streaming:
stderr = []
while not self.__stderr.empty():
try:
line = self.__stderr.get_nowait()
stderr.append(line)
except:
pass
else:
stderr = self.__stderr
return stderr
def stdin(self, line):
"""
Sends input to stdin.
"""
if self._streaming:
self.__stdin.put(line)
@property
def traceback(self):
"""
Converts traceback string to a list.
"""
if self._exception:
return traceback.format_exc().split("\n")
else:
return []
@property
def is_success(self):
"""
Returns if the result of the command was a success.
True for success, False for failure.
"""
return self.is_complete and self.rc == 0
@property
def is_failure(self):
"""
Returns if the result of the command was a failure.
True for failure, False for succes.
"""
return self.is_complete and not self.rc == 0
@property
def has_exception(self):
'''
Returns True if self._exception is not empty.
'''
return bool(self._exception)
def print_stdout(self, always_print=False):
"""
Prints the stdout to console - if there is any stdout, otherwise does nothing.
:param always_print: print the stdout, even if there is nothing in the buffer (default: false)
"""
if self.__stdout or always_print:
self.__echo.info("---------------" + "-" * 100)
self.__format_lines_info(self.stdout)
self.__echo.info("---------------" + "-" * 100)
def print_stderr(self, always_print=False):
"""
Prints the stderr to console - if there is any stdout, otherwise does nothing.
:param always_print: print the stderr, even if there is nothing in the buffer (default: false)
"""
if self.__stderr or always_print:
self.__echo.critical("--{ STDERR }---" + "-" * 100)
self.__format_lines_error(self.stderr)
self.__echo.critical("---------------" + "-" * 100)
def print_traceback(self, always_print=False):
"""
Prints the traceback to console - if there is any traceback, otherwise does nothing.
:param always_print: print the traceback, even if there is nothing in the buffer (default: false)
"""
if self._exception or always_print:
self.__echo.critical("--{ TRACEBACK }" + "-" * 100)
self.__format_lines_error(self.traceback)
self.__echo.critical("---------------" + "-" * 100)
| 2.671875 | 3 |
great_expectations/cli/datasource.py | orenovadia/great_expectations | 0 | 6490 | <filename>great_expectations/cli/datasource.py<gh_stars>0
import os
import click
from .util import cli_message
from great_expectations.render import DefaultJinjaPageView
from great_expectations.version import __version__ as __version__
def add_datasource(context):
cli_message(
"""
========== Datasources ==========
See <blue>https://docs.greatexpectations.io/en/latest/core_concepts/datasource.html?utm_source=cli&utm_medium=init&utm_campaign={0:s}</blue> for more information about datasources.
""".format(__version__.replace(".", "_"))
)
data_source_selection = click.prompt(
msg_prompt_choose_data_source,
type=click.Choice(["1", "2", "3", "4"]),
show_choices=False
)
cli_message(data_source_selection)
if data_source_selection == "1": # pandas
path = click.prompt(
msg_prompt_filesys_enter_base_path,
# default='/data/',
type=click.Path(
exists=False,
file_okay=False,
dir_okay=True,
readable=True
),
show_default=True
)
if path.startswith("./"):
path = path[2:]
if path.endswith("/"):
basenamepath = path[:-1]
else:
basenamepath = path
default_data_source_name = os.path.basename(basenamepath) + "__dir"
data_source_name = click.prompt(
msg_prompt_datasource_name,
default=default_data_source_name,
show_default=True
)
context.add_datasource(data_source_name, "pandas",
base_directory=os.path.join("..", path))
elif data_source_selection == "2": # sqlalchemy
data_source_name = click.prompt(
msg_prompt_datasource_name, default="mydb", show_default=True)
cli_message(msg_sqlalchemy_config_connection.format(
data_source_name))
drivername = click.prompt("What is the driver for the sqlalchemy connection?", default="postgres",
show_default=True)
host = click.prompt("What is the host for the sqlalchemy connection?", default="localhost",
show_default=True)
port = click.prompt("What is the port for the sqlalchemy connection?", default="5432",
show_default=True)
username = click.prompt("What is the username for the sqlalchemy connection?", default="postgres",
show_default=True)
password = click.prompt("What is the password for the sqlalchemy connection?", default="",
show_default=False, hide_input=True)
database = click.prompt("What is the database name for the sqlalchemy connection?", default="postgres",
show_default=True)
credentials = {
"drivername": drivername,
"host": host,
"port": port,
"username": username,
"password": password,
"database": database
}
context.add_profile_credentials(data_source_name, **credentials)
context.add_datasource(
data_source_name, "sqlalchemy", profile=data_source_name)
elif data_source_selection == "3": # Spark
path = click.prompt(
msg_prompt_filesys_enter_base_path,
default='/data/',
type=click.Path(
exists=True,
file_okay=False,
dir_okay=True,
readable=True
),
show_default=True
)
if path.startswith("./"):
path = path[2:]
if path.endswith("/"):
basenamepath = path[:-1]
default_data_source_name = os.path.basename(basenamepath)
data_source_name = click.prompt(
msg_prompt_datasource_name, default=default_data_source_name, show_default=True)
context.add_datasource(data_source_name, "spark", base_directory=path)
# if data_source_selection == "5": # dbt
# dbt_profile = click.prompt(msg_prompt_dbt_choose_profile)
# log_message(msg_dbt_go_to_notebook, color="blue")
# context.add_datasource("dbt", "dbt", profile=dbt_profile)
if data_source_selection == "4": # None of the above
cli_message(msg_unknown_data_source)
print("Skipping datasource configuration. You can add a datasource later by editing the great_expectations.yml file.")
return None
    if data_source_name is not None:
cli_message(
"""
========== Profiling ==========
Would you like to profile '{0:s}' to create candidate expectations and documentation?
Please note: As of v0.7.0, profiling is still a beta feature in Great Expectations.
This generation of profilers will evaluate the entire data source (without sampling) and may be very time consuming.
As a rule of thumb, we recommend starting with data smaller than 100MB.
To learn more about profiling, visit <blue>https://docs.greatexpectations.io/en/latest/guides/profiling.html?utm_source=cli&utm_medium=init&utm_campaign={1:s}</blue>.
""".format(data_source_name, __version__.replace(".", "_"))
)
if click.confirm("Proceed?",
default=True
):
profiling_results = context.profile_datasource(
data_source_name,
max_data_assets=20
)
print("\nDone.\n\nProfiling results are saved here:")
for profiling_result in profiling_results:
data_asset_name = profiling_result[1]['meta']['data_asset_name']
expectation_suite_name = profiling_result[1]['meta']['expectation_suite_name']
run_id = profiling_result[1]['meta']['run_id']
print(" {0:s}".format(context.get_validation_location(
data_asset_name, expectation_suite_name, run_id)['filepath']))
cli_message(
"""
========== Data Documentation ==========
To generate documentation from the data you just profiled, the profiling results should be moved from
great_expectations/uncommitted (ignored by git) to great_expectations/fixtures.
Before committing, please make sure that this data does not contain sensitive information!
To learn more: <blue>https://docs.greatexpectations.io/en/latest/guides/data_documentation.html?utm_source=cli&utm_medium=init&utm_campaign={0:s}</blue>
""".format(__version__.replace(".", "_"))
)
if click.confirm("Move the profiled data and build HTML documentation?",
default=True
):
cli_message("\nMoving files...")
for profiling_result in profiling_results:
data_asset_name = profiling_result[1]['meta']['data_asset_name']
expectation_suite_name = profiling_result[1]['meta']['expectation_suite_name']
run_id = profiling_result[1]['meta']['run_id']
context.move_validation_to_fixtures(
data_asset_name, expectation_suite_name, run_id)
cli_message("\nDone.")
cli_message("\nBuilding documentation...")
context.render_full_static_site()
cli_message(
"""
To view the generated data documentation, open this file in a web browser:
<green>great_expectations/uncommitted/documentation/index.html</green>
""")
else:
cli_message(
"Okay, skipping HTML documentation for now.`."
)
else:
cli_message(
"Okay, skipping profiling for now. You can always do this later by running `great_expectations profile`."
)
if data_source_selection == "1": # Pandas
cli_message(msg_filesys_go_to_notebook)
elif data_source_selection == "2": # SQL
cli_message(msg_sqlalchemy_go_to_notebook)
elif data_source_selection == "3": # Spark
cli_message(msg_spark_go_to_notebook)
msg_prompt_choose_data_source = """
Configure a datasource:
1. Pandas DataFrame
2. Relational database (SQL)
3. Spark DataFrame
4. Skip datasource configuration
"""
# msg_prompt_dbt_choose_profile = """
# Please specify the name of the dbt profile (from your ~/.dbt/profiles.yml file Great Expectations \
# should use to connect to the database
# """
# msg_dbt_go_to_notebook = """
# To create expectations for your dbt models start Jupyter and open notebook
# great_expectations/notebooks/using_great_expectations_with_dbt.ipynb -
# it will walk you through next steps.
# """
msg_prompt_filesys_enter_base_path = """
Enter the path of the root directory where the data files are stored.
(The path may be either absolute or relative to current directory.)
"""
msg_prompt_datasource_name = """
Give your new data source a short name.
"""
msg_sqlalchemy_config_connection = """
Great Expectations relies on sqlalchemy to connect to relational databases.
Please make sure that you have it installed.
Next, we will configure database credentials and store them in the "{0:s}" section
of this config file: great_expectations/uncommitted/credentials/profiles.yml:
"""
msg_unknown_data_source = """
We are looking for additional data source types to support.
Please create a GitHub issue here:
https://github.com/great-expectations/great_expectations/issues/new
In the meantime you can see what Great Expectations can do on CSV files.
To create expectations for your CSV files start Jupyter and open notebook
great_expectations/notebooks/using_great_expectations_with_pandas.ipynb -
it will walk you through next steps.
"""
msg_filesys_go_to_notebook = """
To create expectations for your data, start Jupyter and open a tutorial notebook:
To launch with jupyter notebooks:
<green>jupyter notebook great_expectations/notebooks/create_expectations.ipynb</green>
To launch with jupyter lab:
<green>jupyter lab great_expectations/notebooks/create_expectations.ipynb</green>
"""
msg_sqlalchemy_go_to_notebook = """
To create expectations for your data, start Jupyter and open the notebook
that will walk you through next steps.
To launch with jupyter notebooks:
<green>jupyter notebook great_expectations/notebooks/create_expectations.ipynb</green>
To launch with jupyter lab:
<green>jupyter lab great_expectations/notebooks/create_expectations.ipynb</green>
"""
msg_spark_go_to_notebook = """
To create expectations for your data, start Jupyter and open the notebook
that will walk you through next steps.
To launch with jupyter notebooks:
<green>jupyter notebook great_expectations/notebooks/create_expectations.ipynb</green>
To launch with jupyter lab:
<green>jupyter lab great_expectations/notebooks/create_expectations.ipynb</green>
"""
| 2.390625 | 2 |
python/crawler/downloader.py | rgb-24bit/code-library | 0 | 6491 | # -*- coding: utf-8 -*-
"""
Provide download helpers built on top of the requests library
"""
from datetime import datetime
import logging
import time
import urllib.parse
import requests
from bs4 import BeautifulSoup
class Throttle(object):
"""Throttle downloading by sleeping between requests to same domain."""
def __init__(self, delay):
# amount of delay between downloads for each domain
self.delay = delay
# timestamp of when a domain was last accessed
self.domains = {}
def wait(self, url):
domain = urllib.parse.urlparse(url).netloc
last_accessed = self.domains.get(domain)
if self.delay > 0 and last_accessed is not None:
            sleep_secs = self.delay - (datetime.now() - last_accessed).total_seconds()
if sleep_secs > 0:
time.sleep(sleep_secs)
self.domains[domain] = datetime.now()
class Downloader(object):
"""Convenient download of web pages or caller to call api.
Args:
delay: Interval between downloads (seconds)
num_retries: Number of retries when downloading errors
timeout: Download timeout
"""
def __init__(self, delay=5, user_agent='awsl', proxies=None, num_retries=1,
timeout=60, cache=None, auth=None):
self.session = requests.Session()
self.session.headers.update({'user-agent': user_agent})
self.session.proxies = proxies
self.session.auth = auth
self.throttle = Throttle(delay)
self.num_retries = num_retries
self.timeout = timeout
self.cache = cache
def get_from_cache(self, request):
"""Try to get the result of the request from the cache."""
result = None
if self.cache:
result = self.cache.get(request.url)
if result and self.num_retries > 0 and 500 <= result['code'] < 600:
result = None
return result
def prepare_request(self, url, params=None):
"""Build requests based on the provided url and parameters."""
request = requests.Request('GET', url, params=params)
return self.session.prepare_request(request)
def send_request(self, request, num_retries):
"""Send request and return response object."""
self.throttle.wait(request.url)
try:
logging.info('Downloading: %s' % request.url)
response = self.session.send(request, timeout=self.timeout)
response.raise_for_status()
except requests.exceptions.HTTPError as e:
            logging.warning('Download error: %s' % e)
if num_retries > 0 and 500 <= response.status_code < 600:
return self.send_request(request, num_retries - 1)
except requests.exceptions.RequestException:
            logging.error('Download failed: %s' % request.url)
response = None
return response
    def text(self, url, params=None, encoding=None):
        """Download web content as text (e.g. HTML); returns None on failure."""
request = self.prepare_request(url, params)
result = self.get_from_cache(request)
if result is None:
response = self.send_request(request, self.num_retries)
if response:
if encoding:
response.encoding = encoding
result = {'text': response.text, 'code': response.status_code}
if self.cache:
self.cache[request.url] = result
        return result['text'] if result else None
    def json(self, url, params=None):
        """Access the API and return the parsed JSON object, or None on failure."""
request = self.prepare_request(url, params)
result = self.get_from_cache(request)
if result is None:
response = self.send_request(request, self.num_retries)
if response:
result = {'json': response.json(), 'code': response.status_code}
if self.cache:
self.cache[request.url] = result
        return result['json'] if result else None
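
# Hedged usage sketch, not part of the original module: fetch one page politely
# and parse it with the BeautifulSoup import above. The URL and settings are
# illustrative only.
if __name__ == '__main__':
    downloader = Downloader(delay=2, num_retries=2)
    html = downloader.text('https://example.com')
    if html:
        soup = BeautifulSoup(html, 'html.parser')
        print(soup.title)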
| 3.328125 | 3 |
medium/151.py | pisskidney/leetcode | 0 | 6492 | #!/usr/bin/python
class Solution(object):
    def reverseWords(self, s):
        """Reverse the order of the words in s (LeetCode 151).

        Scans the string right to left, collecting each word's characters
        in their original order, then joins the words with single spaces.
        """
if s == '':
return s
res = []
i = len(s) - 2
while i >= -1:
if s[i] == ' ' or i == -1:
word = ''
j = i + 1
while j < len(s) and s[j] != ' ':
word += s[j]
j += 1
if word:
res.append(word)
i -= 1
return ' '.join(res)
s = Solution()
print(s.reverseWords('a x'))
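
# Hedged note, not in the original file: for an input string text, the
# idiomatic one-liner with the same behaviour (it also collapses runs of
# spaces) is:
#     ' '.join(reversed(text.split()))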
| 3.65625 | 4 |
src/keycloak/connection.py | ecederstrand/python-keycloak | 0 | 6493 | # -*- coding: utf-8 -*-
#
# The MIT License (MIT)
#
# Copyright (C) 2017 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
try:
from urllib.parse import urljoin
except ImportError:
from urlparse import urljoin
import requests
from requests.adapters import HTTPAdapter
from .exceptions import KeycloakConnectionError
class ConnectionManager(object):
"""
Represents a simple server connection.
:param base_url: (str) The server URL.
:param headers: (dict) The header parameters of the requests to the server.
:param timeout: (int) Timeout to use for requests to the server.
:param verify: (bool) Verify server SSL.
:param proxies: (dict) The proxies servers requests is sent by.
"""
def __init__(self, base_url, headers={}, timeout=60, verify=True, proxies=None):
self._base_url = base_url
self._headers = headers
self._timeout = timeout
self._verify = verify
self._s = requests.Session()
self._s.auth = lambda x: x # don't let requests add auth headers
# retry once to reset connection with Keycloak after tomcat's ConnectionTimeout
# see https://github.com/marcospereirampj/python-keycloak/issues/36
for protocol in ("https://", "http://"):
adapter = HTTPAdapter(max_retries=1)
# adds POST to retry whitelist
allowed_methods = set(adapter.max_retries.allowed_methods)
allowed_methods.add("POST")
adapter.max_retries.allowed_methods = frozenset(allowed_methods)
self._s.mount(protocol, adapter)
if proxies:
self._s.proxies.update(proxies)
def __del__(self):
self._s.close()
@property
def base_url(self):
"""Return base url in use for requests to the server."""
return self._base_url
@base_url.setter
    def base_url(self, value):
        """Set base url used for requests to the server."""
self._base_url = value
@property
def timeout(self):
"""Return timeout in use for request to the server."""
return self._timeout
@timeout.setter
    def timeout(self, value):
        """Set timeout used for requests to the server."""
self._timeout = value
@property
def verify(self):
"""Return verify in use for request to the server."""
return self._verify
@verify.setter
    def verify(self, value):
        """Set whether server SSL certificates are verified."""
self._verify = value
@property
def headers(self):
"""Return header request to the server."""
return self._headers
@headers.setter
    def headers(self, value):
        """Set header parameters sent with requests to the server."""
self._headers = value
def param_headers(self, key):
"""
Return a specific header parameter.
:param key: (str) Header parameters key.
:returns: If the header parameters exist, return its value.
"""
return self.headers.get(key)
def clean_headers(self):
"""Clear header parameters."""
self.headers = {}
def exist_param_headers(self, key):
"""Check if the parameter exists in the header.
:param key: (str) Header parameters key.
:returns: If the header parameters exist, return True.
"""
return self.param_headers(key) is not None
def add_param_headers(self, key, value):
"""Add a single parameter inside the header.
:param key: (str) Header parameters key.
:param value: (str) Value to be added.
"""
self.headers[key] = value
def del_param_headers(self, key):
"""Remove a specific parameter.
:param key: (str) Key of the header parameters.
"""
self.headers.pop(key, None)
def raw_get(self, path, **kwargs):
"""Submit get request to the path.
:param path: (str) Path for request.
:returns: Response the request.
:raises: HttpError Can't connect to server.
"""
try:
return self._s.get(
urljoin(self.base_url, path),
params=kwargs,
headers=self.headers,
timeout=self.timeout,
verify=self.verify,
)
except Exception as e:
raise KeycloakConnectionError("Can't connect to server (%s)" % e)
def raw_post(self, path, data, **kwargs):
"""Submit post request to the path.
:param path: (str) Path for request.
:param data: (dict) Payload for request.
:returns: Response the request.
:raises: HttpError Can't connect to server.
"""
try:
return self._s.post(
urljoin(self.base_url, path),
params=kwargs,
data=data,
headers=self.headers,
timeout=self.timeout,
verify=self.verify,
)
except Exception as e:
raise KeycloakConnectionError("Can't connect to server (%s)" % e)
def raw_put(self, path, data, **kwargs):
"""Submit put request to the path.
:param path: (str) Path for request.
:param data: (dict) Payload for request.
:returns: Response the request.
:raises: HttpError Can't connect to server.
"""
try:
return self._s.put(
urljoin(self.base_url, path),
params=kwargs,
data=data,
headers=self.headers,
timeout=self.timeout,
verify=self.verify,
)
except Exception as e:
raise KeycloakConnectionError("Can't connect to server (%s)" % e)
def raw_delete(self, path, data={}, **kwargs):
"""Submit delete request to the path.
:param path: (str) Path for request.
:param data: (dict) Payload for request.
:returns: Response the request.
:raises: HttpError Can't connect to server.
"""
try:
return self._s.delete(
urljoin(self.base_url, path),
params=kwargs,
data=data,
headers=self.headers,
timeout=self.timeout,
verify=self.verify,
)
except Exception as e:
raise KeycloakConnectionError("Can't connect to server (%s)" % e)
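
# Hedged usage sketch, not part of the original module; the server URL, token
# and path below are illustrative only, so the calls are left commented out.
# conn = ConnectionManager(base_url="https://keycloak.example.com/auth/")
# conn.add_param_headers("Authorization", "Bearer <access-token>")
# resp = conn.raw_get("realms/master/.well-known/openid-configuration")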
| 2.078125 | 2 |
2020/23.py | Valokoodari/advent-of-code | 2 | 6494 | <gh_stars>1-10
#!venv/bin/python3
# Advent of Code 2020, day 23 (crab cups). The circle of cups is stored as a
# successor map: p[c] is the cup immediately clockwise of cup c.
with open("inputs/23.in", "r") as fp:
    cs = [int(c) for c in fp.readline().strip()]

def f(cs, ts):
    p = {n: cs[(i + 1) % len(cs)] for i, n in enumerate(cs)}
    m = max(p)   # highest cup label, used for wrap-around
    cc = cs[-1]  # start one step "before" the first cup; the loop advances first
    for _ in range(ts):
        cc = p[cc]                    # current cup for this move
        dc = cc - 1 if cc > 1 else m  # destination candidate
        hc = [p[cc], p[p[cc]], p[p[p[cc]]]]  # the three cups picked up
        p[cc] = p[hc[-1]]             # unlink them from the circle
        while dc in hc:               # destination may not be a picked-up cup
            dc = dc - 1 if dc > 1 else m
        p[hc[-1]], p[dc] = p[dc], hc[0]  # splice the three cups back in after dc
    a, n = [], 1
    for _ in range(8):                # the eight cups clockwise of cup 1
        n = p[n]
        a.append(str(n))
    return "".join(a), p[1] * p[p[1]]
print("Part 1:", f(cs.copy(), 100)[0])
print("Part 2:", f(cs.copy() + [i for i in range(10, 1000001)], 10000000)[1]) | 2.234375 | 2 |
run.py | jakewright/home-automation-device-registry | 15 | 6495 | <gh_stars>10-100
# Import the application
from device_registry import app
# Run the application in debug mode
app.run(host='0.0.0.0', port=int(app.config['PORT']), debug=True)
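# Hedged note, not in the original file: debug=True enables Flask's interactive
# debugger and auto-reloader, so this entry point is presumably for local
# development only; a production run would drop it, e.g.:
# app.run(host='0.0.0.0', port=int(app.config['PORT']), debug=False)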
| 1.617188 | 2 |
dvc/utils/stage.py | Abrosimov-a-a/dvc | 0 | 6496 | import yaml
from ruamel.yaml import YAML
from ruamel.yaml.error import YAMLError
try:
from yaml import CSafeLoader as SafeLoader
except ImportError:
from yaml import SafeLoader
from dvc.exceptions import StageFileCorruptedError
from dvc.utils.compat import open
def load_stage_file(path):
with open(path, "r", encoding="utf-8") as fd:
return parse_stage(fd.read(), path)
def parse_stage(text, path):
try:
return yaml.load(text, Loader=SafeLoader) or {}
except yaml.error.YAMLError as exc:
raise StageFileCorruptedError(path, cause=exc)
def parse_stage_for_update(text, path):
"""Parses text into Python structure.
Unlike `parse_stage()` this returns ordered dicts, values have special
attributes to store comments and line breaks. This allows us to preserve
all of those upon dump.
This one is, however, several times slower than simple `parse_stage()`.
"""
try:
yaml = YAML()
return yaml.load(text) or {}
except YAMLError as exc:
raise StageFileCorruptedError(path, cause=exc)
def dump_stage_file(path, data):
with open(path, "w", encoding="utf-8") as fd:
yaml = YAML()
yaml.default_flow_style = False
yaml.dump(data, fd)
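
# Hedged usage sketch, not part of the original module; the file name and the
# edited key are illustrative. parse_stage_for_update() returns ruamel
# round-trip objects, so comments and layout survive dump_stage_file():
# with open("Dvcfile") as fd:
#     data = parse_stage_for_update(fd.read(), "Dvcfile")
# data["cmd"] = "python train.py"
# dump_stage_file("Dvcfile", data)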
| 2.609375 | 3 |
CAMPODETIRO/test.py | Arguel/old-projects | 0 | 6497 | entrada = input("word")  # prompt translated from the Spanish "palabra"
listaDeLetras = []
for i in entrada:
listaDeLetras.append(i)
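
# Hedged note, not in the original file: the loop above is equivalent to the
# one-liner listaDeLetras = list(entrada).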
| 3.6875 | 4 |
demos/nn_classification_demo.py | fire-breathing-rubber-lemons/cs207-FinalProject | 0 | 6498 | import numpy as np
from pyad.nn import NeuralNet
from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import train_test_split
np.random.seed(0)
data = load_breast_cancer()
X_train, X_test, y_train, y_test = train_test_split(
data.data, data.target, train_size=0.8, random_state=0
)
nn = NeuralNet(loss_fn='cross_entropy')
nn.add_layer(X_train.shape[1], 100, activation='linear')
nn.add_layer(100, 100, activation='logistic')
nn.add_layer(100, 1 + np.max(y_train), activation='linear')
nn.train(
X_train, y_train, X_test, y_test,
batch_size=1, learning_rate=1e-3, epochs=20
)
print('Predictions:', nn.predict(X_test))
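
# Hedged addition, not part of the original demo: a quick accuracy check,
# assuming nn.predict returns one predicted class label per test row.
preds = np.asarray(nn.predict(X_test))
print('Test accuracy:', np.mean(preds == y_test))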
| 3.109375 | 3 |
mgatemp.py | zobclub/chapter8 | 1 | 6499 | from microbit import *
I2CADR = 0x0E    # I2C address of the micro:bit's magnetometer (MAG3110)
DIE_TEMP = 0x0F  # its DIE_TEMP register: on-die temperature reading

while True:
    # Select the DIE_TEMP register, then read one byte back.
    i2c.write(I2CADR, bytearray([DIE_TEMP]))
    d = i2c.read(I2CADR, 1)
    x = d[0]
    if x >= 128:  # reinterpret the unsigned byte as a signed (two's-complement) value
        x -= 256
    x += 10  # fixed offset kept from the original code (rough calibration)
    print(x)
sleep(500) | 2.78125 | 3 |