#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pytest
from filetool.backup import backup_dir
def test_backup_dir():
root_dir = __file__.replace("test_backup.py", "testdir")
ignore_ext = [".txt", ]
backup_filename = "testdir-backup"
backup_dir(backup_filename, root_dir, ignore_ext=ignore_ext)
if __name__ == "__main__":
import os
pytest.main([os.path.basename(__file__), "--tb=native", "-s", ])
|
#! /usr/bin/env python3
"""
ONTAP REST API Python Sample Scripts
This script was developed by NetApp to help demonstrate NetApp technologies. This
script is not officially supported as a standard NetApp product.
Purpose: THE FOLLOWING SCRIPT SHOWS INTERFACE OPERATIONS USING REST API PCL
usage: python3 interface_operations.py [-h] -c CLUSTER [-u API_USER]
[-p API_PASS]
Copyright (c) 2020 NetApp, Inc. All Rights Reserved.
Licensed under the BSD 3-Clause "New" or "Revised" License (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://opensource.org/licenses/BSD-3-Clause
"""
from netapp_ontap import NetAppRestError
from netapp_ontap.resources import IpInterface
from utils import Argument, parse_args, setup_logging
from utils import setup_connection, show_svm, show_node, show_interface
def list_interface():
""" List Interface"""
print("\n List of Interface:- \n")
try:
for interface in IpInterface.get_collection():
print(
"Interface Name:- %s; Inteface UUID:- %s " %
(interface.name, interface.uuid))
except NetAppRestError as error:
print("Exception caught :" + str(error))
def create_interface():
"""Create Interface"""
int_name = input("Enter the name of the Interface:- ")
print()
show_svm()
print()
svm_name = input(
"Enter the name of the SVM on which the interface should be created :- ")
svm_uuid = input(
"Enter the UUID of the SVM on which the interface should be created :- ")
print()
show_node()
print()
node_name = input(
"Enter the name of the home node on which the interface should be created :- ")
node_uuid = input(
"Enter the uuid of the home node on which the interface should be created :- ")
ip_add = input("Enter the IP address:- ")
netmask = input("Enter the NetMask:- ")
interfaceobj = {
"enabled": True,
"ip": {
"address": ip_add,
"netmask": netmask
},
"name": int_name,
"scope": "svm",
"svm": {
"name": svm_name,
"uuid": svm_uuid
},
"location": {"home_node": {
"name": node_name,
"uuid": node_uuid
}
}
}
try:
ipint = IpInterface.from_dict(interfaceobj)
if ipint.post(poll=True):
print("Interface created successfully.")
except NetAppRestError as error:
print("Exception caught :" + str(error))
def patch_interface() -> None:
""" Patch Interface"""
print("----------Patch Interface-----------")
print()
show_interface()
int_name = input("Enter the name of the Interface :- ")
int_new_name = input(
"Enter the new name of the interface to be updated :- ")
try:
ipint = IpInterface.find(name=int_name)
ipint.name = int_new_name
if ipint.patch(poll=True):
print("Interface updated successfully.")
except NetAppRestError as error:
print("Exception caught :" + str(error))
def delete_interface() -> None:
""" delete Interface"""
print("----------Patch Interface-----------")
print()
show_interface()
int_name = input("Enter the name of the Interface :- ")
try:
ipint = IpInterface.find(name=int_name)
if ipint.delete(poll=True):
print("Interface deleted successfully.")
except NetAppRestError as error:
print("Exception caught :" + str(error))
def interface_ops() -> None:
"""Interface Operations"""
print()
print("THE FOLLOWING SCRIPT SHOWS INTERFACE OPERATIONS USING REST API PYTHON CLIENT LIBRARY:- ")
print("=======================================================================================")
print()
interfacebool = input(
"What Interface Operation would you like to do? [list/create/update/delete] ")
if interfacebool == 'list':
list_interface()
if interfacebool == 'create':
create_interface()
if interfacebool == 'update':
patch_interface()
if interfacebool == 'delete':
delete_interface()
def main() -> None:
"""Main function"""
arguments = [
Argument("-c", "--cluster", "API server IP:port details")]
args = parse_args(
"Demonstrates Interface Operations using REST API Python Client Library.",
arguments,
)
setup_logging()
setup_connection(args.cluster, args.api_user, args.api_pass)
interface_ops()
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
import sys
input = sys.stdin.readline
print = sys.stdout.write
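# Each test case below reads "u v" pairs and builds a functional graph
# index[u] -> index[v]. The answer printed at the end is len(g), i.e. one per
# recorded mapping, plus one extra for every cycle found while walking the
# unvisited chains; presumably each cycle needs one additional operation to
# break it.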
if __name__ == '__main__':
for _ in range(int(input())):
index = {}
g = {}
for _ in range(int(input())):
u, v = input().strip().split()
if u != v:
if u not in index:
index[u] = len(index)
if v not in index:
index[v] = len(index)
g[index[u]] = index[v]
n = len(index)
cost = len(g)
u = [False] * n
for i in range(n):
if not u[i]:
c = set()
v = i
while v in g and not u[v]:
u[v] = True
c.add(v)
v = g[v]
if v in c:
u[v] = True
cost += 1
print(f"{cost}\n")
|
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.authentication import TokenAuthentication
from rest_framework import filters
from profiles_api import serializers
from rest_framework import status
from rest_framework import viewsets
from profiles_api import models
from . import permissions
# # Create your views here.
#
# class HelloApiView(APIView):
# """Test API View"""
#
# def get(self, request, format=None):
# """Returns a list of APIView features"""
#
# an_apiview = [
# 'Uses HTTP methods as function (get, post, patch, put, delete)',
# 'Is similar to a traditional Django View',
# 'is mapped manually to URLS '
# ]
#
# return Response({'message' : 'hello!', 'an_apiview' : an_apiview})
#
# def post(self, request):
# """Create a message name"""
# serializer = HelloSerializer(data=request.data)
#
# if serializer.is_valid():
# name = serializer.validated_data.get('name')
# message = f'Hello {name}'
# return Response({'message': message})
# else:
# return Response(
# serializer.errors,
# status=status.HTTP_400_BAD_REQUEST
# )
#
# def put(self, request, pk=None):
# """Handle updating an object"""
# return Response({'method': 'PUT'})
#
# def patch(self, request, pkk=None):
# """Updates the object"""
# return Response({'method':'PATCH'})
#
# def delete(self, request, pk=None):
# """Delete the object"""
# return Response({'method':'Delete'})
class HelloViewSet(viewsets.ViewSet):
"""Test Api ViewSet"""
serializer_class = serializers.HelloSerializer
def list(self, request):
"""Return a hello message"""
a_viewset = [
'Uses actions (list, create, retrieve, update, partial_update)'
]
return Response({'message':'Hello','a_viewset': a_viewset})
def create(self, request):
"""Creates item add to dbase"""
serializer = self.serializer_class(data=request.data)
if serializer.is_valid():
name = serializer.validated_data.get('name')
message = f'Hello {name}'
return Response({'message': message})
else:
return Response(serializer.errors,status=status.HTTP_400_BAD_REQUEST)
def put(self, request):
"""Updates this item """
return Response({})
class UserProfileViewSet(viewsets.ModelViewSet):
"""Handle creating and updating profiles"""
serializer_class = serializers.UserProfileSerializer
queryset = models.UserProfile.objects.all()
authentication_classes = (TokenAuthentication,)
permission_classes = (permissions.UpdateOwnProfile,)
filter_backends = (filters.SearchFilter,)
search_fields = ('name', 'email',)
|
"""
discord-styled-text - test_codeblock.py
---
Copyright 2021 classabbyamp, 0x5c
Released under the terms of the BSD 3-Clause license.
"""
from pytest import param, mark
from discord_styler import CodeBlock
CodeBlock_test_cases = [
param("", None, "```\n\n```", id="empty_no_lang"),
param("", "py", "```py\n\n```", id="empty_with_lang"),
param("yolo", "", "```\nyolo\n```", id="str_empty_lang"),
param('>>> Bold("hello", "world")\n\'**hello world**\'', None,
'```\n>>> Bold("hello", "world")\n\'**hello world**\'\n```', id="no_lang"),
param('>>> Bold("hello", "world")\n\'**hello world**\'', "py",
'```py\n>>> Bold("hello", "world")\n\'**hello world**\'\n```', id="with_lang"),
]
@mark.parametrize("content,lang,expected", CodeBlock_test_cases)
def test_CodeBlock(content, lang, expected):
code = CodeBlock(code=content, lang=lang)
assert str(code) == expected
|
''' This module tests the program functions of the smart alarm.
Each function is tested with a single test case and the result is
printed to the user on stdout.'''
from weather_api import get_weather
def test_weather():
''' test get weather function of the module '''
weather_return = get_weather()
assert type(weather_return) is dict, 'Weather Return Function: FAILED'
|
"""
dataset: openml- MagicTelescope
number of instances: 19020
number of features: 10
daub:
execution time: 36s
cross-validation accuracy (5-fold): 0.87
best model: gbdt
greedy search:
execution time: 150s
cross-validation accuracy (5-fold): 0.87
best model: gbdt
"""
import numpy as np
import pandas as pd
from time import time
from sklearn.model_selection import StratifiedKFold
from sklearn.metrics import accuracy_score
from sklearn.linear_model import LogisticRegression
from sklearn.svm import SVC
from sklearn.neighbors import KNeighborsClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import AdaBoostClassifier, RandomForestClassifier, GradientBoostingClassifier
from sklearn.naive_bayes import BernoulliNB, GaussianNB
from autotf.selector.fast_selector import FastSelector
LEARNER_NAMES = ['logistic', 'svc', 'knn', 'decision_tree', 'adaboost', 'random_forest',
'gbdt', 'BernoulliNB', 'GaussianNB']
learners = [LogisticRegression(), SVC(), KNeighborsClassifier(),
DecisionTreeClassifier(), AdaBoostClassifier(), RandomForestClassifier(),
GradientBoostingClassifier(),
BernoulliNB(), GaussianNB()]
def greedy_search(x, y):
t1 = time()
k_fold = StratifiedKFold(n_splits=5)
accu_array = []
for learner in learners:
print(learner)
accu = 0
for train_index, test_index in k_fold.split(x, y):
x_train, x_test = x[train_index], x[test_index]
y_train, y_test = y[train_index], y[test_index]
learner.fit(x_train, y_train)
y_pred = learner.predict(x_test)
accu += accuracy_score(y_test, y_pred)
accu_array.append(accu / 5.0)
t2 = time()
print('execution time:', t2 - t1)
j = np.argmax(accu_array)
print('learner = ', LEARNER_NAMES[j], 'accu = ', accu_array[j])
def daub_search(x, y):
t1 = time()
selector = FastSelector(task_type='classification')
learner_num, accu = selector.select_model(x, y)
t2 = time()
print('execution time', t2 - t1)
print('learner = ', LEARNER_NAMES[learner_num], 'accu = ', accu)
if __name__ == '__main__':
df = pd.read_csv('~/datasets/MagicTelescope.csv')
y = df['class:'].values
x = df.drop(labels=['ID', 'class:'], axis=1).values
daub_search(x, y)
greedy_search(x, y)
|
from starlette.datastructures import Secret
from .. import config
CACHE_DB_NAME = config("CACHE_DB_NAME", default="redis")
CACHE_DB_HOST = config("CACHE_DB_HOST", default="localhost")
CACHE_DB_PORT = config("CACHE_DB_PORT", cast=int, default=6379)
CACHE_DB_PASSWORD = config("CACHE_DB_PASSWORD", cast=Secret, default="your_password")
CACHE_DB_USE_DB = config("CACHE_DB_USE_DB", cast=int, default=0)
CACHE_DB_ENCODING = config("CACHE_DB_ENCODING", cast=str, default="utf-8")
CACHE_DB_POOL_MINSIZE = config("CACHE_DB_POOL_MINSIZE", cast=int, default=1)
CACHE_DB_POOL_MAXSIZE = config("CACHE_DB_POOL_MAXSIZE", cast=int, default=2)
CACHE_DB_CREATE_CONNECTION_TIMEOUT = config("CACHE_DB_CREATE_CONNECTION_TIMEOUT", cast=float, default=15)
__all__ = [
"CACHE_DB_HOST",
"CACHE_DB_PORT",
"CACHE_DB_PASSWORD",
"CACHE_DB_USE_DB",
"CACHE_DB_ENCODING",
"CACHE_DB_POOL_MINSIZE",
"CACHE_DB_POOL_MAXSIZE",
"CACHE_DB_CREATE_CONNECTION_TIMEOUT",
]
|
from typing import Optional
from flask import jsonify, request, current_app as app
from flask_api import status
from rest.decorators import handle_errors
from service.quote_service import QuoteService
@app.route("/api/quote", methods=["GET"], defaults={"quote_id": None})
@app.route("/api/quote/<quote_id>", methods=["GET"])
@handle_errors
def get_quotes(quote_id: Optional[int], quote_service: QuoteService):
if quote_id is not None:
return _get_quote(quote_id, quote_service)
quotes = quote_service.get_all()
return jsonify(quotes), status.HTTP_200_OK
def _get_quote(quote_id: int, quote_service: QuoteService):
quote = quote_service.get(quote_id)
if quote is None:
return {}, status.HTTP_404_NOT_FOUND
return jsonify(quote), status.HTTP_200_OK
@app.route("/api/quote", methods=["POST"])
@handle_errors
def new_quote(quote_service: QuoteService):
response = quote_service.new_quote(request.json["type"])
return jsonify(response), status.HTTP_201_CREATED
@app.route("/api/quote", methods=["PUT"])
@handle_errors
def update_quote(quote_service: QuoteService):
quote = quote_service.update_quote(request.json)
return jsonify(quote), status.HTTP_202_ACCEPTED
@app.route("/api/quote/<quote_id>", methods=["DELETE"])
@handle_errors
def delete_quote(quote_id: int, quote_service: QuoteService):
result = quote_service.delete_quote(quote_id)
return jsonify(result), status.HTTP_204_NO_CONTENT
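# Illustrative requests against a local dev server (host, port and the JSON
# payload are assumptions, not defined in this module):
#   curl http://localhost:5000/api/quote
#   curl -X POST http://localhost:5000/api/quote \
#        -H "Content-Type: application/json" -d '{"type": "daily"}'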
|
from enum import Enum
class Scalar(Enum):
is_standard = 1
is_minmax = 2
class EModel(Enum):
isSVC = 1
isMLP = 2
class Hyper:
scalar_type = Scalar.is_standard
model_type = EModel.isMLP
is_correlation = True
plot = True
print_results=True
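# A minimal sketch (not part of the original module) of how these switches
# could be consumed; StandardScaler, MinMaxScaler, SVC and MLPClassifier are
# standard scikit-learn classes, and wiring them up this way is an assumption
# about the wider project:
#   from sklearn.preprocessing import StandardScaler, MinMaxScaler
#   from sklearn.svm import SVC
#   from sklearn.neural_network import MLPClassifier
#   scaler = StandardScaler() if Hyper.scalar_type is Scalar.is_standard else MinMaxScaler()
#   model = SVC() if Hyper.model_type is EModel.isSVC else MLPClassifier()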
|
"""merge migrations
Revision ID: e2abdf81151f
Revises: b9c0f72bc63a, b6a7221990ba
Create Date: 2021-04-06 19:50:04.064091
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "e2abdf81151f"
down_revision = ("b9c0f72bc63a", "b6a7221990ba")
branch_labels = None
depends_on = None
def upgrade():
pass
def downgrade():
pass
|
from setuptools import setup
# Read the long description from the README; a relative path keeps the build
# portable, and reading the file whole avoids doubling every newline.
with open('README.rst', 'r') as f:
    ld = f.read()
setup(
name = 'influence', # How you named your package folder (MyLib)
packages = [
'influence',
'influence.math',
'influence.list',
'influence.string',
'influence.array',
'influence.dict',
'influence.set',
],
version = '1.1', # Start with a small number and increase it with every change you make
license='MIT', # Choose a license from here: https://help.github.com/articles/licensing-a-repository
description = 'A utility package influenced by Java, coded in Python', # Give a short description about your library
long_description = ld,
author = 'Neil', # Type in your name
author_email = '[email protected]', # Type in your E-Mail
url = 'https://github.com/RandomKiddo/influence', # Provide either the link to your github or to your website
download_url = 'https://github.com/RandomKiddo/influence/archive/v1.1.tar.gz', # I explain this later on
keywords = ['PYTHON', 'EXTENDER', 'UPGRADER'], # Keywords that define your package best
#install_requires=[ # I get to this in a second
#'numpy',
#'matplotlib',
#'wheel',
#],
classifiers=[
'Development Status :: 3 - Alpha', # Choose either "3 - Alpha", "4 - Beta" or "5 - Production/Stable" as the current state of your package
'Intended Audience :: Developers', # Define that your audience are developers
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License', # Again, pick a license
'Programming Language :: Python :: 3', # Specify which Python versions you want to support
'Programming Language :: Python :: 3.8',
],
#setup_requires=['wheel'],
)
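# Typical local workflow for this kind of setup script (these are the standard
# setuptools/twine commands, not anything specific to this project):
#   python setup.py sdist
#   twine upload dist/*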
|
from typing import List
class Solution:
def orangesRotting(self, grid: List[List[int]]) -> int:
n, m = len(grid), len(grid[0])
que = []
count = 0
for i in range(n):
for j in range(m):
if grid[i][j] == 1:
count += 1
if grid[i][j] == 2:
que.append((i, j))
time = 0
while count > 0 and len(que) > 0:
time += 1
size = len(que)
for _ in range(size):
i, j = que.pop(0)
if i - 1 >=0 and grid[i-1][j] == 1:
grid[i-1][j] = 2
count -= 1
que.append((i-1, j))
if i + 1 < n and grid[i+1][j] == 1:
grid[i+1][j] = 2
count -= 1
que.append((i+1, j))
if j - 1 >= 0 and grid[i][j-1] == 1:
grid[i][j-1] = 2
count -= 1
que.append((i, j-1))
if j + 1 < m and grid[i][j+1] == 1:
grid[i][j+1] = 2
count -= 1
que.append((i, j+1))
return -1 if count > 0 else time
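# Quick sanity check (illustrative, not part of the original solution): the
# classic 3x3 example grid, where every fresh orange rots within 4 minutes.
if __name__ == "__main__":
    print(Solution().orangesRotting([[2, 1, 1], [1, 1, 0], [0, 1, 1]]))  # expected: 4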
|
#!/bin/env python3
# Author: ph-u
# Script: boilerplate.py
# Desc: minimal python script sample
# Input: python3 boilerplate.py
# Output: two-lined python interpreter output
# Arguments: 0
# Date: Oct 2019
"""Description of this program or application.
You can use several lines"""
__appname__='[application name here]'
__author__='Your Name ([email protected])'
__version__='0.0.1'
__license__='License for this code / program'
## imports ##
import sys ## module to interface our program with the operating system
## constants ##
## functions ##
def main(argv):
"""Main entry point of the program"""
print("This is a boilerplate") ## NOTE: indented using two tabs or 4 species
return 0
if __name__ == "__main__":
"""Makes sure the 'main' function is called from command Line"""
status=main(sys.argv)
sys.exit("I'm exiting right now")
# sys.exit(status)
|
from aiocodeforces.enum import ProblemResultType
class ProblemResult:
__slots__ = ["points", "penalty", "rejected_attempt_count", "type", "best_submission_time_seconds"]
def __init__(self, dic):
self.points: float = dic["points"]
self.penalty: int = dic["penalty"]
self.rejected_attempt_count: int = dic["rejectedAttemptCount"]
self.type: ProblemResultType = ProblemResultType[dic["type"]] # Enum: PRELIMINARY, FINAL
self.best_submission_time_seconds: int = dic["bestSubmissionTimeSeconds"]
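# Illustrative construction (the values are made up; the keys mirror the
# fields parsed in __init__ above):
#   ProblemResult({"points": 1500.0, "penalty": 0, "rejectedAttemptCount": 1,
#                  "type": "FINAL", "bestSubmissionTimeSeconds": 3600})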
|
# -*- encoding: utf-8 -*-
"""
Created by eniocc at 11/10/2020
"""
from py_dss_interface.models.Bus.BusF import BusF
from py_dss_interface.models.Bus.BusI import BusI
from py_dss_interface.models.Bus.BusS import BusS
from py_dss_interface.models.Bus.BusV import BusV
class Bus(BusS, BusI, BusV, BusF):
"""
This interface implements the Bus (IBus) interface of OpenDSS by declaring 4 procedures for accessing the
different properties included in this interface: BusS, BusI, BusV, BusF.
"""
pass
|
import sys
import lockfile.linklockfile
import lockfile.mkdirlockfile
import lockfile.pidlockfile
import lockfile.symlinklockfile
from compliancetest import ComplianceTest
class TestLinkLockFile(ComplianceTest):
class_to_test = lockfile.linklockfile.LinkLockFile
class TestSymlinkLockFile(ComplianceTest):
class_to_test = lockfile.symlinklockfile.SymlinkLockFile
class TestMkdirLockFile(ComplianceTest):
class_to_test = lockfile.mkdirlockfile.MkdirLockFile
class TestPIDLockFile(ComplianceTest):
class_to_test = lockfile.pidlockfile.PIDLockFile
# Check backwards compatibility
class TestLinkFileLock(ComplianceTest):
class_to_test = lockfile.LinkFileLock
class TestMkdirFileLock(ComplianceTest):
class_to_test = lockfile.MkdirFileLock
try:
import sqlite3
except ImportError:
pass
else:
import lockfile.sqlitelockfile
class TestSQLiteLockFile(ComplianceTest):
class_to_test = lockfile.sqlitelockfile.SQLiteLockFile
|
#!/usr/bin/env python3
if __name__ == '__main__':
val = list(map(int, input().split()))
x, k = val[0], val[1]
P = input().split()
res = 0
flag = '+'
for i, p in enumerate(P):
if i % 2 == 1:
flag = p
else:
if p.find('**') != -1:
e = p.split('**')
temp = pow(x, int(e[1]))
if e[0].find('*') != -1:
e2 = e[0].split('*')
temp *= int(e2[0])
if flag == '+':
res += temp
elif flag == '-':
res -= temp
elif flag == '*':
res *= temp
elif flag == '/':
res //= temp
elif p.find('x') != -1:
if flag == '+':
res += x
elif flag == '-':
res -= x
elif flag == '*':
res *= x
elif flag == '/':
res //= x
else:
if flag == '+':
res += int(p)
elif flag == '-':
res -= int(p)
elif flag == '*':
res *= int(p)
elif flag == '/':
res //= int(p)
if res == k:
print('True')
else:
print('False')
|
n = 53
abc = 10
xxx = 10
def f(a, x, y, z):
global n, abc
b = a
b = 10
n = 35
lst = [2]
lst[0] = 1
for j in lst:
pass
return b
while abc < n:
a = 10
b = 10
b_ = (a + 1)
b_ = 10
n = 35
lst = [2]
lst[0] = 1
for j in lst:
pass
c = 1 + 2 + b_
print(n)
abc = 1000
|
import argparse
import random
def generate_graph(nrnodes, nrextraedges, minedgecost, maxedgecost):
start_priority = random.randint(1, 25)
priorities = [10*(k+start_priority) for k in range(nrnodes)]
random.shuffle(priorities)
for k in range(len(priorities)):
priorities[k] = [k, priorities[k]]
adjmatrix = [[0 for _ in range(nrnodes)] for _ in range(nrnodes)]
edges = []
for j in range(nrnodes-1):
cost = random.randint(minedgecost, maxedgecost)
edges.append([j, j+1, cost])
adjmatrix[j][j+1] = 1
adjmatrix[j+1][j] = 1
for j in range(nrextraedges):
cost = random.randint(minedgecost, maxedgecost)
while True:
k1 = random.randint(0, nrnodes-1)
k2 = random.randint(0, nrnodes-1)
if k2 != k1 and adjmatrix[k1][k2] == 0:
break
edges.append([k1, k2, cost])
adjmatrix[k1][k2] = 1
adjmatrix[k2][k1] = 1
random.shuffle(priorities)
fromnode = random.randint(0, nrnodes-1)
while True:
tonode = random.randint(0, nrnodes-1)
if tonode != fromnode:
break
return priorities, edges, fromnode, tonode
def print_graph(graph, filename):
nodes, edges, fromnode, tonode = graph
network = [nodes, edges]
string = "retea(R) :- R = {0}.\nfrom({1}).\nto({2}).\n".format(network, fromnode, tonode)
with open(filename, "w") as file:
file.write(string)
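# Example of the generated Prolog-style output for a tiny two-node graph
# (the values are illustrative):
#   retea(R) :- R = [[[0, 30], [1, 40]], [[0, 1, 7]]].
#   from(0).
#   to(1).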
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Graph generator for STP')
parser.add_argument('--minnodes', type=int, required=True)
parser.add_argument('--maxnodes', type=int, required=True)
parser.add_argument('--minextraedges', type=int, required=True)
parser.add_argument('--maxextraedges', type=int, required=True)
parser.add_argument('--minedgecost', type=int, required=True)
parser.add_argument('--maxedgecost', type=int, required=True)
parser.add_argument('--count', type=int, required=True)
parser.add_argument('--filename', required=True)
parser.add_argument('--startindex', type=int, required=True)
args = parser.parse_args()
for i in range(args.count):
index = i + args.startindex
filename = "{0}{1}.txt".format(args.filename, index)
nrnodes = random.randint(args.minnodes, args.maxnodes)
nrextraedges = random.randint(args.minextraedges, args.maxextraedges)
minedgecost = args.minedgecost
maxedgecost = args.maxedgecost
graph = generate_graph(nrnodes, nrextraedges, minedgecost, maxedgecost)
print_graph(graph, filename)
|
#
# -*- coding: utf-8 -*-
#
# This file is part of reclass (http://github.com/madduck/reclass)
#
# Copyright © 2007–14 martin f. krafft <[email protected]>
# Released under the terms of the Artistic Licence 2.0
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import copy
from six import iteritems
from reclass.settings import Settings
from reclass.datatypes import Parameters
from reclass.utils.parameterdict import ParameterDict
from reclass.utils.parameterlist import ParameterList
from reclass.values.value import Value
from reclass.values.valuelist import ValueList
from reclass.values.scaitem import ScaItem
from reclass.errors import ChangedConstantError, InfiniteRecursionError, InterpolationError, ResolveError, ResolveErrorList, TypeMergeError
import unittest
try:
import unittest.mock as mock
except ImportError:
import mock
SIMPLE = {'one': 1, 'two': 2, 'three': 3}
SETTINGS = Settings()
class MockDevice(object):
def __init__(self):
self._text = ''
def write(self, s):
self._text += s
return
def text(self):
return self._text
class TestParameters(unittest.TestCase):
def _construct_mocked_params(self, iterable=None, settings=SETTINGS):
p = Parameters(iterable, settings, '')
self._base = base = p._base
p._base = mock.MagicMock(spec_set=ParameterDict, wraps=base)
p._base.__repr__ = mock.MagicMock(autospec=dict.__repr__,
return_value=repr(base))
p._base.__getitem__.side_effect = base.__getitem__
p._base.__setitem__.side_effect = base.__setitem__
return p, p._base
def test_len_empty(self):
p, b = self._construct_mocked_params()
l = 0
b.__len__.return_value = l
self.assertEqual(len(p), l)
b.__len__.assert_called_with()
def test_constructor(self):
p, b = self._construct_mocked_params(SIMPLE)
l = len(SIMPLE)
b.__len__.return_value = l
self.assertEqual(len(p), l)
b.__len__.assert_called_with()
def test_repr_empty(self):
p, b = self._construct_mocked_params()
b.__repr__.return_value = repr({})
self.assertEqual('%r' % p, '%s(%r)' % (p.__class__.__name__, {}))
b.__repr__.assert_called_once_with()
def test_repr(self):
p, b = self._construct_mocked_params(SIMPLE)
b.__repr__.return_value = repr(SIMPLE)
self.assertEqual('%r' % p, '%s(%r)' % (p.__class__.__name__, SIMPLE))
b.__repr__.assert_called_once_with()
def test_equal_empty(self):
p1, b1 = self._construct_mocked_params()
p2, b2 = self._construct_mocked_params()
b1.__eq__.return_value = True
self.assertEqual(p1, p2)
b1.__eq__.assert_called_once_with(b2)
def test_equal_default_delimiter(self):
p1, b1 = self._construct_mocked_params(SIMPLE)
p2, b2 = self._construct_mocked_params(SIMPLE, SETTINGS)
b1.__eq__.return_value = True
self.assertEqual(p1, p2)
b1.__eq__.assert_called_once_with(b2)
def test_equal_contents(self):
p1, b1 = self._construct_mocked_params(SIMPLE)
p2, b2 = self._construct_mocked_params(SIMPLE)
b1.__eq__.return_value = True
self.assertEqual(p1, p2)
b1.__eq__.assert_called_once_with(b2)
def test_unequal_content(self):
p1, b1 = self._construct_mocked_params()
p2, b2 = self._construct_mocked_params(SIMPLE)
b1.__eq__.return_value = False
self.assertNotEqual(p1, p2)
b1.__eq__.assert_called_once_with(b2)
def test_unequal_delimiter(self):
settings1 = Settings({'delimiter': ':'})
settings2 = Settings({'delimiter': '%'})
p1, b1 = self._construct_mocked_params(settings=settings1)
p2, b2 = self._construct_mocked_params(settings=settings2)
b1.__eq__.return_value = False
self.assertNotEqual(p1, p2)
b1.__eq__.assert_called_once_with(b2)
def test_unequal_types(self):
p1, b1 = self._construct_mocked_params()
self.assertNotEqual(p1, None)
self.assertEqual(b1.__eq__.call_count, 0)
def test_construct_wrong_type(self):
with self.assertRaises(TypeError) as e:
self._construct_mocked_params(str('wrong type'))
self.assertIn(str(e.exception), [ "Cannot merge <type 'str'> objects into Parameters", # python 2
"Cannot merge <class 'str'> objects into Parameters" ]) # python 3
def test_merge_wrong_type(self):
p, b = self._construct_mocked_params()
with self.assertRaises(TypeError) as e:
p.merge(str('wrong type'))
self.assertIn(str(e.exception), [ "Cannot merge <type 'str'> objects into Parameters", # python 2
"Cannot merge <class 'str'> objects into Parameters"]) # python 3
def test_get_dict(self):
p, b = self._construct_mocked_params(SIMPLE)
p.initialise_interpolation()
self.assertDictEqual(p.as_dict(), SIMPLE)
def test_merge_scalars(self):
p1, b1 = self._construct_mocked_params(SIMPLE)
mergee = {'five':5,'four':4,'None':None,'tuple':(1,2,3)}
p2, b2 = self._construct_mocked_params(mergee)
p1.merge(p2)
self.assertEqual(b1.get.call_count, 4)
self.assertEqual(b1.__setitem__.call_count, 4)
def test_stray_occurrence_overwrites_during_interpolation(self):
p1 = Parameters({'r' : mock.sentinel.ref, 'b': '${r}'}, SETTINGS, '')
p2 = Parameters({'b' : mock.sentinel.goal}, SETTINGS, '')
p1.merge(p2)
p1.interpolate()
self.assertEqual(p1.as_dict()['b'], mock.sentinel.goal)
class TestParametersNoMock(unittest.TestCase):
def test_merge_scalars(self):
p = Parameters(SIMPLE, SETTINGS, '')
mergee = {'five':5,'four':4,'None':None,'tuple':(1,2,3)}
p.merge(mergee)
p.initialise_interpolation()
goal = SIMPLE.copy()
goal.update(mergee)
self.assertDictEqual(p.as_dict(), goal)
def test_merge_scalars_overwrite(self):
p = Parameters(SIMPLE, SETTINGS, '')
mergee = {'two':5,'four':4,'three':None,'one':(1,2,3)}
p.merge(mergee)
p.initialise_interpolation()
goal = SIMPLE.copy()
goal.update(mergee)
self.assertDictEqual(p.as_dict(), goal)
def test_merge_lists(self):
l1 = [1,2,3]
l2 = [2,3,4]
p1 = Parameters(dict(list=l1[:]), SETTINGS, '')
p2 = Parameters(dict(list=l2), SETTINGS, '')
p1.merge(p2)
p1.initialise_interpolation()
self.assertListEqual(p1.as_dict()['list'], l1+l2)
def test_merge_list_into_scalar(self):
l = ['foo', 1, 2]
p1 = Parameters(dict(key=l[0]), SETTINGS, '')
p2 = Parameters(dict(key=l[1:]), SETTINGS, '')
with self.assertRaises(TypeMergeError) as e:
p1.merge(p2)
p1.interpolate()
self.assertEqual(e.exception.message, "-> \n Canot merge list over scalar, at key, in ; ")
def test_merge_list_into_scalar_allow(self):
settings = Settings({'allow_list_over_scalar': True})
l = ['foo', 1, 2]
p1 = Parameters(dict(key=l[0]), settings, '')
p2 = Parameters(dict(key=l[1:]), settings, '')
p1.merge(p2)
p1.interpolate()
self.assertListEqual(p1.as_dict()['key'], l)
def test_merge_scalar_over_list(self):
l = ['foo', 1, 2]
p1 = Parameters(dict(key=l[:2]), SETTINGS, '')
p2 = Parameters(dict(key=l[2]), SETTINGS, '')
with self.assertRaises(TypeMergeError) as e:
p1.merge(p2)
p1.interpolate()
self.assertEqual(e.exception.message, "-> \n Canot merge scalar over list, at key, in ; ")
def test_merge_scalar_over_list_allow(self):
l = ['foo', 1, 2]
settings = Settings({'allow_scalar_over_list': True})
p1 = Parameters(dict(key=l[:2]), settings, '')
p2 = Parameters(dict(key=l[2]), settings, '')
p1.merge(p2)
p1.interpolate()
self.assertEqual(p1.as_dict()['key'], l[2])
def test_merge_none_over_list(self):
l = ['foo', 1, 2]
settings = Settings({'allow_none_override': False})
p1 = Parameters(dict(key=l[:2]), settings, '')
p2 = Parameters(dict(key=None), settings, '')
with self.assertRaises(TypeMergeError) as e:
p1.merge(p2)
p1.interpolate()
self.assertEqual(e.exception.message, "-> \n Canot merge scalar over list, at key, in ; ")
def test_merge_none_over_list_allow(self):
l = ['foo', 1, 2]
settings = Settings({'allow_none_override': True})
p1 = Parameters(dict(key=l[:2]), settings, '')
p2 = Parameters(dict(key=None), settings, '')
p1.merge(p2)
p1.interpolate()
self.assertEqual(p1.as_dict()['key'], None)
def test_merge_dict_over_scalar(self):
d = { 'one': 1, 'two': 2 }
p1 = Parameters({ 'a': 1 }, SETTINGS, '')
p2 = Parameters({ 'a': d }, SETTINGS, '')
with self.assertRaises(TypeMergeError) as e:
p1.merge(p2)
p1.interpolate()
self.assertEqual(e.exception.message, "-> \n Canot merge dictionary over scalar, at a, in ; ")
def test_merge_dict_over_scalar_allow(self):
settings = Settings({'allow_dict_over_scalar': True})
d = { 'one': 1, 'two': 2 }
p1 = Parameters({ 'a': 1 }, settings, '')
p2 = Parameters({ 'a': d }, settings, '')
p1.merge(p2)
p1.interpolate()
self.assertEqual(p1.as_dict(), { 'a': d })
def test_merge_scalar_over_dict(self):
d = { 'one': 1, 'two': 2}
p1 = Parameters({ 'a': d }, SETTINGS, '')
p2 = Parameters({ 'a': 1 }, SETTINGS, '')
with self.assertRaises(TypeMergeError) as e:
p1.merge(p2)
p1.interpolate()
self.assertEqual(e.exception.message, "-> \n Canot merge scalar over dictionary, at a, in ; ")
def test_merge_scalar_over_dict_allow(self):
d = { 'one': 1, 'two': 2}
settings = Settings({'allow_scalar_over_dict': True})
p1 = Parameters({ 'a': d }, settings, '')
p2 = Parameters({ 'a': 1 }, settings, '')
p1.merge(p2)
p1.interpolate()
self.assertEqual(p1.as_dict(), { 'a': 1})
def test_merge_none_over_dict(self):
p1 = Parameters(dict(key=SIMPLE), SETTINGS, '')
p2 = Parameters(dict(key=None), SETTINGS, '')
with self.assertRaises(TypeMergeError) as e:
p1.merge(p2)
p1.interpolate()
self.assertEqual(e.exception.message, "-> \n Canot merge scalar over dictionary, at key, in ; ")
def test_merge_none_over_dict_allow(self):
settings = Settings({'allow_none_override': True})
p1 = Parameters(dict(key=SIMPLE), settings, '')
p2 = Parameters(dict(key=None), settings, '')
p1.merge(p2)
p1.interpolate()
self.assertEqual(p1.as_dict()['key'], None)
def test_merge_list_over_dict(self):
p1 = Parameters({}, SETTINGS, '')
p2 = Parameters({'one': { 'a': { 'b': 'c' } } }, SETTINGS, 'second')
p3 = Parameters({'one': { 'a': [ 'b' ] } }, SETTINGS, 'third')
with self.assertRaises(TypeMergeError) as e:
p1.merge(p2)
p1.merge(p3)
p1.interpolate()
self.assertEqual(e.exception.message, "-> \n Canot merge list over dictionary, at one:a, in second; third")
# def test_merge_bare_dict_over_dict(self):
# settings = Settings({'allow_bare_override': True})
# p1 = Parameters(dict(key=SIMPLE), settings, '')
# p2 = Parameters(dict(key=dict()), settings, '')
# p1.merge(p2)
# p1.initialise_interpolation()
# self.assertEqual(p1.as_dict()['key'], {})
# def test_merge_bare_list_over_list(self):
# l = ['foo', 1, 2]
# settings = Settings({'allow_bare_override': True})
# p1 = Parameters(dict(key=l), settings, '')
# p2 = Parameters(dict(key=list()), settings, '')
# p1.merge(p2)
# p1.initialise_interpolation()
# self.assertEqual(p1.as_dict()['key'], [])
def test_merge_dicts(self):
mergee = {'five':5,'four':4,'None':None,'tuple':(1,2,3)}
p = Parameters(dict(dict=SIMPLE), SETTINGS, '')
p2 = Parameters(dict(dict=mergee), SETTINGS, '')
p.merge(p2)
p.initialise_interpolation()
goal = SIMPLE.copy()
goal.update(mergee)
self.assertDictEqual(p.as_dict(), dict(dict=goal))
def test_merge_dicts_overwrite(self):
mergee = {'two':5,'four':4,'three':None,'one':(1,2,3)}
p = Parameters(dict(dict=SIMPLE), SETTINGS, '')
p2 = Parameters(dict(dict=mergee), SETTINGS, '')
p.merge(p2)
p.initialise_interpolation()
goal = SIMPLE.copy()
goal.update(mergee)
self.assertDictEqual(p.as_dict(), dict(dict=goal))
def test_merge_dicts_override(self):
"""Validate that tilde merge overrides function properly."""
mergee = {'~one': {'a': 'alpha'},
'~two': ['gamma']}
base = {'one': {'b': 'beta'},
'two': ['delta']}
goal = {'one': {'a': 'alpha'},
'two': ['gamma']}
p = Parameters(dict(dict=base), SETTINGS, '')
p2 = Parameters(dict(dict=mergee), SETTINGS, '')
p.merge(p2)
p.interpolate()
self.assertDictEqual(p.as_dict(), dict(dict=goal))
def test_interpolate_single(self):
v = 42
d = {'foo': 'bar'.join(SETTINGS.reference_sentinels),
'bar': v}
p = Parameters(d, SETTINGS, '')
p.interpolate()
self.assertEqual(p.as_dict()['foo'], v)
def test_interpolate_multiple(self):
v = '42'
d = {'foo': 'bar'.join(SETTINGS.reference_sentinels) + 'meep'.join(SETTINGS.reference_sentinels),
'bar': v[0],
'meep': v[1]}
p = Parameters(d, SETTINGS, '')
p.interpolate()
self.assertEqual(p.as_dict()['foo'], v)
def test_interpolate_multilevel(self):
v = 42
d = {'foo': 'bar'.join(SETTINGS.reference_sentinels),
'bar': 'meep'.join(SETTINGS.reference_sentinels),
'meep': v}
p = Parameters(d, SETTINGS, '')
p.interpolate()
self.assertEqual(p.as_dict()['foo'], v)
def test_interpolate_list(self):
l = [41, 42, 43]
d = {'foo': 'bar'.join(SETTINGS.reference_sentinels),
'bar': l}
p = Parameters(d, SETTINGS, '')
p.interpolate()
self.assertEqual(p.as_dict()['foo'], l)
def test_interpolate_infrecursion(self):
v = 42
d = {'foo': 'bar'.join(SETTINGS.reference_sentinels),
'bar': 'foo'.join(SETTINGS.reference_sentinels)}
p = Parameters(d, SETTINGS, '')
with self.assertRaises(InfiniteRecursionError) as e:
p.interpolate()
# interpolation can start with foo or bar
self.assertIn(e.exception.message, [ "-> \n Infinite recursion: ${foo}, at bar",
"-> \n Infinite recursion: ${bar}, at foo"])
def test_nested_references(self):
d = {'a': '${${z}}', 'b': 2, 'z': 'b'}
r = {'a': 2, 'b': 2, 'z': 'b'}
p = Parameters(d, SETTINGS, '')
p.interpolate()
self.assertEqual(p.as_dict(), r)
def test_nested_deep_references(self):
d = {'one': { 'a': 1, 'b': '${one:${one:c}}', 'c': 'a' } }
r = {'one': { 'a': 1, 'b': 1, 'c': 'a'} }
p = Parameters(d, SETTINGS, '')
p.interpolate()
self.assertEqual(p.as_dict(), r)
def test_stray_occurrence_overwrites_during_interpolation(self):
p1 = Parameters({'r' : 1, 'b': '${r}'}, SETTINGS, '')
p2 = Parameters({'b' : 2}, SETTINGS, '')
p1.merge(p2)
p1.interpolate()
self.assertEqual(p1.as_dict()['b'], 2)
def test_referenced_dict_deep_overwrite(self):
p1 = Parameters({'alpha': {'one': {'a': 1, 'b': 2} } }, SETTINGS, '')
p2 = Parameters({'beta': '${alpha}'}, SETTINGS, '')
p3 = Parameters({'alpha': {'one': {'c': 3, 'd': 4} },
'beta': {'one': {'a': 99} } }, SETTINGS, '')
r = {'alpha': {'one': {'a':1, 'b': 2, 'c': 3, 'd':4} },
'beta': {'one': {'a':99, 'b': 2, 'c': 3, 'd':4} } }
p1.merge(p2)
p1.merge(p3)
p1.interpolate()
self.assertEqual(p1.as_dict(), r)
def test_complex_reference_overwriting(self):
p1 = Parameters({'one': 'abc_123_${two}_${three}', 'two': 'XYZ', 'four': 4}, SETTINGS, '')
p2 = Parameters({'one': 'QWERTY_${three}_${four}', 'three': '999'}, SETTINGS, '')
r = {'one': 'QWERTY_999_4', 'two': 'XYZ', 'three': '999', 'four': 4}
p1.merge(p2)
p1.interpolate()
self.assertEqual(p1.as_dict(), r)
def test_nested_reference_with_overwriting(self):
p1 = Parameters({'one': {'a': 1, 'b': 2, 'z': 'a'},
'two': '${one:${one:z}}' }, SETTINGS, '')
p2 = Parameters({'one': {'z': 'b'} }, SETTINGS, '')
r = {'one': {'a': 1, 'b':2, 'z': 'b'}, 'two': 2}
p1.merge(p2)
p1.interpolate()
self.assertEqual(p1.as_dict(), r)
def test_merge_referenced_lists(self):
p1 = Parameters({'one': [ 1, 2, 3 ], 'two': [ 4, 5, 6 ], 'three': '${one}'}, SETTINGS, '')
p2 = Parameters({'three': '${two}'}, SETTINGS, '')
r = {'one': [ 1, 2, 3 ], 'two': [ 4, 5, 6], 'three': [ 1, 2, 3, 4, 5, 6 ]}
p1.merge(p2)
p1.interpolate()
self.assertEqual(p1.as_dict(), r)
def test_merge_referenced_dicts(self):
p1 = Parameters({'one': {'a': 1, 'b': 2}, 'two': {'c': 3, 'd': 4}, 'three': '${one}'}, SETTINGS, '')
p2 = Parameters({'three': '${two}'}, SETTINGS, '')
r = {'one': {'a': 1, 'b': 2}, 'two': {'c': 3, 'd': 4}, 'three': {'a': 1, 'b': 2, 'c': 3, 'd': 4}}
p1.merge(p2)
p1.interpolate()
self.assertEqual(p1.as_dict(), r)
def test_deep_refs_in_referenced_dicts(self):
p = Parameters({'A': '${C:a}', 'B': {'a': 1, 'b': 2}, 'C': '${B}'}, SETTINGS, '')
r = {'A': 1, 'B': {'a': 1, 'b': 2}, 'C': {'a': 1, 'b': 2}}
p.interpolate()
self.assertEqual(p.as_dict(), r)
def test_overwrite_none(self):
p1 = Parameters({'A': None, 'B': None, 'C': None, 'D': None, 'E': None, 'F': None}, SETTINGS, '')
p2 = Parameters({'A': 'abc', 'B': [1, 2, 3], 'C': {'a': 'aaa', 'b': 'bbb'}, 'D': '${A}', 'E': '${B}', 'F': '${C}'}, SETTINGS, '')
r = {'A': 'abc', 'B': [1, 2, 3], 'C': {'a': 'aaa', 'b': 'bbb'}, 'D': 'abc', 'E': [1, 2, 3], 'F': {'a': 'aaa', 'b': 'bbb'}}
p1.merge(p2)
p1.interpolate()
self.assertEqual(p1.as_dict(), r)
def test_overwrite_dict(self):
p1 = Parameters({'a': { 'one': 1, 'two': 2 }}, SETTINGS, '')
p2 = Parameters({'~a': { 'three': 3, 'four': 4 }}, SETTINGS, '')
r = {'a': { 'three': 3, 'four': 4 }}
p1.merge(p2)
p1.interpolate()
self.assertEqual(p1.as_dict(), r)
def test_overwrite_list(self):
p1 = Parameters({'a': [1, 2]}, SETTINGS, '')
p2 = Parameters({'~a': [3, 4]}, SETTINGS, '')
r = {'a': [3, 4]}
p1.merge(p2)
p1.interpolate()
self.assertEqual(p1.as_dict(), r)
def test_interpolate_escaping(self):
v = 'bar'.join(SETTINGS.reference_sentinels)
d = {'foo': SETTINGS.escape_character + 'bar'.join(SETTINGS.reference_sentinels),
'bar': 'unused'}
p = Parameters(d, SETTINGS, '')
p.initialise_interpolation()
self.assertEqual(p.as_dict()['foo'], v)
def test_interpolate_double_escaping(self):
v = SETTINGS.escape_character + 'meep'
d = {'foo': SETTINGS.escape_character + SETTINGS.escape_character + 'bar'.join(SETTINGS.reference_sentinels),
'bar': 'meep'}
p = Parameters(d, SETTINGS, '')
p.interpolate()
self.assertEqual(p.as_dict()['foo'], v)
def test_interpolate_escaping_backwards_compatibility(self):
"""In all following cases, escaping should not happen and the escape character
needs to be printed as-is, to ensure backwards compatibility to older versions."""
v = ' '.join([
# Escape character followed by unescapable character
'1', SETTINGS.escape_character,
# Escape character followed by escape character
'2', SETTINGS.escape_character + SETTINGS.escape_character,
# Escape character followed by interpolation end sentinel
'3', SETTINGS.escape_character + SETTINGS.reference_sentinels[1],
# Escape character at the end of the string
'4', SETTINGS.escape_character
])
d = {'foo': v}
p = Parameters(d, SETTINGS, '')
p.initialise_interpolation()
self.assertEqual(p.as_dict()['foo'], v)
def test_escape_close_in_ref(self):
p1 = Parameters({'one}': 1, 'two': '${one\\}}'}, SETTINGS, '')
r = {'one}': 1, 'two': 1}
p1.interpolate()
self.assertEqual(p1.as_dict(), r)
def test_double_escape_in_ref(self):
d = {'one\\': 1, 'two': '${one\\\\}'}
p1 = Parameters(d, SETTINGS, '')
r = {'one\\': 1, 'two': 1}
p1.interpolate()
self.assertEqual(p1.as_dict(), r)
def test_merging_for_multiple_nodes(self):
p1 = Parameters({ 'alpha': { 'one': 111 }}, SETTINGS, '')
p2 = Parameters({ 'beta': {'two': '${alpha:one}' }}, SETTINGS, '')
p3 = Parameters({ 'beta': {'two': 222 }}, SETTINGS, '')
n1 = Parameters({ 'name': 'node1'}, SETTINGS, '')
r1 = { 'alpha': { 'one': 111 }, 'beta': { 'two': 111 }, 'name': 'node1' }
r2 = { 'alpha': { 'one': 111 }, 'beta': { 'two': 222 }, 'name': 'node2' }
n1.merge(p1)
n1.merge(p2)
n1.interpolate()
n2 = Parameters({'name': 'node2'}, SETTINGS, '')
n2.merge(p1)
n2.merge(p2)
n2.merge(p3)
n2.interpolate()
self.assertEqual(n1.as_dict(), r1)
self.assertEqual(n2.as_dict(), r2)
def test_list_merging_for_multiple_nodes(self):
p1 = Parameters({ 'alpha': { 'one': [1, 2] }}, SETTINGS, '')
p2 = Parameters({ 'beta': {'two': '${alpha:one}' }}, SETTINGS, '')
p3 = Parameters({ 'beta': {'two': [3] }}, SETTINGS, '')
n1 = Parameters({ 'name': 'node1'}, SETTINGS, '')
r1 = { 'alpha': { 'one': [1, 2] }, 'beta': { 'two': [1, 2] }, 'name': 'node1' }
r2 = { 'alpha': { 'one': [1, 2] }, 'beta': { 'two': [1, 2, 3] }, 'name': 'node2' }
n1.merge(p1)
n1.merge(p2)
n1.interpolate()
n2 = Parameters({'name': 'node2'}, SETTINGS, '')
n2.merge(p1)
n2.merge(p2)
n2.merge(p3)
n2.interpolate()
self.assertEqual(n1.as_dict(), r1)
self.assertEqual(n2.as_dict(), r2)
def test_dict_merging_for_multiple_nodes(self):
p1 = Parameters({ 'alpha': { 'one': { 'a': 'aa', 'b': 'bb' }}}, SETTINGS, '')
p2 = Parameters({ 'beta': {'two': '${alpha:one}' }}, SETTINGS, '')
p3 = Parameters({ 'beta': {'two': {'c': 'cc' }}}, SETTINGS, '')
n1 = Parameters({ 'name': 'node1'}, SETTINGS, '')
r1 = { 'alpha': { 'one': {'a': 'aa', 'b': 'bb'} }, 'beta': { 'two': {'a': 'aa', 'b': 'bb'} }, 'name': 'node1' }
r2 = { 'alpha': { 'one': {'a': 'aa', 'b': 'bb'} }, 'beta': { 'two': {'a': 'aa', 'b': 'bb', 'c': 'cc'} }, 'name': 'node2' }
n1.merge(p1)
n1.merge(p2)
n1.interpolate()
n2 = Parameters({'name': 'node2'}, SETTINGS, '')
n2.merge(p1)
n2.merge(p2)
n2.merge(p3)
n2.interpolate()
self.assertEqual(n1.as_dict(), r1)
self.assertEqual(n2.as_dict(), r2)
def test_list_merging_with_refs_for_multiple_nodes(self):
p1 = Parameters({ 'alpha': { 'one': [1, 2], 'two': [3, 4] }}, SETTINGS, '')
p2 = Parameters({ 'beta': { 'three': '${alpha:one}' }}, SETTINGS, '')
p3 = Parameters({ 'beta': { 'three': '${alpha:two}' }}, SETTINGS, '')
p4 = Parameters({ 'beta': { 'three': '${alpha:one}' }}, SETTINGS, '')
n1 = Parameters({ 'name': 'node1' }, SETTINGS, '')
r1 = {'alpha': {'one': [1, 2], 'two': [3, 4]}, 'beta': {'three': [1, 2]}, 'name': 'node1'}
r2 = {'alpha': {'one': [1, 2], 'two': [3, 4]}, 'beta': {'three': [1, 2, 3, 4, 1, 2]}, 'name': 'node2'}
n2 = Parameters({ 'name': 'node2' }, SETTINGS, '')
n2.merge(p1)
n2.merge(p2)
n2.merge(p3)
n2.merge(p4)
n2.interpolate()
n1.merge(p1)
n1.merge(p2)
n1.interpolate()
self.assertEqual(n1.as_dict(), r1)
self.assertEqual(n2.as_dict(), r2)
def test_nested_refs_with_multiple_nodes(self):
p1 = Parameters({ 'alpha': { 'one': 1, 'two': 2 } }, SETTINGS, '')
p2 = Parameters({ 'beta': { 'three': 'one' } }, SETTINGS, '')
p3 = Parameters({ 'beta': { 'three': 'two' } }, SETTINGS, '')
p4 = Parameters({ 'beta': { 'four': '${alpha:${beta:three}}' } }, SETTINGS, '')
n1 = Parameters({ 'name': 'node1' }, SETTINGS, '')
r1 = {'alpha': {'one': 1, 'two': 2}, 'beta': {'three': 'one', 'four': 1}, 'name': 'node1'}
r2 = {'alpha': {'one': 1, 'two': 2}, 'beta': {'three': 'two', 'four': 2}, 'name': 'node2'}
n1.merge(p1)
n1.merge(p4)
n1.merge(p2)
n1.interpolate()
n2 = Parameters({ 'name': 'node2' }, SETTINGS, '')
n2.merge(p1)
n2.merge(p4)
n2.merge(p3)
n2.interpolate()
self.assertEqual(n1.as_dict(), r1)
self.assertEqual(n2.as_dict(), r2)
def test_nested_refs_error_message(self):
# beta is missing, oops
p1 = Parameters({'alpha': {'one': 1, 'two': 2}, 'gamma': '${alpha:${beta}}'}, SETTINGS, '')
with self.assertRaises(InterpolationError) as error:
p1.interpolate()
self.assertEqual(error.exception.message, "-> \n Bad references, at gamma\n ${beta}")
def test_multiple_resolve_errors(self):
p1 = Parameters({'alpha': '${gamma}', 'beta': '${gamma}'}, SETTINGS, '')
with self.assertRaises(ResolveErrorList) as error:
p1.interpolate()
# interpolation can start with either alpha or beta
self.assertIn(error.exception.message, [ "-> \n Cannot resolve ${gamma}, at alpha\n Cannot resolve ${gamma}, at beta",
"-> \n Cannot resolve ${gamma}, at beta\n Cannot resolve ${gamma}, at alpha"])
def test_force_single_resolve_error(self):
settings = copy.deepcopy(SETTINGS)
settings.group_errors = False
p1 = Parameters({'alpha': '${gamma}', 'beta': '${gamma}'}, settings, '')
with self.assertRaises(ResolveError) as error:
p1.interpolate()
# interpolation can start with either alpha or beta
self.assertIn(error.exception.message, [ "-> \n Cannot resolve ${gamma}, at alpha",
"-> \n Cannot resolve ${gamma}, at beta"])
def test_ignore_overwriten_missing_reference(self):
settings = copy.deepcopy(SETTINGS)
settings.ignore_overwritten_missing_references = True
p1 = Parameters({'alpha': '${beta}'}, settings, '')
p2 = Parameters({'alpha': '${gamma}'}, settings, '')
p3 = Parameters({'gamma': 3}, settings, '')
r1 = {'alpha': 3, 'gamma': 3}
p1.merge(p2)
p1.merge(p3)
err1 = "[WARNING] Reference '${beta}' undefined\n"
with mock.patch('sys.stderr', new=MockDevice()) as std_err:
p1.interpolate()
self.assertEqual(p1.as_dict(), r1)
self.assertEqual(std_err.text(), err1)
def test_ignore_overwriten_missing_reference_last_value(self):
# an error should be raised if the last reference to be merged
# is missing even if ignore_overwritten_missing_references is true
settings = copy.deepcopy(SETTINGS)
settings.ignore_overwritten_missing_references = True
p1 = Parameters({'alpha': '${gamma}'}, settings, '')
p2 = Parameters({'alpha': '${beta}'}, settings, '')
p3 = Parameters({'gamma': 3}, settings, '')
p1.merge(p2)
p1.merge(p3)
with self.assertRaises(InterpolationError) as error:
p1.interpolate()
self.assertEqual(error.exception.message, "-> \n Cannot resolve ${beta}, at alpha")
def test_ignore_overwriten_missing_reference_dict(self):
# setting ignore_overwritten_missing_references to true should
# not change the behaviour for dicts
settings = copy.deepcopy(SETTINGS)
settings.ignore_overwritten_missing_references = True
p1 = Parameters({'alpha': '${beta}'}, settings, '')
p2 = Parameters({'alpha': '${gamma}'}, settings, '')
p3 = Parameters({'gamma': {'one': 1, 'two': 2}}, settings, '')
err1 = "[WARNING] Reference '${beta}' undefined\n"
p1.merge(p2)
p1.merge(p3)
with self.assertRaises(InterpolationError) as error, mock.patch('sys.stderr', new=MockDevice()) as std_err:
p1.interpolate()
self.assertEqual(error.exception.message, "-> \n Cannot resolve ${beta}, at alpha")
self.assertEqual(std_err.text(), err1)
def test_escaped_string_in_ref_dict_1(self):
# test with escaped string in first dict to be merged
p1 = Parameters({'a': { 'one': '${a_ref}' }, 'b': { 'two': '\${not_a_ref}' }, 'c': '${b}', 'a_ref': 123}, SETTINGS, '')
p2 = Parameters({'c': '${a}'}, SETTINGS, '')
r = { 'a': { 'one': 123 }, 'b': { 'two': '${not_a_ref}' }, 'c': { 'one': 123, 'two': '${not_a_ref}' }, 'a_ref': 123}
p1.merge(p2)
p1.interpolate()
self.assertEqual(p1.as_dict(), r)
def test_escaped_string_in_ref_dict_2(self):
# test with escaped string in second dict to be merged
p1 = Parameters({'a': { 'one': '${a_ref}' }, 'b': { 'two': '\${not_a_ref}' }, 'c': '${a}', 'a_ref': 123}, SETTINGS, '')
p2 = Parameters({'c': '${b}'}, SETTINGS, '')
r = { 'a': { 'one': 123 }, 'b': { 'two': '${not_a_ref}' }, 'c': { 'one': 123, 'two': '${not_a_ref}' }, 'a_ref': 123}
p1.merge(p2)
p1.interpolate()
self.assertEqual(p1.as_dict(), r)
def test_complex_overwrites_1(self):
# find a better name for this test
p1 = Parameters({ 'test': { 'dict': { 'a': '${values:one}', 'b': '${values:two}' } },
'values': { 'one': 1, 'two': 2, 'three': { 'x': 'X', 'y': 'Y' } } }, SETTINGS, '')
p2 = Parameters({ 'test': { 'dict': { 'c': '${values:two}' } } }, SETTINGS, '')
p3 = Parameters({ 'test': { 'dict': { '~b': '${values:three}' } } }, SETTINGS, '')
r = {'test': {'dict': {'a': 1, 'b': {'x': 'X', 'y': 'Y'}, 'c': 2}}, 'values': {'one': 1, 'three': {'x': 'X', 'y': 'Y'}, 'two': 2} }
p2.merge(p3)
p1.merge(p2)
p1.interpolate()
self.assertEqual(p1.as_dict(), r)
def test_escaped_string_overwrites(self):
p1 = Parameters({ 'test': '\${not_a_ref}' }, SETTINGS, '')
p2 = Parameters({ 'test': '\${also_not_a_ref}' }, SETTINGS, '')
r = { 'test': '${also_not_a_ref}' }
p1.merge(p2)
p1.interpolate()
self.assertEqual(p1.as_dict(), r)
def test_escaped_string_in_ref_dict_overwrite(self):
p1 = Parameters({'a': { 'one': '\${not_a_ref}' }, 'b': { 'two': '\${also_not_a_ref}' }}, SETTINGS, '')
p2 = Parameters({'c': '${a}'}, SETTINGS, '')
p3 = Parameters({'c': '${b}'}, SETTINGS, '')
p4 = Parameters({'c': { 'one': '\${again_not_a_ref}' } }, SETTINGS, '')
r = {'a': {'one': '${not_a_ref}'}, 'b': {'two': '${also_not_a_ref}'}, 'c': {'one': '${again_not_a_ref}', 'two': '${also_not_a_ref}'}}
p1.merge(p2)
p1.merge(p3)
p1.merge(p4)
p1.interpolate()
self.assertEqual(p1.as_dict(), r)
def test_strict_constant_parameter(self):
p1 = Parameters({'one': { 'a': 1} }, SETTINGS, 'first')
p2 = Parameters({'one': { '=a': 2} }, SETTINGS, 'second')
p3 = Parameters({'one': { 'a': 3} }, SETTINGS, 'third')
with self.assertRaises(ChangedConstantError) as e:
p1.merge(p2)
p1.merge(p3)
p1.interpolate()
self.assertEqual(e.exception.message, "-> \n Attempt to change constant value, at one:a, in second; third")
def test_constant_parameter(self):
settings = Settings({'strict_constant_parameters': False})
p1 = Parameters({'one': { 'a': 1} }, settings, 'first')
p2 = Parameters({'one': { '=a': 2} }, settings, 'second')
p3 = Parameters({'one': { 'a': 3} }, settings, 'third')
r = {'one': { 'a': 2 } }
p1.merge(p2)
p1.merge(p3)
p1.interpolate()
self.assertEqual(p1.as_dict(), r)
def test_interpolated_list_type(self):
p1 = Parameters({'a': [ 1, 2, 3 ]}, SETTINGS, 'first')
r = {'a': [ 1, 2, 3 ]}
self.assertIs(type(p1.as_dict()['a']), ParameterList)
p1.interpolate()
self.assertIs(type(p1.as_dict()['a']), list)
self.assertEqual(p1.as_dict(), r)
def test_interpolated_dict_type(self):
p1 = Parameters({'a': { 'one': 1, 'two': 2, 'three': 3 }}, SETTINGS, 'first')
r = {'a': { 'one': 1, 'two': 2, 'three': 3 }}
self.assertIs(type(p1.as_dict()['a']), ParameterDict)
p1.interpolate()
self.assertIs(type(p1.as_dict()['a']), dict)
self.assertEqual(p1.as_dict(), r)
def test_merged_interpolated_list_type(self):
p1 = Parameters({'a': [ 1, 2, 3 ]}, SETTINGS, 'first')
p2 = Parameters({'a': [ 4, 5, 6 ]}, SETTINGS, 'second')
r = {'a': [ 1, 2, 3, 4, 5, 6 ]}
self.assertIs(type(p1.as_dict()['a']), ParameterList)
self.assertIs(type(p2.as_dict()['a']), ParameterList)
p1.merge(p2)
self.assertIs(type(p1.as_dict()['a']), ValueList)
p1.interpolate()
self.assertIs(type(p1.as_dict()['a']), list)
self.assertEqual(p1.as_dict(), r)
def test_merged_interpolated_dict_type(self):
p1 = Parameters({'a': { 'one': 1, 'two': 2, 'three': 3 }}, SETTINGS, 'first')
p2 = Parameters({'a': { 'four': 4, 'five': 5, 'six': 6 }}, SETTINGS, 'second')
r = {'a': { 'one': 1, 'two': 2, 'three': 3, 'four': 4, 'five': 5, 'six': 6}}
self.assertIs(type(p1.as_dict()['a']), ParameterDict)
self.assertIs(type(p2.as_dict()['a']), ParameterDict)
p1.merge(p2)
self.assertIs(type(p1.as_dict()['a']), ParameterDict)
p1.interpolate()
self.assertIs(type(p1.as_dict()['a']), dict)
self.assertEqual(p1.as_dict(), r)
if __name__ == '__main__':
unittest.main()
|
"""
oops
"""
print(0, 1 == 1, 0)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-02-20 18:22
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('travelling', '0032_auto_20190219_1939'),
]
operations = [
migrations.AlterField(
model_name='trip',
name='description',
field=models.TextField(blank=True, max_length=8000, null=True, verbose_name='Trip Description'),
),
]
|
# coding=utf-8
# Copyright 2019 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runs e2e eval on a pair of interactions / predctions."""
from absl import app
from absl import flags
from tapas.retrieval import e2e_eval_utils
flags.DEFINE_string("interaction_file", None, "TFRecord of interactions.")
flags.DEFINE_string("prediction_file", None, "Predictions in TSV format.")
FLAGS = flags.FLAGS
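# Example invocation (the script and file names are placeholders):
#   python3 e2e_eval.py --interaction_file=interactions.tfrecord \
#       --prediction_file=predictions.tsv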
def main(argv):
if len(argv) > 1:
raise app.UsageError("Too many command-line arguments.")
result = e2e_eval_utils.evaluate_retrieval_e2e(
FLAGS.interaction_file,
FLAGS.prediction_file,
)
print(result)
if __name__ == "__main__":
flags.mark_flag_as_required("interaction_file")
flags.mark_flag_as_required("prediction_file")
app.run(main)
|
a=input()
a=int(a)
if 90<=a<=100:
print('A')
elif 80<=a<=89:
print('B')
elif 70<=a<=79:
print('C')
elif 60<=a<=69:
print('D')
else:
print('F')
|
'''Config file.'''
from kivy.config import Config
# HD 1920x1080
Config.set('graphics', 'width', 1920)
Config.set('graphics', 'height', 1080)
|
import os, sys, math, argparse # python modules
import bpy, mathutils # blender modules
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
import myutil # my functions
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--render", action='store_true')
if '--' in sys.argv:
args = parser.parse_args(sys.argv[sys.argv.index('--') + 1:])
else:
args = parser.parse_args('')
myutil.set_tex_environment(
bpy.context.scene.world,
os.path.dirname(os.path.abspath(__file__))+'/../test_inputs/epping_forest_01_1k.hdr')
# rotate cube
bpy.data.objects["Cube"].rotation_euler = _euler = mathutils.Euler((0.0, 0.0, math.radians(30.0)), 'XYZ')
myutil.set_camera_ydirection()
# render
bpy.context.scene.render.resolution_percentage = 50
bpy.context.scene.cycles.samples = 60
bpy.context.scene.render.engine = 'CYCLES'
if args.render:
bpy.ops.render.render()
bpy.data.images['Render Result'].save_render(filepath = os.path.dirname(__file__)+'/out/02_out.png')
if __name__ == "__main__":
main()
|
"""
cubic spline planner
Author: Atsushi Sakai
"""
import math
import numpy as np
import bisect
class Spline:
u"""
Cubic Spline class
"""
def __init__(self, x, y):
self.b, self.c, self.d, self.w = [], [], [], []
self.x = x
self.y = y
self.nx = len(x) # dimension of x
h = np.diff(x)
# calc coefficient c
self.a = [iy for iy in y]
# calc coefficient c
A = self.__calc_A(h)
B = self.__calc_B(h)
self.c = np.linalg.solve(A, B)
# print(self.c1)
# calc spline coefficient b and d
for i in range(self.nx - 1):
self.d.append((self.c[i + 1] - self.c[i]) / (3.0 * h[i]))
tb = (self.a[i + 1] - self.a[i]) / h[i] - h[i] * \
(self.c[i + 1] + 2.0 * self.c[i]) / 3.0
self.b.append(tb)
def calc(self, t):
u"""
Calc position
if t is outside of the input x, return None
"""
if t < self.x[0]:
return None
elif t > self.x[-1]:
return None
i = self.__search_index(t)
dx = t - self.x[i]
result = self.a[i] + self.b[i] * dx + \
self.c[i] * dx ** 2.0 + self.d[i] * dx ** 3.0
return result
def calcd(self, t):
u"""
Calc first derivative
if t is outside of the input x, return None
"""
if t < self.x[0]:
return None
elif t > self.x[-1]:
return None
i = self.__search_index(t)
dx = t - self.x[i]
result = self.b[i] + 2.0 * self.c[i] * dx + 3.0 * self.d[i] * dx ** 2.0
return result
def calcdd(self, t):
u"""
Calc second derivative
"""
if t < self.x[0]:
return None
elif t > self.x[-1]:
return None
i = self.__search_index(t)
dx = t - self.x[i]
result = 2.0 * self.c[i] + 6.0 * self.d[i] * dx
return result
def __search_index(self, x):
u"""
search data segment index
"""
return bisect.bisect(self.x, x) - 1
def __calc_A(self, h):
u"""
calc matrix A for spline coefficient c
"""
A = np.zeros((self.nx, self.nx))
A[0, 0] = 1.0
for i in range(self.nx - 1):
if i != (self.nx - 2):
A[i + 1, i + 1] = 2.0 * (h[i] + h[i + 1])
A[i + 1, i] = h[i]
A[i, i + 1] = h[i]
A[0, 1] = 0.0
A[self.nx - 1, self.nx - 2] = 0.0
A[self.nx - 1, self.nx - 1] = 1.0
# print(A)
return A
def __calc_B(self, h):
u"""
calc matrix B for spline coefficient c
"""
B = np.zeros(self.nx)
for i in range(self.nx - 2):
B[i + 1] = 3.0 * (self.a[i + 2] - self.a[i + 1]) / \
h[i + 1] - 3.0 * (self.a[i + 1] - self.a[i]) / h[i]
# print(B)
return B
class Spline2D:
u"""
2D Cubic Spline class
"""
def __init__(self, x, y):
self.s = self.__calc_s(x, y)
self.sx = Spline(self.s, x)
self.sy = Spline(self.s, y)
def __calc_s(self, x, y):
        dx = np.diff(x)  # element-wise differences between consecutive points, used to estimate the derivative
dy = np.diff(y)
self.ds = [math.sqrt(idx ** 2 + idy ** 2)
for (idx, idy) in zip(dx, dy)]
s = [0]
s.extend(np.cumsum(self.ds))
return s
def calc_position(self, s):
u"""
calc position
"""
x = self.sx.calc(s)
y = self.sy.calc(s)
return x, y
def calc_curvature(self, s):
u"""
calc curvature
"""
dx = self.sx.calcd(s)
ddx = self.sx.calcdd(s)
dy = self.sy.calcd(s)
ddy = self.sy.calcdd(s)
        # standard curvature of a parametric curve: (x'y'' - y'x'') / (x'^2 + y'^2)^(3/2)
        k = (ddy * dx - ddx * dy) / ((dx ** 2 + dy ** 2) ** (3 / 2))
return k
def calc_yaw(self, s):
u"""
calc yaw
"""
dx = self.sx.calcd(s)
dy = self.sy.calcd(s)
yaw = math.atan2(dy, dx)
return yaw
def calc_spline_course(x, y, ds=0.1):
sp = Spline2D(x, y)
s = list(np.arange(0, sp.s[-1], ds))
rx, ry, ryaw, rk = [], [], [], []
for i_s in s:
ix, iy = sp.calc_position(i_s)
rx.append(ix)
ry.append(iy)
ryaw.append(sp.calc_yaw(i_s))
rk.append(sp.calc_curvature(i_s))
return rx, ry, ryaw, rk, s
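# Illustrative usage sketch (not part of the original planner): the waypoints
# below are made up purely to show what calc_spline_course returns.
def _example_course():
    wx = [0.0, 2.0, 4.0, 6.0]
    wy = [0.0, 1.5, 0.5, 2.0]
    rx, ry, ryaw, rk, s = calc_spline_course(wx, wy, ds=0.5)
    # one entry per sampled arc-length position in s
    for si, xi, yi, yawi, ki in zip(s, rx, ry, ryaw, rk):
        print("s={:.2f} x={:.2f} y={:.2f} yaw={:.1f}deg k={:.3f}".format(
            si, xi, yi, math.degrees(yawi), ki))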
def main():
print("Spline 2D test")
import matplotlib.pyplot as plt
x = [-2.5, 0.0, 2.5, 5.0, 7.5, 3.0, -1.0]
y = [0.7, -6, 5, 6.5, 0.0, 5.0, -2.0]
sp = Spline2D(x, y)
s = np.arange(0, sp.s[-1], 0.1)
rx, ry, ryaw, rk = [], [], [], []
for i_s in s:
ix, iy = sp.calc_position(i_s)
rx.append(ix)
ry.append(iy)
ryaw.append(sp.calc_yaw(i_s))
rk.append(sp.calc_curvature(i_s))
flg, ax = plt.subplots(1)
plt.plot(x, y, "xb", label="input")
plt.plot(rx, ry, "-r", label="spline")
plt.grid(True)
plt.axis("equal")
plt.xlabel("x[m]")
plt.ylabel("y[m]")
plt.legend()
flg, ax = plt.subplots(1)
plt.plot(s, [math.degrees(iyaw) for iyaw in ryaw], "-r", label="yaw")
plt.grid(True)
plt.legend()
plt.xlabel("line length[m]")
plt.ylabel("yaw angle[deg]")
flg, ax = plt.subplots(1)
plt.plot(s, rk, "-r", label="curvature")
plt.grid(True)
plt.legend()
plt.xlabel("line length[m]")
plt.ylabel("curvature [1/m]")
plt.show()
if __name__ == '__main__':
main()
|
import exp1
print('i am in exp2.py ')
|
from abc import ABC, abstractmethod
class Logger(ABC):
@abstractmethod
def log(self, x):
pass
class WandBLogger(Logger):
def log(self, x):
import wandb
wandb.log(x)
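# Minimal usage sketch (illustrative, not from the original code): any concrete
# logger only needs to implement log(). The stdout-based logger below is a
# hypothetical example for environments where wandb is not installed.
class PrintLogger(Logger):
    def log(self, x):
        print(x)

if __name__ == "__main__":
    logger = PrintLogger()
    logger.log({"loss": 0.25, "step": 1})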
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 CERN.
#
# CDS-ILS is free software; you can redistribute it and/or modify it under
# the terms of the MIT License; see LICENSE file for more details.
from flask import url_for
from invenio_accounts.testutils import login_user_via_session
from invenio_indexer.api import RecordIndexer
from invenio_search import current_search
def test_ebook_links(app, client, testdata, json_headers, admin):
"""Test ebook links when login_required."""
# re-index documents to resolve eitems
ri = RecordIndexer()
for rec in testdata["documents"]:
ri.index(rec)
current_search.flush_and_refresh(index="*")
def _test_list(endpoint):
"""Get list."""
url = url_for(endpoint)
res = client.get(url, headers=json_headers)
return res.get_json()["hits"]["hits"]
def _test_read(endpoint, pid):
"""Get record."""
url = url_for(endpoint, pid_value=pid)
res = client.get(url, headers=json_headers)
return res.get_json()
def _get_item(doc, eitem_pid):
"""Get item from the document record."""
return [
eitem
for eitem in doc["eitems"]["hits"]
if eitem["pid"] == eitem_pid
][0]
def assert_urls(urls):
"""Test urls."""
protected = urls[0]
assert protected["login_required"]
assert protected["value"] == "http://protected-cds-ils.ch/"
login_required_url = app.config["CDS_ILS_EZPROXY_URL"].format(
url=protected["value"]
)
assert protected["login_required_url"] == login_required_url
not_protected = urls[1]
assert not not_protected["login_required"]
assert not_protected["value"] == "http://cds-ils.ch/"
assert "login_required_url" not in not_protected
EITEM_PID = "eitemid-2"
DOC_PID = "docid-2"
# documents/literature search endpoint
for endpoint in [
"invenio_records_rest.docid_list",
"invenio_records_rest.litid_list",
]:
records = _test_list(endpoint)
doc = [r for r in records if r["metadata"]["pid"] == DOC_PID][0]
eitem = _get_item(doc["metadata"], EITEM_PID)
assert_urls(eitem["urls"])
# test doc item endpoint
doc = _test_read("invenio_records_rest.docid_item", DOC_PID)
eitem = _get_item(doc["metadata"], EITEM_PID)
assert_urls(eitem["urls"])
# eitems endpoint
login_user_via_session(client, email=admin.email)
records = _test_list("invenio_records_rest.eitmid_list")
eitem = [r for r in records if r["metadata"]["pid"] == EITEM_PID][0]
assert_urls(eitem["metadata"]["urls"])
eitem = _test_read("invenio_records_rest.eitmid_item", EITEM_PID)
assert_urls(eitem["metadata"]["urls"])
|
################
"""
sb_distributeObjects
Simon Bjork
May 2014
Version 1.1 (August 2014)
[email protected]
Synopsis: Create a number of evenly distributed 3d objects between two (selected) 3d objects.
OS: Windows/OSX/Linux
To install the script:
- Add the script to your Nuke pluginPath.
- Add the following to your init.py/menu.py:
import sb_distributeObjects
sb_tools = nuke.toolbar("Nodes").addMenu( "sb_Tools", icon = "sb_tools.png" )
sb_tools.addCommand('Python/sb DistributeObjects', 'sb_distributeObjects.sb_distributeObjects()', "shift+w")
"""
################
import nuke
import nukescripts
################
def sb_distributeObjects_Data():
data = {}
data["scriptName"] = "sb DistributeObjects"
data["scriptVersion"] = "1.0"
return data
def sb_distributeObjects_Help():
si = sb_distributeObjects_Data()
helpStr = ("<b>{0} {1}</b>\n\n"
"Select two or more 3d nodes and specify the number of objects to create (evenly) in-between.\n\n"
"If you select more than two objects (to get a curved path for example), make sure you select the nodes in the correct path, as the script will follow the selection order. Preferably left to right order.\n\n"
"The script works in x,y,z dimensions.".format(si["scriptName"], si["scriptVersion"] ))
return helpStr.lstrip()
class sb_distributeObjects_Panel(nukescripts.PythonPanel):
def __init__(self):
scriptData = sb_distributeObjects_Data()
nukescripts.PythonPanel.__init__(self, '{0} v{1}'.format(scriptData["scriptName"], scriptData["scriptVersion"]))
self.num = nuke.Int_Knob("number", "number of nodes")
self.transform = nuke.Enumeration_Knob("transform", "transform", ["translate", "translate/rotate", "translate/rotate/scale"])
self.controller = nuke.Boolean_Knob("controller", "add master controls")
self.controller.setFlag(nuke.STARTLINE)
self.div1 = nuke.Text_Knob("divider1", "")
self.help = nuke.PyScript_Knob("help", " ? ")
self.help.setFlag(nuke.STARTLINE)
self.div2 = nuke.Text_Knob("divider2", "")
self.createNodes = nuke.PyScript_Knob("createNodes", "create nodes")
for i in [self.num, self.transform, self.controller, self.div1, self.help, self.div2, self.createNodes]:
self.addKnob(i)
self.num.setValue(5)
# Set knobChanged commands.
def knobChanged(self, knob):
if knob is self.createNodes:
self.distributeObjects()
elif knob is self.help:
nuke.message(sb_distributeObjects_Help())
# Main function.
def distributeObjects(self):
numObj = self.num.value()
transform = self.transform.value()
controller = self.controller.value()
if not numObj:
nuke.message("Enter a number.")
return
n = []
for i in nuke.selectedNodes()[::-1]:
try:
i["translate"].value()[2]
n.append(i)
            except Exception:
continue
if len(n) < 2:
nuke.message("Select two (3d) nodes.")
return
npx = n[0]["xpos"].value()
npy = n[0]["ypos"].value()
offset = 0
# Begin undo command.
undo = nuke.Undo()
undo.begin(sb_distributeObjects_Data()["scriptName"])
# Create scene node.
scene = nuke.createNode("Scene", inpanel=False)
scene["selected"].setValue(False)
scene["xpos"].setValue(n[0]["xpos"].value())
scene["ypos"].setValue(n[0]["ypos"].value() + 500)
currSceneInp = 0
# Create controller.
ctrlKnobs1 = ["translate", "rotate"]
ctrlKnobs2 = ["scaling", "uniform_scale"]
if controller:
ctrl = nuke.createNode("NoOp", inpanel=False)
ctrl["selected"].setValue(False)
ctrlT = nuke.XYZ_Knob("translate", "translate")
ctrlR = nuke.XYZ_Knob("rotate", "rotate")
ctrlS = nuke.XYZ_Knob("scaling", "scale")
ctrlUS = nuke.Double_Knob("uniform_scale", "uniform scale")
for i in [ctrlT, ctrlR, ctrlS, ctrlUS]:
ctrl.addKnob(i)
ctrl["xpos"].setValue(n[0]["xpos"].value() - 100)
ctrl["ypos"].setValue(n[0]["ypos"].value() - 100)
ctrl["tile_color"].setValue(3448912)
ctrl["note_font_size"].setValue(36)
ctrl["label"].setValue("master controls")
ctrlS.setValue([1.0,1.0,1.0])
ctrlUS.setValue(1.0)
        for i in range(len(n)):  # iterate over the selected 3d nodes in selection order
# Setup selected nodes.
n[i]["tile_color"].setValue(11993343)
n[i]["selected"].setValue(False)
scene.setInput(currSceneInp, n[i])
currSceneInp+=1
n[i]["xpos"].setValue(npx+offset)
n[i]["ypos"].setValue(npy)
offset = offset + 100
if controller:
for j in ctrlKnobs1:
n[i][j].setExpression("{0}+{1}.{0}".format(j, ctrl["name"].value() ))
for j in ctrlKnobs2:
n[i][j].setExpression("{0}*{1}.{0}".format(j, ctrl["name"].value() ))
# Break out if it's the last selected node.
if i == len(n)-1:
break
# Setup variables.
ax, ay, az = n[i]["translate"].value()
bx, by, bz = n[i+1]["translate"].value()
arx, ary, arz = n[i]["rotate"].value()
brx, bry, brz = n[i+1]["rotate"].value()
asx, asy, asz = n[i]["scaling"].value()
bsx, bsy, bsz = n[i+1]["scaling"].value()
aus = n[i]["uniform_scale"].value()
bus = n[i+1]["uniform_scale"].value()
# Number of nodes.
numNodes = float(numObj)+1
# Translate calculation.
txCalc = (bx-ax)/numNodes
tyCalc = (by-ay)/numNodes
tzCalc = (bz-az)/numNodes
# Rotation calculation.
rxCalc = (brx-arx)/numNodes
ryCalc = (bry-ary)/numNodes
rzCalc = (brz-arz)/numNodes
# Scale calculation.
sxCalc = (bsx-asx)/numNodes
syCalc = (bsy-asy)/numNodes
szCalc = (bsz-asz)/numNodes
usCalc = (bus-aus)/numNodes
# Create in-between nodes.
for j in range(1,int(numNodes)):
c = nuke.createNode(n[i].Class(), inpanel = False)
c["selected"].setValue(False)
# Translate.
newX = ax+(txCalc*j)
newY = ay+(tyCalc*j)
newZ = az+(tzCalc*j)
c["translate"].setValue([newX, newY, newZ])
# Rotate.
if transform in ["translate/rotate", "translate/rotate/scale"]:
newRX = arx+(rxCalc*j)
newRY = ary+(ryCalc*j)
newRZ = arz+(rzCalc*j)
c["rotate"].setValue([newRX, newRY, newRZ])
# Scale.
if transform == "translate/rotate/scale":
newSX = asx+(sxCalc*j)
newSY = asy+(syCalc*j)
                    newSZ = asz+(szCalc*j)
newUS = aus+(usCalc*j)
c["scaling"].setValue([newSX, newSY, newSZ])
c["uniform_scale"].setValue(newUS)
if controller:
for k in ctrlKnobs1:
c[k].setExpression("{0}+{1}.{0}".format(k, ctrl["name"].value() ))
for k in ctrlKnobs2:
c[k].setExpression("{0}*{1}.{0}".format(k, ctrl["name"].value() ))
if npx == "":
npx = n[i]["xpos"].value()
npy = n[i]["ypos"].value()
c["xpos"].setValue(npx + offset)
c["ypos"].setValue(npy)
scene.setInput(currSceneInp, c)
currSceneInp+=1
offset = offset + 100
# End undo command.
undo.end()
# Run main script.
def sb_distributeObjects():
sb_distributeObjects_Panel().show() |
import os
import shutil
import cv2
import torch
from PIL import Image
from tqdm import tqdm
import numpy as np
import torchvision.transforms as transform_lib
import matplotlib.pyplot as plt
from utils.util import download_zipfile, mkdir
from utils.v2i import convert_frames_to_video
class OPT():
pass
class DVP():
def __init__(self):
self.small = (320, 180)
self.in_size = (0, 0)
def test(self, black_white_path, colorized_path, output_path, opt=None):
assert os.path.exists(black_white_path) and os.path.exists(colorized_path)
self.downscale(black_white_path)
self.downscale(colorized_path)
os.system(f'python3 ./models/DVP/main_IRT.py --save_freq {opt.sf} --max_epoch {opt.me} --input {black_white_path} --processed {colorized_path} --model temp --with_IRT 1 --IRT_initialization 1 --output {opt.op}')
frames_path = f"{opt.op}/temp_IRT1_initial1/{os.path.basename(black_white_path)}/00{opt.me}"
self.upscale(frames_path)
length = len(os.listdir(frames_path))
frames = [f"out_main_{str(i).zfill(5)}.jpg" for i in range(length)]
convert_frames_to_video(frames_path, output_path, frames)
def downscale(self, path):
frames = os.listdir(path)
frame = Image.open(os.path.join(path, frames[0]))
self.in_size = frame.size
for each in frames:
img = Image.open(os.path.join(path, each))
            img = img.resize(self.small, Image.LANCZOS)  # LANCZOS is the same filter as the old ANTIALIAS name
img.save(os.path.join(path, each))
def upscale(self, path):
frames = os.listdir(path)
for each in frames:
img = Image.open(os.path.join(path, each))
            img = img.resize(self.in_size, Image.LANCZOS)  # LANCZOS is the same filter as the old ANTIALIAS name
img.save(os.path.join(path, each)) |
import os
import sys
if os.path.realpath(os.getcwd()) != os.path.dirname(os.path.realpath(__file__)):
sys.path.append(os.getcwd())
import deephar
from deephar.config import mpii_dataconf
from deephar.config import pennaction_dataconf
from deephar.config import ModelConfig
from deephar.data import MpiiSinglePerson
from deephar.data import PennAction
from deephar.data import BatchLoader
from deephar.models import split_model
from deephar.models import spnet
from deephar.utils import *
sys.path.append(os.path.join(os.getcwd(), 'exp/common'))
from datasetpath import datasetpath
from mpii_tools import eval_singleperson_pckh
from penn_tools import eval_singleclip_generator
from penn_tools import eval_multiclip_dataset
logdir = './'
if len(sys.argv) > 1:
logdir = sys.argv[1]
mkdir(logdir)
sys.stdout = open(str(logdir) + '/log.txt', 'w')
num_frames = 8
cfg = ModelConfig((num_frames,) + pennaction_dataconf.input_shape, pa16j2d,
num_actions=[15], num_pyramids=6, action_pyramids=[5, 6],
num_levels=4, pose_replica=True,
num_pose_features=160, num_visual_features=160)
num_predictions = spnet.get_num_predictions(cfg.num_pyramids, cfg.num_levels)
num_action_predictions = \
spnet.get_num_predictions(len(cfg.action_pyramids), cfg.num_levels)
"""Load datasets"""
mpii = MpiiSinglePerson(datasetpath('MPII'), dataconf=mpii_dataconf,
poselayout=pa16j2d)
penn_seq = PennAction(datasetpath('Penn_Action'), pennaction_dataconf,
poselayout=pa16j2d, topology='sequences', use_gt_bbox=False,
pred_bboxes_file='pred_bboxes_penn.json', clip_size=num_frames)
"""Build the full model"""
full_model = spnet.build(cfg)
"""Load pre-trained weights from pose estimation and copy replica layers."""
full_model.load_weights(
'output/penn_multimodel_trial-07-full_2e9fa5a/weights_mpii+penn_ar_028.hdf5',
by_name=True)
"""This call splits the model into its parts: pose estimation and action
recognition, so we can evaluate each part separately on its respective datasets.
"""
models = split_model(full_model, cfg, interlaced=False,
model_names=['2DPose', '2DAction'])
"""Trick to pre-load validation samples from MPII."""
mpii_val = BatchLoader(mpii, ['frame'], ['pose', 'afmat', 'headsize'],
VALID_MODE, batch_size=mpii.get_length(VALID_MODE), shuffle=False)
printnl('Pre-loading MPII validation data...')
[x_val], [p_val, afmat_val, head_val] = mpii_val[0]
"""Define a loader for PennAction test samples. """
penn_te = BatchLoader(penn_seq, ['frame'], ['pennaction'], TEST_MODE,
batch_size=1, shuffle=False)
"""Evaluate on 2D action recognition (PennAction)."""
s = eval_singleclip_generator(models[1], penn_te)
print ('Best score on PennAction (single-clip): ' + str(s))
s = eval_multiclip_dataset(models[1], penn_seq,
subsampling=pennaction_dataconf.fixed_subsampling)
print ('Best score on PennAction (multi-clip): ' + str(s))
"""Evaluate on 2D pose estimation (MPII)."""
s = eval_singleperson_pckh(models[0], x_val, p_val[:, :, 0:2], afmat_val, head_val)
print ('Best score on MPII: ' + str(s))
|
import numpy as np
import inspect
def start_indices(maximum: int, n: int):
    '''
    Draw random starting indices for the events to be inserted.
    Args:
        maximum: upper bound (exclusive) for the drawn integers
        n: how many indices to draw
    Returns: array of n starting indices of the events to be inserted
    '''
return np.random.randint(0, maximum, n)
def get_function_names(anomaly_object: object):
"""
Args:
anomaly_object: object containing the functions
Returns: a list of functions excl. __init__
"""
functions = [func[0] for func in inspect.getmembers(anomaly_object, predicate=inspect.ismethod) if
not func[0].startswith('__')]
return functions
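# Illustrative sketch (not part of the original module): get_function_names lists
# an object's public methods, so a small made-up class is enough to demonstrate it.
class _DemoAnomaly:
    def spike(self):
        pass

    def drift(self):
        pass

if __name__ == "__main__":
    print(get_function_names(_DemoAnomaly()))  # -> ['drift', 'spike']
    print(start_indices(maximum=100, n=3))     # three random indices in [0, 100)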
|
"""Auto-generated file, do not edit by hand. BS metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_BS = PhoneMetadata(id='BS', country_code=1, international_prefix='011',
general_desc=PhoneNumberDesc(national_number_pattern='[2589]\\d{9}', possible_number_pattern='\\d{7}(?:\\d{3})?'),
fixed_line=PhoneNumberDesc(national_number_pattern='242(?:3(?:02|[236][1-9]|4[0-24-9]|5[0-68]|7[3467]|8[0-4]|9[2-467])|461|502|6(?:0[1-3]|12|7[67]|8[78]|9[89])|7(?:02|88))\\d{4}', possible_number_pattern='\\d{7}(?:\\d{3})?', example_number='2423456789'),
mobile=PhoneNumberDesc(national_number_pattern='242(?:3(?:5[79]|[79]5)|4(?:[2-4][1-9]|5[1-8]|6[2-8]|7\\d|81)|5(?:2[45]|3[35]|44|5[1-9]|65|77)|6[34]6|727)\\d{4}', possible_number_pattern='\\d{10}', example_number='2423591234'),
toll_free=PhoneNumberDesc(national_number_pattern='242300\\d{4}|8(?:00|44|55|66|77|88)[2-9]\\d{6}', possible_number_pattern='\\d{10}', example_number='8002123456'),
premium_rate=PhoneNumberDesc(national_number_pattern='900[2-9]\\d{6}', possible_number_pattern='\\d{10}', example_number='9002123456'),
shared_cost=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
personal_number=PhoneNumberDesc(national_number_pattern='5(?:00|33|44|66|77|88)[2-9]\\d{6}', possible_number_pattern='\\d{10}', example_number='5002345678'),
voip=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
pager=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
uan=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
voicemail=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
no_international_dialling=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
national_prefix='1',
national_prefix_for_parsing='1',
leading_digits='242')
|
import json
from unittest import mock, TestCase
VALID_REQUEST_DATA = {
'data': '{"contact_name": "Test", "marketing_source_bank": "", '
'"website": ''"example.com", "exporting": "False", "phone_number": "",'
' ''"marketing_source": "Social media", "opt_in": true, ''"marketing_s'
'ource_other": "", "email_address1": ''"[email protected]", "agree_term'
's": true, "company_name": "Example ''Limited", "email_address2": "tes'
'[email protected]"}'
}
VALID_REQUEST_DATA_JSON = json.dumps(VALID_REQUEST_DATA)
class MockBoto(TestCase):
def setUp(self):
self.boto_client_mock = mock.patch(
'botocore.client.BaseClient._make_api_call'
)
self.boto_resource_mock = mock.patch(
'boto3.resource'
)
self.boto_client_mock.start()
self.boto_resource_mock.start()
def tearDown(self):
self.boto_client_mock.stop()
self.boto_resource_mock.stop()
|
import importlib
import logging
import time
import torch
from omegaconf import DictConfig
from ...pl_utils.model import Model as PlModel
from ..utils.gaze import compute_angle_error, get_loss_func
from ..utils.optimizer import configure_optimizers
from ..utils.utils import initialize_weight, load_weight
logger = logging.getLogger(__name__)
def create_torch_model(config: DictConfig) -> torch.nn.Module:
module = importlib.import_module(
f'pl_gaze_estimation.models.mpiifacegaze.models.{config.MODEL.NAME}')
model = getattr(module, 'Network')(config)
if 'INIT' in config.MODEL:
initialize_weight(config.MODEL.INIT, model)
else:
logger.warning('INIT key is missing in config.MODEL.')
if 'PRETRAINED' in config.MODEL:
load_weight(config.MODEL, model)
else:
logger.warning('PRETRAINED key is missing in config.MODEL.')
return model
class Model(PlModel):
def __init__(self, config: DictConfig):
super().__init__(config)
self.model = create_torch_model(config)
if 'OPTIMIZER' in self.config:
self.lr = self.config.OPTIMIZER.LR
self.loss_fn = get_loss_func(config)
def forward(self, x):
return self.model(x)
def _evaluate(self, batch):
images, _, gazes = batch
outputs = self(images)
loss = self.loss_fn(outputs, gazes)
angle_error = compute_angle_error(outputs, gazes).mean()
return {'loss': loss, 'angle_error': angle_error.detach()}
def training_step(self, batch, batch_index):
res = self._evaluate(batch)
self.log_dict({f'train/{key}': val
for key, val in res.items()},
prog_bar=self.config.LOG.SHOW_TRAIN_IN_PROGRESS_BAR,
on_step=True,
on_epoch=True,
logger=True,
sync_dist=True,
reduce_fx='mean')
self.log_dict(
{
'time/elapsed':
time.time() - self._train_start_time + self._train_time +
self._val_time
},
on_step=True,
on_epoch=False,
logger=True)
if self.lr_schedulers():
if isinstance(self.lr_schedulers(), list):
scheduler = self.lr_schedulers()[-1]
else:
scheduler = self.lr_schedulers()
self.log('train/lr',
scheduler.get_last_lr()[0],
prog_bar=self.config.LOG.SHOW_TRAIN_IN_PROGRESS_BAR,
on_step=True,
on_epoch=False,
logger=True)
return res
def validation_step(self, batch, batch_index):
res = self._evaluate(batch)
return res | {
'size': torch.tensor(len(batch[0]), device=res['loss'].device)
}
def validation_epoch_end(self, outputs) -> None:
res = self._accumulate_data(outputs)
self.log_dict(
{f'val/{key}': val
for key, val in res.items() if key != 'total'},
prog_bar=True,
on_epoch=True,
logger=True,
sync_dist=False)
def test_step(self, batch, batch_index):
return self.validation_step(batch, batch_index)
def test_epoch_end(self, outputs) -> None:
res = self._accumulate_data(outputs)
self._display_result(res)
def configure_optimizers(self):
return configure_optimizers(self.config, self, self.lr)
|
class Tiger:
def __init__(self, name):
self.t_name = name
def name(self):
return self.t_name
def greet(self):
return "roar"
def menu(self):
return "zebre"
def create(name):
return Tiger(name)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Overall execution flow of the script:
# 1. Log in to Harbor with a request and obtain a session.
# 2. Fetch all projects.
# 3. Loop over the projects and fetch all of their repositories.
# 4. Fetch all tags of each repository.
# 5. Build the full image name from the repository name and tag.
# 6. Connect to both Harbor instances: docker pull each image from the source Harbor, docker push it to the new Harbor, then delete the local copies.
# 7. During this process, after each image is pushed, its name is stored in Redis as a key with the push status as the value, so failed pushes can be handled afterwards.
import requests
import subprocess
import json
import redis
import sys
class RequestClient(object):
def __init__(self, login_url, username, password):
self.username = username
self.password = password
self.login_url = login_url
self.session = requests.Session()
self.login()
def login(self):
self.session.post(self.login_url, params={"principal": self.username, "password": self.password})
class HarborRepos(object):
def __init__(self, harbor_domain, harbor_new_domain, password, new_password, schema="https", new_schema="https",
username="admin", new_username="admin"):
self.schema = schema
self.harbor_domain = harbor_domain
self.harbor_new_domain = harbor_new_domain
self.harbor_url = self.schema + "://" + self.harbor_domain
self.login_url = self.harbor_url + "/login"
self.api_url = self.harbor_url + "/api"
self.pro_url = self.api_url + "/projects"
self.repos_url = self.api_url + "/repositories"
self.username = username
self.password = password
self.client = RequestClient(self.login_url, self.username, self.password)
self.new_schema = new_schema
self.harbor_new_url = self.new_schema + "://" + self.harbor_new_domain
self.login_new_url = self.harbor_new_url + "/c/login"
self.api_new_url = self.harbor_new_url + "/api"
self.pro_new_url = self.api_new_url + "/projects"
self.new_username = new_username
self.new_password = new_password
self.new_client = RequestClient(self.login_new_url, self.new_username, self.new_password)
def __fetch_pros_obj(self):
# TODO
self.pros_obj = self.client.session.get(self.pro_url).json()
return self.pros_obj
def fetch_pros_id(self):
self.pros_id = []
# TODO
pro_res = self.__fetch_pros_obj()
for i in pro_res:
self.pros_id.append(i['project_id'])
return self.pros_id
def fetch_pro_name(self, pro_id):
# TODO
pro_res = self.__fetch_pros_obj()
for i in pro_res:
if i["project_id"] == pro_id:
self.pro_name = i["name"]
return self.pro_name
# def judge_pros(self,pro_name):
# res = self.new_client.session.head(self.pro_new_url,params={"project_name": pro_name})
# print(res.status_code)
# if res.status_code == 404:
# return False
# else:
# return True
def create_pros(self, pro_name):
'''
{
"project_name": "string",
"public": 1
}
'''
pro_res = self.__fetch_pros_obj()
pro_obj = {}
pro_obj["metadata"]={}
public = "false"
for i in pro_res:
if i["name"] == pro_name:
pro_obj["project_name"] = pro_name
if i["public"]:
public = "true"
pro_obj["metadata"]["public"] = public
# pro_obj["metadata"]["enable_content_trust"] = i["enable_content_trust"]
# pro_obj["metadata"]["prevent_vul"] = i["prevent_vulnerable_images_from_running"]
# pro_obj["metadata"]["severity"] = i["prevent_vulnerable_images_from_running_severity"]
# pro_obj["metadata"]["auto_scan"] = i["automatically_scan_images_on_push"]
headers = {"content-type": "application/json"}
print(pro_obj)
res = self.new_client.session.post(self.pro_new_url, headers=headers, data=json.dumps(pro_obj))
if res.status_code == 409:
print("\033[32m 项目 %s 已经存在!\033[0m" % pro_name)
return True
elif res.status_code == 201:
# print(res.status_code)
print("\033[33m 创建项目%s成功!\033[0m" % pro_name)
return True
else:
print(res.status_code)
print("\033[35m 创建项目%s失败!\033[0m" % pro_name)
return False
def fetch_repos_name(self, pro_id):
self.repos_name = []
repos_res = self.client.session.get(self.repos_url, params={"project_id": pro_id})
# TODO
for repo in repos_res.json():
self.repos_name.append(repo['name'])
return self.repos_name
def fetch_repos(self, repo_name):
self.repos = {}
tag_url = self.repos_url + "/" + repo_name + "/tags"
# TODO
for tag in self.client.session.get(tag_url).json():
full_repo_name = self.harbor_domain + "/" + repo_name + ":" + tag["name"]
full_new_repo_name = self.harbor_new_domain + "/" + repo_name + ":" + tag["name"]
self.repos[full_repo_name] = full_new_repo_name
return self.repos
def migrate_repos(self, full_repo_name, full_new_repo_name, redis_conn):
# repo_cmd_dict = {}
if redis_conn.exists(full_repo_name) and redis_conn.get(full_repo_name) == "1":
print("\033[32m镜像 %s 已经存在!\033[0m" % full_repo_name)
return
else:
cmd_list = []
pull_old_repo = "docker pull " + full_repo_name
tag_repo = "docker tag " + full_repo_name + " " + full_new_repo_name
push_new_repo = "docker push " + full_new_repo_name
del_old_repo = "docker rmi -f " + full_repo_name
del_new_repo = "docker rmi -f " + full_new_repo_name
cmd_list.append(pull_old_repo)
cmd_list.append(tag_repo)
cmd_list.append(push_new_repo)
cmd_list.append(del_old_repo)
cmd_list.append(del_new_repo)
# repo_cmd_dict[full_repo_name] = cmd_list
            ret_sum = 0
            for cmd in cmd_list:
                print("\033[34m Current command: %s\033[0m" % cmd)
                ret = subprocess.call(cmd, shell=True)
                ret_sum += ret
            if ret_sum == 0:
print("\033[32m migrate %s success!\033[0m" % full_repo_name)
redis_conn.set(full_repo_name, 1)
else:
print("\033[33m migrate %s faild!\033[0m" % full_repo_name)
redis_conn.set(full_repo_name, 0)
return
if __name__ == "__main__":
harbor_domain = "hub.test.com"
harbor_new_domain = "hub-new.test.com"
re_pass = "xxxxxxx"
re_new_pass = "xxxxxxx"
pool = redis.ConnectionPool(host='localhost', port=6379,
                                decode_responses=True)  # host is the Redis host; both the Redis server and client must be running; the default Redis port is 6379
redis_conn = redis.Redis(connection_pool=pool)
res = HarborRepos(harbor_domain, harbor_new_domain, re_pass, re_new_pass)
# pros_id = res.fetch_pro_id()
for pro_id in res.fetch_pros_id():
#pro_id = 13
pro_name = res.fetch_pro_name(pro_id)
# print(pro_name)
# ret = res.judge_pros(pro_name)
# print(ret)
res.create_pros(pro_name)
#sys.exit()
for pro_id in res.fetch_pros_id():
repos_name = res.fetch_repos_name(pro_id=pro_id)
for repo_name in repos_name:
repos = res.fetch_repos(repo_name=repo_name)
for full_repo_name, full_new_repo_name in repos.items():
res.migrate_repos(full_repo_name, full_new_repo_name, redis_conn)
|
import time
from typing import List
from behave import given, then
from test.integrationtests.voight_kampff import (
emit_utterance,
format_dialog_match_error,
wait_for_dialog_match,
)
CANCEL_RESPONSES = (
"no-active-timer",
"cancel-all",
"cancelled-single-timer",
"cancelled-timer-named",
"cancelled-timer-named-ordinal",
)
@given("an active {duration} timer")
def start_single_timer(context, duration):
"""Clear any active timers and start a single timer for a specified duration."""
_cancel_all_timers(context)
_start_a_timer(
context.bus, utterance="set a timer for " + duration, response=["started-timer"]
)
@given("an active timer named {name}")
def start_single_named_timer(context, name):
"""Clear any active timers and start a single named timer for 90 minutes."""
_cancel_all_timers(context)
_start_a_timer(
context.bus,
utterance="set a timer for 90 minutes named " + name,
response=["started-timer-named"],
)
@given("an active timer for {duration} named {name}")
def start_single_named_dialog_timer(context, duration, name):
"""Clear any active timers and start a single named timer for specified duration."""
_cancel_all_timers(context)
_start_a_timer(
context.bus,
utterance=f"set a timer for {duration} named {name}",
response=["started-timer-named"],
)
@given("multiple active timers")
def start_multiple_timers(context):
"""Clear any active timers and start multiple timers by duration."""
_cancel_all_timers(context)
for row in context.table:
_start_a_timer(
context.bus,
utterance="set a timer for " + row["duration"],
response=["started-timer", "started-timer-named"],
)
def _start_a_timer(bus, utterance: str, response: List[str]):
"""Helper function to start a timer.
If one of the expected responses is not spoken, cause the step to error out.
"""
emit_utterance(bus, utterance)
match_found, speak_messages = wait_for_dialog_match(bus, response)
assert match_found, format_dialog_match_error(response, speak_messages)
@given("no active timers")
def reset_timers(context):
"""Cancel all active timers to test how skill behaves when no timers are set."""
_cancel_all_timers(context)
def _cancel_all_timers(context):
"""Cancel all active timers.
If one of the expected responses is not spoken, cause the step to error out.
"""
emit_utterance(context.bus, "cancel all timers")
match_found, speak_messages = wait_for_dialog_match(context.bus, CANCEL_RESPONSES)
assert match_found, format_dialog_match_error(CANCEL_RESPONSES, speak_messages)
@given("a timer is expired")
def let_timer_expire(context):
"""Start a short timer and let it expire to test expiration logic."""
emit_utterance(context.bus, "set a 3 second timer")
expected_response = ["started.timer"]
match_found, speak_messages = wait_for_dialog_match(context.bus, expected_response)
assert match_found, format_dialog_match_error(expected_response, speak_messages)
time.sleep(4)
@then('"mycroft-timer" should stop beeping')
def then_stop_beeping(context):
# TODO: Better check!
import psutil
for i in range(10):
if "paplay" not in [p.name() for p in psutil.process_iter()]:
break
time.sleep(1)
else:
assert False, "Timer is still ringing"
|
from util import clean_string
from bs4 import NavigableString
from typing import List
import activity
from common import Choice
class Vote:
def __init__(self, meeting_topic, vote_number: int, yes: int):
"""A Vote represents a single vote in a meeting.
Args:
meeting_topic (MeetingTopic): The meeting topic
vote_number (int): Number of the vote in this meeting (e.g. 1)
yes (int): Number of yes votes
"""
self.meeting = meeting_topic.meeting
self.meeting_topic = meeting_topic
self.vote_number = vote_number
self.yes = yes
self.unsure = False
class GenericVote(Vote):
"""A Vote represents a single vote in a meeting.
"""
def __init__(self, meeting_topic, vote_number: int, yes: int, no: int, abstention: int):
"""A Vote represents a single vote in a meeting.
Args:
meeting_topic (MeetingTopic): The meeting topic
vote_number (int): Number of the vote in this meeting (e.g. 1)
yes (int): Number of yes votes
no (int): Number of no votes
abstention (int): Number of abstentions
"""
Vote.__init__(self, meeting_topic, vote_number, yes)
self.yes_voters = []
self.no = no
self.no_voters = []
self.abstention = abstention
self.abstention_voters = []
def __repr__(self):
return f"Vote({self.vote_number}, {self.yes}, {self.no}, {self.abstention})"
def to_dict(self, session_base_URI: str):
return {
'id': self.vote_number,
'type': 'general',
'yes': self.yes,
'no': self.no,
'abstention': self.abstention,
'passed': self.has_passed(),
'voters': {
"yes": [f'{session_base_URI}members/{member.uuid}.json' for member in self.yes_voters],
"no": [f'{session_base_URI}members/{member.uuid}.json' for member in self.no_voters],
"abstention": [f'{session_base_URI}members/{member.uuid}.json' for member in self.abstention_voters]
}
}
def has_passed(self):
"""Does this motion have the majority of votes
Returns:
bool: Does this motion have the majority of votes
"""
# FIXME: No Quorum Check (rule 42.5 of parliament)
return self.yes > self.no + self.abstention
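    # Illustrative note (numbers made up): with yes=80, no=60 and abstention=5 the
    # motion passes because 80 > 65; with abstention=25 it would fail, since
    # abstentions count against the required majority here.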
@staticmethod
def from_table(meeting_topic, vote_number: int, vote_rows: NavigableString):
"""Generate a new Vote from a parsed table.
Args:
vote_number (int): Number of the vote in this meeting (e.g. 1)
vote_rows (NavigableString): Vote rows as obtained by BeautifulSoup
Returns:
Vote:
"""
yes_str = clean_string(vote_rows[1].find_all(
'td')[1].find('p').get_text())
if not yes_str:
# Sometimes, tables are empty... example: https://www.dekamer.be/doc/PCRI/html/55/ip100x.html
return None
yes = int(yes_str)
no = int(clean_string(vote_rows[2].find_all(
'td')[1].find('p').get_text()))
abstention = int(clean_string(
vote_rows[3].find_all('td')[1].find('p').get_text()))
return GenericVote(meeting_topic, vote_number, yes, no, abstention)
def set_yes_voters(self, l):
"""Set the members who voted for
Args:
l (List[Member]): A list of Members who voted for
"""
if abs(len(l) - self.yes) > 2:
# Sometimes there are some inconsistencies in the counts and the reported names
# We allow some tolerance for this
print(
f'NOTE: The number of yes voters did not match the provided list: {len(l)} instead of {self.yes}')
self.unsure = True
self.yes = len(l)
self.yes_voters = l
post_vote_activity(self, Choice.YES, l)
def set_no_voters(self, l):
"""Set the members who voted against
Args:
l (List[Member]): A list of Members who voted against
"""
if abs(len(l) - self.no) > 2:
# Sometimes there are some inconsistencies in the counts and the reported names
# We allow some tolerance for this
print(
f'NOTE: The number of no voters did not match the provided list: {len(l)} instead of {self.no}')
self.unsure = True
self.no = len(l)
self.no_voters = l
post_vote_activity(self, Choice.NO, l)
def set_abstention_voters(self, l):
"""Set the members who abstained from voting for this motion
Args:
l (List[Member]): A list of Members who abstained from the vote
"""
if abs(len(l) - self.abstention) > 2:
# Sometimes there are some inconsistencies in the counts and the reported names
# We allow some tolerance for this
print(
f'NOTE: The number of abstention voters did not match the provided list: {len(l)} instead of {self.abstention}')
self.unsure = True
self.abstention = len(l)
self.abstention_voters = l
post_vote_activity(self, Choice.ABSTENTION, l)
class LanguageGroupVote(GenericVote):
"""For some voting matters a majority in both Language Groups is needed"""
def __init__(self, meeting_topic, vote_number: int, vote_NL: Vote, vote_FR: Vote):
"""For some voting matters a majority in both Language Groups is needed
Args:
meeting_topic (MeetingTopic): The meeting topic
vote_number (int): Number of the vote in this meeting (e.g. 1)
vote_NL (Vote): The Vote in the Dutch-speaking part of the Parliament
vote_FR (Vote): The Vote in the French-speaking part of the Parliament
"""
GenericVote.__init__(self, meeting_topic, vote_number, vote_NL.yes + vote_FR.yes,
vote_NL.no + vote_FR.no, vote_NL.abstention + vote_FR.abstention)
self.vote_NL = vote_NL
self.vote_FR = vote_FR
def __repr__(self):
return "LanguageGroupVote(%d, %d, %d)" % (self.vote_number, self.vote_NL, self.vote_FR)
def to_dict(self, session_base_URI: str):
return {
'id': self.vote_number,
'type': 'language_group',
'yes': self.vote_NL.yes + self.vote_FR.yes,
'no': self.vote_NL.no + self.vote_FR.no,
'abstention': self.vote_NL.abstention + self.vote_FR.abstention,
'passed': self.has_passed(),
'voters': {
"yes": [f'{session_base_URI}members/{member.uuid}.json' for member in self.yes_voters],
"no": [f'{session_base_URI}members/{member.uuid}.json' for member in self.no_voters],
"abstention": [f'{session_base_URI}members/{member.uuid}.json' for member in self.abstention_voters]
},
'detail': {
"NL": self.vote_NL.to_dict(session_base_URI),
"FR": self.vote_FR.to_dict(session_base_URI)
}
}
def has_passed(self):
"""The vote has to pass in both halves of the parliament.
Returns:
bool: Has the vote obtained the necessary majority?
"""
return self.vote_NL.has_passed() and self.vote_FR.has_passed()
@staticmethod
def from_table(meeting_topic, vote_number: int, vote_rows: NavigableString):
"""Generate a new Vote from a parsed table.
Args:
meeting_topic (MeetingTopic): The meeting topic
vote_number (int): Number of the vote in this meeting (e.g. 1)
vote_rows (NavigableString): Vote rows as obtained by BeautifulSoup
Returns:
Vote:
"""
yes_fr = int(clean_string(
vote_rows[2].find_all('td')[1].find('p').get_text()))
no_fr = int(clean_string(
vote_rows[3].find_all('td')[1].find('p').get_text()))
abstention_fr = int(clean_string(
vote_rows[4].find_all('td')[1].find('p').get_text()))
yes_nl = int(clean_string(
vote_rows[2].find_all('td')[3].find('p').get_text()))
no_nl = int(clean_string(
vote_rows[3].find_all('td')[3].find('p').get_text()))
abstention_nl = int(clean_string(
vote_rows[4].find_all('td')[3].find('p').get_text()))
return LanguageGroupVote(meeting_topic, vote_number, GenericVote(meeting_topic, vote_number, yes_nl, no_nl, abstention_nl), GenericVote(meeting_topic, vote_number, yes_fr, no_fr, abstention_fr))
class ElectronicGenericVote(Vote):
"""Some voting are anonymously organised electronically. We don't have the names in this case"""
def __init__(self, meeting_topic, vote_number: int, yes: int, no: int):
"""A Vote represents a single vote in a meeting.
Args:
meeting_topic (MeetingTopic): The meeting topic
vote_number (int): Number of the vote in this meeting (e.g. 1)
yes (int): Number of yes votes
no (int): Number of no votes
"""
Vote.__init__(self, meeting_topic, vote_number, yes)
self.no = no
def __repr__(self):
return f"ElectronicGenericVote({self.vote_number}, {self.yes}, {self.no})"
def has_passed(self):
return self.yes > self.no and self.yes + self.no > 75
def to_dict(self, session_base_URI: str):
return {
'id': self.vote_number,
'type': 'electronic_generic',
'yes': self.yes,
'no': self.no,
'passed': self.has_passed()
}
class ElectronicAdvisoryVote(Vote):
"""Some voting are anonymously organised electronically to inquire whether more opinions are required.
We don't have the names in this case
"""
def __init__(self, meeting_topic, vote_number: int, yes: int):
"""A Vote represents a single vote in a meeting.
Args:
meeting_topic (MeetingTopic): The meeting topic
vote_number (int): Number of the vote in this meeting (e.g. 1)
yes (int): Number of yes votes
"""
Vote.__init__(self, meeting_topic, vote_number, yes)
def __repr__(self):
return f"ElectronicAdvisoryVote({self.vote_number}, {self.yes})"
def has_passed(self):
"""Does this advisory request reach more than the threshold of 1/3 of the yes votes to pass
Returns:
bool: Does this motion have the majority of votes
"""
return self.yes > 50
def to_dict(self, session_base_URI: str):
return {
'id': self.vote_number,
'type': 'electronic_advisory',
'yes': self.yes,
'passed': self.has_passed()
}
def electronic_vote_from_table(meeting_topic, vote_number: int, vote_start_node: NavigableString):
"""Generate a new electronic (advisory or generic) vote from a parsed table.
Args:
meeting_topic (MeetingTopic): The meeting topic
vote_number (int): Number of the vote in this meeting (e.g. 1)
vote_start_node (NavigableString): Vote start node as obtained by BeautifulSoup
Returns:
Vote:
"""
yes = int(clean_string(vote_start_node.find_all(
'td')[1].find('p').get_text()))
vote_end_node = vote_start_node.find_next_sibling().find_next_sibling()
if not vote_end_node or vote_end_node.name != 'table':
return ElectronicAdvisoryVote(meeting_topic, vote_number, yes)
no = int(clean_string(vote_end_node.find_all(
'td')[1].find('p').get_text()))
return ElectronicGenericVote(meeting_topic, vote_number, yes, no)
def post_vote_activity(vote: Vote, choice: Choice, members: List):
for member in members:
member.post_activity(activity.VoteActivity(member, vote, choice))
|
from django.contrib import admin
from admin_ordering.admin import OrderableAdmin
from testapp import models
class Child1Inline(OrderableAdmin, admin.TabularInline):
model = models.Child1
fk_name = 'parent'
ordering_field = 'ordering'
admin.site.register(
models.Parent1,
inlines=[
Child1Inline,
],
)
class Child2Inline(OrderableAdmin, admin.TabularInline):
model = models.Child2
fk_name = 'parent'
ordering_field = 'ordering'
admin.site.register(
models.Parent2,
inlines=[
Child2Inline,
],
)
class Child3Inline(OrderableAdmin, admin.TabularInline):
model = models.Child3
fk_name = 'parent'
ordering_field = 'ordering'
admin.site.register(
models.Parent3,
inlines=[
Child3Inline,
],
)
class Parent4Admin(OrderableAdmin, admin.ModelAdmin):
list_display = ('title', '_orderaaaaa')
list_editable = ('_orderaaaaa',)
ordering_field = '_orderaaaaa'
admin.site.register(
models.Parent4,
Parent4Admin,
)
|
import create
import time
ROOMBA_PORT="/dev/ttyUSB0"
robot = create.Create(ROOMBA_PORT)
#robot.printSensors() # debug output
wall_fun = robot.senseFunc(create.WALL_SIGNAL) # get a callback for a sensor.
#print (wall_fun()) # print a sensor value.
robot.toSafeMode()
#cnt = 0
robot.go(0,90)
#while cnt < 10 :
# robot.go(100,0) # spin
# cnt+=1
# time.sleep(0.5)
time.sleep(0.1)
robot.close()
|
def linear_search(items, target):
    """
    Return the index position of the target if found, else return None
    """
    for i in range(0, len(items)):
        if items[i] == target:
            return i
    return None
def verify_linear_search(value):
if value is not None:
print("Target found at index: ", value)
else:
print("Target not found in the list.")
numbers = [1,2,3,4,5,6,7,8,9,10]
result = linear_search(numbers, 12)
verify_linear_search(result)
result = linear_search(numbers, 6)
verify_linear_search(result) |
from __future__ import annotations
import fnmatch
import os
import pathlib
import platform
import shutil
import subprocess
import sys
from typing import Optional
import xappt
import xappt_qt
ROOT_PATH = pathlib.Path(__file__).absolute().parent
SPEC_PATH = ROOT_PATH.joinpath("build.spec")
DIST_PATH = ROOT_PATH.joinpath("dist")
def clear_directory(path: pathlib.Path, *, exclude: Optional[list[str]] = None):
if not path.is_dir():
return
for item in path.iterdir():
remove_item = True
if exclude is not None:
for pattern in exclude:
if fnmatch.fnmatch(item.name, pattern):
remove_item = False
break
if remove_item:
if item.is_dir():
shutil.rmtree(item)
else:
item.unlink()
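# Illustrative note (the exclude pattern here is made up): calling
# clear_directory(DIST_PATH, exclude=["*.spec"]) would empty the dist folder
# while keeping any .spec files in place.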
def main() -> int:
clear_directory(DIST_PATH, exclude=["xappt*.exe"])
os.environ.setdefault("XAPPT_EXE_CONSOLE", "1")
os.environ.setdefault("XAPPT_EXE_NAME", "xappt")
os.environ.setdefault("XAPPT_EXE_VERSION", xappt_qt.version_str)
os.environ.setdefault("XAPPT_COMPANY_NAME", "")
os.environ.setdefault("XAPPT_COPYRIGHT", "Copyright (c) 2021")
with xappt.temporary_path() as tmp:
if platform.system() == "Windows":
import version_info
version_file = tmp.joinpath("version_info.txt")
with open(version_file, "w") as fp:
fp.write(version_info.version_info_str.strip())
os.environ.setdefault("XAPPT_EXE_VERSION_FILE", str(version_file))
command = ('pyinstaller', str(SPEC_PATH), '--onefile')
proc = subprocess.Popen(command, cwd=str(ROOT_PATH))
return proc.wait()
if __name__ == '__main__':
sys.exit(main())
|
# Heap queue algorithm
import heapq as hq
def main():
a = [1,2,3,4,5,6,7,8]
hq.heapify(a)
print("heapq.heapify({})".format(a))
hq.heappush(a, 9)
print("heapq.heappush('heap', 9) = {}".format(a))
hq.heappop(a)
print("heapq.heappop('heap') = {}".format(a))
y = hq.heappushpop(a, 16)
print("heapq.heappushpop('heap', 16) = ({}), {}".format(y, a))
y = hq.heapreplace(a, 1)
print("heapq.heapreplace('heap', 1) = ({}), {}".format(y, a))
    y = hq.nlargest(2, a)
    print("heapq.nlargest(2, 'heap') = {}".format(y))
y = hq.nsmallest(2, a)
print("heapq.nsmallest(2, 'heap') = {}".format(y))
    y = list(hq.merge(a, [94, 34, 12, 56, 83]))  # merge returns an iterator, so materialize it before printing
    print("heapq.merge('heap', [94, 34, 12, 56, 83]) = {}".format(y))
if __name__ == '__main__':
main()
|
import torchelie.loss.gan.hinge
import torchelie.loss.gan.standard
import torchelie.loss.gan.penalty
import torchelie.loss.gan.ls
|
from socketsocketcan import TCPBus
import can
from datetime import datetime
from time import sleep
bus = TCPBus(5000)
print("socket connected!")
#create a listener to print all received messages
listener = can.Printer()
notifier = can.Notifier(bus,(listener,),timeout=None)
try:
msg = can.Message(
is_extended_id=False,
dlc=6)
count = 0
while bus.is_connected:
msg.arbitration_id = count
msg.data = (count).to_bytes(6,"little")
bus.send(msg)
msg.timestamp = datetime.now().timestamp() #just needed for printing
print(msg) #print sent message
count+=1
sleep(0.5)
print("socket disconnected.")
except KeyboardInterrupt:
print("ctrl+c, exiting...")
notifier.stop()
if bus.is_connected:
bus.shutdown()
|
#!/usr/bin/env python3
# Converts NumPy format files to image files, using whatever image format is specified
# default = jpeg
# does not remove old extension, i.e. new file is ____.npy.jpeg (no reason other than laziness)
# Runs in parallel
# TODO: if <file> is actually a directory, then it should create a new <directory>.jpeg
# AND *recursively* create a new version of original directory with all npy files replaced
# by image files.
# Intended usage: ./np2img Preproc/
#    (would generate Preproc.jpeg/Train/*.npy.jpeg, etc)
# ...'course, alternatively we could just let ../preprocess_data.py save as jpegs
# and enable ../panotti/datautils.py, etc to read them.
import os
#from multiprocessing.pool import ThreadPool as Pool
from multiprocessing.pool import Pool
import numpy as np
from functools import partial
from scipy.misc import imsave
def convert_one_file(file_list, out_format, mono, file_index):
infile = file_list[file_index]
if os.path.isfile(infile):
basename, extension = os.path.splitext(infile)
if ('.npz' == extension) or ('.npy' == extension):
outfile = basename+"."+out_format
print(" Operating on file",infile,", converting to ",outfile)
if ('.npz' == extension):
with np.load(infile) as data:
arr = data['melgram']
else:
arr = np.load(infile)
channels = arr.shape[-1]
if (channels <= 4):
#arr = np.reshape(arr, (arr.shape[2],arr.shape[3]))
                arr = np.moveaxis(arr, 1, 3).squeeze()   # arrays are stored channels_first, but image files are channels_last; squeeze removes unit-size axes
arr = np.flip(arr, 0) # flip spectrogram image right-side-up before saving, for easier viewing
if (2 == channels): # special case: 1=greyscale, 3=RGB, 4=RGBA, ..no 2. so...?
                    # pad a third (blue) channel of zeros so the result is a standard 3-channel image
                    b = np.zeros((arr.shape[0], arr.shape[1], 3))  # 3-channel array of zeros
b[:,:,:-1] = arr # fill the zeros on the 1st 2 channels
imsave(outfile, b, format=out_format)
else:
imsave(outfile, arr, format=out_format)
else:
print(" Skipping file",infile,": Channels > 4. Not representable in jpeg or png format.")
else:
print(" Skipping file",infile,": not numpy format")
else:
print(" Skipping file",infile,": file not found")
return
def main(args):
# farm out the list of files across multiple cpus
file_indices = tuple( range(len(args.file)) )
cpu_count = os.cpu_count()
pool = Pool(cpu_count)
pool.map(partial(convert_one_file, args.file, args.format, args.mono), file_indices)
return
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description='Convert numpy array file to image format')
parser.add_argument('--format', help="format of output image (jpeg, png, etc). Default = png", type=str, default='png')
parser.add_argument("-m", "--mono", help="Use greyscale encoding for mono files (otherwise use RGB)",action="store_true")
parser.add_argument('file', help=".npy file(s) to convert", nargs='+')
args = parser.parse_args()
main(args)
|
import os
from base64 import b64encode
from pathlib import Path
import magic
def load_noimage_app():
"""アプリのNoImage画像を取得します"""
noimage_app_path = (
Path(os.path.dirname(__file__)) / "app/static/noimage_app.png"
)
with open(noimage_app_path, "rb") as f:
noimage_app = f.read()
return noimage_app
def load_noimage_user():
"""ユーザーのNoImage画像を取得します"""
noimage_user_path = (
Path(os.path.dirname(__file__)) / "app/static/noimage_user.png"
)
with open(noimage_user_path, "rb") as f:
noimage_user = f.read()
return noimage_user
def load_noimage_group():
"""グループのNoImage画像を取得します"""
noimage_group_path = (
Path(os.path.dirname(__file__)) / "app/static/noimage_group.png"
)
with open(noimage_group_path, "rb") as f:
noimage_group = f.read()
return noimage_group
|
#!/usr/bin/env python3
import logging
from . import serverconf
from . import benchmark
from dateutil.parser import parse
def get_constraint(cursor, table_name):
if table_name is not None:
query = "SELECT TABLE_NAME, TABLE_SCHEMA, COLUMN_NAME, " \
"CONSTRAINT_NAME FROM " \
"INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE"
sql_response = execute_request(cursor, query, [])
for i in sql_response:
if i['CONSTRAINT_NAME'].find("PK__") != -1:
if i['TABLE_NAME'] == table_name.replace(
i['TABLE_SCHEMA'] + ".", "", 1):
logging.debug(i['COLUMN_NAME'])
return i['COLUMN_NAME']
logging.warn("Primary key not found! Table is in read only mode.")
return ""
def is_date(string):
    """Return the parsed datetime if the string is a date (but not a plain number), else False."""
    try:
        parsed = parse(string)
        try:
            int(string)  # purely numeric strings are not treated as dates
            return False
        except ValueError:
            pass
        return parsed
    except ValueError:
        return False
def check_type(key, params):
typename = type(params[key]).__name__
if params[key] == "":
return ""
if typename == "int":
log.debug("YALA")
return key + " LIKE " + str(params[key])
elif typename == "str":
log.debug("YOLO")
return key + " LIKE '" + params[key] + "'"
return False
def offset(params):
if "limit" in params.keys():
final = "OFFSET " + params["offset"] + " ROWS FETCH NEXT " + params[
"limit"] + " ROWS ONLY"
else:
final = "OFFSET " + params["offset"] + " ROWS FETCH NEXT 20 ROWS ONLY"
return final
def order_by(params):
final = " ORDER BY "
values = params["order"].split(",")
for value in values:
elems = value.split(".")
for elem in elems:
final += elem + " "
final += ", "
final = final[:-2]
return final
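# Illustrative note (made-up params): order_by({"order": "name.asc,age.desc"})
# returns " ORDER BY name asc , age desc ".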
def get_views(cursor, name, param):
arguments = []
query = "SELECT * FROM sys.views"
return execute_request(cursor, query, arguments)
def get_columns(cursor, table_name, param):
arguments = []
query = "SELECT * FROM INFORMATION_SCHEMA.columns WHERE TABLE_NAME = ?"
arguments.append(table_name.split('.')[1])
return execute_request(cursor, query, arguments)
def get_tables(cursor, name, param):
query = "select table_schema, table_name from INFORMATION_SCHEMA.TABLES " \
"where TABLE_TYPE = 'BASE TABLE'"
return execute_request(cursor, query, [])
def execute_request(cursor, query, args):
query = query.replace("--", "")
query = query.replace("#", "")
logging.debug(query + " | {}".format(args))
if serverconf.is_benchmark():
benchmark.delay_start() # Benchmarking delay
try:
cursor.execute(query, *args)
except Exception as e:
logging.error(e)
return {'success': False}
if serverconf.is_benchmark():
benchmark.delay_stop() # Benchmarking delay
keys = []
for elem in cursor.description:
keys.append(elem[0])
result = []
for row in cursor:
i = 0
value = {}
for elem in row:
value[keys[i]] = elem
i = i + 1
result.append(value)
return result
def function_call(cursor, function_name, params):
arguments = []
request = "SELECT * FROM " + function_name + "(" + params.get("arg") + ")"
    logging.debug("%s | %s", request, arguments)
return execute_request(cursor, request, arguments)
def where(params):
final = " WHERE "
a = False
special_words = ["select", "order", "group", "limit", "offset"]
tab = {
"eq": "LIKE", "gte": ">=", "gt": ">", "lte": "<=", "lt": "<",
"neq": "NOT LIKE", "like": "LIKE", "is": "IS", "between": "BETWEEN"
}
for key in params.keys():
if key in special_words:
continue
split = params[key].split(',')
for elem in split:
a = True
value = elem.split('.')
if len(value) >= 2:
final += key + " " + tab[value[0]] + " "
i = 1
while i < len(value):
final += value[i] + " and "
i += 1
else:
value[0] = value[0].replace("'", "\\'")
value[0] = value[0].replace('"', '\\"')
final += key + " LIKE '" + value[0] + "' and "
if a is True:
final = final[:-5]
else:
final = final[:-6]
return final
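# Illustrative note (made-up params): where({"age": "gt.30", "city": "Paris"})
# returns " WHERE age > 30 and city LIKE 'Paris'"; values with an operator prefix
# (eq, gt, lte, ...) are inserted verbatim, plain values are quoted with LIKE.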
def separate_select_params(params):
all_params = []
join = False
tmp = ""
for c in params:
if c == ',' and not join:
all_params.append(tmp)
tmp = ""
else:
tmp += c
if c == '{' or c == '}':
join = not join
all_params.append(tmp)
select_params = []
join_params = {}
for elem in all_params:
if elem.find('{') == -1:
select_params.append(elem)
else:
elem = elem.split('{')
name = elem[0]
value = elem[1].strip('}')
tmp = []
for val in value.split(','):
tmp.append(val)
join_params[name] = tmp
return select_params, join_params
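# Illustrative note (made-up select string): separate_select_params("a,b,child{x,y}")
# returns (["a", "b"], {"child": ["x", "y"]}); the {...} groups become the join
# params that inner_join() below turns into INNER JOIN subqueries.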
def inner_join(table_name, join_params):
query = ""
if len(join_params) == 0:
return query
for key, value in join_params.items():
query += " INNER JOIN (SELECT "
for val in value:
query += val + ","
if len(value) != 0:
query = query[:-1]
query += " FROM " + key + ") on " + table_name + ".id = " + key + \
".fk_id"
return query
def select(cursor, table_name, params):
arguments = []
select_query = "SELECT "
join_params = {}
select_params = []
if "limit" in params.keys() and "offset" not in params.keys():
select_query += "TOP(?)"
arguments.append(params["limit"])
if 'select' in params.keys():
select_params, join_params = separate_select_params(params["select"])
if len(select_params) == 0:
select_query += "*,"
row = False
for param in select_params:
if param == "ROW_NUMBER":
row = True
else:
select_query += param + ","
select_query = select_query[:-1]
if row is False:
select_query += " FROM " + table_name
else:
select_query += " FROM (select *, ROW_NUMBER() OVER (ORDER BY Id) " \
"ROW_NUMBER from " + table_name + ") AS A "
select_query += where(params)
if "order" in params.keys():
select_query += order_by(params)
elif "offset" in params.keys():
select_query += " ORDER BY (SELECT 0) "
if "offset" in params.keys():
select_query += offset(params)
select_query += inner_join(table_name, join_params)
return execute_request(cursor, select_query, arguments)
def delete(cursor, table, params):
query = "DELETE FROM " + table + " WHERE "
for key, value in params.items():
query += key + "=" + value + " and "
if len(params) != 0:
query = query[:-5]
logging.debug(query)
try:
cursor.execute(query)
except Exception as e:
return {"success": False}
return {"success": True}
def update(cursor, table, params):
arguments = []
guid = get_constraint(cursor, table)
query = "UPDATE " + table + " SET "
for key, value in params.items():
value = value.replace("'", "\\'")
value = value.replace('"', '\\"')
if key == "fieldId" or key == guid:
continue
a = is_date(value)
if a:
value = a
query += key + " = ?,"
arguments.append(value)
if len(params) != 0:
query = query[:-1]
query += " FROM " + table
query += " WHERE " + guid + "=" + params["fieldId"]
logging.debug(query + " | ", arguments)
try:
cursor.execute(query, *arguments)
except Exception as e:
logging.error(e)
return {"success": False, "message": e}
return {"success": True}
def store_procedure(cursor, name, params):
# PROTECT FROM SQLI !!!!
query = params["query"]
try:
cursor.execute(query)
except Exception as e:
logging.error(e)
return {"success": False}
return {"success": True}
def get_stored_procedure_name(cursor, p2, p3):
try:
cursor.execute("SELECT name FROM dbo.sysobjects WHERE (TYPE = 'P')")
except Exception as e:
logging.error(e)
return {"success": False}
code = []
for row in cursor:
a = row[0].split('\n')
for line in a:
if len(line) > 0:
code.append(line)
return {"names": code}
def get_stored_procedure_code(cursor, procName, p3):
try:
cursor.execute("EXEC sp_helptext N'" + procName + "'")
except Exception as e:
logging.error(e)
return {"success": False}
code = []
for row in cursor:
a = row[0].split('\n')
for line in a:
if len(line) > 0:
code.append(line)
return {procName: code}
|
from tangram_app.exceptions import InvalidModeException
from tangram_app.tangram_game import tangram_game
from tangram_app.metrics import get_classification_report_pics
from tangram_app.predictions import get_predictions_with_distances, get_predictions
from tangram_app.processing import preprocess_img, preprocess_img_2
import argparse
import os
import cv2
if __name__ == '__main__':
# paths available for testing
path_vid = "data/videos/coeur.mov"
path_img = "data/test_images/cygne_20_left.jpg"
# cli options
parser = argparse.ArgumentParser(description="Tangram detection\n")
    # mode: path to an image/video, a webcam index ("0"/"1"), or "test"
parser.add_argument(
'-m', '--mode', help='analyze picture or video', default=False)
parser.add_argument(
'-s', '--side', help='analyze left / right or the full frame', default="left")
parser.add_argument('-metrics', '--metrics',
help='get metrics of our model', default=False)
args = parser.parse_args()
# check args.side value
if args.side:
assert args.side in ["right", "left",
"none"], "Select a valid side : left - right"
if args.mode:
if args.mode.endswith((".jpg", ".png")):
# static testing
assert os.path.exists(args.mode), "the file doesn't exist - try with another file"
print(tangram_game(image=args.mode, prepro=preprocess_img_2, pred_func=get_predictions_with_distances))
elif args.mode.endswith((".mp4", ".mov")):
# live testing
assert os.path.exists(args.mode), "the file doesn't exist - try with another file"
tangram_game(video=args.mode, side=args.side, prepro=preprocess_img_2,
pred_func=get_predictions_with_distances)
elif args.mode == "test":
path = "data/test_images/bateau_4_right.jpg"
img_cv = cv2.imread(path)
print(tangram_game(side="right", image=path, prepro=preprocess_img_2,
pred_func=get_predictions_with_distances))
elif args.mode.isnumeric() and (int(args.mode) == 0 or int(args.mode) == 1):
# webcam
tangram_game(video=int(args.mode), side=args.side, prepro=preprocess_img_2,
pred_func=get_predictions_with_distances)
else:
raise InvalidModeException()
if args.metrics:
assert os.path.exists(args.metrics), "the folder doesn't exist - try with another one"
report = get_classification_report_pics(title_report="pred_with_distances_mixed_data",
dataset_path=args.metrics, prepro=preprocess_img_2,
pred_func=get_predictions_with_distances)
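    # --- Example invocations (illustrative; assumes this script is saved as tangram_cli.py) ---
    #   python tangram_cli.py -m data/test_images/cygne_20_left.jpg -s left
    #   python tangram_cli.py -m data/videos/coeur.mov -s right
    #   python tangram_cli.py -m 0                          # webcam index 0
    #   python tangram_cli.py -metrics data/test_images     # classification report over a folder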
|
# coding: utf-8
"""
multi-translate
Multi-Translate is a unified interface on top of various translate APIs providing optimal translations, persistence, fallback. # noqa: E501
The version of the OpenAPI document: 0.7.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from multitranslateclient.configuration import Configuration
class TranslationResponse(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'engine': 'str',
'engine_version': 'str',
'detected_language_confidence': 'float',
'from_language': 'str',
'to_language': 'str',
'source_text': 'str',
'translated_text': 'str',
'alignment': 'list[dict(str, dict(str, str))]'
}
attribute_map = {
'engine': 'engine',
'engine_version': 'engine_version',
'detected_language_confidence': 'detected_language_confidence',
'from_language': 'from_language',
'to_language': 'to_language',
'source_text': 'source_text',
'translated_text': 'translated_text',
'alignment': 'alignment'
}
def __init__(self, engine=None, engine_version=None, detected_language_confidence=None, from_language=None, to_language=None, source_text=None, translated_text=None, alignment=None, local_vars_configuration=None): # noqa: E501
"""TranslationResponse - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._engine = None
self._engine_version = None
self._detected_language_confidence = None
self._from_language = None
self._to_language = None
self._source_text = None
self._translated_text = None
self._alignment = None
self.discriminator = None
self.engine = engine
self.engine_version = engine_version
if detected_language_confidence is not None:
self.detected_language_confidence = detected_language_confidence
self.from_language = from_language
self.to_language = to_language
self.source_text = source_text
self.translated_text = translated_text
if alignment is not None:
self.alignment = alignment
@property
def engine(self):
"""Gets the engine of this TranslationResponse. # noqa: E501
:return: The engine of this TranslationResponse. # noqa: E501
:rtype: str
"""
return self._engine
@engine.setter
def engine(self, engine):
"""Sets the engine of this TranslationResponse.
:param engine: The engine of this TranslationResponse. # noqa: E501
:type engine: str
"""
if self.local_vars_configuration.client_side_validation and engine is None: # noqa: E501
raise ValueError("Invalid value for `engine`, must not be `None`") # noqa: E501
self._engine = engine
@property
def engine_version(self):
"""Gets the engine_version of this TranslationResponse. # noqa: E501
:return: The engine_version of this TranslationResponse. # noqa: E501
:rtype: str
"""
return self._engine_version
@engine_version.setter
def engine_version(self, engine_version):
"""Sets the engine_version of this TranslationResponse.
:param engine_version: The engine_version of this TranslationResponse. # noqa: E501
:type engine_version: str
"""
if self.local_vars_configuration.client_side_validation and engine_version is None: # noqa: E501
raise ValueError("Invalid value for `engine_version`, must not be `None`") # noqa: E501
self._engine_version = engine_version
@property
def detected_language_confidence(self):
"""Gets the detected_language_confidence of this TranslationResponse. # noqa: E501
:return: The detected_language_confidence of this TranslationResponse. # noqa: E501
:rtype: float
"""
return self._detected_language_confidence
@detected_language_confidence.setter
def detected_language_confidence(self, detected_language_confidence):
"""Sets the detected_language_confidence of this TranslationResponse.
:param detected_language_confidence: The detected_language_confidence of this TranslationResponse. # noqa: E501
:type detected_language_confidence: float
"""
self._detected_language_confidence = detected_language_confidence
@property
def from_language(self):
"""Gets the from_language of this TranslationResponse. # noqa: E501
:return: The from_language of this TranslationResponse. # noqa: E501
:rtype: str
"""
return self._from_language
@from_language.setter
def from_language(self, from_language):
"""Sets the from_language of this TranslationResponse.
:param from_language: The from_language of this TranslationResponse. # noqa: E501
:type from_language: str
"""
if self.local_vars_configuration.client_side_validation and from_language is None: # noqa: E501
raise ValueError("Invalid value for `from_language`, must not be `None`") # noqa: E501
self._from_language = from_language
@property
def to_language(self):
"""Gets the to_language of this TranslationResponse. # noqa: E501
:return: The to_language of this TranslationResponse. # noqa: E501
:rtype: str
"""
return self._to_language
@to_language.setter
def to_language(self, to_language):
"""Sets the to_language of this TranslationResponse.
:param to_language: The to_language of this TranslationResponse. # noqa: E501
:type to_language: str
"""
if self.local_vars_configuration.client_side_validation and to_language is None: # noqa: E501
raise ValueError("Invalid value for `to_language`, must not be `None`") # noqa: E501
self._to_language = to_language
@property
def source_text(self):
"""Gets the source_text of this TranslationResponse. # noqa: E501
:return: The source_text of this TranslationResponse. # noqa: E501
:rtype: str
"""
return self._source_text
@source_text.setter
def source_text(self, source_text):
"""Sets the source_text of this TranslationResponse.
:param source_text: The source_text of this TranslationResponse. # noqa: E501
:type source_text: str
"""
if self.local_vars_configuration.client_side_validation and source_text is None: # noqa: E501
raise ValueError("Invalid value for `source_text`, must not be `None`") # noqa: E501
self._source_text = source_text
@property
def translated_text(self):
"""Gets the translated_text of this TranslationResponse. # noqa: E501
:return: The translated_text of this TranslationResponse. # noqa: E501
:rtype: str
"""
return self._translated_text
@translated_text.setter
def translated_text(self, translated_text):
"""Sets the translated_text of this TranslationResponse.
:param translated_text: The translated_text of this TranslationResponse. # noqa: E501
:type translated_text: str
"""
if self.local_vars_configuration.client_side_validation and translated_text is None: # noqa: E501
raise ValueError("Invalid value for `translated_text`, must not be `None`") # noqa: E501
self._translated_text = translated_text
@property
def alignment(self):
"""Gets the alignment of this TranslationResponse. # noqa: E501
:return: The alignment of this TranslationResponse. # noqa: E501
:rtype: list[dict(str, dict(str, str))]
"""
return self._alignment
@alignment.setter
def alignment(self, alignment):
"""Sets the alignment of this TranslationResponse.
:param alignment: The alignment of this TranslationResponse. # noqa: E501
:type alignment: list[dict(str, dict(str, str))]
"""
self._alignment = alignment
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, TranslationResponse):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, TranslationResponse):
return True
return self.to_dict() != other.to_dict()
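# --- Usage sketch (illustrative; not part of the generated client, field values are made up) ---
def _example_translation_response():
    """Construct a response object and round-trip it through to_dict()."""
    resp = TranslationResponse(
        engine="demo-engine",
        engine_version="1.0",
        from_language="en",
        to_language="de",
        source_text="hello",
        translated_text="hallo",
    )
    return resp.to_dict()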
|
# BeatSaber Ogg Onset Detection
# 2/16/2019
# David Haas, Ian Boll, Josh Mosier, Michael Keays
import numpy as np
import h5py # for saving the model
from tensorflow.python.lib.io import file_io # for better file I/O
import tensorflow as tf
from datetime import datetime
import pickle
from keras.models import Sequential
from keras.layers import Dense, Conv2D, Flatten, MaxPooling2D, Dropout
import argparse
import sys
import glob
#import cloud_datagen
def train_model(data_file='data/train120.pkl', job_dir='./tmp/onset_detect', epochs=10, **args):
print(data_file, job_dir)
# set the logging path for ML Engine logging to Storage bucket
logs_path = job_dir + '/logs/' + datetime.now().isoformat()
print('Using logs_path located at {}'.format(logs_path))
f = tf.gfile.Open(data_file, mode="rb")
data, labels = pickle.load(f)
print("loaded")
# Normalize the lengths of the data and labels for each song
for i, ian in enumerate(zip(data, labels)):
d, l = ian
if l.shape[0] < d.shape[0]:
diff = d.shape[0] - l.shape[0]
labels[i] = np.concatenate(
(labels[i], np.zeros(diff, dtype=np.bool)))
print("normalized")
training_cutoff = int(len(data) * .8)
# Concatenate each song's data into a continuous list
train_data = np.concatenate(data[:training_cutoff]).swapaxes(1, 3)
test_data = np.concatenate(data[training_cutoff:]).swapaxes(1, 3)
train_labels = np.concatenate(
labels[:training_cutoff]).astype(np.short, copy=False)
test_labels = np.concatenate(
labels[training_cutoff:]).astype(np.short, copy=False)
print("Data set.")
from keras.models import Sequential
from keras.layers import Dense, Conv2D, Flatten, MaxPooling2D, Dropout
print("keras imported")
# create model
model = Sequential()
# add model layers
model.add(Conv2D(filters=10, kernel_size=(3, 7),
activation='relu', input_shape=(80, 15, 1)))
model.add(MaxPooling2D(pool_size=(3, 1)))
model.add(Conv2D(filters=10, kernel_size=(3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(3, 1)))
model.add(Flatten())
model.add(Dense(256, activation='sigmoid'))
model.add(Dropout(0.5))
    # No dropout after the single-unit output layer; dropping out the output would randomly zero the prediction itself
    model.add(Dense(1, activation='sigmoid'))
print("model created")
model.compile(optimizer='adam',
loss='binary_crossentropy', metrics=['accuracy'])
model.summary()
print("model compiled")
model.fit(train_data, train_labels,
validation_data=(test_data, test_labels),
epochs=epochs)
score = model.evaluate(test_data, test_labels, verbose=0)
print('Test loss:', score[0])
print('Test accuracy:', score[1])
    # Save the model locally, then copy it to the job directory (e.g. a Cloud Storage bucket)
    model.save('model.h5')
    with file_io.FileIO('model.h5', mode='rb') as input_f:
        with file_io.FileIO(job_dir + '/model.h5', mode='wb+') as output_f:
            output_f.write(input_f.read())
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'--data-file',
help='Cloud Storage bucket or local path to training data')
parser.add_argument(
'--job-dir',
help='Cloud storage bucket to export the model and store temp files')
    parser.add_argument(
        '--epochs',
        type=int,
        default=10,
        help='Number of epochs to train the model for')
args = parser.parse_args()
arguments = args.__dict__
# Parse the input arguments for common Cloud ML Engine options
# if len(glob.glob('data/*.pkl')) == 0:
# job_dir = arguments['job_dir']
# if job_dir is None:
# job_dir = ''
# cloud_datagen.generate_data(
# 'data/RawSongs/', 'data/train.pkl')
# print(arguments)
# train_model(**arguments)
print(arguments)
train_model(**arguments)
# model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=3)
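    # --- Example invocations (illustrative; the script name, bucket, and paths are placeholders) ---
    #   python onset_train.py --data-file data/train120.pkl --job-dir ./tmp/onset_detect --epochs 10
    # On Cloud ML Engine the same flags would be forwarded after the job options, e.g.
    #   gcloud ... -- --data-file gs://<bucket>/train120.pkl --job-dir gs://<bucket>/jobs/run1 --epochs 10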
|
# -*- coding: utf-8 -*-
import unittest
import openrtb
import openrtb.base
BRQ = {
'id': u'testbrqid',
'tmax': 100,
'at': 2,
'app': {
'id': u'appid',
'name': u'appname',
'cat': [u'IAB1', u'IAB2-2'],
'publisher': {
'id': u'pubid',
'cat': [u'IAB3']
},
'content': {
'id': u'contentid',
'episode': 1,
'producer': {
'id': u'pubid',
'cat': [u'IAB3']
}
},
'keywords': u'key,word'
},
'device': {
'ip': u'123.1.2.3',
'make': u'Apple',
'devicetype': 1,
'geo': {
'lat': 54.3123,
'lon': 32.12312,
'country': u'US'
}
},
'user': {
'id': u'userid',
'yob': 2012,
'data': [
{
'id': u'dataid',
'segment': [{
'id': u'segmentid',
'name': u'yob',
'value': u'2012',
}]
}
]
},
'imp': [
{
'id': u'testimpid',
'bidfloorcur': u'USD',
'banner': {
'w': 320,
'h': 50,
'pos': 1,
'mimes': [u'mime/type']
}
}
],
'ext': {}
}
class TestFields(unittest.TestCase):
def test_passthrough(self):
self.assertEqual(openrtb.base.Field(int).deserialize(1), 1)
def test_convert(self):
self.assertEqual(openrtb.base.Field(int).deserialize('1'), 1)
def test_convert_fail(self):
with self.assertRaises(openrtb.base.ValidationError):
openrtb.base.Field(int).deserialize('asd')
def test_convert_enum_fail(self):
with self.assertRaises(openrtb.base.ValidationError):
openrtb.base.Field(openrtb.base.Enum).deserialize('asd')
def test_convert_enum(self):
self.assertEqual(openrtb.base.Field(openrtb.base.Enum).deserialize('1'), 1)
def test_deserialize(self):
class O(object):
v = None
@staticmethod
def deserialize(v):
O.v = v
return 'test'
self.assertEqual(openrtb.base.Field(O).deserialize('1'), 'test')
self.assertEqual(O.v, '1')
def test_unicode(self):
self.assertEqual(openrtb.base.String(u'uni'), u'uni')
def test_ascii(self):
self.assertEqual(openrtb.base.String('uni'), u'uni')
def test_utf8(self):
self.assertEqual(openrtb.base.String('утф'), u'утф')
def test_bad_utf8(self):
self.assertEqual(openrtb.base.String('x\xff'), u'x')
def test_convert_to_unicode(self):
self.assertEqual(openrtb.base.String(1), u'1')
def test_default_array(self):
self.assertEqual(openrtb.base.Array(int)(None), [])
def test_enum_int(self):
self.assertEqual(openrtb.base.Enum(1), 1)
def test_enum_convert_to_int(self):
self.assertEqual(openrtb.base.Enum('1'), 1)
def test_enum_convert_to_int_fail(self):
with self.assertRaises(ValueError):
openrtb.base.Enum('x')
class TestObjects(unittest.TestCase):
def test_required(self):
with self.assertRaises(openrtb.base.ValidationError):
openrtb.request.BidRequest()
def test_extra(self):
s = openrtb.request.Site(extra='extra')
self.assertEqual(s.extra, 'extra')
def test_ds_extra(self):
s = openrtb.request.Site.deserialize({'extra': 'extra'})
self.assertEqual(s.extra, 'extra')
def test_missing(self):
s = openrtb.request.Site()
self.assertEqual(s.extra, None)
def test_ds_none(self):
s = openrtb.request.Site.deserialize({'id': None})
self.assertEqual(s.id, None)
def test_bid_request_serialize_cycle(self):
self.maxDiff = None
brq = openrtb.request.BidRequest.deserialize(BRQ)
self.assertDictEqual(BRQ, brq.serialize())
class TestGetters(unittest.TestCase):
def test_brq_user(self):
brq = openrtb.request.BidRequest.minimal('i', 'i')
self.assertEqual(brq.get_user().__class__,
openrtb.request.User)
brq.user = openrtb.request.User(id='t')
self.assertEqual(brq.get_user().id, 't')
def test_brq_app(self):
brq = openrtb.request.BidRequest.minimal('i', 'i')
self.assertEqual(brq.get_app().__class__,
openrtb.request.App)
brq.app = openrtb.request.App(id='t')
self.assertEqual(brq.get_app().id, 't')
def test_brq_site(self):
brq = openrtb.request.BidRequest.minimal('i', 'i')
self.assertEqual(brq.get_site().__class__,
openrtb.request.Site)
brq.site = openrtb.request.Site(id='t')
self.assertEqual(brq.get_site().id, 't')
def test_brq_device(self):
brq = openrtb.request.BidRequest.minimal('i', 'i')
self.assertEqual(brq.get_device().__class__,
openrtb.request.Device)
brq.device = openrtb.request.Device(id='t')
self.assertEqual(brq.get_device().id, 't')
def test_banner_btypes(self):
self.assertEqual(openrtb.request.Banner().blocked_types(), set())
self.assertEqual(openrtb.request.Banner(btype=[openrtb.constants.BannerType.BANNER]).blocked_types(),
{openrtb.constants.BannerType.BANNER})
def test_banner_size(self):
self.assertEqual(openrtb.request.Banner().size(), None)
self.assertEqual(openrtb.request.Banner(w=1, h=2).size(), (1, 2))
def test_device_geo(self):
self.assertEqual(openrtb.request.Device().get_geo().__class__,
openrtb.request.Geo)
geo = openrtb.request.Geo()
self.assertEqual(openrtb.request.Device(geo=geo).get_geo(), geo)
def test_device_oncellular(self):
self.assertFalse(openrtb.request.Device().is_on_cellular())
self.assertTrue(openrtb.request.Device(connectiontype=openrtb.constants.ConnectionType.CELLULAR_2G).is_on_cellular())
self.assertFalse(openrtb.request.Device(connectiontype=openrtb.constants.ConnectionType.WIFI).is_on_cellular())
def test_geo_loc(self):
self.assertEqual(openrtb.request.Geo().loc(), None)
self.assertEqual(openrtb.request.Geo(lat=1, lon=2).loc(), (1, 2))
class TestMobileAdapter(unittest.TestCase):
def test_adapter(self):
mbrq = openrtb.mobile.BidRequest(
id='mbrqid',
imp=[
openrtb.mobile.Impression(
impid='impid',
w=320,
h=50,
btype=[openrtb.constants.BannerType.BANNER],
battr=[openrtb.constants.CreativeAttribute.AUDIO_AUTOPLAY],
pos=openrtb.constants.AdPosition.OFFSCREEN
)
],
device=openrtb.mobile.Device(
loc='1.23,4.56',
country='US',
make='Apple'
),
site=openrtb.mobile.Site(
sid='siteid',
pub='sitepub',
pid='sitepubid'
),
app=openrtb.mobile.App(
aid='appid',
pub='apppub',
pid='apppubid',
),
user=openrtb.mobile.User(
country='RU',
zip='123456',
uid='userid',
),
restrictions=openrtb.mobile.Restrictions(
bcat=['cat'],
badv=['adv'],
)
)
a = openrtb.mobile.OpenRTB20Adapter(mbrq)
self.assertEqual(a.id, 'mbrqid')
self.assertEqual(a.imp[0].banner.w, 320)
self.assertEqual(a.imp[0].banner.h, 50)
self.assertEqual(a.imp[0].banner.btype, [openrtb.constants.BannerType.BANNER])
self.assertEqual(a.imp[0].banner.pos, openrtb.constants.AdPosition.OFFSCREEN)
self.assertEqual(a.device.geo.country, 'US')
self.assertEqual(a.device.geo.lat, 1.23)
self.assertEqual(a.device.geo.lon, 4.56)
self.assertEqual(a.site.publisher.id, 'sitepubid')
self.assertEqual(a.site.publisher.name, 'sitepub')
self.assertEqual(a.site.id, 'siteid')
self.assertEqual(a.app.id, 'appid')
self.assertEqual(a.user.geo.country, 'RU')
self.assertEqual(a.user.geo.zip, '123456')
self.assertEqual(a.user.id, 'userid')
self.assertEqual(a.bcat, ['cat'])
self.assertEqual(a.badv, ['adv'])
self.assertEqual(a.brq.serialize(),
openrtb.mobile.OpenRTB20Adapter.deserialize(mbrq.serialize()).brq.serialize())
class TestConstants(unittest.TestCase):
def test_init(self):
self.assertEqual(openrtb.constants.AdPosition(2).name, 'MAYBE_VISIBLE')
def test_clone(self):
self.assertEqual(openrtb.constants.AdPosition(openrtb.constants.AdPosition.OFFSCREEN).name, 'OFFSCREEN')
def test_int(self):
self.assertEqual(int(openrtb.constants.ConnectionType(2)), 2)
def test_str(self):
self.assertEqual(str(openrtb.constants.AdPosition(2)), 'MAYBE_VISIBLE')
def test_hash(self):
self.assertEqual({openrtb.constants.AdPosition.OFFSCREEN: 'test'}[3], 'test')
def test_unknown_str(self):
self.assertIn('Unknown', str(openrtb.constants.BannerType(123)))
def test_none_equal(self):
self.assertFalse(None == openrtb.constants.BannerType.JS)
def test_int_equal(self):
self.assertEqual(openrtb.constants.BannerType.JS, 3)
def test_constant_equal(self):
self.assertEqual(openrtb.constants.BannerType.JS, openrtb.constants.BannerType(3))
def test_wrong_type(self):
with self.assertRaises(TypeError):
openrtb.constants.BannerType.JS == openrtb.constants.CreativeAttribute.EXPAND_AUTO
class TestIAB(unittest.TestCase):
def test_tier1(self):
self.assertEqual(openrtb.iab.from_string('IAB1'), 'Arts & Entertainment')
self.assertEqual(openrtb.iab.from_string('IAB18'), 'Style & Fashion')
def test_tier2(self):
self.assertEqual(openrtb.iab.from_string('IAB17-33'), 'Sports: Scuba Diving')
def test_noprefix(self):
self.assertEqual(openrtb.iab.from_string('7-32'), 'Health & Fitness: Nutrition')
def test_bad(self):
self.assertEqual(openrtb.iab.from_string('IAB99-99'), 'IAB99-99')
class TestMacros(unittest.TestCase):
TPL = ('${AUCTION_ID}/${AUCTION_BID_ID}/${AUCTION_IMP_ID}/'
'${AUCTION_SEAT_ID}/${AUCTION_AD_ID}/${AUCTION_PRICE}/${AUCTION_CURRENCY}')
def test_sub(self):
brq = openrtb.request.BidRequest.minimal('reqid', 'impid')
brp = openrtb.response.BidResponse(
id='wharrgarbl',
seatbid=[openrtb.response.SeatBid(
seat='seatid',
bid=[openrtb.response.Bid(
id='bidid',
impid='impid',
adid='adid',
price=0
)]
)],
bidid='bidid'
)
self.assertEqual(openrtb.macros.substitution(brq, brp, 0.1, self.TPL),
'reqid/bidid/impid/seatid/adid/0.1/USD')
def test_nonmacro(self):
self.assertEqual(openrtb.macros.substitution(
openrtb.request.BidRequest.minimal('r', 'i'),
openrtb.response.BidResponse.minimal('id', 'bidid', 'impid', 0.1),
0.2,
'${AUCTION_TEST}'
), '${AUCTION_TEST}')
def test_empty(self):
self.assertEqual(openrtb.macros.substitution(
openrtb.request.BidRequest.minimal('rid', 'rimpid'),
openrtb.response.BidResponse.minimal('respid', 'bidid', 'impid', 0.1),
0.2,
self.TPL
), 'rid//impid///0.2/USD')
if __name__ == '__main__':
    unittest.main()
|
import sys
sys.path.append('../../')
import constants as cnst
import tqdm
import numpy as np
from my_utils.visualize_flame_overlay import OverLayViz
from my_utils.generic_utils import save_set_of_images
import constants
import torch
from my_utils.eye_centering import position_to_given_location
import os
from my_utils.photometric_optimization.models import FLAME
from my_utils.photometric_optimization import util
###################################### Voca training Seq ######################################################
ignore_global_rotation = False
resolution = 256
run_idx = 29
seqs = np.load(cnst.voca_flame_seq_file)
if ignore_global_rotation:
pose = np.hstack((seqs['frame_pose_params'][:, 0:3]*0, seqs['frame_pose_params'][:, 6:9]))
else:
pose = np.hstack((seqs['frame_pose_params'][:, 0:3], seqs['frame_pose_params'][:, 6:9]))
num_frames = seqs['frame_exp_params'].shape[0]
translation = np.zeros((num_frames, 3))
flame_shape = np.repeat(seqs['seq_shape_params'][np.newaxis, :].astype('float32'), (num_frames,), axis=0)
flm_batch = np.hstack((flame_shape, seqs['frame_exp_params'], pose, translation)).astype('float32')[::8]
flm_batch = torch.from_numpy(flm_batch).cuda()
overlay_visualizer = OverLayViz()
config_obj = util.dict2obj(cnst.flame_config)
flame_decoder = FLAME.FLAME(config_obj).cuda().eval()
flm_batch = position_to_given_location(flame_decoder, flm_batch)
# Render FLAME
batch_size_true = flm_batch.shape[0]
cam = flm_batch[:, constants.DECA_IDX['cam'][0]:constants.DECA_IDX['cam'][1]:]
shape = flm_batch[:, constants.INDICES['SHAPE'][0]:constants.INDICES['SHAPE'][1]]
exp = flm_batch[:, constants.INDICES['EXP'][0]:constants.INDICES['EXP'][1]]
pose = flm_batch[:, constants.INDICES['POSE'][0]:constants.INDICES['POSE'][1]]
# import ipdb; ipdb.set_trace()
fl_param_dict = np.load(cnst.all_flame_params_file, allow_pickle=True).item()
random_keys = ['00000.pkl', '00001.pkl', '00002.pkl', '00003.pkl', '00004.pkl', '00005.pkl', '00006.pkl',
'00007.pkl', '00008.pkl', '00009.pkl', '00010.pkl', '00011.pkl', '00012.pkl', '00013.pkl',
'00014.pkl', '00015.pkl', '00016.pkl', '00017.pkl', '00018.pkl', '00019.pkl', '00020.pkl',
'00021.pkl', '00022.pkl', '00023.pkl', '00024.pkl', '00025.pkl', '00026.pkl', '00027.pkl',
'00028.pkl', '00029.pkl', '00030.pkl', '00031.pkl', '00032.pkl', '00033.pkl', '00034.pkl',
'00035.pkl', '00036.pkl', '00037.pkl', '00038.pkl', '00039.pkl', '00040.pkl', '00041.pkl',
'00042.pkl', '00043.pkl', '00044.pkl', '00045.pkl', '00046.pkl', '00047.pkl', '00048.pkl',
'00049.pkl', '00050.pkl', '00051.pkl', '00052.pkl', '00053.pkl', '00054.pkl', '00055.pkl']
light_code = fl_param_dict[random_keys[7]]['lit'].astype('float32')[None, ...].repeat(batch_size_true, axis=0)
texture_code = fl_param_dict[random_keys[7]]['tex'].astype('float32')[None, ...].repeat(batch_size_true, axis=0)
# norma_map_img, _, _, _, rend_flm = \
# overlay_visualizer.get_rendered_mesh(flame_params=(shape, exp, pose, torch.from_numpy(light_code).cuda(),
# torch.from_numpy(texture_code).cuda()),
# camera_params=cam)
verts, landmarks2d, landmarks3d = overlay_visualizer.deca.flame(shape_params=shape, expression_params=exp,
pose_params=pose)
landmarks2d_projected = util.batch_orth_proj(landmarks2d, cam)
landmarks2d_projected[:, :, 1:] *= -1
trans_verts = util.batch_orth_proj(verts, cam)
trans_verts[:, :, 1:] = -trans_verts[:, :, 1:]
albedos = overlay_visualizer.flametex(torch.from_numpy(texture_code).cuda())
light_code = torch.from_numpy(light_code).cuda()
rendering_results = overlay_visualizer.deca.render(verts, trans_verts, albedos, lights=light_code, light_type='point',
cull_backfaces=True)
textured_images, normals, alpha_img = rendering_results['images'], rendering_results['normals'],\
rendering_results['alpha_images']
normal_images = overlay_visualizer.deca.render.render_normal(trans_verts, normals)
rend_flm = torch.clamp(textured_images, 0, 1) * 2 - 1
# rend_flm += (1 - alpha_img) * 2
norma_map_img = torch.clamp(normal_images, 0, 1) * 2 - 1
# norma_map_img += (1 - alpha_img) * 2
for id in tqdm.tqdm(range(50)):
save_set_of_images(path=os.path.join(cnst.output_root, f'sample/{run_idx}/voca/'+str(id)), prefix='mesh_normal_',
images=((norma_map_img + 1) / 2).cpu().numpy())
save_set_of_images(path=os.path.join(cnst.output_root, f'sample/{run_idx}/voca/' + str(id)), prefix='mesh_textured_',
images=((rend_flm + 1) / 2).cpu().numpy())
# save_set_of_images(path=os.path.join(cnst.output_root, f'sample/{run_idx}/voca/'+str(id)), prefix='',
# images=((fake_images + 1) / 2))
print(f'Voca Animation saved to {os.path.join(cnst.output_root, f"sample/{run_idx}/voca/")}')
|
# -*- coding: utf-8 -*-
"""Dynamic inventories of Docker containers, served up fresh just for Ansible."""
import json
from docker import DockerClient
DEFAULT_DOCKER_OPTS = {
'base_url': 'unix:///var/run/docker.sock',
'version': 'auto',
'timeout': 5,
'tls': True
}
def format_containers(containers, json_out):
"""Format container data for Ansible
Args:
containers: [(hostname, metadata), ...]
json_out: If True, return JSON, else dictionary.
Returns:
Dictionary of container information formatted to Ansible specs.
"""
data = {'all': {'vars': {'ansible_connection': 'docker'}, 'hosts': [], '_meta': {'hostvars': {}}}}
for host, metadata in containers:
# docs use dict keys set to none, but maybe all is special?
# data['all']['hosts'][host] = None
data['all']['hosts'].append(host)
if metadata:
data['all']['_meta']['hostvars'][host] = {'docker_metadata': metadata}
return json.dumps(data) if json_out else data
def containers(metadata=True, docker_opts=DEFAULT_DOCKER_OPTS):
"""Get all containers running on a Docker host and format them for Ansible.
Args:
metadata: If True, include container metadata. Default: True
docker_opts: Dict of DockerClient params. More info: https://docker-py.readthedocs.io/en/stable/client.html#docker.client.DockerClient
Returns:
        List of (name, metadata) tuples.
"""
d = DockerClient(**docker_opts)
return [(c.name, c.attrs if metadata else {}) for c in d.containers.list()]
def containers_by_host(host, metadata=True, docker_opts=DEFAULT_DOCKER_OPTS):
"""Get all containers running on a Docker host and format them for Ansible.
Args:
host: Required, only match containers with this name.
metadata: If True, include container metadata. Default: True
docker_opts: Dict of DockerClient params. More info: https://docker-py.readthedocs.io/en/stable/client.html#docker.client.DockerClient
Returns:
        List of (name, metadata) tuples.
"""
d = DockerClient(**docker_opts)
return [(c.name, c.attrs if metadata else {}) for c in d.containers.list() if c.name == host]
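# --- Usage sketch (illustrative; container names and metadata below are made up) ---
# format_containers() can be exercised without a Docker daemon by feeding it
# pre-built (name, metadata) tuples.
def _example_inventory():
    fake = [("web_1", {"State": "running"}), ("db_1", {})]
    inventory = format_containers(fake, json_out=False)
    # inventory["all"]["hosts"] == ["web_1", "db_1"]
    # only "web_1" gets a hostvars entry, because "db_1" has empty metadata
    return inventory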
|
from selenium.webdriver.common.by import By
from webdriver_test_tools.pageobject import *
from webdriver_test_tools.webdriver import actions, locate
from no_yaml_example.config import SiteConfig
class FullURLExampleWebPage(prototypes.WebPageObject):
"""Non-YAML WebPageObject example (full URL)"""
# Full URL of the page
PAGE_URL = 'http://example.com/'
class RelativeURLExampleWebPage(prototypes.WebPageObject):
"""Non-YAML WebPageObject example (relative URL)"""
# File name of the page relative to a base URL declared in SiteConfig
PAGE_FILENAME = 'page.html'
# Full URL of the page
PAGE_URL = SiteConfig.BASE_URL + PAGE_FILENAME
|
import random
import bpy
from mathutils import Matrix, Vector
import os
import numpy as np
import math
from functools import reduce
def normalize(vec):
return vec / (np.linalg.norm(vec, axis=-1, keepdims=True) + 1e-9)
# All the following functions follow the opencv convention for camera coordinates.
def look_at(cam_location, point):
# Cam points in positive z direction
forward = point - cam_location
forward = normalize(forward)
tmp = np.array([0., -1., 0.])
right = np.cross(tmp, forward)
right = normalize(right)
up = np.cross(forward, right)
up = normalize(up)
mat = np.stack((right, up, forward, cam_location), axis=-1)
hom_vec = np.array([[0., 0., 0., 1.]])
if len(mat.shape) > 2:
hom_vec = np.tile(hom_vec, [mat.shape[0], 1, 1])
mat = np.concatenate((mat, hom_vec), axis=-2)
return mat
def sample_spherical(n, radius=1.):
xyz = np.random.normal(size=(n, 3))
xyz[:, 1] = np.abs(xyz[:, 1])
xyz = normalize(xyz) * radius
return xyz
def sample_sherical_uniform_angles(n, radius=1.):
# Original
yaw = np.random.uniform(-math.pi, math.pi, n)
pitch = np.random.uniform(0, math.radians(85), n)
#
# # # For paper GAN inversion
# yaw = np.asarray([math.pi / 2, math.pi, -math.pi/2, -math.pi/2][:n])
# pitch = np.asarray([math.radians(35), math.radians(35), math.radians(35), math.radians(85)][:n])
# # For recognition renders
# train_yaw = -math.pi / 2
# yaw_std = (math.pi/16)
# yaw = np.random.uniform(train_yaw - yaw_std, train_yaw + yaw_std, n)
# pitch = np.random.uniform(math.radians(0), math.radians(20), n)
#
# test_yaw = np.asarray([(math.pi / 4) * 3])
# test_pitch = np.asarray([math.radians(30)])
#
# yaw = np.concatenate([yaw, test_yaw], axis=0)
# pitch = np.concatenate([pitch, test_pitch], axis=0)
# # For the new view synthesis
# yaw = np.asarray([(math.pi / 4) * 3, -math.pi/2])
# pitch = np.asarray([math.radians(30), math.radians(10)])
# print("Yaw: ", yaw)
# print("Pitch: ", pitch)
x = np.sin(yaw) * np.cos(pitch)
y = np.sin(pitch)
z = np.cos(yaw) * np.cos(pitch)
x = np.expand_dims(x, axis=1)
y = np.expand_dims(y, axis=1)
z = np.expand_dims(z, axis=1)
return np.concatenate((x, y, z), axis=1) * radius
def set_camera_focal_length_in_world_units(camera_data, focal_length):
scene = bpy.context.scene
resolution_x_in_px = scene.render.resolution_x
resolution_y_in_px = scene.render.resolution_y
scale = scene.render.resolution_percentage / 100
sensor_width_in_mm = camera_data.sensor_width
sensor_height_in_mm = camera_data.sensor_height
pixel_aspect_ratio = scene.render.pixel_aspect_x / scene.render.pixel_aspect_y
if (camera_data.sensor_fit == 'VERTICAL'):
        # the sensor height is fixed (sensor fit is vertical),
# the sensor width is effectively changed with the pixel aspect ratio
s_u = resolution_x_in_px * scale / sensor_width_in_mm / pixel_aspect_ratio
s_v = resolution_y_in_px * scale / sensor_height_in_mm
else: # 'HORIZONTAL' and 'AUTO'
# the sensor width is fixed (sensor fit is horizontal),
# the sensor height is effectively changed with the pixel aspect ratio
pixel_aspect_ratio = scene.render.pixel_aspect_x / scene.render.pixel_aspect_y
s_u = resolution_x_in_px * scale / sensor_width_in_mm
s_v = resolution_y_in_px * scale * pixel_aspect_ratio / sensor_height_in_mm
camera_data.lens = focal_length / s_u
# Blender: camera looks in negative z-direction, y points up, x points right.
# Opencv: camera looks in positive z-direction, y points down, x points right.
def cv_cam2world_to_bcam2world(cv_cam2world):
'''
:cv_cam2world: numpy array.
:return:
'''
R_bcam2cv = Matrix(
((1, 0, 0),
(0, -1, 0),
(0, 0, -1)))
cam_location = Vector(cv_cam2world[:3, -1].tolist())
cv_cam2world_rot = Matrix(cv_cam2world[:3, :3].tolist())
cv_world2cam_rot = cv_cam2world_rot.transposed()
cv_translation = -1. * cv_world2cam_rot * cam_location
blender_world2cam_rot = R_bcam2cv * cv_world2cam_rot
blender_translation = R_bcam2cv * cv_translation
blender_cam2world_rot = blender_world2cam_rot.transposed()
blender_cam_location = -1. * blender_cam2world_rot * blender_translation
blender_matrix_world = Matrix((
blender_cam2world_rot[0][:] + (blender_cam_location[0],),
blender_cam2world_rot[1][:] + (blender_cam_location[1],),
blender_cam2world_rot[2][:] + (blender_cam_location[2],),
(0, 0, 0, 1)
))
return blender_matrix_world
# Returns camera rotation and translation matrices from Blender.
#
# There are 3 coordinate systems involved:
# 1. The World coordinates: "world"
# - right-handed
# 2. The Blender camera coordinates: "bcam"
# - x is horizontal
# - y is up
# - right-handed: negative z look-at direction
# 3. The desired computer vision camera coordinates: "cv"
# - x is horizontal
# - y is down (to align to the actual pixel coordinates
# used in digital images)
# - right-handed: positive z look-at direction
def get_world2cam_from_blender_cam(cam):
# bcam stands for blender camera
R_bcam2cv = Matrix(
((1, 0, 0),
(0, -1, 0),
(0, 0, -1)))
# Transpose since the rotation is object rotation,
# and we want coordinate rotation
# Use matrix_world instead to account for all constraints
location, rotation = cam.matrix_world.decompose()[0:2] # Matrix_world returns the cam2world matrix.
R_world2bcam = rotation.to_matrix().transposed()
# Convert camera location to translation vector used in coordinate changes
# T_world2bcam = -1*R_world2bcam*cam.location
# Use location from matrix_world to account for constraints:
T_world2bcam = -1 * R_world2bcam * location
# Build the coordinate transform matrix from world to computer vision camera
R_world2cv = R_bcam2cv * R_world2bcam
T_world2cv = R_bcam2cv * T_world2bcam
# put into 3x4 matrix
RT = Matrix((
R_world2cv[0][:] + (T_world2cv[0],),
R_world2cv[1][:] + (T_world2cv[1],),
R_world2cv[2][:] + (T_world2cv[2],),
(0,0,0,1)
))
return RT
#---------------------------------------------------------------
# 3x4 P matrix from Blender camera
#---------------------------------------------------------------
# Build intrinsic camera parameters from Blender camera data
#
# See notes on this in
# blender.stackexchange.com/questions/15102/what-is-blenders-camera-projection-matrix-model
def get_calibration_matrix_K_from_blender(camd):
f_in_mm = camd.lens
scene = bpy.context.scene
resolution_x_in_px = scene.render.resolution_x
resolution_y_in_px = scene.render.resolution_y
scale = scene.render.resolution_percentage / 100
sensor_width_in_mm = camd.sensor_width
sensor_height_in_mm = camd.sensor_height
pixel_aspect_ratio = scene.render.pixel_aspect_x / scene.render.pixel_aspect_y
if (camd.sensor_fit == 'VERTICAL'):
        # the sensor height is fixed (sensor fit is vertical),
# the sensor width is effectively changed with the pixel aspect ratio
s_u = resolution_x_in_px * scale / sensor_width_in_mm / pixel_aspect_ratio
s_v = resolution_y_in_px * scale / sensor_height_in_mm
else: # 'HORIZONTAL' and 'AUTO'
# the sensor width is fixed (sensor fit is horizontal),
# the sensor height is effectively changed with the pixel aspect ratio
pixel_aspect_ratio = scene.render.pixel_aspect_x / scene.render.pixel_aspect_y
s_u = resolution_x_in_px * scale / sensor_width_in_mm
s_v = resolution_y_in_px * scale * pixel_aspect_ratio / sensor_height_in_mm
# Parameters of intrinsic calibration matrix K
alpha_u = f_in_mm * s_u
alpha_v = f_in_mm * s_v
u_0 = resolution_x_in_px * scale / 2
v_0 = resolution_y_in_px * scale / 2
skew = 0 # only use rectangular pixels
# with open('debug.txt', 'w') as f:
# f.write("Camera intrinsics: ")
# f.write("Sensor width: ")
# f.write(str(camd.sensor_width))
# f.write("\nFocal length/fov ")
# f.write(str(camd.lens))
# f.write(" ")
# f.write(str(camd.lens_unit))
# # f.write(str(alpha_u))
# f.write("\nFOV: ")
# f.write(str(math.degrees(2 * math.atan(camd.sensor_width /(2 * camd.lens)))))
# f.write("\nClip start/end ")
# f.write(str(camd.clip_start))
# f.write(" ")
# f.write(str(camd.clip_end))
#
# f.write('\n')
K = Matrix(
((alpha_u, skew, u_0),
( 0 , alpha_v, v_0),
( 0 , 0, 1 )))
return K
def cond_mkdir(path):
path = os.path.normpath(path)
if not os.path.exists(path):
os.makedirs(path)
return path
def dump(obj):
for attr in dir(obj):
if hasattr(obj, attr):
print("obj.%s = %s" % (attr, getattr(obj, attr)))
def get_archimedean_spiral(sphere_radius, num_steps=250):
'''
https://en.wikipedia.org/wiki/Spiral, section "Spherical spiral". c = a / pi
'''
a = 40
r = sphere_radius
translations = []
i = a / 2
while i < a:
theta = i / a * math.pi
x = r * math.sin(theta) * math.cos(-i)
z = r * math.sin(-theta + math.pi) * math.sin(-i)
y = r * - math.cos(theta)
translations.append((x, y, z))
i += a / (2 * num_steps)
return np.array(translations)
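# --- Usage sketch (illustrative; these two helpers only need numpy, not bpy) ---
# Sample a few camera positions on the upper hemisphere and build OpenCV-convention
# cam2world matrices that look at the origin.
def _example_camera_poses(n=4, radius=2.0):
    cam_locations = sample_spherical(n, radius=radius)   # (n, 3) positions with y >= 0
    target = np.zeros(3)
    cam2world = look_at(cam_locations, target)           # (n, 4, 4) homogeneous matrices
    return cam2world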
|
#Write a program that shows an on-screen countdown to a fireworks launch, going from 10 down to 0, with a one-second pause between numbers.
from time import sleep
import emoji
for c in range (10, -1, -1):
print(c)
sleep(1)
print(emoji.emojize('💥')*3)
|
# -*- coding: utf-8 -*-
##############################################################################
#
# twitteroauth.py
# A module to provide auth handler of Twitter OAuth,
# stores user data in memcache.
#
# Copyright (c) 2010 Webcore Corp. All Rights Reserved.
#
##############################################################################
""" twitteroauth.py
A module to provide auth handler of Twitter OAuth,
stores user data in memcache.
$Id: appengine.py 638 2010-08-10 04:05:57Z ats $
"""
import logging
from google.appengine.api import memcache
from aha.auth.base import BaseAuth
from plugin.twitteroauth.twitter import TwitterMixin
TWITTER_NAMESPACE = 'twitter_login_users'
OAUTH_ACCESS_TOKEN_COOKIE = '_oauth_request_token'
EXPIRE = 60*60*24*7
class TwitterOAuth(BaseAuth, TwitterMixin):
"""
    A class that performs authentication via Twitter OAuth.
    To use Twitter authentication in an aha application,
    set auth_obj in the configuration in config.py as follows:
        from plugin.twitteroauth.twitter_auth import TwitterOAuth
        config.auth_obj = TwitterOAuth
    You should also set the consumer key and consumer secret
    for your Twitter application::
config.consumer_key = '8tvBBBU4P8SqPypC1X4tpA'
config.consumer_secret = 'RGdpAxEnuETjKQdpDxsJkR67Ki16st6gfv4URhfdM'
"""
TYPE = 'twitter'
def auth(self, ins, *param, **kws):
"""
A method to perform authentication, or
to check if the authentication has been performed.
It returns true on success, false on failure.
:param ins : a controller instance.
:param param : parameters to be passed to authentication function.
        :param kws : keyword arguments to be passed to the authentication
                     function.
"""
u = self.get_user(ins, *param, **kws)
if not u:
return False
return True
def auth_redirect(self, ins, *param, **kws):
"""
A method to perform redirection
when the authentication fails, user doesn't have privileges, etc.
:param ins : a controller instance.
:param param : parameters to be passed to authentication function.
        :param kws : keyword arguments to be passed to the authentication
                     function.
"""
self.controller = ins
url = self.authenticate_redirect()
if not url:
raise ValueError("authenticate_redirect() didn't return url.")
ins.redirect(url)
@classmethod
def get_user(cls, ins):
"""
        A method to return the currently logged-in user.
        It returns the user dict if the user is logged in,
        or an empty dict if not.
        :param ins : a controller instance.
"""
key = ins.cookies.get(OAUTH_ACCESS_TOKEN_COOKIE, '')
if key:
user = memcache.get(key, namespace = TWITTER_NAMESPACE)
if user: return user;
return {}
@classmethod
def clear_user(cls, ins):
"""
        A method to clear the current user from memcache.
        :param ins : a controller instance.
"""
key = ins.cookies.get(OAUTH_ACCESS_TOKEN_COOKIE, '')
if key:
memcache.delete(key, namespace = TWITTER_NAMESPACE)
return {}
def set_cookie(self, key, data):
"""
A method to set cookie.
It is called during auth_redirect() is called internally.
"""
logging.debug('set cookie')
self.controller.post_cookie[key] = data
self.controller.post_cookie[key]['path'] = '/'
def main(): pass;
|
import re
from collections import defaultdict
def parse_line(line):
subjectRegex = r"(?P<color>\w+ \w+) bags contain"
dependencyRegex = r"(?P<number>\d) (?P<color>\w+ \w+) bags?"
subject = re.match(subjectRegex, line)["color"]
dependencyMatches = [(m["number"], m["color"]) for m in re.finditer(dependencyRegex, line)]
return (subject, dependencyMatches)
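# Worked example (illustrative): for the rule
#   "light red bags contain 1 bright white bag, 2 muted yellow bags."
# parse_line returns ("light red", [("1", "bright white"), ("2", "muted yellow")]);
# a terminal rule such as "faded blue bags contain no other bags." yields ("faded blue", []).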
def parse_input(filename):
with open(filename) as file:
lines = file.readlines()
return dict([parse_line(line) for line in lines])
def reverse_graph(dependencies):
g = defaultdict(lambda : set())
for k, deps in dependencies.items():
for _, dep in deps:
g[dep].add(k)
return g
def explore_parents(graph, initial_node):
visited = set()
to_explore = [ initial_node ]
while to_explore:
current = to_explore.pop(0)
for node in graph[current]:
if node not in visited:
to_explore.append(node)
visited.add(current)
return visited - set([initial_node])
def number_nested_bags(initial, graph):
deps = graph[initial]
if deps:
res = 1
for count, dep in deps:
res += int(count) * number_nested_bags(dep, graph)
return res
else:
return 1
def main():
dependencies = parse_input("input.txt")
reversed_g = reverse_graph(dependencies)
possible_parents = explore_parents(reversed_g, "shiny gold")
print(len(possible_parents))
c = number_nested_bags("shiny gold", dependencies)
print(c-1)
if __name__ == '__main__':
    main()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Events module
puppeteer equivalent: Events.js
"""
class Events:
class Page:
Close = 'close'
Console = 'console'
Dialog = 'dialog'
DOMContentLoaded = 'domcontentloaded'
Error = 'error'
# Can't use just 'error' due to node.js special treatment of error events.
# @see https://nodejs.org/api/events.html#events_error_events
PageError = 'pageerror'
Request = 'request'
Response = 'response'
RequestFailed = 'requestfailed'
RequestFinished = 'requestfinished'
FrameAttached = 'frameattached'
FrameDetached = 'framedetached'
FrameNavigated = 'framenavigated'
Load = 'load'
Metrics = 'metrics'
Popup = 'popup'
WorkerCreated = 'workercreated'
WorkerDestroyed = 'workerdestroyed'
class Browser:
TargetCreated = 'targetcreated'
TargetDestroyed = 'targetdestroyed'
TargetChanged = 'targetchanged'
Disconnected = 'disconnected'
class BrowserContext:
TargetCreated = 'targetcreated'
TargetDestroyed = 'targetdestroyed'
TargetChanged = 'targetchanged'
class NetworkManager:
Request = 'Events.NetworkManager.Request'
Response = 'Events.NetworkManager.Response'
RequestFailed = 'Events.NetworkManager.RequestFailed'
RequestFinished = 'Events.NetworkManager.RequestFinished'
class FrameManager:
FrameAttached = 'Events.FrameManager.FrameAttached'
FrameNavigated = 'Events.FrameManager.FrameNavigated'
FrameDetached = 'Events.FrameManager.FrameDetached'
LifecycleEvent = 'Events.FrameManager.LifecycleEvent'
FrameNavigatedWithinDocument = 'Events.FrameManager.FrameNavigatedWithinDocument'
ExecutionContextCreated = 'Events.FrameManager.ExecutionContextCreated'
ExecutionContextDestroyed = 'Events.FrameManager.ExecutionContextDestroyed'
class Connection:
Disconnected = 'Events.Connection.Disconnected'
class CDPSession:
Disconnected = 'Events.CDPSession.Disconnected'
|
_base_ = '../../../../base.py'
# model settings
model = dict(type='SelectiveSearch')
# dataset settings
data_source_cfg = dict(
type='COCOSelectiveSearchJson',
memcached=True,
mclient_path='/mnt/lustre/share/memcached_client')
data_train_json = 'data/coco/annotations/instances_train2017.json'
data_train_root = 'data/coco/train2017'
dataset_type = 'SelectiveSearchDataset'
img_norm_cfg = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
data = dict(
imgs_per_gpu=1, # support single-image single-gpu inference only
workers_per_gpu=8,
val=dict(
type=dataset_type,
data_source=dict(
json_file=data_train_json, root=data_train_root,
**data_source_cfg),
method='fast',
min_size=None,
max_ratio=None,
topN=None
))
|
import datetime
import boto3
from botocore.exceptions import ClientError
SENDER = "[email protected]"
AWS_REGION = "us-east-1"
SUBJECT = "Alerta de evento"
CHARSET = "UTF-8"
def send_email(recipient, event, cam_id, cam_name, cam_address, frame64):
body_html = """
<html>
<head></head>
<body>
<h1>Um evento foi detectado</h1>
<p>O evento '{}' foi detectado em {}.</p>
<table>
<tr>
<th colspan="2">Camera</th>
</tr>
<tr>
<td>Identificação</td>
<td>{}</td>
</tr>
<tr>
<td>Nome</td>
<td>{}</td>
</tr>
<tr>
<td>Endereço</td>
<td>{}</td>
</tr>
</table>
<p>MIA</p>
</body>
</html>
""".format(event,
datetime.datetime.now().strftime('%d-%m-%Y %H:%M:%S'),
cam_id,
cam_name,
cam_address)
data = 'From: {}\n' \
'To: {}\n' \
'Subject: {}\n' \
'MIME-Version: 1.0\n' \
'Content-type: Multipart/Mixed; boundary="NextPart"\n\n' \
'--NextPart\n' \
'Content-Type: text/html;charset=utf-8\n\n' \
'{}\n\n' \
'--NextPart\n' \
'Content-Type: image/jpeg; name="event.jpeg";\n' \
'Content-Disposition: attachment;\n' \
'Content-Transfer-Encoding: base64;\n' \
'filename="event.jpeg"\n\n' \
'{}\n\n' \
'--NextPart--'.format(SENDER, recipient, SUBJECT, body_html, frame64)
client = boto3.client('ses', region_name=AWS_REGION)
try:
response = client.send_raw_email(
Destinations=[
],
FromArn='',
RawMessage={
'Data': data
},
)
except ClientError as e:
return e.response['Error']['Message']
else:
return "Email sent! Message ID: {}".format(response['ResponseMetadata']['RequestId'])
|
#-
# Copyright (c) 2011 Robert N. M. Watson
# Copyright (c) 2013 Robert M. Norton
# All rights reserved.
#
# This software was developed by SRI International and the University of
# Cambridge Computer Laboratory under DARPA/AFRL contract FA8750-10-C-0237
# ("CTSRD"), as part of the DARPA CRASH research programme.
#
# @BERI_LICENSE_HEADER_START@
#
# Licensed to BERI Open Systems C.I.C. (BERI) under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. BERI licenses this
# file to you under the BERI Hardware-Software License, Version 1.0 (the
# "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at:
#
# http://www.beri-open-systems.org/legal/license-1-0.txt
#
# Unless required by applicable law or agreed to in writing, Work distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# @BERI_LICENSE_HEADER_END@
#
from beritest_tools import BaseBERITestCase
from beritest_tools import attr
#
# Test to check that using cp2 if it is disabled causes a coprocessor
# unusable exception.
#
@attr('capabilities')
class test_cp2_disabled_exception(BaseBERITestCase):
EXPECTED_EXCEPTIONS = 1
def test_exception_counter(self):
self.assertRegisterEqual(self.MIPS.v0, 1, "CP2 exception counter not 1")
def test_cause(self):
cpUnusable = (self.MIPS.a1 >> 28) & 3
ex = (self.MIPS.a1 >> 2) & 0x1f
self.assertEqual(cpUnusable, 2, "cp unusable not 2")
self.assertEqual(ex, 11, "exception cause not 11 (cp unusable)")
def test_trap_info(self):
self.assertCompressedTrapInfo(self.MIPS.a4, mips_cause=self.MIPS.Cause.COP_Unusable)
def test_epc(self):
self.assertRegisterEqual(self.MIPS.a2, self.MIPS.a3, "expected epc did not match")
|
"""
Packages
Module -> a collection of functions
Package -> a collection of modules
Note: Python 2.x requires an __init__.py file; Python 3.x does not.
from test_yumi import yumi1, yumi2
from test_yumi.test_yumi2 import yumi3, yumi4
print(yumi1.function1(4, 5)) # 9
print(yumi2.course) # Python program
print(yumi2.function2()) # Python program
print(yumi3.function3()) # Yumi
print(yumi4.function4()) # Ouchi
"""
from test_yumi.yumi1 import function1
from test_yumi.test_yumi2.yumi4 import function4
print(function1(4, 7)) # 11
print(function4()) # Ouchi
|
import os.path
import csv
from config_helper import *
plugin_name = "CSV"
plugin_type = "output"
csv_logger = logging.getLogger('csv-plugin:')
invalidConfig = False
try:
config = ConfigParser.ConfigParser(allow_no_value=True)
config.read('config.ini')
filename = config.get("Csv", "filename")
csv_debug_enabled = is_debugging_enabled('Csv')
csv_write_enabled = not get_boolean_or_default('Csv', 'Simulation', False)
except Exception, e:
csv_logger.error("Error reading config:\n%s", e)
invalidConfig = True
def write(timestamp, temperatures):
if invalidConfig:
if csv_debug_enabled:
csv_logger.debug('Invalid config, aborting write')
return []
debug_message = 'Writing to ' + plugin_name
if not csv_write_enabled:
debug_message += ' [SIMULATED]'
csv_logger.debug(debug_message)
csv_file = None
writer = None
csv_write_headers = csv_write_enabled and not os.path.isfile(filename)
if csv_write_enabled:
try:
csv_file = open(filename, 'a')
except Exception, e:
csv_logger.error("Error opening %s for writing - aborting write\n%s", filename, e)
return
writer = csv.writer(csv_file, delimiter=',', quoting=csv.QUOTE_MINIMAL)
if csv_write_enabled and csv_write_headers:
if csv_debug_enabled:
csv_logger.debug("Creating %s", filename)
fieldnames = ['Time']
for t in temperatures:
if t.target is not None:
fieldnames.append(t.zone + ' [A]')
fieldnames.append(t.zone + ' [T]')
else:
fieldnames.append(t.zone)
writer.writerow(fieldnames)
debug_temperatures = '%s: ' % timestamp
row = [timestamp]
for temperature in temperatures:
row.append(temperature.actual)
debug_temperatures += "%s (%s A" % (temperature.zone, temperature.actual)
if temperature.target is not None:
row.append(temperature.target)
debug_temperatures += ", %s T" % temperature.target
debug_temperatures += ') '
if csv_debug_enabled:
csv_logger.debug(debug_temperatures)
if csv_write_enabled:
writer.writerow(row)
if csv_write_enabled:
csv_file.close()
# if called directly then this is what will execute
if __name__ == "__main__":
import sys
write(sys.argv[1], sys.argv[2])
|
import unittest
import json
from app import create_app
from app.models.v2 import Business
class DeleteBusinessTestCase(unittest.TestCase):
"""This class represents the api test case"""
def setUp(self):
"""
Will be called before every test
"""
self.app = create_app('testing')
self.app.app_context().push()
self.client = self.app.test_client
self.user = {
"username": "mwenda",
"email": "[email protected]",
"password": "qwerty123!@#",
"first_name": "eric",
"last_name": "Miriti"
}
self.logins = {
"username": "mwenda",
"password": "qwerty123!@#"
}
self.business = {
"name": "Andela",
"location": "Nairobi,Kenya",
"category": "Tech",
"description": "Epic"
}
self.client().post(
'/api/v2/auth/register',
data=json.dumps(self.user),
content_type='application/json'
)
self.login = self.client().post(
'/api/v2/auth/login',
data=json.dumps(self.logins),
content_type='application/json'
)
self.data = json.loads(self.login.get_data(as_text=True))
# get the token to be used by tests
self.token = self.data['auth_token']
def tearDown(self):
""" clear data after every test"""
Business.query.delete()
def test_can_delete_successfully(self):
"""Tests that a business can be Deleted successfully"""
self.client().post(
'/api/v2/businesses',
data=json.dumps(self.business),
headers={
"content-type": "application/json",
"access-token": self.token
})
bsid = Business.query.first() # Get the last created Record
res2 = self.client().delete(
'/api/v2/businesses/' + str(bsid.id),
headers={
"content-type": "application/json",
"access-token": self.token
})
self.assertEqual(res2.status_code, 201)
self.assertIn("Business Deleted", str(res2.data))
def test_cannot_delete_empty(self):
"""Tests that cannot delete a business that doesn't exist"""
res2 = self.client().delete(
'/api/v2/businesses/1',
headers={
"content-type": "application/json",
"access-token": self.token
}
)
self.assertEqual(res2.status_code, 401)
self.assertIn("Business not found", str(res2.data))
def can_only_delete_own_business(self):
"""test that one can only delete a business they created """
res2 = self.client().delete(
'/api/v2/businesses/1',
headers={
"content-type": "application/json",
"access-token": self.token
}
)
self.assertEqual(res2.status_code, 401)
self.assertIn(
"Sorry! You can only delete your business!!", str(res2.data))
def test_can_only_delete_own_business(self):
"""Tests that users cannot delete other users businesses"""
self.client().post(
'/api/v2/auth/register',
data=json.dumps({
"username": "Miritim",
"email": "[email protected]",
"password": "qwerty123!@#",
"first_name": "eric",
"last_name": "Miriti"
}),
content_type='application/json'
)
login = self.client().post(
'/api/v2/auth/login',
data=json.dumps({
"username": "Miritim",
"password": "qwerty123!@#"
}),
content_type='application/json'
)
token = json.loads(login.data.decode("utf-8"))
bs = self.client().post(
'/api/v2/businesses',
data=json.dumps(self.business),
headers={
"content-type": "application/json",
"access-token": token['auth_token']
}
)
response = json.loads(bs.data.decode('utf-8'))
res2 = self.client().delete(
'/api/v2/businesses/' + str(response['Business']['id']),
headers={
"content-type": "application/json",
"access-token": self.token
}
)
self.assertEqual(res2.status_code, 401)
self.assertIn("Sorry! You can only delete your business",
str(res2.data))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Get, create, update, and delete schedules for notifications.
"""
from . import _query_nodeping_api, _utils, config
API_URL = "{0}schedules".format(config.API_URL)
def get_schedule(token, schedule=None, customerid=None):
""" Get existing schedules in NodePing account
Returns all the data in a dictionary format from the
original JSON that is gathered from NodePing about
the account's notification schedules.
:param token: The NodePing API token for the account
:type token: str
:param schedule: The name of the notification schedule
:type schedule: str
:param customerid: (optional) ID for subaccount
:type customerid: str
:return: Response from NodePing
:rtype: dict
"""
if schedule:
url = "{0}/{1}".format(API_URL, schedule)
url = _utils.create_url(token, url, customerid)
else:
url = _utils.create_url(token, API_URL, customerid)
return _query_nodeping_api.get(url)
def create_schedule(token, data, schedule_name, customerid=None):
""" Create a new notification schedule for the specified NodePing account
Sends data of a custom alert schedule to NodePing to be created
for the specified user account. Returns the results from NodePing
in a dictionary format.
    :param token: The NodePing API token for the account
:type token: str
:param data: The schedules for each day to receive notifications
    :type data: dict
:param customerid: (optional) ID for subaccount
:type customerid: str
:return: Schedule ID and if the operation was completed or not
:rtype: dict
Example::
{'data': {'friday': {'disabled': True},
'monday': {'allday': True},
'saturday': {'exclude': False, 'time1': '6:00', 'time2': '18:00'},
'sunday': {'exclude': False, 'time1': '6:00', 'time2': '18:00'},
'thursday': {'exclude': False, 'time1': '6:00', 'time2': '18:00'},
'tuesday': {'exclude': False, 'time1': '6:00', 'time2': '18:00'},
'wednesday': {'exclude': False, 'time1': '6:00', 'time2': '18:00'}}}
    Days accept certain key/value pairs, such as:
time1: str - start of timespan (24-hour time)
time2: str - end of timespan (24-hour time)
exclude: True/False - inverts the time span so it is all day
except for the time between time1 and time2
disabled: True/False - disables notifications for this day.
allday: True/False - enables notifications for the entire day.
"""
url = "{0}/{1}".format(API_URL, schedule_name)
url = _utils.create_url(token, url, customerid)
return _query_nodeping_api.post(url, data)
def update_schedule(token, data, schedule_name, customerid=None):
""" Update a notification schedule for the specified NodePing account
Sends data of a custom alert schedule to NodePing to modify a schedule
for the specified user account. Returns the results from NodePing
in a dictionary format.
    :param token: The NodePing API token for the account
:type token: str
    :param data: The schedules for each day to receive notifications
    :type data: dict
    :param schedule_name: The name of the notification schedule
    :type schedule_name: str
:param customerid: (optional) ID for subaccount
:type customerid: str
:return: Schedule ID and if the operation was completed or not
:rtype: dict
Example::
{'data': {'friday': {'disabled': True},
'monday': {'allday': True},
'saturday': {'exclude': False, 'time1': '6:00', 'time2': '18:00'},
'sunday': {'exclude': False, 'time1': '6:00', 'time2': '18:00'},
'thursday': {'exclude': False, 'time1': '6:00', 'time2': '18:00'},
'tuesday': {'exclude': False, 'time1': '6:00', 'time2': '18:00'},
'wednesday': {'exclude': False, 'time1': '6:00', 'time2': '18:00'}}}
    Days accept certain key/value pairs, such as:
time1: str - start of timespan (24-hour time)
time2: str - end of timespan (24-hour time)
exclude: True/False - inverts the time span so it is all day
except for the time between time1 and time2
disabled: True/False - disables notifications for this day.
allday: True/False - enables notifications for the entire day.
"""
url = "{0}/{1}".format(API_URL, schedule_name)
url = _utils.create_url(token, url, customerid)
return _query_nodeping_api.put(url, data)
def delete_schedule(token, schedule, customerid=None):
""" Get existing schedules in NodePing account
Returns all the data in a dictionary format from the
original JSON that is gathered from NodePing about
the account's notification schedules.
:param token: The NodePing API token for the account
:type token: str
:param schedule: The name of the notification schedule
    :type schedule: str
    :param customerid: (optional) ID for subaccount
    :type customerid: str
:return: Response from NodePing
:rtype: dict
"""
url = "{0}/{1}".format(API_URL, schedule)
url = _utils.create_url(token, url, customerid)
return _query_nodeping_api.delete(url)
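# Illustrative sketch (not part of the original module): creating and then fetching a
# notification schedule with the functions above. The token and schedule name are
# placeholder values; the data dict follows the Example:: layout in the docstrings.
def _example_schedule_usage(token):
    schedule_data = {
        "data": {
            "monday": {"allday": True},
            "friday": {"disabled": True},
            "saturday": {"time1": "6:00", "time2": "18:00", "exclude": False},
        }
    }
    created = create_schedule(token, schedule_data, "weekend_oncall")
    fetched = get_schedule(token, schedule="weekend_oncall")
    return created, fetched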
|
"""Public protocol run data models."""
from pydantic import BaseModel
from typing import List, Optional
from datetime import datetime
from ..errors import ErrorOccurrence
from ..types import (
EngineStatus,
LoadedLabware,
LabwareOffset,
LoadedModule,
LoadedPipette,
)
class StateSummary(BaseModel):
"""Data from a protocol run."""
status: EngineStatus
errors: List[ErrorOccurrence]
labware: List[LoadedLabware]
pipettes: List[LoadedPipette]
modules: List[LoadedModule]
labwareOffsets: List[LabwareOffset]
startedAt: Optional[datetime]
completedAt: Optional[datetime]
|
import time
import html
# NOTE: we'll probably want to replace this with something that keeps the original mapping intact
def two_way_map(mapping):
mapping.update({value: key for key, value in mapping.items()})
return mapping
def read_u32(byte_stream):
return int.from_bytes(byte_stream.read(4), 'little', signed=False)
def read_i32(byte_stream):
return int.from_bytes(byte_stream.read(4), 'little', signed=True)
def write_u32(byte_stream, n):
byte_stream.write(n.to_bytes(4, 'little', signed=False))
def write_i32(byte_stream, n):
byte_stream.write(n.to_bytes(4, 'little', signed=True))
def read_u8(byte_stream):
return int.from_bytes(byte_stream.read(1), 'little', signed=False)
def write_u8(byte_stream, n):
byte_stream.write(n.to_bytes(1, 'little', signed=False))
def read_i8(byte_stream):
return int.from_bytes(byte_stream.read(1), 'little', signed=True)
def write_i8(byte_stream, n):
byte_stream.write(n.to_bytes(1, 'little', signed=True))
def read_list(byte_stream, item_de_fn):
length = read_u8(byte_stream)
return [item_de_fn(byte_stream) for _ in range(length)]
def write_list(byte_stream, l):
write_u8(byte_stream, len(l))
if l:
if isinstance(l[0], str):
for item in l:
write_string(byte_stream, item)
else:
for item in l:
item.write(byte_stream)
def read_string(byte_stream):
length = read_u8(byte_stream)
    raw_bytes = byte_stream.read(length)
    return str(raw_bytes, encoding='utf-8')
def write_string(byte_stream, s):
str_bytes = bytes(s, encoding='utf-8')
write_u8(byte_stream, len(str_bytes))
byte_stream.write(str_bytes)
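# Illustrative sketch (not part of the original module): round-tripping a few values
# through the read/write helpers above using an in-memory stream.
def _example_helper_round_trip():
    import io
    stream = io.BytesIO()
    write_u32(stream, 1234)
    write_string(stream, u"hello")
    write_list(stream, [u"a", u"b"])
    stream.seek(0)
    assert read_u32(stream) == 1234
    assert read_string(stream) == u"hello"
    assert read_list(stream, read_string) == [u"a", u"b"]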
QUESTION_CATEGORY_MAP = two_way_map({
9: u"General Knowledge",
10: u"Entertainment: Books",
11: u"Entertainment: Film",
12: u"Entertainment: Music",
13: u"Entertainment: Musicals & Theatres",
14: u"Entertainment: Television",
15: u"Entertainment: Video Games",
16: u"Entertainment: Board Games",
17: u"Science & Nature",
18: u"Science: Computers",
19: u"Science: Mathematics",
20: u"Mythology",
21: u"Sports",
22: u"Geography",
23: u"History",
24: u"Politics",
25: u"Art",
26: u"Celebrities",
27: u"Animals",
28: u"Vehicles",
29: u"Entertainment: Comics",
30: u"Science: Gadgets",
31: u"Entertainment: Japanese Anime & Manga",
32: u"Entertainment: Cartoon & Animations",
})
QUESTION_TYPE_MAP = two_way_map({
0: u"multiple",
1: u"boolean",
})
QUESTION_DIFFICULTY_MAP = two_way_map({
0: u"easy",
1: u"medium",
2: u"hard",
})
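# Money ladder: question index (0-13) mapped to its dollar value; the map is two-way so
# the serialized u8 index can be converted back to the amount on read.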
DOLLAR_AMOUNT_MAP = two_way_map({
0: 500,
1: 1000,
2: 2000,
3: 3000,
4: 5000,
5: 7000,
6: 10000,
7: 20000,
8: 30000,
9: 50000,
10: 100000,
11: 250000,
12: 500000,
13: 1000000,
})
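# Lifeline values below are bit flags; lifelines_used in MillionaireRound appears to hold
# a bitwise OR of the lifelines spent on a question.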
class Lifeline:
FiftyFifty = 0b0001
DoubleDip = 0b0010
class Question:
# Example:
# "category": "Science: Computers",
# "type": "multiple",
# "difficulty": "medium",
# "question": "Moore's law originally stated that the number of transistors on a microprocessor chip would double every...",
# "correct_answer": "Year",
# "incorrect_answers": ["Four Years", "Two Years", "Eight Years"]
def __init__(self):
self.category = None
self.type = None
self.difficulty = None
self.question = None
self.correct_answer = None
self.incorrect_answers = None
def serialize(self):
return {
'category': self.category,
'type': self.type,
'difficulty': self.difficulty,
'question': self.question,
'correct_answer': self.correct_answer,
'incorrect_answers': self.incorrect_answers,
}
@classmethod
def deserialize(cls, ser_dict):
question = cls()
for name, value in ser_dict.items():
if name != u'incorrect_answers':
setattr(question, name, html.unescape(value))
else:
question.incorrect_answers = [html.unescape(answer) for answer in value]
return question
@classmethod
def read(cls, byte_stream):
question_obj = cls()
question_obj.category = QUESTION_CATEGORY_MAP[read_u8(byte_stream)]
question_obj.type = QUESTION_TYPE_MAP[read_u8(byte_stream)]
question_obj.difficulty = QUESTION_DIFFICULTY_MAP[read_u8(byte_stream)]
question_obj.question = read_string(byte_stream)
question_obj.correct_answer = read_string(byte_stream)
question_obj.incorrect_answers = read_list(byte_stream, read_string)
return question_obj
def write(self, byte_stream):
write_u8(byte_stream, QUESTION_CATEGORY_MAP[self.category])
write_u8(byte_stream, QUESTION_TYPE_MAP[self.type])
write_u8(byte_stream, QUESTION_DIFFICULTY_MAP[self.difficulty])
write_string(byte_stream, self.question)
write_string(byte_stream, self.correct_answer)
write_list(byte_stream, self.incorrect_answers)
class RoundResult:
Walked = 0
AnsweredCorrectly = 1
    AnsweredIncorrectly = 2
class MillionaireRound:
def __init__(self, question, question_amount, lifelines_used, given_answer, time_up=False):
self.question = question
self.question_amount = question_amount
self.lifelines_used = lifelines_used
self.given_answer = given_answer
self.time_up = time_up
@classmethod
def deserialize(cls, ser_dict):
question = Question.deserialize(ser_dict['question'])
question_amount = ser_dict['question_amount']
lifelines_used = ser_dict['lifelines_used']
round_result = ser_dict['round_result']
return cls(question, question_amount, lifelines_used, round_result)
@classmethod
def read(cls, byte_stream):
question = Question.read(byte_stream)
question_amount = DOLLAR_AMOUNT_MAP[read_u8(byte_stream)]
lifelines_used = read_u8(byte_stream)
given_answer_index = read_i8(byte_stream)
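        # Sentinel values for given_answer_index: -1 means the correct answer was given,
        # -2 means time ran out, -3 means no answer was given (the player walked);
        # values >= 0 index into incorrect_answers.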
time_up = False
if given_answer_index == -1:
given_answer = question.correct_answer
elif given_answer_index >= 0:
given_answer = question.incorrect_answers[given_answer_index]
else:
given_answer = None
if given_answer_index == -2:
time_up = True
return cls(question, question_amount, lifelines_used, given_answer, time_up)
def write(self, byte_stream):
self.question.write(byte_stream)
write_u8(byte_stream, DOLLAR_AMOUNT_MAP[self.question_amount])
write_u8(byte_stream, self.lifelines_used)
if self.given_answer == self.question.correct_answer:
write_i8(byte_stream, -1)
elif self.time_up:
write_i8(byte_stream, -2)
elif self.given_answer is None:
write_i8(byte_stream, -3)
else:
write_i8(byte_stream, self.question.incorrect_answers.index(self.given_answer))
class MillionaireGame:
def __init__(self, user, lifelines, rounds, timestamp, amount_earned):
self.user = user
self.lifelines = lifelines
self.rounds = rounds
self.timestamp = timestamp
self.amount_earned = amount_earned
def serialize(self):
return {
'user': self.user,
'lifelines': self.lifelines,
'rounds': [round.serialize() for round in self.rounds],
'timestamp': self.timestamp,
'amount_earned': self.amount_earned,
}
@classmethod
def deserialize(cls, ser_dict):
user = ser_dict['user']
lifelines = ser_dict['lifelines']
rounds = [MillionaireRound.deserialize(round) for round in ser_dict['rounds']]
timestamp = ser_dict['timestamp']
amount_earned = ser_dict['amount_earned']
return cls(user, lifelines, rounds, timestamp, amount_earned)
@classmethod
def read(cls, byte_stream):
user = read_string(byte_stream)
lifelines = read_u8(byte_stream)
rounds = read_list(byte_stream, MillionaireRound.read)
timestamp = read_u32(byte_stream)
amount_earned = read_i32(byte_stream)
return cls(user, lifelines, rounds, timestamp, amount_earned)
def write(self, byte_stream):
write_string(byte_stream, self.user)
write_u8(byte_stream, self.lifelines)
write_list(byte_stream, self.rounds)
write_u32(byte_stream, self.timestamp)
write_i32(byte_stream, self.amount_earned)
def timestamp():
return int(time.time())
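# Illustrative sketch (not part of the original module): a minimal round trip of a
# MillionaireGame through the binary format above, using io.BytesIO as the stream.
# The question text, user name, and amounts are made-up placeholder values.
def _example_game_round_trip():
    import io
    question = Question()
    question.category = u"General Knowledge"
    question.type = u"boolean"
    question.difficulty = u"easy"
    question.question = u"Is this an example?"
    question.correct_answer = u"True"
    question.incorrect_answers = [u"False"]
    round_ = MillionaireRound(question, 500, Lifeline.FiftyFifty, u"True")
    game = MillionaireGame(u"example_user", Lifeline.FiftyFifty | Lifeline.DoubleDip,
                           [round_], timestamp(), 500)
    stream = io.BytesIO()
    game.write(stream)
    stream.seek(0)
    restored = MillionaireGame.read(stream)
    assert restored.user == game.user
    assert restored.amount_earned == game.amount_earned
    return restored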
|
from statuspageio.errors import ConfigurationError
class PageService(object):
"""
:class:`statuspageio.PageService` is used by :class:`statuspageio.Client` to make
actions related to Page resource.
Normally you won't instantiate this class directly.
"""
OPTS_KEYS_TO_PERSIST = ['name', 'url', 'notifications_from_email', ]
def __init__(self, http_client, page_id):
"""
:param :class:`statuspageio.HttpClient` http_client: Pre configured high-level http client.
"""
self.__http_client = http_client
self.container = 'page'
self.page_id = page_id
@property
def http_client(self):
return self.__http_client
def get(self):
"""
Get page details
Gets page information
If the specified page does not exist, the request will return an error
:calls: ``get pages/{page_id}.json``
        :return: Dictionary that supports attribute-style access and represents the Page resource.
:rtype: dict
"""
_, _, page = self.http_client.get('/pages/{page_id}.json'.format(page_id=self.page_id))
return page
def update(self, **kwargs):
"""
Update page details
Updates page information
If the specified page does not exist, the request will return an error
:calls: ``patch pages/{page_id}.json``
        :param dict **kwargs: page attributes to update.
        :return: Dictionary that supports attribute-style access and represents the updated Page resource.
:rtype: dict
"""
OPTS_KEYS_TO_PERSIST = [
'name',
'url',
'notifications_from_email',
'time_zone',
'city',
'state',
'country',
'subdomain',
'domain',
'layout',
'allow_email_subscribers',
'allow_incident_subscribers',
'allow_page_subscribers',
'allow_sms_subscribers',
'hero_cover_url',
'transactional_logo_url',
'css_body_background_color',
'css_font_color',
'css_light_font_color',
'css_greens',
'css_oranges',
'css_reds',
'css_yellows']
if not kwargs:
raise Exception('attributes are missing')
attributes = dict((k, v) for k, v in kwargs.iteritems()
if k in OPTS_KEYS_TO_PERSIST)
page = self.http_client.patch('/pages/{page_id}.json'.format(page_id=self.page_id),
container=self.container,
body=attributes)
return page
class ComponentsService(object):
"""
:class:`statuspageio.ComponentsService` is used by :class:`statuspageio.Client` to make
actions related to Components resource.
Normally you won't instantiate this class directly.
"""
OPTS_KEYS_TO_PERSIST = ['name', 'description', 'group_id', 'status']
def __init__(self, http_client, page_id):
"""
:param :class:`statuspageio.HttpClient` http_client: Pre configured high-level http client.
"""
self.__http_client = http_client
self.page_id = page_id
self.container = 'component'
@property
def http_client(self):
return self.__http_client
def list(self):
"""
List components
Lists components and their information
        If the specified page does not exist, the request will return an error
        :calls: ``get pages/{page_id}/components.json``
        :return: Dictionary that supports attribute-style access and represents the Component resources.
:rtype: dict
"""
_, _, components = self.http_client.get(
'/pages/{page_id}/components.json'.format(page_id=self.page_id))
return components
def create(self, **kwargs):
"""
Create a component
Creates component
        If the specified page does not exist, the request will return an error
        :calls: ``post pages/{page_id}/components.json``
        :param dict **kwargs: component attributes to create.
        :return: Dictionary that supports attribute-style access and represents the created Component resource.
:rtype: dict
"""
if not kwargs:
raise Exception('attributes are missing')
attributes = dict((k, v) for k, v in kwargs.iteritems()
if k in self.OPTS_KEYS_TO_PERSIST)
_, _, component = self.http_client.post(
'/pages/{page_id}/components.json'.format(
page_id=self.page_id), container=self.container, body=attributes)
return component
def delete(self, component_id):
"""
Delete a component
Deletes a component
        If the specified component does not exist, the request will return an error
        :calls: ``delete pages/{page_id}/components/{component_id}.json``
        :param int component_id: Unique identifier of a component.
        :return: Status code of the delete request.
        :rtype: int
"""
status_code, _, _ = self.http_client.delete(
"/pages/{page_id}/components/{component_id}.json".format(
page_id=self.page_id, component_id=component_id))
return status_code
def update(self, component_id, **kwargs):
"""
Update a component
Updates component information
        If the specified component does not exist, the request will return an error
:calls: ``patch pages/{page_id}/components/{component_id}.json``
:param int component_id: Unique identifier of a component.
:param dict **kwargs: component attributes to update.
        :return: Dictionary that supports attribute-style access and represents the updated Component resource.
:rtype: dict
"""
if not kwargs:
            raise Exception('attributes for Component are missing')
attributes = dict((k, v) for k, v in kwargs.iteritems()
if k in self.OPTS_KEYS_TO_PERSIST)
_, _, component = self.http_client.patch(
"/pages/{page_id}/components/{component_id}.json".format(
page_id=self.page_id, component_id=component_id), container='component', body=attributes)
return component
class IncidentsService(object):
"""
:class:`statuspageio.IncidentsService` is used by :class:`statuspageio.Client` to make
actions related to Incidents resource.
Normally you won't instantiate this class directly.
"""
OPTS_KEYS_TO_PERSIST = ['name', 'description', 'group_id', 'status']
def __init__(self, http_client, page_id):
"""
:param :class:`statuspageio.HttpClient` http_client: Pre configured high-level http client.
"""
self.__http_client = http_client
self.page_id = page_id
self.container = 'incident'
@property
def http_client(self):
return self.__http_client
def list(self):
"""
List all incidents
:calls: ``get pages/{page_id}/incidents.json``
        :return: Dictionary that supports attribute-style access and represents the Incident resources.
:rtype: dict
"""
_, _, incidents = self.http_client.get(
'/pages/{page_id}/incidents.json'.format(page_id=self.page_id))
return incidents
def list_unresolved(self):
"""
List unresolved incidents
:calls: ``get pages/{page_id}/incidents/unresolved.json``
        :return: Dictionary that supports attribute-style access and represents the unresolved Incident resources.
:rtype: dict
"""
_, _, incidents = self.http_client.get(
'/pages/{page_id}/incidents/unresolved.json'.format(page_id=self.page_id))
return incidents
def list_scheduled(self):
"""
List scheduled incidents
:calls: ``get pages/{page_id}/incidents/scheduled.json``
        :return: Dictionary that supports attribute-style access and represents the scheduled Incident resources.
:rtype: dict
"""
_, _, incidents = self.http_client.get(
'/pages/{page_id}/incidents/scheduled.json'.format(page_id=self.page_id))
return incidents
def create(self, **kwargs):
"""
        Create an incident
:calls: ``post pages/{page_id}/incidents.json``
        :param dict **kwargs: incident attributes to create.
        :return: Dictionary that supports attribute-style access and represents the created Incident resource.
:rtype: dict
"""
OPTS_KEYS_TO_PERSIST = [
'name',
'status',
'message',
'wants_twitter_update',
'impact_override',
'component_ids']
if not kwargs:
raise Exception('attributes are missing')
attributes = dict((k, v) for k, v in kwargs.iteritems()
if k in OPTS_KEYS_TO_PERSIST)
        _, _, incident = self.http_client.post(
            '/pages/{page_id}/incidents.json'.format(
                page_id=self.page_id), container=self.container, body=attributes)
        return incident
def create_scheduled(self, **kwargs):
"""
Create a scheduled incident
:calls: ``post pages/{page_id}/incidents.json``
        :param dict **kwargs: incident attributes to create.
        :return: Dictionary that supports attribute-style access and represents the created Incident resource.
:rtype: dict
"""
OPTS_KEYS_TO_PERSIST = [
'name',
'status',
'scheduled_for',
'scheduled_until',
'message',
'wants_twitter_update',
'scheduled_remind_prior',
'scheduled_auto_in_progress',
'scheduled_auto_completed',
'impact_override',
'component_ids']
if not kwargs:
raise Exception('attributes are missing')
attributes = dict((k, v) for k, v in kwargs.iteritems()
if k in OPTS_KEYS_TO_PERSIST)
_, _, incident = self.http_client.post(
'/pages/{page_id}/incidents.json'.format(
page_id=self.page_id), container=self.container, body=attributes)
return incident
def delete(self, incident_id):
"""
        Remove an incident
        :calls: ``delete pages/{page_id}/incidents/{incident_id}.json``
:return: status code
:rtype: int
"""
status_code, _, _ = self.http_client.delete(
"/pages/{page_id}/incidents/{incident_id}.json".format(
page_id=self.page_id, incident_id=incident_id))
return status_code
def update(self, incident_id, **kwargs):
"""
        Update an incident
Updates incident information
NOTE: if either of status or message is modified, a new incident update will be generated.
You should update both of these attributes at the same time to avoid two separate incident
updates being generated.
:param dict **kwargs: incident attributes to update.
:calls: ``patch /pages/[page_id]/incidents/[incident_id].json``
        :return: Dictionary that supports attribute-style access and represents the updated Incident resource.
        :rtype: dict
"""
OPTS_KEYS_TO_PERSIST = [
'name',
'status',
'message',
'wants_twitter_update',
'impact_override',
'component_ids']
if not kwargs:
            raise Exception('attributes for Incident are missing')
attributes = dict((k, v) for k, v in kwargs.iteritems()
if k in OPTS_KEYS_TO_PERSIST)
        _, _, incident = self.http_client.patch(
            "/pages/{page_id}/incidents/{incident_id}.json".format(
                page_id=self.page_id, incident_id=incident_id), container=self.container, body=attributes)
        return incident
class SubscribersService(object):
"""
:class:`statuspageio.SubscribersService` is used by :class:`statuspageio.Client` to make
actions related to Subscriber resource.
Normally you won't instantiate this class directly.
"""
OPTS_KEYS_TO_PERSIST = ['name', 'description', 'group_id', 'status']
def __init__(self, http_client, page_id):
"""
:param :class:`statuspageio.HttpClient` http_client: Pre configured high-level http client.
"""
self.__http_client = http_client
self.page_id = page_id
self.container = 'subscriber'
@property
def http_client(self):
return self.__http_client
def list(self):
"""
List subscribers
Lists all of the current subscribers
:calls: ``get /pages/[page_id]/subscribers.json``
        :return: Dictionary that supports attribute-style access and represents the Subscriber resources.
:rtype: dict
"""
_, _, subscribers = self.http_client.get(
'/pages/{page_id}/subscribers.json'.format(page_id=self.page_id))
return subscribers
def create(self, **kwargs):
"""
Create a subscriber
:calls: ``post pages/{page_id}/subscribers.json``
:param dict **kwargs: subscriber attributes to update.
        :return: Dictionary that supports attribute-style access and represents the created Subscriber resource.
:rtype: dict
"""
OPTS_KEYS_TO_PERSIST = [
'email',
'phone_number',
'phone_country',
'endpoint',
'skip_confirmation_notification',
'page_access_user']
if not kwargs:
raise Exception('attributes are missing')
attributes = dict((k, v) for k, v in kwargs.iteritems()
if k in OPTS_KEYS_TO_PERSIST)
_, _, subscriber = self.http_client.post(
'/pages/{page_id}/subscribers.json'.format(
page_id=self.page_id), container=self.container, body=attributes)
return subscriber
def delete(self, subscriber_id=None):
"""
        Delete a subscriber
        :calls: ``delete pages/{page_id}/subscribers/{subscriber_id}.json``
        :param subscriber_id: Unique identifier of the subscriber to delete.
        :return: status code
:rtype: int
"""
status_code, _, _ = self.http_client.delete(
"/pages/{page_id}/subscribers/{subscriber_id}.json".format(
page_id=self.page_id, subscriber_id=subscriber_id))
return status_code
class MetricsService(object):
"""
:class:`statuspageio.MetricsService` is used by :class:`statuspageio.Client` to make
actions related to Metrics resource.
Normally you won't instantiate this class directly.
"""
def __init__(self, http_client, page_id):
"""
:param :class:`statuspageio.HttpClient` http_client: Pre configured high-level http client.
"""
self.__http_client = http_client
self.page_id = page_id
self.container = 'metric'
@property
def http_client(self):
return self.__http_client
def list_available(self):
"""
List available metric providers
:calls: ``get /metrics_providers.json``
        :return: Dictionary that supports attribute-style access and represents the available metric providers.
:rtype: dict
"""
_, _, providers = self.http_client.get('/metrics_providers.json')
return providers
def list_linked(self):
"""
List linked metric providers
:calls: ``get /pages/[page_id]/metrics_providers.json``
        :return: Dictionary that supports attribute-style access and represents the linked metric providers.
:rtype: dict
"""
_, _, providers = self.http_client.get(
'/pages/{page_id}/metrics_providers.json'.format(page_id=self.page_id))
return providers
def list_metrics_for_provider(self, provider_id=None):
"""
List metrics for a linked metric provider
        :param provider_id: The ID of the provider you are looking up
        :calls: ``/pages/{page_id}/metrics_providers/{metrics_provider_id}/metrics.json``
        :return: Dictionary that supports attribute-style access and represents the metrics for the given provider.
:rtype: dict
"""
_, _, metrics = self.http_client.get(
'/pages/{page_id}/metrics_providers/{metrics_provider_id}/metrics.json'.format(
page_id=self.page_id, metrics_provider_id=provider_id))
return metrics
def create(self, provider_id=None, **kwargs):
"""
Create a custom metric
:calls: ``post /pages/[page_id]/metrics_providers/[metrics_provider_id]/metrics.json``
:param provider_id: The id of the custom provider or 'self' from the available providers list
        :param dict **kwargs: metric attributes to create.
        :return: Dictionary that supports attribute-style access and represents the created Metric resource.
:rtype: dict
"""
OPTS_KEYS_TO_PERSIST = [
'name',
'suffix',
'display',
'tooltip_description',
'y_axis_min',
'y_axis_max',
'decimal_places']
if not kwargs:
raise Exception('attributes are missing')
attributes = dict((k, v) for k, v in kwargs.iteritems()
if k in OPTS_KEYS_TO_PERSIST)
_, _, metric = self.http_client.post(
'/pages/{page_id}/metrics_providers/{metrics_provider_id}/metrics.json'.format(
page_id=self.page_id, metrics_provider_id=provider_id), container=self.container, body=attributes)
return metric
def submit_data(self, metric_id=None, **kwargs):
"""
        Submit data for a custom metric
        :calls: ``post /pages/{page_id}/metrics/{metric_id}/data.json``
        :param metric_id: The id of the custom metric.
        :param dict **kwargs: metric data attributes to submit.
        :return: Dictionary that supports attribute-style access and represents the submitted metric data.
:rtype: dict
"""
OPTS_KEYS_TO_PERSIST = ['timestamp', 'value']
if not kwargs:
raise Exception('attributes are missing')
attributes = dict((k, v) for k, v in kwargs.iteritems()
if k in OPTS_KEYS_TO_PERSIST)
_, _, metric = self.http_client.post(
'/pages/{page_id}/metrics/{metric_id}/data.json'.format(
page_id=self.page_id, metric_id=metric_id), container='data', body=attributes)
return metric
def delete_all_data(self, metric_id=None):
"""
Delete All Metric Data
:calls: ``delete /pages/[page_id]/metrics/[metric_id]/data.json``
:param metric_id: The id of the custom metric.
        :return: Status code of the delete request.
        :rtype: int
"""
        status_code, _, _ = self.http_client.delete(
            "/pages/{page_id}/metrics/{metric_id}/data.json".format(
                page_id=self.page_id, metric_id=metric_id))
        return status_code
def delete(self, metric_id=None):
"""
Delete Custom Metric
:calls: ``delete /pages/[page_id]/metrics/[metric_id].json``
:param metric_id: The id of the custom metric.
:return: status code.
:rtype: int
"""
        status_code, _, _ = self.http_client.delete(
            "/pages/{page_id}/metrics/{metric_id}.json".format(
                page_id=self.page_id, metric_id=metric_id))
        return status_code
class UsersService(object):
"""
:class:`statuspageio.UsersService` is used by :class:`statuspageio.Client` to make
actions related to Users resource.
Normally you won't instantiate this class directly.
"""
def __init__(self, http_client, organization_id):
"""
:param :class:`statuspageio.HttpClient` http_client: Pre configured high-level http client.
"""
self.__http_client = http_client
self.organization_id = organization_id
self.container = 'user'
@property
def http_client(self):
        # TODO: Review. I did not want to have to set the organization_id all
        # the time as many people may not want to manage users.
if not self.organization_id:
raise ConfigurationError(
                'No organization_id provided. '
'You are unable to manage users. Set your organization_id during client initialization using: '
'"statuspageio.Client(organization_id= <YOUR_PERSONAL_page_id>)"')
return self.__http_client
def list(self):
"""
List all users
:calls: ``get organizations/[organization_id]/users.json``
        :return: Dictionary that supports attribute-style access and represents the User resources.
:rtype: dict
"""
_, _, users = self.http_client.get(
'/organizations/{organization_id}/users.json'.format(
organization_id=self.organization_id), container=self.container)
return users
def create(self, **kwargs):
"""
Create a user
:calls: ``post /organizations/[organization_id]/users.json``
:param dict **kwargs: Users attributes to create.
        :return: Dictionary that supports attribute-style access and represents the created User resource.
:rtype: dict
"""
OPTS_KEYS_TO_PERSIST = ['email', 'password', 'first_name', 'last_name']
if not kwargs:
raise Exception('attributes are missing')
attributes = dict((k, v) for k, v in kwargs.iteritems()
if k in OPTS_KEYS_TO_PERSIST)
_, _, user = self.http_client.post(
'/organizations/{organization_id}/users.json'.format(
organization_id=self.organization_id), container=self.container, body=attributes)
return user
def delete(self, user_id=None):
"""
Delete a User
:calls: ``delete organizations/[organization_id]/users/[user_id].json``
:param user_id: The id of the user to delete.
:return: status code.
:rtype: int
"""
        status_code, _, _ = self.http_client.delete(
            "/organizations/{organization_id}/users/{user_id}.json".format(
                organization_id=self.organization_id, user_id=user_id))
        return status_code
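# Illustrative sketch (not part of the original module): wiring the services above to a
# pre-configured HttpClient by hand. `http_client` and `page_id` are placeholders passed
# in by the caller; in normal use these objects are created by statuspageio.Client. The
# assumption that the create response body is a dict carrying an 'id' key is mine.
def _example_direct_service_usage(http_client, page_id):
    pages = PageService(http_client, page_id)
    components = ComponentsService(http_client, page_id)
    incidents = IncidentsService(http_client, page_id)
    page = pages.get()
    component = components.create(name='API', description='Public API',
                                  status='operational')
    incidents.create(name='API degradation',
                     status='investigating',
                     message='We are looking into elevated error rates.',
                     component_ids=[component['id']])
    return page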
|
import os
import pandas as pd
import numpy as np
import logging
import wget
import time
import pickle
from src.features import preset
from src.features import featurizer
from src.data.utils import LOG
from matminer.data_retrieval.retrieve_MP import MPDataRetrieval
from tqdm import tqdm
from pathlib import Path
from src.data.get_data_MP import data_MP
import dotenv
def featurize_by_material_id(material_ids: np.array,
featurizerObject: featurizer.extendedMODFeaturizer,
MAPI_KEY: str,
writeToFile: bool = True) -> pd.DataFrame:
""" Run all of the preset featurizers on the input dataframe.
Arguments:
df: the input dataframe with a `"structure"` column
containing `pymatgen.Structure` objects.
Returns:
The featurized DataFrame.
"""
def apply_featurizers(criterion, properties, mpdr, featurizerObject):
LOG.info("Downloading dos and bandstructure objects..")
timeDownloadStart = time.time()
df_portion = mpdr.get_dataframe(criteria=criterion, properties=properties)
timeDownloadEnd = time.time()
LOG.info(df_portion)
df_time, df_portion = featurizerObject.featurize(df_portion)
df_time["download_objects"] = [timeDownloadEnd-timeDownloadStart]
return df_time, df_portion
properties = ["material_id","full_formula", "bandstructure", "dos", "structure"]
mpdr = MPDataRetrieval(MAPI_KEY)
steps = 1
leftover = len(material_ids)%steps
df = pd.DataFrame({})
df_timers = pd.DataFrame({})
for i in tqdm(range(0,len(material_ids),steps)):
portionReturned = True
if not (i+steps > len(material_ids)):
LOG.info(list(material_ids[i:i+steps]))
criteria = {"task_id":{"$in":list(material_ids[i:i+steps])}}
while (portionReturned):
try:
df_time, df_portion = apply_featurizers(criteria, properties, mpdr, featurizerObject)
portionReturned = False
                except Exception:
                    LOG.info("Except - try again.")
# Add ID to recognize afterwards
df_portion["material_id"] = material_ids[i:i+steps]
df = pd.concat([df,df_portion])
df_timers = pd.concat([df_timers,df_time])
LOG.info("CURRENT SHAPE:{}".format(df.shape))
if writeToFile:
df.to_pickle(Path(__file__).resolve().parents[2] / "data" / "raw" / "featurizer" / "featurized.pkl")
df_timers.to_csv(Path(__file__).resolve().parents[2] / "data" / "raw" / "featurizer" / "timing.csv")
if (leftover):
LOG.info(list(material_ids[i:i+leftover]))
criteria = {"task_id":{"$in":list(material_ids[i:i+leftover])}}
df_time, df_portion = apply_featurizers(criteria, properties, mpdr, featurizerObject)
df_portion["material_id"] = material_ids[i:i+leftover]
df = pd.concat([df,df_portion])
df_timers = pd.concat([df_timers,df_time])
if writeToFile:
df.to_pickle(Path(__file__).resolve().parents[2] / "data" / "raw" / "featurizer" / "featurized.pkl")
df_timers.to_csv(Path(__file__).resolve().parents[2] / "data" / "raw" / "featurizer" / "timing.csv")
return df
def run_featurizer():
""" Function used to run, and rerun a featurization process of a large amount of entries.
As default, we use the initial query from Materials Project. Initialised by
"make features"
If program stops, identify mistake (most likely an error in Materials Project
(add to filterIDs)), remove raw data in Materials Project data folder, and
rerun with "make features" command.
"""
project_dir = Path(__file__).resolve().parents[2]
data_dir = project_dir / "data"
dotenv.load_dotenv(project_dir / ".env")
MAPI_KEY = os.getenv("MAPI_KEY")
MP = data_MP(API_KEY=MAPI_KEY)
entries = MP.get_dataframe()
material_ids = entries["material_id"]
del entries, MP
featurizerObject = preset.PRESET_HEBNES_2021()
if Path(data_dir / "raw" / "featurizer" / "featurized.pkl").is_file():
# If errors met, just rerun and this if-test will run.
LOG.info("In-progress featurized data identified. Reading now...")
entries_featurized = pd.read_pickle(data_dir / "raw" / "featurizer" / "featurized.pkl")
time_featurized = pd.read_csv(data_dir / "raw" / "featurizer" / "timing.csv")
LOG.info("Last featurized MPID: {}".format(entries_featurized.index[-1]))
howFar = material_ids[material_ids == entries_featurized.index[-1]].index.values
# Test if mpid index is the same, true if using the same dataset
assert material_ids[howFar[0]] == entries_featurized.index[-1], "Are you sure this is the same dataset as earlier?"
LOG.info("Index: {}".format(howFar))
LOG.info("Preparing for new featurized data starting with MPID: {}".format(material_ids[howFar[0]]))
entries_featurized.to_pickle(data_dir / "raw" / "featurizer" / Path("featurized-upto-" + str(howFar[0]) + ".pkl"))
time_featurized.to_csv(data_dir / "raw" / "featurizer" / Path("timing-upto-" + str(howFar[0]) + ".csv"))
del entries_featurized, time_featurized
df = featurize_by_material_id(material_ids[howFar[0]+1:], featurizerObject, MAPI_KEY)
else:
# First time running featurizers.
        df = featurize_by_material_id(material_ids, featurizerObject, MAPI_KEY)
def updateNumberFeaturizedEntries(entries:pd.DataFrame,
featurizedEntries:pd.DataFrame,
MAPI_KEY: str) -> pd.DataFrame:
""" Function that checks if new entries that have not been featurized,
and if true will featurize.
"""
if entries.shape[0] > featurizedEntries.shape[0]:
# Find new entries
newEntries = entries.material_id[~entries.material_id.isin(featurizedEntries.material_id.values)]
# Define featurizer preset
featurizerObject = preset.PRESET_HEBNES_2021()
# Update with new entries
newEntries = featurize_by_material_id(newEntries, featurizerObject, MAPI_KEY, writeToFile=False)
# Add new entries
featurizedEntries = pd.concat([featurizedEntries, newEntries])
elif entries.shape[0] < featurizedEntries.shape[0]:
featurizedEntries = featurizedEntries[featurizedEntries.material_id.isin(entries.material_id.values)]
featurizedEntries = featurizedEntries.reset_index(drop=True)
if entries.shape[0] == featurizedEntries.shape[0]:
print("Updated featurized entries, shape: {}".format(featurizedEntries.shape))
assert (entries.shape[0] == featurizedEntries.shape[0]), "Not equal length, {}!={}"\
.format(entries.shape[0], featurizedEntries.shape[0])
return featurizedEntries
def testUpdateFeaturisedEntries(entries: pd.DataFrame,
featurizedEntries: pd.DataFrame,
MAPI_KEY: str):
""" Test to see if updateNumberFeaturizedEntries work as intended
Removes one entry and checks if the resulting featurized entry is equal to the
removed one.
"""
# Choosing an arbitrary featurizedEntry
suddenlyLostEntry = featurizedEntries.iloc[0]
# Woops! Where did it go?
featurizedEntries = featurizedEntries[1:]
# Puh, we can get it back!
featurizedEntries = updateNumberFeaturizedEntries(entries, featurizedEntries, MAPI_KEY)
# But is it back, though?
assert featurizedEntries.iloc[0].equals(suddenlyLostEntry)
# Yey, it's back!
print("Test passed.")
def does_file_exist(filepath:Path)-> bool:
"""
Checks if file path exists.
"""
if os.path.exists(filepath):
LOG.info("Data path detected:\n{}\.".format(filepath))
return True
else:
LOG.info("Data path\n{}\nnot detected. Downloading now...".format(filepath))
return False
def get_featurized_data()-> pd.DataFrame:
""" A function that checks if featurized data is present in folder, if not,
will download and store the data.
Returns a dataframe
"""
featurized_data_path = Path(__file__).resolve().parents[2] / \
"data" / "interim" / "featurized" \
/ "featurized-11-04-2021.pkl"
if not does_file_exist(featurized_data_path):
# Add unique url id for figshare endpoint
url = "https://ndownloader.figshare.com/files/26777699"
file = wget.download(url)
# Read and load pkl
with open(file, 'rb') as f:
df = pickle.load(f)
        # Make parent directory if not present
        featurized_data_path.parent.mkdir(parents=True, exist_ok=True)
df.to_pickle(featurized_data_path)
os.remove(file)
else:
LOG.info("Reading data..")
df = pd.read_pickle(featurized_data_path)
return df
def main():
get_featurized_data()
LOG.info("Done")
if __name__ == '__main__':
#main()
run_featurizer()
|
DEBUG = True
DATABASES = dict(
default=dict(
ENGINE='django.db.backends.sqlite3',
NAME='example.db',
USER='',
PASSWORD='',
HOST='',
PORT='',
)
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.staticfiles',
'django_pony_forms',
'django_bootstrap3_form',
'test_app',
)
MIDDLEWARE = [
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
]
ROOT_URLCONF = 'project.urls'
STATIC_URL = '/static/'
SECRET_KEY = 'test_key'
TEMPLATES = [
dict(
BACKEND='django.template.backends.django.DjangoTemplates',
APP_DIRS=True,
OPTIONS=dict(debug=DEBUG)
)
]
ALLOWED_HOSTS = ['*']
|
from enum import Enum, auto
from typing import Any, Callable, Dict, List, Optional, Union
from ray.autoscaler._private.cli_logger import cli_logger
class CreateClusterEvent(Enum):
"""Events to track in ray.autoscaler.sdk.create_or_update_cluster.
Attributes:
up_started : Invoked at the beginning of create_or_update_cluster.
ssh_keypair_downloaded : Invoked when the ssh keypair is downloaded.
        cluster_booting_started : Invoked when the cluster booting starts.
acquiring_new_head_node : Invoked before the head node is acquired.
head_node_acquired : Invoked after the head node is acquired.
ssh_control_acquired : Invoked when the node is being updated.
run_initialization_cmd : Invoked before all initialization
commands are called and again before each initialization command.
run_setup_cmd : Invoked before all setup commands are
called and again before each setup command.
start_ray_runtime : Invoked before ray start commands are run.
start_ray_runtime_completed : Invoked after ray start commands
are run.
cluster_booting_completed : Invoked after cluster booting
is completed.
"""
up_started = auto()
ssh_keypair_downloaded = auto()
cluster_booting_started = auto()
acquiring_new_head_node = auto()
head_node_acquired = auto()
ssh_control_acquired = auto()
run_initialization_cmd = auto()
run_setup_cmd = auto()
start_ray_runtime = auto()
start_ray_runtime_completed = auto()
cluster_booting_completed = auto()
class _EventSystem:
"""Event system that handles storing and calling callbacks for events.
Attributes:
callback_map (Dict[str, List[Callable]]) : Stores list of callbacks
for events when registered.
"""
def __init__(self):
self.callback_map = {}
def add_callback_handler(
self,
event: str,
callback: Union[Callable[[Dict], None], List[Callable[[Dict], None]]],
):
"""Stores callback handler for event.
Args:
event: Event that callback should be called on. See
CreateClusterEvent for details on the events available to be
registered against.
callback (Callable[[Dict], None]): Callable object that is invoked
when specified event occurs.
"""
if event not in CreateClusterEvent.__members__.values():
cli_logger.warning(
f"{event} is not currently tracked, and this"
" callback will not be invoked."
)
self.callback_map.setdefault(event, []).extend(
[callback] if type(callback) is not list else callback
)
def execute_callback(
self, event: CreateClusterEvent, event_data: Optional[Dict[str, Any]] = None
):
"""Executes all callbacks for event.
Args:
event: Event that is invoked. See CreateClusterEvent
for details on the available events.
event_data (Dict[str, Any]): Argument that is passed to each
callable object stored for this particular event.
"""
if event_data is None:
event_data = {}
event_data["event_name"] = event
if event in self.callback_map:
for callback in self.callback_map[event]:
callback(event_data)
def clear_callbacks_for_event(self, event: str):
"""Clears stored callable objects for event.
Args:
event: Event that has callable objects stored in map.
See CreateClusterEvent for details on the available events.
"""
if event in self.callback_map:
del self.callback_map[event]
global_event_system = _EventSystem()
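# Illustrative sketch (not part of the original module): registering a callback for a
# cluster creation event and firing it through the module-level event system. The
# "head_node_ip" value is a made-up placeholder payload.
def _example_event_callback_usage():
    def on_booting_completed(event_data):
        # execute_callback always stores the triggering event under "event_name".
        print("cluster ready:", event_data["event_name"])

    global_event_system.add_callback_handler(
        CreateClusterEvent.cluster_booting_completed, on_booting_completed
    )
    global_event_system.execute_callback(
        CreateClusterEvent.cluster_booting_completed, {"head_node_ip": "10.0.0.1"}
    )
    global_event_system.clear_callbacks_for_event(
        CreateClusterEvent.cluster_booting_completed
    )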
|
import picamera
import datetime
import os
delcount = 2
def check_fs():
global delcount
st = os.statvfs('/')
pct = 100 - st.f_bavail * 100.0 / st.f_blocks
print pct, "percent full"
if pct > 90:
# less than 10% left, delete a few minutes
files = os.listdir('.')
files.sort()
for i in range(0, delcount):
print "deleting", files[i]
os.remove(files[i])
delcount += 1 # keep increasing until we get under 90%
else:
delcount = 2
with picamera.PiCamera() as camera:
try:
check_fs()
tstamp = datetime.datetime.utcnow().strftime('%Y%m%d%H%M%S%f')
print "recording", tstamp
camera.start_recording(tstamp + '.h264')
camera.wait_recording(60)
while True:
check_fs()
tstamp = datetime.datetime.utcnow().strftime('%Y%m%d%H%M%S%f')
print "recording", tstamp
camera.split_recording(tstamp + '.h264')
camera.wait_recording(60)
except KeyboardInterrupt:
print "quitting"
camera.stop_recording()
|
from classes.pokemon import *
from classes.pokemon_planta import *
from classes.pokemon_electrico import *
from classes.pokemon_agua import *
from classes.pokemon_fuego import *
from combate import *
from listas_datos import *
import pandas as pd
lista_pokemon = pd.read_csv("pokemon_stats (1).csv", header=0)
def elegirpokemon(nombre, pokemon):
    idx = pokemon_name.index(nombre)
    pokemon = Pokemon(ID[idx], pokemon_name[idx], type1[idx], type2[idx], hp[idx],
                      attack[idx], defense[idx], sp_attack[idx], sp_defense[idx],
                      speed[idx], total[idx], image[idx])
    return pokemon
entrenador1_pokemon1 = None
entrenador1_pokemon1 = elegirpokemon("Charizard", entrenador1_pokemon1)
entrenador1_pokemon1.get_type1()
# note: the return value of elegirpokemon must be assigned, otherwise entrenador1_pokemon1 stays None
|
# Copyright 2017 SAP SE
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ssl
import sys
from bisect import insort
from collections import defaultdict
from contextlib import contextmanager
import attr
import oslo_messaging
import six
from neutron_lib.agent import topics
from oslo_config import cfg
from oslo_log import log as logging
from oslo_service import service
from ucsmsdk.mometa.vnic.VnicEtherIf import VnicEtherIf
from ucsmsdk.ucshandle import UcsHandle
from networking_ucsm_bm import constants
from networking_ucsm_bm._i18n import _
from networking_ucsm_bm.plugins.ml2.drivers.cisco.ucsm_bm import exceptions as cexc
from networking_ucsm_bm.plugins.ml2.drivers.cisco.ucsm_bm.config import UcsmBmConfig
from neutron.common import config as common_config
from neutron.conf import service as n_service
from neutron.plugins.ml2.drivers.agent import _agent_manager_base as amb # noqa
from neutron.plugins.ml2.drivers.agent import _common_agent as ca # noqa
try:
from neutron.common import profiler
except ImportError:
profiler = None
import eventlet
# oslo_messaging/notify/listener.py documents that monkeypatching is required
eventlet.monkey_patch()
###
# Basing it off CommonAgentManagerBase imposes some requirements,
# which may make the code look unnecessarily complicated:
# - The manager needs to return a device in the known devices,
#   before ever receiving a port_binding request, otherwise it will be filtered out,
# as the base assumes, that it is not intended for this manager
# => We know only MACs, so we return them
# (CiscoUcsmBareMetalManager#get_all_devices)
# For getting the VLAN, we need to specify the device and the binding host.
# - The binding host is the uuid of the ironic bare-metal node, which we do not
# know. But prior the binding, we receive a port_update, which allows us to
# remember that association.
#   It also gives us the UUID of the port, which we need to remember.
# We do not receive the segment for our agent here though.
# - Ironic creates two ports with the same MAC, one in the management network,
# the other one in the tenant network. Querying the device for the MAC
# might give you either, so we have to query the port for its UUID
# (which we have stored in CiscoUcsmBareMetalRpc#port_update)
# - Finally, as we query the device via the port UUID, the attribute 'device'
# will be the UUID, the base expects it to be the MAC, as we return them in
# CiscoUcsmBareMetalManager#get_all_devices.
# => We have to change the 'device' field back to a MAC in
# (AgentLoop#_get_devices_details_list)
ssl._create_default_https_context = ssl._create_unverified_context # noqa
LOG = logging.getLogger(__name__)
class CiscoUcsmBareMetalRpc(amb.CommonAgentManagerRpcCallBackBase):
target = oslo_messaging.Target(version='1.4')
def security_groups_rule_updated(self, context, **kwargs):
pass
def security_groups_member_updated(self, context, **kwargs):
pass
def security_groups_provider_updated(self, context, **kwargs):
pass
def network_delete(self, context, **kwargs):
pass
def port_update(self, context, **kwargs):
port = kwargs['port']
LOG.debug("port_update received for port %s ", port)
self.agent.mgr.set_mapping(port)
self.updated_devices.add(port['mac_address'])
@attr.s
class _PortInfo(object):
port_id = attr.ib(default=None)
ucsm_ip = attr.ib(default=None)
binding_host_id = attr.ib(default=None)
def for_all_hosts(f):
    @six.wraps(f)
    def wrapper(self, *args, **kwds):
for ucsm_ip in self.ucsm_conf.get_all_ucsm_ips():
with self.ucsm_connect_disconnect(ucsm_ip) as handle:
kwds['handle'] = handle
yield ucsm_ip, f(self, *args, **kwds)
return wrapper
class CiscoUcsmBareMetalManager(amb.CommonAgentManagerBase):
def get_agent_api(self, **kwargs):
pass
def __init__(self, config):
super(amb.CommonAgentManagerBase, self).__init__()
self.ucsm_conf = config
self._ports = defaultdict(_PortInfo)
self._mac_blocks = self._discover_mac_blocks()
self._discover_devices()
@for_all_hosts
def get_all(self, class_id, path=None, handle=None):
for device in handle.query_classid(class_id=class_id):
yield device
def _discover_mac_blocks(self, path=None):
blocks = []
for first, last, ucsm_ip in self.get_all_mac_blocks(path):
insort(blocks, (first, last, ucsm_ip))
return blocks
def get_all_mac_blocks(self, path=None):
macpool_block_id = "MacpoolBlock"
for ucsm_ip, blocks in self.get_all(macpool_block_id):
for block in blocks:
yield block.r_from.lower(), block.to.lower(), ucsm_ip
def get_rpc_callbacks(self, context, agent, sg_agent):
return CiscoUcsmBareMetalRpc(context, agent, sg_agent)
def ensure_port_admin_state(self, device, admin_state_up):
pass
def set_mapping(self, port):
port_id = port['id']
mac = port['mac_address']
binding_host_id = port['binding:host_id']
LOG.debug("Bound {} to {}".format(mac, binding_host_id))
info = self._ports[mac.lower()]
info.port_id = port_id
info.binding_host_id = binding_host_id
def get_agent_configurations(self):
# The very least, we have to return the physical networks as keys
# of the bridge_mappings
return {
'physical_networks': self.ucsm_conf.get_networks(), 'mac_blocks': [
(block[0], block[1]) for block in self._mac_blocks]}
def get_agent_id(self):
return 'cisco-ucs-bm-agent-%s' % cfg.CONF.host
def get_all_devices(self):
return set(six.iterkeys(self._ports))
def get_devices_modified_timestamps(self, devices):
return {}
def get_extension_driver_type(self):
return 'ucsm_bm'
def get_port_info(self, device):
if device in self._ports:
return self._ports[device.lower()]
def get_rpc_consumers(self):
consumers = [[topics.PORT, topics.UPDATE],
[topics.NETWORK, topics.DELETE]]
return consumers
def plug_interface(self, network_id, network_segment,
device, device_owner):
LOG.debug("Start {}".format(device))
vlan_id = network_segment.segmentation_id
info = self._ports.get(device.lower())
if not info or not info.ucsm_ip:
LOG.debug("Unknown device {}".format(device))
return False
with self.ucsm_connect_disconnect(info.ucsm_ip) as handle:
vlans = self._get_vlan(handle, vlan_id)
if len(vlans) != 1:
LOG.error(
"Cannot uniquely identify vlan {} for {}".format(
vlan_id, device))
return False
vlan = vlans[0]
filter = '(addr, "{}", type="eq")'.format(device)
for eth in handle.query_classid('VnicEther', filter_str=filter):
exists = False
to_delete = []
for eth_if in handle.query_children(eth, class_id='vnicEtherIf'):
if eth_if.name != vlan.name:
to_delete.append(eth_if)
else:
exists = True
if to_delete:
LOG.debug("Removing {}".format(
[eth_if.dn for eth_if in to_delete]))
for mo in to_delete:
                        handle.remove_mo(mo)
if exists:
LOG.debug("Already bound {}".format(vlan.name))
else:
LOG.debug("Adding {}".format(vlan.name))
mo = VnicEtherIf(eth, default_net="yes", name=vlan.name)
handle.add_mo(mo, modify_present=True)
handle.commit()
LOG.debug("Done")
return True
def _get_vlan(self, handle, vlan_id):
filter = '(id, "{}", type="eq")'.format(vlan_id)
filter += ' and (transport, "ether", type="eq")'
filter += ' and (if_type, "virtual", type="eq")'
return handle.query_classid(class_id='FabricVlan',
filter_str=filter)
def setup_arp_spoofing_protection(self, device, device_details):
pass
def delete_arp_spoofing_protection(self, devices):
pass
def delete_unreferenced_arp_protection(self, current_devices):
pass
@contextmanager
def ucsm_connect_disconnect(self, ucsm_ip):
handle = self.ucs_manager_connect(ucsm_ip)
try:
yield handle
finally:
self.ucs_manager_disconnect(handle, ucsm_ip)
def ucs_manager_connect(self, ucsm_ip):
"""Connects to a UCS Manager."""
username, password = self.ucsm_conf.get_credentials_for_ucsm_ip(
ucsm_ip)
if not username:
LOG.error(_('UCS Manager network driver failed to get login '
'credentials for UCSM %s'), ucsm_ip)
return None
try:
handle = UcsHandle(ucsm_ip, username, password)
handle.login()
except Exception as e:
# Raise a Neutron exception. Include a description of
# the original exception.
raise cexc.UcsmConnectFailed(ucsm_ip=ucsm_ip, exc=e)
return handle
def ucs_manager_disconnect(self, handle, ucsm_ip):
"""Disconnects from the UCS Manager.
After the disconnect, the handle associated with this connection
is no longer valid.
"""
try:
handle.logout()
except Exception as e:
# Raise a Neutron exception. Include a description of
# the original exception.
raise cexc.UcsmDisconnectFailed(ucsm_ip=ucsm_ip, exc=e)
def _discover_devices(self):
class_id = "VnicEther"
for ucsm_ip in self.ucsm_conf.get_all_ucsm_ips():
vnic_paths = self.ucsm_conf.vnic_paths_dict[ucsm_ip]
with self.ucsm_connect_disconnect(ucsm_ip) as handle:
for vnic_path in vnic_paths:
filter = '(dn,"{}.*", type="re")'.format(vnic_path)
for vnicEther in handle.query_classid(class_id=class_id, filter_str=filter):
self._ports[vnicEther.addr.lower()].ucsm_ip = ucsm_ip
class AgentLoop(ca.CommonAgentLoop):
def _get_devices_details_list(self, devices):
devices_by_host = defaultdict(list)
for device in devices:
port_info = self.mgr.get_port_info(device)
if port_info and port_info.binding_host_id and port_info.port_id:
devices_by_host[port_info.binding_host_id].append(
port_info.port_id)
device_details = []
for host, devices_on_host in six.iteritems(devices_by_host):
LOG.debug("Querying {} for {}".format(devices_on_host, host))
for device in self.plugin_rpc.get_devices_details_list(
self.context, devices_on_host, self.agent_id, host=host):
mac_address = device.get('mac_address')
if mac_address:
device['device'] = mac_address
device_details.append(device)
LOG.debug("Found {}".format(device_details))
return device_details
def main():
common_config.init(sys.argv[1:])
common_config.setup_logging()
cfg.CONF.register_opts(n_service.RPC_EXTRA_OPTS)
if profiler:
profiler.setup(constants.AGENT_BINARY, cfg.CONF.host)
config = UcsmBmConfig()
manager = CiscoUcsmBareMetalManager(config)
polling_interval = cfg.CONF.AGENT.polling_interval
quitting_rpc_timeout = cfg.CONF.AGENT.quitting_rpc_timeout
agent = AgentLoop(manager, polling_interval,
quitting_rpc_timeout,
constants.AGENT_TYPE,
constants.AGENT_BINARY)
LOG.info(_("Agent initialized successfully, now running... "))
launcher = service.launch(cfg.CONF, agent)
launcher.wait()
|
from Prediction_Raw_Data_Validation.predictionDataValidation import Prediction_Data_validation
from DataTypeValidation_Insertion_Prediction.DataTypeValidationPrediction import dBOperation
from DataTransformation_Prediction.DataTransformationPrediction import dataTransformPredict
from application_logging import logger
class pred_validation:
def __init__(self,path):
self.raw_data = Prediction_Data_validation(path)
self.dataTransform = dataTransformPredict()
self.dBOperation = dBOperation()
self.file_object = open("Prediction_Logs/Prediction_Log.txt", 'a+')
self.log_writer = logger.App_Logger()
def prediction_validation(self):
try:
self.log_writer.log(self.file_object,'Start of Validation on files for prediction!!')
#extracting values from prediction schema
LengthOfDateStampInFile,LengthOfTimeStampInFile,column_names,noofcolumns = self.raw_data.valuesFromSchema()
#getting the regex defined to validate filename
regex = self.raw_data.manualRegexCreation()
#validating filename of prediction files
self.raw_data.validationFileNameRaw(regex,LengthOfDateStampInFile,LengthOfTimeStampInFile)
#validating column length in the file
self.raw_data.validateColumnLength(noofcolumns)
#validating if any column has all values missing
self.raw_data.validateMissingValuesInWholeColumn()
self.log_writer.log(self.file_object,"Raw Data Validation Complete!!")
self.log_writer.log(self.file_object,("Starting Data Transforamtion!!"))
#replacing blanks in the csv file with "Null" values to insert in table
self.dataTransform.addQuotesToStringValuesInColumn()
self.log_writer.log(self.file_object,"DataTransformation Completed!!!")
self.log_writer.log(self.file_object,"Creating Prediction_Database and tables on the basis of given schema!!!")
#create database with given name, if present open the connection! Create table with columns given in schema
self.dBOperation.createTableDb('Prediction',column_names)
self.log_writer.log(self.file_object,"Table creation Completed!!")
self.log_writer.log(self.file_object,"Insertion of Data into Table started!!!!")
#insert csv files in the table
self.dBOperation.insertIntoTableGoodData('Prediction')
self.log_writer.log(self.file_object,"Insertion in Table completed!!!")
self.log_writer.log(self.file_object,"Deleting Good Data Folder!!!")
#Delete the good data folder after loading files in table
self.raw_data.deleteExistingGoodDataTrainingFolder()
self.log_writer.log(self.file_object,"Good_Data folder deleted!!!")
self.log_writer.log(self.file_object,"Moving bad files to Archive and deleting Bad_Data folder!!!")
#Move the bad files to archive folder
self.raw_data.moveBadFilesToArchiveBad()
self.log_writer.log(self.file_object,"Bad files moved to archive!! Bad folder Deleted!!")
self.log_writer.log(self.file_object,"Validation Operation completed!!")
self.log_writer.log(self.file_object,"Extracting csv file from table")
#export data in table to csvfile
self.dBOperation.selectingDatafromtableintocsv('Prediction')
except Exception as e:
raise e
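# Illustrative sketch (not part of the original module): running the validation and
# transformation pipeline on a hypothetical batch folder. The path is a placeholder and
# the Prediction_Logs directory is assumed to exist.
if __name__ == "__main__":
    pred_val = pred_validation("Prediction_Batch_Files")
    pred_val.prediction_validation()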
|
import os
import site
import sys
import glob
from cx_Freeze import setup, Executable
siteDir = site.getsitepackages()[1]
includeDllPath = os.path.join(siteDir, "gnome")
# missingDll = glob.glob(includeDllPath + "\\" + '*.dll')
missingDLL = ['libffi-6.dll',
'libgirepository-1.0-1.dll',
'libgio-2.0-0.dll',
'libglib-2.0-0.dll',
'libintl-8.dll',
'libgmodule-2.0-0.dll',
'libgobject-2.0-0.dll',
'libzzz.dll',
'libwinpthread-1.dll',
'libgtk-3-0.dll',
'libgdk-3-0.dll',
'libcairo-gobject-2.dll',
'libfontconfig-1.dll',
'libxmlxpat.dll',
'libfreetype-6.dll',
'libpng16-16.dll',
'libgdk_pixbuf-2.0-0.dll',
'libjpeg-8.dll',
'libopenraw-7.dll',
'librsvg-2-2.dll',
'libpango-1.0-0.dll',
'libpangocairo-1.0-0.dll',
'libpangoft2-1.0-0.dll',
'libharfbuzz-gobject-0.dll',
'libpangowin32-1.0-0.dll',
'libwebp-4.dll',
'libatk-1.0-0.dll',
'libgnutls-26.dll',
'libproxy.dll',
'libp11-kit-0.dll',
]
includeFiles = []
for DLL in missingDLL:
includeFiles.append((os.path.join(includeDllPath, DLL), DLL))
#gtkLibs= ['etc','lib','share']
gtkLibs = ['lib\\gdk-pixbuf-2.0',
'lib\\girepository-1.0',
'share\\glib-2.0',
'lib\\gtk-3.0']
for lib in gtkLibs:
includeFiles.append((os.path.join(includeDllPath, lib), lib))
includeFiles.append(("includes"))
includeFiles.append(("LICENSE"))
base = None
if sys.platform == "win32":
base = "Win32GUI"
setup(
name="autoScanner",
author="SOAChishti",
version="1.1",
description="GUI of Twain and SANE API with auto scanning.",
options={'build_exe': {
'compressed': True,
'includes': ["gi"],
'excludes': ['wx', 'email', 'pydoc_data', 'curses'],
'packages': ["gi"],
'include_files': includeFiles
}},
executables=[
Executable(script = "autoScanner.py",
icon = "includes\\icon.ico",
base=base
)
]
)
|
import numpy as np
import pytest
from PermutationImportance.scoring_strategies import verify_scoring_strategy, VALID_SCORING_STRATEGIES, argmin_of_mean, indexer_of_converter
from PermutationImportance.error_handling import InvalidStrategyException
def test_valid_callable():
assert np.argmin == verify_scoring_strategy(np.argmin)
def test_invalid_strategy():
with pytest.raises(InvalidStrategyException):
verify_scoring_strategy("asdfasdfa")
def test_valid_string_strategy():
for key, value in VALID_SCORING_STRATEGIES.items():
assert value == verify_scoring_strategy(key)
def test_composed():
assert 2 == argmin_of_mean([np.array([1, 2]), np.array(
[2, 4]), np.array([0, 1]), np.array([10, 12])])
assert 3 == VALID_SCORING_STRATEGIES['argmax_of_mean']([np.array([1, 2]), np.array(
[2, 4]), np.array([0, 1]), np.array([10, 12])])
assert 2 == indexer_of_converter(np.argmin, np.mean)([np.array([1, 2]), np.array(
[2, 4]), np.array([0, 1]), np.array([10, 12])])
|
import os
import numpy as np
import cv2
import matplotlib.pyplot as plt
from clustering import cluster
def save_image_overlay(valid_image, valid_label):
assert len(valid_image.shape)==3 and len(valid_label.shape)==2, \
'input dimensions should be [h,w,c]'
num_unique = np.unique(valid_label)
blended = valid_image
for color_id, unique in enumerate(list(num_unique[1:])):
instance_ind = np.where(valid_label==unique)
alpha = np.zeros_like(valid_image)
alpha[instance_ind] = np.array([color_id*70, color_id*70, 255-color_id*50])
blended = cv2.addWeighted(blended, 1, alpha, 1, 0)
blended = cv2.cvtColor(blended, cv2.COLOR_RGB2BGR)
cv2.imwrite('overlayed_image.png', blended)
def evaluate_scatter_plot(log_dir, valid_pred, valid_label, feature_dim, param_string, step):
assert len(valid_pred.shape)==4 and len(valid_label.shape)==3, \
'input dimensions should be [b,h,w,c] and [b,h,w]'
assert valid_pred.shape[3]==feature_dim, 'feature dimension and prediction do not match'
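    # Scatter the predicted per-pixel embeddings of the first batch element,
    # one series per instance label, in 2-D or 3-D depending on feature_dim;
    # higher-dimensional embeddings are not plotted.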
fig = plt.figure() #plt.figure(figsize=(10,8))
if feature_dim==2:
#for i in range(valid_pred.shape[0]):
# plt.subplot(2,2,i+1)
# #valid_label = valid_label[0]
# #print 'valid_pred', valid_pred.shape
# #print 'valid_label', valid_label.shape
# num_unique = np.unique(valid_label[i])
num_unique = np.unique(valid_label[0])
for unique in list(num_unique):
instance_ind = np.where(valid_label[0]==unique)
#print 'instance id', instance_ind
#print valid_pr[instance_ind].shape
x = valid_pred[0,:,:,0][instance_ind]
y = valid_pred[0,:,:,1][instance_ind]
plt.plot(x, y, 'o')
#plt.imshow(valid_label[i])
elif feature_dim==3:
#for i in range(valid_pred.shape[0]):
# ax = fig.add_subplot(2,2,i+1, projection='3d')
# #valid_pred = valid_pred[0]
# #valid_label = valid_label[0]
ax = fig.add_subplot(1,1,1, projection='3d')
num_unique = np.unique(valid_label[0])
colors = [(0., 0., 1., 0.05), 'g', 'r', 'c', 'm', 'y']
for color_id, unique in enumerate(list(num_unique)):
instance_ind = np.where(valid_label[0]==unique)
#print 'instance id', instance_ind
#print valid_pr[instance_ind].shape
x = valid_pred[0,:,:,0][instance_ind]
y = valid_pred[0,:,:,1][instance_ind]
z = valid_pred[0,:,:,2][instance_ind]
ax.scatter(x, y, z, c=colors[color_id])
elif feature_dim > 3:
plt.close(fig)
return None
plt.show()
# plt.savefig(os.path.join(log_dir, param_string, 'cluster_{}.png'.format(str(step).zfill(6))), bbox_inches='tight')
# plt.close(fig)
|
import numpy as np
from numpy import newaxis as na
from matplotlib import pyplot as plt
from os.path import join, dirname, isfile
from pyhsmm import models, distributions
from pyhsmm.util.general import sgd_passes, hold_out, get_file
from pyhsmm.util.text import progprint_xrange, progprint
np.random.seed(0)
datapath = str(join(dirname(__file__),'svi_data.gz'))
### load data
if not isfile(datapath):
print('download svi_data.gz data and put it in examples/')
print('https://github.com/mattjj/example_data')
import sys; sys.exit(1)
print('loading data...')
alldata = np.loadtxt(datapath)
allseqs = np.array_split(alldata,250)
datas, heldout = hold_out(allseqs,0.05)
training_size = sum(data.shape[0] for data in datas)
print('...done!')
print('%d total frames' % sum(data.shape[0] for data in allseqs))
print('split into %d training and %d test sequences' % (len(datas),len(heldout)))
### inference!
Nmax = 20
obs_hypparams = dict(mu_0=np.zeros(2),sigma_0=np.eye(2),kappa_0=0.2,nu_0=5)
hmm = models.HMM(
obs_distns=[distributions.Gaussian(**obs_hypparams) for i in range(Nmax)],
alpha=10.,init_state_concentration=1.)
scores = []
sgdseq = sgd_passes(tau=0,kappa=0.7,datalist=datas)
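# Each SVI step feeds one minibatch to meanfield_sgdstep together with the
# fraction of the training set it represents and the step size rho_t yielded
# by sgd_passes; held-out log likelihood is recorded every 10 steps.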
for t, (data, rho_t) in progprint(enumerate(sgdseq)):
hmm.meanfield_sgdstep(data, data.shape[0] / training_size, rho_t)
if t % 10 == 0:
scores.append(hmm.log_likelihood(heldout))
plt.figure()
plt.plot(scores)
plt.show()
|
import numpy as np
import matplotlib.pyplot as plt
import time
import random
import bisect
import json
import sys
from numpy import linalg as alg
from scipy import sparse
from sklearn import cross_validation as cv
from itertools import product
from collections import defaultdict
from functools import partial
from multiprocessing import Pool
# Read train and test data for each fold
def get_data(collection, dataset, num_folds, alpha):
# collection: data collection folder
# dataset: dataset folder
# num_folds: data splits
# alpha: weight for the binary ratings
# Load ratings data
full_R = np.loadtxt('../data/' + collection + '/' + dataset + '/playcounts.txt', delimiter=",")
full_R = sparse.coo_matrix((full_R[:, 2], (full_R[:, 0], full_R[:, 1])))
num_users, num_items = full_R.shape
# Make data splits balancing users in each fold and prepare data
splits = cv.StratifiedKFold(full_R.row, n_folds=num_folds, random_state=1)
data = []
    test_indices = open('test_' + dataset + '_MF.txt', 'w')
for train, test in splits:
# Train data
R = sparse.csr_matrix((full_R.data[train], (full_R.row[train],
full_R.col[train])),
shape=(num_users, num_items))
        # P = R > 0 is really not needed anywhere in the code
# Weight data
weights = 1. + alpha * np.log(1. + R.data)
C = sparse.csr_matrix((weights, R.nonzero()),
shape=(num_users, num_items))
# Test data
Rt = sparse.coo_matrix((full_R.data[test], (full_R.row[test],
full_R.col[test])),
shape=(num_users, num_items))
fold_data = {'C': C, 'Rt': Rt}
data.append(fold_data)
# Store test indices for further mpr calculation
np.savetxt(test_indices, test, fmt='%i')
test_indices.close()
return data
# RMSE function
def loss_function(C, X, Y):
# C: data arrays stored in sparse format
# X, Y: factor matrices
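    # Weighted implicit-feedback reconstruction error: each cell contributes
    # C_ui * (P_ui - x_u . y_i)^2, where P_ui is 1 for observed playcounts and
    # 0 otherwise, and unobserved cells keep the default confidence weight 1.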
loss = 0.
for u, Cu in enumerate(C):
Cu_dense = Cu.toarray()
Pu_dense = np.ones(Cu_dense.shape)
Pu_dense[Cu_dense == 0.] = 0.
Cu_dense[Cu_dense == 0.] = 1. # blank cells are in fact 1s in C
Zu = X[u].dot(Y.T)
loss += np.sum(Cu_dense * ((Pu_dense - Zu) ** 2))
return loss
# Objective function
def cost_function(C, X, Y, eta):
# C: data arrays in sparse format
# X, Y: factor matrices
# eta: regularization term
# Reconstruction error
loss = loss_function(C, X, Y)
# Regularization error
reg_x = (X ** 2).sum()
reg_y = (Y ** 2).sum()
return loss + eta * (reg_x + reg_y)
# Train and test a given fold (convenient for parallel cross-validation)
def run_this_fold(experiment, N_values, fold_and_data):
# experiment: set of parameters for the current experiment
# N_values: lengths of the recommendation lists
# fold_and_data: list including fold and data
# fold number, used to iterate
# data: split of data for the given fold
fold = fold_and_data[0]
data = fold_and_data[1]
results = defaultdict(list)
print ('\tMF with ' + str(experiment['num_iterations']) +
' it. of ALS. Launching fold ' + str(fold + 1) + '...')
# Train
X, Y = train_MF(data['C'], False, fold, **experiment)
# Test
for N in N_values:
mpr_num, mpr_den, rank = test_topN(X, Y, data['Rt'], N, False, fold,
experiment)
# Save results for each fold and each value of N
this_result = {'mpr_num': mpr_num, 'mpr_den': mpr_den, 'rank': rank,
'fold': fold}
results[N] = this_result
return results
# Train MF for implicit feedback
def train_MF(C, plot, fold, alpha, eta, num_factors, num_iterations):
# C: array of weights as a function of R in sparse format
# plot: should the train error evolution be plotted?
# fold: integer indicating which fold is being trained
# alpha: weight for the implicit feedback
# eta: regularization term
# num_factors: self descriptive
# num_iterations: self descriptive
# Random user and item factors initialization
np.random.seed(1)
num_users, num_items = C.shape
X = np.random.rand(num_users, num_factors)
Y = np.random.rand(num_items, num_factors)
# Iterate Alternating Least Squares
    cost = []  # training cost per iteration, only filled when plot=True
for iteration in range(num_iterations):
t0 = time.time()
# Common terms for all users and items including regularization
A_common_user = Y.T.dot(Y) + eta * np.eye(num_factors)
A_common_item = X.T.dot(X) + eta * np.eye(num_factors)
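        # Per-user/item systems use the implicit-feedback speed-up: Y'CuY is
        # computed as Y'Y + Y'(Cu - I)Y, so only the rows of items the user
        # actually played enter the solve (and symmetrically for items).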
for u, Cu in enumerate(C):
# User dedicated part Y.T * (Cu - I) * Y
# Use only active items for user u to speed-up
mask = Cu.nonzero()[1]
Cu_mask = Cu.data
Cu_mask_I = Cu_mask - np.array([1])
Y_mask = Y[mask, :]
# Pu_mask = P.getrow(u).data # this is all 1, don't need it!
A_user = Y_mask.T.dot(Cu_mask_I[:, np.newaxis] * Y_mask)
# b_user = (Y_mask.T * (Cu_mask * Pu_mask)[np.newaxis, :]).sum(1)
b_user = (Y_mask.T * Cu_mask[np.newaxis, :]).sum(1)
X[u] = alg.solve(A_common_user + A_user, b_user)
for i, Ci in enumerate(C.T):
# Item dedicated part X.T * (Ci - I) * X
# Use only active users for item i to speed-up
mask = Ci.nonzero()[1]
Ci_mask = Ci.data
Ci_mask_I = Ci_mask - np.array([1])
X_mask = X[mask, :]
# Pi_mask = P.getcol(i).data # this is all 1, don't need it!
A_item = X_mask.T.dot(Ci_mask_I[:, np.newaxis] * X_mask)
# b_item = (X_mask.T * (Ci_mask * Pi_mask)[np.newaxis, :]).sum(1)
b_item = (X_mask.T * Ci_mask[np.newaxis, :]).sum(1)
Y[i] = alg.solve(A_common_item + A_item, b_item)
t1 = time.time()
print ('\t\tTraining MF on fold ' + str(fold) + ', it. ' +
str(iteration) + ': ' + str(t1 - t0) + 's')
        if plot:
            cost.append(cost_function(C, X, Y, eta))
if plot:
plt.figure()
plt.title('MF training\n' + 'alpha = ' + str(alpha) + ', eta = ' +
str(eta) + ', num_factors = ' + str(num_factors) +
', num_iterations = ' + str(num_iterations))
plt.plot(cost, label='cost',
marker='o', linestyle='--', color='c', linewidth=2)
plt.xlabel('Iteration Number')
plt.ylabel('Mean Squared Error')
plt.legend()
plt.show()
return X, Y
# Test by Mean Percentage Ranking
# Note: The order in the code has to be (1) sample (2) sort. We can not sort
# just once for each user and then sample, because sample breaks the ordering.
def test_topN(X, Y, Rt, N, plot, fold, parameters):
# X, Y: latent factor arrays
# Rt: test data
# N: length of the recommendation
# plot: should the rank be plotted?
# fold: integer indicating which fold is being trained
# parameters: to further pass to plot
# Initialize values
mpr_numerator = 0
rank = Rt.nnz * [None]
t0 = time.time()
# Loop over test set
# print '\t\tTesting by Mean Percentage Ranking at ' + str(N) + '...'
u_old = -1
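    # For each held-out (user, item) pair: sample N scores of other items,
    # count how many beat the held-out item's score (its rank within the
    # sample), and accumulate rt * rank / N for the MPR numerator.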
for k, (u, i, rt) in enumerate(zip(Rt.row, Rt.col, Rt.data)):
if u != u_old:
Zu = X[u].dot(Y.T)
u_old = u
random.seed(1)
Zu_sample = random.sample(np.hstack((Zu[:i], Zu[(i + 1):])), N)
Zu_sample.sort()
# position of Zu[i] in Zu_sample but reversed order
rank[k] = N - bisect.bisect(Zu_sample, Zu[i])
mpr_numerator += rt * rank[k] / float(N)
t1 = time.time()
print ('\t\tTesting MPR at ' + str(N) + ' on fold ' + str(fold) + ': ' +
str(t1 - t0) + 's')
if plot:
plot_rank(rank, N, **parameters)
return mpr_numerator, Rt.data.sum(), rank
# Join results of MPR for each fold and each value of N
def join_folds(results, num_folds, N_values, plot, parameters):
# results: result for each fold
# num_folds: number of data splits
# N_values: possible values for the length of the recommendation
# plot: should the rank be plotted?
# parameters: to further pass to plot
out_mpr = defaultdict()
out_rank = defaultdict()
for N in N_values:
# Initialize values
mpr_num = 0.
mpr_den = 0.
rank = []
        print('\tJoining results of MPR at ' + str(N) + ' for each fold...')
for fold in range(num_folds):
mpr_num += results[fold][N]['mpr_num']
mpr_den += results[fold][N]['mpr_den']
rank += results[fold][N]['rank']
if plot:
plot_rank(rank, N, **parameters)
out_mpr[N] = mpr_num / mpr_den
out_rank[N] = rank
return out_mpr, out_rank
# Plot rank density and ecdf
def plot_rank(rank, N, alpha, eta, num_factors, num_iterations):
# rank: position of each element in the test set
# N: length of the recommendation
count, bins = np.histogram(rank, bins=100)
ecdf = np.cumsum(count) / float(np.sum(count))
fig, ax1 = plt.subplots()
plt.title('MF test at Top' + str(N) + '\n' + r'$\alpha = $' + str(alpha) +
              r', $\eta = $' + str(eta) + ', num_factors = ' + str(num_factors) +
              ', num_iterations = ' + str(num_iterations))
ax1.plot(bins[1:], count, label='count',
linestyle='-', color='b', linewidth=2)
ax1.set_xlabel('Rank')
ax1.set_ylabel('Density [count]')
ax1.set_ylim([0, max(count)])
ax1.legend(loc=2) # top left
ax2 = ax1.twinx()
ax2.plot(bins[1:], ecdf, label='ecdf',
linestyle='--', color='g', linewidth=2)
ax2.set_ylabel('Cumulative Distribution [%]')
ax2.set_ylim([0, 1])
ax2.legend(loc=1) # top right
plt.show()
# Go!
# Parameters for all experiments
param = {'alpha': [120.],
'eta': [100., 1000.],
'num_factors': [10],
'num_iterations': [5]}
N_values = [100]
num_folds = 5
if len(sys.argv) > 1:
collection = sys.argv[1]
dataset = sys.argv[2]
else:
collection = 'dummy_collection'
dataset = 'dummy_dataset'
# Create all possible experiments
param_names = sorted(param)
experiments = [dict(zip(param_names, prod))
for prod in product(*(param[name] for name in param_names))]
num_experiments = len(experiments)
# Run all experiments
for k, experiment in enumerate(experiments):
    print('Experiment ' + str(k + 1) + ' out of ' + str(num_experiments))
t0 = time.time()
# Data for this experiment
data_folds = get_data(collection, dataset, num_folds, experiment['alpha'])
# Pool of workers for parallel num_folds-CV and
# special function callable through fun(all_param, looping_index)
pool = Pool(processes=num_folds)
run_folds = partial(run_this_fold, experiment, N_values)
# Parallel loop over the folds
results = pool.map(run_folds, list(enumerate(data_folds)))
pool.close()
pool.join()
# Join CV results and save this experiment's result
mpr, rank = join_folds(results, num_folds, N_values, False, experiment)
# if we only want the mpr ...
experiments[k]['mpr'] = mpr
# if we want to save rank too, we should instead do...
# this_experiment = {'mpr': mpr, 'rank': rank}
# experiments[k].update(this_experiment)
t1 = time.time()
    print('\ttime elapsed in experiment ' + str(k + 1) + ': ' + str(t1 - t0))
# Save results in json format
    print('\tSaving results to file...')
with open('MF_' + dataset + '.json', 'w') as MF_output:
json.dump(experiments, MF_output)
MF_output.close()
|
### rel tools module.
### (mostly rel example code)
import rel
try:
from subprocess import getoutput # py3
except:
from commands import getoutput # py2
### helper functions
def notice(*lines):
print("")
print("\n".join(lines))
def exit():
notice("goodbye")
if rel.running:
rel.abort()
else:
import sys
sys.exit()
def error(msg, *lines):
notice("fatal exception!", "error: %s"%(msg,), *lines)
exit()
### the tools themselves
# rtimer
RT_MP3 = '/var/local/rtimer_elapsed.mp3'
RT_USAGE = 'rtimer [seconds] [minutes] [hours] [update_increment]\n--\nall arguments default to zero. so, rtimer 15 30 2 60 means run for 15 seconds, 30 minutes, and 2 hours, printing a notice every 60 seconds. rtimer 0 5 means run for 5 minutes, printing no incremental updates. when the time runs out, a sound will play on two conditions: there is a readable file at the specified path (default: %s), and mplayer is installed.'%(RT_MP3,)
class Timer(object):
def __init__(self, s, m, h, interval=0, mp3=RT_MP3):
try:
s, m, h, self.interval = int(s), int(m), int(h), int(interval)
except:
error("invalid input", "seconds, minutes, hours, and interval must all be integers")
self.count = 0
        self.goal = s + m * 60 + h * 3600
if self.count == self.goal:
notice("USAGE: %s"%(RT_USAGE,))
exit()
self.mp3 = mp3
problem = "no sound file path specified"
if self.mp3:
import os
if "command not found" in getoutput("mplayer"):
self.mp3 = None
problem = "could not find mplayer!"
elif not os.path.isfile(mp3):
self.mp3 = None
problem = "could not access sound file at %s -- no such file"%(mp3,)
else:
try:
f = open(mp3)
f.close()
except:
self.mp3 = None
problem = "could not access sound file at %s -- permission denied"%(mp3,)
if not self.mp3:
notice("sound disabled", problem)
def start(self):
self.count = 0
notice("starting countdown to %s"%(self.goal,))
rel.timeout(1, self.update)
rel.signal(2, self.stop)
rel.dispatch()
def stop(self):
notice("stopping timer at %s"%(self.count,))
exit()
def update(self):
self.count += 1
if self.interval and not self.count % self.interval:
notice("count: %s. goal: %s. completed: %s%%."%(self.count, self.goal, str(self.count/float(self.goal))[:5]))
if self.count == self.goal:
self.alarm()
return True
def alarm(self):
notice("time's up!")
if self.mp3:
getoutput("mplayer %s"%(self.mp3,))
self.stop()
### functions for interpreting command-line instructions
# rtimer
def timerCLI():
from optparse import OptionParser
parser = OptionParser(RT_USAGE)
parser.add_option("-m", "--mp3_file_path", dest="mp3", default=RT_MP3, help="location of alarm sound mp3. default: %s"%(RT_MP3,))
parser.add_option("-v", "--verbose", dest="verbose", default=False, action="store_true", help="run timer in verbose mode")
options, arguments = parser.parse_args()
if options.verbose:
rel.initialize(options=["verbose"])
try:
arguments = [int(arg) for arg in arguments]
except:
error("non-integer argument", "USAGE: %s"%(RT_USAGE,))
while len(arguments) < 4:
arguments.append(0)
arguments.append(options.mp3)
Timer(*arguments).start()
|
# Generated by Django 3.2 on 2021-04-25 17:44
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0012_alter_user_first_name_max_length'),
]
operations = [
migrations.CreateModel(
name='Auth',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('creation_date', models.DateTimeField(auto_now_add=True)),
('is_disabled', models.BooleanField(default=False)),
('token', models.TextField(max_length=700, verbose_name='Token')),
],
options={
'verbose_name': 'Sesión',
'verbose_name_plural': 'Sesiones',
},
),
migrations.CreateModel(
name='Profile',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('names', models.CharField(max_length=255, verbose_name='Nombres de los perfiles')),
],
options={
'verbose_name': 'Perfil',
'verbose_name_plural': 'Perfiles',
},
),
migrations.CreateModel(
name='User',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),
('first_name', models.CharField(blank=True, max_length=150, verbose_name='first name')),
('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')),
('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')),
('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
('birthdate', models.DateField(blank=True, null=True, verbose_name='Fecha de nacimiento')),
('document', models.CharField(max_length=255, unique=True, verbose_name='Documento')),
('name', models.CharField(max_length=255, verbose_name='Primer Nombre')),
('cellphone', models.CharField(blank=True, max_length=11, verbose_name='Celular')),
('state', models.CharField(blank=True, max_length=100, verbose_name='Departamento')),
('city', models.CharField(blank=True, max_length=100, verbose_name='Ciudad')),
('address', models.CharField(blank=True, max_length=100, verbose_name='Direccion')),
('gender', models.CharField(blank=True, choices=[('M', 'Masculino'), ('F', 'Femenino'), ('U', 'No definido')], max_length=1, verbose_name='Género')),
('number_plate', models.CharField(max_length=6, verbose_name='Numero de placa del vehiculo')),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('profile', models.ManyToManyField(related_name='user_profile', to='api.Profile')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'verbose_name': 'Usuario',
'verbose_name_plural': 'Usuarios',
},
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
]
|
from datetime import datetime
import logging
from bs4 import BeautifulSoup
from db.models import Victim
from net.proxy import Proxy
from .sitecrawler import SiteCrawler
import time
class Nefilim(SiteCrawler):
actor = "Nefilim"
def _handle_page(self, soup):
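        # Each WordPress-style "entry-header" block is one victim post: take the
        # name from the title (dropping any ". Part N" suffix), the publish date
        # and the post URL, then insert a new Victim row or refresh last_seen.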
victim_list = soup.find_all("header", class_="entry-header")
for victim in victim_list:
victim_title = victim.find("h2", class_="entry-title").text.strip()
            part_idx = victim_title.find(". Part")
            victim_name = victim_title[:part_idx] if part_idx != -1 else victim_title
meta = victim.find("div", class_="entry-meta")
published = meta.find("time", class_="entry-date").attrs["datetime"]
published_dt = datetime.strptime(
published.strip()[:-6], "%Y-%m-%dT%H:%M:%S")
victim_leak_site = meta.find("span", class_="posted-on").find("a").attrs["href"]
q = self.session.query(Victim).filter_by(
url=victim_leak_site, site=self.site)
if q.count() == 0:
# new victim
v = Victim(name=victim_name, url=victim_leak_site, published=published_dt,
first_seen=datetime.utcnow(), last_seen=datetime.utcnow(), site=self.site)
self.session.add(v)
self.new_victims.append(v)
else:
# already seen, update last_seen
v = q.first()
v.last_seen = datetime.utcnow()
self.current_victims.append(v)
self.session.commit()
        # the server was timing out, so slow things down a bit between pages
time.sleep(1.0)
def scrape_victims(self):
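        # Walk the leak site's pagination backwards: keep following the
        # "nav-previous" link and hand each older page to _handle_page until
        # no previous page is left.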
with Proxy() as p:
r = p.get(f"{self.url}", headers=self.headers)
soup = BeautifulSoup(r.content.decode(), "html.parser")
page_count = 0
while True:
page_nav = soup.find("div", class_="nav-previous")
if page_nav is None:
break
url = page_nav.find("a").attrs["href"]
r = p.get(f"{url}", headers=self.headers)
soup = BeautifulSoup(r.content.decode(), "html.parser")
self._handle_page(soup) |
import pytest
import numpy as np
from manim import CoordinateSystem as CS
from manim import Axes, ThreeDAxes, NumberPlane, ComplexPlane
from manim import config, tempconfig, ORIGIN, LEFT
def test_initial_config():
"""Check that all attributes are defined properly from the config."""
cs = CS()
assert cs.x_min == -config["frame_x_radius"]
assert cs.x_max == config["frame_x_radius"]
assert cs.y_min == -config["frame_y_radius"]
assert cs.y_max == config["frame_y_radius"]
ax = Axes()
assert np.allclose(ax.center_point, ORIGIN)
assert np.allclose(ax.y_axis_config["label_direction"], LEFT)
with tempconfig({"frame_x_radius": 100, "frame_y_radius": 200}):
cs = CS()
assert cs.x_min == -100
assert cs.x_max == 100
assert cs.y_min == -200
assert cs.y_max == 200
def test_dimension():
"""Check that objects have the correct dimension."""
assert Axes().dimension == 2
assert NumberPlane().dimension == 2
assert ComplexPlane().dimension == 2
assert ThreeDAxes().dimension == 3
def test_abstract_base_class():
"""Check that CoordinateSystem has some abstract methods."""
with pytest.raises(Exception):
CS().get_axes()
|
from telegrambot.bot_views.login import LoginBotView # NOQA |
import dataclasses
from dataclasses import dataclass, fields, replace
import collections
import enum
import hashlib
import itertools
import json
import logging
import re
import shlex
from abc import ABC, abstractmethod
from contextlib import asynccontextmanager
from datetime import timedelta
from functools import lru_cache
from neuro_sdk import Client
from typing import (
AbstractSet,
Any,
AsyncIterator,
Dict,
Generic,
Iterable,
List,
Mapping,
Optional,
Sequence,
Set,
Tuple,
Type,
TypeVar,
Union,
cast,
)
from typing_extensions import Annotated, Protocol
from yarl import URL
from neuro_flow import ast
from neuro_flow.ast import InputType
from neuro_flow.colored_topo_sorter import ColoredTopoSorter
from neuro_flow.config_loader import ActionSpec, ConfigLoader
from neuro_flow.expr import (
BaseMappingExpr,
BaseSequenceExpr,
ConcatSequenceExpr,
EnableExpr,
EvalError,
Expr,
IdExpr,
LiteralT,
MergeMappingsExpr,
OptStrExpr,
RootABC,
StrExpr,
TypeT,
)
from neuro_flow.types import AlwaysT, FullID, GitInfo, LocalPath, RemotePath, TaskStatus
from neuro_flow.utils import collect_git_info
log = logging.getLogger(__name__)
# Exceptions
class NotAvailable(LookupError):
def __init__(self, ctx_name: str) -> None:
super().__init__(f"The '{ctx_name}' context is not available")
class UnknownJob(KeyError):
pass
class UnknownTask(KeyError):
pass
# ...Ctx types, they define parts that can be available in expressions
EnvCtx = Annotated[Mapping[str, str], "EnvCtx"]
TagsCtx = Annotated[AbstractSet[str], "TagsCtx"]
VolumesCtx = Annotated[Mapping[str, "VolumeCtx"], "VolumesCtx"]
ImagesCtx = Annotated[Mapping[str, "ImageCtx"], "ImagesCtx"]
InputsCtx = Annotated[Mapping[str, Union[int, float, bool, str]], "InputsCtx"]
ParamsCtx = Annotated[Mapping[str, str], "ParamsCtx"]
NeedsCtx = Annotated[Mapping[str, "DepCtx"], "NeedsCtx"]
StateCtx = Annotated[Mapping[str, str], "StateCtx"]
MatrixCtx = Annotated[Mapping[str, LiteralT], "MatrixCtx"]
@dataclass(frozen=True)
class ProjectCtx:
id: str
owner: Optional[str] = None
role: Optional[str] = None
@dataclass(frozen=True)
class FlowCtx:
flow_id: str
project_id: str
workspace: LocalPath
title: str
@property
def id(self) -> str:
# TODO: add a custom warning API to report with config file name and
# line numbers instead of bare printing
import click
click.echo(
click.style(
"flow.id attribute is deprecated, use flow.flow_id instead", fg="yellow"
)
)
return self.flow_id
@dataclass(frozen=True)
class BatchFlowCtx(FlowCtx):
life_span: Optional[float]
@dataclass(frozen=True)
class VolumeCtx:
id: str
remote: URL
mount: RemotePath
read_only: bool
local: Optional[LocalPath]
full_local_path: Optional[LocalPath]
@property
def ref_ro(self) -> str:
return f"{self.remote}:{self.mount}:ro"
@property
def ref_rw(self) -> str:
return f"{self.remote}:{self.mount}:rw"
@property
def ref(self) -> str:
ro = "ro" if self.read_only else "rw"
return f"{self.remote}:{self.mount}:{ro}"
@dataclass(frozen=True)
class EarlyImageCtx:
id: str
ref: str
context: Optional[Union[URL, LocalPath]]
dockerfile: Optional[Union[URL, LocalPath]]
dockerfile_rel: Optional[Union[LocalPath, RemotePath]]
def to_image_ctx(
self,
build_args: Sequence[str],
env: Mapping[str, str],
volumes: Sequence[str],
build_preset: Optional[str],
force_rebuild: bool,
) -> "ImageCtx":
return ImageCtx(
id=self.id,
ref=self.ref,
context=self.context,
dockerfile=self.dockerfile,
dockerfile_rel=self.dockerfile_rel,
build_args=build_args,
env=env,
volumes=volumes,
build_preset=build_preset,
force_rebuild=force_rebuild,
)
@dataclass(frozen=True)
class ImageCtx(EarlyImageCtx):
build_args: Sequence[str]
env: Mapping[str, str]
volumes: Sequence[str]
build_preset: Optional[str]
force_rebuild: bool
@dataclass(frozen=True)
class MultiCtx:
args: str
suffix: str
@dataclass(frozen=True)
class StrategyCtx:
fail_fast: bool = True
max_parallel: int = 10
@dataclass(frozen=True)
class DepCtx:
result: TaskStatus
outputs: Mapping[str, str]
def __post_init__(self) -> None:
assert (
self.result != TaskStatus.CACHED
), "CACHED status should replaced with SUCCEEDED for expressions"
@dataclass(frozen=True)
class GitCtx:
_git_info: Optional[GitInfo]
def _get_info(self) -> GitInfo:
if not self._git_info:
raise ValueError("Git info is not available: is this project under git?")
return self._git_info
@property
def sha(self) -> str:
return self._get_info().sha
@property
def branch(self) -> str:
return self._get_info().branch
@property
def tags(self) -> Sequence[str]:
return self._get_info().tags
# Confs (similar to ..Ctx, but not available to expressions, only used
# during evaluation)
@dataclass(frozen=True)
class CacheConf:
strategy: ast.CacheStrategy = ast.CacheStrategy.DEFAULT
life_span: float = 14 * 24 * 3600
@dataclass(frozen=True)
class DefaultsConf:
volumes: Sequence[str] = ()
workdir: Optional[RemotePath] = None
life_span: Optional[float] = None
schedule_timeout: Optional[float] = None
preset: Optional[str] = None
# Return dataclasses
# Returned by flow classes to provide data to runner/executor.
@dataclass(frozen=True)
class ExecUnit:
title: Optional[str]
name: Optional[str]
image: str
preset: Optional[str]
schedule_timeout: Optional[float]
http_port: Optional[int]
http_auth: Optional[bool]
pass_config: Optional[bool]
entrypoint: Optional[str]
cmd: Optional[str]
workdir: Optional[RemotePath]
volumes: Sequence[str] # Sequence[VolumeRef]
life_span: Optional[float]
env: Mapping[str, str]
tags: AbstractSet[str]
@dataclass(frozen=True)
class Job(ExecUnit):
id: str
detach: bool
browse: bool
port_forward: Sequence[str]
multi: bool
@dataclass(frozen=True)
class Task(ExecUnit):
# executed task
id: Optional[str]
# continue_on_error: Optional[bool]
enable: Union[bool, AlwaysT]
strategy: StrategyCtx
cache: "CacheConf"
caching_key: str
@dataclass(frozen=True)
class LocalTask:
# executed task
id: Optional[str]
cmd: str
@dataclass(frozen=True)
class TaskMeta:
enable: Union[bool, AlwaysT]
strategy: StrategyCtx
cache: "CacheConf"
@dataclass(frozen=True)
class JobMeta:
# Metadata used for jobs lookup
id: str
multi: bool
tags: AbstractSet[str]
@dataclass(frozen=True)
class LocallyPreparedInfo:
"""Tree-like structure that stores locally prepared info for a the batch flow."""
children_info: Mapping[str, "LocallyPreparedInfo"]
git_info: Optional[GitInfo]
early_images: Mapping[str, EarlyImageCtx]
# ...Context classes, used to complete container of what is available
# to expressions
class EmptyRoot(RootABC):
def lookup(self, name: str) -> TypeT:
raise NotAvailable(name)
@asynccontextmanager
async def client(self) -> AsyncIterator[Client]:
raise RuntimeError("neuro API is not available in <empty> context")
yield Client() # fake lint to make the code a real async iterator
@property
def dry_run(self) -> bool:
return False
EMPTY_ROOT = EmptyRoot()
@dataclass(frozen=True)
class Context(RootABC):
_client: Client
_dry_run: bool
def lookup(self, name: str) -> TypeT:
for f in fields(self):
if f.name != name:
continue
break
else:
raise NotAvailable(name)
ret = getattr(self, name)
# assert isinstance(ret, (ContainerT, SequenceT, MappingT)), ret
return cast(TypeT, ret)
@asynccontextmanager
async def client(self) -> AsyncIterator[Client]:
yield self._client
@property
def dry_run(self) -> bool:
return self._dry_run
_MODULE_PARENT = TypeVar("_MODULE_PARENT", bound=RootABC, covariant=True)
@dataclass(frozen=True)
class ModuleContext(Context, Generic[_MODULE_PARENT]):
_parent: _MODULE_PARENT
def lookup(self, name: str) -> TypeT:
try:
return super().lookup(name)
except NotAvailable:
return self._parent.lookup(name)
@dataclass(frozen=True)
class WithFlowContext(Context):
project: ProjectCtx
flow: FlowCtx
@dataclass(frozen=True)
class WithEnvContext(Context):
env: EnvCtx
@dataclass(frozen=True)
class LiveContextStep1(WithFlowContext, Context):
git: GitCtx
def to_live_ctx(
self, env: EnvCtx, tags: TagsCtx, volumes: VolumesCtx, images: ImagesCtx
) -> "LiveContext":
return LiveContext(
project=self.project,
flow=self.flow,
git=self.git,
env=env,
tags=tags,
volumes=volumes,
images=images,
_client=self._client,
_dry_run=self._dry_run,
)
@dataclass(frozen=True)
class LiveContext(WithEnvContext, LiveContextStep1):
tags: TagsCtx
volumes: VolumesCtx
images: ImagesCtx
def to_job_ctx(self, params: ParamsCtx) -> "LiveJobContext":
return LiveJobContext(
project=self.project,
flow=self.flow,
git=self.git,
env=self.env,
tags=self.tags,
volumes=self.volumes,
images=self.images,
params=params,
_client=self._client,
_dry_run=self._dry_run,
)
def to_multi_job_ctx(
self, multi: MultiCtx, params: ParamsCtx
) -> "LiveMultiJobContext":
return LiveMultiJobContext(
project=self.project,
flow=self.flow,
git=self.git,
env=self.env,
tags=self.tags,
volumes=self.volumes,
images=self.images,
multi=multi,
params=params,
_client=self._client,
_dry_run=self._dry_run,
)
@dataclass(frozen=True)
class LiveJobContext(LiveContext):
params: ParamsCtx
@dataclass(frozen=True)
class LiveMultiJobContext(LiveContext):
multi: MultiCtx
params: ParamsCtx
@dataclass(frozen=True)
class LiveActionContext(Context):
inputs: InputsCtx
@dataclass(frozen=True)
class LiveModuleContext(ModuleContext[_MODULE_PARENT]):
inputs: InputsCtx
@dataclass(frozen=True)
class BatchContextStep1(WithFlowContext, Context):
flow: BatchFlowCtx
params: ParamsCtx
git: GitCtx
def to_step_2(
self, env: EnvCtx, tags: TagsCtx, volumes: VolumesCtx, images: ImagesCtx
) -> "BatchContextStep2":
return BatchContextStep2(
project=self.project,
flow=self.flow,
params=self.params,
git=self.git,
env=env,
tags=tags,
volumes=volumes,
images=images,
_client=self._client,
_dry_run=self._dry_run,
)
@dataclass(frozen=True)
class BatchContextStep2(WithEnvContext, BatchContextStep1):
tags: TagsCtx
volumes: VolumesCtx
images: ImagesCtx
def to_batch_ctx(
self,
strategy: StrategyCtx,
) -> "BatchContext":
return BatchContext(
project=self.project,
flow=self.flow,
params=self.params,
git=self.git,
env=self.env,
tags=self.tags,
volumes=self.volumes,
images=self.images,
strategy=strategy,
_client=self._client,
_dry_run=self._dry_run,
)
class BaseBatchContext(Context):
strategy: StrategyCtx
images: ImagesCtx
@abstractmethod
def to_matrix_ctx(
self, strategy: StrategyCtx, matrix: MatrixCtx
) -> "BaseMatrixContext":
pass
class BaseMatrixContext(BaseBatchContext):
matrix: MatrixCtx
@abstractmethod
def to_task_ctx(self, needs: NeedsCtx, state: StateCtx) -> "BaseTaskContext":
pass
@dataclass(frozen=True)
class MatrixOnlyContext(Context):
matrix: MatrixCtx
class BaseTaskContext(BaseMatrixContext):
strategy: StrategyCtx
needs: NeedsCtx
state: StateCtx
@dataclass(frozen=True)
class BatchContext(BaseBatchContext, BatchContextStep2):
strategy: StrategyCtx
def to_matrix_ctx(
self, strategy: StrategyCtx, matrix: MatrixCtx
) -> "BatchMatrixContext":
return BatchMatrixContext(
project=self.project,
flow=self.flow,
params=self.params,
git=self.git,
env=self.env,
tags=self.tags,
volumes=self.volumes,
images=self.images,
strategy=strategy,
matrix=matrix,
_client=self._client,
_dry_run=self._dry_run,
)
@dataclass(frozen=True)
class BatchMatrixContext(BaseMatrixContext, BatchContext):
matrix: MatrixCtx
def to_task_ctx(self, needs: NeedsCtx, state: StateCtx) -> "BatchTaskContext":
return BatchTaskContext(
project=self.project,
flow=self.flow,
params=self.params,
git=self.git,
env=self.env,
tags=self.tags,
volumes=self.volumes,
images=self.images,
strategy=self.strategy,
matrix=self.matrix,
needs=needs,
state=state,
_client=self._client,
_dry_run=self._dry_run,
)
@dataclass(frozen=True)
class BatchTaskContext(BaseTaskContext, BatchMatrixContext):
needs: NeedsCtx
state: StateCtx
@dataclass(frozen=True)
class BatchActionContextStep1(ModuleContext[_MODULE_PARENT]):
inputs: InputsCtx
strategy: StrategyCtx
git: GitCtx
def to_action_ctx(self, images: ImagesCtx) -> "BatchActionContext[_MODULE_PARENT]":
return BatchActionContext(
inputs=self.inputs,
strategy=self.strategy,
git=self.git,
images=images,
_client=self._client,
_dry_run=self._dry_run,
_parent=self._parent,
)
@dataclass(frozen=True)
class BatchActionContext(BatchActionContextStep1[_MODULE_PARENT], BaseBatchContext):
images: ImagesCtx
def to_matrix_ctx(
self, strategy: StrategyCtx, matrix: MatrixCtx
) -> "BatchActionMatrixContext[_MODULE_PARENT]":
return BatchActionMatrixContext(
inputs=self.inputs,
images=self.images,
git=self.git,
matrix=matrix,
strategy=strategy,
_client=self._client,
_dry_run=self._dry_run,
_parent=self._parent,
)
def to_outputs_ctx(
self, needs: NeedsCtx
) -> "BatchActionOutputsContext[_MODULE_PARENT]":
return BatchActionOutputsContext(
strategy=self.strategy,
inputs=self.inputs,
images=self.images,
git=self.git,
needs=needs,
_client=self._client,
_dry_run=self._dry_run,
_parent=self._parent,
)
@dataclass(frozen=True)
class BatchActionOutputsContext(BatchActionContext[_MODULE_PARENT]):
needs: NeedsCtx
@dataclass(frozen=True)
class BatchActionMatrixContext(BaseMatrixContext, BatchActionContext[_MODULE_PARENT]):
matrix: MatrixCtx
strategy: StrategyCtx
def to_task_ctx(
self, needs: NeedsCtx, state: StateCtx
) -> "BatchActionTaskContext[_MODULE_PARENT]":
return BatchActionTaskContext(
inputs=self.inputs,
matrix=self.matrix,
strategy=self.strategy,
images=self.images,
git=self.git,
needs=needs,
state=state,
_client=self._client,
_dry_run=self._dry_run,
_parent=self._parent,
)
@dataclass(frozen=True)
class BatchActionTaskContext(BaseTaskContext, BatchActionMatrixContext[_MODULE_PARENT]):
needs: NeedsCtx
state: StateCtx
@dataclass(frozen=True)
class StatefulActionContext(Context):
inputs: InputsCtx
@dataclass(frozen=True)
class LocalActionContext(Context):
inputs: InputsCtx
def sanitize_name(name: str) -> str:
# replace non-printable characters with "_"
if not name.isprintable():
name = "".join(c if c.isprintable() else "_" for c in name)
# ":" is special in role name, replace it with "_"
name = name.replace(":", "_")
name = name.replace(" ", "_") # replace space for readability
name = re.sub(r"//+", "/", name) # collapse repeated "/"
name = name.strip("/") # remove initial and and trailing "/"
name = name or "_" # name should be non-empty
return name
async def setup_project_ctx(
ctx: RootABC,
config_loader: ConfigLoader,
) -> ProjectCtx:
ast_project = await config_loader.fetch_project()
project_id = await ast_project.id.eval(ctx)
project_owner = await ast_project.owner.eval(ctx)
project_role = await ast_project.role.eval(ctx)
if project_role is None and project_owner is not None:
project_role = f"{project_owner}/projects/{sanitize_name(project_id)}"
return ProjectCtx(id=project_id, owner=project_owner, role=project_role)
async def setup_flow_ctx(
ctx: RootABC,
ast_flow: ast.BaseFlow,
config_name: str,
config_loader: ConfigLoader,
project: ProjectCtx,
) -> FlowCtx:
flow_id = await ast_flow.id.eval(ctx)
if flow_id is None:
flow_id = config_name.replace("-", "_")
flow_title = await ast_flow.title.eval(ctx)
return FlowCtx(
flow_id=flow_id,
project_id=project.id,
workspace=config_loader.workspace,
title=flow_title or flow_id,
)
async def setup_batch_flow_ctx(
ctx: RootABC,
ast_flow: ast.BatchFlow,
config_name: str,
config_loader: ConfigLoader,
project: ProjectCtx,
) -> BatchFlowCtx:
base_flow = await setup_flow_ctx(ctx, ast_flow, config_name, config_loader, project)
life_span = await ast_flow.life_span.eval(ctx)
return BatchFlowCtx(
flow_id=base_flow.flow_id,
project_id=base_flow.project_id,
workspace=base_flow.workspace,
title=base_flow.title,
life_span=life_span,
)
async def setup_defaults_env_tags_ctx(
ctx: WithFlowContext,
ast_defaults: Optional[ast.FlowDefaults],
ast_global_defaults: Optional[ast.FlowDefaults],
) -> Tuple[DefaultsConf, EnvCtx, TagsCtx]:
if ast_defaults is not None and ast_global_defaults is not None:
ast_defaults = await merge_asts(ast_defaults, ast_global_defaults)
elif ast_global_defaults:
ast_defaults = ast_global_defaults
env: EnvCtx
tags: TagsCtx
volumes: List[str]
if ast_defaults is not None:
if ast_defaults.env is not None:
tmp_env = await ast_defaults.env.eval(ctx)
assert isinstance(tmp_env, dict)
env = tmp_env
else:
env = {}
if ast_defaults.tags is not None:
tmp_tags = await ast_defaults.tags.eval(ctx)
assert isinstance(tmp_tags, list)
tags = set(tmp_tags)
else:
tags = set()
if ast_defaults.volumes:
tmp_volumes = await ast_defaults.volumes.eval(ctx)
assert isinstance(tmp_volumes, list)
volumes = []
for volume in tmp_volumes:
if volume:
volumes.append(volume)
else:
volumes = []
workdir = await ast_defaults.workdir.eval(ctx)
life_span = await ast_defaults.life_span.eval(ctx)
preset = await ast_defaults.preset.eval(ctx)
schedule_timeout = await ast_defaults.schedule_timeout.eval(ctx)
else:
env = {}
tags = set()
volumes = []
workdir = None
life_span = None
preset = None
schedule_timeout = None
tags.add(f"project:{_id2tag(ctx.flow.project_id)}")
tags.add(f"flow:{_id2tag(ctx.flow.flow_id)}")
defaults = DefaultsConf(
volumes=volumes,
workdir=workdir,
life_span=life_span,
preset=preset,
schedule_timeout=schedule_timeout,
)
return defaults, env, tags
def _calc_full_path(
ctx: WithFlowContext, path: Optional[LocalPath]
) -> Optional[LocalPath]:
if path is None:
return None
if path.is_absolute():
return path
return ctx.flow.workspace.joinpath(path).resolve()
async def setup_volumes_ctx(
ctx: WithFlowContext,
ast_volumes: Optional[Mapping[str, ast.Volume]],
) -> VolumesCtx:
volumes = {}
if ast_volumes is not None:
for k, v in ast_volumes.items():
local_path = await v.local.eval(ctx)
volumes[k] = VolumeCtx(
id=k,
remote=await v.remote.eval(ctx),
mount=await v.mount.eval(ctx),
read_only=bool(await v.read_only.eval(ctx)),
local=local_path,
full_local_path=_calc_full_path(ctx, local_path),
)
return volumes
async def setup_local_or_storage_path(
str_expr: OptStrExpr,
ctx: RootABC,
flow_ctx: WithFlowContext,
) -> Optional[Union[URL, LocalPath]]:
path_str = await str_expr.eval(ctx)
if path_str is None:
return None
async with ctx.client() as client:
if path_str.startswith("storage"):
try:
return client.parse.str_to_uri(path_str)
except ValueError as e:
raise EvalError(str(e), str_expr.start, str_expr.end)
try:
path = LocalPath(path_str)
except ValueError as e:
raise EvalError(str(e), str_expr.start, str_expr.end)
return _calc_full_path(flow_ctx, path)
def _get_dockerfile_rel(
image: ast.Image,
context: Optional[Union[LocalPath, URL]],
dockerfile: Optional[Union[LocalPath, URL]],
) -> Optional[Union[LocalPath, RemotePath]]:
if context is None and dockerfile is None:
return None
if context is None or dockerfile is None:
raise EvalError(
"Partially defined image: either both context and "
"dockerfile should be set or not set",
image._start,
image._end,
)
if isinstance(context, LocalPath) and isinstance(dockerfile, LocalPath):
try:
return dockerfile.relative_to(context)
except ValueError as e:
raise EvalError(str(e), image.dockerfile.start, image.dockerfile.end)
elif isinstance(context, URL) and isinstance(dockerfile, URL):
try:
return RemotePath(dockerfile.path).relative_to(RemotePath(context.path))
except ValueError as e:
raise EvalError(str(e), image.dockerfile.start, image.dockerfile.end)
else:
raise EvalError(
"Mixed local/storage context is not supported: "
f"context is "
f"{'local' if isinstance(context, LocalPath) else 'on storage'}," # noqa: E501
f" but dockerfile is "
f"{'local' if isinstance(dockerfile, LocalPath) else 'on storage'}", # noqa: E501
image._start,
image._end,
)
async def setup_images_early(
ctx: RootABC,
flow_ctx: WithFlowContext,
ast_images: Optional[Mapping[str, ast.Image]],
) -> Mapping[str, EarlyImageCtx]:
images = {}
if ast_images is not None:
for k, i in ast_images.items():
try:
context = await setup_local_or_storage_path(i.context, ctx, flow_ctx)
dockerfile = await setup_local_or_storage_path(
i.dockerfile, ctx, flow_ctx
)
except EvalError as e:
                # During early evaluation, some contexts may be missing
if not isinstance(e.__cause__, NotAvailable):
raise
context = dockerfile = None
dockerfile_rel = _get_dockerfile_rel(i, context, dockerfile)
images[k] = EarlyImageCtx(
id=k,
ref=await i.ref.eval(ctx),
context=context,
dockerfile=dockerfile,
dockerfile_rel=dockerfile_rel,
)
return images
async def setup_images_ctx(
ctx: RootABC,
flow_ctx: WithFlowContext,
ast_images: Optional[Mapping[str, ast.Image]],
early_images: Optional[Mapping[str, EarlyImageCtx]] = None,
) -> ImagesCtx:
early_images = early_images or await setup_images_early(ctx, flow_ctx, ast_images)
assert early_images is not None
images = {}
if ast_images is not None:
for k, i in ast_images.items():
build_args: List[str] = []
if i.build_args is not None:
tmp_build_args = await i.build_args.eval(ctx)
assert isinstance(tmp_build_args, list)
build_args = tmp_build_args
image_env: Dict[str, str] = {}
if i.env is not None:
tmp_env = await i.env.eval(ctx)
assert isinstance(tmp_env, dict)
image_env.update(tmp_env)
image_volumes: List[str] = []
if i.volumes is not None:
tmp_volumes = await i.volumes.eval(ctx)
assert isinstance(tmp_volumes, list)
for volume in tmp_volumes:
if volume:
image_volumes.append(volume)
image_ctx = early_images[k].to_image_ctx(
build_args=build_args,
env=image_env,
volumes=image_volumes,
build_preset=await i.build_preset.eval(ctx),
force_rebuild=await i.force_rebuild.eval(ctx) or False,
)
if image_ctx.context is None: # if true, dockerfile is None also
# Context was not computed during early evaluation,
# either it is missing at all or it uses non-locally
# available context. It is safe to recompute it.
context = await setup_local_or_storage_path(i.context, ctx, flow_ctx)
dockerfile = await setup_local_or_storage_path(
i.dockerfile, ctx, flow_ctx
)
dockerfile_rel = _get_dockerfile_rel(i, context, dockerfile)
image_ctx = replace(
image_ctx,
context=context,
dockerfile=dockerfile,
dockerfile_rel=dockerfile_rel,
)
images[k] = image_ctx
return images
async def validate_action_call(
call_ast: Union[ast.BaseActionCall, ast.BaseModuleCall],
ast_inputs: Optional[Mapping[str, ast.Input]],
) -> None:
supported_inputs: Set[str]
supplied_inputs: Set[str]
if ast_inputs:
supported_inputs = set(ast_inputs.keys())
required_inputs = {
input_name
for input_name, input_ast in ast_inputs.items()
if input_ast.default.pattern is None
}
else:
supported_inputs = set()
required_inputs = set()
if call_ast.args:
supplied_inputs = set(call_ast.args.keys())
else:
supplied_inputs = set()
missing = required_inputs - supplied_inputs
if missing:
raise EvalError(
f"Required input(s): {','.join(sorted(missing))}",
call_ast._start,
call_ast._end,
)
extra = supplied_inputs - supported_inputs
if extra:
raise EvalError(
f"Unsupported input(s): {','.join(sorted(extra))}",
call_ast._start,
call_ast._end,
)
async def setup_inputs_ctx(
ctx: RootABC,
call_ast: Union[ast.BaseActionCall, ast.BaseModuleCall],
ast_inputs: Optional[Mapping[str, ast.Input]],
) -> InputsCtx:
await validate_action_call(call_ast, ast_inputs)
if call_ast.args is None or ast_inputs is None:
return {}
inputs = {k: await v.eval(ctx) for k, v in call_ast.args.items()}
for key, value in inputs.copy().items():
input_ast = ast_inputs[key]
arg_ast = call_ast.args[key]
if input_ast.type == InputType.STR:
if not isinstance(value, str):
eval_error = EvalError(
f"Implicit casting of action argument '{key}' to string"
f" is deprecated",
arg_ast.start,
arg_ast.end,
)
log.warning(str(eval_error))
inputs[key] = str(value)
elif not isinstance(value, input_ast.type.to_type()):
raise EvalError(
f"Type of argument '{key}' do not match to with inputs declared "
f"type. Argument has type '{type(value).__name__}', declared "
f"input type is '{input_ast.type.value}'",
arg_ast.start,
arg_ast.end,
)
for name, inp in ast_inputs.items():
if name not in inputs and inp.default.pattern is not None:
val = await inp.default.eval(EMPTY_ROOT)
            # input defaults don't support expressions,
            # so a non-None pattern means a non-None value
assert val is not None
inputs[name] = val
return inputs
async def setup_params_ctx(
ctx: RootABC,
params: Optional[Mapping[str, str]],
ast_params: Optional[Mapping[str, ast.Param]],
) -> ParamsCtx:
if params is None:
params = {}
new_params = {}
if ast_params is not None:
for k, v in ast_params.items():
value = params.get(k)
if value is None:
value = await v.default.eval(ctx)
if value is None:
raise EvalError(
f"Param {k} is not initialized and has no default value",
v._start,
v._end,
)
new_params[k] = value
extra = params.keys() - new_params.keys()
if extra:
raise ValueError(
f"Unsupported arg(s): {','.join(sorted(extra))}",
)
return new_params
async def setup_strategy_ctx(
ctx: RootABC,
ast_defaults: Optional[ast.BatchFlowDefaults],
ast_global_defaults: Optional[ast.BatchFlowDefaults],
) -> StrategyCtx:
if ast_defaults is not None and ast_global_defaults is not None:
ast_defaults = await merge_asts(ast_defaults, ast_global_defaults)
elif ast_global_defaults:
ast_defaults = ast_global_defaults
if ast_defaults is None:
return StrategyCtx()
fail_fast = await ast_defaults.fail_fast.eval(ctx)
if fail_fast is None:
fail_fast = StrategyCtx.fail_fast
max_parallel = await ast_defaults.max_parallel.eval(ctx)
if max_parallel is None:
max_parallel = StrategyCtx.max_parallel
return StrategyCtx(fail_fast=fail_fast, max_parallel=max_parallel)
async def setup_matrix(
ctx: RootABC,
ast_matrix: Optional[ast.Matrix],
) -> Sequence[MatrixCtx]:
if ast_matrix is None:
return [{}]
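    # Build the cartesian product of all "products" lists, drop combinations
    # matching an "exclude" entry, then append explicit "include" entries
    # (which must define exactly the same keys as the product axes).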
# Init
products = []
for k, lst in ast_matrix.products.items():
values = await lst.eval(ctx)
if values:
lst2 = [{k: v} for v in values]
products.append(lst2)
matrices = []
for row in itertools.product(*products):
dct: Dict[str, LiteralT] = {}
for elem in row:
dct.update(elem)
matrices.append(dct)
# Exclude
exclude = []
for exc_spec in ast_matrix.exclude:
exclude.append({k: await v.eval(ctx) for k, v in exc_spec.items()})
filtered = []
for matrix in matrices:
include = True
for exc in exclude:
match = True
for k, v in exc.items():
if matrix[k] != v:
match = False
break
if match:
include = False
break
if include:
filtered.append(matrix)
matrices = filtered
# Include
for inc_spec in ast_matrix.include:
if inc_spec.keys() != ast_matrix.products.keys():
additional = inc_spec.keys() - ast_matrix.products.keys()
missing = ast_matrix.products.keys() - inc_spec.keys()
raise EvalError(
"Keys of entry in include list of matrix are not the "
"same as matrix keys: "
+ (
f"additional keys: {','.join(sorted(additional))}"
if additional
else ""
)
+ (f" , " if additional and missing else "")
+ (f"missing keys: {','.join(sorted(missing))}" if missing else ""),
ast_matrix._start,
ast_matrix._end,
)
matrices.append({k: await v.eval(ctx) for k, v in inc_spec.items()})
for pos, dct in enumerate(matrices):
dct["ORDINAL"] = pos
return matrices
async def setup_cache(
ctx: RootABC,
base_cache: CacheConf,
ast_cache: Optional[ast.Cache],
default_strategy: ast.CacheStrategy,
) -> CacheConf:
if ast_cache is None:
return base_cache
strategy = ast_cache.strategy
if strategy is None:
strategy = default_strategy
if strategy == ast.CacheStrategy.INHERIT:
strategy = base_cache.strategy
life_span = await ast_cache.life_span.eval(ctx)
if life_span is None:
life_span = base_cache.life_span
else:
life_span = min(base_cache.life_span, life_span)
return CacheConf(strategy=strategy, life_span=life_span)
def check_module_call_is_local(action_name: str, call_ast: ast.BaseModuleCall) -> None:
if not ActionSpec.parse(action_name).is_local:
raise EvalError(
f"Module call to non local action '{action_name}' is forbidden",
start=call_ast._start,
end=call_ast._end,
)
class SupportsAstMerge(Protocol):
@property
def _specified_fields(self) -> AbstractSet[str]:
...
_MergeTarget = TypeVar("_MergeTarget", bound=SupportsAstMerge)
async def merge_asts(child: _MergeTarget, parent: SupportsAstMerge) -> _MergeTarget:
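    # Copy every field the parent explicitly specified into the child unless
    # the child already set it; sequence-valued fields are concatenated and
    # mapping-valued fields are merged instead of being overwritten.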
child_fields = {f.name for f in dataclasses.fields(child)}
for field in parent._specified_fields:
if field == "mixins" or field not in child_fields:
continue
field_present = field in child._specified_fields
child_value = getattr(child, field)
parent_value = getattr(parent, field)
merge_supported = isinstance(parent_value, BaseSequenceExpr) or isinstance(
parent_value, BaseMappingExpr
)
if not field_present or (child_value is None and merge_supported):
child = replace(
child,
**{field: parent_value},
_specified_fields=child._specified_fields | {field},
)
elif isinstance(parent_value, BaseSequenceExpr):
assert isinstance(child_value, BaseSequenceExpr)
child = replace(
child, **{field: ConcatSequenceExpr(child_value, parent_value)}
)
elif isinstance(parent_value, BaseMappingExpr):
assert isinstance(child_value, BaseMappingExpr)
child = replace(
child, **{field: MergeMappingsExpr(child_value, parent_value)}
)
return child
class MixinApplyTarget(Protocol):
@property
def mixins(self) -> Optional[Sequence[StrExpr]]:
...
@property
def _specified_fields(self) -> AbstractSet[str]:
...
_MixinApplyTarget = TypeVar("_MixinApplyTarget", bound=MixinApplyTarget)
async def apply_mixins(
base: _MixinApplyTarget, mixins: Mapping[str, SupportsAstMerge]
) -> _MixinApplyTarget:
if base.mixins is None:
return base
for mixin_expr in reversed(base.mixins):
mixin_name = await mixin_expr.eval(EMPTY_ROOT)
try:
mixin = mixins[mixin_name]
except KeyError:
raise EvalError(
f"Unknown mixin '{mixin_name}'",
start=mixin_expr.start,
end=mixin_expr.end,
)
base = await merge_asts(base, mixin)
return base
async def setup_mixins(
raw_mixins: Optional[Mapping[str, _MixinApplyTarget]]
) -> Mapping[str, _MixinApplyTarget]:
if raw_mixins is None:
return {}
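    # Mixins may extend other mixins, so resolve them in dependency order:
    # build a graph of mixin -> mixins it uses and apply them via a topological
    # sort, so every mixin is fully merged before anything that depends on it.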
graph: Dict[str, Dict[str, int]] = {}
for mixin_name, mixin in raw_mixins.items():
mixins = mixin.mixins or []
graph[mixin_name] = {await dep_expr.eval(EMPTY_ROOT): 1 for dep_expr in mixins}
topo = ColoredTopoSorter(graph)
result: Dict[str, _MixinApplyTarget] = {}
while not topo.is_all_colored(1):
for mixin_name in topo.get_ready():
result[mixin_name] = await apply_mixins(raw_mixins[mixin_name], result)
topo.mark(mixin_name, 1)
return result
class RunningLiveFlow:
_ast_flow: ast.LiveFlow
_ctx: LiveContext
_cl: ConfigLoader
_mixins: Mapping[str, SupportsAstMerge]
def __init__(
self,
ast_flow: ast.LiveFlow,
ctx: LiveContext,
config_loader: ConfigLoader,
defaults: DefaultsConf,
mixins: Mapping[str, SupportsAstMerge],
):
self._ast_flow = ast_flow
self._ctx = ctx
self._cl = config_loader
self._defaults = defaults
self._mixins = mixins
@property
def job_ids(self) -> Iterable[str]:
return sorted(self._ast_flow.jobs)
@property
def project(self) -> ProjectCtx:
return self._ctx.project
@property
def flow(self) -> FlowCtx:
return self._ctx.flow
@property
def tags(self) -> AbstractSet[str]:
return self._ctx.tags
@property
def volumes(self) -> Mapping[str, VolumeCtx]:
return self._ctx.volumes
@property
def images(self) -> Mapping[str, ImageCtx]:
return self._ctx.images
async def is_multi(self, job_id: str) -> bool:
# Simple shortcut
return (await self.get_meta(job_id)).multi
async def _get_job_ast(
self, job_id: str
) -> Union[ast.Job, ast.JobActionCall, ast.JobModuleCall]:
try:
base = self._ast_flow.jobs[job_id]
if isinstance(base, ast.Job):
base = await apply_mixins(base, self._mixins)
return base
except KeyError:
raise UnknownJob(job_id)
async def _get_action_ast(
self, call_ast: Union[ast.JobActionCall, ast.JobModuleCall]
) -> ast.LiveAction:
if isinstance(call_ast, ast.JobActionCall):
action_name = await call_ast.action.eval(EMPTY_ROOT)
else:
action_name = await call_ast.module.eval(EMPTY_ROOT)
check_module_call_is_local(action_name, call_ast)
action_ast = await self._cl.fetch_action(action_name)
if action_ast.kind != ast.ActionKind.LIVE:
raise TypeError(
f"Invalid action '{action_ast}' "
f"type {action_ast.kind.value} for live flow"
)
assert isinstance(action_ast, ast.LiveAction)
return action_ast
async def get_meta(self, job_id: str) -> JobMeta:
job_ast = await self._get_job_ast(job_id)
if isinstance(job_ast, (ast.JobActionCall, ast.JobModuleCall)):
action_ast = await self._get_action_ast(job_ast)
multi = await action_ast.job.multi.eval(EMPTY_ROOT)
else:
multi = await job_ast.multi.eval(EMPTY_ROOT)
tags = set(self.tags)
tags.add(f"job:{_id2tag(job_id)}")
return JobMeta(
id=job_id,
multi=bool(multi),
tags=tags,
)
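# For example (hypothetical job id): get_meta("train_model") returns tags such
# as {"job:train-model", *flow-level tags}; job ids are normalized via
# _id2tag(), which replaces underscores with dashes and lower-cases the id.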
async def get_job(self, job_id: str, params: Mapping[str, str]) -> Job:
assert not await self.is_multi(
job_id
), "Use get_multi_job() for multi jobs instead of get_job()"
job_ast = await self._get_job_ast(job_id)
ctx = self._ctx.to_job_ctx(
params=await setup_params_ctx(self._ctx, params, job_ast.params)
)
return await self._get_job(ctx, ctx.env, self._defaults, job_id)
async def get_multi_job(
self,
job_id: str,
suffix: str,
args: Optional[Sequence[str]],
params: Mapping[str, str],
) -> Job:
assert await self.is_multi(
job_id
), "Use get_job() for not multi jobs instead of get_multi_job()"
if args is None:
args_str = ""
else:
args_str = " ".join(shlex.quote(arg) for arg in args)
job_ast = await self._get_job_ast(job_id)
ctx = self._ctx.to_multi_job_ctx(
multi=MultiCtx(suffix=suffix, args=args_str),
params=await setup_params_ctx(self._ctx, params, job_ast.params),
)
job = await self._get_job(ctx, ctx.env, self._defaults, job_id)
return replace(job, tags=job.tags | {f"multi:{suffix}"})
async def _get_job(
self,
ctx: RootABC,
env_ctx: EnvCtx,
defaults: DefaultsConf,
job_id: str,
) -> Job:
job = await self._get_job_ast(job_id)
if isinstance(job, ast.JobActionCall):
action_ast = await self._get_action_ast(job)
ctx = LiveActionContext(
inputs=await setup_inputs_ctx(ctx, job, action_ast.inputs),
_client=self._ctx._client,
_dry_run=self._ctx._dry_run,
)
env_ctx = {}
defaults = DefaultsConf()
job = action_ast.job
if isinstance(job, ast.JobModuleCall):
action_ast = await self._get_action_ast(job)
ctx = LiveModuleContext(
inputs=await setup_inputs_ctx(ctx, job, action_ast.inputs),
_parent=ctx,
_client=self._ctx._client,
_dry_run=self._ctx._dry_run,
)
job = action_ast.job
assert isinstance(job, ast.Job)
tags = (await self.get_meta(job_id)).tags
if job.tags is not None:
tmp_tags = await job.tags.eval(ctx)
assert isinstance(tmp_tags, list)
tags |= set(tmp_tags)
env = dict(env_ctx)
if job.env is not None:
tmp_env = await job.env.eval(ctx)
assert isinstance(tmp_env, dict)
env.update(tmp_env)
title = await job.title.eval(ctx)
if title is None:
title = f"{self._ctx.flow.flow_id}.{job_id}"
workdir = (await job.workdir.eval(ctx)) or defaults.workdir
volumes: List[str] = list(defaults.volumes)
if job.volumes is not None:
tmp_volumes = await job.volumes.eval(ctx)
assert isinstance(tmp_volumes, list)
for volume in tmp_volumes:
if volume:
volumes.append(volume)
life_span = (await job.life_span.eval(ctx)) or defaults.life_span
preset = (await job.preset.eval(ctx)) or defaults.preset
schedule_timeout = (
await job.schedule_timeout.eval(ctx)
) or defaults.schedule_timeout
port_forward: List[str] = []
if job.port_forward is not None:
tmp_port_forward = await job.port_forward.eval(ctx)
assert isinstance(tmp_port_forward, list)
port_forward = tmp_port_forward
image = await job.image.eval(ctx)
if image is None:
raise EvalError(
f"Image for job {job_id} is not specified",
start=job.image.start,
end=job.image.end,
)
return Job(
id=job_id,
detach=bool(await job.detach.eval(ctx)),
browse=bool(await job.browse.eval(ctx)),
title=title,
name=await job.name.eval(ctx),
image=image,
preset=preset,
schedule_timeout=schedule_timeout,
entrypoint=await job.entrypoint.eval(ctx),
cmd=await job.cmd.eval(ctx),
workdir=workdir,
volumes=volumes,
life_span=life_span,
http_port=await job.http_port.eval(ctx),
http_auth=await job.http_auth.eval(ctx),
pass_config=await job.pass_config.eval(ctx),
port_forward=port_forward,
multi=await self.is_multi(job_id),
env=env,
tags=tags,
)
@classmethod
async def create(
cls,
config_loader: ConfigLoader,
config_name: str = "live",
dry_run: bool = False,
) -> "RunningLiveFlow":
ast_flow = await config_loader.fetch_flow(config_name)
ast_project = await config_loader.fetch_project()
assert isinstance(ast_flow, ast.LiveFlow)
project_ctx = await setup_project_ctx(EMPTY_ROOT, config_loader)
flow_ctx = await setup_flow_ctx(
EMPTY_ROOT, ast_flow, config_name, config_loader, project_ctx
)
git_ctx = GitCtx(await collect_git_info())
step_1_ctx = LiveContextStep1(
project=project_ctx,
flow=flow_ctx,
git=git_ctx,
_client=config_loader.client,
_dry_run=dry_run,
)
defaults, env, tags = await setup_defaults_env_tags_ctx(
step_1_ctx, ast_flow.defaults, ast_project.defaults
)
volumes = {
**(await setup_volumes_ctx(step_1_ctx, ast_project.volumes)),
**(await setup_volumes_ctx(step_1_ctx, ast_flow.volumes)),
}
images = {
**(await setup_images_ctx(step_1_ctx, step_1_ctx, ast_project.images)),
**(await setup_images_ctx(step_1_ctx, step_1_ctx, ast_flow.images)),
}
live_ctx = step_1_ctx.to_live_ctx(
env=env,
tags=tags,
volumes=volumes,
images=images,
)
raw_mixins: Mapping[str, MixinApplyTarget] = {
**(ast_project.mixins or {}),
**(ast_flow.mixins or {}),
}
mixins = await setup_mixins(raw_mixins)
return cls(ast_flow, live_ctx, config_loader, defaults, mixins)
_T = TypeVar("_T", bound=BaseBatchContext, covariant=True)
class EarlyBatch:
def __init__(
self,
ctx: WithFlowContext,
tasks: Mapping[str, "BaseEarlyTask"],
config_loader: ConfigLoader,
):
self._flow_ctx = ctx
self._cl = config_loader
self._tasks = tasks
@property
def graph(self) -> Mapping[str, Mapping[str, ast.NeedsLevel]]:
return self._graph()
@property
@abstractmethod
def mixins(self) -> Optional[Mapping[str, SupportsAstMerge]]:
pass
@property
@abstractmethod
def early_images(self) -> Mapping[str, EarlyImageCtx]:
pass
@abstractmethod
def get_image_ast(self, image_id: str) -> ast.Image:
pass
@lru_cache()
def _graph(self) -> Mapping[str, Mapping[str, ast.NeedsLevel]]:
# This function is only needed for mypy
return {key: early_task.needs for key, early_task in self._tasks.items()}
def _get_prep(self, real_id: str) -> "BaseEarlyTask":
try:
return self._tasks[real_id]
except KeyError:
raise UnknownTask(real_id)
async def is_task(self, real_id: str) -> bool:
early_task = self._get_prep(real_id)
return isinstance(early_task, EarlyTask)
async def is_local(self, real_id: str) -> bool:
early_task = self._get_prep(real_id)
return isinstance(early_task, EarlyLocalCall)
async def is_action(self, real_id: str) -> bool:
early_task = self._get_prep(real_id)
return isinstance(early_task, (EarlyBatchCall, EarlyModuleCall))
async def state_from(self, real_id: str) -> Optional[str]:
prep_task = self._get_prep(real_id)
if isinstance(prep_task, EarlyPostTask):
return prep_task.state_from
return None
def _task_context_class(self) -> Type[Context]:
return BatchTaskContext
def _known_inputs(self) -> AbstractSet[str]:
return set()
def validate_expressions(self) -> List[EvalError]:
from .expr_validation import validate_expr
errors: List[EvalError] = []
for task in self._tasks.values():
ctx_cls = self._task_context_class()
known_needs = task.needs.keys()
known_inputs = self._known_inputs()
errors += validate_expr(task.enable, ctx_cls, known_needs, known_inputs)
if isinstance(task, EarlyTask):
_ctx_cls = ctx_cls
if isinstance(task, EarlyStatefulCall):
_ctx_cls = StatefulActionContext
known_inputs = (task.action.inputs or {}).keys()
ast_task = task.ast_task
for field in fields(ast.ExecUnit):
field_value = getattr(ast_task, field.name)
if field_value is not None and isinstance(field_value, Expr):
errors += validate_expr(
field_value, _ctx_cls, known_needs, known_inputs
)
if isinstance(task, (BaseEarlyCall, EarlyModuleCall)):
args = task.call.args or {}
for arg_expr in args.values():
errors += validate_expr(
arg_expr, ctx_cls, known_needs, known_inputs
)
if isinstance(task, EarlyLocalCall):
known_inputs = (task.action.inputs or {}).keys()
errors += validate_expr(
task.action.cmd, LocalActionContext, known_inputs=known_inputs
)
return errors
async def get_action_early(self, real_id: str) -> "EarlyBatchAction":
assert await self.is_action(
real_id
), f"get_action_early() cannot be used for task {real_id}"
prep_task = cast(
Union[EarlyBatchCall, EarlyModuleCall], self._get_prep(real_id)
) # Already checked
await validate_action_call(prep_task.call, prep_task.action.inputs)
if isinstance(prep_task, EarlyModuleCall):
parent_ctx: Type[RootABC] = self._task_context_class()
mixins = self.mixins
else:
parent_ctx = EmptyRoot
mixins = None
tasks = await EarlyTaskGraphBuilder(
self._flow_ctx, self._cl, prep_task.action.tasks, mixins
).build()
early_images = await setup_images_early(
self._flow_ctx, self._flow_ctx, prep_task.action.images
)
return EarlyBatchAction(
self._flow_ctx,
tasks,
early_images,
self._cl,
prep_task.action,
parent_ctx,
mixins,
)
async def get_local_early(self, real_id: str) -> "EarlyLocalCall":
assert await self.is_local(
real_id
), f"get_local_early() cannot used for action call {real_id}"
prep_task = self._get_prep(real_id)
assert isinstance(prep_task, EarlyLocalCall) # Already checked
return prep_task
class EarlyBatchAction(EarlyBatch):
def __init__(
self,
ctx: WithFlowContext,
tasks: Mapping[str, "BaseEarlyTask"],
early_images: Mapping[str, EarlyImageCtx],
config_loader: ConfigLoader,
action: ast.BatchAction,
parent_ctx_class: Type[RootABC],
mixins: Optional[Mapping[str, SupportsAstMerge]],
):
super().__init__(ctx, tasks, config_loader)
self._action = action
self._early_images = early_images
self._parent_ctx_class = parent_ctx_class
self._mixins = mixins
@property
def early_images(self) -> Mapping[str, EarlyImageCtx]:
return self._early_images
@property
def mixins(self) -> Optional[Mapping[str, SupportsAstMerge]]:
return self._mixins
def get_image_ast(self, image_id: str) -> ast.Image:
if self._action.images is None:
raise KeyError(image_id)
return self._action.images[image_id]
def _task_context_class(self) -> Type[Context]:
return BatchActionTaskContext[self._parent_ctx_class] # type: ignore
def _known_inputs(self) -> AbstractSet[str]:
return (self._action.inputs or {}).keys()
def validate_expressions(self) -> List[EvalError]:
from .expr_validation import validate_expr
errors = super().validate_expressions()
known_inputs = self._known_inputs()
if self._action.cache:
errors += validate_expr(
self._action.cache.life_span,
BatchActionContext,
known_inputs=known_inputs,
)
outputs = self._action.outputs
tasks_ids = self._tasks.keys()
if outputs and outputs.values:
for output in outputs.values.values():
errors += validate_expr(
output.value,
BatchActionOutputsContext,
known_needs=tasks_ids,
known_inputs=known_inputs,
)
return errors
class RunningBatchBase(Generic[_T], EarlyBatch, ABC):
_tasks: Mapping[str, "BasePrepTask"]
def __init__(
self,
flow_ctx: WithFlowContext,
ctx: _T,
default_tags: TagsCtx,
tasks: Mapping[str, "BasePrepTask"],
config_loader: ConfigLoader,
defaults: DefaultsConf,
bake_id: str,
local_info: Optional[LocallyPreparedInfo],
):
super().__init__(flow_ctx, tasks, config_loader)
self._ctx = ctx
self._default_tags = default_tags
self._bake_id = bake_id
self._defaults = defaults
self._local_info = local_info
@property
def early_images(self) -> Mapping[str, EarlyImageCtx]:
return self._ctx.images
@property
def images(self) -> Mapping[str, ImageCtx]:
return self._ctx.images
def _get_prep(self, real_id: str) -> "BasePrepTask":
prep_task = super()._get_prep(real_id)
assert isinstance(prep_task, BasePrepTask)
return prep_task
def _task_context(
self, real_id: str, needs: NeedsCtx, state: StateCtx
) -> BaseTaskContext:
prep_task = self._get_prep(real_id)
needs_completed = {
task_id
for task_id, level in prep_task.needs.items()
if level == ast.NeedsLevel.COMPLETED
}
if needs.keys() != needs_completed:
extra = ",".join(needs.keys() - needs_completed)
missing = ",".join(needs_completed - needs.keys())
err = ["Error in 'needs':"]
if extra:
err.append(f"unexpected keys {extra}")
if missing:
err.append(f"missing keys {missing}")
raise ValueError(" ".join(err))
return self._ctx.to_matrix_ctx(
matrix=prep_task.matrix,
strategy=prep_task.strategy,
).to_task_ctx(
needs=needs,
state=state,
)
async def get_meta(
self, real_id: str, needs: NeedsCtx, state: StateCtx
) -> TaskMeta:
prep_task = self._get_prep(real_id)
ctx = self._task_context(real_id, needs, state)
return TaskMeta(
enable=await prep_task.enable.eval(ctx),
strategy=prep_task.strategy,
cache=prep_task.cache,
)
async def get_task(
self, prefix: FullID, real_id: str, needs: NeedsCtx, state: StateCtx
) -> Task:
assert await self.is_task(
real_id
), f"get_task() cannot be used for tasks action call with id {real_id}"
prep_task = self._get_prep(real_id)
assert isinstance(prep_task, (PrepTask, PrepStatefulCall)) # Already checked
task_ctx = self._task_context(real_id, needs, state)
ctx: RootABC = task_ctx
defaults = self._defaults
if isinstance(prep_task, PrepStatefulCall):
ctx = StatefulActionContext(
inputs=await setup_inputs_ctx(
ctx, prep_task.call, prep_task.action.inputs
),
_client=self._ctx._client,
_dry_run=self._ctx._dry_run,
)
defaults = DefaultsConf() # TODO: Is it correct?
full_id = prefix + (real_id,)
try:
env_ctx = ctx.lookup("env")
assert isinstance(env_ctx, dict)
env: Dict[str, str] = dict(env_ctx)
except NotAvailable:
env = {}
if prep_task.ast_task.env is not None:
tmp_env = await prep_task.ast_task.env.eval(ctx)
assert isinstance(tmp_env, dict)
env.update(tmp_env)
title = await prep_task.ast_task.title.eval(ctx)
tags = set()
if prep_task.ast_task.tags is not None:
tmp_tags = await prep_task.ast_task.tags.eval(ctx)
assert isinstance(tmp_tags, list)
tags |= set(tmp_tags)
tags |= {"task:" + _id2tag(".".join(full_id))}
tags |= set(self._default_tags)
workdir = (await prep_task.ast_task.workdir.eval(ctx)) or defaults.workdir
volumes: List[str] = list(defaults.volumes)
if prep_task.ast_task.volumes is not None:
tmp_volumes = await prep_task.ast_task.volumes.eval(ctx)
assert isinstance(tmp_volumes, list)
for val in tmp_volumes:
if val:
volumes.append(val)
life_span = (await prep_task.ast_task.life_span.eval(ctx)) or defaults.life_span
preset = (await prep_task.ast_task.preset.eval(ctx)) or defaults.preset
schedule_timeout = (
await prep_task.ast_task.schedule_timeout.eval(ctx)
) or defaults.schedule_timeout
# Enable should be calculated using outer ctx for stateful calls
enable = (await self.get_meta(real_id, needs, state)).enable
image = await prep_task.ast_task.image.eval(ctx)
if image is None:
# Should have been validated earlier, kept as a safety net
raise EvalError(
f"Image for task {prep_task.real_id} is not specified",
start=prep_task.ast_task.image.start,
end=prep_task.ast_task.image.end,
)
task = Task(
id=prep_task.id,
title=title,
name=(await prep_task.ast_task.name.eval(ctx)),
image=image,
preset=preset,
schedule_timeout=schedule_timeout,
entrypoint=await prep_task.ast_task.entrypoint.eval(ctx),
cmd=await prep_task.ast_task.cmd.eval(ctx),
workdir=workdir,
volumes=volumes,
life_span=life_span,
http_port=await prep_task.ast_task.http_port.eval(ctx),
http_auth=await prep_task.ast_task.http_auth.eval(ctx),
pass_config=await prep_task.ast_task.pass_config.eval(ctx),
enable=enable,
cache=prep_task.cache,
strategy=prep_task.strategy,
tags=tags,
env=env,
caching_key="",
)
return replace(
task,
tags=task.tags | {f"bake_id:{self._bake_id}"},
caching_key=_hash(dict(task=task, needs=needs, state=state)),
)
async def get_action(
self, real_id: str, needs: NeedsCtx
) -> "RunningBatchActionFlow":
assert await self.is_action(
real_id
), f"get_task() cannot used for action call {real_id}"
prep_task = cast(
Union[PrepBatchCall, PrepModuleCall], self._get_prep(real_id)
) # Already checked
ctx = self._task_context(real_id, needs, {})
if isinstance(prep_task, PrepModuleCall):
parent_ctx: RootABC = ctx
defaults = self._defaults
mixins = self.mixins
else:
parent_ctx = EMPTY_ROOT
defaults = DefaultsConf()
mixins = None
return await RunningBatchActionFlow.create(
flow_ctx=self._flow_ctx,
parent_ctx=parent_ctx,
ast_action=prep_task.action,
base_cache=prep_task.cache,
base_strategy=prep_task.strategy,
inputs=await setup_inputs_ctx(ctx, prep_task.call, prep_task.action.inputs),
default_tags=self._default_tags,
bake_id=self._bake_id,
local_info=self._local_info.children_info.get(real_id)
if self._local_info
else None,
config_loader=self._cl,
defaults=defaults,
mixins=mixins,
)
async def get_local(self, real_id: str, needs: NeedsCtx) -> LocalTask:
assert await self.is_local(
real_id
), f"get_task() cannot used for action call {real_id}"
prep_task = self._get_prep(real_id)
assert isinstance(prep_task, PrepLocalCall) # Already checked
ctx = self._task_context(real_id, needs, {})
action_ctx = LocalActionContext(
inputs=await setup_inputs_ctx(ctx, prep_task.call, prep_task.action.inputs),
_client=self._ctx._client,
_dry_run=self._ctx._dry_run,
)
return LocalTask(
id=prep_task.id,
cmd=await prep_task.action.cmd.eval(action_ctx),
)
class RunningBatchFlow(RunningBatchBase[BatchContext]):
def __init__(
self,
ctx: BatchContext,
tasks: Mapping[str, "BasePrepTask"],
config_loader: ConfigLoader,
defaults: DefaultsConf,
bake_id: str,
local_info: Optional[LocallyPreparedInfo],
ast_flow: ast.BatchFlow,
ast_project: ast.Project,
mixins: Optional[Mapping[str, SupportsAstMerge]],
):
super().__init__(
ctx,
ctx,
ctx.tags,
tasks,
config_loader,
defaults,
bake_id,
local_info,
)
self._ast_flow = ast_flow
self._ast_project = ast_project
self._mixins = mixins
def get_image_ast(self, image_id: str) -> ast.Image:
try:
if self._ast_flow.images is None:
raise KeyError(image_id)
return self._ast_flow.images[image_id]
except KeyError:
if self._ast_project.images is not None:
return self._ast_project.images[image_id]
raise
@property
def mixins(self) -> Optional[Mapping[str, SupportsAstMerge]]:
return self._mixins
@property
def params(self) -> Mapping[str, str]:
return self._ctx.params
@property
def project_id(self) -> str:
return self._ctx.flow.project_id
@property
def volumes(self) -> Mapping[str, VolumeCtx]:
return self._ctx.volumes
@property
def life_span(self) -> Optional[timedelta]:
if self._ctx.flow.life_span:
return timedelta(seconds=self._ctx.flow.life_span)
return None
@property
def workspace(self) -> LocalPath:
return self._ctx.flow.workspace
@classmethod
async def create(
cls,
config_loader: ConfigLoader,
batch: str,
bake_id: str,
params: Optional[Mapping[str, str]] = None,
local_info: Optional[LocallyPreparedInfo] = None,
dry_run: bool = False,
) -> "RunningBatchFlow":
ast_flow = await config_loader.fetch_flow(batch)
ast_project = await config_loader.fetch_project()
assert isinstance(ast_flow, ast.BatchFlow)
project_ctx = await setup_project_ctx(EMPTY_ROOT, config_loader)
flow_ctx = await setup_batch_flow_ctx(
EMPTY_ROOT, ast_flow, batch, config_loader, project_ctx
)
params_ctx = await setup_params_ctx(EMPTY_ROOT, params, ast_flow.params)
step_1_ctx = BatchContextStep1(
project=project_ctx,
flow=flow_ctx,
params=params_ctx,
git=GitCtx(local_info.git_info if local_info else None),
_client=config_loader.client,
_dry_run=dry_run,
)
if local_info is None:
early_images: Mapping[str, EarlyImageCtx] = {
**(
await setup_images_early(step_1_ctx, step_1_ctx, ast_project.images)
),
**(await setup_images_early(step_1_ctx, step_1_ctx, ast_flow.images)),
}
else:
early_images = local_info.early_images
defaults, env, tags = await setup_defaults_env_tags_ctx(
step_1_ctx, ast_flow.defaults, ast_project.defaults
)
volumes = {
**(await setup_volumes_ctx(step_1_ctx, ast_project.volumes)),
**(await setup_volumes_ctx(step_1_ctx, ast_flow.volumes)),
}
images = {
**(
await setup_images_ctx(
step_1_ctx, step_1_ctx, ast_project.images, early_images
)
),
**(
await setup_images_ctx(
step_1_ctx, step_1_ctx, ast_flow.images, early_images
)
),
}
step_2_ctx = step_1_ctx.to_step_2(
env=env,
tags=tags,
volumes=volumes,
images=images,
)
if ast_project.defaults:
base_cache = await setup_cache(
step_2_ctx,
CacheConf(),
ast_project.defaults.cache,
ast.CacheStrategy.INHERIT,
)
else:
base_cache = CacheConf()
if ast_flow.defaults:
ast_cache = ast_flow.defaults.cache
else:
ast_cache = None
cache_conf = await setup_cache(
step_2_ctx, base_cache, ast_cache, ast.CacheStrategy.INHERIT
)
batch_ctx = step_2_ctx.to_batch_ctx(
strategy=await setup_strategy_ctx(
step_2_ctx, ast_flow.defaults, ast_project.defaults
),
)
raw_mixins: Mapping[str, MixinApplyTarget] = {
**(ast_project.mixins or {}),
**(ast_flow.mixins or {}),
}
mixins = await setup_mixins(raw_mixins)
tasks = await TaskGraphBuilder(
batch_ctx, config_loader, cache_conf, ast_flow.tasks, mixins
).build()
return RunningBatchFlow(
batch_ctx,
tasks,
config_loader,
defaults,
bake_id,
local_info,
ast_flow,
ast_project,
mixins,
)
class RunningBatchActionFlow(RunningBatchBase[BatchActionContext[RootABC]]):
def __init__(
self,
flow_ctx: WithFlowContext,
ctx: BatchActionContext[RootABC],
default_tags: TagsCtx,
tasks: Mapping[str, "BasePrepTask"],
config_loader: ConfigLoader,
defaults: DefaultsConf,
action: ast.BatchAction,
bake_id: str,
local_info: Optional[LocallyPreparedInfo],
mixins: Optional[Mapping[str, SupportsAstMerge]],
):
super().__init__(
flow_ctx,
ctx,
default_tags,
tasks,
config_loader,
defaults,
bake_id,
local_info,
)
self._action = action
self._mixins = mixins
def get_image_ast(self, image_id: str) -> ast.Image:
if self._action.images is None:
raise KeyError(image_id)
return self._action.images[image_id]
@property
def mixins(self) -> Optional[Mapping[str, SupportsAstMerge]]:
return self._mixins
async def calc_outputs(self, task_results: NeedsCtx) -> DepCtx:
if any(i.result == TaskStatus.FAILED for i in task_results.values()):
return DepCtx(TaskStatus.FAILED, {})
elif any(i.result == TaskStatus.CANCELLED for i in task_results.values()):
return DepCtx(TaskStatus.CANCELLED, {})
else:
ctx = self._ctx.to_outputs_ctx(task_results)
ret = {}
if self._action.outputs and self._action.outputs.values is not None:
for name, descr in self._action.outputs.values.items():
val = await descr.value.eval(ctx)
assert val is not None
ret[name] = val
return DepCtx(TaskStatus.SUCCEEDED, ret)
@classmethod
async def create(
cls,
flow_ctx: WithFlowContext,
parent_ctx: RootABC,
ast_action: ast.BatchAction,
base_cache: CacheConf,
base_strategy: StrategyCtx,
inputs: InputsCtx,
default_tags: TagsCtx,
config_loader: ConfigLoader,
bake_id: str,
local_info: Optional[LocallyPreparedInfo],
defaults: DefaultsConf = DefaultsConf(),
mixins: Optional[Mapping[str, SupportsAstMerge]] = None,
) -> "RunningBatchActionFlow":
step_1_ctx = BatchActionContextStep1(
inputs=inputs,
strategy=base_strategy,
git=GitCtx(local_info.git_info if local_info else None),
_client=config_loader.client,
_parent=parent_ctx,
_dry_run=parent_ctx.dry_run,
)
if local_info is None:
early_images = await setup_images_early(
step_1_ctx, flow_ctx, ast_action.images
)
else:
early_images = local_info.early_images
images = await setup_images_ctx(
step_1_ctx, flow_ctx, ast_action.images, early_images
)
action_context = step_1_ctx.to_action_ctx(images=images)
cache = await setup_cache(
action_context, base_cache, ast_action.cache, ast.CacheStrategy.INHERIT
)
tasks = await TaskGraphBuilder(
action_context, config_loader, cache, ast_action.tasks, mixins
).build()
return RunningBatchActionFlow(
flow_ctx,
action_context,
default_tags,
tasks,
config_loader,
defaults,
ast_action,
bake_id,
local_info,
mixins,
)
# Task graph builder
@dataclass(frozen=True)
class BaseEarlyTask:
id: Optional[str]
real_id: str
needs: Mapping[str, ast.NeedsLevel] # Keys are batch.id
matrix: MatrixCtx
enable: EnableExpr
def to_task(self, ast_task: ast.ExecUnit) -> "EarlyTask":
return EarlyTask(
id=self.id,
real_id=self.real_id,
needs=self.needs,
matrix=self.matrix,
enable=self.enable,
ast_task=ast_task,
)
def to_batch_call(
self,
action_name: str,
action: ast.BatchAction,
call: ast.TaskActionCall,
) -> "EarlyBatchCall":
return EarlyBatchCall(
id=self.id,
real_id=self.real_id,
needs=self.needs,
matrix=self.matrix,
enable=self.enable,
action_name=action_name,
action=action,
call=call,
)
def to_module_call(
self,
action_name: str,
action: ast.BatchAction,
call: ast.TaskModuleCall,
) -> "EarlyModuleCall":
return EarlyModuleCall(
id=self.id,
real_id=self.real_id,
needs=self.needs,
matrix=self.matrix,
enable=self.enable,
action_name=action_name,
action=action,
call=call,
)
def to_local_call(
self,
action_name: str,
action: ast.LocalAction,
call: ast.TaskActionCall,
) -> "EarlyLocalCall":
return EarlyLocalCall(
id=self.id,
real_id=self.real_id,
needs=self.needs,
matrix=self.matrix,
enable=self.enable,
action_name=action_name,
action=action,
call=call,
)
def to_stateful_call(
self,
action_name: str,
action: ast.StatefulAction,
call: ast.TaskActionCall,
) -> "EarlyStatefulCall":
return EarlyStatefulCall(
id=self.id,
real_id=self.real_id,
needs=self.needs,
matrix=self.matrix,
ast_task=action.main,
enable=self.enable,
action_name=action_name,
action=action,
call=call,
)
def to_post_task(self, ast_task: ast.ExecUnit, state_from: str) -> "EarlyPostTask":
return EarlyPostTask(
id=self.id,
real_id=self.real_id,
needs=self.needs,
matrix=self.matrix,
enable=self.enable,
ast_task=ast_task,
state_from=state_from,
)
def to_prep_base(self, strategy: StrategyCtx, cache: CacheConf) -> "BasePrepTask":
return BasePrepTask(
id=self.id,
real_id=self.real_id,
needs=self.needs,
matrix=self.matrix,
enable=self.enable,
strategy=strategy,
cache=cache,
)
@dataclass(frozen=True)
class EarlyTask(BaseEarlyTask):
ast_task: ast.ExecUnit
@dataclass(frozen=True)
class BaseEarlyCall(BaseEarlyTask):
call: ast.TaskActionCall
action_name: str
@dataclass(frozen=True)
class EarlyBatchCall(BaseEarlyCall):
action: ast.BatchAction
@dataclass(frozen=True)
class EarlyLocalCall(BaseEarlyCall):
action: ast.LocalAction
@dataclass(frozen=True)
class EarlyStatefulCall(EarlyTask, BaseEarlyCall):
action: ast.StatefulAction
@dataclass(frozen=True)
class EarlyPostTask(EarlyTask):
state_from: str
@dataclass(frozen=True)
class EarlyModuleCall(BaseEarlyTask):
call: ast.TaskModuleCall
action_name: str
action: ast.BatchAction
@dataclass(frozen=True)
class BasePrepTask(BaseEarlyTask):
strategy: StrategyCtx
cache: CacheConf
def to_task(self, ast_task: ast.ExecUnit) -> "PrepTask":
return PrepTask(
id=self.id,
real_id=self.real_id,
needs=self.needs,
matrix=self.matrix,
strategy=self.strategy,
cache=self.cache,
enable=self.enable,
ast_task=ast_task,
)
def to_batch_call(
self,
action_name: str,
action: ast.BatchAction,
call: ast.TaskActionCall,
) -> "PrepBatchCall":
return PrepBatchCall(
id=self.id,
real_id=self.real_id,
needs=self.needs,
matrix=self.matrix,
strategy=self.strategy,
cache=self.cache,
enable=self.enable,
action_name=action_name,
action=action,
call=call,
)
def to_module_call(
self,
action_name: str,
action: ast.BatchAction,
call: ast.TaskModuleCall,
) -> "PrepModuleCall":
return PrepModuleCall(
id=self.id,
real_id=self.real_id,
needs=self.needs,
matrix=self.matrix,
strategy=self.strategy,
cache=self.cache,
enable=self.enable,
action_name=action_name,
action=action,
call=call,
)
def to_local_call(
self,
action_name: str,
action: ast.LocalAction,
call: ast.TaskActionCall,
) -> "PrepLocalCall":
return PrepLocalCall(
id=self.id,
real_id=self.real_id,
needs=self.needs,
matrix=self.matrix,
strategy=self.strategy,
cache=CacheConf(strategy=ast.CacheStrategy.NONE),
enable=self.enable,
action_name=action_name,
action=action,
call=call,
)
def to_stateful_call(
self,
action_name: str,
action: ast.StatefulAction,
call: ast.TaskActionCall,
) -> "PrepStatefulCall":
return PrepStatefulCall(
id=self.id,
real_id=self.real_id,
needs=self.needs,
matrix=self.matrix,
strategy=self.strategy,
cache=CacheConf(strategy=ast.CacheStrategy.NONE),
enable=self.enable,
action_name=action_name,
ast_task=action.main,
action=action,
call=call,
)
def to_post_task(self, ast_task: ast.ExecUnit, state_from: str) -> "PrepPostTask":
return PrepPostTask(
id=self.id,
real_id=self.real_id,
needs=self.needs,
matrix=self.matrix,
strategy=self.strategy,
cache=CacheConf(strategy=ast.CacheStrategy.NONE),
enable=self.enable,
ast_task=ast_task,
state_from=state_from,
)
@dataclass(frozen=True)
class PrepTask(EarlyTask, BasePrepTask):
pass
@dataclass(frozen=True)
class PrepBatchCall(EarlyBatchCall, BasePrepTask):
pass
@dataclass(frozen=True)
class PrepLocalCall(EarlyLocalCall, BasePrepTask):
pass
@dataclass(frozen=True)
class PrepStatefulCall(EarlyStatefulCall, PrepTask):
pass
@dataclass(frozen=True)
class PrepPostTask(EarlyPostTask, PrepTask):
pass
@dataclass(frozen=True)
class PrepModuleCall(EarlyModuleCall, BasePrepTask):
pass
class EarlyTaskGraphBuilder:
MATRIX_SIZE_LIMIT = 256
def __init__(
self,
ctx: RootABC,
config_loader: ConfigLoader,
ast_tasks: Sequence[Union[ast.Task, ast.TaskActionCall, ast.TaskModuleCall]],
mixins: Optional[Mapping[str, SupportsAstMerge]],
):
self._ctx = ctx
self._cl = config_loader
self._ast_tasks = ast_tasks
self._mixins = mixins or {}
async def _extend_base(
self,
base: BaseEarlyTask,
ast_task: Union[ast.Task, ast.TaskActionCall, ast.TaskModuleCall],
) -> BaseEarlyTask:
return base
async def build(self) -> Mapping[str, BaseEarlyTask]:
post_tasks: List[List[EarlyPostTask]] = []
prep_tasks: Dict[str, BaseEarlyTask] = {}
last_needs: Set[str] = set()
# Only used for sanity checks
real_id_to_need_to_expr: Dict[str, Mapping[str, IdExpr]] = {}
for num, ast_task in enumerate(self._ast_tasks, 1):
assert isinstance(
ast_task, (ast.Task, ast.TaskActionCall, ast.TaskModuleCall)
)
matrix_ast = ast_task.strategy.matrix if ast_task.strategy else None
matrices = await setup_matrix(self._ctx, matrix_ast)
if len(matrices) > self.MATRIX_SIZE_LIMIT:
assert matrix_ast
raise EvalError(
f"The matrix size for task #{num} exceeds the limit of 256",
matrix_ast._start,
matrix_ast._end,
)
real_ids = set()
post_tasks_group = []
for matrix in matrices:
# make prep patch(es)
matrix_ctx = MatrixOnlyContext(
matrix=matrix,
_client=self._cl.client,
_dry_run=False,
)
task_id, real_id = await self._setup_ids(matrix_ctx, num, ast_task)
needs, need_to_expr = await self._setup_needs(
matrix_ctx, last_needs, ast_task
)
real_id_to_need_to_expr[real_id] = need_to_expr
base = BaseEarlyTask(
id=task_id,
real_id=real_id,
needs=needs,
matrix=matrix,
enable=ast_task.enable,
)
base = await self._extend_base(base, ast_task)
if isinstance(ast_task, ast.Task):
ast_task = await apply_mixins(ast_task, self._mixins)
prep_tasks[real_id] = base.to_task(ast_task)
elif isinstance(ast_task, ast.TaskModuleCall):
action_name = await ast_task.module.eval(EMPTY_ROOT)
check_module_call_is_local(action_name, ast_task)
action = await self._cl.fetch_action(action_name)
if isinstance(action, ast.BatchAction):
prep_tasks[real_id] = base.to_module_call(
action_name, action, ast_task
)
else:
raise ValueError(
f"Module call to {action_name} with "
f"kind {action.kind.value} "
"is not supported."
)
else:
assert isinstance(ast_task, ast.TaskActionCall)
action_name = await ast_task.action.eval(EMPTY_ROOT)
action = await self._cl.fetch_action(action_name)
if ast_task.cache and not isinstance(action, ast.BatchAction):
raise EvalError(
f"Specifying cache in action call to the action "
f"{action_name} of kind {action.kind.value} is "
f"not supported.",
ast_task._start,
ast_task._end,
)
if isinstance(action, ast.BatchAction):
prep_tasks[real_id] = base.to_batch_call(
action_name, action, ast_task
)
elif isinstance(action, ast.LocalAction):
prep_tasks[real_id] = base.to_local_call(
action_name, action, ast_task
)
elif isinstance(action, ast.StatefulAction):
if action.post:
post_tasks_group.append(
replace(
base,
id=None,
real_id=f"post-{base.real_id}",
needs={real_id: ast.NeedsLevel.COMPLETED},
enable=action.post_if,
).to_post_task(action.post, real_id),
)
prep_tasks[real_id] = base.to_stateful_call(
action_name, action, ast_task
)
else:
raise ValueError(
f"Action {action_name} has kind {action.kind.value}, "
"that is not supported in batch mode."
)
real_ids.add(real_id)
if post_tasks_group:
post_tasks.append(post_tasks_group)
last_needs = real_ids
for post_tasks_group in reversed(post_tasks):
real_ids = set()
for task in post_tasks_group:
needs = {need: ast.NeedsLevel.COMPLETED for need in last_needs}
needs = {**needs, **task.needs}
task = replace(task, needs=needs)
prep_tasks[task.real_id] = task
real_ids.add(task.real_id)
last_needs = real_ids
# Check needs sanity
for prep_task in prep_tasks.values():
for need_id in prep_task.needs.keys():
if need_id not in prep_tasks:
id_expr = real_id_to_need_to_expr[prep_task.real_id][need_id]
raise EvalError(
f"Task {prep_task.real_id} needs unknown task {need_id}",
id_expr.start,
id_expr.end,
)
# Check that all tasks have non-null image
for prep_task in prep_tasks.values():
if isinstance(prep_task, EarlyTask):
image_expr = prep_task.ast_task.image
if image_expr.pattern is None:
raise EvalError(
f"Image for task {prep_task.real_id} is not specified",
image_expr.start,
image_expr.end,
)
return prep_tasks
async def _setup_ids(
self, ctx: MatrixOnlyContext, num: int, ast_task: ast.TaskBase
) -> Tuple[Optional[str], str]:
task_id = await ast_task.id.eval(ctx)
if task_id is None:
# Dash is not allowed in identifiers, so the generated real id
# never clashes with a user-provided one.
# Filter system properties
keys = [key for key in sorted(ctx.matrix) if key == key.lower()]
suffix = [str(ctx.matrix[key]) for key in keys]
real_id = "-".join(["task", str(num), *suffix])
else:
real_id = task_id
return task_id, real_id
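# Example (hypothetical matrix): the 3rd task with no explicit id and matrix
# {"os": "ubuntu", "py": "3.9"} gets real_id "task-3-ubuntu-3.9"; with an
# explicit id, task_id and real_id are the same string.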
async def _setup_needs(
self, ctx: RootABC, default_needs: AbstractSet[str], ast_task: ast.TaskBase
) -> Tuple[Mapping[str, ast.NeedsLevel], Mapping[str, IdExpr]]:
if ast_task.needs is not None:
needs, to_expr_map = {}, {}
for need, level in ast_task.needs.items():
need_id = await need.eval(ctx)
needs[need_id] = level
to_expr_map[need_id] = need
return needs, to_expr_map
return {need: ast.NeedsLevel.COMPLETED for need in default_needs}, {}
class TaskGraphBuilder(EarlyTaskGraphBuilder):
MATRIX_SIZE_LIMIT = 256
_ctx: BaseBatchContext
def __init__(
self,
ctx: BaseBatchContext,
config_loader: ConfigLoader,
default_cache: CacheConf,
ast_tasks: Sequence[Union[ast.Task, ast.TaskActionCall, ast.TaskModuleCall]],
mixins: Optional[Mapping[str, SupportsAstMerge]],
):
super().__init__(ctx, config_loader, ast_tasks, mixins)
self._ctx = ctx
self._default_cache = default_cache
async def _extend_base(
self,
base: BaseEarlyTask,
ast_task: Union[ast.Task, ast.TaskActionCall, ast.TaskModuleCall],
) -> BasePrepTask:
strategy = await self._setup_strategy(ast_task.strategy)
matrix_ctx = self._ctx.to_matrix_ctx(matrix=base.matrix, strategy=strategy)
cache = await setup_cache(
matrix_ctx,
self._default_cache,
ast_task.cache,
ast.CacheStrategy.INHERIT,
)
return base.to_prep_base(strategy, cache)
async def build(self) -> Mapping[str, BasePrepTask]:
# Super method already returns proper type (thanks to _extend_base),
# but it is hard to properly annotate, so we have to do runtime check here
ret = {}
tasks = await super().build()
for key, value in tasks.items():
assert isinstance(value, BasePrepTask)
ret[key] = value
return ret
async def _setup_strategy(
self, ast_strategy: Optional[ast.Strategy]
) -> StrategyCtx:
if ast_strategy is None:
return self._ctx.strategy
fail_fast = await ast_strategy.fail_fast.eval(self._ctx)
if fail_fast is None:
fail_fast = self._ctx.strategy.fail_fast
max_parallel = await ast_strategy.max_parallel.eval(self._ctx)
if max_parallel is None:
max_parallel = self._ctx.strategy.max_parallel
return StrategyCtx(fail_fast=fail_fast, max_parallel=max_parallel)
# Utils
def _id2tag(id: str) -> str:
return id.replace("_", "-").lower()
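# e.g. _id2tag("train_GPU_model") -> "train-gpu-model"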
def _hash(val: Any) -> str:
hasher = hashlib.new("sha256")
data = json.dumps(val, sort_keys=True, default=_ctx_default)
hasher.update(data.encode("utf-8"))
return hasher.hexdigest()
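# _hash() returns a stable SHA-256 hex digest of the JSON-serialized value;
# get_task() uses it as the caching_key over (task, needs, state).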
def _ctx_default(val: Any) -> Any:
if dataclasses.is_dataclass(val):
if hasattr(val, "_client"):
val = dataclasses.replace(val, _client=None)
if hasattr(val, "_parent") and hasattr(val._parent, "_client"):
parent = dataclasses.replace(val._parent, _client=None)
val = dataclasses.replace(val, _parent=parent)
ret = dataclasses.asdict(val)
ret.pop("_client", None)
return ret
elif isinstance(val, enum.Enum):
return val.value
elif isinstance(val, RemotePath):
return str(val)
elif isinstance(val, LocalPath):
return str(val)
elif isinstance(val, collections.abc.Set):
return sorted(val)
elif isinstance(val, AlwaysT):
return str(val)
elif isinstance(val, URL):
return str(val)
elif isinstance(val, EmptyRoot):
return {}
else:
raise TypeError(f"Cannot dump {val!r}")
|
data = {'12.04.2021, 00:00 Uhr': {'AdmUnitId': 3351,
'BEZ': 'Landkreis',
'GEN': 'Celle',
'OBJECTID': 34,
'cases': 3813,
'cases7_bl': 8217,
'cases7_bl_per_100k': 102.794632911696,
'cases7_lk': 191,
'cases7_per_100k': 106.697353793901,
'cases7_per_100k_txt': '106,7',
'cases_per_100k': 2130.03670165521,
'cases_per_population': 2.13003670165521,
'death7_bl': 7,
'death7_lk': 0,
'death_rate': 1.5211119853134,
'deaths': 58,
'last_update': '12.04.2021, 00:00 Uhr',
'recovered': None},
'13.04.2021, 00:00 Uhr': {'AdmUnitId': 3351,
'BEZ': 'Landkreis',
'GEN': 'Celle',
'OBJECTID': 34,
'cases': 3831,
'cases7_bl': 8534,
'cases7_bl_per_100k': 106.760301480883,
'cases7_lk': 207,
'cases7_per_100k': 115.635352017474,
'cases7_per_100k_txt': '115,6',
'cases_per_100k': 2140.09194965672,
'cases_per_population': 2.14009194965672,
'death7_bl': 9,
'death7_lk': 0,
'death_rate': 1.54006786739755,
'deaths': 59,
'last_update': '13.04.2021, 00:00 Uhr',
'recovered': None},
'14.04.2021, 00:00 Uhr': {'AdmUnitId': 3351,
'BEZ': 'Landkreis',
'GEN': 'Celle',
'OBJECTID': 34,
'cases': 3850,
'cases7_bl': 8877,
'cases7_bl_per_100k': 111.051229932716,
'cases7_lk': 199,
'cases7_per_100k': 111.166352905687,
'cases7_per_100k_txt': '111,2',
'cases_per_100k': 2150.70582254722,
'cases_per_population': 2.15070582254722,
'death7_bl': 13,
'death7_lk': 0,
'death_rate': 1.53246753246753,
'deaths': 59,
'last_update': '14.04.2021, 00:00 Uhr',
'recovered': None},
'15.04.2021, 00:00 Uhr': {'AdmUnitId': 3351,
'BEZ': 'Landkreis',
'GEN': 'Celle',
'OBJECTID': 34,
'cases': 3904,
'cases7_bl': 9436,
'cases7_bl_per_100k': 118.04431740961,
'cases7_lk': 238,
'cases7_per_100k': 132.952723575646,
'cases7_per_100k_txt': '133,0',
'cases_per_100k': 2180.87156655178,
'cases_per_population': 2.18087156655178,
'death7_bl': 10,
'death7_lk': 0,
'death_rate': 1.53688524590164,
'deaths': 60,
'last_update': '15.04.2021, 00:00 Uhr',
'recovered': None},
'16.04.2021, 00:00 Uhr': {'AdmUnitId': 3351,
'BEZ': 'Landkreis',
'GEN': 'Celle',
'OBJECTID': 34,
'cases': 3932,
'cases7_bl': 9295,
'cases7_bl_per_100k': 116.280408046029,
'cases7_lk': 228,
'cases7_per_100k': 127.366474685913,
'cases7_per_100k_txt': '127,4',
'cases_per_100k': 2196.51306344303,
'cases_per_population': 2.19651306344303,
'death7_bl': 9,
'death7_lk': 0,
'death_rate': 1.52594099694812,
'deaths': 60,
'last_update': '16.04.2021, 00:00 Uhr',
'recovered': None},
'17.04.2021, 00:00 Uhr': {'AdmUnitId': 3351,
'BEZ': 'Landkreis',
'GEN': 'Celle',
'OBJECTID': 34,
'cases': 3966,
'cases7_bl': 9267,
'cases7_bl_per_100k': 115.93012817241,
'cases7_lk': 188,
'cases7_per_100k': 105.021479126981,
'cases7_per_100k_txt': '105,0',
'cases_per_100k': 2215.50630966812,
'cases_per_population': 2.21550630966812,
'death7_bl': 8,
'death7_lk': 0,
'death_rate': 1.51285930408472,
'deaths': 60,
'last_update': '17.04.2021, 00:00 Uhr',
'recovered': None},
'18.04.2021, 00:00 Uhr': {'AdmUnitId': 3351,
'BEZ': 'Landkreis',
'GEN': 'Celle',
'OBJECTID': 34,
'cases': 3999,
'cases7_bl': 9370,
'cases7_bl_per_100k': 117.218657707508,
'cases7_lk': 186,
'cases7_per_100k': 103.904229349034,
'cases7_per_100k_txt': '103,9',
'cases_per_100k': 2233.94093100424,
'cases_per_population': 2.23394093100424,
'death7_bl': 5,
'death7_lk': 0,
'death_rate': 1.50037509377344,
'deaths': 60,
'last_update': '18.04.2021, 00:00 Uhr',
'recovered': None},
'19.04.2021, 00:00 Uhr': {'AdmUnitId': 3351,
'BEZ': 'Landkreis',
'GEN': 'Celle',
'OBJECTID': 34,
'cases': 4027,
'cases7_bl': 9866,
'cases7_bl_per_100k': 123.42361546876,
'cases7_lk': 214,
'cases7_per_100k': 119.545726240287,
'cases7_per_100k_txt': '119,5',
'cases_per_100k': 2249.58242789549,
'cases_per_population': 2.24958242789549,
'death7_bl': 5,
'death7_lk': 0,
'death_rate': 1.48994288552272,
'deaths': 60,
'last_update': '19.04.2021, 00:00 Uhr',
'recovered': None},
'20.04.2021, 00:00 Uhr': {'AdmUnitId': 3351,
'BEZ': 'Landkreis',
'GEN': 'Celle',
'OBJECTID': 34,
'cases': 4039,
'cases7_bl': 9714,
'cases7_bl_per_100k': 121.522096154828,
'cases7_lk': 208,
'cases7_per_100k': 116.193976906447,
'cases7_per_100k_txt': '116,2',
'cases_per_100k': 2256.28592656317,
'cases_per_population': 2.25628592656317,
'death7_bl': 5,
'death7_lk': 0,
'death_rate': 1.48551621688537,
'deaths': 60,
'last_update': '20.04.2021, 00:00 Uhr',
'recovered': None},
'21.04.2021, 00:00 Uhr': {'AdmUnitId': 3351,
'BEZ': 'Landkreis',
'GEN': 'Celle',
'OBJECTID': 34,
'cases': 4042,
'cases7_bl': 9236,
'cases7_bl_per_100k': 115.542318312332,
'cases7_lk': 180,
'cases7_per_100k': 100.552480015195,
'cases7_per_100k_txt': '100,6',
'cases_per_100k': 2257.96180123009,
'cases_per_population': 2.25796180123009,
'death7_bl': 4,
'death7_lk': 0,
'death_rate': 1.50915388421573,
'deaths': 61,
'last_update': '21.04.2021, 00:00 Uhr',
'recovered': None},
'22.04.2021, 00:00 Uhr': {'AdmUnitId': 3351,
'BEZ': 'Landkreis',
'GEN': 'Celle',
'OBJECTID': 34,
'cases': 4075,
'cases7_bl': 9003,
'cases7_bl_per_100k': 112.627489364002,
'cases7_lk': 168,
'cases7_per_100k': 93.848981347515,
'cases7_per_100k_txt': '93,8',
'cases_per_100k': 2276.39642256621,
'cases_per_population': 2.27639642256621,
'death7_bl': 5,
'death7_lk': 0,
'death_rate': 1.49693251533742,
'deaths': 61,
'last_update': '22.04.2021, 00:00 Uhr',
'recovered': None}}
|
# Generated by Django 4.0.1 on 2022-02-21 18:19
import ckeditor.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('projects', '0013_alter_project_content_upload'),
]
operations = [
migrations.AlterField(
model_name='project',
name='content',
field=ckeditor.fields.RichTextField(blank=True, null=True),
),
]
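# Applied like any Django migration, e.g. `python manage.py migrate projects`
# (standard Django command; the app label comes from the dependency above).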
|
from setuptools import setup
setup(name="epp_project_sven_jacobs", version="0.0.1")
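# Minimal packaging stub; for local development it would typically be installed
# in editable mode with `pip install -e .` (standard setuptools/pip workflow).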
|
"""create dimension and fact
Revision ID: 15d6a161a32b
Revises: c9311744ef3c
Create Date: 2022-02-11 21:49:44.152019
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "15d6a161a32b"
down_revision = "c9311744ef3c"
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
"dim_product",
sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
sa.Column("product_id", sa.String, primary_key=True, index=True),
sa.Column("product_name", sa.Text),
sa.Column("product_brand", sa.String),
sa.Column("product_search_image", sa.Text),
sa.Column("product_gender", sa.String),
sa.Column("product_primary_colour", sa.String),
sa.Column("product_category", sa.String),
sa.Column("product_year", sa.String),
sa.Column("product_season", sa.String),
sa.Column("product_catalog_date", sa.Date),
sa.Column("created_date", sa.Date),
schema="curated",
)
op.create_table(
"fct_product",
sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
sa.Column("process_date", sa.String, index=True),
sa.Column("search_option_category", sa.String),
sa.Column("search_option_sort", sa.String),
sa.Column("product_id", sa.String),
sa.Column("product_mrp", sa.Float),
sa.Column("product_price", sa.Float),
sa.Column("product_discount", sa.Float),
sa.Column("product_rating", sa.Float),
sa.Column("product_rating_count", sa.Integer),
schema="curated",
)
def downgrade():
op.drop_table("dim_product", schema="curated")
op.drop_table("fct_product", schema="curated")
|
#copyright (c) 2019 PaddlePaddle Authors. All Rights Reserve.
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
import time
import sys
import functools
import math
def set_paddle_flags(flags):
for key, value in flags.items():
if os.environ.get(key, None) is None:
os.environ[key] = str(value)
# NOTE(paddle-dev): All of these flags should be
# set before `import paddle`. Otherwise, it would
# not take any effect.
set_paddle_flags({
'FLAGS_eager_delete_tensor_gb': 0, # enable gc
'FLAGS_fraction_of_gpu_memory_to_use': 0.98
})
import argparse
import subprocess
import paddle
import paddle.fluid as fluid
import models
import utils.reader_cv2 as reader
from utils.utility import add_arguments, print_arguments, check_gpu
from utils.learning_rate import lr_warmup
from paddle.fluid.incubate.fleet.collective import fleet, DistributedStrategy
import paddle.fluid.incubate.fleet.base.role_maker as role_maker
num_trainers = int(os.environ.get('PADDLE_TRAINERS_NUM', 1))
trainer_id = int(os.environ.get('PADDLE_TRAINER_ID'))
parser = argparse.ArgumentParser(description=__doc__)
add_arg = functools.partial(add_arguments, argparser=parser)
# yapf: disable
add_arg('batch_size', int, 32, "Minibatch size per device.")
add_arg('total_images', int, 1281167, "Training image number.")
add_arg('num_epochs', int, 120, "number of epochs.")
add_arg('class_dim', int, 1000, "Class number.")
add_arg('image_shape', str, "3,224,224", "input image size")
add_arg('model_save_dir', str, "output", "model save directory")
add_arg('pretrained_model', str, None, "Whether to use pretrained model.")
add_arg('checkpoint', str, None, "Whether to resume checkpoint.")
add_arg('lr', float, 0.1, "set learning rate.")
add_arg('model', str, "SE_ResNeXt50_32x4d", "Set the network to use.")
add_arg('data_dir', str, "./data/ILSVRC2012/", "The ImageNet dataset root dir.")
add_arg('l2_decay', float, 1e-4, "L2_decay parameter.")
add_arg('momentum_rate', float, 0.9, "momentum_rate.")
add_arg('lower_scale',             float, 0.08,                  "Set the lower_scale in random_crop.")
add_arg('lower_ratio',             float, 3./4.,                 "Set the lower_ratio in random_crop.")
add_arg('upper_ratio',             float, 4./3.,                 "Set the upper_ratio in random_crop.")
add_arg('resize_short_size', int, 256, "Set the resize_short_size")
add_arg('use_gpu', bool, True, "Whether to use GPU or not.")
add_arg('nccl_comm_num', int, 1, "nccl comm num")
add_arg('num_iteration_per_drop_scope', int, 30, "The iteration interval for cleaning up temporary variables.")
add_arg('use_local_sgd', bool, True, "Whether to use the LocalSGD algorithm.")
add_arg('local_sgd_steps', int, 2, "The step number for local training before synchronizing parameters.")
add_arg('local_sgd_is_warm_steps', int, 30, "The number of warmup passes before local sgd takes full effect.")
add_arg('lsgd_warmup_strategy', int, 1, "Strategy used to warm up local sgd: 1: exp, 2: const, 3: linear.")
add_arg('is_Test', bool, False, "Whether to run the test pass after every epoch.")
def optimizer_setting(params):
l2_decay = params["l2_decay"]
momentum_rate = params["momentum_rate"]
# piecewise_decay
global_batch_size = params["batch_size"] * num_trainers
steps_per_pass = int(math.ceil(params["total_images"] * 1.0 / global_batch_size))
warmup_steps = steps_per_pass * 5
passes = [30,60,80,90]
bd = [steps_per_pass * p for p in passes]
batch_denom = 256
start_lr = params["lr"]
base_lr = params["lr"] * global_batch_size / batch_denom
lr = [base_lr * (0.1**i) for i in range(len(bd) + 1)]
lr_var = lr_warmup(fluid.layers.piecewise_decay(boundaries=bd, values=lr),
warmup_steps, start_lr, base_lr)
optimizer = fluid.optimizer.Momentum(learning_rate=lr_var,momentum=momentum_rate,
regularization=fluid.regularizer.L2Decay(l2_decay))
return optimizer
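# Example (hypothetical run): with lr=0.1, batch_size=64 and 8 trainers the
# global batch size is 512, so base_lr = 0.1 * 512 / 256 = 0.2; the rate warms
# up from start_lr=0.1 to base_lr=0.2 over the first 5 epochs and is then
# divided by 10 at epochs 30, 60, 80 and 90.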
def net_config(image, model, args, is_train, label=0, y_a=0, y_b=0, lam=0.0):
model_list = [m for m in dir(models) if "__" not in m]
assert args.model in model_list, "{} is not in the model list: {}".format(args.model,
model_list)
class_dim = args.class_dim
model_name = args.model
out = model.net(input=image, class_dim=class_dim)
softmax_out = fluid.layers.softmax(out, use_cudnn=False)
if is_train:
cost, prob = fluid.layers.softmax_with_cross_entropy(out, label, return_softmax=True)
else:
cost = fluid.layers.cross_entropy(input=softmax_out, label=label)
avg_cost = fluid.layers.mean(cost)
acc_top1 = fluid.layers.accuracy(input=softmax_out, label=label, k=1)
acc_top5 = fluid.layers.accuracy(input=softmax_out, label=label, k=5)
return avg_cost, acc_top1, acc_top5
def build_program(is_train, main_prog, startup_prog, args, dist_strategy=None):
image_shape = [int(m) for m in args.image_shape.split(",")]
model_name = args.model
model_list = [m for m in dir(models) if "__" not in m]
model = models.__dict__[model_name]()
with fluid.program_guard(main_prog, startup_prog):
py_reader = fluid.layers.py_reader(
capacity=16,
shapes=[[-1] + image_shape, [-1, 1]],
lod_levels=[0, 0],
dtypes=["float32", "int64"],
use_double_buffer=True)
with fluid.unique_name.guard():
image, label = fluid.layers.read_file(py_reader)
avg_cost, acc_top1, acc_top5 = net_config(image, model, args, label=label, is_train=is_train)
avg_cost.persistable = True
acc_top1.persistable = True
acc_top5.persistable = True
build_program_out = [py_reader, avg_cost, acc_top1, acc_top5]
if is_train:
params = model.params
params["total_images"] = args.total_images
params["lr"] = args.lr
params["num_epochs"] = args.num_epochs
params["batch_size"] = args.batch_size
params["l2_decay"] = args.l2_decay
params["momentum_rate"] = args.momentum_rate
optimizer = optimizer_setting(params)
global_lr = optimizer._global_learning_rate()
dist_optimizer = fleet.distributed_optimizer(optimizer, strategy=dist_strategy)
_, param_grads = dist_optimizer.minimize(avg_cost)
global_lr.persistable=True
build_program_out.append(global_lr)
return build_program_out
def get_device_num():
"""
# NOTE(zcd): for multi-processe training, each process use one GPU card.
if num_trainers > 1 : return 1
visible_device = os.environ.get('CUDA_VISIBLE_DEVICES', None)
if visible_device:
device_num = len(visible_device.split(','))
else:
device_num = subprocess.check_output(['nvidia-smi','-L']).decode().count('\n')
"""
device_num = fluid.core.get_cuda_device_count()
return device_num
def get_local_sgd_steps(passid, local_sgd_steps, local_sgd_warmup, lsgd_warmup_strategy):
offset = passid - local_sgd_warmup
if offset < 0:
return 1
if lsgd_warmup_strategy == 1:
warm_up = [2**i for i in range(local_sgd_steps) if 2**i <=local_sgd_steps]
elif lsgd_warmup_strategy == 2:
warm_up = [local_sgd_steps]
elif lsgd_warmup_strategy == 3:
warm_up = [2*i for i in range(local_sgd_steps) if 2*i <=local_sgd_steps]
warm_up[0] = 1
warm_size = len(warm_up)
if offset >= warm_size:
return local_sgd_steps
else:
return warm_up[offset]
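# Example: with local_sgd_steps=4, a 30-pass warmup and the exponential
# strategy (1), passes 0-29 return 1 (fully synchronous SGD), passes 30/31/32
# return 1/2/4, and every later pass returns 4, i.e. parameters are
# synchronized once every 4 local steps.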
def train(args):
# parameters from arguments
model_name = args.model
checkpoint = args.checkpoint
pretrained_model = args.pretrained_model
model_save_dir = args.model_save_dir
startup_prog = fluid.Program()
train_prog = fluid.Program()
test_prog = fluid.Program()
exec_strategy = fluid.ExecutionStrategy()
exec_strategy.num_iteration_per_drop_scope = args.num_iteration_per_drop_scope
dist_strategy = DistributedStrategy()
dist_strategy.nccl_comm_num = args.nccl_comm_num
dist_strategy.use_local_sgd = args.use_local_sgd
dist_strategy.exec_strategy = exec_strategy
role = role_maker.PaddleCloudRoleMaker(is_collective=True)
fleet.init(role)
b_out = build_program(
is_train=True,
main_prog=train_prog,
startup_prog=startup_prog,
args=args,
dist_strategy=dist_strategy)
train_py_reader, train_cost, train_acc1, train_acc5, global_lr = b_out[0],b_out[1],b_out[2],b_out[3],b_out[4]
train_fetch_vars = [train_cost, train_acc1, train_acc5, global_lr]
train_fetch_list = []
for var in train_fetch_vars:
var.persistable=True
train_fetch_list.append(var.name)
dist_prog = fleet.main_program
local_prog = fleet._origin_program
b_out_test = build_program(
is_train=False,
main_prog=test_prog,
startup_prog=startup_prog,
args=args)
test_py_reader, test_cost, test_acc1, test_acc5 = b_out_test[0],b_out_test[1],b_out_test[2],b_out_test[3]
test_prog = test_prog.clone(for_test=True)
gpu_id = int(os.environ.get('FLAGS_selected_gpus', 0))
place = fluid.CUDAPlace(gpu_id) if args.use_gpu else fluid.CPUPlace()
exe = fluid.Executor(place)
exe.run(startup_prog)
if checkpoint is not None:
fluid.io.load_persistables(exe, checkpoint, main_program=local_prog)
if pretrained_model:
def if_exist(var):
return os.path.exists(os.path.join(pretrained_model, var.name))
fluid.io.load_vars(
exe, pretrained_model, main_program=local_prog, predicate=if_exist)
if args.use_gpu:
device_num = get_device_num()
else:
device_num = 1
train_batch_size = args.batch_size
print("train_batch_size: %d device_num:%d" % (train_batch_size, device_num))
test_batch_size = 16
# NOTE: the order of batch data generated by batch_reader
# must be the same in the respective processes.
shuffle_seed = 1 if num_trainers > 1 else None
train_reader = reader.train(settings=args, data_dir=args.data_dir, pass_id_as_seed=shuffle_seed,num_epoch=args.num_epochs+1)
test_reader = reader.val(settings=args, data_dir=args.data_dir)
train_py_reader.decorate_paddle_reader(paddle.batch(train_reader,
batch_size=train_batch_size))
test_py_reader.decorate_paddle_reader(paddle.batch(test_reader,
batch_size=test_batch_size))
test_fetch_vars = [test_cost, test_acc1, test_acc5]
test_fetch_list = []
for var in test_fetch_vars:
var.persistable=True
test_fetch_list.append(var.name)
train_exe = exe
assert args.local_sgd_steps > 0, "local_sgd_steps must be greater than 0"
step_cnt = 0
params = models.__dict__[args.model]().params
global_batch_size = args.batch_size * num_trainers
steps_per_pass = int(math.ceil(args.total_images * 1.0 / global_batch_size))
print("steps_per_pass {}".format(steps_per_pass))
print("global_batch_size {}".format(global_batch_size))
pass_id = 0
all_train_time = []
try :
train_py_reader.start()
train_info = [[], [], []]
test_info = [[], [], []]
train_begin=time.time()
batch_id = 0
time_record=[]
while True:
t1 = time.time()
pass_id = step_cnt // steps_per_pass
if pass_id >= args.num_epochs:
train_py_reader.reset()
print("Train is over. Time is {}".format(all_train_time))
break
local_sgd_steps = get_local_sgd_steps(pass_id, args.local_sgd_steps, args.local_sgd_is_warm_steps,args.lsgd_warmup_strategy)
if step_cnt % local_sgd_steps == 0:
current_prog = dist_prog
else:
current_prog = local_prog
loss, acc1, acc5, lr = train_exe.run(current_prog, fetch_list=train_fetch_list, use_program_cache=True)
acc1 = np.mean(np.array(acc1))
acc5 = np.mean(np.array(acc5))
train_info[1].append(acc1)
train_info[2].append(acc5)
t2 = time.time()
period = t2 - t1
time_record.append(period)
loss = np.mean(np.array(loss))
train_info[0].append(loss)
lr = np.mean(np.array(lr))
if batch_id % 30 == 0:
period = np.mean(time_record)
speed = args.batch_size * 1.0 / period
time_record=[]
print("Pass {0}, trainbatch {1}, loss {2}, acc1 {3}, acc5 {4}, lr {5}, time {6}, speed {7}"\
.format(pass_id, batch_id, "%.5f"%loss, "%.5f"%acc1, "%.5f"%acc5, "%.5f" %\
lr, "%2.2f sec" % period, "%.2f" % speed))
sys.stdout.flush()
batch_id += 1
step_cnt += 1
if (step_cnt // steps_per_pass) != pass_id: # train epoch end
train_loss = np.array(train_info[0]).mean()
train_acc1 = np.array(train_info[1]).mean()
train_acc5 = np.array(train_info[2]).mean()
train_end=time.time()
all_train_time.append(train_end - train_begin)
train_speed = (batch_id * train_batch_size) / (train_end - train_begin)
print("current local_sgd_steps {}".format(local_sgd_steps))
print("End pass {0}, train_loss {1}, train_acc1 {2}, train_acc5 {3}, "
"speed {4}".format(\
pass_id, "%.5f"%train_loss, "%.5f"%train_acc1, "%.5f"%train_acc5, "%.2f" % train_speed))
sys.stdout.flush()
# reset per-epoch statistics for the next pass
batch_id = 0
train_info = [[], [], []]
train_begin=time.time()
batch_id = 0
time_record=[]
if args.is_Test:
test_info = [[], [], []]
test_py_reader.start()
test_batch_id = 0
try:
while True:
t1 = time.time()
loss, acc1, acc5 = exe.run(program=test_prog,
fetch_list=test_fetch_list,
use_program_cache=True)
t2 = time.time()
period = t2 - t1
loss = np.mean(loss)
acc1 = np.mean(acc1)
acc5 = np.mean(acc5)
test_info[0].append(loss)
test_info[1].append(acc1)
test_info[2].append(acc5)
if test_batch_id % 200 == 0:
test_speed = test_batch_size * 1.0 / period
print("Pass {0},testbatch {1},loss {2}, acc1 {3},acc5 {4},time {5},speed {6}"\
.format(pass_id, test_batch_id, "%.5f"%loss,"%.5f"%acc1, "%.5f"%acc5,\
"%2.2f sec" % period, "%.2f" % test_speed))
sys.stdout.flush()
test_batch_id += 1
except fluid.core.EOFException:
test_py_reader.reset()
test_loss = np.array(test_info[0]).mean()
test_acc1 = np.array(test_info[1]).mean()
test_acc5 = np.array(test_info[2]).mean()
print("End pass {0}, test_loss {1}, test_acc1 {2}, test_acc5 {3}".format(pass_id,"%.5f"%test_loss,
"%.5f"%test_acc1, "%.5f"%test_acc5))
sys.stdout.flush()
except fluid.core.EOFException:
train_py_reader.reset()
#start test
test_py_reader.start()
test_batch_id = 0
test_info = [[], [], []]
try:
while True:
t1 = time.time()
loss, acc1, acc5 = exe.run(program=test_prog,
fetch_list=test_fetch_list,
use_program_cache=True)
t2 = time.time()
period = t2 - t1
loss = np.mean(loss)
acc1 = np.mean(acc1)
acc5 = np.mean(acc5)
test_info[0].append(loss)
test_info[1].append(acc1)
test_info[2].append(acc5)
if test_batch_id % 100 == 0:
test_speed = test_batch_size * 1.0 / period
print("Pass {0},testbatch {1},loss {2}, acc1 {3},acc5 {4},time {5},speed {6}"\
.format(pass_id, test_batch_id, "%.5f"%loss,"%.5f"%acc1, "%.5f"%acc5,
"%2.2f sec" % period, "%.2f" % test_speed))
sys.stdout.flush()
test_batch_id += 1
except fluid.core.EOFException:
test_py_reader.reset()
test_loss = np.array(test_info[0]).mean()
test_acc1 = np.array(test_info[1]).mean()
test_acc5 = np.array(test_info[2]).mean()
print("test_loss {0}, test_acc1 {1}, test_acc5 {2}".format("%.5f"%test_loss,
"%.5f"%test_acc1, "%.5f"%test_acc5))
sys.stdout.flush()
    model_path = os.path.join(model_save_dir, model_name, str(pass_id))
if not os.path.isdir(model_path):
os.makedirs(model_path)
fluid.io.save_persistables(exe, model_path, main_program=fleet._origin_program)
def print_paddle_environments():
print('--------- Configuration Environments -----------')
for k in os.environ:
if "PADDLE_" in k or "FLAGS_" in k:
print("%s: %s" % (k, os.environ[k]))
print('------------------------------------------------')
def main():
args = parser.parse_args()
    # This distributed benchmark code only supports a GPU environment.
    assert args.use_gpu, "this benchmark only runs on GPU."
print_arguments(args)
print_paddle_environments()
check_gpu(args.use_gpu)
train(args)
if __name__ == '__main__':
main()
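# --- Illustrative note, not part of the original script ---
# The heart of the local SGD schedule above is the program switch inside the
# training loop: every `local_sgd_steps`-th step runs `dist_prog` (presumably
# the program that synchronizes parameters across the num_trainers workers),
# while every other step runs `local_prog` (purely local updates). A minimal
# sketch of that selection logic, assuming local_sgd_steps = 4:
#
#     for step_cnt in range(10):
#         prog = "dist_prog" if step_cnt % 4 == 0 else "local_prog"
#         # steps 0, 4 and 8 pick dist_prog; all other steps pick local_prog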
|
_base_ = 'faster_rcnn_r50_caffe_fpn_1x_coco.py'
# learning policy
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=0.001,
step=[60000, 80000])
# Runner type
runner = dict(_delete_=True, type='IterBasedRunner', max_iters=90000)
checkpoint_config = dict(interval=10000)
evaluation = dict(interval=10000, metric='bbox')
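# --- Illustrative note, not part of the original config ---
# With warmup='linear', warmup_iters=500 and warmup_ratio=0.001, the learning
# rate ramps from roughly base_lr * 0.001 up to base_lr over the first 500
# iterations (base_lr itself comes from the inherited base config), after which
# the step policy decays it at iterations 60000 and 80000 until max_iters=90000.
# A rough sketch of the ramp, not necessarily the library's exact formula:
#
#     def warmup_lr(base_lr, cur_iter, warmup_iters=500, warmup_ratio=0.001):
#         frac = min(cur_iter, warmup_iters) / float(warmup_iters)
#         return base_lr * (warmup_ratio + (1.0 - warmup_ratio) * frac)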
|
#!/usr/bin/python
import argparse
import textwrap
import socket
import os
import subprocess
import logging
import logging.config
import logutils.dictconfig
import yaml
from jinja2 import Environment, FileSystemLoader, Template
import utils
verbose = True
def create_log( logdir ):
if not os.path.exists( logdir ):
os.system("mkdir -p " + logdir )
def exec_with_output( cmd ):
try:
# https://stackoverflow.com/questions/4814970/subprocess-check-output-doesnt-seem-to-exist-python-2-6-5
print cmd
output = subprocess.Popen( cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.STDOUT ).communicate()[0]
print output
except subprocess.CalledProcessError as e:
print "Exception " + str(e.returncode) + ", output: " + e.output.strip()
if __name__ == '__main__':
print "Start... boostrap_hdfs.py "
try:
parser = argparse.ArgumentParser(prog='boostrap_hdfs.py',
formatter_class=argparse.RawDescriptionHelpFormatter,
description=textwrap.dedent('''\
    Launch an HDFS server.
Command:
journalnode: Launch journal node.
namenode: Launch namenode.
datanode: Launch datanode.
''') )
parser.add_argument("--config",
help="configuration file",
action="store",
default="/etc/hdfs/config.yaml")
parser.add_argument("-v", "--verbose",
help="verbose information",
action="store_true" )
parser.add_argument("-f", "--force",
help="force (formatting)",
action="store_true" )
parser.add_argument("server", help = "See above for the list of available server type" )
# Obtain argument from environment variable.
args = parser.parse_args()
verbose = args.verbose
server = args.server
print "Parse command line argument... "
config_file = args.config
        if not os.path.exists(config_file):
            print "!!!Error!!! Can't find configuration file %s " % config_file
            parser.print_help()
            exit(1)
with open(config_file, 'r') as file:
config = yaml.load(file)
print "Configuration is : %s " % config
loggingDirBase = "/var/log/hdfs" if not "loggingDirBase" in config else config["loggingDirBase"]
config["loggingDir"] = os.path.join( loggingDirBase, server )
utils.render_template("logging.yaml.in-docker", "logging.yaml",config, verbose=verbose)
# logdir = config["loggingDir"]
# create_log( logdir )
# with open('logging.yaml') as f:
# logging_config = yaml.load(f)
# f.close()
# print logging_config
# logutils.dictconfig.dictConfig(logging_config)
isHA = "namenode2" in config["namenode"]
if isHA:
utils.render_template("hdfs-site.xml.in-docker", "/usr/local/hadoop/etc/hadoop/hdfs-site.xml",config, verbose=verbose)
else:
utils.render_template("hdfs-site-single.xml.in-docker", "/usr/local/hadoop/etc/hadoop/hdfs-site.xml",config, verbose=verbose)
utils.render_template("mapred-site.xml.in-docker", "/usr/local/hadoop/etc/hadoop/mapred-site.xml",config, verbose=verbose)
if isHA:
utils.render_template("yarn-site.xml.in-docker", "/usr/local/hadoop/etc/hadoop/yarn-site.xml",config, verbose=verbose)
else:
utils.render_template("yarn-site-single.xml.in-docker", "/usr/local/hadoop/etc/hadoop/yarn-site.xml",config, verbose=verbose)
except Exception as e:
print "boostrap_hdfs.py fails during initialization, exception %s" % e
exit()
# Launch journal node
if server == "journalnode":
cmd = "/usr/local/hadoop/sbin/hadoop-daemon.sh start journalnode"
exec_with_output( cmd )
exec_with_output( "pgrep -f JournalNode")
print "JournalNode running .... "
elif server == "zookeeper":
cmd = "/usr/local/hadoop/sbin/hadoop-daemon.sh start zookeeper"
exec_with_output( cmd )
print "Zookeeper node is running .... "
elif server == "namenode":
cmd = "/usr/local/hadoop/sbin/hadoop-daemon.sh start namenode"
exec_with_output( cmd )
cmd = "/usr/local/hadoop/sbin/hadoop-daemon.sh start zkfc"
exec_with_output( cmd )
exec_with_output( "pgrep -f NameNode")
exec_with_output( "pgrep -f DFSZKFailoverController")
print "Namenode is running"
elif server == "datanode":
cmd = "/usr/local/hadoop/sbin/hadoop-daemon.sh start datanode"
exec_with_output( cmd )
exec_with_output( "pgrep -f DataNode")
print "Datanode is running"
elif server == "format":
force = "" if not args.force else "-force "
cmd = "/usr/local/hadoop/bin/hdfs namenode -format %s -nonInteractive" % force
exec_with_output( cmd )
if isHA:
cmd = "/usr/local/hadoop/bin/hdfs zkfc -formatZK -nonInteractive"
exec_with_output( cmd )
print "Format namenode and zkfc ..... "
if isHA:
remotedir = os.path.join( config["namenode"]["data"], "current")
localdir = os.path.join( config["namenode"]["localdata"], "current")
exec_with_output( "rm -rf %s" % remotedir )
exec_with_output( "cp -r %s %s" % ( localdir, remotedir ) )
print "Copy data from %s to %s" % ( localdir, remotedir )
elif server == "copy":
remotedir = os.path.join( config["namenode"]["data"], "current")
localdir = os.path.join( config["namenode"]["localdata"], "current")
exec_with_output( "cp -r %s %s" % ( localdir, remotedir ) )
print "Copy data from %s to %s" % ( localdir, remotedir )
elif server == "standby":
remotedir = os.path.join( config["namenode"]["data"], "current")
localdir = os.path.join( config["namenode"]["localdata"], "current")
exec_with_output( "cp -r %s %s" % ( remotedir, localdir ) )
print "Copy data from %s to %s" % ( remotedir, localdir )
elif server == "resourcemanager":
cmd = "/usr/local/hadoop/sbin/yarn-daemon.sh start resourcemanager"
exec_with_output( cmd )
cmd1 = "/usr/local/hadoop/sbin/mr-jobhistory-daemon.sh start historyserver"
exec_with_output( cmd1 )
exec_with_output( "pgrep -f DataNode")
print "Yarn resource manager and history server is running"
elif server == "nodemanager":
cmd = "/usr/local/hadoop/sbin/yarn-daemon.sh start nodemanager"
exec_with_output( cmd )
exec_with_output( "pgrep -f DataNode")
print "Yarn node manager is running"
elif server == "spark":
print "Ready to execute spark command. "
else:
print "Unknown server" + server
|
""" .. _CubeSpectrum-at-api:
**CubeSpectrum_AT** --- Cuts one or more spectra through a cube.
----------------------------------------------------------------
This module defines the CubeSpectrum_AT class.
"""
from admit.AT import AT
import admit.util.bdp_types as bt
from admit.bdp.Image_BDP import Image_BDP
from admit.bdp.SpwCube_BDP import SpwCube_BDP
from admit.bdp.LineCube_BDP import LineCube_BDP
from admit.bdp.Moment_BDP import Moment_BDP
from admit.bdp.CubeSpectrum_BDP import CubeSpectrum_BDP
from admit.bdp.CubeStats_BDP import CubeStats_BDP
from admit.bdp.SourceList_BDP import SourceList_BDP
from admit.Summary import SummaryEntry
import admit.util.Table as Table
import admit.util.Image as Image
from admit.util import APlot
import admit.util.utils as utils
from admit.util.AdmitLogging import AdmitLogging as logging
from copy import deepcopy
import numpy as np
import numpy.ma as ma
import os
try:
import taskinit
import casa
except:
print "WARNING: No CASA; CubeSpectrum task cannot function."
class CubeSpectrum_AT(AT):
""" Define one (or more) spectra through a cube.
Either a list of positions is given directly (via the **pos=** keyword) or a set
of BDP's can be given, each of which will accumulate its positions
to a list of points for which the spectra are computed, as detailed below.
See also :ref:`CubeSpectrum-AT-Design` for the design document.
**Keywords**
**pos**: list of int or string
List of ra-dec position pairs.
Each pair will produce a separate spectrum and plot.
        Positions can be given as two integers, in which case they are interpreted
as (0 based) pixel coordinates, e.g. pos=[121,119],
or in CASA's ra/dec region format,
e.g. pos=['00h47m33.159s','-25d17m17.41s']. Different pairs do not
need to be of the same type, so you can mix int's and strings.
If no positions are given, a position will be derived from the
input BDPs. See below how this is done. This also means if an input BDP
is given, the keyword values are ignored.
        If no input pos is given, and no optional BDPs, the center
of the map is used.
**sources** : list of int
A python list of source indices (0 being the first) from the
SourceList_BDP to be selected for a spectrum. A blank list, [],
selects all. Normally the SourceList is ordered by total flux.
Default : [0]
**xaxis**: string
Select the X axis plotting style: channel number (the default),
frequency (in GHz), or velocity (for this the restfreq needs to be in the image header).
        Currently ignored; channel is the default for a SpwCube_BDP, and velocity for a LineCube_BDP.
**Input BDPs**
**SpwCube_BDP** or **LineCube_BDP**: count: 1
        One or more spectra are taken through this cube, as from an
`Ingest_AT <Ingest_AT.html>`_,
`ContinuumSub_AT <ContinuumSub_AT.html>`_ or
`LineCube_AT <LineCube_AT.html>`_.
**CubeStats_BDP**: count: 1 (optional)
If given, the cube maxpos from this table will be used for pos=[].
Normally the output of a `CubeStats_AT <CubeStats_AT.html>`_.
**Moment_BDP**: count: 1 (optional)
If given, the maxpos from this moment map will be used for pos=[].
Note : currently this is computed on the fly, as maps don't store
their maxpos. Typically the output of a
`CubeSum_AT <CubeSum_AT.html>`_ or `Moment_AT <Moment_AT.html>`_.
**SourceList_BDP**: count: 1 (optional)
If given, the positions in this source list will be used. By default
only the strongest source (index 0) is selected. Typically the output
from `SFind2D_AT <SFind2D_AT.html>`_ on a continuum map is given here.
**Output BDPs**
**CubeSpectrum_BDP**: count: 1
Spectra through the cube. Stored as a single multi-plane table if more than one
point was used.
        The output BDP name is taken from the input Image by replacing the extension with **"csp"**.
See also :ref:`CubeSpectrum-bdp-api`.
Parameters
----------
keyval : dictionary, optional
Attributes
----------
_version : string
"""
### todo's
"""
***Missing Features***
In the design document a number of options were mentioned that have not been implemented, see
also :ref:`CubeSpectrum-AT-Design` for that design document.
1) Only points can be selected, not regions. Or sizes around points.
NOTE the treatment of the bug in imval when > 1 pixel was used.
    2) No magic names for pos=. The "xpeak,ypeak" is essentially what you get when pos=[]
       is left blank and no other BDPs are given, but there is no way to select the
       reference point "xref,yref".
    3) Smoothing option is absent. There are filters that can be applied in LineID_AT though.
       See also Smooth_AT, where you can create a smoother version of the input cube.
"""
def __init__(self,**keyval):
keys = {"pos" : [], # one or more pairs of int's or ra/dec strings
"sources" : [0], # select which sources from a SourceList
"xaxis" : "", # currently still ignored
}
AT.__init__(self,keys,keyval)
self._version = "1.1.0"
self.set_bdp_in( [(Image_BDP, 1,bt.REQUIRED), # 0: cube: SpwCube or LineCube allowed
(CubeStats_BDP, 1,bt.OPTIONAL), # 1: stats, uses maxpos
(Moment_BDP, 1,bt.OPTIONAL), # 2: map, uses the max in this image as pos=
(SourceList_BDP, 1,bt.OPTIONAL)]) # 3: source list, for positions
self.set_bdp_out([(CubeSpectrum_BDP,1)])
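    # Illustrative usage, not part of the original file: an AT is normally
    # constructed with keyword values, e.g.
    #     cs = CubeSpectrum_AT(pos=[121, 119], sources=[0])
    # and pixel pairs can be mixed with CASA ra/dec strings in the same list:
    #     cs = CubeSpectrum_AT(pos=['00h47m33.159s', '-25d17m17.41s', 121, 119])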
def run(self):
"""Runs the task.
Parameters
----------
None
Returns
-------
None
"""
self._summary = {}
dt = utils.Dtime("CubeSpectrum")
# our BDP's
# b1 = input BDP
# b1s = optional input CubeSpectrum
# b1m = optional input Moment
# b1p = optional input SourceList for positions
# b2 = output BDP
b1 = self._bdp_in[0] # check input SpwCube (or LineCube)
fin = b1.getimagefile(bt.CASA)
if self._bdp_in[0]._type == bt.LINECUBE_BDP:
use_vel = True
else:
use_vel = False
sources = self.getkey("sources")
pos = [] # blank it first, then try and grab it from the optional bdp_in's
cmean = 0.0
csigma = 0.0
smax = [] # accumulate max in each spectrum for regression
self.spec_description = [] # for summary()
# get the tools
ia = taskinit.iatool()
if self._bdp_in[1] != None: # check if CubeStats_BDP
#print "BDP[1] type: ",self._bdp_in[1]._type
if self._bdp_in[1]._type != bt.CUBESTATS_BDP:
raise Exception,"bdp_in[1] not a CubeStats_BDP, should never happen"
# a table (cubestats)
b1s = self._bdp_in[1]
pos.append(b1s.maxpos[0])
pos.append(b1s.maxpos[1])
logging.info('CubeStats::maxpos,val=%s,%f' % (str(b1s.maxpos),b1s.maxval))
cmean = b1s.mean
csigma = b1s.sigma
dt.tag("CubeStats-pos")
if self._bdp_in[2] != None: # check if Moment_BDP (probably from CubeSum)
# print "BDP[2] type: ",self._bdp_in[2]._type
if self._bdp_in[2]._type != bt.MOMENT_BDP:
raise Exception,"bdp_in[2] not a Moment_BDP, should never happen"
b1m = self._bdp_in[2]
fim = b1m.getimagefile(bt.CASA)
pos1,maxval = self.maxpos_im(self.dir(fim)) # compute maxpos, since it is not in bdp (yet)
logging.info('CubeSum::maxpos,val=%s,%f' % (str(pos1),maxval))
pos.append(pos1[0])
pos.append(pos1[1])
dt.tag("Moment-pos")
if self._bdp_in[3] != None: # check if SourceList
# print "BDP[3] type: ",self._bdp_in[3]._type
# a table (SourceList)
b1p = self._bdp_in[3]
ra = b1p.table.getFullColumnByName("RA")
dec = b1p.table.getFullColumnByName("DEC")
peak = b1p.table.getFullColumnByName("Peak")
if sources == []:
# use the whole SourceList
for (r,d,p) in zip(ra,dec,peak):
rdc = convert_sexa(r,d)
pos.append(rdc[0])
pos.append(rdc[1])
logging.info('SourceList::maxpos,val=%s,%f' % (str(rdc),p))
else:
# select specific ones from the source list
for ipos in sources:
if ipos < len(ra):
radec = convert_sexa(ra[ipos],dec[ipos])
pos.append(radec[0])
pos.append(radec[1])
logging.info('SourceList::maxpos,val=%s,%f' % (str(radec),peak[ipos]))
else:
logging.warning('Skipping illegal source number %d' % ipos)
dt.tag("SourceList-pos")
# if pos[] still blank, use the AT keyword.
if len(pos) == 0:
pos = self.getkey("pos")
# if still none, try the map center
if len(pos) == 0:
# @todo this could result in a masked pixel and cause further havoc
# @todo could also take the reference pixel, but that could be outside image
ia.open(self.dir(fin))
s = ia.summary()
pos = [int(s['shape'][0])/2, int(s['shape'][1])/2]
logging.warning("No input positions supplied, map center choosen: %s" % str(pos))
dt.tag("map-center")
# exhausted all sources where pos[] can be set; if still zero, bail out
if len(pos) == 0:
raise Exception,"No positions found from input BDP's or pos="
# convert this regular list to a list of tuples with duplicates removed
# sadly the order is lost.
pos = list(set(zip(pos[0::2],pos[1::2])))
npos = len(pos)
dt.tag("open")
bdp_name = self.mkext(fin,"csp")
b2 = CubeSpectrum_BDP(bdp_name)
self.addoutput(b2)
imval = range(npos) # spectra, one for each pos (placeholder)
planes = range(npos) # labels for the tables (placeholder)
images = {} # png's accumulated
for i in range(npos): # loop over pos, they can have mixed types now
sd = []
caption = "Spectrum"
xpos = pos[i][0]
ypos = pos[i][1]
if type(xpos) != type(ypos):
print "POS:",xpos,ypos
raise Exception,"position pair not of the same type"
if type(xpos)==int:
# for integers, boxes are allowed, even multiple
box = '%d,%d,%d,%d' % (xpos,ypos,xpos,ypos)
# convention for summary is (box)
cbox = '(%d,%d,%d,%d)' % (xpos,ypos,xpos,ypos)
# use extend here, not append, we want individual values in a list
sd.extend([xpos,ypos,cbox])
caption = "Average Spectrum at %s" % cbox
if False:
# this will fail on 3D cubes (see CAS-7648)
imval[i] = casa.imval(self.dir(fin),box=box)
else:
# work around that CAS-7648 bug
# another approach is the ia.getprofile(), see CubeStats, this will
# also integrate over regions, imval will not (!!!)
region = 'centerbox[[%dpix,%dpix],[1pix,1pix]]' % (xpos,ypos)
caption = "Average Spectrum at %s" % region
imval[i] = casa.imval(self.dir(fin),region=region)
elif type(xpos)==str:
                # this is tricky: stay under 1 pixel, or you get a 2x2 region back.
region = 'centerbox[[%s,%s],[1pix,1pix]]' % (xpos,ypos)
caption = "Average Spectrum at %s" % region
sd.extend([xpos,ypos,region])
imval[i] = casa.imval(self.dir(fin),region=region)
else:
print "Data type: ",type(xpos)
raise Exception,"Data type for region not handled"
dt.tag("imval")
flux = imval[i]['data']
if len(flux.shape) > 1: # rare case if we step on a boundary between cells?
logging.warning("source %d has spectrum shape %s: averaging the spectra" % (i,repr(flux.shape)))
flux = np.average(flux,axis=0)
logging.debug('minmax: %f %f %d' % (flux.min(),flux.max(),len(flux)))
smax.append(flux.max())
if i==0: # for first point record few extra things
if len(imval[i]['coords'].shape) == 2: # normal case: 1 pixel
freqs = imval[i]['coords'].transpose()[2]/1e9 # convert to GHz @todo: input units ok?
elif len(imval[i]['coords'].shape) == 3: # rare case if > 1 point in imval()
freqs = imval[i]['coords'][0].transpose()[2]/1e9 # convert to GHz @todo: input units ok?
else:
logging.fatal("bad shape %s in freq return from imval - SHOULD NEVER HAPPEN" % imval[i]['coords'].shape)
chans = np.arange(len(freqs)) # channels 0..nchans-1
unit = imval[i]['unit']
restfreq = casa.imhead(self.dir(fin),mode="get",hdkey="restfreq")['value']/1e9 # in GHz
dt.tag("imhead")
vel = (1-freqs/restfreq)*utils.c # @todo : use a function (and what about relativistic?)
# construct the Table for CubeSpectrum_BDP
# @todo note data needs to be a tuple, later to be column_stack'd
labels = ["channel" ,"frequency" ,"flux" ]
units = ["number" ,"GHz" ,unit ]
data = (chans ,freqs ,flux )
if i==0:
# plane 0 : we are allowing a multiplane table, so the first plane is special
table = Table(columns=labels,units=units,data=np.column_stack(data),planes=["0"])
else:
# planes 1,2,3.... are stacked onto the previous one
table.addPlane(np.column_stack(data),"%d" % i)
# example plot , one per position for now
if use_vel:
x = vel
xlab = 'VLSR (km/s)'
else:
x = chans
xlab = 'Channel'
y = [flux]
sd.append(xlab)
if type(xpos)==int:
# grab the RA/DEC... kludgy
h = casa.imstat(self.dir(fin),box=box)
ra = h['blcf'].split(',')[0]
dec = h['blcf'].split(',')[1]
title = '%s %d @ %d,%d = %s,%s' % (bdp_name,i,xpos,ypos,ra,dec)
else:
title = '%s %d @ %s,%s' % (bdp_name,i,xpos,ypos) # or use box, once we allow non-points
myplot = APlot(ptype=self._plot_type,pmode=self._plot_mode, abspath=self.dir())
ylab = 'Flux (%s)' % unit
p1 = "%s_%d" % (bdp_name,i)
myplot.plotter(x,y,title,p1,xlab=xlab,ylab=ylab,thumbnail=True)
# Why not use p1 as the key?
ii = images["pos%d" % i] = myplot.getFigure(figno=myplot.figno,relative=True)
thumbname = myplot.getThumbnail(figno=myplot.figno,relative=True)
sd.extend([ii, thumbname, caption, fin])
self.spec_description.append(sd)
logging.regression("CSP: %s" % str(smax))
image = Image(images=images, description="CubeSpectrum")
b2.setkey("image",image)
b2.setkey("table",table)
b2.setkey("sigma",csigma) # TODO: not always available
b2.setkey("mean",cmean) # TODO: not always available
if True:
# @todo only first plane due to limitation in exportTable()
islash = bdp_name.find('/')
if islash < 0:
tabname = self.dir("testCubeSpectrum.tab")
else:
tabname = self.dir(bdp_name[:islash] + "/testCubeSpectrum.tab")
table.exportTable(tabname,cols=["frequency" ,"flux"])
dt.tag("done")
# For a single spectrum this is
# SummaryEntry([[data for spec1]], "CubeSpectrum_AT",taskid)
# For multiple spectra this is
# SummaryEntry([[data for spec1],[data for spec2],...], "CubeSpectrum_AT",taskid)
self._summary["spectra"] = SummaryEntry(self.spec_description,"CubeSpectrum_AT",self.id(True))
taskargs = "pos="+str(pos)
taskargs += ' <span style="background-color:white"> ' + fin.split('/')[0] + ' </span>'
for v in self._summary:
self._summary[v].setTaskArgs(taskargs)
dt.tag("summary")
dt.end()
def maxpos_im(self, im):
"""Find the position of the maximum in an image.
Helper function returns the position of the maximum value in the
image as an [x,y] list in 0-based pixel coordinates.
Parameters
----------
im : String, CASA image name
Returns
-------
list
[x,y] list in 0-based pixel coordinates.
"""
# 2D images don't store maxpos/maxval yet, so we need to grab them
# imstat on a 512^2 image is about 0.032 sec
# ia.getchunk is about 0.008, about 4x faster.
# we're going to assume 2D images fit in memory and always use getchunk
# @todo review the use of the new casautil.getdata() style routines
if True:
ia = taskinit.iatool()
ia.open(im)
plane = ia.getchunk(blc=[0,0,0,-1],trc=[-1,-1,-1,-1],dropdeg=True)
v = ma.masked_invalid(plane)
ia.close()
mp = np.unravel_index(v.argmax(), v.shape)
maxval = v[mp[0],mp[1]]
maxpos = [int(mp[0]),int(mp[1])]
else:
imstat0 = casa.imstat(im)
maxpos = imstat0["maxpos"][:2].tolist()
maxval = imstat0["max"][0]
#print "MAXPOS_IM:::",maxpos,maxval,type(maxpos[0])
return (maxpos,maxval)
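    # Illustrative sketch (plain numpy/numpy.ma, no CASA) of the masked-argmax
    # step used above, not part of the original file:
    #     plane = np.array([[1.0, np.nan], [3.0, 2.0]])
    #     v = ma.masked_invalid(plane)                  # NaN pixels are masked out
    #     mp = np.unravel_index(v.argmax(), v.shape)    # -> (1, 0), i.e. value 3.0
    # maxpos_im returns that index pair as [x, y] plus the peak value, as documented.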
def summary(self):
"""Returns the summary dictionary from the AT, for merging
into the ADMIT Summary object.
CubeSpectrum_AT adds the following to ADMIT summary:
.. table::
:class: borderless
+---------+----------+---------------------------+
| Key | type | Description |
+=========+==========+===========================+
| spectra | list | the spectral plots |
+---------+----------+---------------------------+
Parameters
----------
None
Returns
-------
dict
Dictionary of SummaryEntry
"""
if hasattr(self,"_summary"):
return self._summary
else:
return {}
def convert_sexa(ra,de):
""" this peculiar function converts something like
'18:29:56.713', '+01.13.15.61'
to
'18h29m56.713s', '+01d13m15.61s'
It's a mystery why the output format from casa.sourcefind()
has this peculiar H:M:S/D.M.S format
"""
ran = ra.replace(':','h',1).replace(':','m',1)+'s'
den = de.replace('.','d',1).replace('.','m',1)+'s'
return ran,den
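# Illustrative usage, not part of the original file:
#     convert_sexa('18:29:56.713', '+01.13.15.61')
#     # -> ('18h29m56.713s', '+01d13m15.61s')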
|