hexsha stringlengths 40 40 | size int64 1 1.03M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 239 | max_stars_repo_name stringlengths 5 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses sequencelengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 239 | max_issues_repo_name stringlengths 5 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses sequencelengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 239 | max_forks_repo_name stringlengths 5 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses sequencelengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.03M | avg_line_length float64 1 958k | max_line_length int64 1 1.03M | alphanum_fraction float64 0 1 |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4a21667339c2f8fc566e000b8dbe9b0df517ca6d | 376 | py | Python | downloadData.py | hagianga21/MaxCon_RL | 60f24eb74673998de3a37db251f4222ee33ba1c4 | ["MIT"] | 3 | 2021-06-09T15:40:44.000Z | 2022-03-30T03:16:07.000Z | downloadData.py | hagianga21/MaxCon_RL | 60f24eb74673998de3a37db251f4222ee33ba1c4 | ["MIT"] | null | null | null | downloadData.py | hagianga21/MaxCon_RL | 60f24eb74673998de3a37db251f4222ee33ba1c4 | ["MIT"] | 1 | 2021-11-20T07:44:56.000Z | 2021-11-20T07:44:56.000Z |
import os
from google_drive_downloader import GoogleDriveDownloader as gdd
#Download training file
gdd.download_file_from_google_drive(file_id='1MkeJlbzueNJdjatyvZ4B4cO4WEnn1MwS',
dest_path='./Data/train.zip',
showsize=True,
unzip=True)
os.remove("./Data/train.zip")
 | 37.6 | 80 | 0.593085 |
4a2166ba2fe95203822cafea8333da6491cf96bf | 5,769 | py | Python | models/sepc/builder.py | xchani/simpledet | 3e7587f0a4403003bc4eda8a8403ec385bb980f8 | ["Apache-2.0"] | 3,195 | 2019-01-29T09:08:46.000Z | 2022-03-29T08:20:44.000Z | models/sepc/builder.py | xchani/simpledet | 3e7587f0a4403003bc4eda8a8403ec385bb980f8 | ["Apache-2.0"] | 275 | 2019-01-29T10:16:12.000Z | 2022-03-15T17:56:39.000Z | models/sepc/builder.py | xchani/simpledet | 3e7587f0a4403003bc4eda8a8403ec385bb980f8 | ["Apache-2.0"] | 563 | 2019-01-29T09:32:07.000Z | 2022-03-22T06:58:01.000Z |
import mxnet as mx
import mxnext as X
from models.NASFPN.builder import RetinaNetNeck, RetinaNetNeckWithBN
from models.sepc.sepc_neck import SEPCFPN
from utils.patch_config import patch_config_as_nothrow
from models.NASFPN.builder import RetinaNetHeadWithBN
import math
class RetinaNetNeckWithBNWithSEPC(RetinaNetNeckWithBN):
def __init__(self, pNeck, pSEPC):
super().__init__(pNeck)
self.psepc = patch_config_as_nothrow(pSEPC)
self.neck_with_sepc = None
stride, pad_sizes = pSEPC.stride, pSEPC.pad_sizes
for i in range(len(stride)):
if pad_sizes[0] % stride[i] != 0 or pad_sizes[1] % stride[i] != 0:
print('Warning: This implementation of ibn used in SEPC expects (it\'s better) the (padded) input sizes {} dividable by the stride {}. '\
'When this is not satisfied, you should manually check that the feature_sizes at stride \'s\' statisfy the following: ' \
'\'ceil(pad_sizes[0]/s)==feature_sizes[0]\' and \'ceil(pad_sizes[1]/s)==feature_size[1]\''.format(pad_sizes, stride[i]))
self.feat_sizes = [[math.ceil(pad_sizes[0]/stride[i]), math.ceil(pad_sizes[1]/stride[i])] for i in range(len(stride))]
def get_retinanet_neck(self, data):
if self.neck_with_sepc is not None:
return self.neck_with_sepc
fpn_outs = super().get_retinanet_neck(data)
p3_conv, p4_conv, p5_conv, p6, p7 = fpn_outs['stride8'], fpn_outs['stride16'], fpn_outs['stride32'], fpn_outs['stride64'], fpn_outs['stride128']
# add SEPC module after default FPN
sepc_inputs = [p3_conv, p4_conv, p5_conv, p6, p7]
sepc_outs = SEPCFPN(
sepc_inputs,
out_channels=self.psepc.out_channels,
pconv_deform=self.psepc.pconv_deform,
ibn=self.psepc.ibn or False,
Pconv_num=self.psepc.pconv_num,
start_level=self.psepc.start_level or 1,
norm=self.psepc.normalizer,
lcconv_deform=self.psepc.lcconv_deform or False,
bilinear_upsample=self.psepc.bilinear_upsample or False,
feat_sizes=self.feat_sizes,
)
self.neck_with_sepc = dict(
stride128=sepc_outs[4],
stride64=sepc_outs[3],
stride32=sepc_outs[2],
stride16=sepc_outs[1],
stride8=sepc_outs[0]
)
return self.neck_with_sepc
class RetinaNetHeadWithBNWithSEPC(RetinaNetHeadWithBN):
def __init__(self, pRpn):
super().__init__(pRpn)
def _cls_subnet(self, conv_feat, conv_channel, num_base_anchor, num_class, stride, nb_conv=0):
p = self.p
if nb_conv <= 0:
cls_conv4_relu = conv_feat
if p.fp16:
cls_conv4_relu = X.to_fp32(cls_conv4_relu, name="cls_conv4_fp32")
output_channel = num_base_anchor * (num_class - 1)
output = X.conv(
data=cls_conv4_relu,
kernel=3,
filter=output_channel,
weight=self.cls_pred_weight,
bias=self.cls_pred_bias,
no_bias=False,
name="cls_pred"
)
return output
return super()._cls_subnet(conv_feat, conv_channel, num_base_anchor, num_class, stride)
def _bbox_subnet(self, conv_feat, conv_channel, num_base_anchor, num_class, stride, nb_conv=0):
p = self.p
if nb_conv <= 0:
bbox_conv4_relu = conv_feat
if p.fp16:
bbox_conv4_relu = X.to_fp32(bbox_conv4_relu, name="bbox_conv4_fp32")
output_channel = num_base_anchor * 4
output = X.conv(
data=bbox_conv4_relu,
kernel=3,
filter=output_channel,
weight=self.bbox_pred_weight,
bias=self.bbox_pred_bias,
no_bias=False,
name="bbox_pred"
)
return output
return super()._bbox_subnet(conv_feat, conv_channel, num_base_anchor, num_class, stride)
def get_output(self, conv_feat):
if self._cls_logit_dict is not None and self._bbox_delta_dict is not None:
return self._cls_logit_dict, self._bbox_delta_dict
p = self.p
stride = p.anchor_generate.stride
if not isinstance(stride, tuple):
            stride = (stride,)
conv_channel = p.head.conv_channel
num_base_anchor = len(p.anchor_generate.ratio) * len(p.anchor_generate.scale)
num_class = p.num_class
cls_logit_dict = dict()
bbox_delta_dict = dict()
for s in stride:
conv_feat_cls, conv_feat_loc = mx.sym.split(conv_feat["stride%s" % s], num_outputs=2, axis=1)
cls_logit = self._cls_subnet(
# conv_feat=conv_feat["stride%s" % s],
conv_feat=conv_feat_cls,
conv_channel=conv_channel,
num_base_anchor=num_base_anchor,
num_class=num_class,
stride=s,
nb_conv=self.p.nb_conv if self.p.nb_conv is not None else 4,
)
bbox_delta = self._bbox_subnet(
# conv_feat=conv_feat["stride%s" % s],
conv_feat=conv_feat_loc,
conv_channel=conv_channel,
num_base_anchor=num_base_anchor,
num_class=num_class,
stride=s,
                nb_conv=self.p.nb_conv if self.p.nb_conv is not None else 4,
)
cls_logit_dict["stride%s" % s] = cls_logit
bbox_delta_dict["stride%s" % s] = bbox_delta
self._cls_logit_dict = cls_logit_dict
self._bbox_delta_dict = bbox_delta_dict
        return self._cls_logit_dict, self._bbox_delta_dict
 | 44.038168 | 153 | 0.610504 |
4a2166d9ca0ee8874b680bf274a72c2715257573 | 7,544 | py | Python | eodatasets3/prepare/nasa_c_m_mcd43a1_6_prepare.py | GeoscienceAustralia/eo-datasets | 520b27acc5fbc8e23efdc175b044dde9e603f5f6 | ["Apache-2.0"] | 23 | 2019-03-21T22:22:13.000Z | 2021-09-08T05:26:07.000Z | eodatasets3/prepare/nasa_c_m_mcd43a1_6_prepare.py | GeoscienceAustralia/eo-datasets | 520b27acc5fbc8e23efdc175b044dde9e603f5f6 | ["Apache-2.0"] | 154 | 2015-07-02T23:03:04.000Z | 2021-09-09T00:54:49.000Z | eodatasets3/prepare/nasa_c_m_mcd43a1_6_prepare.py | GeoscienceAustralia/eo-datasets | 520b27acc5fbc8e23efdc175b044dde9e603f5f6 | ["Apache-2.0"] | 29 | 2016-02-15T03:24:47.000Z | 2021-09-08T01:44:32.000Z |
import datetime
import re
import uuid
from pathlib import Path
from typing import Dict, Iterable
import click
import rasterio
from defusedxml import ElementTree
from eodatasets3 import serialise
from eodatasets3.utils import ItemProvider, read_paths_from_file
from ..metadata.valid_region import valid_region
MCD43A1_NS = uuid.UUID(hex="80dc431b-fc6c-4e6f-bf08-585eba1d8dc9")
def parse_xml(filepath: Path):
"""
Extracts metadata attributes from the xml document distributed
alongside the MCD43A1 tiles.
"""
root = ElementTree.parse(str(filepath), forbid_dtd=True).getroot()
granule_id = root.find("*//ECSDataGranule/LocalGranuleID").text
instrument = root.find("*//Platform/Instrument/InstrumentShortName").text
platform = "+".join(
sorted(
(ele.text for ele in root.findall("*//Platform/PlatformShortName")),
reverse=True,
)
)
start_date = root.find("*//RangeDateTime/RangeBeginningDate").text
start_time = root.find("*//RangeDateTime/RangeBeginningTime").text
end_date = root.find("*//RangeDateTime/RangeEndingDate").text
end_time = root.find("*//RangeDateTime/RangeEndingTime").text
v_tile = (
[
ele
for ele in root.findall("*//PSA")
if ele.find("PSAName").text == "VERTICALTILENUMBER"
][0]
.find("PSAValue")
.text
)
h_tile = (
[
ele
for ele in root.findall("*//PSA")
if ele.find("PSAName").text == "HORIZONTALTILENUMBER"
][0]
.find("PSAValue")
.text
)
creation_dt = root.find("*//InsertTime").text
return {
"granule_id": granule_id,
"instrument": instrument,
"platform": platform,
"vertical_tile": int(v_tile),
"horizontal_tile": int(h_tile),
"from_dt": (
datetime.datetime.strptime(
start_date + " " + start_time, "%Y-%m-%d %H:%M:%S.%f"
).replace(tzinfo=datetime.timezone.utc)
),
"to_dt": (
datetime.datetime.strptime(
end_date + " " + end_time, "%Y-%m-%d %H:%M:%S.%f"
).replace(tzinfo=datetime.timezone.utc)
),
"creation_dt": (
datetime.datetime.strptime(creation_dt, "%Y-%m-%d %H:%M:%S.%f").replace(
tzinfo=datetime.timezone.utc
)
),
}
def get_band_info(imagery_file: Path):
"""
Summarises the available image bands for indexing into datacube
Separate references are provided for each of the brdf parameter bands:
volumetric (vol), isometric (iso) and geometric (geo)
"""
band_info = {}
with rasterio.open(imagery_file, "r") as collection:
datasets = collection.subdatasets
for ds in datasets:
raster_params = re.match(
"(?P<fmt>HDF4_EOS:EOS_GRID):(?P<path>[^:]+):(?P<layer>.*)$", ds
)
if "_Quality_" in raster_params["layer"]:
name = raster_params["layer"].split(":")[-1]
band_info[name] = {
"path": Path(raster_params["path"]).name,
"layer": raster_params["layer"],
}
else:
name = raster_params["layer"].split(":")[-1]
# BRDF parameter bands are isotropic, volumetric and geometric
for idx, band_name in enumerate(["iso", "vol", "geo"], 1):
band_info[name + "_" + band_name] = {
"path": Path(raster_params["path"]).name,
"layer": raster_params["layer"],
"band": idx,
}
return band_info, datasets
def _get_dataset_properties(rasterio_path: str):
"""
returns dataset properties based on a sample dataset
"""
props = {}
with rasterio.open(rasterio_path, "r") as ds:
props["eo:gsd"] = float(ds.tags()["CHARACTERISTICBINSIZE"])
props["grids"] = {
"default": {"shape": list(ds.shape), "transform": list(ds.transform)}
}
props["crs"] = ds.crs.wkt
return props
def process_datasets(input_path: Path, xml_file: Path) -> Iterable[Dict]:
"""
Generates a metadata document for each tile provided,
requires a path to the input tile (hdf) and the
corresponding xml document describing the dataset.
"""
band_info, datasets = get_band_info(input_path)
xml_md = parse_xml(xml_file)
ds_props = _get_dataset_properties(datasets[0])
md = {}
md["id"] = str(uuid.uuid5(MCD43A1_NS, xml_md["granule_id"]))
md["product"] = {"href": "https://collections.dea.ga.gov.au/nasa_c_m_mcd43a1_6"}
md["crs"] = ds_props.pop("crs")
md["geometry"] = valid_region(datasets)
md["grids"] = ds_props.pop("grids")
md["lineage"] = {}
md["measurements"] = band_info
md["properties"] = {
"dtr:start_datetime": xml_md["from_dt"].isoformat(),
"dtr:end_datetime": xml_md["to_dt"].isoformat(),
"eo:instrument": xml_md["instrument"],
"eo:platform": xml_md["platform"],
"eo:gsd": ds_props.pop("eo:gsd"),
"eo:epsg": None,
"item:providers": [
{
"name": "National Aeronautics and Space Administration",
"roles": [ItemProvider.PRODUCER.value, ItemProvider.PROCESSOR.value],
"url": "https://modis.gsfc.nasa.gov/data/dataprod/mod43.php",
},
{
"name": "United States Geological Society",
"roles": [ItemProvider.PROCESSOR.value],
"url": "https://lpdaac.usgs.gov/products/mcd43a1v006/",
},
],
"odc:creation_datetime": xml_md["creation_dt"].isoformat(),
"odc:file_format": "HDF4_EOS:EOS_GRID",
"odc:region_code": "h{}v{}".format(
xml_md["horizontal_tile"], xml_md["vertical_tile"]
),
}
return [md]
def _process_datasets(output_dir, datasets, checksum):
"""
Wrapper function for processing multiple datasets
"""
for dataset in datasets:
docs = process_datasets(dataset, Path(str(dataset) + ".xml"))
outfile = output_dir / (dataset.stem + ".ga-md.yaml")
serialise.dump_yaml(outfile, *docs)
@click.command(
help="""\b
Prepare MODIS MCD43A1 tiles for indexing into a Data Cube.
This prepare script supports the HDF4_EOS:EOS_GRID datasets
with associated xml documents
Example usage: yourscript.py --output [directory] input_file1 input_file2"""
)
@click.option(
"--output",
"output_dir",
help="Write datasets into this directory",
type=click.Path(exists=False, writable=True, dir_okay=True),
)
@click.argument(
"datasets", type=click.Path(exists=True, readable=True, writable=False), nargs=-1
)
@click.option(
"--checksum/--no-checksum",
help="Checksum the input dataset to confirm match",
default=False,
)
@click.option(
"-f",
"dataset_listing_files",
type=click.Path(exists=True, readable=True, writable=False),
help="file containing a list of input paths (one per line)",
multiple=True,
)
def main(output_dir, datasets, checksum, dataset_listing_files):
datasets = [Path(p) for p in datasets]
for listing_file in dataset_listing_files:
datasets.extend(read_paths_from_file(Path(listing_file)))
return _process_datasets(Path(output_dir), datasets, checksum)
if __name__ == "__main__":
main()
| 33.380531 | 85 | 0.598091 |
4a2167369887a87f8a82e4390e458008301ddb9e | 10,236 | py | Python | src/awsCluster/cfnCluster/CFNClusterManager.py | AspirinCode/jupyter-genomics | d45526fab3de8fcc3d9fef005d4e39368ff3dfdc | ["MIT"] | 2 | 2019-01-04T08:17:27.000Z | 2021-04-10T02:59:35.000Z | src/awsCluster/cfnCluster/CFNClusterManager.py | AspirinCode/jupyter-genomics | d45526fab3de8fcc3d9fef005d4e39368ff3dfdc | ["MIT"] | null | null | null | src/awsCluster/cfnCluster/CFNClusterManager.py | AspirinCode/jupyter-genomics | d45526fab3de8fcc3d9fef005d4e39368ff3dfdc | ["MIT"] | 2 | 2021-09-10T02:57:51.000Z | 2021-09-21T00:16:56.000Z |
__author__ = 'Guorong Xu<[email protected]>'
import os
import re
from shutil import copyfile
## Global variable: default configuation file for CFNCluster
config_template_file = os.getcwd().replace("notebooks", "data") + "/config"
def install_cfn_cluster():
print "Installing cfncluster package..."
print os.popen("pip install cfncluster").read()
def upgrade_cfn_cluster():
print "Upgrading cfncluster package..."
print os.popen("pip install --upgrade cfncluster").read()
def make_config_file():
dir = os.path.dirname(config_template_file)
if not os.path.exists(dir):
os.makedirs(dir)
filewriter = open(config_template_file, "w+")
filewriter.write("[aws]" + "\n")
filewriter.write("aws_region_name = us-east-1" + "\n")
filewriter.write("aws_access_key_id = ***" + "\n")
filewriter.write("aws_secret_access_key = ***" + "\n")
filewriter.write("[cluster elasticsearch]" + "\n")
filewriter.write("vpc_settings = ucsd" + "\n")
filewriter.write("key_name = ***" + "\n")
filewriter.write("master_instance_type = m3.large" + "\n")
filewriter.write("compute_instance_type = r3.4xlarge" + "\n")
filewriter.write("initial_queue_size = 0" + "\n")
filewriter.write("cluster_type = spot" + "\n")
filewriter.write("spot_price = 0.5" + "\n")
filewriter.write("ebs_settings = custom" + "\n")
filewriter.write("s3_read_resource = arn:aws:s3:::bucket_name" + "\n")
filewriter.write("s3_read_write_resource = arn:aws:s3:::bucket_name/*" + "\n")
filewriter.write("post_install = s3://bucket_name/path/to/postinstall.sh" + "\n")
filewriter.write("[vpc ucsd]" + "\n")
filewriter.write("master_subnet_id = subnet-00000000" + "\n")
filewriter.write("vpc_id = vpc-00000000" + "\n")
filewriter.write("[global]" + "\n")
filewriter.write("update_check = true" + "\n")
filewriter.write("sanity_check = true" + "\n")
filewriter.write("cluster_template = elasticsearch" + "\n")
filewriter.write("[ebs custom]" + "\n")
filewriter.write("ebs_snapshot_id = snap-a6e477ff" + "\n")
filewriter.write("volume_size = 200" + "\n")
filewriter.close()
## viewing CFNCluster configuration settings
def view_cfncluster_config():
if not os.path.isfile(config_template_file):
make_config_file()
cfncluster_config = os.environ['HOME'] + "/.cfncluster/config"
if not os.path.isfile(cfncluster_config):
if not os.path.exists(os.path.dirname(cfncluster_config)):
os.makedirs(os.path.dirname(cfncluster_config))
copyfile(config_template_file, cfncluster_config)
with open(config_template_file) as fp:
lines = fp.readlines()
for line in lines:
print line[:-1]
## inserting AWS access keys
def insert_access_keys(aws_access_key_id="***",
aws_secret_access_key="***"):
if not os.path.isfile(config_template_file):
make_config_file()
with open(config_template_file, 'r+') as f:
lines = f.readlines()
f.seek(0)
f.truncate()
for line in lines:
if 'aws_access_key_id' in line:
line = line.replace(line[line.find("=") + 2:], aws_access_key_id + "\n")
if 'aws_secret_access_key' in line:
line = line.replace(line[line.find("=") + 2:], aws_secret_access_key + "\n")
f.write(line)
## configuring aws region name
def config_aws_region_name(aws_region_name="us-east-1"):
if not os.path.isfile(config_template_file):
make_config_file()
with open(config_template_file, 'r+') as f:
lines = f.readlines()
f.seek(0)
f.truncate()
for line in lines:
if 'aws_region_name' in line:
line = line.replace(line[line.find("=") + 2:], aws_region_name + "\n")
f.write(line)
## configuring key pem file
def config_key_name(key_name):
if not os.path.isfile(config_template_file):
make_config_file()
private_key = key_name[key_name.rfind("/") + 1:-4]
with open(config_template_file, 'r+') as f:
lines = f.readlines()
f.seek(0)
f.truncate()
for line in lines:
if 'key_name' in line:
line = line.replace(line[line.find("=") + 2:], private_key + "\n")
f.write(line)
## configuring master instance type and computer instance types
def config_instance_types(master_instance_type="m3.large", compute_instance_type="r3.2xlarge"):
if not os.path.isfile(config_template_file):
make_config_file()
with open(config_template_file, 'r+') as f:
lines = f.readlines()
f.seek(0)
f.truncate()
for line in lines:
if 'master_instance_type' in line:
line = line.replace(line[line.find("=") + 2:], master_instance_type + "\n")
if 'compute_instance_type' in line:
line = line.replace(line[line.find("=") + 2:], compute_instance_type + "\n")
f.write(line)
## configuring initial cluster size
def config_initial_cluster_size(initial_cluster_size="1"):
if not os.path.isfile(config_template_file):
make_config_file()
with open(config_template_file, 'r+') as f:
lines = f.readlines()
f.seek(0)
f.truncate()
for line in lines:
if 'initial_queue_size' in line:
line = line.replace(line[line.find("=") + 2:], initial_cluster_size + "\n")
f.write(line)
## configuring spot price for computer instances
def config_spot_price(spot_price="0.5"):
if not os.path.isfile(config_template_file):
make_config_file()
with open(config_template_file, 'r+') as f:
lines = f.readlines()
f.seek(0)
f.truncate()
for line in lines:
if 'spot_price' in line:
line = line.replace(line[line.find("=") + 2:], spot_price + "\n")
f.write(line)
## configuring S3 read/write resource: bucket name
def config_s3_resource(s3_read_resource="s3://bucket_name/", s3_read_write_resource="s3://bucket_name/"):
if not os.path.isfile(config_template_file):
make_config_file()
## s3://ucsd-ccbb-wgs-test-us-east-1/RNASeq_Pipeline_Code/test_data
read_bucket_name = s3_read_resource[5:]
read_bucket_name = read_bucket_name[:read_bucket_name.find("/")]
write_bucket_name = s3_read_write_resource[5:]
write_bucket_name = write_bucket_name[:write_bucket_name.find("/")]
with open(config_template_file, 'r+') as f:
lines = f.readlines()
f.seek(0)
f.truncate()
for line in lines:
if 's3_read_resource' in line:
line = line.replace(line[line.find("=") + 2:], "arn:aws:s3:::" + read_bucket_name + "\n")
if 's3_read_write_resource' in line:
line = line.replace(line[line.find("=") + 2:], "arn:aws:s3:::" + write_bucket_name + "/*\n")
f.write(line)
## configuring post installation shell script for creating CFNCluster
def config_post_install(post_install="s3://bucket_name/path/to/postinstall.sh"):
if not os.path.isfile(config_template_file):
make_config_file()
with open(config_template_file, 'r+') as f:
lines = f.readlines()
f.seek(0)
f.truncate()
for line in lines:
if 'post_install' in line:
line = line.replace(line[line.find("=") + 2:], post_install + "\n")
f.write(line)
## configuring vpc and subnet ids
def config_vpc_subnet_id(master_subnet_id="subnet-00000000",
vpc_id="vpc-00000000"):
if not os.path.isfile(config_template_file):
make_config_file()
with open(config_template_file, 'r+') as f:
lines = f.readlines()
f.seek(0)
f.truncate()
for line in lines:
if 'master_subnet_id' in line:
line = line.replace(line[line.find("=") + 2:], master_subnet_id + "\n")
if 'vpc_id' in line:
line = line.replace(line[line.find("=") + 2:], vpc_id + "\n")
f.write(line)
## configuring EBS snapshot id
def config_ebs_snapshot_id(ebs_snapshot_id="snap-a6e477ff"):
if not os.path.isfile(config_template_file):
make_config_file()
with open(config_template_file, 'r+') as f:
lines = f.readlines()
f.seek(0)
f.truncate()
for line in lines:
if 'ebs_snapshot_id' in line:
line = line.replace(line[line.find("=") + 2:], ebs_snapshot_id + "\n")
f.write(line)
## configuring EBS volume size to attach to CFNCluster
def config_volume_size(volume_size="200"):
if not os.path.isfile(config_template_file):
make_config_file()
with open(config_template_file, 'r+') as f:
lines = f.readlines()
f.seek(0)
f.truncate()
for line in lines:
if 'volume_size' in line:
line = line.replace(line[line.find("=") + 2:], volume_size + "\n")
f.write(line)
## listing all current cfnclusters
def list_cfn_cluster():
print os.popen("cfncluster list").read()
## creating a CFNCluster with the specific cluster name
def create_cfn_cluster(cluster_name="mycluster"):
master_ip_address = ""
response = os.popen("cfncluster status " + cluster_name).read()
if response.find("CREATE_COMPLETE") > -1:
print "cluster " + cluster_name + " does exist."
lines = re.split(r'\n+', response)
for line in lines:
if line.find("MasterPublicIP") > -1:
master_ip_address = line[line.find("=") + 2:-1]
print line
return master_ip_address
response = os.popen("cfncluster create " + cluster_name).read()
lines = re.split(r'\n+', response)
for line in lines:
if line.find("MasterPublicIP") > -1:
master_ip_address = line[line.find("=") + 2:-1]
print line
return master_ip_address
## deleting the specific CFNCluster
def delete_cfn_cluster(cluster_name="mycluster"):
print os.popen("cfncluster delete " + cluster_name).read()
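# Illustrative usage sketch (added for clarity; not part of the original module).
# The key path, bucket name and cluster name below are placeholder assumptions,
# and the helpers called are the module-level functions defined above.
def _example_workflow():
    """Hedged example of a typical configure-then-create sequence."""
    config_aws_region_name("us-east-1")
    config_key_name("/path/to/my-key.pem")  # assumed local .pem key file
    config_instance_types("m3.large", "r3.4xlarge")
    config_initial_cluster_size("1")
    config_s3_resource("s3://my-bucket/", "s3://my-bucket/")  # assumed bucket
    return create_cfn_cluster("mycluster")  # returns the master public IP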
if __name__ == "__main__":
view_cfncluster_config()
| 34.464646 | 108 | 0.623681 |
4a2167d89e691adbe7171dbe66e1c37f14bea325 | 80 | py | Python | tuna/infrastructure/arguments/__init__.py | russellnakamura/thetuna | 0e445baf780fb65e1d92fe1344ebdf21bf81573c | ["MIT"] | null | null | null | tuna/infrastructure/arguments/__init__.py | russellnakamura/thetuna | 0e445baf780fb65e1d92fe1344ebdf21bf81573c | ["MIT"] | null | null | null | tuna/infrastructure/arguments/__init__.py | russellnakamura/thetuna | 0e445baf780fb65e1d92fe1344ebdf21bf81573c | ["MIT"] | null | null | null |
from arguments import BaseArguments
from argumentbuilder import ArgumentBuilder
| 26.666667 | 43 | 0.9 |
4a21689559df3214883514e25528bec7838bfaa6 | 16,896 | py | Python | framework/generic_classes.py | petrovp/networkx-related | ebe7053e032b527ebaa9565f96ba91145de3fd50 | ["BSD-3-Clause"] | null | null | null | framework/generic_classes.py | petrovp/networkx-related | ebe7053e032b527ebaa9565f96ba91145de3fd50 | ["BSD-3-Clause"] | null | null | null | framework/generic_classes.py | petrovp/networkx-related | ebe7053e032b527ebaa9565f96ba91145de3fd50 | ["BSD-3-Clause"] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (C) 2018 by
# Marta Grobelna <[email protected]>
# Petre Petrov <[email protected]>
# Rudi Floren <[email protected]>
# Tobias Winkler <[email protected]>
# All rights reserved.
# BSD license.
#
# Authors: Marta Grobelna <[email protected]>
# Petre Petrov <[email protected]>
# Rudi Floren <[email protected]>
# Tobias Winkler <[email protected]>
import random
from framework.settings_global import Settings
from framework.utils import *
class BoltzmannFrameworkError(Exception):
"""Base class for exceptions in the framework."""
class CombinatorialClass(object):
"""
Abstract base class for objects from mixed combinatorial classes.
CombinatorialClass instances are objects from a mixed combinatorial class (with l- and u-atoms). They do not
represent the class itself (the naming is sort of inaccurate).
"""
@property
def l_size(self):
"""Number of l-atoms in this object.
Returns
-------
int
L-size (number of labelled atoms).
"""
raise NotImplementedError
@property
def u_size(self):
"""Number of u-atoms in this object.
Returns
-------
int
U-size (number of unlabelled atoms).
"""
raise NotImplementedError
def l_atoms(self):
"""Iterator over all l-atoms."""
raise NotImplementedError
def u_atoms(self):
"""Iterator over all u-atoms."""
raise NotImplementedError
def random_l_atom(self):
"""Returns a random l-atom within this object or the object itself."""
rand_index = random.randrange(self.l_size)
return nth(self.l_atoms(), rand_index)
def random_u_atom(self):
"""Returns a random l-atom within this object or the object itself."""
rand_index = random.randrange(self.u_size)
return nth(self.u_atoms(), rand_index)
def replace_l_atoms(self, sampler, x, y, exceptions=None):
"""Replaces all l-atoms within this object.
Parameters
----------
sampler: BoltzmannSamplerBase
x: str
y: str
exceptions: list of CombinatorialClass
"""
raise NotImplementedError
def replace_u_atoms(self, sampler, x, y, exceptions=None):
"""Replaces all u-atoms."""
raise NotImplementedError
def assign_random_labels(self):
"""Assigns labels from [0, l-size) to all l-atoms in this object (including itself if it is an l-atom).
Notes
-----
...
"""
labels = random.sample(range(self.l_size), self.l_size)
for atom in self.l_atoms():
atom.label = labels.pop()
def underive_all(self):
"""Removes all wrapping derived classes from an object.
Returns
-------
CombinatorialClass
An object which is not an instance of DerivedClass.
"""
res = self
while isinstance(res, DerivedClass):
res = res.base_class_object
return res
def __str__(self):
"""Returns a string representation of this object."""
raise NotImplementedError
class DummyClass(CombinatorialClass):
"""
An object without internal structure that only tracks sizes.
Useful for more efficient testing of a sampler's size distribution.
Parameters
----------
l_size: int, optional (default=0)
u_size: int, optional (default=0)
"""
def __init__(self, l_size=0, u_size=0):
self._l_size = l_size
self._u_size = u_size
@property
def l_size(self):
return self._l_size
@property
def u_size(self):
return self._u_size
def l_atoms(self):
raise BoltzmannFrameworkError("Cannot iterate over atoms from dummy class")
def u_atoms(self):
raise BoltzmannFrameworkError("Cannot iterate over atoms from dummy class")
def random_l_atom(self):
return 0
def random_u_atom(self):
return 0
def replace_l_atoms(self, sampler, x, y, exceptions=None):
if exceptions is None:
exceptions = []
if len(exceptions) > self.l_size:
raise BoltzmannFrameworkError("Too many exceptions for substitution")
l_growth = -(self.l_size - len(exceptions))
u_growth = 0
for _ in range(self.l_size - len(exceptions)):
gamma = sampler.sample(x, y)
if gamma.l_size <= 0:
raise BoltzmannFrameworkError("You may not use l-substitution when class contains objects of l-size 0")
l_growth += gamma.l_size
u_growth += gamma.u_size
self._l_size += l_growth
self._u_size += u_growth
return self
def replace_u_atoms(self, sampler, x, y, exceptions=None):
if exceptions is None:
exceptions = []
if len(exceptions) > self.u_size:
raise BoltzmannFrameworkError("Too many exceptions for substitution")
l_growth = 0
u_growth = -(self.u_size - len(exceptions))
for _ in range(self.u_size - len(exceptions)):
gamma = sampler.sample(x, y)
if gamma.u_size <= 0:
raise BoltzmannFrameworkError("You may not use u-substitution when class contains objects of u-size 0")
l_growth += gamma.l_size
u_growth += gamma.u_size
self._l_size += l_growth
self._u_size += u_growth
return self
def __str__(self):
return "(l: {}, u: {}".format(self.l_size, self.u_size)
class ZeroAtomClass(CombinatorialClass):
"""Represents the zero-atom."""
@property
def l_size(self):
return 0
@property
def u_size(self):
return 0
# noinspection PyUnreachableCode
def l_atoms(self):
# This syntax implements an empty generator.
return
yield
# noinspection PyUnreachableCode
def u_atoms(self):
# This syntax implements an empty generator.
return
yield
def replace_l_atoms(self, sampler, x, y, exceptions=None):
return self
def replace_u_atoms(self, sampler, x, y, exceptions=None):
return self
def __str__(self):
return '1'
class LAtomClass(ZeroAtomClass):
"""Represents an l-atom (labelled atom)."""
@property
def l_size(self):
return 1
def l_atoms(self):
yield self
def replace_l_atoms(self, sampler, x, y, exceptions=None):
if exceptions is not None and self in exceptions:
return self
else:
return sampler.sample(x, y)
def __str__(self):
try:
return str(self.label)
except AttributeError:
return 'L'
class UAtomClass(ZeroAtomClass):
"""Represents a u-atom (unlabelled atom)."""
@property
def u_size(self):
return 1
def u_atoms(self):
yield self
def replace_u_atoms(self, sampler, x, y, exceptions=None):
if exceptions is not None and self in exceptions:
return self
else:
return sampler.sample(x, y)
def __str__(self):
return 'U'
class ProdClass(CombinatorialClass):
"""
Represents an object from a cartesian product of two combinatorial classes.
Parameters
----------
first: CombinatorialClass
second: CombinatorialClass
"""
def __init__(self, first, second):
self._first = first
self._second = second
@property
def first(self):
return self._first
@property
def second(self):
return self._second
@property
def l_size(self):
return self.first.l_size + self.second.l_size
@property
def u_size(self):
return self.first.u_size + self.second.u_size
def l_atoms(self):
for atom in itertools.chain(self.first.l_atoms(), self.second.l_atoms()):
yield atom
def u_atoms(self):
for atom in itertools.chain(self.first.u_atoms(), self.second.u_atoms()):
yield atom
def replace_l_atoms(self, sampler, x, y, exceptions=None):
self._first = self.first.replace_l_atoms(sampler, x, y, exceptions)
self._second = self.second.replace_l_atoms(sampler, x, y, exceptions)
return self
def replace_u_atoms(self, sampler, x, y, exceptions=None):
self._first = self.first.replace_u_atoms(sampler, x, y, exceptions)
self._second = self.second.replace_u_atoms(sampler, x, y, exceptions)
return self
def __str__(self):
return "({},{})".format(self.first, self.second)
class SetClass(CombinatorialClass):
"""
An object from the class of sets of objects of a combinatorial class.
Parameters
----------
elems: list of CombinatorialClass
"""
def __init__(self, elems):
self._elems = elems
def __len__(self):
"""Returns the number of elements in the set."""
return len(self._elems)
def __iter__(self):
"""Returns an iterator over the set."""
return iter(self._elems)
def append(self, obj):
self._elems.append(obj)
@property
def l_size(self):
return sum([elem.l_size for elem in self._elems])
@property
def u_size(self):
return sum([elem.u_size for elem in self._elems])
def l_atoms(self):
for elem in self._elems:
for atom in elem.l_atoms():
yield atom
def u_atoms(self):
for elem in self._elems:
for atom in elem.l_atoms():
yield atom
def replace_l_atoms(self, sampler, x, y, exceptions=None):
for index, child in enumerate(self._elems):
self._elems[index] = child.replace_l_atoms(sampler, x, y, exceptions)
return self
def replace_u_atoms(self, sampler, x, y, exceptions=None):
for index, child in enumerate(self._elems):
self._elems[index] = child.replace_u_atoms(sampler, x, y, exceptions)
return self
def __str__(self):
result = '['
for elem in self._elems:
result += "{}{}".format(str(elem), ',' if elem is not self._elems[-1] else '')
result += ']'
return result
class DerivedClass(CombinatorialClass):
"""
Base class for l-derived and u-derived classes.
A derived class is a combinatorial class where one atom is marked and does not count to the l-size/u-size.
This class is not meant to be instantiated directly. Instantiate LDerivedClass or UDerivedClass instead.
Parameters
----------
base_class_object: CombinatorialClass
The object form the underlying underived class.
marked_atom: CombinatorialClass, optional (default=None)
The distinguished atom. None stands for any random atom.
Raises
------
BoltzmannFrameworkError
If the given atom does not exist in the base class object.
"""
def __init__(self, base_class_object, marked_atom=None):
if type(self) is DerivedClass:
raise BoltzmannFrameworkError("Instantiate objects of LDerivedClass or UDerivedClass")
self._base_class_object = base_class_object
self._marked_atom = marked_atom
@property
def marked_atom(self):
"""Returns the marked atom."""
return self._marked_atom
@marked_atom.setter
def marked_atom(self, atom):
"""Sets the marked atom."""
if atom is not None:
# TODO is the check expensive?
atoms = itertools.chain(self.base_class_object.l_atoms(), self.base_class_object.u_atoms())
if self.marked_atom not in atoms:
raise BoltzmannFrameworkError("Given atom does not exist in base class object")
self._marked_atom = atom
@property
def base_class_object(self):
"""Returns the object from the underlying underived class."""
return self._base_class_object
def invert_derivation_order(self):
"""Inverts the derivation order.
Only works if the underlying class is a derived class as well.
Raises
------
BoltzmannFrameworkError
If the underlying class is no derived class.
"""
if not isinstance(self.base_class_object, DerivedClass):
msg = "Base class object not from derived class: {}".format(self.base_class_object)
raise BoltzmannFrameworkError(msg)
res = self.base_class_object
self._base_class_object = self.base_class_object.base_class_object
res._base_class_object = self
return res
@property
def l_size(self):
return self.base_class_object.l_size
@property
def u_size(self):
return self.base_class_object.u_size
def l_atoms(self):
return self.base_class_object.l_atoms()
def u_atoms(self):
return self.base_class_object.u_atoms()
def replace_l_atoms(self, sampler, x, y, exceptions=None):
raise NotImplementedError
def replace_u_atoms(self, sampler, x, y, exceptions=None):
raise NotImplementedError
def __str__(self):
raise NotImplementedError
class LDerivedClass(DerivedClass):
"""
Wrapper for an l-derived class.
An l-derived class is a combinatorial class where one l-atom is marked and does not count to the l-size.
Parameters
----------
base_class_object: CombinatorialClass
The object form the underlying underived class.
marked_l_atom: CombinatorialClass, optional (default=None)
The distinguished l-atom (one stands for any random atom).
"""
def __init__(self, base_class_object, marked_l_atom=None):
super(LDerivedClass, self).__init__(base_class_object, marked_l_atom)
@property
def l_size(self):
return self.base_class_object.l_size - 1
def l_atoms(self):
if self.marked_atom is None:
# Select a random l-atom as the marked l-atom (do not use the checking setter here).
self._marked_atom = self.base_class_object.random_l_atom()
for l_atom in self.base_class_object.l_atoms():
if l_atom != self.marked_atom:
yield l_atom
def replace_l_atoms(self, sampler, x, y, exceptions=None):
if exceptions is None:
exceptions = []
if self.marked_atom is None:
self._marked_atom = self.base_class_object.random_l_atom()
exceptions.append(self.marked_atom)
base_replaced = self.base_class_object.replace_l_atoms(sampler, x, y, exceptions)
return LDerivedClass(base_replaced, self.marked_atom)
def replace_u_atoms(self, sampler, x, y, exceptions=None):
base_replaced = self.base_class_object.replace_u_atoms(sampler, x, y, exceptions)
return LDerivedClass(base_replaced, self.marked_atom)
def __str__(self):
return "{}_dx".format(str(self.base_class_object))
class UDerivedClass(DerivedClass):
"""
Wrapper for a u-derived class.
A u-derived class is a combinatorial class where one u-atom is marked and does not count to the u-size.
Parameters
----------
base_class_object: CombinatorialClass
The object form the underlying underived class.
marked_u_atom: CombinatorialClass, optional (default=None)
The distinguished u-atom (one stands for any random atom).
"""
def __init__(self, base_class_object, marked_u_atom=None):
super(UDerivedClass, self).__init__(base_class_object, marked_u_atom)
@property
def u_size(self):
return self.base_class_object.u_size - 1
def u_atoms(self):
if self.marked_atom is None:
# Select a random u-atom as the marked u-atom (do not use the checking setter here).
self._marked_atom = self.base_class_object.random_u_atom()
for u_atom in self.base_class_object.u_atoms():
if u_atom != self.marked_atom:
yield u_atom
def replace_l_atoms(self, sampler, x, y, exceptions=None):
base_replaced = self.base_class_object.replace_l_atoms(sampler, x, y, exceptions)
return UDerivedClass(base_replaced, self.marked_atom)
def replace_u_atoms(self, sampler, x, y, exceptions=None):
if exceptions is None:
exceptions = []
if self.marked_atom is None:
self._marked_atom = self.base_class_object.random_u_atom()
exceptions.append(self.marked_atom)
base_replaced = self.base_class_object.replace_u_atoms(sampler, x, y, exceptions)
return UDerivedClass(base_replaced, self.marked_atom)
def __str__(self):
return "{}_dy".format(str(self.base_class_object))
| 30.117647 | 119 | 0.637015 |
4a216965d6f74d78dbb6ad426fb485910b5e13a0 | 8,033 | py | Python | cryptotracker/lib/exchange.py | Sticky073/cryptotracker | 14142e4db74058a4bc5ac9d99562287e4d7a918d | ["MIT"] | null | null | null | cryptotracker/lib/exchange.py | Sticky073/cryptotracker | 14142e4db74058a4bc5ac9d99562287e4d7a918d | ["MIT"] | null | null | null | cryptotracker/lib/exchange.py | Sticky073/cryptotracker | 14142e4db74058a4bc5ac9d99562287e4d7a918d | ["MIT"] | null | null | null |
from binance.client import Client
from binance.exceptions import BinanceAPIException, BinanceWithdrawException
from lib.currency import Currency, Pair, TRADE_CURRENCIES
from enum import Enum
from copy import copy, deepcopy
from time import time
from datetime import datetime
from multiprocessing.dummy import Pool as ThreadPool
import settings
class Exchange:
"""
Presumably, we want to interface with multiple exchanges (Binance, GDAX, IDEX, etc.).
This is a class that wraps each API and provides a common interface to the rest of our project.
"""
def __init__(self):
""" """
pass
class Binance(Exchange):
"""
This class extends the Exchange class and provides API-specific methods for Binance.
"""
def __init__(self, api_key, api_secret):
""" Initialize the Binance class with the API login info """
self.api_key = api_key
self.api_secret = api_secret
def connect(self):
""" Creates an instance of the Binance API client and populates general data """
self.client = Client(self.api_key, self.api_secret)
self.info = self.client.get_exchange_info()
self.account = self.client.get_account()
self.get_pairs()
self.get_balances()
def disconnect(self):
""" Removes the instance of the Binance API client so it can be garbage collected """
self.client = None
def get_pairs(self):
""" Get all of the pairs and currencies that Binance trades in """
# Get all of the pairs that Binance trades in
self.pairs = [Pair(pair['symbol'], pair['baseAsset'], pair['quoteAsset']) for pair in self.info['symbols']]
self.pairs.append(Pair('BTCEUR', 'EUR', 'BTC'))
self.pairs.append(Pair('ETHEUR', 'EUR', 'ETH'))
# Get the list of unique currencies that Binance trades in
self.currencies = [pair.trade_currency for pair in self.pairs] + [pair.base_currency for pair in self.pairs]
self.currencies = list(set(self.currencies))
# Initialize traded pairs with a guess
pairs = []
for coin in TRADE_CURRENCIES:
pairs.append(coin.alias + 'BNB')
pairs.append(coin.alias + 'ETH')
pairs.append(coin.alias + 'BTC')
self.traded_pairs = [self.pairs[self.pairs.index(p)] for p in pairs if p in self.pairs]
def get_balances(self):
""" Get the current balance for each currency in the static currency list """
self.inventory = []
for bal in self.account['balances']:
symbol = bal['asset']
amount = float(bal['free']) + float(bal['locked'])
if (amount > 0 or symbol in TRADE_CURRENCIES) and (symbol in self.currencies):
coin = deepcopy(self.currencies[self.currencies.index(symbol)])
coin.amount = amount
self.inventory.append(coin)
if (symbol not in TRADE_CURRENCIES):
print('Non-zero balance for ' + symbol + ' not included in trade currencies!')
def get_all_transactions(self):
pool = ThreadPool(10)
trades = pool.map(self.get_trade_history, self.traded_pairs)
pool.close()
pool.join()
self.trades = [tx for txlist in trades for tx in txlist]
self.transfers = self.get_transfer_history()
self.txlist = self.trades + self.transfers
self.txlist.sort()
self.traded_pairs = list(set([tx.pair for tx in self.trades]))
self.traded_pairs.sort()
def get_trade_history(self, pair):
""" """
def create_transaction(trade):
time = trade['time']
buysign = 1.0 if trade['isBuyer'] else -1.0
txtype = 'buy' if trade['isBuyer'] else 'sell'
base_amount = Amount(-buysign * float(trade['qty']) * float(trade['price']), pair.base_currency)
trade_amount = Amount(buysign * float(trade['qty']), pair.trade_currency)
fee_currency = self.currencies[self.currencies.index(trade['commissionAsset'])]
fee_amount = Amount(-float(trade['commission']), fee_currency)
amounts = [trade_amount, base_amount, fee_amount]
return Transaction(pair=pair, time=time, amounts=amounts, txtype=txtype)
print('Trade history for: ' + str(pair))
trades = self.client.get_my_trades(symbol=pair.symbol)
return [create_transaction(trade) for trade in trades]
def get_transfer_history(self):
""" """
def create_transaction(deposit):
coin = self.currencies[self.currencies.index(deposit['asset'])]
pair = self.pairs[self.pairs.index(deposit['asset'] + 'EUR')]
time = deposit['insertTime']
amount = deposit['amount']
return Transaction(pair=pair, time=time, amounts=[Amount(amount, coin)], txtype='deposit')
deposits = self.client.get_deposit_history()['depositList']
return [create_transaction(deposit) for deposit in deposits]
class Transaction():
"""
A simple object to represent transactions in trading history. Stores the time of the transaction,
the amount gained, amount spent and fees paid.
"""
TxTypes = Enum('TxTypes', ['withdrawal', 'deposit', 'buy', 'sell', 'cummulative', 'unknown'])
def __init__(self, pair, time, amounts, txtype=None):
# Pair
self.pair = pair
# Time (datetime format)
self.time = time
if (not isinstance(self.time, datetime)):
self.time = datetime.fromtimestamp(int(self.time) / 1000)
# Amounts
self.amounts = amounts
# Transaction type
try:
self.txtype = self.TxTypes[txtype]
except KeyError:
self.txtype = self.TxTypes.unknown
def __str__(self):
return '{0} {1} [{2}]: {3}'.format(str(self.pair), str(self.time), str(self.amounts), str(self.txtype))
def __repr__(self):
return 'Transaction({0}, {1}, {2}, {3})'.format(repr(self.pair), repr(self.time), repr(self.amounts), repr(self.txtype))
def __add__(self, tx):
""" Add the amounts of two transactions together """
if not isinstance(tx, Transaction):
tx = Transaction(self.pair, self.time, [], self.txtype.name)
pair = self.pair if (self.pair == tx.pair) else None
time = max(self.time, tx.time)
txtype = self.txtype if (self.txtype == tx.txtype) else self.TxTypes.cummulative
amounts = self.amounts + tx.amounts
return Transaction(pair, time, amounts, txtype.name)
def __radd__(self, tx):
return self + tx
def __lt__(self, tx):
return self.time < tx.time
def mergeAmounts(self):
""" Merge amounts that share the same currency into one """
currencies = set([amount.currency for amount in self.amounts])
amountLists = [[a for a in self.amounts if a.currency == c] for c in currencies]
self.amounts = [sum(a) for a in amountLists]
class Amount():
"""
An even simpler object that is really just a named tuple for an amount and currency
"""
def __init__(self, amount, currency):
self.amount = amount
self.currency = currency
def __str__(self):
return str(self.currency) + ': ' + str(self.amount)
def __repr__(self):
return 'Amount(' + repr(self.currency) + ', ' + repr(self.amount) + ')'
def __add__(self, a):
if isinstance(a, Amount) and a.currency != self.currency:
raise Exception('Cannot add two amounts with different currencies: ' + str(self.currency) + ', ' + str(a.currency))
else:
return Amount(self.amount + float(a), self.currency)
def __radd__(self, a):
return self + a
def __float__(self):
return self.amount
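# Illustrative sketch (added; not part of the original file): how two Amount
# instances of the same currency combine. A plain string stands in for a
# Currency object here, which is an assumption made for the example only.
def _example_amount_sum():
    fee1 = Amount(-0.001, 'BNB')
    fee2 = Amount(-0.002, 'BNB')
    return fee1 + fee2  # an Amount of roughly -0.003 in the same currency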
def test():
b = Binance(settings.APIKEY, settings.APISECRET)
b.connect()
b.get_all_transactions()
return b
b = test()
| 34.625 | 128 | 0.624424 |
4a216a60d2be9014ef6e2c08b73abb3ed238393f | 11,162 | py | Python | ray/tune/ray_trial_executor.py | hyyh28/tesp | 8109b39011e05545453950c918b14da07e70fad3 | ["MIT"] | 29 | 2019-05-18T12:18:34.000Z | 2022-03-30T01:46:48.000Z | ray/tune/ray_trial_executor.py | kivo360/tesp | a77d9c228a6891b304e789ba2758a4cbfdb75ec0 | ["MIT"] | 8 | 2019-08-15T05:42:10.000Z | 2021-05-21T09:41:15.000Z | ray/tune/ray_trial_executor.py | kivo360/tesp | a77d9c228a6891b304e789ba2758a4cbfdb75ec0 | ["MIT"] | 8 | 2019-07-15T22:36:20.000Z | 2020-08-09T07:03:26.000Z |
# coding: utf-8
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging
import os
import time
import traceback
import ray
from ray.tune.logger import NoopLogger
from ray.tune.trial import Trial, Resources, Checkpoint
from ray.tune.trial_executor import TrialExecutor
logger = logging.getLogger(__name__)
class RayTrialExecutor(TrialExecutor):
"""An implemention of TrialExecutor based on Ray."""
def __init__(self, queue_trials=False):
super(RayTrialExecutor, self).__init__(queue_trials)
self._running = {}
# Since trial resume after paused should not run
# trial.train.remote(), thus no more new remote object id generated.
# We use self._paused to store paused trials here.
self._paused = {}
self._avail_resources = Resources(cpu=0, gpu=0)
self._committed_resources = Resources(cpu=0, gpu=0)
self._resources_initialized = False
def _setup_runner(self, trial):
cls = ray.remote(
num_cpus=trial.resources.cpu,
num_gpus=trial.resources.gpu)(trial._get_trainable_cls())
trial.init_logger()
remote_logdir = trial.logdir
def logger_creator(config):
# Set the working dir in the remote process, for user file writes
if not os.path.exists(remote_logdir):
os.makedirs(remote_logdir)
os.chdir(remote_logdir)
return NoopLogger(config, remote_logdir)
# Logging for trials is handled centrally by TrialRunner, so
# configure the remote runner to use a noop-logger.
return cls.remote(config=trial.config, logger_creator=logger_creator)
def _train(self, trial):
"""Start one iteration of training and save remote id."""
assert trial.status == Trial.RUNNING, trial.status
remote = trial.runner.train.remote()
self._running[remote] = trial
def _start_trial(self, trial, checkpoint=None):
prior_status = trial.status
trial.status = Trial.RUNNING
trial.runner = self._setup_runner(trial)
if not self.restore(trial, checkpoint):
return
previous_run = self._find_item(self._paused, trial)
if (prior_status == Trial.PAUSED and previous_run):
# If Trial was in flight when paused, self._paused stores result.
self._paused.pop(previous_run[0])
self._running[previous_run[0]] = trial
else:
self._train(trial)
def _stop_trial(self, trial, error=False, error_msg=None,
stop_logger=True):
"""Stops this trial.
Stops this trial, releasing all allocating resources. If stopping the
trial fails, the run will be marked as terminated in error, but no
exception will be thrown.
Args:
error (bool): Whether to mark this trial as terminated in error.
error_msg (str): Optional error message.
stop_logger (bool): Whether to shut down the trial logger.
"""
if error:
trial.status = Trial.ERROR
else:
trial.status = Trial.TERMINATED
try:
trial.write_error_log(error_msg)
if hasattr(trial, 'runner') and trial.runner:
stop_tasks = []
stop_tasks.append(trial.runner.stop.remote())
stop_tasks.append(trial.runner.__ray_terminate__.remote())
# TODO(ekl) seems like wait hangs when killing actors
_, unfinished = ray.wait(
stop_tasks, num_returns=2, timeout=250)
except Exception:
logger.exception("Error stopping runner.")
trial.status = Trial.ERROR
finally:
trial.runner = None
if stop_logger:
trial.close_logger()
def start_trial(self, trial, checkpoint_obj=None):
"""Starts the trial."""
self._commit_resources(trial.resources)
try:
self._start_trial(trial, checkpoint_obj)
except Exception:
logger.exception("Error stopping runner - retrying...")
error_msg = traceback.format_exc()
time.sleep(2)
self._stop_trial(trial, error=True, error_msg=error_msg)
try:
self._start_trial(trial)
except Exception:
logger.exception("Error starting runner, aborting!")
error_msg = traceback.format_exc()
self._stop_trial(trial, error=True, error_msg=error_msg)
# note that we don't return the resources, since they may
# have been lost
def _find_item(self, dictionary, item):
out = [rid for rid, t in dictionary.items() if t is item]
return out
def stop_trial(self, trial, error=False, error_msg=None, stop_logger=True):
"""Only returns resources if resources allocated."""
prior_status = trial.status
self._stop_trial(
trial, error=error, error_msg=error_msg, stop_logger=stop_logger)
if prior_status == Trial.RUNNING:
self._return_resources(trial.resources)
out = self._find_item(self._running, trial)
for result_id in out:
self._running.pop(result_id)
def continue_training(self, trial):
"""Continues the training of this trial."""
self._train(trial)
def pause_trial(self, trial):
"""Pauses the trial.
If trial is in-flight, preserves return value in separate queue
before pausing, which is restored when Trial is resumed.
"""
trial_future = self._find_item(self._running, trial)
if trial_future:
self._paused[trial_future[0]] = trial
super(RayTrialExecutor, self).pause_trial(trial)
def reset_trial(self, trial, new_config, new_experiment_tag):
"""Tries to invoke `Trainable.reset_config()` to reset trial.
Args:
trial (Trial): Trial to be reset.
new_config (dict): New configuration for Trial
trainable.
new_experiment_tag (str): New experiment name
for trial.
Returns:
True if `reset_config` is successful else False.
"""
trial.experiment_tag = new_experiment_tag
trial.config = new_config
trainable = trial.runner
reset_val = ray.get(trainable.reset_config.remote(new_config))
return reset_val
def get_running_trials(self):
"""Returns the running trials."""
return list(self._running.values())
def get_next_available_trial(self):
[result_id], _ = ray.wait(list(self._running))
return self._running[result_id]
def fetch_result(self, trial):
"""Fetches one result of the running trials.
Returns:
Result of the most recent trial training run."""
trial_future = self._find_item(self._running, trial)
if not trial_future:
raise ValueError("Trial was not running.")
self._running.pop(trial_future[0])
result = ray.get(trial_future[0])
return result
def _commit_resources(self, resources):
self._committed_resources = Resources(
self._committed_resources.cpu + resources.cpu_total(),
self._committed_resources.gpu + resources.gpu_total())
def _return_resources(self, resources):
self._committed_resources = Resources(
self._committed_resources.cpu - resources.cpu_total(),
self._committed_resources.gpu - resources.gpu_total())
assert self._committed_resources.cpu >= 0
assert self._committed_resources.gpu >= 0
def _update_avail_resources(self):
resources = ray.global_state.cluster_resources()
num_cpus = resources["CPU"]
num_gpus = resources["GPU"]
self._avail_resources = Resources(int(num_cpus), int(num_gpus))
self._resources_initialized = True
def has_resources(self, resources):
"""Returns whether this runner has at least the specified resources."""
cpu_avail = self._avail_resources.cpu - self._committed_resources.cpu
gpu_avail = self._avail_resources.gpu - self._committed_resources.gpu
have_space = (resources.cpu_total() <= cpu_avail
and resources.gpu_total() <= gpu_avail)
if have_space:
return True
can_overcommit = self._queue_trials
if (resources.cpu_total() > 0 and cpu_avail <= 0) or \
(resources.gpu_total() > 0 and gpu_avail <= 0):
can_overcommit = False # requested resource is already saturated
if can_overcommit:
logger.warning(
"Allowing trial to start even though the "
"cluster does not have enough free resources. Trial actors "
"may appear to hang until enough resources are added to the "
"cluster (e.g., via autoscaling). You can disable this "
"behavior by specifying `queue_trials=False` in "
"ray.tune.run_experiments().")
return True
return False
def debug_string(self):
"""Returns a human readable message for printing to the console."""
if self._resources_initialized:
return "Resources requested: {}/{} CPUs, {}/{} GPUs".format(
self._committed_resources.cpu, self._avail_resources.cpu,
self._committed_resources.gpu, self._avail_resources.gpu)
else:
return ""
def on_step_begin(self):
"""Before step() called, update the available resources."""
self._update_avail_resources()
def save(self, trial, storage=Checkpoint.DISK):
"""Saves the trial's state to a checkpoint."""
trial._checkpoint.storage = storage
if storage == Checkpoint.MEMORY:
trial._checkpoint.value = trial.runner.save_to_object.remote()
else:
trial._checkpoint.value = ray.get(trial.runner.save.remote())
return trial._checkpoint.value
def restore(self, trial, checkpoint=None):
"""Restores training state from a given model checkpoint."""
if checkpoint is None or checkpoint.value is None:
checkpoint = trial._checkpoint
if checkpoint is None or checkpoint.value is None:
return True
if trial.runner is None:
logger.error("Unable to restore - no runner.")
trial.status = Trial.ERROR
return False
try:
value = checkpoint.value
if checkpoint.storage == Checkpoint.MEMORY:
assert type(value) != Checkpoint, type(value)
ray.get(trial.runner.restore_from_object.remote(value))
else:
ray.get(trial.runner.restore.remote(value))
return True
except Exception:
logger.exception("Error restoring runner.")
trial.status = Trial.ERROR
return False
| 37.456376 | 79 | 0.626859 |
4a216aead278e3f48dbe54d9bd63006b6b2e0885 | 863 | py | Python | proposals/migrations/0004_auto_20201109_1208.py | KolibriSolutions/BepMarketplace | c47d252fd744cde6b927e37c34d7a103c6162be5 | ["BSD-3-Clause"] | 1 | 2019-06-29T15:24:24.000Z | 2019-06-29T15:24:24.000Z | proposals/migrations/0004_auto_20201109_1208.py | KolibriSolutions/BepMarketplace | c47d252fd744cde6b927e37c34d7a103c6162be5 | ["BSD-3-Clause"] | 2 | 2020-01-12T17:47:33.000Z | 2020-01-12T17:47:45.000Z | proposals/migrations/0004_auto_20201109_1208.py | KolibriSolutions/BepMarketplace | c47d252fd744cde6b927e37c34d7a103c6162be5 | ["BSD-3-Clause"] | 2 | 2019-06-29T15:24:26.000Z | 2020-01-08T15:15:03.000Z |
# Bep Marketplace ELE
# Copyright (c) 2016-2021 Kolibri Solutions
# License: See LICENSE file or https://github.com/KolibriSolutions/BepMarketplace/blob/master/LICENSE
# Generated by Django 3.0.8 on 2020-11-09 11:08
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('proposals', '0003_auto_20190701_1527'),
]
operations = [
migrations.AlterField(
model_name='proposal',
name='Assistants',
field=models.ManyToManyField(blank=True, help_text='Add an assistant to the project. If the assistant is not found in the list, ask him/her to login at least once in the system.', related_name='proposals', to=settings.AUTH_USER_MODEL),
),
]
| 34.52 | 247 | 0.706837 |
4a216c8bc4cede0c026c4bef505d54c6585c598b | 3,492 | py | Python | python/getalp/wsd/modules/embeddings/embeddings_bert.py | getalp/disambiguate-translate | 38ef754c786ded085d184633b21acc607902c098 | ["MIT"] | 53 | 2019-02-12T15:40:22.000Z | 2022-03-30T16:54:22.000Z | python/getalp/wsd/modules/embeddings/embeddings_bert.py | getalp/disambiguate-translate | 38ef754c786ded085d184633b21acc607902c098 | ["MIT"] | 21 | 2019-06-11T15:21:17.000Z | 2022-02-05T11:53:38.000Z | python/getalp/wsd/modules/embeddings/embeddings_bert.py | getalp/disambiguate-translate | 38ef754c786ded085d184633b21acc607902c098 | ["MIT"] | 19 | 2019-05-26T10:23:41.000Z | 2021-12-06T04:43:08.000Z |
from torch.nn import Module
from getalp.wsd.torch_fix import *
from torch.nn.utils.rnn import pad_sequence
from getalp.wsd.torch_utils import default_device
from typing import List, Union, Dict
class EmbeddingsBert(Module):
def __init__(self, bert_path: str):
super().__init__()
from pytorch_pretrained_bert import BertModel, BertTokenizer
self.bert_embeddings = BertModel.from_pretrained(bert_path)
self.bert_tokenizer = BertTokenizer.from_pretrained(bert_path, do_lower_case=False)
for param in self.bert_embeddings.parameters():
param.requires_grad = False
self._is_fixed = True
self._output_dim = self.bert_embeddings.config.hidden_size
# input:
# - sample_x: List[str] - seq_in
# output:
# - sample_x: LongTensor - seq_out
# - new_size: int - seq_out
# - indices: List[int] - seq_in
def preprocess_sample_first(self, sample_x):
seq_token_indices: List[int] = []
seq_tokens: Union[List[str], torch.Tensor] = []
current_index = 1 # 0 is [CLS]
for token in sample_x:
subtokens = self.bert_tokenizer.tokenize(token)
if current_index + len(subtokens) + 1 >= self.bert_tokenizer.max_len:
break
seq_token_indices.append(current_index)
current_index += len(subtokens)
for subtoken in subtokens:
seq_tokens.append(subtoken)
seq_tokens = ["[CLS]"] + seq_tokens + ["[SEP]"]
seq_tokens = self.bert_tokenizer.convert_tokens_to_ids(seq_tokens)
seq_tokens = torch_tensor(seq_tokens, dtype=torch_long)
return seq_tokens, seq_tokens.size(0), seq_token_indices
# input:
# - sample_x: LongTensor - seq_in
# - new_size: int - seq_out
# - indices: List[int] - seq_in
# output:
# - sample_x: Tuple[LongTensor, List[int]] - sample_x, indices
@staticmethod
def preprocess_sample_next(sample_x, new_size, indices):
return sample_x, indices
# inputs:
# - inputs: List[List[str]] (batch x seq_in)
# output:
# - output: FloatTensor (batch x seq_out x hidden)
# - pad_mask: LongTensor (batch x seq_out)
# - token_indices: List[List[int]] (batch x seq_in)
def forward(self, inputs):
tokens: List[torch.Tensor] = []
token_indices: List[List[int]] = []
for seq in inputs:
tokens.append(seq[0].to(default_device))
token_indices.append(seq[1])
inputs = tokens
pad_mask = [torch_ones_like(x) for x in inputs]
pad_mask = pad_sequence(pad_mask, batch_first=True, padding_value=0)
inputs = pad_sequence(inputs, batch_first=True, padding_value=0)
inputs, _ = self.bert_embeddings(inputs, attention_mask=pad_mask, output_all_encoded_layers=False)
return inputs, pad_mask, token_indices
def get_output_dim(self):
return self._output_dim
def is_fixed(self):
return self._is_fixed
def get_lut_embeddings(self):
return self.bert_embeddings.embeddings.word_embeddings
_bert_embeddings_wrapper: Dict[str, EmbeddingsBert] = {}
def get_bert_embeddings(bert_path: str, clear_text: bool):
assert clear_text
if bert_path not in _bert_embeddings_wrapper:
_bert_embeddings_wrapper[bert_path] = EmbeddingsBert(bert_path)
return _bert_embeddings_wrapper[bert_path]
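# --- Added usage sketch (not part of the original module) ---------------------
# Minimal end-to-end call of the wrapper above. The checkpoint directory is a
# placeholder: any local model/vocab layout accepted by pytorch_pretrained_bert
# should work.
if __name__ == "__main__":
    _demo_path = "/path/to/bert_checkpoint"  # placeholder, adjust before running
    _embedder = get_bert_embeddings(_demo_path, clear_text=True)
    _tokens, _size, _indices = _embedder.preprocess_sample_first(["Hello", "world", "!"])
    _batch = [_embedder.preprocess_sample_next(_tokens, _size, _indices)]
    _hidden, _pad_mask, _token_indices = _embedder(_batch)
    # _hidden: (1, seq_out, hidden), _pad_mask: (1, seq_out), _token_indices: [[...]]
    print(_hidden.shape, _pad_mask.shape, _token_indices)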
| 38.8 | 106 | 0.657789 |
4a216d6fb43b9676ec9eb7d9541b2bddaa513139 | 1,675 | py | Python | tests/test_ec2/test_general.py | gtourkas/moto | 307104417b579d23d02f670ff55217a2d4a16bee | [
"Apache-2.0"
] | 5,460 | 2015-01-01T01:11:17.000Z | 2022-03-31T23:45:38.000Z | tests/test_ec2/test_general.py | gtourkas/moto | 307104417b579d23d02f670ff55217a2d4a16bee | [
"Apache-2.0"
] | 4,475 | 2015-01-05T19:37:30.000Z | 2022-03-31T13:55:12.000Z | tests/test_ec2/test_general.py | gtourkas/moto | 307104417b579d23d02f670ff55217a2d4a16bee | [
"Apache-2.0"
] | 1,831 | 2015-01-14T00:00:44.000Z | 2022-03-31T20:30:04.000Z | import pytest
import boto
import boto3
from boto.exception import EC2ResponseError
import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
from moto import mock_ec2_deprecated, mock_ec2
from tests import EXAMPLE_AMI_ID
# Has boto3 equivalent
@mock_ec2_deprecated
def test_console_output():
conn = boto.connect_ec2("the_key", "the_secret")
reservation = conn.run_instances(EXAMPLE_AMI_ID)
instance_id = reservation.instances[0].id
output = conn.get_console_output(instance_id)
output.output.should_not.equal(None)
# Has boto3 equivalent
@mock_ec2_deprecated
def test_console_output_without_instance():
conn = boto.connect_ec2("the_key", "the_secret")
with pytest.raises(EC2ResponseError) as cm:
conn.get_console_output("i-1234abcd")
cm.value.error_code.should.equal("InvalidInstanceID.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none
@mock_ec2
def test_console_output_boto3():
conn = boto3.resource("ec2", "us-east-1")
instances = conn.create_instances(ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1)
output = instances[0].console_output()
output.get("Output").should_not.equal(None)
@mock_ec2
def test_console_output_without_instance_boto3():
client = boto3.client("ec2", "us-east-1")
with pytest.raises(ClientError) as ex:
client.get_console_output(InstanceId="i-1234abcd")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["ResponseMetadata"]["RequestId"].shouldnt.be.none
ex.value.response["Error"]["Code"].should.equal("InvalidInstanceID.NotFound")
| 31.603774 | 85 | 0.761194 |
4a216d9a7d6dfeda808f6b9fc3cac620cf2d40c9 | 3,486 | py | Python | mayan/apps/ocr/managers.py | nattangwiwat/Mayan-EDMS-recitation | fcf16afb56eae812fb99144d65ae1ae6749de0b7 | [
"Apache-2.0"
] | 4 | 2021-09-02T00:16:30.000Z | 2021-09-09T22:25:15.000Z | mayan/apps/ocr/managers.py | nattangwiwat/Mayan-EDMS-recitation | fcf16afb56eae812fb99144d65ae1ae6749de0b7 | [
"Apache-2.0"
] | 86 | 2021-09-01T23:53:02.000Z | 2021-09-20T02:25:10.000Z | mayan/apps/ocr/managers.py | nattangwiwat/Mayan-EDMS-recitation | fcf16afb56eae812fb99144d65ae1ae6749de0b7 | [
"Apache-2.0"
] | 70 | 2021-09-01T12:54:51.000Z | 2022-02-16T00:53:18.000Z | import logging
from django.apps import apps
from django.db import models, transaction
from mayan.apps.documents.literals import DOCUMENT_IMAGE_TASK_TIMEOUT
from mayan.apps.lock_manager.backends.base import LockingBackend
from .classes import OCRBackendBase
from .events import event_ocr_document_version_content_deleted
logger = logging.getLogger(name=__name__)
class DocumentVersionPageOCRContentManager(models.Manager):
def delete_content_for(self, document_version, user=None):
with transaction.atomic():
for document_version_page in document_version.pages.all():
self.filter(
document_version_page=document_version_page
).delete()
event_ocr_document_version_content_deleted.commit(
actor=user, action_object=document_version.document,
target=document_version
)
def process_document_version_page(
self, document_version_page, user=None
):
logger.info(
'Processing page: %d of document version: %s',
document_version_page.page_number,
document_version_page.document_version
)
DocumentVersionPageOCRContent = apps.get_model(
app_label='ocr', model_name='DocumentVersionPageOCRContent'
)
lock_name = document_version_page.get_lock_name(user=user)
try:
document_version_page_lock = LockingBackend.get_backend().acquire_lock(
name=lock_name, timeout=DOCUMENT_IMAGE_TASK_TIMEOUT * 2
)
except Exception:
raise
else:
try:
cache_filename = document_version_page.generate_image(
_acquire_lock=False, user=user
)
with document_version_page.cache_partition.get_file(filename=cache_filename).open() as file_object:
ocr_content = OCRBackendBase.get_instance().execute(
file_object=file_object,
language=document_version_page.document_version.document.language
)
DocumentVersionPageOCRContent.objects.update_or_create(
document_version_page=document_version_page, defaults={
'content': ocr_content
}
)
except Exception as exception:
logger.error(
'OCR error for document version page: %d; %s',
document_version_page.pk, exception, exc_info=True
)
raise
else:
logger.info(
'Finished processing page: %d of document version: %s',
document_version_page.page_number,
document_version_page.document_version
)
finally:
document_version_page_lock.release()
class DocumentTypeSettingsManager(models.Manager):
def get_by_natural_key(self, document_type_natural_key):
DocumentType = apps.get_model(
app_label='documents', model_name='DocumentType'
)
try:
document_type = DocumentType.objects.get_by_natural_key(
document_type_natural_key
)
except DocumentType.DoesNotExist:
raise self.model.DoesNotExist
return self.get(document_type__pk=document_type.pk)
| 36.694737 | 115 | 0.618187 |
4a216d9bb5c79830ff400857ff8ebb74f3d68e52 | 55 | py | Python | saleor/core/taxes/errors.py | rocka44/saleor | 3f1487b409fcafc8fde82c7e8b776c12921866b9 | [
"BSD-3-Clause"
] | 2 | 2019-07-27T22:56:14.000Z | 2021-09-08T14:37:38.000Z | saleor/core/taxes/errors.py | rocka44/saleor | 3f1487b409fcafc8fde82c7e8b776c12921866b9 | [
"BSD-3-Clause"
] | 2 | 2019-07-02T13:39:49.000Z | 2019-07-07T09:38:27.000Z | saleor/core/taxes/errors.py | rocka44/saleor | 3f1487b409fcafc8fde82c7e8b776c12921866b9 | [
"BSD-3-Clause"
] | 1 | 2019-05-02T17:30:49.000Z | 2019-05-02T17:30:49.000Z | class TaxError(Exception):
"""Default tax error"""
| 18.333333 | 27 | 0.672727 |
4a216dccc3c0ba519a89f1840a13f0a72a4d6f1a | 2,569 | py | Python | data/p4VQE/R4/benchmark/startQiskit_QC102.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | data/p4VQE/R4/benchmark/startQiskit_QC102.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | data/p4VQE/R4/benchmark/startQiskit_QC102.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | # qubit number=3
# total number=10
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
import networkx as nx
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def make_circuit(n:int) -> QuantumCircuit:
# circuit begin
input_qubit = QuantumRegister(n,"qc")
prog = QuantumCircuit(input_qubit)
prog.h(input_qubit[0]) # number=1
prog.h(input_qubit[1]) # number=2
prog.rx(2.9845130209103035,input_qubit[2]) # number=7
prog.h(input_qubit[2]) # number=3
prog.x(input_qubit[2]) # number=6
prog.h(input_qubit[3]) # number=4
prog.y(input_qubit[3]) # number=5
for edge in E:
k = edge[0]
l = edge[1]
prog.cp(-2 * gamma, input_qubit[k-1], input_qubit[l-1])
prog.p(gamma, k)
prog.p(gamma, l)
prog.rx(2 * beta, range(len(V)))
prog.swap(input_qubit[1],input_qubit[0]) # number=8
prog.swap(input_qubit[1],input_qubit[0]) # number=9
# circuit end
return prog
if __name__ == '__main__':
n = 4
V = np.arange(0, n, 1)
E = [(0, 1, 1.0), (0, 2, 1.0), (1, 2, 1.0), (3, 2, 1.0), (3, 1, 1.0)]
G = nx.Graph()
G.add_nodes_from(V)
G.add_weighted_edges_from(E)
step_size = 0.1
a_gamma = np.arange(0, np.pi, step_size)
a_beta = np.arange(0, np.pi, step_size)
a_gamma, a_beta = np.meshgrid(a_gamma, a_beta)
F1 = 3 - (np.sin(2 * a_beta) ** 2 * np.sin(2 * a_gamma) ** 2 - 0.5 * np.sin(4 * a_beta) * np.sin(4 * a_gamma)) * (
1 + np.cos(4 * a_gamma) ** 2)
result = np.where(F1 == np.amax(F1))
a = list(zip(result[0], result[1]))[0]
gamma = a[0] * step_size
beta = a[1] * step_size
prog = make_circuit(4)
sample_shot =3962
writefile = open("../data/startQiskit_QC102.csv", "w")
# prog.draw('mpl', filename=(kernel + '.png'))
IBMQ.load_account()
provider = IBMQ.get_provider(hub='ibm-q')
provider.backends()
backend = provider.get_backend("ibmq_5_yorktown")
circuit1 = transpile(prog, FakeYorktown())
circuit1.measure_all()
prog = circuit1
info = execute(prog,backend=backend, shots=sample_shot).result().get_counts()
print(info, file=writefile)
print("results end", file=writefile)
print(circuit1.depth(), file=writefile)
print(circuit1, file=writefile)
writefile.close()
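    # --- Added note (not part of the original benchmark) ----------------------
    # The same circuit can be sampled locally without an IBMQ account, e.g.:
    #   sim = BasicAer.get_backend("qasm_simulator")
    #   sim_info = execute(prog, backend=sim, shots=sample_shot).result().get_counts()
    # (BasicAer and execute are already imported at the top of this script.)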
| 27.623656 | 118 | 0.636824 |
4a216e14fc7baa985cacc98470a6424143054f78 | 747 | py | Python | reviews/managers.py | vaibhavantil2/detail-personalized-feed | 1fc74a5c8c514c4979fa08257eea5a95924e82bd | [
"MIT"
] | 15 | 2019-02-16T12:17:30.000Z | 2022-03-27T20:11:49.000Z | reviews/managers.py | vaibhavantil2/detail-personalized-feed | 1fc74a5c8c514c4979fa08257eea5a95924e82bd | [
"MIT"
] | 7 | 2019-05-10T08:27:14.000Z | 2021-04-26T15:19:06.000Z | reviews/managers.py | andreynovikov/django-rated-reviews | ccb24f5412bf5c831f79120c32e1cd51ddca5fe8 | [
"MIT"
] | 8 | 2019-11-07T21:05:10.000Z | 2021-08-03T06:59:37.000Z | from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.utils.encoding import force_str
class ReviewManager(models.Manager):
def in_moderation(self):
"""
QuerySet for all reviews currently in the moderation queue.
"""
return self.get_queryset().filter(is_public=False)
def for_model(self, model):
"""
QuerySet for all reviews for a particular model (either an instance or
a class).
"""
ct = ContentType.objects.get_for_model(model)
qs = self.get_queryset().filter(content_type=ct)
if isinstance(model, models.Model):
qs = qs.filter(object_pk=force_str(model._get_pk_val()))
return qs
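# --- Added usage sketch (not part of the original module) ---------------------
# Typical queries, assuming a Review model whose default manager is
# ReviewManager and an already-saved `product` instance (both hypothetical):
#   pending = Review.objects.in_moderation()
#   product_reviews = Review.objects.for_model(product)      # by instance (pk match)
#   all_product_reviews = Review.objects.for_model(Product)  # by model class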
| 32.478261 | 78 | 0.666667 |
4a216ecd3c71914dc9dc8a749e966edf8b33b6ff | 3,033 | py | Python | accounts/forms.py | awolste/beginToReason2 | 9939af5157fcb1658fe9f1c29e0925a34cb53529 | [
"BSD-3-Clause"
] | null | null | null | accounts/forms.py | awolste/beginToReason2 | 9939af5157fcb1658fe9f1c29e0925a34cb53529 | [
"BSD-3-Clause"
] | null | null | null | accounts/forms.py | awolste/beginToReason2 | 9939af5157fcb1658fe9f1c29e0925a34cb53529 | [
"BSD-3-Clause"
] | null | null | null | """
This module contains custom forms used to collect the information needed to create a model.
"""
from django import forms
from .models import UserInformation, Class
class UserInformationForm(forms.ModelForm):
"""
This form creates an instance of a UserInformation model and collects its fields
"""
# Special options
blank = ''
prefer = 'Prefer Not To Answer'
# School options
clemson = 'Clemson University'
fau = 'Florida Atlantic University'
osu = 'The Ohio State University'
rhit = 'Rose-Hulman Institute of Technology'
other = 'Other'
schools = [
(blank, ''),
(clemson, 'Clemson University'),
(fau, 'Florida Atlantic University'),
(osu, 'The Ohio State University'),
(rhit, 'Rose-Hulman Institute of Technology'),
(other, 'Other')
]
# Gender options
male = 'Male'
female = 'Female'
genders = [
(blank, ''),
(male, 'Male'),
(female, 'Female'),
(prefer, 'Prefer Not To Answer')
]
# Race options
native = 'American Indian or Alaska Native'
asian = 'Asian'
black = 'Black or African American'
hispanic = 'Hispanic or Latino'
hawaiian = 'Native Hawaiian or Other Pacific Islander'
white = 'White'
races = [
(blank, ''),
(native, 'American Indian or Alaska Native'),
(asian, 'Asian'),
(black, 'Black or African American'),
(hispanic, 'Hispanic or Latino'),
(hawaiian, 'Native Hawaiian or Other Pacific Islander'),
(white, 'White'),
(prefer, 'Prefer Not To Answer')
]
# Fields that will need to be completed in this form
user_email = forms.EmailField(label='Email', widget=forms.EmailInput(attrs={'readonly': 'readonly'})) # read only
user_nickname = forms.CharField(label='Nickname', max_length=25)
user_school = forms.ChoiceField(label='School', choices=schools)
user_class = forms.ModelChoiceField(label='Class', queryset=Class.objects.all())
user_gender = forms.ChoiceField(label='Gender', choices=genders)
user_race = forms.ChoiceField(label='Race', choices=races)
user_instructor = forms.BooleanField(label='Are You An Instructor?', required=True)
def __init__(self, *args, **kwargs):
"""function __init__ is called to instantiate the user information form
Args:
*args: Variable length argument list.
**kwargs: Arbitrary keyword arguments.
"""
# super(UserInformationForm, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
# Validator that makes sure all the fields have been filled in
for _field_name, field in self.fields.items():
field.required = True
class Meta:
"""
A class that stores the meta information about this form
"""
model = UserInformation
fields = ['user_email', 'user_nickname', 'user_school', 'user_class', 'user_gender', 'user_race', 'user_instructor']
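# --- Added usage sketch (not part of the original module) ---------------------
# Typical view-side handling (hypothetical view code; values come from POST):
#   form = UserInformationForm(request.POST)
#   if form.is_valid():
#       form.save()   # persists the UserInformation instance
#   else:
#       errors = form.errors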
| 34.465909 | 124 | 0.633366 |
4a216f93b855057ec35c65bbe23d0bd4edc04aa1 | 1,506 | py | Python | src/frames/hackernews/index_frame.py | wtheisen/TerminusBrowse | 1d80fd3b258b5de8c669e3fea0a14e89599252a6 | [
"BSD-3-Clause"
] | 104 | 2019-08-14T14:25:22.000Z | 2022-03-21T01:46:52.000Z | src/frames/hackernews/index_frame.py | wtheisen/commandChan | 1d80fd3b258b5de8c669e3fea0a14e89599252a6 | [
"BSD-3-Clause"
] | 53 | 2019-07-14T17:56:45.000Z | 2019-08-13T18:11:55.000Z | src/frames/hackernews/index_frame.py | wtheisen/TerminusBrowser | 554dfe25c2824bd02894c032f532efadcb3a4b12 | [
"BSD-3-Clause"
] | 16 | 2019-08-14T14:14:45.000Z | 2022-01-30T17:02:30.000Z | import urwid, time
from customer_types import SITE
from frames.abstract_frame import AbstractFrame
import logging
log = logging.getLogger(__name__)
class HackerNewsIndexFrame(AbstractFrame):
def __init__(self, urwidViewManager, uFilter=None):
super().__init__(urwidViewManager, uFilter)
self.headerString = 'TerminusBrowse'
self.storyList = self.uvm.cfg.deep_get(SITE.HACKERNEWS, 'stories')
self.load()
# Overrides super
def loader(self):
self.contents = self.buildFrame()
def buildFrame(self):
boardButtons = []
for story in self.storyList:
if self.uFilter:
if self.uFilter.lower() in story.lower():
boardButtons.append(urwid.LineBox(urwid.AttrWrap(urwid.Button(story, self.changeFrameBoard), 'center')))
else:
boardButtons.append(urwid.LineBox(urwid.AttrWrap(urwid.Button(story, self.changeFrameBoard), 'center')))
self.parsedItems = len(boardButtons)
width = len(max(self.storyList, key=len))
buttonGrid = urwid.GridFlow(boardButtons, width + 9, 2, 2, 'center') # add 9 to width to account for widget padding
listbox_content = [buttonGrid]
return urwid.ListBox(urwid.SimpleListWalker(listbox_content))
def changeFrameBoard(self, button):
from command_handler_class import CommandHandler
ch = CommandHandler(self.uvm)
ch.routeCommand('story ' + button.get_label() + ' ' + '0')
| 35.857143 | 124 | 0.670651 |
4a2170b47a3d0e97668e4fc2809445040b98e556 | 1,794 | py | Python | jcomparison/perfcomp/jobdiff.py | fuzzball81/ci-config | b3a8ff6be780bfae0ae9e3e0511dfa61010695eb | [
"Apache-2.0"
] | 8 | 2016-10-06T13:24:04.000Z | 2021-11-04T20:51:23.000Z | jcomparison/perfcomp/jobdiff.py | fuzzball81/ci-config | b3a8ff6be780bfae0ae9e3e0511dfa61010695eb | [
"Apache-2.0"
] | 8 | 2020-02-26T20:11:29.000Z | 2021-09-23T23:23:47.000Z | jcomparison/perfcomp/jobdiff.py | fuzzball81/ci-config | b3a8ff6be780bfae0ae9e3e0511dfa61010695eb | [
"Apache-2.0"
] | 9 | 2016-04-08T14:38:06.000Z | 2021-11-01T18:43:30.000Z | from perfcomp import ansbile_playbook, pip_diff, rpm_diff
from perfcomp.graphs import graph_ansible_playbook
class JobDiff:
def __init__(self, good, bad, ansible_playbooks_diff, rpm_diff, pip_diff):
self.good, self.bad = good, bad
self.ansible_diff = ansible_playbooks_diff
self.rpm_diff = rpm_diff
self.pip_diff = pip_diff
def ansible_playbooks_diff(self):
data = ansbile_playbook.compare(self.good, self.bad)
images = {}
for i in data:
images[i] = graph_ansible_playbook(data[i], i) if data[i] else None
return {'ans_data': data, 'images': images}
def rpm_files_diff(self):
inline, uniq1, uniq2 = rpm_diff.rpms(self.good, self.bad)
# sometimes we need to inmprove the diff
inline, uniq1, uniq2 = rpm_diff.check_packages(inline, uniq1, uniq2)
colored_inline = [rpm_diff.colorize_diff(i) for i in inline]
inline_with_links = rpm_diff.add_github_links(inline, colored_inline)
return {
'inline': inline_with_links, "uniq1": uniq1, "uniq2": uniq2,
'rpms_diff_max_length': max([len(v) for v in (uniq1, uniq2)])
}
def pip_files_diff(self):
inline, uniq1, uniq2 = pip_diff.pip_modules(self.good, self.bad)
return {
'pip_inline': inline, "pip_uniq1": uniq1, "pip_uniq2": uniq2,
'pip_diff_max_length': max([len(v) for v in (uniq1, uniq2)])
}
def generate(self):
data_results = {}
if self.ansible_diff:
data_results.update(self.ansible_playbooks_diff())
if self.rpm_diff:
data_results.update(self.rpm_files_diff())
if self.pip_diff:
data_results.update(self.pip_files_diff())
return data_results
| 38.170213 | 79 | 0.64437 |
4a2171106e048f362d72207d22990e770a883667 | 2,728 | py | Python | src/login/models.py | gucunskidj/exam-registration | afbacb782bca554cb4431d385a632597022ba86f | [
"bzip2-1.0.6"
] | null | null | null | src/login/models.py | gucunskidj/exam-registration | afbacb782bca554cb4431d385a632597022ba86f | [
"bzip2-1.0.6"
] | null | null | null | src/login/models.py | gucunskidj/exam-registration | afbacb782bca554cb4431d385a632597022ba86f | [
"bzip2-1.0.6"
] | null | null | null | from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Smer(models.Model):
naziv_smera = models.CharField(max_length=500)
class Meta:
verbose_name_plural = 'Smerovi'
ordering = ['naziv_smera']
def __str__(self):
return self.naziv_smera
class Profesor(models.Model):
prezime_i_ime = models.CharField(max_length=500)
class Meta:
verbose_name_plural = 'Profesori'
ordering = ['prezime_i_ime']
def __str__(self):
return self.prezime_i_ime
class Predmet(models.Model):
SEMESTAR = (
('I', 'I'),
('II', 'II'),
('III', 'III'),
('IV', 'IV'),
('V', 'V'),
('VI', 'VI'),
('VII', 'VII'),
('VIII', 'VIII'),
)
VRSTA = (
('Obavezni', 'Obavezni'),
('Izborni', 'Izborni')
)
sifra_predmeta = models.CharField(primary_key=True, max_length=20)
naziv_predmeta = models.CharField(max_length=200)
profesor = models.ForeignKey(Profesor, on_delete=models.CASCADE, blank=True, null=True)
datum_ispita = models.DateTimeField(blank=True, null=True)
semestar = models.CharField(max_length=200, choices=SEMESTAR)
espb_bodova = models.IntegerField(null=True)
smer = models.ManyToManyField(Smer)
vrsta_predmeta = models.CharField(max_length=200, choices=VRSTA, blank=True, null=True)
prijava = models.BooleanField(default=False, null=True, blank=True)
class Meta:
verbose_name_plural = 'Predmeti'
ordering = ['semestar', 'sifra_predmeta']
def __str__(self):
return self.sifra_predmeta + ', ' + self.naziv_predmeta + ', ' + self.semestar + ' semestar'
class Student(models.Model):
GODINA = (
('I', 'I'),
('II', 'II'),
('III', 'III'),
('IV', 'IV'),
)
user = models.OneToOneField(User, null=True, on_delete=models.CASCADE)
ime_studenta = models.CharField(max_length=200)
prezime_studenta = models.CharField(max_length=200)
br_indexa = models.CharField(primary_key=True, max_length=10)
jmbg = models.CharField(max_length=13)
smer = models.ForeignKey(Smer, on_delete=models.CASCADE, blank=True, null=True)
predmeti = models.ManyToManyField(Predmet)
god_studija = models.CharField(max_length=200, choices=GODINA)
class Meta:
verbose_name_plural = 'Studenti'
def __str__(self):
return self.ime_studenta + ' ' + self.prezime_studenta + ', ' + self.smer.naziv_smera + ', ' + self.god_studija + ' god.'
# class Ispit(models.Model):
# student = models.ForeignKey(Student, on_delete=models.CASCADE, null=True)
# predmeti = models.ManyToManyField(Predmet) | 30.311111 | 129 | 0.644062 |
4a217300a6632e40826a51eb93450468af1bd996 | 8,073 | py | Python | airbyte-integrations/connectors/source-trello/source_trello/source.py | akawalsky/airbyte | 757650f9de0a2ae1a09e03573f015234f5a9fb92 | [
"MIT"
] | null | null | null | airbyte-integrations/connectors/source-trello/source_trello/source.py | akawalsky/airbyte | 757650f9de0a2ae1a09e03573f015234f5a9fb92 | [
"MIT"
] | null | null | null | airbyte-integrations/connectors/source-trello/source_trello/source.py | akawalsky/airbyte | 757650f9de0a2ae1a09e03573f015234f5a9fb92 | [
"MIT"
] | null | null | null | #
# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
#
from abc import ABC
from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple
import requests
from airbyte_cdk.sources import AbstractSource
from airbyte_cdk.sources.streams import Stream
from airbyte_cdk.sources.streams.http import HttpStream
from airbyte_cdk.sources.streams.http.auth import HttpAuthenticator
from .utils import TrelloRequestRateLimits as balancer
class TrelloStream(HttpStream, ABC):
url_base = "https://api.trello.com/1/"
# Define primary key as sort key for full_refresh, or very first sync for incremental_refresh
primary_key = "id"
# Page size
limit = None
extra_params = None
def __init__(self, config: Mapping[str, Any]):
super().__init__(authenticator=config["authenticator"])
self.start_date = config["start_date"]
self.config = config
def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]:
return None
def request_params(
self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None
) -> MutableMapping[str, Any]:
params = {"limit": self.limit, "since": self.start_date}
if next_page_token:
params.update(**next_page_token)
if self.extra_params:
params.update(self.extra_params)
return params
@balancer.balance_rate_limit()
def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]:
json_response = response.json()
for record in json_response:
yield record
class ChildStreamMixin:
parent_stream_class: Optional[TrelloStream] = None
def stream_slices(self, sync_mode, **kwargs) -> Iterable[Optional[Mapping[str, any]]]:
board_ids = self.config.get("board_ids", [])
if len(board_ids) > 0:
for id in board_ids:
yield {"id": id}
# early exit because we don't need to fetch all boards we specifically told us not to
return
for item in self.parent_stream_class(config=self.config).read_records(sync_mode=sync_mode):
yield {"id": item["id"]}
yield from []
class IncrementalTrelloStream(TrelloStream, ABC):
cursor_field = "date"
def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]:
json_response = response.json()
last_record = next(reversed(json_response), {})
next_page = last_record.get("id")
if next_page:
return {"before": next_page}
def request_params(
self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None
) -> MutableMapping[str, Any]:
params = super().request_params(stream_state, stream_slice, next_page_token)
if stream_state:
params["since"] = stream_state[self.cursor_field]
return params
def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]:
return {self.cursor_field: max(latest_record.get(self.cursor_field, ""), current_stream_state.get(self.cursor_field, ""))}
class Boards(TrelloStream):
"""Return list of all boards.
API Docs: https://developer.atlassian.com/cloud/trello/rest/api-group-members/#api-members-id-boards-get
Endpoint: https://api.trello.com/1/members/me/boards
"""
def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str:
return "members/me/boards"
class Cards(ChildStreamMixin, TrelloStream):
"""Return list of all cards of a boards.
API Docs: https://developer.atlassian.com/cloud/trello/rest/api-group-boards/#api-boards-id-cards-get
Endpoint: https://api.trello.com/1/boards/<id>/cards/all
"""
parent_stream_class = Boards
limit = 20000
extra_params = {"customFieldItems": "true"}
def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str:
return f"boards/{stream_slice['id']}/cards/all"
class Checklists(ChildStreamMixin, TrelloStream):
"""Return list of all checklists of a boards.
API Docs: https://developer.atlassian.com/cloud/trello/rest/api-group-boards/#api-boards-id-checklists-get
Endpoint: https://api.trello.com/1/boards/<id>/checklists
"""
parent_stream_class = Boards
extra_params = {"fields": "all", "checkItem_fields": "all"}
def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str:
return f"boards/{stream_slice['id']}/checklists"
class Lists(ChildStreamMixin, TrelloStream):
"""Return list of all lists of a boards.
API Docs: https://developer.atlassian.com/cloud/trello/rest/api-group-boards/#api-boards-id-lists-get
Endpoint: https://api.trello.com/1/boards/<id>/lists
"""
parent_stream_class = Boards
def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str:
return f"boards/{stream_slice['id']}/lists"
class Users(ChildStreamMixin, TrelloStream):
"""Return list of all members of a boards.
API Docs: https://developer.atlassian.com/cloud/trello/rest/api-group-boards/#api-boards-id-members-get
Endpoint: https://api.trello.com/1/boards/<id>/members
"""
parent_stream_class = Boards
def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str:
return f"boards/{stream_slice['id']}/members"
class Actions(ChildStreamMixin, IncrementalTrelloStream):
"""Return list of all actions of a boards.
API Docs: https://developer.atlassian.com/cloud/trello/rest/api-group-boards/#api-boards-boardid-actions-get
Endpoint: https://api.trello.com/1/boards/<id>/actions
"""
parent_stream_class = Boards
limit = 1000
def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str:
return f"boards/{stream_slice['id']}/actions"
class TrelloAuthenticator(HttpAuthenticator):
"""
Generate auth header for start making requests from API token and API key.
"""
def __init__(
self,
token: str,
key: str,
auth_header: str = "Authorization",
key_header: str = "oauth_consumer_key",
token_header: str = "oauth_token",
):
self.auth_header = auth_header
self.key_header = key_header
self.token_header = token_header
self._key = key
self._token = token
def get_auth_header(self) -> Mapping[str, Any]:
return {self.auth_header: f'OAuth {self.key_header}="{self._key}", {self.token_header}="{self._token}"'}
class SourceTrello(AbstractSource):
"""
    Source Trello fetches data from the web-based, Kanban-style, list-making application.
"""
@staticmethod
def _get_authenticator(config: dict) -> TrelloAuthenticator:
key, token = config["key"], config["token"]
return TrelloAuthenticator(token=token, key=key)
def check_connection(self, logger, config) -> Tuple[bool, any]:
"""
Testing connection availability for the connector by granting the credentials.
"""
try:
url = f"{TrelloStream.url_base}members/me/boards"
authenticator = self._get_authenticator(config)
session = requests.get(url, headers=authenticator.get_auth_header())
session.raise_for_status()
available_boards = {row.get("id") for row in session.json()}
for board_id in config.get("board_ids", []):
if board_id not in available_boards:
raise Exception(f"board_id {board_id} not found")
return True, None
except requests.exceptions.RequestException as e:
return False, e
def streams(self, config: Mapping[str, Any]) -> List[Stream]:
config["authenticator"] = self._get_authenticator(config)
return [Actions(config), Boards(config), Cards(config), Checklists(config), Lists(config), Users(config)]
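# --- Added usage sketch (not part of the original module) ---------------------
# Shape of the config this source expects (all values are placeholders):
#   config = {
#       "key": "<trello api key>",
#       "token": "<trello api token>",
#       "start_date": "2021-01-01T00:00:00Z",
#       "board_ids": [],        # optional: restrict syncing to specific boards
#   }
#   ok, err = SourceTrello().check_connection(logger, config)
#   streams = SourceTrello().streams(config)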
| 36.201794 | 135 | 0.671869 |
4a217325f61f6af5849120de8de8ce9cd38fd93d | 722 | py | Python | src/genie/libs/parser/bigip/get_ltm_message_routingsip.py | nujo/genieparser | 083b01efc46afc32abe1a1858729578beab50cd3 | [
"Apache-2.0"
] | 204 | 2018-06-27T00:55:27.000Z | 2022-03-06T21:12:18.000Z | src/genie/libs/parser/bigip/get_ltm_message_routingsip.py | nujo/genieparser | 083b01efc46afc32abe1a1858729578beab50cd3 | [
"Apache-2.0"
] | 468 | 2018-06-19T00:33:18.000Z | 2022-03-31T23:23:35.000Z | src/genie/libs/parser/bigip/get_ltm_message_routingsip.py | nujo/genieparser | 083b01efc46afc32abe1a1858729578beab50cd3 | [
"Apache-2.0"
] | 309 | 2019-01-16T20:21:07.000Z | 2022-03-30T12:56:41.000Z | # Global Imports
import json
from collections import defaultdict
# Metaparser
from genie.metaparser import MetaParser
# =============================================
# Collection for '/mgmt/tm/ltm/message-routing/sip' resources
# =============================================
class LtmMessageroutingSipSchema(MetaParser):
schema = {}
class LtmMessageroutingSip(LtmMessageroutingSipSchema):
""" To F5 resource for /mgmt/tm/ltm/message-routing/sip
"""
cli_command = "/mgmt/tm/ltm/message-routing/sip"
def rest(self):
response = self.device.get(self.cli_command)
response_json = response.json()
if not response_json:
return {}
return response_json
| 21.235294 | 61 | 0.608033 |
4a2174909a2c306a90046ffe4bf1c90a175367cd | 6,000 | py | Python | beetsplug/play.py | awesome-archive/beets | 44f33cabc7e2c5ea6fd79fac3b73ac54fa11d568 | [
"MIT"
] | null | null | null | beetsplug/play.py | awesome-archive/beets | 44f33cabc7e2c5ea6fd79fac3b73ac54fa11d568 | [
"MIT"
] | null | null | null | beetsplug/play.py | awesome-archive/beets | 44f33cabc7e2c5ea6fd79fac3b73ac54fa11d568 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2016, David Hamp-Gonsalves
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Send the results of a query to the configured music player as a playlist.
"""
from __future__ import division, absolute_import, print_function
from beets.plugins import BeetsPlugin
from beets.ui import Subcommand
from beets import config
from beets import ui
from beets import util
from os.path import relpath
from tempfile import NamedTemporaryFile
# Indicate where arguments should be inserted into the command string.
# If this is missing, they're placed at the end.
ARGS_MARKER = '$args'
class PlayPlugin(BeetsPlugin):
def __init__(self):
super(PlayPlugin, self).__init__()
config['play'].add({
'command': None,
'use_folders': False,
'relative_to': None,
'raw': False,
# Backwards compatibility. See #1803 and line 74
'warning_threshold': -2,
'warning_treshold': 100,
})
def commands(self):
play_command = Subcommand(
'play',
help=u'send music to a player as a playlist'
)
play_command.parser.add_album_option()
play_command.parser.add_option(
u'-A', u'--args',
action='store',
help=u'add additional arguments to the command',
)
play_command.func = self.play_music
return [play_command]
def play_music(self, lib, opts, args):
"""Execute query, create temporary playlist and execute player
command passing that playlist, at request insert optional arguments.
"""
command_str = config['play']['command'].get()
if not command_str:
command_str = util.open_anything()
use_folders = config['play']['use_folders'].get(bool)
relative_to = config['play']['relative_to'].get()
raw = config['play']['raw'].get(bool)
warning_threshold = config['play']['warning_threshold'].get(int)
# We use -2 as a default value for warning_threshold to detect if it is
# set or not. We can't use a falsey value because it would have an
# actual meaning in the configuration of this plugin, and we do not use
# -1 because some people might use it as a value to obtain no warning,
# which wouldn't be that bad of a practice.
if warning_threshold == -2:
# if warning_threshold has not been set by user, look for
# warning_treshold, to preserve backwards compatibility. See #1803.
# warning_treshold has the correct default value of 100.
warning_threshold = config['play']['warning_treshold'].get(int)
if relative_to:
relative_to = util.normpath(relative_to)
# Add optional arguments to the player command.
if opts.args:
if ARGS_MARKER in command_str:
command_str = command_str.replace(ARGS_MARKER, opts.args)
else:
command_str = u"{} {}".format(command_str, opts.args)
else:
# Don't include the marker in the command.
command_str = command_str.replace(" " + ARGS_MARKER, "")
# Perform search by album and add folders rather than tracks to
# playlist.
if opts.album:
selection = lib.albums(ui.decargs(args))
paths = []
sort = lib.get_default_album_sort()
for album in selection:
if use_folders:
paths.append(album.item_dir())
else:
paths.extend(item.path
for item in sort.sort(album.items()))
item_type = 'album'
# Perform item query and add tracks to playlist.
else:
selection = lib.items(ui.decargs(args))
paths = [item.path for item in selection]
if relative_to:
paths = [relpath(path, relative_to) for path in paths]
item_type = 'track'
item_type += 's' if len(selection) > 1 else ''
if not selection:
ui.print_(ui.colorize('text_warning',
u'No {0} to play.'.format(item_type)))
return
# Warn user before playing any huge playlists.
if warning_threshold and len(selection) > warning_threshold:
ui.print_(ui.colorize(
'text_warning',
u'You are about to queue {0} {1}.'.format(
len(selection), item_type)))
if ui.input_options((u'Continue', u'Abort')) == 'a':
return
ui.print_(u'Playing {0} {1}.'.format(len(selection), item_type))
if raw:
open_args = paths
else:
open_args = [self._create_tmp_playlist(paths)]
self._log.debug(u'executing command: {} {!r}', command_str, open_args)
try:
util.interactive_open(open_args, command_str)
except OSError as exc:
raise ui.UserError(
"Could not play the query: {0}".format(exc))
def _create_tmp_playlist(self, paths_list):
"""Create a temporary .m3u file. Return the filename.
"""
m3u = NamedTemporaryFile('wb', suffix='.m3u', delete=False)
for item in paths_list:
m3u.write(item + b'\n')
m3u.close()
return m3u.name
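# --- Added note (not part of the original plugin) ------------------------------
# Illustrative beets config for this plugin:
#   play:
#     command: mpv --shuffle $args
#     raw: no
# The $args marker above is replaced by whatever is passed via `beet play -A "..."`,
# and the selected items are handed to the player as a temporary .m3u playlist
# unless raw is enabled.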
| 37.974684 | 79 | 0.607 |
4a217492cbacfec301ecd7bb60c637ecacd789dd | 6,934 | py | Python | dualis/main.py | KarelZe/dualis | cecf945ec76d9f49e0d6c9cfeb7f95f0c0bdf313 | [
"MIT"
] | 8 | 2018-05-31T13:14:02.000Z | 2021-11-18T22:33:35.000Z | dualis/main.py | KarelZe/dualis | cecf945ec76d9f49e0d6c9cfeb7f95f0c0bdf313 | [
"MIT"
] | 3 | 2018-05-26T21:50:09.000Z | 2018-05-31T14:01:17.000Z | dualis/main.py | KarelZe/dualis | cecf945ec76d9f49e0d6c9cfeb7f95f0c0bdf313 | [
"MIT"
] | null | null | null | import itertools
from concurrent import futures
import requests
from bs4 import BeautifulSoup
from flask import Flask, jsonify, request
from werkzeug.exceptions import abort
app = Flask(__name__)
BASE_URL = "https://dualis.dhbw.de"
units = []
@app.route("/dualis/api/v1.0/semesters/", methods=['GET'])
def get_semesters():
# TODO: refactor code so that semesters can be accessed through endpoint
return jsonify({}), 200
@app.route("/dualis/api/v1.0/units/", methods=['GET'])
def get_units():
# TODO: refactor code so that units and all relating exams can be accessed through endpoint
return jsonify({}), 200
@app.route("/dualis/api/v1.0/grades/", methods=['GET'])
def get_grades():
"""
api endpoint to query grades from dualis.dhbw.de. Function expects credentials in GET request
like {"user":"[email protected]","password":"journeyToTheCenterOftheEarth"}
:return: grades of all semesters from all modules as json
"""
if not request.json or not 'password' in request.json or not 'user' in request.json:
abort(401)
# TODO: Refactor spaghetti code :)
# retrieve password and username from body
request_json = request.get_json()
user = request_json.get('user')
password = request_json.get('password')
# create a session
url = BASE_URL + "/scripts/mgrqcgi?APPNAME=CampusNet&PRGNAME=EXTERNALPAGES&ARGUMENTS=-N000000000000001,-N000324,-Awelcome"
cookie_request = requests.get(url)
data = {"usrname": user, "pass": password,
"APPNAME": "CampusNet",
"PRGNAME": "LOGINCHECK",
"ARGUMENTS": "clino,usrname,pass,menuno,menu_type, browser,platform",
"clino": "000000000000001",
"menuno": "000324",
"menu_type": "classic",
"browser": "",
"platform": ""
}
# return dualis response code, if response code is not 200
login_response = requests.post(url, data=data, headers=None, verify=True, cookies=cookie_request.cookies)
arguments = login_response.headers['REFRESH']
if not login_response.ok:
abort(login_response.status_code)
# redirecting to course results...
url_content = BASE_URL + "/scripts/mgrqcgi?APPNAME=CampusNet&PRGNAME=STARTPAGE_DISPATCH&ARGUMENTS=" + arguments[79:]
url_content = url_content.replace("STARTPAGE_DISPATCH", "COURSERESULTS")
semester_ids_response = requests.get(url_content, cookies=login_response.cookies)
if not semester_ids_response.ok:
abort(semester_ids_response.status_code)
# get ids of all semester, replaces -N ...
soup = BeautifulSoup(semester_ids_response.content, 'html.parser')
options = soup.find_all('option')
semester_ids = [option['value'] for option in options]
semester_urls = [url_content[:-15] + semester_id for semester_id in semester_ids]
# search for all unit_urls in parallel
with futures.ThreadPoolExecutor(8) as semester_pool:
tmp = semester_pool.map(parse_semester, semester_urls, [login_response.cookies] * len(semester_urls))
unit_urls = list(itertools.chain.from_iterable(tmp))
# query all unit_urls to obtain grades in parallel
with futures.ThreadPoolExecutor(8) as detail_pool:
semester = detail_pool.map(parse_unit, unit_urls, [login_response.cookies] * len(unit_urls))
units.extend(semester)
# find logout url in html source code and logout
logout_url = BASE_URL + soup.find('a', {'id': 'logoutButton'})['href']
logout(logout_url, cookie_request.cookies)
# return dict containing units and exams as valid json
return jsonify(units), 200
def parse_student_results(url, cookies):
"""
This function calls the dualis web page of a given semester to query for all modules, that have been finished.
:param url: url of STUDENT_RESULT page
:param cookies: cookie of current session
:return: list of urls for units
"""
response = requests.get(url=url, cookies=cookies)
student_result_soup = BeautifulSoup(response.content, "html.parser")
table = student_result_soup.find("table", {"class": "students_results"})
return [a['href'] for a in table.find_all("a", href=True)]
def parse_semester(url, cookies):
"""
    function calls the dualis web page of a given semester to extract the urls of all units within the semester.
It's searching for script-tags containing the urls and crops away the surrounding javascript.
:param url: url of the semester page
:param cookies: cookie for the semester page
:return: list with urls of all units in semester
"""
semester_response = requests.get(url, cookies=cookies)
semester_soup = BeautifulSoup(semester_response.content, 'html.parser')
table = semester_soup.find("table", {"class": "list"})
# get unit details from javascript
return [script.text.strip()[301:414] for script in table.find_all("script")]
def parse_unit(url, cookies):
"""
function calls the dualis webpage of a given module to extract the grades
:param url: url for unit page
:param cookies: cookie for unit page
:return: unit with information about name and exams incl. grades
"""
response = requests.get(url=BASE_URL + url, cookies=cookies)
detail_soup = BeautifulSoup(response.content, "html.parser")
h1 = detail_soup.find("h1").text.strip()
table = detail_soup.find("table", {"class": "tb"})
td = [td.text.strip() for td in table.find_all("td")]
unit = {'name': h1.replace("\n", " ").replace("\r", ""), 'exams': []}
# units have non uniform structure. Try to map based on total size.
if len(td) <= 24:
exam = {'name': td[13], 'date': td[14], 'grade': td[15], 'externally accepted': False}
unit['exams'].append(exam)
elif len(td) <= 29:
exam = {'name': td[19], 'date': td[14], 'grade': td[21], 'externally accepted': False}
unit['exams'].append(exam)
elif len(td) == 30:
for idx in range(13, len(td) - 5, 6):
exam = {'name': td[idx], 'date': td[idx + 1], 'grade': td[idx + 2], 'externally accepted': False}
unit['exams'].append(exam)
elif len(td) <= 31:
for idx in range(11, len(td) - 7, 7):
exam = {'name': td[idx], 'date': td[idx + 3], 'grade': td[idx + 4], 'externally accepted': False}
unit['exams'].append(exam)
else:
for idx in range(19, len(td) - 5, 6):
exam = {'name': td[idx], 'date': td[14], 'grade': td[idx + 2], 'externally accepted': False}
unit['exams'].append(exam)
return unit
def logout(url, cookies):
"""
Function to perform logout in dualis.dhbw.de
:param url: url to perform logout
:param cookies: cookie with session information
:return: boolean whether logging out was successful
"""
return requests.get(url=url, cookies=cookies).ok
if __name__ == "__main__":
app.run(debug=True)
| 41.27381 | 126 | 0.672483 |
4a2176b950357b2b09a181f35a36bb7986b9f6fd | 3,078 | py | Python | app/routers/request/http.py | 01xu10/pity | ac4aafba47d916ac8731ba087ff26eb06f90d61c | [
"Apache-2.0"
] | 1 | 2021-11-11T14:12:36.000Z | 2021-11-11T14:12:36.000Z | app/routers/request/http.py | 01xu10/pity | ac4aafba47d916ac8731ba087ff26eb06f90d61c | [
"Apache-2.0"
] | null | null | null | app/routers/request/http.py | 01xu10/pity | ac4aafba47d916ac8731ba087ff26eb06f90d61c | [
"Apache-2.0"
] | null | null | null | import asyncio
import json
from typing import List, Dict
from fastapi import Depends, APIRouter
from app.dao.test_case.TestcaseDataDao import PityTestcaseDataDao
from app.handler.fatcory import PityResponse
from app.middleware.AsyncHttpClient import AsyncRequest
from app.routers import Permission
from app.routers.request.http_schema import HttpRequestForm
from app.utils.executor import Executor
router = APIRouter(prefix="/request")
@router.post("/http")
async def http_request(data: HttpRequestForm, user_info=Depends(Permission())):
if "Content-Type" not in data.headers:
data.headers['Content-Type'] = "application/json; charset=UTF-8"
if "form" not in data.headers['Content-Type']:
r = AsyncRequest(data.url, headers=data.headers,
data=data.body.encode() if data.body is not None else data.body)
else:
body = json.loads(data.body)
r = AsyncRequest(data.url, headers=data.headers, data=body if body is not None else body)
response = await r.invoke(data.method)
if response.get("status"):
        return dict(code=0, data=response, msg="Operation successful")
return dict(code=110, data=response, msg=response.get("msg"))
@router.get("/run")
async def execute_case(env: int, case_id: int, user_info=Depends(Permission())):
try:
executor = Executor()
test_data = await PityTestcaseDataDao.list_testcase_data_by_env(env, case_id)
ans = []
for data in test_data:
params = json.loads(data.json_data)
result, err = await executor.run(env, case_id, request_param=params)
if err:
return PityResponse.failed(data=result, msg=err)
ans.append(result)
return PityResponse.success(ans)
except Exception as e:
return PityResponse.failed(e)
@router.post("/run/async")
async def execute_case(env: int, case_id: List[int], user_info=Depends(Permission())):
data = dict()
# s = time.perf_counter()
await asyncio.gather(*(run_single(env, c, data) for c in case_id))
# elapsed = time.perf_counter() - s
# print(f"async executed in {elapsed:0.2f} seconds.")
    return dict(code=0, data=data, msg="Operation successful")
@router.post("/run/sync")
async def execute_case(env: int, case_id: List[int], user_info=Depends(Permission())):
data = dict()
# s = time.perf_counter()
for c in case_id:
executor = Executor()
data[c] = await executor.run(env, c)
# elapsed = time.perf_counter() - s
# print(f"sync executed in {elapsed:0.2f} seconds.")
    return dict(code=0, data=data, msg="Operation successful")
@router.post("/run/multiple")
async def execute_as_report(case_id: List[int], user_info=Depends(Permission())):
report_id = await Executor.run_multiple(user_info['id'], 1, case_id)
    return dict(code=0, data=report_id, msg="Operation successful")
async def run_single(env: int, case_id: int, data: Dict[int, tuple]):
executor = Executor()
data[case_id] = await executor.run(env, case_id)
| 38 | 98 | 0.665042 |
4a2176db7cf678cae45c3769a9e2c1a587dee1e2 | 14,585 | py | Python | stickybeak/vendored/pip/_vendor/html5lib/treebuilders/base.py | reloadware/stickybeak | 8ac52a80849a3098fb6b2f47115970a734a73c14 | [
"Apache-2.0"
] | null | null | null | stickybeak/vendored/pip/_vendor/html5lib/treebuilders/base.py | reloadware/stickybeak | 8ac52a80849a3098fb6b2f47115970a734a73c14 | [
"Apache-2.0"
] | null | null | null | stickybeak/vendored/pip/_vendor/html5lib/treebuilders/base.py | reloadware/stickybeak | 8ac52a80849a3098fb6b2f47115970a734a73c14 | [
"Apache-2.0"
] | 1 | 2022-01-01T15:14:42.000Z | 2022-01-01T15:14:42.000Z | from __future__ import absolute_import, division, unicode_literals
from stickybeak.vendored.pip._vendor.six import text_type
from ..constants import scopingElements, tableInsertModeElements, namespaces
# The scope markers are inserted when entering object elements,
# marquees, table cells, and table captions, and are used to prevent formatting
# from "leaking" into tables, object elements, and marquees.
Marker = None
listElementsMap = {
None: (frozenset(scopingElements), False),
"button": (frozenset(scopingElements | {(namespaces["html"], "button")}), False),
"list": (frozenset(scopingElements | {(namespaces["html"], "ol"),
(namespaces["html"], "ul")}), False),
"table": (frozenset([(namespaces["html"], "html"),
(namespaces["html"], "table")]), False),
"select": (frozenset([(namespaces["html"], "optgroup"),
(namespaces["html"], "option")]), True)
}
class Node(object):
"""Represents an item in the tree"""
def __init__(self, name):
"""Creates a Node
:arg name: The tag name associated with the node
"""
# The tag name associated with the node
self.name = name
# The parent of the current node (or None for the document node)
self.parent = None
# The value of the current node (applies to text nodes and comments)
self.value = None
# A dict holding name -> value pairs for attributes of the node
self.attributes = {}
# A list of child nodes of the current node. This must include all
# elements but not necessarily other node types.
self.childNodes = []
# A list of miscellaneous flags that can be set on the node.
self._flags = []
def __str__(self):
attributesStr = " ".join(["%s=\"%s\"" % (name, value)
for name, value in
self.attributes.items()])
if attributesStr:
return "<%s %s>" % (self.name, attributesStr)
else:
return "<%s>" % (self.name)
def __repr__(self):
return "<%s>" % (self.name)
def appendChild(self, node):
"""Insert node as a child of the current node
:arg node: the node to insert
"""
raise NotImplementedError
def insertText(self, data, insertBefore=None):
"""Insert data as text in the current node, positioned before the
start of node insertBefore or to the end of the node's text.
:arg data: the data to insert
:arg insertBefore: True if you want to insert the text before the node
and False if you want to insert it after the node
"""
raise NotImplementedError
def insertBefore(self, node, refNode):
"""Insert node as a child of the current node, before refNode in the
list of child nodes. Raises ValueError if refNode is not a child of
the current node
:arg node: the node to insert
:arg refNode: the child node to insert the node before
"""
raise NotImplementedError
def removeChild(self, node):
"""Remove node from the children of the current node
:arg node: the child node to remove
"""
raise NotImplementedError
def reparentChildren(self, newParent):
"""Move all the children of the current node to newParent.
This is needed so that trees that don't store text as nodes move the
text in the correct way
:arg newParent: the node to move all this node's children to
"""
# XXX - should this method be made more general?
for child in self.childNodes:
newParent.appendChild(child)
self.childNodes = []
def cloneNode(self):
"""Return a shallow copy of the current node i.e. a node with the same
name and attributes but with no parent or child nodes
"""
raise NotImplementedError
def hasContent(self):
"""Return true if the node has children or text, false otherwise
"""
raise NotImplementedError
class ActiveFormattingElements(list):
def append(self, node):
equalCount = 0
if node != Marker:
for element in self[::-1]:
if element == Marker:
break
if self.nodesEqual(element, node):
equalCount += 1
if equalCount == 3:
self.remove(element)
break
list.append(self, node)
def nodesEqual(self, node1, node2):
if not node1.nameTuple == node2.nameTuple:
return False
if not node1.attributes == node2.attributes:
return False
return True
class TreeBuilder(object):
"""Base treebuilder implementation
* documentClass - the class to use for the bottommost node of a document
* elementClass - the class to use for HTML Elements
* commentClass - the class to use for comments
* doctypeClass - the class to use for doctypes
"""
# pylint:disable=not-callable
# Document class
documentClass = None
# The class to use for creating a node
elementClass = None
# The class to use for creating comments
commentClass = None
# The class to use for creating doctypes
doctypeClass = None
# Fragment class
fragmentClass = None
def __init__(self, namespaceHTMLElements):
"""Create a TreeBuilder
:arg namespaceHTMLElements: whether or not to namespace HTML elements
"""
if namespaceHTMLElements:
self.defaultNamespace = "http://www.w3.org/1999/xhtml"
else:
self.defaultNamespace = None
self.reset()
def reset(self):
self.openElements = []
self.activeFormattingElements = ActiveFormattingElements()
# XXX - rename these to headElement, formElement
self.headPointer = None
self.formPointer = None
self.insertFromTable = False
self.document = self.documentClass()
def elementInScope(self, target, variant=None):
# If we pass a node in we match that. if we pass a string
# match any node with that name
exactNode = hasattr(target, "nameTuple")
if not exactNode:
if isinstance(target, text_type):
target = (namespaces["html"], target)
assert isinstance(target, tuple)
listElements, invert = listElementsMap[variant]
for node in reversed(self.openElements):
if exactNode and node == target:
return True
elif not exactNode and node.nameTuple == target:
return True
elif (invert ^ (node.nameTuple in listElements)):
return False
assert False # We should never reach this point
def reconstructActiveFormattingElements(self):
# Within this algorithm the order of steps described in the
# specification is not quite the same as the order of steps in the
# code. It should still do the same though.
# Step 1: stop the algorithm when there's nothing to do.
if not self.activeFormattingElements:
return
# Step 2 and step 3: we start with the last element. So i is -1.
i = len(self.activeFormattingElements) - 1
entry = self.activeFormattingElements[i]
if entry == Marker or entry in self.openElements:
return
# Step 6
while entry != Marker and entry not in self.openElements:
if i == 0:
# This will be reset to 0 below
i = -1
break
i -= 1
# Step 5: let entry be one earlier in the list.
entry = self.activeFormattingElements[i]
while True:
# Step 7
i += 1
# Step 8
entry = self.activeFormattingElements[i]
clone = entry.cloneNode() # Mainly to get a new copy of the attributes
# Step 9
element = self.insertElement({"type": "StartTag",
"name": clone.name,
"namespace": clone.namespace,
"data": clone.attributes})
# Step 10
self.activeFormattingElements[i] = element
# Step 11
if element == self.activeFormattingElements[-1]:
break
def clearActiveFormattingElements(self):
entry = self.activeFormattingElements.pop()
while self.activeFormattingElements and entry != Marker:
entry = self.activeFormattingElements.pop()
def elementInActiveFormattingElements(self, name):
"""Check if an element exists between the end of the active
formatting elements and the last marker. If it does, return it, else
return false"""
for item in self.activeFormattingElements[::-1]:
# Check for Marker first because if it's a Marker it doesn't have a
# name attribute.
if item == Marker:
break
elif item.name == name:
return item
return False
def insertRoot(self, token):
element = self.createElement(token)
self.openElements.append(element)
self.document.appendChild(element)
def insertDoctype(self, token):
name = token["name"]
publicId = token["publicId"]
systemId = token["systemId"]
doctype = self.doctypeClass(name, publicId, systemId)
self.document.appendChild(doctype)
def insertComment(self, token, parent=None):
if parent is None:
parent = self.openElements[-1]
parent.appendChild(self.commentClass(token["data"]))
def createElement(self, token):
"""Create an element but don't insert it anywhere"""
name = token["name"]
namespace = token.get("namespace", self.defaultNamespace)
element = self.elementClass(name, namespace)
element.attributes = token["data"]
return element
def _getInsertFromTable(self):
return self._insertFromTable
def _setInsertFromTable(self, value):
"""Switch the function used to insert an element from the
normal one to the misnested table one and back again"""
self._insertFromTable = value
if value:
self.insertElement = self.insertElementTable
else:
self.insertElement = self.insertElementNormal
insertFromTable = property(_getInsertFromTable, _setInsertFromTable)
def insertElementNormal(self, token):
name = token["name"]
assert isinstance(name, text_type), "Element %s not unicode" % name
namespace = token.get("namespace", self.defaultNamespace)
element = self.elementClass(name, namespace)
element.attributes = token["data"]
self.openElements[-1].appendChild(element)
self.openElements.append(element)
return element
def insertElementTable(self, token):
"""Create an element and insert it into the tree"""
element = self.createElement(token)
if self.openElements[-1].name not in tableInsertModeElements:
return self.insertElementNormal(token)
else:
# We should be in the InTable mode. This means we want to do
# special magic element rearranging
parent, insertBefore = self.getTableMisnestedNodePosition()
if insertBefore is None:
parent.appendChild(element)
else:
parent.insertBefore(element, insertBefore)
self.openElements.append(element)
return element
def insertText(self, data, parent=None):
"""Insert text data."""
if parent is None:
parent = self.openElements[-1]
if (not self.insertFromTable or (self.insertFromTable and
self.openElements[-1].name
not in tableInsertModeElements)):
parent.insertText(data)
else:
# We should be in the InTable mode. This means we want to do
# special magic element rearranging
parent, insertBefore = self.getTableMisnestedNodePosition()
parent.insertText(data, insertBefore)
def getTableMisnestedNodePosition(self):
"""Get the foster parent element, and sibling to insert before
(or None) when inserting a misnested table node"""
# The foster parent element is the one which comes before the most
# recently opened table element
# XXX - this is really inelegant
lastTable = None
fosterParent = None
insertBefore = None
for elm in self.openElements[::-1]:
if elm.name == "table":
lastTable = elm
break
if lastTable:
# XXX - we should really check that this parent is actually a
# node here
if lastTable.parent:
fosterParent = lastTable.parent
insertBefore = lastTable
else:
fosterParent = self.openElements[
self.openElements.index(lastTable) - 1]
else:
fosterParent = self.openElements[0]
return fosterParent, insertBefore
def generateImpliedEndTags(self, exclude=None):
name = self.openElements[-1].name
# XXX td, th and tr are not actually needed
if (name in frozenset(("dd", "dt", "li", "option", "optgroup", "p", "rp", "rt")) and
name != exclude):
self.openElements.pop()
# XXX This is not entirely what the specification says. We should
# investigate it more closely.
self.generateImpliedEndTags(exclude)
def getDocument(self):
"""Return the final tree"""
return self.document
def getFragment(self):
"""Return the final fragment"""
# assert self.innerHTML
fragment = self.fragmentClass()
self.openElements[0].reparentChildren(fragment)
return fragment
def testSerializer(self, node):
"""Serialize the subtree of node in the format required by unit tests
:arg node: the node from which to start serializing
"""
raise NotImplementedError
| 34.892344 | 92 | 0.599931 |
4a2176f2d74d6ec5b907cf6aa7f617d83a50ae51 | 4,447 | py | Python | preprocessing/libs/utils/pytorch_ssim/__init__.py | vision4robotics/UDAT | f06971ec7d6794d0572864cf9fd6d49a66201303 | [
"Apache-2.0"
] | 81 | 2022-03-06T05:10:46.000Z | 2022-03-30T09:43:41.000Z | preprocessing/libs/utils/pytorch_ssim/__init__.py | JayYe99/UDAT | f06971ec7d6794d0572864cf9fd6d49a66201303 | [
"Apache-2.0"
] | null | null | null | preprocessing/libs/utils/pytorch_ssim/__init__.py | JayYe99/UDAT | f06971ec7d6794d0572864cf9fd6d49a66201303 | [
"Apache-2.0"
] | 13 | 2022-03-07T12:11:44.000Z | 2022-03-31T06:16:01.000Z | import torch
import torch.nn.functional as F
from torch.autograd import Variable
import numpy as np
from math import exp
def gaussian(window_size, sigma):
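    # Build a 1-D Gaussian kernel of length window_size centred at window_size//2, normalised to sum to 1.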
gauss = torch.Tensor([exp(-(x - window_size//2)**2/float(2*sigma**2)) for x in range(window_size)])
return gauss/gauss.sum()
def create_window(window_size, channel):
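    # Outer product of the 1-D Gaussian (sigma=1.5) gives a 2-D window, replicated per channel so it
    # can be applied as a grouped (depthwise) convolution filter.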
_1D_window = gaussian(window_size, 1.5).unsqueeze(1)
_2D_window = _1D_window.mm(_1D_window.t()).float().unsqueeze(0).unsqueeze(0)
window = Variable(_2D_window.expand(channel, 1, window_size, window_size).contiguous())
return window
def _ssim(img1, img2, window, window_size, channel, size_average = True):
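    # Local means, variances and covariance are estimated by convolving with the Gaussian window
    # (groups=channel keeps channels independent); C1 and C2 are the usual SSIM stability constants
    # for inputs scaled to [0, 1].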
mu1 = F.conv2d(img1, window, padding = window_size//2, groups = channel)
mu2 = F.conv2d(img2, window, padding = window_size//2, groups = channel)
mu1_sq = mu1.pow(2)
mu2_sq = mu2.pow(2)
mu1_mu2 = mu1*mu2
sigma1_sq = F.conv2d(img1*img1, window, padding = window_size//2, groups = channel) - mu1_sq
sigma2_sq = F.conv2d(img2*img2, window, padding = window_size//2, groups = channel) - mu2_sq
sigma12 = F.conv2d(img1*img2, window, padding = window_size//2, groups = channel) - mu1_mu2
C1 = 0.01**2
C2 = 0.03**2
ssim_map = ((2*mu1_mu2 + C1)*(2*sigma12 + C2))/((mu1_sq + mu2_sq + C1)*(sigma1_sq + sigma2_sq + C2))
if size_average:
return ssim_map.mean()
else:
return ssim_map.mean(1).mean(1).mean(1)
class SSIM(torch.nn.Module):
def __init__(self, window_size = 11, size_average = True):
super(SSIM, self).__init__()
self.window_size = window_size
self.size_average = size_average
self.channel = 1
self.window = create_window(window_size, self.channel)
def forward(self, img1, img2):
(_, channel, _, _) = img1.size()
if channel == self.channel and self.window.data.type() == img1.data.type():
window = self.window
else:
window = create_window(self.window_size, channel)
if img1.is_cuda:
window = window.cuda(img1.get_device())
window = window.type_as(img1)
self.window = window
self.channel = channel
return _ssim(img1, img2, window, self.window_size, channel, self.size_average)
def _logssim(img1, img2, window, window_size, channel, size_average = True):
mu1 = F.conv2d(img1, window, padding = window_size//2, groups = channel)
mu2 = F.conv2d(img2, window, padding = window_size//2, groups = channel)
mu1_sq = mu1.pow(2)
mu2_sq = mu2.pow(2)
mu1_mu2 = mu1*mu2
sigma1_sq = F.conv2d(img1*img1, window, padding = window_size//2, groups = channel) - mu1_sq
sigma2_sq = F.conv2d(img2*img2, window, padding = window_size//2, groups = channel) - mu2_sq
sigma12 = F.conv2d(img1*img2, window, padding = window_size//2, groups = channel) - mu1_mu2
C1 = 0.01**2
C2 = 0.03**2
ssim_map = ((2*mu1_mu2 + C1)*(2*sigma12 + C2))/((mu1_sq + mu2_sq + C1)*(sigma1_sq + sigma2_sq + C2))
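    # Unlike _ssim, rescale the SSIM map to [0, 1] and take the negative log so poorly matched
    # regions dominate the resulting value.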
ssim_map = (ssim_map - torch.min(ssim_map))/(torch.max(ssim_map)-torch.min(ssim_map))
ssim_map = -torch.log(ssim_map + 1e-8)
if size_average:
return ssim_map.mean()
else:
return ssim_map.mean(1).mean(1).mean(1)
class LOGSSIM(torch.nn.Module):
def __init__(self, window_size = 11, size_average = True):
super(LOGSSIM, self).__init__()
self.window_size = window_size
self.size_average = size_average
self.channel = 1
self.window = create_window(window_size, self.channel)
def forward(self, img1, img2):
(_, channel, _, _) = img1.size()
if channel == self.channel and self.window.data.type() == img1.data.type():
window = self.window
else:
window = create_window(self.window_size, channel)
if img1.is_cuda:
window = window.cuda(img1.get_device())
window = window.type_as(img1)
self.window = window
self.channel = channel
return _logssim(img1, img2, window, self.window_size, channel, self.size_average)
def ssim(img1, img2, window_size = 11, size_average = True):
(_, channel, _, _) = img1.size()
window = create_window(window_size, channel)
if img1.is_cuda:
window = window.cuda(img1.get_device())
window = window.type_as(img1)
return _ssim(img1, img2, window, window_size, channel, size_average)
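# Minimal usage sketch (not part of the original module): compare two (N, C, H, W) batches with
# values in [0, 1]; identical inputs give an SSIM close to 1.0.
#   img1 = torch.rand(1, 3, 64, 64)
#   img2 = img1.clone()
#   print(ssim(img1, img2).item())                   # functional form
#   print(SSIM(window_size=11)(img1, img2).item())   # module form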
| 35.576 | 104 | 0.643805 |
4a21786d6c1ca42dae10931b8df3aec17881f7c5 | 111 | py | Python | Algorithms/ConsecutiveOnesInBinary/Solution.py | shams-sam/logic-lab | 559990b0c3d44bfe59d32dcb8038a0cab3efc26e | [
"MIT"
] | null | null | null | Algorithms/ConsecutiveOnesInBinary/Solution.py | shams-sam/logic-lab | 559990b0c3d44bfe59d32dcb8038a0cab3efc26e | [
"MIT"
] | null | null | null | Algorithms/ConsecutiveOnesInBinary/Solution.py | shams-sam/logic-lab | 559990b0c3d44bfe59d32dcb8038a0cab3efc26e | [
"MIT"
] | null | null | null | #!/bin/python
import sys
n = int(raw_input().strip())
ans = 0
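# Each `n &= n >> 1` shortens every run of consecutive 1-bits by one, so the number of iterations
# until n reaches 0 equals the length of the longest run of 1s in the binary representation.
# (Python 2 style I/O: raw_input and the print statement.)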
while(n>0):
n &= n>>1
ans += 1
print ans
| 12.333333 | 28 | 0.558559 |
4a2178745b23e9b6ddd70575d7a9445eac5acda7 | 2,291 | py | Python | example/image-classification/benchmark_score.py | jrosebr1/mxnet | f2b7e0522c099d14f9f5a46c354fe0971db47d97 | [
"Apache-2.0"
] | 5 | 2017-01-21T08:53:55.000Z | 2021-08-20T13:06:43.000Z | example/image-classification/benchmark_score.py | jrosebr1/mxnet | f2b7e0522c099d14f9f5a46c354fe0971db47d97 | [
"Apache-2.0"
] | null | null | null | example/image-classification/benchmark_score.py | jrosebr1/mxnet | f2b7e0522c099d14f9f5a46c354fe0971db47d97 | [
"Apache-2.0"
] | 5 | 2017-02-20T18:55:16.000Z | 2020-04-17T21:34:22.000Z | """
Benchmark the scoring performance on various CNNs
"""
from common import find_mxnet
from common.util import get_gpus
import mxnet as mx
from importlib import import_module
import logging
import time
import numpy as np
logging.basicConfig(level=logging.DEBUG)
def get_symbol(network, batch_size):
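    # Pick the input resolution for the network, resolve the `symbol.<network>` module
    # (handling the resnet-<layers> naming), and return the symbol plus its data shape.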
image_shape = (3,299,299) if network == 'inception-v3' else (3,224,224)
num_layers = 0
if 'resnet' in network:
num_layers = int(network.split('-')[1])
network = 'resnet'
net = import_module('symbol.'+network)
sym = net.get_symbol(num_classes = 1000,
image_shape = ','.join([str(i) for i in image_shape]),
num_layers = num_layers)
return (sym, [('data', (batch_size,)+image_shape)])
def score(network, dev, batch_size, num_batches):
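    # Bind the symbol into a Module with Xavier-initialised weights, feed random data, and
    # measure forward-only throughput (images per second) after a short warm-up.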
# get mod
sym, data_shape = get_symbol(network, batch_size)
mod = mx.mod.Module(symbol=sym, context=dev)
mod.bind(for_training = False,
inputs_need_grad = False,
data_shapes = data_shape)
mod.init_params(initializer=mx.init.Xavier(magnitude=2.))
# get data
data = [mx.random.uniform(-1.0, 1.0, shape=shape, ctx=dev) for _, shape in mod.data_shapes]
batch = mx.io.DataBatch(data, []) # empty label
# run
dry_run = 5 # use 5 iterations to warm up
for i in range(dry_run+num_batches):
if i == dry_run:
tic = time.time()
mod.forward(batch, is_train=False)
for output in mod.get_outputs():
output.wait_to_read()
# return num images per second
return num_batches*batch_size/(time.time() - tic)
if __name__ == '__main__':
networks = ['alexnet', 'vgg', 'inception-bn', 'inception-v3', 'resnet-50', 'resnet-152']
devs = [mx.gpu(0)] if len(get_gpus()) > 0 else []
# Enable USE_MKL2017_EXPERIMENTAL for better CPU performance
devs.append(mx.cpu())
batch_sizes = [1, 2, 4, 8, 16, 32]
for net in networks:
logging.info('network: %s', net)
for d in devs:
logging.info('device: %s', d)
for b in batch_sizes:
speed = score(network=net, dev=d, batch_size=b, num_batches=10)
logging.info('batch size %2d, image/sec: %f', b, speed)
| 35.246154 | 95 | 0.62069 |
4a217927837f7d3eae136a6f3a07c4e4a9df2cfc | 30,323 | py | Python | factutils/rts/main.py | makbn/eval-runner-docker | ed5650a3fe1c7d1c9a6d698608cb125d4aafd9dc | [
"Apache-2.0"
] | 2 | 2021-06-03T14:27:27.000Z | 2021-06-03T20:15:02.000Z | factutils/rts/main.py | makbn/eval-runner-docker | ed5650a3fe1c7d1c9a6d698608cb125d4aafd9dc | [
"Apache-2.0"
] | null | null | null | factutils/rts/main.py | makbn/eval-runner-docker | ed5650a3fe1c7d1c9a6d698608cb125d4aafd9dc | [
"Apache-2.0"
] | 3 | 2021-06-03T20:14:29.000Z | 2021-11-23T07:51:19.000Z | #!/usr/bin/env python3
import argparse
import configparser
import csv
import json
import logging
import os
import re
import shlex
import shutil
import subprocess
from collections import namedtuple
from dataclasses import dataclass
from pathlib import Path
from pprint import pprint
from typing import Dict, List, Set, Tuple, Union, Optional
from hislicing.env_const import FactFmt
from hislicing.run_cslicer import collect_deps_diff_facts
from rts import testinfo, extract_data
from run_grok import run_grok, prepare_misc_dir
from util import init_logging, deprecation, restore_clean_repo
logger = logging.getLogger(__name__)
# BugInfo=namedtuple("BugInfo", bugid, rev_bug, rev_fix)
RevPair = namedtuple("RevPair", ["rev_bug", "rev_fix"])
mvn_test_output = re.compile(r"Tests run: (\d+), Failures: (\d+), Errors: (\d+), Skipped: (\d+)")
@dataclass()
class PathsCfg:
base_path: Path = Path("~/Projects").expanduser()
dfcts4j_path: Path = None
projects_data_path: Path = None
cslicer_properties: Path = None
exec_grok_base: Path = Path("~/.local/tmp/run_grok").expanduser()
facts_path: Path = None
grok_log_path: Path = None
grok_results_path: Path = None
def populate_others(self):
self.dfcts4j_path = self.base_path / "defects4j"
self.projects_data_path = self.dfcts4j_path / "framework/projects"
self.cslicer_properties = self.base_path / "rts-exp/configs"
self.facts_path = self.exec_grok_base / "facts"
self.grok_log_path = self.exec_grok_base / "grok_logs"
self.grok_results_path = self.exec_grok_base / "grok_results"
PATHS_CFG = PathsCfg()
# FACTS_PATH = Path("~/Projects/rts-exp/facts").expanduser()
def get_projects(debug: bool = True) -> Dict[str, Dict[str, Union[str, list]]]:
if debug:
return {
"Lang": {
"local_repo": "commons-lang",
"bug_id": [28]
}
}
# return {
# "Lang": {
# "local_repo": "commons-lang",
# "bug_id": [28]
# }
# }
else:
return {
"Lang": {
"local_repo": "commons-lang",
"bug_id": list(range(28, 54))
},
"Math": {
"local_repo": "commons-math",
"bug_id": list(range(5, 105))
# "bug_id": list(filter(lambda x: x != 97, range(5, 105)))
},
"Time": {
"local_repo": "joda-time",
"bug_id": list(filter(lambda x: x != 21, range(1, 27)))
}
}
def get_rev_id(csv_data_file: Path, bug_ids: List[int]) -> Dict[int, RevPair]:
bugs = {}
bug_id_set: Set[int] = set(bug_ids)
with csv_data_file.open() as f_csv:
csvreader = csv.reader(f_csv, delimiter=',')
_ = next(csvreader) # ignore header
for row in csvreader:
bid: int = int(row[0])
if bid in bug_id_set:
bugs[bid] = RevPair(row[1], row[2])
return bugs
def get_class_from_testname(clazz: str) -> str:
"""
get the test class from the full test method name
:param clazz: e.g. org.apache.commons.lang3.text.translate.NumericEntityUnescaperTest::testSupplementaryUnescaping
:return: e.g. org.apache.commons.lang3.text.translate.NumericEntityUnescaperTest
"""
return clazz[:clazz.rfind("::")]
def get_trigger_tests(data_file: Path) -> List[str]:
"""
Get the list of trigger tests from data_file
:param data_file: path to the trigger_tests stacktrace file,
usually projects/ProjectId/trigger_tests/bid
:return: the list of trigger tests
"""
with data_file.open() as df:
logger.info(f"Read trigger tests from {data_file}")
return [line[4:].strip() for line in filter(lambda x: x.startswith("--- "), df)]
def get_trigger_tests_path(project_id: str, bid: int) -> Path:
"""
Given the project id and bug id, return path to the file containing stack traces
:param project_id: e.g. Lang
:param bid: e.g. 28 in bug Lang-28
:return: the path to the file containing trigger test names and stack traces
"""
return PATHS_CFG.projects_data_path / project_id / "trigger_tests" / str(bid)
def get_relevant_tests(data_file: Path) -> List[str]:
with data_file.open() as df:
logger.info(f"Read relevant test classes from {data_file}")
return [row.strip() for row in df]
def get_relevant_tests_path(project_id: str, bid: int) -> Path:
"""
Given the project id and bug id, return path to the file containing relevant tests
:param project_id: e.g. Lang
:param bid: e.g. 28 in bug Lang-28
:return: the path to the file containing relevant tests, one test on each line
"""
return PATHS_CFG.projects_data_path / project_id / "relevant_tests" / str(bid)
def verify_relevant_tests(projects: dict):
for p, v in projects.items():
logger.info(f"Start: verify relevant tests on project {p}")
for bid in v["bug_id"]:
data_file = get_relevant_tests_path(p, bid)
expected: set = set(get_relevant_tests(data_file))
actual: set = read_test_classes_from_grok_result(PATHS_CFG.grok_results_path / f"{p}-{bid}.affected")
logger.info(f"[{p}-{bid}] <- Verify relevant tests.")
# print(f"Expected:\n{expected}")
# print(f"Actual:\n{actual}")
if expected - actual:
print(f"Unsafe! expect {expected - actual}")
# if actual - expected:
# print(f"actual is more: {actual-expected}")
def verify_trigger_testclass(projects: dict) -> Dict[str, Dict[str, Set]]:
"""
Verify grok result against trigger_tests csv files in defects4j
:param projects: the dict containing project ids and bug ids
:return: the dict containing test classes in grok results and class names of
trigger_tests for each project
e.g. {"Lang-28":{"expected": {testClass1}, "actual": {testClass2}}}
"""
results = {}
for p, v in projects.items():
logger.info(f"START: verify trigger tests on project [{p}]")
for bid in v["bug_id"]:
pbid = f"{p}-{bid}"
logger.info(f"=> START: verify trigger tests on bug [{pbid}]")
data_file = get_trigger_tests_path(p, bid)
expected: Set[str] = set([get_class_from_testname(x) for x in get_trigger_tests(data_file)])
actual: Set[str] = read_test_classes_from_grok_result(PATHS_CFG.grok_results_path / f"{p}-{bid}.affected")
unsafe: set = expected - actual
results[pbid] = {"expected": expected, "actual": actual, "unsafe": unsafe}
logger.info(f"[{pbid}] <- Check safety property of grok results.")
if unsafe:
print(f"Unsafe! expect {expected - actual}")
return results
def get_bugs_info(pid: str, bids: List[int]) -> Dict[int, RevPair]:
csv_data_file: Path = PATHS_CFG.projects_data_path / pid / "active-bugs.csv"
bugs: Dict[int, RevPair] = get_rev_id(csv_data_file, bids)
return bugs
def batch_count_methods(projects, results: Dict[str, Dict[str, Set]], out_f: Path, resume: bool) -> Dict[str, int]:
"""
:param results: the dict returned by verify_trigger_testclass()
:return: dict e.g., {"Lang-28": 20}
"""
if resume:
try:
with out_f.open('r') as existf:
existing = json.load(existf)
except FileNotFoundError:
logger.info(f"{out_f} does not exist, re-count all")
existing = dict()
else:
existing = dict()
bid_methods = existing # copy existing data
for p, v in projects.items():
bugs_info: Dict[int, RevPair] = get_bugs_info(p, v["bug_id"])
repo_path: Path = get_repo_path(v["local_repo"])
for bid, rev_pair in bugs_info.items():
pbid: str = f"{p}-{bid}"
if pbid in existing.keys():
logger.info(f"[{pbid}] <- Read from existing data on disk, skip.")
continue
logger.info(f"[{pbid}] <- Count test methods of affected test classes.")
try:
classes: Set[str] = results.get(pbid).get("actual")
except KeyError:
logger.warning(f"Skip {pbid}, which does not exist in verification results")
continue
build_path: Path = handle_special_dirstruct(repo_path, p, bid)
logger.info(f"[{pbid}] <- {len(classes)} affected classes")
count = count_methods(classes, repo_path, build_path, rev_pair.rev_fix)
# if pbid == "Math-97":
# count = count_methods(classes, repo_path, build_path, rev_pair.rev_fix)
# else:
# count = count_methods_by_runtest(classes, repo_path, build_path, rev_pair.rev_fix)
# if count == 0:
# # count = fix_methods_count(pbid)
# # logger.info(f"[{pbid}] <- Fix methods count with manually checked data: {0} => {count}")
# count = count_methods(classes, repo_path, build_path, rev_pair.rev_fix)
# logger.info(f"[{pbid}] <- Count methods without actual running: {count}")
bid_methods[pbid] = count
            # Workaround: re-write the file in each iteration
with out_f.open('w') as outf:
json.dump(bid_methods, outf, indent=2)
# for bid, v in results.items():
# actual_classes: Set[str] = v['actual']
# bid_methods[bid] = count_methods(actual_classes
return bid_methods
def find_class_path(base_path: Path, class_name: str) -> Tuple[bool, str]:
"""
Get class path from class name. It should work with nested classes.
    :param base_path: path to the test class directory, usually {project_repo}/target/test-classes
:param class_name: fully qualified class name read from grok result
    :return: a tuple (bool, str). The first value indicates whether the class file
is found. The second value is the path to class file if found, or the last tried
path if not found.
"""
class_path: str = str(base_path / f"{class_name.replace('.', '/')}.class")
while not os.path.isfile(class_path) and '/' in class_path:
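        # Assume the missing component is a nested class: fold the last '/' into '$'
        # (e.g. Outer/Inner.class -> Outer$Inner.class) and try again.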
class_path = r'$'.join(class_path.rsplit('/', 1))
if not os.path.isfile(class_path):
return False, class_path
return True, class_path
def count_methods(classes: Set[str], repo_path: Path, build_path: Path, rev: str) -> int:
"""
    :param rev: git revision to check out before compiling the tests
    :param build_path: directory in which the maven build is run
    :param repo_path: path to the local git repository
    :param classes: the set of fully qualified test class names
    :return: total number of test methods found in the given classes
"""
restore_clean_repo(str(repo_path))
# build_repo(rev, repo_path, build_path)
compile_tests(rev, repo_path, build_path)
class_base_path: Path = build_path / "target" / "test-classes"
count = 0
for c in classes:
found, cpath = find_class_path(class_base_path, c)
if not found:
logger.warning(f"Cannot find {c}. Skip.")
continue
methods: List[str] = testinfo.extract_test_methods(cpath)
count += len(methods)
return count
def fix_methods_count(pbid: str) -> int:
"""
    For these versions, `mvn test -Dtest=testClassNames` would run 0 tests, which is possibly
    caused by issues with the surefire plugin.
    Surefire-plugin version changes: 2.12.4 (fine) -> 2.3 (bad) -> 2.4.2 (fine)
    To get the following data, we manually add <version>2.4.2</version> in pom.xml and run mvn test -Dtest=...
"""
manual_data = {'Lang-47': 119, 'Lang-48': 281, 'Lang-49': 25, 'Lang-50': 13, 'Lang-51': 44, 'Lang-52': 74}
if pbid not in manual_data:
logger.warning(f"Cannot fix {pbid}, need manual inspection.")
return manual_data.get(pbid, 0)
def count_methods_by_runtest(classes: Set[str], repo_path: Path, build_path: Path, rev: str) -> int:
restore_clean_repo(str(repo_path))
cli_output: str = run_tests(rev, repo_path, build_path, classes)
num_run: int = 0
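    # Take the largest "Tests run: N" figure printed by surefire; that is the number of executed test methods.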
for m in mvn_test_output.findall(cli_output):
logger.debug(f"m={m}")
num_run = max(int(m[0]), num_run)
return num_run
def tex_escape(input_str: str) -> str:
return input_str.replace('_', '\\_')
def simplify_class_name(fullname: str) -> str:
replace_dict = {
"org.apache.commons.lang": "lang",
"org.apache.commons.math": "math",
"org.joda.time": "time"
}
for k, v in replace_dict.items():
if fullname.startswith(k):
return fullname.replace(k, v, 1)
logger.warning("Cannot simplify class name: {fullname}")
return fullname
def simplify_class_name_more(fullname: str) -> str:
return ".".join(fullname.rsplit(".", 2)[-2:])
def process_names(name: str) -> str:
return simplify_class_name(tex_escape(name))
def output_result_table(results: Dict[str, Dict[str, Set]], out_file: Path):
""" *DEPRECATED*, use write_num_test_method() now
Output a LaTeX source file for the result table (list each class name)
:param results: the dict returned by verify_trigger_testclass()
:param out_file: path to the output (LaTeX source)
:return: None
"""
deprecation("The 'output_result_table' method for listing all class names is deprecated, "
"use write_num_test_class() instead")
str_write_to_file = "\\begin{footnotesize}\n\\centering\n\\begin{longtable}{lll}\n\\caption{Results}\n\\label{tab:rts-result}\\\\ \\toprule\n"
str_write_to_file += "\\textbf{Project ID} & \\textbf{Trigger Test Classes} & \\textbf{Grok Results} \\\\ \\midrule\n"
for bid, v in results.items():
expected, actual = tuple(sorted(s) for s in [v["expected"], v["actual"]])
expected_num, actual_num = len(expected), len(actual)
rows = max(expected_num, actual_num)
expected_first = process_names(expected[0]) if expected_num > 0 else " "
actual_first = process_names(actual[0]) if actual_num > 0 else " "
str_write_to_file += f"\\multirow{{{rows}}}{{*}}{{{bid}}} & {expected_first} & {actual_first} \\\\\n"
for i in range(1, rows):
expected_i = process_names(expected[i]) if i < expected_num else " "
actual_i = process_names(actual[i]) if i < actual_num else " "
str_write_to_file += f" & {expected_i} & {actual_i} \\\\\n"
str_write_to_file += "\\midrule\n"
str_write_to_file = str_write_to_file[:-9]
str_write_to_file += "\\bottomrule\n\\end{longtable}\n\\end{footnotesize}\n"
with out_file.open("w") as of:
of.write(str_write_to_file)
def write_num_test_method(bid_methodcount: Dict[str, int], out_file: Path):
"""
Output a LaTeX source file for the result table (numbers of methods)
:param bid_methodcount: dict returned by batch_count_method(), e.g. {"Lang-28": 20}
:param out_file: path to the output (LaTeX source)
:return: None
"""
existing_data = extract_data.read_tex(Path("rts/rts_data/defects4j-numbers.tex"))
str_write_to_file = "\\begin{footnotesize}\n\\centering\n\\begin{longtable}{lrrrrrrc}\n" \
"\\caption{Number of Methods}\n\\label{tab:rts-methods-num}\\\\ \\toprule\n"
str_write_to_file += "\\textbf{Project ID} & \\textbf{RetestAll} & \\textbf{Ekstazi} & \\textbf{Clover} " \
"& \\textbf{STARTS} & \\textbf{HyRTS} & \\textbf{Facts} & \\textbf{worse than?} \\\\ \\midrule\n"
for bid, count in bid_methodcount.items():
tool_nums: List[int] = []
for t in extract_data.RTStool:
bid_in_data = "".join(bid.split("-")).lower()
num = extract_data.get_nums(existing_data, bid_in_data, t, extract_data.ToolResult.NumOfRunTests)
tool_nums.append(num)
tool_nums_str = " & ".join([str(x) for x in tool_nums])
worse_than: int = len(list(filter(lambda x: x < count, tool_nums)))
str_write_to_file += f"{bid} & {tool_nums_str} & {count} & {worse_than}\\\\\n"
str_write_to_file += "\\bottomrule\n\\end{longtable}\n\\end{footnotesize}\n"
with out_file.open("w") as of:
of.write(str_write_to_file)
def calc_percent(bid_methodcount: Dict[str, int]):
existing_data = extract_data.read_tex(Path("rts/rts_data/defects4j-numbers.tex"))
tool_set = {"ekstazi", "starts", "clover", "notool"}
percentage: Dict[str, Dict[str, float]] = {t: dict() for t in tool_set - {"notool"}}
percentage["fact"] = dict()
for bid, count in bid_methodcount.items():
bid_in_data = "".join(bid.split("-")).lower()
nums: Dict[str, int] = dict()
for t in tool_set:
num = extract_data.get_nums(existing_data, bid_in_data, extract_data.RTStool[t],
extract_data.ToolResult.NumOfRunTests)
nums[t] = num
for t in tool_set - {"notool"}:
percentage[t][bid] = nums[t] / nums["notool"]
percentage["fact"][bid] = count / nums["notool"]
# pprint(percentage)
avg_per_project: Dict[str, Dict[str, float]] = {t: dict() for t in tool_set - {"notool"} | {"fact"}}
for t, d in percentage.items():
# avg_per_project[t] = {
proj = "Lang", "Math", "Time"
count_dict: Dict[str, int] = {x: 0 for x in proj}
for p in proj:
for bid, percent in d.items():
if bid.startswith(p):
count_dict[p] += 1
if p in avg_per_project[t]:
avg_per_project[t][p] += d[bid]
else:
avg_per_project[t][p] = d[bid]
pprint(count_dict)
for p in "Lang", "Math", "Time":
if p not in avg_per_project[t]:
logger.info(f"{p}: data not available in current run, skip")
continue
avg_per_project[t][p] /= count_dict[p]
pprint(avg_per_project)
def write_num_test_class(results: Dict[str, Dict[str, Set]], out_file: Path):
"""
Output a LaTeX source file for the result table (numbers of classes only)
:param results: the dict returned by verify_trigger_testclass()
:param out_file: path to the output (LaTeX source)
:return: None
"""
str_write_to_file = "\\begin{footnotesize}\n\\centering\n\\begin{longtable}{lccc}\n\\caption{Results}\n\\label{tab:rts-result-num}\\\\ \\toprule\n"
str_write_to_file += "\\textbf{Project ID} & \\textbf{\# Trigger} & \\textbf{\# Grok} & \\textbf{Unsafe?} \\\\ \\midrule\n"
for bid, v in results.items():
# expected, actual, unsafe = tuple(sorted(s) for s in [v["expected"], v["actual"]])
expected_num, actual_num, unsafe_num = len(v["expected"]), len(v["actual"]), len(v["unsafe"])
unsafe_indicator = "" if unsafe_num == 0 else unsafe_num
str_write_to_file += f"{bid} & {expected_num} & {actual_num} & {unsafe_indicator}\\\\\n"
str_write_to_file += "\\bottomrule\n\\end{longtable}\n\\end{footnotesize}\n"
with out_file.open("w") as of:
of.write(str_write_to_file)
def read_test_classes_from_grok_result(grok_out: Path) -> Set[str]:
if not grok_out.exists():
logger.error(f"{grok_out} does not exist, skip")
return set()
with grok_out.open() as df:
logger.info(f"Read grok output from {grok_out}")
clazz = set([row.strip() for row in df])
return clazz
def generate_config_file(repo_path: Path, build_path: Path, rev_pair: RevPair, out_file: Path):
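    # Write a CSlicer .properties file pointing at the git repo, the bug/fix revision pair,
    # and the compiled class root.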
if not build_path:
build_path = repo_path
lines = [f"repoPath = {repo_path / '.git'}\n",
f"startCommit = {rev_pair.rev_bug}\n",
f"endCommit = {rev_pair.rev_fix}\n",
f"classRoot = {build_path / 'target/classes'}"]
with out_file.open("w") as f_out:
f_out.writelines(lines)
def get_facts_subdir(name: str) -> Path:
return PATHS_CFG.facts_path / name
def handle_special_dirstruct(repo_path: Path, pid: str, bid: int) -> Path:
if pid == "Time" and bid >= 21:
return repo_path / "JodaTime"
else:
return repo_path
def get_repo_path(subdir: str) -> Path:
return PATHS_CFG.dfcts4j_path / "project_repos" / subdir
def collect_facts(projects: dict, skip_exist: bool = False) -> None:
"""
Generate configuration files, run cslicer, store facts
:param projects: {project_name : path, bug_id_list}, see get_projects()
:param skip_exist: will check if subdir exist and skip existing facts
:return: None
"""
for p, v in projects.items():
logger.info(f"Start work on project {p}")
bugs_info = get_bugs_info(p, v["bug_id"])
repo_path: Path = get_repo_path(v["local_repo"])
for bid, rev_pair in bugs_info.items():
build_path: Path = handle_special_dirstruct(repo_path, p, bid)
pbid: str = f"{p}-{bid}"
if skip_exist and get_facts_subdir(pbid).exists():
logger.info(f"Skip collecting facts for {pbid}. (existing)")
continue
logger.info(f"Start on pair {pbid}")
out_file = PATHS_CFG.cslicer_properties / f"{pbid}.properties"
restore_clean_repo(str(repo_path))
# build buggy version
build_repo(rev_pair.rev_fix, repo_path, build_path)
# generate config file
generate_config_file(repo_path, build_path, rev_pair, out_file)
# run cslicer
collect_deps_diff_facts("/tmp/rts.log", out_file, None, FactFmt.fact)
move_facts(repo_path / ".facts", name=pbid)
def batch_post_process_diff_facts():
for subdir in PATHS_CFG.facts_path.iterdir():
if subdir.is_dir() and os.path.splitext(subdir)[1] != ".old":
diff_file = subdir / "30-diff_tuple.ta"
if not diff_file.exists():
logger.error(f"{diff_file} does not exist")
continue
post_process_diff_facts(diff_file)
def post_process_diff_facts(facts_path: Path):
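    # Append a "<ChangeType> NONE NONE" placeholder for Update/Insert/Delete so every change type
    # appears in the diff facts (keeps the grok queries from failing on a missing relation).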
with facts_path.open("a") as ff:
for x in {"Update", "Insert", "Delete"}:
ff.write(f"{x} NONE NONE\n")
def batch_run_souffle(projects: dict, dl_prog: Path, skip_exist: bool = False):
"""
TO BE IMPLEMENTED.
Run souffle on facts in batch mode
:param projects: {project_name : path, bug_id_list}, see get_projects()
:param dl_prog: path to the grok query script
:param skip_exist: will check if subdir exist and skip existing facts
:return: None
"""
pass
def batch_run_grok(projects: dict, grok_script: Path, skip_exist: bool = False):
"""
Run grok on facts, per subdir
:param projects: {project_name : path, bug_id_list}, see get_projects()
:param grok_script: path to the grok query script
:param skip_exist: will check if subdir exist and skip existing facts
:return: None
"""
for p, v in projects.items():
for bid in v["bug_id"]:
pbid: str = f"{p}-{bid}"
subdir = get_facts_subdir(pbid)
if subdir.is_dir():
grok_results_per_run = PATHS_CFG.grok_results_path / pbid
grok_log_per_run = PATHS_CFG.grok_results_path / f"{pbid}.log"
affected_file: Path = PATHS_CFG.grok_results_path / f"{pbid}.affected"
if skip_exist and affected_file.exists():
logger.info(f"Skip running grok for {pbid}. (existing)")
continue
logger.info(f"Grok on {pbid}")
run_grok(grok_script, grok_results_per_run, subdir, str(grok_log_per_run))
def run_tests(rev, repo_path, build_path, classes: Set[str]) -> str:
cwd = os.getcwd()
os.chdir(repo_path)
checkout_cmd = f'git checkout {rev}'
subprocess.run(shlex.split(checkout_cmd), capture_output=True)
os.chdir(build_path)
run_test_cmd = 'mvn test -Dtest=' + ",".join(classes)
logger.info(f"Run: {run_test_cmd}")
run = subprocess.run(shlex.split(run_test_cmd), shell=False, cwd=build_path,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if run.stderr:
logger.error(run.stderr.decode())
logger.info(f"Finish: mvn test -Dtest=...")
os.chdir(cwd)
return run.stdout.decode()
def compile_tests(rev, repo_path, build_path):
cwd = os.getcwd()
os.chdir(repo_path)
checkout_cmd = f'git checkout {rev}'
subprocess.run(shlex.split(checkout_cmd), capture_output=True)
os.chdir(build_path)
build_cmd = 'mvn test-compile'
logger.info(f"Run: {build_cmd}")
run_build = subprocess.run(shlex.split(build_cmd), shell=False, cwd=build_path,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
logger.debug(run_build.stdout.decode())
if run_build.stderr:
logger.error(run_build.stderr.decode())
logger.info(f"Finish: {build_cmd}")
os.chdir(cwd)
def build_repo(rev, repo_path, build_path):
cwd = os.getcwd()
os.chdir(repo_path)
checkout_cmd = f'git checkout {rev}'
subprocess.run(shlex.split(checkout_cmd), capture_output=True)
os.chdir(build_path)
# install_cmdline = 'mvn install -DskipTests'
install_cmdline = 'mvn test-compile'
logger.info(install_cmdline)
run_build = subprocess.run(shlex.split(install_cmdline), shell=False, cwd=build_path, capture_output=True)
logger.debug(run_build.stdout.decode())
if run_build.stderr:
logger.error(run_build.stderr.decode())
os.chdir(cwd)
def move_facts(facts_dir: Path, name: str):
dst_dir: Path = get_facts_subdir(name)
if os.path.isdir(dst_dir):
dst_dir_old = PATHS_CFG.facts_path / f"{name}.old"
logger.warning(f"Renaming existing {dst_dir} before moving in "
f"newly-generated facts, existing .old directories "
f"will be overwritten")
if os.path.isdir(dst_dir_old):
shutil.rmtree(dst_dir_old)
shutil.move(dst_dir, dst_dir_old)
shutil.move(str(facts_dir), str(dst_dir))
def handle_args():
"""handle cmdline arguments
usual procedure:
1. -f collect facts
2. --ensure-all-change-types post-process so that jgrok does not throw exception
3. -s run grok
4. -v verify
5.1 --count-class results in TeX, number of test classes
5.2.1 --count-method-json results in json, number of methods
5.2.2 --count-method-tex results in TeX, number of methods, in comparison with existing tools
optional. --resume skip existing
optional. --debug run with a small subset of projects
optional. -l specify log level
:return: arguments
"""
parser = argparse.ArgumentParser(description="Test selection on Defects4j")
parser.add_argument("-l", metavar="LOG_LEVEL", type=str)
parser.add_argument("--alt-config", metavar="CFG_FILE",
help="Override default paths using configuration in CFG_FILE")
parser.add_argument("-f", action="store_true", help="Collect facts for projects")
parser.add_argument("--resume", action="store_true",
help="Resume process, according to the existence of files/dirs. Works with -f, -s")
parser.add_argument("--ensure-all-change-types", action="store_true",
help="Ensure that all change types exist in diff facts by adding NONE facts")
parser.add_argument("--debug", action="store_true", help="Use one bug pair for testing, default: True")
parser.add_argument("-s", metavar="GROK_SCRIPT", help="Run grok with specified grok script")
parser.add_argument("-v", action="store_true", help="Verify grok results")
parser.add_argument("--count-class", metavar="OUTPUT_TEX_SRC",
help="Output TeX source for a results table")
parser.add_argument("--count-method-json", metavar="OUTPUT_JSON",
help="Count test methods of affected test classes and write OUTPUT_JSON")
parser.add_argument("--count-method-tex", nargs=2, metavar=("JSON_FILE", "OUTPUT_TEX_SRC"),
help="Read JSON and write TeX table")
parser.add_argument("--percent", metavar="JSON_FILE", help="Calculate percentage")
args = parser.parse_args()
init_logging(args.l)
logger.debug(args)
return args
def handle_env(cfg_file: Optional[str]):
global PATHS_CFG
if not cfg_file:
logger.info("--alt-config not set, use default paths")
elif not os.path.isfile(cfg_file):
logger.error(f"path provided to --alt-config ({cfg_file}) is not a file")
else:
config = configparser.ConfigParser()
config.read(cfg_file)
if 'PATHS' in config:
for x in ["base_path", "exec_grok_base"]:
if x in config['PATHS']:
setattr(PATHS_CFG, x, Path(config['PATHS'][x]))
PATHS_CFG.populate_others()
def main():
args = handle_args()
handle_env(args.alt_config)
projects = get_projects(args.debug)
if args.f:
if args.resume:
collect_facts(projects, True)
else:
collect_facts(projects)
if args.ensure_all_change_types:
batch_post_process_diff_facts()
if args.s:
prepare_misc_dir([PATHS_CFG.grok_log_path, PATHS_CFG.grok_results_path])
if args.resume:
batch_run_grok(projects, Path(args.s), True)
else:
batch_run_grok(projects, Path(args.s))
if args.v:
results = verify_trigger_testclass(projects)
if args.count_class:
write_num_test_class(results, Path(args.count_class))
if args.count_method_json:
out_f: Path = Path(args.count_method_json).resolve()
bid_methods = batch_count_methods(projects, results, out_f, args.resume)
logger.info(bid_methods)
if args.count_method_tex:
in_f: Path = Path(args.count_method_tex[0])
out_f: Path = Path(args.count_method_tex[1])
with in_f.open() as df:
bid_methods = json.load(df)
write_num_test_method(bid_methods, out_f)
if args.percent:
in_f: Path = Path(args.percent)
with in_f.open() as df:
bid_methods = json.load(df)
calc_percent(bid_methods)
if __name__ == '__main__':
main()
| 41.882597 | 151 | 0.631369 |
4a2179f218ce1be5107a1517994821455188f478 | 10,267 | py | Python | glance/tests/unit/test_versions.py | ishamibrahim/glance | ca51cb84650da93c52a26b2581f0582f0a280c11 | [
"Apache-2.0"
] | null | null | null | glance/tests/unit/test_versions.py | ishamibrahim/glance | ca51cb84650da93c52a26b2581f0582f0a280c11 | [
"Apache-2.0"
] | null | null | null | glance/tests/unit/test_versions.py | ishamibrahim/glance | ca51cb84650da93c52a26b2581f0582f0a280c11 | [
"Apache-2.0"
] | null | null | null | # Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils
from six.moves import http_client as http
import webob
from glance.api.middleware import version_negotiation
from glance.api import versions
from glance.common.wsgi import Request as WsgiRequest
from glance.tests.unit import base
class VersionsTest(base.IsolatedUnitTest):
"""Test the version information returned from the API service."""
def _get_versions_list(self, url):
versions = [
{
'id': 'v2.6',
'status': 'CURRENT',
'links': [{'rel': 'self',
'href': '%s/v2/' % url}],
},
{
'id': 'v2.5',
'status': 'SUPPORTED',
'links': [{'rel': 'self',
'href': '%s/v2/' % url}],
},
{
'id': 'v2.4',
'status': 'SUPPORTED',
'links': [{'rel': 'self',
'href': '%s/v2/' % url}],
},
{
'id': 'v2.3',
'status': 'SUPPORTED',
'links': [{'rel': 'self',
'href': '%s/v2/' % url}],
},
{
'id': 'v2.2',
'status': 'SUPPORTED',
'links': [{'rel': 'self',
'href': '%s/v2/' % url}],
},
{
'id': 'v2.1',
'status': 'SUPPORTED',
'links': [{'rel': 'self',
'href': '%s/v2/' % url}],
},
{
'id': 'v2.0',
'status': 'SUPPORTED',
'links': [{'rel': 'self',
'href': '%s/v2/' % url}],
},
{
'id': 'v1.1',
'status': 'DEPRECATED',
'links': [{'rel': 'self',
'href': '%s/v1/' % url}],
},
{
'id': 'v1.0',
'status': 'DEPRECATED',
'links': [{'rel': 'self',
'href': '%s/v1/' % url}],
},
]
return versions
def test_get_version_list(self):
req = webob.Request.blank('/', base_url='http://127.0.0.1:9292/')
req.accept = 'application/json'
self.config(bind_host='127.0.0.1', bind_port=9292)
res = versions.Controller().index(req)
self.assertEqual(http.MULTIPLE_CHOICES, res.status_int)
self.assertEqual('application/json', res.content_type)
results = jsonutils.loads(res.body)['versions']
expected = self._get_versions_list('http://127.0.0.1:9292')
self.assertEqual(expected, results)
def test_get_version_list_public_endpoint(self):
req = webob.Request.blank('/', base_url='http://127.0.0.1:9292/')
req.accept = 'application/json'
self.config(bind_host='127.0.0.1', bind_port=9292,
public_endpoint='https://example.com:9292')
res = versions.Controller().index(req)
self.assertEqual(http.MULTIPLE_CHOICES, res.status_int)
self.assertEqual('application/json', res.content_type)
results = jsonutils.loads(res.body)['versions']
expected = self._get_versions_list('https://example.com:9292')
self.assertEqual(expected, results)
def test_get_version_list_secure_proxy_ssl_header(self):
self.config(secure_proxy_ssl_header='HTTP_X_FORWARDED_PROTO')
url = 'http://localhost:9292'
environ = webob.request.environ_from_url(url)
req = WsgiRequest(environ)
res = versions.Controller().index(req)
self.assertEqual(http.MULTIPLE_CHOICES, res.status_int)
self.assertEqual('application/json', res.content_type)
results = jsonutils.loads(res.body)['versions']
expected = self._get_versions_list(url)
self.assertEqual(expected, results)
def test_get_version_list_secure_proxy_ssl_header_https(self):
self.config(secure_proxy_ssl_header='HTTP_X_FORWARDED_PROTO')
url = 'http://localhost:9292'
ssl_url = 'https://localhost:9292'
environ = webob.request.environ_from_url(url)
environ['HTTP_X_FORWARDED_PROTO'] = "https"
req = WsgiRequest(environ)
res = versions.Controller().index(req)
self.assertEqual(http.MULTIPLE_CHOICES, res.status_int)
self.assertEqual('application/json', res.content_type)
results = jsonutils.loads(res.body)['versions']
expected = self._get_versions_list(ssl_url)
self.assertEqual(expected, results)
class VersionNegotiationTest(base.IsolatedUnitTest):
def setUp(self):
super(VersionNegotiationTest, self).setUp()
self.middleware = version_negotiation.VersionNegotiationFilter(None)
def test_request_url_v1(self):
request = webob.Request.blank('/v1/images')
self.middleware.process_request(request)
self.assertEqual('/v1/images', request.path_info)
def test_request_url_v1_0(self):
request = webob.Request.blank('/v1.0/images')
self.middleware.process_request(request)
self.assertEqual('/v1/images', request.path_info)
def test_request_url_v1_1(self):
request = webob.Request.blank('/v1.1/images')
self.middleware.process_request(request)
self.assertEqual('/v1/images', request.path_info)
def test_request_accept_v1(self):
request = webob.Request.blank('/images')
request.headers = {'accept': 'application/vnd.openstack.images-v1'}
self.middleware.process_request(request)
self.assertEqual('/v1/images', request.path_info)
def test_request_url_v2(self):
request = webob.Request.blank('/v2/images')
self.middleware.process_request(request)
self.assertEqual('/v2/images', request.path_info)
def test_request_url_v2_0(self):
request = webob.Request.blank('/v2.0/images')
self.middleware.process_request(request)
self.assertEqual('/v2/images', request.path_info)
def test_request_url_v2_1(self):
request = webob.Request.blank('/v2.1/images')
self.middleware.process_request(request)
self.assertEqual('/v2/images', request.path_info)
def test_request_url_v2_2(self):
request = webob.Request.blank('/v2.2/images')
self.middleware.process_request(request)
self.assertEqual('/v2/images', request.path_info)
def test_request_url_v2_3(self):
request = webob.Request.blank('/v2.3/images')
self.middleware.process_request(request)
self.assertEqual('/v2/images', request.path_info)
def test_request_url_v2_4(self):
request = webob.Request.blank('/v2.4/images')
self.middleware.process_request(request)
self.assertEqual('/v2/images', request.path_info)
def test_request_url_v2_5(self):
request = webob.Request.blank('/v2.5/images')
self.middleware.process_request(request)
self.assertEqual('/v2/images', request.path_info)
def test_request_url_v2_6(self):
request = webob.Request.blank('/v2.6/images')
self.middleware.process_request(request)
self.assertEqual('/v2/images', request.path_info)
def test_request_url_v2_7_unsupported(self):
request = webob.Request.blank('/v2.7/images')
resp = self.middleware.process_request(request)
self.assertIsInstance(resp, versions.Controller)
def test_request_url_v2_7_unsupported_EXPERIMENTAL(self):
request = webob.Request.blank('/v2.7/images')
self.config(enable_image_import=True)
resp = self.middleware.process_request(request)
self.assertIsInstance(resp, versions.Controller)
class VersionsAndNegotiationTest(VersionNegotiationTest, VersionsTest):
"""
Test that versions mentioned in the versions response are correctly
negotiated.
"""
def _get_list_of_version_ids(self, status):
request = webob.Request.blank('/')
request.accept = 'application/json'
response = versions.Controller().index(request)
v_list = jsonutils.loads(response.body)['versions']
return [v['id'] for v in v_list if v['status'] == status]
def _assert_version_is_negotiated(self, version_id):
request = webob.Request.blank("/%s/images" % version_id)
self.middleware.process_request(request)
major = version_id.split('.', 1)[0]
expected = "/%s/images" % major
self.assertEqual(expected, request.path_info)
def test_current_is_negotiated(self):
# NOTE(rosmaita): Bug 1609571: the versions response was correct, but
# the negotiation had not been updated for the CURRENT version.
to_check = self._get_list_of_version_ids('CURRENT')
self.assertTrue(to_check)
for version_id in to_check:
self._assert_version_is_negotiated(version_id)
def test_supported_is_negotiated(self):
to_check = self._get_list_of_version_ids('SUPPORTED')
for version_id in to_check:
self._assert_version_is_negotiated(version_id)
def test_deprecated_is_negotiated(self):
to_check = self._get_list_of_version_ids('DEPRECATED')
for version_id in to_check:
self._assert_version_is_negotiated(version_id)
def test_experimental_is_negotiated(self):
to_check = self._get_list_of_version_ids('EXPERIMENTAL')
for version_id in to_check:
self._assert_version_is_negotiated(version_id)
| 39.337165 | 78 | 0.615662 |
4a217a2821f0feb41f37c2c7394c91ae3496d8b6 | 8,583 | py | Python | cogs/server-management2/role.py | yaakiyu/rt-bot | f68bca95c516e08c31ecc846524dcea4c8ba1503 | [
"BSD-4-Clause"
] | null | null | null | cogs/server-management2/role.py | yaakiyu/rt-bot | f68bca95c516e08c31ecc846524dcea4c8ba1503 | [
"BSD-4-Clause"
] | null | null | null | cogs/server-management2/role.py | yaakiyu/rt-bot | f68bca95c516e08c31ecc846524dcea4c8ba1503 | [
"BSD-4-Clause"
] | null | null | null | # RT - Role Panel
from __future__ import annotations
from discord.ext import commands
import discord
from core import Cog, RT, t
from rtlib.common.json import loads
from rtutil.utils import (
artificially_send, adjust_min_max, replace_nl, is_json, fetch_webhook,
edit_reference
)
from rtutil.content_data import ContentData
from rtutil.panel import extract_emojis
from data import FORBIDDEN, NO_MORE_SETTING
from .__init__ import FSPARENT
class RolePanelEventContext(Cog.EventContext):
"役職パネルのイベントコンテキストです。"
add: set[discord.Role]
remove: set[discord.Role]
class RolePanelView(discord.ui.View):
"役職パネルのViewです。"
def __init__(self, cog: RolePanel, *args, **kwargs):
self.cog = cog
super().__init__(*args, **kwargs)
def extract_description(self, interaction: discord.Interaction) -> str:
        # Extract the embed description from the panel message.
assert interaction.message is not None \
and interaction.message.embeds[0].description is not None
return interaction.message.embeds[0].description
@discord.ui.select(custom_id="role_panel.add_roles")
async def add_roles(self, interaction: discord.Interaction, select: discord.ui.Select):
        # Grant the selected roles.
assert interaction.guild is not None and isinstance(interaction.user, discord.Member)
description = self.extract_description(interaction)
        # Build the set of roles to add.
roles, remove_roles, error = set(), set(), None
for id_ in (selected := set(map(int, select.values))):
role = interaction.guild.get_role(id_)
if role is None:
await interaction.response.send_message(t(
error := self.cog.text_format({
"ja": "ロールが見つかりませんでした:{id_}",
"en": "Role not found: {id_}"
}, id_=id_), interaction
), ephemeral=True)
break
if interaction.user.get_role(id_) is None:
roles.add(role)
        # Apply the role changes.
try:
if not error:
                # Build the set of roles to remove and remove them.
if remove_roles := set(role for role in filter(
lambda role: role.id not in selected and str(role.id) in description,
interaction.user.roles
)):
await interaction.user.remove_roles(*remove_roles)
                # Add the new roles.
if roles:
await interaction.user.add_roles(*roles)
except discord.Forbidden:
await interaction.response.send_message(t(dict(
ja="権限がないためロールの処理に失敗しました。",
en="Role processing failed due to lack of permissions."
), interaction), ephemeral=True)
error = FORBIDDEN
else:
await interaction.response.send_message("Ok", ephemeral=True)
self.cog.bot.rtevent.dispatch("on_role_panel", RolePanelEventContext(
self.cog.bot, interaction.guild, self.cog.detail_or(error), {
"ja": "役職パネル", "en": "Role Panel"
}, self.cog.text_format({
"ja": "対象:{name}\nロール:{roles}", "en": "Target: {name}\nRoles: {roles}"
}, name=self.cog.name_and_id(interaction.user), roles=", ".join(
self.cog.name_and_id(role) for role in roles
)), self.cog.role, error, add=roles, remove=remove_roles
))
@discord.ui.button(
custom_id="role_panel.remove_roles",
style=discord.ButtonStyle.danger,
emoji="🗑"
)
async def remove_roles(self, interaction: discord.Interaction, _):
        # Remove the panel's roles.
description = self.extract_description(interaction)
assert isinstance(interaction.user, discord.Member)
if roles := set(role for role in interaction.user.roles if str(role.id) in description):
await interaction.user.remove_roles(*roles)
await interaction.response.send_message("Ok", ephemeral=True)
class RolePanel(Cog):
"役職パネルのコグです。"
def __init__(self, bot: RT):
self.bot = bot
@commands.Cog.listener()
async def on_setup(self):
self.bot.add_view(RolePanelView(self, timeout=None))
@commands.command(
aliases=("rp", "役職パネル", "ロールパネル", "やぱ", "ろぱ"), fsparent=FSPARENT,
description="Create a role panel."
)
@discord.app_commands.rename(min_="min", max_="max")
@discord.app_commands.describe(
min_=(_d_mi := "The minimum number of roles that can be added."),
max_=(_d_ma := "The maximum number of roles that can be added."),
title=(_d_t := "Title of role panel."),
content="Enter the name or ID of the role to be included in the role panel, separated by `<nl>`.",
)
@commands.has_guild_permissions(manage_roles=True)
@commands.cooldown(1, 10, commands.BucketType.channel)
async def role(
self, ctx: commands.Context, min_: int = -1, max_: int = -1,
title: str = "Role Panel", *, content: str
):
        # Reject anything other than a text channel.
if not isinstance(ctx.channel, discord.TextChannel):
raise Cog.BadRequest({
"ja": "テキストチャンネルである必要があります。",
"en": "Must be a text channel."
})
        # If the argument is `Get content` JSON, extract the embedded description.
if is_json(content):
data: ContentData = loads(content)
content = data["content"]["embeds"][0]["description"]
content = replace_nl(content)
if (length := len(roles := extract_emojis(content))) > 25:
return await ctx.reply(t(NO_MORE_SETTING, ctx))
        # Set up the view.
view = RolePanelView(self, timeout=0)
view.add_roles.min_values, view.add_roles.max_values = adjust_min_max(
length, min_, max_
)
        # Add every role as a select option.
for emoji, role in (roles := [
(emoji, await commands.RoleConverter().convert(ctx, target.strip()))
for emoji, target in roles.items()
]):
view.add_roles.add_option(label=role.name, value=str(role.id), emoji=emoji)
view.add_roles.placeholder = t(dict(ja="ロールを設定する", en="Set roles"), ctx)
view.remove_roles.label = t(dict(ja="ロールをリセットする", en="Reset roles"), ctx)
        # Build the embed.
await self.reply(ctx, embed=discord.Embed(
title=title, description="\n".join(
f"{emoji} {role.mention}" for emoji, role in roles
), color=ctx.author.color
).set_footer(text=t(dict(
ja="RTの役職パネル", en="RT's Role Panel"
), ctx)), view=view)
async def reply(self, ctx: commands.Context, **kwargs):
"色々な処理をして返信をします。"
if ctx.message.reference is None:
            # Send a new role panel.
assert isinstance(ctx.author, discord.Member) \
and isinstance(ctx.channel, discord.TextChannel | discord.Thread)
await artificially_send(ctx.channel, ctx.author, **kwargs)
else:
            # When invoked as a reply, update the referenced role panel instead.
reply = await edit_reference(self.bot, ctx.message, **kwargs)
if isinstance(reply, str):
return await ctx.reply(reply)
if ctx.interaction is not None:
await ctx.interaction.response.send_message("Ok", ephemeral=True)
(Cog.HelpCommand(role)
.merge_description("headline", ja="役職パネルを作ります。")
.add_arg("min", "int", ("default", "-1"),
ja="設定できるロールの最低個数です。", en=_d_mi)
.add_arg("max", "int", ("default", "-1"),
ja="設定できるロールの最大個数です。", en=_d_ma)
.add_arg("title", "str", ("default", "Role Panel"),
ja="役職パネルのタイトルです。", en=_d_t)
.add_arg("content", "str",
ja="""改行または`<nl>`か`<改行>`で分けた役職の名前かIDです。
`Get content`で取得したコードをこの引数に入れることも可能です。
その場合はコードに埋め込みの説明欄が含まれている必要があります。
これは、役職パネルの内容を簡単にコピーするために使用しましょう。""",
en="""The name or ID of the role, separated by a newline or `<nl>` or `<nl>`.
It is also possible to put code obtained with `Get content` into this argument.
In that case, the code must contain an embedded description field.
This should be used to easily copy the content of the role panel.""")
.set_extra("Notes",
ja="`rt!`形式のコマンドを役職パネルのメッセージに返信して実行すると、その役職パネルの内容を上書きすることができます。",
en="Executing a command of the form `rt!` in reply to a role panel message will overwrite the contents of that role panel."))
del _d_mi, _d_ma, _d_t
async def setup(bot: RT) -> None:
await bot.add_cog(RolePanel(bot)) | 39.552995 | 137 | 0.606664 |
4a217a315209ea278a1d246794384730f20f6bf1 | 16,778 | py | Python | nevow/test/livetest_athena.py | msdemlei/nevow | 1b0afa851d476b8d4beec5c3a3894b19fad49b17 | [
"MIT"
] | null | null | null | nevow/test/livetest_athena.py | msdemlei/nevow | 1b0afa851d476b8d4beec5c3a3894b19fad49b17 | [
"MIT"
] | null | null | null | nevow/test/livetest_athena.py | msdemlei/nevow | 1b0afa851d476b8d4beec5c3a3894b19fad49b17 | [
"MIT"
] | 1 | 2019-02-28T13:46:26.000Z | 2019-02-28T13:46:26.000Z | # Copyright (c) 2004-2007 Divmod.
# See LICENSE for details.
"""
Browser integration tests for Athena.
"""
from zope.interface import implements
from twisted.internet import defer
from nevow.inevow import IAthenaTransportable
from nevow import loaders, tags, athena
from nevow.page import Element, renderer
from nevow.athena import expose, LiveElement
from nevow.livetrial import testcase
from nevow.test import test_json
from nevow.testutil import CSSModuleTestMixin
class WidgetInitializerArguments(testcase.TestCase):
"""
Test that the arguments represented by the list returned by
getInitialArguments are properly passed to the widget class's __init__
method.
"""
jsClass = u'Nevow.Athena.Tests.WidgetInitializerArguments'
_args = [1, "two", [3.0 for four in range(5)]]
def getInitialArguments(self):
return self._args
def test(self, args):
self.assertEqual(self._args, args)
expose(test)
class CallRemoteTestCase(testcase.TestCase):
"""
Test the callRemote method of Widgets.
"""
jsClass = u'Nevow.Athena.Tests.CallRemoteTestCase'
class ClientToServerArgumentSerialization(testcase.TestCase):
"""
Tests that arguments passed to a method on the server are properly
received.
"""
jsClass = u'Nevow.Athena.Tests.ClientToServerArgumentSerialization'
def test(self, i, f, s, l, d):
self.assertEqual(i, 1)
self.assertEqual(f, 1.5)
self.assertTrue(isinstance(s, str))
self.assertEqual(s, 'Hello world')
self.assertTrue(isinstance(l[2], str))
self.assertEqual(l, [1, 1.5, 'Hello world'])
self.assertEqual(d, {'hello world': 'object value'})
self.assertTrue(isinstance(list(d.keys())[0], str))
self.assertTrue(isinstance(list(d.values())[0], str))
expose(test)
class ClientToServerResultSerialization(testcase.TestCase):
"""
Tests that the return value from a method on the server is properly
received by the client.
"""
jsClass = u'Nevow.Athena.Tests.ClientToServerResultSerialization'
def test(self, i, f, s, l, d):
return (i, f, s, l, d)
expose(test)
class JSONRoundtrip(testcase.TestCase):
"""
Test that all test cases from nevow.test.test_json roundtrip correctly
through the real client implementation, too.
"""
jsClass = u'Nevow.Athena.Tests.JSONRoundtrip'
def test(self):
cases = test_json.TEST_OBJECTS + test_json.TEST_STRINGLIKE_OBJECTS
def _verifyRoundtrip(_cases):
for v1, v2 in zip(cases, _cases):
self.assertEqual(v1, v2)
return self.callRemote('identity', cases).addCallback(_verifyRoundtrip)
expose(test)
class ExceptionFromServer(testcase.TestCase):
"""
Tests that when a method on the server raises an exception, the client
properly receives an error.
"""
jsClass = u'Nevow.Athena.Tests.ExceptionFromServer'
def testSync(self, s):
raise Exception(s)
expose(testSync)
class AsyncExceptionFromServer(testcase.TestCase):
"""
Tests that when a method on the server raises an exception asynchronously,
the client properly receives an error.
"""
jsClass = u'Nevow.Athena.Tests.AsyncExceptionFromServer'
def testAsync(self, s):
return defer.fail(Exception(s))
expose(testAsync)
class ExceptionFromClient(testcase.TestCase):
"""
Tests that when a method on the client raises an exception, the server
properly receives an error.
"""
jsClass = u'Nevow.Athena.Tests.ExceptionFromClient'
def loopbackError(self):
return self.callRemote('generateError').addErrback(self.checkError)
expose(loopbackError)
def checkError(self, f):
f.trap(athena.JSException)
if 'This is a test exception' in f.value.args[0]:
return True
else:
raise f
class AsyncExceptionFromClient(testcase.TestCase):
"""
Tests that when a method on the client raises an exception asynchronously,
the server properly receives an error.
"""
jsClass = u'Nevow.Athena.Tests.AsyncExceptionFromClient'
def loopbackError(self):
return self.callRemote('generateError').addErrback(self.checkError)
expose(loopbackError)
def checkError(self, f):
f.trap(athena.JSException)
if 'This is a deferred test exception' in f.value.args[0]:
return True
else:
raise f
class CustomTransportable(object):
"""
A simple transportable object used to verify customization is possible.
"""
implements(IAthenaTransportable)
jsClass = u'Nevow.Athena.Tests.CustomTransportable'
def getInitialArguments(self):
return ("Hello", 5, "world")
class ServerToClientArgumentSerialization(testcase.TestCase):
"""
Tests that a method invoked on the client by the server is passed the
correct arguments.
"""
jsClass = u'Nevow.Athena.Tests.ServerToClientArgumentSerialization'
def test(self):
return self.callRemote(
'reverse', 1, 1.5, 'hello', {'world': 'value'},
CustomTransportable())
expose(test)
class ServerToClientResultSerialization(testcase.TestCase):
"""
Tests that the result returned by a method invoked on the client by the
server is correct.
"""
jsClass = u'Nevow.Athena.Tests.ServerToClientResultSerialization'
def test(self):
def cbResults(result):
self.assertEqual(result[0], 1)
self.assertEqual(result[1], 1.5)
self.assertEqual(result[2], 'hello')
self.assertEqual(result[3], {'world': 'value'})
d = self.callRemote('reverse')
d.addCallback(cbResults)
return d
expose(test)
class WidgetInATable(testcase.TestCase):
jsClass = u"Nevow.Athena.Tests.WidgetInATable"
def getTestContainer(self):
return tags.table[tags.tbody[tags.tr[tags.td[tags.slot('widget')]]]]
class WidgetIsATable(testcase.TestCase):
jsClass = u"Nevow.Athena.Tests.WidgetIsATable"
def getWidgetTag(self):
"""
Make this widget's top-level node a table node.
"""
return tags.table
def getWidgetDocument(self):
"""
Create a body for the table node at the top of this widget. Put a row
and a column in it.
"""
return tags.tbody[tags.tr[tags.td]]
class ParentChildRelationshipTest(testcase.TestCase):
jsClass = u"Nevow.Athena.Tests.ChildParentRelationshipTest"
def getWidgetDocument(self):
"""
Return a tag which will have numerous children rendered beneath it.
"""
return tags.div(render=tags.directive('childrenWidgets'))
def render_childrenWidgets(self, ctx, data):
"""
Put some children into this widget. The client portion of this test
will assert things about their presence in C{Widget.childWidgets}.
"""
for i in range(3):
yield ChildFragment(self.page, i)
def getChildCount(self):
return 3
expose(getChildCount)
class ChildFragment(athena.LiveFragment):
jsClass = u'Nevow.Athena.Tests.ChildParentRelationshipTest'
docFactory = loaders.stan(tags.div(render=tags.directive('liveFragment'))[
tags.div(render=tags.directive('childrenWidgets')),
'child'])
def __init__(self, page, childCount):
super(ChildFragment, self).__init__()
self.page = page
self.childCount = childCount
def render_childrenWidgets(self, ctx, data):
# yield tags.div['There are ', self.childCount, 'children']
for i in range(self.childCount):
yield ChildFragment(self.page, self.childCount - 1)
def getChildCount(self):
return self.childCount
expose(getChildCount)
class AutomaticClass(testcase.TestCase):
jsClass = u'Nevow.Athena.Tests.AutomaticClass'
docFactory = loaders.stan(tags.div(render=tags.directive('liveTest')))
class ButtonElement(Element):
"""
A button with an automatic Athena event handler.
"""
preprocessors = LiveElement.preprocessors
docFactory = loaders.stan(
tags.button[
athena.handler(event='onclick', handler='handler')])
class AthenaHandler(testcase.TestCase):
jsClass = u'Nevow.Athena.Tests.AthenaHandler'
def getWidgetDocument(self):
"""
Return a button with an automatic athena handler attached to its
onclick event.
"""
return ButtonElement()
class NodeLocationSubElement1(LiveElement):
docFactory = loaders.stan(
tags.div(render=tags.directive('liveElement'))[
tags.invisible(render=tags.directive('bar')),
tags.label(_class='foo', _for="username"),
tags.input(_class='foo', id='username')])
def bar(self, req, tag):
e = NodeLocationSubElement2()
e.setFragmentParent(self)
return e
renderer(bar)
class NodeLocationSubElement2(LiveElement):
docFactory = loaders.stan(
tags.div(render=tags.directive('liveElement'))[
tags.label(_class='bar', _for="username"),
tags.input(_class='bar', id='username')])
def getDynamicWidget(self):
"""
Return a widget dynamically for us to have more fun with.
"""
e = NodeLocationSubElement1()
e.setFragmentParent(self)
return e
expose(getDynamicWidget)
def getNodeInsertedHelper(self):
"""
Return a dynamically instantiated NodeInsertedHelper to play with.
"""
e = NodeInsertedHelper()
e.setFragmentParent(self)
return e
expose(getNodeInsertedHelper)
class NodeInsertedHelper(LiveElement):
"""
Simple widget to be dynamically instatiated for testing nodeInserted
behaviour on client side.
"""
jsClass = u'Nevow.Athena.Tests.NodeInsertedHelper'
docFactory = loaders.stan(
tags.div(render=tags.directive('liveElement')))
class NodeLocation(testcase.TestCase):
jsClass = u'Nevow.Athena.Tests.NodeLocation'
def getWidgetDocument(self):
"""
Return some child elements for us to search in.
"""
e = NodeLocationSubElement1()
e.setFragmentParent(self)
e2 = NodeLocationSubElement2()
e2.setFragmentParent(self)
return [e, e2]
class WidgetRequiresImport(LiveElement):
"""
Widget which has no behavior, but which has a JavaScript class which will
require a dynamic import.
"""
jsClass = u'Nevow.Athena.Tests.Resources.ImportWidget'
docFactory = loaders.stan(tags.div(render=tags.directive('liveElement')))
class DynamicWidgetInstantiation(testcase.TestCase):
jsClass = u'Nevow.Athena.Tests.DynamicWidgetInstantiation'
def makeDynamicWidget(self):
"""
Return a newly created LiveFragment with no parent.
"""
class DynamicFragment(athena.LiveFragment):
docFactory = loaders.stan(tags.div(render=tags.directive('liveFragment')))
jsClass = u'Nevow.Athena.Tests.DynamicWidgetClass'
def someMethod(self):
return 'foo'
expose(someMethod)
return DynamicFragment()
def getDynamicWidget(self):
"""
Return a newly created LiveFragment with this LiveFragment as its
parent.
"""
f = self.makeDynamicWidget()
f.setFragmentParent(self)
return f
expose(getDynamicWidget)
def getDynamicWidgetLater(self):
"""
Make a s->c call with a LiveFragment as an argument. This tests
that widgets are reliably serialized when they appear as function
arguments.
"""
class DynamicFragment(athena.LiveFragment):
docFactory = loaders.stan(tags.div(render=tags.directive('liveFragment')))
jsClass = u'Nevow.Athena.Tests.DynamicWidgetClass'
def someMethod(self):
return 'foo'
expose(someMethod)
f = DynamicFragment()
f.setFragmentParent(self)
return self.callRemote("sendWidgetAsArgument", f)
expose(getDynamicWidgetLater)
def getDynamicWidgetInfo(self):
"""
Return a dictionary containing structured information about a newly
created Fragment which is a child of this test case.
"""
f = self.getDynamicWidget()
# Force it to have an ID and to become part of the page and other
# grotty filthy things.
#
# XXX Make an actual API, maybe.
widgetInfo = f._structured()
return {
'id': widgetInfo['id'],
'klass': widgetInfo['class']}
expose(getDynamicWidgetInfo)
def getWidgetWithImports(self):
"""
Return a Widget which requires a module import.
"""
f = WidgetRequiresImport()
f.setFragmentParent(self)
return f
expose(getWidgetWithImports)
def getNonXHTMLWidget(self):
"""
@return: a widget with a namespace that is not XHTML so a test can
verify that the namespace is preserved.
"""
class NonXHTMLFragment(athena.LiveFragment):
circle = tags.Proto("circle")
docFactory = loaders.stan(
circle(xmlns="http://www.w3.org/2000/svg",
render=tags.directive("liveFragment")))
f = NonXHTMLFragment()
f.setFragmentParent(self)
return f
expose(getNonXHTMLWidget)
def getAndRememberDynamicWidget(self):
"""
Call and return the result of L{getDynamicWidget}, but also save the
result as an attribute on self for later inspection.
"""
self.savedWidget = self.getDynamicWidget()
return self.savedWidget
expose(getAndRememberDynamicWidget)
def getAndSaveDynamicWidgetWithChild(self):
"""
Return a LiveFragment which is a child of this widget and which has a
child.
"""
childFragment = self.makeDynamicWidget()
class DynamicFragment(athena.LiveFragment):
docFactory = loaders.stan(
tags.div(render=tags.directive('liveFragment'))[
tags.div(render=tags.directive('child'))])
jsClass = u'Nevow.Athena.Tests.DynamicWidgetClass'
def render_child(self, ctx):
childFragment.setFragmentParent(self)
return childFragment
f = DynamicFragment()
f.setFragmentParent(self)
return f
expose(getAndSaveDynamicWidgetWithChild)
def assertSavedWidgetRemoved(self):
"""
Verify that the saved widget is no longer a child of this fragment.
"""
self.assertNotIn(self.savedWidget, self.liveFragmentChildren)
expose(assertSavedWidgetRemoved)
def detachSavedDynamicWidget(self):
"""
Initiate a server-side detach on the saved widget.
"""
return self.savedWidget.detach()
expose(detachSavedDynamicWidget)
class GettingWidgetlessNodeRaisesException(testcase.TestCase):
jsClass = u'Nevow.Athena.Tests.GettingWidgetlessNodeRaisesException'
class RemoteMethodErrorShowsDialog(testcase.TestCase):
jsClass = u'Nevow.Athena.Tests.RemoteMethodErrorShowsDialog'
def raiseValueError(self):
raise ValueError('hi')
athena.expose(raiseValueError)
class DelayedCallTests(testcase.TestCase):
"""
Tests for the behavior of scheduling timed calls in the client.
"""
jsClass = u'Nevow.Athena.Tests.DelayedCallTests'
class DynamicStylesheetFetching(testcase.TestCase, CSSModuleTestMixin):
"""
Tests for stylesheet fetching when dynamic widget instantiation is
involved.
"""
jsClass = u'Nevow.Athena.Tests.DynamicStylesheetFetching'
# lala we want to use TestCase.mktemp
_testMethodName = 'DynamicStylesheetFetching'
def getWidgetWithCSSDependencies(self):
"""
Return a widget which depends on some CSS.
"""
self.page.cssModules = self._makeCSSRegistry()
element = athena.LiveElement()
element.cssModule = 'TestCSSModuleDependencies.Dependor'
element.setFragmentParent(self)
element.docFactory = loaders.stan(
tags.div(render=tags.directive('liveElement')))
return (
element,
[str(self.page.getCSSModuleURL(n))
for n in ('TestCSSModuleDependencies',
'TestCSSModuleDependencies.Dependee',
'TestCSSModuleDependencies.Dependor')])
expose(getWidgetWithCSSDependencies)
| 28.010017 | 86 | 0.655799 |
4a217aa75a5a04b1124f375f0f112f8152b38d1e | 1,883 | py | Python | saqaparser.py | johnddias/saqaparser | 94de26f4f5db724c87e8aca7a523cfdba2766dc0 | ["Apache-2.0"] | 6 | 2017-03-27T00:59:08.000Z | 2021-03-08T08:51:36.000Z | saqaparser.py | johnddias/saqaparser | 94de26f4f5db724c87e8aca7a523cfdba2766dc0 | ["Apache-2.0"] | null | null | null | saqaparser.py | johnddias/saqaparser | 94de26f4f5db724c87e8aca7a523cfdba2766dc0 | ["Apache-2.0"] | 2 | 2017-07-17T21:51:22.000Z | 2021-02-19T16:58:27.000Z |
#!/usr/bin/env python
import logging
import sys
from HTMLParser import HTMLParser
from bs4 import BeautifulSoup, SoupStrainer
import requests
import inquirer
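# Restrict parsing to elements tagged with sasource="qp_transcripts", the attribute
# used to mark transcript links on the symbol page.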
only_transcripts = SoupStrainer(sasource="qp_transcripts")
def gettranscripts ( ticker ):
"Gets a list of available transcripts for a valid stock ticker"
turl = "http://seekingalpha.com/symbol/%s/transcripts" % (ticker)
tres = requests.get(turl, headers=headers)
tsoup = BeautifulSoup(tres.text, 'html.parser', parse_only=only_transcripts)
picklist = []
for t in tsoup:
picklist.append(t.string)
    return picklist
def gethref ( ticker,article ):
"Finds the href for the selected article title string"
turl = "http://seekingalpha.com/symbol/%s/transcripts" % (ticker)
tres = requests.get(turl, headers=headers)
tsoup = BeautifulSoup(tres.text, 'html.parser', parse_only=only_transcripts)
href = ""
for t in tsoup:
if t.string == article:
href=t['href']
    return href
headers = {
    'User-Agent': 'Mozilla/5.0'
}
tick = raw_input('Ticker Symbol: ')
picklist = gettranscripts(tick)
questions = [
inquirer.List('article',
message = 'Select an article to process...',
choices = picklist,
),
]
article = inquirer.prompt(questions)
targeturl = gethref(tick,article['article'])
url = "http://seekingalpha.com" + targeturl + "?part=single"
response = requests.get(url, headers=headers)
#print(response.status_code)
#print(response.headers)
soup = BeautifulSoup(response.text, 'html.parser')
transcript = soup.find(id="a-body")
fo = open("qanda.txt", "w")
qastart = False
for i in transcript.children:
if i.string == "Question-and-Answer Session":
qastart = True
if qastart:
t = i.string
if t:
fo.write(t.encode('utf-8'))
fo.write("\n")
fo.close()
| 28.530303 | 80 | 0.668614 |
4a217b180eba282269ec9ae9acf30217453e3404 | 535 | py | Python | Heraut_python/Monstre_class.py | yppdr/SpaceKiwi | e82c9c9704d33a3e928ba613c58750b285ec54e8 | ["MIT"] | null | null | null | Heraut_python/Monstre_class.py | yppdr/SpaceKiwi | e82c9c9704d33a3e928ba613c58750b285ec54e8 | ["MIT"] | 8 | 2018-10-20T09:59:59.000Z | 2018-10-21T13:12:28.000Z | Heraut_python/Monstre_class.py | yppdr/SpaceKiwi | e82c9c9704d33a3e928ba613c58750b285ec54e8 | ["MIT"] | 1 | 2018-10-22T08:13:26.000Z | 2018-10-22T08:13:26.000Z |
class Monstre:
def __init__(self,leNom,lesPV,nbAtk,nbDeff,NbPoLoot):
self.nom = leNom
self.pvMax = lesPV
self.pvActu = lesPV
self.atk = nbAtk
self.deff = nbDeff
self.gold = NbPoLoot
def _get_nom(self):
return self.nom
def _get_pvActu(self):
return self.pvActu
def _get_pvMax(self):
return self.pvMax
def _get_atk(self):
return self.atk
def _get_deff(self):
return self.deff
def _get_gold(self):
        return self.gold
| 24.318182 | 57 | 0.596262 |
4a217bc6cd6205aecda209fe3dbf6bfc8930b314 | 851 | py | Python | tests/bots/stocks/options/test_vsurf.py | 23errg/GamestonkTerminal | 826cd8a723d8e2b810c51bf8266c09e8e55059c4 | ["MIT"] | null | null | null | tests/bots/stocks/options/test_vsurf.py | 23errg/GamestonkTerminal | 826cd8a723d8e2b810c51bf8266c09e8e55059c4 | ["MIT"] | null | null | null | tests/bots/stocks/options/test_vsurf.py | 23errg/GamestonkTerminal | 826cd8a723d8e2b810c51bf8266c09e8e55059c4 | ["MIT"] | null | null | null |
import pytest
try:
from bots.stocks.options.vsurf import vsurf_command
except ImportError:
pytest.skip(allow_module_level=True)
@pytest.fixture(scope="module")
def vcr_config():
return {
"filter_headers": [("User-Agent", None)],
"filter_query_parameters": [
("period1", "MOCK_PERIOD_1"),
("period2", "MOCK_PERIOD_2"),
("date", "MOCK_DATE"),
],
}
@pytest.mark.vcr
@pytest.mark.bots
@pytest.mark.parametrize("z", ["IV", "OI", "LP"])
def test_vsurf(recorder, z):
value = vsurf_command("TSLA", z)
value["imagefile"] = str(type(value["imagefile"]))
recorder.capture(value)
@pytest.mark.vcr
@pytest.mark.bots
@pytest.mark.parametrize("ticker", [None, "", "ZZZZ"])
def test_vsurf_invalid(ticker):
with pytest.raises(Exception):
vsurf_command(ticker)
| 23 | 55 | 0.638073 |
4a217c825bedf9714ee3c9b43640364784bdf81e | 880 | py | Python | databases/migrations/2018_02_21_023908_create_schedule_table.py | josephmancuso/gbaleague-masonite2 | b3dd5ec3f20c07eaabcc3129b0c50379a946a82b | ["MIT"] | null | null | null | databases/migrations/2018_02_21_023908_create_schedule_table.py | josephmancuso/gbaleague-masonite2 | b3dd5ec3f20c07eaabcc3129b0c50379a946a82b | ["MIT"] | 3 | 2018-07-25T17:36:43.000Z | 2020-01-06T18:52:51.000Z | databases/migrations/2018_02_21_023908_create_schedule_table.py | josephmancuso/gbaleague-masonite2 | b3dd5ec3f20c07eaabcc3129b0c50379a946a82b | ["MIT"] | null | null | null |
from orator.migrations import Migration
class CreateScheduleTable(Migration):
def up(self):
"""
Run the migrations.
"""
with self.schema.create('schedules') as table:
table.increments('id')
table.integer('league_id').unsigned()
table.integer('team1_id').unsigned()
table.integer('team2_id').unsigned()
table.integer('winner_id').unsigned().nullable()
table.foreign('league_id').references('id').on('leagues')
table.foreign('team1_id').references('id').on('teams')
table.foreign('team2_id').references('id').on('teams')
table.foreign('winner_id').references('id').on('teams')
table.timestamps()
def down(self):
"""
Revert the migrations.
"""
self.schema.drop('schedules')
| 31.428571 | 69 | 0.563636 |
4a217ccf8581a3206c6660408afe80dcb1ecad70 | 819 | py | Python | importer/management/commands/fix-sort-date.py | cyroxx/meine-stadt-transparent | d5a3f03a29a1bb97ce50ac5257d8bbd5208d9218 | ["MIT"] | 34 | 2017-10-04T14:20:41.000Z | 2022-03-11T18:06:48.000Z | importer/management/commands/fix-sort-date.py | cyroxx/meine-stadt-transparent | d5a3f03a29a1bb97ce50ac5257d8bbd5208d9218 | ["MIT"] | 588 | 2017-10-14T18:31:17.000Z | 2022-03-16T13:00:30.000Z | importer/management/commands/fix-sort-date.py | codeformuenster/meine-stadt-transparent | 1458bc6acad40183908e2b7cc98ef92165d1123a | ["MIT"] | 11 | 2017-11-27T10:12:59.000Z | 2022-02-09T10:27:11.000Z |
import datetime
from dateutil import tz
from django.core.management.base import BaseCommand
from importer.functions import fix_sort_date
class Command(BaseCommand):
help = "After the initial import, this command guesses the sort_date-Attribute of papers and files"
def add_arguments(self, parser):
help_str = (
"The date of the first import in the format YYYY-MM-DD. "
+ "All documents/files created up to this day will have the sort_date-Attribute modified."
)
parser.add_argument("import_date", type=str, help=help_str)
def handle(self, *args, **options):
import_date = datetime.datetime.strptime(
options["import_date"] + " 23:59:59", "%Y-%m-%d %H:%M:%S"
).replace(tzinfo=tz.tzlocal())
fix_sort_date(import_date)
| 32.76 | 103 | 0.675214 |
4a217d14ce99ad294469a5316272219a13cadf80 | 3,597 | py | Python | aidistillery/stopwords.py | TheMTank/arxiv-summariser | db4f1e42bcc9185e197a00a18e280a4a3011453c | ["MIT"] | 17 | 2018-11-26T23:06:20.000Z | 2022-01-18T21:43:17.000Z | aidistillery/stopwords.py | TheMTank/arxiv-summariser | db4f1e42bcc9185e197a00a18e280a4a3011453c | ["MIT"] | 3 | 2018-11-27T12:17:20.000Z | 2019-02-05T11:40:44.000Z | aidistillery/stopwords.py | TheMTank/arxiv-summariser | db4f1e42bcc9185e197a00a18e280a4a3011453c | ["MIT"] | 3 | 2019-03-06T10:14:08.000Z | 2020-01-21T17:26:20.000Z |
stopwords = ['a', 'about', 'above', 'across', 'after', 'afterwards']
stopwords += ['again', 'against', 'all', 'almost', 'alone', 'along']
stopwords += ['already', 'also', 'although', 'always', 'am', 'among']
stopwords += ['amongst', 'amoungst', 'amount', 'an', 'and', 'another']
stopwords += ['any', 'anyhow', 'anyone', 'anything', 'anyway', 'anywhere']
stopwords += ['are', 'around', 'as', 'at', 'back', 'be', 'became']
stopwords += ['because', 'become', 'becomes', 'becoming', 'been']
stopwords += ['before', 'beforehand', 'behind', 'being', 'below']
stopwords += ['beside', 'besides', 'between', 'beyond', 'bill', 'both']
stopwords += ['bottom', 'but', 'by', 'call', 'can', 'cannot', 'cant']
stopwords += ['co', 'con', 'could', 'couldnt', 'cry', 'de']
stopwords += ['describe', 'detail', 'did', 'do', 'done', 'down', 'due']
stopwords += ['during', 'each', 'eg', 'eight', 'either', 'eleven', 'else']
stopwords += ['elsewhere', 'empty', 'enough', 'etc', 'even', 'ever']
stopwords += ['every', 'everyone', 'everything', 'everywhere', 'except']
stopwords += ['few', 'fifteen', 'fifty', 'fill', 'find', 'fire', 'first']
stopwords += ['five', 'for', 'former', 'formerly', 'forty', 'found']
stopwords += ['four', 'from', 'front', 'full', 'further', 'get', 'give']
stopwords += ['go', 'had', 'has', 'hasnt', 'have', 'he', 'hence', 'her']
stopwords += ['here', 'hereafter', 'hereby', 'herein', 'hereupon', 'hers']
stopwords += ['herself', 'him', 'himself', 'his', 'how', 'however']
stopwords += ['hundred', 'i', 'ie', 'if', 'in', 'inc', 'indeed']
stopwords += ['interest', 'into', 'is', 'it', 'its', 'itself', 'keep']
stopwords += ['last', 'latter', 'latterly', 'least', 'less', 'ltd', 'made']
stopwords += ['many', 'may', 'me', 'meanwhile', 'might', 'mill', 'mine']
stopwords += ['more', 'moreover', 'most', 'mostly', 'move', 'much']
stopwords += ['must', 'my', 'myself', 'name', 'namely', 'neither', 'never']
stopwords += ['nevertheless', 'next', 'nine', 'no', 'nobody', 'none']
stopwords += ['noone', 'nor', 'not', 'nothing', 'now', 'nowhere', 'of']
stopwords += ['off', 'often', 'on','once', 'one', 'only', 'onto', 'or']
stopwords += ['other', 'others', 'otherwise', 'our', 'ours', 'ourselves']
stopwords += ['out', 'over', 'own', 'part', 'per', 'perhaps', 'please']
stopwords += ['put', 'rather', 're', 's', 'same', 'see', 'seem', 'seemed']
stopwords += ['seeming', 'seems', 'serious', 'several', 'she', 'should']
stopwords += ['show', 'side', 'since', 'sincere', 'six', 'sixty', 'so']
stopwords += ['some', 'somehow', 'someone', 'something', 'sometime']
stopwords += ['sometimes', 'somewhere', 'still', 'such', 'system', 'take']
stopwords += ['ten', 'than', 'that', 'the', 'their', 'them', 'themselves']
stopwords += ['then', 'thence', 'there', 'thereafter', 'thereby']
stopwords += ['therefore', 'therein', 'thereupon', 'these', 'they']
stopwords += ['thick', 'thin', 'third', 'this', 'those', 'though', 'three']
stopwords += ['three', 'through', 'throughout', 'thru', 'thus', 'to']
stopwords += ['together', 'too', 'top', 'toward', 'towards', 'twelve']
stopwords += ['twenty', 'two', 'un', 'under', 'until', 'up', 'upon']
stopwords += ['us', 'very', 'via', 'was', 'we', 'well', 'were', 'what']
stopwords += ['whatever', 'when', 'whence', 'whenever', 'where']
stopwords += ['whereafter', 'whereas', 'whereby', 'wherein', 'whereupon']
stopwords += ['wherever', 'whether', 'which', 'while', 'whither', 'who']
stopwords += ['whoever', 'whole', 'whom', 'whose', 'why', 'will', 'with']
stopwords += ['within', 'without', 'would', 'yet', 'you', 'your']
stopwords += ['yours', 'yourself', 'yourselves']
| 69.173077 | 75 | 0.572699 |
4a217d5b683915126c0a7d9d8198809e0f230831 | 9,898 | py | Python | synonyms/synonyms.py | Walelile/Synonyms | 62c2aea9e048e05e4fbe73d8bdf2a8b5b550a187 | ["MIT"] | 1 | 2019-08-04T09:42:45.000Z | 2019-08-04T09:42:45.000Z | synonyms/synonyms.py | skywindy/Synonyms | b23e1c3ba7f5c972dc2b2f7ad8c0d499c19ab228 | ["MIT"] | null | null | null | synonyms/synonyms.py | skywindy/Synonyms | b23e1c3ba7f5c972dc2b2f7ad8c0d499c19ab228 | ["MIT"] | 1 | 2019-09-11T09:09:20.000Z | 2019-09-11T09:09:20.000Z |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#=========================================================================
#
# Copyright (c) 2017 <> All Rights Reserved
#
#
# File: /Users/hain/ai/Synonyms/synonyms/__init__.py
# Author: Hai Liang Wang
# Date: 2017-09-27
#
#=========================================================================
"""
Chinese Synonyms for Natural Language Processing and Understanding.
"""
from __future__ import print_function
from __future__ import division
__copyright__ = "Copyright (c) 2017 . All Rights Reserved"
__author__ = "Hu Ying Xi<>, Hai Liang Wang<[email protected]>"
__date__ = "2017-09-27"
__version__ = "3.3.10"
import os
import sys
import numpy as np
curdir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(curdir)
PLT = 2
if sys.version_info[0] < 3:
default_stdout = sys.stdout
default_stderr = sys.stderr
reload(sys)
sys.stdout = default_stdout
sys.stderr = default_stderr
sys.setdefaultencoding("utf-8")
# raise "Must be using Python 3"
else:
PLT = 3
# Get Environment variables
ENVIRON = os.environ.copy()
import json
import gzip
import shutil
from absl import logging
from .word2vec import KeyedVectors
from .utils import any2utf8
from .utils import any2unicode
from .utils import sigmoid
from .utils import cosine
from .utils import is_digit
import jieba
from .jieba import posseg as _tokenizer
'''
globals
'''
_vocab = dict()
_size = 0
_vectors = None
_stopwords = set()
_cache_nearby = dict()
'''
lambda fns
'''
# combine similarity scores
_similarity_smooth = lambda x, y, z, u: (x * y) + z - u
_flat_sum_array = lambda x: np.sum(x, axis=0)  # numerator
'''
tokenizer settings
'''
tokenizer_dict = os.path.join(curdir, 'data', 'vocab.txt')
if "SYNONYMS_WORDSEG_DICT" in ENVIRON:
if os.path.exists(ENVIRON["SYNONYMS_WORDSEG_DICT"]):
print("info: set wordseg dict with %s" % tokenizer_dict)
tokenizer_dict = ENVIRON["SYNONYMS_WORDSEG_DICT"]
else: print("warning: can not find dict at [%s]" % tokenizer_dict)
print(">> Synonyms load wordseg dict [%s] ... " % tokenizer_dict)
_tokenizer.initialize(tokenizer_dict)
# stopwords
_fin_stopwords_path = os.path.join(curdir, 'data', 'stopwords.txt')
def _load_stopwords(file_path):
'''
load stop words
'''
global _stopwords
if sys.version_info[0] < 3:
words = open(file_path, 'r')
else:
words = open(file_path, 'r', encoding='utf-8')
stopwords = words.readlines()
for w in stopwords:
_stopwords.add(any2unicode(w).strip())
print(">> Synonyms on loading stopwords [%s] ..." % _fin_stopwords_path)
_load_stopwords(_fin_stopwords_path)
def _segment_words(sen):
'''
segment words with jieba
'''
words, tags = [], []
    m = _tokenizer.cut(sen, HMM=True)  # HMM gives better recognition of new words
for x in m:
words.append(x.word)
tags.append(x.flag)
return words, tags
'''
word embedding
'''
# vectors
_f_model = os.path.join(curdir, 'data', 'words.vector')
if "SYNONYMS_WORD2VEC_BIN_MODEL_ZH_CN" in ENVIRON:
_f_model = ENVIRON["SYNONYMS_WORD2VEC_BIN_MODEL_ZH_CN"]
def _load_w2v(model_file=_f_model, binary=True):
'''
load word2vec model
'''
if not os.path.exists(model_file):
print("os.path : ", os.path)
raise Exception("Model file [%s] does not exist." % model_file)
return KeyedVectors.load_word2vec_format(
model_file, binary=binary, unicode_errors='ignore')
print(">> Synonyms on loading vectors [%s] ..." % _f_model)
_vectors = _load_w2v(model_file=_f_model)
def _get_wv(sentence, ignore=False):
'''
get word2vec data by sentence
sentence is segmented string.
'''
global _vectors
vectors = []
for y in sentence:
y_ = any2unicode(y).strip()
if y_ not in _stopwords:
syns = nearby(y_)[0]
# print("sentence %s word: %s" %(sentence, y_))
# print("sentence %s word nearby: %s" %(sentence, " ".join(syns)))
c = []
try:
c.append(_vectors.word_vec(y_))
except KeyError as error:
if ignore:
continue
else:
logging.warning("not exist in w2v model: %s" % y_)
# c.append(np.zeros((100,), dtype=float))
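                    # OOV fallback: a pseudo-random vector seeded with the word's hash,
                    # so repeated lookups of the same word stay deterministic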
random_state = np.random.RandomState(seed=(hash(y_) % (2**32 - 1)))
c.append(random_state.uniform(low=-10.0, high=10.0, size=(100,)))
for n in syns:
if n is None: continue
try:
v = _vectors.word_vec(any2unicode(n))
except KeyError as error:
# v = np.zeros((100,), dtype=float)
random_state = np.random.RandomState(seed=(hash(n) % (2 ** 32 - 1)))
                    v = random_state.uniform(low=-10.0, high=10.0, size=(100,))
c.append(v)
r = np.average(c, axis=0)
vectors.append(r)
return vectors
'''
Distance
'''
# Levenshtein Distance
def _levenshtein_distance(sentence1, sentence2):
'''
Return the Levenshtein distance between two strings.
Based on:
http://rosettacode.org/wiki/Levenshtein_distance#Python
'''
first = any2utf8(sentence1).decode('utf-8', 'ignore')
second = any2utf8(sentence2).decode('utf-8', 'ignore')
sentence1_len, sentence2_len = len(first), len(second)
maxlen = max(sentence1_len, sentence2_len)
if sentence1_len > sentence2_len:
first, second = second, first
distances = range(len(first) + 1)
for index2, char2 in enumerate(second):
new_distances = [index2 + 1]
for index1, char1 in enumerate(first):
if char1 == char2:
new_distances.append(distances[index1])
else:
new_distances.append(1 + min((distances[index1],
distances[index1 + 1],
new_distances[-1])))
distances = new_distances
levenshtein = distances[-1]
d = float((maxlen - levenshtein)/maxlen)
# smoothing
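    # rescale the ratio through a sigmoid so that high-overlap pairs are pushed towards 1.0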
s = (sigmoid(d * 6) - 0.5) * 2
# print("smoothing[%s| %s]: %s -> %s" % (sentence1, sentence2, d, s))
return s
def sv(sentence, ignore=False):
'''
    Get the vector of a segmented sentence, composed in bag-of-words fashion
    sentence: the sentence after word segmentation, tokens joined by spaces
    ignore: whether to skip OOV words; when False a random vector is generated for them
'''
return _get_wv(sentence, ignore = ignore)
def v(word):
'''
    Get the vector of a single word; raises KeyError for OOV words
'''
y_ = any2unicode(word).strip()
return _vectors.word_vec(y_)
def _nearby_levenshtein_distance(s1, s2):
'''
    Use words that are nearby in the embedding space to refine the edit-distance computation
'''
s1_len, s2_len = len(s1), len(s2)
maxlen = s1_len
if s1_len == s2_len:
first, second = sorted([s1, s2])
elif s1_len < s2_len:
first = s1
second = s2
maxlen = s2_len
else:
first = s2
second = s1
ft = set() # all related words with first sentence
for x in first:
ft.add(x)
n, _ = nearby(x)
for o in n[:10]:
ft.add(o)
scores = []
for x in second:
scores.append(max([_levenshtein_distance(x, y) for y in ft]))
s = np.sum(scores) / maxlen
return s
def _similarity_distance(s1, s2, ignore):
'''
compute similarity with distance measurement
'''
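    # g: cosine similarity of the two bag-of-words sentence vectors
    # u: Levenshtein similarity expanded with word2vec neighbours
    # the two scores are blended below, giving g less weight as u grows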
g = 0.0
try:
g_ = cosine(_flat_sum_array(_get_wv(s1, ignore)), _flat_sum_array(_get_wv(s2, ignore)))
if is_digit(g_): g = g_
except: pass
u = _nearby_levenshtein_distance(s1, s2)
logging.debug("g: %s, u: %s" % (g, u))
if u >= 0.99:
r = 1.0
elif u > 0.9:
r = _similarity_smooth(g, 0.05, u, 0.05)
elif u > 0.8:
r = _similarity_smooth(g, 0.1, u, 0.2)
elif u > 0.4:
r = _similarity_smooth(g, 0.2, u, 0.15)
elif u > 0.2:
r = _similarity_smooth(g, 0.3, u, 0.1)
else:
r = _similarity_smooth(g, 0.4, u, 0)
if r < 0: r = abs(r)
r = min(r, 1.0)
return float("%.3f" % r)
'''
Public Methods
'''
seg = _segment_words # word segmenter
def nearby(word):
'''
Nearby word
'''
w = any2unicode(word)
# read from cache
if w in _cache_nearby: return _cache_nearby[w]
words, scores = [], []
try:
for x in _vectors.neighbours(w):
words.append(x[0])
scores.append(x[1])
except: pass # ignore key error, OOV
# put into cache
_cache_nearby[w] = (words, scores)
return words, scores
def compare(s1, s2, seg=True, ignore=False, stopwords=False):
'''
compare similarity
s1 : sentence1
s2 : sentence2
seg : True : The original sentences need jieba.cut
        False : The original sentences have been cut.
ignore: True: ignore OOV words
False: get vector randomly for OOV words
'''
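    # identical inputs short-circuit to a perfect score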
if s1 == s2: return 1.0
s1_words = []
s2_words = []
if seg:
s1 = [x for x in jieba.cut(s1)]
s2 = [x for x in jieba.cut(s2)]
else:
s1 = s1.split()
s2 = s2.split()
# check stopwords
if not stopwords:
global _stopwords
for x in s1:
if not x in _stopwords:
s1_words.append(x)
for x in s2:
if not x in _stopwords:
s2_words.append(x)
else:
s1_words = s1
s2_words = s2
assert len(s1) > 0 and len(s2) > 0, "The length of s1 and s2 should > 0."
return _similarity_distance(s1_words, s2_words, ignore)
def display(word):
print("'%s'近义词:" % word)
o = nearby(word)
assert len(o) == 2, "should contain 2 list"
if len(o[0]) == 0:
print(" out of vocabulary")
for k, v in enumerate(o[0]):
print(" %d. %s:%s" % (k + 1, v, o[1][k]))
def main():
display("人脸")
display("NOT_EXIST")
if __name__ == '__main__':
main()
| 27.571031 | 95 | 0.585068 |
4a217d7911cebf10ce8aec63d0dd262e1bf6a2bb | 2,487 | py | Python | lightning_plus/shield/signals.py | twocucao/lightning-plus | e69c81da9c15fdfc37355e0362ff7ed804e94b2a | ["MIT"] | 1 | 2021-04-15T14:52:12.000Z | 2021-04-15T14:52:12.000Z | lightning_plus/shield/signals.py | twocucao/lightning | e69c81da9c15fdfc37355e0362ff7ed804e94b2a | ["MIT"] | null | null | null | lightning_plus/shield/signals.py | twocucao/lightning | e69c81da9c15fdfc37355e0362ff7ed804e94b2a | ["MIT"] | null | null | null |
import logging
from django.conf import settings
from django.contrib.auth import get_user_model
from django.db.models.signals import m2m_changed
from django.dispatch import receiver
from lightning_plus.shield.filter import (
SHIELD_RULES_DICT_CACHE_KEY,
USER_GROUP_MAP_CACHE_KEY,
)
from lightning_plus.shield.models import Rule
from django.core.cache import cache
from lightning_plus.api_basebone.signals import post_bsm_create, post_bsm_delete
log = logging.getLogger(__name__)
HAS_SHIELD_MODEL = hasattr(settings, "SHIELD_MODEL")
@receiver(post_bsm_create, sender=Rule, dispatch_uid="clean_rule_cache_by_save")
def clean_rule_cache_by_save(
sender, instance: Rule, create, request, old_instance, **kwargs
):
log.debug("clean shield rules by saving")
if (not create) and (
(old_instance.model.pk != instance.model.pk)
if HAS_SHIELD_MODEL
else (old_instance.model != instance.model)
):
if HAS_SHIELD_MODEL:
model_name = old_instance.model.name.lower()
app_name = old_instance.model.app.name
else:
app_name, model_name = old_instance.model.split("__", 1)
cache_key = SHIELD_RULES_DICT_CACHE_KEY.format(
app_label=app_name, model_slug=model_name
)
cache.delete(cache_key)
if HAS_SHIELD_MODEL:
model_name = instance.model.name.lower()
app_name = instance.model.app.name
else:
app_name, model_name = instance.model.split("__", 1)
cache_key = SHIELD_RULES_DICT_CACHE_KEY.format(
app_label=app_name, model_slug=model_name
)
cache.delete(cache_key)
@receiver(post_bsm_delete, sender=Rule, dispatch_uid="clean_rule_cache_by_delete")
def clean_rule_cache_by_delete(sender, instance: Rule, **kwargs):
log.debug("clean shield rules by deleting")
if HAS_SHIELD_MODEL:
model_name = instance.model.name.lower()
app_name = instance.model.app.name
else:
app_name, model_name = instance.model.split("__", 1)
cache_key = SHIELD_RULES_DICT_CACHE_KEY.format(
app_label=app_name, model_slug=model_name
)
cache.delete(cache_key)
@receiver(
m2m_changed,
sender=get_user_model().groups.through,
dispatch_uid="user_group_changed",
)
def user_group_changed(sender, instance, model, pk_set, action, **kwargs):
if action in ("post_add", "post_remove", "post_clear"):
cache_key = USER_GROUP_MAP_CACHE_KEY
cache.delete(cache_key)
| 32.723684 | 82 | 0.721753 |
4a217e11aaf4bc7ef59feccee5a39fde2ab72035 | 338 | py | Python | python/fact(7).py | adidom/hacktoberfest2020 | 63480c7295814f444a096fb25db1daf5c7724df5 | ["MIT"] | 11 | 2020-10-14T05:43:12.000Z | 2021-10-02T02:29:52.000Z | python/fact(7).py | adidom/hacktoberfest2020 | 63480c7295814f444a096fb25db1daf5c7724df5 | ["MIT"] | 10 | 2020-10-14T15:15:41.000Z | 2020-10-31T17:13:33.000Z | python/fact(7).py | adidom/hacktoberfest2020 | 63480c7295814f444a096fb25db1daf5c7724df5 | ["MIT"] | 91 | 2020-10-13T15:38:28.000Z | 2021-10-02T02:29:55.000Z |
# Factorial of a number using recursion
def recur_factorial(n):
if n == 1 or n==0:
return 1
else:
return n*recur_factorial(n-1)
num = 7
# check if the number is negative
if num < 0:
print("Sorry, factorial does not exist for negative numbers")
else:
print("The factorial of", num, "is", recur_factorial(num))
| 21.125 | 64 | 0.66568 |
4a217e5dec26eee98d0524a9913ef651a68b8ada | 76,159 | py | Python | python/ccxt/async_support/bitpanda.py | xCuri0/ccxt | bf3d10e0d3c1defb9aba9e81caa57cad0187fa5c | ["MIT"] | 2 | 2019-05-30T21:49:40.000Z | 2019-05-31T13:12:23.000Z | python/ccxt/async_support/bitpanda.py | xCuri0/ccxt | bf3d10e0d3c1defb9aba9e81caa57cad0187fa5c | ["MIT"] | 1 | 2021-01-27T12:10:31.000Z | 2021-01-27T12:11:16.000Z | python/ccxt/async_support/bitpanda.py | xCuri0/ccxt | bf3d10e0d3c1defb9aba9e81caa57cad0187fa5c | ["MIT"] | 1 | 2020-09-11T18:44:37.000Z | 2020-09-11T18:44:37.000Z |
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.base.exchange import Exchange
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import BadRequest
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidAddress
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import DDoSProtection
from ccxt.base.errors import ExchangeNotAvailable
class bitpanda(Exchange):
def describe(self):
return self.deep_extend(super(bitpanda, self).describe(), {
'id': 'bitpanda',
'name': 'Bitpanda Pro',
'countries': ['AT'], # Austria
'rateLimit': 300,
'version': 'v1',
# new metainfo interface
'has': {
'CORS': False,
'publicAPI': True,
'privateAPI': True,
'cancelAllOrders': True,
'cancelOrder': True,
'cancelOrders': True,
'createDepositAddress': True,
'createOrder': True,
'fetchBalance': True,
'fetchClosedOrders': True,
'fetchCurrencies': True,
'fetchDeposits': True,
'fetchDepositAddress': True,
'fetchMarkets': True,
'fetchMyTrades': True,
'fetchOHLCV': True,
'fetchOpenOrders': True,
'fetchOrder': True,
'fetchOrderBook': True,
'fetchOrderTrades': True,
'fetchTime': True,
'fetchTrades': True,
'fetchTradingFees': True,
'fetchTicker': True,
'fetchTickers': True,
'fetchWithdrawals': True,
'withdraw': True,
},
'timeframes': {
'1m': '1/MINUTES',
'5m': '5/MINUTES',
'15m': '15/MINUTES',
'30m': '30/MINUTES',
'1h': '1/HOURS',
'4h': '4/HOURS',
'1d': '1/DAYS',
'1w': '1/WEEKS',
'1M': '1/MONTHS',
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/51840849/87591171-9a377d80-c6f0-11ea-94ac-97a126eac3bc.jpg',
'api': {
'public': 'https://api.exchange.bitpanda.com/public',
'private': 'https://api.exchange.bitpanda.com/public',
},
'www': 'https://www.bitpanda.com/en/pro',
'doc': [
'https://developers.bitpanda.com/exchange/',
],
'fees': 'https://www.bitpanda.com/en/pro/fees',
},
'api': {
'public': {
'get': [
'currencies',
'candlesticks/{instrument_code}',
'fees',
'instruments',
'order-book/{instrument_code}',
'market-ticker',
'market-ticker/{instrument_code}',
'price-ticks/{instrument_code}',
'time',
],
},
'private': {
'get': [
'account/balances',
'account/deposit/crypto/{currency_code}',
'account/deposit/fiat/EUR',
'account/deposits',
'account/deposits/bitpanda',
'account/withdrawals',
'account/withdrawals/bitpanda',
'account/fees',
'account/orders',
'account/orders/{order_id}',
'account/orders/{order_id}/trades',
'account/trades',
'account/trades/{trade_id}',
'account/trading-volume',
],
'post': [
'account/deposit/crypto',
'account/withdraw/crypto',
'account/withdraw/fiat',
'account/fees',
'account/orders',
],
'delete': [
'account/orders',
'account/orders/{order_id}',
'account/orders/client/{client_id}',
],
},
},
'fees': {
'trading': {
'tierBased': True,
'percentage': True,
'taker': 0.15 / 100,
'maker': 0.10 / 100,
'tiers': [
# volume in BTC
{
'taker': [
[0, 0.15 / 100],
[100, 0.13 / 100],
[250, 0.13 / 100],
[1000, 0.1 / 100],
[5000, 0.09 / 100],
[10000, 0.075 / 100],
[20000, 0.065 / 100],
],
'maker': [
[0, 0.1 / 100],
[100, 0.1 / 100],
[250, 0.09 / 100],
[1000, 0.075 / 100],
[5000, 0.06 / 100],
[10000, 0.05 / 100],
[20000, 0.05 / 100],
],
},
],
},
},
'requiredCredentials': {
'apiKey': True,
'secret': False,
},
'exceptions': {
'exact': {
'INVALID_CLIENT_UUID': InvalidOrder,
'ORDER_NOT_FOUND': OrderNotFound,
'ONLY_ONE_ERC20_ADDRESS_ALLOWED': InvalidAddress,
'DEPOSIT_ADDRESS_NOT_USED': InvalidAddress,
'INVALID_CREDENTIALS': AuthenticationError,
'MISSING_CREDENTIALS': AuthenticationError,
'INVALID_APIKEY': AuthenticationError,
'INVALID_SCOPES': AuthenticationError,
'INVALID_SUBJECT': AuthenticationError,
'INVALID_ISSUER': AuthenticationError,
'INVALID_AUDIENCE': AuthenticationError,
'INVALID_DEVICE_ID': AuthenticationError,
'INVALID_IP_RESTRICTION': AuthenticationError,
'APIKEY_REVOKED': AuthenticationError,
'APIKEY_EXPIRED': AuthenticationError,
'SYNCHRONIZER_TOKEN_MISMATCH': AuthenticationError,
'SESSION_EXPIRED': AuthenticationError,
'INTERNAL_ERROR': AuthenticationError,
'CLIENT_IP_BLOCKED': PermissionDenied,
'MISSING_PERMISSION': PermissionDenied,
'ILLEGAL_CHARS': BadRequest,
'UNSUPPORTED_MEDIA_TYPE': BadRequest,
'ACCOUNT_HISTORY_TIME_RANGE_TOO_BIG': BadRequest,
'CANDLESTICKS_TIME_RANGE_TOO_BIG': BadRequest,
'INVALID_INSTRUMENT_CODE': BadRequest,
'INVALID_ORDER_TYPE': BadRequest,
'INVALID_UNIT': BadRequest,
'INVALID_PERIOD': BadRequest,
'INVALID_TIME': BadRequest,
'INVALID_DATE': BadRequest,
'INVALID_CURRENCY': BadRequest,
'INVALID_AMOUNT': BadRequest,
'INVALID_PRICE': BadRequest,
'INVALID_LIMIT': BadRequest,
'INVALID_QUERY': BadRequest,
'INVALID_CURSOR': BadRequest,
'INVALID_ACCOUNT_ID': BadRequest,
'INVALID_SIDE': InvalidOrder,
'INVALID_ACCOUNT_HISTORY_FROM_TIME': BadRequest,
'INVALID_ACCOUNT_HISTORY_MAX_PAGE_SIZE': BadRequest,
'INVALID_ACCOUNT_HISTORY_TIME_PERIOD': BadRequest,
'INVALID_ACCOUNT_HISTORY_TO_TIME': BadRequest,
'INVALID_CANDLESTICKS_GRANULARITY': BadRequest,
'INVALID_CANDLESTICKS_UNIT': BadRequest,
'INVALID_ORDER_BOOK_DEPTH': BadRequest,
'INVALID_ORDER_BOOK_LEVEL': BadRequest,
'INVALID_PAGE_CURSOR': BadRequest,
'INVALID_TIME_RANGE': BadRequest,
'INVALID_TRADE_ID': BadRequest,
'INVALID_UI_ACCOUNT_SETTINGS': BadRequest,
'NEGATIVE_AMOUNT': InvalidOrder,
'NEGATIVE_PRICE': InvalidOrder,
'MIN_SIZE_NOT_SATISFIED': InvalidOrder,
'BAD_AMOUNT_PRECISION': InvalidOrder,
'BAD_PRICE_PRECISION': InvalidOrder,
'BAD_TRIGGER_PRICE_PRECISION': InvalidOrder,
'MAX_OPEN_ORDERS_EXCEEDED': BadRequest,
'MISSING_PRICE': InvalidOrder,
'MISSING_ORDER_TYPE': InvalidOrder,
'MISSING_SIDE': InvalidOrder,
'MISSING_CANDLESTICKS_PERIOD_PARAM': ArgumentsRequired,
'MISSING_CANDLESTICKS_UNIT_PARAM': ArgumentsRequired,
'MISSING_FROM_PARAM': ArgumentsRequired,
'MISSING_INSTRUMENT_CODE': ArgumentsRequired,
'MISSING_ORDER_ID': InvalidOrder,
'MISSING_TO_PARAM': ArgumentsRequired,
'MISSING_TRADE_ID': ArgumentsRequired,
'INVALID_ORDER_ID': OrderNotFound,
'NOT_FOUND': OrderNotFound,
'INSUFFICIENT_LIQUIDITY': InsufficientFunds,
'INSUFFICIENT_FUNDS': InsufficientFunds,
'NO_TRADING': ExchangeNotAvailable,
'SERVICE_UNAVAILABLE': ExchangeNotAvailable,
'GATEWAY_TIMEOUT': ExchangeNotAvailable,
'RATELIMIT': DDoSProtection,
'CF_RATELIMIT': DDoSProtection,
'INTERNAL_SERVER_ERROR': ExchangeError,
},
'broad': {
},
},
'commonCurrencies': {
'MIOTA': 'IOTA', # https://github.com/ccxt/ccxt/issues/7487
},
# exchange-specific options
'options': {
'fetchTradingFees': {
'method': 'fetchPrivateTradingFees', # or 'fetchPublicTradingFees'
},
'fiat': ['EUR', 'CHF'],
},
})
async def fetch_time(self, params={}):
response = await self.publicGetTime(params)
#
# {
# iso: '2020-07-10T05:17:26.716Z',
# epoch_millis: 1594358246716,
# }
#
return self.safe_integer(response, 'epoch_millis')
async def fetch_currencies(self, params={}):
response = await self.publicGetCurrencies(params)
#
# [
# {
# "code":"BEST",
# "precision":8
# }
# ]
#
result = {}
for i in range(0, len(response)):
currency = response[i]
id = self.safe_string(currency, 'code')
code = self.safe_currency_code(id)
result[code] = {
'id': id,
'code': code,
'name': None,
'info': currency, # the original payload
'active': None,
'fee': None,
'precision': self.safe_integer(currency, 'precision'),
'limits': {
'amount': {'min': None, 'max': None},
'price': {'min': None, 'max': None},
'cost': {'min': None, 'max': None},
'withdraw': {'min': None, 'max': None},
},
}
return result
async def fetch_markets(self, params={}):
response = await self.publicGetInstruments(params)
#
# [
# {
# state: 'ACTIVE',
# base: {code: 'ETH', precision: 8},
# quote: {code: 'CHF', precision: 2},
# amount_precision: 4,
# market_precision: 2,
# min_size: '10.0'
# }
# ]
#
result = []
for i in range(0, len(response)):
market = response[i]
baseAsset = self.safe_value(market, 'base', {})
quoteAsset = self.safe_value(market, 'quote', {})
baseId = self.safe_string(baseAsset, 'code')
quoteId = self.safe_string(quoteAsset, 'code')
id = baseId + '_' + quoteId
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
precision = {
'amount': self.safe_integer(market, 'amount_precision'),
'price': self.safe_integer(market, 'market_precision'),
}
limits = {
'amount': {
'min': None,
'max': None,
},
'price': {
'min': None,
'max': None,
},
'cost': {
'min': self.safe_float(market, 'min_size'),
'max': None,
},
}
state = self.safe_string(market, 'state')
active = (state == 'ACTIVE')
result.append({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'precision': precision,
'limits': limits,
'info': market,
'active': active,
})
return result
async def fetch_trading_fees(self, params={}):
method = self.safe_string(params, 'method')
params = self.omit(params, 'method')
if method is None:
options = self.safe_value(self.options, 'fetchTradingFees', {})
method = self.safe_string(options, 'method', 'fetchPrivateTradingFees')
return await getattr(self, method)(params)
async def fetch_public_trading_fees(self, params={}):
await self.load_markets()
response = await self.publicGetFees(params)
#
# [
# {
# "fee_group_id":"default",
# "display_text":"The standard fee plan.",
# "fee_tiers":[
# {"volume":"0.0","fee_group_id":"default","maker_fee":"0.1","taker_fee":"0.15"},
# {"volume":"100.0","fee_group_id":"default","maker_fee":"0.1","taker_fee":"0.13"},
# {"volume":"250.0","fee_group_id":"default","maker_fee":"0.09","taker_fee":"0.13"},
# {"volume":"1000.0","fee_group_id":"default","maker_fee":"0.075","taker_fee":"0.1"},
# {"volume":"5000.0","fee_group_id":"default","maker_fee":"0.06","taker_fee":"0.09"},
# {"volume":"10000.0","fee_group_id":"default","maker_fee":"0.05","taker_fee":"0.075"},
# {"volume":"20000.0","fee_group_id":"default","maker_fee":"0.05","taker_fee":"0.065"}
# ],
# "fee_discount_rate":"25.0",
# "minimum_price_value":"0.12"
# }
# ]
#
feeGroupsById = self.index_by(response, 'fee_group_id')
feeGroupId = self.safe_value(self.options, 'fee_group_id', 'default')
feeGroup = self.safe_value(feeGroupsById, feeGroupId, {})
feeTiers = self.safe_value(feeGroup, 'fee_tiers')
result = {}
for i in range(0, len(self.symbols)):
symbol = self.symbols[i]
fee = {
'info': feeGroup,
'symbol': symbol,
'maker': None,
'taker': None,
'percentage': True,
'tierBased': True,
}
takerFees = []
makerFees = []
for i in range(0, len(feeTiers)):
tier = feeTiers[i]
volume = self.safe_float(tier, 'volume')
taker = self.safe_float(tier, 'taker_fee')
maker = self.safe_float(tier, 'maker_fee')
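                # fee tiers are quoted in percent (e.g. "0.1" means 0.1%), convert to fractions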
taker /= 100
maker /= 100
takerFees.append([volume, taker])
makerFees.append([volume, maker])
if i == 0:
fee['taker'] = taker
fee['maker'] = maker
tiers = {
'taker': takerFees,
'maker': makerFees,
}
fee['tiers'] = tiers
result[symbol] = fee
return result
async def fetch_private_trading_fees(self, params={}):
await self.load_markets()
response = await self.privateGetAccountFees(params)
#
# {
# "account_id": "ed524d00-820a-11e9-8f1e-69602df16d85",
# "running_trading_volume": "0.0",
# "fee_group_id": "default",
# "collect_fees_in_best": False,
# "fee_discount_rate": "25.0",
# "minimum_price_value": "0.12",
# "fee_tiers": [
# {"volume": "0.0", "fee_group_id": "default", "maker_fee": "0.1", "taker_fee": "0.1"},
# {"volume": "100.0", "fee_group_id": "default", "maker_fee": "0.09", "taker_fee": "0.1"},
# {"volume": "250.0", "fee_group_id": "default", "maker_fee": "0.08", "taker_fee": "0.1"},
# {"volume": "1000.0", "fee_group_id": "default", "maker_fee": "0.07", "taker_fee": "0.09"},
# {"volume": "5000.0", "fee_group_id": "default", "maker_fee": "0.06", "taker_fee": "0.08"},
# {"volume": "10000.0", "fee_group_id": "default", "maker_fee": "0.05", "taker_fee": "0.07"},
# {"volume": "20000.0", "fee_group_id": "default", "maker_fee": "0.05", "taker_fee": "0.06"},
# {"volume": "50000.0", "fee_group_id": "default", "maker_fee": "0.05", "taker_fee": "0.05"}
# ],
# "active_fee_tier": {"volume": "0.0", "fee_group_id": "default", "maker_fee": "0.1", "taker_fee": "0.1"}
# }
#
activeFeeTier = self.safe_value(response, 'active_fee_tier', {})
result = {
'info': response,
'maker': self.safe_float(activeFeeTier, 'maker_fee'),
'taker': self.safe_float(activeFeeTier, 'taker_fee'),
'percentage': True,
'tierBased': True,
}
feeTiers = self.safe_value(response, 'fee_tiers')
takerFees = []
makerFees = []
for i in range(0, len(feeTiers)):
tier = feeTiers[i]
volume = self.safe_float(tier, 'volume')
taker = self.safe_float(tier, 'taker_fee')
maker = self.safe_float(tier, 'maker_fee')
taker /= 100
maker /= 100
takerFees.append([volume, taker])
makerFees.append([volume, maker])
tiers = {
'taker': takerFees,
'maker': makerFees,
}
result['tiers'] = tiers
return result
def parse_ticker(self, ticker, market=None):
#
# fetchTicker, fetchTickers
#
# {
# "instrument_code":"BTC_EUR",
# "sequence":602562,
# "time":"2020-07-10T06:27:34.951Z",
# "state":"ACTIVE",
# "is_frozen":0,
# "quote_volume":"1695555.1783768",
# "base_volume":"205.67436",
# "last_price":"8143.91",
# "best_bid":"8143.71",
# "best_ask":"8156.9",
# "price_change":"-147.47",
# "price_change_percentage":"-1.78",
# "high":"8337.45",
# "low":"8110.0"
# }
#
timestamp = self.parse8601(self.safe_string(ticker, 'time'))
marketId = self.safe_string(ticker, 'instrument_code')
symbol = self.safe_symbol(marketId, market, '_')
last = self.safe_float(ticker, 'last_price')
percentage = self.safe_float(ticker, 'price_change_percentage')
change = self.safe_float(ticker, 'price_change')
open = None
average = None
if (last is not None) and (change is not None):
open = last - change
average = self.sum(last, open) / 2
baseVolume = self.safe_float(ticker, 'base_volume')
quoteVolume = self.safe_float(ticker, 'quote_volume')
vwap = self.vwap(baseVolume, quoteVolume)
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_float(ticker, 'high'),
'low': self.safe_float(ticker, 'low'),
'bid': self.safe_float(ticker, 'best_bid'),
'bidVolume': None,
'ask': self.safe_float(ticker, 'best_ask'),
'askVolume': None,
'vwap': vwap,
'open': open,
'close': last,
'last': last,
'previousClose': None,
'change': change,
'percentage': percentage,
'average': average,
'baseVolume': baseVolume,
'quoteVolume': quoteVolume,
'info': ticker,
}
async def fetch_ticker(self, symbol, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'instrument_code': market['id'],
}
response = await self.publicGetMarketTickerInstrumentCode(self.extend(request, params))
#
# {
# "instrument_code":"BTC_EUR",
# "sequence":602562,
# "time":"2020-07-10T06:27:34.951Z",
# "state":"ACTIVE",
# "is_frozen":0,
# "quote_volume":"1695555.1783768",
# "base_volume":"205.67436",
# "last_price":"8143.91",
# "best_bid":"8143.71",
# "best_ask":"8156.9",
# "price_change":"-147.47",
# "price_change_percentage":"-1.78",
# "high":"8337.45",
# "low":"8110.0"
# }
#
return self.parse_ticker(response, market)
async def fetch_tickers(self, symbols=None, params={}):
await self.load_markets()
response = await self.publicGetMarketTicker(params)
#
# [
# {
# "instrument_code":"BTC_EUR",
# "sequence":602562,
# "time":"2020-07-10T06:27:34.951Z",
# "state":"ACTIVE",
# "is_frozen":0,
# "quote_volume":"1695555.1783768",
# "base_volume":"205.67436",
# "last_price":"8143.91",
# "best_bid":"8143.71",
# "best_ask":"8156.9",
# "price_change":"-147.47",
# "price_change_percentage":"-1.78",
# "high":"8337.45",
# "low":"8110.0"
# }
# ]
#
result = {}
for i in range(0, len(response)):
ticker = self.parse_ticker(response[i])
symbol = ticker['symbol']
result[symbol] = ticker
return self.filter_by_array(result, 'symbol', symbols)
async def fetch_order_book(self, symbol, limit=None, params={}):
await self.load_markets()
request = {
'instrument_code': self.market_id(symbol),
# level 1 means only the best bid and ask
# level 2 is a compiled order book up to market precision
# level 3 is a full orderbook
# if you wish to get regular updates about orderbooks please use the Websocket channel
# heavy usage of self endpoint may result in limited access according to rate limits rules
# 'level': 3, # default
}
if limit is not None:
request['depth'] = limit
response = await self.publicGetOrderBookInstrumentCode(self.extend(request, params))
#
# level 1
#
# {
# "instrument_code":"BTC_EUR",
# "time":"2020-07-10T07:39:06.343Z",
# "asks":{
# "value":{
# "price":"8145.29",
# "amount":"0.96538",
# "number_of_orders":1
# }
# },
# "bids":{
# "value":{
# "price":"8134.0",
# "amount":"1.5978",
# "number_of_orders":5
# }
# }
# }
#
# level 2
#
# {
# "instrument_code":"BTC_EUR","time":"2020-07-10T07:36:43.538Z",
# "asks":[
# {"price":"8146.59","amount":"0.89691","number_of_orders":1},
# {"price":"8146.89","amount":"1.92062","number_of_orders":1},
# {"price":"8169.5","amount":"0.0663","number_of_orders":1},
# ],
# "bids":[
# {"price":"8143.49","amount":"0.01329","number_of_orders":1},
# {"price":"8137.01","amount":"5.34748","number_of_orders":1},
# {"price":"8137.0","amount":"2.0","number_of_orders":1},
# ]
# }
#
# level 3
#
# {
# "instrument_code":"BTC_EUR",
# "time":"2020-07-10T07:32:31.525Z",
# "bids":[
# {"price":"8146.79","amount":"0.01537","order_id":"5d717da1-a8f4-422d-afcc-03cb6ab66825"},
# {"price":"8139.32","amount":"3.66009","order_id":"d0715c68-f28d-4cf1-a450-d56cf650e11c"},
# {"price":"8137.51","amount":"2.61049","order_id":"085fd6f4-e835-4ca5-9449-a8f165772e60"},
# ],
# "asks":[
# {"price":"8153.49","amount":"0.93384","order_id":"755d3aa3-42b5-46fa-903d-98f42e9ae6c4"},
# {"price":"8153.79","amount":"1.80456","order_id":"62034cf3-b70d-45ff-b285-ba6307941e7c"},
# {"price":"8167.9","amount":"0.0018","order_id":"036354e0-71cd-492f-94f2-01f7d4b66422"},
# ]
# }
#
timestamp = self.parse8601(self.safe_string(response, 'time'))
return self.parse_order_book(response, timestamp, 'bids', 'asks', 'price', 'amount')
def parse_ohlcv(self, ohlcv, market=None):
#
# {
# "instrument_code":"BTC_EUR",
# "granularity":{"unit":"HOURS","period":1},
# "high":"9252.65",
# "low":"9115.27",
# "open":"9250.0",
# "close":"9132.35",
# "total_amount":"33.85924",
# "volume":"311958.9635744",
# "time":"2020-05-08T22:59:59.999Z",
# "last_sequence":461123
# }
#
granularity = self.safe_value(ohlcv, 'granularity')
unit = self.safe_string(granularity, 'unit')
period = self.safe_string(granularity, 'period')
units = {
'MINUTES': 'm',
'HOURS': 'h',
'DAYS': 'd',
'WEEKS': 'w',
'MONTHS': 'M',
}
lowercaseUnit = self.safe_string(units, unit)
timeframe = period + lowercaseUnit
durationInSeconds = self.parse_timeframe(timeframe)
duration = durationInSeconds * 1000
timestamp = self.parse8601(self.safe_string(ohlcv, 'time'))
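        # the reported time marks the end of the candle, so floor it to the start of the interval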
alignedTimestamp = duration * int(timestamp / duration)
options = self.safe_value(self.options, 'fetchOHLCV', {})
volumeField = self.safe_string(options, 'volume', 'total_amount')
return [
alignedTimestamp,
self.safe_float(ohlcv, 'open'),
self.safe_float(ohlcv, 'high'),
self.safe_float(ohlcv, 'low'),
self.safe_float(ohlcv, 'close'),
self.safe_float(ohlcv, volumeField),
]
async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
periodUnit = self.safe_string(self.timeframes, timeframe)
period, unit = periodUnit.split('/')
durationInSeconds = self.parse_timeframe(timeframe)
duration = durationInSeconds * 1000
if limit is None:
limit = 1500
request = {
'instrument_code': market['id'],
# 'from': self.iso8601(since),
# 'to': self.iso8601(self.milliseconds()),
'period': period,
'unit': unit,
}
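        # the endpoint needs an explicit time window, so derive from/to out of since and limit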
if since is None:
now = self.milliseconds()
request['to'] = self.iso8601(now)
request['from'] = self.iso8601(now - limit * duration)
else:
request['from'] = self.iso8601(since)
request['to'] = self.iso8601(self.sum(since, limit * duration))
response = await self.publicGetCandlesticksInstrumentCode(self.extend(request, params))
#
# [
# {"instrument_code":"BTC_EUR","granularity":{"unit":"HOURS","period":1},"high":"9252.65","low":"9115.27","open":"9250.0","close":"9132.35","total_amount":"33.85924","volume":"311958.9635744","time":"2020-05-08T22:59:59.999Z","last_sequence":461123},
# {"instrument_code":"BTC_EUR","granularity":{"unit":"HOURS","period":1},"high":"9162.49","low":"9040.0","open":"9132.53","close":"9083.69","total_amount":"26.19685","volume":"238553.7812365","time":"2020-05-08T23:59:59.999Z","last_sequence":461376},
# {"instrument_code":"BTC_EUR","granularity":{"unit":"HOURS","period":1},"high":"9135.7","low":"9002.59","open":"9055.45","close":"9133.98","total_amount":"26.21919","volume":"238278.8724959","time":"2020-05-09T00:59:59.999Z","last_sequence":461521},
# ]
#
return self.parse_ohlcvs(response, market, timeframe, since, limit)
def parse_trade(self, trade, market=None):
#
# fetchTrades(public)
#
# {
# "instrument_code":"BTC_EUR",
# "price":"8137.28",
# "amount":"0.22269",
# "taker_side":"BUY",
# "volume":"1812.0908832",
# "time":"2020-07-10T14:44:32.299Z",
# "trade_timestamp":1594392272299,
# "sequence":603047
# }
#
# fetchMyTrades, fetchOrder, fetchOpenOrders, fetchClosedOrders trades(private)
#
# {
# "fee": {
# "fee_amount": "0.0014",
# "fee_currency": "BTC",
# "fee_percentage": "0.1",
# "fee_group_id": "default",
# "fee_type": "TAKER",
# "running_trading_volume": "0.0"
# },
# "trade": {
# "trade_id": "fdff2bcc-37d6-4a2d-92a5-46e09c868664",
# "order_id": "36bb2437-7402-4794-bf26-4bdf03526439",
# "account_id": "a4c699f6-338d-4a26-941f-8f9853bfc4b9",
# "amount": "1.4",
# "side": "BUY",
# "instrument_code": "BTC_EUR",
# "price": "7341.4",
# "time": "2019-09-27T15:05:32.564Z",
# "sequence": 48670
# }
# }
#
feeInfo = self.safe_value(trade, 'fee', {})
trade = self.safe_value(trade, 'trade', trade)
timestamp = self.safe_integer(trade, 'trade_timestamp')
if timestamp is None:
timestamp = self.parse8601(self.safe_string(trade, 'time'))
side = self.safe_string_lower_2(trade, 'side', 'taker_side')
price = self.safe_float(trade, 'price')
amount = self.safe_float(trade, 'amount')
cost = self.safe_float(trade, 'volume')
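        # public price ticks include a precomputed cost ('volume'), private trades do not,
        # so fall back to price * amount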
if (cost is None) and (amount is not None) and (price is not None):
cost = amount * price
marketId = self.safe_string(trade, 'instrument_code')
symbol = self.safe_symbol(marketId, market, '_')
feeCost = self.safe_float(feeInfo, 'fee_amount')
takerOrMaker = None
fee = None
if feeCost is not None:
feeCurrencyId = self.safe_string(feeInfo, 'fee_currency')
feeCurrencyCode = self.safe_currency_code(feeCurrencyId)
feeRate = self.safe_float(feeInfo, 'fee_percentage')
fee = {
'cost': feeCost,
'currency': feeCurrencyCode,
'rate': feeRate,
}
takerOrMaker = self.safe_string_lower(feeInfo, 'fee_type')
return {
'id': self.safe_string_2(trade, 'trade_id', 'sequence'),
'order': self.safe_string(trade, 'order_id'),
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'type': None,
'side': side,
'price': price,
'amount': amount,
'cost': cost,
'takerOrMaker': takerOrMaker,
'fee': fee,
'info': trade,
}
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'instrument_code': market['id'],
# 'from': self.iso8601(since),
# 'to': self.iso8601(self.milliseconds()),
}
if since is not None:
# returns price ticks for a specific market with an interval of maximum of 4 hours
# sorted by latest first
request['from'] = self.iso8601(since)
request['to'] = self.iso8601(self.sum(since, 14400000))
response = await self.publicGetPriceTicksInstrumentCode(self.extend(request, params))
#
# [
# {
# "instrument_code":"BTC_EUR",
# "price":"8137.28",
# "amount":"0.22269",
# "taker_side":"BUY",
# "volume":"1812.0908832",
# "time":"2020-07-10T14:44:32.299Z",
# "trade_timestamp":1594392272299,
# "sequence":603047
# }
# ]
#
return self.parse_trades(response, market, since, limit)
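    # Editor's note (added comments; hedged sketch, not part of the original
    # source): the endpoint only returns up to a 4 hour window sorted latest
    # first, so a longer history has to be stitched together by advancing
    # `since` in 4 hour steps, roughly:
    #
    #   trades = []
    #   while since < end:
    #       trades += await exchange.fetch_trades(symbol, since)
    #       since += 14400000  # 4 hours in milliseconds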
async def fetch_balance(self, params={}):
await self.load_markets()
response = await self.privateGetAccountBalances(params)
#
# {
# "account_id":"4b95934f-55f1-460c-a525-bd5afc0cf071",
# "balances":[
# {
# "account_id":"4b95934f-55f1-460c-a525-bd5afc0cf071",
# "currency_code":"BTC",
# "change":"10.0",
# "available":"10.0",
# "locked":"0.0",
# "sequence":142135994,
# "time":"2020-07-01T10:57:32.959Z"
# }
# ]
# }
#
balances = self.safe_value(response, 'balances', [])
result = {'info': response}
for i in range(0, len(balances)):
balance = balances[i]
currencyId = self.safe_string(balance, 'currency_code')
code = self.safe_currency_code(currencyId)
account = self.account()
account['free'] = self.safe_float(balance, 'available')
account['used'] = self.safe_float(balance, 'locked')
result[code] = account
return self.parse_balance(result)
def parse_deposit_address(self, depositAddress, currency=None):
code = None
if currency is not None:
code = currency['code']
address = self.safe_string(depositAddress, 'address')
tag = self.safe_string(depositAddress, 'destination_tag')
self.check_address(address)
return {
'currency': code,
'address': address,
'tag': tag,
'info': depositAddress,
}
async def create_deposit_address(self, code, params={}):
await self.load_markets()
currency = self.currency(code)
request = {
'currency': currency['id'],
}
response = await self.privatePostAccountDepositCrypto(self.extend(request, params))
#
# {
# "address":"rBnNhk95FrdNisZtXcStzriFS8vEzz53DM",
# "destination_tag":"865690307",
# "enabled":true,
# "is_smart_contract":false
# }
#
return self.parse_deposit_address(response, currency)
async def fetch_deposit_address(self, code, params={}):
await self.load_markets()
currency = self.currency(code)
request = {
'currency_code': currency['id'],
}
response = await self.privateGetAccountDepositCryptoCurrencyCode(self.extend(request, params))
#
# {
# "address":"rBnNhk95FrdNisZtXcStzriFS8vEzz53DM",
# "destination_tag":"865690307",
# "enabled":true,
# "is_smart_contract":false,
# "can_create_more":false
# }
#
return self.parse_deposit_address(response, currency)
async def fetch_deposits(self, code=None, since=None, limit=None, params={}):
await self.load_markets()
request = {
# 'cursor': 'string', # pointer specifying the position from which the next pages should be returned
}
currency = None
if code is not None:
currency = self.currency(code)
request['currency_code'] = currency['id']
if limit is not None:
request['max_page_size'] = limit
if since is not None:
to = self.safe_string(params, 'to')
if to is None:
                raise ArgumentsRequired(self.id + ' fetchDeposits requires a "to" iso8601 string param when the since argument is specified')
request['from'] = self.iso8601(since)
response = await self.privateGetAccountDeposits(self.extend(request, params))
#
# {
# "deposit_history": [
# {
# "transaction_id": "e5342efcd-d5b7-4a56-8e12-b69ffd68c5ef",
# "account_id": "c2d0076a-c20d-41f8-9e9a-1a1d028b2b58",
# "amount": "100",
# "type": "CRYPTO",
# "funds_source": "INTERNAL",
# "time": "2020-04-22T09:57:47Z",
# "currency": "BTC",
# "fee_amount": "0.0",
# "fee_currency": "BTC"
# },
# {
# "transaction_id": "79793d00-2899-4a4d-95b7-73ae6b31384f",
# "account_id": "c2d0076a-c20d-41f8-9e9a-1a1d028b2b58",
# "time": "2020-05-05T11:22:07.925Z",
# "currency": "EUR",
# "funds_source": "EXTERNAL",
# "type": "FIAT",
# "amount": "50.0",
# "fee_amount": "0.01",
# "fee_currency": "EUR"
# }
# ],
# "max_page_size": 2,
# "cursor": "eyJhY2NvdW50X2lkIjp7InMiOiJlMzY5YWM4MC00NTc3LTExZTktYWUwOC05YmVkYzQ3OTBiODQiLCJzcyI6W10sIm5zIjpbXSwiYnMiOltdLCJtIjp7fSwibCI6W119LCJpdGVtX2tleSI6eyJzIjoiV0lUSERSQVdBTDo6MmFlMjYwY2ItOTk3MC00YmNiLTgxNmEtZGY4MDVmY2VhZTY1Iiwic3MiOltdLCJucyI6W10sImJzIjpbXSwibSI6e30sImwiOltdfSwiZ2xvYmFsX3dpdGhkcmF3YWxfaW5kZXhfaGFzaF9rZXkiOnsicyI6ImUzNjlhYzgwLTQ1NzctMTFlOS1hZTA4LTliZWRjNDc5MGI4NCIsInNzIjpbXSwibnMiOltdLCJicyI6W10sIm0iOnt9LCJsIjpbXX0sInRpbWVzdGFtcCI6eyJuIjoiMTU4ODA1ODc2Nzk0OCIsInNzIjpbXSwibnMiOltdLCJicyI6W10sIm0iOnt9LCJsIjpbXX19"
# }
#
depositHistory = self.safe_value(response, 'deposit_history', [])
return self.parse_transactions(depositHistory, currency, since, limit, {'type': 'deposit'})
async def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):
await self.load_markets()
request = {
# 'cursor': 'string', # pointer specifying the position from which the next pages should be returned
}
currency = None
if code is not None:
currency = self.currency(code)
request['currency_code'] = currency['id']
if limit is not None:
request['max_page_size'] = limit
if since is not None:
to = self.safe_string(params, 'to')
if to is None:
                raise ArgumentsRequired(self.id + ' fetchWithdrawals requires a "to" iso8601 string param when the since argument is specified')
request['from'] = self.iso8601(since)
response = await self.privateGetAccountWithdrawals(self.extend(request, params))
#
# {
# "withdrawal_history": [
# {
# "account_id": "e369ac80-4577-11e9-ae08-9bedc4790b84",
# "amount": "0.1",
# "currency": "BTC",
# "fee_amount": "0.00002",
# "fee_currency": "BTC",
# "funds_source": "EXTERNAL",
# "related_transaction_id": "e298341a-3855-405e-bce3-92db368a3157",
# "time": "2020-05-05T11:11:32.110Z",
# "transaction_id": "6693ff40-bb10-4dcf-ada7-3b287727c882",
# "type": "CRYPTO"
# },
# {
# "account_id": "e369ac80-4577-11e9-ae08-9bedc4790b84",
# "amount": "0.1",
# "currency": "BTC",
# "fee_amount": "0.0",
# "fee_currency": "BTC",
# "funds_source": "INTERNAL",
# "time": "2020-05-05T10:29:53.464Z",
# "transaction_id": "ec9703b1-954b-4f76-adea-faac66eabc0b",
# "type": "CRYPTO"
# }
# ],
# "cursor": "eyJhY2NvdW50X2lkIjp7InMiOiJlMzY5YWM4MC00NTc3LTExZTktYWUwOC05YmVkYzQ3OTBiODQiLCJzcyI6W10sIm5zIjpbXSwiYnMiOltdLCJtIjp7fSwibCI6W119LCJpdGVtX2tleSI6eyJzIjoiV0lUSERSQVdBTDo6ZWM5NzAzYjEtOTU0Yi00Zjc2LWFkZWEtZmFhYzY2ZWFiYzBiIiwic3MiOltdLCJucyI6W10sImJzIjpbXSwibSI6e30sImwiOltdfSwiZ2xvYmFsX3dpdGhkcmF3YWxfaW5kZXhfaGFzaF9rZXkiOnsicyI6ImUzNjlhYzgwLTQ1NzctMTFlOS1hZTA4LTliZWRjNDc5MGI4NCIsInNzIjpbXSwibnMiOltdLCJicyI6W10sIm0iOnt9LCJsIjpbXX0sInRpbWVzdGFtcCI6eyJuIjoiMTU4ODY3NDU5MzQ2NCIsInNzIjpbXSwibnMiOltdLCJicyI6W10sIm0iOnt9LCJsIjpbXX19",
# "max_page_size": 2
# }
#
withdrawalHistory = self.safe_value(response, 'withdrawal_history', [])
return self.parse_transactions(withdrawalHistory, currency, since, limit, {'type': 'withdrawal'})
async def withdraw(self, code, amount, address, tag=None, params={}):
self.check_address(address)
await self.load_markets()
currency = self.currency(code)
request = {
'currency': code,
'amount': self.currency_to_precision(code, amount),
# 'payout_account_id': '66756a10-3e86-48f4-9678-b634c4b135b2', # fiat only
# 'recipient': { # crypto only
# 'address': address,
# # 'destination_tag': '',
# },
}
options = self.safe_value(self.options, 'fiat', [])
isFiat = self.in_array(code, options)
method = 'privatePostAccountWithdrawFiat' if isFiat else 'privatePostAccountWithdrawCrypto'
if isFiat:
payoutAccountId = self.safe_string(params, 'payout_account_id')
if payoutAccountId is None:
raise ArgumentsRequired(self.id + ' withdraw() requires a payout_account_id param for fiat ' + code + ' withdrawals')
else:
recipient = {'address': address}
if tag is not None:
recipient['destination_tag'] = tag
request['recipient'] = recipient
response = await getattr(self, method)(self.extend(request, params))
#
# crypto
#
# {
# "amount": "1234.5678",
# "fee": "1234.5678",
# "recipient": "3NacQ7rzZdhfyAtfJ5a11k8jFPdcMP2Bq7",
# "destination_tag": "",
# "transaction_id": "d0f8529f-f832-4e6a-9dc5-b8d5797badb2"
# }
#
# fiat
#
# {
# "transaction_id": "54236cd0-4413-11e9-93fb-5fea7e5b5df6"
# }
#
return self.parse_transaction(response, currency)
def parse_transaction(self, transaction, currency=None):
#
# fetchDeposits, fetchWithdrawals
#
# {
# "transaction_id": "C2b42efcd-d5b7-4a56-8e12-b69ffd68c5ef",
# "type": "FIAT",
# "account_id": "c2d0076a-c20d-41f8-9e9a-1a1d028b2b58",
# "amount": "1234.5678",
# "time": "2019-08-24T14:15:22Z",
# "funds_source": "INTERNAL",
# "currency": "BTC",
# "fee_amount": "1234.5678",
# "fee_currency": "BTC",
# "blockchain_transaction_id": "f4184fc596403b9d638783cf57adfe4c75c605f6356fbc91338530e9831e9e16",
# "related_transaction_id": "e298341a-3855-405e-bce3-92db368a3157"
# }
#
# withdraw
#
#
# crypto
#
# {
# "amount": "1234.5678",
# "fee": "1234.5678",
# "recipient": "3NacQ7rzZdhfyAtfJ5a11k8jFPdcMP2Bq7",
# "destination_tag": "",
# "transaction_id": "d0f8529f-f832-4e6a-9dc5-b8d5797badb2"
# }
#
# fiat
#
# {
# "transaction_id": "54236cd0-4413-11e9-93fb-5fea7e5b5df6"
# }
#
id = self.safe_string(transaction, 'transaction_id')
amount = self.safe_float(transaction, 'amount')
timestamp = self.parse8601(self.safe_string(transaction, 'time'))
currencyId = self.safe_string(transaction, 'currency')
currency = self.safe_currency(currencyId, currency)
status = 'ok' # the exchange returns cleared transactions only
feeCost = self.safe_float_2(transaction, 'fee_amount', 'fee')
fee = None
addressTo = self.safe_string(transaction, 'recipient')
tagTo = self.safe_string(transaction, 'destination_tag')
if feeCost is not None:
feeCurrencyId = self.safe_string(transaction, 'fee_currency', currencyId)
feeCurrencyCode = self.safe_currency_code(feeCurrencyId)
fee = {
'cost': feeCost,
'currency': feeCurrencyCode,
}
return {
'info': transaction,
'id': id,
'currency': currency['code'],
'amount': amount,
'address': addressTo,
'addressFrom': None,
'addressTo': addressTo,
'tag': tagTo,
'tagFrom': None,
'tagTo': tagTo,
'status': status,
'type': None,
'updated': None,
'txid': self.safe_string(transaction, 'blockchain_transaction_id'),
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'fee': fee,
}
def parse_order_status(self, status):
statuses = {
'FILLED': 'open',
'FILLED_FULLY': 'closed',
'FILLED_CLOSED': 'canceled',
'FILLED_REJECTED': 'rejected',
'OPEN': 'open',
'REJECTED': 'rejected',
'CLOSED': 'canceled',
'FAILED': 'failed',
'STOP_TRIGGERED': 'triggered',
}
return self.safe_string(statuses, status, status)
def parse_order(self, order, market=None):
#
# createOrder
#
# {
# "order_id": "d5492c24-2995-4c18-993a-5b8bf8fffc0d",
# "client_id": "d75fb03b-b599-49e9-b926-3f0b6d103206",
# "account_id": "a4c699f6-338d-4a26-941f-8f9853bfc4b9",
# "instrument_code": "BTC_EUR",
# "time": "2019-08-01T08:00:44.026Z",
# "side": "BUY",
# "price": "5000",
# "amount": "1",
# "filled_amount": "0.5",
# "type": "LIMIT",
# "time_in_force": "GOOD_TILL_CANCELLED"
# }
#
# fetchOrder, fetchOpenOrders, fetchClosedOrders
#
# {
# "order": {
# "order_id": "66756a10-3e86-48f4-9678-b634c4b135b2",
# "account_id": "1eb2ad5d-55f1-40b5-bc92-7dc05869e905",
# "instrument_code": "BTC_EUR",
# "amount": "1234.5678",
# "filled_amount": "1234.5678",
# "side": "BUY",
# "type": "LIMIT",
# "status": "OPEN",
# "sequence": 123456789,
# "price": "1234.5678",
# "average_price": "1234.5678",
# "reason": "INSUFFICIENT_FUNDS",
# "time": "2019-08-24T14:15:22Z",
# "time_in_force": "GOOD_TILL_CANCELLED",
# "time_last_updated": "2019-08-24T14:15:22Z",
# "expire_after": "2019-08-24T14:15:22Z",
# "is_post_only": False,
# "time_triggered": "2019-08-24T14:15:22Z",
# "trigger_price": "1234.5678"
# },
# "trades": [
# {
# "fee": {
# "fee_amount": "0.0014",
# "fee_currency": "BTC",
# "fee_percentage": "0.1",
# "fee_group_id": "default",
# "fee_type": "TAKER",
# "running_trading_volume": "0.0"
# },
# "trade": {
# "trade_id": "fdff2bcc-37d6-4a2d-92a5-46e09c868664",
# "order_id": "36bb2437-7402-4794-bf26-4bdf03526439",
# "account_id": "a4c699f6-338d-4a26-941f-8f9853bfc4b9",
# "amount": "1.4",
# "side": "BUY",
# "instrument_code": "BTC_EUR",
# "price": "7341.4",
# "time": "2019-09-27T15:05:32.564Z",
# "sequence": 48670
# }
# }
# ]
# }
#
rawTrades = self.safe_value(order, 'trades', [])
order = self.safe_value(order, 'order', order)
id = self.safe_string(order, 'order_id')
clientOrderId = self.safe_string(order, 'client_id')
timestamp = self.parse8601(self.safe_string(order, 'time'))
status = self.parse_order_status(self.safe_string(order, 'status'))
marketId = self.safe_string(order, 'instrument_code')
symbol = self.safe_symbol(marketId, market, '_')
price = self.safe_float(order, 'price')
amount = self.safe_float(order, 'amount')
cost = None
filled = self.safe_float(order, 'filled_amount')
remaining = None
if filled is not None:
if amount is not None:
remaining = max(0, amount - filled)
if status is None:
if remaining > 0:
status = 'open'
else:
status = 'closed'
side = self.safe_string_lower(order, 'side')
type = self.safe_string_lower(order, 'type')
trades = self.parse_trades(rawTrades, market, None, None)
fees = []
numTrades = len(trades)
lastTradeTimestamp = None
tradeCost = None
tradeAmount = None
if numTrades > 0:
lastTradeTimestamp = trades[0]['timestamp']
tradeCost = 0
tradeAmount = 0
for i in range(0, len(trades)):
trade = trades[i]
fees.append(trade['fee'])
lastTradeTimestamp = max(lastTradeTimestamp, trade['timestamp'])
tradeCost = self.sum(tradeCost, trade['cost'])
tradeAmount = self.sum(tradeAmount, trade['amount'])
average = self.safe_float(order, 'average_price')
if average is None:
if (tradeCost is not None) and (tradeAmount is not None) and (tradeAmount != 0):
average = tradeCost / tradeAmount
if cost is None:
if (average is not None) and (filled is not None):
cost = average * filled
timeInForce = self.parse_time_in_force(self.safe_string(order, 'time_in_force'))
stopPrice = self.safe_float(order, 'trigger_price')
postOnly = self.safe_value(order, 'is_post_only')
result = {
'id': id,
'clientOrderId': clientOrderId,
'info': order,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': lastTradeTimestamp,
'symbol': symbol,
'type': type,
'timeInForce': timeInForce,
'postOnly': postOnly,
'side': side,
'price': price,
'stopPrice': stopPrice,
'amount': amount,
'cost': cost,
'average': average,
'filled': filled,
'remaining': remaining,
'status': status,
# 'fee': None,
'trades': trades,
}
numFees = len(fees)
if numFees > 0:
if numFees == 1:
result['fee'] = fees[0]
else:
feesByCurrency = self.group_by(fees, 'currency')
feeCurrencies = list(feesByCurrency.keys())
numFeesByCurrency = len(feeCurrencies)
if numFeesByCurrency == 1:
feeCurrency = feeCurrencies[0]
feeArray = self.safe_value(feesByCurrency, feeCurrency)
feeCost = 0
for i in range(0, len(feeArray)):
feeCost = self.sum(feeCost, feeArray[i]['cost'])
result['fee'] = {
'cost': feeCost,
'currency': feeCurrency,
}
else:
result['fees'] = fees
else:
result['fee'] = None
return result
def parse_time_in_force(self, timeInForce):
timeInForces = {
'GOOD_TILL_CANCELLED': 'GTC',
'GOOD_TILL_TIME': 'GTT',
'IMMEDIATE_OR_CANCELLED': 'IOC',
'FILL_OR_KILL': 'FOK',
}
return self.safe_string(timeInForces, timeInForce, timeInForce)
async def create_order(self, symbol, type, side, amount, price=None, params={}):
await self.load_markets()
market = self.market(symbol)
uppercaseType = type.upper()
request = {
'instrument_code': market['id'],
'type': uppercaseType, # LIMIT, MARKET, STOP
            'side': side.upper(),  # 'BUY' or 'SELL'
'amount': self.amount_to_precision(symbol, amount),
# "price": "1234.5678", # required for LIMIT and STOP orders
# "client_id": "d75fb03b-b599-49e9-b926-3f0b6d103206", # optional
# "time_in_force": "GOOD_TILL_CANCELLED", # limit orders only, GOOD_TILL_CANCELLED, GOOD_TILL_TIME, IMMEDIATE_OR_CANCELLED and FILL_OR_KILL
# "expire_after": "2020-07-02T19:40:13Z", # required for GOOD_TILL_TIME
# "is_post_only": False, # limit orders only, optional
# "trigger_price": "1234.5678" # required for stop orders
}
priceIsRequired = False
if uppercaseType == 'LIMIT' or uppercaseType == 'STOP':
priceIsRequired = True
if uppercaseType == 'STOP':
triggerPrice = self.safe_float(params, 'trigger_price')
if triggerPrice is None:
raise ArgumentsRequired(self.id + ' createOrder requires a trigger_price param for ' + type + ' orders')
request['trigger_price'] = self.price_to_precision(symbol, triggerPrice)
params = self.omit(params, 'trigger_price')
if priceIsRequired:
request['price'] = self.price_to_precision(symbol, price)
clientOrderId = self.safe_string_2(params, 'clientOrderId', 'client_id')
if clientOrderId is not None:
request['client_id'] = clientOrderId
params = self.omit(params, ['clientOrderId', 'client_id'])
response = await self.privatePostAccountOrders(self.extend(request, params))
#
# {
# "order_id": "d5492c24-2995-4c18-993a-5b8bf8fffc0d",
# "client_id": "d75fb03b-b599-49e9-b926-3f0b6d103206",
# "account_id": "a4c699f6-338d-4a26-941f-8f9853bfc4b9",
# "instrument_code": "BTC_EUR",
# "time": "2019-08-01T08:00:44.026Z",
# "side": "BUY",
# "price": "5000",
# "amount": "1",
# "filled_amount": "0.5",
# "type": "LIMIT",
# "time_in_force": "GOOD_TILL_CANCELLED"
# }
#
return self.parse_order(response, market)
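    # Editor's note (added comments; illustrative sketch with placeholder
    # values, not part of the original source): a stop order needs both a
    # limit price and a trigger_price param,
    #
    #   await exchange.create_order('BTC/EUR', 'stop', 'sell', 0.1, 9000.0,
    #                               {'trigger_price': 9100.0})
    #
    # while a plain limit order only needs the price argument.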
async def cancel_order(self, id, symbol=None, params={}):
await self.load_markets()
clientOrderId = self.safe_string_2(params, 'clientOrderId', 'client_id')
params = self.omit(params, ['clientOrderId', 'client_id'])
method = 'privateDeleteAccountOrdersOrderId'
request = {}
if clientOrderId is not None:
method = 'privateDeleteAccountOrdersClientClientId'
request['client_id'] = clientOrderId
else:
request['order_id'] = id
response = await getattr(self, method)(self.extend(request, params))
#
# responds with an empty body
#
return response
async def cancel_all_orders(self, symbol=None, params={}):
await self.load_markets()
request = {}
if symbol is not None:
market = self.market(symbol)
request['instrument_code'] = market['id']
response = await self.privateDeleteAccountOrders(self.extend(request, params))
#
# [
# "a10e9bd1-8f72-4cfe-9f1b-7f1c8a9bd8ee"
# ]
#
return response
async def cancel_orders(self, ids, symbol=None, params={}):
await self.load_markets()
request = {
'ids': ','.join(ids),
}
response = await self.privateDeleteAccountOrders(self.extend(request, params))
#
# [
# "a10e9bd1-8f72-4cfe-9f1b-7f1c8a9bd8ee"
# ]
#
return response
async def fetch_order(self, id, symbol=None, params={}):
await self.load_markets()
request = {
'order_id': id,
}
response = await self.privateGetAccountOrdersOrderId(self.extend(request, params))
#
# {
# "order": {
# "order_id": "36bb2437-7402-4794-bf26-4bdf03526439",
# "account_id": "a4c699f6-338d-4a26-941f-8f9853bfc4b9",
# "time_last_updated": "2019-09-27T15:05:35.096Z",
# "sequence": 48782,
# "price": "7349.2",
# "filled_amount": "100.0",
# "status": "FILLED_FULLY",
# "amount": "100.0",
# "instrument_code": "BTC_EUR",
# "side": "BUY",
# "time": "2019-09-27T15:05:32.063Z",
# "type": "MARKET"
# },
# "trades": [
# {
# "fee": {
# "fee_amount": "0.0014",
# "fee_currency": "BTC",
# "fee_percentage": "0.1",
# "fee_group_id": "default",
# "fee_type": "TAKER",
# "running_trading_volume": "0.0"
# },
# "trade": {
# "trade_id": "fdff2bcc-37d6-4a2d-92a5-46e09c868664",
# "order_id": "36bb2437-7402-4794-bf26-4bdf03526439",
# "account_id": "a4c699f6-338d-4a26-941f-8f9853bfc4b9",
# "amount": "1.4",
# "side": "BUY",
# "instrument_code": "BTC_EUR",
# "price": "7341.4",
# "time": "2019-09-27T15:05:32.564Z",
# "sequence": 48670
# }
# }
# ]
# }
#
return self.parse_order(response)
async def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
request = {
# 'from': self.iso8601(since),
# 'to': self.iso8601(self.milliseconds()), # max range is 100 days
# 'instrument_code': market['id'],
# 'with_cancelled_and_rejected': False, # default is False, orders which have been cancelled by the user before being filled or rejected by the system as invalid, additionally, all inactive filled orders which would return with "with_just_filled_inactive"
# 'with_just_filled_inactive': False, # orders which have been filled and are no longer open, use of "with_cancelled_and_rejected" extends "with_just_filled_inactive" and in case both are specified the latter is ignored
            # 'with_just_orders': False, # do not return any trades corresponding to the orders; it may be significantly faster and should be used if the user is not interested in trade information
# 'max_page_size': 100,
# 'cursor': 'string', # pointer specifying the position from which the next pages should be returned
}
market = None
if symbol is not None:
market = self.market(symbol)
request['instrument_code'] = market['id']
if since is not None:
to = self.safe_string(params, 'to')
if to is None:
                raise ArgumentsRequired(self.id + ' fetchOrders requires a "to" iso8601 string param when the since argument is specified, max range is 100 days')
request['from'] = self.iso8601(since)
if limit is not None:
request['max_page_size'] = limit
response = await self.privateGetAccountOrders(self.extend(request, params))
#
# {
# "order_history": [
# {
# "order": {
# "trigger_price": "12089.88",
# "order_id": "d453ca12-c650-46dd-9dee-66910d96bfc0",
# "account_id": "ef3a5f4c-cfcd-415e-ba89-5a9abf47b28a",
# "instrument_code": "BTC_USDT",
# "time": "2019-08-23T10:02:31.663Z",
# "side": "SELL",
# "price": "10159.76",
# "average_price": "10159.76",
# "amount": "0.2",
# "filled_amount": "0.2",
# "type": "STOP",
# "sequence": 8,
# "status": "FILLED_FULLY"
# },
# "trades": [
# {
# "fee": {
# "fee_amount": "0.4188869",
# "fee_currency": "USDT",
# "fee_percentage": "0.1",
# "fee_group_id": "default",
# "fee_type": "TAKER",
# "running_trading_volume": "0.0"
# },
# "trade": {
# "trade_id": "ec82896f-fd1b-4cbb-89df-a9da85ccbb4b",
# "order_id": "d453ca12-c650-46dd-9dee-66910d96bfc0",
# "account_id": "ef3a5f4c-cfcd-415e-ba89-5a9abf47b28a",
# "amount": "0.2",
# "side": "SELL",
# "instrument_code": "BTC_USDT",
# "price": "10159.76",
# "time": "2019-08-23T10:02:32.663Z",
# "sequence": 9
# }
# }
# ]
# },
# {
# "order": {
# "order_id": "5151a99e-f414-418f-8cf1-2568d0a63ea5",
# "account_id": "ef3a5f4c-cfcd-415e-ba89-5a9abf47b28a",
# "instrument_code": "BTC_USDT",
# "time": "2019-08-23T10:01:36.773Z",
# "side": "SELL",
# "price": "12289.88",
# "amount": "0.5",
# "filled_amount": "0.0",
# "type": "LIMIT",
# "sequence": 7,
# "status": "OPEN"
# },
# "trades": []
# },
# {
# "order": {
# "order_id": "ac80d857-75e1-4733-9070-fd4288395fdc",
# "account_id": "ef3a5f4c-cfcd-415e-ba89-5a9abf47b28a",
# "instrument_code": "BTC_USDT",
# "time": "2019-08-23T10:01:25.031Z",
# "side": "SELL",
# "price": "11089.88",
# "amount": "0.1",
# "filled_amount": "0.0",
# "type": "LIMIT",
# "sequence": 6,
# "status": "OPEN"
# },
# "trades": []
# }
# ],
# "max_page_size": 100
# }
#
orderHistory = self.safe_value(response, 'order_history', [])
return self.parse_orders(orderHistory, market, since, limit)
async def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
request = {
'with_cancelled_and_rejected': True, # default is False, orders which have been cancelled by the user before being filled or rejected by the system as invalid, additionally, all inactive filled orders which would return with "with_just_filled_inactive"
}
return await self.fetch_open_orders(symbol, since, limit, self.extend(request, params))
async def fetch_order_trades(self, id, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
request = {
'order_id': id,
# 'max_page_size': 100,
# 'cursor': 'string', # pointer specifying the position from which the next pages should be returned
}
if limit is not None:
request['max_page_size'] = limit
response = await self.privateGetAccountOrdersOrderIdTrades(self.extend(request, params))
#
# {
# "trade_history": [
# {
# "trade": {
# "trade_id": "2b42efcd-d5b7-4a56-8e12-b69ffd68c5ef",
# "order_id": "66756a10-3e86-48f4-9678-b634c4b135b2",
# "account_id": "c2d0076a-c20d-41f8-9e9a-1a1d028b2b58",
# "amount": "1234.5678",
# "side": "BUY",
# "instrument_code": "BTC_EUR",
# "price": "1234.5678",
# "time": "2019-08-24T14:15:22Z",
# "price_tick_sequence": 0,
# "sequence": 123456789
# },
# "fee": {
# "fee_amount": "1234.5678",
# "fee_percentage": "1234.5678",
# "fee_group_id": "default",
# "running_trading_volume": "1234.5678",
# "fee_currency": "BTC",
# "fee_type": "TAKER"
# }
# }
# ],
# "max_page_size": 0,
# "cursor": "string"
# }
#
tradeHistory = self.safe_value(response, 'trade_history', [])
market = None
if symbol is not None:
market = self.market(symbol)
return self.parse_trades(tradeHistory, market, since, limit)
async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
request = {
# 'from': self.iso8601(since),
# 'to': self.iso8601(self.milliseconds()), # max range is 100 days
# 'instrument_code': market['id'],
# 'max_page_size': 100,
# 'cursor': 'string', # pointer specifying the position from which the next pages should be returned
}
market = None
if symbol is not None:
market = self.market(symbol)
request['instrument_code'] = market['id']
if since is not None:
to = self.safe_string(params, 'to')
if to is None:
                raise ArgumentsRequired(self.id + ' fetchMyTrades requires a "to" iso8601 string param when the since argument is specified, max range is 100 days')
request['from'] = self.iso8601(since)
if limit is not None:
request['max_page_size'] = limit
response = await self.privateGetAccountTrades(self.extend(request, params))
#
# {
# "trade_history": [
# {
# "trade": {
# "trade_id": "2b42efcd-d5b7-4a56-8e12-b69ffd68c5ef",
# "order_id": "66756a10-3e86-48f4-9678-b634c4b135b2",
# "account_id": "c2d0076a-c20d-41f8-9e9a-1a1d028b2b58",
# "amount": "1234.5678",
# "side": "BUY",
# "instrument_code": "BTC_EUR",
# "price": "1234.5678",
# "time": "2019-08-24T14:15:22Z",
# "price_tick_sequence": 0,
# "sequence": 123456789
# },
# "fee": {
# "fee_amount": "1234.5678",
# "fee_percentage": "1234.5678",
# "fee_group_id": "default",
# "running_trading_volume": "1234.5678",
# "fee_currency": "BTC",
# "fee_type": "TAKER"
# }
# }
# ],
# "max_page_size": 0,
# "cursor": "string"
# }
#
tradeHistory = self.safe_value(response, 'trade_history', [])
return self.parse_trades(tradeHistory, market, since, limit)
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'][api] + '/' + self.version + '/' + self.implode_params(path, params)
query = self.omit(params, self.extract_params(path))
if api == 'public':
if query:
url += '?' + self.urlencode(query)
elif api == 'private':
self.check_required_credentials()
headers = {
'Accept': 'application/json',
'Authorization': 'Bearer ' + self.apiKey,
}
if method == 'POST':
body = self.json(query)
headers['Content-Type'] = 'application/json'
else:
if query:
url += '?' + self.urlencode(query)
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
if response is None:
return
#
# {"error":"MISSING_FROM_PARAM"}
# {"error":"MISSING_TO_PARAM"}
# {"error":"CANDLESTICKS_TIME_RANGE_TOO_BIG"}
#
message = self.safe_string(response, 'error')
if message is not None:
feedback = self.id + ' ' + body
self.throw_exactly_matched_exception(self.exceptions['exact'], message, feedback)
self.throw_broadly_matched_exception(self.exceptions['broad'], message, feedback)
raise ExchangeError(feedback) # unknown message
| 44.201393 | 555 | 0.476437 |
4a217f21cd53eae78db16320fdce844883047180 | 20,122 | py | Python | pw_env_setup/py/pw_env_setup/env_setup.py | LuDuda/pigweed | dcd7230895a234156bc7b6e5061e6936627c5fbb | [
"Apache-2.0"
] | null | null | null | pw_env_setup/py/pw_env_setup/env_setup.py | LuDuda/pigweed | dcd7230895a234156bc7b6e5061e6936627c5fbb | [
"Apache-2.0"
] | null | null | null | pw_env_setup/py/pw_env_setup/env_setup.py | LuDuda/pigweed | dcd7230895a234156bc7b6e5061e6936627c5fbb | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# Copyright 2020 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Environment setup script for Pigweed.
This script installs everything and writes out a file for the user's shell
to source.
For now, this is valid Python 2 and Python 3. Once we switch to running this
with PyOxidizer it can be upgraded to recent Python 3.
"""
from __future__ import print_function
import argparse
import copy
import glob
import inspect
import json
import os
import shutil
import subprocess
import sys
# TODO(pwbug/67): Remove import hacks once the oxidized prebuilt binaries are
# proven stable for first-time bootstrapping. For now, continue to support
# running directly from source without assuming a functioning Python
# environment when running for the first time.
# If we're running oxidized, filesystem-centric import hacks won't work. In that
# case, jump straight to the imports and assume oxidation brought in the deps.
if not getattr(sys, 'oxidized', False):
old_sys_path = copy.deepcopy(sys.path)
filename = None
if hasattr(sys.modules[__name__], '__file__'):
filename = __file__
else:
# Try introspection in environments where __file__ is not populated.
frame = inspect.currentframe()
if frame is not None:
filename = inspect.getfile(frame)
# If none of our strategies worked, we're in a strange runtime environment.
# The imports are almost certainly going to fail.
if filename is None:
raise RuntimeError(
'Unable to locate pw_env_setup module; cannot continue.\n'
'\n'
            'Try updating to one of the standard Python implementations:\n'
' https://www.python.org/downloads/')
sys.path = [
os.path.abspath(os.path.join(filename, os.path.pardir, os.path.pardir))
]
import pw_env_setup # pylint: disable=unused-import
sys.path = old_sys_path
# pylint: disable=wrong-import-position
from pw_env_setup.cipd_setup import update as cipd_update
from pw_env_setup.cipd_setup import wrapper as cipd_wrapper
from pw_env_setup.colors import Color, enable_colors
from pw_env_setup import cargo_setup
from pw_env_setup import environment
from pw_env_setup import spinner
from pw_env_setup import virtualenv_setup
from pw_env_setup import windows_env_start
# TODO(pwbug/67, pwbug/68) switch to shutil.which().
def _which(executable,
pathsep=os.pathsep,
use_pathext=None,
case_sensitive=None):
if use_pathext is None:
use_pathext = (os.name == 'nt')
if case_sensitive is None:
case_sensitive = (os.name != 'nt' and sys.platform != 'darwin')
if not case_sensitive:
executable = executable.lower()
exts = None
if use_pathext:
exts = frozenset(os.environ['PATHEXT'].split(pathsep))
if not case_sensitive:
exts = frozenset(x.lower() for x in exts)
if not exts:
raise ValueError('empty PATHEXT')
paths = os.environ['PATH'].split(pathsep)
for path in paths:
try:
entries = frozenset(os.listdir(path))
if not case_sensitive:
entries = frozenset(x.lower() for x in entries)
except OSError:
continue
if exts:
for ext in exts:
if executable + ext in entries:
return os.path.join(path, executable + ext)
else:
if executable in entries:
return os.path.join(path, executable)
return None
class _Result:
class Status:
DONE = 'done'
SKIPPED = 'skipped'
FAILED = 'failed'
def __init__(self, status, *messages):
self._status = status
self._messages = list(messages)
def ok(self):
return self._status in {_Result.Status.DONE, _Result.Status.SKIPPED}
def status_str(self):
return self._status
def messages(self):
return self._messages
def _process_globs(globs):
unique_globs = []
for pat in globs:
if pat and pat not in unique_globs:
unique_globs.append(pat)
files = []
warnings = []
for pat in unique_globs:
if pat:
matches = glob.glob(pat)
if not matches:
warnings.append(
'warning: pattern "{}" matched 0 files'.format(pat))
files.extend(matches)
if globs and not files:
warnings.append('warning: matched 0 total files')
return files, warnings
def result_func(glob_warnings):
def result(status, *args):
return _Result(status, *([str(x) for x in glob_warnings] + list(args)))
return result
# TODO(mohrr) remove disable=useless-object-inheritance once in Python 3.
# pylint: disable=useless-object-inheritance
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-arguments
class EnvSetup(object):
"""Run environment setup for Pigweed."""
def __init__(self, pw_root, cipd_cache_dir, shell_file, quiet, install_dir,
use_pigweed_defaults, cipd_package_file, virtualenv_root,
virtualenv_requirements, virtualenv_gn_target,
virtualenv_gn_out_dir, cargo_package_file, enable_cargo,
json_file, project_root):
self._env = environment.Environment()
self._project_root = project_root
self._pw_root = pw_root
self._setup_root = os.path.join(pw_root, 'pw_env_setup', 'py',
'pw_env_setup')
self._cipd_cache_dir = cipd_cache_dir
self._shell_file = shell_file
self._is_windows = os.name == 'nt'
self._quiet = quiet
self._install_dir = install_dir
self._virtualenv_root = (virtualenv_root
or os.path.join(install_dir, 'pigweed-venv'))
if os.path.isfile(shell_file):
os.unlink(shell_file)
if isinstance(self._pw_root, bytes) and bytes != str:
self._pw_root = self._pw_root.decode()
self._cipd_package_file = []
self._virtualenv_requirements = []
self._virtualenv_gn_targets = []
self._cargo_package_file = []
self._enable_cargo = enable_cargo
self._json_file = json_file
setup_root = os.path.join(pw_root, 'pw_env_setup', 'py',
'pw_env_setup')
# TODO(pwbug/67, pwbug/68) Investigate pulling these files into an
# oxidized env setup executable instead of referring to them in the
# source tree. Note that this could be error-prone because users expect
# changes to the files in the source tree to affect bootstrap.
if use_pigweed_defaults:
# If updating this section make sure to update
# $PW_ROOT/pw_env_setup/docs.rst as well.
self._cipd_package_file.append(
os.path.join(setup_root, 'cipd_setup', 'pigweed.json'))
self._cipd_package_file.append(
os.path.join(setup_root, 'cipd_setup', 'luci.json'))
self._virtualenv_requirements.append(
os.path.join(setup_root, 'virtualenv_setup',
'requirements.txt'))
self._virtualenv_gn_targets.append(
virtualenv_setup.GnTarget(
'{}#:python.install'.format(pw_root)))
self._cargo_package_file.append(
os.path.join(setup_root, 'cargo_setup', 'packages.txt'))
self._cipd_package_file.extend(cipd_package_file)
self._virtualenv_requirements.extend(virtualenv_requirements)
self._virtualenv_gn_targets.extend(virtualenv_gn_target)
self._virtualenv_gn_out_dir = virtualenv_gn_out_dir
self._cargo_package_file.extend(cargo_package_file)
self._env.set('PW_PROJECT_ROOT', project_root)
self._env.set('PW_ROOT', pw_root)
self._env.set('_PW_ACTUAL_ENVIRONMENT_ROOT', install_dir)
self._env.add_replacement('_PW_ACTUAL_ENVIRONMENT_ROOT', install_dir)
self._env.add_replacement('PW_ROOT', pw_root)
def _log(self, *args, **kwargs):
# Not using logging module because it's awkward to flush a log handler.
if self._quiet:
return
flush = kwargs.pop('flush', False)
print(*args, **kwargs)
if flush:
sys.stdout.flush()
def setup(self):
"""Runs each of the env_setup steps."""
if os.name == 'nt':
windows_env_start.print_banner(bootstrap=True, no_shell_file=False)
else:
enable_colors()
steps = [
('CIPD package manager', self.cipd),
('Python environment', self.virtualenv),
('Host tools', self.host_tools),
]
# TODO(pwbug/63): Add a Windows version of cargo to CIPD.
if not self._is_windows and self._enable_cargo:
steps.append(("Rust cargo", self.cargo))
if self._is_windows:
steps.append(("Windows scripts", self.win_scripts))
self._log(
Color.bold('Downloading and installing packages into local '
'source directory:\n'))
max_name_len = max(len(name) for name, _ in steps)
self._env.comment('''
This file is automatically generated. DO NOT EDIT!
For details, see $PW_ROOT/pw_env_setup/py/pw_env_setup/env_setup.py and
$PW_ROOT/pw_env_setup/py/pw_env_setup/environment.py.
'''.strip())
if not self._is_windows:
self._env.comment('''
For help debugging errors in this script, uncomment the next line.
set -x
Then use `set +x` to go back to normal.
'''.strip())
self._env.echo(
Color.bold(
'Activating environment (setting environment variables):'))
self._env.echo('')
for name, step in steps:
self._log(' Setting up {name:.<{width}}...'.format(
name=name, width=max_name_len),
end='',
flush=True)
self._env.echo(
' Setting environment variables for {name:.<{width}}...'.
format(name=name, width=max_name_len),
newline=False,
)
spin = spinner.Spinner()
with spin():
result = step()
self._log(result.status_str())
self._env.echo(result.status_str())
for message in result.messages():
sys.stderr.write('{}\n'.format(message))
self._env.echo(message)
if not result.ok():
return -1
self._log('')
self._env.echo('')
self._env.finalize()
self._env.echo(Color.bold('Sanity checking the environment:'))
self._env.echo()
self._env.doctor()
self._env.echo()
self._env.echo(
Color.bold('Environment looks good, you are ready to go!'))
self._env.echo()
with open(self._shell_file, 'w') as outs:
self._env.write(outs)
deactivate = os.path.join(
self._install_dir,
'deactivate{}'.format(os.path.splitext(self._shell_file)[1]))
with open(deactivate, 'w') as outs:
self._env.write_deactivate(outs)
config = {
# Skipping sysname and nodename in os.uname(). nodename could change
# based on the current network. sysname won't change, but is
# redundant because it's contained in release or version, and
# skipping it here simplifies logic.
'uname': ' '.join(getattr(os, 'uname', lambda: ())()[2:]),
'os': os.name,
}
with open(os.path.join(self._install_dir, 'config.json'), 'w') as outs:
outs.write(
json.dumps(config, indent=4, separators=(',', ': ')) + '\n')
if self._json_file is not None:
with open(self._json_file, 'w') as outs:
self._env.json(outs)
return 0
def cipd(self):
install_dir = os.path.join(self._install_dir, 'cipd')
cipd_client = cipd_wrapper.init(install_dir, silent=True)
package_files, glob_warnings = _process_globs(self._cipd_package_file)
result = result_func(glob_warnings)
if not package_files:
return result(_Result.Status.SKIPPED)
if not cipd_update.update(cipd=cipd_client,
root_install_dir=install_dir,
package_files=package_files,
cache_dir=self._cipd_cache_dir,
env_vars=self._env):
return result(_Result.Status.FAILED)
return result(_Result.Status.DONE)
def virtualenv(self):
"""Setup virtualenv."""
requirements, req_glob_warnings = _process_globs(
self._virtualenv_requirements)
result = result_func(req_glob_warnings)
orig_python3 = _which('python3')
with self._env():
new_python3 = _which('python3')
# There is an issue with the virtualenv module on Windows where it
# expects sys.executable to be called "python.exe" or it fails to
# properly execute. If we installed Python 3 in the CIPD step we need
# to address this. Detect if we did so and if so create a copy of
# python3.exe called python.exe so that virtualenv works.
if orig_python3 != new_python3 and self._is_windows:
python3_copy = os.path.join(os.path.dirname(new_python3),
'python.exe')
if not os.path.exists(python3_copy):
shutil.copyfile(new_python3, python3_copy)
new_python3 = python3_copy
if not requirements and not self._virtualenv_gn_targets:
return result(_Result.Status.SKIPPED)
if not virtualenv_setup.install(
project_root=self._project_root,
venv_path=self._virtualenv_root,
requirements=requirements,
gn_targets=self._virtualenv_gn_targets,
gn_out_dir=self._virtualenv_gn_out_dir,
python=new_python3,
env=self._env,
):
return result(_Result.Status.FAILED)
return result(_Result.Status.DONE)
def host_tools(self):
# The host tools are grabbed from CIPD, at least initially. If the
# user has a current host build, that build will be used instead.
# TODO(mohrr) find a way to do stuff like this for all projects.
host_dir = os.path.join(self._pw_root, 'out', 'host')
self._env.prepend('PATH', os.path.join(host_dir, 'host_tools'))
return _Result(_Result.Status.DONE)
def win_scripts(self):
# These scripts act as a compatibility layer for windows.
env_setup_dir = os.path.join(self._pw_root, 'pw_env_setup')
self._env.prepend('PATH', os.path.join(env_setup_dir,
'windows_scripts'))
return _Result(_Result.Status.DONE)
def cargo(self):
install_dir = os.path.join(self._install_dir, 'cargo')
package_files, glob_warnings = _process_globs(self._cargo_package_file)
result = result_func(glob_warnings)
if not package_files:
return result(_Result.Status.SKIPPED)
if not cargo_setup.install(install_dir=install_dir,
package_files=package_files,
env=self._env):
return result(_Result.Status.FAILED)
return result(_Result.Status.DONE)
def parse(argv=None):
"""Parse command-line arguments."""
parser = argparse.ArgumentParser()
pw_root = os.environ.get('PW_ROOT', None)
if not pw_root:
try:
with open(os.devnull, 'w') as outs:
pw_root = subprocess.check_output(
['git', 'rev-parse', '--show-toplevel'],
stderr=outs).strip()
except subprocess.CalledProcessError:
pw_root = None
parser.add_argument(
'--pw-root',
default=pw_root,
required=not pw_root,
)
project_root = os.environ.get('PW_PROJECT_ROOT', None) or pw_root
parser.add_argument(
'--project-root',
default=project_root,
required=not project_root,
)
parser.add_argument(
'--cipd-cache-dir',
default=os.environ.get('CIPD_CACHE_DIR',
os.path.expanduser('~/.cipd-cache-dir')),
)
parser.add_argument(
'--shell-file',
help='Where to write the file for shells to source.',
required=True,
)
parser.add_argument(
'--quiet',
help='Reduce output.',
action='store_true',
default='PW_ENVSETUP_QUIET' in os.environ,
)
parser.add_argument(
'--install-dir',
help='Location to install environment.',
required=True,
)
parser.add_argument(
'--use-pigweed-defaults',
help='Use Pigweed default values in addition to the given environment '
'variables.',
action='store_true',
)
parser.add_argument(
'--cipd-package-file',
help='CIPD package file. JSON file consisting of a list of dicts with '
'"path" and "tags" keys, where "tags" a list of str.',
default=[],
action='append',
)
parser.add_argument(
'--virtualenv-requirements',
help='Pip requirements file. Compiled with pip-compile.',
default=[],
action='append',
)
parser.add_argument(
'--virtualenv-gn-target',
help=('GN targets that build and install Python packages. Format: '
'path/to/gn_root#target'),
default=[],
action='append',
type=virtualenv_setup.GnTarget,
)
parser.add_argument(
'--virtualenv-gn-out-dir',
help=('Output directory to use when building and installing Python '
'packages with GN; defaults to a unique path in the environment '
'directory.'))
parser.add_argument(
'--virtualenv-root',
help=('Root of virtualenv directory. Default: '
'<install_dir>/pigweed-venv'),
default=None,
)
parser.add_argument(
'--cargo-package-file',
help='Rust cargo packages to install. Lines with package name and '
'version separated by a space.',
default=[],
action='append',
)
parser.add_argument(
'--enable-cargo',
help='Enable cargo installation.',
action='store_true',
)
parser.add_argument(
'--json-file',
help='Dump environment variable operations to a JSON file.',
default=None,
)
args = parser.parse_args(argv)
one_required = (
'use_pigweed_defaults',
'cipd_package_file',
'virtualenv_requirements',
'virtualenv_gn_target',
'cargo_package_file',
)
if not any(getattr(args, x) for x in one_required):
parser.error('At least one of ({}) is required'.format(', '.join(
'"--{}"'.format(x.replace('_', '-')) for x in one_required)))
return args
def main():
try:
return EnvSetup(**vars(parse())).setup()
except subprocess.CalledProcessError as err:
print()
print(err.output)
raise
if __name__ == '__main__':
sys.exit(main())
| 33.536667 | 80 | 0.611371 |
4a217f693a0dd76ad62a38e78e4f840e5f38af6f | 3,250 | py | Python | bomberman_game.py | anudeep586/Codechef_hackerrank_codeforces1 | 39a536d6ad6d670e0bce2ba8657cf5715b0037e0 | [
"0BSD"
] | null | null | null | bomberman_game.py | anudeep586/Codechef_hackerrank_codeforces1 | 39a536d6ad6d670e0bce2ba8657cf5715b0037e0 | [
"0BSD"
] | null | null | null | bomberman_game.py | anudeep586/Codechef_hackerrank_codeforces1 | 39a536d6ad6d670e0bce2ba8657cf5715b0037e0 | [
"0BSD"
] | null | null | null | r,c,t=[int(x) for x in input().split(" ")]
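# Editor's note (added comments; the logic below is unchanged): this solves the
# classic Bomberman grid problem. After the first second the grid is periodic:
#   - t even     -> every cell is filled with bombs ('O')
#   - t == 1     -> the initial grid is unchanged
#   - t % 4 == 3 -> the initial grid after one detonation
#   - t % 4 == 1 -> the initial grid after two detonations
# first(arr1, arr2) clears, in arr1, every cell blown up by the bombs in arr2.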
arr1=[]
arr2=[]
arr3=[]
for _ in range(r):
arr=list(input())
arr2.append(arr)
for i in range(len(arr2)):
k=[]
for j in range(len(arr2[i])):
k.append('O')
arr3.append(k)
arr1=arr3
def first(arr1,arr2):
for i in range(len(arr1)):
for j in range(len(arr1[i])):
if arr2[i][j]=='O' and j!=0 and j!=len(arr2[i])-1 and i!=0 and i!=len(arr2)-1 and r!=1:
arr1[i][j]='.'
arr1[i][j-1]='.'
arr1[i][j+1]='.'
arr1[i-1][j]='.'
arr1[i+1][j]='.'
if arr2[i][j]=='O' and i!=0 and i!=len(arr2)-1 and j==0 and r!=1:
arr1[i][j]='.'
arr1[i-1][j]='.'
arr1[i+1][j]='.'
arr1[i][j+1]='.'
if arr2[i][j]=='O' and j==len(arr2[i])-1 and i!=0 and i!=len(arr2)-1 and r!=1:
arr1[i][j]='.'
arr1[i][j-1]='.'
arr1[i-1][j]='.'
arr1[i+1][j]='.'
if arr2[i][j]=='O' and i==0 and j!=len(arr2[i])-1 and j!=0 and r!=1:
arr1[i][j]='.'
arr1[i][j+1]='.'
arr1[i+1][j]='.'
arr1[i][j-1]='.'
if arr2[i][j]=='O' and i==0 and j!=len(arr2[i])-1 and j==0 and r!=1:
arr1[i][j]='.'
arr1[i][j+1]='.'
arr1[i+1][j]='.'
if arr2[i][j]=='O' and i==0 and j==len(arr2[i])-1 and j!=0 and r!=1:
arr1[i][j]='.'
arr1[i+1][j]='.'
arr1[i][j-1]='.'
if arr2[i][j]=='O' and i==len(arr2)-1 and j!=len(arr2[i])-1 and j!=0 and r!=1:
arr1[i][j]='.'
arr1[i][j-1]='.'
arr1[i][j+1]='.'
arr1[i-1][j]='.'
if arr2[i][j]=='O' and i==len(arr2)-1 and j!=len(arr2[i])-1 and j==0 and r!=1:
arr1[i][j]='.'
arr1[i][j+1]='.'
arr1[i-1][j]='.'
if arr2[i][j]=='O' and i==len(arr2)-1 and j==len(arr2[i])-1 and j!=0 and r!=1:
arr1[i][j]='.'
arr1[i][j-1]='.'
arr1[i-1][j]='.'
if arr2[i][j]=='O' and i==0 and j!=0 and j!=len(arr2[i])-1 and r==1:
arr1[i][j+1]='.'
arr1[i][j-1]='.'
if arr2[i][j]=='O' and i==0 and j==0 and j!=len(arr2[i])-1 and r==1:
arr1[i][j+1]='.'
if arr2[i][j]=='O' and i==0 and j!=0 and j==len(arr2[i])-1 and r==1:
arr1[i][j-1]='.'
if t%2==0:
for x in arr1:
print(''.join(x))
elif t%4==3:
first(arr1,arr2)
for x in arr1:
print(''.join(x))
elif t==1:
for x in arr2:
print(''.join(x))
elif t%4==1:
first(arr1,arr2)
arr2=arr1
arr1=arr3
z=[]
for i in range(len(arr2)):
k=[]
for j in range(len(arr2[i])):
k.append('O')
z.append(k)
first(z,arr2)
for x in z:
print(''.join(x))
| 25.193798 | 100 | 0.348308 |
4a217fec417e4a88c4d9aba933eb45ccfb54973a | 209 | py | Python | cdecimal/errors.py | virtuNat/cdecimal | ab07330581e4c8181e4a98851df02c96a7fb1d82 | [
"MIT"
] | null | null | null | cdecimal/errors.py | virtuNat/cdecimal | ab07330581e4c8181e4a98851df02c96a7fb1d82 | [
"MIT"
] | null | null | null | cdecimal/errors.py | virtuNat/cdecimal | ab07330581e4c8181e4a98851df02c96a7fb1d82 | [
"MIT"
] | 1 | 2018-09-23T10:44:56.000Z | 2018-09-23T10:44:56.000Z | class DivisionByZero(ZeroDivisionError):
"""Thrown when division by zero occurs."""
class InvalidOperationError(ArithmeticError):
"""Thrown when an operation would produce an indeterminate value."""
| 29.857143 | 72 | 0.76555 |
4a2180831356d55304d73e1e762f9f3f4678067d | 2,953 | py | Python | test/Task/Fs/ChmodTaskTest.py | paulondc/chilopoda | 046dbb0c1b4ff20ea5f2e1679f8d89f3089b6aa4 | [
"MIT"
] | 2 | 2019-09-24T18:56:27.000Z | 2021-02-07T04:58:49.000Z | test/Task/Fs/ChmodTaskTest.py | paulondc/kombi | 046dbb0c1b4ff20ea5f2e1679f8d89f3089b6aa4 | [
"MIT"
] | 20 | 2019-02-16T04:21:13.000Z | 2019-03-09T21:21:21.000Z | test/Task/Fs/ChmodTaskTest.py | paulondc/kombi | 046dbb0c1b4ff20ea5f2e1679f8d89f3089b6aa4 | [
"MIT"
] | 3 | 2019-11-15T05:16:32.000Z | 2021-09-28T21:28:29.000Z | import unittest
import os
import sys
from ...BaseTestCase import BaseTestCase
from kombi.Task import Task
from kombi.Crawler.Fs import FsCrawler
class ChmodTaskTest(BaseTestCase):
"""Test Chmod task."""
__dir = os.path.join(BaseTestCase.dataTestsDirectory(), "glob")
__path = os.path.join(__dir, "images", "RND_ass_lookdev_default_beauty_tt.1001.exr")
@unittest.skipIf(sys.platform.startswith("win"), "not supported on windows")
def testChmodFile(self):
"""
Test that the chmod task works properly on a file.
"""
crawler = FsCrawler.createFromPath(self.__path)
chmodTask = Task.create('chmod')
chmodTask.add(crawler, self.__path)
for permission in ["644", "444", "744", "664"]:
chmodTask.setOption('directoryMode', permission)
chmodTask.setOption('fileMode', permission)
result = chmodTask.output()
self.assertEqual(len(result), 1)
crawler = result[0]
self.assertEqual(self.__getPermission(crawler.var('filePath')), permission)
@unittest.skipIf(sys.platform.startswith("win"), "not supported on windows")
def testChmodDir(self):
"""
Test that the chmod task works properly on a directory.
"""
crawler = FsCrawler.createFromPath(self.__dir)
fileCrawler = FsCrawler.createFromPath(self.__path)
chmodTask = Task.create('chmod')
chmodTask.add(crawler, self.__dir)
chmodTask.add(fileCrawler, self.__dir)
dirPerm = "775"
filePerm = "664"
chmodTask.setOption('directoryMode', dirPerm)
chmodTask.setOption('fileMode', filePerm)
result = chmodTask.output()
self.assertEqual(len(result), 1)
self.assertEqual(self.__getPermission(self.__dir), dirPerm)
self.assertEqual(self.__getPermission(self.__path), filePerm)
@unittest.skipIf(sys.platform.startswith("win"), "not supported on windows")
def testSymlink(self):
"""
Test that hardlinks are skipped when running the chmod task.
"""
link = os.path.join(self.dataTestsDirectory(), 'symlink.exr')
os.symlink(self.__path, link)
self.assertEqual(self.__getPermission(link), '664')
self.assertTrue(os.path.islink(link))
crawler = FsCrawler.createFromPath(link)
chmodTask = Task.create('chmod')
chmodTask.add(crawler, link)
chmodTask.setOption('directoryMode', '775')
chmodTask.setOption('fileMode', '775')
chmodTask.output()
self.assertEqual(self.__getPermission(link), '664')
self.addCleanup(self.cleanup, link)
def cleanup(self, fileToDelete):
"""
Remove file created during test.
"""
os.remove(fileToDelete)
@staticmethod
def __getPermission(filePath):
return oct(os.stat(filePath).st_mode)[-3:]
if __name__ == "__main__":
unittest.main()
| 36.9125 | 88 | 0.64917 |
4a218087ad188d568c67cc4f6279c4133b83e6d3 | 1,776 | py | Python | Tutorial_Kivy_Codemy/codemy_3_Kv_Language.py | LivioAlvarenga/Tutoriais_Kivy_KivyMD | b6225578e764eaf0312afafbb2f76dc06f92342d | [
"MIT"
] | null | null | null | Tutorial_Kivy_Codemy/codemy_3_Kv_Language.py | LivioAlvarenga/Tutoriais_Kivy_KivyMD | b6225578e764eaf0312afafbb2f76dc06f92342d | [
"MIT"
] | null | null | null | Tutorial_Kivy_Codemy/codemy_3_Kv_Language.py | LivioAlvarenga/Tutoriais_Kivy_KivyMD | b6225578e764eaf0312afafbb2f76dc06f92342d | [
"MIT"
] | null | null | null | # https://www.youtube.com/watch?v=k4QCoS-hj-s&list=PLCC34OHNcOtpz7PJQ7Tv7hqFBP_xDDjqg&index=5
# https://www.youtube.com/watch?v=dVVPOPuPPc0&list=PLCC34OHNcOtpz7PJQ7Tv7hqFBP_xDDjqg&index=6
from kivy.app import App
from kivy.uix.widget import Widget
from kivy.properties import ObjectProperty
from kivy.lang import Builder
Builder.load_file('codemy_3_Kv_Language.kv')
class MyGridLayout(Widget):
# None para quando iniciar esta variavel não ter nada.
name = ObjectProperty(None)
pizza = ObjectProperty(None)
color = ObjectProperty(None)
def press(self):
name = self.name.text
pizza = self.pizza.text
color = self.color.text
        # print(f'Hello {name}, your favorite pizza is {pizza} and your
        # favorite color is {color}')
        # print to the screen instead:
        # self.add_widget(Label(text=f'Hello {name}, your favorite pizza is
        # {pizza} and your favorite color is {color}'))
        print(
            f'Hello {name}, your favorite pizza is {pizza} '
            f'and your favorite color is {color}')
        # clear the input boxes
self.name.text = ''
self.pizza.text = ''
self.color.text = ''
class MyApp(App):
def build(self):
return MyGridLayout()
if __name__ == '__main__':
MyApp().run()
'''
There are three ways to hook up a kv language file:
1. Create a file named after the App class, lowercased and without the "App"
   suffix. E.g.: class MyApp(App) ----> my.kv
   Kivy will then find and load my.kv automatically.
2. Create a .kv file with any name and load it with Builder:
   Builder.load_file('codemy_3_Kv_Language_C.kv'). Note: if the file is in
   another directory, just include the path.
3. The third way is Builder.load_string:
   Builder.load_string(""" put the kv code here!!! """)
'''
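# Editor's note: a minimal sketch of option 3 above (added for illustration;
# the kv rule is a made-up example, not part of the original tutorial):
#
# from kivy.lang import Builder
#
# KV = """
# <MyGridLayout>:
#     Label:
#         text: "Hello from load_string"
# """
# Builder.load_string(KV)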
| 29.6 | 93 | 0.67286 |
4a2180b5d73be05d41049c55d3072654cda6e44f | 17,588 | py | Python | python/ray/tune/sample.py | lavanyashukla/ray | 9c1a75b6ff82a842131e6beb3c260188befc21df | [
"Apache-2.0"
] | 1 | 2020-10-21T22:24:27.000Z | 2020-10-21T22:24:27.000Z | python/ray/tune/sample.py | mfitton/ray | fece8db70d703da1aad192178bd50923e83cc99a | [
"Apache-2.0"
] | null | null | null | python/ray/tune/sample.py | mfitton/ray | fece8db70d703da1aad192178bd50923e83cc99a | [
"Apache-2.0"
] | null | null | null | import logging
import random
from copy import copy
from inspect import signature
from math import isclose
from typing import Any, Callable, Dict, List, Optional, Sequence, Union
import numpy as np
logger = logging.getLogger(__name__)
class Domain:
"""Base class to specify a type and valid range to sample parameters from.
This base class is implemented by parameter spaces, like float ranges
(``Float``), integer ranges (``Integer``), or categorical variables
(``Categorical``). The ``Domain`` object contains information about
valid values (e.g. minimum and maximum values), and exposes methods that
allow specification of specific samplers (e.g. ``uniform()`` or
``loguniform()``).
"""
sampler = None
default_sampler_cls = None
def cast(self, value):
"""Cast value to domain type"""
return value
def set_sampler(self, sampler, allow_override=False):
if self.sampler and not allow_override:
raise ValueError("You can only choose one sampler for parameter "
"domains. Existing sampler for parameter {}: "
"{}. Tried to add {}".format(
self.__class__.__name__, self.sampler,
sampler))
self.sampler = sampler
def get_sampler(self):
sampler = self.sampler
if not sampler:
sampler = self.default_sampler_cls()
return sampler
def sample(self, spec=None, size=1):
sampler = self.get_sampler()
return sampler.sample(self, spec=spec, size=size)
def is_grid(self):
return isinstance(self.sampler, Grid)
def is_function(self):
return False
def is_valid(self, value: Any):
"""Returns True if `value` is a valid value in this domain."""
raise NotImplementedError
@property
def domain_str(self):
return "(unknown)"
class Sampler:
def sample(self,
domain: Domain,
spec: Optional[Union[List[Dict], Dict]] = None,
size: int = 1):
raise NotImplementedError
class BaseSampler(Sampler):
def __str__(self):
return "Base"
class Uniform(Sampler):
def __str__(self):
return "Uniform"
class LogUniform(Sampler):
def __init__(self, base: float = 10):
self.base = base
assert self.base > 0, "Base has to be strictly greater than 0"
def __str__(self):
return "LogUniform"
class Normal(Sampler):
def __init__(self, mean: float = 0., sd: float = 0.):
self.mean = mean
self.sd = sd
assert self.sd > 0, "SD has to be strictly greater than 0"
def __str__(self):
return "Normal"
class Grid(Sampler):
"""Dummy sampler used for grid search"""
def sample(self,
domain: Domain,
spec: Optional[Union[List[Dict], Dict]] = None,
size: int = 1):
return RuntimeError("Do not call `sample()` on grid.")
class Float(Domain):
class _Uniform(Uniform):
def sample(self,
domain: "Float",
spec: Optional[Union[List[Dict], Dict]] = None,
size: int = 1):
assert domain.lower > float("-inf"), \
"Uniform needs a lower bound"
assert domain.upper < float("inf"), \
"Uniform needs a upper bound"
items = np.random.uniform(domain.lower, domain.upper, size=size)
return items if len(items) > 1 else domain.cast(items[0])
class _LogUniform(LogUniform):
def sample(self,
domain: "Float",
spec: Optional[Union[List[Dict], Dict]] = None,
size: int = 1):
assert domain.lower > 0, \
"LogUniform needs a lower bound greater than 0"
assert 0 < domain.upper < float("inf"), \
"LogUniform needs a upper bound greater than 0"
logmin = np.log(domain.lower) / np.log(self.base)
logmax = np.log(domain.upper) / np.log(self.base)
items = self.base**(np.random.uniform(logmin, logmax, size=size))
return items if len(items) > 1 else domain.cast(items[0])
class _Normal(Normal):
def sample(self,
domain: "Float",
spec: Optional[Union[List[Dict], Dict]] = None,
size: int = 1):
assert not domain.lower or domain.lower == float("-inf"), \
"Normal sampling does not allow a lower value bound."
assert not domain.upper or domain.upper == float("inf"), \
"Normal sampling does not allow a upper value bound."
items = np.random.normal(self.mean, self.sd, size=size)
return items if len(items) > 1 else domain.cast(items[0])
default_sampler_cls = _Uniform
def __init__(self, lower: Optional[float], upper: Optional[float]):
# Need to explicitly check for None
self.lower = lower if lower is not None else float("-inf")
self.upper = upper if upper is not None else float("inf")
def cast(self, value):
return float(value)
def uniform(self):
if not self.lower > float("-inf"):
raise ValueError(
"Uniform requires a lower bound. Make sure to set the "
"`lower` parameter of `Float()`.")
if not self.upper < float("inf"):
raise ValueError(
"Uniform requires a upper bound. Make sure to set the "
"`upper` parameter of `Float()`.")
new = copy(self)
new.set_sampler(self._Uniform())
return new
def loguniform(self, base: float = 10):
if not self.lower > 0:
raise ValueError(
"LogUniform requires a lower bound greater than 0."
f"Got: {self.lower}. Did you pass a variable that has "
"been log-transformed? If so, pass the non-transformed value "
"instead.")
if not 0 < self.upper < float("inf"):
raise ValueError(
"LogUniform requires a upper bound greater than 0. "
f"Got: {self.lower}. Did you pass a variable that has "
"been log-transformed? If so, pass the non-transformed value "
"instead.")
new = copy(self)
new.set_sampler(self._LogUniform(base))
return new
def normal(self, mean=0., sd=1.):
new = copy(self)
new.set_sampler(self._Normal(mean, sd))
return new
def quantized(self, q: float):
if self.lower > float("-inf") and not isclose(self.lower / q,
round(self.lower / q)):
raise ValueError(
f"Your lower variable bound {self.lower} is not divisible by "
f"quantization factor {q}.")
if self.upper < float("inf") and not isclose(self.upper / q,
round(self.upper / q)):
raise ValueError(
f"Your upper variable bound {self.upper} is not divisible by "
f"quantization factor {q}.")
new = copy(self)
new.set_sampler(Quantized(new.get_sampler(), q), allow_override=True)
return new
def is_valid(self, value: float):
return self.lower <= value <= self.upper
@property
def domain_str(self):
return f"({self.lower}, {self.upper})"
class Integer(Domain):
class _Uniform(Uniform):
def sample(self,
domain: "Integer",
spec: Optional[Union[List[Dict], Dict]] = None,
size: int = 1):
items = np.random.randint(domain.lower, domain.upper, size=size)
return items if len(items) > 1 else domain.cast(items[0])
class _LogUniform(LogUniform):
def sample(self,
domain: "Integer",
spec: Optional[Union[List[Dict], Dict]] = None,
size: int = 1):
assert domain.lower > 0, \
"LogUniform needs a lower bound greater than 0"
assert 0 < domain.upper < float("inf"), \
"LogUniform needs a upper bound greater than 0"
logmin = np.log(domain.lower) / np.log(self.base)
logmax = np.log(domain.upper) / np.log(self.base)
items = self.base**(np.random.uniform(logmin, logmax, size=size))
items = np.round(items).astype(int)
return items if len(items) > 1 else domain.cast(items[0])
default_sampler_cls = _Uniform
def __init__(self, lower, upper):
self.lower = lower
self.upper = upper
def cast(self, value):
return int(value)
def quantized(self, q: int):
new = copy(self)
new.set_sampler(Quantized(new.get_sampler(), q), allow_override=True)
return new
def uniform(self):
new = copy(self)
new.set_sampler(self._Uniform())
return new
def loguniform(self, base: float = 10):
if not self.lower > 0:
raise ValueError(
"LogUniform requires a lower bound greater than 0."
f"Got: {self.lower}. Did you pass a variable that has "
"been log-transformed? If so, pass the non-transformed value "
"instead.")
if not 0 < self.upper < float("inf"):
raise ValueError(
"LogUniform requires a upper bound greater than 0. "
f"Got: {self.lower}. Did you pass a variable that has "
"been log-transformed? If so, pass the non-transformed value "
"instead.")
new = copy(self)
new.set_sampler(self._LogUniform(base))
return new
def is_valid(self, value: int):
return self.lower <= value <= self.upper
@property
def domain_str(self):
return f"({self.lower}, {self.upper})"
class Categorical(Domain):
class _Uniform(Uniform):
def sample(self,
domain: "Categorical",
spec: Optional[Union[List[Dict], Dict]] = None,
size: int = 1):
items = random.choices(domain.categories, k=size)
return items if len(items) > 1 else domain.cast(items[0])
default_sampler_cls = _Uniform
def __init__(self, categories: Sequence):
self.categories = list(categories)
def uniform(self):
new = copy(self)
new.set_sampler(self._Uniform())
return new
def grid(self):
new = copy(self)
new.set_sampler(Grid())
return new
def __len__(self):
return len(self.categories)
def __getitem__(self, item):
return self.categories[item]
def is_valid(self, value: Any):
return value in self.categories
@property
def domain_str(self):
return f"{self.categories}"
class Function(Domain):
class _CallSampler(BaseSampler):
def sample(self,
domain: "Function",
spec: Optional[Union[List[Dict], Dict]] = None,
size: int = 1):
pass_spec = len(signature(domain.func).parameters) > 0
if pass_spec:
items = [
domain.func(spec[i] if isinstance(spec, list) else spec)
for i in range(size)
]
else:
items = [domain.func() for i in range(size)]
return items if len(items) > 1 else domain.cast(items[0])
default_sampler_cls = _CallSampler
def __init__(self, func: Callable):
if len(signature(func).parameters) > 1:
raise ValueError(
"The function passed to a `Function` parameter must accept "
"either 0 or 1 parameters.")
self.func = func
def is_function(self):
return True
def is_valid(self, value: Any):
return True # This is user-defined, so lets not assume anything
@property
def domain_str(self):
return f"{self.func}()"
class Quantized(Sampler):
def __init__(self, sampler: Sampler, q: Union[float, int]):
self.sampler = sampler
self.q = q
assert self.sampler, "Quantized() expects a sampler instance"
def get_sampler(self):
return self.sampler
def sample(self,
domain: Domain,
spec: Optional[Union[List[Dict], Dict]] = None,
size: int = 1):
values = self.sampler.sample(domain, spec, size)
quantized = np.round(np.divide(values, self.q)) * self.q
if not isinstance(quantized, np.ndarray):
return domain.cast(quantized)
return list(quantized)
# TODO (krfricke): Remove tune.function
def function(func):
logger.warning(
"DeprecationWarning: wrapping {} with tune.function() is no "
"longer needed".format(func))
return func
def sample_from(func: Callable[[Dict], Any]):
"""Specify that tune should sample configuration values from this function.
Arguments:
        func: A callable function to draw a sample from.
"""
return Function(func)
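# Editor's note: illustrative sketch only, not part of the original module.
# With a zero-argument callable, the resulting ``Function`` domain simply
# calls it on every draw; the lambda below is a made-up example.
#
#     custom = sample_from(lambda: np.random.uniform(1e-4, 1e-1))
#     custom.sample()        # one float produced by the lambda
#     custom.sample(size=3)  # a list of three such floats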
def uniform(lower: float, upper: float):
"""Sample a float value uniformly between ``lower`` and ``upper``.
Sampling from ``tune.uniform(1, 10)`` is equivalent to sampling from
    ``np.random.uniform(1, 10)``
"""
return Float(lower, upper).uniform()
def quniform(lower: float, upper: float, q: float):
"""Sample a quantized float value uniformly between ``lower`` and ``upper``.
    Sampling from ``tune.quniform(1, 10, 0.1)`` is equivalent to sampling from
    ``np.random.uniform(1, 10)`` and rounding the result to multiples of 0.1.
The value will be quantized, i.e. rounded to an integer increment of ``q``.
Quantization makes the upper bound inclusive.
"""
return Float(lower, upper).uniform().quantized(q)
def loguniform(lower: float, upper: float, base: float = 10):
"""Sugar for sampling in different orders of magnitude.
Args:
lower (float): Lower boundary of the output interval (e.g. 1e-4)
upper (float): Upper boundary of the output interval (e.g. 1e-2)
base (int): Base of the log. Defaults to 10.
"""
return Float(lower, upper).loguniform(base)
def qloguniform(lower: float, upper: float, q: float, base: float = 10):
"""Sugar for sampling in different orders of magnitude.
The value will be quantized, i.e. rounded to an integer increment of ``q``.
Quantization makes the upper bound inclusive.
Args:
lower (float): Lower boundary of the output interval (e.g. 1e-4)
upper (float): Upper boundary of the output interval (e.g. 1e-2)
q (float): Quantization number. The result will be rounded to an
integer increment of this value.
base (int): Base of the log. Defaults to 10.
"""
return Float(lower, upper).loguniform(base).quantized(q)
def choice(categories: List):
"""Sample a categorical value.
Sampling from ``tune.choice([1, 2])`` is equivalent to sampling from
``random.choice([1, 2])``
"""
return Categorical(categories).uniform()
def randint(lower: int, upper: int):
"""Sample an integer value uniformly between ``lower`` and ``upper``.
``lower`` is inclusive, ``upper`` is exclusive.
    Sampling from ``tune.randint(0, 10)`` is equivalent to sampling from
    ``np.random.randint(0, 10)``
"""
return Integer(lower, upper).uniform()
def lograndint(lower: int, upper: int, base: float = 10):
"""Sample an integer value log-uniformly between ``lower`` and ``upper``,
with ``base`` being the base of logarithm.
``lower`` is inclusive, ``upper`` is exclusive.
"""
return Integer(lower, upper).loguniform(base)
def qrandint(lower: int, upper: int, q: int = 1):
"""Sample an integer value uniformly between ``lower`` and ``upper``.
``lower`` is inclusive, ``upper`` is also inclusive (!).
The value will be quantized, i.e. rounded to an integer increment of ``q``.
Quantization makes the upper bound inclusive.
"""
return Integer(lower, upper).uniform().quantized(q)
def qlograndint(lower: int, upper: int, q: int, base: float = 10):
"""Sample an integer value log-uniformly between ``lower`` and ``upper``,
with ``base`` being the base of logarithm.
``lower`` is inclusive, ``upper`` is also inclusive (!).
The value will be quantized, i.e. rounded to an integer increment of ``q``.
Quantization makes the upper bound inclusive.
"""
return Integer(lower, upper).loguniform(base).quantized(q)
def randn(mean: float = 0., sd: float = 1.):
"""Sample a float value normally with ``mean`` and ``sd``.
Args:
mean (float): Mean of the normal distribution. Defaults to 0.
sd (float): SD of the normal distribution. Defaults to 1.
"""
return Float(None, None).normal(mean, sd)
def qrandn(mean: float, sd: float, q: float):
"""Sample a float value normally with ``mean`` and ``sd``.
The value will be quantized, i.e. rounded to an integer increment of ``q``.
Args:
mean (float): Mean of the normal distribution.
sd (float): SD of the normal distribution.
q (float): Quantization number. The result will be rounded to an
integer increment of this value.
"""
return Float(None, None).normal(mean, sd).quantized(q)
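# Editor's note: illustrative usage sketch appended by the editor; it is not
# part of the original module. Only names defined above are used, and the
# parameter ranges are arbitrary examples.
if __name__ == "__main__":
    lr = loguniform(1e-4, 1e-1)       # log-uniform float domain
    batch = qrandint(16, 256, q=16)   # integers rounded to multiples of 16
    act = choice(["relu", "tanh"])    # categorical domain
    print(lr.sample(size=3), batch.sample(), act.sample())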
| 32.57037 | 80 | 0.587105 |
4a2180da016beb86a73a3b306a1200ce47561479 | 1,446 | py | Python | pinger/server.py | shichao-an/udp-pinger | 463b2dc556c8fe7d972ed48c42122d563b8b096c | ["BSD-2-Clause"] | 1 | 2020-09-26T16:27:11.000Z | 2020-09-26T16:27:11.000Z | pinger/server.py | shichao-an/udp-pinger | 463b2dc556c8fe7d972ed48c42122d563b8b096c | ["BSD-2-Clause"] | null | null | null | pinger/server.py | shichao-an/udp-pinger | 463b2dc556c8fe7d972ed48c42122d563b8b096c | ["BSD-2-Clause"] | 1 | 2019-01-11T08:53:14.000Z | 2019-01-11T08:53:14.000Z |
#!/usr/bin/env python
import select
import socket
import sys
DEFAULT_PORT = 5005
class UDPPingerServer(object):
UDP_IP = '' # INADDR_ANY
def __init__(self, port):
self.socket = socket.socket(socket.AF_INET,
socket.SOCK_DGRAM)
self.server_address = (self.UDP_IP, port)
self.socket.bind(self.server_address)
self.socket.setblocking(0)
def start(self):
msg = 'Starting ping server at 0.0.0.0:%d\n' % self.server_address[1]
sys.stderr.write(msg)
while True:
r, w, e = select.select([self.socket.fileno()], [], [], 1)
if self.socket.fileno() in r:
data, addr = self.socket.recvfrom(1024)
self.log_message(addr)
self.socket.sendto(data, addr)
def log_message(self, addr):
msg = 'Ping from %s:%d\n' % addr
sys.stderr.write(msg)
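# Editor's note: illustrative client-side sketch added for context; it is not
# part of the original file. The helper name and defaults are assumptions.
def ping_once(host='127.0.0.1', port=DEFAULT_PORT, timeout=1.0):
    """Send one datagram to the server above and wait for the echoed reply."""
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.settimeout(timeout)
    try:
        sock.sendto(b'ping', (host, port))
        data, _ = sock.recvfrom(1024)
        return data == b'ping'
    except socket.timeout:
        return False
    finally:
        sock.close()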
if __name__ == '__main__':
usage = 'Usage: ./server.py [port]\n'
port = DEFAULT_PORT
if len(sys.argv) == 2:
if sys.argv[1].isdigit():
port = int(sys.argv[1])
elif sys.argv[1] in ('-h', '--help'):
sys.stderr.write(usage)
sys.exit(1)
else:
sys.stderr.write('Invalid port number')
sys.exit(1)
if len(sys.argv) > 2:
sys.stderr.write(usage)
sys.exit(1)
server = UDPPingerServer(port)
server.start()
| 27.283019 | 77 | 0.551867 |
4a2181397ced150e3d38553c051bf19184840ec1 | 12,306 | py | Python | test/lit.cfg.py | arunkumarbhattar/llvm | 2c4ca6832fa6b306ee6a7010bfb80a3f2596f824 | ["Apache-2.0"] | 4,812 | 2015-01-02T19:38:10.000Z | 2022-03-27T12:42:24.000Z | test/lit.cfg.py | arunkumarbhattar/llvm | 2c4ca6832fa6b306ee6a7010bfb80a3f2596f824 | ["Apache-2.0"] | 71 | 2016-07-26T15:16:53.000Z | 2022-03-31T14:39:47.000Z | test/lit.cfg.py | arunkumarbhattar/llvm | 2c4ca6832fa6b306ee6a7010bfb80a3f2596f824 | ["Apache-2.0"] | 2,543 | 2015-01-01T11:18:36.000Z | 2022-03-22T21:36:00.000Z |
# -*- Python -*-
# Configuration file for the 'lit' test runner.
import os
import sys
import re
import platform
import subprocess
import lit.util
import lit.formats
from lit.llvm import llvm_config
from lit.llvm.subst import FindTool
from lit.llvm.subst import ToolSubst
# name: The name of this test suite.
config.name = 'LLVM'
# testFormat: The test format to use to interpret tests.
config.test_format = lit.formats.ShTest(not llvm_config.use_lit_shell)
# suffixes: A list of file extensions to treat as test files. This is overriden
# by individual lit.local.cfg files in the test subdirectories.
config.suffixes = ['.ll', '.c', '.cxx', '.test', '.txt', '.s', '.mir']
# excludes: A list of directories to exclude from the testsuite. The 'Inputs'
# subdirectories contain auxiliary inputs for various tests in their parent
# directories.
config.excludes = ['Inputs', 'CMakeLists.txt', 'README.txt', 'LICENSE.txt']
# test_source_root: The root path where tests are located.
config.test_source_root = os.path.dirname(__file__)
# test_exec_root: The root path where tests should be run.
config.test_exec_root = os.path.join(config.llvm_obj_root, 'test')
# Tweak the PATH to include the tools dir.
llvm_config.with_environment('PATH', config.llvm_tools_dir, append_path=True)
# Propagate some variables from the host environment.
llvm_config.with_system_environment(
['HOME', 'INCLUDE', 'LIB', 'TMP', 'TEMP', 'ASAN_SYMBOLIZER_PATH', 'MSAN_SYMBOLIZER_PATH'])
# Set up OCAMLPATH to include newly built OCaml libraries.
top_ocaml_lib = os.path.join(config.llvm_lib_dir, 'ocaml')
llvm_ocaml_lib = os.path.join(top_ocaml_lib, 'llvm')
llvm_config.with_system_environment('OCAMLPATH')
llvm_config.with_environment('OCAMLPATH', top_ocaml_lib, append_path=True)
llvm_config.with_environment('OCAMLPATH', llvm_ocaml_lib, append_path=True)
llvm_config.with_system_environment('CAML_LD_LIBRARY_PATH')
llvm_config.with_environment(
'CAML_LD_LIBRARY_PATH', llvm_ocaml_lib, append_path=True)
# Set up OCAMLRUNPARAM to enable backtraces in OCaml tests.
llvm_config.with_environment('OCAMLRUNPARAM', 'b')
# Provide the path to asan runtime lib 'libclang_rt.asan_osx_dynamic.dylib' if
# available. This is darwin specific since it's currently only needed on darwin.
def get_asan_rtlib():
if not 'Address' in config.llvm_use_sanitizer or \
not 'Darwin' in config.host_os or \
not 'x86' in config.host_triple:
return ''
try:
import glob
except:
print('glob module not found, skipping get_asan_rtlib() lookup')
return ''
# The libclang_rt.asan_osx_dynamic.dylib path is obtained using the relative
# path from the host cc.
host_lib_dir = os.path.join(os.path.dirname(config.host_cc), '../lib')
asan_dylib_dir_pattern = host_lib_dir + \
'/clang/*/lib/darwin/libclang_rt.asan_osx_dynamic.dylib'
found_dylibs = glob.glob(asan_dylib_dir_pattern)
if len(found_dylibs) != 1:
return ''
return found_dylibs[0]
llvm_config.use_default_substitutions()
# Add site-specific substitutions.
config.substitutions.append(('%llvmshlibdir', config.llvm_shlib_dir))
config.substitutions.append(('%shlibext', config.llvm_shlib_ext))
config.substitutions.append(('%exeext', config.llvm_exe_ext))
lli_args = []
# The target triple used by default by lli is the process target triple (some
# triple appropriate for generating code for the current process) but because
# we don't support COFF in MCJIT well enough for the tests, force ELF format on
# Windows. FIXME: the process target triple should be used here, but this is
# difficult to obtain on Windows.
if re.search(r'cygwin|windows-gnu|windows-msvc', config.host_triple):
lli_args = ['-mtriple=' + config.host_triple + '-elf']
llc_args = []
# Similarly, have a macro to use llc with DWARF even when the host is Windows
if re.search(r'windows-msvc', config.target_triple):
llc_args = [' -mtriple=' +
config.target_triple.replace('-msvc', '-gnu')]
# Provide the path to asan runtime lib if available. On darwin, this lib needs
# to be loaded via DYLD_INSERT_LIBRARIES before libLTO.dylib in case the files
# to be linked contain instrumented sanitizer code.
ld64_cmd = config.ld64_executable
asan_rtlib = get_asan_rtlib()
if asan_rtlib:
ld64_cmd = 'DYLD_INSERT_LIBRARIES={} {}'.format(asan_rtlib, ld64_cmd)
ocamlc_command = '%s ocamlc -cclib -L%s %s' % (
config.ocamlfind_executable, config.llvm_lib_dir, config.ocaml_flags)
ocamlopt_command = 'true'
if config.have_ocamlopt:
ocamlopt_command = '%s ocamlopt -cclib -L%s -cclib -Wl,-rpath,%s %s' % (
config.ocamlfind_executable, config.llvm_lib_dir, config.llvm_lib_dir, config.ocaml_flags)
opt_viewer_cmd = '%s %s/tools/opt-viewer/opt-viewer.py' % (sys.executable, config.llvm_src_root)
llvm_locstats_tool = os.path.join(config.llvm_tools_dir, 'llvm-locstats')
config.substitutions.append(
('%llvm-locstats', "'%s' %s" % (config.python_executable, llvm_locstats_tool)))
config.llvm_locstats_used = os.path.exists(llvm_locstats_tool)
tools = [
ToolSubst('%lli', FindTool('lli'), post='.', extra_args=lli_args),
ToolSubst('%llc_dwarf', FindTool('llc'), extra_args=llc_args),
ToolSubst('%go', config.go_executable, unresolved='ignore'),
ToolSubst('%gold', config.gold_executable, unresolved='ignore'),
ToolSubst('%ld64', ld64_cmd, unresolved='ignore'),
ToolSubst('%ocamlc', ocamlc_command, unresolved='ignore'),
ToolSubst('%ocamlopt', ocamlopt_command, unresolved='ignore'),
ToolSubst('%opt-viewer', opt_viewer_cmd),
ToolSubst('%llvm-objcopy', FindTool('llvm-objcopy')),
ToolSubst('%llvm-strip', FindTool('llvm-strip')),
]
# FIXME: Why do we have both `lli` and `%lli` that do slightly different things?
tools.extend([
'dsymutil', 'lli', 'lli-child-target', 'llvm-ar', 'llvm-as',
'llvm-bcanalyzer', 'llvm-config', 'llvm-cov', 'llvm-cxxdump', 'llvm-cvtres',
'llvm-diff', 'llvm-dis', 'llvm-dwarfdump', 'llvm-exegesis', 'llvm-extract',
'llvm-isel-fuzzer', 'llvm-ifs', 'llvm-jitlink', 'llvm-opt-fuzzer', 'llvm-lib',
'llvm-link', 'llvm-lto', 'llvm-lto2', 'llvm-mc', 'llvm-mca',
'llvm-modextract', 'llvm-nm', 'llvm-objcopy', 'llvm-objdump',
'llvm-pdbutil', 'llvm-profdata', 'llvm-ranlib', 'llvm-rc', 'llvm-readelf',
'llvm-readobj', 'llvm-rtdyld', 'llvm-size', 'llvm-split', 'llvm-strings',
'llvm-strip', 'llvm-tblgen', 'llvm-undname', 'llvm-c-test', 'llvm-cxxfilt',
'llvm-xray', 'yaml2obj', 'obj2yaml', 'yaml-bench', 'verify-uselistorder',
'bugpoint', 'llc', 'llvm-symbolizer', 'opt', 'sancov', 'sanstats'])
# The following tools are optional
tools.extend([
ToolSubst('llvm-go', unresolved='ignore'),
ToolSubst('llvm-mt', unresolved='ignore'),
ToolSubst('Kaleidoscope-Ch3', unresolved='ignore'),
ToolSubst('Kaleidoscope-Ch4', unresolved='ignore'),
ToolSubst('Kaleidoscope-Ch5', unresolved='ignore'),
ToolSubst('Kaleidoscope-Ch6', unresolved='ignore'),
ToolSubst('Kaleidoscope-Ch7', unresolved='ignore'),
ToolSubst('Kaleidoscope-Ch8', unresolved='ignore')])
llvm_config.add_tool_substitutions(tools, config.llvm_tools_dir)
# Targets
config.targets = frozenset(config.targets_to_build.split())
for arch in config.targets_to_build.split():
config.available_features.add(arch.lower() + '-registered-target')
# Features
known_arches = ["x86_64", "mips64", "ppc64", "aarch64"]
if (config.host_ldflags.find("-m32") < 0
and any(config.llvm_host_triple.startswith(x) for x in known_arches)):
config.available_features.add("llvm-64-bits")
config.available_features.add("host-byteorder-" + sys.byteorder + "-endian")
if sys.platform in ['win32']:
# ExecutionEngine, no weak symbols in COFF.
config.available_features.add('uses_COFF')
else:
# Others/can-execute.txt
config.available_features.add('can-execute')
# Loadable module
if config.has_plugins:
config.available_features.add('plugins')
# Static libraries are not built if BUILD_SHARED_LIBS is ON.
if not config.build_shared_libs and not config.link_llvm_dylib:
config.available_features.add('static-libs')
def have_cxx_shared_library():
readobj_exe = lit.util.which('llvm-readobj', config.llvm_tools_dir)
if not readobj_exe:
print('llvm-readobj not found')
return False
try:
readobj_cmd = subprocess.Popen(
[readobj_exe, '-needed-libs', readobj_exe], stdout=subprocess.PIPE)
except OSError:
print('could not exec llvm-readobj')
return False
readobj_out = readobj_cmd.stdout.read().decode('ascii')
readobj_cmd.wait()
regex = re.compile(r'(libc\+\+|libstdc\+\+|msvcp).*\.(so|dylib|dll)')
needed_libs = False
for line in readobj_out.splitlines():
if 'NeededLibraries [' in line:
needed_libs = True
if ']' in line:
needed_libs = False
if needed_libs and regex.search(line.lower()):
return True
return False
if have_cxx_shared_library():
config.available_features.add('cxx-shared-library')
if config.libcxx_used:
config.available_features.add('libcxx-used')
# Direct object generation
if not 'hexagon' in config.target_triple:
config.available_features.add('object-emission')
# LLVM can be configured with an empty default triple
# Some tests are "generic" and require a valid default triple
if config.target_triple:
config.available_features.add('default_triple')
import subprocess
def have_ld_plugin_support():
if not os.path.exists(os.path.join(config.llvm_shlib_dir, 'LLVMgold' + config.llvm_shlib_ext)):
return False
ld_cmd = subprocess.Popen(
[config.gold_executable, '--help'], stdout=subprocess.PIPE, env={'LANG': 'C'})
ld_out = ld_cmd.stdout.read().decode()
ld_cmd.wait()
if not '-plugin' in ld_out:
return False
# check that the used emulations are supported.
emu_line = [l for l in ld_out.split('\n') if 'supported emulations' in l]
if len(emu_line) != 1:
return False
emu_line = emu_line[0]
fields = emu_line.split(':')
if len(fields) != 3:
return False
emulations = fields[2].split()
if 'elf_x86_64' not in emulations:
return False
if 'elf32ppc' in emulations:
config.available_features.add('ld_emu_elf32ppc')
ld_version = subprocess.Popen(
[config.gold_executable, '--version'], stdout=subprocess.PIPE, env={'LANG': 'C'})
if not 'GNU gold' in ld_version.stdout.read().decode():
return False
ld_version.wait()
return True
if have_ld_plugin_support():
config.available_features.add('ld_plugin')
def have_ld64_plugin_support():
if not os.path.exists(os.path.join(config.llvm_shlib_dir, 'libLTO' + config.llvm_shlib_ext)):
return False
if config.ld64_executable == '':
return False
ld_cmd = subprocess.Popen(
[config.ld64_executable, '-v'], stderr=subprocess.PIPE)
ld_out = ld_cmd.stderr.read().decode()
ld_cmd.wait()
if 'ld64' not in ld_out or 'LTO' not in ld_out:
return False
return True
if have_ld64_plugin_support():
config.available_features.add('ld64_plugin')
# Ask llvm-config about asserts and global-isel.
llvm_config.feature_config(
[('--assertion-mode', {'ON': 'asserts'}),
('--has-global-isel', {'ON': 'global-isel'})])
if 'darwin' == sys.platform:
try:
sysctl_cmd = subprocess.Popen(['sysctl', 'hw.optional.fma'],
stdout=subprocess.PIPE)
except OSError:
print('Could not exec sysctl')
result = sysctl_cmd.stdout.read().decode('ascii')
if -1 != result.find('hw.optional.fma: 1'):
config.available_features.add('fma3')
sysctl_cmd.wait()
# .debug_frame is not emitted for targeting Windows x64.
if not re.match(r'^x86_64.*-(windows-gnu|windows-msvc)', config.target_triple):
config.available_features.add('debug_frame')
if config.have_libxar:
config.available_features.add('xar')
if config.enable_threads:
config.available_features.add('thread_support')
if config.llvm_libxml2_enabled:
config.available_features.add('libxml2')
if config.have_opt_viewer_modules:
config.available_features.add('have_opt_viewer_modules')
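# Editor's note: illustrative example added for context, not part of the
# upstream configuration. Individual lit tests consume the features registered
# above through REQUIRES/UNSUPPORTED lines; the snippet below is made up:
#
#     ; REQUIRES: asserts, x86-registered-target
#     ; RUN: opt < %s -S | FileCheck %s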
| 36.408284 | 99 | 0.709898 |
4a21817553cd382a66e3a0478aa06d02bbc7d458 | 1,985 | py | Python | image_correction.py | mhchen124/CarND-LaneDetectionAdv | 19c69e5cd7b92838a76f46d3263065db15351fe0 | ["MIT"] | 1 | 2020-03-05T12:50:40.000Z | 2020-03-05T12:50:40.000Z | image_correction.py | mhchen124/CarND-LaneDetectionAdv | 19c69e5cd7b92838a76f46d3263065db15351fe0 | ["MIT"] | null | null | null | image_correction.py | mhchen124/CarND-LaneDetectionAdv | 19c69e5cd7b92838a76f46d3263065db15351fe0 | ["MIT"] | null | null | null |
import numpy as np
import cv2
import glob
import pickle
import matplotlib.pyplot as plt
#%matplotlib qt
# prepare object points, like (0,0,0), (1,0,0), (2,0,0) ....,(6,5,0)
objp = np.zeros((6*9,3), np.float32)
objp[:,:2] = np.mgrid[0:9,0:6].T.reshape(-1,2)
# Arrays to store object points and image points from all the images.
objpoints = [] # 3d points in real world space
imgpoints = [] # 2d points in image plane.
dist_pickle = {}
# Make a list of calibration images
images = glob.glob('./camera_cal/calibration*.jpg')
# Step through the list and search for chessboard corners
for fname in images:
img = cv2.imread(fname)
img_size = (img.shape[1], img.shape[0])
gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
# Find the chessboard corners
ret, corners = cv2.findChessboardCorners(gray, (9,6), None)
# If found, add object points, image points
if ret == True:
objpoints.append(objp)
imgpoints.append(corners)
# Draw and display the corners
img = cv2.drawChessboardCorners(img, (9,6), corners, ret)
cv2.imshow('img',img)
cv2.imwrite(fname.replace("camera_cal", "camera_cal_corner"), img)
cv2.waitKey(500)
ret, mtx, dist, rvec, tvec = cv2.calibrateCamera(objpoints, imgpoints, img_size, None, None)
# Save a set of mtx/dist for later use
if fname.__contains__("calibration3"):
dist_pickle["mtx"] = mtx
dist_pickle["dist"] = dist
dst = cv2.undistort(gray, mtx, dist, None, mtx)
cv2.imwrite(fname.replace("camera_cal", "camera_cal_undistorted"), dst)
cv2.waitKey(500)
cv2.destroyAllWindows()
pickle.dump( dist_pickle, open( "dist_pickle.p", "wb" ) )
# Apply the distortion correction to test3.jpg using the calibration computed above
img = cv2.imread("./test_images/test3.jpg")
mtx = dist_pickle["mtx"]
dist = dist_pickle["dist"]
dst = cv2.undistort(img, mtx, dist, None, mtx)
cv2.imwrite("./output_images/test3_undistorted.jpg", dst)
cv2.waitKey(500)
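# Editor's note: illustrative sketch added for context, not part of the
# original script. It shows how a separate script could reuse the calibration
# saved to dist_pickle.p above; the file names mirror the ones used here.
#
#     import cv2, pickle
#     cal = pickle.load(open("dist_pickle.p", "rb"))
#     frame = cv2.imread("./test_images/test1.jpg")
#     undist = cv2.undistort(frame, cal["mtx"], cal["dist"], None, cal["mtx"])
#     cv2.imwrite("./output_images/test1_undistorted.jpg", undist)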
| 31.015625 | 100 | 0.668514 |
4a218228260a50b01bd9fde6bfc91bcc9e784cd4 | 9,158 | py | Python | tests/test_blob.py | ShaneMicro/azure-functions-python-library | f56564effbf291a27e1bd5751a38484af387bb7f | ["MIT"] | null | null | null | tests/test_blob.py | ShaneMicro/azure-functions-python-library | f56564effbf291a27e1bd5751a38484af387bb7f | ["MIT"] | 1 | 2022-03-02T11:49:02.000Z | 2022-03-02T11:49:02.000Z | tests/test_blob.py | ShaneMicro/azure-functions-python-library | f56564effbf291a27e1bd5751a38484af387bb7f | ["MIT"] | null | null | null |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import json
import unittest
from typing import Any, Dict
import azure.functions as func
import azure.functions.blob as afb
from azure.functions.blob import InputStream
from azure.functions.meta import Datum
class TestBlob(unittest.TestCase):
def test_blob_input_type(self):
check_input_type = afb.BlobConverter.check_input_type_annotation
self.assertTrue(check_input_type(str))
self.assertTrue(check_input_type(bytes))
self.assertTrue(check_input_type(InputStream))
self.assertFalse(check_input_type(bytearray))
def test_blob_input_none(self):
result: func.DocumentList = afb.BlobConverter.decode(
data=None, trigger_metadata=None)
self.assertIsNone(result)
def test_blob_input_incorrect_type(self):
datum: Datum = Datum(value=b'string_content', type='bytearray')
with self.assertRaises(ValueError):
afb.BlobConverter.decode(data=datum, trigger_metadata=None)
def test_blob_input_string_no_metadata(self):
datum: Datum = Datum(value='string_content', type='string')
result: InputStream = afb.BlobConverter.decode(
data=datum, trigger_metadata=None)
self.assertIsNotNone(result)
# Verify result metadata
self.assertIsInstance(result, InputStream)
self.assertIsNone(result.name)
self.assertIsNone(result.length)
self.assertIsNone(result.uri)
self.assertTrue(result.readable())
self.assertFalse(result.seekable())
self.assertFalse(result.writable())
# Verify result content
content: bytes = result.read()
self.assertEqual(content, b'string_content')
def test_blob_input_bytes_no_metadata(self):
datum: Datum = Datum(value=b'bytes_content', type='bytes')
result: InputStream = afb.BlobConverter.decode(
data=datum, trigger_metadata=None)
self.assertIsNotNone(result)
# Verify result metadata
self.assertIsInstance(result, InputStream)
self.assertIsNone(result.name)
self.assertIsNone(result.length)
self.assertIsNone(result.uri)
self.assertTrue(result.readable())
self.assertFalse(result.seekable())
self.assertFalse(result.writable())
# Verify result content
content: bytes = result.read()
self.assertEqual(content, b'bytes_content')
def test_blob_input_with_metadata_no_blob_properties(self):
datum: Datum = Datum(value=b'blob_content', type='bytes')
trigger_metadata: Dict[str, Any] = {
'BlobTrigger': Datum('blob_trigger_name', 'string'),
'Uri': Datum('https://test.io/blob_trigger', 'string')
}
result: InputStream = afb. \
BlobConverter.decode(data=datum, trigger_metadata=trigger_metadata)
# Verify result metadata
self.assertIsInstance(result, InputStream)
self.assertEqual(result.name, 'blob_trigger_name')
self.assertEqual(result.length, None)
self.assertEqual(result.uri, 'https://test.io/blob_trigger')
self.assertEqual(result.blob_properties, None)
self.assertEqual(result.metadata, None)
def test_blob_input_with_metadata_no_trigger_metadata(self):
sample_blob_properties = '{"Length": "12"}'
datum: Datum = Datum(value=b'blob_content', type='bytes')
trigger_metadata: Dict[str, Any] = {
'Properties': Datum(sample_blob_properties, 'json'),
'BlobTrigger': Datum('blob_trigger_name', 'string'),
'Uri': Datum('https://test.io/blob_trigger', 'string')
}
result: InputStream = afb. \
BlobConverter.decode(data=datum, trigger_metadata=trigger_metadata)
# Verify result metadata
self.assertIsInstance(result, InputStream)
self.assertEqual(result.name, 'blob_trigger_name')
self.assertEqual(result.length, len(b'blob_content'))
self.assertEqual(result.uri, 'https://test.io/blob_trigger')
self.assertEqual(result.blob_properties,
json.loads(sample_blob_properties))
self.assertEqual(result.metadata, None)
def test_blob_input_with_metadata_with_trigger_metadata(self):
sample_metadata = '{"Hello": "World"}'
sample_blob_properties = '''{
"ContentMD5": "B54d+wzLC8IlnxyyZxwPsw==",
"ContentType": "application/octet-stream",
"ETag": "0x8D8989BC453467D",
"Created": "2020-12-03T08:07:26+00:00",
"LastModified": "2020-12-04T21:30:05+00:00",
"BlobType": 2,
"LeaseStatus": 2,
"LeaseState": 1,
"LeaseDuration": 0,
"Length": "12"
}'''
datum: Datum = Datum(value=b'blob_content', type='bytes')
trigger_metadata: Dict[str, Any] = {
'Metadata': Datum(sample_metadata, 'json'),
'Properties': Datum(sample_blob_properties, 'json'),
'BlobTrigger': Datum('blob_trigger_name', 'string'),
'Uri': Datum('https://test.io/blob_trigger', 'string')
}
result: InputStream = afb.BlobConverter.decode(
data=datum, trigger_metadata=trigger_metadata)
# Verify result metadata
self.assertIsInstance(result, InputStream)
self.assertEqual(result.name, 'blob_trigger_name')
self.assertEqual(result.length, len(b'blob_content'))
self.assertEqual(result.uri, 'https://test.io/blob_trigger')
self.assertEqual(result.blob_properties,
json.loads(sample_blob_properties))
self.assertEqual(result.metadata,
json.loads(sample_metadata))
def test_blob_input_with_metadata_with_incorrect_trigger_metadata(self):
sample_metadata = 'Hello World'
sample_blob_properties = '''{"Length": "12"}'''
datum: Datum = Datum(value=b'blob_content', type='bytes')
trigger_metadata: Dict[str, Any] = {
'Metadata': Datum(sample_metadata, 'string'),
'Properties': Datum(sample_blob_properties, 'json'),
'BlobTrigger': Datum('blob_trigger_name', 'string'),
'Uri': Datum('https://test.io/blob_trigger', 'string')
}
result: InputStream = afb. \
BlobConverter.decode(data=datum, trigger_metadata=trigger_metadata)
# Verify result metadata
self.assertIsInstance(result, InputStream)
self.assertEqual(result.name, 'blob_trigger_name')
self.assertEqual(result.length, len(b'blob_content'))
self.assertEqual(result.uri, 'https://test.io/blob_trigger')
self.assertEqual(result.blob_properties,
json.loads(sample_blob_properties))
self.assertEqual(result.metadata, None)
def test_blob_incomplete_read(self):
datum: Datum = Datum(value=b'blob_content', type='bytes')
result: InputStream = afb.BlobConverter.decode(
data=datum, trigger_metadata=None)
self.assertEqual(result.read(size=3), b'blo')
def test_blob_output_custom_output_content(self):
class CustomOutput:
def read(self) -> bytes:
return b'custom_output_content'
# Try encoding a custom instance as an output return
out = CustomOutput()
result: Datum = afb.BlobConverter.encode(obj=out, expected_type=None)
self.assertEqual(result.value, b'custom_output_content')
self.assertEqual(result.type, 'bytes')
def test_blob_output_custom_output_without_read_method(self):
class CustomOutput:
def _read(self) -> bytes:
return b'should_not_be_called'
# Try encoding a custom instance without read() method
# This should raise an error when an unknown output is returned
out = CustomOutput()
with self.assertRaises(NotImplementedError):
afb.BlobConverter.encode(obj=out, expected_type=None)
def test_blob_output_string(self):
out: str = 'blob_output_string'
result: Datum = afb.BlobConverter.encode(obj=out, expected_type=None)
self.assertEqual(result.value, 'blob_output_string')
self.assertEqual(result.type, 'string')
def test_blob_output_bytes(self):
out: bytes = b'blob_output_bytes'
result: Datum = afb.BlobConverter.encode(obj=out, expected_type=None)
self.assertEqual(result.value, b'blob_output_bytes')
self.assertEqual(result.type, 'bytes')
def test_blob_output_type(self):
check_output_type = afb.BlobConverter.check_output_type_annotation
self.assertTrue(check_output_type(str))
self.assertTrue(check_output_type(bytes))
self.assertTrue(check_output_type(bytearray))
self.assertTrue(check_output_type(InputStream))
def test_blob_output_custom_type(self):
class CustomOutput:
def read(self) -> Datum:
return Datum(b'custom_output_content', 'types')
check_output_type = afb.BlobConverter.check_output_type_annotation
self.assertTrue(check_output_type(CustomOutput))
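# Editor's note: convenience guard added so the suite can also be run directly
# with `python test_blob.py`; the original file relies on an external test runner.
if __name__ == '__main__':
    unittest.main()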
| 42.202765 | 79 | 0.67078 |
4a2183e454f1baee9756cbbe746c4cfc47f69e9a | 16,036 | py | Python | tensorflow.py | jyotidabass/document_text_recognition | 7bbdf4b1e5f7e9a28a7047dcd13eb2a5501643ef | ["Apache-2.0"] | 2 | 2022-03-09T11:15:14.000Z | 2022-03-09T15:29:12.000Z | tensorflow.py | jyotidabass/document_text_recognition | 7bbdf4b1e5f7e9a28a7047dcd13eb2a5501643ef | ["Apache-2.0"] | null | null | null | tensorflow.py | jyotidabass/document_text_recognition | 7bbdf4b1e5f7e9a28a7047dcd13eb2a5501643ef | ["Apache-2.0"] | null | null | null |
# Copyright (C) 2021-2022, Mindee.
# This program is licensed under the Apache License version 2.
# See LICENSE or go to <https://www.apache.org/licenses/LICENSE-2.0.txt> for full license details.
import random
from typing import Any, Callable, Dict, Iterable, List, Tuple, Union
import numpy as np
import tensorflow as tf
import tensorflow_addons as tfa
from doctr.utils.repr import NestedObject
from ..functional.tensorflow import random_shadow
__all__ = ['Compose', 'Resize', 'Normalize', 'LambdaTransformation', 'ToGray', 'RandomBrightness',
'RandomContrast', 'RandomSaturation', 'RandomHue', 'RandomGamma', 'RandomJpegQuality', 'GaussianBlur',
'ChannelShuffle', 'GaussianNoise', 'RandomHorizontalFlip', 'RandomShadow']
class Compose(NestedObject):
"""Implements a wrapper that will apply transformations sequentially
Example::
>>> from doctr.transforms import Compose, Resize
>>> import tensorflow as tf
>>> transfos = Compose([Resize((32, 32))])
>>> out = transfos(tf.random.uniform(shape=[64, 64, 3], minval=0, maxval=1))
Args:
transforms: list of transformation modules
"""
_children_names: List[str] = ['transforms']
def __init__(self, transforms: List[Callable[[Any], Any]]) -> None:
self.transforms = transforms
def __call__(self, x: Any) -> Any:
for t in self.transforms:
x = t(x)
return x
class Resize(NestedObject):
"""Resizes a tensor to a target size
Example::
>>> from doctr.transforms import Resize
>>> import tensorflow as tf
>>> transfo = Resize((32, 32))
>>> out = transfo(tf.random.uniform(shape=[64, 64, 3], minval=0, maxval=1))
Args:
output_size: expected output size
method: interpolation method
preserve_aspect_ratio: if `True`, preserve aspect ratio and pad the rest with zeros
symmetric_pad: if `True` while preserving aspect ratio, the padding will be done symmetrically
"""
def __init__(
self,
output_size: Tuple[int, int],
method: str = 'bilinear',
preserve_aspect_ratio: bool = False,
symmetric_pad: bool = False,
) -> None:
self.output_size = output_size
self.method = method
self.preserve_aspect_ratio = preserve_aspect_ratio
self.symmetric_pad = symmetric_pad
def extra_repr(self) -> str:
_repr = f"output_size={self.output_size}, method='{self.method}'"
if self.preserve_aspect_ratio:
_repr += f", preserve_aspect_ratio={self.preserve_aspect_ratio}, symmetric_pad={self.symmetric_pad}"
return _repr
def __call__(self, img: tf.Tensor) -> tf.Tensor:
input_dtype = img.dtype
img = tf.image.resize(img, self.output_size, self.method, self.preserve_aspect_ratio)
if self.preserve_aspect_ratio:
# pad width
if not self.symmetric_pad:
offset = (0, 0)
elif self.output_size[0] == img.shape[0]:
offset = (0, int((self.output_size[1] - img.shape[1]) / 2))
else:
offset = (int((self.output_size[0] - img.shape[0]) / 2), 0)
img = tf.image.pad_to_bounding_box(img, *offset, *self.output_size)
return tf.cast(img, dtype=input_dtype)
class Normalize(NestedObject):
"""Normalize a tensor to a Gaussian distribution for each channel
Example::
>>> from doctr.transforms import Normalize
>>> import tensorflow as tf
>>> transfo = Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
>>> out = transfo(tf.random.uniform(shape=[8, 64, 64, 3], minval=0, maxval=1))
Args:
mean: average value per channel
std: standard deviation per channel
"""
def __init__(self, mean: Tuple[float, float, float], std: Tuple[float, float, float]) -> None:
self.mean = tf.constant(mean)
self.std = tf.constant(std)
def extra_repr(self) -> str:
return f"mean={self.mean.numpy().tolist()}, std={self.std.numpy().tolist()}"
def __call__(self, img: tf.Tensor) -> tf.Tensor:
img -= tf.cast(self.mean, dtype=img.dtype)
img /= tf.cast(self.std, dtype=img.dtype)
return img
class LambdaTransformation(NestedObject):
"""Normalize a tensor to a Gaussian distribution for each channel
Example::
>>> from doctr.transforms import LambdaTransformation
>>> import tensorflow as tf
>>> transfo = LambdaTransformation(lambda x: x/ 255.)
>>> out = transfo(tf.random.uniform(shape=[8, 64, 64, 3], minval=0, maxval=1))
Args:
fn: the function to be applied to the input tensor
"""
def __init__(self, fn: Callable[[tf.Tensor], tf.Tensor]) -> None:
self.fn = fn
def __call__(self, img: tf.Tensor) -> tf.Tensor:
return self.fn(img)
class ToGray(NestedObject):
"""Convert a RGB tensor (batch of images or image) to a 3-channels grayscale tensor
Example::
>>> from doctr.transforms import Normalize
>>> import tensorflow as tf
>>> transfo = ToGray()
>>> out = transfo(tf.random.uniform(shape=[8, 64, 64, 3], minval=0, maxval=1))
"""
def __init__(self, num_output_channels: int = 1):
self.num_output_channels = num_output_channels
def __call__(self, img: tf.Tensor) -> tf.Tensor:
img = tf.image.rgb_to_grayscale(img)
return img if self.num_output_channels == 1 else tf.repeat(img, self.num_output_channels, axis=-1)
class RandomBrightness(NestedObject):
"""Randomly adjust brightness of a tensor (batch of images or image) by adding a delta
to all pixels
Example:
>>> from doctr.transforms import Normalize
>>> import tensorflow as tf
>>> transfo = Brightness()
>>> out = transfo(tf.random.uniform(shape=[8, 64, 64, 3], minval=0, maxval=1))
Args:
max_delta: offset to add to each pixel is randomly picked in [-max_delta, max_delta]
"""
def __init__(self, max_delta: float = 0.3) -> None:
self.max_delta = max_delta
def extra_repr(self) -> str:
return f"max_delta={self.max_delta}"
def __call__(self, img: tf.Tensor) -> tf.Tensor:
return tf.image.random_brightness(img, max_delta=self.max_delta)
class RandomContrast(NestedObject):
"""Randomly adjust contrast of a tensor (batch of images or image) by adjusting
each pixel: (img - mean) * contrast_factor + mean.
Example:
>>> from doctr.transforms import Normalize
>>> import tensorflow as tf
>>> transfo = Contrast()
>>> out = transfo(tf.random.uniform(shape=[8, 64, 64, 3], minval=0, maxval=1))
Args:
delta: multiplicative factor is picked in [1-delta, 1+delta] (reduce contrast if factor<1)
"""
def __init__(self, delta: float = .3) -> None:
self.delta = delta
def extra_repr(self) -> str:
return f"delta={self.delta}"
def __call__(self, img: tf.Tensor) -> tf.Tensor:
return tf.image.random_contrast(img, lower=1 - self.delta, upper=1 / (1 - self.delta))
class RandomSaturation(NestedObject):
"""Randomly adjust saturation of a tensor (batch of images or image) by converting to HSV and
increasing saturation by a factor.
Example:
>>> from doctr.transforms import Normalize
>>> import tensorflow as tf
>>> transfo = Saturation()
>>> out = transfo(tf.random.uniform(shape=[8, 64, 64, 3], minval=0, maxval=1))
Args:
delta: multiplicative factor is picked in [1-delta, 1+delta] (reduce saturation if factor<1)
"""
def __init__(self, delta: float = .5) -> None:
self.delta = delta
def extra_repr(self) -> str:
return f"delta={self.delta}"
def __call__(self, img: tf.Tensor) -> tf.Tensor:
return tf.image.random_saturation(img, lower=1 - self.delta, upper=1 + self.delta)
class RandomHue(NestedObject):
"""Randomly adjust hue of a tensor (batch of images or image) by converting to HSV and adding a delta
Example::
>>> from doctr.transforms import Normalize
>>> import tensorflow as tf
>>> transfo = Hue()
>>> out = transfo(tf.random.uniform(shape=[8, 64, 64, 3], minval=0, maxval=1))
Args:
max_delta: offset to add to each pixel is randomly picked in [-max_delta, max_delta]
"""
def __init__(self, max_delta: float = 0.3) -> None:
self.max_delta = max_delta
def extra_repr(self) -> str:
return f"max_delta={self.max_delta}"
def __call__(self, img: tf.Tensor) -> tf.Tensor:
return tf.image.random_hue(img, max_delta=self.max_delta)
class RandomGamma(NestedObject):
"""randomly performs gamma correction for a tensor (batch of images or image)
Example:
>>> from doctr.transforms import Normalize
>>> import tensorflow as tf
>>> transfo = Gamma()
>>> out = transfo(tf.random.uniform(shape=[8, 64, 64, 3], minval=0, maxval=1))
Args:
min_gamma: non-negative real number, lower bound for gamma param
max_gamma: non-negative real number, upper bound for gamma
min_gain: lower bound for constant multiplier
max_gain: upper bound for constant multiplier
"""
def __init__(
self,
min_gamma: float = 0.5,
max_gamma: float = 1.5,
min_gain: float = 0.8,
max_gain: float = 1.2,
) -> None:
self.min_gamma = min_gamma
self.max_gamma = max_gamma
self.min_gain = min_gain
self.max_gain = max_gain
def extra_repr(self) -> str:
return f"""gamma_range=({self.min_gamma}, {self.max_gamma}),
gain_range=({self.min_gain}, {self.max_gain})"""
def __call__(self, img: tf.Tensor) -> tf.Tensor:
gamma = random.uniform(self.min_gamma, self.max_gamma)
gain = random.uniform(self.min_gain, self.max_gain)
return tf.image.adjust_gamma(img, gamma=gamma, gain=gain)
class RandomJpegQuality(NestedObject):
"""Randomly adjust jpeg quality of a 3 dimensional RGB image
Example::
>>> from doctr.transforms import Normalize
>>> import tensorflow as tf
>>> transfo = JpegQuality()
>>> out = transfo(tf.random.uniform(shape=[64, 64, 3], minval=0, maxval=1))
Args:
min_quality: int between [0, 100]
max_quality: int between [0, 100]
"""
def __init__(self, min_quality: int = 60, max_quality: int = 100) -> None:
self.min_quality = min_quality
self.max_quality = max_quality
def extra_repr(self) -> str:
return f"min_quality={self.min_quality}"
def __call__(self, img: tf.Tensor) -> tf.Tensor:
return tf.image.random_jpeg_quality(
img, min_jpeg_quality=self.min_quality, max_jpeg_quality=self.max_quality
)
class GaussianBlur(NestedObject):
"""Randomly adjust jpeg quality of a 3 dimensional RGB image
Example::
>>> from doctr.transforms import GaussianBlur
>>> import tensorflow as tf
>>> transfo = GaussianBlur(3, (.1, 5))
>>> out = transfo(tf.random.uniform(shape=[64, 64, 3], minval=0, maxval=1))
Args:
kernel_shape: size of the blurring kernel
std: min and max value of the standard deviation
"""
def __init__(self, kernel_shape: Union[int, Iterable[int]], std: Tuple[float, float]) -> None:
self.kernel_shape = kernel_shape
self.std = std
def extra_repr(self) -> str:
return f"kernel_shape={self.kernel_shape}, std={self.std}"
@tf.function
def __call__(self, img: tf.Tensor) -> tf.Tensor:
sigma = random.uniform(self.std[0], self.std[1])
return tfa.image.gaussian_filter2d(
img, filter_shape=self.kernel_shape, sigma=sigma,
)
class ChannelShuffle(NestedObject):
"""Randomly shuffle channel order of a given image"""
def __init__(self):
pass
def __call__(self, img: tf.Tensor) -> tf.Tensor:
return tf.transpose(tf.random.shuffle(tf.transpose(img, perm=[2, 0, 1])), perm=[1, 2, 0])
class GaussianNoise(NestedObject):
"""Adds Gaussian Noise to the input tensor
Example::
>>> from doctr.transforms import GaussianNoise
>>> import tensorflow as tf
>>> transfo = GaussianNoise(0., 1.)
>>> out = transfo(tf.random.uniform(shape=[64, 64, 3], minval=0, maxval=1))
Args:
mean : mean of the gaussian distribution
std : std of the gaussian distribution
"""
def __init__(self, mean: float = 0., std: float = 1.) -> None:
super().__init__()
self.std = std
self.mean = mean
def __call__(self, x: tf.Tensor) -> tf.Tensor:
# Reshape the distribution
noise = self.mean + 2 * self.std * tf.random.uniform(x.shape) - self.std
if x.dtype == tf.uint8:
return tf.cast(
tf.clip_by_value(tf.math.round(tf.cast(x, dtype=tf.float32) + 255 * noise), 0, 255),
dtype=tf.uint8
)
else:
return tf.cast(tf.clip_by_value(x + noise, 0, 1), dtype=x.dtype)
def extra_repr(self) -> str:
return f"mean={self.mean}, std={self.std}"
class RandomHorizontalFlip(NestedObject):
"""Adds random horizontal flip to the input tensor/np.ndarray
Example::
>>> from doctr.transforms import RandomHorizontalFlip
>>> import tensorflow as tf
>>> transfo = RandomHorizontalFlip(p=0.5)
>>> image = tf.random.uniform(shape=[64, 64, 3], minval=0, maxval=1)
>>> target = {
"boxes": np.array([[0.1, 0.1, 0.4, 0.5] ], dtype= np.float32),
"labels": np.ones(1, dtype= np.int64)
}
>>> out = transfo(image, target)
Args:
p : probability of Horizontal Flip
"""
def __init__(self, p: float) -> None:
super().__init__()
self.p = p
def __call__(
self,
img: Union[tf.Tensor, np.ndarray],
target: Dict[str, Any]
) -> Tuple[tf.Tensor, Dict[str, Any]]:
"""
Args:
img: Image to be flipped.
target: Dictionary with boxes (in relative coordinates of shape (N, 4)) and labels as keys
Returns:
Tuple of numpy nd-array or Tensor and target
"""
if np.random.rand(1) <= self.p:
_img = tf.image.flip_left_right(img)
_target = target.copy()
# Changing the relative bbox coordinates
_target["boxes"][:, ::2] = 1 - target["boxes"][:, [2, 0]]
return _img, _target
return img, target
class RandomShadow(NestedObject):
"""Adds random shade to the input image
Example::
>>> from doctr.transforms import RandomShadow
>>> import tensorflow as tf
>>> transfo = RandomShadow(0., 1.)
>>> out = transfo(tf.random.uniform(shape=[64, 64, 3], minval=0, maxval=1))
Args:
opacity_range : minimum and maximum opacity of the shade
"""
def __init__(self, opacity_range: Tuple[float, float] = None) -> None:
super().__init__()
self.opacity_range = opacity_range if isinstance(opacity_range, tuple) else (.2, .8)
def __call__(self, x: tf.Tensor) -> tf.Tensor:
# Reshape the distribution
if x.dtype == tf.uint8:
return tf.cast(
tf.clip_by_value(
tf.math.round(255 * random_shadow(tf.cast(x, dtype=tf.float32) / 255, self.opacity_range)),
0,
255,
),
dtype=tf.uint8
)
else:
return tf.clip_by_value(random_shadow(x, self.opacity_range), 0, 1)
def extra_repr(self) -> str:
return f"opacity_range={self.opacity_range}"
| 35.089716 | 113 | 0.614679 |
4a2184296f49f9aab0cff98556773cc132015ab2 | 3,476 | py | Python | web_reflectivity/fitting/catalog.py | neutrons/web_reflectivity | 8381a0a1e64fb8df89a28e4729cb2957e0ebce57 | ["Apache-2.0"] | 1 | 2020-01-31T20:47:06.000Z | 2020-01-31T20:47:06.000Z | web_reflectivity/fitting/catalog.py | neutrons/web_reflectivity | 8381a0a1e64fb8df89a28e4729cb2957e0ebce57 | ["Apache-2.0"] | 5 | 2017-08-15T18:00:51.000Z | 2019-04-17T18:00:41.000Z | web_reflectivity/fitting/catalog.py | neutrons/web_reflectivity | 8381a0a1e64fb8df89a28e4729cb2957e0ebce57 | ["Apache-2.0"] | 2 | 2018-10-17T09:17:33.000Z | 2021-10-18T13:06:55.000Z |
#pylint: disable=bare-except, invalid-name, too-many-nested-blocks, too-many-locals, too-many-branches
"""
Optional utilities to communicate with ONcat.
ONcat is an online data catalog used internally at ORNL.
@copyright: 2018 Oak Ridge National Laboratory
"""
import sys
import datetime
import logging
from django.conf import settings
try:
import pyoncat
HAVE_ONCAT = True
except:
HAVE_ONCAT = False
from fitting.models import CatalogCache
def decode_time(timestamp):
"""
Decode timestamp and return a datetime object
:param timestamp: timestamp to decode
"""
try:
tz_location = timestamp.rfind('+')
if tz_location < 0:
tz_location = timestamp.rfind('-')
if tz_location > 0:
date_time_str = timestamp[:tz_location]
# Get rid of fractions of a second
sec_location = date_time_str.rfind('.')
if sec_location > 0:
date_time_str = date_time_str[:sec_location]
return datetime.datetime.strptime(date_time_str, "%Y-%m-%dT%H:%M:%S")
except:
logging.error("Could not parse timestamp '%s': %s", timestamp, sys.exc_value)
return None
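# Editor's note: illustrative sketch added for context, not part of the
# original module. The timestamp is a made-up example of the format handled
# by decode_time() above.
#
#     decode_time("2018-06-01T12:30:45.123-04:00")
#     # -> datetime.datetime(2018, 6, 1, 12, 30, 45)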
def get_run_info(instrument, run_number):
"""
Legacy issue:
Until the facility information is stored in the DB so that we can
retrieve the facility from it, we'll have to use the application
configuration.
:param str instrument: instrument name
:param str run_number: run number
:param str facility: facility name (SNS or HFIR)
"""
facility = 'SNS'
if hasattr(settings, 'FACILITY_INFO'):
facility = settings.FACILITY_INFO.get(instrument, 'SNS')
return _get_run_info(instrument, run_number, facility)
def _get_run_info(instrument, run_number, facility='SNS'):
"""
Get ONCat info for the specified run
Notes: At the moment we do not catalog reduced data
:param str instrument: instrument short name
:param str run_number: run number
:param str facility: facility name (SNS or HFIR)
"""
run_info = {}
cached_entry = [] #CatalogCache.objects.filter(data_path="%s/%s" % (instrument, run_number))
if len(cached_entry) > 0:
return dict(title=cached_entry[0].title, proposal=cached_entry[0].proposal)
if not HAVE_ONCAT:
return run_info
try:
oncat = pyoncat.ONCat(
settings.CATALOG_URL,
# Here we're using the machine-to-machine "Client Credentials" flow,
# which requires a client ID and secret, but no *user* credentials.
flow = pyoncat.CLIENT_CREDENTIALS_FLOW,
client_id = settings.CATALOG_ID,
client_secret = settings.CATALOG_SECRET,
)
oncat.login()
datafiles = oncat.Datafile.list(
facility = facility,
instrument = instrument.upper(),
projection = ['experiment', 'location', 'metadata.entry.title'],
tags = ['type/raw'],
ranges_q = 'indexed.run_number:%s' % str(run_number)
)
if datafiles:
run_info['title'] = datafiles[0].metadata.get('entry', {}).get('title', None)
run_info['proposal'] = datafiles[0].experiment
run_info['location'] = datafiles[0].location
except:
logging.error("Communication with ONCat server failed: %s", sys.exc_value)
return run_info
| 35.111111 | 102 | 0.636939 |
4a21843f4dc448bc83b3bc655dd17ac89a675f94 | 1,700 | py | Python | build_event.py | thehorizondweller/gyft | 268b484cc62f1eb67c368db1d1112ef411f13ce5 | ["MIT"] | 39 | 2016-08-07T15:07:34.000Z | 2022-01-29T08:44:45.000Z | build_event.py | thehorizondweller/gyft | 268b484cc62f1eb67c368db1d1112ef411f13ce5 | ["MIT"] | 66 | 2016-08-12T04:30:06.000Z | 2022-02-06T09:36:50.000Z | build_event.py | thehorizondweller/gyft | 268b484cc62f1eb67c368db1d1112ef411f13ce5 | ["MIT"] | 58 | 2016-08-07T20:38:33.000Z | 2022-02-01T06:40:00.000Z |
from icalendar import Calendar, Event
import pytz
from datetime import datetime, timedelta
def build_event_duration(summary, description, start, duration, location,
freq_of_recurrence, until):
'''
Return an event that can be added to a calendar
summary: summary of the event
description: description of the event
location: self explanatory
    start: a datetime.datetime object; duration: event length in hours
freq_of_recurrence: frequency of recurrence, string which can take the
values daily, weekly, monthly, etc.
until: A datetime.datetime object which signifies when the recurrence will
end
'''
event = Event()
event.add('summary', summary)
event.add('description', description)
event.add('dtstart', start)
event.add('duration', timedelta(hours=duration))
event.add('dtstamp', datetime.now())
event.add('location', location)
event.add('rrule', { 'FREQ': freq_of_recurrence, 'UNTIL': until})
return event
def generateIndiaTime(year, month, date, hour, minutes):
return datetime(year, month, date, hour, minutes, tzinfo=pytz.timezone('Asia/Kolkata'))
if __name__ == '__main__':
cal = Calendar()
cal.add('prodid', '-//Your Timetable generated by GYFT//mxm.dk//')
cal.add('version', '1.0')
    example_event = build_event_duration(
        "example event",
        "example event's description 2!",
        generateIndiaTime(2016, 8, 22, 19, 0),
        1,  # duration in hours (the original example ran 19:00-20:00)
"imaginary location!",
"weekly",
generateIndiaTime(2016, 11, 20, 12, 0))
cal.add_component(example_event)
with open('timetable.ics', 'wb') as f:
f.write(cal.to_ical())
| 31.481481 | 91 | 0.669412 |
4a218440a135eea47a8f4f2b0ed5da6f0f570489 | 2,002 | py | Python | rewardpredictive/evaluate.py | taodav/rewardpredictive | be0dc53993a0c59e1a64df1aef9923d57e8692dc | ["MIT"] | 1 | 2020-08-04T18:01:53.000Z | 2020-08-04T18:01:53.000Z | rewardpredictive/evaluate.py | lucaslehnert/rewardpredictive | 273da5a2566a263678159ed81dfb202b180a45a1 | ["MIT"] | 5 | 2020-07-14T20:06:36.000Z | 2022-02-10T01:52:53.000Z | rewardpredictive/evaluate.py | taodav/rewardpredictive | be0dc53993a0c59e1a64df1aef9923d57e8692dc | ["MIT"] | 1 | 2020-12-14T20:00:39.000Z | 2020-12-14T20:00:39.000Z |
import numpy as np
import rlutils as rl
from .utils import cluster_idx_to_phi_mat, lam_from_mat, reward_rollout
def eval_total_reward(task, partition, repeats=5, rollout_depth=10, gamma=0.9):
"""
Evaluate the total reward that can be generated by using the given abstraction on the specified task.
:param task:
:param partition:
:param repeats:
:param rollout_depth:
:param gamma:
:return:
"""
phi_mat = cluster_idx_to_phi_mat(partition)
t_mat, r_vec = task.get_t_mat_r_vec()
m_mat, w_vec = lam_from_mat(t_mat, r_vec, phi_mat)
q_phi, _ = rl.algorithm.vi(m_mat, w_vec, gamma=gamma)
latent_agent = rl.agent.ValueFunctionAgent(q_fun=lambda s: np.matmul(q_phi, s))
agent = rl.agent.StateRepresentationWrapperAgent(latent_agent, phi=lambda s: np.matmul(s, phi_mat))
policy = rl.policy.GreedyPolicy(agent)
logger = rl.logging.LoggerTotalReward()
for _ in range(repeats):
rl.data.simulate_gracefully(task, policy, logger, max_steps=rollout_depth)
# logger.on_simulation_timeout()
return logger.get_total_reward_episodic()
def eval_reward_predictive(task, partition, repeats=5, rollout_depth=10):
phi_mat = cluster_idx_to_phi_mat(partition)
t_mat, r_vec = task.get_t_mat_r_vec()
m_mat, w_vec = lam_from_mat(t_mat, r_vec, phi_mat)
action_seq_list = np.random.randint(0, task.num_actions(), size=[repeats, rollout_depth])
start_state_list = [rl.one_hot(i, task.num_states()) for i in np.random.randint(0, task.num_states(), size=repeats)]
start_state_list = np.stack(start_state_list).astype(dtype=np.float32)
rew_err = []
for i in range(repeats):
s = start_state_list[i]
action_seq = action_seq_list[i]
rew_original = reward_rollout(t_mat, r_vec, s, action_seq)
rew_latent = reward_rollout(m_mat, w_vec, np.matmul(s, phi_mat), action_seq)
rew_err.append(np.abs(rew_original - rew_latent))
return np.array(rew_err, dtype=np.float32)
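# Hedged usage sketch (illustrative addition): `task` is assumed to follow the
# rlutils-style tabular interface used above (get_t_mat_r_vec(), num_states(),
# num_actions()); `partition` assigns each ground state to an abstract state.
# The numbers below are placeholders.
#
#     partition = np.array([0, 0, 1, 1])   # 4 ground states -> 2 abstract states
#     total_rewards = eval_total_reward(task, partition, repeats=10)
#     reward_errors = eval_reward_predictive(task, partition, repeats=10)
#     print(np.mean(total_rewards), reward_errors.mean())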
| 38.5 | 120 | 0.71978 |
4a2184ee0158589d332f4602757e2a65823f04ae | 335 | py | Python | malt/constants.py | Anavros/malt | d972ce9851f0174160c1ae73b9d54b56575fe5c0 | [
"MIT"
] | null | null | null | malt/constants.py | Anavros/malt | d972ce9851f0174160c1ae73b9d54b56575fe5c0 | [
"MIT"
] | 8 | 2015-12-05T17:28:39.000Z | 2016-12-09T18:41:25.000Z | malt/constants.py | Anavros/malt | d972ce9851f0174160c1ae73b9d54b56575fe5c0 | [
"MIT"
] | null | null | null |
# Characters
LIST_BEGIN = '['
LIST_END = ']'
DICT_BEGIN = '{'
DICT_END = '}'
LINE_END = '\n'
JOIN = ':'
EQUALS = '='
COMMENT = '#'
HINT = '?'
DOUBLE_QUOTE = '\"'
SINGLE_QUOTE = '\''
# Character Groups
COMMENTS = '#?'
SEPARATORS = ' \t\n'
QUOTES = '\'\"'
BRACES = '[]{}'
# Literal Strings
LINE_CONTINUE = '...'
BLOCK_COMMENT = '###'
| 13.958333 | 21 | 0.558209 |
4a21853c599929a2b254f25ac86b3fb60de5e2de | 14,286 | py | Python | deep_cox_mixtures/dcm/plots.py | chiragnagpal/google-research | b9225e1eb6d83a2b9a69cd9b4b319129d0c68fc7 | [
"Apache-2.0"
] | 21 | 2021-01-19T15:38:45.000Z | 2022-03-03T09:25:15.000Z | deep_cox_mixtures/dcm/plots.py | chiragnagpal/google-research | b9225e1eb6d83a2b9a69cd9b4b319129d0c68fc7 | [
"Apache-2.0"
] | 4 | 2021-04-09T12:00:52.000Z | 2021-11-10T15:23:02.000Z | deep_cox_mixtures/dcm/plots.py | chiragnagpal/google-research | b9225e1eb6d83a2b9a69cd9b4b319129d0c68fc7 | [
"Apache-2.0"
] | 2 | 2021-12-09T06:41:25.000Z | 2022-03-17T18:01:52.000Z | # coding=utf-8
# Copyright 2020 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilitites to plot the ROC and Calibration for survival models.
This module has utility functions to generate ROC and Calibration plots for
survival models at given horizons of time. Note that ideally both the ROC and
Calibration curves require to be adjusted for censoring using IPCW estimates.
Not designed to be called directly, would be called when running a function from
dcm.deep_cox_mixtures
"""
from dcm import baseline_models
from dcm import models
from dcm.calibration import calibration_curve
import matplotlib as mpl
from matplotlib import pyplot as plt
from dcm.skmetrics import brier_score
from dcm.skmetrics import cumulative_dynamic_auc
from dcm.skmetrics import concordance_index_ipcw
import numpy as np
import logging
logging.getLogger("matplotlib").setLevel(logging.CRITICAL)
from sklearn.metrics import auc
def plot_calibration_curve(ax,
scores,
e,
t,
a,
folds,
group,
quant,
strat='quantile',
adj='IPCW',
plot=True):
"""Function to plot Calibration Curve at a specified time horizon.
Accepts a matplotlib figure instance, risk scores from a trained survival
analysis model, and quantiles of event interest and generates an IPCW
adjusted calibration curve.
Args:
ax:
a matplotlib subfigure object.
scores:
risk scores P(T>t) issued by a trained survival analysis model
(output of deep_cox_mixtures.models.predict_survival).
e:
a numpy array of event indicators.
t:
a numpy array of event/censoring times.
a:
a numpy vector of protected attributes.
folds:
a numpy vector of cv folds.
group:
        The demographic group to adjust for.
quant:
        the event time quantile (horizon) at which the model is to be evaluated.
strat:
Specifies how the bins are computed. One of:
"quantile": Equal sized bins.
"uniform": Uniformly stratified.
adj (str):
Determines if IPCW adjustment is carried out on a population or subgroup
level.
One of "IPCWpop", "IPCWcon" (not implemented).
Returns:
A plotted matplotlib calibration curve.
"""
allscores = np.ones_like(t).astype('float')
for fold in set(folds):
allscores[folds == fold] = scores[fold]
scores = allscores
b_fc = (0, 0, 1, .4)
r_fc = (1, 0, 0, .2)
b_ec = (0, 0, 1, .8)
r_ec = (1, 0, 0, .8)
n_bins = 20
hatch = '//'
fs = 16
prob_true_n, _, outbins, ece = calibration_curve(
scores,
e,
t,
a,
group,
quant,
typ=adj,
ret_bins=True,
strat=strat,
n_bins=n_bins)
for d in range(len(prob_true_n)):
binsize = outbins[d + 1] - outbins[d]
binloc = (outbins[d + 1] + outbins[d]) / 2
gap = (prob_true_n[d] - binloc)
if gap < 0:
bottom = prob_true_n[d]
else:
bottom = prob_true_n[d] - abs(gap)
if d == len(prob_true_n) - 1:
lbl1 = 'Score'
lbl2 = 'Gap'
else:
lbl1 = None
lbl2 = None
if plot:
ax.bar(
binloc,
prob_true_n[d],
width=binsize,
facecolor=b_fc,
edgecolor=b_ec,
linewidth=2.5,
label=lbl1)
ax.bar(
binloc,
abs(gap),
bottom=bottom,
width=binsize,
facecolor=r_fc,
edgecolor=r_ec,
linewidth=2.5,
hatch=hatch,
label=lbl2)
d += 1
if plot:
ax.plot([0, 1], [0, 1], c='k', ls='--', lw=2, zorder=100)
ax.set_xlabel('Predicted Score', fontsize=fs)
ax.set_ylabel('True Score', fontsize=fs)
ax.legend(fontsize=fs)
ax.set_title(str(group), fontsize=fs)
ax.set_xlim(0, 1)
ax.set_ylim(0, 1)
ax.grid(ls=':', lw=2, zorder=-100, color='grey')
ax.set_axisbelow(True)
ax.text(
x=0.030,
y=.7,
s='ECE=' + str(round(ece, 3)),
size=fs,
bbox=dict(boxstyle='round', fc='white', ec='grey', pad=0.2))
return ece
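# Note added for clarity (not used by the computation): the ECE annotated on the
# plot is the usual binned expected calibration error,
#     ECE = sum_b (n_b / N) * |true_score(b) - predicted_score(b)|,
# summed over the n_bins bins returned by calibration_curve; the exact binning
# and KM/IPCW weighting are whatever dcm.calibration.calibration_curve
# implements for the chosen `strat` and `adj` options.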
def plot_roc_curve(ax,
scores,
e,
t,
a,
folds,
groups,
quant,
plot=True):
"""Function to plot ROC at a specified time horizon.
Accepts a matplotlib figure instance, risk scores from a trained survival
analysis model, and quantiles of event interest and generates an IPCW
adjusted ROC curve.
Args:
ax:
a matplotlib subfigure object.
      scores:
        risk scores P(T>t) issued by a trained survival analysis model
        (output of deep_cox_mixtures.models.predict_survival).
      e:
        a numpy array of event indicators.
      t:
        a numpy array of event/censoring times.
      a:
        a numpy vector of protected attributes.
      folds:
        a numpy vector of cv folds.
      groups:
        List of the demographic groups to adjust for.
      quant:
        the event time quantile (horizon) at which the models are to be evaluated.
Returns:
A plotted matplotlib ROC curve.
"""
fs = 16
fprs, tprs, tprs_std, ctds, brss = {}, {}, {}, {}, {}
fprs['all'] = {}
tprs['all'] = {}
ctds['all'] = {}
brss['all'] = {}
for group in groups:
fprs[group] = {}
tprs[group] = {}
ctds[group] = {}
brss[group] = {}
for fold in set(folds):
ate = a[folds == fold]
str_test = baseline_models.structure_for_eval_(t[folds == fold],
e[folds == fold])
if len(set(folds)) == 1:
atr = ate
str_train = str_test
else:
atr = a[folds != fold]
str_train = baseline_models.structure_for_eval_(t[folds != fold],
e[folds != fold])
t_tr_max = np.max([t_[1] for t_ in str_train])
t_ = np.array([t_[1] for t_ in str_test])
clean = (t_<=t_tr_max)
str_test = str_test[t_<=t_tr_max]
ate = ate[t_<=t_tr_max]
scores_f = scores[fold][clean]
for group in groups:
te_protg = (ate == group)
tr_protg = (atr == group)
try:
roc_m = cumulative_dynamic_auc(str_train[tr_protg], str_test[te_protg],
-scores_f[te_protg], [quant])
brs_m = brier_score(str_train[tr_protg], str_test[te_protg],
scores_f[te_protg], quant)
ctd_m = concordance_index_ipcw(str_train[tr_protg], str_test[te_protg],
-scores_f[te_protg], quant)[0]
except:
roc_m = cumulative_dynamic_auc(str_train, str_test[te_protg],
-scores_f[te_protg], [quant])
brs_m = brier_score(str_train, str_test[te_protg],
scores_f[te_protg], quant)
ctd_m = concordance_index_ipcw(str_train, str_test[te_protg],
-scores_f[te_protg], quant)[0]
fprs[group][fold] = roc_m[0][0][1]
tprs[group][fold] = roc_m[0][0][0]
ctds[group][fold] = ctd_m
brss[group][fold] = brs_m[1][0]
roc_m = cumulative_dynamic_auc(str_train, str_test, -scores_f, [quant])
ctd_m = concordance_index_ipcw(str_train, str_test, -scores_f, quant)[0]
brs_m = brier_score(str_train, str_test, scores_f, quant)
fprs['all'][fold], tprs['all'][fold] = roc_m[0][0][1], roc_m[0][0][0]
ctds['all'][fold] = ctd_m
brss['all'][fold] = brs_m[1][0]
cols = ['b', 'r', 'g']
roc_auc = {}
ctds_mean = {}
brss_mean = {}
j = 0
for group in list(groups) + ['all']:
all_fpr = np.unique(np.concatenate([fprs[group][i] for i in set(folds)]))
# The ROC curves are interpolated at these points.
mean_tprs = []
for i in set(folds):
mean_tprs.append(np.interp(all_fpr, fprs[group][i], tprs[group][i]))
# Finally the interpolated curves are averaged over to compute AUC.
mean_tpr = np.mean(mean_tprs, axis=0)
std_tpr = 1.96 * np.std(mean_tprs, axis=0) / np.sqrt(10)
fprs[group]['macro'] = all_fpr
tprs[group]['macro'] = mean_tpr
tprs_std[group] = std_tpr
roc_auc[group] = auc(fprs[group]['macro'], tprs[group]['macro'])
ctds_mean[group] = np.mean([ctds[group][fold] for fold in folds])
brss_mean[group] = np.mean([brss[group][fold] for fold in folds])
lbl = str(group)
lbl += ' AUC:' + str(round(roc_auc[group], 3))
lbl += ' Ctd:'+ str(round(ctds_mean[group], 3))
lbl += ' BS:'+ str(round(brss_mean[group], 3))
if plot:
ax.plot(
all_fpr,
mean_tpr,
c=cols[j],
label=lbl)
ax.fill_between(
all_fpr,
mean_tpr - std_tpr,
mean_tpr + std_tpr,
color=cols[j],
alpha=0.25)
j += 1
if plot:
ax.set_xlabel('False Positive Rate', fontsize=fs)
ax.set_ylabel('True Positive Rate', fontsize=fs)
ax.legend(fontsize=fs)
ax.set_xscale('log')
return roc_auc, ctds_mean, brss_mean
def plot_results(outputs, x, e, t, a, folds, groups,
quantiles, strat='quantile', adj='KM', plot=True):
"""Function to plot the ROC and Calibration curves from a survival model.
Accepts a trained survival analysis model, features and horizon of interest
and generates the IPCW adjusted ROC curve and Calibration curve at
pre-specified horizons of time.
Args:
      outputs:
        a python dict, keyed by horizon, of dicts with the predicted survival
        probabilities for each cv fold.
      x:
        a numpy array of input features.
      e:
        a numpy array of event indicators.
      t:
        a numpy array of event/censoring times.
      a:
        a numpy vector of protected attributes.
      folds:
        a numpy vector of cv folds.
      groups:
        List of the demographic groups to adjust for.
quantiles:
a list of event time quantiles at which the models are to be evaluated.
strat:
Specifies how the bins are computed. One of:
"quantile": Equal sized bins.
"uniform": Uniformly stratified.
adj:
Adjustment strategy for the Expected Calibration Error. One of:
"KM": Kaplan-Meier (Default)
"IPCW": Inverse Propensity of Censoring
    Returns:
      a dict keyed by horizon; each value is a tuple of (ROC AUC, time-dependent
      concordance, Brier score, ECE) dictionaries computed at that horizon.
"""
if plot:
mpl.rcParams['hatch.linewidth'] = 2.0
fig, big_axes = plt.subplots(
figsize=(8 * (len(groups) + 2), 6 * len(quantiles)),
nrows=len(quantiles),
ncols=1)
plt.subplots_adjust(hspace=0.4)
i = 0
for _, big_ax in enumerate(big_axes, start=1):
big_ax.set_title(
'Receiver Operator Characteristic and Calibration at t=' +
str(quantiles[i]) + '\n',
fontsize=16)
big_ax.tick_params(
labelcolor=(1., 1., 1., 0.0),
top='off',
bottom='off',
left='off',
right='off')
i += 1
eces = {}
metrics = {}
for quant in quantiles:
eces[quant] = {}
for i in range(len(quantiles)):
scores = outputs[quantiles[i]]
for j in range(len(groups) + 2):
pt = (i * (len(groups) + 2) + j + 1)
if plot:
ax = fig.add_subplot(len(quantiles), len(groups) + 2, pt)
else:
ax = None
if (j==1):
eces[quantiles[i]]['all'] = plot_calibration_curve(ax,
scores,
e,
t,
a,
folds,
None,
quantiles[i],
strat=strat,
adj=adj,
plot=plot)
if (j>1):
eces[quantiles[i]][groups[j - 2]] = plot_calibration_curve(ax,
scores,
e,
t,
a,
folds,
groups[j - 2],
quantiles[i],
strat=strat,
adj=adj,
plot=plot)
if (j==0):
metrics[quantiles[i]] = plot_roc_curve(ax,
scores,
e,
t,
a,
folds,
groups,
quantiles[i],
plot=plot)
for quant in quantiles:
metrics[quant] = metrics[quant] + (eces[quant], )
if plot:
plt.show()
return metrics
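# Hedged usage sketch (illustrative addition; the variable names and group
# labels are placeholders). `outputs` is expected to be keyed by horizon, with
# each value a dict keyed by cv fold holding the predicted survival
# probabilities P(T > horizon) for that fold's rows, matching how
# `scores[fold]` is indexed above.
#
#     quantiles = [365, 730]
#     outputs = {q: {f: predictions[q][f] for f in set(folds)} for q in quantiles}
#     metrics = plot_results(outputs, x, e, t, a, folds,
#                            groups=('male', 'female'), quantiles=quantiles)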
| 29.095723 | 80 | 0.51568 |
4a21854143462e6a8113a4dcf56bf74dc2a0ac90 | 14,912 | py | Python | storagevet/Technology/DistributedEnergyResource.py | epri-dev/StorageVET | 26b740ba03b1ad4fc0734af56787ec5c0ebd3824 | [
"BSD-3-Clause"
] | 35 | 2020-06-04T18:04:11.000Z | 2022-02-24T07:48:29.000Z | storagevet/Technology/DistributedEnergyResource.py | epri-dev/StorageVET | 26b740ba03b1ad4fc0734af56787ec5c0ebd3824 | [
"BSD-3-Clause"
] | 4 | 2021-05-13T22:36:24.000Z | 2022-01-13T03:42:16.000Z | storagevet/Technology/DistributedEnergyResource.py | epri-dev/StorageVET | 26b740ba03b1ad4fc0734af56787ec5c0ebd3824 | [
"BSD-3-Clause"
] | 16 | 2020-06-04T18:07:18.000Z | 2022-03-10T13:27:08.000Z | """
Copyright (c) 2021, Electric Power Research Institute
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of DER-VET nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
"""
Technology
This Python class contains methods and attributes specific for technology analysis within StorageVet.
"""
import pandas as pd
import numpy as np
import cvxpy as cvx
from storagevet.ErrorHandling import *
class DER:
""" A general template for DER object, which could be any kind of Distributed Energy Resources currently
supported in DERVET: storage (CAES, Battery), generator (CHP, ICE), renewable (PV), and loads
"""
def __init__(self, params):
""" Initialize all technology with the following attributes.
Args:
params (dict): Dict of parameters
"""
TellUser.debug(f"Initializing {__name__}")
# initialize internal attributes
self.name = params['name'] # specific tech model name
self.dt = params['dt']
self.technology_type = None # "Energy Storage System", "Rotating Generator", "Intermittent Resource", "Load"
self.tag = None
self.variable_om = 0 # $/kWh
self.id = params.get('ID')
# attributes about specific to each DER
self.variables_df = pd.DataFrame() # optimization variables are saved here
self.variables_dict = {} # holds the CVXPY variables upon creation in the technology instance
# boolean attributes
self.is_electric = False # can this DER consume or generate electric power?
self.is_hot = False # can this DER consume or generate heat?
self.is_cold = False # can this DER consume or generate cooling power?
self.is_fuel = False # can this DER consume fuel?
self.can_participate_in_market_services = True
def zero_column_name(self):
return self.unique_tech_id() + ' Capital Cost' # used for proforma creation
def fixed_column_name(self):
return self.unique_tech_id() + ' Fixed O&M Cost' # used for proforma creation
def get_capex(self, **kwargs) -> cvx.Variable or float:
"""
Returns: the capex of this DER
"""
return 0
def grow_drop_data(self, years, frequency, load_growth):
""" Adds data by growing the given data OR drops any extra data that might have slipped in.
Update variable that hold timeseries data after adding growth data. These method should be called after
add_growth_data and before the optimization is run.
Args:
years (List): list of years for which analysis will occur on
frequency (str): period frequency of the timeseries data
load_growth (float): percent/ decimal value of the growth rate of loads in this simulation
"""
pass
def discharge_capacity(self):
"""
Returns: the maximum discharge that can be attained
"""
return 0
def charge_capacity(self):
"""
Returns: the maximum charge that can be attained
"""
return 0
def operational_max_energy(self):
"""
Returns: the maximum energy that should stored in this DER based on user inputs
"""
return 0
def operational_min_energy(self):
"""
Returns: the minimum energy that should stored in this DER based on user inputs
"""
return 0
def qualifying_capacity(self, event_length):
""" Describes how much power the DER can discharge to qualify for RA or DR. Used to determine
the system's qualifying commitment.
Args:
event_length (int): the length of the RA or DR event, this is the
total hours that a DER is expected to discharge for
Returns: int/float
"""
return 0
def initialize_variables(self, size):
""" Adds optimization variables to dictionary
Variables added:
Args:
size (Int): Length of optimization variables to create
"""
pass
def get_state_of_energy(self, mask):
"""
Args:
mask (DataFrame): A boolean array that is true for indices corresponding to time_series data included
in the subs data set
        Returns: the state of energy as a function of time for the DER
"""
return cvx.Parameter(value=np.zeros(sum(mask)), shape=sum(mask), name=f'{self.name}-Zero')
def get_discharge(self, mask):
""" The effective discharge of this DER
Args:
mask (DataFrame): A boolean array that is true for indices corresponding to time_series data included
in the subs data set
        Returns: the discharge as a function of time for the DER
"""
return cvx.Parameter(value=np.zeros(sum(mask)), shape=sum(mask), name=f'{self.name}-Zero')
def get_charge(self, mask):
"""
Args:
mask (DataFrame): A boolean array that is true for indices corresponding to time_series data included
in the subs data set
        Returns: the charge as a function of time for the DER
"""
return cvx.Parameter(value=np.zeros(sum(mask)), shape=sum(mask), name=f'{self.name}-Zero')
def get_net_power(self, mask):
"""
Args:
mask (DataFrame): A boolean array that is true for indices corresponding to time_series data included
in the subs data set
        Returns: the net power [= charge - discharge] as a function of time for the DER
"""
return self.get_charge(mask) - self.get_discharge(mask)
def get_charge_up_schedule(self, mask):
""" the amount of charging power in the up direction (supplying power up into the grid) that
this DER can schedule to reserve
Args:
mask (DataFrame): A boolean array that is true for indices corresponding to time_series data included
in the subs data set
Returns: CVXPY parameter/variable
"""
return cvx.Parameter(value=np.zeros(sum(mask)), shape=sum(mask), name=f'{self.name}ZeroUp')
def get_charge_down_schedule(self, mask):
""" the amount of charging power in the up direction (pulling power down from the grid) that
this DER can schedule to reserve
Args:
mask (DataFrame): A boolean array that is true for indices corresponding to time_series data included
in the subs data set
Returns: CVXPY parameter/variable
"""
return cvx.Parameter(value=np.zeros(sum(mask)), shape=sum(mask), name=f'{self.name}ZeroDown')
def get_discharge_up_schedule(self, mask):
""" the amount of discharge power in the up direction (supplying power up into the grid) that
this DER can schedule to reserve
Args:
mask (DataFrame): A boolean array that is true for indices corresponding to time_series data included
in the subs data set
Returns: CVXPY parameter/variable
"""
return cvx.Parameter(value=np.zeros(sum(mask)), shape=sum(mask), name=f'{self.name}ZeroUp')
def get_discharge_down_schedule(self, mask):
""" the amount of discharging power in the up direction (pulling power down from the grid) that
this DER can schedule to reserve
Args:
mask (DataFrame): A boolean array that is true for indices corresponding to time_series data included
in the subs data set
Returns: CVXPY parameter/variable
"""
return cvx.Parameter(value=np.zeros(sum(mask)), shape=sum(mask), name=f'{self.name}ZeroDown')
def get_delta_uenegy(self, mask):
""" the amount of energy, from the current SOE level the DER's state of energy changes
from subtimestep energy shifting
Returns: the energy throughput in kWh for this technology
"""
return cvx.Parameter(value=np.zeros(sum(mask)), shape=sum(mask), name=f'{self.name}Zero')
def get_uenergy_increase(self, mask):
""" the amount of energy in a timestep that is provided to the distribution grid
Returns: the energy throughput in kWh for this technology
"""
return cvx.Parameter(value=np.zeros(sum(mask)), shape=sum(mask), name=f'{self.name}Zero')
def get_uenergy_decrease(self, mask):
""" the amount of energy in a timestep that is taken from the distribution grid
Returns: the energy throughput in kWh for this technology
"""
return cvx.Parameter(value=np.zeros(sum(mask)), shape=sum(mask), name=f'{self.name}Zero')
def objective_function(self, mask, annuity_scalar=1):
""" Generates the objective function related to a technology. Default includes O&M which can be 0
Args:
mask (Series): Series of booleans used, the same length as case.power_kw
annuity_scalar (float): a scalar value to be multiplied by any yearly cost or benefit that helps capture the cost/benefit over
the entire project lifetime (only to be set iff sizing)
Returns:
costs - benefits (Dict): Dict of objective costs
"""
return {}
def constraints(self, mask, **kwargs):
"""Default build constraint list method. Used by services that do not have constraints.
Args:
mask (DataFrame): A boolean array that is true for indices corresponding to time_series data included
in the subs data set
Returns:
A list of constraints that corresponds the battery's physical constraints and its service constraints
"""
return []
def save_variable_results(self, subs_index):
""" Searches through the dictionary of optimization variables and saves the ones specific to each
DER instance and saves the values it to itself
Args:
subs_index (Index): index of the subset of data for which the variables were solved for
"""
variable_values = pd.DataFrame({name: variable.value for name, variable in self.variables_dict.items()}, index=subs_index)
self.variables_df = pd.concat([self.variables_df, variable_values], sort=True)
def unique_tech_id(self):
""" String id that serves as the prefix for reporting optimization variables for specific DER via timeseries
or proforma method. USED IN REPORTING ONLY
"""
return f'{self.tag.upper()}: {self.name}'
def timeseries_report(self):
""" Summaries the optimization results for this DER.
Returns: A timeseries dataframe with user-friendly column headers that summarize the results
pertaining to this instance
"""
return pd.DataFrame()
def monthly_report(self):
""" Collects all monthly data that are saved within this object
Returns: A dataframe with the monthly input price of the service
"""
def drill_down_reports(self, monthly_data=None, time_series_data=None, technology_summary=None, sizing_df=None):
"""Calculates any service related dataframe that is reported to the user.
Args:
monthly_data:
time_series_data:
technology_summary:
sizing_df:
Returns: dictionary of DataFrames of any reports that are value stream specific
keys are the file name that the df will be saved with
"""
return {}
def proforma_report(self, apply_inflation_rate_func, fill_forward_func, results):
""" Calculates the proforma that corresponds to participation in this value stream
Args:
apply_inflation_rate_func:
fill_forward_func:
results (pd.DataFrame):
Returns: A DateFrame of with each year in opt_year as the index and
the corresponding value this stream provided.
"""
if not self.zero_column_name():
return None
pro_forma = pd.DataFrame({self.zero_column_name(): -self.get_capex(solution=True)}, index=['CAPEX Year'])
return pro_forma
def interpolate_energy_dispatch(self, df, start_year, end_year, interpolation_method):
"""
Interpolates cumulative energy dispatch values between
the analysis start_year and end_year, given a dataframe with
values only included for optimization years
Args:
            df (pd.DataFrame): proforma-style df with only years in the index,
the years correspond to optimization years
start_year (Integer): the project start year
end_year (Integer): the project end year
interpolation_method (String): defaults to 'linear'
Returns: a df where the data is interpolated between known values using
interpolation_method. Values prior to the first year with data get
set to the first year's value. Same for the last year's value.
"""
# default to linear interpolation
if interpolation_method is None:
interpolation_method = 'linear'
filled_df = pd.DataFrame(index=pd.period_range(start_year, end_year, freq='y'))
filled_df = pd.concat([filled_df, df], axis=1)
filled_df = filled_df.apply(lambda x: x.interpolate(
method=interpolation_method, limit_direction='both'), axis=0)
return filled_df
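# Hedged sketch of how this template class is meant to be specialized
# (illustrative addition; "SimpleGenerator" and its parameters are hypothetical
# and not part of StorageVET). A concrete DER typically sets the capability
# flags, creates its CVXPY variables in initialize_variables(), and exposes them
# through the get_* accessors used by the optimization.
#
#     class SimpleGenerator(DER):
#         def __init__(self, params):
#             super().__init__(params)
#             self.technology_type = 'Rotating Generator'
#             self.tag = 'Generator'
#             self.is_electric = True
#             self.rated_kw = params['rated_kw']
#
#         def initialize_variables(self, size):
#             self.variables_dict = {
#                 'elec': cvx.Variable(shape=size, name='elec', nonneg=True)}
#
#         def discharge_capacity(self):
#             return self.rated_kw
#
#         def get_discharge(self, mask):
#             return self.variables_dict['elec']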
| 37.28 | 138 | 0.660475 |
4a2185d972ab6f4d0bf6d4963cf06da70aac1d21 | 10,247 | py | Python | scripts/takamura.py | WladimirSidorenko/SentimentLexicon | 0d7203b7b7e3ca5d11759fdad656f775fa5d6e95 | [
"MIT"
] | 13 | 2016-08-03T18:46:02.000Z | 2022-02-22T22:30:19.000Z | scripts/takamura.py | WladimirSidorenko/SentimentLexicon | 0d7203b7b7e3ca5d11759fdad656f775fa5d6e95 | [
"MIT"
] | 2 | 2019-10-22T13:03:48.000Z | 2019-12-05T21:41:36.000Z | scripts/takamura.py | WladimirSidorenko/SentimentLexicon | 0d7203b7b7e3ca5d11759fdad656f775fa5d6e95 | [
"MIT"
] | 5 | 2019-12-25T13:53:18.000Z | 2020-06-05T20:47:31.000Z | #!/usr/bin/env python2.7
# -*- mode: python; coding: utf-8; -*-
"""Module for generating lexicon using Takamura's method (2005).
"""
##################################################################
# Imports
from __future__ import unicode_literals, print_function
from common import lemmatize, POSITIVE, NEGATIVE, TOKENIZER, \
SYNRELS, ANTIRELS, NEGATORS, STOP_WORDS, FORM2LEMMA, \
TAB_RE, NONMATCH_RE, ENCODING, check_word
from germanet import normalize
from ising import Ising, ITEM_IDX, WGHT_IDX, HAS_FXD_WGHT, FXD_WGHT_IDX
from itertools import chain, combinations
from math import isnan
import codecs
import sys
##################################################################
# Methods
def _annotate_re(a_re, a_ising, a_wght):
"""Assign polarity values to terms matching regular expressions.
@param a_re - regular expression to match
@param a_ising - dictionary of word scores
@param a_wght - weight to assign to terms that match regular expressions
@return void
"""
for itok, _ in a_ising.iteritems():
if a_re.search(itok):
a_ising[itok][FXD_WGHT_IDX] = a_wght
a_ising[itok][HAS_FXD_WGHT] = 1
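# Hedged example (illustrative addition; the pattern and weight are made up):
# mirroring the calls in takamura() below, a caller could pin every token with a
# German negative prefix to a fixed negative weight before training:
#
#     import re
#     neg_prefix_re = re.compile(r"^(?:un|miss)\w+", re.IGNORECASE)
#     _annotate_re(neg_prefix_re, ising, -1.)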
def _tkm_add_germanet(ising, a_germanet):
"""Add lexical nodes from GermaNet to the Ising spin model
@param a_ising - instance of the Ising spin model
@param a_germanet - GermaNet instance
@return void
"""
# add all lemmas from the `FORM2LEMMA` dictionary
for ilemma in FORM2LEMMA.itervalues():
if ilemma not in STOP_WORDS:
ising.add_node(ilemma)
# add all lemmas from synsets
for ilexid in a_germanet.lexid2synids.iterkeys():
for ilex in a_germanet.lexid2lex[ilexid]:
ising.add_node(ilex)
# establish links between synset words and lemmas appearing in
# examples and definitions
def_lexemes = []
negation_seen = False
for isynid, (idef, iexamples) in a_germanet.synid2defexmp.iteritems():
def_lexemes = [lemmatize(iword, a_prune=False)
for itxt in chain(idef, iexamples)
for iword in TOKENIZER.tokenize(itxt)]
def_lexemes = [ilexeme
for ilexeme in def_lexemes
if ilexeme
and ising.item2nid.get(ilexeme, None) is not None]
if def_lexemes:
negation_seen = False
for idef_lex in def_lexemes:
if idef_lex in NEGATORS:
negation_seen = True
continue
elif idef_lex in STOP_WORDS:
continue
for ilexid in a_germanet.synid2lexids[isynid]:
for ilex in a_germanet.lexid2lex[ilexid]:
ising.add_edge(ilex,
idef_lex, -1. if negation_seen else 1.)
# establish links between synset lemmas based on the lexical
# relations
iwght = 1.
lemmas1 = lemmas2 = None
for ifrom, irelset in a_germanet.lex_relations.iteritems():
lemmas1 = a_germanet.lexid2lex.get(ifrom)
assert lemmas1 is not None, "No lemma found for id {:s}".format(ifrom)
for ito, irel in irelset:
lemmas2 = a_germanet.lexid2lex.get(ito)
assert lemmas2 is not None, \
"No lemma found for id {:s}".format(ito)
if irel in SYNRELS:
iwght = 1.
elif irel in ANTIRELS:
iwght = -1.
else:
continue
for ilemma1 in lemmas1:
for ilemma2 in lemmas2:
ising.add_edge(ilemma1, ilemma2, iwght)
# establish links between synset lemmas based on the con relations
for ifrom, irelset in a_germanet.con_relations.iteritems():
# iterate over all lexemes pertaining to the first synset
for ilex_id1 in a_germanet.synid2lexids[ifrom]:
lemmas1 = a_germanet.lexid2lex.get(ilex_id1)
assert lemmas1 is not None, \
"No lemma found for id {:s}".format(ifrom)
# iterate over target synsets and their respective relations
for ito, irel in irelset:
if irel in SYNRELS:
iwght = 1.
elif irel in ANTIRELS:
iwght = -1.
else:
continue
# iterate over all lexemes pertaining to the second synset
for ilex_id2 in a_germanet.synid2lexids[ito]:
lemmas2 = a_germanet.lexid2lex.get(ilex_id2)
assert lemmas2 is not None, \
"No lemma found for id {:s}".format(ito)
for ilemma1 in lemmas1:
for ilemma2 in lemmas2:
ising.add_edge(ilemma1, ilemma2, iwght)
# establish links between lemmas which pertain to the same synset
ilexemes = set()
for ilex_ids in a_germanet.synid2lexids.itervalues():
ilexemes = set([ilex
for ilex_id in ilex_ids
for ilex in a_germanet.lexid2lex[ilex_id]])
# generate all possible (n choose 2) combinations of lexemes
# and put links between them
for ilemma1, ilemma2 in combinations(ilexemes, 2):
ising.add_edge(ilemma1, ilemma2, 1.)
def _tkm_add_corpus(ising, a_cc_file):
"""Add lexical nodes from corpus to the Ising spin model
@param a_ising - instance of the Ising spin model
@param a_cc_file - file containing conjoined word pairs extracted from
corpus
@return \c void
"""
ifields = []
iwght = 1.
ilemma1 = ilemma2 = ""
with codecs.open(a_cc_file, 'r', ENCODING) as ifile:
for iline in ifile:
iline = iline.strip()
if not iline:
continue
ifields = TAB_RE.split(iline)
if len(ifields) != 3:
continue
ilemma1, ilemma2, iwght = ifields
if ilemma1 in FORM2LEMMA:
ilemma1 = FORM2LEMMA[ilemma1]
if ilemma2 in FORM2LEMMA:
ilemma2 = FORM2LEMMA[ilemma2]
if check_word(ilemma1) and check_word(ilemma2):
ising.add_edge(normalize(ilemma1),
normalize(ilemma2), float(iwght),
a_add_missing=True)
def takamura(a_germanet, a_N, a_cc_file, a_pos, a_neg, a_neut, a_plot=None,
a_pos_re=NONMATCH_RE, a_neg_re=NONMATCH_RE):
"""Method for generating sentiment lexicons using Takamura's approach.
@param a_germanet - GermaNet instance
@param a_N - number of terms to extract
@param a_cc_file - file containing coordinatively conjoined phrases
@param a_pos - initial set of positive terms to be expanded
@param a_neg - initial set of negative terms to be expanded
@param a_neut - initial set of neutral terms to be expanded
    @param a_plot - name of file in which generated statistics plots should be
saved (None if no plot should be generated)
@param a_pos_re - regular expression for matching positive terms
@param a_neg_re - regular expression for matching negative terms
    @return list of (term, polarity class, score) triples, seed terms first
"""
    # remember the seed terms so that they can be handled separately below
seed_set = a_pos | a_neg
# create initial empty network
ising = Ising()
# populate network from GermaNet
print("Adding GermaNet synsets...", end="", file=sys.stderr)
_tkm_add_germanet(ising, a_germanet)
print("done (Ising model has {:d} nodes)".format(ising.n_nodes),
file=sys.stderr)
# populate network from corpus
print("Adding coordinate phrases from corpus...",
end="", file=sys.stderr)
_tkm_add_corpus(ising, a_cc_file)
print("done (Ising model has {:d} nodes)".format(ising.n_nodes),
file=sys.stderr)
# reweight edges
ising.reweight()
# set fixed weights for words pertaining to the positive, negative, and
# neutral set
for ipos in a_pos:
if ipos in ising:
ising[ipos][FXD_WGHT_IDX] = 1.
else:
ising.add_node(ipos, 1.)
ising[ipos][HAS_FXD_WGHT] = 1
if a_pos_re != NONMATCH_RE:
_annotate_re(a_pos_re, ising, 1)
for ineg in a_neg:
if ineg in ising:
ising[ineg][FXD_WGHT_IDX] = -1.
else:
ising.add_node(ineg, -1.)
ising[ineg][HAS_FXD_WGHT] = 1
    if a_neg_re != NONMATCH_RE:
_annotate_re(a_neg_re, ising, -1.)
for ineut in a_neut:
if ineut in ising:
ising[ineut][FXD_WGHT_IDX] = 0.
else:
ising.add_node(ineut, 0.)
ising[ineut][HAS_FXD_WGHT] = 1
ising.train(a_plot=a_plot)
# nodes = [inode[ITEM_IDX]
# for inode in sorted(ising.nodes, key = lambda x: x[WGHT_IDX])
# if inode[ITEM_IDX] not in seed_set]
seed_set |= a_neut
nodes = [inode
for inode in sorted(ising.nodes,
key=lambda x: abs(x[WGHT_IDX]), reverse=True)
if inode[ITEM_IDX] not in seed_set]
seed_set.clear()
# populate polarity sets and flush all terms to an external file
i = 0
if a_N < 0:
a_N = len(nodes)
# generate final set of polar terms
max_w = max(inode[WGHT_IDX] for inode in nodes) + 1.
    min_w = min(inode[WGHT_IDX] for inode in nodes) - 1.  # most negative score, minus one
# add all original seed terms
ret = [(iterm, POSITIVE, max_w) for iterm in a_pos] + \
[(iterm, NEGATIVE, min_w) for iterm in a_neg]
# add remaining automatically derived terms
for inode in nodes:
if isnan(inode[WGHT_IDX]):
print(inode[ITEM_IDX].encode(ENCODING), "\t", inode[WGHT_IDX],
file=sys.stderr)
else:
if i < a_N:
if inode[WGHT_IDX] > 0:
ret.append((inode[ITEM_IDX], POSITIVE, inode[WGHT_IDX]))
elif inode[WGHT_IDX] < 0:
ret.append((inode[ITEM_IDX], NEGATIVE, inode[WGHT_IDX]))
else:
continue
i += 1
else:
break
return ret
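# Hedged usage sketch (illustrative addition; file names and seed words are
# placeholders, and `igermanet` stands for whatever GermaNet loader the rest of
# the package provides):
#
#     lexicon = takamura(igermanet, a_N=-1, a_cc_file="cc_pairs.tsv",
#                        a_pos=set(["gut"]), a_neg=set(["schlecht"]),
#                        a_neut=set())
#     for term, polarity, score in lexicon[:10]:
#         print(term, polarity, score)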
| 38.378277 | 78 | 0.590514 |
4a218642c15d31e06847073f011a01622dc46208 | 19,603 | py | Python | docker/zap-full-scan.py | sshniro/zaproxy | 572dbfd25ec6bab0dec477281958c171ce524368 | [
"Apache-2.0"
] | null | null | null | docker/zap-full-scan.py | sshniro/zaproxy | 572dbfd25ec6bab0dec477281958c171ce524368 | [
"Apache-2.0"
] | null | null | null | docker/zap-full-scan.py | sshniro/zaproxy | 572dbfd25ec6bab0dec477281958c171ce524368 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# Zed Attack Proxy (ZAP) and its related class files.
#
# ZAP is an HTTP/HTTPS proxy for assessing web application security.
#
# Copyright 2017 ZAP Development Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script runs a full scan against a target URL using ZAP
#
# It can either be run 'standalone', in which case it depends on
# https://pypi.python.org/pypi/python-owasp-zap-v2.4 and Docker, or it can be run
# inside one of the ZAP docker containers. It automatically detects if it is
# running in docker so the parameters are the same.
#
# By default it will spider the target URL with no time limit, but you can change
# that via the -m parameter.
# It will then perform an active scan of all of the URLs found by the spider.
# This may take a significant amount of time.
# It will exit with codes of:
# 0: Success
# 1: At least 1 FAIL
# 2: At least one WARN and no FAILs
# 3: Any other failure
# By default all alerts found by ZAP will be treated as WARNings.
# You can use the -c or -u parameters to specify a configuration file to override
# this.
# You can generate a template configuration file using the -g parameter. You will
# then need to change 'WARN' to 'FAIL', 'INFO' or 'IGNORE' for the rules you want
# to be handled differently.
# You can also add your own messages for the rules by appending them after a tab
# at the end of each line.
# By default all of the active scan rules run but you can prevent rules from
# running by supplying a configuration file with the rules set to IGNORE.
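# An illustrative config entry follows the template written by -g: the rule id,
# a tab, one of WARN/FAIL/INFO/IGNORE, then an optional name and message (the
# ids and names below are examples only):
#   10020	IGNORE	(X-Frame-Options Header)
#   90022	FAIL	(Application Error Disclosure)	must never leak stack traces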
import getopt
import json
import logging
import os
import os.path
import sys
import time
from datetime import datetime
from zapv2 import ZAPv2
from zap_common import *
config_dict = {}
config_msg = {}
out_of_scope_dict = {}
min_level = 0
# Scan rules that aren't really relevant, e.g. the examples rules in the alpha set
blacklist = ['-1', '50003', '60000', '60001', '60100', '60101']
# Scan rules that are being addressed
in_progress_issues = {}
logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')
# Hide "Starting new HTTP connection" messages
logging.getLogger("requests").setLevel(logging.WARNING)
def usage():
print('Usage: zap-full-scan.py -t <target> [options]')
print(' -t target target URL including the protocol, e.g. https://www.example.com')
print('Options:')
print(' -h print this help message')
print(' -c config_file config file to use to INFO, IGNORE or FAIL warnings')
print(' -u config_url URL of config file to use to INFO, IGNORE or FAIL warnings')
print(' -g gen_file generate default config file(all rules set to WARN)')
print(' -m mins the number of minutes to spider for (defaults to no limit)')
print(' -r report_html file to write the full ZAP HTML report')
print(' -w report_md file to write the full ZAP Wiki(Markdown) report')
print(' -x report_xml file to write the full ZAP XML report')
print(' -J report_json file to write the full ZAP JSON document')
print(' -a include the alpha active and passive scan rules as well')
print(' -d show debug messages')
print(' -P specify listen port')
print(' -D delay in seconds to wait for passive scanning ')
print(' -i default rules not in the config file to INFO')
print(' -j use the Ajax spider in addition to the traditional one')
print(' -l level minimum level to show: PASS, IGNORE, INFO, WARN or FAIL, use with -s to hide example URLs')
print(' -n context_file context file which will be loaded prior to scanning the target')
print(' -p progress_file progress file which specifies issues that are being addressed')
print(' -s short output format - dont show PASSes or example URLs')
print(' -T max time in minutes to wait for ZAP to start and the passive scan to run')
print(' -C directory for locating the config file')
print(' -B base directory')
print(' -z zap_options ZAP command line options e.g. -z "-config aaa=bbb -config ccc=ddd"')
print(' --hook path to python file that define your custom hooks')
print('')
print('For more details see https://github.com/zaproxy/zaproxy/wiki/ZAP-Full-Scan')
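# Illustrative invocation (the target and file names are examples only): spider
# for at most ten minutes, include the alpha rules, and write an HTML report:
#     zap-full-scan.py -t https://www.example.com -m 10 -a -r full-report.html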
def main(argv):
global min_level
global in_progress_issues
cid = ''
context_file = ''
progress_file = ''
config_file = ''
config_url = ''
mins = 0
generate = ''
port = 0
detailed_output = True
report_html = ''
report_md = ''
report_xml = ''
report_json = ''
target = ''
zap_alpha = False
info_unspecified = False
ajax = False
base_dir = ''
config_file_dir = ''
zap_ip = 'localhost'
zap_options = ''
delay = 0
timeout = 0
hook_file = None
pass_count = 0
warn_count = 0
fail_count = 0
info_count = 0
ignore_count = 0
warn_inprog_count = 0
fail_inprog_count = 0
try:
opts, args = getopt.getopt(argv, "t:c:u:g:m:n:r:J:w:x:l:hdaijp:sz:P:D:T:C:B:", ["hook="])
except getopt.GetoptError as exc:
logging.warning('Invalid option ' + exc.opt + ' : ' + exc.msg)
usage()
sys.exit(3)
for opt, arg in opts:
if opt == '-h':
usage()
sys.exit(0)
elif opt == '-t':
target = arg
logging.debug('Target: ' + target)
elif opt == '-c':
config_file = arg
elif opt == '-u':
config_url = arg
elif opt == '-g':
generate = arg
elif opt == '-d':
logging.getLogger().setLevel(logging.DEBUG)
elif opt == '-m':
mins = int(arg)
elif opt == '-P':
port = int(arg)
elif opt == '-D':
delay = int(arg)
elif opt == '-n':
context_file = arg
elif opt == '-p':
progress_file = arg
elif opt == '-r':
report_html = arg
elif opt == '-J':
report_json = arg
elif opt == '-w':
report_md = arg
elif opt == '-x':
report_xml = arg
elif opt == '-a':
zap_alpha = True
elif opt == '-i':
info_unspecified = True
elif opt == '-j':
ajax = True
elif opt == '-l':
try:
min_level = zap_conf_lvls.index(arg)
except ValueError:
logging.warning('Level must be one of ' + str(zap_conf_lvls))
usage()
sys.exit(3)
elif opt == '-z':
zap_options = arg
elif opt == '-s':
detailed_output = False
elif opt == '-T':
timeout = int(arg)
elif opt == '-B':
base_dir = arg
elif opt == '-C':
config_file_dir = arg
elif opt == '--hook':
hook_file = arg
check_zap_client_version()
load_custom_hooks(hook_file)
trigger_hook('cli_opts', opts)
# Check target supplied and ok
if len(target) == 0:
usage()
sys.exit(3)
if not (target.startswith('http://') or target.startswith('https://')):
logging.warning('Target must start with \'http://\' or \'https://\'')
usage()
sys.exit(3)
if running_in_docker():
if not base_dir:
base_dir = '/zap/wrk/'
if config_file or generate or report_html or report_xml or report_json or report_md or progress_file or context_file:
# Check directory has been mounted
if not os.path.exists(base_dir):
logging.warning('A file based option has been specified but the directory \'/zap/wrk\' is not mounted ')
usage()
sys.exit(3)
# Choose a random 'ephemeral' port and check its available if it wasn't specified with -P option
if port == 0:
port = get_free_port()
logging.debug('Using port: ' + str(port))
if config_file:
# load config file from filestore
if not config_file_dir:
config_file_dir = base_dir
with open(config_file_dir + config_file) as f:
try:
load_config(f, config_dict, config_msg, out_of_scope_dict)
except ValueError as e:
logging.warning("Failed to load config file " + base_dir + config_file + " " + str(e))
sys.exit(3)
elif config_url:
# load config file from url
try:
config_data = urlopen(config_url).read().decode('UTF-8').splitlines()
load_config(config_data, config_dict, config_msg, out_of_scope_dict)
except ValueError as e:
logging.warning("Failed to read configs from " + config_url + " " + str(e))
sys.exit(3)
except:
logging.warning('Failed to read configs from ' + config_url)
sys.exit(3)
if progress_file:
# load progress file from filestore
with open(base_dir + progress_file) as f:
progress = json.load(f)
# parse into something more useful...
# in_prog_issues = map of vulnid -> {object with everything in}
for issue in progress["issues"]:
if issue["state"] == "inprogress":
in_progress_issues[issue["id"]] = issue
if running_in_docker():
try:
params = [
'-config', 'spider.maxDuration=' + str(mins),
'-addonupdate',
'-addoninstall', 'pscanrulesBeta', # In case we're running in the stable container
'-addoninstall', 'ascanrulesBeta']
if zap_alpha:
params.extend(['-addoninstall', 'pscanrulesAlpha'])
params.extend(['-addoninstall', 'ascanrulesAlpha'])
add_zap_options(params, zap_options)
start_zap(port, params)
except OSError:
logging.warning('Failed to start ZAP :(')
sys.exit(3)
else:
# Not running in docker, so start one
mount_dir = ''
if context_file:
mount_dir = os.path.dirname(os.path.abspath(context_file))
params = [
'-config', 'spider.maxDuration=' + str(mins),
'-addonupdate',
'-addoninstall', 'pscanrulesBeta', # In case we're running in the stable container
'-addoninstall', 'ascanrulesBeta']
if (zap_alpha):
params.extend(['-addoninstall', 'pscanrulesAlpha'])
params.extend(['-addoninstall', 'ascanrulesAlpha'])
add_zap_options(params, zap_options)
try:
cid = start_docker_zap('owasp/zap2docker-weekly', port, params, mount_dir)
zap_ip = ipaddress_for_cid(cid)
logging.debug('Docker ZAP IP Addr: ' + zap_ip)
except OSError:
logging.warning('Failed to start ZAP in docker :(')
sys.exit(3)
try:
zap = ZAPv2(proxies={'http': 'http://' + zap_ip + ':' + str(port), 'https': 'http://' + zap_ip + ':' + str(port)})
wait_for_zap_start(zap, timeout * 60)
trigger_hook('zap_started', zap, target)
if context_file:
# handle the context file, cant use base_dir as it might not have been set up
zap_import_context(zap, '/zap/wrk/' + os.path.basename(context_file))
zap_access_target(zap, target)
if target.count('/') > 2:
# The url can include a valid path, but always reset to spider the host
target = target[0:target.index('/', 8)+1]
time.sleep(2)
# Spider target
zap_spider(zap, target)
if (ajax):
zap_ajax_spider(zap, target, mins)
if (delay):
start_scan = datetime.now()
while ((datetime.now() - start_scan).seconds < delay):
time.sleep(5)
logging.debug('Delay active scan ' + str(delay -(datetime.now() - start_scan).seconds) + ' seconds')
if target.count('/') > 2:
# The url can include a valid path, but always reset to scan the host
target = target[0:target.index('/', 8)+1]
# Set up the scan policy
scan_policy = 'Default Policy'
if config_dict:
# They have supplied a config file, use this to define the ascan rules
zap.ascan.enable_all_scanners(scanpolicyname=scan_policy)
for scanner, state in config_dict.items():
if state == 'IGNORE':
# Dont bother checking the result - this will fail for pscan rules
zap.ascan.set_scanner_alert_threshold(id=scanner, alertthreshold='OFF', scanpolicyname=scan_policy)
zap_active_scan(zap, target, scan_policy)
zap_wait_for_passive_scan(zap, timeout * 60)
# Print out a count of the number of urls
num_urls = len(zap.core.urls())
if num_urls == 0:
logging.warning('No URLs found - is the target URL accessible? Local services may not be accessible from the Docker container')
else:
if detailed_output:
print('Total of ' + str(num_urls) + ' URLs')
alert_dict = zap_get_alerts(zap, target, blacklist, out_of_scope_dict)
all_ascan_rules = zap.ascan.scanners('Default Policy')
all_pscan_rules = zap.pscan.scanners
all_dict = {}
for rule in all_pscan_rules:
plugin_id = rule.get('id')
if plugin_id in blacklist:
continue
all_dict[plugin_id] = rule.get('name') + ' - Passive/' + rule.get('quality')
for rule in all_ascan_rules:
plugin_id = rule.get('id')
if plugin_id in blacklist:
continue
all_dict[plugin_id] = rule.get('name') + ' - Active/' + rule.get('quality')
if generate:
# Create the config file
with open(base_dir + generate, 'w') as f:
f.write('# zap-full-scan rule configuration file\n')
f.write('# Change WARN to IGNORE to ignore rule or FAIL to fail if rule matches\n')
f.write('# Active scan rules set to IGNORE will not be run which will speed up the scan\n')
f.write('# Only the rule identifiers are used - the names are just for info\n')
f.write('# You can add your own messages to each rule by appending them after a tab on each line.\n')
for key, rule in sorted(all_dict.items()):
f.write(key + '\tWARN\t(' + rule + ')\n')
# print out the passing rules
pass_dict = {}
for rule in all_pscan_rules:
plugin_id = rule.get('id')
if plugin_id in blacklist:
continue
if plugin_id not in alert_dict:
pass_dict[plugin_id] = rule.get('name')
for rule in all_ascan_rules:
plugin_id = rule.get('id')
if plugin_id in blacklist:
continue
if plugin_id not in alert_dict and not(plugin_id in config_dict and config_dict[plugin_id] == 'IGNORE'):
pass_dict[plugin_id] = rule.get('name')
if min_level == zap_conf_lvls.index("PASS") and detailed_output:
for key, rule in sorted(pass_dict.items()):
print('PASS: ' + rule + ' [' + key + ']')
pass_count = len(pass_dict)
if detailed_output:
# print out the ignored ascan rules(there will be no alerts for these as they were not run)
for rule in all_ascan_rules:
plugin_id = rule.get('id')
if plugin_id in blacklist:
continue
if plugin_id in config_dict and config_dict[plugin_id] == 'IGNORE':
print('SKIP: ' + rule.get('name') + ' [' + plugin_id + ']')
# print out the ignored rules
ignore_count, not_used = print_rules(zap, alert_dict, 'IGNORE', config_dict, config_msg, min_level,
inc_ignore_rules, True, detailed_output, {})
# print out the info rules
info_count, not_used = print_rules(zap, alert_dict, 'INFO', config_dict, config_msg, min_level,
inc_info_rules, info_unspecified, detailed_output, in_progress_issues)
# print out the warning rules
warn_count, warn_inprog_count = print_rules(zap, alert_dict, 'WARN', config_dict, config_msg, min_level,
inc_warn_rules, not info_unspecified, detailed_output, in_progress_issues)
# print out the failing rules
fail_count, fail_inprog_count = print_rules(zap, alert_dict, 'FAIL', config_dict, config_msg, min_level,
inc_fail_rules, True, detailed_output, in_progress_issues)
if report_html:
# Save the report
write_report(base_dir + report_html, zap.core.htmlreport())
if report_json:
# Save the report
write_report(base_dir + report_json, zap.core.jsonreport())
if report_md:
# Save the report
write_report(base_dir + report_md, zap.core.mdreport())
if report_xml:
# Save the report
write_report(base_dir + report_xml, zap.core.xmlreport())
print('FAIL-NEW: ' + str(fail_count) + '\tFAIL-INPROG: ' + str(fail_inprog_count) +
'\tWARN-NEW: ' + str(warn_count) + '\tWARN-INPROG: ' + str(warn_inprog_count) +
'\tINFO: ' + str(info_count) + '\tIGNORE: ' + str(ignore_count) + '\tPASS: ' + str(pass_count))
trigger_hook('zap_pre_shutdown', zap)
# Stop ZAP
zap.core.shutdown()
except IOError as e:
if hasattr(e, 'args') and len(e.args) > 1:
            errno, strerror = e.args[:2]  # exceptions are not unpackable in Python 3
print("ERROR " + str(strerror))
logging.warning('I/O error(' + str(errno) + '): ' + str(strerror))
else:
print("ERROR %s" % e)
logging.warning('I/O error: ' + str(e))
dump_log_file(cid)
except:
print("ERROR " + str(sys.exc_info()[0]))
logging.warning('Unexpected error: ' + str(sys.exc_info()[0]))
dump_log_file(cid)
if not running_in_docker():
stop_docker(cid)
trigger_hook('pre_exit', fail_count, warn_count, pass_count)
if fail_count > 0:
sys.exit(1)
elif warn_count > 0:
sys.exit(2)
elif pass_count > 0:
sys.exit(0)
else:
sys.exit(3)
if __name__ == "__main__":
main(sys.argv[1:])
| 38.741107 | 139 | 0.578942 |
4a218687d66a55f41e986f3290ab5d9e84010901 | 11,638 | py | Python | ari/client.py | afluence/ari-py | d3510202c6be9dcdaabd4673d1f6c2d4f5563947 | [
"BSD-3-Clause"
] | null | null | null | ari/client.py | afluence/ari-py | d3510202c6be9dcdaabd4673d1f6c2d4f5563947 | [
"BSD-3-Clause"
] | null | null | null | ari/client.py | afluence/ari-py | d3510202c6be9dcdaabd4673d1f6c2d4f5563947 | [
"BSD-3-Clause"
] | null | null | null | #
# Copyright (c) 2013, Digium, Inc.
#
"""ARI client library.
"""
import json
import logging
import urllib.parse
import swaggerpy.client
from ari.model import *
log = logging.getLogger(__name__)
class Client(object):
"""ARI Client object.
:param base_url: Base URL for accessing Asterisk.
:param http_client: HTTP client interface.
"""
def __init__(self, base_url, http_client):
url = urllib.parse.urljoin(base_url, "ari/api-docs/resources.json")
self.swagger = swaggerpy.client.SwaggerClient(
url, http_client=http_client)
self.repositories = {
name: Repository(self, name, api)
for (name, api) in list(self.swagger.resources.items())}
# Extract models out of the events resource
events = [api['api_declaration']
for api in self.swagger.api_docs['apis']
if api['name'] == 'events']
if events:
self.event_models = events[0]['models']
else:
self.event_models = {}
self.websockets = set()
self.event_listeners = {}
self.exception_handler = \
lambda ex: log.exception("Event listener threw exception")
def __getattr__(self, item):
"""Exposes repositories as fields of the client.
:param item: Field name
"""
repo = self.get_repo(item)
if not repo:
raise AttributeError(
"'%r' object has no attribute '%s'" % (self, item))
return repo
def close(self):
"""Close this ARI client.
This method will close any currently open WebSockets, and close the
        underlying SwaggerClient.
"""
for ws in self.websockets:
ws.send_close()
self.swagger.close()
def get_repo(self, name):
"""Get a specific repo by name.
:param name: Name of the repo to get
:return: Repository, or None if not found.
:rtype: ari.model.Repository
"""
return self.repositories.get(name)
def __run(self, ws):
"""Drains all messages from a WebSocket, sending them to the client's
listeners.
:param ws: WebSocket to drain.
"""
# TypeChecker false positive on iter(callable, sentinel) -> iterator
# Fixed in plugin v3.0.1
# noinspection PyTypeChecker
for msg_str in iter(lambda: ws.recv(), None):
msg_json = json.loads(msg_str)
if not isinstance(msg_json, dict) or 'type' not in msg_json:
log.error("Invalid event: %s" % msg_str)
continue
listeners = list(self.event_listeners.get(msg_json['type'], []))
for listener in listeners:
# noinspection PyBroadException
try:
callback, args, kwargs = listener
args = args or ()
kwargs = kwargs or {}
callback(msg_json, *args, **kwargs)
except Exception as e:
self.exception_handler(e)
def run(self, apps):
"""Connect to the WebSocket and begin processing messages.
This method will block until all messages have been received from the
WebSocket, or until this client has been closed.
:param apps: Application (or list of applications) to connect for
:type apps: str or list of str
"""
if isinstance(apps, list):
apps = ','.join(apps)
ws = self.swagger.events.eventWebsocket(app=apps)
self.websockets.add(ws)
try:
self.__run(ws)
finally:
ws.close()
self.websockets.remove(ws)
def on_event(self, event_type, event_cb, *args, **kwargs):
"""Register callback for events with given type.
:param event_type: String name of the event to register for.
:param event_cb: Callback function
:type event_cb: (dict) -> None
:param args: Arguments to pass to event_cb
:param kwargs: Keyword arguments to pass to event_cb
"""
listeners = self.event_listeners.setdefault(event_type, list())
for cb in listeners:
if event_cb == cb[0]:
listeners.remove(cb)
callback_obj = (event_cb, args, kwargs)
listeners.append(callback_obj)
client = self
class EventUnsubscriber(object):
"""Class to allow events to be unsubscribed.
"""
def close(self):
"""Unsubscribe the associated event callback.
"""
if callback_obj in client.event_listeners[event_type]:
client.event_listeners[event_type].remove(callback_obj)
return EventUnsubscriber()
def on_object_event(self, event_type, event_cb, factory_fn, model_id,
*args, **kwargs):
"""Register callback for events with the given type. Event fields of
the given model_id type are passed along to event_cb.
If multiple fields of the event have the type model_id, a dict is
passed mapping the field name to the model object.
:param event_type: String name of the event to register for.
:param event_cb: Callback function
        :type event_cb: (Obj, dict) -> None or (dict[str, Obj], dict) -> None
:param factory_fn: Function for creating Obj from JSON
:param model_id: String id for Obj from Swagger models.
:param args: Arguments to pass to event_cb
:param kwargs: Keyword arguments to pass to event_cb
"""
# Find the associated model from the Swagger declaration
event_model = self.event_models.get(event_type)
if not event_model:
raise ValueError("Cannot find event model '%s'" % event_type)
# Extract the fields that are of the expected type
obj_fields = [k for (k, v) in list(event_model['properties'].items())
if v['type'] == model_id]
if not obj_fields:
raise ValueError("Event model '%s' has no fields of type %s"
% (event_type, model_id))
def extract_objects(event, *args, **kwargs):
"""Extract objects of a given type from an event.
:param event: Event
:param args: Arguments to pass to the event callback
:param kwargs: Keyword arguments to pass to the event
callback
"""
# Extract the fields which are of the expected type
obj = {obj_field: factory_fn(self, event[obj_field])
for obj_field in obj_fields
if event.get(obj_field)}
# If there's only one field in the schema, just pass that along
if len(obj_fields) == 1:
if obj:
obj = list(obj.values())[0]
else:
obj = None
event_cb(obj, event, *args, **kwargs)
return self.on_event(event_type, extract_objects,
*args,
**kwargs)
def on_channel_event(self, event_type, fn, *args, **kwargs):
"""Register callback for Channel related events
:param event_type: String name of the event to register for.
:param fn: Callback function
:type fn: (Channel, dict) -> None or (list[Channel], dict) -> None
:param args: Arguments to pass to fn
:param kwargs: Keyword arguments to pass to fn
"""
return self.on_object_event(event_type, fn, Channel, 'Channel',
*args, **kwargs)
def on_bridge_event(self, event_type, fn, *args, **kwargs):
"""Register callback for Bridge related events
:param event_type: String name of the event to register for.
:param fn: Callback function
:type fn: (Bridge, dict) -> None or (list[Bridge], dict) -> None
:param args: Arguments to pass to fn
:param kwargs: Keyword arguments to pass to fn
"""
return self.on_object_event(event_type, fn, Bridge, 'Bridge',
*args, **kwargs)
def on_playback_event(self, event_type, fn, *args, **kwargs):
"""Register callback for Playback related events
:param event_type: String name of the event to register for.
:param fn: Callback function
:type fn: (Playback, dict) -> None or (list[Playback], dict) -> None
:param args: Arguments to pass to fn
:param kwargs: Keyword arguments to pass to fn
"""
return self.on_object_event(event_type, fn, Playback, 'Playback',
*args, **kwargs)
def on_live_recording_event(self, event_type, fn, *args, **kwargs):
"""Register callback for LiveRecording related events
:param event_type: String name of the event to register for.
:param fn: Callback function
:type fn: (LiveRecording, dict) -> None or (list[LiveRecording], dict) -> None
:param args: Arguments to pass to fn
:param kwargs: Keyword arguments to pass to fn
"""
return self.on_object_event(event_type, fn, LiveRecording,
'LiveRecording', *args, **kwargs)
def on_stored_recording_event(self, event_type, fn, *args, **kwargs):
"""Register callback for StoredRecording related events
:param event_type: String name of the event to register for.
:param fn: Callback function
:type fn: (StoredRecording, dict) -> None or (list[StoredRecording], dict) -> None
:param args: Arguments to pass to fn
:param kwargs: Keyword arguments to pass to fn
"""
return self.on_object_event(event_type, fn, StoredRecording,
'StoredRecording', *args, **kwargs)
def on_endpoint_event(self, event_type, fn, *args, **kwargs):
"""Register callback for Endpoint related events
:param event_type: String name of the event to register for.
:param fn: Callback function
:type fn: (Endpoint, dict) -> None or (list[Endpoint], dict) -> None
:param args: Arguments to pass to fn
:param kwargs: Keyword arguments to pass to fn
"""
return self.on_object_event(event_type, fn, Endpoint, 'Endpoint',
*args, **kwargs)
def on_device_state_event(self, event_type, fn, *args, **kwargs):
"""Register callback for DeviceState related events
:param event_type: String name of the event to register for.
:param fn: Callback function
:type fn: (DeviceState, dict) -> None or (list[DeviceState], dict) -> None
:param args: Arguments to pass to fn
:param kwargs: Keyword arguments to pass to fn
"""
return self.on_object_event(event_type, fn, DeviceState, 'DeviceState',
*args, **kwargs)
def on_sound_event(self, event_type, fn, *args, **kwargs):
"""Register callback for Sound related events
:param event_type: String name of the event to register for.
        :param fn: Callback function
:type fn: (Sound, dict) -> None or (list[Sound], dict) -> None
:param args: Arguments to pass to fn
:param kwargs: Keyword arguments to pass to fn
"""
return self.on_object_event(event_type, fn, Sound, 'Sound',
*args, **kwargs)
| 38.793333 | 91 | 0.587128 |
4a2186b2db2ce8b846cdfdfa16d4e2b935c06b95 | 10,993 | py | Python | salt/modules/gem.py | Noah-Huppert/salt | 998c382f5f2c3b4cbf7d96aa6913ada6993909b3 | [
"Apache-2.0"
] | 19 | 2016-01-29T14:37:52.000Z | 2022-03-30T18:08:01.000Z | salt/modules/gem.py | Noah-Huppert/salt | 998c382f5f2c3b4cbf7d96aa6913ada6993909b3 | [
"Apache-2.0"
] | 223 | 2016-03-02T16:39:41.000Z | 2022-03-03T12:26:35.000Z | salt/modules/gem.py | Noah-Huppert/salt | 998c382f5f2c3b4cbf7d96aa6913ada6993909b3 | [
"Apache-2.0"
] | 64 | 2016-02-04T19:45:26.000Z | 2021-12-15T02:02:31.000Z |
# -*- coding: utf-8 -*-
"""
Manage ruby gems.
"""
from __future__ import absolute_import, print_function, unicode_literals
import logging
# Import python libs
import re
# Import Salt libs
import salt.utils.itertools
import salt.utils.platform
from salt.exceptions import CommandExecutionError
__func_alias__ = {"list_": "list"}
log = logging.getLogger(__name__) # pylint: disable=C0103
def _gem(command, ruby=None, runas=None, gem_bin=None):
"""
Run the actual gem command. If rvm or rbenv is installed, run the command
using the corresponding module. rbenv is not available on windows, so don't
try.
:param command: string
Command to run
:param ruby: string : None
If RVM or rbenv are installed, the ruby version and gemset to use.
Ignored if ``gem_bin`` is specified.
:param runas: string : None
The user to run gem as.
:param gem_bin: string : None
Full path to the ``gem`` binary
:return:
Returns the full standard out including success codes or False if it fails
"""
cmdline = [gem_bin or "gem"] + command
# If a custom gem is given, use that and don't check for rvm/rbenv. User
# knows best!
if gem_bin is None:
if __salt__["rvm.is_installed"](runas=runas):
return __salt__["rvm.do"](ruby, cmdline, runas=runas)
if not salt.utils.platform.is_windows() and __salt__["rbenv.is_installed"](
runas=runas
):
if ruby is None:
return __salt__["rbenv.do"](cmdline, runas=runas)
else:
return __salt__["rbenv.do_with_ruby"](ruby, cmdline, runas=runas)
ret = __salt__["cmd.run_all"](cmdline, runas=runas, python_shell=False)
if ret["retcode"] == 0:
return ret["stdout"]
else:
raise CommandExecutionError(ret["stderr"])
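# Illustrative dispatch examples for _gem() (assumed, not part of the original
# module):
#   _gem(["list"], ruby="2.7.2@myset", runas="deploy")
# runs ``gem list`` through rvm.do for that ruby/gemset (when RVM is installed
# for the user), while
#   _gem(["list"], gem_bin="/opt/sensu/embedded/bin/gem")
# skips the rvm/rbenv checks and invokes that binary directly via cmd.run_all.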
def install(
gems, # pylint: disable=C0103
ruby=None,
gem_bin=None,
runas=None,
version=None,
rdoc=False,
ri=False,
pre_releases=False,
proxy=None,
source=None,
): # pylint: disable=C0103
"""
Installs one or several gems.
:param gems: string
The gems to install
:param gem_bin: string : None
Full path to ``gem`` binary to use.
:param ruby: string : None
If RVM or rbenv are installed, the ruby version and gemset to use.
Ignored if ``gem_bin`` is specified.
:param runas: string : None
The user to run gem as.
:param version: string : None
Specify the version to install for the gem.
Doesn't play nice with multiple gems at once
:param rdoc: boolean : False
Generate RDoc documentation for the gem(s).
For rubygems > 3 this is interpreted as the --no-document arg and the
ri option will then be ignored
:param ri: boolean : False
Generate RI documentation for the gem(s).
For rubygems > 3 this is interpreted as the --no-document arg and the
rdoc option will then be ignored
:param pre_releases: boolean : False
Include pre-releases in the available versions
:param proxy: string : None
Use the specified HTTP proxy server for all outgoing traffic.
Format: http://hostname[:port]
source : None
Use the specified HTTP gem source server to download gem.
Format: http://hostname[:port]
CLI Example:
.. code-block:: bash
salt '*' gem.install vagrant
salt '*' gem.install redphone gem_bin=/opt/sensu/embedded/bin/gem
"""
try:
gems = gems.split()
except AttributeError:
pass
options = []
if version:
options.extend(["--version", version])
if _has_rubygems_3(ruby=ruby, runas=runas, gem_bin=gem_bin):
if not rdoc or not ri:
options.append("--no-document")
if pre_releases:
options.append("--prerelease")
else:
if not rdoc:
options.append("--no-rdoc")
if not ri:
options.append("--no-ri")
if pre_releases:
options.append("--pre")
if proxy:
options.extend(["-p", proxy])
if source:
options.extend(["--source", source])
return _gem(["install"] + gems + options, ruby, gem_bin=gem_bin, runas=runas)
def uninstall(gems, ruby=None, runas=None, gem_bin=None):
"""
Uninstall one or several gems.
:param gems: string
The gems to uninstall.
:param gem_bin: string : None
Full path to ``gem`` binary to use.
:param ruby: string : None
If RVM or rbenv are installed, the ruby version and gemset to use.
Ignored if ``gem_bin`` is specified.
:param runas: string : None
The user to run gem as.
CLI Example:
.. code-block:: bash
salt '*' gem.uninstall vagrant
"""
try:
gems = gems.split()
except AttributeError:
pass
return _gem(["uninstall"] + gems + ["-a", "-x"], ruby, gem_bin=gem_bin, runas=runas)
def update(gems, ruby=None, runas=None, gem_bin=None):
"""
Update one or several gems.
:param gems: string
The gems to update.
:param gem_bin: string : None
Full path to ``gem`` binary to use.
:param ruby: string : None
If RVM or rbenv are installed, the ruby version and gemset to use.
Ignored if ``gem_bin`` is specified.
:param runas: string : None
The user to run gem as.
CLI Example:
.. code-block:: bash
salt '*' gem.update vagrant
"""
try:
gems = gems.split()
except AttributeError:
pass
return _gem(["update"] + gems, ruby, gem_bin=gem_bin, runas=runas)
def update_system(version="", ruby=None, runas=None, gem_bin=None):
"""
Update rubygems.
:param version: string : (newest)
The version of rubygems to install.
:param gem_bin: string : None
Full path to ``gem`` binary to use.
:param ruby: string : None
If RVM or rbenv are installed, the ruby version and gemset to use.
Ignored if ``gem_bin`` is specified.
:param runas: string : None
The user to run gem as.
CLI Example:
.. code-block:: bash
salt '*' gem.update_system
"""
return _gem(["update", "--system", version], ruby, gem_bin=gem_bin, runas=runas)
def version(ruby=None, runas=None, gem_bin=None):
"""
Print out the version of gem
:param gem_bin: string : None
Full path to ``gem`` binary to use.
:param ruby: string : None
If RVM or rbenv are installed, the ruby version and gemset to use.
Ignored if ``gem_bin`` is specified.
:param runas: string : None
The user to run gem as.
CLI Example:
.. code-block:: bash
salt '*' gem.version
"""
cmd = ["--version"]
stdout = _gem(cmd, ruby, gem_bin=gem_bin, runas=runas)
ret = {}
for line in salt.utils.itertools.split(stdout, "\n"):
match = re.match(r"[.0-9]+", line)
if match:
ret = line
break
return ret
def _has_rubygems_3(ruby=None, runas=None, gem_bin=None):
match = re.match(r"^3\..*", version(ruby=ruby, runas=runas, gem_bin=gem_bin))
if match:
return True
return False
def list_(prefix="", ruby=None, runas=None, gem_bin=None):
"""
List locally installed gems.
:param prefix: string :
Only list gems when the name matches this prefix.
:param gem_bin: string : None
Full path to ``gem`` binary to use.
:param ruby: string : None
If RVM or rbenv are installed, the ruby version and gemset to use.
Ignored if ``gem_bin`` is specified.
:param runas: string : None
The user to run gem as.
CLI Example:
.. code-block:: bash
salt '*' gem.list
"""
cmd = ["list"]
if prefix:
cmd.append(prefix)
stdout = _gem(cmd, ruby, gem_bin=gem_bin, runas=runas)
ret = {}
for line in salt.utils.itertools.split(stdout, "\n"):
match = re.match(r"^([^ ]+) \((.+)\)", line)
if match:
gem = match.group(1)
versions = match.group(2).split(", ")
ret[gem] = versions
return ret
def list_upgrades(ruby=None, runas=None, gem_bin=None):
"""
.. versionadded:: 2015.8.0
Check if an upgrade is available for installed gems
gem_bin : None
Full path to ``gem`` binary to use.
ruby : None
If RVM or rbenv are installed, the ruby version and gemset to use.
Ignored if ``gem_bin`` is specified.
runas : None
The user to run gem as.
CLI Example:
.. code-block:: bash
salt '*' gem.list_upgrades
"""
result = _gem(["outdated"], ruby, gem_bin=gem_bin, runas=runas)
ret = {}
for line in salt.utils.itertools.split(result, "\n"):
match = re.search(r"(\S+) \(\S+ < (\S+)\)", line)
if match:
name, version = match.groups()
else:
log.error("Can't parse line '%s'", line)
continue
ret[name] = version
return ret
def sources_add(source_uri, ruby=None, runas=None, gem_bin=None):
"""
Add a gem source.
:param source_uri: string
The source URI to add.
:param gem_bin: string : None
Full path to ``gem`` binary to use.
:param ruby: string : None
If RVM or rbenv are installed, the ruby version and gemset to use.
Ignored if ``gem_bin`` is specified.
:param runas: string : None
The user to run gem as.
CLI Example:
.. code-block:: bash
salt '*' gem.sources_add http://rubygems.org/
"""
return _gem(["sources", "--add", source_uri], ruby, gem_bin=gem_bin, runas=runas)
def sources_remove(source_uri, ruby=None, runas=None, gem_bin=None):
"""
Remove a gem source.
:param source_uri: string
The source URI to remove.
:param gem_bin: string : None
Full path to ``gem`` binary to use.
:param ruby: string : None
If RVM or rbenv are installed, the ruby version and gemset to use.
Ignored if ``gem_bin`` is specified.
:param runas: string : None
The user to run gem as.
CLI Example:
.. code-block:: bash
salt '*' gem.sources_remove http://rubygems.org/
"""
return _gem(["sources", "--remove", source_uri], ruby, gem_bin=gem_bin, runas=runas)
def sources_list(ruby=None, runas=None, gem_bin=None):
"""
List the configured gem sources.
:param gem_bin: string : None
Full path to ``gem`` binary to use.
:param ruby: string : None
If RVM or rbenv are installed, the ruby version and gemset to use.
Ignored if ``gem_bin`` is specified.
:param runas: string : None
The user to run gem as.
CLI Example:
.. code-block:: bash
salt '*' gem.sources_list
"""
ret = _gem(["sources"], ruby, gem_bin=gem_bin, runas=runas)
return [] if ret is False else ret.splitlines()[2:]
| 27.901015 | 88 | 0.607932 |
4a2186eb03bc72d20131e0f32740189915939e88 | 166 | py | Python | app/tests/test_dummy.py | afrokita/books-list | 5afe40ffd7e28e0f62f12a8f780470a0b2500110 | [
"MIT"
] | null | null | null | app/tests/test_dummy.py | afrokita/books-list | 5afe40ffd7e28e0f62f12a8f780470a0b2500110 | [
"MIT"
] | null | null | null | app/tests/test_dummy.py | afrokita/books-list | 5afe40ffd7e28e0f62f12a8f780470a0b2500110 | [
"MIT"
] | null | null | null |
import pytest
@pytest.fixture
def foo():
bar = 42
return bar
@pytest.mark.parametrize("num, output", [(42,42)])
def test_foo(num, output):
assert num == output
| 13.833333 | 50 | 0.686747 |
4a218815c795b8a14dcce873c65687ae29f23960 | 521 | py | Python | themissc/Tools/ReadCDF.py | mattkjames7/themissc | 1e7257d60da1069fffc7fed848ddcf5c780e9250 | [
"MIT"
] | null | null | null | themissc/Tools/ReadCDF.py | mattkjames7/themissc | 1e7257d60da1069fffc7fed848ddcf5c780e9250 | [
"MIT"
] | 1 | 2021-06-10T22:51:09.000Z | 2021-06-10T22:51:09.000Z | Arase/Tools/ReadCDF.py | mattkjames7/Arase | 996167be35a13bbb1fdddfbe75e3a06d124b1d25 | [
"MIT"
] | null | null | null |
import numpy as np
import cdflib
import os
def ReadCDF(fname,Verbose=True):
'''
Read a CDF file contents
'''
if not os.path.isfile(fname):
print('File not found')
return None,None
#open the file
f = cdflib.CDF(fname)
#get the list of zVariables
var = f.cdf_info()['zVariables']
	#create output dicts
data = {}
attr = {}
for v in var:
data[v] = f.varget(v)
attr[v] = f.varattsget(v)
#delete cdf (not sure if this is necessary - no idea if there is a close function)
del f
return data,attr
| 16.806452 | 83 | 0.666027 |
4a218840c432db771a357f49c805fac16b834177 | 13,768 | py | Python | config/settings/base.py | pygabo/bus_system | ffb76d3414e058286799f3df1cb551b26286e7c3 | [
"MIT"
] | null | null | null | config/settings/base.py | pygabo/bus_system | ffb76d3414e058286799f3df1cb551b26286e7c3 | [
"MIT"
] | null | null | null | config/settings/base.py | pygabo/bus_system | ffb76d3414e058286799f3df1cb551b26286e7c3 | [
"MIT"
] | null | null | null | """
Base settings to build other settings files upon.
"""
from datetime import timedelta
from pathlib import Path
import environ
ROOT_DIR = Path(__file__).resolve(strict=True).parent.parent.parent
# bus_system/
APPS_DIR = ROOT_DIR / "bus_system"
env = environ.Env()
READ_DOT_ENV_FILE = env.bool("DJANGO_READ_DOT_ENV_FILE", default=False)
if READ_DOT_ENV_FILE:
# OS environment variables take precedence over variables from .env
env.read_env(str(ROOT_DIR / ".env"))
# GENERAL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = env.bool("DJANGO_DEBUG", False)
# Local time zone. Choices are
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# though not all of them may be available with every OS.
# In Windows, this must be set to your system time zone.
TIME_ZONE = "UTC"
# https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = "en-us"
# https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
# https://docs.djangoproject.com/en/dev/ref/settings/#locale-paths
LOCALE_PATHS = [str(ROOT_DIR / "locale")]
# DATABASES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {"default": env.db("DATABASE_URL")}
DATABASES["default"]["ATOMIC_REQUESTS"] = True
# URLS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#root-urlconf
ROOT_URLCONF = "config.urls"
# https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = "config.wsgi.application"
# APPS
# ------------------------------------------------------------------------------
DJANGO_APPS = [
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.sites",
"django.contrib.messages",
"django.contrib.staticfiles",
# "django.contrib.humanize", # Handy template tags
"django.contrib.admin",
"django.forms",
]
THIRD_PARTY_APPS = [
"crispy_forms",
"allauth",
"allauth.account",
"allauth.socialaccount",
"dj_rest_auth",
"dj_rest_auth.registration",
"django_celery_beat",
"rest_framework",
"rest_framework.authtoken",
"corsheaders",
]
LOCAL_APPS = [
"bus_system.users.apps.UsersConfig",
"bus_system.apps.bus.apps.BusConfig",
"bus_system.apps.bus_driver.apps.BusDriverConfig",
"bus_system.apps.passenger.apps.PassengerConfig",
"bus_system.apps.trip.apps.TripConfig",
"bus_system.apps.ticket.apps.TicketConfig"
]
# https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
# MIGRATIONS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#migration-modules
MIGRATION_MODULES = {"sites": "bus_system.contrib.sites.migrations"}
# AUTHENTICATION
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#authentication-backends
AUTHENTICATION_BACKENDS = [
"django.contrib.auth.backends.ModelBackend",
"allauth.account.auth_backends.AuthenticationBackend",
]
# https://docs.djangoproject.com/en/dev/ref/settings/#auth-user-model
AUTH_USER_MODEL = "users.User"
# https://docs.djangoproject.com/en/dev/ref/settings/#login-redirect-url
# LOGIN_REDIRECT_URL = "users:redirect"
# https://docs.djangoproject.com/en/dev/ref/settings/#login-url
# LOGIN_URL = "account_login"
# PASSWORDS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#password-hashers
PASSWORD_HASHERS = [
# https://docs.djangoproject.com/en/dev/topics/auth/passwords/#using-argon2-with-django
"django.contrib.auth.hashers.Argon2PasswordHasher",
"django.contrib.auth.hashers.PBKDF2PasswordHasher",
"django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher",
"django.contrib.auth.hashers.BCryptSHA256PasswordHasher",
]
# https://docs.djangoproject.com/en/dev/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"
},
{"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"},
{"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"},
{"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"},
]
# MIDDLEWARE
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#middleware
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"corsheaders.middleware.CorsMiddleware",
"whitenoise.middleware.WhiteNoiseMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.locale.LocaleMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.common.BrokenLinkEmailsMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
]
# STATIC
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = str(ROOT_DIR / "staticfiles")
# https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = "/static/"
# https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = [str(APPS_DIR / "static")]
# https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = [
"django.contrib.staticfiles.finders.FileSystemFinder",
"django.contrib.staticfiles.finders.AppDirectoriesFinder",
]
# MEDIA
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = str(APPS_DIR / "media")
# https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = "/media/"
# TEMPLATES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#templates
TEMPLATES = [
{
# https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-TEMPLATES-BACKEND
"BACKEND": "django.template.backends.django.DjangoTemplates",
# https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
"DIRS": [str(APPS_DIR / "templates")],
"OPTIONS": {
# https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders
# https://docs.djangoproject.com/en/dev/ref/templates/api/#loader-types
"loaders": [
"django.template.loaders.filesystem.Loader",
"django.template.loaders.app_directories.Loader",
],
# https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.template.context_processors.i18n",
"django.template.context_processors.media",
"django.template.context_processors.static",
"django.template.context_processors.tz",
"django.contrib.messages.context_processors.messages",
"bus_system.utils.context_processors.settings_context",
],
},
}
]
# https://docs.djangoproject.com/en/dev/ref/settings/#form-renderer
FORM_RENDERER = "django.forms.renderers.TemplatesSetting"
# http://django-crispy-forms.readthedocs.io/en/latest/install.html#template-packs
CRISPY_TEMPLATE_PACK = "bootstrap4"
# FIXTURES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#fixture-dirs
FIXTURE_DIRS = (str(APPS_DIR / "fixtures"),)
# SECURITY
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#session-cookie-httponly
SESSION_COOKIE_HTTPONLY = True
# https://docs.djangoproject.com/en/dev/ref/settings/#csrf-cookie-httponly
CSRF_COOKIE_HTTPONLY = True
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-browser-xss-filter
SECURE_BROWSER_XSS_FILTER = True
# https://docs.djangoproject.com/en/dev/ref/settings/#x-frame-options
X_FRAME_OPTIONS = "DENY"
# EMAIL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = env(
"DJANGO_EMAIL_BACKEND",
default="django.core.mail.backends.smtp.EmailBackend",
)
# https://docs.djangoproject.com/en/dev/ref/settings/#email-timeout
EMAIL_TIMEOUT = 5
# ADMIN
# ------------------------------------------------------------------------------
# Django Admin URL.
ADMIN_URL = "admin/"
# https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = [("""JOse gabriel guzman""", "[email protected]")]
# https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
# LOGGING
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#logging
# See https://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"verbose": {
"format": "%(levelname)s %(asctime)s %(module)s "
"%(process)d %(thread)d %(message)s"
}
},
"handlers": {
"console": {
"level": "DEBUG",
"class": "logging.StreamHandler",
"formatter": "verbose",
}
},
"root": {"level": "INFO", "handlers": ["console"]},
}
# Celery
# ------------------------------------------------------------------------------
if USE_TZ:
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-timezone
CELERY_TIMEZONE = TIME_ZONE
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-broker_url
CELERY_BROKER_URL = env("CELERY_BROKER_URL")
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-result_backend
CELERY_RESULT_BACKEND = CELERY_BROKER_URL
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-accept_content
CELERY_ACCEPT_CONTENT = ["json"]
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-task_serializer
CELERY_TASK_SERIALIZER = "json"
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-result_serializer
CELERY_RESULT_SERIALIZER = "json"
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#task-time-limit
# TODO: set to whatever value is adequate in your circumstances
CELERY_TASK_TIME_LIMIT = 5 * 60
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#task-soft-time-limit
# TODO: set to whatever value is adequate in your circumstances
CELERY_TASK_SOFT_TIME_LIMIT = 60
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#beat-scheduler
CELERY_BEAT_SCHEDULER = "django_celery_beat.schedulers:DatabaseScheduler"
# django-allauth
# ------------------------------------------------------------------------------
ACCOUNT_ALLOW_REGISTRATION = env.bool("DJANGO_ACCOUNT_ALLOW_REGISTRATION", True)
# https://django-allauth.readthedocs.io/en/latest/configuration.html
ACCOUNT_AUTHENTICATION_METHOD = "username_email"
# https://django-allauth.readthedocs.io/en/latest/configuration.html
ACCOUNT_EMAIL_REQUIRED = True
# https://django-allauth.readthedocs.io/en/latest/configuration.html
ACCOUNT_EMAIL_VERIFICATION = "none"
# https://django-allauth.readthedocs.io/en/latest/configuration.html
ACCOUNT_ADAPTER = "bus_system.users.adapters.AccountAdapter"
# https://django-allauth.readthedocs.io/en/latest/configuration.html
SOCIALACCOUNT_ADAPTER = "bus_system.users.adapters.SocialAccountAdapter"
# django-rest-framework
# -------------------------------------------------------------------------------
# django-rest-framework - https://www.django-rest-framework.org/api-guide/settings/
REST_FRAMEWORK = {
"DEFAULT_AUTHENTICATION_CLASSES": (
"dj_rest_auth.jwt_auth.JWTCookieAuthentication",
),
"DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.IsAuthenticated",),
}
REST_USE_JWT = True
SEND_CONFIRMATION_EMAIL = False
ACTIVATION_URL = False
# django-cors-headers - https://github.com/adamchainz/django-cors-headers#setup
CORS_URLS_REGEX = r"^/api/.*$"
# Your stuff...
# ------------------------------------------------------------------------------
SIMPLE_JWT = {
'ACCESS_TOKEN_LIFETIME': timedelta(days=1),
'REFRESH_TOKEN_LIFETIME': timedelta(days=1),
'ALGORITHM': 'HS256',
'SIGNING_KEY': env("DJANGO_SECRET_KEY",
default="hrcaI0NRSC4gUdfGki2r8S3BmWfU9D3QOctMU9Z5mbF434GKeivOqLlaxndI7Y4Q", ),
'AUTH_HEADER_TYPES': ('Bearer',),
}
| 42.233129 | 101 | 0.652455 |
4a2188b0004b39e1f3ac1aa96ef948b04ee767b1 | 217 | py | Python | Taller/E8.py | Pcetina/algoritmos_programacion | f3292327a6da4bc35efb70bd6b9951664f29585f | [
"MIT"
] | null | null | null | Taller/E8.py | Pcetina/algoritmos_programacion | f3292327a6da4bc35efb70bd6b9951664f29585f | [
"MIT"
] | null | null | null | Taller/E8.py | Pcetina/algoritmos_programacion | f3292327a6da4bc35efb70bd6b9951664f29585f | [
"MIT"
] | null | null | null |
while(True):
try:
a = int(input())
if(a==2002):
print("Acesso Permitido")
break
else:
print("Senha Invalida")
except EOFError:
break | 21.7 | 38 | 0.43318 |
4a218a37f1de1279f64edb7ed18813f01a98ec39 | 647 | py | Python | setup.py | jfine2358/python-kwkey | a58b2037568eada57a29e0086f57a981c06e2477 | [
"MIT"
] | 3 | 2020-08-03T17:18:52.000Z | 2020-11-02T07:51:20.000Z | setup.py | jfine2358/python-kwkey | a58b2037568eada57a29e0086f57a981c06e2477 | [
"MIT"
] | 3 | 2020-08-06T12:23:07.000Z | 2020-08-30T19:27:38.000Z | setup.py | jfine2358/python-kwkey | a58b2037568eada57a29e0086f57a981c06e2477 | [
"MIT"
] | 1 | 2020-08-07T12:56:29.000Z | 2020-08-07T12:56:29.000Z |
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="kwkey",
version="0.0.2",
author="Jonathan Fine",
author_email="[email protected]",
description="Indexing with keyword arguments",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/jfine2358/python-kwkey",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
)
| 28.130435 | 52 | 0.664606 |
4a218bd6acbf0beb64e7381921ec206cf84a3ec8 | 7,296 | py | Python | xfel/merging/application/group/group_reflections.py | toastisme/cctbx_project | d1a25147b5958822b6923fb55260749ccf9350ff | [
"BSD-3-Clause-LBNL"
] | 2 | 2018-02-01T14:25:48.000Z | 2021-09-15T16:36:29.000Z | xfel/merging/application/group/group_reflections.py | toastisme/cctbx_project | d1a25147b5958822b6923fb55260749ccf9350ff | [
"BSD-3-Clause-LBNL"
] | 2 | 2018-06-14T17:04:17.000Z | 2019-06-24T20:54:12.000Z | xfel/merging/application/group/group_reflections.py | toastisme/cctbx_project | d1a25147b5958822b6923fb55260749ccf9350ff | [
"BSD-3-Clause-LBNL"
] | 1 | 2022-02-08T10:11:07.000Z | 2022-02-08T10:11:07.000Z |
from __future__ import absolute_import, division, print_function
from six.moves import range
from xfel.merging.application.worker import worker
from dials.array_family import flex
from xfel.merging.application.reflection_table_utils import reflection_table_utils
from xfel.merging.application.utils.memory_usage import get_memory_usage
class hkl_group(worker):
'''For each asu hkl, gather all of its measurements from all ranks at a single rank, while trying to evenly distribute asu HKLs over the ranks.'''
def __init__(self, params, mpi_helper=None, mpi_logger=None):
super(hkl_group, self).__init__(params=params, mpi_helper=mpi_helper, mpi_logger=mpi_logger)
def __repr__(self):
return "Group symmetry-reduced HKLs"
def distribute_reflection_table(self, reflections):
'''Create a reflection table for storing reflections distributed over hkl chunks'''
table = flex.reflection_table()
for key in reflections:
table[key] = type(reflections[key])()
return table
def run(self, experiments, reflections):
self.logger.log_step_time("GROUP")
reflections = reflection_table_utils.prune_reflection_table_keys(reflections=reflections,
keys_to_keep=['intensity.sum.value', 'intensity.sum.variance', 'miller_index_asymmetric', \
'exp_id', 'intensity.sum.value.unmodified', 'intensity.sum.variance.unmodified'])
    # set up hkl chunks to be used for all-to-all; every available rank participates in all-to-all, even a rank that doesn't load any data
self.logger.log_step_time("SETUP_CHUNKS")
self.setup_hkl_chunks(reflections)
self.logger.log_step_time("SETUP_CHUNKS", True)
# for the ranks, which have loaded the data, distribute the reflections over the hkl chunks
self.logger.log_step_time("DISTRIBUTE_OVER_CHUNKS")
self.distribute_reflections_over_hkl_chunks(reflections=reflections)
self.logger.log_step_time("DISTRIBUTE_OVER_CHUNKS", True)
# run all-to-all
if self.params.parallel.a2a == 1: # 1 means: the number of slices in each chunk is 1, i.e. alltoall is done on the whole chunks
alltoall_reflections = self.get_reflections_from_alltoall()
else: # do alltoall on chunk slices - useful if the run-time memory is not sufficient to do alltoall on the whole chunks
alltoall_reflections = self.get_reflections_from_alltoall_sliced(number_of_slices=self.params.parallel.a2a)
self.logger.log_step_time("SORT")
self.logger.log("Sorting consolidated reflection table...")
alltoall_reflections.sort('miller_index_asymmetric')
self.logger.log_step_time("SORT", True)
self.logger.log_step_time("GROUP", True)
return None, alltoall_reflections
def setup_hkl_chunks(self, reflections):
'''Set up a list of reflection tables, or chunks, for distributing reflections'''
# split the full miller set into chunks; the number of chunks is equal to the number of ranks
import numpy as np
self.hkl_split_set = np.array_split(self.params.scaling.miller_set.indices(), self.mpi_helper.size)
# initialize a list of hkl chunks - reflection tables to store distributed reflections
self.hkl_chunks = []
for i in range(len(self.hkl_split_set)):
self.hkl_chunks.append(self.distribute_reflection_table(reflections))
def distribute_reflections_over_hkl_chunks(self, reflections):
'''Distribute reflections, according to their HKLs, over pre-set HKL chunks'''
total_reflection_count = reflections.size()
total_distributed_reflection_count = 0
if total_reflection_count > 0:
# set up two lists to be passed to the C++ extension: HKLs and chunk ids. It's basically a hash table to look up chunk ids by HKLs
hkl_list = flex.miller_index()
chunk_id_list = flex.int()
for i in range(len(self.hkl_split_set)):
for j in range(len(self.hkl_split_set[i])):
hkl = (int(self.hkl_split_set[i][j][0]), int(self.hkl_split_set[i][j][1]), int(self.hkl_split_set[i][j][2]))
hkl_list.append(hkl)
chunk_id_list.append(i)
# distribute reflections over hkl chunks, using a C++ extension
from xfel.merging import get_hkl_chunks_cpp
get_hkl_chunks_cpp(reflections, hkl_list, chunk_id_list, self.hkl_chunks)
for chunk in self.hkl_chunks:
total_distributed_reflection_count += len(chunk)
self.logger.log("Distributed %d out of %d reflections"%(total_distributed_reflection_count, total_reflection_count))
self.logger.log("Memory usage: %d MB"%get_memory_usage())
reflections.clear()
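  # Conceptual sketch of the lookup built above (illustrative values, not from
  # the original source): with 2 ranks and a miller set of
  # [(1,0,0), (2,0,0), (3,0,0), (4,0,0)], np.array_split yields
  #   hkl_split_set = [[(1,0,0), (2,0,0)], [(3,0,0), (4,0,0)]]
  # so hkl_list/chunk_id_list encode the mapping
  #   (1,0,0)->0, (2,0,0)->0, (3,0,0)->1, (4,0,0)->1
  # and each reflection is appended to the self.hkl_chunks[chunk_id] of its HKL.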
def get_reflections_from_alltoall(self):
'''Use MPI alltoall method to gather all reflections with the same asu hkl from all ranks at a single rank'''
self.logger.log_step_time("ALL-TO-ALL")
self.logger.log("Executing MPI all-to-all...")
received_hkl_chunks = self.mpi_helper.comm.alltoall(self.hkl_chunks)
self.logger.log("Received %d hkl chunks after all-to-all"%len(received_hkl_chunks))
self.logger.log_step_time("ALL-TO-ALL", True)
self.logger.log_step_time("CONSOLIDATE")
self.logger.log("Consolidating reflection tables...")
result_reflections = flex.reflection_table()
for chunk in received_hkl_chunks:
result_reflections.extend(chunk)
self.logger.log_step_time("CONSOLIDATE", True)
return result_reflections
def get_reflections_from_alltoall_sliced(self, number_of_slices):
'''Split each hkl chunk into N slices. This is needed to address the MPI alltoall memory problem'''
result_reflections = self.distribute_reflection_table() # the total reflection table, which this rank will receive after all slices of alltoall
list_of_sliced_hkl_chunks = [] # if self.hkl_chunks is [A,B,C...], this list will be [[A1,A2,...,An], [B1,B2,...,Bn], [C1,C2,...,Cn], ...], where n is the number of chunk slices
for i in range(len(self.hkl_chunks)):
hkl_chunk_slices = []
for chunk_slice in reflection_table_utils.get_next_reflection_table_slice(self.hkl_chunks[i], number_of_slices, self.distribute_reflection_table):
hkl_chunk_slices.append(chunk_slice)
list_of_sliced_hkl_chunks.append(hkl_chunk_slices)
self.logger.log("Ready for all-to-all...")
self.logger.log("Memory usage: %d MB"%get_memory_usage())
for j in range(number_of_slices):
hkl_chunks_for_alltoall = list()
for i in range(len(self.hkl_chunks)):
hkl_chunks_for_alltoall.append(list_of_sliced_hkl_chunks[i][j]) # [Aj,Bj,Cj...]
self.logger.log_step_time("ALL-TO-ALL")
self.logger.log("Executing MPI all-to-all...")
self.logger.log("Memory usage: %d MB"%get_memory_usage())
      received_hkl_chunks = self.mpi_helper.comm.alltoall(hkl_chunks_for_alltoall)
self.logger.log("After all-to-all received %d hkl chunks" %len(received_hkl_chunks))
self.logger.log_step_time("ALL-TO-ALL", True)
self.logger.log_step_time("CONSOLIDATE")
self.logger.log("Consolidating reflection tables...")
for chunk in received_hkl_chunks:
result_reflections.extend(chunk)
self.logger.log_step_time("CONSOLIDATE", True)
return result_reflections
if __name__ == '__main__':
from xfel.merging.application.worker import exercise_worker
exercise_worker(hkl_group)
| 46.177215 | 181 | 0.737664 |
4a218d78584c634b95a0f5c0c240b144253344b3 | 44,060 | py | Python | Cython/Compiler/FlowControl.py | joonro/cython | 4ebc647c2fa54179d25335b2dcf5d845e7fc9a79 | [
"Apache-2.0"
] | null | null | null | Cython/Compiler/FlowControl.py | joonro/cython | 4ebc647c2fa54179d25335b2dcf5d845e7fc9a79 | [
"Apache-2.0"
] | null | null | null | Cython/Compiler/FlowControl.py | joonro/cython | 4ebc647c2fa54179d25335b2dcf5d845e7fc9a79 | [
"Apache-2.0"
] | null | null | null |
import cython
cython.declare(PyrexTypes=object, ExprNodes=object, Nodes=object,
Builtin=object, InternalError=object,
error=object, warning=object,
py_object_type=object, unspecified_type=object,
object_expr=object, object_expr_not_none=object,
fake_rhs_expr=object, TypedExprNode=object)
import Builtin
import ExprNodes
import Nodes
from PyrexTypes import py_object_type, unspecified_type
import PyrexTypes
from Visitor import TreeVisitor, CythonTransform
from Errors import error, warning, InternalError
class TypedExprNode(ExprNodes.ExprNode):
# Used for declaring assignments of a specified type without a known entry.
def __init__(self, type, may_be_none=None, pos=None):
super(TypedExprNode, self).__init__(pos)
self.type = type
self._may_be_none = may_be_none
def may_be_none(self):
return self._may_be_none != False
object_expr = TypedExprNode(py_object_type, may_be_none=True)
object_expr_not_none = TypedExprNode(py_object_type, may_be_none=False)
# Fake rhs to silence "unused variable" warning
fake_rhs_expr = TypedExprNode(unspecified_type)
class ControlBlock(object):
"""Control flow graph node. Sequence of assignments and name references.
children set of children nodes
parents set of parent nodes
positions set of position markers
stats list of block statements
gen dict of assignments generated by this block
bounded set of entries that are definitely bounded in this block
Example:
a = 1
b = a + c # 'c' is already bounded or exception here
stats = [Assignment(a), NameReference(a), NameReference(c),
Assignment(b)]
gen = {Entry(a): Assignment(a), Entry(b): Assignment(b)}
bounded = set([Entry(a), Entry(c)])
"""
def __init__(self):
self.children = set()
self.parents = set()
self.positions = set()
self.stats = []
self.gen = {}
self.bounded = set()
self.i_input = 0
self.i_output = 0
self.i_gen = 0
self.i_kill = 0
self.i_state = 0
def empty(self):
return (not self.stats and not self.positions)
def detach(self):
"""Detach block from parents and children."""
for child in self.children:
child.parents.remove(self)
for parent in self.parents:
parent.children.remove(self)
self.parents.clear()
self.children.clear()
def add_child(self, block):
self.children.add(block)
block.parents.add(self)
class ExitBlock(ControlBlock):
"""Non-empty exit point block."""
def empty(self):
return False
class AssignmentList(object):
def __init__(self):
self.stats = []
class ControlFlow(object):
"""Control-flow graph.
entry_point ControlBlock entry point for this graph
exit_point ControlBlock normal exit point
block ControlBlock current block
blocks set children nodes
entries set tracked entries
loops list stack for loop descriptors
exceptions list stack for exception descriptors
"""
def __init__(self):
self.blocks = set()
self.entries = set()
self.loops = []
self.exceptions = []
self.entry_point = ControlBlock()
self.exit_point = ExitBlock()
self.blocks.add(self.exit_point)
self.block = self.entry_point
def newblock(self, parent=None):
"""Create floating block linked to `parent` if given.
NOTE: Block is NOT added to self.blocks
"""
block = ControlBlock()
self.blocks.add(block)
if parent:
parent.add_child(block)
return block
def nextblock(self, parent=None):
"""Create block children block linked to current or `parent` if given.
NOTE: Block is added to self.blocks
"""
block = ControlBlock()
self.blocks.add(block)
if parent:
parent.add_child(block)
elif self.block:
self.block.add_child(block)
self.block = block
return self.block
def is_tracked(self, entry):
if entry.is_anonymous:
return False
return (entry.is_local or entry.is_pyclass_attr or entry.is_arg or
entry.from_closure or entry.in_closure or
entry.error_on_uninitialized)
def is_statically_assigned(self, entry):
if (entry.is_local and entry.is_variable and
(entry.type.is_struct_or_union or
entry.type.is_array or
entry.type.is_cpp_class)):
# stack allocated structured variable => never uninitialised
return True
return False
def mark_position(self, node):
"""Mark position, will be used to draw graph nodes."""
if self.block:
self.block.positions.add(node.pos[:2])
def mark_assignment(self, lhs, rhs, entry):
if self.block and self.is_tracked(entry):
assignment = NameAssignment(lhs, rhs, entry)
self.block.stats.append(assignment)
self.block.gen[entry] = assignment
self.entries.add(entry)
def mark_argument(self, lhs, rhs, entry):
if self.block and self.is_tracked(entry):
assignment = Argument(lhs, rhs, entry)
self.block.stats.append(assignment)
self.block.gen[entry] = assignment
self.entries.add(entry)
def mark_deletion(self, node, entry):
if self.block and self.is_tracked(entry):
assignment = NameDeletion(node, entry)
self.block.stats.append(assignment)
self.block.gen[entry] = Uninitialized
self.entries.add(entry)
def mark_reference(self, node, entry):
if self.block and self.is_tracked(entry):
self.block.stats.append(NameReference(node, entry))
# Local variable is definitely bound after this reference
if not node.allow_null:
self.block.bounded.add(entry)
self.entries.add(entry)
def normalize(self):
"""Delete unreachable and orphan blocks."""
queue = set([self.entry_point])
visited = set()
while queue:
root = queue.pop()
visited.add(root)
for child in root.children:
if child not in visited:
queue.add(child)
unreachable = self.blocks - visited
for block in unreachable:
block.detach()
visited.remove(self.entry_point)
for block in visited:
if block.empty():
for parent in block.parents: # Re-parent
for child in block.children:
parent.add_child(child)
block.detach()
unreachable.add(block)
self.blocks -= unreachable
def initialize(self):
"""Set initial state, map assignments to bits."""
self.assmts = {}
bit = 1
for entry in self.entries:
assmts = AssignmentList()
assmts.mask = assmts.bit = bit
self.assmts[entry] = assmts
bit <<= 1
for block in self.blocks:
for stat in block.stats:
if isinstance(stat, NameAssignment):
stat.bit = bit
assmts = self.assmts[stat.entry]
assmts.stats.append(stat)
assmts.mask |= bit
bit <<= 1
for block in self.blocks:
for entry, stat in block.gen.items():
assmts = self.assmts[entry]
if stat is Uninitialized:
block.i_gen |= assmts.bit
else:
block.i_gen |= stat.bit
block.i_kill |= assmts.mask
block.i_output = block.i_gen
for entry in block.bounded:
block.i_kill |= self.assmts[entry].bit
for assmts in self.assmts.itervalues():
self.entry_point.i_gen |= assmts.bit
self.entry_point.i_output = self.entry_point.i_gen
def map_one(self, istate, entry):
ret = set()
assmts = self.assmts[entry]
if istate & assmts.bit:
if self.is_statically_assigned(entry):
ret.add(StaticAssignment(entry))
elif entry.from_closure:
ret.add(Unknown)
else:
ret.add(Uninitialized)
for assmt in assmts.stats:
if istate & assmt.bit:
ret.add(assmt)
return ret
def reaching_definitions(self):
"""Per-block reaching definitions analysis."""
dirty = True
while dirty:
dirty = False
for block in self.blocks:
i_input = 0
for parent in block.parents:
i_input |= parent.i_output
i_output = (i_input & ~block.i_kill) | block.i_gen
if i_output != block.i_output:
dirty = True
block.i_input = i_input
block.i_output = i_output
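# Small worked example of the fixed-point iteration above (illustrative values,
# not part of the original source). Suppose entry 'a' has bit 0b001 (its
# "uninitialized" bit) and two assignments with bits 0b010 and 0b100, so its
# AssignmentList mask is 0b111. A block containing only the second assignment
# has i_gen = 0b100 and i_kill = 0b111, so with i_input = 0b010 (the first
# assignment reaching it from a parent):
#     i_output = (i_input & ~i_kill) | i_gen = (0b010 & 0b000) | 0b100 = 0b100
# The loop keeps recomputing i_output for every block until nothing changes.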
class LoopDescr(object):
def __init__(self, next_block, loop_block):
self.next_block = next_block
self.loop_block = loop_block
self.exceptions = []
class ExceptionDescr(object):
"""Exception handling helper.
entry_point ControlBlock Exception handling entry point
finally_enter ControlBlock Normal finally clause entry point
finally_exit ControlBlock Normal finally clause exit point
"""
def __init__(self, entry_point, finally_enter=None, finally_exit=None):
self.entry_point = entry_point
self.finally_enter = finally_enter
self.finally_exit = finally_exit
class NameAssignment(object):
def __init__(self, lhs, rhs, entry):
if lhs.cf_state is None:
lhs.cf_state = set()
self.lhs = lhs
self.rhs = rhs
self.entry = entry
self.pos = lhs.pos
self.refs = set()
self.is_arg = False
self.is_deletion = False
def __repr__(self):
return '%s(entry=%r)' % (self.__class__.__name__, self.entry)
def infer_type(self, scope):
return self.rhs.infer_type(scope)
def type_dependencies(self, scope):
return self.rhs.type_dependencies(scope)
class StaticAssignment(NameAssignment):
"""Initialised at declaration time, e.g. stack allocation."""
def __init__(self, entry):
if not entry.type.is_pyobject:
may_be_none = False
else:
may_be_none = None # unknown
lhs = TypedExprNode(
entry.type, may_be_none=may_be_none, pos=entry.pos)
super(StaticAssignment, self).__init__(lhs, lhs, entry)
def infer_type(self, scope):
return self.entry.type
def type_dependencies(self, scope):
return []
class Argument(NameAssignment):
def __init__(self, lhs, rhs, entry):
NameAssignment.__init__(self, lhs, rhs, entry)
self.is_arg = True
class NameDeletion(NameAssignment):
def __init__(self, lhs, entry):
NameAssignment.__init__(self, lhs, lhs, entry)
self.is_deletion = True
def infer_type(self, scope):
inferred_type = self.rhs.infer_type(scope)
if (not inferred_type.is_pyobject and
inferred_type.can_coerce_to_pyobject(scope)):
return py_object_type
return inferred_type
class Uninitialized(object):
"""Definitely not initialised yet."""
class Unknown(object):
"""Coming from outer closure, might be initialised or not."""
class NameReference(object):
def __init__(self, node, entry):
if node.cf_state is None:
node.cf_state = set()
self.node = node
self.entry = entry
self.pos = node.pos
def __repr__(self):
return '%s(entry=%r)' % (self.__class__.__name__, self.entry)
class ControlFlowState(list):
# Keeps track of Node's entry assignments
#
# cf_is_null [boolean] It is uninitialized
# cf_maybe_null [boolean] May be uninitialized
# is_single [boolean] Has only one assignment at this point
cf_maybe_null = False
cf_is_null = False
is_single = False
def __init__(self, state):
if Uninitialized in state:
state.discard(Uninitialized)
self.cf_maybe_null = True
if not state:
self.cf_is_null = True
elif Unknown in state:
state.discard(Unknown)
self.cf_maybe_null = True
else:
if len(state) == 1:
self.is_single = True
super(ControlFlowState, self).__init__(state)
def one(self):
return self[0]
class GVContext(object):
"""Graphviz subgraph object."""
def __init__(self):
self.blockids = {}
self.nextid = 0
self.children = []
self.sources = {}
def add(self, child):
self.children.append(child)
def nodeid(self, block):
if block not in self.blockids:
self.blockids[block] = 'block%d' % self.nextid
self.nextid += 1
return self.blockids[block]
def extract_sources(self, block):
if not block.positions:
return ''
start = min(block.positions)
stop = max(block.positions)
srcdescr = start[0]
if not srcdescr in self.sources:
self.sources[srcdescr] = list(srcdescr.get_lines())
lines = self.sources[srcdescr]
return '\\n'.join([l.strip() for l in lines[start[1] - 1:stop[1]]])
def render(self, fp, name, annotate_defs=False):
"""Render graphviz dot graph"""
fp.write('digraph %s {\n' % name)
fp.write(' node [shape=box];\n')
for child in self.children:
child.render(fp, self, annotate_defs)
fp.write('}\n')
def escape(self, text):
return text.replace('"', '\\"').replace('\n', '\\n')
class GV(object):
"""Graphviz DOT renderer."""
def __init__(self, name, flow):
self.name = name
self.flow = flow
def render(self, fp, ctx, annotate_defs=False):
fp.write(' subgraph %s {\n' % self.name)
for block in self.flow.blocks:
label = ctx.extract_sources(block)
if annotate_defs:
for stat in block.stats:
if isinstance(stat, NameAssignment):
label += '\n %s [definition]' % stat.entry.name
elif isinstance(stat, NameReference):
if stat.entry:
label += '\n %s [reference]' % stat.entry.name
if not label:
label = 'empty'
pid = ctx.nodeid(block)
fp.write(' %s [label="%s"];\n' % (pid, ctx.escape(label)))
for block in self.flow.blocks:
pid = ctx.nodeid(block)
for child in block.children:
fp.write(' %s -> %s;\n' % (pid, ctx.nodeid(child)))
fp.write(' }\n')
class MessageCollection(object):
"""Collect error/warnings messages first then sort"""
def __init__(self):
self.messages = []
def error(self, pos, message):
self.messages.append((pos, True, message))
def warning(self, pos, message):
self.messages.append((pos, False, message))
def report(self):
self.messages.sort()
for pos, is_error, message in self.messages:
if is_error:
error(pos, message)
else:
warning(pos, message, 2)
def check_definitions(flow, compiler_directives):
flow.initialize()
flow.reaching_definitions()
# Track down state
assignments = set()
# Node to entry map
references = {}
assmt_nodes = set()
for block in flow.blocks:
i_state = block.i_input
for stat in block.stats:
i_assmts = flow.assmts[stat.entry]
state = flow.map_one(i_state, stat.entry)
if isinstance(stat, NameAssignment):
stat.lhs.cf_state.update(state)
assmt_nodes.add(stat.lhs)
i_state = i_state & ~i_assmts.mask
if stat.is_deletion:
i_state |= i_assmts.bit
else:
i_state |= stat.bit
assignments.add(stat)
if stat.rhs is not fake_rhs_expr:
stat.entry.cf_assignments.append(stat)
elif isinstance(stat, NameReference):
references[stat.node] = stat.entry
stat.entry.cf_references.append(stat)
stat.node.cf_state.update(state)
if not stat.node.allow_null:
i_state &= ~i_assmts.bit
# after successful read, the state is known to be initialised
state.discard(Uninitialized)
state.discard(Unknown)
for assmt in state:
assmt.refs.add(stat)
# Check variable usage
warn_maybe_uninitialized = compiler_directives['warn.maybe_uninitialized']
warn_unused_result = compiler_directives['warn.unused_result']
warn_unused = compiler_directives['warn.unused']
warn_unused_arg = compiler_directives['warn.unused_arg']
messages = MessageCollection()
# assignment hints
for node in assmt_nodes:
if Uninitialized in node.cf_state:
node.cf_maybe_null = True
if len(node.cf_state) == 1:
node.cf_is_null = True
else:
node.cf_is_null = False
elif Unknown in node.cf_state:
node.cf_maybe_null = True
else:
node.cf_is_null = False
node.cf_maybe_null = False
# Find uninitialized references and cf-hints
for node, entry in references.iteritems():
if Uninitialized in node.cf_state:
node.cf_maybe_null = True
if not entry.from_closure and len(node.cf_state) == 1:
node.cf_is_null = True
if node.allow_null or entry.from_closure or entry.is_pyclass_attr:
pass # Can be uninitialized here
elif node.cf_is_null:
if (entry.type.is_pyobject or entry.type.is_unspecified or
entry.error_on_uninitialized):
messages.error(
node.pos,
"local variable '%s' referenced before assignment"
% entry.name)
else:
messages.warning(
node.pos,
"local variable '%s' referenced before assignment"
% entry.name)
elif warn_maybe_uninitialized:
messages.warning(
node.pos,
"local variable '%s' might be referenced before assignment"
% entry.name)
elif Unknown in node.cf_state:
# TODO: better cross-closure analysis to know when inner functions
# are being called before a variable is being set, and when
# a variable is known to be set before even defining the
# inner function, etc.
node.cf_maybe_null = True
else:
node.cf_is_null = False
node.cf_maybe_null = False
# Unused result
for assmt in assignments:
if (not assmt.refs and not assmt.entry.is_pyclass_attr
and not assmt.entry.in_closure):
if assmt.entry.cf_references and warn_unused_result:
if assmt.is_arg:
messages.warning(assmt.pos, "Unused argument value '%s'" %
assmt.entry.name)
else:
messages.warning(assmt.pos, "Unused result in '%s'" %
assmt.entry.name)
assmt.lhs.cf_used = False
# Unused entries
for entry in flow.entries:
if (not entry.cf_references
and not entry.is_pyclass_attr):
if entry.name != '_':
# '_' is often used for unused variables, e.g. in loops
if entry.is_arg:
if warn_unused_arg:
messages.warning(entry.pos, "Unused argument '%s'" %
entry.name)
else:
if warn_unused:
messages.warning(entry.pos, "Unused entry '%s'" %
entry.name)
entry.cf_used = False
messages.report()
for node in assmt_nodes:
node.cf_state = ControlFlowState(node.cf_state)
for node in references:
node.cf_state = ControlFlowState(node.cf_state)
class AssignmentCollector(TreeVisitor):
def __init__(self):
super(AssignmentCollector, self).__init__()
self.assignments = []
def visit_Node(self):
self._visitchildren(self, None)
def visit_SingleAssignmentNode(self, node):
self.assignments.append((node.lhs, node.rhs))
def visit_CascadedAssignmentNode(self, node):
for lhs in node.lhs_list:
self.assignments.append((lhs, node.rhs))
class ControlFlowAnalysis(CythonTransform):
def visit_ModuleNode(self, node):
self.gv_ctx = GVContext()
# Set of NameNode reductions
self.reductions = set()
self.in_inplace_assignment = False
self.env_stack = []
self.env = node.scope
self.stack = []
self.flow = ControlFlow()
self.visitchildren(node)
check_definitions(self.flow, self.current_directives)
dot_output = self.current_directives['control_flow.dot_output']
if dot_output:
annotate_defs = self.current_directives['control_flow.dot_annotate_defs']
fp = open(dot_output, 'wt')
try:
self.gv_ctx.render(fp, 'module', annotate_defs=annotate_defs)
finally:
fp.close()
return node
def visit_FuncDefNode(self, node):
for arg in node.args:
if arg.default:
self.visitchildren(arg)
self.visitchildren(node, ('decorators',))
self.env_stack.append(self.env)
self.env = node.local_scope
self.stack.append(self.flow)
self.flow = ControlFlow()
# Collect all entries
for entry in node.local_scope.entries.values():
if self.flow.is_tracked(entry):
self.flow.entries.add(entry)
self.mark_position(node)
# Function body block
self.flow.nextblock()
for arg in node.args:
self._visit(arg)
if node.star_arg:
self.flow.mark_argument(node.star_arg,
TypedExprNode(Builtin.tuple_type,
may_be_none=False),
node.star_arg.entry)
if node.starstar_arg:
self.flow.mark_argument(node.starstar_arg,
TypedExprNode(Builtin.dict_type,
may_be_none=False),
node.starstar_arg.entry)
self._visit(node.body)
# Workaround for generators
if node.is_generator:
self._visit(node.gbody.body)
# Exit point
if self.flow.block:
self.flow.block.add_child(self.flow.exit_point)
# Cleanup graph
self.flow.normalize()
check_definitions(self.flow, self.current_directives)
self.flow.blocks.add(self.flow.entry_point)
self.gv_ctx.add(GV(node.local_scope.name, self.flow))
self.flow = self.stack.pop()
self.env = self.env_stack.pop()
return node
def visit_DefNode(self, node):
node.used = True
return self.visit_FuncDefNode(node)
def visit_GeneratorBodyDefNode(self, node):
return node
def visit_CTypeDefNode(self, node):
return node
def mark_assignment(self, lhs, rhs=None):
if not self.flow.block:
return
if self.flow.exceptions:
exc_descr = self.flow.exceptions[-1]
self.flow.block.add_child(exc_descr.entry_point)
self.flow.nextblock()
if not rhs:
rhs = object_expr
if lhs.is_name:
if lhs.entry is not None:
entry = lhs.entry
else:
entry = self.env.lookup(lhs.name)
if entry is None: # TODO: This shouldn't happen...
return
self.flow.mark_assignment(lhs, rhs, entry)
elif isinstance(lhs, ExprNodes.SequenceNode):
for arg in lhs.args:
self.mark_assignment(arg)
else:
self._visit(lhs)
if self.flow.exceptions:
exc_descr = self.flow.exceptions[-1]
self.flow.block.add_child(exc_descr.entry_point)
self.flow.nextblock()
def mark_position(self, node):
"""Mark position if DOT output is enabled."""
if self.current_directives['control_flow.dot_output']:
self.flow.mark_position(node)
def visit_FromImportStatNode(self, node):
for name, target in node.items:
if name != "*":
self.mark_assignment(target)
self.visitchildren(node)
return node
def visit_AssignmentNode(self, node):
raise InternalError, "Unhandled assignment node"
def visit_SingleAssignmentNode(self, node):
self._visit(node.rhs)
self.mark_assignment(node.lhs, node.rhs)
return node
def visit_CascadedAssignmentNode(self, node):
self._visit(node.rhs)
for lhs in node.lhs_list:
self.mark_assignment(lhs, node.rhs)
return node
def visit_ParallelAssignmentNode(self, node):
collector = AssignmentCollector()
collector.visitchildren(node)
for lhs, rhs in collector.assignments:
self._visit(rhs)
for lhs, rhs in collector.assignments:
self.mark_assignment(lhs, rhs)
return node
def visit_InPlaceAssignmentNode(self, node):
self.in_inplace_assignment = True
self.visitchildren(node)
self.in_inplace_assignment = False
self.mark_assignment(node.lhs, node.create_binop_node())
return node
def visit_DelStatNode(self, node):
for arg in node.args:
if arg.is_name:
entry = arg.entry or self.env.lookup(arg.name)
if entry.in_closure or entry.from_closure:
error(arg.pos,
"can not delete variable '%s' "
"referenced in nested scope" % entry.name)
# Mark reference
self._visit(arg)
self.flow.mark_deletion(arg, entry)
return node
def visit_CArgDeclNode(self, node):
entry = self.env.lookup(node.name)
if entry:
may_be_none = not node.not_none
self.flow.mark_argument(
node, TypedExprNode(entry.type, may_be_none), entry)
return node
def visit_NameNode(self, node):
if self.flow.block:
entry = node.entry or self.env.lookup(node.name)
if entry:
self.flow.mark_reference(node, entry)
if entry in self.reductions and not self.in_inplace_assignment:
error(node.pos,
"Cannot read reduction variable in loop body")
return node
def visit_StatListNode(self, node):
if self.flow.block:
for stat in node.stats:
self._visit(stat)
if not self.flow.block:
stat.is_terminator = True
break
return node
def visit_Node(self, node):
self.visitchildren(node)
self.mark_position(node)
return node
def visit_IfStatNode(self, node):
next_block = self.flow.newblock()
parent = self.flow.block
# If clauses
for clause in node.if_clauses:
parent = self.flow.nextblock(parent)
self._visit(clause.condition)
self.flow.nextblock()
self._visit(clause.body)
if self.flow.block:
self.flow.block.add_child(next_block)
# Else clause
if node.else_clause:
self.flow.nextblock(parent=parent)
self._visit(node.else_clause)
if self.flow.block:
self.flow.block.add_child(next_block)
else:
parent.add_child(next_block)
if next_block.parents:
self.flow.block = next_block
else:
self.flow.block = None
return node
def visit_WhileStatNode(self, node):
condition_block = self.flow.nextblock()
next_block = self.flow.newblock()
# Condition block
self.flow.loops.append(LoopDescr(next_block, condition_block))
self._visit(node.condition)
# Body block
self.flow.nextblock()
self._visit(node.body)
self.flow.loops.pop()
# Loop it
if self.flow.block:
self.flow.block.add_child(condition_block)
self.flow.block.add_child(next_block)
# Else clause
if node.else_clause:
self.flow.nextblock(parent=condition_block)
self._visit(node.else_clause)
if self.flow.block:
self.flow.block.add_child(next_block)
else:
condition_block.add_child(next_block)
if next_block.parents:
self.flow.block = next_block
else:
self.flow.block = None
return node
def mark_forloop_target(self, node):
# TODO: Remove redundancy with range optimization...
is_special = False
sequence = node.iterator.sequence
target = node.target
if isinstance(sequence, ExprNodes.SimpleCallNode):
function = sequence.function
if sequence.self is None and function.is_name:
entry = self.env.lookup(function.name)
if not entry or entry.is_builtin:
if function.name == 'reversed' and len(sequence.args) == 1:
sequence = sequence.args[0]
elif function.name == 'enumerate' and len(sequence.args) == 1:
if target.is_sequence_constructor and len(target.args) == 2:
iterator = sequence.args[0]
if iterator.is_name:
iterator_type = iterator.infer_type(self.env)
if iterator_type.is_builtin_type:
# assume that builtin types have a length within Py_ssize_t
self.mark_assignment(
target.args[0],
ExprNodes.IntNode(target.pos, value='PY_SSIZE_T_MAX',
type=PyrexTypes.c_py_ssize_t_type))
target = target.args[1]
sequence = sequence.args[0]
if isinstance(sequence, ExprNodes.SimpleCallNode):
function = sequence.function
if sequence.self is None and function.is_name:
entry = self.env.lookup(function.name)
if not entry or entry.is_builtin:
if function.name in ('range', 'xrange'):
is_special = True
for arg in sequence.args[:2]:
self.mark_assignment(target, arg)
if len(sequence.args) > 2:
self.mark_assignment(
target,
ExprNodes.binop_node(node.pos,
'+',
sequence.args[0],
sequence.args[2]))
if not is_special:
# A for-loop basically translates to subsequent calls to
# __getitem__(), so using an IndexNode here allows us to
# naturally infer the base type of pointers, C arrays,
# Python strings, etc., while correctly falling back to an
# object type when the base type cannot be handled.
self.mark_assignment(target, node.item)
def visit_ForInStatNode(self, node):
condition_block = self.flow.nextblock()
next_block = self.flow.newblock()
# Condition with iterator
self.flow.loops.append(LoopDescr(next_block, condition_block))
self._visit(node.iterator)
# Target assignment
self.flow.nextblock()
if isinstance(node, Nodes.ForInStatNode):
self.mark_forloop_target(node)
else: # Parallel
self.mark_assignment(node.target)
# Body block
if isinstance(node, Nodes.ParallelRangeNode):
# In case of an invalid
self._delete_privates(node, exclude=node.target.entry)
self.flow.nextblock()
self._visit(node.body)
self.flow.loops.pop()
# Loop it
if self.flow.block:
self.flow.block.add_child(condition_block)
# Else clause
if node.else_clause:
self.flow.nextblock(parent=condition_block)
self._visit(node.else_clause)
if self.flow.block:
self.flow.block.add_child(next_block)
else:
condition_block.add_child(next_block)
if next_block.parents:
self.flow.block = next_block
else:
self.flow.block = None
return node
def _delete_privates(self, node, exclude=None):
for private_node in node.assigned_nodes:
if not exclude or private_node.entry is not exclude:
self.flow.mark_deletion(private_node, private_node.entry)
def visit_ParallelRangeNode(self, node):
reductions = self.reductions
# if node.target is None or not a NameNode, an error will have
# been previously issued
if hasattr(node.target, 'entry'):
self.reductions = set(reductions)
for private_node in node.assigned_nodes:
private_node.entry.error_on_uninitialized = True
pos, reduction = node.assignments[private_node.entry]
if reduction:
self.reductions.add(private_node.entry)
node = self.visit_ForInStatNode(node)
self.reductions = reductions
return node
def visit_ParallelWithBlockNode(self, node):
for private_node in node.assigned_nodes:
private_node.entry.error_on_uninitialized = True
self._delete_privates(node)
self.visitchildren(node)
self._delete_privates(node)
return node
def visit_ForFromStatNode(self, node):
condition_block = self.flow.nextblock()
next_block = self.flow.newblock()
# Condition with iterator
self.flow.loops.append(LoopDescr(next_block, condition_block))
self._visit(node.bound1)
self._visit(node.bound2)
if node.step is not None:
self._visit(node.step)
# Target assignment
self.flow.nextblock()
self.mark_assignment(node.target, node.bound1)
if node.step is not None:
self.mark_assignment(node.target,
ExprNodes.binop_node(node.pos, '+',
node.bound1, node.step))
# Body block
self.flow.nextblock()
self._visit(node.body)
self.flow.loops.pop()
# Loop it
if self.flow.block:
self.flow.block.add_child(condition_block)
# Else clause
if node.else_clause:
self.flow.nextblock(parent=condition_block)
self._visit(node.else_clause)
if self.flow.block:
self.flow.block.add_child(next_block)
else:
condition_block.add_child(next_block)
if next_block.parents:
self.flow.block = next_block
else:
self.flow.block = None
return node
def visit_LoopNode(self, node):
raise InternalError, "Generic loops are not supported"
def visit_WithTargetAssignmentStatNode(self, node):
self.mark_assignment(node.lhs, node.rhs)
return node
def visit_WithStatNode(self, node):
self._visit(node.manager)
self._visit(node.enter_call)
self._visit(node.body)
return node
def visit_TryExceptStatNode(self, node):
# After exception handling
next_block = self.flow.newblock()
# Body block
self.flow.newblock()
# Exception entry point
entry_point = self.flow.newblock()
self.flow.exceptions.append(ExceptionDescr(entry_point))
self.flow.nextblock()
## XXX: links to exception handling point should be added by
## XXX: children nodes
self.flow.block.add_child(entry_point)
self._visit(node.body)
self.flow.exceptions.pop()
# After exception
if self.flow.block:
if node.else_clause:
self.flow.nextblock()
self._visit(node.else_clause)
if self.flow.block:
self.flow.block.add_child(next_block)
for clause in node.except_clauses:
self.flow.block = entry_point
if clause.pattern:
for pattern in clause.pattern:
self._visit(pattern)
else:
# TODO: handle * pattern
pass
entry_point = self.flow.newblock(parent=self.flow.block)
self.flow.nextblock()
if clause.target:
self.mark_assignment(clause.target)
self._visit(clause.body)
if self.flow.block:
self.flow.block.add_child(next_block)
if self.flow.exceptions:
entry_point.add_child(self.flow.exceptions[-1].entry_point)
if next_block.parents:
self.flow.block = next_block
else:
self.flow.block = None
return node
def visit_TryFinallyStatNode(self, node):
body_block = self.flow.nextblock()
# Exception entry point
entry_point = self.flow.newblock()
self.flow.block = entry_point
self._visit(node.finally_clause)
if self.flow.block and self.flow.exceptions:
self.flow.block.add_child(self.flow.exceptions[-1].entry_point)
# Normal execution
finally_enter = self.flow.newblock()
self.flow.block = finally_enter
self._visit(node.finally_clause)
finally_exit = self.flow.block
descr = ExceptionDescr(entry_point, finally_enter, finally_exit)
self.flow.exceptions.append(descr)
if self.flow.loops:
self.flow.loops[-1].exceptions.append(descr)
self.flow.block = body_block
## XXX: Is it still required
body_block.add_child(entry_point)
self._visit(node.body)
self.flow.exceptions.pop()
if self.flow.loops:
self.flow.loops[-1].exceptions.pop()
if self.flow.block:
self.flow.block.add_child(finally_enter)
if finally_exit:
self.flow.block = self.flow.nextblock(parent=finally_exit)
else:
self.flow.block = None
return node
def visit_RaiseStatNode(self, node):
self.mark_position(node)
self.visitchildren(node)
if self.flow.exceptions:
self.flow.block.add_child(self.flow.exceptions[-1].entry_point)
self.flow.block = None
return node
def visit_ReraiseStatNode(self, node):
self.mark_position(node)
if self.flow.exceptions:
self.flow.block.add_child(self.flow.exceptions[-1].entry_point)
self.flow.block = None
return node
def visit_ReturnStatNode(self, node):
self.mark_position(node)
self.visitchildren(node)
for exception in self.flow.exceptions[::-1]:
if exception.finally_enter:
self.flow.block.add_child(exception.finally_enter)
if exception.finally_exit:
exception.finally_exit.add_child(self.flow.exit_point)
break
else:
if self.flow.block:
self.flow.block.add_child(self.flow.exit_point)
self.flow.block = None
return node
def visit_BreakStatNode(self, node):
if not self.flow.loops:
#error(node.pos, "break statement not inside loop")
return node
loop = self.flow.loops[-1]
self.mark_position(node)
for exception in loop.exceptions[::-1]:
if exception.finally_enter:
self.flow.block.add_child(exception.finally_enter)
if exception.finally_exit:
exception.finally_exit.add_child(loop.next_block)
break
else:
self.flow.block.add_child(loop.next_block)
self.flow.block = None
return node
def visit_ContinueStatNode(self, node):
if not self.flow.loops:
#error(node.pos, "continue statement not inside loop")
return node
loop = self.flow.loops[-1]
self.mark_position(node)
for exception in loop.exceptions[::-1]:
if exception.finally_enter:
self.flow.block.add_child(exception.finally_enter)
if exception.finally_exit:
exception.finally_exit.add_child(loop.loop_block)
break
else:
self.flow.block.add_child(loop.loop_block)
self.flow.block = None
return node
def visit_ComprehensionNode(self, node):
if node.expr_scope:
self.env_stack.append(self.env)
self.env = node.expr_scope
# Skip append node here
self._visit(node.target)
self._visit(node.loop)
if node.expr_scope:
self.env = self.env_stack.pop()
return node
def visit_ScopedExprNode(self, node):
if node.expr_scope:
self.env_stack.append(self.env)
self.env = node.expr_scope
self.visitchildren(node)
if node.expr_scope:
self.env = self.env_stack.pop()
return node
def visit_PyClassDefNode(self, node):
self.visitchildren(node, ('dict', 'metaclass',
'mkw', 'bases', 'class_result'))
self.flow.mark_assignment(node.target, object_expr_not_none,
self.env.lookup(node.name))
self.env_stack.append(self.env)
self.env = node.scope
self.flow.nextblock()
self.visitchildren(node, ('body',))
self.flow.nextblock()
self.env = self.env_stack.pop()
return node
def visit_AmpersandNode(self, node):
if node.operand.is_name:
# Fake assignment to silence warning
self.mark_assignment(node.operand, fake_rhs_expr)
self.visitchildren(node)
return node
| 34.341387 | 95 | 0.57542 |
4a218db033f4d8d09c09497bf4f9015a93e6eade | 23,970 | py | Python | bhive/snapshot.py | TheCrazyGM/bhive | 1494e90a99123ecfc5efbd927258f9ba59443e2e | [
"MIT"
] | 2 | 2020-03-21T23:50:22.000Z | 2020-03-25T19:10:48.000Z | bhive/snapshot.py | TheCrazyGM/bhive | 1494e90a99123ecfc5efbd927258f9ba59443e2e | [
"MIT"
] | null | null | null | bhive/snapshot.py | TheCrazyGM/bhive | 1494e90a99123ecfc5efbd927258f9ba59443e2e | [
"MIT"
] | 1 | 2020-03-21T23:50:25.000Z | 2020-03-21T23:50:25.000Z | # This Python file uses the following encoding: utf-8
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from builtins import bytes, int, str
import pytz
import json
import re
from datetime import datetime, timedelta, date, time
import math
import random
import logging
from bisect import bisect_left
from bhive.utils import formatTimeString, formatTimedelta, remove_from_dict, reputation_to_score, addTzInfo, parse_time
from bhive.amount import Amount
from bhive.account import Account
from bhive.vote import Vote
from bhive.instance import shared_hive_instance
from bhive.constants import HIVE_VOTE_REGENERATION_SECONDS, HIVE_1_PERCENT, HIVE_100_PERCENT
log = logging.getLogger(__name__)
class AccountSnapshot(list):
""" This class allows to easily access Account history
:param str account_name: Name of the account
:param Hive hive_instance: Hive
instance
"""
def __init__(self, account, account_history=[], hive_instance=None):
self.hive = hive_instance or shared_hive_instance()
self.account = Account(account, hive_instance=self.hive)
self.reset()
super(AccountSnapshot, self).__init__(account_history)
def reset(self):
""" Resets the arrays not the stored account history
"""
self.own_vests = [Amount(0, self.hive.vests_symbol, hive_instance=self.hive)]
self.own_steem = [Amount(0, self.hive.steem_symbol, hive_instance=self.hive)]
self.own_sbd = [Amount(0, self.hive.sbd_symbol, hive_instance=self.hive)]
self.delegated_vests_in = [{}]
self.delegated_vests_out = [{}]
self.timestamps = [addTzInfo(datetime(1970, 1, 1, 0, 0, 0, 0))]
import bhivebase.operationids
self.ops_statistics = bhivebase.operationids.operations.copy()
for key in self.ops_statistics:
self.ops_statistics[key] = 0
self.reward_timestamps = []
self.author_rewards = []
self.curation_rewards = []
self.curation_per_1000_HP_timestamp = []
self.curation_per_1000_HP = []
self.out_vote_timestamp = []
self.out_vote_weight = []
self.in_vote_timestamp = []
self.in_vote_weight = []
self.in_vote_rep = []
self.in_vote_rshares = []
self.vp = []
self.vp_timestamp = []
self.rep = []
self.rep_timestamp = []
def search(self, search_str, start=None, stop=None, use_block_num=True):
""" Returns ops in the given range"""
ops = []
if start is not None:
start = addTzInfo(start)
if stop is not None:
stop = addTzInfo(stop)
for op in self:
if use_block_num and start is not None and isinstance(start, int):
if op["block"] < start:
continue
elif not use_block_num and start is not None and isinstance(start, int):
if op["index"] < start:
continue
elif start is not None and isinstance(start, (datetime, date, time)):
if start > formatTimeString(op["timestamp"]):
continue
if use_block_num and stop is not None and isinstance(stop, int):
if op["block"] > stop:
continue
elif not use_block_num and stop is not None and isinstance(stop, int):
if op["index"] > stop:
continue
elif stop is not None and isinstance(stop, (datetime, date, time)):
if stop < formatTimeString(op["timestamp"]):
continue
op_string = json.dumps(list(op.values()))
if re.search(search_str, op_string):
ops.append(op)
return ops
def get_ops(self, start=None, stop=None, use_block_num=True, only_ops=[], exclude_ops=[]):
""" Returns ops in the given range"""
if start is not None:
start = addTzInfo(start)
if stop is not None:
stop = addTzInfo(stop)
for op in self:
if use_block_num and start is not None and isinstance(start, int):
if op["block"] < start:
continue
elif not use_block_num and start is not None and isinstance(start, int):
if op["index"] < start:
continue
elif start is not None and isinstance(start, (datetime, date, time)):
if start > formatTimeString(op["timestamp"]):
continue
if use_block_num and stop is not None and isinstance(stop, int):
if op["block"] > stop:
continue
elif not use_block_num and stop is not None and isinstance(stop, int):
if op["index"] > stop:
continue
elif stop is not None and isinstance(stop, (datetime, date, time)):
if stop < formatTimeString(op["timestamp"]):
continue
if exclude_ops and op["type"] in exclude_ops:
continue
if not only_ops or op["type"] in only_ops:
yield op
def get_data(self, timestamp=None, index=0):
""" Returns snapshot for given timestamp"""
if timestamp is None:
timestamp = datetime.utcnow()
timestamp = addTzInfo(timestamp)
# Find rightmost value less than x
i = bisect_left(self.timestamps, timestamp)
if i:
index = i - 1
else:
return {}
ts = self.timestamps[index]
own = self.own_vests[index]
din = self.delegated_vests_in[index]
dout = self.delegated_vests_out[index]
hive = self.own_steem[index]
hbd = self.own_sbd[index]
sum_in = sum([din[key].amount for key in din])
sum_out = sum([dout[key].amount for key in dout])
hp_in = self.hive.vests_to_hp(sum_in, timestamp=ts)
hp_out = self.hive.vests_to_hp(sum_out, timestamp=ts)
hp_own = self.hive.vests_to_hp(own, timestamp=ts)
hp_eff = hp_own + hp_in - hp_out
return {"timestamp": ts, "vests": own, "delegated_vests_in": din, "delegated_vests_out": dout,
"hp_own": hp_own, "hp_eff": hp_eff, "hive": hive, "hbd": hbd, "index": index}
def get_account_history(self, start=None, stop=None, use_block_num=True):
""" Uses account history to fetch all related ops
:param start: start number/date of transactions to
return (*optional*)
:type start: int, datetime
:param stop: stop number/date of transactions to
return (*optional*)
:type stop: int, datetime
:param bool use_block_num: if true, start and stop are block numbers,
otherwise virtual OP count numbers.
"""
super(AccountSnapshot, self).__init__(
[
h
for h in self.account.history(start=start, stop=stop, use_block_num=use_block_num)
]
)
def update_rewards(self, timestamp, curation_reward, author_vests, author_steem, author_sbd):
self.reward_timestamps.append(timestamp)
self.curation_rewards.append(curation_reward)
self.author_rewards.append({"vests": author_vests, "hive": author_steem, "hbd": author_sbd})
def update_out_vote(self, timestamp, weight):
self.out_vote_timestamp.append(timestamp)
self.out_vote_weight.append(weight)
def update_in_vote(self, timestamp, weight, op):
v = Vote(op)
try:
v.refresh()
self.in_vote_timestamp.append(timestamp)
self.in_vote_weight.append(weight)
self.in_vote_rep.append(int(v["reputation"]))
self.in_vote_rshares.append(int(v["rshares"]))
        except Exception:
            print("Could not find vote: %s" % v)
return
def update(self, timestamp, own, delegated_in=None, delegated_out=None, hive=0, hbd=0):
""" Updates the internal state arrays
:param datetime timestamp: datetime of the update
:param own: vests
:type own: amount.Amount, float
:param dict delegated_in: Incoming delegation
:param dict delegated_out: Outgoing delegation
:param hive: hive
:type hive: amount.Amount, float
:param hbd: hbd
:type hbd: amount.Amount, float
"""
self.timestamps.append(timestamp - timedelta(seconds=1))
self.own_vests.append(self.own_vests[-1])
self.own_steem.append(self.own_steem[-1])
self.own_sbd.append(self.own_sbd[-1])
self.delegated_vests_in.append(self.delegated_vests_in[-1])
self.delegated_vests_out.append(self.delegated_vests_out[-1])
self.timestamps.append(timestamp)
self.own_vests.append(self.own_vests[-1] + own)
self.own_steem.append(self.own_steem[-1] + hive)
self.own_sbd.append(self.own_sbd[-1] + hbd)
new_deleg = dict(self.delegated_vests_in[-1])
if delegated_in is not None and delegated_in:
if delegated_in['amount'] == 0:
del new_deleg[delegated_in['account']]
else:
new_deleg[delegated_in['account']] = delegated_in['amount']
self.delegated_vests_in.append(new_deleg)
new_deleg = dict(self.delegated_vests_out[-1])
if delegated_out is not None and delegated_out:
if delegated_out['account'] is None:
# return_vesting_delegation
for delegatee in new_deleg:
if new_deleg[delegatee]['amount'] == delegated_out['amount']:
del new_deleg[delegatee]
break
elif delegated_out['amount'] != 0:
# new or updated non-zero delegation
new_deleg[delegated_out['account']] = delegated_out['amount']
# skip undelegations here, wait for 'return_vesting_delegation'
# del new_deleg[delegated_out['account']]
self.delegated_vests_out.append(new_deleg)
def build(self, only_ops=[], exclude_ops=[], enable_rewards=False, enable_out_votes=False, enable_in_votes=False):
""" Builds the account history based on all account operations
:param array only_ops: Limit generator by these
operations (*optional*)
:param array exclude_ops: Exclude these operations from
generator (*optional*)
"""
if len(self.timestamps) > 0:
start_timestamp = self.timestamps[-1]
else:
start_timestamp = None
for op in sorted(self, key=lambda k: k['timestamp']):
ts = parse_time(op['timestamp'])
if start_timestamp is not None and start_timestamp > ts:
continue
# print(op)
if op['type'] in exclude_ops:
continue
if len(only_ops) > 0 and op['type'] not in only_ops:
continue
self.ops_statistics[op['type']] += 1
self.parse_op(op, only_ops=only_ops, enable_rewards=enable_rewards, enable_out_votes=enable_out_votes, enable_in_votes=enable_in_votes)
def parse_op(self, op, only_ops=[], enable_rewards=False, enable_out_votes=False, enable_in_votes=False):
""" Parse account history operation"""
ts = parse_time(op['timestamp'])
if op['type'] == "account_create":
fee_steem = Amount(op['fee'], hive_instance=self.hive).amount
fee_vests = self.hive.hp_to_vests(Amount(op['fee'], hive_instance=self.hive).amount, timestamp=ts)
# print(fee_vests)
if op['new_account_name'] == self.account["name"]:
self.update(ts, fee_vests, 0, 0)
return
if op['creator'] == self.account["name"]:
self.update(ts, 0, 0, 0, fee_steem * (-1), 0)
return
elif op['type'] == "account_create_with_delegation":
fee_steem = Amount(op['fee'], hive_instance=self.hive).amount
fee_vests = self.hive.hp_to_vests(Amount(op['fee'], hive_instance=self.hive).amount, timestamp=ts)
if op['new_account_name'] == self.account["name"]:
if Amount(op['delegation'], hive_instance=self.hive).amount > 0:
delegation = {'account': op['creator'], 'amount':
Amount(op['delegation'], hive_instance=self.hive)}
else:
delegation = None
self.update(ts, fee_vests, delegation, 0)
return
if op['creator'] == self.account["name"]:
delegation = {'account': op['new_account_name'], 'amount':
Amount(op['delegation'], hive_instance=self.hive)}
self.update(ts, 0, 0, delegation, fee_steem * (-1), 0)
return
elif op['type'] == "delegate_vesting_shares":
vests = Amount(op['vesting_shares'], hive_instance=self.hive)
# print(op)
if op['delegator'] == self.account["name"]:
delegation = {'account': op['delegatee'], 'amount': vests}
self.update(ts, 0, 0, delegation)
return
if op['delegatee'] == self.account["name"]:
delegation = {'account': op['delegator'], 'amount': vests}
self.update(ts, 0, delegation, 0)
return
elif op['type'] == "transfer":
amount = Amount(op['amount'], hive_instance=self.hive)
# print(op)
if op['from'] == self.account["name"]:
if amount.symbol == self.hive.steem_symbol:
self.update(ts, 0, 0, 0, amount * (-1), 0)
elif amount.symbol == self.hive.sbd_symbol:
self.update(ts, 0, 0, 0, 0, amount * (-1))
if op['to'] == self.account["name"]:
if amount.symbol == self.hive.steem_symbol:
self.update(ts, 0, 0, 0, amount, 0)
elif amount.symbol == self.hive.sbd_symbol:
self.update(ts, 0, 0, 0, 0, amount)
# print(op, vests)
# self.update(ts, vests, 0, 0)
return
elif op['type'] == "fill_order":
current_pays = Amount(op["current_pays"], hive_instance=self.hive)
open_pays = Amount(op["open_pays"], hive_instance=self.hive)
if op["current_owner"] == self.account["name"]:
if current_pays.symbol == self.hive.steem_symbol:
self.update(ts, 0, 0, 0, current_pays * (-1), open_pays)
elif current_pays.symbol == self.hive.sbd_symbol:
self.update(ts, 0, 0, 0, open_pays, current_pays * (-1))
if op["open_owner"] == self.account["name"]:
if current_pays.symbol == self.hive.steem_symbol:
self.update(ts, 0, 0, 0, current_pays, open_pays * (-1))
elif current_pays.symbol == self.hive.sbd_symbol:
self.update(ts, 0, 0, 0, open_pays * (-1), current_pays)
# print(op)
return
elif op['type'] == "transfer_to_vesting":
hive = Amount(op['amount'], hive_instance=self.hive)
vests = self.hive.hp_to_vests(hive.amount, timestamp=ts)
if op['from'] == self.account["name"] and op['to'] == self.account["name"]:
self.update(ts, vests, 0, 0, hive * (-1), 0) # power up from and to given account
elif op['from'] != self.account["name"] and op['to'] == self.account["name"]:
self.update(ts, vests, 0, 0, 0, 0) # power up from another account
else: # op['from'] == self.account["name"] and op['to'] != self.account["name"]
self.update(ts, 0, 0, 0, hive * (-1), 0) # power up to another account
return
elif op['type'] == "fill_vesting_withdraw":
# print(op)
vests = Amount(op['withdrawn'], hive_instance=self.hive)
self.update(ts, vests * (-1), 0, 0)
return
elif op['type'] == "return_vesting_delegation":
delegation = {'account': None, 'amount':
Amount(op['vesting_shares'], hive_instance=self.hive)}
self.update(ts, 0, 0, delegation)
return
elif op['type'] == "claim_reward_balance":
vests = Amount(op['reward_vests'], hive_instance=self.hive)
hive = Amount(op['reward_steem'], hive_instance=self.hive)
hbd = Amount(op['reward_sbd'], hive_instance=self.hive)
self.update(ts, vests, 0, 0, hive, hbd)
return
elif op['type'] == "curation_reward":
if "curation_reward" in only_ops or enable_rewards:
vests = Amount(op['reward'], hive_instance=self.hive)
if "curation_reward" in only_ops:
self.update(ts, vests, 0, 0)
if enable_rewards:
self.update_rewards(ts, vests, 0, 0, 0)
return
elif op['type'] == "author_reward":
if "author_reward" in only_ops or enable_rewards:
# print(op)
vests = Amount(op['vesting_payout'], hive_instance=self.hive)
hive = Amount(op['steem_payout'], hive_instance=self.hive)
hbd = Amount(op['sbd_payout'], hive_instance=self.hive)
if "author_reward" in only_ops:
self.update(ts, vests, 0, 0, hive, hbd)
if enable_rewards:
self.update_rewards(ts, 0, vests, hive, hbd)
return
elif op['type'] == "producer_reward":
vests = Amount(op['vesting_shares'], hive_instance=self.hive)
self.update(ts, vests, 0, 0)
return
elif op['type'] == "comment_benefactor_reward":
if op['benefactor'] == self.account["name"]:
if "reward" in op:
vests = Amount(op['reward'], hive_instance=self.hive)
self.update(ts, vests, 0, 0)
else:
vests = Amount(op['vesting_payout'], hive_instance=self.hive)
hive = Amount(op['steem_payout'], hive_instance=self.hive)
hbd = Amount(op['sbd_payout'], hive_instance=self.hive)
self.update(ts, vests, 0, 0, hive, hbd)
return
else:
return
elif op['type'] == "fill_convert_request":
amount_in = Amount(op["amount_in"], hive_instance=self.hive)
amount_out = Amount(op["amount_out"], hive_instance=self.hive)
if op["owner"] == self.account["name"]:
self.update(ts, 0, 0, 0, amount_out, amount_in * (-1))
return
elif op['type'] == "interest":
interest = Amount(op["interest"], hive_instance=self.hive)
self.update(ts, 0, 0, 0, 0, interest)
return
elif op['type'] == "vote":
if "vote" in only_ops or enable_out_votes:
weight = int(op['weight'])
if op["voter"] == self.account["name"]:
self.update_out_vote(ts, weight)
if "vote" in only_ops or enable_in_votes and op["author"] == self.account["name"]:
weight = int(op['weight'])
self.update_in_vote(ts, weight, op)
return
elif op['type'] in ['comment', 'feed_publish', 'shutdown_witness',
'account_witness_vote', 'witness_update', 'custom_json',
'limit_order_create', 'account_update',
'account_witness_proxy', 'limit_order_cancel', 'comment_options',
'delete_comment', 'interest', 'recover_account', 'pow',
'fill_convert_request', 'convert', 'request_account_recovery']:
return
# if "vests" in str(op).lower():
# print(op)
# else:
# print(op)
def build_hp_arrays(self):
""" Builds the own_hp and eff_hp array"""
self.own_hp = []
self.eff_hp = []
for (ts, own, din, dout) in zip(self.timestamps, self.own_vests,
self.delegated_vests_in,
self.delegated_vests_out):
sum_in = sum([din[key].amount for key in din])
sum_out = sum([dout[key].amount for key in dout])
hp_in = self.hive.vests_to_hp(sum_in, timestamp=ts)
hp_out = self.hive.vests_to_hp(sum_out, timestamp=ts)
hp_own = self.hive.vests_to_hp(own, timestamp=ts)
hp_eff = hp_own + hp_in - hp_out
self.own_hp.append(hp_own)
self.eff_hp.append(hp_eff)
def build_rep_arrays(self):
""" Build reputation arrays """
self.rep_timestamp = [self.timestamps[1]]
self.rep = [reputation_to_score(0)]
current_reputation = 0
for (ts, rshares, rep) in zip(self.in_vote_timestamp, self.in_vote_rshares, self.in_vote_rep):
if rep > 0:
if rshares > 0 or (rshares < 0 and rep > current_reputation):
current_reputation += rshares >> 6
self.rep.append(reputation_to_score(current_reputation))
self.rep_timestamp.append(ts)
def build_vp_arrays(self):
""" Build vote power arrays"""
self.vp_timestamp = [self.timestamps[1]]
self.vp = [HIVE_100_PERCENT]
for (ts, weight) in zip(self.out_vote_timestamp, self.out_vote_weight):
self.vp.append(self.vp[-1])
if self.vp[-1] < HIVE_100_PERCENT:
regenerated_vp = ((ts - self.vp_timestamp[-1]).total_seconds()) * HIVE_100_PERCENT / HIVE_VOTE_REGENERATION_SECONDS
self.vp[-1] += int(regenerated_vp)
if self.vp[-1] > HIVE_100_PERCENT:
self.vp[-1] = HIVE_100_PERCENT
self.vp[-1] -= self.hive._calc_resulting_vote(self.vp[-1], weight)
if self.vp[-1] < 0:
self.vp[-1] = 0
self.vp_timestamp.append(ts)
def build_curation_arrays(self, end_date=None, sum_days=7):
""" Build curation arrays"""
self.curation_per_1000_HP_timestamp = []
self.curation_per_1000_HP = []
if sum_days <= 0:
raise ValueError("sum_days must be greater than 0")
index = 0
curation_sum = 0
days = (self.reward_timestamps[-1] - self.reward_timestamps[0]).days // sum_days * sum_days
if end_date is None:
end_date = self.reward_timestamps[-1] - timedelta(days=days)
for (ts, vests) in zip(self.reward_timestamps, self.curation_rewards):
if vests == 0:
continue
hp = self.hive.vests_to_hp(vests, timestamp=ts)
data = self.get_data(timestamp=ts, index=index)
index = data["index"]
if "hp_eff" in data and data["hp_eff"] > 0:
curation_1k_hp = hp / data["hp_eff"] * 1000 / sum_days * 7
else:
curation_1k_hp = 0
if ts < end_date:
curation_sum += curation_1k_hp
else:
self.curation_per_1000_HP_timestamp.append(end_date)
self.curation_per_1000_HP.append(curation_sum)
end_date = end_date + timedelta(days=sum_days)
curation_sum = 0
def __str__(self):
return self.__repr__()
def __repr__(self):
return "<%s %s>" % (
self.__class__.__name__, str(self.account["name"]))
| 44.636872 | 147 | 0.570547 |
4a218f52365e29e103e58f6de516129bd07d1003 | 22,705 | py | Python | ninjalooter/tests/test_message_handlers.py | rm-you/ninjalooter | 026b0e732964d62721b84c7ae64d418bfe1e2975 | [
"MIT"
] | 4 | 2020-08-30T12:57:03.000Z | 2022-03-18T15:11:13.000Z | ninjalooter/tests/test_message_handlers.py | rm-you/ninjalooter | 026b0e732964d62721b84c7ae64d418bfe1e2975 | [
"MIT"
] | 5 | 2022-01-07T03:17:32.000Z | 2022-03-27T21:20:12.000Z | ninjalooter/tests/test_message_handlers.py | rm-you/ninjalooter | 026b0e732964d62721b84c7ae64d418bfe1e2975 | [
"MIT"
] | 1 | 2021-12-28T02:18:04.000Z | 2021-12-28T02:18:04.000Z | import datetime
from unittest import mock
from ninjalooter import config
from ninjalooter import message_handlers
from ninjalooter import models
from ninjalooter.tests import base
from ninjalooter import utils
class TestMessageHandlers(base.NLTestBase):
def setUp(self) -> None:
utils.setup_aho()
config.ALLIANCES = base.SAMPLE_ALLIANCES
config.ALLIANCE_MAP = base.SAMPLE_ALLIANCE_MAP
@mock.patch('ninjalooter.utils.store_state')
@mock.patch('wx.PostEvent')
def test_handle_start_who(self, mock_post_event, mock_store_state):
# Empty List, full /who
config.PLAYER_AFFILIATIONS = {}
for line in base.SAMPLE_WHO_LOG.splitlines():
match = config.MATCH_WHO.match(line)
if match:
message_handlers.handle_who(match, 'window')
self.assertEqual(25, len(config.PLAYER_AFFILIATIONS))
self.assertEqual(25, mock_post_event.call_count)
mock_post_event.reset_mock()
# Peter and Fred should be marked as guildless
self.assertIsNone(config.PLAYER_AFFILIATIONS['Peter'])
self.assertIsNone(config.PLAYER_AFFILIATIONS['Fred'])
# Mark Peter and Fred as historically belonging to Kingdom
config.HISTORICAL_AFFILIATIONS['Peter'] = 'Kingdom'
config.HISTORICAL_AFFILIATIONS['Fred'] = 'Kingdom'
# Trigger New Who
message_handlers.handle_start_who(None, 'window')
mock_post_event.assert_called_once_with(
'window', models.ClearWhoEvent())
mock_post_event.reset_mock()
# Run the full who-list again
for line in base.SAMPLE_WHO_LOG.splitlines():
match = config.MATCH_WHO.match(line)
if match:
message_handlers.handle_who(match, 'window')
self.assertEqual(25, len(config.PLAYER_AFFILIATIONS))
# Peter should be marked as Kingdom, and Fred as guildless
self.assertEqual('Kingdom', config.PLAYER_AFFILIATIONS['Peter'])
self.assertIsNone(config.PLAYER_AFFILIATIONS['Fred'])
@mock.patch('ninjalooter.utils.store_state')
@mock.patch('wx.PostEvent')
def test_handle_who(self, mock_post_event, mock_store_state):
# Empty List, full /who
config.PLAYER_AFFILIATIONS = {}
for line in base.SAMPLE_WHO_LOG.splitlines():
match = config.MATCH_WHO.match(line)
if match:
message_handlers.handle_who(match, 'window')
self.assertEqual(25, len(config.PLAYER_AFFILIATIONS))
self.assertEqual(25, mock_post_event.call_count)
mock_post_event.reset_mock()
# Member changed from ANONYMOUS/Unguilded to Guilded
config.PLAYER_AFFILIATIONS = {'Jim': None}
line = '[Sun Aug 16 22:46:32 2020] [ANONYMOUS] Jim (Gnome) <Guild>'
match = config.MATCH_WHO.match(line)
message_handlers.handle_who(match, 'window')
self.assertEqual(1, len(config.PLAYER_AFFILIATIONS))
self.assertEqual('Guild', config.PLAYER_AFFILIATIONS['Jim'])
mock_post_event.assert_called_once_with(
'window', models.WhoEvent('Jim', 'ANONYMOUS', '??', 'Guild'))
mock_post_event.reset_mock()
# Member changed guilds
config.PLAYER_AFFILIATIONS = {'Jim': 'Guild'}
line = '[Sun Aug 16 22:46:32 2020] [ANONYMOUS] Jim (Gnome) <Other>'
match = config.MATCH_WHO.match(line)
message_handlers.handle_who(match, 'window')
self.assertEqual(1, len(config.PLAYER_AFFILIATIONS))
self.assertEqual('Other', config.PLAYER_AFFILIATIONS['Jim'])
mock_post_event.assert_called_once_with(
'window', models.WhoEvent('Jim', 'ANONYMOUS', '??', 'Other'))
mock_post_event.reset_mock()
# Member left their guild
config.PLAYER_AFFILIATIONS = {'Jim': 'Guild'}
line = '[Sun Aug 16 22:46:32 2020] [50 Cleric] Jim (Gnome)'
match = config.MATCH_WHO.match(line)
message_handlers.handle_who(match, 'window')
self.assertEqual(1, len(config.PLAYER_AFFILIATIONS))
self.assertIsNone(config.PLAYER_AFFILIATIONS['Jim'])
mock_post_event.assert_called_once_with(
'window', models.WhoEvent('Jim', 'Cleric', '50', None))
mock_post_event.reset_mock()
@mock.patch('ninjalooter.utils.store_state')
@mock.patch('wx.PostEvent')
def test_handle_drop(self, mock_post_event, mock_store_state):
config.PLAYER_AFFILIATIONS = {
'Jim': 'Venerate',
'James': 'Kingdom',
'Dan': 'Dial a Daniel',
}
config.PENDING_AUCTIONS = list()
# # FILTER OFF - Item linked by a non-federation guild member
# config.RESTRICT_BIDS = False
# line = ("[Sun Aug 16 22:47:31 2020] Dan says out of character, "
# "'Belt of Iniquity'")
# match = config.MATCH_DROP.match(line)
# items = message_handlers.handle_drop(match, 'window')
# self.assertEqual(1, len(items))
# self.assertEqual(1, len(config.PENDING_AUCTIONS))
# mock_post_event.assert_called_once_with(
# 'window', models.DropEvent())
# mock_post_event.reset_mock()
# config.PENDING_AUCTIONS = list()
# # FILTER ON - Item linked by a non-federation guild member
# config.RESTRICT_BIDS = True
# line = ("[Sun Aug 16 22:47:31 2020] Dan says out of character, "
# "'Belt of Iniquity'")
# match = config.MATCH_DROP.match(line)
# items = message_handlers.handle_drop(match, 'window')
# self.assertEqual(0, len(items))
# self.assertEqual(0, len(config.PENDING_AUCTIONS))
# mock_post_event.assert_not_called()
# Item linked by a federation guild member
# NODROP filter on, droppable item
config.NODROP_ONLY = True
line = ("[Sun Aug 16 22:47:31 2020] Jim says out of character, "
"'Copper Disc'")
jim_disc_1_uuid = "jim_disc_1_uuid"
jim_disc_1 = models.ItemDrop(
'Copper Disc', 'Jim', 'Sun Aug 16 22:47:31 2020',
uuid=jim_disc_1_uuid)
match = config.MATCH_DROP_OOC.match(line)
items = list(message_handlers.handle_drop(match, 'window'))
self.assertEqual(1, len(items))
self.assertIn('Copper Disc', items)
self.assertEqual(0, len(config.PENDING_AUCTIONS))
mock_post_event.assert_not_called()
mock_post_event.reset_mock()
# NODROP filter on, NODROP item
line = ("[Sun Aug 16 22:47:31 2020] Jim says, "
"'Belt of Iniquity'")
jim_belt_1_uuid = "jim_belt_1_uuid"
jim_belt_1 = models.ItemDrop(
'Belt of Iniquity', 'Jim', 'Sun Aug 16 22:47:31 2020',
uuid=jim_belt_1_uuid)
match = config.MATCH_DROP_SAY.match(line)
with mock.patch('uuid.uuid4') as mock_uuid4:
mock_uuid4.return_value = jim_belt_1_uuid
items = list(message_handlers.handle_drop(match, 'window'))
self.assertEqual(1, len(items))
self.assertIn('Belt of Iniquity', items)
self.assertEqual(1, len(config.PENDING_AUCTIONS))
self.assertListEqual(
[jim_belt_1],
config.PENDING_AUCTIONS)
mock_post_event.assert_called_once_with(
'window', models.DropEvent())
mock_post_event.reset_mock()
# NODROP filter off, droppable item
config.NODROP_ONLY = False
line = ("[Sun Aug 16 22:47:31 2020] Jim tells the guild, "
"'Copper Disc'")
match = config.MATCH_DROP_GU.match(line)
with mock.patch('uuid.uuid4') as mock_uuid4:
mock_uuid4.return_value = jim_disc_1_uuid
items = list(message_handlers.handle_drop(match, 'window'))
self.assertEqual(1, len(items))
self.assertIn('Copper Disc', items)
self.assertEqual(2, len(config.PENDING_AUCTIONS))
self.assertListEqual(
[jim_belt_1, jim_disc_1],
config.PENDING_AUCTIONS)
mock_post_event.assert_called_once_with(
'window', models.DropEvent())
mock_post_event.reset_mock()
# Two items linked by a federation guild member, plus chat
line = ("[Sun Aug 16 22:47:41 2020] James tells the guild, "
"'Platinum Disc and Golden Amber Earring woo'")
james_disc_uuid = "james_disc_uuid"
james_earring_uuid = "james_earring_uuid"
james_disc = models.ItemDrop(
'Platinum Disc', 'James', 'Sun Aug 16 22:47:41 2020',
uuid=james_disc_uuid)
james_earring = models.ItemDrop(
'Golden Amber Earring', 'James', 'Sun Aug 16 22:47:41 2020',
uuid=james_earring_uuid)
match = config.MATCH_DROP_GU.match(line)
with mock.patch('uuid.uuid4') as mock_uuid4:
mock_uuid4.side_effect = [james_disc_uuid, james_earring_uuid]
items = list(message_handlers.handle_drop(match, 'window'))
self.assertEqual(2, len(items))
self.assertListEqual(
['Platinum Disc', 'Golden Amber Earring'], items)
self.assertListEqual(
[jim_belt_1, jim_disc_1, james_disc, james_earring],
config.PENDING_AUCTIONS)
mock_post_event.assert_called_once_with(
'window', models.DropEvent())
mock_post_event.reset_mock()
# Random chatter by federation guild member
line = ("[Sun Aug 16 22:47:31 2020] Jim tells the guild, "
"'four score and seven years ago, we wanted pixels'")
match = config.MATCH_DROP_GU.match(line)
items = list(message_handlers.handle_drop(match, 'window'))
self.assertEqual(0, len(items))
self.assertListEqual(
[jim_belt_1, jim_disc_1, james_disc, james_earring],
config.PENDING_AUCTIONS)
mock_post_event.assert_not_called()
# Someone reports they looted an item
line = ("[Sun Aug 16 22:47:31 2020] Jim tells the guild, "
"'looted Belt of Iniquity'")
match = config.MATCH_DROP_GU.match(line)
items = list(message_handlers.handle_drop(match, 'window'))
self.assertEqual(0, len(items))
self.assertListEqual(
[jim_belt_1, jim_disc_1, james_disc, james_earring],
config.PENDING_AUCTIONS)
mock_post_event.assert_not_called()
# Bid message doesn't register as a drop
config.ACTIVE_AUCTIONS.clear()
jerkin_1 = models.ItemDrop(
'Shiverback-hide Jerkin', 'Jim', 'Sun Aug 16 22:47:31 2020')
config.PENDING_AUCTIONS.append(jerkin_1)
auction1 = utils.start_auction_dkp(jerkin_1, 'VCR')
self.assertEqual(
config.ACTIVE_AUCTIONS.get(auction1.item.uuid), auction1)
config.PENDING_AUCTIONS.clear()
line = ("[Sun Aug 16 22:47:31 2020] Jim tells the guild, "
"'Shiverback-hide Jerkin'")
match = config.MATCH_DROP_GU.match(line)
items = list(message_handlers.handle_drop(match, 'window'))
# One item should be found
self.assertListEqual(['Shiverback-hide Jerkin'], items)
self.assertListEqual([], config.PENDING_AUCTIONS)
mock_post_event.assert_not_called()
# A gratss message from another app should not register as a drop
bid_line = ("[Sun Aug 16 22:47:31 2020] Toald tells the guild, "
"'Shiverback-hide Jerkin 1 main'")
config.RESTRICT_BIDS = False
bid_match = config.MATCH_BID_GU.match(bid_line)
message_handlers.handle_bid(bid_match, 'window')
config.HISTORICAL_AUCTIONS[auction1.item.uuid] = (
config.ACTIVE_AUCTIONS.pop(auction1.item.uuid))
line = ("[Sun Aug 16 22:47:31 2020] Jim tells the guild, "
"'Gratss Toald on [Shiverback-hide Jerkin] (1 DKP)!'")
match = config.MATCH_DROP_GU.match(line)
items = list(message_handlers.handle_drop(match, 'window'))
self.assertListEqual([], items)
# Ignore items if a number is present, it's probably a bid
match = config.MATCH_DROP_GU.match(bid_line)
items = list(message_handlers.handle_drop(match, 'window'))
self.assertListEqual([], items)
# second same drop shouldn't record if it is within cooldown time
jerkin_2 = models.ItemDrop(
'Shiverback-hide Jerkin', 'Jim',
utils.datetime_to_eq_format(datetime.datetime.now()))
config.PENDING_AUCTIONS.append(jerkin_2)
line = ("[{}] Jim tells the guild, 'Shiverback-hide Jerkin'".format(
utils.datetime_to_eq_format(datetime.datetime.now())))
match = config.MATCH_DROP_GU.match(line)
self.assertEqual([jerkin_2], config.PENDING_AUCTIONS)
items = list(message_handlers.handle_drop(match, 'window'))
self.assertListEqual([jerkin_2.name], items)
self.assertEqual([jerkin_2], config.PENDING_AUCTIONS)
# second same drop should record if it is past cooldown time
jerkin_2.timestamp = utils.datetime_to_eq_format(
datetime.datetime.now() -
datetime.timedelta(seconds=config.DROP_COOLDOWN))
self.assertEqual(1, len(config.PENDING_AUCTIONS))
items = list(message_handlers.handle_drop(match, 'window'))
self.assertListEqual([jerkin_2.name], items)
self.assertEqual(2, len(config.PENDING_AUCTIONS))
@mock.patch('ninjalooter.utils.store_state')
@mock.patch('wx.PostEvent')
def test_handle_bid(self, mock_post_event, mock_store_state):
config.PLAYER_AFFILIATIONS = {
'Jim': 'Venerate',
'Pim': 'Castle',
'Tim': 'Kingdom',
'Dan': 'Dial a Daniel',
}
item_name = 'Copper Disc'
itemdrop = models.ItemDrop(item_name, "Jim", "timestamp")
disc_auction = models.DKPAuction(itemdrop, 'VCR')
config.ACTIVE_AUCTIONS = {
itemdrop.uuid: disc_auction
}
# FILTER ON - Someone in the alliance bids on an inactive item
config.RESTRICT_BIDS = True
line = ("[Sun Aug 16 22:47:31 2020] Jim auctions, "
"'Platinum Disc 10 DKP'")
match = config.MATCH_BID[0].match(line)
result = message_handlers.handle_bid(match, 'window')
self.assertFalse(result)
self.assertListEqual([], disc_auction.highest())
self.assertEqual(1, len(config.ACTIVE_AUCTIONS))
mock_post_event.assert_not_called()
# FILTER ON - Someone outside the alliance bids on an active item
line = ("[Sun Aug 16 22:47:31 2020] Dan auctions, "
"'Copper Disc 10 DKP'")
match = config.MATCH_BID[0].match(line)
result = message_handlers.handle_bid(match, 'window')
self.assertFalse(result)
self.assertEqual([], disc_auction.highest())
mock_post_event.assert_not_called()
# FILTER OFF - Someone in the alliance bids on an inactive item
config.RESTRICT_BIDS = False
line = ("[Sun Aug 16 22:47:31 2020] Jim auctions, "
"'Platinum Disc 10 DKP'")
match = config.MATCH_BID[0].match(line)
result = message_handlers.handle_bid(match, 'window')
self.assertFalse(result)
self.assertListEqual([], disc_auction.highest())
self.assertEqual(1, len(config.ACTIVE_AUCTIONS))
mock_post_event.assert_not_called()
# FILTER ON - Someone outside the alliance bids on an active item
config.RESTRICT_BIDS = True
line = ("[Sun Aug 16 22:47:31 2020] Dan auctions, "
"'Copper Disc 10 DKP'")
match = config.MATCH_BID[0].match(line)
result = message_handlers.handle_bid(match, 'window')
self.assertFalse(result)
self.assertEqual([], disc_auction.highest())
mock_post_event.assert_not_called()
# Someone we haven't seen bids on an active item
line = ("[Sun Aug 16 22:47:31 2020] Paul auctions, "
"'Copper Disc 10 DKP'")
match = config.MATCH_BID[0].match(line)
result = message_handlers.handle_bid(match, 'window')
self.assertFalse(result)
self.assertListEqual([], disc_auction.highest())
mock_post_event.assert_not_called()
# Someone in the alliance says random stuff with a number
line = ("[Sun Aug 16 22:47:31 2020] Tim auctions, "
"'I am 12 and what channel is this'")
match = config.MATCH_BID[0].match(line)
result = message_handlers.handle_bid(match, 'window')
self.assertFalse(result)
self.assertListEqual([], disc_auction.highest())
mock_post_event.assert_not_called()
# Someone in the alliance bids on two items at once
line = ("[Sun Aug 16 22:47:31 2020] Jim auctions, "
"'Copper Disc 10 DKP Platinum Disc'")
match = config.MATCH_BID[0].match(line)
result = message_handlers.handle_bid(match, 'window')
self.assertFalse(result)
self.assertListEqual([], disc_auction.highest())
mock_post_event.assert_not_called()
# Someone in the alliance bids on an active item
line = ("[Sun Aug 16 22:47:31 2020] Jim auctions, "
"'Copper Disc 10 DKP'")
match = config.MATCH_BID[0].match(line)
result = message_handlers.handle_bid(match, 'window')
self.assertTrue(result)
self.assertIn(('Jim', 10), disc_auction.highest())
mock_post_event.assert_called_once_with(
'window', models.BidEvent(disc_auction))
mock_post_event.reset_mock()
# Someone in the alliance bids on an active item with wrong case
line = ("[Sun Aug 16 22:47:31 2020] Pim auctions, "
"'copper DISC 11 DKP'")
match = config.MATCH_BID[0].match(line)
result = message_handlers.handle_bid(match, 'window')
self.assertTrue(result)
self.assertIn(('Pim', 11), disc_auction.highest())
mock_post_event.assert_called_once_with(
'window', models.BidEvent(disc_auction))
mock_post_event.reset_mock()
# Someone in the alliance bids on an active item for their 2nd main
# This would trigger a bug with "2nd" being read as "2 DKP"
line = ("[Sun Aug 16 22:47:31 2020] Jim auctions, "
"'Copper Disc 2nd main 12dkp'")
match = config.MATCH_BID[0].match(line)
result = message_handlers.handle_bid(match, 'window')
self.assertTrue(result)
self.assertIn(('Jim', 12), disc_auction.highest())
mock_post_event.assert_called_once_with(
'window', models.BidEvent(disc_auction))
mock_post_event.reset_mock()
config.ACTIVE_AUCTIONS = {}
@mock.patch('ninjalooter.utils.store_state')
@mock.patch('wx.PostEvent')
def test_handle_gratss(self, mock_post_event, mock_store_state):
config.PENDING_AUCTIONS.clear()
config.ACTIVE_AUCTIONS.clear()
# Set up a historical auction with bids
jerkin_1 = models.ItemDrop(
'Shiverback-hide Jerkin', 'Jim', 'Sun Aug 16 22:47:31 2020')
config.PENDING_AUCTIONS.append(jerkin_1)
auction1 = utils.start_auction_dkp(jerkin_1, 'VCR')
self.assertEqual(
config.ACTIVE_AUCTIONS.get(auction1.item.uuid), auction1)
bid_line = ("[Sun Aug 16 22:47:31 2020] Toald tells the guild, "
"'Shiverback-hide Jerkin 1 main'")
config.RESTRICT_BIDS = False
bid_match = config.MATCH_BID_GU.match(bid_line)
message_handlers.handle_bid(bid_match, 'window')
config.HISTORICAL_AUCTIONS[auction1.item.uuid] = (
config.ACTIVE_AUCTIONS.pop(auction1.item.uuid))
# Set up a historical auction without bids (rot)
disc_1 = models.ItemDrop(
'Copper Disc', 'Jim', 'Sun Aug 16 22:47:31 2020')
config.PENDING_AUCTIONS.append(disc_1)
auction2 = utils.start_auction_dkp(disc_1, 'VCR')
self.assertEqual(
config.ACTIVE_AUCTIONS.get(auction2.item.uuid), auction2)
config.HISTORICAL_AUCTIONS[auction2.item.uuid] = (
config.ACTIVE_AUCTIONS.pop(auction2.item.uuid))
# A gratss message from auction history should not register (bids)
line = ("[Sun Aug 16 22:47:31 2020] Jim tells the guild, "
"'Gratss Toald on [Shiverback-hide Jerkin] (1 DKP)!'")
match = config.MATCH_GRATSS.match(line)
self.assertFalse(message_handlers.handle_gratss(match, 'window'))
# A gratss message from auction history should not register (no bids)
line = ("[Sun Aug 16 22:47:31 2020] Jim tells the guild, "
"'Gratss ROT on [Copper Disc] (0 DKP)!'")
match = config.MATCH_GRATSS.match(line)
self.assertFalse(message_handlers.handle_gratss(match, 'window'))
# A gratss message that doesn't match auction history SHOULD register
line = ("[Sun Aug 16 22:47:31 2020] Jim tells the guild, "
"'Gratss Jim on [Bladestopper] (100 DKP)!'")
match = config.MATCH_GRATSS.match(line)
self.assertTrue(message_handlers.handle_gratss(match, 'window'))
# A gratss message direct to /tell should register (no tell windows)
line = ("[Sun Aug 16 22:47:31 2020] Jim tells you, "
"'Gratss Jim on [Bladestopper] (100 DKP)!'")
match = config.MATCH_GRATSS.match(line)
self.assertTrue(message_handlers.handle_gratss(match, 'window'))
# A gratss message direct to /tell should register (tell windows)
line = ("[Sun Aug 16 22:47:31 2020] Jim -> You, "
"'Gratss Jim on [Bladestopper] (100 DKP)!'")
match = config.MATCH_GRATSS.match(line)
self.assertTrue(message_handlers.handle_gratss(match, 'window'))
@mock.patch('ninjalooter.utils.store_state')
@mock.patch('wx.PostEvent')
def test_handle_creditt(self, mock_post_event, mock_store_state):
config.PLAYER_NAME = "PlayerName"
# A creditt message direct to /tell should register (no tell windows)
line = ("[Sun Aug 16 22:47:31 2020] Jim tells you, "
"'Creditt Bill'")
match = config.MATCH_CREDITT.match(line)
self.assertTrue(message_handlers.handle_creditt(match, 'window'))
# A creditt message direct to /tell should register (tell windows)
line = ("[Sun Aug 16 22:47:31 2020] Jim -> PlayerName: "
"Creditt Tony")
match = config.MATCH_CREDITT.match(line)
self.assertTrue(message_handlers.handle_creditt(match, 'window'))
config.PLAYER_NAME = ""
| 45.961538 | 77 | 0.638494 |
4a218f71bddb25be1f6fd522cc639b8eae35c87b | 2,248 | py | Python | tests/integration/core/test_scanner.py | asn-d6/sbws | 6070d36022dc2130518dd0c68332166b2bf76c73 | [
"CC0-1.0"
] | null | null | null | tests/integration/core/test_scanner.py | asn-d6/sbws | 6070d36022dc2130518dd0c68332166b2bf76c73 | [
"CC0-1.0"
] | null | null | null | tests/integration/core/test_scanner.py | asn-d6/sbws | 6070d36022dc2130518dd0c68332166b2bf76c73 | [
"CC0-1.0"
] | null | null | null | import pytest
from sbws.core.scanner import measure_relay
from sbws.lib.resultdump import ResultSuccess
import logging
def assert_within(value, target, radius):
'''
Assert that **value** is within **radius** of **target**
    If target is 10 and radius is 2, value can be anywhere strictly between
    8 and 12, exclusive (the assertions below use strict comparisons)
'''
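    # e.g. assert_within(9.5, 10, 2) passes, while assert_within(12.5, 10, 2)
    # fails (illustrative values, not taken from the tests below)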
assert target - radius < value, 'Value is too small. {} is not within '\
'{} of {}'.format(value, radius, target)
assert target + radius > value, 'Value is too big. {} is not within '\
'{} of {}'.format(value, radius, target)
def test_measure_relay_with_maxadvertisedbandwidth(
persistent_launch_tor, sbwshome_dir, args, conf,
dests, cb, rl, caplog):
caplog.set_level(logging.DEBUG)
# d = get_everything_to_measure(sbwshome, cont, args, conf)
# rl = d['rl']
# dests = d['dests']
# cb = d['cb']
# 117A456C911114076BEB4E757AC48B16CC0CCC5F is relay1mbyteMAB
relay = [r for r in rl.relays
if r.nickname == 'relay1mbyteMAB'][0]
# d['relay'] = relay
result = measure_relay(args, conf, dests, cb, rl, relay)
assert len(result) == 1
result = result[0]
assert isinstance(result, ResultSuccess)
one_mbyte = 1 * 1024 * 1024
dls = result.downloads
for dl in dls:
# This relay has MaxAdvertisedBandwidth set, but should not be limited
        # to just 1 Mbyte. Assume and assert that all downloads were more
        # than 10% faster than 1 MBps
assert dl['amount'] / dl['duration'] > one_mbyte * 1.1
assert result.relay_average_bandwidth == one_mbyte
@pytest.mark.skip(reason="temporally disabled")
def test_measure_relay_with_relaybandwidthrate(
persistent_launch_tor, args, conf, dests, cb, rl):
relay = [r for r in rl.relays
if r.nickname == 'relay1mbyteRBR'][0]
result = measure_relay(args, conf, dests, cb, rl, relay)
assert len(result) == 1
result = result[0]
assert isinstance(result, ResultSuccess)
one_mbyte = 1 * 1024 * 1024
allowed_error = 0.1 * one_mbyte # allow 10% error in either direction
dls = result.downloads
for dl in dls:
assert_within(dl['amount'] / dl['duration'], one_mbyte, allowed_error)
| 36.852459 | 78 | 0.661477 |
4a21904bd7a36aef4740b93efcf503028694d5fc | 1,442 | py | Python | swagger_bundler/orphancheck.py | pilosus/swagger-bundler | 8826fe20f64d55a5616393098e36386b5122b1dc | [
"MIT"
] | 2 | 2019-11-11T08:52:18.000Z | 2021-04-06T09:59:38.000Z | swagger_bundler/orphancheck.py | pilosus/swagger-bundler | 8826fe20f64d55a5616393098e36386b5122b1dc | [
"MIT"
] | 18 | 2016-09-23T12:26:19.000Z | 2016-10-05T12:58:00.000Z | swagger_bundler/orphancheck.py | pilosus/swagger-bundler | 8826fe20f64d55a5616393098e36386b5122b1dc | [
"MIT"
] | 3 | 2019-11-11T08:52:08.000Z | 2021-07-13T19:59:40.000Z | # -*- coding:utf-8 -*-
import re
from . import highlight
def check_orphan_reference(ctx, data, exception_on_fail=False):
# TODO: remove prefixing_targets
prefixing_targets = ctx.options["prefixing_targets"]
refs = set()
orphans = []
rx = re.compile("^#/({})/(.+)".format("|".join(prefixing_targets)))
def collect_refs(d):
if hasattr(d, "keys"):
if "$ref" in d:
refs.add(d["$ref"])
for v in d.values():
collect_refs(v)
elif isinstance(d, (tuple, list)):
for e in d:
collect_refs(e)
def has_ref(ref):
m = rx.search(ref)
if m is None:
return on_error(ref)
target, name = m.groups()
subsection = data.get(target) or {}
if name not in subsection:
return on_error(ref)
def on_error(ref):
msg = "{} is not found.".format(ref)
orphans.append(ref)
if not exception_on_fail:
highlight.show_on_warning(msg)
collect_refs(data)
for ref in refs:
has_ref(ref)
if exception_on_fail and orphans:
raise OrphanReferenceError("these references are not found: {}".format("\n".join(orphans)), orphans)
return orphans
class OrphanReferenceError(ValueError):
def __init__(self, msg, orphans, *args, **kwargs):
super().__init__(msg, *args, **kwargs)
self.orphans = orphans
| 27.730769 | 108 | 0.579057 |
4a21915c4939828095d823444e57851845839569 | 5,420 | py | Python | keras_fake_news_detector/library/encoders/doc2vec.py | chen0040/keras-fake-news-generator-and-detector | fa9b7e22322c49a9258f3452f7556dfa179ffc93 | [
"MIT"
] | 20 | 2018-01-06T02:07:42.000Z | 2021-07-26T05:34:54.000Z | keras_fake_news_detector/library/encoders/doc2vec.py | chen0040/keras-fake-news-generator-and-detector | fa9b7e22322c49a9258f3452f7556dfa179ffc93 | [
"MIT"
] | 1 | 2020-01-03T15:31:34.000Z | 2020-01-03T15:31:34.000Z | keras_fake_news_detector/library/encoders/doc2vec.py | chen0040/keras-fake-news-generator-and-detector | fa9b7e22322c49a9258f3452f7556dfa179ffc93 | [
"MIT"
] | 7 | 2018-02-20T08:24:33.000Z | 2020-02-07T18:38:15.000Z | import numpy as np
from keras.models import Sequential
from keras.callbacks import ModelCheckpoint
from keras.layers import Bidirectional, RepeatVector
from keras.layers.recurrent import LSTM
from keras.preprocessing.sequence import pad_sequences
from keras_fake_news_detector.library.utility.glove_loader import GLOVE_EMBEDDING_SIZE, load_glove
LATENT_SIZE = 256
BATCH_SIZE = 64
EPOCHS = 10
DOC2VEC_MAX_SEQ_LENGTH = 80
DOC2VEC_MAX_VOCAB_SIZE = 2000
def sentence_generator(X, embeddings, batch_size):
while True:
# loop once per epoch
num_recs = X.shape[0]
indices = np.random.permutation(np.arange(num_recs))
num_batches = num_recs // batch_size
for bid in range(num_batches):
sids = indices[bid * batch_size: (bid + 1) * batch_size]
Xbatch = embeddings[X[sids, :]]
yield Xbatch, Xbatch
class Doc2Vec(object):
model_name = 'doc2vec'
def __init__(self, config, target_seq_length=None):
if target_seq_length is None:
target_seq_length = GLOVE_EMBEDDING_SIZE
self.num_input_tokens = config['num_input_tokens']
self.word2idx = config['word2idx']
self.idx2word = config['idx2word']
self.max_input_seq_length = config['max_input_seq_length']
self.target_seq_length = target_seq_length
self.config = config
model = Sequential()
model.add(Bidirectional(LSTM(LATENT_SIZE), input_shape=(self.max_input_seq_length, GLOVE_EMBEDDING_SIZE)))
model.add(RepeatVector(self.max_input_seq_length))
model.add(Bidirectional(LSTM(self.target_seq_length, return_sequences=True), merge_mode="sum"))
model.compile(optimizer="sgd", loss="mse")
self.model = model
self.embedding = np.zeros((len(self.word2idx), GLOVE_EMBEDDING_SIZE))
def load_glove(self, data_dir_path):
word2em = load_glove(data_dir_path)
unk_embed = np.random.uniform(-1, 1, GLOVE_EMBEDDING_SIZE)
embedding = np.zeros((len(self.word2idx), GLOVE_EMBEDDING_SIZE))
for word, idx in self.word2idx.items():
vec = unk_embed
if word in word2em:
vec = word2em[word]
embedding[idx] = vec
embedding[self.word2idx["PAD"]] = np.zeros(shape=GLOVE_EMBEDDING_SIZE)
embedding[self.word2idx["UNK"]] = unk_embed
self.embedding = embedding
def load_weights(self, weight_file_path):
self.model.load_weights(weight_file_path)
def transform_input_text(self, texts):
temp = []
for line in texts:
x = []
for word in line.lower().split(' '):
wid = 1
if word in self.word2idx:
wid = self.word2idx[word]
x.append(wid)
if len(x) >= self.max_input_seq_length:
break
temp.append(x)
temp = pad_sequences(temp, maxlen=self.max_input_seq_length)
return temp
@staticmethod
def get_config_file_path(model_dir_path):
return model_dir_path + '/' + Doc2Vec.model_name + '-config.npy'
@staticmethod
def get_weight_file_path(model_dir_path):
return model_dir_path + '/' + Doc2Vec.model_name + '-weights.h5'
@staticmethod
def get_architecture_file_path(model_dir_path):
return model_dir_path + '/' + Doc2Vec.model_name + '-architecture.json'
def fit(self, Xtrain, Xtest, epochs=None, model_dir_path=None):
if epochs is None:
epochs = EPOCHS
if model_dir_path is None:
model_dir_path = './models'
config_file_path = Doc2Vec.get_config_file_path(model_dir_path)
weight_file_path = Doc2Vec.get_weight_file_path(model_dir_path)
checkpoint = ModelCheckpoint(weight_file_path)
np.save(config_file_path, self.config)
architecture_file_path = Doc2Vec.get_architecture_file_path(model_dir_path)
open(architecture_file_path, 'w').write(self.model.to_json())
Xtrain = self.transform_input_text(Xtrain)
Xtest = self.transform_input_text(Xtest)
train_gen = sentence_generator(Xtrain, self.embedding, BATCH_SIZE)
test_gen = sentence_generator(Xtest, self.embedding, BATCH_SIZE)
num_train_steps = len(Xtrain) // BATCH_SIZE
num_test_steps = len(Xtest) // BATCH_SIZE
history = self.model.fit_generator(train_gen,
steps_per_epoch=num_train_steps,
epochs=epochs,
validation_data=test_gen,
validation_steps=num_test_steps,
callbacks=[checkpoint])
self.model.save_weights(weight_file_path)
return history
def predict(self, x):
is_str = False
if type(x) is str:
is_str = True
x = [x]
Xtest = self.transform_input_text(x)
Xtest = self.embedding[Xtest]
preds = self.model.predict(Xtest)
if is_str:
preds = preds.flatten()
return preds
else:
result = []
for line in preds:
result.append(line.flatten())
return result
def get_doc_vec_length(self):
return self.max_input_seq_length * self.target_seq_length
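

# --- Illustrative usage sketch (editor addition, never invoked by this module).
# The tiny vocabulary, texts and paths below are hypothetical; in the real
# project the config comes from its preprocessing step, but the keys shown are
# exactly the ones __init__() above reads, and "PAD"/"UNK" must be present for
# load_glove() to work.
def _demo_doc2vec():
    word2idx = {"PAD": 0, "UNK": 1, "the": 2, "news": 3, "is": 4, "fake": 5}
    config = {
        "num_input_tokens": len(word2idx),
        "word2idx": word2idx,
        "idx2word": dict((idx, word) for word, idx in word2idx.items()),
        "max_input_seq_length": DOC2VEC_MAX_SEQ_LENGTH,
    }
    encoder = Doc2Vec(config)
    encoder.load_glove("./very_large_data")  # directory holding the GloVe vectors
    # Each split needs at least BATCH_SIZE documents so the generators yield batches.
    docs = ["the news is fake"] * (2 * BATCH_SIZE)
    encoder.fit(docs, docs, epochs=1)
    return encoder.predict("the news is fake")  # flattened document vector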
| 36.621622 | 114 | 0.633395 |
4a21929f852763fc96bba32aecdd4d662df90212 | 1,936 | py | Python | recipes/migrations/0002_auto_20200608_1307.py | Panda4817/MySousChef | 64c3967566b3834d578406884ee6b4a3807b21f8 | [
"MIT"
] | 1 | 2021-02-25T17:54:28.000Z | 2021-02-25T17:54:28.000Z | recipes/migrations/0002_auto_20200608_1307.py | Panda4817/MySousChef | 64c3967566b3834d578406884ee6b4a3807b21f8 | [
"MIT"
] | null | null | null | recipes/migrations/0002_auto_20200608_1307.py | Panda4817/MySousChef | 64c3967566b3834d578406884ee6b4a3807b21f8 | [
"MIT"
] | null | null | null | # Generated by Django 3.0.6 on 2020-06-08 12:07
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('recipes', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='usertopantry',
name='added',
field=models.DateTimeField(default=django.utils.timezone.now),
),
migrations.AddField(
model_name='usertopantry',
name='bestbefore',
field=models.DateTimeField(default=django.utils.timezone.now),
),
migrations.AddField(
model_name='usertopantry',
name='opened',
field=models.DateTimeField(default=django.utils.timezone.now),
),
migrations.AddField(
model_name='usertopantry',
name='use_within',
field=models.CharField(blank=True, max_length=64),
),
migrations.AddField(
model_name='usertopantry',
name='usebefore',
field=models.DateTimeField(default=django.utils.timezone.now),
),
migrations.RemoveField(
model_name='usertopantry',
name='pantry_item',
),
migrations.AddField(
model_name='usertopantry',
name='pantry_item',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='recipes.Pantry'),
preserve_default=False,
),
migrations.AlterField(
model_name='usertopantry',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
| 33.37931 | 114 | 0.588326 |
4a2192c53972ff8a87c0055bb7cc781b7dcf4c9d | 1,423 | py | Python | exercicio-04.py | 1philipe/atividade | 9bb67ef195336de23661f9d2f32706426a16fc3a | [
"MIT"
] | null | null | null | exercicio-04.py | 1philipe/atividade | 9bb67ef195336de23661f9d2f32706426a16fc3a | [
"MIT"
] | null | null | null | exercicio-04.py | 1philipe/atividade | 9bb67ef195336de23661f9d2f32706426a16fc3a | [
"MIT"
] | null | null | null | #encoding: utf-8
import numpy as np
import matplotlib.pyplot as plt; plt.rcdefaults()
# exercise 04
age=["0-4","5-9","10-14","15-19","20-24","25-29","30-34","35-39",\
"40-44","45-49","50-54","55-59","60-64","65-69","70-74","75-79",\
"80-84","85-89","90-94","95-99","100+"]
male_pop=np.array([7016987,7624144,8725413,8558868,8630229,8460995,7717658,\
6766664,6320568,5692014,4834995,3902344,3041035,2224065,\
1667372,1090517,668623,310759,114964,31529,7247])
fem_pop=np.array([6779171,7345231,8441348,8432004,8614963,8643419,\
8026854,7121915,6688796,6141338,5305407,4373877,3468085,\
2616745,2074264,1472930,998349,508724,211594,66806,16989])
fig, axes = plt.subplots(ncols=2, sharey=True, sharex=False)
axes[0].barh(age, male_pop, align='center', color='blue')
axes[1].barh(age, fem_pop, align='center', color='red')
axes[0].invert_xaxis()
axes[0].yaxis.tick_right()
axes[0].set(title="Male Population")
axes[1].set(title="Female Population")
plt.sca(axes[0])
plt.xticks([ 0, 2000000, 4000000, 6000000, 8000000 ], \
[ "0", "2 mi", "4 mi", "6 mi", "8 mi" ] )
plt.sca(axes[1])
plt.xticks([ 0, 2000000, 4000000, 6000000, 8000000 ], \
[ "0", "2 mi", "4 mi", "6 mi", "8 mi" ] )
graph=plt.figure(1)
graph.suptitle("Brazilian Population per Sex and Age Group", fontsize=15)
fig.tight_layout()
fig.subplots_adjust(wspace=0.09)
plt.show() | 36.487179 | 76 | 0.653549 |
4a2192c829490d9c8fc73ae07947cfa756a5ffb2 | 6,297 | py | Python | Lib/gds/burp/dispatchers.py | Raz0r/jython-burp-api | 0fdd8dd3968e356cb9d473c2499aae7a9cbfccdc | [
"0BSD"
] | 1 | 2017-06-03T19:59:41.000Z | 2017-06-03T19:59:41.000Z | Lib/gds/burp/dispatchers.py | Raz0r/jython-burp-api | 0fdd8dd3968e356cb9d473c2499aae7a9cbfccdc | [
"0BSD"
] | null | null | null | Lib/gds/burp/dispatchers.py | Raz0r/jython-burp-api | 0fdd8dd3968e356cb9d473c2499aae7a9cbfccdc | [
"0BSD"
] | null | null | null | # -*- coding: utf-8 -*-
'''
gds.burp.dispatchers
~~~~~~~~~~~~~~~~~~~~
'''
from .api import INewScanIssueHandler, \
IIntruderRequestHandler, IIntruderResponseHandler, \
IProxyRequestHandler, IProxyResponseHandler, \
IRepeaterRequestHandler, IRepeaterResponseHandler, \
IScannerRequestHandler, IScannerResponseHandler, \
ISequencerRequestHandler, ISequencerResponseHandler, \
ISpiderRequestHandler, ISpiderResponseHandler
from .config import OrderedExtensionsOption
from .core import Component, ExtensionPoint
from .models import HttpRequest
import logging
class NewScanIssueDispatcher(Component):
dispatchers = ExtensionPoint(INewScanIssueHandler)
def newScanIssue(self, issue):
for dispatch in self.dispatchers:
if self.log.isEnabledFor(logging.DEBUG):
self.log.debug('Dispatching new scan issue details via %s',
dispatch.__class__.__name__)
dispatch.newScanIssue(issue)
return
class PluginDispatcher(Component):
intruderRequest = OrderedExtensionsOption('plugins', 'intruder.request',
IIntruderRequestHandler, None, False,
        '''List of components implementing the `IIntruderRequestHandler`,
in the order in which they will be applied. These components
handle processing of HTTP requests directly before Burp Intruder
sends it on the wire.''')
intruderResponse = OrderedExtensionsOption('plugins', 'intruder.response',
IIntruderResponseHandler, None, False,
        '''List of components implementing the `IIntruderResponseHandler`,
in the order in which they will be applied. These components
handle processing of HTTP responses directly after Burp Intruder
        receives it off the wire.''')
proxyRequest = OrderedExtensionsOption('plugins', 'proxy.request',
IProxyRequestHandler, None, False,
        '''List of components implementing the `IProxyRequestHandler`,
in the order in which they will be applied. These components
handle processing of HTTP requests directly before Burp Proxy
sends it on the wire.''')
proxyResponse = OrderedExtensionsOption('plugins', 'proxy.response',
IProxyResponseHandler, None, False,
        '''List of components implementing the `IProxyResponseHandler`,
in the order in which they will be applied. These components
handle processing of HTTP responses directly after Burp Proxy
        receives it off the wire.''')
repeaterRequest = OrderedExtensionsOption('plugins', 'repeater.request',
IRepeaterRequestHandler, None, False,
        '''List of components implementing the `IRepeaterRequestHandler`,
in the order in which they will be applied. These components
handle processing of HTTP requests directly before Burp Repeater
sends it on the wire.''')
repeaterResponse = OrderedExtensionsOption('plugins', 'repeater.response',
IRepeaterResponseHandler, None, False,
        '''List of components implementing the `IRepeaterResponseHandler`,
in the order in which they will be applied. These components
handle processing of HTTP responses directly after Burp Repeater
        receives it off the wire.''')
scannerRequest = OrderedExtensionsOption('plugins', 'scanner.request',
IScannerRequestHandler, None, False,
        '''List of components implementing the `IScannerRequestHandler`,
in the order in which they will be applied. These components
handle processing of HTTP requests directly before Burp Scanner
sends it on the wire.''')
scannerResponse = OrderedExtensionsOption('plugins', 'scanner.response',
IScannerResponseHandler, None, False,
        '''List of components implementing the `IScannerResponseHandler`,
in the order in which they will be applied. These components
handle processing of HTTP responses directly after Burp Scanner
        receives it off the wire.''')
sequencerRequest = OrderedExtensionsOption('plugins', 'sequencer.request',
ISequencerRequestHandler, None, False,
        '''List of components implementing the `ISequencerRequestHandler`,
in the order in which they will be applied. These components
handle processing of HTTP requests directly before Burp Sequencer
sends it on the wire.''')
sequencerResponse = OrderedExtensionsOption('plugins', 'sequencer.response',
ISequencerResponseHandler, None, False,
        '''List of components implementing the `ISequencerResponseHandler`,
in the order in which they will be applied. These components
handle processing of HTTP responses directly after Burp Sequencer
        receives it off the wire.''')
spiderRequest = OrderedExtensionsOption('plugins', 'spider.request',
ISpiderRequestHandler, None, False,
        '''List of components implementing the `ISpiderRequestHandler`,
in the order in which they will be applied. These components
handle processing of HTTP requests directly before Burp Spider
sends it on the wire.''')
spiderResponse = OrderedExtensionsOption('plugins', 'spider.response',
ISpiderResponseHandler, None, False,
        '''List of components implementing the `ISpiderResponseHandler`,
in the order in which they will be applied. These components
handle processing of HTTP responses directly after Burp Spider
        receives it off the wire.''')
def processHttpMessage(self, toolName, messageIsRequest, messageInfo):
handlers = ''.join([toolName.lower(),
'Request' if messageIsRequest else 'Response'])
method = ''.join(['process',
'Request' if messageIsRequest else 'Response'])
request = HttpRequest(messageInfo, _burp=self.burp)
for handler in getattr(self, handlers):
if self.log.isEnabledFor(logging.DEBUG):
self.log.debug('Dispatching handler via %s: %s.%s(%r)',
toolName, handler.__class__.__name__,
method, request)
getattr(handler, method)(request)
return
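

# --- Illustrative sketch (editor addition, never invoked by this module): the
# kind of component the 'proxy.request' option above expects.  The method name
# processRequest() follows directly from the getattr() dispatch in
# processHttpMessage(); how a component declares that it provides
# IProxyRequestHandler (a Trac-style implements() declaration living in
# gds.burp.core) is deliberately left out because it is not defined in this file.
def _example_proxy_request_handler():
    class ProxyRequestLogger(Component):
        """Log every request Burp Proxy is about to put on the wire."""

        def processRequest(self, request):
            # 'request' is the HttpRequest wrapper built in processHttpMessage()
            self.log.debug('outgoing proxy request: %r', request)

    return ProxyRequestLogger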
| 44.34507 | 80 | 0.688264 |
4a2193c1a0babb725bdc738f6f4e7ff811b1f58c | 5,538 | py | Python | sympy/multipledispatch/tests/test_dispatcher.py | iamabhishek0/sympy | c461bd1ff9d178d1012b04fd0bf37ee3abb02cdd | [
"BSD-3-Clause"
] | 15 | 2020-06-29T08:33:39.000Z | 2022-02-12T00:28:51.000Z | sympy/multipledispatch/tests/test_dispatcher.py | iamabhishek0/sympy | c461bd1ff9d178d1012b04fd0bf37ee3abb02cdd | [
"BSD-3-Clause"
] | 13 | 2020-03-24T17:53:51.000Z | 2022-02-10T20:01:14.000Z | sympy/multipledispatch/tests/test_dispatcher.py | iamabhishek0/sympy | c461bd1ff9d178d1012b04fd0bf37ee3abb02cdd | [
"BSD-3-Clause"
] | 11 | 2020-06-29T08:40:24.000Z | 2022-02-24T17:39:16.000Z | from sympy.multipledispatch.dispatcher import (Dispatcher, MDNotImplementedError,
MethodDispatcher, halt_ordering,
restart_ordering)
from sympy.utilities.pytest import raises, XFAIL, warns
def identity(x):
return x
def inc(x):
return x + 1
def dec(x):
return x - 1
def test_dispatcher():
f = Dispatcher('f')
f.add((int,), inc)
f.add((float,), dec)
with warns(DeprecationWarning):
assert f.resolve((int,)) == inc
assert f.dispatch(int) is inc
assert f(1) == 2
assert f(1.0) == 0.0
def test_union_types():
f = Dispatcher('f')
f.register((int, float))(inc)
assert f(1) == 2
assert f(1.0) == 2.0
def test_dispatcher_as_decorator():
f = Dispatcher('f')
@f.register(int)
def inc(x):
return x + 1
@f.register(float)
def inc(x):
return x - 1
assert f(1) == 2
assert f(1.0) == 0.0
def test_register_instance_method():
class Test(object):
__init__ = MethodDispatcher('f')
@__init__.register(list)
def _init_list(self, data):
self.data = data
@__init__.register(object)
def _init_obj(self, datum):
self.data = [datum]
a = Test(3)
b = Test([3])
assert a.data == b.data
def test_on_ambiguity():
f = Dispatcher('f')
def identity(x): return x
ambiguities = [False]
def on_ambiguity(dispatcher, amb):
ambiguities[0] = True
f.add((object, object), identity, on_ambiguity=on_ambiguity)
assert not ambiguities[0]
f.add((object, float), identity, on_ambiguity=on_ambiguity)
assert not ambiguities[0]
f.add((float, object), identity, on_ambiguity=on_ambiguity)
assert ambiguities[0]
@XFAIL
def test_raise_error_on_non_class():
f = Dispatcher('f')
assert raises(TypeError, lambda: f.add((1,), inc))
def test_docstring():
def one(x, y):
""" Docstring number one """
return x + y
def two(x, y):
""" Docstring number two """
return x + y
def three(x, y):
return x + y
master_doc = 'Doc of the multimethod itself'
f = Dispatcher('f', doc=master_doc)
f.add((object, object), one)
f.add((int, int), two)
f.add((float, float), three)
assert one.__doc__.strip() in f.__doc__
assert two.__doc__.strip() in f.__doc__
assert f.__doc__.find(one.__doc__.strip()) < \
f.__doc__.find(two.__doc__.strip())
assert 'object, object' in f.__doc__
assert master_doc in f.__doc__
def test_help():
def one(x, y):
""" Docstring number one """
return x + y
def two(x, y):
""" Docstring number two """
return x + y
def three(x, y):
""" Docstring number three """
return x + y
master_doc = 'Doc of the multimethod itself'
f = Dispatcher('f', doc=master_doc)
f.add((object, object), one)
f.add((int, int), two)
f.add((float, float), three)
assert f._help(1, 1) == two.__doc__
assert f._help(1.0, 2.0) == three.__doc__
def test_source():
def one(x, y):
""" Docstring number one """
return x + y
def two(x, y):
""" Docstring number two """
return x - y
master_doc = 'Doc of the multimethod itself'
f = Dispatcher('f', doc=master_doc)
f.add((int, int), one)
f.add((float, float), two)
assert 'x + y' in f._source(1, 1)
assert 'x - y' in f._source(1.0, 1.0)
@XFAIL
def test_source_raises_on_missing_function():
f = Dispatcher('f')
assert raises(TypeError, lambda: f.source(1))
def test_halt_method_resolution():
g = [0]
def on_ambiguity(a, b):
g[0] += 1
f = Dispatcher('f')
halt_ordering()
def func(*args):
pass
f.add((int, object), func)
f.add((object, int), func)
assert g == [0]
restart_ordering(on_ambiguity=on_ambiguity)
assert g == [1]
assert set(f.ordering) == set([(int, object), (object, int)])
@XFAIL
def test_no_implementations():
f = Dispatcher('f')
assert raises(NotImplementedError, lambda: f('hello'))
@XFAIL
def test_register_stacking():
f = Dispatcher('f')
@f.register(list)
@f.register(tuple)
def rev(x):
return x[::-1]
assert f((1, 2, 3)) == (3, 2, 1)
assert f([1, 2, 3]) == [3, 2, 1]
assert raises(NotImplementedError, lambda: f('hello'))
assert rev('hello') == 'olleh'
def test_dispatch_method():
f = Dispatcher('f')
@f.register(list)
def rev(x):
return x[::-1]
@f.register(int, int)
def add(x, y):
return x + y
class MyList(list):
pass
assert f.dispatch(list) is rev
assert f.dispatch(MyList) is rev
assert f.dispatch(int, int) is add
@XFAIL
def test_not_implemented():
f = Dispatcher('f')
@f.register(object)
def _(x):
return 'default'
@f.register(int)
def _(x):
if x % 2 == 0:
return 'even'
else:
raise MDNotImplementedError()
assert f('hello') == 'default' # default behavior
assert f(2) == 'even' # specialized behavior
    assert f(3) == 'default'  # fall back to default behavior
assert raises(NotImplementedError, lambda: f(1, 2))
@XFAIL
def test_not_implemented_error():
f = Dispatcher('f')
@f.register(float)
def _(a):
raise MDNotImplementedError()
assert raises(NotImplementedError, lambda: f(1.0))
| 20.435424 | 81 | 0.578909 |
4a21945736fdcb5570cf2bf874e72b8ef25afef1 | 3,184 | py | Python | nvtabular/ops/drop_low_cardinality.py | mikemckiernan/NVTabular | efb93340653c4a69b1c3a60c88a82116d7906148 | [
"Apache-2.0"
] | 124 | 2021-10-08T19:59:52.000Z | 2022-03-27T22:13:26.000Z | nvtabular/ops/drop_low_cardinality.py | NVIDIA-Merlin/NVTabular | 650f6923f0533dcfa18f6e181116ada706a41f6a | [
"Apache-2.0"
] | 325 | 2021-10-08T19:58:49.000Z | 2022-03-31T21:27:39.000Z | nvtabular/ops/drop_low_cardinality.py | mikemckiernan/NVTabular | efb93340653c4a69b1c3a60c88a82116d7906148 | [
"Apache-2.0"
] | 26 | 2021-10-13T21:43:22.000Z | 2022-03-29T14:33:58.000Z | #
# Copyright (c) 2021, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from merlin.core.dispatch import DataFrameType
from merlin.schema import Schema, Tags
from .operator import ColumnSelector, Operator
class DropLowCardinality(Operator):
"""
DropLowCardinality drops low cardinality categorical columns. This requires the
cardinality of these columns to be known in the schema - for instance by
first encoding these columns using Categorify.
"""
def __init__(self, min_cardinality=2):
super().__init__()
self.min_cardinality = min_cardinality
def transform(self, col_selector: ColumnSelector, df: DataFrameType) -> DataFrameType:
"""
Selects all non-categorical columns and any categorical columns
of at least the minimum cardinality from the dataframe.
Parameters
----------
col_selector : ColumnSelector
The columns to select.
df : DataFrameType
The dataframe to transform
Returns
-------
DataFrameType
Dataframe with only the selected columns.
"""
return super()._get_columns(df, col_selector)
def compute_selector(
self,
input_schema: Schema,
selector: ColumnSelector,
parents_selector: ColumnSelector,
dependencies_selector: ColumnSelector,
) -> ColumnSelector:
"""
Checks the cardinality of the input columns and drops any categorical
columns with cardinality less than the specified minimum.
Parameters
----------
input_schema : Schema
The current node's input schema
selector : ColumnSelector
The current node's selector
parents_selector : ColumnSelector
A selector for the output columns of the current node's parents
dependencies_selector : ColumnSelector
A selector for the output columns of the current node's dependencies
Returns
-------
ColumnSelector
Selector that contains all non-categorical columns and any categorical columns
of at least the minimum cardinality.
"""
self._validate_matching_cols(input_schema, selector, self.compute_selector.__name__)
cols_to_keep = [col for col in input_schema if Tags.CATEGORICAL not in col.tags]
for col in input_schema:
if Tags.CATEGORICAL in col.tags:
domain = col.int_domain
if not domain or domain.max > self.min_cardinality:
cols_to_keep.append(col.name)
return ColumnSelector(cols_to_keep)
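

# --- Illustrative usage sketch (editor addition, never invoked by this module).
# Column names and the toy dataframe are hypothetical.  As the class docstring
# notes, the op is chained after Categorify so each categorical column's
# cardinality is recorded in the schema before DropLowCardinality inspects it.
def _demo_drop_low_cardinality():
    import pandas as pd

    import nvtabular as nvt

    df = pd.DataFrame(
        {
            "constant_cat": ["a", "a", "a", "a"],  # single category -> expected to be dropped
            "useful_cat": ["a", "b", "c", "d"],    # several categories -> expected to be kept
            "price": [1.0, 2.0, 3.0, 4.0],
        }
    )
    cats = ["constant_cat", "useful_cat"] >> nvt.ops.Categorify() >> DropLowCardinality(min_cardinality=2)
    workflow = nvt.Workflow(cats)
    return workflow.fit_transform(nvt.Dataset(df)).to_ddf().compute()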
| 34.989011 | 92 | 0.670854 |
4a2194f152870e8579896cc5f51958edf696d1c2 | 8,254 | py | Python | diayn_seq_code_revised/main_revised_discrete_highdim_cheetah_vthree.py | fgitmichael/SelfSupevisedSkillDiscovery | 60eee11cfd67046190dd2784bf40e97bdbed9d40 | [
"MIT"
] | null | null | null | diayn_seq_code_revised/main_revised_discrete_highdim_cheetah_vthree.py | fgitmichael/SelfSupevisedSkillDiscovery | 60eee11cfd67046190dd2784bf40e97bdbed9d40 | [
"MIT"
] | 6 | 2021-02-02T23:00:02.000Z | 2022-01-13T03:13:51.000Z | diayn_seq_code_revised/main_revised_discrete_highdim_cheetah_vthree.py | fgitmichael/SelfSupevisedSkillDiscovery | 60eee11cfd67046190dd2784bf40e97bdbed9d40 | [
"MIT"
] | null | null | null | import argparse
import torch
import numpy as np
import copy
import gym
from gym.envs.mujoco.half_cheetah_v3 import HalfCheetahEnv as HalfCheetahVersionThreeEnv
from my_utils.env_pixel_wrapper.mujoco_pixel_wrapper import MujocoPixelWrapper
import rlkit.torch.pytorch_util as ptu
from rlkit.launchers.launcher_util import setup_logger
from self_supervised.utils.writer import MyWriterWithActivation
from self_supervised.network.flatten_mlp import FlattenMlp as \
MyFlattenMlp
from self_supervised.env_wrapper.rlkit_wrapper import NormalizedBoxEnvWrapper
from self_supervised.env_wrapper.pixel_wrapper import PixelNormalizedBoxEnvWrapper
from self_sup_combined.base.writer.diagnostics_writer import DiagnosticsWriter
from self_sup_comb_discrete_skills.memory.replay_buffer_discrete_skills import \
SelfSupervisedEnvSequenceReplayBufferDiscreteSkills
#from diayn_rnn_seq_rnn_stepwise_classifier.networks.bi_rnn_stepwise_seqwise import \
# BiRnnStepwiseSeqWiseClassifier
from diayn_seq_code_revised.networks.bi_rnn_stepwise_seqwise_obs_dimension_selection \
import RnnStepwiseSeqwiseClassifierObsDimSelect
from diayn_seq_code_revised.data_collector.seq_collector_revised_discrete_skills import \
SeqCollectorRevisedDiscreteSkills
from diayn_seq_code_revised.policies.skill_policy import \
SkillTanhGaussianPolicyRevised, MakeDeterministicRevised
from diayn_seq_code_revised.data_collector.skill_selector import SkillSelectorDiscrete
from diayn_seq_code_revised.trainer.trainer_seqwise_stepwise_revised import \
DIAYNAlgoStepwiseSeqwiseRevisedTrainer
from diayn_seq_code_revised.algo.seqwise_algo_revised_highdim import \
SeqwiseAlgoRevisedDiscreteSkillsHighdim
from diayn_seq_code_revised.data_collector.seq_collector_revised_discreteskills_pixel \
import SeqCollectorRevisedDiscreteSkillsPixel
from diayn_rnn_seq_rnn_stepwise_classifier.networks.bi_rnn_stepwise_seqwise import \
BiRnnStepwiseSeqWiseClassifier
from diayn_seq_code_revised.policies.skill_policy_obsdim_select \
import SkillTanhGaussianPolicyRevisedObsSelect
from diayn_no_oh.utils.hardcoded_grid_two_dim import NoohGridCreator, OhGridCreator
def experiment(variant, args):
expl_env = HalfCheetahVersionThreeEnv(
exclude_current_positions_from_observation=False
)
eval_env = copy.deepcopy(expl_env)
render_kwargs = dict(
width=64,
height=64,
)
pixel_env = MujocoPixelWrapper(
env=copy.deepcopy(eval_env),
render_kwargs=render_kwargs,
)
obs_dim = expl_env.observation_space.low.size
action_dim = eval_env.action_space.low.size
oh_grid_creator = OhGridCreator(
num_skills=args.skill_dim,
)
get_oh_grid = oh_grid_creator.get_grid
seq_len = 120
skill_dim = args.skill_dim
num_skills = args.skill_dim
hidden_size_rnn = 20
variant['algorithm_kwargs']['batch_size'] //= seq_len
pos_encoding = "transformer"
obs_dim_used_df = (0, 1, 2)
obs_dim_used_policy = tuple(i for i in range(1, obs_dim))
sep_str = " | "
run_comment = sep_str
run_comment += "seq_len: {}".format(seq_len) + sep_str
run_comment += "seq wise step wise revised high dim" + sep_str
run_comment += "hidden rnn_dim: {}{}".format(hidden_size_rnn, sep_str)
run_comment += "pos encoding: {}{}".format(pos_encoding, sep_str)
run_comment += "include current positions{}".format(sep_str)
seed = 0
    torch.manual_seed(seed)
expl_env.seed(seed)
eval_env.seed(seed)
np.random.seed(seed)
M = variant['layer_size']
qf1 = MyFlattenMlp(
input_size=obs_dim + action_dim + skill_dim,
output_size=1,
hidden_sizes=[M, M],
)
qf2 = MyFlattenMlp(
input_size=obs_dim + action_dim + skill_dim,
output_size=1,
hidden_sizes=[M, M],
)
target_qf1 = MyFlattenMlp(
input_size=obs_dim + action_dim + skill_dim,
output_size=1,
hidden_sizes=[M, M],
)
target_qf2 = MyFlattenMlp(
input_size=obs_dim + action_dim + skill_dim,
output_size=1,
hidden_sizes=[M, M],
)
df = BiRnnStepwiseSeqWiseClassifier(
input_size=obs_dim,
output_size=num_skills,
hidden_size_rnn=hidden_size_rnn,
hidden_sizes=[M, M],
seq_len=seq_len,
pos_encoder_variant=pos_encoding,
dropout=0.5,
obs_dims_used=obs_dim_used_df,
)
policy = SkillTanhGaussianPolicyRevisedObsSelect(
obs_dim=len(obs_dim_used_policy),
action_dim=action_dim,
skill_dim=skill_dim,
hidden_sizes=[M, M],
obs_dim_real=obs_dim,
obs_dims_selected=obs_dim_used_policy,
)
eval_policy = MakeDeterministicRevised(policy)
skill_selector = SkillSelectorDiscrete(
get_skill_grid_fun=get_oh_grid
)
eval_path_collector = SeqCollectorRevisedDiscreteSkills(
eval_env,
eval_policy,
max_seqs=50,
skill_selector=skill_selector
)
expl_step_collector = SeqCollectorRevisedDiscreteSkills(
expl_env,
policy,
max_seqs=50,
skill_selector=skill_selector
)
seq_eval_collector = SeqCollectorRevisedDiscreteSkills(
env=eval_env,
policy=eval_policy,
max_seqs=50,
skill_selector=skill_selector
)
seqpixel_eval_collector = SeqCollectorRevisedDiscreteSkillsPixel(
env=pixel_env,
policy=eval_policy,
max_seqs=50,
skill_selector=skill_selector,
)
replay_buffer = SelfSupervisedEnvSequenceReplayBufferDiscreteSkills(
max_replay_buffer_size=variant['replay_buffer_size'],
seq_len=seq_len,
mode_dim=skill_dim,
env=expl_env,
)
trainer = DIAYNAlgoStepwiseSeqwiseRevisedTrainer(
env=eval_env,
policy=policy,
qf1=qf1,
qf2=qf2,
df=df,
target_qf1=target_qf1,
target_qf2=target_qf2,
**variant['trainer_kwargs']
)
writer = MyWriterWithActivation(
seed=seed,
log_dir='logshighdim',
run_comment=run_comment
)
diagno_writer = DiagnosticsWriter(
writer=writer,
log_interval=3
)
algorithm = SeqwiseAlgoRevisedDiscreteSkillsHighdim(
trainer=trainer,
exploration_env=expl_env,
evaluation_env=eval_env,
exploration_data_collector=expl_step_collector,
evaluation_data_collector=eval_path_collector,
seqpixel_eval_collector=seqpixel_eval_collector,
replay_buffer=replay_buffer,
seq_len=seq_len,
seq_len_eval=seq_len//2,
diagnostic_writer=diagno_writer,
seq_eval_collector=seq_eval_collector,
**variant['algorithm_kwargs']
)
algorithm.to(ptu.device)
algorithm.train()
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--env',
type=str,
default="HalfCheetah-v2",
help='environment'
)
parser.add_argument('--skill_dim',
type=int,
default=10,
help='skill dimension'
)
args = parser.parse_args()
# noinspection PyTypeChecker
variant = dict(
algorithm="DIAYN",
version="normal",
layer_size=256,
replay_buffer_size=int(5000),
algorithm_kwargs=dict(
num_epochs=1000,
num_eval_steps_per_epoch=5000,
num_trains_per_train_loop=10,
num_expl_steps_per_train_loop=10,
min_num_steps_before_training=1000,
max_path_length=1000,
batch_size=1096,
),
trainer_kwargs=dict(
discount=0.99,
soft_target_tau=5e-3,
target_update_period=1,
policy_lr=3E-4,
qf_lr=3E-4,
reward_scale=1,
df_lr_seq=1E-3,
df_lr_step=1E-3,
use_automatic_entropy_tuning=True,
),
)
setup_logger('DIAYN_' + str(args.skill_dim) + '_' + args.env, variant=variant)
ptu.set_gpu_mode(True) # optionally set the GPU (default=False)
experiment(variant, args)
| 32.884462 | 89 | 0.697117 |
4a21950af33caf28d45dba92c35bfe1e3ded37b8 | 6,310 | py | Python | isi_sdk_8_0_1/isi_sdk_8_0_1/models/result_histogram.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 24 | 2018-06-22T14:13:23.000Z | 2022-03-23T01:21:26.000Z | isi_sdk_8_0_1/isi_sdk_8_0_1/models/result_histogram.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 46 | 2018-04-30T13:28:22.000Z | 2022-03-21T21:11:07.000Z | isi_sdk_8_0_1/isi_sdk_8_0_1/models/result_histogram.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 29 | 2018-06-19T00:14:04.000Z | 2022-02-08T17:51:19.000Z | # coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 4
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from isi_sdk_8_0_1.models.result_histogram_histogram_item import ResultHistogramHistogramItem # noqa: F401,E501
class ResultHistogram(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'atime_enabled': 'bool',
'attribute_count': 'int',
'begin_time': 'int',
'histogram': 'list[ResultHistogramHistogramItem]'
}
attribute_map = {
'atime_enabled': 'atime_enabled',
'attribute_count': 'attribute_count',
'begin_time': 'begin_time',
'histogram': 'histogram'
}
def __init__(self, atime_enabled=None, attribute_count=None, begin_time=None, histogram=None): # noqa: E501
"""ResultHistogram - a model defined in Swagger""" # noqa: E501
self._atime_enabled = None
self._attribute_count = None
self._begin_time = None
self._histogram = None
self.discriminator = None
self.atime_enabled = atime_enabled
self.attribute_count = attribute_count
self.begin_time = begin_time
self.histogram = histogram
@property
def atime_enabled(self):
"""Gets the atime_enabled of this ResultHistogram. # noqa: E501
Access time enabled. # noqa: E501
:return: The atime_enabled of this ResultHistogram. # noqa: E501
:rtype: bool
"""
return self._atime_enabled
@atime_enabled.setter
def atime_enabled(self, atime_enabled):
"""Sets the atime_enabled of this ResultHistogram.
Access time enabled. # noqa: E501
:param atime_enabled: The atime_enabled of this ResultHistogram. # noqa: E501
:type: bool
"""
if atime_enabled is None:
raise ValueError("Invalid value for `atime_enabled`, must not be `None`") # noqa: E501
self._atime_enabled = atime_enabled
@property
def attribute_count(self):
"""Gets the attribute_count of this ResultHistogram. # noqa: E501
User attribute count. # noqa: E501
:return: The attribute_count of this ResultHistogram. # noqa: E501
:rtype: int
"""
return self._attribute_count
@attribute_count.setter
def attribute_count(self, attribute_count):
"""Sets the attribute_count of this ResultHistogram.
User attribute count. # noqa: E501
:param attribute_count: The attribute_count of this ResultHistogram. # noqa: E501
:type: int
"""
if attribute_count is None:
raise ValueError("Invalid value for `attribute_count`, must not be `None`") # noqa: E501
self._attribute_count = attribute_count
@property
def begin_time(self):
"""Gets the begin_time of this ResultHistogram. # noqa: E501
Unix Epoch time of start of results collection job. # noqa: E501
:return: The begin_time of this ResultHistogram. # noqa: E501
:rtype: int
"""
return self._begin_time
@begin_time.setter
def begin_time(self, begin_time):
"""Sets the begin_time of this ResultHistogram.
Unix Epoch time of start of results collection job. # noqa: E501
:param begin_time: The begin_time of this ResultHistogram. # noqa: E501
:type: int
"""
if begin_time is None:
raise ValueError("Invalid value for `begin_time`, must not be `None`") # noqa: E501
self._begin_time = begin_time
@property
def histogram(self):
"""Gets the histogram of this ResultHistogram. # noqa: E501
Histogram data of specified file count parameter. # noqa: E501
:return: The histogram of this ResultHistogram. # noqa: E501
:rtype: list[ResultHistogramHistogramItem]
"""
return self._histogram
@histogram.setter
def histogram(self, histogram):
"""Sets the histogram of this ResultHistogram.
Histogram data of specified file count parameter. # noqa: E501
:param histogram: The histogram of this ResultHistogram. # noqa: E501
:type: list[ResultHistogramHistogramItem]
"""
if histogram is None:
raise ValueError("Invalid value for `histogram`, must not be `None`") # noqa: E501
self._histogram = histogram
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ResultHistogram):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 30.780488 | 112 | 0.61046 |
4a21953cf4445d32913c537a706588e8bb5ba9ad | 32,225 | py | Python | __main__.py | labscript-suite-temp-archive/blacs-fork--zachglassman-blacs--forked-from--labscript_suite-blacs | 29ae7f966886017fadab02ac217f266175a4ad84 | [
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | __main__.py | labscript-suite-temp-archive/blacs-fork--zachglassman-blacs--forked-from--labscript_suite-blacs | 29ae7f966886017fadab02ac217f266175a4ad84 | [
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | __main__.py | labscript-suite-temp-archive/blacs-fork--zachglassman-blacs--forked-from--labscript_suite-blacs | 29ae7f966886017fadab02ac217f266175a4ad84 | [
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | #####################################################################
# #
# /main.pyw #
# #
# Copyright 2013, Monash University #
# #
# This file is part of the program BLACS, in the labscript suite #
# (see http://labscriptsuite.org), and is licensed under the #
# Simplified BSD License. See the license.txt file in the root of #
# the project for the full license. #
# #
#####################################################################
import cgi
import ctypes
import logging, logging.handlers
import os
import socket
import subprocess
import sys
import threading
import time
import signal
# Quit on ctrl-c
signal.signal(signal.SIGINT, signal.SIG_DFL)
# check if we should delay!
try:
if '--delay' in sys.argv:
delay = int(sys.argv[sys.argv.index('--delay')+1])
time.sleep(delay)
except:
print 'You should specify "--delay x" where x is an integer'
lower_argv = [s.lower() for s in sys.argv]
if 'pyside' in lower_argv:
# Import Qt
from PySide.QtCore import *
from PySide.QtGui import *
# from PySide.QtUiTools import QUiLoader
elif 'pyqt' in lower_argv:
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.QtCore import pyqtSignal as Signal
else:
try:
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.QtCore import pyqtSignal as Signal
except Exception:
from PySide.QtCore import *
from PySide.QtGui import *
try:
from labscript_utils import check_version
except ImportError:
raise ImportError('Require labscript_utils > 2.1.0')
check_version('labscript_utils', '2.2', '3')
check_version('qtutils', '1.5.1', '2')
check_version('zprocess', '1.1.2', '2')
check_version('labscript_devices', '2.0', '3')
# Pythonlib imports
### Must be in this order
import zprocess.locking, labscript_utils.h5_lock, h5py
zprocess.locking.set_client_process_name('BLACS')
###
from zprocess import zmq_get, ZMQServer
from setup_logging import setup_logging
import labscript_utils.shared_drive
# Custom Excepthook
import labscript_utils.excepthook
# Setup logging
logger = setup_logging()
labscript_utils.excepthook.set_logger(logger)
# now log versions (must be after setup logging)
try:
import sys
logger.info('Python Version: %s'%sys.version)
logger.info('Platform: %s'%sys.platform)
except Exception:
logger.error('Failed to find python version')
try:
import sys
logger.info('windows version: %s'%str(sys.getwindowsversion()))
except Exception:
pass
try:
import zmq
logger.info('PyZMQ Version: %s'%zmq.__version__)
logger.info('ZMQ Version: %s'%zmq.zmq_version())
except Exception:
logger.error('Failed to find PyZMQ version')
try:
import h5py
logger.info('h5py Version: %s'%h5py.version.info)
except Exception:
logger.error('Failed to find h5py version')
try:
if 'PySide' in sys.modules.copy():
import PySide
logger.info('PySide Version: %s'%PySide.__version__)
logger.info('Qt Version: %s'%PySide.QtCore.__version__)
else:
import PyQt4.QtCore
logger.info('PyQt Version: %s'%PyQt4.QtCore.PYQT_VERSION_STR)
logger.info('Qt Version: %s'%PyQt4.QtCore.QT_VERSION_STR)
except Exception:
logger.error('Failed to find PySide/PyQt version')
try:
import qtutils
logger.info('qtutils Version: %s'%qtutils.__version__)
except Exception:
logger.error('Failed to find qtutils version')
try:
import zprocess
logger.info('zprocess Version: %s'%zprocess.__version__)
except Exception:
logger.error('Failed to find zprocess version')
try:
import labscript_utils
logger.info('labscript_utils Version: %s'%labscript_utils.__version__)
except Exception:
logger.error('Failed to find labscript_utils version')
try:
import blacs
logger.info('BLACS Version: %s'%blacs.__version__)
except Exception:
logger.error('Failed to find blacs version')
# Connection Table Code
from connections import ConnectionTable
#Draggable Tab Widget Code
from labscript_utils.qtwidgets.dragdroptab import DragDropTabWidget
# Lab config code
from labscript_utils.labconfig import LabConfig, config_prefix
# Qt utils for running functions in the main thread
from qtutils import *
# And for icons:
import qtutils.icons
# Analysis Submission code
from analysis_submission import AnalysisSubmission
# Queue Manager Code
from queue import QueueManager, QueueTreeview
# Module containing hardware compatibility:
import labscript_devices
# Save/restore frontpanel code
from front_panel_settings import FrontPanelSettings
# Notifications system
from notifications import Notifications
# Preferences system
from labscript_utils.settings import Settings
#import settings_pages
import plugins
os.chdir(os.path.abspath(os.path.dirname(__file__)))
def set_win_appusermodel(window_id):
from labscript_utils.winshell import set_appusermodel, appids, app_descriptions
icon_path = os.path.abspath('blacs.ico')
executable = sys.executable.lower()
if not executable.endswith('w.exe'):
executable = executable.replace('.exe', 'w.exe')
relaunch_command = executable + ' ' + os.path.abspath(__file__.replace('.pyc', '.py'))
relaunch_display_name = app_descriptions['blacs']
set_appusermodel(window_id, appids['blacs'], icon_path, relaunch_command, relaunch_display_name)
class BLACSWindow(QMainWindow):
newWindow = Signal(int)
def event(self, event):
result = QMainWindow.event(self, event)
if event.type() == QEvent.WinIdChange:
self.newWindow.emit(self.effectiveWinId())
return result
def closeEvent(self, event):
#print 'aaaaa'
if self.blacs.exit_complete:
event.accept()
if self.blacs._relaunch:
logger.info('relaunching BLACS after quit')
relaunch_delay = '2'
if '--delay' in sys.argv:
index = sys.argv.index('--delay') + 1
try:
int(sys.argv[index])
sys.argv[index] = relaunch_delay
except:
sys.argv.insert(index,relaunch_delay)
else:
sys.argv.append('--delay')
sys.argv.append(relaunch_delay)
subprocess.Popen([sys.executable] + sys.argv)
else:
event.ignore()
logger.info('destroy called')
if not self.blacs.exiting:
self.blacs.exiting = True
self.blacs.queue.manager_running = False
self.blacs.settings.close()
experiment_server.shutdown()
for module_name, plugin in self.blacs.plugins.items():
try:
plugin.close()
except Exception as e:
logger.error('Could not close plugin %s. Error was: %s'%(module_name,str(e)))
inmain_later(self.blacs.on_save_exit)
QTimer.singleShot(100,self.close)
class BLACS(object):
tab_widget_ids = 7
def __init__(self,application):
self.qt_application = application
#self.qt_application.aboutToQuit.connect(self.destroy)
self._relaunch = False
self.exiting = False
self.exit_complete = False
logger.info('Loading BLACS ui')
#self.ui = BLACSWindow(self).ui
loader = UiLoader()
loader.registerCustomWidget(QueueTreeview)
#loader.registerCustomPromotion('BLACS',BLACSWindow)
self.ui = loader.load(os.path.join(os.path.dirname(os.path.realpath(__file__)),'main.ui'), BLACSWindow())
logger.info('BLACS ui loaded')
self.ui.blacs=self
self.tab_widgets = {}
self.exp_config = exp_config # Global variable
self.settings_path = settings_path # Global variable
self.connection_table = connection_table # Global variable
self.connection_table_h5file = self.exp_config.get('paths','connection_table_h5')
self.connection_table_labscript = self.exp_config.get('paths','connection_table_py')
# Setup the UI
self.ui.main_splitter.setStretchFactor(0,0)
self.ui.main_splitter.setStretchFactor(1,1)
self.tablist = {}
self.panes = {}
self.settings_dict = {}
# Find which devices are connected to BLACS, and what their labscript class names are:
logger.info('finding connected devices in connection table')
self.attached_devices = self.connection_table.get_attached_devices()
# Store the panes in a dictionary for easy access
self.panes['tab_top_vertical_splitter'] = self.ui.tab_top_vertical_splitter
self.panes['tab_bottom_vertical_splitter'] = self.ui.tab_bottom_vertical_splitter
self.panes['tab_horizontal_splitter'] = self.ui.tab_horizontal_splitter
self.panes['main_splitter'] = self.ui.main_splitter
# Get settings to restore
logger.info('Loading front panel settings')
self.front_panel_settings = FrontPanelSettings(self.settings_path, self.connection_table)
self.front_panel_settings.setup(self)
settings,question,error,tab_data = self.front_panel_settings.restore()
# TODO: handle question/error cases
logger.info('restoring window data')
self.restore_window(tab_data)
#splash.update_text('Creating the device tabs...')
# Create the notebooks
logger.info('Creating tab widgets')
for i in range(4):
self.tab_widgets[i] = DragDropTabWidget(self.tab_widget_ids)
getattr(self.ui,'tab_container_%d'%i).addWidget(self.tab_widgets[i])
logger.info('Instantiating devices')
for device_name, labscript_device_class_name in self.attached_devices.items():
self.settings_dict.setdefault(device_name,{"device_name":device_name})
# add common keys to settings:
self.settings_dict[device_name]["connection_table"] = self.connection_table
self.settings_dict[device_name]["front_panel_settings"] = settings[device_name] if device_name in settings else {}
self.settings_dict[device_name]["saved_data"] = tab_data[device_name]['data'] if device_name in tab_data else {}
# Instantiate the device
logger.info('instantiating %s'%device_name)
TabClass = labscript_devices.get_BLACS_tab(labscript_device_class_name)
self.tablist[device_name] = TabClass(self.tab_widgets[0],self.settings_dict[device_name])
logger.info('reordering tabs')
self.order_tabs(tab_data)
logger.info('starting analysis submission thread')
# setup analysis submission
self.analysis_submission = AnalysisSubmission(self,self.ui)
if 'analysis_data' not in tab_data['BLACS settings']:
tab_data['BLACS settings']['analysis_data'] = {}
else:
tab_data['BLACS settings']['analysis_data'] = eval(tab_data['BLACS settings']['analysis_data'])
self.analysis_submission.restore_save_data(tab_data['BLACS settings']["analysis_data"])
logger.info('starting queue manager thread')
# Setup the QueueManager
self.queue = QueueManager(self,self.ui)
if 'queue_data' not in tab_data['BLACS settings']:
tab_data['BLACS settings']['queue_data'] = {}
else:
tab_data['BLACS settings']['queue_data'] = eval(tab_data['BLACS settings']['queue_data'])
self.queue.restore_save_data(tab_data['BLACS settings']['queue_data'])
logger.info('instantiating plugins')
# setup the plugin system
settings_pages = []
self.plugins = {}
plugin_settings = eval(tab_data['BLACS settings']['plugin_data']) if 'plugin_data' in tab_data['BLACS settings'] else {}
for module_name, module in plugins.modules.items():
try:
# instantiate the plugin
self.plugins[module_name] = module.Plugin(plugin_settings[module_name] if module_name in plugin_settings else {})
except Exception:
logger.exception('Could not instantiate plugin \'%s\'. Skipping')
blacs_data = {'exp_config':self.exp_config,
'ui':self.ui,
'set_relaunch':self.set_relaunch,
'plugins':self.plugins,
'connection_table_h5file':self.connection_table_h5file,
'connection_table_labscript':self.connection_table_labscript,
'experiment_queue':self.queue
}
def create_menu(parent, menu_parameters):
if 'name' in menu_parameters:
if 'menu_items' in menu_parameters:
child = parent.addMenu(menu_parameters['name'])
for child_menu_params in menu_parameters['menu_items']:
create_menu(child,child_menu_params)
else:
child = parent.addAction(menu_parameters['name'])
if 'action' in menu_parameters:
child.triggered.connect(menu_parameters['action'])
elif 'separator' in menu_parameters:
parent.addSeparator()
# setup the Notification system
logger.info('setting up notification system')
self.notifications = Notifications(blacs_data)
settings_callbacks = []
for module_name, plugin in self.plugins.items():
try:
# Setup settings page
settings_pages.extend(plugin.get_setting_classes())
# Setup menu
if plugin.get_menu_class():
# must store a reference or else the methods called when the menu actions are triggered
                    # (contained in this object) will be garbage collected
menu = plugin.get_menu_class()(blacs_data)
create_menu(self.ui.menubar,menu.get_menu_items())
plugin.set_menu_instance(menu)
# Setup notifications
plugin_notifications = {}
for notification_class in plugin.get_notification_classes():
self.notifications.add_notification(notification_class)
plugin_notifications[notification_class] = self.notifications.get_instance(notification_class)
plugin.set_notification_instances(plugin_notifications)
# Register callbacks
callbacks = plugin.get_callbacks()
# save the settings_changed callback in a separate list for setting up later
if isinstance(callbacks,dict) and 'settings_changed' in callbacks:
settings_callbacks.append(callbacks['settings_changed'])
except Exception:
logger.exception('Plugin \'%s\' error. Plugin may not be functional.'%module_name)
# setup the BLACS preferences system
logger.info('setting up preferences system')
self.settings = Settings(file=self.settings_path, parent = self.ui, page_classes=settings_pages)
for callback in settings_callbacks:
self.settings.register_callback(callback)
# update the blacs_data dictionary with the settings system
blacs_data['settings'] = self.settings
for module_name, plugin in self.plugins.items():
try:
plugin.plugin_setup_complete(blacs_data)
except Exception:
# backwards compatibility for old plugins
try:
plugin.plugin_setup_complete()
logger.warning('Plugin \'%s\' using old API. Please update Plugin.plugin_setup_complete method to accept a dictionary of blacs_data as the only argument.'%module_name)
except Exception:
logger.exception('Plugin \'%s\' error. Plugin may not be functional.'%module_name)
# Connect menu actions
self.ui.actionOpenPreferences.triggered.connect(self.on_open_preferences)
self.ui.actionSave.triggered.connect(self.on_save_front_panel)
self.ui.actionOpen.triggered.connect(self.on_load_front_panel)
# Connect the windows AppId stuff:
if os.name == 'nt':
self.ui.newWindow.connect(set_win_appusermodel)
logger.info('showing UI')
self.ui.show()
def set_relaunch(self,value):
self._relaunch = bool(value)
def restore_window(self,tab_data):
# read out position settings:
try:
# There are some dodgy hacks going on here to try and restore the window position correctly
            # Unfortunately Qt has two ways of measuring the window position, one with the frame/titlebar
# and one without. If you use the one that measures including the titlebar, you don't
# know what the window size was when the window was UNmaximized.
#
# Anyway, no idea if this works cross platform (tested on windows 8)
# Feel free to rewrite this, along with the code in front_panel_settings.py
# which stores the values
#
            # Actually this is a waste of time because if you close when maximized, reopen and then
# de-maximize, the window moves to a random position (not the position it was at before maximizing)
# so bleh!
self.ui.move(tab_data['BLACS settings']["window_xpos"]-tab_data['BLACS settings']['window_frame_width']/2,tab_data['BLACS settings']["window_ypos"]-tab_data['BLACS settings']['window_frame_height']+tab_data['BLACS settings']['window_frame_width']/2)
self.ui.resize(tab_data['BLACS settings']["window_width"],tab_data['BLACS settings']["window_height"])
if 'window_maximized' in tab_data['BLACS settings'] and tab_data['BLACS settings']['window_maximized']:
self.ui.showMaximized()
for pane_name,pane in self.panes.items():
pane.setSizes(tab_data['BLACS settings'][pane_name])
except Exception as e:
logger.warning("Unable to load window and notebook defaults. Exception:"+str(e))
def order_tabs(self,tab_data):
# Move the tabs to the correct notebook
for device_name in self.attached_devices:
notebook_num = 0
if device_name in tab_data:
notebook_num = int(tab_data[device_name]["notebook"])
if notebook_num not in self.tab_widgets:
notebook_num = 0
#Find the notebook the tab is in, and remove it:
for notebook in self.tab_widgets.values():
tab_index = notebook.indexOf(self.tablist[device_name]._ui)
if tab_index != -1:
notebook.removeTab(tab_index)
self.tab_widgets[notebook_num].addTab(self.tablist[device_name]._ui,device_name)
break
# splash.update_text('restoring tab positions...')
# # Now that all the pages are created, reorder them!
for device_name in self.attached_devices:
if device_name in tab_data:
notebook_num = int(tab_data[device_name]["notebook"])
if notebook_num in self.tab_widgets:
self.tab_widgets[notebook_num].tab_bar.moveTab(self.tab_widgets[notebook_num].indexOf(self.tablist[device_name]._ui),int(tab_data[device_name]["page"]))
# # Now that they are in the correct order, set the correct one visible
for device_name,device_data in tab_data.items():
if device_name == 'BLACS settings':
continue
# if the notebook still exists and we are on the entry that is visible
if bool(device_data["visible"]) and int(device_data["notebook"]) in self.tab_widgets:
self.tab_widgets[int(device_data["notebook"])].tab_bar.setCurrentIndex(int(device_data["page"]))
def update_all_tab_settings(self,settings,tab_data):
for device_name,tab in self.tablist.items():
self.settings_dict[device_name]["front_panel_settings"] = settings[device_name] if device_name in settings else {}
self.settings_dict[device_name]["saved_data"] = tab_data[device_name]['data'] if device_name in tab_data else {}
tab.update_from_settings(self.settings_dict[device_name])
def on_load_front_panel(self,*args,**kwargs):
# get the file:
# create file chooser dialog
dialog = QFileDialog(None,"Select file to load", self.exp_config.get('paths','experiment_shot_storage'), "HDF5 files (*.h5 *.hdf5)")
dialog.setViewMode(QFileDialog.Detail)
dialog.setFileMode(QFileDialog.ExistingFile)
if dialog.exec_():
selected_files = dialog.selectedFiles()
filepath = str(selected_files[0])
# Qt has this weird behaviour where if you type in the name of a file that exists
# but does not have the extension you have limited the dialog to, the OK button is greyed out
# but you can hit enter and the file will be selected.
# So we must check the extension of each file here!
if filepath.endswith('.h5') or filepath.endswith('.hdf5'):
try:
# TODO: Warn that this will restore values, but not channels that are locked
message = QMessageBox()
message.setText("""Warning: This will modify front panel values and cause device output values to update.
\nThe queue and files waiting to be sent for analysis will be cleared.
\n
\nNote: Channels that are locked will not be updated.\n\nDo you wish to continue?""")
message.setIcon(QMessageBox.Warning)
message.setWindowTitle("BLACS")
message.setStandardButtons(QMessageBox.Yes|QMessageBox.No)
if message.exec_() == QMessageBox.Yes:
front_panel_settings = FrontPanelSettings(filepath, self.connection_table)
settings,question,error,tab_data = front_panel_settings.restore()
#TODO: handle question/error
# Restore window data
self.restore_window(tab_data)
self.order_tabs(tab_data)
self.update_all_tab_settings(settings,tab_data)
# restore queue data
if 'queue_data' not in tab_data['BLACS settings']:
tab_data['BLACS settings']['queue_data'] = {}
else:
tab_data['BLACS settings']['queue_data'] = eval(tab_data['BLACS settings']['queue_data'])
self.queue.restore_save_data(tab_data['BLACS settings']['queue_data'])
# restore analysis data
if 'analysis_data' not in tab_data['BLACS settings']:
tab_data['BLACS settings']['analysis_data'] = {}
else:
tab_data['BLACS settings']['analysis_data'] = eval(tab_data['BLACS settings']['analysis_data'])
self.analysis_submission.restore_save_data(tab_data['BLACS settings']["analysis_data"])
except Exception as e:
logger.exception("Unable to load the front panel in %s."%(filepath))
message = QMessageBox()
message.setText("Unable to load the front panel. The error encountered is printed below.\n\n%s"%str(e))
message.setIcon(QMessageBox.Information)
message.setWindowTitle("BLACS")
message.exec_()
finally:
dialog.deleteLater()
else:
dialog.deleteLater()
message = QMessageBox()
message.setText("You did not select a file ending with .h5 or .hdf5. Please try again")
message.setIcon(QMessageBox.Information)
message.setWindowTitle("BLACS")
message.exec_()
QTimer.singleShot(10,self.on_load_front_panel)
def on_save_exit(self):
# Save front panel
data = self.front_panel_settings.get_save_data()
# with h5py.File(self.settings_path,'r+') as h5file:
# if 'connection table' in h5file:
# del h5file['connection table']
self.front_panel_settings.save_front_panel_to_h5(self.settings_path,data[0],data[1],data[2],data[3],{"overwrite":True},force_new_conn_table=True)
logger.info('Destroying tabs')
for tab in self.tablist.values():
tab.destroy()
#gobject.timeout_add(100,self.finalise_quit,time.time())
QTimer.singleShot(100,lambda: self.finalise_quit(time.time()))
def finalise_quit(self,initial_time):
logger.info('finalise_quit called')
tab_close_timeout = 2
# Kill any tabs which didn't close themselves:
for name, tab in self.tablist.items():
if tab.destroy_complete:
del self.tablist[name]
if self.tablist:
for name, tab in self.tablist.items():
# If a tab has a fatal error or is taking too long to close, force close it:
if (time.time() - initial_time > tab_close_timeout) or tab.state == 'fatal error':
try:
tab.close_tab()
except Exception as e:
logger.error('Couldn\'t close tab:\n%s'%str(e))
del self.tablist[name]
if self.tablist:
QTimer.singleShot(100,lambda: self.finalise_quit(initial_time))
else:
self.exit_complete = True
logger.info('quitting')
def on_save_front_panel(self,*args,**kwargs):
data = self.front_panel_settings.get_save_data()
# Open save As dialog
dialog = QFileDialog(None,"Save BLACS state", self.exp_config.get('paths','experiment_shot_storage'), "HDF5 files (*.h5)")
try:
dialog.setViewMode(QFileDialog.Detail)
dialog.setFileMode(QFileDialog.AnyFile)
dialog.setAcceptMode(QFileDialog.AcceptSave)
if dialog.exec_():
current_file = str(dialog.selectedFiles()[0])
if not current_file.endswith('.h5'):
current_file += '.h5'
self.front_panel_settings.save_front_panel_to_h5(current_file,data[0],data[1],data[2],data[3])
except Exception:
raise
finally:
dialog.deleteLater()
def on_open_preferences(self,*args,**kwargs):
self.settings.create_dialog()
class ExperimentServer(ZMQServer):
def handler(self, h5_filepath):
        print(h5_filepath)
message = self.process(h5_filepath)
logger.info('Request handler: %s ' % message.strip())
return message
@inmain_decorator(wait_for_return=True)
def process(self,h5_filepath):
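        # Note (editor): this runs in the main (GUI) thread via inmain_decorator; the
        # incoming shot path (possibly on a shared drive) is converted to a local
        # path and handed to the BLACS queue, whose reply is returned to the client.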
# Convert path to local slashes and shared drive prefix:
logger.info('received filepath: %s'%h5_filepath)
h5_filepath = labscript_utils.shared_drive.path_to_local(h5_filepath)
logger.info('local filepath: %s'%h5_filepath)
return app.queue.process_request(h5_filepath)
if __name__ == '__main__':
if 'tracelog' in sys.argv:
##########
import labscript_utils.tracelog
labscript_utils.tracelog.log('blacs_trace.log',['__main__','BLACS.tab_base_classes',
'qtutils',
'labscript_utils.qtwidgets.ddsoutput',
'labscript_utils.qtwidgets.analogoutput',
'BLACS.hardware_interfaces.ni_pcie_6363',
'BLACS.hardware_interfaces.output_classes',
'BLACS.device_base_class',
'BLACS.tab_base_classes',
'BLACS.plugins.connection_table',
'BLACS.recompile_and_restart',
'filewatcher',
'queue',
'notifications',
'connections',
'analysis_submission',
'settings',
'front_panel_settings',
'labscript_utils.h5_lock',
'labscript_utils.shared_drive',
'labscript_utils.labconfig',
'zprocess',
], sub=True)
##########
config_path = os.path.join(config_prefix,'%s.ini'%socket.gethostname())
settings_path = os.path.join(config_prefix,'%s_BLACS.h5'%socket.gethostname())
required_config_params = {"DEFAULT":["experiment_name"],
"programs":["text_editor",
"text_editor_arguments",
],
"paths":["shared_drive",
"connection_table_h5",
"connection_table_py",
],
"ports":["BLACS", "lyse"],
}
exp_config = LabConfig(config_path,required_config_params)
port = int(exp_config.get('ports','BLACS'))
# Start experiment server
experiment_server = ExperimentServer(port)
# Create Connection Table object
logger.info('About to load connection table: %s'%exp_config.get('paths','connection_table_h5'))
connection_table_h5_file = exp_config.get('paths','connection_table_h5')
try:
connection_table = ConnectionTable(connection_table_h5_file)
except:
# dialog = gtk.MessageDialog(None,gtk.DIALOG_MODAL,gtk.MESSAGE_ERROR,gtk.BUTTONS_NONE,"The connection table in '%s' is not valid. Please check the compilation of the connection table for errors\n\n"%self.connection_table_h5file)
# dialog.run()
# dialog.destroy()
logger.exception('connection table failed to load')
raise
sys.exit("Invalid Connection Table")
logger.info('connection table loaded')
qapplication = QApplication(sys.argv)
logger.info('QApplication instantiated')
app = BLACS(qapplication)
logger.info('BLACS instantiated')
def execute_program():
qapplication.exec_()
sys.exit(execute_program())
| 45.259831 | 262 | 0.596804 |
4a219694d10ef075e0e0403cdd7ed100c39ddadd | 3,572 | py | Python | tensorflow/examples/learn/iris_custom_decay_dnn.py | tianyapiaozi/tensorflow | fb3ce0467766a8e91f1da0ad7ada7c24fde7a73a | [
"Apache-2.0"
] | 71 | 2017-05-25T16:02:15.000Z | 2021-06-09T16:08:08.000Z | tensorflow/examples/learn/iris_custom_decay_dnn.py | shrikunjsarda/tensorflow | 7e8927e7af0c51ac20a63bd4eab6ff83df1a39ae | [
"Apache-2.0"
] | 133 | 2017-04-26T16:49:49.000Z | 2019-10-15T11:39:26.000Z | tensorflow/examples/learn/iris_custom_decay_dnn.py | shrikunjsarda/tensorflow | 7e8927e7af0c51ac20a63bd4eab6ff83df1a39ae | [
"Apache-2.0"
] | 31 | 2018-09-11T02:17:17.000Z | 2021-12-15T10:33:35.000Z | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Example of DNNClassifier for Iris plant dataset, with exponential decay."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from sklearn import datasets
from sklearn import metrics
from sklearn import model_selection
import tensorflow as tf
X_FEATURE = 'x' # Name of the input feature.
def my_model(features, labels, mode):
"""DNN with three hidden layers."""
# Create three fully connected layers respectively of size 10, 20, and 10.
net = features[X_FEATURE]
for units in [10, 20, 10]:
net = tf.layers.dense(net, units=units, activation=tf.nn.relu)
# Compute logits (1 per class).
logits = tf.layers.dense(net, 3, activation=None)
# Compute predictions.
predicted_classes = tf.argmax(logits, 1)
if mode == tf.estimator.ModeKeys.PREDICT:
predictions = {
'class': predicted_classes,
'prob': tf.nn.softmax(logits)
}
return tf.estimator.EstimatorSpec(mode, predictions=predictions)
# Compute loss.
loss = tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits)
# Create training op with exponentially decaying learning rate.
if mode == tf.estimator.ModeKeys.TRAIN:
global_step = tf.train.get_global_step()
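    # The decayed rate below follows lr = 0.1 * 0.001 ** (global_step / 100),
    # i.e. the base rate shrinks by a factor of 1000 every 100 steps
    # (staircase is off, so the decay is continuous).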
learning_rate = tf.train.exponential_decay(
learning_rate=0.1, global_step=global_step,
decay_steps=100, decay_rate=0.001)
optimizer = tf.train.AdagradOptimizer(learning_rate=learning_rate)
train_op = optimizer.minimize(loss, global_step=global_step)
return tf.estimator.EstimatorSpec(mode, loss=loss, train_op=train_op)
# Compute evaluation metrics.
eval_metric_ops = {
'accuracy': tf.metrics.accuracy(
labels=labels, predictions=predicted_classes)
}
return tf.estimator.EstimatorSpec(
mode, loss=loss, eval_metric_ops=eval_metric_ops)
def main(unused_argv):
iris = datasets.load_iris()
x_train, x_test, y_train, y_test = model_selection.train_test_split(
iris.data, iris.target, test_size=0.2, random_state=42)
classifier = tf.estimator.Estimator(model_fn=my_model)
# Train.
train_input_fn = tf.estimator.inputs.numpy_input_fn(
x={X_FEATURE: x_train}, y=y_train, num_epochs=None, shuffle=True)
classifier.train(input_fn=train_input_fn, steps=1000)
# Predict.
test_input_fn = tf.estimator.inputs.numpy_input_fn(
x={X_FEATURE: x_test}, y=y_test, num_epochs=1, shuffle=False)
predictions = classifier.predict(input_fn=test_input_fn)
y_predicted = np.array(list(p['class'] for p in predictions))
y_predicted = y_predicted.reshape(np.array(y_test).shape)
# Score with sklearn.
score = metrics.accuracy_score(y_test, y_predicted)
print('Accuracy (sklearn): {0:f}'.format(score))
# Score with tensorflow.
scores = classifier.evaluate(input_fn=test_input_fn)
print('Accuracy (tensorflow): {0:f}'.format(scores['accuracy']))
if __name__ == '__main__':
tf.app.run()
| 35.366337 | 78 | 0.741321 |
4a2196badde628b2896ca6517ff948febe685f69 | 668 | py | Python | server/manage.py | Dylane-JC/agfzb-CloudAppDevelopment_Capstone | 17ad58a3849ba703eec7f56378ac5d462cf2aac0 | [
"Apache-2.0"
] | null | null | null | server/manage.py | Dylane-JC/agfzb-CloudAppDevelopment_Capstone | 17ad58a3849ba703eec7f56378ac5d462cf2aac0 | [
"Apache-2.0"
] | null | null | null | server/manage.py | Dylane-JC/agfzb-CloudAppDevelopment_Capstone | 17ad58a3849ba703eec7f56378ac5d462cf2aac0 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'djangobackend.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| 30.363636 | 77 | 0.682635 |
4a2196ca930752bd4e9767d859e791f8b5cb3cd6 | 16,040 | py | Python | train.py | tyjiang1997/NonLocalProp_MVD | 5cf5a5b422fd20e710429363447dc36a90f12b18 | [
"MIT"
] | 1 | 2022-03-25T12:27:59.000Z | 2022-03-25T12:27:59.000Z | train.py | tyjiang1997/NonLocalProp_MVD | 5cf5a5b422fd20e710429363447dc36a90f12b18 | [
"MIT"
] | null | null | null | train.py | tyjiang1997/NonLocalProp_MVD | 5cf5a5b422fd20e710429363447dc36a90f12b18 | [
"MIT"
] | null | null | null | import imp
import os
from re import I
import time
import csv
import numpy as np
from path import Path
import argparse
import matplotlib.pyplot as plt
from tensorboardX import SummaryWriter
import cv2
import torch
import torch.nn.functional as F
from core.dataset import custom_transforms
from core.networks.MVDNet_conf import MVDNet_conf
from core.networks.MVDNet_joint import MVDNet_joint
from core.networks.MVDNet_nslpn import MVDNet_nslpn
from core.networks.MVDNet_prop import MVDNet_prop
from core.utils.inverse_warp_d import inverse_warp_d, pixel2cam
from core.utils.utils import load_config_file, save_checkpoint, adjust_learning_rate
from core.networks.loss_functions import compute_errors_test, compute_angles, cross_entropy
from core.utils.logger import AverageMeter
from core.dataset import SequenceFolder, NoisySequenceFolder
def main(cfg):
os.environ['CUDA_VISIBLE_DEVICES'] = ','.join(cfg.cuda)
global n_iter
save_path = Path(cfg.output_dir)
if not os.path.exists(save_path):
os.makedirs(save_path)
print('=> will save everything to {}'.format(save_path))
training_writer = SummaryWriter(save_path)
output_writers = []
for i in range(3):
output_writers.append(SummaryWriter(save_path/'valid'/str(i)))
# Loading data
normalize = custom_transforms.Normalize(mean=[0.5, 0.5, 0.5],
std=[0.5, 0.5, 0.5])
train_transform = custom_transforms.Compose([
custom_transforms.RandomScaleCrop(),
custom_transforms.ArrayToTensor(),
normalize
])
valid_transform = custom_transforms.Compose([custom_transforms.ArrayToTensor(), normalize])
print("=> fetching scenes in '{}'".format(cfg.dataset_path))
if cfg.dataset == 'scannet':
if cfg.dataloader == 'NoisySequenceFolder':
train_set = NoisySequenceFolder(cfg.dataset_path, transform=train_transform, ttype=cfg.train_list)
test_set = NoisySequenceFolder(cfg.dataset_path, transform=valid_transform, ttype=cfg.test_list)
else:
train_set = SequenceFolder(cfg.dataset_path, transform=train_transform, ttype=cfg.train_list)
test_set = SequenceFolder(cfg.dataset_path, transform=valid_transform, ttype=cfg.test_list)
else:
raise NotImplementedError
train_set[0]
train_set.samples = train_set.samples[:len(train_set) - len(train_set)%cfg.batch_size]
print('{} samples found in {} train scenes'.format(len(train_set), len(train_set.scenes)))
print('{} samples found in {} test scenes'.format(len(test_set), len(test_set.scenes)))
train_loader = torch.utils.data.DataLoader(train_set, batch_size=cfg.batch_size, shuffle=True,
num_workers=cfg.num_workers, pin_memory=True, drop_last=True)
test_loader = torch.utils.data.DataLoader(test_set, batch_size=cfg.batch_size, shuffle=False,
num_workers=cfg.num_workers, pin_memory=True)
epoch_size = len(train_loader)
# create model
print("=> creating model")
if cfg.model_name == 'MVDNet_conf':
mvdnet = MVDNet_conf(cfg).cuda()
elif cfg.model_name == 'MVDNet_joint':
mvdnet = MVDNet_joint(cfg).cuda()
elif cfg.model_name == 'MVDNet_nslpn':
mvdnet = MVDNet_nslpn(cfg).cuda()
elif cfg.model_name == 'MVDNet_prop':
mvdnet = MVDNet_prop(cfg).cuda()
else:
raise NotImplementedError
mvdnet.init_weights()
if cfg.pretrained_mvdn:
print("=> using pre-trained weights for MVDNet")
weights = torch.load(cfg.pretrained_mvdn)
mvdnet.load_state_dict(weights['state_dict'], strict=True)
print('=> setting adam solver')
optimizer = torch.optim.Adam(filter(lambda p: p.requires_grad, mvdnet.parameters()), cfg.learning_rate, betas=(cfg.momentum, cfg.beta),
weight_decay=cfg.weight_decay)
torch.backends.cudnn.benchmark = True
if len(cfg.cuda) > 1:
mvdnet = torch.nn.DataParallel(mvdnet, device_ids=[int(id) for id in cfg.cuda])
print(' ==> setting log files')
with open(save_path/'log_summary.txt', 'w') as csvfile:
writer = csv.writer(csvfile, delimiter='\t')
writer.writerow(['train_loss', 'validation_abs_rel', 'validation_abs_diff','validation_sq_rel', 'validation_rms', 'validation_log_rms', 'validation_a1', 'validation_a2','validation_a3'])
print(' ==> main Loop')
for epoch in range(cfg.epochs):
adjust_learning_rate(cfg, optimizer, epoch)
# train for one epoch
train_loss = train_epoch(cfg, train_loader, mvdnet, optimizer, epoch_size, training_writer, epoch)
errors, error_names = validate_with_gt(cfg, test_loader, mvdnet, epoch, output_writers)
for error, name in zip(errors, error_names):
training_writer.add_scalar(name, error, epoch)
        # Up to you to choose the most relevant error to measure your model's performance; careful, some measures are to be maximized (such as a1, a2, a3)
decisive_error = errors[0]
with open(save_path/'log_summary.txt', 'a') as csvfile:
writer = csv.writer(csvfile, delimiter='\t')
writer.writerow([train_loss, decisive_error, errors[1], errors[2], errors[3], errors[4], errors[5], errors[6], errors[7]])
save_checkpoint(os.path.join(save_path, 'checkpoints'), {'epoch': epoch + 1, 'state_dict': mvdnet.module.state_dict()},
epoch, file_prefixes = ['mvdnet'])
def train_epoch(cfg, train_loader, mvdnet, optimizer, epoch_size, train_writer, epoch):
global n_iter
batch_time = AverageMeter()
data_time = AverageMeter()
total_losses = AverageMeter(precision=4)
d_losses = AverageMeter(precision=4)
nmap_losses = AverageMeter(precision=4)
dconf_losses = AverageMeter(precision=4)
nconf_losses = AverageMeter(precision=4)
mvdnet.train()
print("Training")
end = time.time()
for i, (tgt_img, ref_imgs, gt_nmap, ref_poses, intrinsics, intrinsics_inv, tgt_depth, ref_depths, tgt_id) in enumerate(train_loader):
# measure data loading time
data_time.update(time.time() - end)
tgt_img_var = tgt_img.cuda()
ref_imgs_var = [img.cuda() for img in ref_imgs]
gt_nmap_var = gt_nmap.cuda()
ref_poses_var = [pose.cuda() for pose in ref_poses]
intrinsics_var = intrinsics.cuda()
intrinsics_inv_var = intrinsics_inv.cuda()
tgt_depth_var = tgt_depth.cuda()
ref_dep_var = [ref_dep.cuda() for ref_dep in ref_depths]
ref_depths = torch.stack(ref_dep_var,1)
# compute output
pose = torch.cat(ref_poses_var,1)
# get mask
mask = (tgt_depth_var <= 10.0) & (tgt_depth_var >= 0.5) & (tgt_depth_var == tgt_depth_var)
if mask.any() == 0:
continue
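        # Optional multi-view consistency filter (describing the block below): each
        # reference depth map is warped into the target view with the relative pose
        # and intrinsics, and pixels whose (max-normalized) depth difference to the
        # target GT exceeds valid_threshod are dropped from the supervision mask.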
if cfg.depth_fliter_by_multi_views['use']:
valid_threshod = cfg.depth_fliter_by_multi_views['valid_threshod']
multi_view_mask = tgt_depth_var.new_ones(tgt_depth_var.shape).bool()
views = ref_depths.shape[1]
for viw in range(views):
warp_rerdep = inverse_warp_d(ref_depths[:,viw:viw+1], ref_depths[:,viw:viw+1], pose[:,viw], intrinsics_var, intrinsics_inv_var)
warp_rerdep = warp_rerdep.squeeze()
diff_depth = torch.abs(warp_rerdep - tgt_depth_var)
max_diff = diff_depth.max()
diff_depth = diff_depth / (max_diff + 1e-8)
multi_view_mask &= (diff_depth < valid_threshod)
# ids = 0
# tht_vis = tgt_depth[ids].cpu().numpy()
# ref_vis = warp_rerdep[ids].cpu().numpy()
# diff_vis = diff_depth[ids].cpu().numpy()
# max_ = tht_vis.max()
# tht_vis = tht_vis *255 / max_
# ref_vis = ref_vis *255 / max_
# diff_vis = diff_vis *255 / max_
# cv2.imwrite('/home/jty/mvs/idn-solver/vis/tdtdep.png', tht_vis)
# cv2.imwrite('/home/jty/mvs/idn-solver/vis/refdep.png', ref_vis)
# cv2.imwrite('/home/jty/mvs/idn-solver/vis/diffdep.png', diff_vis)
# from pdb import set_trace; set_trace()
mask &= multi_view_mask
mask.detach_()
if cfg.model_name == 'MVDNet_conf':
outputs = mvdnet(tgt_img_var, ref_imgs_var, pose, intrinsics_var, intrinsics_inv_var)
elif cfg.model_name == 'MVDNet_joint':
outputs = mvdnet(tgt_img_var, ref_imgs_var, pose, tgt_depth_var, gt_nmap_var, intrinsics_var, intrinsics_inv_var)
elif cfg.model_name == 'MVDNet_nslpn':
outputs = mvdnet(tgt_img_var, ref_imgs_var, pose, intrinsics_var, intrinsics_inv_var)
elif cfg.model_name == 'MVDNet_prop':
outputs = mvdnet(tgt_img_var, ref_imgs_var, pose, tgt_depth_var, gt_nmap_var, intrinsics_var, intrinsics_inv_var)
else:
raise NotImplementedError
depth0, depth1 = outputs[0], outputs[1]
nmap0 = outputs[2]
dconf, nconf = outputs[-2], outputs[-1]
# Loss
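        # Depth is supervised at two scales with smooth L1; the depth/normal
        # confidence maps are supervised with cross entropy against pseudo ground
        # truth derived from the relative depth error and the angular normal error
        # respectively (both clamped to [0.01, 1]).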
d_loss = cfg.d_weight * F.smooth_l1_loss(depth0[mask], tgt_depth_var[mask]) + \
F.smooth_l1_loss(depth1[mask], tgt_depth_var[mask])
gt_dconf = 1.0 - cfg.conf_dgamma * torch.abs(depth0 - tgt_depth_var) / (tgt_depth_var + 1e-6)
gt_dconf = torch.clamp(gt_dconf, 0.01, 1.0).detach_()
dconf_loss = cross_entropy(dconf[mask], gt_dconf[mask])
n_mask = mask.unsqueeze(1).expand(-1,3,-1,-1)
nmap_loss = F.smooth_l1_loss(nmap0[n_mask], gt_nmap_var[n_mask])
gt_nconf = 1.0 - cfg.conf_ngamma * compute_angles(nmap0, gt_nmap_var, dim=1) / 180.0
gt_nconf = torch.clamp(gt_nconf, 0.01, 1.0).detach_()
nconf_loss = cross_entropy(nconf[mask], gt_nconf[mask])
loss = d_loss + cfg.n_weight * nmap_loss + cfg.dc_weight * dconf_loss + cfg.nc_weight * nconf_loss
if i > 0 and n_iter % cfg.print_freq == 0:
train_writer.add_scalar('total_loss', loss.item(), n_iter)
# record loss and EPE
total_losses.update(loss.item(), n=cfg.batch_size)
d_losses.update(d_loss.mean().item(), n=cfg.batch_size)
nmap_losses.update(nmap_loss.mean().item(), n=cfg.batch_size)
dconf_losses.update(dconf_loss.mean().item(), n=cfg.batch_size)
nconf_losses.update(nconf_loss.mean().item(), n=cfg.batch_size)
# compute gradient and do Adam step
optimizer.zero_grad()
loss.backward()
optimizer.step()
# measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
if cfg.log_mode == 'full':
with open(cfg.output_dir/'log_full.txt', 'a') as csvfile:
writer = csv.writer(csvfile, delimiter='\t')
writer.writerow([loss.item()])
if i % cfg.print_freq == 0:
print('Train: Time {} Loss {} NLoss {} DLoss {} DCLoss {} NCLoss {} Iter {}/{} Epoch {}/{}'.format(batch_time, total_losses, nmap_losses,
d_losses, dconf_losses, nconf_losses, i, len(train_loader), epoch, cfg.epochs))
if i >= epoch_size - 1:
break
n_iter += 1
return total_losses.avg[0]
def validate_with_gt(cfg, test_loader, mvdnet, epoch, output_writers=[]):
batch_time = AverageMeter()
test_error_names = ['abs_rel','abs_diff','sq_rel','rms','log_rms','a1','a2','a3', 'dconf', 'nconf', 'mean_angle']
test_errors = AverageMeter(i=len(test_error_names))
log_outputs = len(output_writers) > 0
mvdnet.eval()
end = time.time()
with torch.no_grad():
for i, (tgt_img, ref_imgs, gt_nmap, ref_poses, intrinsics, intrinsics_inv, tgt_depth, ref_depths, tgt_id) in enumerate(test_loader):
tgt_img_var = tgt_img.cuda()
ref_imgs_var = [img.cuda() for img in ref_imgs]
gt_nmap_var = gt_nmap.cuda()
ref_poses_var = [pose.cuda() for pose in ref_poses]
intrinsics_var = intrinsics.cuda()
intrinsics_inv_var = intrinsics_inv.cuda()
tgt_depth_var = tgt_depth.cuda()
pose = torch.cat(ref_poses_var,1)
if (pose != pose).any():
continue
if cfg.model_name == 'MVDNet_conf':
outputs = mvdnet(tgt_img_var, ref_imgs_var, pose, intrinsics_var, intrinsics_inv_var)
elif cfg.model_name == 'MVDNet_joint':
outputs = mvdnet(tgt_img_var, ref_imgs_var, pose, tgt_depth_var, gt_nmap_var, intrinsics_var, intrinsics_inv_var)
elif cfg.model_name == 'MVDNet_nslpn':
outputs = mvdnet(tgt_img_var, ref_imgs_var, pose, intrinsics_var, intrinsics_inv_var)
elif cfg.model_name == 'MVDNet_prop':
outputs = mvdnet(tgt_img_var, ref_imgs_var, pose, tgt_depth_var, gt_nmap_var, intrinsics_var, intrinsics_inv_var)
else:
raise NotImplementedError
output_depth = outputs[0].data.cpu()
nmap = outputs[1].permute(0,2,3,1)
dconf, nconf = outputs[-2], outputs[-1]
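            # Evaluation reports the standard depth metrics plus the mean absolute
            # error of both confidence maps against their pseudo ground truth and
            # the mean angular error of the predicted normals.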
mask = (tgt_depth <= 10) & (tgt_depth >= 0.5) & (tgt_depth == tgt_depth)
if not mask.any():
continue
test_errors_ = list(compute_errors_test(tgt_depth[mask], output_depth[mask]))
gt_dconf = 1.0 - cfg.conf_dgamma * torch.abs(tgt_depth - output_depth) / (tgt_depth + 1e-6)
dconf_e = torch.abs(dconf.cpu()[mask] - gt_dconf[mask]).mean()
test_errors_.append(dconf_e.item())
n_mask = (gt_nmap_var.permute(0,2,3,1)[0,:,:] != 0)
n_mask = n_mask[:,:,0] | n_mask[:,:,1] | n_mask[:,:,2]
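            # A normal pixel is considered valid when the GT normal is non-zero in
            # at least one channel; angular errors are averaged over these pixels only.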
total_angles_m = compute_angles(gt_nmap_var.permute(0,2,3,1)[0], nmap[0])
gt_nconf = 1.0 - cfg.conf_ngamma * total_angles_m / 180.0
nconf_e = torch.abs(nconf[0][n_mask] - gt_nconf[n_mask]).mean()
test_errors_.append(nconf_e.item())
mask_angles = total_angles_m[n_mask]
total_angles_m[~ n_mask] = 0
test_errors_.append(torch.mean(mask_angles).item())
test_errors.update(test_errors_)
# measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
if i % cfg.print_freq == 0 or i == len(test_loader)-1:
print('valid: Time {} Rel Error {:.4f} ({:.4f}) DConf Error {:.4f} ({:.4f}) Iter {}/{}'.format(batch_time, test_errors.val[0], test_errors.avg[0], test_errors.val[-3], test_errors.avg[-3], i, len(test_loader)))
if cfg.save_samples:
output_dir = Path(os.path.join(cfg.output_dir, 'vis'))
if not os.path.isdir(output_dir):
os.mkdir(output_dir)
output_depth = output_depth.numpy()
for picid, imgsave in zip(tgt_id, output_depth):
plt.imsave(output_dir/ f'{picid}_depth.png',imgsave, cmap='rainbow')
return test_errors.avg, test_error_names
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Iterative solver for multi-view depth and normal',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('config_file', metavar='DIR', help='path to config file')
args = parser.parse_args()
cfg = load_config_file(args.config_file)
n_iter = 0
main(cfg)
| 46.091954 | 227 | 0.621945 |
4a2198b36b78dda5b37c783439a7bf44c92060cc | 33,549 | py | Python | qa/rpc-tests/fundrawtransaction.py | zalcchain/zalcchain | 3f104cc3d8e3f0c2cd325348cbf408bb2612f2b8 | [
"MIT"
] | null | null | null | qa/rpc-tests/fundrawtransaction.py | zalcchain/zalcchain | 3f104cc3d8e3f0c2cd325348cbf408bb2612f2b8 | [
"MIT"
] | null | null | null | qa/rpc-tests/fundrawtransaction.py | zalcchain/zalcchain | 3f104cc3d8e3f0c2cd325348cbf408bb2612f2b8 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
def get_unspent(listunspent, amount):
for utx in listunspent:
if utx['amount'] == amount:
return utx
raise AssertionError('Could not find unspent with amount={}'.format(amount))
class RawTransactionsTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 4
def setup_network(self, split=False):
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
connect_nodes_bi(self.nodes,0,3)
self.is_network_split=False
self.sync_all()
def run_test(self):
print("Mining blocks...")
min_relay_tx_fee = self.nodes[0].getnetworkinfo()['relayfee']
# This test is not meant to test fee estimation and we'd like
# to be sure all txs are sent at a consistent desired feerate
for node in self.nodes:
node.settxfee(min_relay_tx_fee)
# if the fee's positive delta is higher than this value tests will fail,
# neg. delta always fail the tests.
# The size of the signature of every input may be at most 2 bytes larger
# than a minimum sized signature.
# = 2 bytes * minRelayTxFeePerByte
feeTolerance = 2 * min_relay_tx_fee/1000
self.nodes[2].generate(1)
self.sync_all()
self.nodes[0].generate(121)
self.sync_all()
# ensure that setting changePosition in fundraw with an exact match is handled properly
rawmatch = self.nodes[2].createrawtransaction([], {self.nodes[2].getnewaddress():50})
rawmatch = self.nodes[2].fundrawtransaction(rawmatch, {"changePosition":1, "subtractFeeFromOutputs":[0]})
assert_equal(rawmatch["changepos"], -1)
watchonly_address = self.nodes[0].getnewaddress()
watchonly_pubkey = self.nodes[0].validateaddress(watchonly_address)["pubkey"]
watchonly_amount = Decimal(200)
self.nodes[3].importpubkey(watchonly_pubkey, "", True)
watchonly_txid = self.nodes[0].sendtoaddress(watchonly_address, watchonly_amount)
self.nodes[0].sendtoaddress(self.nodes[3].getnewaddress(), watchonly_amount / 10)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.5)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 5.0)
self.nodes[0].generate(1)
self.sync_all()
###############
# simple test #
###############
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert(len(dec_tx['vin']) > 0) #test that we have enough inputs
##############################
# simple test with two coins #
##############################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 2.2 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert(len(dec_tx['vin']) > 0) #test if we have enough inputs
##############################
# simple test with two coins #
##############################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 2.6 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert(len(dec_tx['vin']) > 0)
assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
################################
# simple test with two outputs #
################################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 2.6, self.nodes[1].getnewaddress() : 2.5 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert(len(dec_tx['vin']) > 0)
assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
#########################################################################
# test a fundrawtransaction with a VIN greater than the required amount #
#########################################################################
utx = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee
#####################################################################
# test a fundrawtransaction with which will not get a change output #
#####################################################################
utx = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : Decimal(5.0) - fee - feeTolerance }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert_equal(rawtxfund['changepos'], -1)
assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee
####################################################
# test a fundrawtransaction with an invalid option #
####################################################
utx = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ]
outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
try:
self.nodes[2].fundrawtransaction(rawtx, {'foo': 'bar'})
raise AssertionError("Accepted invalid option foo")
except JSONRPCException as e:
assert("Unexpected key foo" in e.error['message'])
############################################################
# test a fundrawtransaction with an invalid change address #
############################################################
utx = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ]
outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
try:
self.nodes[2].fundrawtransaction(rawtx, {'changeAddress': 'foobar'})
raise AssertionError("Accepted invalid zalcchain address")
except JSONRPCException as e:
assert("changeAddress must be a valid zalcchain address" in e.error['message'])
############################################################
# test a fundrawtransaction with a provided change address #
############################################################
utx = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ]
outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
change = self.nodes[2].getnewaddress()
try:
rawtxfund = self.nodes[2].fundrawtransaction(rawtx, {'changeAddress': change, 'changePosition': 2})
except JSONRPCException as e:
assert('changePosition out of bounds' == e.error['message'])
else:
assert(False)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx, {'changeAddress': change, 'changePosition': 0})
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
out = dec_tx['vout'][0]
assert_equal(change, out['scriptPubKey']['addresses'][0])
#########################################################################
# test a fundrawtransaction with a VIN smaller than the required amount #
#########################################################################
utx = get_unspent(self.nodes[2].listunspent(), 1)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
# 4-byte version + 1-byte vin count + 36-byte prevout then script_len
rawtx = rawtx[:82] + "0100" + rawtx[84:]
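        # The slice swaps the empty scriptSig (a single "00" length byte) for a
        # 1-byte dummy script 0x00; fundrawtransaction must leave this user-supplied
        # scriptSig untouched, which the asserts below verify.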
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for i, out in enumerate(dec_tx['vout']):
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
else:
assert_equal(i, rawtxfund['changepos'])
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
assert_equal(matchingOuts, 1)
assert_equal(len(dec_tx['vout']), 2)
###########################################
# test a fundrawtransaction with two VINs #
###########################################
utx = get_unspent(self.nodes[2].listunspent(), 1)
utx2 = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
outputs = { self.nodes[0].getnewaddress() : 6.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for out in dec_tx['vout']:
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
assert_equal(matchingOuts, 1)
assert_equal(len(dec_tx['vout']), 2)
matchingIns = 0
for vinOut in dec_tx['vin']:
for vinIn in inputs:
if vinIn['txid'] == vinOut['txid']:
matchingIns+=1
assert_equal(matchingIns, 2) #we now must see two vins identical to vins given as params
#########################################################
# test a fundrawtransaction with two VINs and two vOUTs #
#########################################################
utx = get_unspent(self.nodes[2].listunspent(), 1)
utx2 = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
outputs = { self.nodes[0].getnewaddress() : 6.0, self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for out in dec_tx['vout']:
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
assert_equal(matchingOuts, 2)
assert_equal(len(dec_tx['vout']), 3)
##############################################
# test a fundrawtransaction with invalid vin #
##############################################
listunspent = self.nodes[2].listunspent()
inputs = [ {'txid' : "1c7f966dab21119bac53213a2bc7532bff1fa844c124fd750a7d0b1332440bd1", 'vout' : 0} ] #invalid vin!
outputs = { self.nodes[0].getnewaddress() : 1.0}
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
try:
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
raise AssertionError("Spent more than available")
except JSONRPCException as e:
assert("Insufficient" in e.error['message'])
############################################################
#compare fee of a standard pubkeyhash transaction
inputs = []
outputs = {self.nodes[1].getnewaddress():1.1}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1.1)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a standard pubkeyhash transaction with multiple outputs
inputs = []
outputs = {self.nodes[1].getnewaddress():1.1,self.nodes[1].getnewaddress():1.2,self.nodes[1].getnewaddress():0.1,self.nodes[1].getnewaddress():1.3,self.nodes[1].getnewaddress():0.2,self.nodes[1].getnewaddress():0.3}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendmany("", outputs)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a 2of2 multisig p2sh transaction
# create 2of2 addr
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[1].getnewaddress()
addr1Obj = self.nodes[1].validateaddress(addr1)
addr2Obj = self.nodes[1].validateaddress(addr2)
mSigObj = self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
inputs = []
outputs = {mSigObj:1.1}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(mSigObj, 1.1)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a standard pubkeyhash transaction
# create 4of5 addr
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[1].getnewaddress()
addr3 = self.nodes[1].getnewaddress()
addr4 = self.nodes[1].getnewaddress()
addr5 = self.nodes[1].getnewaddress()
addr1Obj = self.nodes[1].validateaddress(addr1)
addr2Obj = self.nodes[1].validateaddress(addr2)
addr3Obj = self.nodes[1].validateaddress(addr3)
addr4Obj = self.nodes[1].validateaddress(addr4)
addr5Obj = self.nodes[1].validateaddress(addr5)
mSigObj = self.nodes[1].addmultisigaddress(4, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey'], addr4Obj['pubkey'], addr5Obj['pubkey']])
inputs = []
outputs = {mSigObj:1.1}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(mSigObj, 1.1)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
# spend a 2of2 multisig transaction over fundraw
# create 2of2 addr
addr1 = self.nodes[2].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[2].validateaddress(addr1)
addr2Obj = self.nodes[2].validateaddress(addr2)
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
# send 1.2 BTC to msig addr
txId = self.nodes[0].sendtoaddress(mSigObj, 1.2)
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
oldBalance = self.nodes[1].getbalance()
inputs = []
outputs = {self.nodes[1].getnewaddress():1.1}
rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[2].fundrawtransaction(rawTx)
signedTx = self.nodes[2].signrawtransaction(fundedTx['hex'])
txId = self.nodes[2].sendrawtransaction(signedTx['hex'])
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
# make sure funds are received at node1
assert_equal(oldBalance+Decimal('1.10000000'), self.nodes[1].getbalance())
############################################################
# locked wallet test
self.nodes[1].encryptwallet("test")
self.nodes.pop(1)
stop_node(self.nodes[0], 0)
stop_node(self.nodes[1], 2)
stop_node(self.nodes[2], 3)
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
# This test is not meant to test fee estimation and we'd like
# to be sure all txs are sent at a consistent desired feerate
for node in self.nodes:
node.settxfee(min_relay_tx_fee)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
connect_nodes_bi(self.nodes,0,3)
self.is_network_split=False
self.sync_all()
# drain the keypool
self.nodes[1].getnewaddress()
inputs = []
outputs = {self.nodes[0].getnewaddress():1.1}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
# fund a transaction that requires a new key for the change output
# creating the key must be impossible because the wallet is locked
try:
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
raise AssertionError("Wallet unlocked without passphrase")
except JSONRPCException as e:
assert('Keypool ran out' in e.error['message'])
#refill the keypool
self.nodes[1].walletpassphrase("test", 100)
self.nodes[1].walletlock()
try:
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1.2)
raise AssertionError("Wallet unlocked without passphrase")
except JSONRPCException as e:
assert('walletpassphrase' in e.error['message'])
oldBalance = self.nodes[0].getbalance()
inputs = []
outputs = {self.nodes[0].getnewaddress():1.1}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
#now we need to unlock
self.nodes[1].walletpassphrase("test", 600)
signedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
txId = self.nodes[1].sendrawtransaction(signedTx['hex'])
self.nodes[1].generate(1)
self.sync_all()
# make sure funds are received at node1
assert_equal(oldBalance+Decimal('51.10000000'), self.nodes[0].getbalance())
###############################################
# multiple (~19) inputs tx test | Compare fee #
###############################################
#empty node1, send some small coins from node0 to node1
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
for i in range(0,20):
self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
self.nodes[0].generate(1)
self.sync_all()
#fund a tx with ~20 small inputs
inputs = []
outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[1].sendmany("", outputs)
signedFee = self.nodes[1].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance*19) #~19 inputs
#############################################
# multiple (~19) inputs tx test | sign/send #
#############################################
#again, empty node1, send some small coins from node0 to node1
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
for i in range(0,20):
self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
self.nodes[0].generate(1)
self.sync_all()
#fund a tx with ~20 small inputs
oldBalance = self.nodes[0].getbalance()
inputs = []
outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
fundedAndSignedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
txId = self.nodes[1].sendrawtransaction(fundedAndSignedTx['hex'])
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(oldBalance+Decimal('50.19000000'), self.nodes[0].getbalance()) #0.19+block reward
#####################################################
# test fundrawtransaction with OP_RETURN and no vin #
#####################################################
rawtx = "0100000000010000000000000000066a047465737400000000"
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(len(dec_tx['vin']), 0)
assert_equal(len(dec_tx['vout']), 1)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert_greater_than(len(dec_tx['vin']), 0) # at least one vin
assert_equal(len(dec_tx['vout']), 2) # one change output added
##################################################
# test a fundrawtransaction using only watchonly #
##################################################
inputs = []
outputs = {self.nodes[2].getnewaddress() : watchonly_amount / 2}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
result = self.nodes[3].fundrawtransaction(rawtx, {'includeWatching': True })
res_dec = self.nodes[0].decoderawtransaction(result["hex"])
assert_equal(len(res_dec["vin"]), 1)
assert_equal(res_dec["vin"][0]["txid"], watchonly_txid)
assert("fee" in result.keys())
assert_greater_than(result["changepos"], -1)
###############################################################
# test fundrawtransaction using the entirety of watched funds #
###############################################################
inputs = []
outputs = {self.nodes[2].getnewaddress() : watchonly_amount}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
# Backward compatibility test (2nd param is includeWatching)
result = self.nodes[3].fundrawtransaction(rawtx, True)
res_dec = self.nodes[0].decoderawtransaction(result["hex"])
assert_equal(len(res_dec["vin"]), 2)
assert(res_dec["vin"][0]["txid"] == watchonly_txid or res_dec["vin"][1]["txid"] == watchonly_txid)
assert_greater_than(result["fee"], 0)
assert_greater_than(result["changepos"], -1)
assert_equal(result["fee"] + res_dec["vout"][result["changepos"]]["value"], watchonly_amount / 10)
signedtx = self.nodes[3].signrawtransaction(result["hex"])
assert(not signedtx["complete"])
signedtx = self.nodes[0].signrawtransaction(signedtx["hex"])
assert(signedtx["complete"])
self.nodes[0].sendrawtransaction(signedtx["hex"])
self.nodes[0].generate(1)
self.sync_all()
#######################
# Test feeRate option #
#######################
# Make sure there is exactly one input so coin selection can't skew the result
assert_equal(len(self.nodes[3].listunspent(1)), 1)
inputs = []
outputs = {self.nodes[3].getnewaddress() : 1}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
result = self.nodes[3].fundrawtransaction(rawtx) # uses min_relay_tx_fee (set by settxfee)
result2 = self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2*min_relay_tx_fee})
result3 = self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 10*min_relay_tx_fee})
result_fee_rate = result['fee'] * 1000 / count_bytes(result['hex'])
assert_fee_amount(result2['fee'], count_bytes(result2['hex']), 2 * result_fee_rate)
assert_fee_amount(result3['fee'], count_bytes(result3['hex']), 10 * result_fee_rate)
#############################
# Test address reuse option #
#############################
result3 = self.nodes[3].fundrawtransaction(rawtx, {"reserveChangeKey": False})
res_dec = self.nodes[0].decoderawtransaction(result3["hex"])
changeaddress = ""
for out in res_dec['vout']:
if out['value'] > 1.0:
changeaddress += out['scriptPubKey']['addresses'][0]
assert(changeaddress != "")
nextaddr = self.nodes[3].getnewaddress()
# frt should not have removed the key from the keypool
assert(changeaddress == nextaddr)
result3 = self.nodes[3].fundrawtransaction(rawtx)
res_dec = self.nodes[0].decoderawtransaction(result3["hex"])
changeaddress = ""
for out in res_dec['vout']:
if out['value'] > 1.0:
changeaddress += out['scriptPubKey']['addresses'][0]
assert(changeaddress != "")
nextaddr = self.nodes[3].getnewaddress()
# Now the change address key should be removed from the keypool
assert(changeaddress != nextaddr)
######################################
# Test subtractFeeFromOutputs option #
######################################
# Make sure there is exactly one input so coin selection can't skew the result
assert_equal(len(self.nodes[3].listunspent(1)), 1)
inputs = []
outputs = {self.nodes[2].getnewaddress(): 1}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
result = [self.nodes[3].fundrawtransaction(rawtx), # uses min_relay_tx_fee (set by settxfee)
self.nodes[3].fundrawtransaction(rawtx, {"subtractFeeFromOutputs": []}), # empty subtraction list
self.nodes[3].fundrawtransaction(rawtx, {"subtractFeeFromOutputs": [0]}), # uses min_relay_tx_fee (set by settxfee)
self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2*min_relay_tx_fee}),
self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2*min_relay_tx_fee, "subtractFeeFromOutputs": [0]})]
dec_tx = [self.nodes[3].decoderawtransaction(tx['hex']) for tx in result]
output = [d['vout'][1 - r['changepos']]['value'] for d, r in zip(dec_tx, result)]
change = [d['vout'][r['changepos']]['value'] for d, r in zip(dec_tx, result)]
assert_equal(result[0]['fee'], result[1]['fee'], result[2]['fee'])
assert_equal(result[3]['fee'], result[4]['fee'])
assert_equal(change[0], change[1])
assert_equal(output[0], output[1])
assert_equal(output[0], output[2] + result[2]['fee'])
assert_equal(change[0] + result[0]['fee'], change[2])
assert_equal(output[3], output[4] + result[4]['fee'])
assert_equal(change[3] + result[3]['fee'], change[4])
inputs = []
outputs = {self.nodes[2].getnewaddress(): value for value in (1.0, 1.1, 1.2, 1.3)}
keys = list(outputs.keys())
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
result = [self.nodes[3].fundrawtransaction(rawtx),
# split the fee between outputs 0, 2, and 3, but not output 1
self.nodes[3].fundrawtransaction(rawtx, {"subtractFeeFromOutputs": [0, 2, 3]})]
dec_tx = [self.nodes[3].decoderawtransaction(result[0]['hex']),
self.nodes[3].decoderawtransaction(result[1]['hex'])]
# Nested list of non-change output amounts for each transaction
output = [[out['value'] for i, out in enumerate(d['vout']) if i != r['changepos']]
for d, r in zip(dec_tx, result)]
# List of differences in output amounts between normal and subtractFee transactions
share = [o0 - o1 for o0, o1 in zip(output[0], output[1])]
# output 1 is the same in both transactions
assert_equal(share[1], 0)
# the other 3 outputs are smaller as a result of subtractFeeFromOutputs
assert_greater_than(share[0], 0)
assert_greater_than(share[2], 0)
assert_greater_than(share[3], 0)
# outputs 2 and 3 take the same share of the fee
assert_equal(share[2], share[3])
# output 0 takes at least as much share of the fee, and no more than 2 satoshis more, than outputs 2 and 3
assert_greater_than_or_equal(share[0], share[2])
assert_greater_than_or_equal(share[2] + Decimal(2e-8), share[0])
# the fee is the same in both transactions
assert_equal(result[0]['fee'], result[1]['fee'])
# the total subtracted from the outputs is equal to the fee
assert_equal(share[0] + share[2] + share[3], result[0]['fee'])
if __name__ == '__main__':
RawTransactionsTest().main()
| 43.683594 | 223 | 0.568333 |
4a2198cd0aff96d7f407be336846cb72853bb815 | 632 | py | Python | backend/manage.py | crowdbotics-apps/todemp-31857 | 99d31d4b7c9876a3abc2a43d43e68b629df9b681 | [
"FTL",
"AML",
"RSA-MD"
] | null | null | null | backend/manage.py | crowdbotics-apps/todemp-31857 | 99d31d4b7c9876a3abc2a43d43e68b629df9b681 | [
"FTL",
"AML",
"RSA-MD"
] | null | null | null | backend/manage.py | crowdbotics-apps/todemp-31857 | 99d31d4b7c9876a3abc2a43d43e68b629df9b681 | [
"FTL",
"AML",
"RSA-MD"
] | null | null | null | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'todemp_31857.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| 28.727273 | 76 | 0.685127 |
4a2199f09170bec48ea87fc53cd6380aaee304f0 | 7,280 | py | Python | zoom/_assets/standard_apps/admin/groups.py | zodman/ZoomFoundry | 87a69f519a2ab6b63aeec0a564ce41259e64f88d | [
"MIT"
] | 8 | 2017-04-10T09:53:15.000Z | 2020-08-16T09:53:14.000Z | zoom/_assets/standard_apps/admin/groups.py | zodman/ZoomFoundry | 87a69f519a2ab6b63aeec0a564ce41259e64f88d | [
"MIT"
] | 49 | 2017-04-13T22:51:48.000Z | 2019-08-15T22:53:25.000Z | zoom/_assets/standard_apps/admin/groups.py | zodman/ZoomFoundry | 87a69f519a2ab6b63aeec0a564ce41259e64f88d | [
"MIT"
] | 12 | 2017-04-11T04:16:47.000Z | 2019-08-10T21:41:54.000Z | """
system users
"""
import zoom
from zoom.collect import CollectionController, RawSearch
from zoom.models import Group, Groups
from zoom.tools import ensure_listy, is_listy
import zoom.validators as v
import zoom.fields as f
import model
no_app_groups = v.Validator(
'group names cannot start with a_',
lambda a: not a.startswith('a_')
)
class SelectionField(f.ChosenMultiselectField):
"""Selects things related to groups"""
select_layout = '<select data-placeholder="{}" multiple="multiple" style="width: 400px" class="{}" name="{}" id="{}">\n'
def __init__(self, *args, **kwargs):
f.ChosenMultiselectField.__init__(self, width=60, *args, **kwargs)
def _scan(self, t, func):
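        # Maps the stored selection `t` back onto self.options: a (label, value)
        # option matches if either its label or its stringified value is selected,
        # and `func` picks which part to collect (display_value collects labels).
        # Plain string options are returned as-is.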
if t:
t = [str(i) for i in ensure_listy(t)]
result = []
for option in self.options:
if len(option) == 2 and is_listy(option):
label, value = option
if label in t or str(value) in t:
result.append(func(option))
elif option in t:
result.append(option)
return result
return []
def display_value(self):
return ', '.join(self._scan(self.value, lambda a: a[0]))
def group_fields(request):
user_groups = model.get_user_group_options(request.site)
fields = f.Fields([
f.TextField('Name', v.required, v.valid_name, no_app_groups),
f.TextField('Description', maxlength=60),
f.PulldownField(
'Administrators',
default='administrators',
name='admin_group_id',
options=user_groups
),
])
db = request.site.db
admin = model.AdminModel(db)
if len(request.route) > 2 and request.route[2] not in ['new', 'clear', 'reindex']:
group_id = int(request.route[2])
else:
group_id = None
include_fields = f.Section('Includes',[
SelectionField('Subgroups', options=admin.get_subgroup_options(group_id)),
SelectionField('Users', options=admin.get_user_options()),
])
access_fields = f.Section('Accesses',[
SelectionField('Roles', options=admin.get_role_options(group_id)),
SelectionField('Apps', options=admin.get_app_options()),
])
return f.Fields(fields, include_fields, access_fields)
def group_activity_log(group):
"""gathers log information for the group
Authorization activity related to groups is captured
    by zoom and retrieved from the audit_log table.
Note that system log entries are host specific,
however, audit logs are not host specific
    as hosts sometimes share authorization databases and
thus changes in authorizations affect all hosts
using that database.
"""
query = """
select
app,
user_id,
activity,
subject1,
subject2,
timestamp
from audit_log
where subject2=%s
union select
app,
user_id,
activity,
subject1,
subject2,
timestamp
from audit_log
where
activity in (
"add group",
"remove group",
"create group",
"remove subgroup",
"add subgroup",
"delete group",
"add member",
"remove member"
)
and subject1=%s
order by timestamp desc
limit 20
"""
db = zoom.system.site.db
items = [
(
app,
zoom.helpers.who(user_id),
activity,
subject1,
subject2,
timestamp,
zoom.helpers.when(timestamp)
) for app, user_id, activity, subject1, subject2, timestamp in db(query, group.name, group.name)
]
labels = [
'App',
'User',
'Activity',
'Subject1',
'Subject2',
'Timestamp',
'When'
]
auth_activity = zoom.browse(items, labels=labels)
return """
<h2>Recent Authorizations Activity</h2>
{}
""".format(auth_activity)
class GroupCollectionView(zoom.collect.CollectionView):
def index(self, q='', *args, **kwargs):
"""collection landing page"""
c = self.collection
user = c.user
if c.request.route[-1:] == ['index']:
return zoom.redirect_to('/'+'/'.join(c.request.route[:-1]), **kwargs)
actions = user.can('create', c) and ['New'] or []
if q:
title = 'Selected ' + c.title
records = c.search(q)
else:
title = c.title
records = c.store.find(type='U')
authorized = (i for i in records if user.can('read', i))
items = sorted(authorized, key=c.order)
num_items = len(items)
if num_items != 1:
footer_name = c.title.lower()
else:
footer_name = c.item_title.lower()
if q:
footer = '{:,} {} found in search of {:,} {}'.format(
num_items,
footer_name,
len(c.store),
c.title.lower(),
)
else:
footer = '%s %s' % (len(items), footer_name)
admin_ids = [item.admin_group_id for item in items]
admin_lookup = {
group.group_id: zoom.link_to(group.name, 'groups', group.group_id)
for group in zoom.system.site.groups
if group.group_id in admin_ids
}
for item in items:
item.administrators = admin_lookup.get(item.admin_group_id, '')
content = zoom.browse(
[c.model(i) for i in items],
labels=c.get_labels(),
columns=c.get_columns(),
footer=footer
)
return zoom.page(content, title=title, actions=actions, search=q)
def show(self, key):
page = super().show(key)
group = zoom.system.site.groups.get(key)
if group:
page.content += group_activity_log(group)
return page
class GroupCollectionController(CollectionController):
def before_insert(self, record):
record['type'] = 'U'
def after_insert(self, record):
model.update_group_relationships(record)
def before_update(self, record):
record['type'] = 'U'
model.update_group_relationships(record)
def get_groups_collection(request):
def user_group(group):
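        # user groups have type 'U'; auto-generated administrator groups are prefixed with 'a_'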
return group.type == 'U' and not group.name.startswith('a_')
def get_fields():
return group_fields(request)
db = request.site.db
users = Groups(db)
labels = 'Name', 'Description', 'Administrators'
columns = 'link', 'description', 'administrators'
return model.GroupsCollection(
get_fields,
model=Group,
controller=GroupCollectionController,
view=GroupCollectionView,
store=users,
item_name='group',
url='/admin/groups',
filter=user_group,
columns=columns,
labels=labels,
key_name='id',
search_engine=RawSearch
)
def main(route, request):
return get_groups_collection(request)(route, request)
| 27.680608 | 124 | 0.567033 |
4a219a00cf4c316a48d4aa57099a403f5c590d0d | 6,425 | py | Python | video.py | wangpengcheng/consistent_depth | b370309fc16cb9c940a68688f9579230f1af728d | [
"MIT"
] | null | null | null | video.py | wangpengcheng/consistent_depth | b370309fc16cb9c940a68688f9579230f1af728d | [
"MIT"
] | null | null | null | video.py | wangpengcheng/consistent_depth | b370309fc16cb9c940a68688f9579230f1af728d | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
import cv2
import logging
import os
from os.path import join as pjoin
import sys
import tempfile
from utils import (frame_sampling, image_io)
from utils.helpers import mkdir_ifnotexists
#
ffmpeg = "ffmpeg"
ffprobe = "ffprobe"
# Find similar image frame pairs
def sample_pairs(frame_range, flow_ops):
    """Sample frame pairs from the given frame range.
    Args:
        frame_range: range of frame indices to sample pairs from.
        flow_ops: names of the pair sampling modes to use.
    Returns:
        The sampled frame pairs.
    """
    # TODO: update the frame range with the reconstruction range
name_mode_map = frame_sampling.SamplePairsMode.name_mode_map()
opts = [
frame_sampling.SamplePairsOptions(mode=name_mode_map[op]) for op in flow_ops
]
pairs = frame_sampling.SamplePairs.sample(
opts, frame_range=frame_range, two_way=True
)
print(f"Sampled {len(pairs)} frame pairs.")
return pairs
# Base video handling class
class Video:
def __init__(self, path, video_file=None):
self.path = path
self.video_file = video_file
def check_extracted_pts(self):
pts_file = "%s/frames.txt" % self.path
if not os.path.exists(pts_file):
return False
with open(pts_file, "r") as file:
lines = file.readlines()
self.frame_count = int(lines[0])
width = int(lines[1])
height = int(lines[2])
print("%d frames detected (%d x %d)." % (self.frame_count, width, height))
if len(lines) != self.frame_count + 3:
sys.exit("frames.txt has wrong number of lines")
print("frames.txt exists, checked OK.")
return True
return False
def extract_pts(self):
if self.check_extracted_pts():
# frames.txt exists and checked OK.
return
        # Check that the input video file exists
        if not os.path.exists(self.video_file):
            sys.exit("ERROR: input video file '%s' not found." % self.video_file)
# Get width and height
tmp_file = tempfile.mktemp(".png")
cmd = "%s -i %s -vframes 1 %s" % (ffmpeg, self.video_file, tmp_file)
print(cmd)
res = os.popen(cmd).read()
image = image_io.load_image(tmp_file)
height = image.shape[0]
width = image.shape[1]
        os.remove(tmp_file)  # remove the temporary file
if os.path.exists(tmp_file):
sys.exit("ERROR: unable to remove '%s'" % tmp_file)
# Get PTS
def parse_line(line, token):
if line[: len(token)] != token:
sys.exit("ERROR: record is malformed, expected to find '%s'." % token)
return line[len(token) :]
        # Query the video frames and their presentation timestamps (PTS)
ffprobe_cmd = "%s %s -select_streams v:0 -show_frames" % (
ffprobe,
self.video_file,
)
cmd = ffprobe_cmd + " | grep pkt_pts_time"
print(cmd)
res = os.popen(cmd).read()
pts = []
for line in res.splitlines():
pts.append(parse_line(line, "pkt_pts_time="))
self.frame_count = len(pts)
print("%d frames detected." % self.frame_count)
        # Write the frame PTS output file
pts_file = "%s/frames.txt" % self.path
with open(pts_file, "w") as file:
file.write("%d\n" % len(pts))
file.write("%s\n" % width)
file.write("%s\n" % height)
for t in pts:
file.write("%s\n" % t)
        # Verify the written file
self.check_extracted_pts()
def check_frames(self, frame_dir, extension, frames=None):
if not os.path.isdir(frame_dir):
return False
        # Collect the files in the frame directory
files = os.listdir(frame_dir)
files = [n for n in files if n.endswith(extension)]
if len(files) == 0:
return False
        # If no specific frames are given, check all frames
if frames is None:
frames = range(self.frame_count)
if len(files) != len(frames):
sys.exit(
"ERROR: expected to find %d files but found %d in '%s'"
% (self.frame_count, len(files), frame_dir)
)
for i in frames:
frame_file = "%s/frame_%06d.%s" % (frame_dir, i, extension)
if not os.path.exists(frame_file):
sys.exit("ERROR: did not find expected file '%s'" % frame_file)
print("Frames found, checked OK.")
return True
    # Extract the video into individual image frames
def extract_frames(self):
        frame_dir = "%s/color_full" % self.path  # output directory for extracted frames
mkdir_ifnotexists(frame_dir)
        # Check whether the frames have already been extracted
if self.check_frames(frame_dir, "png"):
# Frames are already extracted and checked OK.
return
if not os.path.exists(self.video_file):
            sys.exit("ERROR: input video file '%s' not found." % self.video_file)
        # Otherwise, extract the frames with ffmpeg
cmd = "%s -i %s -start_number 0 -vsync 0 %s/frame_%%06d.png" % (
ffmpeg,
self.video_file,
frame_dir,
)
print(cmd)
os.popen(cmd).read()
count = len(os.listdir(frame_dir))
if count != self.frame_count:
sys.exit(
"ERROR: %d frames extracted, but %d PTS entries."
% (count, self.frame_count)
)
        # Verify the extracted frames again
self.check_frames(frame_dir, "png")
    # Downscale all frames
def downscale_frames(
self, subdir, max_size, ext, align=16, full_subdir="color_full"
):
full_dir = pjoin(self.path, full_subdir)
down_dir = pjoin(self.path, subdir)
        # Create the directory for the downscaled frames
mkdir_ifnotexists(down_dir)
        # Check whether the downscaled frames already exist
if self.check_frames(down_dir, ext):
# Frames are already extracted and checked OK.
return
        # Downscale every frame
for i in range(self.frame_count):
full_file = "%s/frame_%06d.png" % (full_dir, i)
down_file = ("%s/frame_%06d." + ext) % (down_dir, i)
suppress_messages = (i > 0)
            # Load the image
image = image_io.load_image(
full_file, max_size=max_size, align=align,
suppress_messages=suppress_messages
)
image = image[..., ::-1] # Channel swizzle
            # Save in the format implied by the extension
if ext == "raw":
image_io.save_raw_float32_image(down_file, image)
else:
cv2.imwrite(down_file, image * 255)
self.check_frames(down_dir, ext)
| 33.118557 | 86 | 0.561556 |
4a219b7526f68ce078399ab84b1c4ba9465800fd | 1,001 | py | Python | controllers/door_test/door_test.py | SDP-Team10/Railly-Clean | f2960d995d9440ecfde6c04f67635f81977c140b | [
"MIT"
] | null | null | null | controllers/door_test/door_test.py | SDP-Team10/Railly-Clean | f2960d995d9440ecfde6c04f67635f81977c140b | [
"MIT"
] | 14 | 2021-01-24T11:52:26.000Z | 2021-04-02T06:59:59.000Z | controllers/door_test/door_test.py | SDP-Team10/Railly-Clean | f2960d995d9440ecfde6c04f67635f81977c140b | [
"MIT"
] | null | null | null | """door_test controller."""
# You may need to import some classes of the controller module. Ex:
# from controller import Robot, Motor, DistanceSensor
from controller import Robot
# create the Robot instance.
robot = Robot()
# get the time step of the current world.
timestep = int(robot.getBasicTimeStep())
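# get the motor that drives the door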
door_motor = robot.getDevice("door_motor")
# You should insert a getDevice-like function in order to get the
# instance of a device of the robot. Something like:
# motor = robot.getMotor('motorname')
# ds = robot.getDistanceSensor('dsname')
# ds.enable(timestep)
# Main loop:
# - perform simulation steps until Webots is stopping the controller
while robot.step(timestep) != -1:
door_motor.setPosition(-0.01)
# Read the sensors:
# Enter here functions to read sensor data, like:
# val = ds.getValue()
# Process sensor data here.
# Enter here functions to send actuator commands, like:
# motor.setPosition(10.0)
pass
# Enter here exit cleanup code.
| 28.6 | 68 | 0.721279 |
4a219bab21bda7ef6a928b56581c8f897c59d688 | 17,183 | py | Python | src/GraphicsView.py | chiefenne/PyAero-Qt-for-Python- | d9a8057afc97b6e78f72528cb97fc7ac27480823 | [
"MIT"
] | 74 | 2017-05-30T13:08:36.000Z | 2022-03-21T04:19:56.000Z | src/GraphicsView.py | chiefenne/PyAero-Qt-for-Python- | d9a8057afc97b6e78f72528cb97fc7ac27480823 | [
"MIT"
] | 9 | 2019-09-15T07:05:10.000Z | 2022-02-07T18:03:38.000Z | src/GraphicsView.py | chiefenne/PyAero-Qt-for-Python- | d9a8057afc97b6e78f72528cb97fc7ac27480823 | [
"MIT"
] | 16 | 2017-01-13T18:01:26.000Z | 2022-03-19T15:23:42.000Z | import os
import math
from PySide6 import QtGui, QtCore, QtWidgets
from Settings import ZOOMANCHOR, SCALEINC, MINZOOM, MAXZOOM, \
MARKERSIZE, RUBBERBANDSIZE, VIEWSTYLE
# put constraints on rubberband zoom (relative rectangle wdith)
RUBBERBANDSIZE = min(RUBBERBANDSIZE, 1.0)
RUBBERBANDSIZE = max(RUBBERBANDSIZE, 0.05)
class GraphicsView(QtWidgets.QGraphicsView):
"""The graphics view is the canvas where airfoils are drawn upon
Its coordinates are in pixels or "physical" coordinates.
Attributes:
origin (QPoint): stores location of mouse press
parent (QMainWindow): mainwindow instance
rubberband (QRubberBand): an instance of the custom rubberband class
used for zooming and selecting
sceneview (QRectF): stores current view in scene coordinates
"""
def __init__(self, parent=None, scene=None):
"""Default settings for graphicsview instance
Args:
parent (QMainWindow, optional): mainwindow instance
"""
super().__init__(scene)
self.parent = parent
self._leftMousePressed = False
# allow drops from drag and drop
self.setAcceptDrops(True)
# use custom rubberband
self.rubberband = RubberBand(QtWidgets.QRubberBand.Rectangle, self)
# needed for correct mouse wheel zoom
# otherwise mouse anchor is wrong; it would use (0, 0)
self.setInteractive(True)
# set QGraphicsView attributes
self.setRenderHints(QtGui.QPainter.Antialiasing |
QtGui.QPainter.TextAntialiasing)
self.setViewportUpdateMode(QtWidgets.QGraphicsView.FullViewportUpdate)
self.setResizeAnchor(QtWidgets.QGraphicsView.AnchorViewCenter)
# view behaviour when zooming
if ZOOMANCHOR == 'mouse':
# point under mouse pointer stays fixed during zoom
self.setTransformationAnchor(
QtWidgets.QGraphicsView.AnchorUnderMouse)
else:
# view center stays fixed during zoom
self.setTransformationAnchor(
QtWidgets.QGraphicsView.AnchorViewCenter)
self.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
# normally (0, 0) is upperleft corner of view
# swap y-axis in order to make (0, 0) lower left
# and y-axis pointing upwards
self.scale(1, -1)
# cache view to be able to keep it during resize
self.getSceneFromView()
# set background style and color for view
self.setBackground(VIEWSTYLE)
def setBackground(self, styletype):
"""Switches between gradient and simple background using style sheets.
border-color (in HTML) works only if border-style is set.
"""
if styletype == 'gradient':
style = """
border-style:solid; border-color: lightgrey;
border-width: 1px; background-color: QLinearGradient(x1: 0.0, y1: 0.0,
x2: 0.0, y2: 1.0, stop: 0.3 white, stop: 1.0 #263a5a);
"""
# if more stops are needed
# stop: 0.3 white, stop: 0.6 #4b73b4, stop: 1.0 #263a5a); } """)
else:
style = ("""
border-style:solid; border-color: lightgrey; \
border-width: 1px; background-color: white;""")
self.setStyleSheet(style)
def resizeEvent(self, event):
"""Re-implement QGraphicsView's resizeEvent handler"""
# call corresponding base class method
super().resizeEvent(event)
# scrollbars need to be switched off when calling fitinview from
# within resize event otherwise strange recursion can occur
self.fitInView(self.sceneview,
aspectRadioMode=QtCore.Qt.KeepAspectRatio)
def mousePressEvent(self, event):
"""Re-implement QGraphicsView's mousePressEvent handler"""
# status of CTRL key
ctrl = event.modifiers() == QtCore.Qt.ControlModifier
# if a mouse event happens in the graphics view
# put the keyboard focus to the view as well
self.setFocus()
self.origin = event.pos()
# do rubberband zoom only with left mouse button
if event.button() == QtCore.Qt.LeftButton:
self._leftMousePressed = True
self._dragPos = event.pos()
if ctrl:
self.setCursor(QtCore.Qt.ClosedHandCursor)
else:
# initiate rubberband origin and size (zero at first)
self.rubberband.setGeometry(QtCore.QRect(self.origin,
QtCore.QSize()))
# show, even at zero size
# allows to check later using isVisible()
self.rubberband.show()
# call corresponding base class method
super().mousePressEvent(event)
def mouseMoveEvent(self, event):
"""Re-implement QGraphicsView's mouseMoveEvent handler"""
# if a mouse event happens in the graphics view
# put the keyboard focus to the view as well
self.setFocus()
# status of CTRL key
ctrl = event.modifiers() == QtCore.Qt.ControlModifier
# pan the view with the left mouse button and CRTL down
if self._leftMousePressed and ctrl:
self.setCursor(QtCore.Qt.ClosedHandCursor)
newPos = event.pos()
diff = newPos - self._dragPos
self._dragPos = newPos
# this actually does the pan
# no matter if scroll bars are displayed or not
self.horizontalScrollBar().setValue(
self.horizontalScrollBar().value() - diff.x())
self.verticalScrollBar().setValue(
self.verticalScrollBar().value() - diff.y())
if self.rubberband.isVisible() and not ctrl:
self.setInteractive(False)
self.rubberband.setGeometry(
QtCore.QRect(self.origin, event.pos()).normalized())
# call corresponding base class method
super().mouseMoveEvent(event)
def mouseReleaseEvent(self, event):
"""Re-implement QGraphicsView's mouseReleaseEvent handler"""
self._leftMousePressed = False
self.setCursor(QtCore.Qt.ArrowCursor)
# do zoom wrt to rect of rubberband
if self.rubberband.isVisible():
self.rubberband.hide()
rect = self.rubberband.geometry()
rectf = self.mapToScene(rect).boundingRect()
# zoom the selected rectangle (works on scene coordinates)
# zoom rect must be at least 5% of view width to allow zoom
if self.rubberband.allow_zoom:
self.fitInView(rectf,
aspectRadioMode=QtCore.Qt.KeepAspectRatio)
# rescale markers during zoom
# i.e. keep them constant size
self.adjustMarkerSize()
# reset to True, so that mouse wheel zoom anchor works
self.setInteractive(True)
# reset ScrollHandDrag if it was active
if self.dragMode() == QtWidgets.QGraphicsView.ScrollHandDrag:
self.setDragMode(QtWidgets.QGraphicsView.NoDrag)
# call corresponding base class method
super().mouseReleaseEvent(event)
def wheelEvent(self, event):
"""Re-implement QGraphicsView's wheelEvent handler"""
f = SCALEINC
# wheelevent.angleDelta() returns a QPoint instance
# the angle increment of the wheel is stored on the .y() attribute
angledelta = event.angleDelta().y()
if math.copysign(1, angledelta) > 0:
f = 1.0 / SCALEINC
self.scaleView(f)
# DO NOT CONTINUE HANDLING EVENTS HERE!!!
# this would destroy the mouse anchor
# call corresponding base class method
# super().wheelEvent(event)
def keyPressEvent(self, event):
"""Re-implement QGraphicsView's keyPressEvent handler"""
key = event.key()
if key == QtCore.Qt.Key_Plus or key == QtCore.Qt.Key_PageDown:
f = SCALEINC
            # if scaling with the keys, then do not use the mouse as zoom anchor
anchor = self.transformationAnchor()
self.setTransformationAnchor(QtWidgets.QGraphicsView.AnchorViewCenter)
self.scaleView(f)
self.setTransformationAnchor(anchor)
if key == QtCore.Qt.Key_PageDown:
# return here so that later base class is NOT called
# because QAbstractScrollArea would otherwise handle
# the event and do something we do not want
return
elif key == QtCore.Qt.Key_Minus or key == QtCore.Qt.Key_PageUp:
f = 1.0 / SCALEINC
            # if scaling with the keys, then do not use the mouse as zoom anchor
anchor = self.transformationAnchor()
self.setTransformationAnchor(QtWidgets.QGraphicsView.AnchorViewCenter)
self.scaleView(f)
self.setTransformationAnchor(anchor)
if key == QtCore.Qt.Key_PageUp:
# return here so that later base class is NOT called
# because QAbstractScrollArea would otherwise handle
# the event and do something we do not want
return
elif key == QtCore.Qt.Key_Home:
self.parent.slots.onViewAll()
elif key == QtCore.Qt.Key_Delete:
# removes all selected airfoils
self.parent.slots.removeAirfoil()
# call corresponding base class method
super().keyPressEvent(event)
def keyReleaseEvent(self, event):
"""Re-implement QGraphicsView's keyReleaseEvent handler"""
# call corresponding base class method
super().keyReleaseEvent(event)
def dragEnterEvent(self, event):
if event.mimeData().hasUrls():
event.accept()
else:
event.ignore()
def dragLeaveEvent(self, event):
pass
def dragMoveEvent(self, event):
if event.mimeData().hasUrls():
if event.mimeData().hasText():
event.setDropAction(QtCore.Qt.CopyAction)
event.accept()
else:
event.ignore()
def dropEvent(self, event):
for url in event.mimeData().urls():
path = url.toLocalFile()
if os.path.isfile(path):
self.parent.slots.loadAirfoil(path, comment='#')
def scaleView(self, factor):
# check if zoom limits are exceeded
# m11 = x-scaling
sx = self.transform().m11()
too_big = sx > MAXZOOM and factor > 1.0
too_small = sx < MINZOOM and factor < 1.0
if too_big or too_small:
return
# do the actual zooming
self.scale(factor, factor)
# rescale markers during zoom, i.e. keep them constant size
self.adjustMarkerSize()
# cache view to be able to keep it during resize
self.getSceneFromView()
def adjustMarkerSize(self):
"""Adjust marker size during zoom. Marker items are circles
which are otherwise affected by zoom. Using MARKERSIZE from
Settings a fixed markersize (e.g. 3 pixels) can be kept.
        This method imitates the behaviour of pen.setCosmetic()
"""
if not self.parent.airfoil:
return
# markers are drawn in GraphicsItem using scene coordinates
# in order to keep them constant size, also when zooming
# a fixed pixel size (MARKERSIZE from settings) is mapped to
# scene coordinates
# depending on the zoom, this leads to always different
# scene coordinates
# map a square with side length of MARKERSIZE to the scene coords
mappedMarker = self.mapToScene(
QtCore.QRect(0, 0, MARKERSIZE, MARKERSIZE))
mappedMarkerWidth = mappedMarker.boundingRect().width()
if self.parent.airfoil.contourPolygon:
markers = self.parent.airfoil.polygonMarkers
x, y = self.parent.airfoil.raw_coordinates
for i, marker in enumerate(markers):
# in case of circle, args is a QRectF
marker.args = [QtCore.QRectF(x[i] - mappedMarkerWidth,
y[i] - mappedMarkerWidth,
2. * mappedMarkerWidth,
2. * mappedMarkerWidth)]
# if self.parent.airfoil.contourSpline:
if hasattr(self.parent.airfoil, 'contourSpline'):
markers = self.parent.airfoil.splineMarkers
x, y = self.parent.airfoil.spline_data[0]
for i, marker in enumerate(markers):
# in case of circle, args is a QRectF
marker.args = [QtCore.QRectF(x[i] - mappedMarkerWidth,
y[i] - mappedMarkerWidth,
2. * mappedMarkerWidth,
2. * mappedMarkerWidth)]
def getSceneFromView(self):
"""Cache view to be able to keep it during resize"""
# map view rectangle to scene coordinates
polygon = self.mapToScene(self.rect())
# sceneview describes the rectangle which is currently
# being viewed in scene coordinates
# this is needed during resizing to be able to keep the view
self.sceneview = QtCore.QRectF(polygon[0], polygon[2])
def contextMenuEvent(self, event):
"""creates popup menu for the graphicsview"""
menu = QtWidgets.QMenu(self)
fitairfoil = menu.addAction('Fit airfoil in view')
fitairfoil.setShortcut('CTRL+f')
fitall = menu.addAction('Fit all items in view')
fitall.setShortcut('HOME, CTRL+SHIFT+f')
menu.addSeparator()
delitems = menu.addAction('Delete airfoil')
delitems.setShortcut('Del')
menu.addSeparator()
togglebg = menu.addAction('Toggle background')
togglebg.setShortcut('CTRL+b')
action = menu.exec_(self.mapToGlobal(event.pos()))
if action == togglebg:
self.parent.slots.onBackground()
elif action == fitairfoil:
self.parent.slots.fitAirfoilInView()
elif action == fitall:
self.parent.slots.onViewAll()
# remove all selected items from the scene
elif action == delitems:
self.parent.slots.removeAirfoil()
# call corresponding base class method
super().contextMenuEvent(event)
class RubberBand(QtWidgets.QRubberBand):
"""Custom rubberband
from: http://stackoverflow.com/questions/25642618
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.view = args[1]
# set pen and brush (filling)
self.pen = QtGui.QPen()
self.pen.setStyle(QtCore.Qt.DotLine)
self.pen.setColor(QtGui.QColor(80, 80, 100))
self.brush = QtGui.QBrush()
color = QtGui.QColor(20, 20, 80, 30)
self.brush.setColor(color)
# self.brush.setStyle(QtCore.Qt.NoBrush)
self.brush.setStyle(QtCore.Qt.SolidPattern)
# set style selectively for the rubberband like that
# see: http://stackoverflow.com/questions/25642618
# required as opacity might not work
# NOTE: opacity removed here
self.setStyle(QtWidgets.QStyleFactory.create('windowsvista'))
# set boolean for allowing zoom
self.allow_zoom = False
def paintEvent(self, QPaintEvent):
painter = QtGui.QPainter(self)
self.pen.setColor(QtGui.QColor(80, 80, 100))
self.pen.setWidthF(1.5)
self.pen.setStyle(QtCore.Qt.DotLine)
# zoom rect must be at least RUBBERBANDSIZE % of view to allow zoom
if (QPaintEvent.rect().width() < RUBBERBANDSIZE * self.view.width()) \
or \
(QPaintEvent.rect().height() < RUBBERBANDSIZE * self.view.height()):
self.brush.setStyle(QtCore.Qt.NoBrush)
# set boolean for allowing zoom
self.allow_zoom = False
else:
# if rubberband rect is big enough indicate this by fill color
color = QtGui.QColor(10, 30, 140, 45)
self.brush.setColor(color)
self.brush.setStyle(QtCore.Qt.SolidPattern)
# set boolean for allowing zoom
self.allow_zoom = True
painter.setBrush(self.brush)
painter.setPen(self.pen)
painter.drawRect(QPaintEvent.rect())
| 37.112311 | 83 | 0.593144 |
4a219bde445d659598f3771eadc92e36811cb495 | 1,668 | py | Python | src/main/python/systemds/operator/algorithm/builtin/slicefinder.py | dkerschbaumer/systemds | dc3a9f489951d7e13ec47c5181d2c5d7022665ce | [
"Apache-2.0"
] | null | null | null | src/main/python/systemds/operator/algorithm/builtin/slicefinder.py | dkerschbaumer/systemds | dc3a9f489951d7e13ec47c5181d2c5d7022665ce | [
"Apache-2.0"
] | null | null | null | src/main/python/systemds/operator/algorithm/builtin/slicefinder.py | dkerschbaumer/systemds | dc3a9f489951d7e13ec47c5181d2c5d7022665ce | [
"Apache-2.0"
] | null | null | null | # -------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# -------------------------------------------------------------
# Autogenerated By : src/main/python/generator/generator.py
# Autogenerated From : scripts/builtin/slicefinder.dml
from typing import Dict, Iterable
from systemds.operator import OperationNode
from systemds.script_building.dag import OutputType
from systemds.utils.consts import VALID_INPUT_TYPES
def slicefinder(X: OperationNode, e: OperationNode, **kwargs: Dict[str, VALID_INPUT_TYPES]) -> OperationNode:
X._check_matrix_op()
e._check_matrix_op()
params_dict = {'X':X, 'e':e}
params_dict.update(kwargs)
return OperationNode(X.sds_context, 'slicefinder', named_input_nodes=params_dict, output_type=OutputType.LIST, number_of_outputs=3, output_types=[OutputType.MATRIX, OutputType.MATRIX, OutputType.MATRIX])
| 40.682927 | 207 | 0.71283 |
4a219c378a0fcd70eec28698e71e215985efde95 | 1,455 | py | Python | Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/openedx/core/djangoapps/verified_track_content/migrations/0003_migrateverifiedtrackcohortssetting.py | osoco/better-ways-of-thinking-about-software | 83e70d23c873509e22362a09a10d3510e10f6992 | [
"MIT"
] | 3 | 2021-12-15T04:58:18.000Z | 2022-02-06T12:15:37.000Z | Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/openedx/core/djangoapps/verified_track_content/migrations/0003_migrateverifiedtrackcohortssetting.py | osoco/better-ways-of-thinking-about-software | 83e70d23c873509e22362a09a10d3510e10f6992 | [
"MIT"
] | null | null | null | Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/openedx/core/djangoapps/verified_track_content/migrations/0003_migrateverifiedtrackcohortssetting.py | osoco/better-ways-of-thinking-about-software | 83e70d23c873509e22362a09a10d3510e10f6992 | [
"MIT"
] | 1 | 2019-01-02T14:38:50.000Z | 2019-01-02T14:38:50.000Z | from django.db import migrations, models
import django.db.models.deletion
from django.conf import settings
from opaque_keys.edx.django.models import CourseKeyField
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('verified_track_content', '0002_verifiedtrackcohortedcourse_verified_cohort_name'),
]
operations = [
migrations.CreateModel(
name='MigrateVerifiedTrackCohortsSetting',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('change_date', models.DateTimeField(auto_now_add=True, verbose_name='Change date')),
('enabled', models.BooleanField(default=False, verbose_name='Enabled')),
('old_course_key', CourseKeyField(help_text='Course key for which to migrate verified track cohorts from', max_length=255)),
('rerun_course_key', CourseKeyField(help_text='Course key for which to migrate verified track cohorts to enrollment tracks to', max_length=255)),
('audit_cohort_names', models.TextField(help_text='Comma-separated list of audit cohort names')),
('changed_by', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, editable=False, to=settings.AUTH_USER_MODEL, null=True, verbose_name='Changed by')),
],
),
]
| 51.964286 | 178 | 0.697595 |
4a219ca1689a5e85f0d729bc2eec8dbc334ba6ce | 8,764 | bzl | Python | apple/ios.bzl | michaeleisel/rules_apple | 424c73847eba4d2a093fa59df1aa22b5629b0fda | [
"Apache-2.0"
] | null | null | null | apple/ios.bzl | michaeleisel/rules_apple | 424c73847eba4d2a093fa59df1aa22b5629b0fda | [
"Apache-2.0"
] | null | null | null | apple/ios.bzl | michaeleisel/rules_apple | 424c73847eba4d2a093fa59df1aa22b5629b0fda | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bazel rules for creating iOS applications and bundles."""
load(
"@build_bazel_rules_apple//apple/internal/testing:apple_test_assembler.bzl",
"apple_test_assembler",
)
load(
"@build_bazel_rules_apple//apple/internal/testing:build_test_rules.bzl",
"apple_build_test_rule",
)
load(
"@build_bazel_rules_apple//apple/internal/testing:ios_rules.bzl",
_ios_internal_ui_test_bundle = "ios_internal_ui_test_bundle",
_ios_internal_unit_test_bundle = "ios_internal_unit_test_bundle",
_ios_ui_test = "ios_ui_test",
_ios_unit_test = "ios_unit_test",
)
load(
"@build_bazel_rules_apple//apple/internal:apple_product_type.bzl",
"apple_product_type",
)
load(
"@build_bazel_rules_apple//apple/internal:binary_support.bzl",
"binary_support",
)
load(
"@build_bazel_rules_apple//apple/internal:ios_rules.bzl",
_ios_app_clip = "ios_app_clip",
_ios_application = "ios_application",
_ios_extension = "ios_extension",
_ios_framework = "ios_framework",
_ios_imessage_application = "ios_imessage_application",
_ios_imessage_extension = "ios_imessage_extension",
_ios_static_framework = "ios_static_framework",
_ios_sticker_pack_extension = "ios_sticker_pack_extension",
)
def ios_application(name, **kwargs):
"""Builds and bundles an iOS application."""
bundling_args = binary_support.add_entitlements_and_swift_linkopts(
name,
platform_type = str(apple_common.platform_type.ios),
product_type = apple_product_type.application,
**kwargs
)
_ios_application(
name = name,
dylibs = kwargs.get("frameworks", []),
**bundling_args
)
def ios_app_clip(name, **kwargs):
"""Builds and bundles an iOS app clip."""
bundling_args = binary_support.add_entitlements_and_swift_linkopts(
name,
platform_type = str(apple_common.platform_type.ios),
product_type = apple_product_type.app_clip,
**kwargs
)
_ios_app_clip(
name = name,
dylibs = kwargs.get("frameworks", []),
**bundling_args
)
def ios_extension(name, **kwargs):
"""Builds and bundles an iOS application extension."""
bundling_args = binary_support.add_entitlements_and_swift_linkopts(
name,
platform_type = str(apple_common.platform_type.ios),
product_type = apple_product_type.app_extension,
**kwargs
)
_ios_extension(
name = name,
dylibs = kwargs.get("frameworks", []),
**bundling_args
)
def ios_framework(name, **kwargs):
# buildifier: disable=function-docstring-args
"""Builds and bundles an iOS dynamic framework."""
binary_args = dict(kwargs)
# TODO(b/120861201): The linkopts macro additions here only exist because the Starlark linking
# API does not accept extra linkopts and link inputs. With those, it will be possible to merge
# these workarounds into the rule implementations.
linkopts = binary_args.pop("linkopts", [])
bundle_name = binary_args.get("bundle_name", name)
linkopts += ["-install_name", "@rpath/%s.framework/%s" % (bundle_name, bundle_name)]
binary_args["linkopts"] = linkopts
bundling_args = binary_support.add_entitlements_and_swift_linkopts(
name,
include_entitlements = False,
platform_type = str(apple_common.platform_type.ios),
product_type = apple_product_type.framework,
exported_symbols_lists = binary_args.pop("exported_symbols_lists", None),
**binary_args
)
# Remove any kwargs that shouldn't be passed to the underlying rule.
bundling_args.pop("entitlements", None)
_ios_framework(
name = name,
dylibs = binary_args.get("frameworks", []),
**bundling_args
)
def ios_static_framework(name, **kwargs):
# buildifier: disable=function-docstring-args
"""Builds and bundles an iOS static framework for third-party distribution."""
avoid_deps = kwargs.get("avoid_deps")
deps = kwargs.get("deps")
apple_static_library_name = "%s.apple_static_library" % name
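    # Build an intermediate static library target that the framework rule below bundles.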
native.apple_static_library(
name = apple_static_library_name,
deps = deps,
avoid_deps = avoid_deps,
minimum_os_version = kwargs.get("minimum_os_version"),
platform_type = str(apple_common.platform_type.ios),
testonly = kwargs.get("testonly"),
visibility = kwargs.get("visibility"),
)
passthrough_args = kwargs
passthrough_args.pop("avoid_deps", None)
passthrough_args.pop("deps", None)
_ios_static_framework(
name = name,
deps = [apple_static_library_name],
avoid_deps = [apple_static_library_name],
**passthrough_args
)
# TODO(b/118104491): Remove this macro and move the rule definition back to this file.
def ios_imessage_application(name, **kwargs):
"""Macro to preprocess entitlements for iMessage applications."""
bundling_args = binary_support.add_entitlements_and_swift_linkopts(
name,
platform_type = str(apple_common.platform_type.ios),
product_type = apple_product_type.messages_application,
is_stub = True,
**kwargs
)
_ios_imessage_application(
name = name,
**bundling_args
)
# TODO(b/118104491): Remove this macro and move the rule definition back to this file.
def ios_sticker_pack_extension(name, **kwargs):
"""Macro to preprocess entitlements for Sticker Pack extensions."""
bundling_args = binary_support.add_entitlements_and_swift_linkopts(
name,
platform_type = str(apple_common.platform_type.ios),
product_type = apple_product_type.messages_sticker_pack_extension,
is_stub = True,
**kwargs
)
_ios_sticker_pack_extension(
name = name,
**bundling_args
)
# TODO(b/118104491): Remove this macro and move the rule definition back to this file.
def ios_imessage_extension(name, **kwargs):
"""Macro to override the linkopts and preprocess entitlements for iMessage extensions."""
bundling_args = binary_support.add_entitlements_and_swift_linkopts(
name,
platform_type = str(apple_common.platform_type.ios),
product_type = apple_product_type.messages_extension,
**kwargs
)
return _ios_imessage_extension(
name = name,
dylibs = bundling_args.get("frameworks", []),
**bundling_args
)
_DEFAULT_TEST_RUNNER = "@build_bazel_rules_apple//apple/testing/default_runner:ios_default_runner"
def ios_unit_test(name, **kwargs):
runner = kwargs.pop("runner", None) or _DEFAULT_TEST_RUNNER
apple_test_assembler.assemble(
name = name,
bundle_rule = _ios_internal_unit_test_bundle,
test_rule = _ios_unit_test,
runner = runner,
bundle_loader = kwargs.get("test_host"),
dylibs = kwargs.get("frameworks"),
**kwargs
)
def ios_ui_test(name, **kwargs):
runner = kwargs.pop("runner", None) or _DEFAULT_TEST_RUNNER
apple_test_assembler.assemble(
name = name,
bundle_rule = _ios_internal_ui_test_bundle,
test_rule = _ios_ui_test,
runner = runner,
dylibs = kwargs.get("frameworks"),
**kwargs
)
def ios_unit_test_suite(name, **kwargs):
apple_test_assembler.assemble(
name = name,
bundle_rule = _ios_internal_unit_test_bundle,
test_rule = _ios_unit_test,
bundle_loader = kwargs.get("test_host"),
dylibs = kwargs.get("frameworks"),
**kwargs
)
def ios_ui_test_suite(name, **kwargs):
apple_test_assembler.assemble(
name = name,
bundle_rule = _ios_internal_ui_test_bundle,
test_rule = _ios_ui_test,
dylibs = kwargs.get("frameworks"),
**kwargs
)
ios_build_test = apple_build_test_rule(
doc = """\
Test rule to check that the given library targets (Swift, Objective-C, C++)
build for iOS.
Typical usage:
```starlark
ios_build_test(
name = "my_build_test",
minimum_os_version = "12.0",
targets = [
"//some/package:my_library",
],
)
```
""",
platform_type = "ios",
)
| 32.82397 | 98 | 0.691693 |
4a219d13e02f7398dd19c708f05cb8c4625f8da7 | 821 | py | Python | userbot/modules/stickerjason.py | TAMILVIP007/javes-3.0 | d9238785fa2d79740bbb526aca92455dbccb3838 | [
"MIT"
] | null | null | null | userbot/modules/stickerjason.py | TAMILVIP007/javes-3.0 | d9238785fa2d79740bbb526aca92455dbccb3838 | [
"MIT"
] | null | null | null | userbot/modules/stickerjason.py | TAMILVIP007/javes-3.0 | d9238785fa2d79740bbb526aca92455dbccb3838 | [
"MIT"
] | null | null | null | #by Sh1vam
#Kangers stay away
import os
from userbot import bot as javes
import subprocess, asyncio, shutil
from userbot.utils import admin_cmd
@javes.on(admin_cmd("tgsjson"))
async def messup(message):
await message.edit("`jsoning....`")
reply = await message.get_reply_message()
stkr = await reply.download_media("tgs.tgs")
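    # convert the downloaded .tgs sticker to Lottie JSON using the lottie_convert.py CLI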
process = await asyncio.create_subprocess_shell(f"lottie_convert.py {stkr} json.json",stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE)
stdout, stderr = await process.communicate()
os.remove(stkr)
if message.reply_to_msg_id:
message_id = message.reply_to_msg_id
    await message.client.send_file(message.chat_id, "json.json", force_document=False, reply_to=message_id)
os.remove("json.json")
await message.delete()
| 26.483871 | 152 | 0.73447 |
4a219f515dfe3869fe68ba4ae7bcab618674a717 | 1,144 | py | Python | tps/problems/migrations/0006_validatorresult.py | akmohtashami/tps-web | 9dab3ffe97c21f658be30ce2f2711dd93e4ba60f | [
"MIT"
] | 5 | 2019-02-26T06:10:43.000Z | 2021-07-24T17:11:45.000Z | tps/problems/migrations/0006_validatorresult.py | akmohtashami/tps-web | 9dab3ffe97c21f658be30ce2f2711dd93e4ba60f | [
"MIT"
] | 3 | 2019-08-15T13:56:03.000Z | 2021-06-10T18:43:16.000Z | tps/problems/migrations/0006_validatorresult.py | jonathanirvings/tps-web | 46519347d4fc8bdced9b5bceb6cdee5ea4e508f2 | [
"MIT"
] | 2 | 2018-12-28T13:12:59.000Z | 2020-12-25T18:42:13.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-06 09:31
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('problems', '0005_auto_20160731_2252'),
]
operations = [
migrations.CreateModel(
name='ValidatorResult',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('exit_code', models.CharField(max_length=200, verbose_name='exit code')),
('exit_status', models.CharField(max_length=200, verbose_name='exit status')),
('testcase', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='problems.TestCase', verbose_name='testcase')),
('validator', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='problems.Validator', verbose_name='validator')),
],
options={
'abstract': False,
},
bases=(models.Model, ),
),
]
| 36.903226 | 145 | 0.618007 |
4a219f57ad3e68e7bba3e79b05f77bd20163ac6f | 68 | py | Python | run-tests.py | mhorod/checker | acc807ba9697914b22d897c2a9e485d48f114218 | [
"MIT"
] | 1 | 2020-11-12T11:37:34.000Z | 2020-11-12T11:37:34.000Z | run-tests.py | mhorod/checker | acc807ba9697914b22d897c2a9e485d48f114218 | [
"MIT"
] | null | null | null | run-tests.py | mhorod/checker | acc807ba9697914b22d897c2a9e485d48f114218 | [
"MIT"
] | null | null | null | # pylint: disable-all
from simple_checker import run
run.from_cli()
| 17 | 30 | 0.794118 |
4a219f5e292d46e9106454b84830a41361b122fa | 6,041 | py | Python | tensorflow/python/compat/compat.py | guodongxiaren/tensorflow | 9768822159624e54ade255b55b8e6a6c7ec5a962 | [
"Apache-2.0"
] | 1 | 2022-03-29T23:09:34.000Z | 2022-03-29T23:09:34.000Z | tensorflow/python/compat/compat.py | saibaldas/tensorflow | 6a67b522214d50fceff0140b968fc200f4123324 | [
"Apache-2.0"
] | null | null | null | tensorflow/python/compat/compat.py | saibaldas/tensorflow | 6a67b522214d50fceff0140b968fc200f4123324 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for API compatibility between TensorFlow release versions.
See [Version
Compatibility](https://tensorflow.org/guide/version_compat#backward_forward)
"""
import datetime
import os
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util.tf_export import tf_export
# This value changes every day with an automatic CL. It can be modified in code
# via `forward_compatibility_horizon()` or with the environment variable
# TF_FORWARD_COMPATIBILITY_DELTA_DAYS, which is added to the compatibility date.
_FORWARD_COMPATIBILITY_HORIZON = datetime.date(2022, 3, 14)
_FORWARD_COMPATIBILITY_DELTA_DAYS_VAR_NAME = "TF_FORWARD_COMPATIBILITY_DELTA_DAYS"
_FORWARD_COMPATIBILITY_DATE_NUMBER = None
def _date_to_date_number(year, month, day):
return (year << 9) | (month << 5) | day
def _update_forward_compatibility_date_number(date_to_override=None):
"""Update the base date to compare in forward_compatible function."""
global _FORWARD_COMPATIBILITY_DATE_NUMBER
if date_to_override:
date = date_to_override
else:
date = _FORWARD_COMPATIBILITY_HORIZON
delta_days = os.getenv(_FORWARD_COMPATIBILITY_DELTA_DAYS_VAR_NAME)
if delta_days:
date += datetime.timedelta(days=int(delta_days))
if date < _FORWARD_COMPATIBILITY_HORIZON:
logging.warning("Trying to set the forward compatibility date to the past"
" date %s. This will be ignored by TensorFlow." % (date))
return
_FORWARD_COMPATIBILITY_DATE_NUMBER = _date_to_date_number(
date.year, date.month, date.day)
_update_forward_compatibility_date_number()
@tf_export("compat.forward_compatible")
def forward_compatible(year, month, day):
"""Return true if the forward compatibility window has expired.
See [Version
compatibility](https://tensorflow.org/guide/version_compat#backward_forward).
Forward-compatibility refers to scenarios where the producer of a TensorFlow
model (a GraphDef or SavedModel) is compiled against a version of the
TensorFlow library newer than what the consumer was compiled against. The
"producer" is typically a Python program that constructs and trains a model
while the "consumer" is typically another program that loads and serves the
model.
TensorFlow has been supporting a 3 week forward-compatibility window for
programs compiled from source at HEAD.
For example, consider the case where a new operation `MyNewAwesomeAdd` is
created with the intent of replacing the implementation of an existing Python
wrapper - `tf.add`. The Python wrapper implementation should change from
something like:
```python
def add(inputs, name=None):
return gen_math_ops.add(inputs, name)
```
to:
```python
from tensorflow.python.compat import compat
def add(inputs, name=None):
if compat.forward_compatible(year, month, day):
# Can use the awesome new implementation.
return gen_math_ops.my_new_awesome_add(inputs, name)
# To maintain forward compatibility, use the old implementation.
return gen_math_ops.add(inputs, name)
```
Where `year`, `month`, and `day` specify the date beyond which binaries
that consume a model are expected to have been updated to include the
new operations. This date is typically at least 3 weeks beyond the date
the code that adds the new operation is committed.
Args:
year: A year (e.g., 2018). Must be an `int`.
month: A month (1 <= month <= 12) in year. Must be an `int`.
day: A day (1 <= day <= 31, or 30, or 29, or 28) in month. Must be an
`int`.
Returns:
True if the caller can expect that serialized TensorFlow graphs produced
can be consumed by programs that are compiled with the TensorFlow library
source code after (year, month, day).
"""
return _FORWARD_COMPATIBILITY_DATE_NUMBER > _date_to_date_number(
year, month, day)
@tf_export("compat.forward_compatibility_horizon")
@tf_contextlib.contextmanager
def forward_compatibility_horizon(year, month, day):
"""Context manager for testing forward compatibility of generated graphs.
See [Version
compatibility](https://tensorflow.org/guide/version_compat#backward_forward).
To ensure forward compatibility of generated graphs (see `forward_compatible`)
with older binaries, new features can be gated with:
```python
  if compat.forward_compatible(year=2018, month=8, day=1):
generate_graph_with_new_features()
else:
generate_graph_so_older_binaries_can_consume_it()
```
However, when adding new features, one may want to unittest it before
the forward compatibility window expires. This context manager enables
such tests. For example:
```python
from tensorflow.python.compat import compat
def testMyNewFeature(self):
    with compat.forward_compatibility_horizon(2018, 8, 2):
# Test that generate_graph_with_new_features() has an effect
```
Args:
year: A year (e.g., 2018). Must be an `int`.
month: A month (1 <= month <= 12) in year. Must be an `int`.
day: A day (1 <= day <= 31, or 30, or 29, or 28) in month. Must be an
`int`.
Yields:
Nothing.
"""
try:
_update_forward_compatibility_date_number(datetime.date(year, month, day))
yield
finally:
_update_forward_compatibility_date_number()
| 35.745562 | 82 | 0.74342 |
4a219ff139a2416bde15553947549da7a8796ba1 | 12,959 | py | Python | atomic_reactor/plugins/pre_add_labels_in_df.py | tveverka-RH/atomic-reactor | e3fca7dd435250cb06565f19c8758a908f7f3c62 | [
"BSD-3-Clause"
] | null | null | null | atomic_reactor/plugins/pre_add_labels_in_df.py | tveverka-RH/atomic-reactor | e3fca7dd435250cb06565f19c8758a908f7f3c62 | [
"BSD-3-Clause"
] | 17 | 2021-12-06T09:37:07.000Z | 2022-03-25T13:12:35.000Z | atomic_reactor/plugins/pre_add_labels_in_df.py | lkolacek/atomic-reactor | 11ffc82e51d7c95c8770a9d0cb53f2430f141a92 | [
"BSD-3-Clause"
] | null | null | null | """
Copyright (c) 2015 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
Pre build plugin which adds labels to dockerfile. Labels have to be specified either
as a dict:
{
"name": "add_labels_in_dockerfile",
"args": {
"labels": {
"label1": "value1",
"label 2": "some value"
}
}
}
Or as a string, which must be a dict serialised as JSON.
This will turn this dockerfile:
```dockerfile
FROM fedora
CMD date
```
into this:
```dockerfile
FROM fedora
LABEL "label1"="value1" "label 2"="some value"
CMD date
```
By default there is the parameter:
    dont_overwrite=("Architecture", "architecture")
which disallows overwriting labels in the list if they are present in the parent image.
After that there is another check via the parameter:
    dont_overwrite_if_in_dockerfile=("distribution-scope",)
which disallows overwriting labels in the list if they are present in the dockerfile.
Keys and values are quoted as necessary.
Equal labels are, more precisely, labels of equal preference, as they may share the
same value. If several equal labels are specified in the dockerfile with different
values, the value of the first one in the list is used for the missing ones.
"""
from atomic_reactor import start_time as atomic_reactor_start_time
from atomic_reactor.plugin import PreBuildPlugin
from atomic_reactor.constants import INSPECT_CONFIG
from atomic_reactor.util import (df_parser,
label_to_string,
LabelFormatter)
from osbs.utils import Labels
import json
import datetime
import platform
class AddLabelsPlugin(PreBuildPlugin):
key = "add_labels_in_dockerfile"
is_allowed_to_fail = False
@staticmethod
def args_from_user_params(user_params: dict) -> dict:
if release := user_params.get("release"):
return {"labels": {"release": release}}
return {}
def __init__(self, workflow, labels=None, dont_overwrite=None,
auto_labels=("build-date",
"architecture",
"vcs-type",
"vcs-ref",
"com.redhat.build-host"),
aliases=None,
dont_overwrite_if_in_dockerfile=("distribution-scope",
"com.redhat.license_terms")):
"""
constructor
:param workflow: DockerBuildWorkflow instance
:param labels: dict, key value pairs to set as labels; or str, JSON-encoded dict
:param dont_overwrite: iterable, list of label keys which should not be overwritten
if they are present in parent image
:param auto_labels: iterable, list of labels to be determined automatically, if supported
it should contain only new label names and not old label names,
as they will be managed automatically
:param aliases: dict, maps old label names to new label names - for each old name found in
base image, dockerfile, or labels argument, a label with the new name is
added (with the same value)
:param dont_overwrite_if_in_dockerfile : iterable, list of label keys which should not be
overwritten if they are present in dockerfile
"""
# call parent constructor
super(AddLabelsPlugin, self).__init__(workflow)
if isinstance(labels, str):
labels = json.loads(labels)
if labels and not isinstance(labels, dict):
raise RuntimeError("labels have to be dict")
# see if REACTOR_CONFIG has any labels. If so, merge them with the existing argument
# and otherwise use the existing argument
image_labels = self.workflow.conf.image_labels
# validity of image_labels is enforced by REACTOR_CONFIG's schema, so no need to check
if image_labels:
if labels:
labels.update(image_labels)
else:
labels = image_labels
self.labels = labels
self.dont_overwrite = dont_overwrite or ()
self.dont_overwrite_if_in_dockerfile = dont_overwrite_if_in_dockerfile
self.aliases = aliases or Labels.get_new_names_by_old()
self.auto_labels = auto_labels or ()
self.info_url_format = self.workflow.conf.image_label_info_url_format
self.equal_labels = self.workflow.conf.image_equal_labels
if not isinstance(self.equal_labels, list):
raise RuntimeError("equal_labels have to be list")
def generate_auto_labels(self, base_labels, df_labels, plugin_labels):
generated = {}
all_labels = base_labels.copy()
all_labels.update(df_labels)
all_labels.update(plugin_labels)
# build date
dt = datetime.datetime.utcfromtimestamp(atomic_reactor_start_time)
generated['build-date'] = dt.isoformat()
# architecture - assuming host and image architecture is the same
# OSBS2 TBD
generated['architecture'] = platform.processor()
# build host
# generated['com.redhat.build-host'] = docker_info['Name']
# OSBS2 TBD get host somehow
generated['com.redhat.build-host'] = 'dummy_host'
# VCS info
vcs = self.workflow.source.get_vcs_info()
if vcs:
generated['vcs-type'] = vcs.vcs_type
generated['vcs-url'] = vcs.vcs_url
generated['vcs-ref'] = vcs.vcs_ref
for lbl in self.auto_labels:
if lbl not in generated:
self.log.warning("requested automatic label %r is not available", lbl)
else:
self.labels[lbl] = generated[lbl]
self.log.info("automatic label %r is generated to %r", lbl, generated[lbl])
def add_aliases(self, base_labels, df_labels, plugin_labels):
all_labels = base_labels.copy()
all_labels.update(df_labels)
all_labels.update(plugin_labels)
new_labels = df_labels.copy()
new_labels.update(plugin_labels)
applied_alias = False
not_applied = []
def add_as_an_alias(set_to, set_from):
self.log.warning("adding label %r as an alias for label %r", set_to, set_from)
self.labels[set_to] = all_labels[set_from]
self.log.info(self.labels)
return True
for old, new in self.aliases.items():
if old not in all_labels:
applied_alias = not_applied.append(old)
continue
# new label doesn't exists but old label does
# add new label with value from old label
if new not in all_labels:
applied_alias = add_as_an_alias(new, old)
continue
# new and old label exists, and have same value
if all_labels[old] == all_labels[new]:
self.log.debug("alias label %r for %r already exists, skipping", new, old)
continue
# new overwrites old, if new is explicitly specified,
# or if old and new are in baseimage
if new in new_labels or (new not in new_labels and old not in new_labels):
applied_alias = add_as_an_alias(old, new)
continue
# old is explicitly specified so overwriting new (from baseimage)
applied_alias = add_as_an_alias(new, old)
# this will ensure that once we've added once new label based on
# old label, if there are multiple old names, just first will be used
all_labels[new] = all_labels[old]
# warn if we applied only some aliases
if applied_alias and not_applied:
self.log.debug("applied only some aliases, following old labels were not found: %s",
", ".join(not_applied))
def set_missing_labels(labels_found, all_labels, value_from, not_in=(), not_value=None):
labels_to_set = all_labels.difference(set(labels_found))
for set_label in labels_to_set:
if set_label in not_in and value_from[labels_found[0]] == not_value[set_label]:
self.log.debug("skipping label %r because it is set correctly in base image",
set_label)
else:
self.labels[set_label] = value_from[labels_found[0]]
self.log.warning("adding equal label %r with value %r",
set_label, value_from[labels_found[0]])
for equal_list in self.equal_labels:
all_equal = set(equal_list)
found_labels_base = []
found_labels_new = []
for equal_label in equal_list:
if equal_label in new_labels:
found_labels_new.append(equal_label)
elif equal_label in base_labels:
found_labels_base.append(equal_label)
if found_labels_new:
set_missing_labels(found_labels_new, all_equal, new_labels,
found_labels_base, base_labels)
elif found_labels_base:
set_missing_labels(found_labels_base, all_equal, base_labels)
def add_info_url(self, base_labels, df_labels, plugin_labels):
all_labels = base_labels.copy()
all_labels.update(df_labels)
all_labels.update(plugin_labels)
info_url = LabelFormatter().vformat(self.info_url_format, [], all_labels)
self.labels['url'] = info_url
def add_release_env_var(self, df_parser):
release_env_var = self.workflow.source.config.release_env_var
if release_env_var:
final_labels = Labels(df_parser.labels)
try:
_, final_release = final_labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)
release_line = "ENV {}={}".format(release_env_var, final_release)
df_parser.add_lines(release_line, at_start=True, all_stages=True)
except KeyError:
self.log.warning("environment release variable %s could not be set because no "
"release label found", release_env_var)
def run(self):
"""
run the plugin
"""
dockerfile = df_parser(self.workflow.df_path, workflow=self.workflow)
lines = dockerfile.lines
if (self.workflow.dockerfile_images.custom_base_image or
self.workflow.dockerfile_images.base_from_scratch):
base_image_labels = {}
else:
try:
# OSBS2 TBD: inspect the correct architecture
config = self.workflow.imageutil.base_image_inspect()[INSPECT_CONFIG]
except KeyError as exc:
message = "base image was not inspected"
self.log.error(message)
raise RuntimeError(message) from exc
else:
base_image_labels = config["Labels"] or {}
self.generate_auto_labels(base_image_labels.copy(), dockerfile.labels.copy(),
self.labels.copy())
# changing dockerfile.labels writes out modified Dockerfile - err on
# the safe side and make a copy
self.add_aliases(base_image_labels.copy(), dockerfile.labels.copy(), self.labels.copy())
if self.info_url_format:
self.add_info_url(base_image_labels.copy(), dockerfile.labels.copy(),
self.labels.copy())
labels = []
for key, value in self.labels.items():
if key not in dockerfile.labels or dockerfile.labels[key] != value:
if key in self.dont_overwrite_if_in_dockerfile and key in dockerfile.labels:
self.log.info("denying overwrite of label %r, using from Dockerfile", key)
elif (key in base_image_labels and
key in self.dont_overwrite and
key not in dockerfile.labels):
self.log.info("denying overwrite of label %r, using from baseimage", key)
else:
label = label_to_string(key, value)
self.log.info("setting label %r", label)
labels.append(label)
content = ""
if labels:
content = 'LABEL ' + " ".join(labels)
# put labels at the end of dockerfile (since they change metadata and do not interact
# with FS, this should cause no harm)
lines.append('\n' + content + '\n')
dockerfile.lines = lines
self.add_release_env_var(dockerfile)
return content
| 39.629969 | 98 | 0.612084 |