max_stars_repo_path (string, 3-269 chars) | max_stars_repo_name (string, 4-119 chars) | max_stars_count (int64, 0-191k) | id (string, 1-7 chars) | content (string, 6-1.05M chars) | score (float64, 0.23-5.13) | int_score (int64, 0-5)
---|---|---|---|---|---|---|
pdiffcopy/hashing.py | xolox/python-pdiffcopy | 5 | 6100 | # Fast large file synchronization inspired by rsync.
#
# Author: <NAME> <<EMAIL>>
# Last Change: March 6, 2020
# URL: https://pdiffcopy.readthedocs.io
"""Parallel hashing of files using :mod:`multiprocessing` and :mod:`pdiffcopy.mp`."""
# Standard library modules.
import functools
import hashlib
import os
# External dependencies.
from six.moves import range
# Modules included in our package.
from pdiffcopy.mp import WorkerPool
# Public identifiers that require documentation.
__all__ = ("compute_hashes", "hash_worker")
def compute_hashes(filename, block_size, method, concurrency):
"""Compute checksums of a file in blocks (parallel)."""
with WorkerPool(
concurrency=concurrency,
generator_fn=functools.partial(range, 0, os.path.getsize(filename), block_size),
worker_fn=functools.partial(hash_worker, block_size=block_size, filename=filename, method=method),
) as pool:
for offset, digest in pool:
yield offset, digest
def hash_worker(offset, block_size, filename, method):
"""Worker function to be run in child processes."""
with open(filename, "rb") as handle:
handle.seek(offset)
context = hashlib.new(method)
context.update(handle.read(block_size))
return offset, context.hexdigest()
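# --- Illustrative usage sketch (added for clarity, not part of the original module) ---
# Assuming a file "example.dat" exists, the generator can be consumed like this;
# the 4 MiB block size, "sha1" digest and concurrency of 4 are arbitrary example values:
#
#     for offset, digest in compute_hashes("example.dat", block_size=4 * 1024 * 1024,
#                                           method="sha1", concurrency=4):
#         print(offset, digest)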
| 2.671875 | 3 |
pyscf/nao/test/test_0003_na2_nao.py | robert-anderson/pyscf | 3 | 6101 | <filename>pyscf/nao/test/test_0003_na2_nao.py
# Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
from pyscf.nao.m_siesta_utils import get_siesta_command, get_pseudo
class KnowValues(unittest.TestCase):
def test_siesta2sv_df(self):
import subprocess
import os
siesta_fdf = """
xml.write .true.
PAO.EnergyShift 100 meV
%block ChemicalSpeciesLabel
1 11 Na
%endblock ChemicalSpeciesLabel
NumberOfAtoms 2
NumberOfSpecies 1
%block AtomicCoordinatesAndAtomicSpecies
0.77573521 0.00000000 0.00000000 1
-0.77573521 0.00000000 0.00000000 1
%endblock AtomicCoordinatesAndAtomicSpecies
MD.NumCGsteps 0
COOP.Write .true.
WriteDenchar .true.
"""
label = 'siesta'
with open(label+'.fdf', 'w') as fi:
print(siesta_fdf, file=fi)
for sp in ['Na']:
try:
os.remove(sp+'.psf')
except OSError:
pass
try:
pppath = get_pseudo(sp)
except Exception:
print('get_pseudo(' + sp + ') is not working --> skipping the siesta run')
return
os.symlink(pppath, sp+'.psf')
errorcode = subprocess.call(get_siesta_command(label), shell=True)
if errorcode: raise RuntimeError('siesta returned an error: {0}'.format(errorcode))
# run test system_vars
from pyscf.nao import mf
sv = mf(label=label)
self.assertEqual(sv.norbs, 10)
self.assertTrue( sv.diag_check() )
self.assertTrue( sv.overlap_check())
if __name__ == "__main__": unittest.main()
| 2.015625 | 2 |
tests/moz_library/rental_books_test.py | mozkzki/moz-library | 0 | 6102 | import pytest
from moz_library.rental_books import RentalBooks
class TestRentalBooks:
@pytest.fixture()
def books1(self):
return RentalBooks()
def test_can_extend_period_1(self, books1):
assert books1._can_extend_period("延長できません") is False  # "cannot be extended"
def test_can_extend_period_2(self, books1):
assert books1._can_extend_period("すでに延長されています") is False  # "already extended"
def test_can_extend_period_3(self, books1):
assert books1._can_extend_period("それ以外") is True  # "anything else"
| 2.671875 | 3 |
examples/src/Charts/MultiCategoryChart.py | aspose-slides/Aspose.Slides-for-Python-via-.NET | 0 | 6103 | <gh_stars>0
import aspose.pydrawing as drawing
import aspose.slides as slides
def charts_multi_category_chart():
#ExStart:MultiCategoryChart
# The path to the output directory.
outDir = "./examples/out/"
with slides.Presentation() as pres:
slide = pres.slides[0]
ch = pres.slides[0].shapes.add_chart(slides.charts.ChartType.CLUSTERED_COLUMN, 100, 100, 600, 450)
ch.chart_data.series.clear()
ch.chart_data.categories.clear()
fact = ch.chart_data.chart_data_workbook
fact.clear(0)
defaultWorksheetIndex = 0
category = ch.chart_data.categories.add(fact.get_cell(0, "c2", "A"))
category.grouping_levels.set_grouping_item(1, "Group1")
category = ch.chart_data.categories.add(fact.get_cell(0, "c3", "B"))
category = ch.chart_data.categories.add(fact.get_cell(0, "c4", "C"))
category.grouping_levels.set_grouping_item(1, "Group2")
category = ch.chart_data.categories.add(fact.get_cell(0, "c5", "D"))
category = ch.chart_data.categories.add(fact.get_cell(0, "c6", "E"))
category.grouping_levels.set_grouping_item(1, "Group3")
category = ch.chart_data.categories.add(fact.get_cell(0, "c7", "F"))
category = ch.chart_data.categories.add(fact.get_cell(0, "c8", "G"))
category.grouping_levels.set_grouping_item(1, "Group4")
category = ch.chart_data.categories.add(fact.get_cell(0, "c9", "H"))
# Adding Series
series = ch.chart_data.series.add(fact.get_cell(0, "D1", "Series 1"),
slides.charts.ChartType.CLUSTERED_COLUMN)
series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, "D2", 10))
series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, "D3", 20))
series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, "D4", 30))
series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, "D5", 40))
series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, "D6", 50))
series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, "D7", 60))
series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, "D8", 70))
series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, "D9", 80))
# Save presentation with chart
pres.save(outDir + "charts_multi_category_chart_out.pptx", slides.export.SaveFormat.PPTX)
#ExEnd:MultiCategoryChart | 2.671875 | 3 |
netbox/extras/forms/filtersets.py | cybarox/netbox | 0 | 6104 | <reponame>cybarox/netbox<gh_stars>0
from django import forms
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.utils.translation import gettext as _
from dcim.models import DeviceRole, DeviceType, Platform, Region, Site, SiteGroup
from extras.choices import *
from extras.models import *
from extras.utils import FeatureQuery
from netbox.forms.base import NetBoxModelFilterSetForm
from tenancy.models import Tenant, TenantGroup
from utilities.forms import (
add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField,
ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField,
StaticSelect, TagFilterField,
)
from virtualization.models import Cluster, ClusterGroup, ClusterType
__all__ = (
'ConfigContextFilterForm',
'CustomFieldFilterForm',
'CustomLinkFilterForm',
'ExportTemplateFilterForm',
'JournalEntryFilterForm',
'LocalConfigContextFilterForm',
'ObjectChangeFilterForm',
'TagFilterForm',
'WebhookFilterForm',
)
class CustomFieldFilterForm(FilterForm):
fieldsets = (
(None, ('q',)),
('Attributes', ('type', 'content_types', 'weight', 'required')),
)
content_types = ContentTypeMultipleChoiceField(
queryset=ContentType.objects.all(),
limit_choices_to=FeatureQuery('custom_fields'),
required=False
)
type = MultipleChoiceField(
choices=CustomFieldTypeChoices,
required=False,
label=_('Field type')
)
weight = forms.IntegerField(
required=False
)
required = forms.NullBooleanField(
required=False,
widget=StaticSelect(
choices=BOOLEAN_WITH_BLANK_CHOICES
)
)
class CustomLinkFilterForm(FilterForm):
fieldsets = (
(None, ('q',)),
('Attributes', ('content_type', 'enabled', 'new_window', 'weight')),
)
content_type = ContentTypeChoiceField(
queryset=ContentType.objects.all(),
limit_choices_to=FeatureQuery('custom_links'),
required=False
)
enabled = forms.NullBooleanField(
required=False,
widget=StaticSelect(
choices=BOOLEAN_WITH_BLANK_CHOICES
)
)
new_window = forms.NullBooleanField(
required=False,
widget=StaticSelect(
choices=BOOLEAN_WITH_BLANK_CHOICES
)
)
weight = forms.IntegerField(
required=False
)
class ExportTemplateFilterForm(FilterForm):
fieldsets = (
(None, ('q',)),
('Attributes', ('content_type', 'mime_type', 'file_extension', 'as_attachment')),
)
content_type = ContentTypeChoiceField(
queryset=ContentType.objects.all(),
limit_choices_to=FeatureQuery('export_templates'),
required=False
)
mime_type = forms.CharField(
required=False,
label=_('MIME type')
)
file_extension = forms.CharField(
required=False
)
as_attachment = forms.NullBooleanField(
required=False,
widget=StaticSelect(
choices=BOOLEAN_WITH_BLANK_CHOICES
)
)
class WebhookFilterForm(FilterForm):
fieldsets = (
(None, ('q',)),
('Attributes', ('content_types', 'http_method', 'enabled')),
('Events', ('type_create', 'type_update', 'type_delete')),
)
content_types = ContentTypeMultipleChoiceField(
queryset=ContentType.objects.all(),
limit_choices_to=FeatureQuery('webhooks'),
required=False
)
http_method = MultipleChoiceField(
choices=WebhookHttpMethodChoices,
required=False,
label=_('HTTP method')
)
enabled = forms.NullBooleanField(
required=False,
widget=StaticSelect(
choices=BOOLEAN_WITH_BLANK_CHOICES
)
)
type_create = forms.NullBooleanField(
required=False,
widget=StaticSelect(
choices=BOOLEAN_WITH_BLANK_CHOICES
)
)
type_update = forms.NullBooleanField(
required=False,
widget=StaticSelect(
choices=BOOLEAN_WITH_BLANK_CHOICES
)
)
type_delete = forms.NullBooleanField(
required=False,
widget=StaticSelect(
choices=BOOLEAN_WITH_BLANK_CHOICES
)
)
class TagFilterForm(FilterForm):
model = Tag
content_type_id = ContentTypeMultipleChoiceField(
queryset=ContentType.objects.filter(FeatureQuery('tags').get_query()),
required=False,
label=_('Tagged object type')
)
class ConfigContextFilterForm(FilterForm):
fieldsets = (
(None, ('q', 'tag_id')),
('Location', ('region_id', 'site_group_id', 'site_id')),
('Device', ('device_type_id', 'platform_id', 'role_id')),
('Cluster', ('cluster_type_id', 'cluster_group_id', 'cluster_id')),
('Tenant', ('tenant_group_id', 'tenant_id'))
)
region_id = DynamicModelMultipleChoiceField(
queryset=Region.objects.all(),
required=False,
label=_('Regions')
)
site_group_id = DynamicModelMultipleChoiceField(
queryset=SiteGroup.objects.all(),
required=False,
label=_('Site groups')
)
site_id = DynamicModelMultipleChoiceField(
queryset=Site.objects.all(),
required=False,
label=_('Sites')
)
device_type_id = DynamicModelMultipleChoiceField(
queryset=DeviceType.objects.all(),
required=False,
label=_('Device types')
)
role_id = DynamicModelMultipleChoiceField(
queryset=DeviceRole.objects.all(),
required=False,
label=_('Roles')
)
platform_id = DynamicModelMultipleChoiceField(
queryset=Platform.objects.all(),
required=False,
label=_('Platforms')
)
cluster_type_id = DynamicModelMultipleChoiceField(
queryset=ClusterType.objects.all(),
required=False,
label=_('Cluster types'),
fetch_trigger='open'
)
cluster_group_id = DynamicModelMultipleChoiceField(
queryset=ClusterGroup.objects.all(),
required=False,
label=_('Cluster groups')
)
cluster_id = DynamicModelMultipleChoiceField(
queryset=Cluster.objects.all(),
required=False,
label=_('Clusters')
)
tenant_group_id = DynamicModelMultipleChoiceField(
queryset=TenantGroup.objects.all(),
required=False,
label=_('Tenant groups')
)
tenant_id = DynamicModelMultipleChoiceField(
queryset=Tenant.objects.all(),
required=False,
label=_('Tenant')
)
tag_id = DynamicModelMultipleChoiceField(
queryset=Tag.objects.all(),
required=False,
label=_('Tags')
)
class LocalConfigContextFilterForm(forms.Form):
local_context_data = forms.NullBooleanField(
required=False,
label=_('Has local config context data'),
widget=StaticSelect(
choices=BOOLEAN_WITH_BLANK_CHOICES
)
)
class JournalEntryFilterForm(NetBoxModelFilterSetForm):
model = JournalEntry
fieldsets = (
(None, ('q', 'tag')),
('Creation', ('created_before', 'created_after', 'created_by_id')),
('Attributes', ('assigned_object_type_id', 'kind'))
)
created_after = forms.DateTimeField(
required=False,
label=_('After'),
widget=DateTimePicker()
)
created_before = forms.DateTimeField(
required=False,
label=_('Before'),
widget=DateTimePicker()
)
created_by_id = DynamicModelMultipleChoiceField(
queryset=User.objects.all(),
required=False,
label=_('User'),
widget=APISelectMultiple(
api_url='/api/users/users/',
)
)
assigned_object_type_id = DynamicModelMultipleChoiceField(
queryset=ContentType.objects.all(),
required=False,
label=_('Object Type'),
widget=APISelectMultiple(
api_url='/api/extras/content-types/',
)
)
kind = forms.ChoiceField(
choices=add_blank_choice(JournalEntryKindChoices),
required=False,
widget=StaticSelect()
)
tag = TagFilterField(model)
class ObjectChangeFilterForm(FilterForm):
model = ObjectChange
fieldsets = (
(None, ('q',)),
('Time', ('time_before', 'time_after')),
('Attributes', ('action', 'user_id', 'changed_object_type_id')),
)
time_after = forms.DateTimeField(
required=False,
label=_('After'),
widget=DateTimePicker()
)
time_before = forms.DateTimeField(
required=False,
label=_('Before'),
widget=DateTimePicker()
)
action = forms.ChoiceField(
choices=add_blank_choice(ObjectChangeActionChoices),
required=False,
widget=StaticSelect()
)
user_id = DynamicModelMultipleChoiceField(
queryset=User.objects.all(),
required=False,
label=_('User'),
widget=APISelectMultiple(
api_url='/api/users/users/',
)
)
changed_object_type_id = DynamicModelMultipleChoiceField(
queryset=ContentType.objects.all(),
required=False,
label=_('Object Type'),
widget=APISelectMultiple(
api_url='/api/extras/content-types/',
)
)
| 1.859375 | 2 |
bin/write2cly.py | docdiesel/smartmetertools | 1 | 6105 | #!/usr/bin/python3
## write2cly.py - reads json (generated by sml_reader.py) from stdin
## - writes values to Corlysis time series InfluxDB
##
## Writes data from smart meter to time series database (InfluxDB)
## at Corlysis.com [1]. You need to configure your database and token
## in the config section.
##
## [1] https://corlysis.com/
##==== license section ========
## This code is under MIT License: Copyright (C) 2019 <NAME>
## License details see https://choosealicense.com/licenses/mit/
##==== config section ========
# define corlysis settings here - set db and token at least
cly_base_url = 'https://corlysis.com:8086/write'
cly_parameters = {
"db": "energy",
"u" : "token",
"p" : "placeyourtokenhere",
"precision": "ms"}
# assign readable field names
config = {
"1.8.0": "Bezug",
"2.8.0": "Einspeisung",
"16.7.0": "Wirkleistung"
}
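# Example (illustrative, assuming sml_reader.py emits OBIS codes like those above):
# an input of
#   {"data": {"1.8.0": 1234.5, "16.7.0": 220}}
# is turned into an InfluxDB line-protocol string such as
#   meter_data Bezug=1234.5,Wirkleistung=220 1589000000000
# which is then POSTed to the Corlysis /write endpoint.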
##==== code section ==== no need to change lines below ====
##-- import libraries
import json, sys, requests
import time
# load json from stdin
try:
myjson = json.load(sys.stdin)
except:
sys.stderr.write('!! error loading json')
exit(1)
# decode json
try:
line = "meter_data "
# add each meter value to line
for obis in myjson['data']:
key = config[obis] # set human readable field name
value = myjson['data'][obis] # get value from smart meter
line += key + '=' + str(value) + ',' # add key=value to insert line
# cut off last comma
line = line[:-1]
# add timestamp as unix timestamp in ms
line += ' ' + str(int(time.time()*1000)) #+ '\n'
# post data into time series database; http response should be 204
r = requests.post(cly_base_url, params=cly_parameters, data=line)
if r.status_code != 204:
sys.stderr.write(str(r.status_code))
sys.stderr.write(r.text)
# catch if input is no valid json
except:
sys.stderr.write('!!error: no data block in json')
exit(2)
| 2.671875 | 3 |
dns/rdtypes/ANY/__init__.py | Ashiq5/dnspython | 0 | 6106 | <reponame>Ashiq5/dnspython
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""Class ANY (generic) rdata type classes."""
__all__ = [
'AFSDB',
'AMTRELAY',
'AVC',
'CAA',
'CDNSKEY',
'CDS',
'CERT',
'CNAME',
'CSYNC',
'DLV',
'DNAME',
'DNSKEY',
'DS',
'EUI48',
'EUI64',
'GPOS',
'HINFO',
'HIP',
'ISDN',
'LOC',
'MX',
'NINFO',
'NS',
'NSEC',
'NSEC3',
'NSEC3PARAM',
'OPENPGPKEY',
'OPT',
'PTR',
'RP',
'RRSIG',
'RT',
'SMIMEA',
'SOA',
'SPF',
'SSHFP',
'TKEY',
'TLSA',
'TSIG',
'TXT',
'URI',
'X25',
]
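# --- Illustrative example (added for clarity, not part of the original module) ---
# These classes are normally reached through dnspython's generic rdata factory
# rather than imported directly; per recent dnspython versions something like
# the following yields an MX rdata instance backed by the class listed above:
#
#     import dns.rdata, dns.rdataclass, dns.rdatatype
#     mx = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.MX, '10 mail.example.com.')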
| 1.421875 | 1 |
01_test.py | KhubbatulinMark/DCase2020-Task-2-on-Wigner-Ville-transform | 3 | 6107 | <gh_stars>1-10
"""
@file 01_test.py
@brief Script for test
@author <NAME>, <NAME>, and <NAME> (Hitachi Ltd.)
Copyright (C) 2020 Hitachi, Ltd. All right reserved.
"""
########################################################################
# import default python-library
########################################################################
import os
import glob
import csv
import re
import itertools
import sys
########################################################################
########################################################################
# import additional python-library
########################################################################
import numpy
# from import
from tqdm import tqdm
from sklearn import metrics
# original lib
import common as com
import keras_model
########################################################################
########################################################################
# load parameter.yaml
########################################################################
param = com.yaml_load()
#######################################################################
########################################################################
# def
########################################################################
def save_csv(save_file_path,
save_data):
with open(save_file_path, "w", newline="") as f:
writer = csv.writer(f, lineterminator='\n')
writer.writerows(save_data)
def get_machine_id_list_for_test(target_dir,
dir_name="test",
ext="json"):
"""
target_dir : str
base directory path of "dev_data" or "eval_data"
test_dir_name : str (default="test")
directory containing test data
ext : str (default="wav)
file extension of audio files
return :
machine_id_list : list [ str ]
list of machine IDs extracted from the names of test files
"""
# create test files
dir_path = os.path.abspath("{dir}/{dir_name}/*.{ext}".format(dir=target_dir, dir_name=dir_name, ext=ext))
file_paths = sorted(glob.glob(dir_path))
# extract id
machine_id_list = sorted(list(set(itertools.chain.from_iterable(
[re.findall('id_[0-9][0-9]', ext_id) for ext_id in file_paths]))))
return machine_id_list
def test_file_list_generator(target_dir,
id_name,
dir_name="test",
prefix_normal="normal",
prefix_anomaly="anomaly",
ext="json"):
"""
target_dir : str
base directory path of the dev_data or eval_data
id_name : str
id of wav file in <<test_dir_name>> directory
dir_name : str (default="test")
directory containing test data
prefix_normal : str (default="normal")
normal directory name
prefix_anomaly : str (default="anomaly")
anomaly directory name
ext : str (default="wav")
file extension of audio files
return :
if the mode is "development":
test_files : list [ str ]
file list for test
test_labels : list [ boolean ]
label info. list for test
* normal/anomaly = 0/1
if the mode is "evaluation":
test_files : list [ str ]
file list for test
"""
com.logger.info("target_dir : {}".format(target_dir+"_"+id_name))
# development
if mode:
normal_files = sorted(
glob.glob("{dir}/{dir_name}/{prefix_normal}_{id_name}*.{ext}".format(dir=target_dir,
dir_name=dir_name,
prefix_normal=prefix_normal,
id_name=id_name,
ext=ext)))
normal_labels = numpy.zeros(len(normal_files))
anomaly_files = sorted(
glob.glob("{dir}/{dir_name}/{prefix_anomaly}_{id_name}*.{ext}".format(dir=target_dir,
dir_name=dir_name,
prefix_anomaly=prefix_anomaly,
id_name=id_name,
ext=ext)))
anomaly_labels = numpy.ones(len(anomaly_files))
files = numpy.concatenate((normal_files, anomaly_files), axis=0)
labels = numpy.concatenate((normal_labels, anomaly_labels), axis=0)
com.logger.info("test_file num : {num}".format(num=len(files)))
if len(files) == 0:
com.logger.exception("no_wav_file!!")
print("\n========================================")
# evaluation
else:
files = sorted(
glob.glob("{dir}/{dir_name}/*{id_name}*.{ext}".format(dir=target_dir,
dir_name=dir_name,
id_name=id_name,
ext=ext)))
labels = None
com.logger.info("test_file num : {num}".format(num=len(files)))
if len(files) == 0:
com.logger.exception("no_wav_file!!")
print("\n=========================================")
return files, labels
########################################################################
########################################################################
# main 01_test.py
########################################################################
if __name__ == "__main__":
# check mode
# "development": mode == True
# "evaluation": mode == False
mode = com.command_line_chk()
if mode is None:
sys.exit(-1)
# make output result directory
os.makedirs(param["result_directory"], exist_ok=True)
# load base directory
dirs = com.select_dirs(param=param, mode=mode)
# initialize lines in csv for AUC and pAUC
csv_lines = []
# loop of the base directory
for idx, target_dir in enumerate(dirs):
print("\n===========================")
print("[{idx}/{total}] {dirname}".format(dirname=target_dir, idx=idx+1, total=len(dirs)))
machine_type = os.path.split(target_dir)[1]
print("============== MODEL LOAD ==============")
# set model path
model_file = "{model}/model_{machine_type}.hdf5".format(model=param["model_directory"],
machine_type=machine_type)
# load model file
if not os.path.exists(model_file):
com.logger.error("{} model not found ".format(machine_type))
sys.exit(-1)
model = keras_model.load_model(model_file)
model.summary()
if mode:
# results by type
csv_lines.append([machine_type])
csv_lines.append(["id", "AUC", "pAUC"])
performance = []
machine_id_list = get_machine_id_list_for_test(target_dir)
print(machine_id_list)
for id_str in machine_id_list:
# load test file
test_files, y_true = test_file_list_generator(target_dir, id_str)
# setup anomaly score file path
anomaly_score_csv = "{result}/anomaly_score_{machine_type}_{id_str}.csv".format(
result=param["result_directory"],
machine_type=machine_type,
id_str=id_str)
anomaly_score_list = []
print("\n============== BEGIN TEST FOR A MACHINE ID ==============")
y_pred = [0. for k in test_files]
for file_idx, file_path in tqdm(enumerate(test_files), total=len(test_files)):
try:
data = com.file_to_vector_array(file_path,
n_mels=param["feature"]["n_mels"],
frames=param["feature"]["frames"],
n_fft=param["feature"]["n_fft"],
hop_length=param["feature"]["hop_length"],
power=param["feature"]["power"])
errors = numpy.mean(numpy.square(data - model.predict(data)), axis=1)
y_pred[file_idx] = numpy.mean(errors)
anomaly_score_list.append([os.path.basename(file_path), y_pred[file_idx]])
except Exception:
com.logger.error("file broken!!: {}".format(file_path))
# save anomaly score
save_csv(save_file_path=anomaly_score_csv, save_data=anomaly_score_list)
com.logger.info("anomaly score result -> {}".format(anomaly_score_csv))
if mode:
# append AUC and pAUC to lists
auc = metrics.roc_auc_score(y_true, y_pred)
p_auc = metrics.roc_auc_score(y_true, y_pred, max_fpr=param["max_fpr"])
csv_lines.append([id_str.split("_", 1)[1], auc, p_auc])
performance.append([auc, p_auc])
com.logger.info("AUC : {}".format(auc))
com.logger.info("pAUC : {}".format(p_auc))
print("\n============ END OF TEST FOR A MACHINE ID ============")
if mode:
# calculate averages for AUCs and pAUCs
averaged_performance = numpy.mean(numpy.array(performance, dtype=float), axis=0)
csv_lines.append(["Average"] + list(averaged_performance))
csv_lines.append([])
if mode:
# output results
result_path = "{result}/{file_name}".format(result=param["result_directory"], file_name=param["result_file"])
com.logger.info("AUC and pAUC results -> {}".format(result_path))
save_csv(save_file_path=result_path, save_data=csv_lines)
| 2.109375 | 2 |
src/text_split/split.py | i1123581321/word_split | 0 | 6108 | <reponame>i1123581321/word_split
import argparse
import os
parser = argparse.ArgumentParser(description="a simple parser")
parser.add_argument("filename", type=str)
parser.add_argument("lineno", nargs="+", type=int)
parser.add_argument("--same_length", action=argparse.BooleanOptionalAction)
def main():
args = parser.parse_args()
filename = args.filename
linenos = args.lineno
same_length = args.same_length
linenos = list(map(lambda x: x - 1, linenos))
linenos.sort()
results = []
with open(filename, "r", encoding="utf-8") as f:
content = f.readlines()
if not same_length:
start = 0
for lineno in linenos:
results.append("".join(content[start:lineno]))
start = lineno
results.append("".join(content[start:]))
else:
lineno = linenos[0] + 1 if linenos[0] else 100000
start = 0
while start < len(content):
results.append("".join(content[start: start + lineno]))
start += lineno
name, ext = os.path.splitext(filename)
for i, result in enumerate(results):
with open(f"{name}-{i + 1:02}{ext}", "w", encoding="utf-8") as f:
f.write(result)
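# --- Illustrative usage (added for clarity; assumes main() is wired up as the
# package's console entry point) ---
# Splitting notes.txt after lines 10 and 25 produces notes-01.txt, notes-02.txt
# and notes-03.txt:
#
#     text_split notes.txt 10 25
#
# With --same_length, every chunk gets the same number of lines as the first one.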
| 2.859375 | 3 |
src/sentry/models/event.py | Ali-Tahir/sentry | 0 | 6109 | <reponame>Ali-Tahir/sentry
from __future__ import absolute_import
import six
import string
import warnings
import pytz
from collections import OrderedDict
from dateutil.parser import parse as parse_date
from django.db import models
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from hashlib import md5
from semaphore.processing import StoreNormalizer
from sentry import eventtypes
from sentry.db.models import (
BoundedBigIntegerField,
BoundedIntegerField,
Model,
NodeData,
NodeField,
sane_repr,
)
from sentry.db.models.manager import EventManager
from sentry.interfaces.base import get_interfaces
from sentry.utils import json
from sentry.utils.cache import memoize
from sentry.utils.canonical import CanonicalKeyDict, CanonicalKeyView
from sentry.utils.safe import get_path
from sentry.utils.strings import truncatechars
class EventDict(CanonicalKeyDict):
"""
Creating an instance of this dictionary will send the event through basic
(Rust-based) type/schema validation called "re-normalization".
This is used as a wrapper type for `Event.data` such that creating an event
object (or loading it from the DB) will ensure the data fits the type
schema.
"""
def __init__(self, data, skip_renormalization=False, **kwargs):
is_renormalized = isinstance(data, EventDict) or (
isinstance(data, NodeData) and isinstance(data.data, EventDict)
)
if not skip_renormalization and not is_renormalized:
normalizer = StoreNormalizer(is_renormalize=True, enable_trimming=False)
data = normalizer.normalize_event(dict(data))
CanonicalKeyDict.__init__(self, data, **kwargs)
class EventCommon(object):
"""
Methods and properties common to both Event and SnubaEvent.
"""
@classmethod
def generate_node_id(cls, project_id, event_id):
"""
Returns a deterministic node_id for this event based on the project_id
and event_id which together are globally unique. The event body should
be saved under this key in nodestore so it can be retrieved using the
same generated id when we only have project_id and event_id.
"""
return md5("{}:{}".format(project_id, event_id)).hexdigest()
# TODO (alex) We need a better way to cache these properties. functools32
# doesn't quite do the trick as there is a reference bug with unsaved
# models. But the current _group_cache thing is also clunky because these
# properties need to be stripped out in __getstate__.
@property
def group(self):
from sentry.models import Group
if not self.group_id:
return None
if not hasattr(self, "_group_cache"):
self._group_cache = Group.objects.get(id=self.group_id)
return self._group_cache
@group.setter
def group(self, group):
self.group_id = group.id
self._group_cache = group
@property
def project(self):
from sentry.models import Project
if not hasattr(self, "_project_cache"):
self._project_cache = Project.objects.get(id=self.project_id)
return self._project_cache
@project.setter
def project(self, project):
if project is None:
self.project_id = None
else:
self.project_id = project.id
self._project_cache = project
def get_interfaces(self):
return CanonicalKeyView(get_interfaces(self.data))
@memoize
def interfaces(self):
return self.get_interfaces()
def get_interface(self, name):
return self.interfaces.get(name)
def get_legacy_message(self):
# TODO(mitsuhiko): remove this code once it's unused. It's still
# being used by plugin code and once the message rename is through
# plugins should instead switch to the actual message attribute or
# this method could return what currently is real_message.
return (
get_path(self.data, "logentry", "formatted")
or get_path(self.data, "logentry", "message")
or self.message
)
def get_event_type(self):
"""
Return the type of this event.
See ``sentry.eventtypes``.
"""
return self.data.get("type", "default")
def get_event_metadata(self):
"""
Return the metadata of this event.
See ``sentry.eventtypes``.
"""
# For some inexplicable reason we have some cases where the data
# is completely empty. In that case we want to hobble along
# further.
return self.data.get("metadata") or {}
def get_grouping_config(self):
"""Returns the event grouping config."""
from sentry.grouping.api import get_grouping_config_dict_for_event_data
return get_grouping_config_dict_for_event_data(self.data, self.project)
def get_hashes(self, force_config=None):
"""
Returns the calculated hashes for the event. This uses the stored
information if available. Grouping hashes will take into account
fingerprinting and checksums.
"""
# If we have hashes stored in the data we use them, otherwise we
# fall back to generating new ones from the data. We can only use
# this if we do not force a different config.
if force_config is None:
hashes = self.data.get("hashes")
if hashes is not None:
return hashes
return filter(
None, [x.get_hash() for x in self.get_grouping_variants(force_config).values()]
)
def get_grouping_variants(self, force_config=None, normalize_stacktraces=False):
"""
This is similar to `get_hashes` but will instead return the
grouping components for each variant in a dictionary.
If `normalize_stacktraces` is set to `True` then the event data will be
modified for `in_app` in addition to event variants being created. This
means that after calling that function the event data has been modified
in place.
"""
from sentry.grouping.api import get_grouping_variants_for_event, load_grouping_config
from sentry.stacktraces.processing import normalize_stacktraces_for_grouping
# Forcing configs has two separate modes. One is where just the
# config ID is given in which case it's merged with the stored or
# default config dictionary
if force_config is not None:
if isinstance(force_config, six.string_types):
stored_config = self.get_grouping_config()
config = dict(stored_config)
config["id"] = force_config
else:
config = force_config
# Otherwise we just use the same grouping config as stored. if
# this is None the `get_grouping_variants_for_event` will fill in
# the default.
else:
config = self.data.get("grouping_config")
config = load_grouping_config(config)
if normalize_stacktraces:
normalize_stacktraces_for_grouping(self.data, config)
return get_grouping_variants_for_event(self, config)
def get_primary_hash(self):
# TODO: This *might* need to be protected from an IndexError?
return self.get_hashes()[0]
@property
def title(self):
# also see event_manager.py which inserts this for snuba
et = eventtypes.get(self.get_event_type())()
return et.get_title(self.get_event_metadata())
@property
def culprit(self):
# For a while events did not save the culprit
if self.group_id:
return self.data.get("culprit") or self.group.culprit
return self.data.get("culprit")
@property
def location(self):
# also see event_manager.py which inserts this for snuba
et = eventtypes.get(self.get_event_type())()
return et.get_location(self.get_event_metadata())
@property
def real_message(self):
# XXX(mitsuhiko): this is a transitional attribute that should be
# removed. `message` will be renamed to `search_message` and this
# will become `message`.
return (
get_path(self.data, "logentry", "formatted")
or get_path(self.data, "logentry", "message")
or ""
)
@property
def organization(self):
return self.project.organization
@property
def version(self):
return self.data.get("version", "5")
@property
def ip_address(self):
ip_address = get_path(self.data, "user", "ip_address")
if ip_address:
return ip_address
remote_addr = get_path(self.data, "request", "env", "REMOTE_ADDR")
if remote_addr:
return remote_addr
return None
@property
def tags(self):
try:
rv = sorted(
[
(t, v)
for t, v in get_path(self.data, "tags", filter=True) or ()
if t is not None and v is not None
]
)
return rv
except ValueError:
# at one point Sentry allowed invalid tag sets such as (foo, bar)
# vs ((tag, foo), (tag, bar))
return []
# For compatibility, still used by plugins.
def get_tags(self):
return self.tags
def get_tag(self, key):
for t, v in self.get_tags():
if t == key:
return v
return None
@property
def release(self):
return self.get_tag("sentry:release")
@property
def dist(self):
return self.get_tag("sentry:dist")
def get_raw_data(self):
"""Returns the internal raw event data dict."""
return dict(self.data.items())
@property
def size(self):
return len(json.dumps(dict(self.data)))
@property
def transaction(self):
return self.get_tag("transaction")
def get_email_subject(self):
template = self.project.get_option("mail:subject_template")
if template:
template = EventSubjectTemplate(template)
else:
template = DEFAULT_SUBJECT_TEMPLATE
return truncatechars(template.safe_substitute(EventSubjectTemplateData(self)), 128).encode(
"utf-8"
)
def get_environment(self):
from sentry.models import Environment
if not hasattr(self, "_environment_cache"):
self._environment_cache = Environment.objects.get(
organization_id=self.project.organization_id,
name=Environment.get_name_or_default(self.get_tag("environment")),
)
return self._environment_cache
def get_minimal_user(self):
"""
A minimal 'User' interface object that gives us enough information
to render a user badge.
"""
return self.get_interface("user")
def as_dict(self):
"""Returns the data in normalized form for external consumers."""
# We use a OrderedDict to keep elements ordered for a potential JSON serializer
data = OrderedDict()
data["event_id"] = self.event_id
data["project"] = self.project_id
data["release"] = self.release
data["dist"] = self.dist
data["platform"] = self.platform
data["message"] = self.real_message
data["datetime"] = self.datetime
data["time_spent"] = self.time_spent
data["tags"] = [(k.split("sentry:", 1)[-1], v) for (k, v) in self.tags]
for k, v in sorted(six.iteritems(self.data)):
if k in data:
continue
if k == "sdk":
v = {v_k: v_v for v_k, v_v in six.iteritems(v) if v_k != "client_ip"}
data[k] = v
# for a long time culprit was not persisted. In those cases put
# the culprit in from the group.
if data.get("culprit") is None and self.group_id:
data["culprit"] = self.group.culprit
# Override title and location with dynamically generated data
data["title"] = self.title
data["location"] = self.location
return data
# ============================================
# DEPRECATED
# ============================================
@property
def level(self):
# we might want to move to this:
# return LOG_LEVELS_MAP.get(self.get_level_display()) or self.group.level
if self.group:
return self.group.level
else:
return None
def get_level_display(self):
# we might want to move to this:
# return self.get_tag('level') or self.group.get_level_display()
if self.group:
return self.group.get_level_display()
else:
return None
# deprecated accessors
@property
def logger(self):
warnings.warn("Event.logger is deprecated. Use Event.tags instead.", DeprecationWarning)
return self.get_tag("logger")
@property
def site(self):
warnings.warn("Event.site is deprecated. Use Event.tags instead.", DeprecationWarning)
return self.get_tag("site")
@property
def server_name(self):
warnings.warn(
"Event.server_name is deprecated. Use Event.tags instead.", DeprecationWarning
)
return self.get_tag("server_name")
@property
def checksum(self):
warnings.warn("Event.checksum is no longer used", DeprecationWarning)
return ""
def error(self): # TODO why is this not a property?
warnings.warn("Event.error is deprecated, use Event.title", DeprecationWarning)
return self.title
error.short_description = _("error")
@property
def message_short(self):
warnings.warn("Event.message_short is deprecated, use Event.title", DeprecationWarning)
return self.title
class SnubaEvent(EventCommon):
"""
An event backed by data stored in snuba.
This is a readonly event and does not support event creation or save.
The basic event data is fetched from snuba, and the event body is
fetched from nodestore and bound to the data property in the same way
as a regular Event.
"""
# The minimal list of columns we need to get from snuba to bootstrap an
# event. If the client is planning on loading the entire event body from
# nodestore anyway, we may as well only fetch the minimum from snuba to
# avoid duplicated work.
minimal_columns = ["event_id", "group_id", "project_id", "timestamp"]
# A list of all useful columns we can get from snuba.
selected_columns = minimal_columns + [
"culprit",
"location",
"message",
"platform",
"title",
"type",
# Required to provide snuba-only tags
"tags.key",
"tags.value",
# Required to provide snuba-only 'user' interface
"email",
"ip_address",
"user_id",
"username",
]
__repr__ = sane_repr("project_id", "group_id")
def __init__(self, snuba_values):
"""
When initializing a SnubaEvent, think about the attributes you
might need to access on it. If you only need a few properties, and
they are all available in snuba, then you should use
`SnubaEvent.selected_columns` (or a subset depending on your needs)
But if you know you are going to need the entire event body anyway
(which requires a nodestore lookup) you may as well just initialize
the event with `SnubaEvent.minimal_columns` and let the rest of
the attributes come from nodestore.
"""
assert all(k in snuba_values for k in SnubaEvent.minimal_columns)
# self.snuba_data is a dict of all the stuff we got from snuba
self.snuba_data = snuba_values
# self.data is a (lazy) dict of everything we got from nodestore
node_id = SnubaEvent.generate_node_id(
self.snuba_data["project_id"], self.snuba_data["event_id"]
)
self.data = NodeData(None, node_id, data=None, wrapper=EventDict)
def __getattr__(self, name):
"""
Depending on what snuba data this event was initialized with, we may
have the data available to return, or we may have to look in the
`data` dict (which would force a nodestore load). All unresolved
self.foo type accesses will come through here.
"""
if name in ("_project_cache", "_group_cache", "_environment_cache"):
raise AttributeError()
if name in self.snuba_data:
return self.snuba_data[name]
else:
return self.data[name]
# ============================================
# Snuba-only implementations of properties that
# would otherwise require nodestore data.
# ============================================
@property
def tags(self):
"""
Override of tags property that uses tags from snuba rather than
the nodestore event body. This might be useful for implementing
tag deletions without having to rewrite nodestore blobs.
"""
if "tags.key" in self.snuba_data and "tags.value" in self.snuba_data:
keys = getattr(self, "tags.key")
values = getattr(self, "tags.value")
if keys and values and len(keys) == len(values):
return sorted(zip(keys, values))
else:
return []
else:
return super(SnubaEvent, self).tags
def get_minimal_user(self):
from sentry.interfaces.user import User
return User.to_python(
{
"id": self.user_id,
"email": self.email,
"username": self.username,
"ip_address": self.ip_address,
}
)
# If the data for these is available from snuba, we assume
# it was already normalized on the way in and we can just return
# it, otherwise we defer to EventCommon implementation.
def get_event_type(self):
if "type" in self.snuba_data:
return self.snuba_data["type"]
return super(SnubaEvent, self).get_event_type()
@property
def ip_address(self):
if "ip_address" in self.snuba_data:
return self.snuba_data["ip_address"]
return super(SnubaEvent, self).ip_address
@property
def title(self):
if "title" in self.snuba_data:
return self.snuba_data["title"]
return super(SnubaEvent, self).title
@property
def culprit(self):
if "culprit" in self.snuba_data:
return self.snuba_data["culprit"]
return super(SnubaEvent, self).culprit
@property
def location(self):
if "location" in self.snuba_data:
return self.snuba_data["location"]
return super(SnubaEvent, self).location
# ====================================================
# Snuba implementations of the django fields on Event
# ====================================================
@property
def datetime(self):
"""
Reconstruct the datetime of this event from the snuba timestamp
"""
# dateutil seems to use tzlocal() instead of UTC even though the string
# ends with '+00:00', so just replace the TZ with UTC because we know
# all timestamps from snuba are UTC.
return parse_date(self.timestamp).replace(tzinfo=pytz.utc)
@property
def time_spent(self):
return None
@property
def message(self):
if "message" in self.snuba_data:
return self.snuba_data["message"]
return self.data.get("message")
@property
def platform(self):
if "platform" in self.snuba_data:
return self.snuba_data["platform"]
return self.data.get("platform")
@property
def id(self):
# Because a snuba event will never have a django row id, just return
# the hex event_id here. We should be moving to a world where we never
# have to reference the row id anyway.
return self.event_id
def save(self):
raise NotImplementedError
class Event(EventCommon, Model):
"""
An event backed by data stored in postgres.
"""
__core__ = False
group_id = BoundedBigIntegerField(blank=True, null=True)
event_id = models.CharField(max_length=32, null=True, db_column="message_id")
project_id = BoundedBigIntegerField(blank=True, null=True)
message = models.TextField()
platform = models.CharField(max_length=64, null=True)
datetime = models.DateTimeField(default=timezone.now, db_index=True)
time_spent = BoundedIntegerField(null=True)
data = NodeField(
blank=True,
null=True,
ref_func=lambda x: x.project_id or x.project.id,
ref_version=2,
wrapper=EventDict,
)
objects = EventManager()
class Meta:
app_label = "sentry"
db_table = "sentry_message"
verbose_name = _("message")
verbose_name_plural = _("messages")
unique_together = (("project_id", "event_id"),)
index_together = (("group_id", "datetime"),)
__repr__ = sane_repr("project_id", "group_id")
def __getstate__(self):
state = Model.__getstate__(self)
# do not pickle cached info. We want to fetch this on demand
# again. In particular if we were to pickle interfaces we would
# pickle a CanonicalKeyView which old sentry workers do not know
# about
state.pop("_project_cache", None)
state.pop("_environment_cache", None)
state.pop("_group_cache", None)
state.pop("interfaces", None)
return state
class EventSubjectTemplate(string.Template):
idpattern = r"(tag:)?[_a-z][_a-z0-9]*"
class EventSubjectTemplateData(object):
tag_aliases = {"release": "sentry:release", "dist": "sentry:dist", "user": "sentry:user"}
def __init__(self, event):
self.event = event
def __getitem__(self, name):
if name.startswith("tag:"):
name = name[4:]
value = self.event.get_tag(self.tag_aliases.get(name, name))
if value is None:
raise KeyError
return six.text_type(value)
elif name == "project":
return self.event.project.get_full_name()
elif name == "projectID":
return self.event.project.slug
elif name == "shortID" and self.event.group_id:
return self.event.group.qualified_short_id
elif name == "orgID":
return self.event.organization.slug
elif name == "title":
return self.event.title
raise KeyError
DEFAULT_SUBJECT_TEMPLATE = EventSubjectTemplate("$shortID - $title")
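# --- Illustrative example (added for clarity, not part of the original module) ---
# EventSubjectTemplate resolves $-placeholders through EventSubjectTemplateData,
# so a project-level "mail:subject_template" such as
#     "$shortID [$tag:environment] $title"
# would render the qualified short group id, the event's "environment" tag and
# the computed title when get_email_subject() builds the notification subject.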
| 2.046875 | 2 |
Assignment3/src/data/make_nowcast_dataset.py | shikashyam/BigDataSystemsCoursework | 0 | 6110 | <gh_stars>0
"""
Makes training and test dataset for nowcasting model using SEVIR
"""
# -*- coding: utf-8 -*-
import argparse
import logging
import os
import h5py
os.environ["HDF5_USE_FILE_LOCKING"]='FALSE'
import sys
import numpy as np
import tensorflow as tf
from nowcast_generator import get_nowcast_test_generator
# parser = argparse.ArgumentParser(description='Make nowcast training & test datasets using SEVIR')
# parser.add_argument('--sevir_data', type=str, help='location of SEVIR dataset',default='../../data/sevir')
# parser.add_argument('--sevir_catalog', type=str, help='location of SEVIR dataset',default='../../data/CATALOG.csv')
# parser.add_argument('--output_location', type=str, help='location of SEVIR dataset',default='../../data/interim')
# parser.add_argument('--n_chunks', type=int, help='Number of chucks to use (increase if memory limited)',default=10)
#args = parser.parse_args()
def generate_data(sevir_location,sevir_catalog,output_location,n_chunks=10):
"""
Runs data processing scripts to extract training set from SEVIR
"""
logger = logging.getLogger(__name__)
logger.info('making final data set from raw data')
#trn_generator = get_nowcast_train_generator(sevir_catalog=args.sevir_catalog,sevir_location=args.sevir_data)
tst_generator = get_nowcast_test_generator(sevir_catalog,sevir_location)
# logger.info('Reading/writing training data to %s' % ('%s/nowcast_training.h5' % args.output_location))
#read_write_chunks('%s/nowcast_training.h5' % args.output_location,trn_generator,args.n_chunks)
logger.info('Reading/writing testing data to ' + output_location+'/nowcast_testing.h5')
read_write_chunks(output_location+'/nowcast_testing.h5',tst_generator,n_chunks)
def read_write_chunks( filename, generator, n_chunks ):
logger = logging.getLogger(__name__)
chunksize = len(generator)//n_chunks
# get first chunk
logger.info('Gathering chunk 0/%s:' % n_chunks)
X,Y=generator.load_batches(n_batches=chunksize,offset=0,progress_bar=True)
# Create datasets
with h5py.File(filename, 'w') as hf:
hf.create_dataset('IN', data=X[0], maxshape=(None,X[0].shape[1],X[0].shape[2],X[0].shape[3]))
hf.create_dataset('OUT', data=Y[0], maxshape=(None,Y[0].shape[1],Y[0].shape[2],Y[0].shape[3]))
# Gather other chunks
for c in range(1,n_chunks+1):
offset = c*chunksize
n_batches = min(chunksize,len(generator)-offset)
if n_batches <= 0: # all done, nothing left to load
break
logger.info('Gathering chunk %d/%s:' % (c,n_chunks))
X,Y=generator.load_batches(n_batches=n_batches,offset=offset,progress_bar=True)
with h5py.File(filename, 'a') as hf:
hf['IN'].resize((hf['IN'].shape[0] + X[0].shape[0]), axis = 0)
hf['OUT'].resize((hf['OUT'].shape[0] + Y[0].shape[0]), axis = 0)
hf['IN'][-X[0].shape[0]:] = X[0]
hf['OUT'][-Y[0].shape[0]:] = Y[0]
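# --- Resulting file layout (illustrative) ---
# After generate_data() completes, <output_location>/nowcast_testing.h5 holds two
# resizable datasets, 'IN' and 'OUT', which can be read back with h5py; the exact
# shapes depend on the SEVIR nowcast generator configuration:
#
#     with h5py.File('nowcast_testing.h5', 'r') as hf:
#         x, y = hf['IN'][:], hf['OUT'][:]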
| 2.234375 | 2 |
blender-plugin/import_cast.py | rtasan/ApexCastImporter | 0 | 6111 | <filename>blender-plugin/import_cast.py
# The Original importer was created by Nick
# Copyright (c) 2020 Nick
import bpy
import bmesh
import os
import array
import math
from mathutils import *
from bpy_extras.image_utils import load_image
from .cast import Cast, Model, Animation, Curve, NotificationTrack, Mesh, Skeleton, Bone, Material, File
def utilityBuildPath(root, asset):
if os.path.isabs(asset):
return asset
root = os.path.dirname(root)
return os.path.join(root, asset)
def utilityAssignBSDFMaterialSlots(material, slots, path):
material.node_tree.nodes.remove(
material.node_tree.nodes["Principled BSDF"])
shader = material.node_tree.nodes.new("ShaderNodeGroup")
output = material.node_tree.nodes['Material Output']
# Create the group shader node
shader.node_tree = bpy.data.node_groups['S/G-Blender']
# Define the texture slot mapping
switcher = {
"albedo": "Diffuse map",
"diffuse": "Diffuse map",
"specular": "Specular map",
"ao": "AO map",
"cavity": "Cavity map",
"gloss": "Glossiness map",
"normal": "Normal map",
"emissive": "Emission input"
}
# Loop and connect the slots
for slot in slots:
connection = slots[slot]
if not connection.__class__ is File:
continue
if not slot in switcher:
continue
texture = material.node_tree.nodes.new("ShaderNodeTexImage") # create an image texture node
try:
texture.image = bpy.data.images.load(
utilityBuildPath(path, connection.Path())) # load the image file
except RuntimeError:
pass
if texture.image:
material.node_tree.links.new(
shader.inputs[switcher[slot]], texture.outputs["Color"])
material.node_tree.links.new(shader.outputs[0], output.inputs[0]) # link the shader group's output to the material output
else:
material.node_tree.nodes.remove(texture)
def importSkeletonNode(name, skeleton):
if skeleton is None:
return None
armature = bpy.data.armatures.new("Joints")
armature.display_type = "STICK"
skeletonObj = bpy.data.objects.new(name, armature)
skeletonObj.show_in_front = True
bpy.context.view_layer.active_layer_collection.collection.objects.link(
skeletonObj)
bpy.context.view_layer.objects.active = skeletonObj
bpy.ops.object.mode_set(mode='EDIT')
bones = skeleton.Bones()
handles = [None] * len(bones)
matrices = {}
for i, bone in enumerate(bones):
newBone = armature.edit_bones.new(bone.Name())
newBone.tail = 0, 0.05, 0 # I am sorry but blender sucks
tempQuat = bone.LocalRotation() # Also sucks, WXYZ? => XYZW master race
matRotation = Quaternion(
(tempQuat[3], tempQuat[0], tempQuat[1], tempQuat[2])).to_matrix().to_4x4()
matTranslation = Matrix.Translation(Vector(bone.LocalPosition()))
matrices[bone.Name()] = matTranslation @ matRotation
handles[i] = newBone
for i, bone in enumerate(bones):
if bone.ParentIndex() > -1:
handles[i].parent = handles[bone.ParentIndex()]
bpy.context.view_layer.objects.active = skeletonObj
bpy.ops.object.mode_set(mode='POSE')
for bone in skeletonObj.pose.bones:
bone.matrix_basis.identity()
bone.matrix = matrices[bone.name]
bpy.ops.pose.armature_apply()
return skeletonObj
def importMaterialNode(path, material):
# If you already created the material, ignore this
materialNew = bpy.data.materials.get(material.Name())
if materialNew is not None:
return material.Name(), materialNew
materialNew = bpy.data.materials.new(name=material.Name())
materialNew.use_nodes = True
# Blender really only wants a BSDF shader node
# so we're gonna give it one
utilityAssignBSDFMaterialSlots(materialNew, material.Slots(), path)
return material.Name(), materialNew
def importModelNode(model, path):
# Extract the name of this model from the path
modelName = os.path.splitext(os.path.basename(path))[0]
# Import skeleton for binds, materials for meshes
skeletonObj = importSkeletonNode(modelName, model.Skeleton())
materialArray = {key: value for (key, value) in (
importMaterialNode(path, x) for x in model.Materials())}
meshes = model.Meshes()
for mesh in meshes:
newMesh = bpy.data.meshes.new("polySurfaceMesh")
blendMesh = bmesh.new()
vertexColorLayer = blendMesh.loops.layers.color.new("color1")
vertexWeightLayer = blendMesh.verts.layers.deform.new()
vertexUVLayers = [blendMesh.loops.layers.uv.new(
"map%d" % x) for x in range(mesh.UVLayerCount())]
vertexPositions = mesh.VertexPositionBuffer()
for x in range(0, len(vertexPositions), 3):
blendMesh.verts.new(
Vector((vertexPositions[x], vertexPositions[x + 1], vertexPositions[x + 2])))
blendMesh.verts.ensure_lookup_table()
faceLookupMap = [1, 2, 0]
vertexNormalLayer = []
vertexNormals = mesh.VertexNormalBuffer()
vertexColors = mesh.VertexColorBuffer()
vertexUVs = [mesh.VertexUVLayerBuffer(
x) for x in range(mesh.UVLayerCount())]
def vertexToFaceVertex(face):
for x, loop in enumerate(face.loops):
vertexIndex = faces[faceStart + faceLookupMap[x]]
if vertexNormals is not None:
vertexNormalLayer.append((vertexNormals[vertexIndex * 3], vertexNormals[(
vertexIndex * 3) + 1], vertexNormals[(vertexIndex * 3) + 2]))
for uvLayer in range(mesh.UVLayerCount()):
uv = Vector(
(vertexUVs[uvLayer][vertexIndex * 2], vertexUVs[uvLayer][(vertexIndex * 2) + 1]))
uv.y = 1.0 - uv.y
loop[vertexUVLayers[uvLayer]].uv = uv
if vertexColors is not None:
loop[vertexColorLayer] = [
(vertexColors[vertexIndex] >> i & 0xff) / 255.0 for i in (24, 16, 8, 0)]
faces = mesh.FaceBuffer()
for faceStart in range(0, len(faces), 3):
indices = [blendMesh.verts[faces[faceStart + faceLookupMap[0]]],
blendMesh.verts[faces[faceStart + faceLookupMap[1]]], blendMesh.verts[faces[faceStart + faceLookupMap[2]]]]
try:
newLoop = blendMesh.faces.new(indices)
except ValueError:
continue
else:
vertexToFaceVertex(newLoop)
maximumInfluence = mesh.MaximumWeightInfluence()
if maximumInfluence > 0:
weightBoneBuffer = mesh.VertexWeightBoneBuffer()
weightValueBuffer = mesh.VertexWeightValueBuffer()
for x, vert in enumerate(blendMesh.verts):
if (weightValueBuffer[x * maximumInfluence] > 0.0):
vert[vertexWeightLayer][weightBoneBuffer[x * maximumInfluence]
] = weightValueBuffer[x * maximumInfluence]
blendMesh.to_mesh(newMesh)
newMesh.create_normals_split()
if len(vertexNormalLayer) > 0:
for x, _loop in enumerate(newMesh.loops):
newMesh.loops[x].normal = vertexNormalLayer[x]
newMesh.validate(clean_customdata=False)
clnors = array.array('f', [0.0] * (len(newMesh.loops) * 3))
newMesh.loops.foreach_get("normal", clnors)
newMesh.polygons.foreach_set(
"use_smooth", [True] * len(newMesh.polygons))
newMesh.normals_split_custom_set(tuple(zip(*(iter(clnors),) * 3)))
newMesh.use_auto_smooth = True
meshObj = bpy.data.objects.new("CastMesh", newMesh)
bpy.context.view_layer.active_layer_collection.collection.objects.link(
meshObj)
bpy.context.view_layer.objects.active = meshObj
meshMaterial = mesh.Material()
if meshMaterial is not None:
meshObj.data.materials.append(materialArray[meshMaterial.Name()])
for bone in skeletonObj.pose.bones:
meshObj.vertex_groups.new(name=bone.name)
meshObj.parent = skeletonObj
modifier = meshObj.modifiers.new('Armature Rig', 'ARMATURE')
modifier.object = skeletonObj
modifier.use_bone_envelopes = False
modifier.use_vertex_groups = True
def importRootNode(node, path):
for child in node.ChildrenOfType(Model):
importModelNode(child, path)
# for child in node.ChildrenOfType(Animation):
# importAnimationNode(child, path)
def importCast(path):
cast = Cast()
cast.load(path)
for root in cast.Roots():
importRootNode(root, path)
def load(self, context, filepath=""):
# Append the S/G-Blender shader node group
shader_path = bpy.context.preferences.addons[__package__].preferences.apex_sgshader_path
try:
file_path = shader_path
inner_path = 'NodeTree'
object_name = 'S/G-Blender'
bpy.ops.wm.append(
filepath=os.path.join(file_path, inner_path, object_name),
directory=os.path.join(file_path, inner_path),
filename=object_name
)
except:
self.report({'ERROR'}, 'Set the Shader path in AddonPreferences first.')
return False
# Parse and load cast nodes
importCast(filepath)
# Update the scene, reset view mode before returning.
bpy.context.view_layer.update()
bpy.ops.object.mode_set(mode="OBJECT")
return True
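# --- Usage note (added for clarity; the file path below is hypothetical) ---
# load() is intended to be called from the add-on's import operator, roughly:
#
#     load(operator, bpy.context, filepath="C:/exports/character.cast")
#
# The 'S/G-Blender' node group must exist in the .blend file configured in the
# add-on preferences, otherwise the shader append step reports an error and
# the import is aborted.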
| 2.015625 | 2 |
study/migrations/0003_auto_20200224_2316.py | hpathipati/Quick-Tutor | 0 | 6112 | <gh_stars>0
# Generated by Django 3.0.2 on 2020-02-24 23:16
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('study', '0002_student'),
]
operations = [
migrations.AlterField(
model_name='student',
name='bio',
field=models.CharField(blank=True, max_length=200),
),
]
| 1.484375 | 1 |
tests/unit/resources/test_resource.py | gzecchi/oneview-python | 0 | 6113 | <reponame>gzecchi/oneview-python<filename>tests/unit/resources/test_resource.py
# -*- coding: utf-8 -*-
###
# (C) Copyright [2019] Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
import io
import unittest
import mock
from mock import call
from tests.test_utils import mock_builtin
from hpOneView.connection import connection
from hpOneView import exceptions
from hpOneView.resources.resource import (ResourceClient, ResourceHelper, ResourceFileHandlerMixin,
ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin,
ResourceSchemaMixin, Resource,
RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor,
RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED,
transform_list_to_dict, extract_id_from_uri, merge_resources,
merge_default_values, unavailable_method)
class StubResourceFileHandler(ResourceFileHandlerMixin, Resource):
"""Stub class to test resource file operations"""
class StubResourceZeroBody(ResourceZeroBodyMixin, Resource):
"""Stub class to test resoruce zero body methods"""
class StubResourcePatch(ResourcePatchMixin, Resource):
"""Stub class to test resource patch operations"""
class StubResourceUtilization(ResourceUtilizationMixin, Resource):
"""Stub class to test resource utilization methods"""
class StubResourceSchema(ResourceSchemaMixin, Resource):
"""Stub class to test resource schema methods"""
class StubResource(Resource):
"""Stub class to test resource common methods"""
URI = "/rest/testuri"
class BaseTest(unittest.TestCase):
URI = "/rest/testuri"
TYPE_V200 = "typeV200"
TYPE_V300 = "typeV300"
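    # Default payload values keyed by API version; the tests below check that the
    # matching "type" is merged into create/update request bodies.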
DEFAULT_VALUES = {
"200": {"type": TYPE_V200},
"300": {"type": TYPE_V300}
}
def setUp(self, resource_client=None):
self.resource_client = resource_client
self.resource_client.URI = self.URI
self.resource_client.DEFAULT_VALUES = self.DEFAULT_VALUES
self.resource_client.data = {"uri": "/rest/testuri"}
self.resource_client._merge_default_values()
self.task = {"task": "task", "taskState": "Finished"}
self.response_body = {"body": "body"}
self.custom_headers = {"Accept-Language": "en_US"}
class ResourceFileHandlerMixinTest(BaseTest):
def setUp(self):
self.connection = connection('127.0.0.1', 300)
self.resource_client = StubResourceFileHandler(self.connection)
super(ResourceFileHandlerMixinTest, self).setUp(self.resource_client)
@mock.patch.object(connection, "post_multipart_with_response_handling")
def test_upload_should_call_post_multipart(self, mock_post_multipart):
uri = "/rest/testuri/"
filepath = "test/SPPgen9snap6.2015_0405.81.iso"
mock_post_multipart.return_value = None, mock.Mock()
self.resource_client.upload(filepath, uri)
mock_post_multipart.assert_called_once_with(uri, filepath, "SPPgen9snap6.2015_0405.81.iso")
@mock.patch.object(connection, "post_multipart_with_response_handling")
def test_upload_should_call_post_multipart_with_resource_uri_when_not_uri_provided(self, mock_post_multipart):
filepath = "test/SPPgen9snap6.2015_0405.81.iso"
mock_post_multipart.return_value = None, mock.Mock()
self.resource_client.upload(filepath)
mock_post_multipart.assert_called_once_with("/rest/testuri", mock.ANY, mock.ANY)
@mock.patch.object(connection, "post_multipart_with_response_handling")
@mock.patch.object(TaskMonitor, "wait_for_task")
@mock.patch.object(connection, "get")
def test_upload_should_wait_for_task_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart):
uri = "/rest/testuri/"
filepath = "test/SPPgen9snap6.2015_0405.81.iso"
mock_post_multipart.return_value = self.task, mock.Mock()
self.resource_client.upload(filepath, uri)
mock_wait4task.assert_called_once_with(self.task, -1)
@mock.patch.object(connection, "post_multipart_with_response_handling")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_upload_should_not_wait_for_task_when_response_is_not_task(self, mock_wait4task, mock_post_multipart):
uri = "/rest/testuri/"
filepath = "test/SPPgen9snap6.2015_0405.81.iso"
mock_post_multipart.return_value = None, mock.Mock()
self.resource_client.upload(filepath, uri)
mock_wait4task.not_been_called()
@mock.patch.object(connection, "post_multipart_with_response_handling")
@mock.patch.object(TaskMonitor, "wait_for_task")
@mock.patch.object(connection, "get")
def test_upload_should_return_associated_resource_when_response_is_task(self, mock_get, mock_wait4task,
mock_post_multipart):
        fake_associated_resource = mock.Mock()
uri = "/rest/testuri/"
filepath = "test/SPPgen9snap6.2015_0405.81.iso"
mock_post_multipart.return_value = self.task, mock.Mock()
        mock_wait4task.return_value = fake_associated_resource
result = self.resource_client.upload(filepath, uri)
        self.assertEqual(result, fake_associated_resource)
@mock.patch.object(connection, "post_multipart_with_response_handling")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_upload_should_return_resource_when_response_is_not_task(self, mock_wait4task, mock_post_multipart):
fake_response_body = mock.Mock()
uri = "/rest/testuri/"
filepath = "test/SPPgen9snap6.2015_0405.81.iso"
mock_post_multipart.return_value = None, fake_response_body
result = self.resource_client.upload(filepath, uri)
self.assertEqual(result, fake_response_body)
@mock.patch.object(connection, "download_to_stream")
@mock.patch(mock_builtin("open"))
def test_download_should_call_download_to_stream_with_given_uri(self, mock_open, mock_download_to_stream):
file_path = "~/archive.log"
uri = "/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315"
mock_open.return_value = io.StringIO()
self.resource_client.download(uri, file_path)
mock_download_to_stream.assert_called_once_with(mock.ANY, uri)
@mock.patch.object(connection, "download_to_stream")
@mock.patch(mock_builtin("open"))
def test_download_should_call_download_to_stream_with_open_file(self, mock_open, mock_download_to_stream):
file_path = "~/archive.log"
uri = "/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315"
fake_file = io.StringIO()
mock_open.return_value = fake_file
self.resource_client.download(uri, file_path)
mock_open.assert_called_once_with(file_path, 'wb')
mock_download_to_stream.assert_called_once_with(fake_file, mock.ANY)
@mock.patch.object(connection, "download_to_stream")
@mock.patch(mock_builtin("open"))
def test_download_should_return_true_when_success(self, mock_open, mock_download_to_stream):
file_path = "~/archive.log"
uri = "/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315"
mock_download_to_stream.return_value = True
mock_open.return_value = io.StringIO()
result = self.resource_client.download(uri, file_path)
self.assertTrue(result)
@mock.patch.object(connection, "download_to_stream")
@mock.patch(mock_builtin("open"))
def test_download_should_return_false_when_error(self, mock_open, mock_download_to_stream):
file_path = "~/archive.log"
uri = "/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315"
mock_download_to_stream.return_value = False
mock_open.return_value = io.StringIO()
result = self.resource_client.download(uri, file_path)
self.assertFalse(result)
class ResourceZeroBodyMixinTest(BaseTest):
def setUp(self):
self.connection = connection('127.0.0.1', 300)
self.resource_client = StubResourceZeroBody(self.connection)
super(ResourceZeroBodyMixinTest, self).setUp(self.resource_client)
@mock.patch.object(connection, "post")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_create_with_zero_body_called_once(self, mock_wait4task, mock_post):
mock_post.return_value = self.task, self.task
mock_wait4task.return_value = self.task
self.resource_client.create_with_zero_body()
mock_post.assert_called_once_with(
"/rest/testuri", {}, custom_headers=None)
@mock.patch.object(connection, "post")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_create_with_zero_body_called_once_without_uri(self, mock_wait4task, mock_post):
mock_post.return_value = self.task, self.task
mock_wait4task.return_value = self.task
self.resource_client.create_with_zero_body(timeout=-1)
mock_post.assert_called_once_with(
"/rest/testuri", {}, custom_headers=None)
@mock.patch.object(connection, "post")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_create_with_zero_body_and_custom_headers(self, mock_wait4task, mock_post):
mock_post.return_value = self.task, self.task
mock_wait4task.return_value = self.task
self.resource_client.create_with_zero_body(custom_headers=self.custom_headers)
mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={"Accept-Language": "en_US"})
@mock.patch.object(connection, "post")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_create_with_zero_body_return_entity(self, mock_wait4task, mock_post):
response_body = {"resource_name": "name"}
mock_post.return_value = self.task, self.task
mock_wait4task.return_value = response_body
new_resource = self.resource_client.create_with_zero_body(timeout=-1)
self.assertNotEqual(new_resource, self.resource_client)
@mock.patch.object(connection, "post")
def test_create_with_zero_body_without_task(self, mock_post):
mock_post.return_value = None, self.response_body
new_resource = self.resource_client.create_with_zero_body()
self.assertNotEqual(new_resource, self.resource_client)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "put")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_update_with_zero_body_called_once(self, mock_wait4task, mock_update, mock_ensure_resource):
mock_update.return_value = self.task, self.task
mock_wait4task.return_value = self.task
self.resource_client.URI = "/rest/enclosures"
self.resource_client.update_with_zero_body("/rest/enclosures/09USE133E5H4/configuration",
timeout=-1)
mock_update.assert_called_once_with(
"/rest/enclosures/09USE133E5H4/configuration", None, custom_headers=None)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "put")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_update_with_zero_body_and_custom_headers(self, mock_wait4task, mock_update, mock_ensure_resource):
mock_update.return_value = self.task, self.task
mock_wait4task.return_value = self.task
self.resource_client.update_with_zero_body(uri="/rest/testuri", custom_headers=self.custom_headers)
mock_update.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={"Accept-Language": "en_US"})
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "put")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_update_with_zero_body_return_entity(self, mock_wait4task, mock_put, mock_ensure_resource):
response_body = {"resource_name": "name"}
self.resource_client.URI = "/rest/enclosures"
mock_put.return_value = self.task, self.task
mock_wait4task.return_value = response_body
result = self.resource_client.update_with_zero_body(
"/rest/enclosures/09USE133E5H4/configuration", timeout=-1)
self.assertEqual(result, response_body)
@mock.patch.object(connection, "put")
def test_update_with_zero_body_without_task(self, mock_put):
mock_put.return_value = None, self.response_body
self.resource_client.URI = "/rest/enclosures"
result = self.resource_client.update_with_zero_body(
"/rest/enclosures/09USE133E5H4/configuration", timeout=-1)
self.assertEqual(result, self.response_body)
class ResourcePatchMixinTest(BaseTest):
def setUp(self):
self.connection = connection('127.0.0.1', 300)
self.resource_client = StubResourcePatch(self.connection)
super(ResourcePatchMixinTest, self).setUp(self.resource_client)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "patch")
def test_patch_request_when_id_is_provided_v200(self, mock_patch, mock_ensure_resource):
uri = "/rest/testuri"
request_body = [{
"op": "replace",
"path": "/name",
"value": "new_name",
}]
mock_patch.return_value = {}, {}
self.connection._apiVersion = 200
self.resource_client.patch("replace", "/name", "new_name")
mock_patch.assert_called_once_with(uri, request_body, custom_headers={})
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "patch")
def test_patch_request_when_id_is_provided_v300(self, mock_patch, mock_ensure_resource):
request_body = [{
"op": "replace",
"path": "/name",
"value": "new_name",
}]
mock_patch.return_value = {}, {}
self.resource_client.patch("replace", "/name", "new_name")
mock_patch.assert_called_once_with(
"/rest/testuri", request_body, custom_headers={"Content-Type": "application/json-patch+json"})
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "patch")
def test_patch_request_when_uri_is_provided(self, mock_patch, mock_ensure_resource):
request_body = [{
"op": "replace",
"path": "/name",
"value": "new_name",
}]
mock_patch.return_value = {}, {}
self.resource_client.patch("replace", "/name", "new_name")
mock_patch.assert_called_once_with(
"/rest/testuri", request_body, custom_headers={"Content-Type": "application/json-patch+json"})
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "patch")
def test_patch_with_custom_headers_v200(self, mock_patch, mock_ensure_resource):
mock_patch.return_value = {}, {}
self.connection._apiVersion = 200
self.resource_client.patch("operation", "/field", "value",
custom_headers=self.custom_headers)
mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={"Accept-Language": "en_US"})
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "patch")
def test_patch_with_custom_headers_v300(self, mock_patch, mock_ensure_resource):
mock_patch.return_value = {}, {}
self.resource_client.patch("operation", "/field", "value",
custom_headers=self.custom_headers)
mock_patch.assert_called_once_with(mock.ANY,
mock.ANY,
custom_headers={"Accept-Language": "en_US",
"Content-Type": "application/json-patch+json"})
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "patch")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_patch_return_entity(self, mock_wait4task, mock_patch, mock_ensure_resource):
entity = {"resource_id": "123a53cz"}
mock_patch.return_value = self.task, self.task
mock_wait4task.return_value = entity
self.resource_client.patch("replace", "/name", "new_name")
self.assertEqual(self.resource_client.data, entity)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "patch")
@mock.patch.object(TaskMonitor, "get_completed_task")
def test_patch_request_custom_headers_with_content_type(self, mock_task, mock_patch, mock_ensure_resource):
uri = "/rest/testuri"
dict_info = {"resource_name": "a name"}
mock_patch.return_value = {}, {}
headers = {"Content-Type": "application/json",
"Extra": "extra"}
self.connection._apiVersion = 300
self.resource_client.patch_request(uri, body=dict_info, custom_headers=headers)
mock_patch.assert_called_once_with(uri, dict_info, custom_headers=headers)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "patch")
@mock.patch.object(TaskMonitor, "get_completed_task")
def test_patch_request_custom_headers(self, mock_task, mock_patch, mock_ensure_resource):
uri = "/rest/testuri"
dict_info = {"resource_name": "a name"}
mock_patch.return_value = {}, {}
headers = {"Extra": "extra"}
self.connection._apiVersion = 300
self.resource_client.patch_request(uri, body=dict_info, custom_headers=headers)
mock_patch.assert_called_once_with(
uri,
dict_info,
custom_headers={"Extra": "extra",
"Content-Type": "application/json-patch+json"})
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "patch")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_wait_for_activity_on_patch(self, mock_wait4task, mock_patch, mock_ensure_resource):
entity = {"resource_id": "123a53cz"}
mock_patch.return_value = self.task, self.task
mock_wait4task.return_value = entity
self.resource_client.patch("replace", "/name", "new_name")
mock_wait4task.assert_called_once_with(self.task, mock.ANY)
class ResourceUtilizationMixinTest(BaseTest):
def setUp(self):
self.connection = connection('127.0.0.1', 300)
self.resource_client = StubResourceUtilization(self.connection)
super(ResourceUtilizationMixinTest, self).setUp(self.resource_client)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "get")
def test_get_utilization_with_args(self, mock_get, mock_ensure_resource):
self.resource_client.get_utilization(fields="AmbientTemperature,AveragePower,PeakPower",
filter="startDate=2016-05-30T03:29:42.361Z",
refresh=True, view="day")
expected_uri = "/rest/testuri/utilization" \
"?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z" \
"&fields=AmbientTemperature%2CAveragePower%2CPeakPower" \
"&refresh=true" \
"&view=day"
mock_get.assert_called_once_with(expected_uri)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "get")
def test_get_utilization_with_multiple_filters(self, mock_get, mock_ensure_resource):
self.resource_client.get_utilization(
fields="AmbientTemperature,AveragePower,PeakPower",
filter=["startDate=2016-05-30T03:29:42.361Z",
"endDate=2016-05-31T03:29:42.361Z"],
refresh=True,
view="day")
expected_uri = "/rest/testuri/utilization" \
"?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z" \
"&filter=endDate%3D2016-05-31T03%3A29%3A42.361Z" \
"&fields=AmbientTemperature%2CAveragePower%2CPeakPower" \
"&refresh=true" \
"&view=day"
mock_get.assert_called_once_with(expected_uri)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "get")
def test_get_utilization_by_id_with_defaults(self, mock_get, mock_ensure_resource):
self.resource_client.get_utilization()
expected_uri = "/rest/testuri/utilization"
mock_get.assert_called_once_with(expected_uri)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "get")
def test_get_utilization_by_uri_with_defaults(self, mock_get, mock_ensure_resource):
self.resource_client.get_utilization()
expected_uri = "/rest/testuri/utilization"
mock_get.assert_called_once_with(expected_uri)
class ResourceSchemaMixinTest(BaseTest):
def setUp(self):
self.connection = connection('127.0.0.1', 300)
self.resource_client = StubResourceSchema(self.connection)
super(ResourceSchemaMixinTest, self).setUp(self.resource_client)
@mock.patch.object(connection, "get")
def test_get_schema_uri(self, mock_get):
self.resource_client.get_schema()
mock_get.assert_called_once_with(self.URI + "/schema")
class ResourceTest(BaseTest):
def setUp(self):
self.connection = connection('127.0.0.1', 300)
self.resource_client = StubResource(self.connection)
super(ResourceTest, self).setUp(self.resource_client)
self.resource_helper = ResourceHelper(self.URI, self.connection, None)
@mock.patch.object(ResourceHelper, "do_put")
@mock.patch.object(Resource, "ensure_resource_data")
    def test_ensure_resource_should_call_once(self, mock_ensure_resource, mock_do_put):
self.resource_client.data = {"uri": "/rest/test"}
self.resource_client.update(data={"name": "test"})
mock_do_put.assert_called_once()
mock_ensure_resource.assert_called_once()
def test_ensure_resource_raise_unique_identifier_exception(self):
self.resource_client.data = []
self.assertRaises(exceptions.HPOneViewMissingUniqueIdentifiers,
self.resource_client.ensure_resource_data)
@mock.patch.object(ResourceHelper, "do_get")
def test_ensure_resource_raise_resource_not_found_exception_with_uri(self, mock_do_get):
self.resource_client.data = {"uri": "/uri/test"}
mock_do_get.return_value = []
with self.assertRaises(exceptions.HPOneViewResourceNotFound):
self.resource_client.ensure_resource_data(update_data=True)
@mock.patch.object(Resource, "get_by")
def test_ensure_resource_raise_resource_not_found_exception_without_uri(self, mock_get_by):
self.resource_client.data = {"name": "testname"}
mock_get_by.return_value = []
with self.assertRaises(exceptions.HPOneViewResourceNotFound):
self.resource_client.ensure_resource_data(update_data=True)
@mock.patch.object(ResourceHelper, "do_get")
@mock.patch.object(Resource, "get_by")
    def test_ensure_resource_should_update_resource_data(self, mock_get_by, mock_do_get):
        get_by_return_value = [{"name": "testname", "uri": "/rest/testuri"}]
        self.resource_client.data = {"name": "testname"}
        mock_get_by.return_value = get_by_return_value
self.resource_client.ensure_resource_data(update_data=True)
self.assertEqual(self.resource_client.data, get_by_return_value[0])
@mock.patch.object(Resource, "get_by")
def test_ensure_resource_without_data_update(self, mock_get_by):
mock_get_by.return_value = []
actual_result = self.resource_client.ensure_resource_data(update_data=False)
expected_result = None
self.assertEqual(actual_result, expected_result)
@mock.patch.object(connection, "get")
def test_get_all_called_once(self, mock_get):
filter = "'name'='OneViewSDK \"Test FC Network'"
sort = "name:ascending"
query = "name NE 'WrongName'"
mock_get.return_value = {"members": [{"member": "member"}]}
result = self.resource_helper.get_all(
1, 500, filter, query, sort)
uri = "{resource_uri}?start=1" \
"&count=500" \
"&filter=%27name%27%3D%27OneViewSDK%20%22Test%20FC%20Network%27" \
"&query=name%20NE%20%27WrongName%27" \
"&sort=name%3Aascending".format(resource_uri=self.URI)
self.assertEqual([{"member": "member"}], result)
mock_get.assert_called_once_with(uri)
@mock.patch.object(connection, "get")
def test_get_all_with_defaults(self, mock_get):
self.resource_client.get_all()
uri = "{resource_uri}?start=0&count=-1".format(resource_uri=self.URI)
mock_get.assert_called_once_with(uri)
@mock.patch.object(connection, "get")
def test_get_all_with_custom_uri(self, mock_get):
self.resource_helper.get_all(uri="/rest/testuri/12467836/subresources")
uri = "/rest/testuri/12467836/subresources?start=0&count=-1"
mock_get.assert_called_once_with(uri)
@mock.patch.object(connection, "get")
def test_get_all_with_custom_uri_and_query_string(self, mock_get):
self.resource_helper.get_all(uri="/rest/testuri/12467836/subresources?param=value")
uri = "/rest/testuri/12467836/subresources?param=value&start=0&count=-1"
mock_get.assert_called_once_with(uri)
@mock.patch.object(connection, "get")
def test_get_all_with_different_resource_uri_should_fail(self, mock_get):
try:
self.resource_helper.get_all(uri="/rest/other/resource/12467836/subresources")
except exceptions.HPOneViewUnknownType as e:
self.assertEqual(UNRECOGNIZED_URI, e.args[0])
else:
self.fail("Expected Exception was not raised")
@mock.patch.object(connection, "get")
def test_get_all_should_do_multi_requests_when_response_paginated(self, mock_get):
uri_list = ["/rest/testuri?start=0&count=-1",
"/rest/testuri?start=3&count=3",
"/rest/testuri?start=6&count=3"]
results = [{"nextPageUri": uri_list[1], "members": [{"id": "1"}, {"id": "2"}, {"id": "3"}]},
{"nextPageUri": uri_list[2], "members": [{"id": "4"}, {"id": "5"}, {"id": "6"}]},
{"nextPageUri": None, "members": [{"id": "7"}, {"id": "8"}]}]
mock_get.side_effect = results
self.resource_client.get_all()
expected_calls = [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])]
self.assertEqual(mock_get.call_args_list, expected_calls)
@mock.patch.object(connection, "get")
def test_get_all_with_count_should_do_multi_requests_when_response_paginated(self, mock_get):
uri_list = ["/rest/testuri?start=0&count=15",
"/rest/testuri?start=3&count=3",
"/rest/testuri?start=6&count=3"]
results = [{"nextPageUri": uri_list[1], "members": [{"id": "1"}, {"id": "2"}, {"id": "3"}]},
{"nextPageUri": uri_list[2], "members": [{"id": "4"}, {"id": "5"}, {"id": "6"}]},
{'nextPageUri': None, "members": [{"id": "7"}, {"id": "8"}]}]
mock_get.side_effect = results
self.resource_client.get_all(count=15)
expected_calls = [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])]
self.assertEqual(mock_get.call_args_list, expected_calls)
@mock.patch.object(connection, "get")
def test_get_all_should_return_all_items_when_response_paginated(self, mock_get):
uri_list = ["/rest/testuri?start=0&count=-1",
"/rest/testuri?start=3&count=3",
"/rest/testuri?start=6&count=1"]
results = [{"nextPageUri": uri_list[1], "members": [{"id": "1"}, {"id": "2"}, {"id": "3"}]},
{"nextPageUri": uri_list[2], "members": [{"id": "4"}, {"id": "5"}, {"id": "6"}]},
{"nextPageUri": None, "members": [{"id": "7"}]}]
mock_get.side_effect = results
result = self.resource_client.get_all()
expected_items = [{'id': '1'}, {'id': '2'}, {'id': '3'}, {'id': '4'}, {'id': '5'}, {'id': '6'}, {'id': '7'}]
self.assertSequenceEqual(result, expected_items)
@mock.patch.object(connection, 'get')
def test_get_all_should_limit_results_to_requested_count_when_response_is_paginated(self, mock_get):
uri_list = ['/rest/testuri?start=0&count=15',
'/rest/testuri?start=3&count=3',
'/rest/testuri?start=6&count=1']
results = [{"nextPageUri": uri_list[1], "members": [{"id": "1"}, {"id": "2"}, {"id": "3"}]},
{"nextPageUri": uri_list[2], "members": [{"id": "4"}, {"id": "5"}, {"id": "6"}]},
{"nextPageUri": None, "members": [{"id": "7"}]}]
mock_get.side_effect = results
result = self.resource_client.get_all(count=15)
expected_items = [{"id": "1"}, {"id": "2"}, {"id": "3"}, {"id": "4"}, {"id": "5"}, {"id": "6"}, {"id": "7"}]
self.assertSequenceEqual(result, expected_items)
@mock.patch.object(connection, "get")
def test_get_all_should_stop_requests_when_requested_count_reached(self, mock_get):
"""
        The user requests a maximum number of results, but OneView still returns a nextPageUri for
        pagination purposes; the client should stop issuing requests once the requested count is reached.
"""
uri_list = ["/rest/testuri?start=0&count=3",
"/rest/testuri?start=3&count=3",
"/rest/testuri?start=6&count=3"]
results = [{"nextPageUri": uri_list[1], "members": [{"id": "1"}, {"id": "2"}, {"id": "3"}]},
{"nextPageUri": uri_list[2], "members": [{"id": "4"}, {"id": "5"}, {"id": "6"}]},
{"nextPageUri": None, "members": [{"id": "7"}, {"id": "8"}]}]
mock_get.side_effect = results
self.resource_client.get_all(count=3)
mock_get.assert_called_once_with(uri_list[0])
@mock.patch.object(connection, "get")
def test_get_all_should_stop_requests_when_next_page_is_equal_to_current_page(self, mock_get):
uri = "/rest/testuri?start=0&count=-1"
members = [{"id": "1"}, {"id": "2"}, {"id": "3"}]
mock_get.return_value = {
"nextPageUri": uri,
"members": members,
"uri": uri
}
result = self.resource_client.get_all()
self.assertSequenceEqual(result, members)
mock_get.assert_called_once_with(uri)
@mock.patch.object(connection, "get")
def test_get_all_should_return_empty_list_when_response_has_no_items(self, mock_get):
mock_get.return_value = {"nextPageUri": None, "members": []}
result = self.resource_client.get_all()
self.assertEqual(result, [])
@mock.patch.object(connection, "get")
def test_get_all_should_return_empty_list_when_no_members(self, mock_get):
mock_get.return_value = {"nextPageUri": None, "members": None}
result = self.resource_client.get_all()
self.assertEqual(result, [])
@mock.patch.object(ResourceHelper, "do_get")
def test_refresh(self, mock_do_get):
updated_data = {"resource_name": "updated name"}
mock_do_get.return_value = updated_data
self.resource_client.refresh()
self.assertEqual(self.resource_client.data, updated_data)
@mock.patch.object(connection, "post")
def test_create_uri(self, mock_post):
dict_to_create = {"resource_name": "a name"}
mock_post.return_value = {}, {}
expected_dict = {"resource_name": "a name", "type": self.TYPE_V300}
self.resource_client.create(dict_to_create, timeout=-1)
mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None)
@mock.patch.object(connection, "post")
def test_create_with_api_version_200(self, mock_post):
dict_to_create = {"resource_name": "a name"}
mock_post.return_value = {}, {}
self.connection._apiVersion = 200
self.resource_client._merge_default_values()
expected_dict = {"resource_name": "a name", "type": self.TYPE_V200}
self.resource_client.create(dict_to_create, timeout=-1)
mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None)
@mock.patch.object(connection, "post")
def test_create_with_default_api_version_300(self, mock_post):
dict_to_create = {"resource_name": "a name"}
mock_post.return_value = {}, {}
expected_dict = {"resource_name": "a name", "type": self.TYPE_V300}
self.resource_client.create(dict_to_create, timeout=-1)
mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None)
@mock.patch.object(connection, "post")
def test_create_should_not_override_resource_properties(self, mock_post):
dict_to_create = {"resource_name": "a name", "type": "anotherType"}
mock_post.return_value = {}, {}
expected = {"resource_name": "a name", "type": "anotherType"}
self.resource_client.create(dict_to_create)
mock_post.assert_called_once_with(self.URI, expected, custom_headers=None)
@mock.patch.object(connection, "post")
def test_create_without_default_values(self, mock_post):
dict_to_create = {"resource_name": "a name"}
mock_post.return_value = {}, {}
resource_client = ResourceClient(self.connection, self.URI)
resource_client.create(dict_to_create, timeout=-1)
mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None)
@mock.patch.object(connection, "post")
def test_create_with_custom_headers(self, mock_post):
dict_to_create = {"resource_name": "a name"}
mock_post.return_value = {}, {}
self.resource_client.create(dict_to_create, custom_headers=self.custom_headers)
mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={"Accept-Language": "en_US"})
@mock.patch.object(connection, "post")
def test_create_should_return_new_resource_instance(self, mock_post):
mock_post.return_value = {}, {}
new_instance = self.resource_client.create({})
self.assertNotEqual(self.resource_client, new_instance)
@mock.patch.object(connection, "post")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_wait_for_activity_on_create(self, mock_wait4task, mock_post):
mock_post.return_value = self.task, {}
mock_wait4task.return_value = self.task
self.resource_client.create({"test": "test"}, timeout=60)
mock_wait4task.assert_called_once_with(self.task, 60)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "delete")
def test_delete_should_return_true(self, mock_delete, mock_ensure_resource):
mock_delete.return_value = None, self.response_body
self.resource_client.data = {"uri": "/rest/testuri"}
result = self.resource_client.delete()
self.assertTrue(result)
@mock.patch.object(connection, 'delete')
def test_helper_delete_all_should_return_true(self, mock_delete):
mock_delete.return_value = None, self.response_body
filter = "name='Exchange Server'"
result = self.resource_helper.delete_all(filter=filter, force=True, timeout=-1)
self.assertTrue(result)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "delete")
@mock.patch.object(TaskMonitor, "wait_for_task")
    def test_delete_with_force(self, mock_wait4task, mock_delete, mock_ensure_resource):
mock_delete.return_value = self.task, self.response_body
mock_wait4task.return_value = self.task
self.resource_client.data = {"uri": "/rest/testuri"}
self.resource_client.delete(force=True)
mock_delete.assert_called_once_with("/rest/testuri?force=True", custom_headers=None)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "delete")
@mock.patch.object(TaskMonitor, "wait_for_task")
    def test_delete_with_custom_headers(self, mock_wait4task, mock_delete, mock_ensure_resource):
mock_delete.return_value = self.task, self.response_body
mock_wait4task.return_value = self.task
self.resource_client.data = {"uri": "/rest/testuri"}
self.resource_client.delete(custom_headers=self.custom_headers)
mock_delete.assert_called_once_with(mock.ANY, custom_headers={"Accept-Language": "en_US"})
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "put")
def test_update_with_uri_called_once(self, mock_put, mock_ensure_resource):
uri = "/rest/testuri"
dict_to_update = {"name": "test", "type": "typeV300"}
self.resource_client.data = {'uri': uri}
expected = {"name": "test", "type": "typeV300", "uri": uri}
mock_put.return_value = None, self.response_body
self.resource_client.update(dict_to_update)
self.assertEqual(self.response_body, self.resource_client.data)
mock_put.assert_called_once_with(uri, expected, custom_headers=None)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "put")
def test_update_with_custom_headers(self, mock_put, mock_ensure_resource):
dict_to_update = {"name": "test"}
mock_put.return_value = None, self.response_body
self.resource_client.update(dict_to_update, custom_headers=self.custom_headers)
mock_put.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={"Accept-Language": "en_US"})
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "put")
    def test_update_with_force(self, mock_put, mock_ensure_resource):
dict_to_update = {"name": "test"}
uri = "/rest/testuri"
expected = {"name": "test", "uri": uri, "type": "typeV300"}
mock_put.return_value = None, self.response_body
self.resource_client.update(dict_to_update)
expected_uri = "/rest/testuri"
mock_put.assert_called_once_with(expected_uri, expected, custom_headers=None)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "put")
def test_update_with_default_api_version_300(self, mock_put, mock_ensure_resource):
dict_to_update = {"name": "test"}
uri = "/rest/testuri"
mock_put.return_value = None, self.response_body
expected_dict = {"name": "test", "type": self.TYPE_V300, "uri": uri}
self.resource_client._merge_default_values()
self.resource_client.update(dict_to_update)
mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "put")
def test_update_should_not_override_resource_properties(self, mock_put, mock_ensure_resource):
dict_to_update = {"name": "test", "type": "anotherType"}
uri = "/rest/testuri"
mock_put.return_value = None, self.response_body
expected = {"name": "test", "type": "anotherType", "uri": uri}
self.resource_client.update(dict_to_update)
mock_put.assert_called_once_with(uri, expected, custom_headers=None)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "put")
def test_update_without_default_values(self, mock_put, mock_ensure_resource):
uri = "/rest/testuri"
dict_to_update = {"name": "test"}
expected = {"name": "test", "uri": uri, "type": "typeV300"}
mock_put.return_value = None, self.response_body
self.resource_client.update(dict_to_update)
mock_put.assert_called_once_with(uri, expected, custom_headers=None)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "put")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_update_uri(self, mock_wait4task, mock_update, mock_ensure_resource):
uri = "/rest/testuri"
dict_to_update = {"resource_data": "resource_data", "uri": uri}
expected = {"resource_data": "resource_data", "uri": uri, "type": "typeV300"}
mock_update.return_value = self.task, self.response_body
mock_wait4task.return_value = self.task
self.resource_client.update(dict_to_update, False)
self.assertEqual(self.task, self.resource_client.data)
mock_update.assert_called_once_with(uri, expected, custom_headers=None)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "put")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_update_return_entity(self, mock_wait4task, mock_put, mock_ensure_resource):
uri = "/rest/testuri"
dict_to_update = {"resource_name": "a name", "uri": uri}
mock_put.return_value = self.task, {}
mock_wait4task.return_value = dict_to_update
self.resource_client.update(dict_to_update, timeout=-1)
self.assertEqual(self.resource_client.data, dict_to_update)
@mock.patch.object(Resource, "get_by")
def test_get_by_name_with_result(self, mock_get_by):
self.resource_client.get_by_name("Resource Name,")
mock_get_by.assert_called_once_with("name", "Resource Name,")
@mock.patch.object(Resource, "get_by")
def test_get_by_name_without_result(self, mock_get_by):
mock_get_by.return_value = []
response = self.resource_client.get_by_name("Resource Name,")
self.assertIsNone(response)
mock_get_by.assert_called_once_with("name", "Resource Name,")
@mock.patch.object(connection, "get")
def test_get_by_uri(self, mock_get):
self.resource_client.get_by_uri("/rest/testuri")
mock_get.assert_called_once_with('/rest/testuri')
@mock.patch.object(connection, "get")
def test_get_by_id_with_result(self, mock_get):
self.resource_client.get_by_id("123")
mock_get.assert_called_once_with("/rest/testuri/123")
@mock.patch.object(connection, "get")
def test_get_by_id_without_result(self, mock_get):
mock_get.return_value = []
response = self.resource_client.get_by_id("123")
self.assertIsNone(response)
mock_get.assert_called_once_with("/rest/testuri/123")
@mock.patch.object(connection, "get")
def test_get_collection_uri(self, mock_get):
mock_get.return_value = {"members": [{"key": "value"}, {"key": "value"}]}
self.resource_helper.get_collection()
mock_get.assert_called_once_with(self.URI)
@mock.patch.object(connection, "get")
def test_get_collection_with_filter(self, mock_get):
mock_get.return_value = {}
self.resource_helper.get_collection(filter="name=name")
mock_get.assert_called_once_with(self.URI + "?filter=name%3Dname")
@mock.patch.object(connection, "get")
def test_get_collection_with_path(self, mock_get):
mock_get.return_value = {}
self.resource_helper.get_collection(path="/test")
mock_get.assert_called_once_with(self.URI + "/test")
@mock.patch.object(connection, "get")
def test_get_collection_with_multiple_filters(self, mock_get):
mock_get.return_value = {}
self.resource_helper.get_collection(filter=["name1=one", "name2=two", "name=three"])
mock_get.assert_called_once_with(self.URI + "?filter=name1%3Done&filter=name2%3Dtwo&filter=name%3Dthree")
@mock.patch.object(connection, "get")
def test_get_collection_should_return_list(self, mock_get):
mock_get.return_value = {"members": [{"key": "value"}, {"key": "value"}]}
collection = self.resource_helper.get_collection()
self.assertEqual(len(collection), 2)
def test_build_uri_with_id_should_work(self):
input = "09USE7335NW35"
expected_output = "/rest/testuri/09USE7335NW35"
result = self.resource_client._helper.build_uri(input)
self.assertEqual(expected_output, result)
def test_build_uri_with_uri_should_work(self):
input = "/rest/testuri/09USE7335NW3"
expected_output = "/rest/testuri/09USE7335NW3"
result = self.resource_client._helper.build_uri(input)
self.assertEqual(expected_output, result)
def test_build_uri_with_none_should_raise_exception(self):
try:
self.resource_client._helper.build_uri(None)
except ValueError as exception:
self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0])
else:
self.fail("Expected Exception was not raised")
def test_build_uri_with_empty_str_should_raise_exception(self):
try:
self.resource_client._helper.build_uri('')
except ValueError as exception:
self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0])
else:
self.fail("Expected Exception was not raised")
def test_build_uri_with_different_resource_uri_should_raise_exception(self):
try:
self.resource_client._helper.build_uri(
"/rest/test/another/resource/uri/09USE7335NW3")
except exceptions.HPOneViewUnknownType as exception:
self.assertEqual(UNRECOGNIZED_URI, exception.args[0])
else:
self.fail("Expected Exception was not raised")
def test_build_uri_with_incomplete_uri_should_raise_exception(self):
try:
self.resource_client._helper.build_uri("/rest/")
except exceptions.HPOneViewUnknownType as exception:
self.assertEqual(UNRECOGNIZED_URI, exception.args[0])
else:
self.fail("Expected Exception was not raised")
def test_build_subresource_uri(self):
options = [
dict(
resource="1",
subresource="2",
path="sub",
uri="/rest/testuri/1/sub/2"),
dict(
resource="/rest/testuri/3",
subresource="4",
path="sub",
uri="/rest/testuri/3/sub/4"),
dict(
resource="5",
subresource="/rest/testuri/5/sub/6",
path="sub",
uri="/rest/testuri/5/sub/6"),
dict(
resource="/rest/testuri/7",
subresource="/rest/testuri/7/sub/8",
path="sub",
uri="/rest/testuri/7/sub/8"),
dict(
resource=None,
subresource="/rest/testuri/9/sub/10",
path="sub",
uri="/rest/testuri/9/sub/10"),
dict(
resource="/rest/testuri/11",
subresource="12",
path="/sub/",
uri="/rest/testuri/11/sub/12"),
dict(
resource="/rest/testuri/13",
subresource=None,
path="/sub/",
uri="/rest/testuri/13/sub"),
]
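        # Each option combines resource/subresource ids or URIs with the sub-resource
        # URI that build_subresource_uri is expected to produce.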
for option in options:
uri = self.resource_client._helper.build_subresource_uri(option["resource"], option["subresource"], option["path"])
self.assertEqual(uri, option["uri"])
def test_build_subresource_uri_with_subresourceid_and_without_resource_should_fail(self):
try:
self.resource_client._helper.build_subresource_uri(None, "123456", "sub-path")
except exceptions.HPOneViewValueError as exception:
self.assertEqual(RESOURCE_ID_OR_URI_REQUIRED, exception.args[0])
else:
self.fail("Expected Exception was not raised")
def test_merge_resources(self):
resource1 = {"name": "resource1", "type": "resource"}
resource2 = {"name": "resource2", "port": "1"}
expected_resource = {"name": "resource2", "type": "resource", "port": "1"}
merged_resource = merge_resources(resource1, resource2)
self.assertEqual(merged_resource, expected_resource)
def test_merge_default_values(self):
default_type = {"type": "type1"}
resource1 = {"name": "resource1"}
resource2 = {"name": "resource2"}
result_list = merge_default_values([resource1, resource2], default_type)
expected_list = [
{"name": "resource1", "type": "type1"},
{"name": "resource2", "type": "type1"}
]
self.assertEqual(result_list, expected_list)
def test_raise_unavailable_method_exception(self):
self.assertRaises(exceptions.HPOneViewUnavailableMethod,
unavailable_method)
class FakeResource(object):
def __init__(self, con):
self._connection = con
self._client = ResourceClient(con, "/rest/fake/resource")
def get_fake(self, uri):
return self._client.get(uri)
class ResourceClientTest(unittest.TestCase):
URI = "/rest/testuri"
TYPE_V200 = 'typeV200'
TYPE_V300 = 'typeV300'
DEFAULT_VALUES = {
'200': {'type': TYPE_V200},
'300': {'type': TYPE_V300}
}
def setUp(self):
super(ResourceClientTest, self).setUp()
self.host = '127.0.0.1'
self.connection = connection(self.host, 300)
self.resource_client = ResourceClient(self.connection, self.URI)
self.task = {"task": "task", "taskState": "Finished"}
self.response_body = {"body": "body"}
self.custom_headers = {'Accept-Language': 'en_US'}
@mock.patch.object(connection, 'get')
def test_get_all_called_once(self, mock_get):
filter = "'name'='OneViewSDK \"Test FC Network'"
sort = 'name:ascending'
query = "name NE 'WrongName'"
view = '"{view-name}"'
scope_uris = '/rest/scopes/cd237b60-09e2-45c4-829e-082e318a6d2a'
mock_get.return_value = {"members": [{"member": "member"}]}
result = self.resource_client.get_all(
1, 500, filter, query, sort, view, 'name,owner,modified', scope_uris=scope_uris)
uri = '{resource_uri}?start=1' \
'&count=500' \
'&filter=%27name%27%3D%27OneViewSDK%20%22Test%20FC%20Network%27' \
'&query=name%20NE%20%27WrongName%27' \
'&sort=name%3Aascending' \
'&view=%22%7Bview-name%7D%22' \
'&fields=name%2Cowner%2Cmodified' \
'&scopeUris=/rest/scopes/cd237b60-09e2-45c4-829e-082e318a6d2a'.format(resource_uri=self.URI)
self.assertEqual([{'member': 'member'}], result)
mock_get.assert_called_once_with(uri)
@mock.patch.object(connection, 'get')
def test_get_all_with_defaults(self, mock_get):
self.resource_client.get_all()
uri = "{resource_uri}?start=0&count=-1".format(resource_uri=self.URI)
mock_get.assert_called_once_with(uri)
@mock.patch.object(connection, 'get')
def test_get_all_with_custom_uri(self, mock_get):
self.resource_client.get_all(uri='/rest/testuri/12467836/subresources')
uri = "/rest/testuri/12467836/subresources?start=0&count=-1"
mock_get.assert_called_once_with(uri)
@mock.patch.object(connection, 'get')
def test_get_all_with_custom_uri_and_query_string(self, mock_get):
self.resource_client.get_all(uri='/rest/testuri/12467836/subresources?param=value')
uri = "/rest/testuri/12467836/subresources?param=value&start=0&count=-1"
mock_get.assert_called_once_with(uri)
@mock.patch.object(connection, 'get')
def test_get_all_with_different_resource_uri_should_fail(self, mock_get):
try:
self.resource_client.get_all(uri='/rest/other/resource/12467836/subresources')
except exceptions.HPOneViewUnknownType as e:
self.assertEqual(UNRECOGNIZED_URI, e.args[0])
else:
self.fail('Expected Exception was not raised')
@mock.patch.object(connection, 'get')
def test_get_all_should_do_multi_requests_when_response_paginated(self, mock_get):
uri_list = ['/rest/testuri?start=0&count=-1',
'/rest/testuri?start=3&count=3',
'/rest/testuri?start=6&count=3']
results = [{'nextPageUri': uri_list[1], 'members': [{'id': '1'}, {'id': '2'}, {'id': '3'}]},
{'nextPageUri': uri_list[2], 'members': [{'id': '4'}, {'id': '5'}, {'id': '6'}]},
{'nextPageUri': None, 'members': [{'id': '7'}, {'id': '8'}]}]
mock_get.side_effect = results
self.resource_client.get_all()
expected_calls = [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])]
self.assertEqual(mock_get.call_args_list, expected_calls)
@mock.patch.object(connection, 'get')
def test_get_all_with_count_should_do_multi_requests_when_response_paginated(self, mock_get):
uri_list = ['/rest/testuri?start=0&count=15',
'/rest/testuri?start=3&count=3',
'/rest/testuri?start=6&count=3']
results = [{'nextPageUri': uri_list[1], 'members': [{'id': '1'}, {'id': '2'}, {'id': '3'}]},
{'nextPageUri': uri_list[2], 'members': [{'id': '4'}, {'id': '5'}, {'id': '6'}]},
{'nextPageUri': None, 'members': [{'id': '7'}, {'id': '8'}]}]
mock_get.side_effect = results
self.resource_client.get_all(count=15)
expected_calls = [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])]
self.assertEqual(mock_get.call_args_list, expected_calls)
@mock.patch.object(connection, 'get')
def test_get_all_should_return_all_items_when_response_paginated(self, mock_get):
uri_list = ['/rest/testuri?start=0&count=-1',
'/rest/testuri?start=3&count=3',
'/rest/testuri?start=6&count=1']
results = [{'nextPageUri': uri_list[1], 'members': [{'id': '1'}, {'id': '2'}, {'id': '3'}]},
{'nextPageUri': uri_list[2], 'members': [{'id': '4'}, {'id': '5'}, {'id': '6'}]},
{'nextPageUri': None, 'members': [{'id': '7'}]}]
mock_get.side_effect = results
result = self.resource_client.get_all()
expected_items = [{'id': '1'}, {'id': '2'}, {'id': '3'}, {'id': '4'}, {'id': '5'}, {'id': '6'}, {'id': '7'}]
self.assertSequenceEqual(result, expected_items)
@mock.patch.object(connection, 'get')
def test_get_all_should_limit_results_to_requested_count_when_response_is_paginated(self, mock_get):
uri_list = ['/rest/testuri?start=0&count=15',
'/rest/testuri?start=3&count=3',
'/rest/testuri?start=6&count=1']
results = [{'nextPageUri': uri_list[1], 'members': [{'id': '1'}, {'id': '2'}, {'id': '3'}]},
{'nextPageUri': uri_list[2], 'members': [{'id': '4'}, {'id': '5'}, {'id': '6'}]},
{'nextPageUri': None, 'members': [{'id': '7'}]}]
mock_get.side_effect = results
result = self.resource_client.get_all(count=15)
expected_items = [{'id': '1'}, {'id': '2'}, {'id': '3'}, {'id': '4'}, {'id': '5'}, {'id': '6'}, {'id': '7'}]
self.assertSequenceEqual(result, expected_items)
@mock.patch.object(connection, 'get')
def test_get_all_should_stop_requests_when_requested_count_reached(self, mock_get):
"""
        The user requests a maximum number of results, but OneView still returns a nextPageUri for
        pagination purposes; the client should stop issuing requests once the requested count is reached.
"""
uri_list = ['/rest/testuri?start=0&count=3',
'/rest/testuri?start=3&count=3',
'/rest/testuri?start=6&count=3']
results = [{'nextPageUri': uri_list[1], 'members': [{'id': '1'}, {'id': '2'}, {'id': '3'}]},
{'nextPageUri': uri_list[2], 'members': [{'id': '4'}, {'id': '5'}, {'id': '6'}]},
{'nextPageUri': None, 'members': [{'id': '7'}, {'id': '8'}]}]
mock_get.side_effect = results
self.resource_client.get_all(count=3)
mock_get.assert_called_once_with(uri_list[0])
@mock.patch.object(connection, 'get')
def test_get_all_should_stop_requests_when_next_page_is_equal_to_current_page(self, mock_get):
uri = '/rest/testuri?start=0&count=-1'
members = [{'id': '1'}, {'id': '2'}, {'id': '3'}]
mock_get.return_value = {
'nextPageUri': uri,
'members': members,
'uri': uri
}
result = self.resource_client.get_all()
self.assertSequenceEqual(result, members)
mock_get.assert_called_once_with(uri)
@mock.patch.object(connection, 'get')
def test_get_all_should_return_empty_list_when_response_has_no_items(self, mock_get):
mock_get.return_value = {'nextPageUri': None, 'members': []}
result = self.resource_client.get_all()
self.assertEqual(result, [])
@mock.patch.object(connection, 'get')
def test_get_all_should_return_empty_list_when_no_members(self, mock_get):
mock_get.return_value = {'nextPageUri': None, 'members': None}
result = self.resource_client.get_all()
self.assertEqual(result, [])
@mock.patch.object(connection, 'delete')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_delete_all_called_once(self, mock_wait4task, mock_delete):
mock_delete.return_value = self.task, self.response_body
mock_wait4task.return_value = self.task
filter = "name='Exchange Server'"
uri = "/rest/testuri?filter=name%3D%27Exchange%20Server%27&force=True"
self.resource_client.delete_all(filter=filter, force=True, timeout=-1)
mock_delete.assert_called_once_with(uri)
@mock.patch.object(connection, 'delete')
def test_delete_all_should_return_true(self, mock_delete):
mock_delete.return_value = None, self.response_body
filter = "name='Exchange Server'"
result = self.resource_client.delete_all(filter=filter, force=True, timeout=-1)
self.assertTrue(result)
@mock.patch.object(connection, 'delete')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_delete_all_should_wait_for_task(self, mock_wait4task, mock_delete):
mock_delete.return_value = self.task, self.response_body
mock_wait4task.return_value = self.task
filter = "name='Exchange Server'"
delete_task = self.resource_client.delete_all(filter=filter, force=True, timeout=-1)
mock_wait4task.assert_called_with(self.task, timeout=-1)
self.assertEqual(self.task, delete_task)
@mock.patch.object(connection, 'delete')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_delete_by_id_called_once(self, mock_wait4task, mock_delete):
mock_delete.return_value = self.task, self.response_body
mock_wait4task.return_value = self.task
delete_task = self.resource_client.delete('1', force=True, timeout=-1)
self.assertEqual(self.task, delete_task)
mock_delete.assert_called_once_with(self.URI + "/1?force=True", custom_headers=None)
@mock.patch.object(connection, 'delete')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_delete_with_custom_headers(self, mock_wait4task, mock_delete):
mock_delete.return_value = self.task, self.response_body
mock_wait4task.return_value = self.task
self.resource_client.delete('1', custom_headers=self.custom_headers)
mock_delete.assert_called_once_with(mock.ANY, custom_headers={'Accept-Language': 'en_US'})
def test_delete_dict_invalid_uri(self):
dict_to_delete = {"task": "task",
"uri": ""}
try:
self.resource_client.delete(dict_to_delete, False, -1)
except exceptions.HPOneViewUnknownType as e:
self.assertEqual("Unknown object type", e.args[0])
else:
self.fail()
@mock.patch.object(connection, 'get')
def test_get_schema_uri(self, mock_get):
self.resource_client.get_schema()
mock_get.assert_called_once_with(self.URI + "/schema")
@mock.patch.object(connection, 'get')
def test_get_by_id_uri(self, mock_get):
self.resource_client.get('12345')
mock_get.assert_called_once_with(self.URI + "/12345")
@mock.patch.object(ResourceClient, 'get_by')
def test_get_by_name_with_result(self, mock_get_by):
mock_get_by.return_value = [{"name": "value"}]
response = self.resource_client.get_by_name('Resource Name,')
self.assertEqual(response, {"name": "value"})
mock_get_by.assert_called_once_with("name", 'Resource Name,')
@mock.patch.object(ResourceClient, 'get_by')
def test_get_by_name_without_result(self, mock_get_by):
mock_get_by.return_value = []
response = self.resource_client.get_by_name('Resource Name,')
self.assertIsNone(response)
mock_get_by.assert_called_once_with("name", 'Resource Name,')
@mock.patch.object(connection, 'get')
def test_get_collection_uri(self, mock_get):
mock_get.return_value = {"members": [{"key": "value"}, {"key": "value"}]}
self.resource_client.get_collection('12345')
mock_get.assert_called_once_with(self.URI + "/12345")
@mock.patch.object(connection, 'get')
def test_get_collection_with_filter(self, mock_get):
mock_get.return_value = {}
self.resource_client.get_collection('12345', 'name=name')
mock_get.assert_called_once_with(self.URI + "/12345?filter=name%3Dname")
@mock.patch.object(connection, 'get')
def test_get_collection_with_multiple_filters(self, mock_get):
mock_get.return_value = {}
self.resource_client.get_collection('12345', ['name1=one', 'name2=two', 'name=three'])
mock_get.assert_called_once_with(self.URI + "/12345?filter=name1%3Done&filter=name2%3Dtwo&filter=name%3Dthree")
@mock.patch.object(connection, 'get')
def test_get_collection_should_return_list(self, mock_get):
mock_get.return_value = {"members": [{"key": "value"}, {"key": "value"}]}
collection = self.resource_client.get_collection('12345')
self.assertEqual(len(collection), 2)
@mock.patch.object(ResourceClient, 'get_all')
def test_get_by_property(self, mock_get_all):
self.resource_client.get_by('name', 'MyFibreNetwork')
mock_get_all.assert_called_once_with(filter="\"name='MyFibreNetwork'\"", uri='/rest/testuri')
@mock.patch.object(ResourceClient, 'get_all')
def test_get_by_with_incorrect_result_autofix(self, mock_get_all):
mock_get_all.return_value = [{"name": "EXpected"},
{"name": "not expected"}]
response = self.resource_client.get_by('name', 'exPEcted')
self.assertEqual(response, [{"name": "EXpected"}])
mock_get_all.assert_called_once_with(filter="\"name='exPEcted'\"", uri='/rest/testuri')
@mock.patch.object(ResourceClient, 'get_all')
def test_get_by_with_incorrect_result_skip_autofix(self, mock_get_all):
mock_get_all.return_value = [{"name": "expected"},
{"name": "not expected"}]
response = self.resource_client.get_by('connection.name', 'expected')
self.assertEqual(response, [{'name': 'expected'}, {'name': 'not expected'}])
mock_get_all.assert_called_once_with(filter="\"connection.name='expected'\"", uri='/rest/testuri')
@mock.patch.object(ResourceClient, 'get_all')
def test_get_by_property_with_uri(self, mock_get_all):
self.resource_client.get_by('name', 'MyFibreNetwork', uri='/rest/testuri/5435534/sub')
mock_get_all.assert_called_once_with(filter="\"name='MyFibreNetwork'\"", uri='/rest/testuri/5435534/sub')
@mock.patch.object(ResourceClient, 'get_all')
def test_get_by_property_with__invalid_uri(self, mock_get_all):
try:
self.resource_client.get_by('name', 'MyFibreNetwork', uri='/rest/other/5435534/sub')
except exceptions.HPOneViewUnknownType as e:
self.assertEqual('Unrecognized URI for this resource', e.args[0])
else:
self.fail()
@mock.patch.object(connection, 'put')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_update_with_zero_body_called_once(self, mock_wait4task, mock_update):
mock_update.return_value = self.task, self.task
mock_wait4task.return_value = self.task
self.resource_client.update_with_zero_body('/rest/enclosures/09USE133E5H4/configuration',
timeout=-1)
mock_update.assert_called_once_with(
"/rest/enclosures/09USE133E5H4/configuration", None, custom_headers=None)
@mock.patch.object(connection, 'put')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_update_with_zero_body_and_custom_headers(self, mock_wait4task, mock_update):
mock_update.return_value = self.task, self.task
mock_wait4task.return_value = self.task
self.resource_client.update_with_zero_body('1', custom_headers=self.custom_headers)
mock_update.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'})
@mock.patch.object(connection, 'put')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_update_with_zero_body_return_entity(self, mock_wait4task, mock_put):
response_body = {"resource_name": "name"}
mock_put.return_value = self.task, self.task
mock_wait4task.return_value = response_body
result = self.resource_client.update_with_zero_body(
'/rest/enclosures/09USE133E5H4/configuration', timeout=-1)
self.assertEqual(result, response_body)
@mock.patch.object(connection, 'put')
def test_update_with_zero_body_without_task(self, mock_put):
mock_put.return_value = None, self.response_body
result = self.resource_client.update_with_zero_body(
'/rest/enclosures/09USE133E5H4/configuration', timeout=-1)
self.assertEqual(result, self.response_body)
@mock.patch.object(connection, 'put')
def test_update_with_uri_called_once(self, mock_put):
dict_to_update = {"name": "test"}
uri = "/rest/resource/test"
mock_put.return_value = None, self.response_body
response = self.resource_client.update(dict_to_update, uri=uri)
self.assertEqual(self.response_body, response)
mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None)
@mock.patch.object(connection, 'put')
def test_update_with_custom_headers(self, mock_put):
dict_to_update = {"name": "test"}
mock_put.return_value = None, self.response_body
self.resource_client.update(dict_to_update, uri="/path", custom_headers=self.custom_headers)
mock_put.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'})
@mock.patch.object(connection, 'put')
def test_update_with_force(self, mock_put):
dict_to_update = {"name": "test"}
uri = "/rest/resource/test"
mock_put.return_value = None, self.response_body
self.resource_client.update(dict_to_update, uri=uri, force=True)
expected_uri = "/rest/resource/test?force=True"
mock_put.assert_called_once_with(expected_uri, dict_to_update, custom_headers=None)
@mock.patch.object(connection, 'put')
def test_update_with_api_version_200(self, mock_put):
dict_to_update = {"name": "test"}
uri = "/rest/resource/test"
mock_put.return_value = None, self.response_body
self.connection._apiVersion = 200
expected_dict = {"name": "test", "type": self.TYPE_V200}
self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES)
mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None)
@mock.patch.object(connection, 'put')
def test_update_with_default_api_version_300(self, mock_put):
dict_to_update = {"name": "test"}
uri = "/rest/resource/test"
mock_put.return_value = None, self.response_body
expected_dict = {"name": "test", "type": self.TYPE_V300}
self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES)
mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None)
@mock.patch.object(connection, 'put')
def test_update_should_not_override_resource_properties(self, mock_put):
dict_to_update = {"name": "test", "type": "anotherType"}
uri = "/rest/resource/test"
mock_put.return_value = None, self.response_body
self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES)
mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None)
@mock.patch.object(connection, 'put')
def test_update_without_default_values(self, mock_put):
dict_to_update = {"name": "test"}
uri = "/rest/resource/test"
mock_put.return_value = None, self.response_body
resource_client = ResourceClient(self.connection, self.URI)
resource_client.update(dict_to_update, uri=uri)
mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None)
@mock.patch.object(connection, 'put')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_update_uri(self, mock_wait4task, mock_update):
dict_to_update = {"resource_data": "resource_data",
"uri": "a_uri"}
mock_update.return_value = self.task, self.response_body
mock_wait4task.return_value = self.task
update_task = self.resource_client.update(dict_to_update, False)
self.assertEqual(self.task, update_task)
mock_update.assert_called_once_with("a_uri", dict_to_update, custom_headers=None)
@mock.patch.object(connection, 'put')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_update_return_entity(self, mock_wait4task, mock_put):
dict_to_update = {
"resource_name": "a name",
"uri": "a_uri",
}
mock_put.return_value = self.task, {}
mock_wait4task.return_value = dict_to_update
result = self.resource_client.update(dict_to_update, timeout=-1)
self.assertEqual(result, dict_to_update)
@mock.patch.object(connection, 'post')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_create_with_zero_body_called_once(self, mock_wait4task, mock_post):
mock_post.return_value = self.task, self.task
mock_wait4task.return_value = self.task
self.resource_client.create_with_zero_body('/rest/enclosures/09USE133E5H4/configuration',
timeout=-1)
mock_post.assert_called_once_with(
"/rest/enclosures/09USE133E5H4/configuration", {}, custom_headers=None)
@mock.patch.object(connection, 'post')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_create_with_zero_body_called_once_without_uri(self, mock_wait4task, mock_post):
mock_post.return_value = self.task, self.task
mock_wait4task.return_value = self.task
self.resource_client.create_with_zero_body(timeout=-1)
mock_post.assert_called_once_with(
'/rest/testuri', {}, custom_headers=None)
@mock.patch.object(connection, 'post')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_create_with_zero_body_and_custom_headers(self, mock_wait4task, mock_post):
mock_post.return_value = self.task, self.task
mock_wait4task.return_value = self.task
self.resource_client.create_with_zero_body('1', custom_headers=self.custom_headers)
mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'})
@mock.patch.object(connection, 'post')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_create_with_zero_body_return_entity(self, mock_wait4task, mock_post):
response_body = {"resource_name": "name"}
mock_post.return_value = self.task, self.task
mock_wait4task.return_value = response_body
result = self.resource_client.create_with_zero_body(
'/rest/enclosures/09USE133E5H4/configuration', timeout=-1)
self.assertEqual(result, response_body)
@mock.patch.object(connection, 'post')
def test_create_with_zero_body_without_task(self, mock_post):
mock_post.return_value = None, self.response_body
result = self.resource_client.create_with_zero_body(
'/rest/enclosures/09USE133E5H4/configuration', timeout=-1)
self.assertEqual(result, self.response_body)
@mock.patch.object(connection, 'post')
def test_create_uri(self, mock_post):
dict_to_create = {"resource_name": "a name"}
mock_post.return_value = {}, {}
self.resource_client.create(dict_to_create, timeout=-1)
mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None)
@mock.patch.object(connection, 'post')
def test_create_uri_with_force(self, mock_post):
dict_to_create = {"resource_name": "a name", "force": "yes"}
mock_post.return_value = {}, {}
self.resource_client.create(dict_to_create, timeout=-1)
expected_uri = "/rest/testuri"
mock_post.assert_called_once_with(expected_uri, dict_to_create, custom_headers=None)
@mock.patch.object(connection, 'post')
def test_create_with_api_version_200(self, mock_post):
dict_to_create = {"resource_name": "a name"}
mock_post.return_value = {}, {}
self.connection._apiVersion = 200
expected_dict = {"resource_name": "a name", "type": self.TYPE_V200}
self.resource_client.create(dict_to_create, timeout=-1, default_values=self.DEFAULT_VALUES)
mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None)
@mock.patch.object(connection, 'post')
def test_create_with_default_api_version_300(self, mock_post):
dict_to_create = {"resource_name": "a name"}
mock_post.return_value = {}, {}
expected_dict = {"resource_name": "a name", "type": self.TYPE_V300}
self.resource_client.create(dict_to_create, timeout=-1, default_values=self.DEFAULT_VALUES)
mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None)
@mock.patch.object(connection, 'post')
def test_create_should_not_override_resource_properties(self, mock_post):
dict_to_create = {"resource_name": "a name", "type": "anotherType"}
mock_post.return_value = {}, {}
self.resource_client.create(dict_to_create, default_values=self.DEFAULT_VALUES)
mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None)
@mock.patch.object(connection, 'post')
def test_create_without_default_values(self, mock_post):
dict_to_create = {"resource_name": "a name"}
mock_post.return_value = {}, {}
resource_client = ResourceClient(self.connection, self.URI)
resource_client.create(dict_to_create, timeout=-1)
mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None)
@mock.patch.object(connection, 'post')
def test_create_with_custom_headers(self, mock_post):
dict_to_create = {"resource_name": "a name"}
mock_post.return_value = {}, {}
self.resource_client.create(dict_to_create, custom_headers=self.custom_headers)
mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'})
@mock.patch.object(connection, 'post')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_create_return_entity(self, mock_wait4task, mock_post):
dict_to_create = {
"resource_name": "a name",
}
created_resource = {
"resource_id": "123",
"resource_name": "a name",
}
mock_post.return_value = self.task, {}
mock_wait4task.return_value = created_resource
result = self.resource_client.create(dict_to_create, -1)
self.assertEqual(result, created_resource)
@mock.patch.object(connection, 'post')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_wait_for_activity_on_create(self, mock_wait4task, mock_post):
mock_post.return_value = self.task, {}
mock_wait4task.return_value = self.task
self.resource_client.create({"test": "test"}, timeout=60)
mock_wait4task.assert_called_once_with(self.task, 60)
@mock.patch.object(connection, 'patch')
def test_patch_request_when_id_is_provided_v200(self, mock_patch):
request_body = [{
'op': 'replace',
'path': '/name',
'value': 'new_name',
}]
mock_patch.return_value = {}, {}
self.connection._apiVersion = 200
self.resource_client.patch(
'123a53cz', 'replace', '/name', 'new_name', 70)
mock_patch.assert_called_once_with(
'/rest/testuri/123a53cz', request_body, custom_headers={})
@mock.patch.object(connection, 'patch')
def test_patch_request_when_id_is_provided_v300(self, mock_patch):
request_body = [{
'op': 'replace',
'path': '/name',
'value': 'new_name',
}]
mock_patch.return_value = {}, {}
resource_client = ResourceClient(self.connection, self.URI)
resource_client.patch(
'123a53cz', 'replace', '/name', 'new_name', 70)
mock_patch.assert_called_once_with(
'/rest/testuri/123a53cz', request_body, custom_headers={'Content-Type': 'application/json-patch+json'})
@mock.patch.object(connection, 'patch')
def test_patch_request_when_uri_is_provided(self, mock_patch):
request_body = [{
'op': 'replace',
'path': '/name',
'value': 'new_name',
}]
mock_patch.return_value = {}, {}
self.resource_client.patch(
'/rest/testuri/123a53cz', 'replace', '/name', 'new_name', 60)
mock_patch.assert_called_once_with(
'/rest/testuri/123a53cz', request_body, custom_headers={'Content-Type': 'application/json-patch+json'})
@mock.patch.object(connection, 'patch')
def test_patch_with_custom_headers_v200(self, mock_patch):
mock_patch.return_value = {}, {}
self.connection._apiVersion = 200
self.resource_client.patch('/rest/testuri/123', 'operation', '/field', 'value',
custom_headers=self.custom_headers)
mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'})
@mock.patch.object(connection, 'patch')
def test_patch_with_custom_headers_v300(self, mock_patch):
mock_patch.return_value = {}, {}
resource_client = ResourceClient(self.connection, self.URI)
resource_client.patch('/rest/testuri/123', 'operation', '/field', 'value',
custom_headers=self.custom_headers)
mock_patch.assert_called_once_with(mock.ANY,
mock.ANY,
custom_headers={'Accept-Language': 'en_US',
'Content-Type': 'application/json-patch+json'})
@mock.patch.object(connection, 'patch')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_patch_return_entity(self, mock_wait4task, mock_patch):
entity = {"resource_id": "123a53cz"}
mock_patch.return_value = self.task, self.task
mock_wait4task.return_value = entity
result = self.resource_client.patch(
'123a53cz', 'replace', '/name', 'new_name', -1)
self.assertEqual(result, entity)
@mock.patch.object(connection, 'patch')
@mock.patch.object(TaskMonitor, 'get_completed_task')
def test_patch_request_custom_headers_with_content_type(self, mock_task, mock_patch):
dict_info = {"resource_name": "a name"}
mock_patch.return_value = {}, {}
headers = {'Content-Type': 'application/json',
'Extra': 'extra'}
self.connection._apiVersion = 300
resource_client = ResourceClient(self.connection, self.URI)
resource_client.patch_request('/rest/testuri/id', body=dict_info, custom_headers=headers)
mock_patch.assert_called_once_with('/rest/testuri/id', dict_info, custom_headers=headers)
@mock.patch.object(connection, 'patch')
@mock.patch.object(TaskMonitor, 'get_completed_task')
def test_patch_request_custom_headers(self, mock_task, mock_patch):
dict_info = {"resource_name": "a name"}
mock_patch.return_value = {}, {}
headers = {'Extra': 'extra'}
self.connection._apiVersion = 300
resource_client = ResourceClient(self.connection, self.URI)
resource_client.patch_request('/rest/testuri/id', body=dict_info, custom_headers=headers)
mock_patch.assert_called_once_with(
'/rest/testuri/id',
dict_info,
custom_headers={'Extra': 'extra',
'Content-Type': 'application/json-patch+json'})
@mock.patch.object(connection, 'patch')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_wait_for_activity_on_patch(self, mock_wait4task, mock_patch):
entity = {"resource_id": "123a53cz"}
mock_patch.return_value = self.task, self.task
mock_wait4task.return_value = entity
self.resource_client.patch(
'123a53cz', 'replace', '/name', 'new_name', -1)
mock_wait4task.assert_called_once_with(self.task, mock.ANY)
def test_delete_with_none(self):
try:
self.resource_client.delete(None)
except ValueError as e:
self.assertTrue("Resource" in e.args[0])
else:
self.fail()
@mock.patch.object(connection, 'delete')
def test_delete_with_dict_uri(self, mock_delete):
resource = {"uri": "uri"}
mock_delete.return_value = {}, {}
delete_result = self.resource_client.delete(resource)
self.assertTrue(delete_result)
mock_delete.assert_called_once_with("uri", custom_headers=None)
def test_delete_with_empty_dict(self):
try:
self.resource_client.delete({})
except ValueError as e:
self.assertTrue("Resource" in e.args[0])
else:
self.fail()
def test_get_with_none(self):
try:
self.resource_client.get(None)
except ValueError as e:
self.assertTrue("id" in e.args[0])
else:
self.fail()
def test_get_collection_with_none(self):
try:
self.resource_client.get_collection(None)
except ValueError as e:
self.assertTrue("id" in e.args[0])
else:
self.fail()
def test_create_with_none(self):
try:
self.resource_client.create(None)
except ValueError as e:
self.assertTrue("Resource" in e.args[0])
else:
self.fail()
def test_create_with_empty_dict(self):
try:
self.resource_client.create({})
except ValueError as e:
self.assertTrue("Resource" in e.args[0])
else:
self.fail()
def test_update_with_none(self):
try:
self.resource_client.update(None)
except ValueError as e:
self.assertTrue("Resource" in e.args[0])
else:
self.fail()
def test_update_with_empty_dict(self):
try:
self.resource_client.update({})
except ValueError as e:
self.assertTrue("Resource" in e.args[0])
else:
self.fail()
def test_get_by_with_name_none(self):
try:
self.resource_client.get_by(None, None)
except ValueError as e:
self.assertTrue("field" in e.args[0])
else:
self.fail()
@mock.patch.object(connection, 'get')
def test_get_with_uri_should_work(self, mock_get):
mock_get.return_value = {}
uri = self.URI + "/ad28cf21-8b15-4f92-bdcf-51cb2042db32"
self.resource_client.get(uri)
mock_get.assert_called_once_with(uri)
    def test_get_with_uri_with_incompatible_url_should_fail(self):
message = "Unrecognized URI for this resource"
uri = "/rest/interconnects/ad28cf21-8b15-4f92-bdcf-51cb2042db32"
try:
self.resource_client.get(uri)
except exceptions.HPOneViewUnknownType as exception:
self.assertEqual(message, exception.args[0])
else:
self.fail("Expected Exception was not raised")
    def test_get_with_uri_from_another_resource_with_incompatible_url_should_fail(self):
message = "Unrecognized URI for this resource"
uri = "/rest/interconnects/ad28cf21-8b15-4f92-bdcf-51cb2042db32"
fake_resource = FakeResource(None)
try:
fake_resource.get_fake(uri)
except exceptions.HPOneViewUnknownType as exception:
self.assertEqual(message, exception.args[0])
else:
self.fail("Expected Exception was not raised")
@mock.patch.object(connection, 'get')
def test_get_utilization_with_args(self, mock_get):
self.resource_client.get_utilization('09USE7335NW3', fields='AmbientTemperature,AveragePower,PeakPower',
filter='startDate=2016-05-30T03:29:42.361Z',
refresh=True, view='day')
expected_uri = '/rest/testuri/09USE7335NW3/utilization' \
'?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z' \
'&fields=AmbientTemperature%2CAveragePower%2CPeakPower' \
'&refresh=true' \
'&view=day'
mock_get.assert_called_once_with(expected_uri)
@mock.patch.object(connection, 'get')
def test_get_utilization_with_multiple_filters(self, mock_get):
self.resource_client.get_utilization(
'09USE7335NW3',
fields='AmbientTemperature,AveragePower,PeakPower',
filter=['startDate=2016-05-30T03:29:42.361Z',
'endDate=2016-05-31T03:29:42.361Z'],
refresh=True,
view='day')
expected_uri = '/rest/testuri/09USE7335NW3/utilization' \
'?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z' \
'&filter=endDate%3D2016-05-31T03%3A29%3A42.361Z' \
'&fields=AmbientTemperature%2CAveragePower%2CPeakPower' \
'&refresh=true' \
'&view=day'
mock_get.assert_called_once_with(expected_uri)
@mock.patch.object(connection, 'get')
def test_get_utilization_by_id_with_defaults(self, mock_get):
self.resource_client.get_utilization('09USE7335NW3')
expected_uri = '/rest/testuri/09USE7335NW3/utilization'
mock_get.assert_called_once_with(expected_uri)
@mock.patch.object(connection, 'get')
def test_get_utilization_by_uri_with_defaults(self, mock_get):
self.resource_client.get_utilization('/rest/testuri/09USE7335NW3')
expected_uri = '/rest/testuri/09USE7335NW3/utilization'
mock_get.assert_called_once_with(expected_uri)
def test_get_utilization_with_empty(self):
try:
self.resource_client.get_utilization('')
except ValueError as exception:
self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0])
else:
self.fail("Expected Exception was not raised")
def test_build_uri_with_id_should_work(self):
input = '09USE7335NW35'
expected_output = '/rest/testuri/09USE7335NW35'
result = self.resource_client.build_uri(input)
self.assertEqual(expected_output, result)
def test_build_uri_with_uri_should_work(self):
input = '/rest/testuri/09USE7335NW3'
expected_output = '/rest/testuri/09USE7335NW3'
result = self.resource_client.build_uri(input)
self.assertEqual(expected_output, result)
def test_build_uri_with_none_should_raise_exception(self):
try:
self.resource_client.build_uri(None)
except ValueError as exception:
self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0])
else:
self.fail("Expected Exception was not raised")
def test_build_uri_with_empty_str_should_raise_exception(self):
try:
self.resource_client.build_uri('')
except ValueError as exception:
self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0])
else:
self.fail("Expected Exception was not raised")
def test_build_uri_with_different_resource_uri_should_raise_exception(self):
try:
self.resource_client.build_uri(
'/rest/test/another/resource/uri/09USE7335NW3')
except exceptions.HPOneViewUnknownType as exception:
self.assertEqual(UNRECOGNIZED_URI, exception.args[0])
else:
self.fail("Expected Exception was not raised")
def test_build_uri_with_incomplete_uri_should_raise_exception(self):
try:
self.resource_client.build_uri('/rest/')
except exceptions.HPOneViewUnknownType as exception:
self.assertEqual(UNRECOGNIZED_URI, exception.args[0])
else:
self.fail("Expected Exception was not raised")
def test_build_subresource_uri(self):
options = [
dict(
resource='1',
subresource='2',
path='sub',
uri='/rest/testuri/1/sub/2'),
dict(
resource='/rest/testuri/3',
subresource='4',
path='sub',
uri='/rest/testuri/3/sub/4'),
dict(
resource='5',
subresource='/rest/testuri/5/sub/6',
path='sub',
uri='/rest/testuri/5/sub/6'),
dict(
resource='/rest/testuri/7',
subresource='/rest/testuri/7/sub/8',
path='sub',
uri='/rest/testuri/7/sub/8'),
dict(
resource=None,
subresource='/rest/testuri/9/sub/10',
path='sub',
uri='/rest/testuri/9/sub/10'),
dict(
resource='/rest/testuri/11',
subresource='12',
path='/sub/',
uri='/rest/testuri/11/sub/12'),
dict(
resource='/rest/testuri/13',
subresource=None,
path='/sub/',
uri='/rest/testuri/13/sub'),
]
for option in options:
uri = self.resource_client.build_subresource_uri(option['resource'], option['subresource'], option['path'])
self.assertEqual(uri, option['uri'])
def test_build_subresource_uri_with_subresourceid_and_without_resource_should_fail(self):
try:
self.resource_client.build_subresource_uri(None, "123456", 'sub-path')
except exceptions.HPOneViewValueError as exception:
self.assertEqual(RESOURCE_ID_OR_URI_REQUIRED, exception.args[0])
else:
self.fail("Expected Exception was not raised")
@mock.patch.object(connection, 'post')
@mock.patch.object(TaskMonitor, 'get_completed_task')
def test_create_report_should_do_post_request(self, mock_get_completed_task, mock_post):
task_with_output = self.task.copy()
task_with_output['taskOutput'] = []
mock_post.return_value = self.task, {}
mock_get_completed_task.return_value = task_with_output
self.resource_client.create_report("/rest/path/create-report")
mock_post.assert_called_once_with("/rest/path/create-report", {})
@mock.patch.object(connection, 'post')
@mock.patch.object(TaskMonitor, 'get_completed_task')
def test_create_report_should_wait_task_completion(self, mock_get_completed_task, mock_post):
task_with_output = self.task.copy()
task_with_output['taskOutput'] = []
mock_post.return_value = self.task, {}
mock_get_completed_task.return_value = task_with_output
self.resource_client.create_report("/rest/path/create-report", timeout=60)
mock_get_completed_task.assert_called_once_with(self.task, 60)
@mock.patch.object(connection, 'post')
@mock.patch.object(TaskMonitor, 'get_completed_task')
def test_create_report_should_return_output_list_when_results(self, mock_get_completed_task, mock_post):
task_output = [
{"type": "FCIssueResponseV2", "created": "2015-03-24T15: 32: 50.889Z"},
{"type": "FCIssueResponseV2", "created": "2015-03-13T14: 10: 50.322Z"}
]
task_with_output = self.task.copy()
task_with_output['taskOutput'] = task_output
mock_post.return_value = self.task, {}
mock_get_completed_task.return_value = task_with_output
result = self.resource_client.create_report("/rest/path/create-report")
self.assertEqual(result, task_output)
@mock.patch.object(connection, 'post')
@mock.patch.object(TaskMonitor, 'get_completed_task')
def test_create_report_should_return_empty_list_when_output_is_empty(self, mock_get_completed_task, mock_post):
task_with_output = self.task.copy()
task_with_output['taskOutput'] = []
mock_post.return_value = self.task, {}
mock_get_completed_task.return_value = task_with_output
result = self.resource_client.create_report("/rest/path/create-report")
self.assertEqual(result, [])
@mock.patch.object(connection, 'post')
def test_create_report_should_raise_exception_when_not_task(self, mock_post):
task_with_output = self.task.copy()
task_with_output['taskOutput'] = []
mock_post.return_value = None, {}
try:
self.resource_client.create_report("/rest/path/create-report")
except exceptions.HPOneViewException as exception:
self.assertEqual(RESOURCE_CLIENT_TASK_EXPECTED, exception.args[0])
else:
self.fail("Expected Exception was not raised")
@mock.patch.object(connection, 'post')
def test_create_when_the_resource_is_a_list(self, mock_post):
dict_to_create = [{"resource_name": "a name"}]
mock_post.return_value = {}, {}
resource_client = ResourceClient(self.connection, self.URI)
resource_client.create(dict_to_create, timeout=-1)
mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None)
def test_merge_api_default_values(self):
resource = {'name': 'resource1'}
default_values = {
'200': {"type": "EnclosureGroupV200"},
'300': {"type": "EnclosureGroupV300"}
}
expected = {'name': 'resource1', "type": "EnclosureGroupV300"}
resource_client = ResourceClient(self.connection, self.URI)
result = resource_client.merge_default_values(resource, default_values)
self.assertEqual(result, expected)
def test_should_not_merge_when_default_values_not_defined(self):
resource = {'name': 'resource1'}
default_values = {}
expected = {'name': 'resource1'}
resource_client = ResourceClient(self.connection, self.URI)
result = resource_client.merge_default_values(resource, default_values)
self.assertEqual(result, expected)
@mock.patch.object(connection, 'post_multipart_with_response_handling')
def test_upload_should_call_post_multipart(self, mock_post_multipart):
uri = '/rest/testuri/'
filepath = "test/SPPgen9snap6.2015_0405.81.iso"
mock_post_multipart.return_value = None, mock.Mock()
self.resource_client.upload(filepath, uri)
mock_post_multipart.assert_called_once_with(uri, filepath, 'SPPgen9snap6.2015_0405.81.iso')
@mock.patch.object(connection, 'post_multipart_with_response_handling')
def test_upload_should_call_post_multipart_with_resource_uri_when_not_uri_provided(self, mock_post_multipart):
filepath = "test/SPPgen9snap6.2015_0405.81.iso"
mock_post_multipart.return_value = None, mock.Mock()
self.resource_client.upload(filepath)
mock_post_multipart.assert_called_once_with('/rest/testuri', mock.ANY, mock.ANY)
@mock.patch.object(connection, 'post_multipart_with_response_handling')
@mock.patch.object(TaskMonitor, 'wait_for_task')
@mock.patch.object(connection, 'get')
def test_upload_should_wait_for_task_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart):
uri = '/rest/testuri/'
filepath = "test/SPPgen9snap6.2015_0405.81.iso"
mock_post_multipart.return_value = self.task, mock.Mock()
self.resource_client.upload(filepath, uri)
mock_wait4task.assert_called_once_with(self.task, -1)
@mock.patch.object(connection, 'post_multipart_with_response_handling')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_upload_should_not_wait_for_task_when_response_is_not_task(self, mock_wait4task, mock_post_multipart):
uri = '/rest/testuri/'
filepath = "test/SPPgen9snap6.2015_0405.81.iso"
mock_post_multipart.return_value = None, mock.Mock()
self.resource_client.upload(filepath, uri)
        mock_wait4task.assert_not_called()
@mock.patch.object(connection, 'post_multipart_with_response_handling')
@mock.patch.object(TaskMonitor, 'wait_for_task')
@mock.patch.object(connection, 'get')
def test_upload_should_return_associated_resource_when_response_is_task(self, mock_get, mock_wait4task,
mock_post_multipart):
        fake_associated_resource = mock.Mock()
uri = '/rest/testuri/'
filepath = "test/SPPgen9snap6.2015_0405.81.iso"
mock_post_multipart.return_value = self.task, mock.Mock()
        mock_wait4task.return_value = fake_associated_resource
result = self.resource_client.upload(filepath, uri)
        self.assertEqual(result, fake_associated_resource)
@mock.patch.object(connection, 'post_multipart_with_response_handling')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_upload_should_return_resource_when_response_is_not_task(self, mock_wait4task, mock_post_multipart):
fake_response_body = mock.Mock()
uri = '/rest/testuri/'
filepath = "test/SPPgen9snap6.2015_0405.81.iso"
mock_post_multipart.return_value = None, fake_response_body
result = self.resource_client.upload(filepath, uri)
self.assertEqual(result, fake_response_body)
@mock.patch.object(connection, 'download_to_stream')
@mock.patch(mock_builtin('open'))
def test_download_should_call_download_to_stream_with_given_uri(self, mock_open, mock_download_to_stream):
file_path = "~/archive.log"
uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315'
mock_open.return_value = io.StringIO()
self.resource_client.download(uri, file_path)
mock_download_to_stream.assert_called_once_with(mock.ANY, uri, custom_headers=mock.ANY)
@mock.patch.object(connection, 'download_to_stream')
@mock.patch(mock_builtin('open'))
def test_download_should_call_download_to_stream_with_open_file(self, mock_open, mock_download_to_stream):
file_path = "~/archive.log"
uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315'
fake_file = io.StringIO()
mock_open.return_value = fake_file
self.resource_client.download(uri, file_path)
mock_open.assert_called_once_with(file_path, 'wb')
mock_download_to_stream.assert_called_once_with(fake_file, uri, custom_headers=mock.ANY)
@mock.patch.object(connection, 'download_to_stream')
@mock.patch(mock_builtin('open'))
def test_download_should_return_true_when_success(self, mock_open, mock_download_to_stream):
file_path = "~/archive.log"
uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315'
mock_download_to_stream.return_value = True
mock_open.return_value = io.StringIO()
result = self.resource_client.download(uri, file_path)
self.assertTrue(result)
@mock.patch.object(connection, 'download_to_stream')
@mock.patch(mock_builtin('open'))
def test_download_should_return_false_when_error(self, mock_open, mock_download_to_stream):
file_path = "~/archive.log"
uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315'
mock_download_to_stream.return_value = False
mock_open.return_value = io.StringIO()
result = self.resource_client.download(uri, file_path)
self.assertFalse(result)
def test_transform_list_to_dict(self):
list = ['one', 'two', {'tree': 3}, 'four', 5]
dict_transformed = transform_list_to_dict(list=list)
self.assertEqual(dict_transformed,
{'5': True,
'four': True,
'one': True,
'tree': 3,
'two': True})
def test_extract_id_from_uri(self):
uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155'
id = '3518be0e-17c1-4189-8f81-83f3724f6155'
extracted_id = extract_id_from_uri(uri)
self.assertEqual(id, extracted_id)
def test_extract_id_from_uri_with_extra_slash(self):
uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155/'
extracted_id = extract_id_from_uri(uri)
self.assertEqual(extracted_id, '')
def test_extract_id_from_uri_passing_id(self):
uri = '3518be0e-17c1-4189-8f81-83f3724f6155'
extracted_id = extract_id_from_uri(uri)
self.assertEqual(extracted_id, '3518be0e-17c1-4189-8f81-83f3724f6155')
def test_extract_id_from_uri_unsupported(self):
# This example is not supported yet
uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155/otherthing'
extracted_id = extract_id_from_uri(uri)
self.assertEqual(extracted_id, 'otherthing')
| 1.953125 | 2 |
workalendar/usa/colorado.py | vanadium23/workalendar | 0 | 6114 | # -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from .core import UnitedStates
class Colorado(UnitedStates):
"""Colorado"""
# Colorado has only federal state holidays.
# NOTE: <NAME> is an optional holiday
| 1.765625 | 2 |
dataloaders/augmentation.py | thierrypin/gei-pool | 0 | 6115 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import random
import numpy as np
# Generic data augmentation
class Augmenter:
""" Generic data augmentation class with chained operations
"""
def __init__(self, ops=[]):
if not isinstance(ops, list):
print("Error: ops must be a list of functions")
quit()
self.ops = ops
def add(self, op):
self.ops.append(op)
def augment(self, img):
aug = img.copy()
for op in self.ops:
aug = op(aug)
return aug
def __call__(self, img):
return self.augment(img)
##########
# Images #
##########
def horizontal_flip(p=0.5):
def fc(img):
if random.random() < p:
return img[..., ::-1]
else:
return img
return fc
def vertical_flip(p=0.5):
def fc(img):
if random.random() < p:
return img[..., ::-1, :]
else:
return img
return fc
def gaussian_noise(p=0.5, mean=0, sigma=0.02):
def fc(img):
if random.random() < p:
gauss = np.random.normal(mean, sigma, img.shape).astype(np.float32)
return img + gauss
else:
return img
return fc
def black_vstripe(p=0.5, size=10):
def fc(img):
if random.random() < p:
j = int(random.random() * (img.shape[1]-size))
img[..., j:j+size] = 0
return img
else:
return img
return fc
def black_hstripe(p=0.5, size=10):
def fc(img):
if random.random() < p:
j = int(random.random() * (img.shape[0]-size))
img[..., j:j+size, :] = 0
return img
else:
return img
return fc
def default_augmenter(p=0.5, strip_size=3, mean=0, sigma=0.02):
"""Default data augmentation with horizontal flip, vertical flip, gaussian noise, black hstripe, and black vstripe.
Returns:
Augmenter object. Use as: aug.augment(img)
"""
print("Using default image augmenter")
return Augmenter([ horizontal_flip(p), gaussian_noise(p, mean, sigma), black_hstripe(p, size=strip_size), black_vstripe(p, size=strip_size) ])
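# Example usage (illustrative sketch; assumes a single-channel float image of
# shape (H, W), which matches the axis indexing used by the ops above):
def _example_augment():
    img = np.random.rand(64, 64).astype(np.float32)
    aug = default_augmenter(p=0.5, strip_size=3)
    out = aug(img)  # equivalent to aug.augment(img)
    assert out.shape == img.shape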
##########
# Videos #
##########
def horizontal_flip_vid(p=0.5):
def fc(vid):
if random.random() < p:
return vid[..., ::-1]
else:
return vid
return fc
def black_vstripe_vid(p=0.5, size=10):
def fc(batch):
if random.random() < p:
j = int(random.random() * (batch.shape[-1]-size))
batch[..., j:j+size] = 0
return batch
else:
return batch
return fc
def black_hstripe_vid(p=0.5, size=10):
def fc(batch):
if random.random() < p:
j = int(random.random() * (batch.shape[-2]-size))
batch[..., j:j+size, :] = 0
return batch
else:
return batch
return fc
def default_augmenter_vid(p=0.5, strip_size=3, mean=0, sigma=0.02):
"""Default data augmentation with horizontal flip, gaussian noise, black hstripe, and black vstripe.
Returns:
Augmenter object. Use as: aug.augment(img)
"""
return Augmenter([ horizontal_flip_vid(p), gaussian_noise(p, mean, sigma), black_hstripe_vid(p, size=strip_size), black_vstripe_vid(p, size=strip_size) ])
| 3.0625 | 3 |
cigeo/admin.py | CzechInvest/ciis | 1 | 6116 | <reponame>CzechInvest/ciis
from django.contrib import admin
from django.contrib.gis import geos
from leaflet.admin import LeafletGeoAdmin, LeafletGeoAdminMixin
from .models import Lau1
from .models import Nuts3
from .models import Airport
from .models import Road
from .models import PublicTransportStop
from .models import RailwayStation
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
import nested_admin
import uuid
import json
class AirportAdmin(LeafletGeoAdmin):
default_zoom = 7
default_lon = 1730000
default_lat = 6430000
#readonly_fields = ("code", "name",)
class RoadAdmin(LeafletGeoAdmin):
default_zoom = 7
default_lon = 1730000
default_lat = 6430000
#readonly_fields = ("code", "name",)
class RailwayStationAdmin(LeafletGeoAdmin):
default_zoom = 7
default_lon = 1730000
default_lat = 6430000
#readonly_fields = ("code", "name",)
class PublicTransportStopAdmin(LeafletGeoAdmin):
default_zoom = 7
default_lon = 1730000
default_lat = 6430000
#readonly_fields = ("code", "name",)
class LAU1Admin(LeafletGeoAdmin):
default_zoom = 7
default_lon = 1730000
default_lat = 6430000
#readonly_fields = ("code", "name",)
class NUTS3Admin(LeafletGeoAdmin):
default_zoom = 7
default_lon = 1730000
default_lat = 6430000
#readonly_fields = ("code", "name",)
class NUTS3AdminInline(LeafletGeoAdminMixin, admin.StackedInline):
model = Nuts3
class LAU1AdminInline(LeafletGeoAdminMixin, admin.StackedInline):
model = Lau1
class NUTS3Filter(admin.SimpleListFilter):
"""Filter for admin interface of NUTS3 regions (Kraje)
"""
title = _('NUTS3 regions')
parameter_name = 'nuts3#'
def lookups(self, request, model_admin):
nuts3 = Nuts3.objects.all()
return (
(obj.id, obj.name) for obj in nuts3
)
def queryset(self, request, queryset):
val = self.value()
if val:
nuts3 = Nuts3.objects.get(pk=val)
results = queryset.filter(
location__geometry__intersects=nuts3.geometry)
else:
results = queryset
return results
class ArealFieldAdmin(nested_admin.NestedModelAdmin):
geojson_attributes = []
def get_place(self, obj):
if hasattr(obj.location, "address") and \
obj.location.address is not None:
return obj.location.address.city
else:
return ", ".join(
[l.__str__() for l in Nuts3.objects.filter(
geometry__intersects=obj.location.geometry)])
def get_search_results(self, request, queryset, search_term):
"""Add NUTS3 (by name) search and area size search (using `<>` operator)
"""
result, use_distinct = super(
ArealFieldAdmin, self).get_search_results(
request, queryset, search_term)
if search_term:
if len(result) == 0 or len(result) == len(queryset):
result = self._search_lay1_nuts3_by_name(
queryset, search_term)
if len(result) == 0 or len(result) == len(queryset):
result = self._search_area(queryset, search_term)
return (result, use_distinct)
def _search_lay1_nuts3_by_name(self, queryset, search_term):
"""Search NUTS3 (kraje) and LAU1 (okresy) region according to name
"""
filtered = queryset.none()
for cls in (Lau1, Nuts3):
objs = cls.objects.filter(name__startswith=search_term)
for o in objs:
objects = queryset.filter(
location__geometry__intersects=o.geometry)
filtered |= objects
return filtered
def _search_area(self, queryset, search_term):
"""Search all features, where MIN < area.total < MAX
"""
filtered = queryset.none()
if search_term.find("<>") > -1:
area_min, area_max = [float(x) for x in search_term.split("<>")]
filtered = queryset.filter(
areal__area__total__gte=area_min,
areal__area__total__lte=area_max)
return filtered
def changelist_view(self, request, extra_context=None):
"""Adjust change list view
add GeoJSON encoded data for the queryset
"""
extra_context = extra_context or {}
response = super().changelist_view(
request, extra_context=extra_context,
)
if hasattr(response, "context_data"):
filtered_query_set = response.context_data["cl"].queryset
extra_context['objects_data'] = \
json.dumps(self.as_geojson(filtered_query_set))
response.context_data.update(extra_context)
return response
def as_geojson(self, queryset):
if self.geojson_attributes:
attributes = self.geojson_attributes
else:
attributes = []
data = {
"type": "FeatureCollection",
"features": []
}
for obj in queryset:
geom = None
if hasattr(obj, "location_set"):
multipoint = geos.MultiPoint(
[loc.address.coordinates for loc in obj.location_set.all()])
geom = multipoint.centroid
elif hasattr(obj, "location"):
geom = obj.location.geometry.centroid
elif hasattr(obj, "geom"):
geom = obj.geom
elif hasattr(obj, "address"):
geom = obj.address.coordinates
if geom:
title = None
if hasattr(obj, "title"):
title = obj.title
elif hasattr(obj, "name"):
title = obj.name
if type(obj.pk) == uuid.UUID:
id = str(obj.pk)
else:
id = obj.pk
feature = {
"type": "Feature",
"properties": {
"name": title,
"object_url":
reverse('admin:{}_{}_change'.format(
obj._meta.app_label,
obj._meta.model_name), args=(obj.pk,)),
},
"geometry": json.loads(geom.json),
"id": id
}
for attribute in attributes:
if hasattr(obj, attribute):
value = getattr(obj, attribute.__str__())
if type(value) == uuid.UUID:
feature[attribute] = str(value)
else:
feature[attribute] = value
#print(feature)
data["features"].append(feature)
return data
# Register your models here.
admin.site.register(Lau1, LAU1Admin)
admin.site.register(Nuts3, NUTS3Admin)
admin.site.register(Road, RoadAdmin)
admin.site.register(PublicTransportStop, PublicTransportStopAdmin)
admin.site.register(RailwayStation, RailwayStationAdmin)
admin.site.register(Airport, AirportAdmin)
| 2.015625 | 2 |
umbra/monitor/main.py | RafaelAPB/umbra | 0 | 6117 | import logging
import json
import asyncio
from google.protobuf import json_format
from umbra.common.protobuf.umbra_grpc import MonitorBase
from umbra.common.protobuf.umbra_pb2 import Instruction, Snapshot
from umbra.monitor.tools import Tools
logger = logging.getLogger(__name__)
logging.getLogger("hpack").setLevel(logging.WARNING)
class Monitor(MonitorBase):
def __init__(self, info):
self.tools = Tools()
async def Listen(self, stream):
logging.debug("Instruction Received")
instruction: Instruction = await stream.recv_message()
instruction_dict = json_format.MessageToDict(instruction, preserving_proto_field_name=True)
snapshot_dict = await self.tools.handle(instruction_dict)
snapshot = json_format.ParseDict(snapshot_dict, Snapshot())
await stream.send_message(snapshot)
| 1.960938 | 2 |
pycs/spells/hunters_mark.py | dwagon/pycs | 0 | 6118 | <filename>pycs/spells/hunters_mark.py
"""https://www.dndbeyond.com/spells/hunters-mark"""
from unittest.mock import patch
import dice
from pycs.constant import ActionCategory
from pycs.constant import SpellType
from pycs.creature import Creature
from pycs.effect import Effect
from pycs.gear import Shortbow
from pycs.spell import SpellAction
from pycs.spells.spelltest import SpellTest
##############################################################################
##############################################################################
##############################################################################
class HuntersMark(SpellAction):
"""You choose a creature you can see within range and mystically
mark it as your quarry. Until the spell ends, you deal an extra 1d6
damage to the target whenever you hit it with a weapon attack, and
you have advantage on any Wisdom (Perception) or Wisdom (Survival)
check you make to find it. If the target drops to 0 hit points
before this spell ends, you can use a bonus action on a subsequent
turn of yours to mark a new creature.
At Higher Levels. When you cast this spell using a spell slot of
3rd or 4th level, you can maintain your concentration on the spell
for up to 8 hours. When you use a spell slot of 5th level or higher,
you can maintain your concentration on the spell for up to 24
hours."""
##########################################################################
def __init__(self, **kwargs):
name = "Hunters Mark"
kwargs.update(
{
"category": ActionCategory.BONUS,
"concentration": SpellType.CONCENTRATION,
"level": 1,
"reach": 90,
"type": SpellType.BUFF,
}
)
super().__init__(name, **kwargs)
self._victim = None
##########################################################################
def heuristic(self):
"""Should we do the spell"""
if self.pick_target():
return 6
print("No enemy in range")
return 0
##########################################################################
def pick_target(self):
"""Who should we do the spell to"""
for enemy in self.owner.pick_closest_enemy():
if self.owner.distance(enemy) > self.range()[0]:
continue
if enemy.has_effect("Hunters Mark"):
continue
self.target = enemy
return enemy
return None
##########################################################################
def cast(self):
"""Do the spell"""
self._victim = self.target
self._victim.add_effect(HuntersMarkEffect(caster=self.owner))
print(f"Cast Hunters Mark on {self._victim}")
##########################################################################
def end_concentration(self):
"""What happens when we stop concentrating"""
if self._victim:
print(f"Removing Hunters Mark from {self._victim}")
self._victim.remove_effect("Hunters Mark")
self._victim = None
##############################################################################
##############################################################################
##############################################################################
class HuntersMarkEffect(Effect):
"""Hunters Mark Effect"""
##########################################################################
def __init__(self, **kwargs):
"""Initialise"""
super().__init__("Hunters Mark", **kwargs)
##########################################################################
def hook_target_additional_damage(self, _, source, target):
"""More damage"""
if source == self.caster:
return ("1d6", 0, None)
return ("", 0, None)
##############################################################################
##############################################################################
##############################################################################
class TestHuntersMark(SpellTest):
"""Test Spell"""
##########################################################################
def setUp(self):
"""test setup"""
super().setUp()
self.caster.add_action(HuntersMark())
##########################################################################
def test_cast(self):
"""test casting"""
self.caster.options_this_turn = [ActionCategory.BONUS]
self.assertFalse(self.enemy.has_effect("Hunters Mark"))
self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=False)
self.assertTrue(self.enemy.has_effect("Hunters Mark"))
##########################################################################
def test_effect(self):
"""Test the effect of casting the spell"""
print(self.caster.arena)
self.caster.moves = 99
self.caster.options_this_turn = [ActionCategory.BONUS, ActionCategory.ACTION]
self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=True)
self.assertTrue(self.enemy.has_effect("Hunters Mark"))
self.caster.add_gear(Shortbow())
self.assertEqual(len(self.enemy.damage_this_turn), 0)
with patch.object(Creature, "rolld20") as mock:
mock.return_value = 18
with patch.object(dice, "roll") as mock_dice:
mock_dice.return_value = 5
self.caster.do_stuff(categ=ActionCategory.ACTION, moveto=True)
print(f"{self.enemy.damage_this_turn=}")
self.assertEqual(len(self.enemy.damage_this_turn), 2)
##########################################################################
def test_removal(self):
"""Test the effect gets removed"""
self.caster.options_this_turn = [ActionCategory.BONUS]
self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=False)
        self.assertTrue(self.enemy.has_effect("Hunters Mark"))
self.caster.remove_concentration()
        self.assertFalse(self.enemy.has_effect("Hunters Mark"))
# EOF
| 2.5625 | 3 |
utilities.py | armandok/pySLAM-D | 10 | 6119 | import numpy as np
def rot_to_angle(rot):
return np.arccos(0.5*np.trace(rot)-0.5)
def rot_to_heading(rot):
# This function calculates the heading angle of the rot matrix w.r.t. the y-axis
new_rot = rot[0:3:2, 0:3:2] # remove the mid row and column corresponding to the y-axis
new_rot = new_rot/np.linalg.det(new_rot)
return np.arctan2(new_rot[1, 0], new_rot[0, 0])
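# Illustrative check (assumes the standard rotation-about-y convention below):
# rot_to_angle recovers ~0.3 rad, while rot_to_heading returns ~-0.3 because of
# the arctan2 sign convention used above.
if __name__ == "__main__":
    theta = 0.3
    rot_y = np.array([[np.cos(theta), 0.0, np.sin(theta)],
                      [0.0, 1.0, 0.0],
                      [-np.sin(theta), 0.0, np.cos(theta)]])
    print(rot_to_angle(rot_y))
    print(rot_to_heading(rot_y))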
| 3.15625 | 3 |
robosuite/models/grippers/__init__.py | kyungjaelee/robosuite | 397 | 6120 | <reponame>kyungjaelee/robosuite<gh_stars>100-1000
from .gripper_model import GripperModel
from .gripper_factory import gripper_factory
from .gripper_tester import GripperTester
from .panda_gripper import PandaGripper
from .rethink_gripper import RethinkGripper
from .robotiq_85_gripper import Robotiq85Gripper
from .robotiq_three_finger_gripper import RobotiqThreeFingerGripper, RobotiqThreeFingerDexterousGripper
from .panda_gripper import PandaGripper
from .jaco_three_finger_gripper import JacoThreeFingerGripper, JacoThreeFingerDexterousGripper
from .robotiq_140_gripper import Robotiq140Gripper
from .wiping_gripper import WipingGripper
from .null_gripper import NullGripper
GRIPPER_MAPPING = {
"RethinkGripper": RethinkGripper,
"PandaGripper": PandaGripper,
"JacoThreeFingerGripper": JacoThreeFingerGripper,
"JacoThreeFingerDexterousGripper": JacoThreeFingerDexterousGripper,
"WipingGripper": WipingGripper,
"Robotiq85Gripper": Robotiq85Gripper,
"Robotiq140Gripper": Robotiq140Gripper,
"RobotiqThreeFingerGripper": RobotiqThreeFingerGripper,
"RobotiqThreeFingerDexterousGripper": RobotiqThreeFingerDexterousGripper,
None: NullGripper,
}
ALL_GRIPPERS = GRIPPER_MAPPING.keys()
| 1.921875 | 2 |
src/pose/visualizations/visualizations.py | Idein/chainer-hand-pose | 11 | 6121 | import logging
logger = logging.getLogger(__name__)
import random
import chainercv
import numpy as np
from matplotlib import pyplot as plt
from mpl_toolkits.mplot3d import Axes3D # NOQA
from pose.hand_dataset.geometry_utils import normalize_joint_zyx
from pose.hand_dataset.image_utils import normalize_depth
# Decimal Code (R,G,B)
BASE_COLOR = {
"RED": (255, 0, 0),
"GREEN": (0, 255, 0),
"BLUE": (0, 0, 255),
"YELLOW": (255, 255, 0),
"CYAN": (0, 255, 255),
"MAGENTA": (255, 0, 255),
}
def vis_image(img, ax=None):
"""
extend chainercv.visualizations.vis_image
"""
C, H, W = img.shape
if C == 1:
if ax is None:
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
        # remove channel dimension
ax.imshow(img.squeeze())
else:
ax = chainercv.visualizations.vis_image(img, ax)
return ax
def preprocess(point, ax, img):
input_point = np.asarray(point)
if input_point.ndim == 2:
input_point = np.expand_dims(point, axis=0)
H, W = None, None
if ax is None:
fig = plt.figure()
if input_point.shape[-1] == 3:
ax = fig.add_subplot(1, 1, 1, projection="3d")
else:
ax = fig.add_subplot(1, 1, 1)
if img is not None:
ax = vis_image(img, ax=ax)
_, H, W = img.shape
return input_point, ax, H, W
def vis_point(point, img=None, color=None, ax=None):
"""
Visualize points in an image, customized to our purpose.
Base implementation is taken from chainercv.visualizations.vis_image
"""
point, ax, H, W = preprocess(point, ax, img)
n_inst = len(point)
c = np.asarray(color) / 255. if color is not None else None
for i in range(n_inst):
# note that the shape of `point[i]` is (K,N) and the format of one is (y, x), (z,y,x).
# (K, N) -> (N, K)
pts = point[i].transpose() # (K,N) -> (N,K)
# resort coordinate order : yx -> xy or zyx -> xyz
pts = pts[::-1]
ax.scatter(*pts, c=c)
if W is not None:
ax.set_xlim(left=0, right=W)
if H is not None:
ax.set_ylim(bottom=H - 1, top=0)
return ax
def vis_edge(point, indices, img=None, color=None, ax=None):
"""
Visualize edges in an image
"""
point, ax, H, W = preprocess(point, ax, img)
n_inst = len(point)
if color is not None:
color = np.asarray(color) / 255.
else:
color = [None] * len(indices)
for i in range(n_inst):
# note that the shape of `point[i]` is (K,N) and the format of one is (y, x) or (z,y,x).
pts = point[i]
for ((s, t), c) in zip(indices, color):
# Select point which consists edge. It is a pair or point (start, target).
# Note that [::-1] does resort coordinate order: yx -> xy or zyx -> xyz
edge = pts[[s, t]].transpose()
edge = edge[::-1]
ax.plot(*edge, c=c)
if W is not None:
ax.set_xlim(left=0, right=W)
if H is not None:
ax.set_ylim(bottom=H - 1, top=0)
return ax
def vis_pose(point, indices, img=None, point_color=None, edge_color=None, ax=None):
ax = vis_point(point, img=img, color=point_color, ax=ax)
vis_edge(point, indices, img=img, color=edge_color, ax=ax)
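# Illustrative sketch of vis_pose on synthetic data: five random keypoints in
# (y, x) order drawn on a blank single-channel 64x64 image, edges 0-1 and 1-2.
def _example_vis_pose():
    img = np.zeros((1, 64, 64), dtype=np.float32)
    point = np.random.uniform(0, 63, size=(5, 2))
    vis_pose(point, indices=[(0, 1), (1, 2)], img=img)
    plt.show()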
def visualize_both(dataset, keypoint_names, edges, color_map, normalize=False):
import random
idx = random.randint(0, len(dataset) - 1)
logger.info("get example")
example = dataset.get_example(idx)
logger.info("Done get example")
fig = plt.figure(figsize=(8, 8))
ax1 = fig.add_subplot(221)
ax2 = fig.add_subplot(222)
ax3 = fig.add_subplot(223, projection="3d")
ax4 = fig.add_subplot(224, projection="3d")
color = [color_map[k] for k in keypoint_names]
edge_color = [color_map[s, t] for s, t in edges]
depth = example["depth"].astype(np.float32)
depth_joint = example["depth_joint"]
depth_camera = example["depth_camera"]
depth_vu, depth_z = depth_camera.zyx2vu(depth_joint, return_z=True)
z_size = example["param"]["z_size"]
if normalize:
depth = normalize_depth(depth, z_com=depth_z.mean(), z_size=z_size)
depth_joint = normalize_joint_zyx(depth_joint, depth_camera, z_size)
rgb = example["rgb"]
rgb_joint = example["rgb_joint"]
rgb_camera = example["rgb_camera"]
rgb_vu = rgb_camera.zyx2vu(rgb_joint)
rgb_joint = normalize_joint_zyx(rgb_joint, rgb_camera, z_size)
print(example["param"])
vis_point(rgb_vu, img=rgb, color=color, ax=ax1)
vis_edge(rgb_vu, indices=edges, color=edge_color, ax=ax1)
vis_point(rgb_joint, color=color, ax=ax3)
vis_edge(rgb_joint, indices=edges, color=edge_color, ax=ax3)
vis_point(depth_vu, img=depth, color=color, ax=ax2)
vis_edge(depth_vu, indices=edges, color=edge_color, ax=ax2)
vis_point(depth_joint, color=color, ax=ax4)
vis_edge(depth_joint, indices=edges, color=edge_color, ax=ax4)
for ax in [ax3, ax4]:
ax.set_xlabel("x")
ax.set_ylabel("y")
ax.set_zlabel("z")
ax.view_init(-65, -90)
plt.savefig("output.png")
plt.show()
def visualize_rgb(dataset, keypoint_names, edges, color_map, idx=None):
import random
if idx is None:
idx = random.randint(0, len(dataset) - 1)
logger.info("get example")
example = dataset.get_example(idx)
logger.info("Done get example")
fig = plt.figure(figsize=(5, 10))
ax1 = fig.add_subplot(211)
ax3 = fig.add_subplot(212, projection="3d")
color = [color_map[k] for k in keypoint_names]
edge_color = [color_map[s, t] for s, t in edges]
rgb = example["rgb"]
rgb_joint = example["rgb_joint"]
rgb_camera = example["rgb_camera"]
rgb_vu = rgb_camera.zyx2vu(rgb_joint)
vis_point(rgb_vu, img=rgb, color=color, ax=ax1)
vis_edge(rgb_vu, indices=edges, color=edge_color, ax=ax1)
vis_point(rgb_joint, color=color, ax=ax3)
vis_edge(rgb_joint, indices=edges, color=edge_color, ax=ax3)
for ax in [ax3]:
ax.set_xlabel("x")
ax.set_ylabel("y")
ax.set_zlabel("z")
ax.view_init(-65, -90)
plt.savefig("output.png")
plt.show()
def visualize_depth(dataset, keypoint_names, edges, color_map, normalize=False):
idx = random.randint(0, len(dataset) - 1)
logger.info("get example")
example = dataset.get_example(idx)
logger.info("Done get example")
fig = plt.figure(figsize=(5, 10))
ax2 = fig.add_subplot(211)
ax4 = fig.add_subplot(212, projection="3d")
color = [color_map[k] for k in keypoint_names]
edge_color = [color_map[s, t] for s, t in edges]
depth = example["depth"].astype(np.float32)
depth_joint = example["depth_joint"]
depth_camera = example["depth_camera"]
depth_vu, depth_z = depth_camera.zyx2vu(depth_joint, return_z=True)
z_size = example["param"]["z_size"]
if normalize:
depth = normalize_depth(depth, z_com=depth_z.mean(), z_size=z_size)
depth_joint = normalize_joint_zyx(depth_joint, depth_camera, z_size)
print(example["param"])
vis_point(depth_vu, img=depth, color=color, ax=ax2)
vis_edge(depth_vu, indices=edges, color=edge_color, ax=ax2)
vis_point(depth_joint, color=color, ax=ax4)
vis_edge(depth_joint, indices=edges, color=edge_color, ax=ax4)
for ax in [ax4]:
ax.set_xlabel("x")
ax.set_ylabel("y")
ax.set_zlabel("z")
ax.view_init(-65, -90)
plt.savefig("output.png")
plt.show()
| 2.25 | 2 |
publication-erdf/flask_service.py | ticapix/automated-tasks | 0 | 6122 | <reponame>ticapix/automated-tasks<gh_stars>0
#!/usr/bin/env python3
from flask import Flask
app = Flask(__name__)
@app.route('/process-email')
def process_email():
return "Hello World!"
if __name__ == "__main__":
app.run()
| 1.703125 | 2 |
tools/nn/speaker.py | mikiec84/speaking_detection | 0 | 6123 | import os
import skimage.io
from torch.nn import Module
import torch.nn
from torchvision.models import resnet18
from nn.speaker_dataset import Dataset # @UnusedImport
os.environ['TORCH_MODEL_ZOO'] = '../data/'
VIDTIMIT_PATH = '../data/vidtimit/'
skimage.io.use_plugin('pil')
class Net(Module):
def __init__(self):
super().__init__()
resnet = resnet18(pretrained=True)
self.features = torch.nn.Sequential(*list(resnet.children())[:-1])
self.classifier = torch.nn.Sequential(
torch.nn.Linear(512, 2)
)
# print(len(list(self.features.parameters())))
for p in list(self.features.parameters())[:20]:
p.requires_grad = False
def forward(self, x, **kw):
# X = F.softmax(self.basenet(X))
f = self.features(x)
f = f.view(f.size(0), -1)
y = self.classifier(f)
return y
def get_speaking_detector_final():
m = torch.load('../data/speaker.pt')
m = m.eval();
return m
def get_speaking_detector(e):
m = torch.load('../data/speaker/model.e{}.pt'.format(e))
m = m.eval();
return m
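# Illustrative smoke test (assumes a 3x224x224 input; constructing Net pulls
# the pretrained resnet18 weights from the TORCH_MODEL_ZOO path set above):
def _example_forward():
    net = Net().eval()
    dummy = torch.randn(1, 3, 224, 224)
    with torch.no_grad():
        logits = net(dummy)
    assert logits.shape == (1, 2)  # two-way speaking / not-speaking scores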
| 2.4375 | 2 |
tensorflow_model_analysis/util_test.py | mdreves/model-analysis | 0 | 6124 | <reponame>mdreves/model-analysis
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Simple tests for util."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow_model_analysis import util
class UtilTest(tf.test.TestCase):
def testKwargsOnly(self):
@util.kwargs_only
def fn(a, b, c, d=None, e=5):
if d is None:
d = 100
if e is None:
e = 1000
return a + b + c + d + e
self.assertEqual(1 + 2 + 3 + 100 + 5, fn(a=1, b=2, c=3))
self.assertEqual(1 + 2 + 3 + 100 + 1000, fn(a=1, b=2, c=3, e=None))
with self.assertRaisesRegexp(TypeError, 'keyword-arguments only'):
fn(1, 2, 3)
with self.assertRaisesRegexp(TypeError, 'with c specified'):
fn(a=1, b=2, e=5) # pylint: disable=no-value-for-parameter
with self.assertRaisesRegexp(TypeError, 'with extraneous kwargs'):
fn(a=1, b=2, c=3, f=11) # pylint: disable=unexpected-keyword-arg
if __name__ == '__main__':
tf.test.main()
| 2.109375 | 2 |
laia/data/transforms/vision/random_beta_morphology.py | eivtho/PyLaia | 89 | 6125 | from typing import List, Tuple, Union
import numpy as np
import scipy.special
from PIL import Image, ImageFilter
class RandomBetaMorphology:
def __init__(
self, filter_size_min: int, filter_size_max: int, alpha: float, beta: float
) -> None:
assert filter_size_min % 2 != 0, "Filter size must be odd"
assert filter_size_max % 2 != 0, "Filter size must be odd"
self.filter_size_min = filter_size_min
self.filter_size_max = filter_size_max
self.alpha = alpha
self.beta = beta
self.filter_sizes, self.filter_probs = self._create_filter_distribution(
filter_size_min, filter_size_max, alpha, beta
)
@staticmethod
def _create_filter_distribution(
filter_size_min: int, filter_size_max: int, alpha: float, beta: float
) -> Tuple[List[int], Union[List[float], np.ndarray]]:
n = (filter_size_max - filter_size_min) // 2 + 1
if n < 2:
return [filter_size_min], np.asarray([1.0], dtype=np.float32)
filter_sizes = []
filter_probs = []
for k in range(n):
filter_sizes.append(filter_size_min + 2 * k)
filter_probs.append(
scipy.special.comb(n, k) * scipy.special.beta(alpha + k, n - k + beta)
)
np_filter_probs = np.asarray(filter_probs, dtype=np.float32)
        np_filter_probs = np_filter_probs / np_filter_probs.sum()  # normalize so the probabilities sum to 1
return filter_sizes, np_filter_probs
def sample_filter_size(self):
filter_size = np.random.choice(self.filter_sizes, p=self.filter_probs)
return filter_size
def __call__(self, *args, **kwargs):
        raise NotImplementedError
def __repr__(self) -> str:
return (
f"vision.{self.__class__.__name__}("
f"filter_size_min={self.filter_size_min}, "
f"filter_size_max={self.filter_size_max}, "
f"alpha={self.alpha}, beta={self.beta})"
)
class Dilate(RandomBetaMorphology):
def __init__(
self,
filter_size_min: int = 3,
filter_size_max: int = 7,
alpha: float = 1,
beta: float = 3,
) -> None:
super().__init__(filter_size_min, filter_size_max, alpha, beta)
def __call__(self, img: Image) -> Image:
filter_size = self.sample_filter_size()
return img.filter(ImageFilter.MaxFilter(filter_size))
class Erode(RandomBetaMorphology):
def __init__(
self,
filter_size_min: int = 3,
filter_size_max: int = 5,
alpha: float = 1,
beta: float = 3,
) -> None:
super().__init__(filter_size_min, filter_size_max, alpha, beta)
def __call__(self, img: Image) -> Image:
filter_size = self.sample_filter_size()
return img.filter(ImageFilter.MinFilter(filter_size))
if __name__ == "__main__":
import argparse
from PIL import ImageOps
parser = argparse.ArgumentParser()
parser.add_argument("--operation", choices=("dilate", "erode"), default="dilate")
parser.add_argument("images", type=argparse.FileType("rb"), nargs="+")
args = parser.parse_args()
transformer = Dilate() if args.operation == "dilate" else Erode()
for f in args.images:
x = Image.open(f, "r").convert("L")
x = ImageOps.invert(x)
y = transformer(x)
w, h = x.size
z = Image.new("L", (w, 2 * h))
z.paste(x, (0, 0))
z.paste(y, (0, h))
z = z.resize(size=(w // 2, h), resample=Image.BICUBIC)
z.show()
input()
| 2.5625 | 3 |
hemp/internal/utils.py | Addvilz/hemp | 1 | 6126 | <gh_stars>1-10
import sys
from fabric.utils import error, puts
from git import RemoteProgress
def print_err(message, func=None, exception=None, stdout=None, stderr=None):
error('[Hemp] ' + message, func, exception, stdout, stderr)
def print_info(text, show_prefix=None, end="\n", flush=True):
puts('[Hemp] ' + text, show_prefix, end, flush)
def print_git_output(stdout):
for line in stdout.split('\n'):
sys.stdout.write('[GIT] ' + line + '\n')
sys.stdout.flush()
class SimpleProgressPrinter(RemoteProgress):
def _parse_progress_line(self, line):
if '\r' in line:
line = line.replace('\r', '\r[GIT] ')
sys.stdout.write('[GIT] ' + line + '\n')
sys.stdout.flush()
| 2.578125 | 3 |
backend/links/sentence.py | dla1635/hyLink | 1 | 6127 | <reponame>dla1635/hyLink<gh_stars>1-10
# -*- coding: utf-8 -*-
from collections import Counter
from konlpy.tag import Okt
class Sentence(object):
okt = Okt()
def __init__(self, text, index=0):
self.index = index
self.text = text.strip()
self.tokens = self.okt.phrases(self.text)
self.bow = Counter(self.tokens)
def __str__(self):
return self.text
def __hash__(self):
return self.index
| 2.765625 | 3 |
tests/arch/x86/test_x86parser.py | IMULMUL/barf-project | 1,395 | 6128 | # Copyright (c) 2014, Fundacion Dr. <NAME>
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
import unittest
from barf.arch import ARCH_X86_MODE_32
from barf.arch import ARCH_X86_MODE_64
from barf.arch.x86.parser import X86Parser
class X86Parser32BitsTests(unittest.TestCase):
def setUp(self):
self._parser = X86Parser(ARCH_X86_MODE_32)
def test_two_oprnd_reg_reg(self):
asm = self._parser.parse("add eax, ebx")
self.assertEqual(str(asm), "add eax, ebx")
def test_two_oprnd_reg_imm(self):
asm = self._parser.parse("add eax, 0x12345678")
self.assertEqual(str(asm), "add eax, 0x12345678")
def test_two_oprnd_reg_mem(self):
asm = self._parser.parse("add eax, [ebx + edx * 4 + 0x10]")
self.assertEqual(str(asm), "add eax, [ebx+edx*4+0x10]")
def test_two_oprnd_mem_reg(self):
asm = self._parser.parse("add [ebx + edx * 4 + 0x10], eax")
self.assertEqual(str(asm), "add [ebx+edx*4+0x10], eax")
def test_one_oprnd_reg(self):
asm = self._parser.parse("inc eax")
self.assertEqual(str(asm), "inc eax")
def test_one_oprnd_imm(self):
asm = self._parser.parse("jmp 0x12345678")
self.assertEqual(str(asm), "jmp 0x12345678")
def test_one_oprnd_mem(self):
asm = self._parser.parse("inc dword ptr [ebx+edx*4+0x10]")
self.assertEqual(str(asm), "inc dword ptr [ebx+edx*4+0x10]")
def test_zero_oprnd(self):
asm = self._parser.parse("nop")
self.assertEqual(str(asm), "nop")
# Misc
# ======================================================================== #
def test_misc_1(self):
asm = self._parser.parse("mov dword ptr [-0x21524111], ecx")
self.assertEqual(str(asm), "mov dword ptr [-0x21524111], ecx")
self.assertNotEqual(str(asm), "mov dword ptr [0xdeadbeef], ecx")
def test_misc_2(self):
asm = self._parser.parse("fucompi st(1)")
self.assertEqual(str(asm), "fucompi st1")
class X86Parser64BitsTests(unittest.TestCase):
def setUp(self):
self._parser = X86Parser(ARCH_X86_MODE_64)
def test_64_two_oprnd_reg_reg(self):
asm = self._parser.parse("add rax, rbx")
self.assertEqual(str(asm), "add rax, rbx")
def test_64_two_oprnd_reg_reg_2(self):
asm = self._parser.parse("add rax, r8")
self.assertEqual(str(asm), "add rax, r8")
def test_64_two_oprnd_reg_mem(self):
asm = self._parser.parse("add rax, [rbx + r15 * 4 + 0x10]")
self.assertEqual(str(asm), "add rax, [rbx+r15*4+0x10]")
# Misc
# ======================================================================== #
def test_misc_offset_1(self):
asm = self._parser.parse("add byte ptr [rax+0xffffff89], cl")
self.assertEqual(str(asm), "add byte ptr [rax+0xffffff89], cl")
def main():
unittest.main()
if __name__ == '__main__':
main()
| 1.695313 | 2 |
Concurrency/codeSample/Part4_Thread_Synchronuzation_Primitives/sema_signal.py | Chyi341152/pyConPaper | 1 | 6129 | <reponame>Chyi341152/pyConPaper
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# sema_signal.py
#
# An example of using a semaphore for signaling between threads
import threading
import time
done = threading.Semaphore(0)  # Signaling: the count starts at 0, so the consumer blocks until the producer releases
item = None
def producer():
global item
print("I'm the producer and I produce data.")
print("Producer is going to sleep.")
time.sleep(5)
item = "Hello"
print("Producer is alive. Signaling the consumer.")
done.release() # Increments the count and signals waiting threads
def consumer():
print("I'm a consumer and I want for date.")
print("Consumer is waiting.")
    done.acquire() # Blocks while the count is 0; otherwise decrements the count and continues
print("Consumer got", item)
t1 = threading.Thread(target=producer)
t2 = threading.Thread(target=consumer)
t1.start()
t2.start()
"""
Semaphore Uses:
1. Resource control
        You can limit the number of threads performing certain operations. For example, performing database queries or making network connections
2. Signaling
Semaphores can be used to send "signals" between threads. For example, having one thread wake up another thread
"""
| 4.5625 | 5 |
temp_wc_analysis/analysis.py | KarrLab/wc_sim | 8 | 6130 | '''Analysis utility functions.
:Author: <NAME> <<EMAIL>>
:Date: 2016-03-26
:Copyright: 2016-2018, Karr Lab
:License: MIT
'''
# TODO(Arthur): IMPORTANT: refactor and replace
from matplotlib import pyplot
from matplotlib import ticker
from wc_lang import Model, Submodel
from scipy.constants import Avogadro
import numpy as np
import re
def plot(model, time = np.zeros(0),
species_counts = None, volume = np.zeros(0), extracellular_volume = np.zeros(0),
selected_species_compartments = [],
yDatas = {},
units = 'mM', title = '', fileName = ''):
#convert time to hours
time = time.copy() / 3600
#create figure
fig = pyplot.figure()
#extract data to plot
if not yDatas:
yDatas = {}
for species_compartment_id in selected_species_compartments:
#extract data
match = re.match('^(?P<speciesId>[a-z0-9\-_]+)\[(?P<compartmentId>[a-z0-9\-_]+)\]$',
species_compartment_id, re.I).groupdict()
speciesId = match['speciesId']
compartmentId = match['compartmentId']
if isinstance(model, Model):
species = model.get_component_by_id(speciesId, 'species')
compartment = model.get_component_by_id(compartmentId, 'compartments')
yData = species_counts[species.index, compartment.index, :]
elif isinstance(model, Submodel):
yData = species_counts[species_compartment_id]
else:
raise Exception('Invalid model type %s' % model.__class__.__name__)
#scale
if compartmentId == 'c':
V = volume
else:
V = extracellular_volume
if units == 'pM':
scale = 1 / Avogadro / V * 1e12
elif units == 'nM':
scale = 1 / Avogadro / V * 1e9
elif units == 'uM':
scale = 1 / Avogadro / V * 1e6
elif units == 'mM':
scale = 1 / Avogadro / V * 1e3
elif units == 'M':
scale = 1 / Avogadro / V * 1e0
elif units == 'molecules':
scale = 1
else:
raise Exception('Invalid units "%s"' % units)
yData *= scale
yDatas[species_compartment_id] = yData
#plot results
yMin = 1e12
yMax = -1e12
for label, yData in yDatas.items():
#update range
yMin = min(yMin, np.min(yData))
yMax = max(yMax, np.max(yData))
#add to plot
pyplot.plot(time, yData, label=label)
#set axis limits
pyplot.xlim((0, time[-1]))
pyplot.ylim((yMin, yMax))
#add axis labels and legend
if title:
pyplot.title(title)
pyplot.xlabel('Time (h)')
if units == 'molecules':
pyplot.ylabel('Copy number')
else:
pyplot.ylabel('Concentration (%s)' % units)
y_formatter = ticker.ScalarFormatter(useOffset=False)
pyplot.gca().get_yaxis().set_major_formatter(y_formatter)
if len(selected_species_compartments) > 1:
pyplot.legend()
#save
if fileName:
fig.savefig(fileName)
pyplot.close(fig)
| 2.171875 | 2 |
setup.py | bstuddard/bonsai | 26 | 6131 | <gh_stars>10-100
from setuptools import setup, find_packages
with open("README.md", "r") as readme_file:
readme = readme_file.read()
requirements = [
'xgboost>=0.90',
'catboost>=0.26',
'bayesian-optimization>=1.2.0',
'numpy>=1.19.5',
'pandas>=1.1.5',
'matplotlib>=3.2.2',
'seaborn>=0.11.1',
'plotly>=4.4.1',
'pyyaml>=5.4.1'
]
setup(
name="bonsai-tree",
version="1.2",
author="<NAME>",
author_email="<EMAIL>",
description="Bayesian Optimization + Gradient Boosted Trees",
long_description=readme,
url="https://github.com/magi-1/bonsai",
packages=find_packages(),
package_data={'': ['*.yml']},
install_requires=requirements,
license = 'MIT',
classifiers=[
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
) | 1.320313 | 1 |
_scripts/increment_version.py | clockhart/pathogen | 0 | 6132 | <gh_stars>0
"""
increment_version.py
written in Python3
author: <NAME> <<EMAIL>>
"""
import yaml
# Read in version
with open('version.yml', 'r') as f:
version = yaml.safe_load(f.read())
# Strip "dev" out of micro
version['micro'] = int(str(version['micro']).replace('dev', ''))
# Increment micro
version['micro'] += 1
# Add "dev" back to patch
if version['micro'] != 0:
version['micro'] = 'dev' + str(version['micro'])
# Output version
with open('version.yml', 'w') as f:
yaml.safe_dump(version, f, sort_keys=False)
# Transform version dict to string
version = '.'.join([str(version[key]) for key in ['major', 'minor', 'micro']])
# Write version string to pathogen/version.py
with open('pathogen/version.py', 'w') as f:
f.write("__version__ = '{}'\n".format(version))
# Emit the new version string on stdout so callers can capture it
print(version)
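# Worked example (illustrative values, not taken from a real version.yml): starting
# from {major: 1, minor: 2, micro: dev3}, the steps above strip "dev" (micro -> 3),
# increment it (micro -> 4), re-prefix "dev" because the result is nonzero
# (micro -> dev4), and the script finally writes and prints "1.2.dev4".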
| 2.265625 | 2 |
holoviews/core/data/ibis.py | TheoMathurin/holoviews | 1 | 6133 | <gh_stars>1-10
import sys
import numpy
try:
from collections.abc import Iterable
except ImportError:
from collections import Iterable
from .. import util
from ..element import Element
from ..ndmapping import NdMapping, item_check, sorted_context
from .interface import Interface
from . import pandas
from .util import cached
class IbisInterface(Interface):
types = ()
datatype = "ibis"
default_partitions = 100
zero_indexed_backend_modules = [
'ibis.backends.omniscidb.client',
]
# the rowid is needed until ibis updates versions
@classmethod
def has_rowid(cls):
import ibis.expr.operations
return hasattr(ibis.expr.operations, "RowID")
@classmethod
def is_rowid_zero_indexed(cls, data):
try:
from ibis.client import find_backends, validate_backends
(backend,) = validate_backends(list(find_backends(data)))
except Exception:
backend = data._find_backend()
return type(backend).__module__ in cls.zero_indexed_backend_modules
@classmethod
def loaded(cls):
return "ibis" in sys.modules
@classmethod
def applies(cls, obj):
if not cls.loaded():
return False
from ibis.expr.types import Expr
return isinstance(obj, Expr)
@classmethod
def init(cls, eltype, data, keys, values):
params = eltype.param.objects()
index = params["kdims"]
columns = params["vdims"]
if isinstance(index.bounds[1], int):
ndim = min([index.bounds[1], len(index.default)])
else:
ndim = None
nvdim = columns.bounds[1] if isinstance(columns.bounds[1], int) else None
if keys and values is None:
values = [c for c in data.columns if c not in keys]
elif values and keys is None:
keys = [c for c in data.columns if c not in values][:ndim]
elif keys is None:
keys = list(data.columns[:ndim])
if values is None:
values = [
key
for key in data.columns[ndim : ((ndim + nvdim) if nvdim else None)]
if key not in keys
]
elif keys == [] and values is None:
values = list(data.columns[: nvdim if nvdim else None])
return data, dict(kdims=keys, vdims=values), {}
@classmethod
def compute(cls, dataset):
return dataset.clone(dataset.data.execute())
@classmethod
def persist(cls, dataset):
return cls.compute(dataset)
@classmethod
@cached
def length(self, dataset):
# Get the length by counting the length of an empty query.
return dataset.data[[]].count().execute()
@classmethod
@cached
def nonzero(cls, dataset):
# Make an empty query to see if a row is returned.
return bool(len(dataset.data[[]].head(1).execute()))
@classmethod
@cached
def range(cls, dataset, dimension):
dimension = dataset.get_dimension(dimension, strict=True)
if cls.dtype(dataset, dimension).kind in 'SUO':
return None, None
if dimension.nodata is not None:
return Interface.range(dataset, dimension)
column = dataset.data[dimension.name]
return tuple(
dataset.data.aggregate([column.min(), column.max()]).execute().values[0, :]
)
@classmethod
@cached
def values(
cls,
dataset,
dimension,
expanded=True,
flat=True,
compute=True,
keep_index=False,
):
dimension = dataset.get_dimension(dimension, strict=True)
data = dataset.data[dimension.name]
if not expanded:
data = data.distinct()
return data if keep_index or not compute else data.execute().values
@classmethod
def histogram(cls, expr, bins, density=True, weights=None):
bins = numpy.asarray(bins)
bins = [int(v) if bins.dtype.kind in 'iu' else float(v) for v in bins]
binned = expr.bucket(bins).name('bucket')
hist = numpy.zeros(len(bins)-1)
hist_bins = binned.value_counts().sort_by('bucket').execute()
for b, v in zip(hist_bins['bucket'], hist_bins['count']):
if numpy.isnan(b):
continue
hist[int(b)] = v
if weights is not None:
raise NotImplementedError("Weighted histograms currently "
"not implemented for IbisInterface.")
if density:
hist = hist/expr.count().execute()
return hist, bins
@classmethod
@cached
def shape(cls, dataset):
return cls.length(dataset), len(dataset.data.columns)
@classmethod
@cached
def dtype(cls, dataset, dimension):
dimension = dataset.get_dimension(dimension)
return dataset.data.head(0).execute().dtypes[dimension.name]
dimension_type = dtype
@classmethod
def sort(cls, dataset, by=[], reverse=False):
return dataset.data.sort_by([(dataset.get_dimension(x).name, not reverse) for x in by])
@classmethod
def redim(cls, dataset, dimensions):
return dataset.data.mutate(
**{v.name: dataset.data[k] for k, v in dimensions.items()}
)
validate = pandas.PandasInterface.validate
reindex = pandas.PandasInterface.reindex
@classmethod
def _index_ibis_table(cls, data):
import ibis
if not cls.has_rowid():
raise ValueError(
"iloc expressions are not supported for ibis version %s."
% ibis.__version__
)
if "hv_row_id__" in data.columns:
return data
if cls.is_rowid_zero_indexed(data):
return data.mutate(hv_row_id__=data.rowid())
else:
return data.mutate(hv_row_id__=data.rowid() - 1)
@classmethod
def iloc(cls, dataset, index):
rows, columns = index
scalar = all(map(util.isscalar, index))
if isinstance(columns, slice):
columns = [x.name for x in dataset.dimensions()[columns]]
elif numpy.isscalar(columns):
columns = [dataset.get_dimension(columns).name]
else:
columns = [dataset.get_dimension(d).name for d in columns]
data = cls._index_ibis_table(dataset.data[columns])
if scalar:
return (
data.filter(data.hv_row_id__ == rows)[columns]
.head(1)
.execute()
.iloc[0, 0]
)
if isinstance(rows, slice):
            # We should use a pseudo column for the row number, but that is
            # still awaiting a PR on ibis
if any(x is not None for x in (rows.start, rows.stop, rows.step)):
predicates = []
if rows.start:
predicates += [data.hv_row_id__ >= rows.start]
if rows.stop:
predicates += [data.hv_row_id__ < rows.stop]
return data.filter(predicates).drop(["hv_row_id__"])
else:
if not isinstance(rows, Iterable):
rows = [rows]
return data.filter([data.hv_row_id__.isin(rows)]).drop(["hv_row_id__"])
return data.drop(["hv_row_id__"])
@classmethod
def unpack_scalar(cls, dataset, data):
"""
Given a dataset object and data in the appropriate format for
the interface, return a simple scalar.
"""
if len(data.columns) > 1 or data[[]].count().execute() != 1:
return data
return data.execute().iat[0, 0]
@classmethod
def groupby(cls, dataset, dimensions, container_type, group_type, **kwargs):
        # aggregate the necessary dimensions
index_dims = [dataset.get_dimension(d, strict=True) for d in dimensions]
element_dims = [kdim for kdim in dataset.kdims if kdim not in index_dims]
group_kwargs = {}
if group_type != "raw" and issubclass(group_type, Element):
group_kwargs = dict(util.get_param_values(dataset), kdims=element_dims)
group_kwargs.update(kwargs)
group_kwargs["dataset"] = dataset.dataset
group_by = [d.name for d in index_dims]
# execute a query against the table to find the unique groups.
groups = dataset.data.groupby(group_by).aggregate().execute()
# filter each group based on the predicate defined.
data = [
(
tuple(s.values.tolist()),
group_type(
dataset.data.filter(
[dataset.data[k] == v for k, v in s.to_dict().items()]
),
**group_kwargs
),
)
for i, s in groups.iterrows()
]
if issubclass(container_type, NdMapping):
with item_check(False), sorted_context(False):
return container_type(data, kdims=index_dims)
else:
return container_type(data)
@classmethod
def assign(cls, dataset, new_data):
return dataset.data.mutate(**new_data)
@classmethod
def add_dimension(cls, dataset, dimension, dim_pos, values, vdim):
import ibis
data = dataset.data
if dimension.name not in data.columns:
if not isinstance(values, ibis.Expr) and not numpy.isscalar(values):
raise ValueError("Cannot assign %s type as a Ibis table column, "
"expecting either ibis.Expr or scalar."
% type(values).__name__)
data = data.mutate(**{dimension.name: values})
return data
@classmethod
@cached
def isscalar(cls, dataset, dim):
return (
dataset.data[dataset.get_dimension(dim, strict=True).name]
.distinct()
.count()
            .execute()  # ibis expressions are evaluated with execute()
== 1
)
@classmethod
def select(cls, dataset, selection_mask=None, **selection):
if selection_mask is None:
selection_mask = cls.select_mask(dataset, selection)
indexed = cls.indexed(dataset, selection)
data = dataset.data
if isinstance(selection_mask, numpy.ndarray):
data = cls._index_ibis_table(data)
if selection_mask.dtype == numpy.dtype("bool"):
selection_mask = numpy.where(selection_mask)[0]
data = data.filter(
data["hv_row_id__"].isin(list(map(int, selection_mask)))
).drop(["hv_row_id__"])
elif selection_mask is not None and not (isinstance(selection_mask, list) and not selection_mask):
data = data.filter(selection_mask)
if indexed and data.count().execute() == 1 and len(dataset.vdims) == 1:
return data[dataset.vdims[0].name].execute().iloc[0]
return data
@classmethod
def select_mask(cls, dataset, selection):
import ibis
predicates = []
for dim, object in selection.items():
if isinstance(object, tuple):
object = slice(*object)
alias = dataset.get_dimension(dim).name
column = dataset.data[alias]
if isinstance(object, slice):
if object.start is not None:
# Workaround for dask issue #3392
bound = util.numpy_scalar_to_python(object.start)
predicates.append(bound <= column)
if object.stop is not None:
bound = util.numpy_scalar_to_python(object.stop)
predicates.append(column < bound)
elif isinstance(object, (set, list)):
# rowid conditions
condition = None
for id in object:
predicate = column == id
condition = (
predicate if condition is None else condition | predicate
)
if condition is not None:
predicates.append(condition)
elif callable(object):
predicates.append(object(column))
elif isinstance(object, ibis.Expr):
predicates.append(object)
else:
predicates.append(column == object)
return predicates
@classmethod
def sample(cls, dataset, samples=[]):
import ibis
dims = dataset.dimensions()
data = dataset.data
if all(util.isscalar(s) or len(s) == 1 for s in samples):
items = [s[0] if isinstance(s, tuple) else s for s in samples]
return data[data[dims[0].name].isin(items)]
predicates = None
for sample in samples:
if util.isscalar(sample):
sample = [sample]
if not sample:
continue
predicate = None
for i, v in enumerate(sample):
p = data[dims[i].name] == ibis.literal(util.numpy_scalar_to_python(v))
if predicate is None:
predicate = p
else:
predicate &= p
if predicates is None:
predicates = predicate
else:
predicates |= predicate
return data if predicates is None else data.filter(predicates)
@classmethod
def aggregate(cls, dataset, dimensions, function, **kwargs):
import ibis.expr.operations
data = dataset.data
columns = [d.name for d in dataset.kdims if d in dimensions]
values = dataset.dimensions("value", label="name")
new = data[columns + values]
function = {
numpy.min: ibis.expr.operations.Min,
numpy.nanmin: ibis.expr.operations.Min,
numpy.max: ibis.expr.operations.Max,
numpy.nanmax: ibis.expr.operations.Max,
numpy.mean: ibis.expr.operations.Mean,
numpy.nanmean: ibis.expr.operations.Mean,
numpy.std: ibis.expr.operations.StandardDev,
numpy.nanstd: ibis.expr.operations.StandardDev,
numpy.sum: ibis.expr.operations.Sum,
numpy.nansum: ibis.expr.operations.Sum,
numpy.var: ibis.expr.operations.Variance,
numpy.nanvar: ibis.expr.operations.Variance,
len: ibis.expr.operations.Count,
}.get(function, function)
if len(dimensions):
selection = new.groupby(columns)
if function is numpy.count_nonzero:
aggregation = selection.aggregate(
**{
x: ibis.expr.operations.Count(new[x], where=new[x] != 0).to_expr()
for x in new.columns
if x not in columns
}
)
else:
aggregation = selection.aggregate(
**{
x: function(new[x]).to_expr()
for x in new.columns
if x not in columns
}
)
else:
aggregation = new.aggregate(
**{x: function(new[x]).to_expr() for x in new.columns}
)
dropped = [x for x in values if x not in data.columns]
return aggregation, dropped
@classmethod
@cached
def mask(cls, dataset, mask, mask_value=numpy.nan):
raise NotImplementedError('Mask is not implemented for IbisInterface.')
@classmethod
@cached
def dframe(cls, dataset, dimensions):
return dataset.data[dimensions].execute()
Interface.register(IbisInterface)
| 1.953125 | 2 |
chess/models/tournament.py | S0Imyr/Projet-4 | 0 | 6134 | # -*- coding: utf-8 -*-
"""
Handles the tournament logic
"""
import datetime
from chess.utils.utils import get_new_id
from chess.models.actors import Player
from chess.models.round import Round
TOURNAMENT_ID_WIDTH = 8
NB_ROUND = 4
NB_PLAYERS = 8
NB_MATCH = 4
class Tournament:
""" The class Tournament is the central piece of the models. """
last_tournament_id = "0" * TOURNAMENT_ID_WIDTH
def __init__(self, name, location, timer_type, description):
Tournament.last_tournament_id = get_new_id(Tournament.last_tournament_id, TOURNAMENT_ID_WIDTH)
self.tournament_id = Tournament.last_tournament_id
self.name = name
self.location = location
self.start_date = None
self.end_date = None
self.timer_type = timer_type
self.description = description
self.number_of_rounds = NB_ROUND
self.rounds = []
self.list_of_players = []
self.players_assigned = False
self.finished = False
def define_players(self, actors):
""" Defines the list of identifier of the players who join the tournament.
:param actors:
:return: None
"""
for num_player in range(NB_PLAYERS):
self.list_of_players.append(Player(actors[num_player],
self.tournament_id,
num_player))
def init_round(self, num_round):
""" Launches the round number "num_round".
:param num_round: number of the round played
:return: None
"""
tour = Round(num_round, self.tournament_id, self.list_of_players)
tour.start_date = datetime.date.today()
tour.rank_players()
tour.define_matches()
self.rounds.append(tour)
def register_round_results(self, num_round, winner):
""" Registers the results of the round.
:param num_round: the round number.
:param winner: the list of the winners.
:return: None.
"""
self.rounds[num_round].register_results(winner)
self.rounds[num_round].assign_points()
self.rounds[num_round].finished = True
self.rounds[num_round].memorize_opponents()
self.rounds[num_round].rank_players()
self.rounds[num_round].end_date = datetime.date.today()
def tournament_to_dict(self):
""" Converts the tournament into a dictionary
:return: dictionary of the tournament instance.
"""
string_attributes = ['tournament_id',
'name',
'location',
'timer_type',
'description',
'number_of_rounds',
'players_assigned']
serialized_tournament = {}
for attribute in string_attributes:
serialized_tournament[attribute] = getattr(self, attribute)
serialized_tournament['rounds'] = []
for r0und in self.rounds:
serialized_tournament['rounds'].append(r0und.round_to_dict())
serialized_tournament['list_of_players'] = []
for player in self.list_of_players:
serialized_tournament['list_of_players'].append(player.player_to_dict())
serialized_tournament['start_date'] = str(self.start_date)
serialized_tournament['end_date'] = str(self.end_date)
return serialized_tournament
def end_tournament(self):
""" Handles the end of the tournament.
Adds the tournament_id to the players list of tournaments.
Defines the attribute finished and the end date of the tournament.
"""
for player in self.list_of_players:
player.actor.list_of_tournaments_played.append(self.tournament_id)
self.finished = True
self.end_date = datetime.date.today()
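# A minimal usage sketch (illustrative; it assumes `actors` holds the Actor instances
# to enroll and that `winners` is the per-match result list Round.register_results
# expects -- neither is defined in this module):
#
#   tournament = Tournament("Club Open", "Paris", "blitz", "Monthly event")
#   tournament.define_players(actors)                       # create the 8 Player entries
#   for num_round in range(tournament.number_of_rounds):
#       tournament.init_round(num_round)                    # rank players and pair matches
#       tournament.register_round_results(num_round, winners)
#   tournament.end_tournament()                             # stamp end date, update player histories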
| 3.1875 | 3 |
set-config.py | astubenazy/vrops-metric-collection | 2 | 6135 | # !/usr/bin python
"""
#
# set-config - a small Python program to set up the configuration environment for data-collect.py
# data-collect.py contains the Python program to gather metrics from vROps
# Author <NAME> <<EMAIL>>
#
"""
# Importing the required modules
import json
import base64
import os,sys
# Getting the absolute path from where the script is being run
def get_script_path():
return os.path.dirname(os.path.realpath(sys.argv[0]))
def get_the_inputs():
adapterkind = raw_input("Please enter Adapter Kind: ")
resourceKind = raw_input("Please enter Resource Kind: ")
servername = raw_input("Enter enter Server IP/FQDN: ")
serveruid = raw_input("Please enter user id: ")
serverpasswd = raw_input("Please enter vRops password: ")
    encryptedvar = base64.b64encode(serverpasswd)  # NOTE: base64 is an encoding, not encryption
maxsamples = raw_input("Please enter the maximum number of samples to collect: ")
keys_to_monitor = raw_input("Please enter the number of keys to monitor: ")
keys = []
for i in range(int(keys_to_monitor)):
keys.append(raw_input("Enter the key: "))
data = {}
if int(maxsamples) < 1:
maxsamples = 1
data["adapterKind"] = adapterkind
data["resourceKind"] = resourceKind
data["sampleno"] = int(maxsamples)
serverdetails = {}
serverdetails["name"] = servername
serverdetails["userid"] = serveruid
serverdetails["password"] = encryptedvar
data["server"] = serverdetails
data["keys"] = keys
return data
# Getting the path where config.json file should be kept
path = get_script_path()
fullpath = path+"/"+"config.json"
# Getting the data for the config.json file
final_data = get_the_inputs()
# Saving the data to config.json file
with open(fullpath, 'w') as outfile:
json.dump(final_data, outfile, sort_keys = True, indent = 2, separators=(',', ':'), ensure_ascii=False) | 2.609375 | 3 |
tests/test_app.py | inmanta/inmanta-core | 6 | 6136 | <reponame>inmanta/inmanta-core
"""
Copyright 2018 Inmanta
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Contact: <EMAIL>
"""
import os
import re
import signal
import subprocess
import sys
from subprocess import TimeoutExpired
from threading import Timer
import pytest
import inmanta.util
from inmanta import const
def get_command(
tmp_dir,
stdout_log_level=None,
log_file=None,
log_level_log_file=None,
timed=False,
dbport=None,
dbname="inmanta",
dbhost=None,
dbuser=None,
dbpass=<PASSWORD>,
config_dir=None,
server_extensions=[],
version=False,
):
root_dir = tmp_dir.mkdir("root").strpath
log_dir = os.path.join(root_dir, "log")
state_dir = os.path.join(root_dir, "data")
for directory in [log_dir, state_dir]:
os.mkdir(directory)
config_file = os.path.join(root_dir, "inmanta.cfg")
if dbport is not None:
port = dbport
else:
port = inmanta.util.get_free_tcp_port()
with open(config_file, "w+", encoding="utf-8") as f:
f.write("[config]\n")
f.write("log-dir=" + log_dir + "\n")
f.write("state-dir=" + state_dir + "\n")
f.write("[database]\n")
f.write("port=" + str(port) + "\n")
f.write("name=" + dbname + "\n")
if dbhost:
f.write(f"host={dbhost}\n")
if dbuser:
f.write(f"username={dbuser}\n")
if dbpass:
f.write(f"password={<PASSWORD>")
f.write("[server]\n")
f.write(f"enabled_extensions={', '.join(server_extensions)}\n")
args = [sys.executable, "-m", "inmanta.app"]
if stdout_log_level:
args.append("-" + "v" * stdout_log_level)
if log_file:
log_file = os.path.join(log_dir, log_file)
args += ["--log-file", log_file]
if log_file and log_level_log_file:
args += ["--log-file-level", str(log_level_log_file)]
if timed:
args += ["--timed-logs"]
if config_dir:
args += ["--config-dir", config_dir]
if version:
args += ["--version"]
args += ["-c", config_file, "server"]
return (args, log_dir)
def do_run(args, env={}, cwd=None):
baseenv = os.environ.copy()
baseenv.update(env)
process = subprocess.Popen(args, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=baseenv)
return process
def convert_to_ascii(text):
return [line for line in text.decode("ascii").split("\n") if line != ""]
def do_kill(process, killtime=3, termtime=2):
def do_and_log(func, msg):
def w():
print(msg)
func()
return w
t1 = Timer(killtime, do_and_log(process.kill, "killed process"))
t2 = Timer(termtime, do_and_log(process.terminate, "terminated process"))
t1.start()
t2.start()
out, err = process.communicate()
t1.cancel()
t2.cancel()
stdout = convert_to_ascii(out)
stderr = convert_to_ascii(err)
return (stdout, stderr, process.returncode)
def run_without_tty(args, env={}, killtime=3, termtime=2):
process = do_run(args, env)
return do_kill(process, killtime, termtime)
def run_with_tty(args, killtime=3, termtime=2):
"""Could not get code for actual tty to run stable in docker, so we are faking it """
env = {const.ENVIRON_FORCE_TTY: "true"}
return run_without_tty(args, env=env, killtime=killtime, termtime=termtime)
def get_timestamp_regex():
return r"[\d]{4}\-[\d]{2}\-[\d]{2} [\d]{2}\:[\d]{2}\:[\d]{2}\,[\d]{3}"
def get_compiled_regexes(regexes, timed):
result = []
for regex in regexes:
if timed:
regex = get_timestamp_regex() + " " + regex
compiled_regex = re.compile(regex)
result.append(compiled_regex)
return result
def is_colorama_package_available():
try:
import colorama # noqa: F401
except ModuleNotFoundError:
return False
return True
def test_verify_that_colorama_package_is_not_present():
"""
    The colorama package turns the colored characters in a TTY-based terminal into uncolored characters.
As such, this package should not be present.
"""
assert not is_colorama_package_available()
@pytest.mark.parametrize_any(
"log_level, timed, with_tty, regexes_required_lines, regexes_forbidden_lines",
[
(
3,
False,
False,
[r"[a-z.]*[ ]*INFO[\s]+Starting server endpoint", r"[a-z.]*[ ]*DEBUG[\s]+Starting Server Rest Endpoint"],
[],
),
(
2,
False,
False,
[r"[a-z.]*[ ]*INFO[\s]+Starting server endpoint"],
[r"[a-z.]*[ ]*DEBUG[\s]+Starting Server Rest Endpoint"],
),
(
3,
False,
True,
[
r"\x1b\[32m[a-z.]*[ ]*INFO[\s]*\x1b\[0m \x1b\[34mStarting server endpoint",
r"\x1b\[36m[a-z.]*[ ]*DEBUG[\s]*\x1b\[0m \x1b\[34mStarting Server Rest Endpoint",
],
[],
),
(
2,
False,
True,
[r"\x1b\[32m[a-z.]*[ ]*INFO[\s]*\x1b\[0m \x1b\[34mStarting server endpoint"],
[r"\x1b\[36m[a-z.]*[ ]*DEBUG[\s]*\x1b\[0m \x1b\[34mStarting Server Rest Endpoint"],
),
(
3,
True,
False,
[r"[a-z.]*[ ]*INFO[\s]+Starting server endpoint", r"[a-z.]*[ ]*DEBUG[\s]+Starting Server Rest Endpoint"],
[],
),
(
2,
True,
False,
[r"[a-z.]*[ ]*INFO[\s]+Starting server endpoint"],
[r"[a-z.]*[ ]*DEBUG[\s]+Starting Server Rest Endpoint"],
),
(
3,
True,
True,
[
r"\x1b\[32m[a-z.]*[ ]*INFO[\s]*\x1b\[0m \x1b\[34mStarting server endpoint",
r"\x1b\[36m[a-z.]*[ ]*DEBUG[\s]*\x1b\[0m \x1b\[34mStarting Server Rest Endpoint",
],
[],
),
(
2,
True,
True,
[r"\x1b\[32m[a-z.]*[ ]*INFO[\s]*\x1b\[0m \x1b\[34mStarting server endpoint"],
[r"\x1b\[36m[a-z.]*[ ]*DEBUG[\s]*\x1b\[0m \x1b\[34mStarting Server Rest Endpoint"],
),
],
)
@pytest.mark.timeout(20)
def test_no_log_file_set(tmpdir, log_level, timed, with_tty, regexes_required_lines, regexes_forbidden_lines):
if is_colorama_package_available() and with_tty:
pytest.skip("Colorama is present")
(args, log_dir) = get_command(tmpdir, stdout_log_level=log_level, timed=timed)
if with_tty:
(stdout, _, _) = run_with_tty(args)
else:
(stdout, _, _) = run_without_tty(args)
log_file = "server.log"
assert log_file not in os.listdir(log_dir)
assert len(stdout) != 0
check_logs(stdout, regexes_required_lines, regexes_forbidden_lines, timed)
@pytest.mark.parametrize_any(
"log_level, with_tty, regexes_required_lines, regexes_forbidden_lines",
[
(
3,
False,
[
r"[a-z.]*[ ]*INFO[\s]+[a-x\.A-Z]*[\s]Starting server endpoint",
r"[a-z.]*[ ]*DEBUG[\s]+[a-x\.A-Z]*[\s]Starting Server Rest Endpoint",
],
[],
),
(
2,
False,
[r"[a-z.]*[ ]*INFO[\s]+[a-x\.A-Z]*[\s]Starting server endpoint"],
[r"[a-z.]*[ ]*DEBUG[\s]+[a-x\.A-Z]*[\s]Starting Server Rest Endpoint"],
),
(
3,
True,
[
r"[a-z.]*[ ]*INFO[\s]+[a-x\.A-Z]*[\s]Starting server endpoint",
r"[a-z.]*[ ]*DEBUG[\s]+[a-x\.A-Z]*[\s]Starting Server Rest Endpoint",
],
[],
),
(
2,
True,
[r"[a-z.]*[ ]*INFO[\s]+[a-x\.A-Z]*[\s]Starting server endpoint"],
[r"[a-z.]*[ ]*DEBUG[\s]+[a-x\.A-Z]*[\s]Starting Server Rest Endpoint"],
),
],
)
@pytest.mark.timeout(60)
def test_log_file_set(tmpdir, log_level, with_tty, regexes_required_lines, regexes_forbidden_lines):
if is_colorama_package_available() and with_tty:
pytest.skip("Colorama is present")
log_file = "server.log"
(args, log_dir) = get_command(tmpdir, stdout_log_level=log_level, log_file=log_file, log_level_log_file=log_level)
if with_tty:
(stdout, _, _) = run_with_tty(args)
else:
(stdout, _, _) = run_without_tty(args)
assert log_file in os.listdir(log_dir)
log_file = os.path.join(log_dir, log_file)
with open(log_file, "r") as f:
log_lines = f.readlines()
check_logs(log_lines, regexes_required_lines, regexes_forbidden_lines, timed=True)
check_logs(stdout, [], regexes_required_lines, timed=True)
check_logs(stdout, [], regexes_required_lines, timed=False)
def check_logs(log_lines, regexes_required_lines, regexes_forbidden_lines, timed):
compiled_regexes_requires_lines = get_compiled_regexes(regexes_required_lines, timed)
compiled_regexes_forbidden_lines = get_compiled_regexes(regexes_forbidden_lines, timed)
for line in log_lines:
print(line)
for regex in compiled_regexes_requires_lines:
if not any(regex.match(line) for line in log_lines):
pytest.fail("Required pattern was not found in log lines: %s" % (regex.pattern,))
for regex in compiled_regexes_forbidden_lines:
if any(regex.match(line) for line in log_lines):
pytest.fail("Forbidden pattern found in log lines: %s" % (regex.pattern,))
def test_check_shutdown():
process = do_run([sys.executable, os.path.join(os.path.dirname(__file__), "miniapp.py")])
# wait for handler to be in place
try:
process.communicate(timeout=2)
except TimeoutExpired:
pass
process.send_signal(signal.SIGUSR1)
out, err, code = do_kill(process, killtime=3, termtime=1)
print(out, err)
assert code == 0
assert "----- Thread Dump ----" in out
assert "STOP" in out
assert "SHUTDOWN COMPLETE" in out
def test_check_bad_shutdown():
print([sys.executable, os.path.join(os.path.dirname(__file__), "miniapp.py"), "bad"])
process = do_run([sys.executable, os.path.join(os.path.dirname(__file__), "miniapp.py"), "bad"])
out, err, code = do_kill(process, killtime=5, termtime=2)
print(out, err)
assert code == 3
assert "----- Thread Dump ----" in out
assert "STOP" not in out
assert "SHUTDOWN COMPLETE" not in out
assert not err
def test_startup_failure(tmpdir, postgres_db, database_name):
(args, log_dir) = get_command(
tmpdir,
dbport=postgres_db.port,
dbname=database_name,
dbhost=postgres_db.host,
dbuser=postgres_db.user,
dbpass=<PASSWORD>,
server_extensions=["badplugin"],
)
pp = ":".join(sys.path)
# Add a bad module
extrapath = os.path.join(os.path.dirname(__file__), "data", "bad_module_path")
(stdout, stderr, code) = run_without_tty(args, env={"PYTHONPATH": pp + ":" + extrapath}, killtime=15, termtime=10)
assert "inmanta ERROR Server setup failed" in stdout
assert (
"inmanta.server.protocol.SliceStartupException: "
"Slice badplugin.badslice failed to start because: Too bad, this plugin is broken"
) in stdout
assert code == 4
def test_compiler_exception_output(snippetcompiler):
snippetcompiler.setup_for_snippet(
"""
entity Test:
number attr
end
implement Test using std::none
o = Test(attr="1234")
"""
)
output = (
"""Could not set attribute `attr` on instance `__config__::Test (instantiated at ./main.cf:8)` """
"""(reported in Construct(Test) (./main.cf:8))
caused by:
Invalid value '1234', expected Number (reported in Construct(Test) (./main.cf:8))
"""
)
def exec(*cmd):
process = do_run([sys.executable, "-m", "inmanta.app"] + list(cmd), cwd=snippetcompiler.project_dir)
out, err = process.communicate(timeout=30)
assert out.decode() == ""
assert err.decode() == output
exec("compile")
exec("export", "-J", "out.json")
@pytest.mark.timeout(15)
@pytest.mark.parametrize_any(
"cmd", [(["-X", "compile"]), (["compile", "-X"]), (["compile"]), (["export", "-X"]), (["-X", "export"]), (["export"])]
)
def test_minus_x_option(snippetcompiler, cmd):
snippetcompiler.setup_for_snippet(
"""
entity Test:
nuber attr
end
"""
)
process = do_run([sys.executable, "-m", "inmanta.app"] + cmd, cwd=snippetcompiler.project_dir)
out, err = process.communicate(timeout=30)
assert out.decode() == ""
if "-X" in cmd:
assert "inmanta.ast.TypeNotFoundException: could not find type nuber in namespace" in str(err)
else:
assert "inmanta.ast.TypeNotFoundException: could not find type nuber in namespace" not in str(err)
@pytest.mark.timeout(20)
def test_warning_config_dir_option_on_server_command(tmpdir):
non_existing_dir = os.path.join(tmpdir, "non_existing_dir")
assert not os.path.isdir(non_existing_dir)
(args, _) = get_command(tmpdir, stdout_log_level=3, config_dir=non_existing_dir)
(stdout, _, _) = run_without_tty(args)
stdout = "".join(stdout)
assert "Starting server endpoint" in stdout
assert f"Config directory {non_existing_dir} doesn't exist" in stdout
@pytest.mark.timeout(20)
def test_warning_min_c_option_file_doesnt_exist(snippetcompiler, tmpdir):
non_existing_config_file = os.path.join(tmpdir, "non_existing_config_file")
snippetcompiler.setup_for_snippet(
"""
entity Test:
number attr
end
"""
)
config_options = ["-c", non_existing_config_file, "-vvv"]
args = [sys.executable, "-m", "inmanta.app"] + config_options + ["compile"]
process = do_run(args, cwd=snippetcompiler.project_dir)
out, err = process.communicate(timeout=30)
assert process.returncode == 0
out = out.decode()
err = err.decode()
all_output = out + err
assert "Starting compile" in all_output
assert "Compile done" in all_output
assert f"Config file {non_existing_config_file} doesn't exist" in all_output
@pytest.mark.parametrize_any(
"with_tty, version_should_be_shown, regexes_required_lines, regexes_forbidden_lines",
[
(False, True, [r"Inmanta Service Orchestrator", r"Compiler version: ", r"Extensions:", r"\s*\* core:"], []),
(True, True, [r"Inmanta Service Orchestrator", r"Compiler version: ", r"Extensions:", r"\s*\* core:"], []),
(False, False, [], [r"Inmanta Service Orchestrator", r"Compiler version: ", r"Extensions:", r"\s*\* core:"]),
(True, False, [], [r"Inmanta Service Orchestrator", r"Compiler version: ", r"Extensions:", r"\s*\* core:"]),
],
)
@pytest.mark.timeout(20)
def test_version_argument_is_set(tmpdir, with_tty, version_should_be_shown, regexes_required_lines, regexes_forbidden_lines):
(args, log_dir) = get_command(tmpdir, version=version_should_be_shown)
if with_tty:
(stdout, _, _) = run_with_tty(args, killtime=15, termtime=10)
else:
(stdout, _, _) = run_without_tty(args, killtime=15, termtime=10)
assert len(stdout) != 0
check_logs(stdout, regexes_required_lines, regexes_forbidden_lines, False)
def test_init_project(tmpdir):
args = [sys.executable, "-m", "inmanta.app", "project", "init", "-n", "test-project", "-o", tmpdir, "--default"]
(stdout, stderr, return_code) = run_without_tty(args, killtime=15, termtime=10)
test_project_path = os.path.join(tmpdir, "test-project")
assert return_code == 0
assert os.path.exists(test_project_path)
(stdout, stderr, return_code) = run_without_tty(args, killtime=15, termtime=10)
assert return_code != 0
assert len(stderr) == 1
assert "already exists" in stderr[0]
| 1.242188 | 1 |
AtC_Beg_Con_021-030/ABC027/C.py | yosho-18/AtCoder | 0 | 6137 | n = int(input())
row = 0
for i in range(100):
if 2 ** i <= n <= 2 ** (i + 1) - 1:
row = i
break
def seki(k, n):
for _ in range(n):
k = 4 * k + 2
return k
k = 0
if row % 2 != 0:
k = 2
cri = seki(k, row // 2)
if n < cri:
print("Aoki")
else:
print("Takahashi")
else:
k = 1
cri = seki(k, row // 2)
if n < cri:
print("Takahashi")
else:
print("Aoki")
| 3.078125 | 3 |
extplugins/codvote.py | Desi-Boyz/cod4x-server-B3-configs | 1 | 6138 | # CoDVote plugin for BigBrotherBot(B3) (www.bigbrotherbot.net)
# Copyright (C) 2015 ph03n1x
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
# Changelog:
# v1.0.1 - Fixed vote remaining in progress if requirements for vote unmet.
# v1.0.2 - Added "!vote maps" to show what maps can be called into vote.
# - Fixed issue where person who called vote needed to vote as well. Changed to automatic yes vote.
__version__ = '1.0.2'
__author__ = 'ph03n1x'
import b3, threading
import b3.plugin
import b3.events
class CodvotePlugin(b3.plugin.Plugin):
adminPlugin = None
_vote = None # Stores which vote is currently in progress
_value = None # Stores the value of the vote
_votetime = 30 # Time before a vote will be canceled for not passing
_aVotes = {} # All votes allowed. Imported from "votes" section in config
_aMaps = {} # All vote allowed maps. Imported from "votemaps" section in config
_amt_yes = [] # Amount of players who voted yes. Checked against amount of players in game
    _amt_no = [] # Amount of players who voted no
_allplayers = [] # Amount of players in game
_mapRequested = None # Stores which map is being voted for
_kickRequested = None # Stores which player will be kicked if vote passed
_default_messages = {
'tovote': '^7Use ^2!yes ^7or ^2!no ^7 to vote',
'map': "Map vote in progress: Change map to ^3$s^7?",
'nextmap': "Next map vote in progress. Change next map to ^3$s^7?",
'kick': "Kick vote in progress: Kick ^2$s^7?",
'maprotate': "Rotate map vote in progress. Go to next map?",
'maprestart': "Maprestart vote in progress. Restart current map?",
'friendlyfire': "Friendlyfire vote in progress. Change friendlyfire mode to ^2$s^7?",
'killcam': "Killcam vote in progress. Turn killcam ^2$s^7?",
'scorelimit': "Scorelimit vote in progress. Change score limit to ^2$s^7?",
'timelimit': "Timelimit vote in progress. Change time limit to ^2$s^7?",
'roundlength': "Round length vote in progress. Change round length to ^2$s^7?",
'roundlimit': "Round limit vote in progress. Change round limit to ^2$s^7?",
}
def onStartup(self):
self.adminPlugin = self.console.getPlugin('admin')
if not self.adminPlugin:
self.error('Could not find admin plugin')
return
# Register commands
if 'commands' in self.config.sections():
for cmd in self.config.options('commands'):
level = self.config.get('commands', cmd)
sp = cmd.split('-')
alias = None
if len(sp) == 2:
cmd, alias = sp
func = self.getCmd(cmd)
if func:
self.adminPlugin.registerCommand(self, cmd, level, func, alias)
# Re-deploy commands for consideration of this plugin
self.adminPlugin.registerCommand(self, 'nextmap', 1, self.cmd_nextmap, 'nm')
self.adminPlugin.registerCommand(self, 'maprotate', 20, self.cmd_maprotate, None)
self.adminPlugin.registerCommand(self, 'allvotes', 1, self.cmd_allvotes, None)
# Register events
self.registerEvent('EVT_GAME_EXIT', self.onGameEnd)
def onLoadConfig(self):
# Load settings section
try:
self._votetime = self.config.getint('settings', 'votetime')
except:
self.debug('Unable to get [votetime] from settings. Using default: %s' % self._votetime)
# Load votemaps section
if self.config.has_section('votemaps'):
for (mapname, consolename) in self.config.items('votemaps'):
if mapname:
self._aMaps[mapname] = consolename
self.debug('Successfully entered maps for voting: %s' % self._aMaps)
# Load votes section
if self.config.has_section('votes'):
adLvl = {'guest': 0,
'user': 1,
'reg': 2,
'mod': 20,
'admin': 40,
'fulladmin': 60,
'senioradmin': 80,
'superadmin': 100}
for (entry, value) in self.config.items('votes'):
try:
value = int(value)
self._aVotes[entry.lower()] = value
except ValueError:
self._aVotes[entry.lower()] = adLvl[value]
self.debug('Allowed votes are: %s' % self._aVotes)
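    # For reference, onLoadConfig above expects a plugin config with sections like the
    # following (illustrative names and values, shown .ini-style; B3 also accepts the
    # equivalent XML layout):
    #   [settings]
    #   votetime: 30
    #   [votemaps]
    #   crossfire: mp_crossfire        ; friendly map name -> console map name
    #   crash: mp_crash
    #   [votes]
    #   map: user                      ; vote type -> minimum group keyword or numeric level
    #   kick: mod
    #   maprestart: reg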
def getCmd(self, cmd):
cmd = 'cmd_%s' % cmd
if hasattr(self, cmd):
func = getattr(self, cmd)
return func
return None
######################### VOTE TIMING ##############################
def voteTimer(self):
t1 = threading.Timer((self._votetime - 5), self.voteMessage)
t1.start()
def voteMessage(self):
if self._vote:
self.console.say('^110 seconds until vote end!')
t2 = threading.Timer(10, self.denyVote)
t2.start()
######################### MAP HANDLING ##############################
def _search(self, maplist, partial):
a = []
for mapname, consolename in maplist.iteritems():
if partial in mapname:
a.append(mapname)
elif partial in consolename:
a.append(mapname)
return a
def mapvote(self, client, wantedMap):
# Find if map is in allowed list
match = self._search(self._aMaps, wantedMap)
if len(match) == 1:
self._mapRequested = match[0]
self._value = match[0]
return True
elif len(match) > 1:
match = (', ').join(match)
client.message('^1ABORTED!^7Multiple matches: %s' % match)
return False
elif len(match) == 0:
client.message('^1ABORTED!^7No maps matching your request')
return False
############### NEXTMAP FUNCTIONING ################
def onGameEnd(self, event):
"""
        Handle EVT_GAME_EXIT
"""
if self._mapRequested:
self.confirmMap()
self._mapRequested = None
############### CONFIRM VOTES ######################
def confirmVote(self):
self.console.say('^3Vote passed!^7')
if self._vote == 'map':
self.confirmMap()
elif self._vote == 'nextmap':
self.debug('nextmap vote passed. Params already stored')
elif self._vote == 'kick':
self.confirmKick()
elif self._vote == 'maprotate':
if self._mapRequested:
self.confirmMap()
else:
self.console.rotateMap()
elif self._vote == 'maprestart':
self.confirmMaprestart()
elif self._vote == 'friendlyfire':
self.confirmFriendlyFire()
elif self._vote == 'killcam':
self.confirmKillCam()
elif self._vote == 'scorelimit':
self.confirmScoreLimit()
elif self._vote == 'timelimit':
self.confirmTimeLimit()
elif self._vote == 'roundlength':
self.confirmRoundLength()
elif self._vote == 'roundlimit':
self.confirmRoundLimit()
else:
self.error('Unable to commit. Vote: %s, Value: %s' % (self._vote, self._value))
self._vote = None
self._value = None
self._amt_no = []
self._amt_yes = []
self._allplayers = []
def denyVote(self):
if self._vote:
self.console.say('^3Vote failed!')
self._vote = None
self._value = None
self._amt_no = []
self._amt_yes = []
self._allplayers = []
def confirmKick(self):
# Note - to kick someone we need: client.kick(reason, keyword, admin, silent=True/False, data)
s = self._kickRequested
self.debug('Kick vote passed. Kicking %s' % s.name)
s.kick('Voted against', '', None, True, '')
self._kickRequested = None
def confirmMap(self):
# This will cycle to next map when needed.
self.console.write('map %s' % self._aMaps[self._mapRequested])
self._mapRequested = None
def confirmMaprestart(self):
# This will restart the current map
self.console.write('fast_restart')
def confirmFriendlyFire(self):
# This will toggle friendly fire on and off
setting = self._value
if not isinstance(setting, int):
if self._value == 'on':
setting = 1
elif self._value == 'off':
setting = 0
else:
self.debug('Unknown wanted setting for Friendlyfire. Toggling to next mode')
now = self.console.getCvar('scr_team_fftype').getInt()
if now >= 1:
setting = 0
elif now == 0:
setting = 1
self.console.setCvar('scr_team_fftype', int(setting))
def confirmKillCam(self):
# rcon for killcam: scr_game_allowkillcam - 0 or 1
setting = self._value
if self._value == 'on':
setting = 1
elif self._value == 'off':
setting = 0
if not isinstance(setting, int):
try:
setting = int(setting)
except ValueError:
now = self.console.getCvar('scr_game_allowkillcam').getInt()
self.debug('Setting being voted for is not valid. Toggling to next mode. Killcam currently: %s' % now)
if now == 0:
setting = 1
else:
setting = 0
self.console.setCvar('scr_game_allowkillcam', int(setting))
def confirmScoreLimit(self):
# CVAR to write is scr_<gametype>_scorelimit <number>
setting = self._value
gt = self.getGameType()
if not isinstance(setting, int):
try:
setting = int(setting)
except ValueError:
self.debug('ERROR: Could not set new scorelimit. Voted value is not integer')
return
cparams = 'scr_' + gt + '_scorelimit'
self.console.setCvar(cparams, setting)
def confirmTimeLimit(self):
setting = self._value
gt = self.getGameType()
if not isinstance(setting, int):
try:
setting = int(setting)
except ValueError:
self.debug('ERROR: Could not set new timelimit. Voted value is not integer')
return
cparams = 'scr_' + gt + '_timelimit'
self.console.setCvar(cparams, setting)
def confirmRoundLength(self):
setting = self._value
amodes = ['ctf', 'sd', 're', 'bas', 'dom']
gt = self.getGameType()
if not isinstance(setting, int):
try:
setting = int(setting)
except ValueError:
self.debug('ERROR: Could not set new round length. Voted value is not integer')
return
if gt in amodes:
cparams = 'scr_' + gt + '_roundlength'
self.console.setCvar(cparams, setting)
def confirmRoundLimit(self):
setting = self._value
amodes = ['ctf', 'sd', 're', 'bas', 'dom']
gt = self.getGameType()
if not isinstance(setting, int):
try:
setting = int(setting)
except ValueError:
self.debug('Could not set new round limit. Voted value is not integer')
return
if gt in amodes:
cparams = 'scr_' + gt + '_roundlimit'
self.console.setCvar(cparams, setting)
else:
self.debug('Could not set round limit as gametype do not have rounds')
def getGameType(self):
gametype = self.console.getCvar('g_gametype').getString()
if gametype:
return gametype
else:
self.debug('Error getting gametype. Response is %s' % gametype)
return False
def sendBroadcast(self):
        # This will broadcast the vote message to the server.
a = self._value
if a == 'maprestart' or a == 'maprotate':
self.console.say(self.getMessage(self._vote))
        else:
param = {'s': a}
self.console.say(self.getMessage(self._vote, param))
self.console.say(self.getMessage('tovote'))
def aquireCmdLock2(self, cmd, client, delay, all=True):
if client.maxLevel >= 20:
return True
elif cmd.time + 5 <= self.console.time():
return True
else:
return False
def checkIfAllowed(self, client, voteType):
if client.maxLevel >= self._aVotes[voteType]:
return True
else:
return False
#################################################################################
# COMMANDS #
#################################################################################
def cmd_vote(self, data, client, cmd=None):
"""\
!vote <setting> <value> - vote to change setting or cvar on server.
"""
# Check if vote already in progress
if self._vote:
client.message('^1ERROR^7: Vote already in progress')
return
# Check if we have enough data for vote
data = data.split()
if len(data) == 1 and data[0] == 'maprotate' or len(data) == 1 and data[0] == 'maprestart' or len(data) == 1 and data[0] == 'maps':
self._vote = data[0]
self._value = data[0]
elif len(data) == 2:
type = data[0]
value = data[1]
self._vote = type
self._value = value
else:
client.message('^1ERROR^7: Invalid usage. Type ^2!help vote ^7for info')
return
# Check if player is asking what maps can be voted on
if self._vote == 'maps':
v1 = self.checkIfAllowed(client, 'map')
v2 = self.checkIfAllowed(client, 'nextmap')
if v1 or v2:
cmd.sayLoudOrPM(client, 'Vote enabled maps: ^2%s' % (('^7, ^2').join(self._aMaps.keys())))
self._vote = None
self._value = None
return
else:
client.message('^2You do not have permission to call map votes')
self._vote = None
self._value = None
return
# Check if enough players in game to vote and store present players. Only players present at vote call can vote
playersInGame = 0
self._allplayers = []
for c in self.console.clients.getList():
if c.team != b3.TEAM_SPEC:
playersInGame += 1
self._allplayers.insert(0, c)
if playersInGame <= 1 and client.maxLevel < 100:
client.message('^1ABORT^7: Not enough players in game to vote.')
self._vote = None
return
# Check if type of vote is allowed
if self._vote not in self._aVotes:
client.message('Vote type not allowed. Use ^2!allvotes ^7for available votes.')
self._vote = None
return
# Check if player has permission to call vote type
v = self.checkIfAllowed(client, self._vote)
if not v:
client.message('You do not have permission to call this vote')
self._vote = None
return
# Get further info for proper processing
if self._vote == 'map' or self._vote == 'nextmap':
q = self.mapvote(client, self._value)
if not q:
self.debug('Vote aborted: Cannot vote for maps. mapvote turned out false')
self._vote = None
return
if self._vote == 'kick':
self._kickRequested = self.adminPlugin.findClientPrompt(self._value, client)
if self._kickRequested:
if self._kickRequested.maxLevel >= 20:
client.message('^1ABORTED^7: Cannot vote to kick admin!')
self._vote = None
self._value = None
self._kickRequested = None
return
self._value = self._kickRequested.name
else:
self.debug('could not get the person to kick')
self._vote = None
self._value = None
self._kickRequested = None
return
# Seems like vote is ok. Broadcast to server
self.sendBroadcast()
# Start timer
self.voteTimer()
# Set person who called vote as yes vote
self._amt_yes.insert(0, client)
if len(self._amt_yes) > (len(self._allplayers) / 2):
self.confirmVote()
def cmd_allvotes(self, data, client, cmd=None):
"""\
Show all the votes you are allowed to call
"""
allowed = []
for k in self._aVotes.keys():
if client.maxLevel >= self._aVotes[k]:
allowed.insert(0, k)
if len(allowed) > 0:
p = sorted(allowed)
x = (', ').join(p)
client.message('Allowed votes are: %s' % x)
elif len(allowed) == 0:
client.message('You are not allowed to call any votes')
def cmd_yes(self, data, client, cmd=None):
"""\
Vote yes to the vote in progress
"""
# Check if there is a vote in progress
if not self._vote:
client.message('No vote in progress')
return
# Check if player is allowed to vote
if client not in self._allplayers:
client.message('Sorry, you cannot enter current vote')
return
# Check if the player already voted. If not, register vote
if client in self._amt_yes or client in self._amt_no:
client.message('Are you drunk? You already voted!')
return
        else:
self._amt_yes.insert(0, client)
# Let player know that vote is registered
client.message('^3Your vote has been entered')
# Check if majority of players voted already
vYes = len(self._amt_yes)
vPass = len(self._allplayers) / 2
if vYes > vPass:
self.confirmVote()
def cmd_no(self, data, client=None, cmd=None):
"""\
Vote NO to the current vote
"""
# Check if there is a vote in progress
if not self._vote:
client.message('No vote in progress')
return
# Check if player is allowed to vote
if client not in self._allplayers:
client.message('Sorry, you cannot enter current vote')
return
# Check if the player already voted
if client in self._amt_yes or client in self._amt_no:
client.message('Are you drunk? You already voted!')
return
        else:
self._amt_no.insert(0, client)
# Let player know that vote is registered
client.message('^3Your vote has been entered')
# Check if majority of players voted
vNo = len(self._amt_no)
vPass = len(self._allplayers) / 2
if vNo > vPass:
self.denyVote()
def cmd_nextmap(self, data, client=None, cmd=None):
"""\
- list the next map in rotation
"""
if not self.aquireCmdLock2(cmd, client, 60, True):
client.message('^7Do not spam commands')
return
if self._mapRequested:
cmd.sayLoudOrPM(client, '^7Next Map: ^2%s' % self._mapRequested.title())
return
mapname = self.console.getNextMap()
if mapname:
cmd.sayLoudOrPM(client, '^7Next Map: ^2%s' % mapname)
else:
client.message('^1Error:^7 could not get map list')
def cmd_maprotate(self, data, client, cmd=None):
"""\
Cycle to next map in rotation
"""
if self._mapRequested:
self.confirmMap()
else:
self.console.rotateMap()
def cmd_veto(self, data, client, cmd=None):
"""\
Cancel a vote in progress
"""
if self._vote:
client.message('^3Vote canceled')
self.denyVote()
elif not self._vote:
client.message('^3No vote in progress')
| 2.03125 | 2 |
utils.py | bianan/cfl | 4 | 6139 | <gh_stars>1-10
# Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility functions for manipulating variables in Federated personalization."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
TRAIN_NAME = "Train"
VALIDATION_NAME = "Validation"
TEST_NAME = "Test"
LOSS_NAME = "loss"
LOSS_SUMMARY_NAME = "perplexity"
# Vars type.
VARS_TYPE_ALL = "all"
VARS_TYPE_SHARED = "shared"
VARS_TYPE_PERSONAL = "personal"
def get_train_name_scope(var_scope):
return "/".join((var_scope, TRAIN_NAME))
def get_validation_name_scope(var_scope):
return "/".join((var_scope, VALIDATION_NAME))
def get_test_name_scope(var_scope):
return "/".join((var_scope, TEST_NAME))
def get_model_name_scope(var_scope):
return "/".join((var_scope, "Model"))
def get_update_name_scope(var_scope):
return "/".join((var_scope, "Update"))
def get_var_dict(vars_):
"""Gets a dict of var base_name (e.g. 'w') to the variable."""
var_dict = {}
for v in vars_:
var_base_name = get_base_name(v)
var_dict[var_base_name] = v
return var_dict
def get_var_value_ops(var_dict):
return {k: v.value() for k, v in var_dict.items()}
def get_base_name(var):
return var.name.split("/")[-1].split(":")[0]
def get_update_name(var, var_scope):
var_base_name = get_base_name(var)
var_update_name = "update_%s_%s" % (var_scope, var_base_name)
return var_update_name
def get_update_placeholder_name(var):
var_base_name = get_base_name(var)
placeholder_name = "placeholder_%s" % var_base_name
return placeholder_name
def generate_update_ops(vars_):
"""Generates update ops and placeholders.
For each var, it generates a placeholder to feed in the new values.
Then it takes the mean of the inputs along dimension 0.
Args:
vars_: Vars for which the update ops will be generated.
Returns:
update_ops: A list of update ops.
dict_update_placeholders: A dict of var base name to its update-placeholder.
"""
update_ops = []
dict_update_placeholders = {}
for v in vars_:
# For every var in the scope, add a placeholder to feed in the new values.
# The placeholder may need to hold multiple values, this happens
# when updating the server from many clients.
var_in_shape = [None] + v.shape.as_list()
var_in_name = get_update_placeholder_name(v)
var_in = tf.placeholder(v.dtype, shape=var_in_shape, name=var_in_name)
var_in_mean = tf.reduce_mean(var_in, 0)
update_op = v.assign(var_in_mean)
update_ops.append(update_op)
dict_update_placeholders[get_base_name(v)] = var_in
return update_ops, dict_update_placeholders
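# Hedged usage sketch (added example, not part of the original module): shows
# how the ops from generate_update_ops are driven through a feed_dict. The
# helper name, the shape-[2] variable and the two fake client updates are
# assumptions made up for illustration.
def _example_update_round(sess, shared_var):
  """Averages two fake client updates into `shared_var` (illustration only).
  Assumes `shared_var` is a tf.Variable of shape [2] and `sess` is an active
  tf.Session with variables already initialized.
  """
  update_ops, placeholders = generate_update_ops([shared_var])
  # Two "clients" report values; the assign op stores their element-wise mean.
  feed = {placeholders[get_base_name(shared_var)]: [[1.0, 2.0], [3.0, 4.0]]}
  sess.run(update_ops, feed_dict=feed)  # shared_var becomes [2.0, 3.0]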
def print_vars_on_clients(clients, sess):
for c in clients.values():
print("client %d:" % c.id)
print(sess.run(c.read_ops_all_vars))
def add_prefix(prefix, name):
"""Adds prefix to name."""
return "/".join((prefix, name))
def add_suffix(suffix, name):
"""Adds subfix to name."""
return "/".join((name, suffix))
def get_attribute_dict(class_instance):
"""Gets a dict of attributeds of a class instance."""
# first start by grabbing the Class items
attribute_dict = dict((x, y)
for x, y in class_instance.__class__.__dict__.items()
if x[:2] != "__")
# then update the class items with the instance items
attribute_dict.update(class_instance.__dict__)
return attribute_dict
| 2.0625 | 2 |
unittest/scripts/py_devapi/scripts/mysqlx_collection_remove.py | mueller/mysql-shell | 119 | 6140 | # Assumptions: validate_crud_functions available
# Assumes __uripwd is defined as <user>:<pwd>@<host>:<plugin_port>
from __future__ import print_function
from mysqlsh import mysqlx
mySession = mysqlx.get_session(__uripwd)
ensure_schema_does_not_exist(mySession, 'js_shell_test')
schema = mySession.create_schema('js_shell_test')
# Creates a test collection and inserts data into it
collection = schema.create_collection('collection1')
result = collection.add({"_id": "3C514FF38144B714E7119BCF48B4CA01", "name": 'jack', "age": 17, "gender": 'male'}).execute()
result = collection.add({"_id": "3C514FF38144B714E7119BCF48B4CA02", "name": 'adam', "age": 15, "gender": 'male'}).execute()
result = collection.add({"_id": "3C514FF38144B714E7119BCF48B4CA03", "name": 'brian', "age": 14, "gender": 'male'}).execute()
result = collection.add({"_id": "3C514FF38144B714E7119BCF48B4CA04", "name": 'alma', "age": 13, "gender": 'female'}).execute()
result = collection.add({"_id": "3C514FF38144B714E7119BCF48B4CA05", "name": 'carol', "age": 14, "gender": 'female'}).execute()
result = collection.add({"_id": "3C514FF38144B714E7119BCF48B4CA06", "name": 'donna', "age": 16, "gender": 'female'}).execute()
result = collection.add({"_id": "3C514FF38144B714E7119BCF48B4CA07", "name": 'angel', "age": 14, "gender": 'male'}).execute()
# ------------------------------------------------
# collection.remove Unit Testing: Dynamic Behavior
# ------------------------------------------------
#@ CollectionRemove: valid operations after remove
crud = collection.remove('some_condition')
validate_crud_functions(crud, ['sort', 'limit', 'bind', 'execute'])
#@ CollectionRemove: valid operations after sort
crud = crud.sort(['name'])
validate_crud_functions(crud, ['limit', 'bind', 'execute'])
#@ CollectionRemove: valid operations after limit
crud = crud.limit(1)
validate_crud_functions(crud, ['bind', 'execute'])
#@ CollectionRemove: valid operations after bind
crud = collection.remove('name = :data').bind('data', 'donna')
validate_crud_functions(crud, ['bind', 'execute'])
#@ CollectionRemove: valid operations after execute
result = crud.execute()
validate_crud_functions(crud, ['limit', 'bind', 'execute'])
#@ Reusing CRUD with binding
print('Deleted donna:', result.affected_items_count, '\n')
result=crud.bind('data', 'alma').execute()
print('Deleted alma:', result.affected_items_count, '\n')
# ----------------------------------------------
# collection.remove Unit Testing: Error Conditions
# ----------------------------------------------
#@# CollectionRemove: Error conditions on remove
crud = collection.remove()
crud = collection.remove(' ')
crud = collection.remove(5)
crud = collection.remove('test = "2')
#@# CollectionRemove: Error conditions sort
crud = collection.remove('some_condition').sort()
crud = collection.remove('some_condition').sort(5)
crud = collection.remove('some_condition').sort([])
crud = collection.remove('some_condition').sort(['name', 5])
crud = collection.remove('some_condition').sort('name', 5)
#@# CollectionRemove: Error conditions on limit
crud = collection.remove('some_condition').limit()
crud = collection.remove('some_condition').limit('')
#@# CollectionRemove: Error conditions on bind
crud = collection.remove('name = :data and age > :years').bind()
crud = collection.remove('name = :data and age > :years').bind(5, 5)
crud = collection.remove('name = :data and age > :years').bind('another', 5)
#@# CollectionRemove: Error conditions on execute
crud = collection.remove('name = :data and age > :years').execute()
crud = collection.remove('name = :data and age > :years').bind('years', 5).execute()
# ---------------------------------------
# collection.remove Unit Testing: Execution
# ---------------------------------------
#@ CollectionRemove: remove under condition
//! [CollectionRemove: remove under condition]
result = collection.remove('age = 15').execute()
print('Affected Rows:', result.affected_items_count, '\n')
docs = collection.find().execute().fetch_all()
print('Records Left:', len(docs), '\n')
//! [CollectionRemove: remove under condition]
#@ CollectionRemove: remove with binding
//! [CollectionRemove: remove with binding]
result = collection.remove('gender = :heorshe').limit(2).bind('heorshe', 'male').execute()
print('Affected Rows:', result.affected_items_count, '\n')
//! [CollectionRemove: remove with binding]
docs = collection.find().execute().fetch_all()
print('Records Left:', len(docs), '\n')
#@ CollectionRemove: full remove
//! [CollectionRemove: full remove]
result = collection.remove('1').execute()
print('Affected Rows:', result.affected_items_count, '\n')
docs = collection.find().execute().fetch_all()
print('Records Left:', len(docs), '\n')
//! [CollectionRemove: full remove]
# Cleanup
mySession.drop_schema('js_shell_test')
mySession.close()
| 2.171875 | 2 |
indian-flag.py | aditya270520/indian-flag | 0 | 6141 | <gh_stars>0
import turtle
turtle.bgcolor('black')
wn=turtle.Screen()
tr=turtle.Turtle()
move=1
tr.speed("fastest")
for i in range (360):
tr.write("ADITYA",'false','center',font=('Showcard gothic',50))
tr.penup()
tr.goto(-200,100)
tr.pendown()
tr.color("orange")
tr.right(move)
tr.forward(100)
tr.penup()
tr.color("white")
tr.pendown()
tr.right(30)
tr.forward(60)
tr.pendown()
tr.color("light green")
tr.left(10)
tr.forward(50)
tr.right(70)
tr.penup()
tr.pendown()
tr.color('light blue')
tr.forward(50)
tr.color('light green')
tr.pu()
tr.pd()
tr.color("light blue")
tr.forward(100)
tr.color('brown')
tr.forward(200)
tr.pu()
tr.pd()
tr.color('light green')
tr.circle(2)
tr.color('light blue')
tr.circle(4)
tr.pu()
tr.fd(20)
tr.pd()
tr.circle(6)
tr.pu()
tr.fd(40)
tr.pd()
tr.circle(8)
tr.pu()
tr.fd(80)
tr.pd()
tr.circle(10)
tr.pu()
tr.fd(120)
tr.pd()
tr.circle(20)
tr.color('yellow')
tr.circle(10)
tr.pu()
tr.pd()
tr.color('white')
tr.forward(150)
tr.color('red')
tr.fd(50)
tr.color ('blue')
tr.begin_fill()
tr.penup()
tr.home()
move=move+1
tr.penup()
tr.forward(50)
turtle.done() | 3.109375 | 3 |
leaf/rbac/model/__init__.py | guiqiqi/leaf | 119 | 6142 | """Users, groups and related authentication database models"""
from .group import Group
from .user import User
from .user import UserIndex
from .auth import Authentication
from .accesspoint import AccessPoint
| 1.4375 | 1 |
programacao basica/7.py | m-brito/Neps-Academy | 0 | 6143 | bino = int(input())
cino = int(input())
if (bino+cino)%2==0:
print("Bino")
else:
print("Cino")
| 3.8125 | 4 |
update_readme.py | CalmScout/LeetCode | 0 | 6144 | """
Script updates `README.md` with respect to files in the `<language>/easy` and `<language>/medium` folders.
"""
import os
curr_dir = os.path.dirname(__file__)
with open(os.path.join(curr_dir, "README.md"), 'w') as readme:
readme.write("# LeetCode\nDeliberate practice in coding.\n")
langs = [l for l in os.listdir(curr_dir) if os.path.isdir(os.path.join(curr_dir, l)) and l[0] != '.']
for lang in langs:
readme.write("## {}\n".format(lang))
readme.write("### Easy\n")
easy = sorted(os.listdir(f"{curr_dir}/{lang}/easy"))
easy = [x.split("_")[0] for x in easy]
easy_solved = ""
for el in easy:
easy_solved += "{}, ".format(el)
readme.write(easy_solved[:-2] + "\n")
readme.write("### Medium\n")
medium = sorted(os.listdir(f"{curr_dir}/{lang}/medium"))
medium = [x.split("_")[0] for x in medium]
medium_solved = ""
for el in medium:
medium_solved += "{}, ".format(el)
readme.write(medium_solved[:-2] + '\n')
| 2.703125 | 3 |
scripts/biotimesql.py | Jay-Iam/retriever | 0 | 6145 | # -*- coding: utf-8 -*-
#retriever
import csv
from pkg_resources import parse_version
from retriever.lib.models import Table
from retriever.lib.templates import Script
try:
from retriever.lib.defaults import VERSION
try:
from retriever.lib.tools import open_fr, open_fw, open_csvw
except ImportError:
from retriever.lib.scripts import open_fr, open_fw
except ImportError:
from retriever import open_fr, open_fw, VERSION
class main(Script):
def __init__(self, **kwargs):
Script.__init__(self, **kwargs)
self.title = "Commercial Fisheries Monthly Trade Data by Product, Country/Association"
self.name = "biotimesql"
self.retriever_minimum_version = "2.2.0"
self.urls = {
"sql_file": "https://zenodo.org/record/2602708/files/BioTIMESQL02_04_2018.sql?download=1",
}
self.version = "1.0.1"
self.ref = "https://zenodo.org/record/1095628#.WskN7dPwYyn"
self.citation = "<NAME>, <NAME>, <NAME>, et al. BioTIME: A database of biodiversity time series for the Anthropocene. Global Ecology & Biogeography. 2018; 00:1 - 26. https://doi.org/10.1111/geb.12729."
self.description = "The BioTIME database has species identities and abundances in ecological assemblages through time."
self.keywords = ["Time series", "Anthropocene", "Global"]
self.licenses = [{"name": "CC BY 4.0"}]
self.encoding = "latin1"
if parse_version(VERSION) <= parse_version("2.0.0"):
self.shortname = self.name
self.name = self.title
self.tags = self.keywords
def download(self, engine=None, debug=False):
Script.download(self, engine, debug)
engine = self.engine
original_sql_file = "BioTIMESQL02_04_2018.sql"
engine.download_file(self.urls["sql_file"], original_sql_file)
sql_data = open_fr(self.engine.format_filename(original_sql_file))
set_open = False
csv_writer = None
csv_file = None
table_name = None
NULL = None
for line in sql_data:
table_indicator = "-- Table structure for table "
if line.startswith(table_indicator):
st = line[len(table_indicator):].replace("`", "")
table_name = st.strip()
current_file_process = table_name
current_file_open = current_file_process
if set_open and not current_file_process == current_file_open:
csv_file.close()
set_open = False
else:
out_file = "{name}.csv".format(name=table_name)
csv_file = open_fw(engine.format_filename(out_file))
csv_writer = csv.writer(csv_file, quoting=csv.QUOTE_ALL)
set_open = True
if line.startswith("INSERT INTO `{table_name}`".format(table_name=table_name)):
row_val = line[line.index("VALUES (") + 8:-3]
table_rows = row_val.replace("\r\n","").split("),(")
for i_row in table_rows:
v = eval('[' + str(i_row) + ']')
csv_writer.writerows([v])
if csv_file:
csv_file.close()
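        # Illustrative mapping (assumed dump content, not real BioTIME rows):
        # a line such as
        #   INSERT INTO `biomass` VALUES (1,'Weight'),(2,'Cover');
        # is split on "),(" above and lands in biomass.csv as two quoted rows:
        #   "1","Weight"
        #   "2","Cover"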
# Create abundance table
table = Table("ID_ABUNDANCE", delimiter=",", header_rows=0, contains_pk=False)
table.columns = [
("ID_ABUNDANCE", ("int",)),
("ABUNDANCE_TYPE", ("char", "100")),
]
engine.table = table
engine.create_table()
engine.insert_data_from_file(engine.format_filename("abundance.csv"))
# Create allrawdata table
table = Table("allrawdata", delimiter=",", header_rows=0, contains_pk=False)
table.columns = [
("ID_ALL_RAW_DATA", ("int",)),
("ABUNDANCE", ("double",)),
("BIOMASS", ("double",)),
("ID_SPECIES", ("int",)),
("SAMPLE_DESC", ("char", 200)),
("PLOT", ("char", 150)),
("LATITUDE", ("double",)),
("LONGITUDE", ("double",)),
("DEPTH", ("double",)),
("DAY", ("int",)),
("MONTH", ("int",)),
("YEAR", ("int",)),
("STUDY_ID", ("int",)),
]
engine.table = table
engine.create_table()
engine.insert_data_from_file(engine.format_filename("allrawdata.csv"))
# Create biomass table
table = Table("biomass", delimiter=",", header_rows=0, contains_pk=False)
table.columns = [("ID_BIOMASS", ("int",)), ("BIOMASS_TYPE", ("char", "100"))]
engine.table = table
engine.create_table()
engine.insert_data_from_file(engine.format_filename("biomass.csv"))
# Create citation1 table
table = Table("citation1", delimiter=",", header_rows=0, contains_pk=False)
table.columns = [
("ID_CITATION1", ("int",)),
("STUDY_ID", ("int",)),
("CITATION_LINE", ("char",)),
]
engine.table = table
engine.create_table()
engine.insert_data_from_file(engine.format_filename("citation1.csv"))
# Create contacts table
table = Table("contacts", delimiter=",", header_rows=0, contains_pk=False)
table.columns = [
("ID_CONTACTS", ("int",)),
("STUDY_ID", ("int",)),
("CONTACT_1", ("char", 500)),
("CONTACT_2", ("char", 500)),
("CONT_1_MAIL", ("char", 60)),
("CONT_2_MAIL", ("char", 60)),
("LICENSE", ("char", 200)),
("WEB_LINK", ("char", 200)),
("DATA_SOURCE", ("char", 250)),
]
engine.table = table
engine.create_table()
engine.insert_data_from_file(engine.format_filename("contacts.csv"))
# Create countries table
table = Table("countries", delimiter=",", header_rows=0, contains_pk=False)
table.columns = [("COUNT_ID", ("int",)), ("COUNTRY_NAME", ("char", 200))]
engine.table = table
engine.create_table()
engine.insert_data_from_file(engine.format_filename("countries.csv"))
# Create curation table
table = Table("curation", delimiter=",", header_rows=0, contains_pk=False)
table.columns = [
("ID_CURATION", ("int",)),
("STUDY_ID", ("int",)),
("LINK_ID", ("int",)),
("COMMENTS", ("char",)),
("DATE_STUDY_ADDED", ("char", 50)),
]
engine.table = table
engine.create_table()
engine.insert_data_from_file(engine.format_filename("curation.csv"))
# Create datasets table
table = Table("datasets", delimiter=",", header_rows=0, contains_pk=False)
table.columns = [
("ID_DATASETS", ("int",)),
("STUDY_ID", ("int",)),
("TAXA", ("char", 50)),
("ORGANISMS", ("char", 200)),
("TITLE", ("char",800)),
("AB_BIO", ("char", 2)),
("HAS_PLOT", ("char", 10)),
("DATA_POINTS", ("char",)),
("START_YEAR", ("char",)),
("END_YEAR", ("char",)),
("CENT_LAT", ("double",)),
("CENT_LONG", ("double",)),
("NUMBER_OF_SPECIES", ("char",)),
("NUMBER_OF_SAMPLES", ("char",)),
("NUMBER_LAT_LONG", ("char",)),
("TOTAL", ("char",)),
("GRAIN_SIZE_TEXT", ("char",)),
("GRAIN_SQ_KM", ("double",)),
("AREA_SQ_KM", ("double",)),
("AB_TYPE", ("char", )),
("BIO_TYPE", ("char",)),
("SAMPLE_TYPE", ("char",)),
]
engine.table = table
engine.create_table()
engine.insert_data_from_file(engine.format_filename("datasets.csv"))
# Create downloads table
table = Table("downloads", delimiter=",", header_rows=0, contains_pk=False)
table.columns = [
("D_ID", ("int",)),
("STUDY", ("char", 25)),
("NAME", ("char", 150)),
("EMAIL", ("char", 150)),
("COUNTRY", ("char", 200)),
("ROLE", ("char", 150)),
("PURPOSE", ("char", 500)),
("LOCATION", ("char", 250)),
("DATE_STAMP", ("char",)),
]
engine.table = table
engine.create_table()
engine.insert_data_from_file(engine.format_filename("downloads.csv"))
# Create methods table
table = Table("methods", delimiter=",", header_rows=0, contains_pk=False)
table.columns = [
("ID_METHODS", ("int",)),
("STUDY_ID", ("int",)),
("METHODS", ("char",)),
("SUMMARY_METHODS", ("char", 500)),
]
engine.table = table
engine.create_table()
engine.insert_data_from_file(engine.format_filename("methods.csv"))
# Create sample table
table = Table("sample", delimiter=",", header_rows=0, contains_pk=False)
table.columns = [
("ID_SAMPLE", ("int",)),
("ID_TREAT", ("int",)),
("SAMPLE_DESC_NAME", ("char", 200)),
]
engine.table = table
engine.create_table()
engine.insert_data_from_file(engine.format_filename("sample.csv"))
# Create site table
table = Table("site", delimiter=",", header_rows=0, contains_pk=False)
table.columns = [
("ID_SITE", ("int",)),
("STUDY_ID", ("int",)),
("REALM", ("char", 11)),
("CLIMATE", ("char", 20)),
("GENERAL_TREAT", ("char", 200)),
("TREATMENT", ("char", 200)),
("TREAT_COMMENTS", ("char", 250)),
("TREAT_DATE", ("char", 100)),
("CEN_LATITUDE", ("double",)),
("CEN_LONGITUDE", ("double",)),
("HABITAT", ("char", 100)),
("PROTECTED_AREA", ("char", 50)),
("AREA", ("double",)),
("BIOME_MAP", ("char", 500))
]
engine.table = table
engine.create_table()
engine.insert_data_from_file(engine.format_filename("site.csv"))
# Create species table
table = Table("species", delimiter=",", header_rows=0, contains_pk=False)
table.columns = [
("ID_SPECIES", ("int",)),
("GENUS", ("char", 100)),
("SPECIES", ("char", 100)),
("GENUS_SPECIES", ("char", 100))
]
engine.table = table
engine.create_table()
engine.insert_data_from_file(engine.format_filename("species.csv"))
SCRIPT = main()
| 2.25 | 2 |
alipay/aop/api/domain/KbAdvertSettleBillResponse.py | snowxmas/alipay-sdk-python-all | 213 | 6146 | <filename>alipay/aop/api/domain/KbAdvertSettleBillResponse.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class KbAdvertSettleBillResponse(object):
def __init__(self):
self._download_url = None
self._paid_date = None
@property
def download_url(self):
return self._download_url
@download_url.setter
def download_url(self, value):
self._download_url = value
@property
def paid_date(self):
return self._paid_date
@paid_date.setter
def paid_date(self, value):
self._paid_date = value
def to_alipay_dict(self):
params = dict()
if self.download_url:
if hasattr(self.download_url, 'to_alipay_dict'):
params['download_url'] = self.download_url.to_alipay_dict()
else:
params['download_url'] = self.download_url
if self.paid_date:
if hasattr(self.paid_date, 'to_alipay_dict'):
params['paid_date'] = self.paid_date.to_alipay_dict()
else:
params['paid_date'] = self.paid_date
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = KbAdvertSettleBillResponse()
if 'download_url' in d:
o.download_url = d['download_url']
if 'paid_date' in d:
o.paid_date = d['paid_date']
return o
| 2.234375 | 2 |
day5.py | PLCoster/adventofcode2019 | 1 | 6147 | # -*- coding: utf-8 -*-
"""
Created on Mon Dec 2 11:06:59 2019
@author: Paul
"""
def read_data(filename):
"""
Reads csv file into a list, and converts to ints
"""
data = []
f = open(filename, 'r')
for line in f:
data += line.strip('\n').split(',')
int_data = [int(i) for i in data]
f.close()
return int_data
def run_intcode(program, input_int):
"""
Takes data, list of ints to run int_code on.
Returns list of ints after intcode program has been run.
Running Intcode program looks reads in the integers sequentially in sets of 4:
data[i] == Parameter Mode + Opcode (last two digits)
data[i+1] == Entry 1
data[i+2] == Entry 2
data[i+3] == Entry 3
If Opcode == 1, the value of the opcode at index location = entry 1 and 2
in the program are summed and stored at the index location of entry 3.
If Opcode == 2, the value of the opcode at index location = entry 1 and 2
in the program are multiplied and stored at the index location of entry 3.
If Opcode == 3, the the single integer (input) is saved to the position given
by index 1.
If Opcode == 4, the program outputs the value of its only parameter. E.g. 4,50
would output the value at address 50.
    If Opcode == 5 and entry 1 is not 0, the intcode position moves to the index
    stored at entry 2. Otherwise it does nothing.
    If Opcode == 6 and entry 1 is 0, the intcode position moves to the index
    stored at entry 2. Otherwise it does nothing.
    If Opcode == 7 and entry 1 < entry 2, store 1 in the position given by the
    third param, otherwise store 0 at the position given by the third param.
    If Opcode == 8 and entry 1 == entry 2, store 1 in the position given by the
    third param, otherwise store 0 at the position given by the third param.
If Opcode == 99, the program is completed and will stop running.
Parameters are digits to the left of the opcode, read left to right:
Parameter 0 -> Position mode - the entry is treated as an index location
Parameter 1 -> Immediate mode - the entry is treated as a value
"""
data = program[:]
answer = -1
params = [0, 0, 0]
param_modes = ['', '', '']
i = 0
while (i < len(program)):
#print("i = ", i)
# Determine Opcode and parameter codes:
opcode_str = "{:0>5d}".format(data[i])
opcode = int(opcode_str[3:])
param_modes[0] = opcode_str[2]
param_modes[1] = opcode_str[1]
param_modes[2] = opcode_str[0]
#print(opcode_str)
for j in range(2):
if param_modes[j] == '0':
try:
params[j] = data[data[i+j+1]]
except IndexError:
continue
else:
try:
params[j] = data[i+j+1]
except IndexError:
continue
#print(params, param_modes)
# If opcode is 1, add relevant entries:
if opcode == 1:
data[data[i+3]] = params[0] + params[1]
i += 4;
# If opcode is 2, multiply the relevant entries:
elif opcode == 2:
data[data[i+3]] = params[0] * params[1]
i += 4;
# If opcode is 3, store input value at required location.
elif opcode == 3:
data[data[i+1]] = input_int
i += 2;
# If opcode is 4, print out the input stored at specified location.
elif opcode == 4:
answer = data[data[i+1]]
print("Program output: ", data[data[i+1]])
i += 2;
# If the opcode is 5 and the next parameter !=0, jump forward
elif opcode == 5:
if params[0] != 0:
i = params[1]
else:
i += 3
# If the opcode is 6 and next parameter is 0, jump forward
elif opcode == 6:
if params[0] == 0:
i = params[1]
else:
i += 3
# If the opcode is 7, carry out less than comparison and store 1/0 at loc 3
elif opcode == 7:
if params[0] < params[1]:
data[data[i+3]] = 1
else:
data[data[i+3]] = 0
i += 4
# If the opcode is 8, carry out equality comparison and store 1/0 at loc 3
elif opcode == 8:
if params[0] == params[1]:
data[data[i+3]] = 1
else:
data[data[i+3]] = 0
i += 4
# If the opcode is 99, halt the intcode
elif opcode == 99:
print("Program ended by halt code")
break
# If opcode is anything else something has gone wrong!
else:
print("Problem with the Program")
break
return data, answer
program = read_data("day5input.txt")
#print(program)
result1, answer1 = run_intcode(program, 1)
#print(result1)
print("Part 1: Answer is: ", answer1)
result2, answer2 = run_intcode(program, 5)
#print(result2)
print("Part 2: Answer is: ", answer2)
#test_program = [1002,4,3,4,33]
#test_program2 = [3,0,4,0,99]
#test_program3 = [1101,100,-1,4,0]
#test_program4 = [3,9,8,9,10,9,4,9,99,-1,8] # 1 if input = 8, 0 otherwise
#test_program5 = [3,9,7,9,10,9,4,9,99,-1,8] # 1 if input < 8, 0 otherwise
#test_program6 = [3,3,1108,-1,8,3,4,3,99] # 1 if input = 8, 0 otherwise
#test_program7 = [3,3,1107,-1,8,3,4,3,99] # 1 if input < 8, 0 otherwise
#test_program8 = [3,12,6,12,15,1,13,14,13,4,13,99,-1,0,1,9] # 0 if input = 0, 1 otherwise
#test_program9 = [3,3,1105,-1,9,1101,0,0,12,4,12,99,1] # 0 if input = 0, 1 otherwise
#test_program10 = [3,21,1008,21,8,20,1005,20,22,107,8,21,20,1006,20,31,1106,0,
#36,98,0,0,1002,21,125,20,4,20,1105,1,46,104,999,1105,1,46,1101,1000,1,20,4,20,
#1105,1,46,98,99] # 999 if input < 8, 1000 if input = 8, 1001 if input > 8
| 3.734375 | 4 |
textvis/textprizm/models.py | scclab/textvisdrg-prototype | 0 | 6148 | from django.db import models
# Create your models here.
class Schema(models.Model):
name = models.CharField(max_length=200)
description = models.TextField()
class Code(models.Model):
name = models.CharField(max_length=200)
description = models.TextField()
active_instances = models.PositiveIntegerField(default=0)
schema = models.ForeignKey(Schema, related_name="codes")
code_type = models.IntegerField(default=0)
def __unicode__(self):
if self.description:
return "%s/%s (%d): %s" % (self.schema_id, self.name, self.id, self.description)
else:
return "%s/%s (%d)" % (self.schema_id, self.name, self.id)
class DataSet(models.Model):
name = models.CharField(max_length=100)
created = models.DateTimeField()
class Session(models.Model):
set = models.ForeignKey(DataSet)
started = models.DateTimeField()
ended = models.DateTimeField()
def __unicode__(self):
return "%d (%s - %s)" % (self.id, str(self.started), str(self.ended))
class Participant(models.Model):
name = models.CharField(max_length=100)
description = models.TextField()
def __unicode__(self):
return self.name
class Message(models.Model):
session = models.ForeignKey(Session)
idx = models.IntegerField()
time = models.DateTimeField()
type = models.IntegerField()
participant = models.ForeignKey(Participant, related_name='messages')
message = models.TextField()
codes = models.ManyToManyField(Code, through='CodeInstance')
@classmethod
def get_between(cls, start, end):
"""
Get messages that are inclusively between the two messages, or two dates.
Takes into account the exact ordering of messages,
meaning that you won't get messages at the same time but after the last message, for example.
"""
if isinstance(start, Message):
after_first = ~models.Q(session=start.session) | models.Q(idx__gte=start.idx)
after_first = models.Q(time__gte=start.time) & after_first
else:
after_first = models.Q(time__gte=start)
if isinstance(end, Message):
before_last = ~models.Q(session=end.session) | models.Q(idx__lte=end.idx)
before_last = models.Q(time__lte=end.time) & before_last
else:
before_last = models.Q(time__lte=end)
return cls.objects.filter(after_first, before_last)
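    # Illustrative usage (added example): `first` and `last` below are assumed
    # Message instances, though plain datetimes are accepted as well, e.g.
    #   Message.get_between(first, last)
    #   Message.get_between(datetime(2014, 1, 1), datetime(2014, 2, 1))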
@property
def text(self):
return self.message
@property
def user_name(self):
return self.participant.name
@property
def created_at(self):
return self.time
class User(models.Model):
name = models.CharField(max_length=100)
full_name = models.CharField(max_length=250)
email = models.CharField(max_length=250)
def __unicode__(self):
return self.name
class AbstractCodeInstance(models.Model):
class Meta:
abstract = True
code = models.ForeignKey(Code)
message = models.ForeignKey(Message)
added = models.DateTimeField()
class CodeInstance(AbstractCodeInstance):
user = models.ForeignKey(User)
task_id = models.PositiveIntegerField()
intensity = models.FloatField()
flag = models.IntegerField()
| 2.609375 | 3 |
test/test_purchasing.py | jacob22/accounting | 0 | 6149 | <gh_stars>0
# -*- coding: utf-8 -*-
# Copyright 2019 Open End AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
if (sys.version_info >=(3, 0)):
PYT3 = True
import urllib.request
import urllib.parse
else:
PYT3 = False
import urllib2
import urlparse
import contextlib
import json
import os
import py
import subprocess
import time
import uuid
from . import support
here = os.path.dirname(__file__)
class Container(object):
def __init__(self, **kw):
self.__dict__.update(kw)
def do_purchase(products, emailaddress):
params = {
'data': [
{'items': [{'product': product} for product in products],
'buyerName': '<NAME>',
'buyerEmail': emailaddress}
]
}
if PYT3:
req = urllib.request.Request(urllib.parse.urljoin(support.url, '/rest/purchase'),
json.dumps(params).encode('ascii'),
{'Content-Type': 'application/json'})
data = json.load(urllib.request.urlopen(req))
else:
req = urllib2.Request(urlparse.urljoin(support.url, '/rest/purchase'),
json.dumps(params),
{'Content-Type': 'application/json'})
data = json.load(urllib2.urlopen(req))
return Container(id=data['purchase'],
invoice=data['invoiceUrl'],
buyerEmail=emailaddress)
def check_mail(client, mailssh, purchase, mailtype):
client.run('sendmail -qf')
message, = mailssh.find_and_delete_mail(None, 'TO', purchase.buyerEmail)
msg, headers = mailssh.parse(message)
assert headers['X-OE-MailType'] == [mailtype]
assert purchase.invoice in msg
return msg, headers
@contextlib.contextmanager
def check_mails(client, mailssh, purchase):
check_mail(client, mailssh, purchase, 'order-confirmation')
yield
check_mail(client, mailssh, purchase, 'full-payment-confirmation')
def gen_pg(client, org, id_args=[1, 1]):
cmd = 'python /root/accounting/members/paymentgen.py %s %s %s' % (
org.id, id_args[0], id_args[1])
id_args[0] += 1
id_args[1] += 1000
stdin, stdout, stderr = client.exec_command('PYTHONPATH=/root/accounting ' +
cmd)
return stdout.read()
def upload_pg(tmpdir, ssh, pgdata):
pgfile = tmpdir.join('pgfile')
pgfile.write(pgdata)
dest = uuid.uuid4()
with ssh(username='nordea') as client:
sftp = client.open_sftp()
sftp.put(str(pgfile), 'incoming/%s' % dest, confirm=False)
@py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped', 'mailssh',
'ssh', 'org', 'emailaddress')
def test_full_plusgiro_payment(mailssh, ssh, org, emailaddress, tmpdir):
purchase = do_purchase([org.product], emailaddress)
with ssh() as client:
with check_mails(client, mailssh, purchase):
pgdata = gen_pg(client, org)
upload_pg(tmpdir, ssh, pgdata)
@py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped', 'mailssh',
'ssh', 'org', 'emailaddress')
def test_partial_plusgiro_payment(ssh, mailssh, org, emailaddress,
tmpdir):
purchase = do_purchase([org.product], emailaddress)
with ssh() as client:
with check_mails(client, mailssh, purchase):
pgdata1 = gen_pg(client, org)
pgdata2 = gen_pg(client, org)
pgdata3 = gen_pg(client, org)
# The sum is 66666 (öre). It is probably unique in the fake pgfile,
# so we can simply replace it in order to make partial payments.
if PYT3:
partial_payment1 = pgdata1.replace(b'66666', b'22222') # pay 222.22 SEK
partial_payment2 = pgdata2.replace(b'66666', b'33333') # pay 333.33 SEK
final_payment = pgdata3.replace(b'66666', b'11111') # final 111.11 SEK
else:
partial_payment1 = pgdata1.replace('66666', '22222') # pay 222.22 SEK
partial_payment2 = pgdata2.replace('66666', '33333') # pay 333.33 SEK
final_payment = pgdata3.replace('66666', '11111') # final 111.11 SEK
upload_pg(tmpdir, ssh, partial_payment1)
msg, headers = check_mail(client, mailssh, purchase,
'partial-payment-confirmation')
assert '222,22' in msg # amount paid
assert '444,44' in msg # amount remaining
upload_pg(tmpdir, ssh, partial_payment2)
msg, headers = check_mail(client, mailssh, purchase,
'partial-payment-confirmation')
assert '333,33' in msg # amount paid
assert '111,11' in msg # amount remaining
upload_pg(tmpdir, ssh, final_payment)
@py.test.mark.usefixtures('cluster', 'clean_db', 'bootstrapped', 'mailssh',
'nodes', 'ssh', 'org', 'emailaddress')
def test_swish_payment(nodes, ssh, mailssh, org, emailaddress):
#py.test.skip('Skip swish tests until certificates work')
purchase = do_purchase([org.product], emailaddress)
with ssh() as client:
with check_mails(client, mailssh, purchase):
print(purchase.invoice)
if PYT3:
parsed = urllib.parse.urlparse(purchase.invoice)
_, _, purchase, _ = parsed.path.split('/')
path = '/providers/swish/charge/%s/%s' % (org.swish_provider, purchase)
url = urllib.parse.urlunparse((parsed.scheme, parsed.netloc, path,
'', '', ''))
data = {'phone': '1231181189'}
req = urllib.request.Request(url, json.dumps(data).encode('ascii'),
{'Content-Type': 'application/json'})
response = json.load(urllib.request.urlopen(req))
else:
parsed = urlparse.urlparse(purchase.invoice)
_, _, purchase, _ = parsed.path.split('/')
path = '/providers/swish/charge/%s/%s' % (org.swish_provider, purchase)
url = urlparse.urlunparse((parsed.scheme, parsed.netloc, path,
'', '', ''))
data = {'phone': '1231181189'}
req = urllib2.Request(url, json.dumps(data),
{'Content-Type': 'application/json'})
response = json.load(urllib2.urlopen(req))
print(response)
assert response['status'] == 'CREATED'
path = '/providers/swish/poll/%s/%s' % (org.swish_provider,
response['id'])
if PYT3:
url = urllib.parse.urlunparse((parsed.scheme, parsed.netloc, path,
'', '', ''))
else:
url = urlparse.urlunparse((parsed.scheme, parsed.netloc, path,
'', '', ''))
for _ in range(20):
if PYT3:
req = urllib.request.Request(url)
response = json.load(urllib.request.urlopen(req))
else:
req = urllib2.Request(url)
response = json.load(urllib2.urlopen(req))
print(response)
if response['status'] == 'PAID':
break
time.sleep(1)
| 1.992188 | 2 |
iot/downstream/fog_processes.py | SENERGY-Platform/senergy-connector | 0 | 6150 | """
Copyright 2020 InfAI (CC SES)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__all__ = ("Router", )
from ..util import conf, get_logger, mqtt
import threading
import cc_lib
logger = get_logger(__name__.split(".", 1)[-1])
class Router(threading.Thread):
def __init__(self, client: cc_lib.client.Client, mqtt_client: mqtt.Client):
super().__init__(name="downstream-fog-processes-router", daemon=True)
self.__cc = client
self.__mqtt = mqtt_client
def run(self) -> None:
try:
while True:
envelope = self.__cc.receive_fog_processes()
logger.debug(envelope)
self.__mqtt.publish(
"{}/{}".format(conf.MQTTClient.fog_processes_pub_topic, envelope.sub_topic),
envelope.message,
qos=conf.MQTTClient.qos
)
except Exception as ex:
logger.error(ex)
| 1.804688 | 2 |
django_project/user_profile/migrations/0003_order_payment_method.py | aliyaandabekova/DJANGO_PROJECT | 0 | 6151 | # Generated by Django 3.2.3 on 2021-05-27 13:34
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('user_profile', '0002_auto_20210526_1747'),
]
operations = [
migrations.AddField(
model_name='order',
name='payment_method',
field=models.CharField(choices=[('cash', 'cash'), ('wallet', 'wallet')], default='cash', max_length=10),
),
]
| 1.882813 | 2 |
day07/test.py | mpirnat/aoc2016 | 0 | 6152 | <reponame>mpirnat/aoc2016
#!/usr/bin/env python
import unittest
from day07 import has_abba, get_abba_allowed_strings, get_abba_disallowed_strings
from day07 import supports_tls, count_tls_addresses
from day07 import find_abas, supports_ssl, count_ssl_addresses
class TestFindingABBASequences(unittest.TestCase):
cases = (
('abba', True),
('oxyyxo', True),
('aaaa', False),
('abcd', False),
)
def test_finds_abba_sequences(self):
for text, expected in self.cases:
self.assertEqual(has_abba(text), expected)
class TestGettingAllowedChunks(unittest.TestCase):
cases = (
('abba[mnop]qrst[abcd]defg', ['abba', 'qrst', 'defg']),
)
def test_finds_allowed_substrings(self):
for text, expected in self.cases:
self.assertEqual(get_abba_allowed_strings(text), expected)
class TestGettingDisallowedChunks(unittest.TestCase):
cases = (
('abba[mnop]qrst[abcd]defg', ['mnop', 'abcd']),
)
def test_finds_disallowed_substrings(self):
for text, expected in self.cases:
self.assertEqual(get_abba_disallowed_strings(text), expected)
class TestCheckingTLSAddresses(unittest.TestCase):
cases = (
('abba[mnop]qrst', True),
('abcd[bddb]xyyx', False),
('aaaa[qwer]tyui', False),
('ioxxoj[asdfgh]zxcvbn', True),
)
def test_finds_tls_addresses(self):
for text, expected in self.cases:
self.assertEqual(supports_tls(text), expected)
def test_counts_tls_addresses(self):
data = [x[0] for x in self.cases]
self.assertEqual(count_tls_addresses(data), 2)
class TestFindingABASequences(unittest.TestCase):
cases = (
('aba', ['aba']),
('xyxxyx', ['xyx']),
('aaakekeke', ['eke', 'kek']),
('zazbzbzbcdb', ['bzb', 'zaz', 'zbz']),
)
def test_finds_aba_sequences(self):
for text, expected in self.cases:
self.assertEqual(find_abas(text), expected)
class TestCheckingSSLAddresses(unittest.TestCase):
cases = (
('aba[bab]xyz', True),
('xyx[xyx]xyx', False),
('aaa[kek]eke', True),
('zazbz[bzb]cdb', True),
)
def test_finds_ssl_addresses(self):
for text, expected in self.cases:
self.assertEqual(supports_ssl(text), expected)
def test_counts_ssl_addresses(self):
data = [x[0] for x in self.cases]
self.assertEqual(count_ssl_addresses(data), 3)
if __name__ == '__main__':
unittest.main()
| 2.359375 | 2 |
rlnets/PG.py | HTRPOCODES/HTRPO-v2 | 7 | 6153 | <reponame>HTRPOCODES/HTRPO-v2<gh_stars>1-10
import torch
import numpy as np
import torch.nn.functional as F
from torch.autograd import Variable
from basenets.MLP import MLP
from basenets.Conv import Conv
from torch import nn
class FCPG_Gaussian(MLP):
def __init__(self,
n_inputfeats,
n_actions,
sigma,
n_hiddens = [30],
nonlinear = F.tanh,
usebn = False,
outactive = None,
outscaler = None,
initializer = "orthogonal",
initializer_param = {"gain":np.sqrt(2), "last_gain": 0.1}
):
self.n_actions = n_actions
super(FCPG_Gaussian, self).__init__(
n_inputfeats, # input dim
n_actions, # output dim
n_hiddens, # hidden unit number list
nonlinear,
usebn,
outactive,
outscaler,
initializer,
initializer_param=initializer_param,
)
self.logstd = nn.Parameter(torch.log(sigma * torch.ones(n_actions) + 1e-8))
def forward(self,x, other_data = None):
x = MLP.forward(self, x, other_data)
# for exploration, we need to make sure that the std is not too low.
logstd = torch.clamp(self.logstd, min = np.log(0.1))
return x, logstd.expand_as(x), torch.exp(logstd).expand_as(x)
def cuda(self, device = None):
self.logstd.cuda()
return self._apply(lambda t: t.cuda(device))
class FCPG_Softmax(MLP):
def __init__(self,
n_inputfeats, # input dim
n_actions, # output dim
n_hiddens = [10], # hidden unit number list
nonlinear = F.tanh,
usebn = False,
outactive = F.softmax,
outscaler = None,
initializer = "orthogonal",
initializer_param = {"gain":np.sqrt(2), "last_gain": 0.1}
):
self.n_actions = n_actions
super(FCPG_Softmax, self).__init__(
n_inputfeats, # input dim
n_actions, # output dim
n_hiddens, # hidden unit number list
nonlinear,
usebn,
outactive,
outscaler,
initializer,
initializer_param=initializer_param,
)
def forward(self, x, other_data=None):
x = MLP.forward(self, x, other_data)
# for exploration, and similar to e-greedy
x = x + 0.01 / self.n_actions
x = x / torch.sum(x, dim = -1, keepdim=True).detach()
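        # Quick arithmetic check (illustrative, assumed values): with
        # n_actions = 4 and a raw softmax row of [1, 0, 0, 0], adding
        # 0.01 / 4 = 0.0025 everywhere and renormalising by the new row sum
        # (1.01) gives roughly [0.993, 0.002, 0.002, 0.002], so every action
        # keeps a small non-zero probability, similar to epsilon-greedy.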
return x
class ConvPG_Softmax(Conv):
def __init__(self,
n_inputfeats, # input dim
n_actions, # output dim
k_sizes = [8, 4, 3],
channels = [8, 16, 16],
strides = [4, 2, 2],
fcs = [32, 32, 32], # hidden unit number list
nonlinear = F.relu,
usebn = False,
outactive = F.softmax,
outscaler = None,
initializer="xavier",
initializer_param={}
):
self.n_actions = n_actions
super(ConvPG_Softmax, self).__init__(
n_inputfeats, # input dim
n_actions, # output dim
k_sizes,
channels,
strides,
fcs,
nonlinear,
usebn,
outactive,
outscaler,
initializer,
initializer_param=initializer_param,
)
def forward(self, x, other_data=None):
x = Conv.forward(self, x, other_data)
# for exploration, and similar to e-greedy
x = x + 0.01 / self.n_actions
x = x / torch.sum(x, dim=-1, keepdim=True).detach()
return x
# TODO: support multi-layer value function in which action is concat before the final layer
class FCVALUE(MLP):
def __init__(self,
n_inputfeats,
n_hiddens = [30],
nonlinear = F.tanh,
usebn = False,
outactive = None,
outscaler = None,
initializer="orthogonal",
initializer_param={"gain":np.sqrt(2), "last_gain": 0.1}
):
super(FCVALUE, self).__init__(
n_inputfeats,
1,
n_hiddens,
nonlinear,
usebn,
outactive,
outscaler,
initializer,
initializer_param=initializer_param,
)
| 1.992188 | 2 |
tensorflow/python/kernel_tests/sparse_tensors_map_ops_test.py | m4rkl1u/tensorflow | 2 | 6154 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for SparseTensorsMap."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.client import session
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor as sparse_tensor_lib
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import sparse_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import benchmark
from tensorflow.python.platform import test
# pylint: disable=protected-access
add_sparse_to_tensors_map = sparse_ops._add_sparse_to_tensors_map
add_many_sparse_to_tensors_map = sparse_ops._add_many_sparse_to_tensors_map
take_many_sparse_from_tensors_map = (
sparse_ops._take_many_sparse_from_tensors_map)
# pylint: enable=protected-access
class SparseTensorsMapTest(test.TestCase):
def _SparseTensorPlaceholder(self, dtype=None):
if dtype is None:
dtype = dtypes.int32
return sparse_tensor_lib.SparseTensor(
array_ops.placeholder(dtypes.int64),
array_ops.placeholder(dtype), array_ops.placeholder(dtypes.int64))
def _SparseTensorValue_5x6(self, permutation):
ind = np.array([[0, 0], [1, 0], [1, 3], [1, 4], [3, 2],
[3, 3]]).astype(np.int64)
val = np.array([0, 10, 13, 14, 32, 33]).astype(np.int32)
ind = ind[permutation]
val = val[permutation]
shape = np.array([5, 6]).astype(np.int64)
return sparse_tensor_lib.SparseTensorValue(ind, val, shape)
def _SparseTensorValue_3x4(self, permutation):
ind = np.array([[0, 0], [1, 0], [1, 2], [1, 3], [2, 2],
[2, 3]]).astype(np.int64)
val = np.array([0, 10, 13, 14, 32, 33]).astype(np.int32)
ind = ind[permutation]
val = val[permutation]
shape = np.array([3, 4]).astype(np.int64)
return sparse_tensor_lib.SparseTensorValue(ind, val, shape)
def _SparseTensorValue_1x1x1(self):
ind = np.array([[0, 0, 0]]).astype(np.int64)
val = np.array([0]).astype(np.int32)
shape = np.array([3, 4, 5]).astype(np.int64)
return sparse_tensor_lib.SparseTensorValue(ind, val, shape)
def testAddTakeMany(self):
with self.session(graph=ops.Graph(), use_gpu=False) as sess:
sp_input0 = self._SparseTensorValue_5x6(np.arange(6))
sp_input1 = self._SparseTensorValue_3x4(np.arange(6))
handle0 = add_sparse_to_tensors_map(sp_input0, shared_name="a")
handle1 = add_sparse_to_tensors_map(sp_input1, shared_name="a")
self.assertEqual(handle0.get_shape(), ())
handles_concat = array_ops.stack([handle0, handle1])
sp_out = take_many_sparse_from_tensors_map(
sparse_map_op=handle0.op, sparse_handles=handles_concat)
combined_indices, combined_values, combined_shape = self.evaluate(sp_out)
self.assertAllEqual(combined_indices[:6, 0], [0] * 6) # minibatch 0
self.assertAllEqual(combined_indices[:6, 1:], sp_input0[0])
self.assertAllEqual(combined_indices[6:, 0], [1] * 6) # minibatch 1
self.assertAllEqual(combined_indices[6:, 1:], sp_input1[0])
self.assertAllEqual(combined_values[:6], sp_input0[1])
self.assertAllEqual(combined_values[6:], sp_input1[1])
self.assertAllEqual(combined_shape, [2, 5, 6])
def testFeedAddTakeMany(self):
with self.session(use_gpu=False) as sess:
sp_input = self._SparseTensorPlaceholder()
input0_val = self._SparseTensorValue_5x6(np.arange(6))
input1_val = self._SparseTensorValue_3x4(np.arange(6))
handle = add_sparse_to_tensors_map(sp_input)
handle0_value = sess.run(handle, feed_dict={sp_input: input0_val})
handle1_value = sess.run(handle, feed_dict={sp_input: input1_val})
sparse_handles = ops.convert_to_tensor(
[handle0_value, handle1_value], dtype=dtypes.int64)
sp_roundtrip = take_many_sparse_from_tensors_map(
sparse_map_op=handle.op, sparse_handles=sparse_handles)
combined_indices, combined_values, combined_shape = self.evaluate(
sp_roundtrip)
self.assertAllEqual(combined_indices[:6, 0], [0] * 6) # minibatch 0
self.assertAllEqual(combined_indices[:6, 1:], input0_val[0])
self.assertAllEqual(combined_indices[6:, 0], [1] * 6) # minibatch 1
self.assertAllEqual(combined_indices[6:, 1:], input1_val[0])
self.assertAllEqual(combined_values[:6], input0_val[1])
self.assertAllEqual(combined_values[6:], input1_val[1])
self.assertAllEqual(combined_shape, [2, 5, 6])
def testAddManyTakeManyRoundTrip(self):
with self.session(use_gpu=False) as sess:
# N == 4 because shape_value == [4, 5]
indices_value = np.array([[0, 0], [0, 1], [2, 0]], dtype=np.int64)
values_value = np.array([b"a", b"b", b"c"])
shape_value = np.array([4, 5], dtype=np.int64)
sparse_tensor = self._SparseTensorPlaceholder(dtype=dtypes.string)
handles = add_many_sparse_to_tensors_map(sparse_tensor)
roundtrip = take_many_sparse_from_tensors_map(
sparse_map_op=handles.op, sparse_handles=handles)
handles_value, roundtrip_value = sess.run(
[handles, roundtrip],
feed_dict={
sparse_tensor.indices: indices_value,
sparse_tensor.values: values_value,
sparse_tensor.dense_shape: shape_value
})
self.assertEqual(handles_value.shape, (4,))
self.assertAllEqual(roundtrip_value.indices, indices_value)
self.assertAllEqual(roundtrip_value.values, values_value)
self.assertAllEqual(roundtrip_value.dense_shape, shape_value)
def testDeserializeFailsInconsistentRank(self):
with self.session(use_gpu=False) as sess:
sp_input = self._SparseTensorPlaceholder()
input0_val = self._SparseTensorValue_5x6(np.arange(6))
input1_val = self._SparseTensorValue_1x1x1()
handle = add_sparse_to_tensors_map(sp_input)
handle0_value = sess.run(handle, feed_dict={sp_input: input0_val})
handle1_value = sess.run(handle, feed_dict={sp_input: input1_val})
handle_concat = ops.convert_to_tensor(
[handle0_value, handle1_value], dtype=dtypes.int64)
sp_roundtrip = take_many_sparse_from_tensors_map(
sparse_map_op=handle.op, sparse_handles=handle_concat)
with self.assertRaisesOpError(
r"Inconsistent rank across SparseTensors: rank prior to "
r"SparseTensor\[1\] was: 3 but rank of SparseTensor\[1\] is: 4"):
self.evaluate(sp_roundtrip)
def testTakeManyFailsWrongInputOp(self):
with self.session(use_gpu=False) as sess:
input_val = self._SparseTensorValue_5x6(np.arange(6))
handle = add_sparse_to_tensors_map(input_val)
handle_value = self.evaluate(handle)
bad_handle = handle_value + 10
sp_roundtrip = take_many_sparse_from_tensors_map(
sparse_map_op=handle.op, sparse_handles=[handle_value, bad_handle])
with self.assertRaisesOpError(r"Unable to find SparseTensor: 10"):
self.evaluate(sp_roundtrip)
class BenchmarkSparseTensorsMapVsSerialization(test.Benchmark):
def benchmarkVeryLarge2DFloatSparseTensor(self):
np.random.seed(127)
num_elements = 10000
batch_size = 64
indices_batch = np.random.randint(
batch_size, size=num_elements, dtype=np.int64)
indices_value = np.arange(num_elements, dtype=np.int64)
indices = np.asarray(
sorted(zip(indices_batch, indices_value)), dtype=np.int64)
values = ["feature_value_for_embedding_lookup"] * num_elements
shape = np.asarray([batch_size, num_elements], dtype=np.int64)
with session.Session(config=benchmark.benchmark_config()) as sess:
with ops.device("/cpu:0"):
indices = variables.Variable(indices)
values = variables.Variable(values)
shape = variables.Variable(shape)
st = sparse_tensor_lib.SparseTensor(indices, values, shape)
st_handles = add_many_sparse_to_tensors_map(st)
st_roundtrip = take_many_sparse_from_tensors_map(
sparse_map_op=st_handles.op, sparse_handles=st_handles)
st_roundtrip_op = st_roundtrip.values.op
st_serialized = sparse_ops.serialize_many_sparse(st)
st_deserialized = sparse_ops.deserialize_many_sparse(
st_serialized, dtype=values.dtype)
st_deserialized_op = st_deserialized.values.op
variables.global_variables_initializer().run()
st_roundtrip_values = self.evaluate(st_roundtrip)
st_deserialized_values = self.evaluate(st_deserialized)
np.testing.assert_equal(st_roundtrip_values.values,
st_deserialized_values.values)
np.testing.assert_equal(st_roundtrip_values.indices,
st_deserialized_values.indices)
np.testing.assert_equal(st_roundtrip_values.dense_shape,
st_deserialized_values.dense_shape)
self.run_op_benchmark(
sess,
st_roundtrip_op,
min_iters=2000,
name="benchmark_very_large_2d_float_st_tensor_maps")
self.run_op_benchmark(
sess,
st_deserialized_op,
min_iters=2000,
name="benchmark_very_large_2d_float_st_serialization")
if __name__ == "__main__":
test.main()
| 1.8125 | 2 |
cloudroast/objectstorage/smoke/object_smoke.py | RULCSoft/cloudroast | 0 | 6155 | <filename>cloudroast/objectstorage/smoke/object_smoke.py
"""
Copyright 2015 Rackspace
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import calendar
import time
import zlib
from hashlib import md5
import unittest
from cafe.drivers.unittest.decorators import (
DataDrivenFixture, data_driven_test)
from cloudcafe.objectstorage.objectstorage_api.common.constants import \
Constants
from cloudroast.objectstorage.fixtures import ObjectStorageFixture
from cloudroast.objectstorage.generators import (
ObjectDatasetList, CONTENT_TYPES)
CONTAINER_DESCRIPTOR = 'object_smoke_test'
STATUS_CODE_MSG = ('{method} expected status code {expected}'
' received status code {received}')
@DataDrivenFixture
class ObjectSmokeTest(ObjectStorageFixture):
@classmethod
def setUpClass(cls):
super(ObjectSmokeTest, cls).setUpClass()
cls.default_obj_name = Constants.VALID_OBJECT_NAME_WITH_UNICODE
@staticmethod
def generate_chunk_data():
for i in range(10):
yield "Test chunk %s\r\n" % i
@data_driven_test(ObjectDatasetList())
def ddtest_object_retrieval_with_valid_object_name(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
generate_object(container_name, object_name)
response = self.client.get_object(container_name, object_name)
method = 'object creation with valid object name'
expected = 200
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
@data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo']))
def ddtest_object_retrieval_with_if_match(
self, object_type, generate_object):
"""
Bug filed for dlo/slo support of If-match Header:
https://bugs.launchpad.net/swift/+bug/1279076
"""
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
obj_info = generate_object(container_name, object_name)
headers = {'If-Match': obj_info.get('etag')}
response = self.client.get_object(
container_name,
self.default_obj_name,
headers=headers)
method = 'object retrieval with if match header'
expected = 200
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
@data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo']))
def ddtest_object_retrieval_with_if_none_match(
self, object_type, generate_object):
"""
Bug filed for dlo/slo support of If-match Header:
https://bugs.launchpad.net/swift/+bug/1279076
"""
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
object_info = generate_object(container_name, object_name)
headers = {'If-None-Match': 'grok'}
response = self.client.get_object(
container_name,
self.default_obj_name,
headers=headers)
method = 'object retrieval with if none match header'
expected = 200
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
headers = {'If-None-Match': object_info.get('etag')}
response = self.client.get_object(
container_name,
self.default_obj_name,
headers=headers)
method = 'object should be flagged as not modified'
expected = 304
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
@data_driven_test(ObjectDatasetList())
def ddtest_object_retrieval_with_if_modified_since(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
generate_object(container_name, object_name)
headers = {'If-Modified-Since': 'Fri, 17 Aug 2001 18:44:42 GMT'}
response = self.client.get_object(
container_name,
self.default_obj_name,
headers=headers)
method = 'object retrieval with if modified since header (past date)'
expected = 200
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
@data_driven_test(ObjectDatasetList())
def ddtest_object_not_modified_with_if_modified_since(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
generate_object(container_name, object_name)
headers = {'If-Modified-Since': 'Fri, 17 Aug 2101 18:44:42 GMT'}
response = self.client.get_object(
container_name,
self.default_obj_name,
headers=headers)
method = 'object retrieval with if modified since header (future date)'
expected = 304
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
@data_driven_test(ObjectDatasetList())
def ddtest_object_retrieval_with_if_unmodified_since(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
generate_object(container_name, object_name)
headers = {'If-Unmodified-Since': 'Fri, 17 Aug 2101 18:44:42 GMT'}
response = self.client.get_object(
container_name,
self.default_obj_name,
headers=headers)
method = 'object retrieval with if unmodified since header'
expected = 200
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
@data_driven_test(ObjectDatasetList())
def ddtest_object_retrieval_fails_with_if_unmodified_since(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
generate_object(container_name, object_name)
headers = {'If-Unmodified-Since': 'Fri, 17 Aug 2001 18:44:42 GMT'}
response = self.client.get_object(
container_name,
self.default_obj_name,
headers=headers)
method = ('object retrieval precondition fail with if unmodified'
' since header')
expected = 412
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
@data_driven_test(ObjectDatasetList())
def ddtest_partial_object_retrieval_with_start_range(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
generate_object(container_name, object_name)
headers = {'Range': 'bytes=5-'}
response = self.client.get_object(
container_name,
self.default_obj_name,
headers=headers)
method = 'partial object retrieval with start range'
expected = 206
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method, expected=expected, received=str(received)))
@data_driven_test(ObjectDatasetList())
def ddtest_partial_object_retrieval_with_end_range(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
generate_object(container_name, object_name)
headers = {'Range': 'bytes=-4'}
response = self.client.get_object(
container_name,
self.default_obj_name,
headers=headers)
method = 'partial object retrieval with end range'
expected = 206
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
@data_driven_test(ObjectDatasetList())
def ddtest_partial_object_retrieval_with_range(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
generate_object(container_name, object_name)
headers = {'Range': 'bytes=5-8'}
response = self.client.get_object(
container_name,
self.default_obj_name,
headers=headers)
method = 'partial object retrieval with start and end range'
expected = 206
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
@data_driven_test(ObjectDatasetList())
def ddtest_partial_object_retrieval_with_complete_range(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
generate_object(container_name, object_name)
headers = {'Range': 'bytes=99-0'}
response = self.client.get_object(
container_name,
self.default_obj_name,
headers=headers)
method = 'partial object retrieval with complete range'
expected = 200
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
@data_driven_test(ObjectDatasetList())
def ddtest_object_creation_with_valid_object_name(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
object_info = generate_object(container_name, object_name)
response = object_info.get('response')
method = 'object creation with valid object name'
expected = 201
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
response = self.client.get_object(
container_name,
self.default_obj_name)
method = 'object retrieval'
expected = 200
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
response_md5 = md5(response.content).hexdigest()
self.assertEqual(
object_info.get('md5'),
response_md5,
msg='should return identical object')
@data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo']))
def ddtest_object_update_with_valid_object_name(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
generate_object(container_name, object_name)
updated_object_data = 'Updated test file data'
updated_content_length = str(len(updated_object_data))
headers = {'Content-Length': updated_content_length,
'Content-Type': CONTENT_TYPES.get('text')}
response = self.client.create_object(
container_name,
self.default_obj_name,
headers=headers,
data=updated_object_data)
method = 'object update with valid object name'
expected = 201
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
@data_driven_test(ObjectDatasetList())
def ddtest_object_creation_with_etag(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
object_info = generate_object(container_name, object_name)
response = object_info.get('response')
method = 'object creation with etag header'
expected = 201
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
response = self.client.get_object_metadata(
container_name,
self.default_obj_name)
response = self.client.get_object(
container_name,
self.default_obj_name)
self.assertIn(
'etag',
response.headers,
msg="Etag header was set")
if object_type == 'standard':
expected = object_info.get('etag')
else:
expected = '"{0}"'.format(object_info.get('etag'))
received = response.headers.get('etag')
self.assertEqual(
expected,
received,
msg='object created with Etag header'
' value expected: {0} received: {1}'.format(
expected,
received))
def test_object_creation_with_uppercase_etag(self):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
object_data = "valid_data"
data_md5 = md5(object_data).hexdigest()
upper_etag = data_md5.upper()
headers = {"ETag": upper_etag}
create_response = self.client.create_object(container_name,
object_name,
data=object_data,
headers=headers)
method = 'object creation with uppercase etag header'
expected = 201
received = create_response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
object_response = self.client.get_object(
container_name,
self.default_obj_name)
self.assertIn(
'etag',
object_response.headers,
msg="Etag header was set")
expected = data_md5
received = object_response.headers.get('etag')
self.assertEqual(
expected,
received,
msg='object created with Etag header'
' value expected: {0} received: {1}'.format(
expected,
received))
@data_driven_test(ObjectDatasetList())
@ObjectStorageFixture.required_features('object-cors')
def ddtest_object_creation_with_access_control_allow_credentials(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
object_headers = {'Access-Control-Allow-Credentials': 'true'}
object_info = generate_object(container_name, object_name,
headers=object_headers)
response = object_info.get('response')
method = 'object creation with Access-Control-Allow-Credentials header'
expected = 201
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
response = self.client.get_object_metadata(
container_name,
self.default_obj_name)
self.assertIn(
'Access-Control-Allow-Credentials',
response.headers,
msg="Access-Control-Allow-Credentials header was set")
expected = 'true'
received = response.headers.get('Access-Control-Allow-Credentials')
self.assertEqual(
expected,
received,
msg='object created with Access-Control-Allow-Credentials header'
' value expected: {0} received: {1}'.format(
expected,
received))
@data_driven_test(ObjectDatasetList())
@ObjectStorageFixture.required_features('object-cors')
def ddtest_object_creation_with_access_control_allow_methods(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
object_headers = {
'Access-Control-Allow-Methods': 'GET, POST, OPTIONS'}
object_info = generate_object(container_name, object_name,
headers=object_headers)
response = object_info.get('response')
method = 'object creation with Access-Control-Allow-Methods header'
expected = 201
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
response = self.client.get_object_metadata(
container_name,
self.default_obj_name)
self.assertIn(
'Access-Control-Allow-Methods',
response.headers,
msg="Access-Control-Allow-Methods header was set")
expected = 'GET, POST, OPTIONS'
received = response.headers.get('Access-Control-Allow-Methods')
self.assertEqual(
expected,
received,
msg='object created with Access-Control-Allow-Methods header'
' value expected: {0} received: {1}'.format(
expected,
received))
@data_driven_test(ObjectDatasetList())
@ObjectStorageFixture.required_features('object-cors')
def ddtest_object_creation_with_access_control_allow_origin(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
object_headers = {
'Access-Control-Allow-Origin': 'http://example.com'}
object_info = generate_object(container_name, object_name,
headers=object_headers)
response = object_info.get('response')
method = 'object creation with Access-Control-Allow-Origin header'
expected = 201
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
response = self.client.get_object_metadata(
container_name, self.default_obj_name)
self.assertIn(
'Access-Control-Allow-Origin',
response.headers,
msg="Access-Control-Allow-Origin header was set")
expected = 'http://example.com'
received = response.headers.get('Access-Control-Allow-Origin')
self.assertEqual(
expected,
received,
msg='object created with Access-Control-Allow-Origin header'
' value expected: {0} received: {1}'.format(
expected,
received))
@data_driven_test(ObjectDatasetList())
@ObjectStorageFixture.required_features('object-cors')
def ddtest_object_creation_with_access_control_expose_headers(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
object_headers = {'Access-Control-Expose-Headers': 'X-Foo-Header'}
object_info = generate_object(container_name, object_name,
headers=object_headers)
response = object_info.get('response')
method = 'object creation with Access-Control-Expose-Headers header'
expected = 201
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
response = self.client.get_object_metadata(
container_name,
self.default_obj_name)
self.assertIn(
'Access-Control-Expose-Headers',
response.headers,
msg="Access-Control-Expose-Headers header was set")
expected = 'X-Foo-Header'
received = response.headers.get('Access-Control-Expose-Headers')
self.assertEqual(
expected,
received,
msg='object created with Access-Control-Expose-Headers header'
' value expected: {0} received: {1}'.format(
expected,
received))
@data_driven_test(ObjectDatasetList())
@ObjectStorageFixture.required_features('object-cors')
    def ddtest_object_creation_with_access_control_max_age(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
object_headers = {'Access-Control-Max-Age': '5'}
object_info = generate_object(container_name, object_name,
headers=object_headers)
response = object_info.get('response')
method = 'object creation with Access-Control-Max-Age header'
expected = 201
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
response = self.client.get_object_metadata(
container_name,
self.default_obj_name)
self.assertIn(
'Access-Control-Max-Age',
response.headers,
msg="Access-Control-Max-Age header was set")
expected = '5'
received = response.headers.get('Access-Control-Max-Age')
self.assertEqual(
expected,
received,
msg='object created with Access-Control-Max-Age header'
' value expected: {0} received: {1}'.format(
expected,
received))
@data_driven_test(ObjectDatasetList())
@ObjectStorageFixture.required_features('object-cors')
def ddtest_object_creation_with_access_control_request_headers(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
object_headers = {'Access-Control-Request-Headers': 'x-requested-with'}
object_info = generate_object(container_name, object_name,
headers=object_headers)
response = object_info.get('response')
method = 'object creation with Access-Control-Request-Headers header'
expected = 201
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
response = self.client.get_object_metadata(
container_name,
self.default_obj_name)
self.assertIn(
'Access-Control-Request-Headers',
response.headers,
msg="Access-Control-Request-Headers header was set")
expected = 'x-requested-with'
received = response.headers.get('Access-Control-Request-Headers')
self.assertEqual(
expected,
received,
msg='object created with Access-Control-Request-Headers header'
' value expected: {0} received: {1}'.format(
expected,
received))
@data_driven_test(ObjectDatasetList())
@ObjectStorageFixture.required_features('object-cors')
def ddtest_object_creation_with_access_control_request_method(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
object_headers = {'Access-Control-Request-Method': 'GET'}
object_info = generate_object(container_name, object_name,
headers=object_headers)
response = object_info.get('response')
method = 'object creation with Access-Control-Request-Method header'
expected = 201
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
response = self.client.get_object_metadata(
container_name,
self.default_obj_name)
self.assertIn(
'Access-Control-Request-Method',
response.headers,
msg="Access-Control-Request-Method header was set")
expected = 'GET'
received = response.headers.get('Access-Control-Request-Method')
self.assertEqual(
expected,
received,
msg='object created with Access-Control-Request-Method header'
' value expected: {0} received: {1}'.format(
expected,
received))
@data_driven_test(ObjectDatasetList())
@ObjectStorageFixture.required_features('object-cors')
def ddtest_object_retrieval_with_origin(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
headers = {'access-control-allow-origin': 'http://example.com',
'access-control-expose-headers': 'X-Trans-Id'}
generate_object(container_name, object_name, headers=headers)
headers = {'Origin': 'http://example.com'}
response = self.client.get_object_metadata(
container_name, object_name, headers=headers)
self.assertIn(
'access-control-expose-headers',
response.headers,
msg="access-control-expose-headers header should be set")
self.assertIn(
'access-control-allow-origin',
response.headers,
msg="access-control-allow-origin header should be set")
expected = 'http://example.com'
received = response.headers.get('access-control-allow-origin')
self.assertEqual(
expected,
received,
msg='access-control-allow-origin header should reflect origin'
' expected: {0} received: {1}'.format(expected, received))
@data_driven_test(ObjectDatasetList(exclude=['dlo', 'slo']))
def ddtest_object_creation_with_file_compression(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
def object_data_op(data, extra_data):
data = zlib.compress(data)
return (data, extra_data)
object_headers = {'Content-Encoding': 'gzip'}
object_info = generate_object(container_name, object_name,
data_op=object_data_op,
headers=object_headers)
response = object_info.get('response')
method = 'object creation with Content-Encoding header'
expected = 201
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
response = self.client.get_object_metadata(
container_name,
self.default_obj_name)
self.assertIn(
'Content-Encoding',
response.headers,
msg="Content-Encoding header was set")
expected = 'gzip'
received = response.headers.get('Content-Encoding')
self.assertEqual(
expected,
received,
msg='object created with Content-Encoding header value'
' expected: {0} received: {1}'.format(expected, received))
@data_driven_test(ObjectDatasetList())
def ddtest_object_creation_with_content_disposition(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
object_headers = {
'Content-Disposition': 'attachment; filename=testdata.txt'}
object_info = generate_object(container_name, object_name,
headers=object_headers)
response = object_info.get('response')
method = 'object creation with content disposition header'
expected = 201
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
response = self.client.get_object_metadata(
container_name,
self.default_obj_name)
self.assertIn(
'Content-Disposition',
response.headers,
msg="Content-Disposition header was set")
expected = 'attachment; filename=testdata.txt'
received = response.headers.get('Content-Disposition')
self.assertEqual(
expected,
received,
msg='object created with Content-Disposition header value'
' expected: {0} received: {1}'.format(expected, received))
@data_driven_test(ObjectDatasetList())
def ddtest_object_creation_with_x_delete_at(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
start_time = calendar.timegm(time.gmtime())
future_time = str(int(start_time + 60))
object_headers = {'X-Delete-At': future_time}
object_info = generate_object(container_name, object_name,
headers=object_headers)
response = object_info.get('response')
method = 'object creation with X-Delete-At header'
expected = 201
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
response = self.client.get_object_metadata(
container_name,
self.default_obj_name)
self.assertIn(
'X-Delete-At',
response.headers,
msg="X-Delete-At header was set")
expected = future_time
received = response.headers.get('X-Delete-At')
self.assertEqual(
expected,
received,
msg='object created with X-Delete-At header value'
' expected: {0} received: {1}'.format(expected, received))
@data_driven_test(ObjectDatasetList())
def ddtest_object_creation_with_delete_after(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
object_headers = {'X-Delete-After': '60'}
object_info = generate_object(container_name, object_name,
headers=object_headers)
response = object_info.get('response')
method = 'object creation with X-Delete-After header'
expected = 201
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
response = self.client.get_object_metadata(
container_name,
self.default_obj_name)
self.assertIn(
'X-Delete-At',
response.headers,
msg="X-Delete-At header was set")
@data_driven_test(ObjectDatasetList())
@ObjectStorageFixture.required_features('object_versioning')
def ddtest_versioned_container_creation_with_valid_data(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_history_container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
headers = {'X-Versions-Location': object_history_container_name}
self.client.set_container_metadata(container_name, headers=headers)
# list objects in non-current container
response = self.client.list_objects(
object_history_container_name)
method = 'list on empty versioned container'
expected = 204
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
# Create an object (version 1)
object_name = self.default_obj_name
ver1_info = generate_object(container_name, object_name)
response = ver1_info.get('response')
method = 'object version one creation'
expected = 201
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
# Update an object (version 2)
object_name = self.default_obj_name
ver2_info = generate_object(container_name, object_name)
response = ver2_info.get('response')
method = 'update version one object'
expected = 201
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
response = self.client.list_objects(object_history_container_name)
method = 'list on versioned container'
expected = 200
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
@unittest.skip('Problem with this tests assertion, needs review')
@data_driven_test(ObjectDatasetList())
def ddtest_put_copy_object(self, object_type, generate_object):
src_container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
dest_container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
src_object_name = '{0}_source'.format(self.default_obj_name)
generate_object(src_container_name, src_object_name)
dest_obj_name = '{0}_destination'.format(self.default_obj_name)
source = '/{0}/{1}'.format(src_container_name, src_object_name)
hdrs = {'X-Copy-From': source, 'Content-Length': '0'}
response = self.client.copy_object(
dest_container_name,
dest_obj_name,
headers=hdrs)
method = 'put copy object'
expected = 201
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
response = self.client.get_object(
dest_container_name,
dest_obj_name)
method = 'copied object retrieval'
expected = 200
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
@data_driven_test(ObjectDatasetList())
def ddtest_copy_object(self, object_type, generate_object):
src_container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
dest_container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
src_object_name = '{0}_source'.format(self.default_obj_name)
generate_object(src_container_name, src_object_name)
dest_object_name = '{0}_destination'.format(self.default_obj_name)
dest = '/{0}/{1}'.format(dest_container_name, dest_object_name)
headers = {'Destination': dest}
response = self.client.copy_object(
src_container_name,
src_object_name,
headers=headers)
method = 'copy object'
expected = 201
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
response = self.client.get_object(
dest_container_name,
dest_object_name)
method = 'copied object retrieval'
expected = 200
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
@data_driven_test(ObjectDatasetList())
def ddtest_object_deletion_with_valid_object(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
generate_object(container_name, object_name)
response = self.client.delete_object(
container_name,
object_name)
method = 'delete object'
expected = 204
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
response = self.client.get_object(
container_name,
self.default_obj_name)
method = 'object retrieval'
expected = 404
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
@data_driven_test(ObjectDatasetList())
def ddtest_obj_metadata_update_with_object_possessing_metadata(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
generate_object(container_name, object_name,
headers={'X-Object-Meta-Grok': 'Drok'})
response = self.client.get_object_metadata(
container_name, object_name)
self.assertIn(
'X-Object-Meta-Grok',
response.headers,
msg="object not created with X-Object-Meta-Grok header")
expected = 'Drok'
received = response.headers.get('X-Object-Meta-Grok')
self.assertEqual(
expected,
received,
msg='object created with X-Object-Meta-Grok header value'
' expected: {0} received: {1}'.format(expected, received))
headers = {'X-Object-Meta-Foo': 'Bar'}
response = self.client.set_object_metadata(
container_name,
self.default_obj_name,
headers=headers)
method = 'set object metadata'
expected = 202
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
response = self.client.get_object_metadata(
container_name,
self.default_obj_name)
self.assertIn(
'X-Object-Meta-Foo',
response.headers,
msg="object updated with X-Object-Meta-Foo header")
expected = 'Bar'
received = response.headers.get('X-Object-Meta-Foo')
self.assertEqual(
expected,
received,
msg='object X-Object-Meta-Foo header value expected: {0}'
' received: {1}'.format(expected, received))
@data_driven_test(ObjectDatasetList())
def ddtest_obj_metadata_update(self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object_name = self.default_obj_name
generate_object(container_name, object_name)
headers = {'X-Object-Meta-Grok': 'Drok'}
response = self.client.set_object_metadata(
container_name, object_name, headers=headers)
method = 'set object metadata X-Object-Meta-Grok: Drok'
expected = 202
received = response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
response = self.client.get_object_metadata(
container_name,
self.default_obj_name)
self.assertIn(
'X-Object-Meta-Grok',
response.headers,
msg="object updated with X-Object-Meta-Grok header")
expected = 'Drok'
received = response.headers.get('X-Object-Meta-Grok')
self.assertEqual(
expected,
received,
msg='object X-Object-Meta-Grok header value expected: {0}'
' received: {1}'.format(expected, received))
@data_driven_test(ObjectDatasetList())
def ddtest_content_type_not_detected_without_detect_content_type_header(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object1_name = 'object1.txt'
object1_headers = {'Content-Type': 'application/x-www-form-urlencoded'}
generate_object(container_name, object1_name, headers=object1_headers)
object2_name = 'object2.txt'
object2_headers = {'X-Detect-Content-Type': False,
'Content-Type': 'application/x-www-form-urlencoded'}
generate_object(container_name, object2_name, headers=object2_headers)
response = self.client.get_object(
container_name, object1_name)
expected = 'application/x-www-form-urlencoded'
received = response.headers.get('content-type')
self.assertEqual(
expected,
received,
msg='object created should have content type: {0}'
' received: {1}'.format(expected, received))
response = self.client.get_object(
            container_name, object2_name)
        received = response.headers.get('content-type')
        self.assertEqual(
expected,
received,
msg='object created should have content type: {0}'
' received: {1}'.format(expected, received))
@data_driven_test(ObjectDatasetList())
def ddtest_content_type_detected_with_detect_content_type(
self, object_type, generate_object):
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
object1_name = 'object1.txt'
object1_headers = {'X-Detect-Content-Type': True,
'Content-Type': 'application/x-www-form-urlencoded'}
generate_object(container_name, object1_name, headers=object1_headers)
response = self.client.get_object(
container_name, object1_name)
expected = 'text/plain'
received = response.headers.get('content-type')
self.assertEqual(
expected,
received,
msg='object created should have content type: {0}'
' received: {1}'.format(expected, received))
object2_name = 'object2.txt'
object2_headers = {'X-Detect-Content-Type': True}
generate_object(container_name, object2_name, headers=object2_headers)
response = self.client.get_object(
container_name, object2_name)
expected = 'text/plain'
received = response.headers.get('content-type')
self.assertEqual(
expected,
received,
msg='object created should have content type: {0}'
' received: {1}'.format(expected, received))
def test_object_creation_via_chunked_transfer(self):
"""
Scenario:
Create an object using chunked transfer encoding.
Expected Results:
Return a 201 status code and a single object should
be created.
"""
container_name = self.create_temp_container(
descriptor=CONTAINER_DESCRIPTOR)
headers = {"Transfer-Encoding": "chunked"}
create_response = self.client.create_object(
container_name,
self.default_obj_name,
headers=headers,
data=self.generate_chunk_data())
method = 'Object creation via chunked transfer'
expected = 201
received = create_response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
object_response = self.client.get_object(container_name,
self.default_obj_name)
method = 'Object retrieval'
expected = 200
received = object_response.status_code
self.assertEqual(
expected,
received,
msg=STATUS_CODE_MSG.format(
method=method,
expected=expected,
received=str(received)))
| 1.828125 | 2 |
ceph/tests/conftest.py | remicalixte/integrations-core | 1 | 6156 | <reponame>remicalixte/integrations-core
# (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import os
import pytest
from datadog_checks.dev import docker_run
from datadog_checks.dev.conditions import CheckDockerLogs
from datadog_checks.dev.subprocess import run_command
from .common import BASIC_CONFIG, HERE
E2E_METADATA = {
'start_commands': [
'apt-get update',
'apt-get install -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" -y docker.io',
],
'docker_volumes': ['/var/run/docker.sock:/var/run/docker.sock'],
}
@pytest.fixture(scope="session")
def dd_environment():
compose_file = os.path.join(HERE, 'compose', 'docker-compose.yaml')
# We need a custom condition to wait a bit longer
with docker_run(
compose_file=compose_file,
conditions=[
CheckDockerLogs(compose_file, 'spawning ceph --cluster ceph -w', wait=5),
CheckDockerLogs(compose_file, 'Running on http://0.0.0.0:5000/'),
],
):
# Clean the disk space warning
run_command(
['docker', 'exec', 'dd-test-ceph', 'ceph', 'tell', 'mon.*', 'injectargs', '--mon_data_avail_warn', '5']
)
# Wait a bit for the change to take effect
condition = CheckDockerLogs(compose_file, 'Cluster is now healthy')
condition()
yield BASIC_CONFIG, E2E_METADATA
| 1.65625 | 2 |
federation/hostmeta/fetchers.py | weex/federation | 93 | 6157 | import json
from typing import Dict, Optional
import requests
from federation.hostmeta.parsers import (
parse_nodeinfo_document, parse_nodeinfo2_document, parse_statisticsjson_document, parse_mastodon_document,
parse_matrix_document, parse_misskey_document)
from federation.utils.network import fetch_document
HIGHEST_SUPPORTED_NODEINFO_VERSION = 2.1
def fetch_mastodon_document(host):
doc, status_code, error = fetch_document(host=host, path='/api/v1/instance')
if not doc:
return
try:
doc = json.loads(doc)
except json.JSONDecodeError:
return
return parse_mastodon_document(doc, host)
def fetch_matrix_document(host: str) -> Optional[Dict]:
doc, status_code, error = fetch_document(host=host, path='/_matrix/federation/v1/version')
if not doc:
return
try:
doc = json.loads(doc)
except json.JSONDecodeError:
return
return parse_matrix_document(doc, host)
def fetch_misskey_document(host: str, mastodon_document: Dict=None) -> Optional[Dict]:
try:
response = requests.post(f'https://{host}/api/meta') # ¯\_(ツ)_/¯
except Exception:
return
try:
doc = response.json()
except json.JSONDecodeError:
return
if response.status_code == 200:
return parse_misskey_document(doc, host, mastodon_document=mastodon_document)
def fetch_nodeinfo_document(host):
doc, status_code, error = fetch_document(host=host, path='/.well-known/nodeinfo')
if not doc:
return
try:
doc = json.loads(doc)
except json.JSONDecodeError:
return
url, highest_version = '', 0.0
if doc.get('0'):
# Buggy NodeInfo from certain old Hubzilla versions
url = doc.get('0', {}).get('href')
elif isinstance(doc.get('links'), dict):
# Another buggy NodeInfo from certain old Hubzilla versions
url = doc.get('links').get('href')
else:
for link in doc.get('links'):
version = float(link.get('rel').split('/')[-1])
if highest_version < version <= HIGHEST_SUPPORTED_NODEINFO_VERSION:
url, highest_version = link.get('href'), version
if not url:
return
doc, status_code, error = fetch_document(url=url)
if not doc:
return
try:
doc = json.loads(doc)
except json.JSONDecodeError:
return
return parse_nodeinfo_document(doc, host)
def fetch_nodeinfo2_document(host):
doc, status_code, error = fetch_document(host=host, path='/.well-known/x-nodeinfo2')
if not doc:
return
try:
doc = json.loads(doc)
except json.JSONDecodeError:
return
return parse_nodeinfo2_document(doc, host)
def fetch_statisticsjson_document(host):
doc, status_code, error = fetch_document(host=host, path='/statistics.json')
if not doc:
return
try:
doc = json.loads(doc)
except json.JSONDecodeError:
return
return parse_statisticsjson_document(doc, host)
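

# Minimal usage sketch, not part of the original module. The host name below is a
# placeholder; every fetcher returns None when the document is missing or malformed,
# so callers can fall back from one metadata format to the next.
#
#   meta = fetch_nodeinfo_document("example-pod.example")
#   if meta is None:
#       meta = fetch_nodeinfo2_document("example-pod.example")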
| 2.421875 | 2 |
features/analysis_features.py | iag0g0mes/t2_fis_driving_style | 5 | 6158 | <gh_stars>1-10
import numpy as np
from typing import Any, Dict, List, Tuple, NoReturn
import argparse
import os
def parse_arguments() -> Any:
"""Parse command line arguments."""
parser = argparse.ArgumentParser()
parser.add_argument(
"--data_dir",
default="",
type=str,
help="Directory where the features (npy files) are saved",
)
parser.add_argument("--mode",
required=True,
type=str,
help="train/val/test/sample",
choices=['train', 'test', 'val','sample'])
parser.add_argument("--obs_len",
default=2,
type=int,
help="Observed length of the trajectory in seconds",
choices=[1,2,3,4,5])
parser.add_argument("--filter",
default='ekf',
type=str,
help="Filter to process the data noise. (ekf/none/ekf-savgol/savgol",
choices=['ekf', 'none', 'ekf-savgol', 'savgol'])
return parser.parse_args()
def stats(traj:np.ndarray) -> NoReturn:
#central tendency : mean
#dispersion : std
#bounds : min max
#quantile : 0.25, 0.5, 0.75
labels = ['mean_v', 'mean_acc', 'mean_deac', 'std_jy']
for i, l in zip(range(0, traj.shape[1]), labels):
t = traj[:, i]
_mean = round(np.mean(t),2)
_std = round(np.std(t),2)
_min = round(np.min(t),2)
_max = round(np.max(t),2)
_q25 = round(np.quantile(t, 0.25),2)
_q50 = round(np.quantile(t, 0.5),2)
_q75 = round(np.quantile(t, 0.75),2)
print (f'Feature: {l}')
print ('\tmean:{} | std:{} | min:{} | max:{} | q25:{} | q50:{} | q75:{}'.format(_mean,
_std, _min, _max, _q25, _q50, _q75))
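

# Illustrative call on synthetic data (not part of the pipeline; the printed labels
# assume the four-feature layout [mean_v, mean_acc, mean_deac, std_jy] used above):
#   stats(np.random.default_rng(0).random((100, 4)))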
if __name__== '__main__':
#_filters = ['none', 'ekf', 'savgol', 'ekf-savgol']
#_modes = ['train', 'val', 'test', 'sample']
#_obs_len = [2,5]
#seg = _obs_len[0]
#mode = _modes[3]
#filter_name = _filters[0]
args = parse_arguments()
if args.mode == 'test':
args.obs_len = 2
assert os.path.exists(args.data_dir),\
f'[Analysis][main][ERROR] data_dir not found!({args.data_dir})'
data_file = 'features_{}_{}s_{}.npy'.format(args.mode,
args.obs_len,
args.filter)
assert os.path.exists(os.path.join(args.data_dir, data_file)),\
f'[Analysis][main][ERROR] data_file not found!({data_file})'
print ('[Analysis] loading dataset....')
# (m, 4)
# [mean_v, mean_acc, mean_deac, std_jy]
data = np.load(os.path.join(args.data_dir,data_file))
print ('[Analysis] mode:{} | filter:{} | obs_len:{}'.format(args.mode,
args.filter,
args.obs_len))
print ('[Analysis] data shape:{}'.format(data.shape))
print ('[Analysis] stats:')
stats(data)
| 2.515625 | 3 |
python-watcher-2.0.0/watcher/tests/notifications/test_service_notifications.py | scottwedge/OpenStack-Stein | 0 | 6159 | # -*- encoding: utf-8 -*-
# Copyright (c) 2017 Servionica
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import freezegun
import mock
import oslo_messaging as om
from watcher.common import rpc
from watcher import notifications
from watcher.objects import service as w_service
from watcher.tests.db import base
from watcher.tests.objects import utils
@freezegun.freeze_time('2016-10-18T09:52:05.219414')
class TestActionPlanNotification(base.DbTestCase):
def setUp(self):
super(TestActionPlanNotification, self).setUp()
p_get_notifier = mock.patch.object(rpc, 'get_notifier')
m_get_notifier = p_get_notifier.start()
self.addCleanup(p_get_notifier.stop)
self.m_notifier = mock.Mock(spec=om.Notifier)
def fake_get_notifier(publisher_id):
self.m_notifier.publisher_id = publisher_id
return self.m_notifier
m_get_notifier.side_effect = fake_get_notifier
def test_service_failed(self):
service = utils.get_test_service(mock.Mock(),
created_at=datetime.datetime.utcnow())
state = w_service.ServiceStatus.FAILED
notifications.service.send_service_update(mock.MagicMock(),
service,
state,
host='node0')
notification = self.m_notifier.warning.call_args[1]
payload = notification['payload']
self.assertEqual("infra-optim:node0", self.m_notifier.publisher_id)
self.assertDictEqual({
'watcher_object.data': {
'last_seen_up': '2016-09-22T08:32:06Z',
'name': 'watcher-service',
'sevice_host': 'controller',
'status_update': {
'watcher_object.data': {
'old_state': 'ACTIVE',
'state': 'FAILED'
},
'watcher_object.name': 'ServiceStatusUpdatePayload',
'watcher_object.namespace': 'watcher',
'watcher_object.version': '1.0'
}
},
'watcher_object.name': 'ServiceUpdatePayload',
'watcher_object.namespace': 'watcher',
'watcher_object.version': '1.0'
},
payload
)
| 1.804688 | 2 |
leetcode/medium/best-time-to-buy-and-sell-stock-ii.py | rainzhop/cumulus-tank | 0 | 6160 | # https://leetcode.com/problems/best-time-to-buy-and-sell-stock-ii/
#
# Say you have an array for which the ith element is the price of a given stock on day i.
#
# Design an algorithm to find the maximum profit.
# You may complete as many transactions as you like (ie, buy one and sell one share of the stock multiple times).
# However, you may not engage in multiple transactions at the same time (ie, you must sell the stock before you buy again).
class Solution(object):
def maxProfit(self, prices):
"""
:type prices: List[int]
:rtype: int
"""
if prices == []:
return 0
profit_list = []
min_val = prices[0]
max_val = prices[0]
tend = 0 # 0:down, 1:up
for i in range(1, len(prices)):
if prices[i] < prices[i - 1]:
# go down
if tend == 1:
max_val = prices[i - 1]
profit_list.append(max_val - min_val)
tend = 0
pass
if prices[i] > prices[i - 1]:
# go up
if tend == 0:
min_val = prices[i - 1]
tend = 1
pass
if tend == 1:
profit_list.append(prices[i] - min_val)
return sum(profit_list)
if __name__ == '__main__':
prices = [8,9,2,5]
s = Solution()
    print(s.maxProfit(prices))  # expected output: 4
| 3.859375 | 4 |
django_loci/tests/base/test_admin.py | yashikajotwani12/django-loci | 205 | 6161 | <filename>django_loci/tests/base/test_admin.py
import json
import os
import responses
from django.urls import reverse
from .. import TestAdminMixin, TestLociMixin
class BaseTestAdmin(TestAdminMixin, TestLociMixin):
geocode_url = 'https://geocode.arcgis.com/arcgis/rest/services/World/GeocodeServer/'
def test_location_list(self):
self._login_as_admin()
self._create_location(name='test-admin-location-1')
url = reverse('{0}_location_changelist'.format(self.url_prefix))
r = self.client.get(url)
self.assertContains(r, 'test-admin-location-1')
def test_floorplan_list(self):
self._login_as_admin()
self._create_floorplan()
self._create_location()
url = reverse('{0}_floorplan_changelist'.format(self.url_prefix))
r = self.client.get(url)
self.assertContains(r, '1st floor')
def test_location_json_view(self):
self._login_as_admin()
loc = self._create_location()
r = self.client.get(reverse('admin:django_loci_location_json', args=[loc.pk]))
expected = {
'name': loc.name,
'address': loc.address,
'type': loc.type,
'is_mobile': loc.is_mobile,
'geometry': json.loads(loc.geometry.json),
}
self.assertDictEqual(r.json(), expected)
def test_location_floorplan_json_view(self):
self._login_as_admin()
fl = self._create_floorplan()
r = self.client.get(
reverse('admin:django_loci_location_floorplans_json', args=[fl.location.pk])
)
expected = {
'choices': [
{
'id': str(fl.pk),
'str': str(fl),
'floor': fl.floor,
'image': fl.image.url,
'image_width': fl.image.width,
'image_height': fl.image.height,
}
]
}
self.assertDictEqual(r.json(), expected)
def test_location_change_image_removed(self):
self._login_as_admin()
loc = self._create_location(name='test-admin-location-1', type='indoor')
fl = self._create_floorplan(location=loc)
# remove floorplan image
os.remove(fl.image.path)
url = reverse('{0}_location_change'.format(self.url_prefix), args=[loc.pk])
r = self.client.get(url)
self.assertContains(r, 'test-admin-location-1')
def test_floorplan_change_image_removed(self):
self._login_as_admin()
loc = self._create_location(name='test-admin-location-1', type='indoor')
fl = self._create_floorplan(location=loc)
# remove floorplan image
os.remove(fl.image.path)
url = reverse('{0}_floorplan_change'.format(self.url_prefix), args=[fl.pk])
r = self.client.get(url)
self.assertContains(r, 'test-admin-location-1')
def test_is_mobile_location_json_view(self):
self._login_as_admin()
loc = self._create_location(is_mobile=True, geometry=None)
response = self.client.get(
reverse('admin:django_loci_location_json', args=[loc.pk])
)
self.assertEqual(response.status_code, 200)
content = json.loads(response.content)
self.assertEqual(content['geometry'], None)
loc1 = self._create_location(
name='location2', address='loc2 add', type='outdoor'
)
response1 = self.client.get(
reverse('admin:django_loci_location_json', args=[loc1.pk])
)
self.assertEqual(response1.status_code, 200)
content1 = json.loads(response1.content)
expected = {
'name': 'location2',
'address': 'loc2 add',
'type': 'outdoor',
'is_mobile': False,
'geometry': {'type': 'Point', 'coordinates': [12.512124, 41.898903]},
}
self.assertEqual(content1, expected)
@responses.activate
def test_geocode(self):
self._login_as_admin()
address = 'Red Square'
url = '{0}?address={1}'.format(
reverse('admin:django_loci_location_geocode_api'), address
)
# Mock HTTP request to the URL to work offline
responses.add(
responses.GET,
f'{self.geocode_url}findAddressCandidates?singleLine=Red+Square&f=json&maxLocations=1',
body=self._load_content('base/static/test-geocode.json'),
content_type='application/json',
)
response = self.client.get(url)
response_lat = round(response.json()['lat'])
response_lng = round(response.json()['lng'])
self.assertEqual(response.status_code, 200)
self.assertEqual(response_lat, 56)
self.assertEqual(response_lng, 38)
def test_geocode_no_address(self):
self._login_as_admin()
url = reverse('admin:django_loci_location_geocode_api')
response = self.client.get(url)
expected = {'error': 'Address parameter not defined'}
self.assertEqual(response.status_code, 400)
self.assertEqual(response.json(), expected)
@responses.activate
def test_geocode_invalid_address(self):
self._login_as_admin()
invalid_address = 'thisaddressisnotvalid123abc'
url = '{0}?address={1}'.format(
reverse('admin:django_loci_location_geocode_api'), invalid_address
)
responses.add(
responses.GET,
f'{self.geocode_url}findAddressCandidates?singleLine=thisaddressisnotvalid123abc'
'&f=json&maxLocations=1',
body=self._load_content('base/static/test-geocode-invalid-address.json'),
content_type='application/json',
)
response = self.client.get(url)
expected = {'error': 'Not found location with given name'}
self.assertEqual(response.status_code, 404)
self.assertEqual(response.json(), expected)
@responses.activate
def test_reverse_geocode(self):
self._login_as_admin()
lat = 52
lng = 21
url = '{0}?lat={1}&lng={2}'.format(
reverse('admin:django_loci_location_reverse_geocode_api'), lat, lng
)
# Mock HTTP request to the URL to work offline
responses.add(
responses.GET,
f'{self.geocode_url}reverseGeocode?location=21.0%2C52.0&f=json&outSR=4326',
body=self._load_content('base/static/test-reverse-geocode.json'),
content_type='application/json',
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'POL')
@responses.activate
def test_reverse_location_with_no_address(self):
self._login_as_admin()
lat = -30
lng = -30
url = '{0}?lat={1}&lng={2}'.format(
reverse('admin:django_loci_location_reverse_geocode_api'), lat, lng
)
responses.add(
responses.GET,
f'{self.geocode_url}reverseGeocode?location=-30.0%2C-30.0&f=json&outSR=4326',
body=self._load_content(
'base/static/test-reverse-location-with-no-address.json'
),
content_type='application/json',
)
response = self.client.get(url)
response_address = response.json()['address']
self.assertEqual(response.status_code, 404)
self.assertEqual(response_address, '')
def test_reverse_geocode_no_coords(self):
self._login_as_admin()
url = reverse('admin:django_loci_location_reverse_geocode_api')
response = self.client.get(url)
expected = {'error': 'lat or lng parameter not defined'}
self.assertEqual(response.status_code, 400)
self.assertEqual(response.json(), expected)
| 2.34375 | 2 |
dca_models/deform_offsets_module.py | vatsalag99/Deformable-Channel-Attention | 1 | 6162 | import torch
from torch import nn
from torch.nn.parameter import Parameter
from einops import rearrange, reduce, repeat
class dca_offsets_layer(nn.Module):
"""Constructs a Offset Generation module.
"""
def __init__(self, channel, n_offsets):
super(dca_offsets_layer, self).__init__()
self.channel = channel
self.n_offsets = n_offsets
def covariance_features(self, x):
"""
Takes in a feature map and returns the unnormalized covariance matrix
"""
m_batchsize, C, height, width = x.size()
        # standardize across the channel dimension before building the channel affinity matrix
        x = (x - x.mean(dim=1, keepdim=True)) / (x.std(dim=1, keepdim=True) + 1e-5)
proj_query = x.view(m_batchsize, C, -1)
proj_key = x.view(m_batchsize, C, -1).permute(0, 2, 1)
energy = torch.bmm(proj_query, proj_key)
return energy
def forward(self, x):
m_batchsize, C, height, width = x.size()
cov_matrix = self.covariance_features(x).reshape(m_batchsize, C, 1, C)
_, locations = torch.topk(cov_matrix, self.n_offsets, dim=1)
delta = torch.stack(self.n_offsets*[torch.arange(0, self.channel)], dim=0)
delta = torch.stack(m_batchsize * [delta], dim=0)
offsets = locations.squeeze() - delta.cuda()
return offsets
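

if __name__ == "__main__":
    # Minimal shape check; an illustrative sketch, not part of the original module.
    # Channel/offset counts are arbitrary. forward() places the offset grid on the GPU,
    # so this sketch assumes a CUDA-capable machine.
    layer = dca_offsets_layer(channel=16, n_offsets=4)
    feats = torch.randn(2, 16, 8, 8).cuda()
    print(layer(feats).shape)  # expected: torch.Size([2, 4, 16])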
| 2.453125 | 2 |
tests/__init__.py | egor43/PyImageComparsion | 0 | 6163 | from . import test_helpers
from . import test_image_opener
from . import test_image_metrick
from . import test_compare_tools
from . import test_compare_api | 0.980469 | 1 |
core/urls.py | donnellan0007/blog | 0 | 6164 | <filename>core/urls.py
from django.contrib import admin
from django.urls import path
from . import views
app_name = "core"
urlpatterns = [
path('',views.index,name="index"),
path('email/',views.email,name="email"),
path('post/<slug>/',views.post_detail,name='post'),
path('posts/',views.posts,name='posts'),
path('takes/',views.hot_takes,name='takes'),
path('take/<slug>/',views.take_detail,name='take'),
] | 1.765625 | 2 |
griddy/__init__.py | pgolding/pandas-grid | 1 | 6165 | <reponame>pgolding/pandas-grid
from .grid import render_table | 1.164063 | 1 |
utils/dbconn.py | iamvishnuks/Xmigrate | 4 | 6166 | from mongoengine import *
from dotenv import load_dotenv
from os import getenv
from cassandra.cluster import Cluster
from cassandra.auth import PlainTextAuthProvider
from cassandra.cqlengine import connection
from cassandra.cqlengine.management import sync_table
from cassandra.query import ordered_dict_factory
from model.discover import *
from model.blueprint import *
from model.disk import *
from model.storage import *
from model.project import *
from model.network import *
from model.user import *
load_dotenv()
cass_db = getenv("CASS_DB")
cass_password = getenv("CASS_PASSWORD")
cass_user = getenv("CASS_USER")
def create_db_con():
    auth_provider = PlainTextAuthProvider(username=cass_user, password=cass_password)
cluster = Cluster([cass_db],auth_provider=auth_provider)
session = cluster.connect()
session.execute("""
CREATE KEYSPACE IF NOT EXISTS migration
WITH replication = { 'class': 'SimpleStrategy', 'replication_factor': '2' }
""")
session.set_keyspace('migration')
session.row_factory = ordered_dict_factory
connection.setup([cass_db], "migration",protocol_version=3,auth_provider=auth_provider)
sync_table(BluePrint)
sync_table(Discover)
sync_table(Project)
sync_table(Network)
sync_table(Subnet)
sync_table(Storage)
sync_table(Bucket)
sync_table(GcpBucket)
sync_table(User)
sync_table(Disk)
session.execute("CREATE INDEX IF NOT EXISTS ON blue_print (network);")
session.execute("CREATE INDEX IF NOT EXISTS ON blue_print (subnet);")
return session
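

if __name__ == "__main__":
    # Ad-hoc connectivity check; an illustrative sketch, not part of the original module.
    # Assumes CASS_DB, CASS_USER and CASS_PASSWORD are set in the environment / .env file
    # and that the Cassandra node is reachable.
    session = create_db_con()
    print(session.execute("SELECT release_version FROM system.local").one())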
| 2.140625 | 2 |
src/syft/lib/__init__.py | godormad/PySyft | 0 | 6167 | <reponame>godormad/PySyft
# stdlib
import importlib
import sys
from typing import Any
from typing import Any as TypeAny
from typing import Dict as TypeDict
from typing import Optional
# third party
from packaging import version
# syft relative
from ..ast.globals import Globals
from ..lib.python import create_python_ast
from ..lib.torch import create_torch_ast
from ..lib.torchvision import create_torchvision_ast
from ..logger import critical
from ..logger import traceback_and_raise
from .misc import create_union_ast
class VendorLibraryImportException(Exception):
pass
def vendor_requirements_available(vendor_requirements: TypeDict[str, TypeAny]) -> bool:
# see if python version is supported
if "python" in vendor_requirements:
python_reqs = vendor_requirements["python"]
PYTHON_VERSION = sys.version_info
min_version = python_reqs.get("min_version", None)
if min_version is not None:
if PYTHON_VERSION < min_version:
traceback_and_raise(
VendorLibraryImportException(
f"Unable to load {vendor_requirements['lib']}."
+ f"Python: {PYTHON_VERSION} < {min_version}"
)
)
# see if torch version is supported
if "torch" in vendor_requirements:
torch_reqs = vendor_requirements["torch"]
# third party
import torch
TORCH_VERSION = version.parse(torch.__version__.split("+")[0])
min_version = torch_reqs.get("min_version", None)
if min_version is not None:
if TORCH_VERSION < version.parse(min_version):
traceback_and_raise(
VendorLibraryImportException(
f"Unable to load {vendor_requirements['lib']}."
+ f"Torch: {TORCH_VERSION} < {min_version}"
)
)
return True
def load_lib(lib: str, options: TypeDict[str, TypeAny] = {}) -> None:
try:
_ = importlib.import_module(lib)
vendor_ast = importlib.import_module(f"syft.lib.{lib}")
PACKAGE_SUPPORT = getattr(vendor_ast, "PACKAGE_SUPPORT", None)
        if PACKAGE_SUPPORT is not None:
            PACKAGE_SUPPORT.update(options)
if PACKAGE_SUPPORT is not None and vendor_requirements_available(
vendor_requirements=PACKAGE_SUPPORT
):
update_ast = getattr(vendor_ast, "update_ast", None)
if update_ast is not None:
global lib_ast
update_ast(ast_or_client=lib_ast)
for _, client in lib_ast.registered_clients.items():
update_ast(ast_or_client=client)
# cache the constructor for future created clients
lib_ast.loaded_lib_constructors[lib] = update_ast
except VendorLibraryImportException as e:
critical(e)
except Exception as e:
critical(f"Unable to load package support for: {lib}. {e}")
# now we need to load the relevant frameworks onto the node
def create_lib_ast(client: Optional[Any] = None) -> Globals:
python_ast = create_python_ast(client=client)
torch_ast = create_torch_ast(client=client)
torchvision_ast = create_torchvision_ast(client=client)
# numpy_ast = create_numpy_ast()
lib_ast = Globals(client=client)
lib_ast.add_attr(attr_name="syft", attr=python_ast.attrs["syft"])
lib_ast.add_attr(attr_name="torch", attr=torch_ast.attrs["torch"])
lib_ast.add_attr(attr_name="torchvision", attr=torchvision_ast.attrs["torchvision"])
# let the misc creation be always the last, as it needs the full ast solved
# to properly generated unions
union_misc_ast = getattr(getattr(create_union_ast(lib_ast, client), "syft"), "lib")
misc_root = getattr(getattr(lib_ast, "syft"), "lib")
misc_root.add_attr(attr_name="misc", attr=union_misc_ast.attrs["misc"])
return lib_ast
lib_ast = create_lib_ast(None)
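
# Hedged usage sketch (not part of the original module): vendor support is
# loaded lazily through load_lib(). "somepackage" below is a placeholder name;
# it assumes a matching syft.lib.somepackage module exposing PACKAGE_SUPPORT
# and update_ast().
#
#   from syft.lib import load_lib
#   load_lib("somepackage")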
| 1.984375 | 2 |
scripts/griffin_GC_counts.py | GavinHaLab/Griffin | 1 | 6168 | #!/usr/bin/env python
# coding: utf-8
# In[ ]:
import pysam
import os
import pandas as pd
import numpy as np
import time
import argparse
import sys
from multiprocessing import Pool
# In[ ]:
# ##arguments for testing
# bam_file_path = '/fh/scratch/delete90/ha_g/realigned_bams/cfDNA_MBC_ULP_hg38/realign_bam_paired_snakemake-master/results/MBC_1041_1_ULP/MBC_1041_1_ULP_recalibrated.bam'
# bam_file_name = 'MBC_1041_1_ULP'
# mapable_path = '../../downloads/genome/repeat_masker.mapable.k50.Umap.hg38.bedGraph'
# ref_seq_path = '/fh/fast/ha_g/grp/reference/GRCh38/GRCh38.fa'
# chrom_sizes_path = '/fh/fast/ha_g/grp/reference/GRCh38/hg38.standard.chrom.sizes'
# out_dir = './tmp/'
# map_q = 20
# size_range = [15,500]
# CPU = 4
# In[ ]:
parser = argparse.ArgumentParser()
parser.add_argument('--bam_file', help='sample_bam_file', required=True)
parser.add_argument('--bam_file_name', help='sample name (does not need to match actual file name)', required=True)
parser.add_argument('--mapable_regions', help='highly mapable regions to be used in GC correction, bedGraph or bed format', required=True)
parser.add_argument('--ref_seq',help='reference sequence (fasta format)',required=True)
parser.add_argument('--chrom_sizes',help='path to chromosome sizes for the reference seq',required=True)
parser.add_argument('--out_dir',help='folder for GC bias results',required=True)
parser.add_argument('--map_q',help='minimum mapping quality for reads to be considered',type=int,required=True)
parser.add_argument('--size_range',help='range of read sizes to be included',nargs=2, type=int, required=True)
parser.add_argument('--CPU',help='number of CPU for parallelizing', type=int, required=True)
args = parser.parse_args()
bam_file_path = args.bam_file
bam_file_name = args.bam_file_name
mapable_path=args.mapable_regions
ref_seq_path = args.ref_seq
chrom_sizes_path = args.chrom_sizes
out_dir = args.out_dir
map_q = args.map_q
size_range = args.size_range
CPU = args.CPU
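
# Illustrative invocation (file paths below are placeholders, not from the
# original repository):
#
#   python griffin_GC_counts.py \
#     --bam_file sample.bam --bam_file_name sample \
#     --mapable_regions mapable.k50.bedGraph --ref_seq GRCh38.fa \
#     --chrom_sizes hg38.chrom.sizes --out_dir results \
#     --map_q 20 --size_range 15 500 --CPU 8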
# In[ ]:
print('arguments provided:')
print('\tbam_file_path = "'+bam_file_path+'"')
print('\tbam_file_name = "'+bam_file_name+'"')
print('\tmapable_regions = "'+mapable_path+'"')
print('\tref_seq_path = "'+ref_seq_path+'"')
print('\tchrom_sizes_path = "'+chrom_sizes_path+'"')
print('\tout_dir = "'+out_dir+'"')
print('\tmap_q = '+str(map_q))
print('\tsize_range = '+str(size_range))
print('\tCPU = '+str(CPU))
# In[ ]:
mapable_name = mapable_path.rsplit('/',1)[1].rsplit('.',1)[0]
out_file = out_dir +'/'+mapable_name+'/GC_counts/'+ bam_file_name+'.GC_counts.txt'
print('out_file',out_file)
# In[ ]:
#create a directory for the GC data
if not os.path.exists(out_dir +'/'+mapable_name):
os.mkdir(out_dir +'/'+mapable_name)
if not os.path.exists(out_dir +'/'+mapable_name+'/GC_counts/'):
os.mkdir(out_dir +'/'+mapable_name+'/GC_counts/')
# In[ ]:
#import filter
mapable_intervals = pd.read_csv(mapable_path, sep='\t', header=None)
#remove non standard chromosomes and X and Y
chroms = ['chr'+str(m) for m in range(1,23)]
mapable_intervals = mapable_intervals[mapable_intervals[0].isin(chroms)]
print('chroms:', chroms)
print('number_of_intervals:',len(mapable_intervals))
sys.stdout.flush()
# In[ ]:
def collect_reads(sublist):
#create a dict for holding the frequency of each read length and GC content
GC_dict = {}
for length in range(size_range[0],size_range[1]+1):
GC_dict[length]={}
for num_GC in range(0,length+1):
GC_dict[length][num_GC]=0
#import the bam file
#this needs to be done within the loop otherwise it gives a truncated file warning
bam_file = pysam.AlignmentFile(bam_file_path, "rb")
print('sublist intervals:',len(sublist))
#this might also need to be in the loop
#import the ref_seq
ref_seq=pysam.FastaFile(ref_seq_path)
for i in range(len(sublist)):
chrom = sublist.iloc[i][0]
start = sublist.iloc[i][1]
end = sublist.iloc[i][2]
if i%5000==0:
print('interval',i,':',chrom,start,end,'seconds:',np.round(time.time()-start_time))
sys.stdout.flush()
        #fetch any read that overlaps the interval (don't need to extend the interval because the fetch function does this automatically)
fetched = bam_file.fetch(chrom,start,end)
for read in fetched:
#use both fw (positive template length) and rv (negative template length) reads
if (read.is_reverse==False and read.template_length>=size_range[0] and read.template_length<=size_range[1]) or (read.is_reverse==True and -read.template_length>=size_range[0] and -read.template_length<=size_range[1]):
#qc filters, some longer fragments are considered 'improper pairs' but I would like to keep these
if read.is_paired==True and read.mapping_quality>=map_q and read.is_duplicate==False and read.is_qcfail==False:
if read.is_reverse==False:
read_start = read.reference_start
read_end = read.reference_start+read.template_length
elif read.is_reverse==True:
read_end = read.reference_start + read.reference_length
read_start = read_end + read.template_length
fragment_seq = ref_seq.fetch(read.reference_name,read_start,read_end)
#tally up the GC content
fragment_seq=fragment_seq.replace('g','G').replace('c','C').replace('a','A').replace('t','T').replace('n','N')
# #################
# ##logic check####
# #################
# if read.is_reverse==False:
# if fragment_seq[0:read.reference_length]==read.query_sequence and len(fragment_seq)==read.template_length:
# print('fw match',read.reference_length)
# else:
# print(fragment_seq[0:read.reference_length],read.reference_length,'fw')
# print(read.query_sequence,len(read.query_sequence),'fw')
# print(len(fragment_seq),read.template_length)
# print('\n')
# elif read.is_reverse==True:
# if fragment_seq[-read.reference_length:]==read.query_sequence and len(fragment_seq)==-read.template_length:
# print('rv match',read.reference_length)
# else:
# print(fragment_seq[-read.reference_length:],read.reference_length,'rv')
# print(read.query_sequence,len(read.query_sequence),'rv')
# print(len(fragment_seq),read.template_length)
# print('\n')
# #################
#split and convert to numpy array
fragment_seq = np.array(list(fragment_seq))
#replace with values
fragment_seq[(fragment_seq=='G') | (fragment_seq=='C')]=1
fragment_seq[(fragment_seq=='A') | (fragment_seq=='T')]=0
fragment_seq[(fragment_seq=='N')]=np.random.randint(2) #choose a random 0 or 1 for N (so that you always get an integer) #should be very rare if the filter is done right
fragment_seq = fragment_seq.astype(int)
num_GC = int(fragment_seq.sum())
GC_dict[abs(read.template_length)][num_GC]+=1
print('done')
return(GC_dict)
# In[ ]:
start_time = time.time()
p = Pool(processes=CPU) #use the available CPU
sublists = np.array_split(mapable_intervals,CPU) #split the list into sublists, one per CPU
GC_dict_list = p.map(collect_reads, sublists, 1)
# In[ ]:
all_GC_df = pd.DataFrame()
for i,GC_dict in enumerate(GC_dict_list):
GC_df = pd.DataFrame()
for length in GC_dict.keys():
current = pd.Series(GC_dict[length]).reset_index()
current = current.rename(columns={'index':'num_GC',0:'number_of_fragments'})
current['length']=length
current = current[['length','num_GC','number_of_fragments']]
GC_df = GC_df.append(current, ignore_index=True)
GC_df = GC_df.set_index(['length','num_GC'])
all_GC_df[i] = GC_df['number_of_fragments']
del(GC_df,GC_dict)
all_GC_df = all_GC_df.sum(axis=1)
all_GC_df = pd.DataFrame(all_GC_df).rename(columns = {0:'number_of_fragments'})
all_GC_df = all_GC_df.reset_index()
all_GC_df.to_csv(out_file,sep='\t',index=False)
# In[ ]:
print('done')
# In[ ]:
# In[ ]:
# In[ ]:
| 1.898438 | 2 |
task2/04-task2-upload-dim-tables.py | canovasjm/InterviewProject_JuanCanovas | 0 | 6169 | <reponame>canovasjm/InterviewProject_JuanCanovas
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Mar 1 18:17:07 2021
@author: jm
"""
# %% required libraries
import numpy as np
import pandas as pd
from sqlalchemy import create_engine
# %% connect to DB
# create connection using pymssql
engine = create_engine('mssql+pymssql://sa:<<PASSWORD>>@localhost:1433/rga')
connection = engine.connect()
# %% read data sets from where I will build the dimension tables
# read employee roster data
employee_roster = pd.read_excel("datasources/Employee_Roster_Data.xlsx", sheet_name = 'Sheet1')
# read skills data
skills = pd.read_excel("datasources/skills.xlsx", sheet_name = "Sheet1")
# read hours data
hours = pd.read_excel("datasources/hours.xlsx", sheet_name = "Sheet1")
# %% dimensions created from source employee_roster
# %% create DIM_Currency
# get unique values
currencies = sorted(employee_roster['Currency'].unique())
# create a data frame
DIM_Currency = pd.DataFrame({'id_currency': (np.arange(len(currencies)) + 1), 'currency': currencies})
# send data frame to DB
DIM_Currency.to_sql('DIM_Currency', con = connection, if_exists = 'append', index = False)
# %% create DIM_Department
# get unique values
departments = sorted(pd.concat([employee_roster['Department'], skills['Department']], axis = 0).unique())
# create a data frame
DIM_Department = pd.DataFrame({'id_department': (np.arange(len(departments)) + 1), 'department': departments})
# send data frame to DB
DIM_Department.to_sql('DIM_Department', con = connection, if_exists = 'append', index = False)
# %% create DIM_Gender
# get unique values
genders = sorted(pd.concat([employee_roster['Gender'], skills['Gender']], axis = 0).unique())
# create a data frame
DIM_Gender = pd.DataFrame({'id_gender': (np.arange(len(genders)) + 1), 'gender': genders})
# send data frame to DB
DIM_Gender.to_sql('DIM_Gender', con = connection, if_exists = 'append', index = False)
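
# %% note: the dimension builds above repeat one pattern (unique values ->
# surrogate key -> to_sql). A hedged refactoring sketch, not used by this
# script; the parameter names are illustrative assumptions.
def build_dimension(values, id_col, value_col, table_name, con):
    """Build a simple id/value dimension table and load it into the DB."""
    unique_values = sorted(pd.Series(list(values)).unique())
    dim = pd.DataFrame({id_col: (np.arange(len(unique_values)) + 1), value_col: unique_values})
    dim.to_sql(table_name, con=con, if_exists='append', index=False)
    return dim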
# %% create DIM_User
# check if 'UserId' values in 'skills' are in 'User_ID' in 'employee_roster'
# we get 20134 'True' values, meaning that all 'UserId' in 'skills' are already
# in 'User_ID' in employee_roster
users_check_1 = np.isin(skills['UserId'], employee_roster['User_ID']).sum()
# check if 'UserId' values in 'hours' are in 'User_ID' in 'employee_roster'
# we get 7659 'True' values, meaning that NOT all 'UserId' in 'hours' are already
# in 'User_ID' in employee_roster
users_check_2 = np.isin(hours['UserId'], employee_roster['User_ID']).sum()
# get unique values
users = sorted(pd.concat([employee_roster['User_ID'], skills['UserId'], hours['UserId']], axis = 0).unique())
# create a data frame to use pd.merge()
df_users = pd.DataFrame({'User_ID': users})
# left join 'df_user' with 'employee_roster' on 'UserID'
users_final = pd.merge(df_users, employee_roster, on = 'User_ID', how ='left')
# select only columns I need
users_final = users_final[['User_ID', 'Email_ID', 'Fullname']]
# rename columns
users_final.rename(columns = {'User_ID': 'id_user', 'Email_ID': 'id_email', 'Fullname': 'fullname'}, inplace = True)
# send data frame to DB
users_final.to_sql('DIM_User', con = connection, if_exists = 'append', index = False)
# %% dimensions created from source skills
# %% create DIM_AttributeGroup
# get unique values
att_group = sorted(skills['Attribute Group'].unique())
# create a data frame
DIM_AttributeGroup = pd.DataFrame({'id_att_group': (np.arange(len(att_group)) + 1), 'attribute_group': att_group})
# send data frame to DB
DIM_AttributeGroup.to_sql('DIM_AttributeGroup', con = connection, if_exists = 'append', index = False)
# %% create DIM_AttributeSubGroup
# get unique values
att_sub_group = sorted(skills['Attribute Sub-Group'].unique())
# create a data frame
DIM_AttributeSubGroup = pd.DataFrame({'id_att_sub_group': (np.arange(len(att_sub_group)) + 1), 'attribute_sub_group': att_sub_group})
# send data frame to DB
DIM_AttributeSubGroup.to_sql('DIM_AttributeSubGroup', con = connection, if_exists = 'append', index = False)
# %% create DIM_AttributeName
# get unique values
att_name = sorted(skills['Attribute Name'].unique())
# create a data frame
DIM_AttributeName = pd.DataFrame({'id_att_name': (np.arange(len(att_name)) + 1), 'attribute_name': att_name})
# send data frame to DB
DIM_AttributeName.to_sql('DIM_AttributeName', con = connection, if_exists = 'append', index = False)
| 2.828125 | 3 |
cogs/server.py | vikasbaghel1001/Kanna-Chan | 5 | 6170 | import discord
from discord.ext import commands
arrow = "<a:right:877425183839891496>"
kwee = "<:kannawee:877036162122924072>"
kdance = "<a:kanna_dance:877038778798207016>"
kbored = "<:kanna_bored:877036162827583538>"
ksmug = "<:kanna_smug:877038777896427560>"
heart = "<a:explosion_heart:877426228775227392>"
class Server(commands.Cog):
def __init__(self, client):
self.client = client
self.kana_id = 857835279259664403
@commands.command()
@commands.is_owner()
async def sabout(self, ctx):
kana = self.client.get_user(self.kana_id)
about_file = discord.File("./images/about_server.png")
await ctx.send(file = about_file)
emb = discord.Embed(title=f"{kdance} ABOUT SERVER {kdance}",description = f"{arrow} **DRAGON LOLI'S HOME** is the official Server of the bot **Kanna Chan**. It's a friendly community meant for having fun, chilling and spending time with others.\n{arrow} This server has cute emotes and a lot of fun events are about to be done here! So, stay tuned!", color=0xfc74c6)
emb.add_field(
name=f"{kwee} __ROLES__",
value=f"{arrow} <@&876800883441156138> The highest role supposed to be only for Kanna Chan.\n{arrow} <@&876817811396263946> Admins of the Server and have the highest power and authority after owner.\n{arrow} <@&876818242058997791> Moderators of the server meant to moderate the chat and maintain a positive environment in community.\n{arrow} <@&876801038420701196> Developer(s) of <NAME> have this role.\n{arrow} <@&876804164661944340> All other users who join this server get this role by default. They have image and embed perms by deault.\n{arrow} **PS: APART FROM THESE SELF-ROLES ARE ALSO AVAIALBLE FOR MEMBERS.**",
inline=False
)
emb.add_field(
name=f"{ksmug} __CHANNELS__",
value=f"{arrow} <#877030933847490691> Read the rules here.\n{arrow} <#877031867440832574> Channel for grabbing self-roles.\n{arrow} <#876798564704084011> The general chat for the server.\n{arrow} <#876798809819189249> Bot Commands should be executed here.\n{arrow} <#876798696078065694> You can give suggestions for improving Kanna Chan here.\n{arrow} <#876798720254029864> You can report BUGS here if you find any in Kanna Chan.\n{arrow} <#876798750876651530> For any other support or query use this channel.\n{arrow} **P.S: YOU CAN PING ANY STAFF MEMBER OR DEVELOPER WHILE REPORTING BUG OR IN CASE OF ANY QUERY.**",
inline=False
)
emb.set_footer(
text="<NAME>",
icon_url=kana.avatar_url
)
await ctx.send(embed=emb)
@commands.command()
@commands.is_owner()
async def rule(self, ctx):
kana = self.client.get_user(self.kana_id)
rule_file = discord.File("./images/rules.png")
await ctx.send(file=rule_file)
emb = discord.Embed(title=f"{kbored} RULES {kbored}", color=0xfc74c6)
emb.add_field(
name=f"{heart} **Be respectful**",
value=f"You must respect all users, regardless of your liking towards them. Treat others the way you want to be treated.",
inline=False
)
emb.add_field(
name=f"{heart} **No Inappropriate Language**",
value=f"{arrow} The use of profanity should be kept to a minimum. However, any derogatory language towards any user is prohibited.",
inline=False
)
emb.add_field(
name=f"{heart} **No spamming**",
value=f"{arrow} Don't send a lot of small messages right after each other. Do not disrupt chat by spamming.",
inline=False
)
emb.add_field(
name=f"{heart} **No pornographic/adult/other NSFW material**",
value=f"{arrow} This is a community server and not meant to share this kind of material.",
inline=False
)
emb.add_field(
name=f"{heart} **No advertisements**",
value=f"{arrow} We do not tolerate any kind of advertisements, whether it be for other communities or streams. You can post your content in the media channel if it is relevant and provides actual value (Video/Art)",
inline=False
)
emb.add_field(
name=f"{heart} **No offensive names and profile pictures**",
value=f"{arrow} You will be asked to change your name or picture if the staff deems them inappropriate.",
inline=False
)
emb.add_field(
name=f"{heart} **Server Raiding**",
value=f"{arrow} Raiding or mentions of raiding are not allowed.",
inline=False
)
emb.add_field(
name=f"{heart} **Direct & Indirect Threats**",
value=f"{arrow} Threats to other users of DDoS, Death, DoX, abuse, and other malicious threats are absolutely prohibited and disallowed.",
inline=False
)
emb.add_field(
name=f"{heart} **Follow the Discord Community Guidelines**",
value=f"{arrow} You can find them here: https://discordapp.com/guidelines",
inline=False
)
emb.add_field(
name=f"{heart} **VOICE CHANNELS**",
value=f"{arrow} Do not join voice chat channels without permission of the people already in there.",
inline=False
)
emb.add_field(
name=f"{heart} **DECISIONS AND ISSUES**",
value = f"{arrow} ***The Admins and Mods will Mute/Kick/Ban per discretion. If you feel mistreated DM an Admin and we will resolve the issue.***",
inline=False
)
emb.add_field(
name=f"{heart} **CHANGES**",
value = f"{arrow} ***Your presence in this server implies accepting these rules, including all further changes. These changes might be done at any time without notice, it is your responsibility to check for them.***",
inline=False
)
emb.set_footer(
text="<NAME>",
icon_url=kana.avatar_url
)
await ctx.send(embed=emb)
@commands.Cog.listener()
async def on_member_join(self, member):
if member.guild.id == 876798564704084008:
if member.bot:
return
else:
member_role = member.guild.get_role(876804164661944340)
await member.add_roles(member_role)
desc = f"{member.name} Thanks for joining Kanna's Server. The server is currently under construction, Thanks for being an **early supporter**!! If you need any kind of help or support just ping any staff member or DM `aSHish#1198`. Have a nice stay in the server :)"
await member.send(desc)
else:
return
def setup(client):
client.add_cog(Server(client))
print(">> Server Utility loaded") | 2.5625 | 3 |
test/run/t344.py | timmartin/skulpt | 2,671 | 6171 | for ch in "Hello world!":
d = ord(ch)
h = hex(d)
o = oct(d)
b = bin(d)
print ch, d, h, o, b
| 3.390625 | 3 |
paho/mqtt/subscribe.py | RandomGamer342/TTM4115-plantsensor | 8 | 6172 | <gh_stars>1-10
# Copyright (c) 2016 <NAME> <<EMAIL>>
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Eclipse Public License v1.0
# and Eclipse Distribution License v1.0 which accompany this distribution.
#
# The Eclipse Public License is available at
# http://www.eclipse.org/legal/epl-v10.html
# and the Eclipse Distribution License is available at
# http://www.eclipse.org/org/documents/edl-v10.php.
#
# Contributors:
# <NAME> - initial API and implementation
"""
This module provides some helper functions to allow straightforward subscribing
to topics and retrieving messages. The two functions are simple(), which
returns one or messages matching a set of topics, and callback() which allows
you to pass a callback for processing of messages.
"""
import paho.mqtt.client as paho
import paho.mqtt as mqtt
import ssl
def _on_connect(c, userdata, flags, rc):
"""Internal callback"""
if rc != 0:
raise mqtt.MQTTException(paho.connack_string(rc))
if type(userdata['topics']) is list:
for t in userdata['topics']:
c.subscribe(t, userdata['qos'])
else:
c.subscribe(userdata['topics'], userdata['qos'])
def _on_message_callback(c, userdata, message):
"""Internal callback"""
userdata['callback'](c, userdata['userdata'], message)
def _on_message_simple(c, userdata, message):
"""Internal callback"""
if userdata['msg_count'] == 0:
return
# Don't process stale retained messages if 'retained' was false
if userdata['retained'] == False and message.retain == True:
return
userdata['msg_count'] = userdata['msg_count'] - 1
if userdata['messages'] is None and userdata['msg_count'] == 0:
userdata['messages'] = message
c.disconnect()
return
userdata['messages'].append(message)
if userdata['msg_count'] == 0:
c.disconnect()
def callback(callback, topics, qos=0, userdata=None, hostname="localhost",
port=1883, client_id="", keepalive=60, will=None, auth=None, tls=None,
protocol=paho.MQTTv311, transport="tcp"):
"""Subscribe to a list of topics and process them in a callback function.
This function creates an MQTT client, connects to a broker and subscribes
to a list of topics. Incoming messages are processed by the user provided
callback. This is a blocking function and will never return.
callback : function of the form "on_message(client, userdata, message)" for
processing the messages received.
topics : either a string containing a single topic to subscribe to, or a
list of topics to subscribe to.
qos : the qos to use when subscribing. This is applied to all topics.
userdata : passed to the callback
hostname : a string containing the address of the broker to connect to.
Defaults to localhost.
port : the port to connect to the broker on. Defaults to 1883.
client_id : the MQTT client id to use. If "" or None, the Paho library will
generate a client id automatically.
keepalive : the keepalive timeout value for the client. Defaults to 60
seconds.
will : a dict containing will parameters for the client: will = {'topic':
"<topic>", 'payload':"<payload">, 'qos':<qos>, 'retain':<retain>}.
Topic is required, all other parameters are optional and will
default to None, 0 and False respectively.
Defaults to None, which indicates no will should be used.
auth : a dict containing authentication parameters for the client:
auth = {'username':"<username>", 'password':"<password>"}
Username is required, password is optional and will default to None
if not provided.
Defaults to None, which indicates no authentication is to be used.
tls : a dict containing TLS configuration parameters for the client:
dict = {'ca_certs':"<ca_certs>", 'certfile':"<certfile>",
'keyfile':"<keyfile>", 'tls_version':"<tls_version>",
'ciphers':"<ciphers">}
ca_certs is required, all other parameters are optional and will
default to None if not provided, which results in the client using
the default behaviour - see the paho.mqtt.client documentation.
Defaults to None, which indicates that TLS should not be used.
transport : set to "tcp" to use the default setting of transport which is
raw TCP. Set to "websockets" to use WebSockets as the transport.
"""
if qos < 0 or qos > 2:
raise ValueError('qos must be in the range 0-2')
callback_userdata = {
'callback':callback,
'topics':topics,
'qos':qos,
'userdata':userdata}
client = paho.Client(client_id=client_id,
userdata=callback_userdata, protocol=protocol, transport=transport)
client.on_message = _on_message_callback
client.on_connect = _on_connect
if auth is not None:
username = auth['username']
try:
password = auth['password']
except KeyError:
            password = None
client.username_pw_set(username, password)
if will is not None:
will_topic = will['topic']
try:
will_payload = will['payload']
except KeyError:
will_payload = None
try:
will_qos = will['qos']
except KeyError:
will_qos = 0
try:
will_retain = will['retain']
except KeyError:
will_retain = False
client.will_set(will_topic, will_payload, will_qos, will_retain)
if tls is not None:
ca_certs = tls['ca_certs']
try:
certfile = tls['certfile']
except KeyError:
certfile = None
try:
keyfile = tls['keyfile']
except KeyError:
keyfile = None
try:
tls_version = tls['tls_version']
except KeyError:
            tls_version = ssl.PROTOCOL_SSLv23
try:
ciphers = tls['ciphers']
except KeyError:
ciphers = None
client.tls_set(ca_certs, certfile, keyfile, tls_version=tls_version,
ciphers=ciphers)
client.connect(hostname, port, keepalive)
client.loop_forever()
def simple(topics, qos=0, msg_count=1, retained=True, hostname="localhost", port=1883,
client_id="", keepalive=60, will=None, auth=None, tls=None,
protocol=paho.MQTTv311, transport="tcp"):
"""Subscribe to a list of topics and return msg_count messages.
This function creates an MQTT client, connects to a broker and subscribes
to a list of topics. Once "msg_count" messages have been received, it
disconnects cleanly from the broker and returns the messages.
topics : either a string containing a single topic to subscribe to, or a
list of topics to subscribe to.
qos : the qos to use when subscribing. This is applied to all topics.
msg_count : the number of messages to retrieve from the broker.
if msg_count == 1 then a single MQTTMessage will be returned.
if msg_count > 1 then a list of MQTTMessages will be returned.
retained : If set to True, retained messages will be processed the same as
non-retained messages. If set to False, retained messages will
be ignored. This means that with retained=False and msg_count=1,
the function will return the first message received that does
not have the retained flag set.
hostname : a string containing the address of the broker to connect to.
Defaults to localhost.
port : the port to connect to the broker on. Defaults to 1883.
client_id : the MQTT client id to use. If "" or None, the Paho library will
generate a client id automatically.
keepalive : the keepalive timeout value for the client. Defaults to 60
seconds.
will : a dict containing will parameters for the client: will = {'topic':
"<topic>", 'payload':"<payload">, 'qos':<qos>, 'retain':<retain>}.
Topic is required, all other parameters are optional and will
default to None, 0 and False respectively.
Defaults to None, which indicates no will should be used.
auth : a dict containing authentication parameters for the client:
auth = {'username':"<username>", 'password':"<password>"}
Username is required, password is optional and will default to None
if not provided.
Defaults to None, which indicates no authentication is to be used.
tls : a dict containing TLS configuration parameters for the client:
dict = {'ca_certs':"<ca_certs>", 'certfile':"<certfile>",
'keyfile':"<keyfile>", 'tls_version':"<tls_version>",
'ciphers':"<ciphers">}
ca_certs is required, all other parameters are optional and will
default to None if not provided, which results in the client using
the default behaviour - see the paho.mqtt.client documentation.
Defaults to None, which indicates that TLS should not be used.
transport : set to "tcp" to use the default setting of transport which is
raw TCP. Set to "websockets" to use WebSockets as the transport.
"""
if msg_count < 1:
raise ValueError('msg_count must be > 0')
# Set ourselves up to return a single message if msg_count == 1, or a list
# if > 1.
if msg_count == 1:
messages = None
else:
messages = []
userdata = {'retained':retained, 'msg_count':msg_count, 'messages':messages}
callback(_on_message_simple, topics, qos, userdata, hostname, port,
client_id, keepalive, will, auth, tls, protocol, transport)
return userdata['messages']
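
if __name__ == "__main__":
    # Hedged usage sketch (not part of the original module): topic and hostname
    # are assumptions and require a reachable MQTT broker.
    msg = simple("paho/test/topic", hostname="localhost")
    print("%s %s" % (msg.topic, msg.payload))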
| 2.5 | 2 |
py/2017/day24/aoc_day_24.py | cs-cordero/advent-of-code | 0 | 6173 | <gh_stars>0
from collections import defaultdict
def solution():
starting_components = d[0]
best_scores = []
for component in starting_components:
n_a, n_b = get_ports(component)
nxt_port = n_a if n_b == 0 else n_b
best_scores.append(recurse(component, set(), nxt_port, 0))
print("fuck", max(best_scores))
def recurse(component, seen, next_port, level):
seen.add(component)
c_a, c_b = get_ports(component)
next_components = d[next_port] - seen
my_score = sum(get_ports(component))
scores = []
for next_component in next_components:
n_a, n_b = get_ports(next_component)
nxt_port = n_a if n_b in (c_a, c_b) else n_b
score, reclevel = recurse(next_component, seen.copy(), nxt_port, level + 1)
scores.append((score, reclevel))
scores = sorted(scores, key=lambda x: (x[1], x[0]), reverse=True)
print(component, level, scores)
return my_score + (scores[0][0] if scores else 0), scores[0][1] if scores else level
def get_ports(component):
return map(int, component.split("/"))
if __name__ == "__main__":
d = defaultdict(set)
# with open('aoc_day_24_sample.txt') as f:
with open("aoc_day_24_input.txt") as f:
sample = f.readlines()
# sample = [
# '0/1',
# '1/2',
# '1/3',
# '1/4',
# '5/0',
# '2/5',
# '3/6',
# '4/500'
# ]
for component in sample:
a, b = map(int, component.split("/"))
d[a].add(component)
d[b].add(component)
solution()
| 2.640625 | 3 |
scratchnet/scratchnet.py | Gr1m3y/scratchnet | 0 | 6174 | import numpy as np
import network
def main():
x = np.array([2, 3])
nw = network.NeuralNetwork()
print(nw.feedforward(x))
if __name__ == "__main__":
main()
| 3.140625 | 3 |
live-plotting.py | rmhsawyer/EC601-Final-Project-Mapping_User_Face_To_Emoji | 0 | 6175 | <reponame>rmhsawyer/EC601-Final-Project-Mapping_User_Face_To_Emoji
#draw the predictions from real-time.py
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from matplotlib import style
style.use('fivethirtyeight')
fig = plt.figure()
ax1 = fig.add_subplot(1,1,1)
def animate(i):
graph_data = open('emotion.txt', 'r').read()
lines = graph_data.split('\n')
xs = []
y_angry = []
y_fear = []
y_happy = []
y_sad = []
y_surprise = []
y_neutral = []
for line in lines:
if len(line) > 1:
time, angry, fear, happy, sad, surprise, neutral = line.split(',')
xs.append(time)
y_angry.append(angry)
y_fear.append(fear)
y_happy.append(happy)
y_sad.append(sad)
y_surprise.append(surprise)
y_neutral.append(neutral)
ax1.clear()
ax1.plot(xs, y_angry)
ax1.plot(xs, y_fear)
ax1.plot(xs, y_happy)
ax1.plot(xs, y_sad)
ax1.plot(xs, y_surprise)
ax1.plot(xs, y_neutral)
ani = animation.FuncAnimation(fig, animate, interval=1000)
plt.show()
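
# Note (assumption about the producer, real-time.py): each line of emotion.txt
# is expected to look like
#   12:01:05,0.10,0.05,0.60,0.10,0.05,0.10
# i.e. a timestamp followed by six comma-separated emotion scores, matching the
# seven-way split done in animate() above.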
| 3.21875 | 3 |
code/run_policy.py | kirk86/ARS | 0 | 6176 | <reponame>kirk86/ARS
"""
Code to load a policy and generate rollout data. Adapted from https://github.com/berkeleydeeprlcourse.
Example usage:
python run_policy.py ../trained_policies/Humanoid-v1/policy_reward_11600/lin_policy_plus.npz Humanoid-v1 --render \
--num_rollouts 20
"""
import numpy as np
import gym
def main():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('expert_policy_file', type=str)
parser.add_argument('envname', type=str)
parser.add_argument('--render', action='store_true')
parser.add_argument('--num_rollouts', type=int, default=20,
help='Number of expert rollouts')
args = parser.parse_args()
print('loading and building expert policy')
lin_policy = np.load(args.expert_policy_file)
lin_policy = lin_policy[lin_policy.files[0]]
M = lin_policy[0]
# mean and std of state vectors estimated online by ARS.
mean = lin_policy[1]
std = lin_policy[2]
env = gym.make(args.envname)
returns = []
observations = []
actions = []
for i in range(args.num_rollouts):
print('iter', i)
obs = env.reset()
done = False
totalr = 0.
steps = 0
while not done:
action = np.dot(M, (obs - mean)/std)
observations.append(obs)
actions.append(action)
obs, r, done, _ = env.step(action)
totalr += r
steps += 1
if args.render:
env.render()
if steps % 100 == 0: print("%i/%i"%(steps, env.spec.timestep_limit))
if steps >= env.spec.timestep_limit:
break
returns.append(totalr)
print('returns', returns)
print('mean return', np.mean(returns))
print('std of return', np.std(returns))
if __name__ == '__main__':
main()
| 2.9375 | 3 |
src/poliastro/plotting/tisserand.py | TreshUp/poliastro | 0 | 6177 | """ Generates Tisserand plots """
from enum import Enum
import numpy as np
from astropy import units as u
from matplotlib import pyplot as plt
from poliastro.plotting._base import BODY_COLORS
from poliastro.twobody.mean_elements import get_mean_elements
from poliastro.util import norm
class TisserandKind(Enum):
"""All possible Tisserand kinds"""
APSIS = "apsis"
ENERGY = "energy"
PERIOD = "period"
class TisserandPlotter:
"""Generates Tisserand figures"""
def __init__(self, kind=TisserandKind.APSIS, axes=None):
"""Object initializer
Parameters
----------
kind : TisserandKind
Nature for the Tisserand
axes : ~matplotlib.pyplot.axes
Axes for the figure
"""
        # Assign Tisserand kind
self.kind = kind
# Check if axis available
if not axes:
_, self.ax = plt.subplots(1, 1)
else:
self.ax = axes
# Force axes scale regarding Tisserand kind
self.ax.set_xscale("log")
if self.kind == TisserandKind.APSIS:
self.ax.set_yscale("log")
def _solve_tisserand(
self, body, vinf_span, num_contours, alpha_lim=(0, np.pi), N=100
):
"""Solves all possible Tisserand lines with a meshgrid workflow
Parameters
----------
body : ~poliastro.bodies.Body
Body to be plotted Tisserand
        vinf_span : tuple of ~astropy.units.Quantity
            Minimum and maximum hyperbolic excess velocities for the flyby
num_contours : int
Number of contour lines for flyby speed
alpha_lim : tuple
Minimum and maximum flyby angles.
N : int
Number of points for flyby angle.
Notes
-----
The algorithm for generating Tisserand plots is the one depicted in
"Preliminary Trajectory Design of a Mission to Enceladus" by David
<NAME>, section 3.6
"""
        # Generate mean orbital elements of the body
body_rv = get_mean_elements(body).to_vectors()
R_body, V_body = norm(body_rv.r), norm(body_rv.v)
# Generate non-dimensional velocity and alpha span
vinf_array = np.linspace(vinf_span[0], vinf_span[-1], num_contours)
alpha_array = np.linspace(alpha_lim[0], alpha_lim[-1], N)
vinf_array /= V_body
# Construct the mesh for any configuration
V_INF, ALPHA = np.meshgrid(vinf_array, alpha_array)
# Solving for non-dimensional a_sc and ecc_sc
A_SC = 1 / np.abs(1 - V_INF ** 2 - 2 * V_INF * np.cos(ALPHA))
ECC_SC = np.sqrt(1 - 1 / A_SC * ((3 - 1 / A_SC - V_INF ** 2) / (2)) ** 2)
# Compute main Tisserand variables
RR_P = A_SC * R_body * (1 - ECC_SC)
RR_A = A_SC * R_body * (1 + ECC_SC)
TT = 2 * np.pi * np.sqrt((A_SC * R_body) ** 3 / body.parent.k)
EE = -body.parent.k / (2 * A_SC * R_body)
# Build color lines to internal canvas
return RR_P, RR_A, EE, TT
def _build_lines(self, RR_P, RR_A, EE, TT, color):
"""Collect lines and append them to internal data
Parameters
----------
        RR_P, RR_A, EE, TT : ~astropy.units.Quantity
            Periapsis radii, apoapsis radii, specific energies and periods
        color : str
            Color for the plotted lines
Returns
-------
lines: list
Plotting lines for the Tisserand
"""
# Plot desired kind lines
if self.kind == TisserandKind.APSIS:
# Generate apsis lines
lines = self.ax.plot(RR_A.to(u.AU), RR_P.to(u.AU), color=color)
elif self.kind == TisserandKind.ENERGY:
# Generate energy lines
lines = self.ax.plot(
RR_P.to(u.AU), EE.to(u.km ** 2 / u.s ** 2), color=color
)
elif self.kind == TisserandKind.PERIOD:
# Generate period lines
lines = self.ax.plot(RR_P.to(u.AU), TT.to(u.year), color=color)
return lines
def plot_line(self, body, vinf, alpha_lim=(0, np.pi), color=None):
"""Plots body Tisserand line within flyby angle
Parameters
----------
body : ~poliastro.bodies.Body
Body to be plotted Tisserand
vinf : ~astropy.units.Quantity
Vinf velocity line
alpha_lim : tuple
Minimum and maximum flyby angles
color : str
String representing for the color lines
Returns
-------
self.ax: ~matplotlib.axes.Axes
Apsis tisserand is the default plotting option
"""
# HACK: to reuse Tisserand solver, we transform input Vinf into a tuple
vinf_span = (vinf, vinf)
# Solve Tisserand parameters
RR_P, RR_A, EE, TT = self._solve_tisserand(
body, vinf_span, num_contours=2, alpha_lim=alpha_lim
)
# Check if color defined
if not color:
color = BODY_COLORS[body.name]
# Build canvas lines from Tisserand parameters
self._build_lines(RR_P, RR_A, EE, TT, color)
return self.ax
def plot(self, body, vinf_span, num_contours=10, color=None):
"""Plots body Tisserand for given amount of solutions within Vinf span
Parameters
----------
body : ~poliastro.bodies.Body
Body to be plotted Tisserand
vinf_span : tuple
Minimum and maximum Vinf velocities
num_contours : int
Number of points to iterate over previously defined velocities
color : str
String representing for the color lines
Returns
-------
self.ax: ~matplotlib.axes.Axes
Apsis tisserand is the default plotting option
"""
# Solve Tisserand parameters
RR_P, RR_A, EE, TT = self._solve_tisserand(body, vinf_span, num_contours)
# Check if color defined
if not color:
color = BODY_COLORS[body.name]
# Build canvas lines from Tisserand parameters
self._build_lines(RR_P, RR_A, EE, TT, color)
return self.ax
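

# Hedged usage sketch (not part of poliastro itself): the body and velocity
# span below are assumptions chosen only to illustrate the API.
if __name__ == "__main__":
    from poliastro.bodies import Earth

    tp = TisserandPlotter(kind=TisserandKind.APSIS)
    tp.plot(Earth, (5 * u.km / u.s, 15 * u.km / u.s), num_contours=10)
    plt.show()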
| 2.953125 | 3 |
libs/Rack.py | jlin/inventory | 22 | 6178 | from KeyValueTree import KeyValueTree
from truth.models import KeyValue as TruthKeyValue, Truth
from systems.models import KeyValue as KeyValue
from django.test.client import RequestFactory
from api_v2.keyvalue_handler import KeyValueHandler
import json
factory = RequestFactory()
class Rack:
rack_name = None
tree = None
kv = None
ru = None
width = None
systems = []
ethernet_patch_panel_24 = []
ethernet_patch_panel_48 = []
def __init__(self, rack_name):
self.systems = []
self.rack_name = rack_name
self.kv = Truth.objects.select_related('truth_key_value').get(name=self.rack_name)
self.system_list = KeyValue.objects.select_related('system').filter(value__contains="truth:%s" % (self.rack_name))
self.ethernet_patch_panel_24 = self._get_ethernet_patch_panels(self.kv, 'ethernet', 24)
self.ethernet_patch_panel_48 = self._get_ethernet_patch_panels(self.kv, 'ethernet', 48)
        h = KeyValueHandler()
for s in self.system_list:
request = factory.get('/api/v2/keyvalue/?keystore=%s' % (s.system.hostname), follow=True)
tree = h.read(request)
system_ru = self._get_system_ru(tree)
system_image = self._get_system_image(tree)
system_slot = self._get_system_slot(tree)
self.systems.append({
"system_name":s.system.hostname,
"system_id":s.system.id,
"system_ru":system_ru,
"system_image":system_image,
'system_slot':system_slot,
'operating_system':str(s.system.operating_system),
'server_model': str(s.system.server_model),
'oob_ip': str(s.system.oob_ip),
})
self.systems = sorted(self.systems, key=lambda k: k['system_slot'])
try:
self.ru = self.kv.keyvalue_set.get(key='rack_ru').value
except:
self.ru = 42
try:
self.width = self.kv.keyvalue_set.get(key='rack_width').value
except:
self.width = 30
def _get_ethernet_patch_panels(self, tree, type, port_count):
ret = []
for i in tree.keyvalue_set.all():
match_string = "%i_port_%s_patch_panel" % (port_count, type)
if str(i.key) == match_string:
ret.append(i.value)
return ret
def _get_system_ru(self, tree):
for i in tree.iterkeys():
try:
if 'system_ru' in i.split(':'):
return tree[i]
except:
pass
return 4
def _get_system_image(self, tree):
for i in tree.iterkeys():
try:
if 'system_image' in i.split(':'):
return tree[i]
except:
pass
return None
def _get_system_slot(self, tree):
for i in tree.iterkeys():
try:
if 'system_slot' in i.split(':'):
return tree[i]
except:
pass
return 1
| 2.203125 | 2 |
r2c_isg/functions/__init__.py | returntocorp/inputset-generator | 3 | 6179 | from .trim import trim
from .sample import sample
from .sort import sort
function_map = {
'trim': trim,
'sample': sample,
'sort': sort
}
| 1.664063 | 2 |
__init__.py | csalyk/nirspec | 0 | 6180 | from .nirspec import divspec
from .nirspec import gluespec
| 0.9375 | 1 |
poem.py | xcollantes/poetry-generator | 0 | 6181 | from __future__ import absolute_import
from __future__ import print_function
import datetime
import os
import random
import sys
import uuid
import base64
import yaml
import re
try:
import en
except:
print("DOWNLOD NODECUBE")
print("""wget https://www.nodebox.net/code/data/media/linguistics.zip
unzip linguistics.zip""")
VERSION = "1.1"
THEME_PROB = 0
class bnfDictionary:
def __init__(self, file):
self.grammar = yaml.load(open(file,'r'))
self.poemtype = "<poem>"
def generate(self, key, num):
gram = self.grammar[key]
if len(gram)==1:
i = 0
else:
i = random.randint(0, len(gram) - 1)
string = ""
if "<" not in gram[i]:
string = gram[i]
else:
for word in gram[i].split():
if "<" not in word:
string = string + word + " "
else:
if "verb" in word and word != '<adverb>':
if "pverb" in word or "mushy" in self.poemtype:
v = self.generate("<pverb>", 1).strip()
elif "nverb" in word:
v = self.generate("<nverb>", 1).strip()
# else:
# v = self.generate("<verb>", 1).strip()
if random.randint(1, 100) < THEME_PROB:
v = self.generate("<theme-verb>", 1).strip()
if "verb-inf" in word:
string = string + \
en.verb.present_participle(v) + " "
elif "verb-pr" in word:
string = string + \
en.verb.present(
v, person=3, negate=False) + " "
elif "verb-past" in word:
string = string + en.verb.past(v) + " "
else:
string = string + v + " "
elif "noun" in word:
if "pnoun" in word or "mushy" in self.poemtype:
v = self.generate("<pnoun>", 1).strip()
elif "nnoun" in word:
v = self.generate("<nnoun>", 1).strip()
else:
v = self.generate("<noun>", 1).strip()
if random.randint(1, 100) < THEME_PROB:
v = self.generate("<theme-noun>", 1).strip()
if "pl" in word:
v = en.noun.plural(v)
string = string + v + " "
elif "person" in word:
v = self.generate("<person>", 1).strip()
if "pl" in word:
v = en.noun.plural(v)
string = string + v + " "
elif "adj" in word:
if "mushy" in self.poemtype:
v = self.generate("<padj>",1)
else:
if random.randint(1, 100) < THEME_PROB:
v = self.generate("<theme-adj>", 1).strip()
else:
v = self.generate(word, 1).strip()
string = string + v + " "
elif "fruit" in word:
v = self.generate("<fruit>", 1).strip()
if "pl" in word:
v = en.noun.plural(v)
string = string + self.generate(word, 1) + " "
elif "person" in word:
v = self.generate("<fruit>", 1).strip()
if "pl" in word:
v = en.noun.plural(v)
string = string + self.generate(word, 1) + " "
else:
if "-pl" in word:
v = en.noun.plural(self.generate(word.replace("-pl",""),1))
else:
v = self.generate(word, 1)
string = string + v + " "
return string
def generatePretty(self, key, seed_str):
if seed_str == None:
seed_str = str(uuid.uuid4()).split("-")[0]
random.seed(uuid.uuid5(uuid.NAMESPACE_DNS,seed_str).int)
#tool = language_check.LanguageTool('en-US')
self.poemtype = key
if key == "<mushypoem>":
key = "<poem>"
poem = self.generate(key, 1)
poem = poem.replace(" ,", ",")
puncuation = [".", ".", ".", ".", "!", "?"]
dontbreaks = ["of", "behind", "the", "when", "what", "why", "who", ",",
"your", "by", "like", "to", "you", "your", "a", "are", "become", "newline"]
capitalize = False
breaks = 0
poem2 = []
foundFirstBreak = False
for word in poem.replace("\n", "newline").split():
poem2.append(word.lower())
if random.randint(1, 100) < 2 and "newline" not in word and foundFirstBreak:
isgood = True
for dontbreak in list(dontbreaks + puncuation):
if dontbreak == word.lower():
isgood = False
if isgood:
poem2.append("newline")
if "newline" in word:
foundFirstBreak = True
poem3 = []
beforeFirstBreak = True
for word in poem2:
if "newline" in word:
breaks += 1
beforeFirstBreak = False
else:
breaks = 0
if beforeFirstBreak or word == "i" or "i'" in word:
word = word.capitalize()
poem3.append(word)
capitalize = False
else:
if breaks > 1:
capitalize = True
if capitalize == True and "newline" not in word:
word = word.capitalize()
capitalize = False
for punc in list(set(puncuation)):
if punc in word:
capitalize = True
poem3.append(word)
if random.randint(1, 100) < 0 and "newline" not in word:
isgood = True
for dontbreak in list(dontbreaks + puncuation):
if dontbreak == word.lower():
isgood = False
if isgood:
poem3.append(random.choice(puncuation))
capitalize = True
# noPunc = True
# for punc in list(set(puncuation)):
# if punc in word:
# noPunc = False
# if noPunc:
# poem3.append(random.choice(puncuation))
newPoem = " ".join(poem3)
newPoem = newPoem.replace(" a a", " an a")
newPoem = newPoem.replace("newline .", ". newline")
newPoem = newPoem.replace("newline ?", "? newline")
newPoem = newPoem.replace("newline !", "! newline")
newPoem = newPoem.replace("newline ,", ", newline")
newPoem = newPoem.replace("newline", "\n")
newPoem = newPoem.replace(" \n \n", "\n\n")
newPoem = newPoem.replace("\n \n ", "\n\n")
newPoem = newPoem.replace(" '", "'")
for punc in list(set(puncuation)):
newPoem = newPoem.replace(" " + punc, punc)
for punc in list(set(puncuation)):
newPoem = newPoem.replace(" " + punc, punc)
for punc in list(set(puncuation)):
newPoem = newPoem.replace(" " + punc, punc)
newPoem = newPoem.replace(" ,", ",")
newPoem = newPoem.replace("?.", "?")
newPoem = newPoem.replace(".?", ".")
newPoem = newPoem.replace(",.", ",")
newPoem = newPoem.replace("!.", "!")
newPoem = newPoem.replace("..", ".")
newPoem = newPoem.replace("..", ".")
newPoem = newPoem.replace("..", ".")
title = newPoem.split("\n")[0]
newTitle = title.replace(".", "")
newPoem = newPoem.replace(title, "<h1>" + newTitle + "</h1>")
newPoem2 = ""
firstLine = False
secondLine = False
for line in newPoem.split("\n"):
if len(line) > 0:
if firstLine and not secondLine:
newPoem2 = newPoem2 + "<p>\n"
secondLine = True
if firstLine == False:
firstLine = True
newPoem2 = newPoem2 + line + " \n"
if firstLine and secondLine:
newPoem2 = newPoem2 + line + " <br />\n"
else:
newPoem2 = newPoem2 + " <br />\n"
newPoem2 = newPoem2 + "</p>"
return newPoem2,seed_str
bnf = bnfDictionary('brain.yaml')
def generate_poem(poemtype, hex_seed=None):
p,seed_str = bnf.generatePretty('<' + poemtype + '>',hex_seed)
return p,seed_str
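
# Note: generate_poem() returns the poem together with its seed string; passing
# the same seed back through hex_seed should reproduce the identical poem, e.g.
#   p1, seed = generate_poem("poem")
#   p2, _ = generate_poem("poem", hex_seed=seed)   # expected: p2 == p1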
if __name__ == '__main__':
poemtype = 'poem'
if 'mushy' in sys.argv[1:]:
poemtype = 'mushypoem'
p,seed_str=generate_poem(poemtype)
print(("*"*30 + "\n"*5))
filtered = []
for line in re.sub("<.*?>", " ", p).split("\n"):
if len(line.strip()) > 0:
filtered.append(line.strip())
else:
filtered.append("pause")
print(p)
| 2.625 | 3 |
openstack/tests/unit/block_storage/v2/test_proxy.py | infonova/openstacksdk | 0 | 6182 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack.block_storage.v2 import _proxy
from openstack.block_storage.v2 import snapshot
from openstack.block_storage.v2 import stats
from openstack.block_storage.v2 import type
from openstack.block_storage.v2 import volume
from openstack.tests.unit import test_proxy_base
class TestVolumeProxy(test_proxy_base.TestProxyBase):
def setUp(self):
super(TestVolumeProxy, self).setUp()
self.proxy = _proxy.Proxy(self.session)
def test_snapshot_get(self):
self.verify_get(self.proxy.get_snapshot, snapshot.Snapshot)
def test_snapshots_detailed(self):
self.verify_list(self.proxy.snapshots, snapshot.SnapshotDetail,
paginated=True,
method_kwargs={"details": True, "query": 1},
expected_kwargs={"query": 1})
def test_snapshots_not_detailed(self):
self.verify_list(self.proxy.snapshots, snapshot.Snapshot,
paginated=True,
method_kwargs={"details": False, "query": 1},
expected_kwargs={"query": 1})
def test_snapshot_create_attrs(self):
self.verify_create(self.proxy.create_snapshot, snapshot.Snapshot)
def test_snapshot_delete(self):
self.verify_delete(self.proxy.delete_snapshot,
snapshot.Snapshot, False)
def test_snapshot_delete_ignore(self):
self.verify_delete(self.proxy.delete_snapshot,
snapshot.Snapshot, True)
def test_type_get(self):
self.verify_get(self.proxy.get_type, type.Type)
def test_types(self):
self.verify_list(self.proxy.types, type.Type, paginated=False)
def test_type_create_attrs(self):
self.verify_create(self.proxy.create_type, type.Type)
def test_type_delete(self):
self.verify_delete(self.proxy.delete_type, type.Type, False)
def test_type_delete_ignore(self):
self.verify_delete(self.proxy.delete_type, type.Type, True)
def test_volume_get(self):
self.verify_get(self.proxy.get_volume, volume.Volume)
def test_volumes_detailed(self):
self.verify_list(self.proxy.volumes, volume.VolumeDetail,
paginated=True,
method_kwargs={"details": True, "query": 1},
expected_kwargs={"query": 1})
def test_volumes_not_detailed(self):
self.verify_list(self.proxy.volumes, volume.Volume,
paginated=True,
method_kwargs={"details": False, "query": 1},
expected_kwargs={"query": 1})
def test_volume_create_attrs(self):
self.verify_create(self.proxy.create_volume, volume.Volume)
def test_volume_delete(self):
self.verify_delete(self.proxy.delete_volume, volume.Volume, False)
def test_volume_delete_ignore(self):
self.verify_delete(self.proxy.delete_volume, volume.Volume, True)
def test_volume_extend(self):
self._verify("openstack.block_storage.v2.volume.Volume.extend",
self.proxy.extend_volume,
method_args=["value", "new-size"],
expected_args=["new-size"])
def test_backend_pools(self):
self.verify_list(self.proxy.backend_pools, stats.Pools,
paginated=False)
| 1.9375 | 2 |
src/ncstyler/console.py | starofrainnight/ncstyler | 0 | 6183 | <reponame>starofrainnight/ncstyler<gh_stars>0
#!/usr/bin/env python
import argparse
import CppHeaderParser
import re
import sys
import yaml
import copy
import six
import os.path
import traceback
class CppDefine(dict):
def __init__(self):
self["name"] = None
self["parameters"] = []
self["line_number"] = -1
class CppDefineParameter(dict):
def __init__(self):
self["name"] = None
self["line_number"] = -1
class CppNamespace(dict):
def __init__(self):
self["name"] = None
self["line_number"] = -1
class CppFileName(dict):
def __init__(self):
self["name"] = None
self["line_number"] = -1
class Application(object):
def __init__(self):
        description='''A styler that only targets the naming conventions of
        source code'''
parser = argparse.ArgumentParser(description=description)
parser.add_argument("-c", "--config",
help="Configuration file path (In YAML format)",
required=True)
parser.add_argument("-o", "--output", help="Output file path")
parser.add_argument("-d", "--debug", action='store_true', help="Print trace stack")
parser.add_argument("file_path", help="Source file path")
self.__args = parser.parse_args()
# If user does not specific output path, we default it to input file
# path
if self.__args.output is None:
self.__args.output = self.__args.file_path
self.__config = yaml.load(open(self.__args.config))
old_base = self.__config["_base_"]
self.__config["_base_"] = {
"re":"[a-zA-Z0-9_]+",
"error": "",
}
self.__config["_base_"].update(old_base)
def parse_define(self, adefine):
matched = re.match(r"[^\w]*(\w+)(?:\(([^\)]*)\)|\s*).*", adefine)
name = matched.group(1)
parameters = []
if matched.group(2) is not None:
parameter_names = matched.group(2).split(',')
for parameter_name in parameter_names:
aparameter = CppDefineParameter()
aparameter["name"] = parameter_name.strip()
parameters.append(aparameter)
result = CppDefine()
result["name"] = name
result["parameters"] = parameters
return result
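
    # Illustrative note (not in the original): for the macro text that
    # CppHeaderParser reports for "#define MAX(a, b) ...", parse_define()
    # returns a CppDefine whose "name" is "MAX" and whose "parameters" list
    # holds one CppDefineParameter for "a" and one for "b".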
def _is_special_method(self, amethod):
if isinstance(amethod, six.string_types):
amethod_name = amethod
else:
amethod_name = amethod["name"]
founded = re.findall(r"(?:^|[^\w]+)operator[^\w]+", amethod_name)
if len(founded) <= 0:
if re.match(r"(?:^|.*\W)operator\W.*", amethod["debug"]) is not None:
return True
return False
return True
def _get_argument_name(self, an_argument):
if isinstance(an_argument, six.string_types):
return an_argument
if len(an_argument["name"]) > 0:
return an_argument["name"]
# If it's a functor?? with "class name::function" style
matched = re.match(r"^\w+\s*\(\w*::\*(\w+)\)\(.*$", an_argument["type"])
if matched is None:
# with normal "function" style
matched = re.match(r"[^\(]*\([^\)]*\W(\w+)\W.*\).*", an_argument["type"])
if matched is None:
return ""
else:
return matched.group(1)
def _get_config(self, name):
override_table = {
"class": "_base_",
"function": "_base_",
"variant": "_base_",
"namespace": "_base_",
"define": "_base_",
"filename": "_base_", # Special config use to define filename rule
"argument": "variant",
"static_variant": "variant",
"global_variant": "variant",
"function_argument": "argument",
"class_method_argument": "function_argument",
"struct_method_argument": "class_method_argument",
"define_function_argument": "function_argument",
"define_function": "function",
"class_method": "function",
"struct_method": "class_method",
"class_variant": "variant",
"struct_variant": "class_variant",
"typedef": "class",
"struct": "class",
"enum": "class",
"enum_value": "define",
"union": "struct",
}
my_config = dict()
if name in override_table:
base_name = override_table[name]
my_config.update(self._get_config(base_name))
if name in self.__config:
my_config.update(self.__config[name])
return my_config
def _is_valid_variable(self, cpp_variable):
if cpp_variable["type"] == "return":
return False
if len(cpp_variable["type"]) <= 0:
return False
return True
def _get_cpp_method_re(self, name):
prefix = "operator"
if not name.startswith(prefix):
return re.escape(name)
# Operator methods
chars = []
for achar in name[len(prefix):]:
chars.append("\\s*")
if achar.isalnum():
chars.append(achar)
else:
chars.append("\\")
chars.append(achar)
return "operator%s" % ''.join(chars)
def _validate_codes_of_cpp_method(self, cpp_method):
start_line_index = cpp_method["line_number"] - 1
# Extract cpp method codes
rest_lines = self._source_lines[start_line_index:]
content = '\n'.join(rest_lines)
code_lines = []
name_re = self._get_cpp_method_re(cpp_method["name"])
name_start_pos = re.search(name_re, content).span()[0]
parameters_start_pos = content.index('(', name_start_pos)
parameters_stop_pos = content.index(')', parameters_start_pos)
stack = []
try:
i = content.index('{', parameters_stop_pos + 1)
except ValueError:
return;
try:
semicolonPos = content.index(';', parameters_stop_pos + 1)
if semicolonPos <= i:
return;
except ValueError:
# Not found a semicolon, just ignored.
pass
skipped_lines = cpp_method["line_number"] + content.count("\n", 0, i) - 2
stack.append(i)
i += 1
first_i = i
last_i = 0
is_finding_block_comment = False
is_finding_single_comment = False
while (len(stack) > 0) and (i < len(content)):
c = content[i]
if is_finding_block_comment:
# If finding block comment, then skip all other searching
if (c == "*") and (content[i + 1] == "/"):
is_finding_block_comment = False
elif (c == "/") and (content[i + 1] == "*"):
is_finding_block_comment = True
elif is_finding_single_comment:
# If finding single comment, then skip all other searching
if c == "\n":
is_finding_single_comment = False
elif (c == "/") and (content[i + 1] == "/"):
is_finding_single_comment = True
elif c == "{":
stack.append(i)
elif c == "}":
last_i = i
del stack[len(stack) - 1]
i += 1
if len(stack) <= 0:
content = content[first_i:last_i]
founded = re.findall(r"\w+\W+(\w+)\s*=[^=]", content)
for aname in founded:
avariant = dict()
avariant["name"] = aname
avariant["line_number"] = cpp_method["line_number"]
self._validate_name(avariant, "variant")
def _validate_name(self, cpp_object, name_re):
cpp_object_name = ""
if isinstance(cpp_object, six.string_types):
cpp_object_name = cpp_object
cpp_object = dict()
cpp_object["name"] = cpp_object_name
cpp_object["line_number"] = -1
elif "name" in cpp_object:
cpp_object_name = cpp_object["name"]
if ('<' in cpp_object_name) and ("debug" in cpp_object):
matched = re.match(r".*?(\w+)\W+$", cpp_object["debug"])
if matched is not None:
cpp_object_name = matched.group(1)
else:
return
# Parse union like names
splitted = cpp_object_name.split()
if len(splitted) > 1:
cpp_object_name = splitted[-1]
if '...' in cpp_object_name:
# Does not have valid name, we must not check it .
return
if len(cpp_object_name) <= 0:
# Does not have valid name, we must not check it .
return
matched = re.match(self._get_config(name_re)["re"], cpp_object_name)
if matched is None:
filename = os.path.basename(self.__args.file_path)
error_message = self._get_config(name_re)["error"]
if len(error_message) > 0:
error_message = "%s %s" % (
' '.join([rule_name.capitalize() for rule_name in name_re.split("_")]),
error_message)
if self.__args.debug:
traceback.print_stack()
raise SyntaxError("%s:%s:error: Name '%s' isn't matched with rule : %s! %s" % (
filename,
cpp_object["line_number"],
cpp_object_name,
name_re,
error_message))
def _get_class_realname(self, class_name):
return re.match(r"(\w+).*", class_name).group(1)
def _validate_cpp_object(self, cpp_object):
cpp_object_type = type(cpp_object)
if cpp_object_type == CppDefine:
if len(cpp_object["parameters"]) <= 0:
# Normal Define Name
self._validate_name(cpp_object, "define")
else:
# Function Liked Define Name
self._validate_name(cpp_object, "define_function")
for aparameter in cpp_object["parameters"]:
self._validate_name(aparameter, "define_function_argument")
elif cpp_object_type == CppHeaderParser.CppClass:
if "struct" in cpp_object["declaration_method"]:
class_re = "struct"
class_method_re = "struct_method"
class_method_argument_re = "struct_method_argument"
class_variant_re = "struct_variant"
else:
class_re = "class"
class_method_re = "class_method"
class_method_argument_re = "class_method_argument"
class_variant_re = "class_variant"
self._validate_name(cpp_object, class_re)
for amethod in cpp_object.get_all_methods():
matched = re.match(r".*typedef\W[^\(]*\([^\)]*\W(\w+)\W.*\).*", amethod["debug"])
if matched is None:
self._validate_codes_of_cpp_method(amethod)
if not self._is_special_method(amethod):
if ((amethod["name"] != self._get_class_realname(cpp_object["name"]))
and (not amethod.get("constructor", False))
and (not amethod.get("destructor", False))):
try:
self._validate_name(amethod, class_method_re)
except SyntaxError:
is_need_reraise = True
try:
self._validate_name(amethod, "define_function")
is_need_reraise = False
except SyntaxError:
pass
if is_need_reraise:
raise
for aparameter in amethod["parameters"]:
an_object = dict()
an_object["line_number"] = aparameter["line_number"]
if (aparameter["type"].endswith("::*")
and (")" in aparameter["name"])):
an_object["name"] = re.match(r"(\w+).*", aparameter["name"]).group(1)
try:
self._validate_name(an_object,
class_method_re)
except SyntaxError:
is_need_reraise = True
try:
self._validate_name(amethod, "define_function")
is_need_reraise = False
except SyntaxError:
pass
if is_need_reraise:
raise
else:
an_object["name"] = self._get_argument_name(aparameter)
self._validate_name(an_object,
class_method_argument_re)
else:
self._validate_name(
{"name":matched.group(1), "line_number":amethod["line_number"]},
"typedef")
for access_specifier in CppHeaderParser.supportedAccessSpecifier:
for amember in cpp_object["properties"][access_specifier]:
is_skip_validate = False
if ("type" in amember) and (amember["type"] is not None):
internal_predeclares = ["class", "struct", "union"]
if amember["type"] in internal_predeclares:
is_skip_validate = True
if not is_skip_validate:
if amember["static"]:
self._validate_name(amember, "static_variant")
else:
self._validate_name(amember, class_variant_re)
for amember in cpp_object["structs"][access_specifier]:
self._validate_cpp_object(amember)
for amember in cpp_object["enums"][access_specifier]:
self._validate_cpp_object(amember)
elif cpp_object_type == CppHeaderParser.CppStruct:
self._validate_name(cpp_object, "struct")
elif cpp_object_type == CppHeaderParser.CppEnum:
self._validate_name(cpp_object, "enum")
line_number = -1
if "line_number" in cpp_object:
line_number = cpp_object["line_number"]
for amember in cpp_object["values"]:
# Use parent line number if enum value does not have it's line
# number
if "line_number" not in amember:
amember["line_number"] = line_number
self._validate_name(amember, "enum_value")
elif cpp_object_type == CppHeaderParser.CppVariable:
if cpp_object["type"] != "return":
if cpp_object["static"]:
self._validate_name(cpp_object, "static_variant")
elif cpp_object["type"] not in ["class", "struct", "union"]:
if not cpp_object["type"].endswith("::"):
                        # Don't parse variables implemented outside of a
                        # template class; they were already parsed when
                        # parsing the class.
self._validate_name(cpp_object, "global_variant")
elif cpp_object_type == CppHeaderParser.CppMethod:
# Exclude "main" function while parsing global function
while True:
# FIXME: Parse special case : "struct RArraySize <T ( & ) [ N ]> {"
if "debug" in cpp_object:
if re.match(r".*\>\s*{$", cpp_object["debug"]) is not None:
break
self._validate_codes_of_cpp_method(cpp_object)
if cpp_object["name"] == "main":
break
if self._is_special_method(cpp_object):
break
if (cpp_object["class"] is None) or (len(cpp_object["class"]) <= 0):
if ">" in cpp_object["name"]:
regex = r"^[^<:]*?(?:(\w+)::)?(\w+)\s*<"
matched = re.search(regex, cpp_object["debug"])
if matched.group(1) is not None:
cpp_object["class"] = matched.group(1)
cpp_object["name"] = matched.group(2)
self._validate_name(cpp_object, "class_method")
elif len(cpp_object["returns"]) > 0:
                        # If a function does not have a return value (at least
                        # "void"), it may be a macro invocation.
# FIXME: We just ignored this situation:
# Code Snippets: static RSignal<void(int)> sReceived;
if "<" not in cpp_object["name"]:
self._validate_name(cpp_object, "function")
break
if self._get_class_realname(cpp_object["class"]) == cpp_object["name"]:
# Constructor / Destructor will the same with class name
break
self._validate_name(cpp_object, "class_method")
break
elif cpp_object_type == CppHeaderParser.CppUnion:
self._validate_name(cpp_object, "union")
elif cpp_object_type == CppNamespace:
self._validate_name(cpp_object, "namespace")
elif cpp_object_type == CppFileName:
self._validate_name(cpp_object, "filename")
def exec_(self):
try:
with open(self.__args.file_path, "r") as source_file:
# For later parse by _validate_codes_of_cpp_method()
self._source_lines = source_file.readlines()
parsed_info = CppHeaderParser.CppHeader(self.__args.file_path)
# Verify File Names
filename = os.path.basename(self.__args.file_path)
cpp_object = CppFileName()
cpp_object["name"] = filename
self._validate_cpp_object(cpp_object)
# Verify Define Names
for define_text in parsed_info.defines:
self._validate_cpp_object(self.parse_define(define_text))
# Verify Function Names
for cpp_object in parsed_info.functions:
self._validate_cpp_object(cpp_object)
# Verify Class Names
for cpp_object in parsed_info.classes_order:
self._validate_cpp_object(cpp_object)
# Verify Struct Names
for cpp_object in parsed_info.structs_order:
self._validate_cpp_object(cpp_object)
# Verify Enum Names
for cpp_object in parsed_info.enums:
self._validate_cpp_object(cpp_object)
# Verify Variable Names
for cpp_object in parsed_info.variables:
# Avoid checking member variable inside function body.
if '{' not in cpp_object['type']:
self._validate_cpp_object(cpp_object)
for namespace in parsed_info.namespaces:
cpp_object = CppNamespace()
cpp_object["name"] = namespace
self._validate_cpp_object(cpp_object)
# Verify Typdef Names
for cpp_object in parsed_info.typedefs:
self._validate_cpp_object(cpp_object)
except SyntaxError as e:
print(str(e))
return 1
except CppHeaderParser.CppHeaderParser.CppParseError as e:
# CppHeaderParser can't parse this file, but we should pass it, this
# is the CppHeaderParser's problem.
print(str(e))
return 0
return 0
def main():
a = Application()
sys.exit(a.exec_())
if __name__ == "__main__":
# Execute only if run as a script
main()
| 2.640625 | 3 |
theone/wsgi/server.py | laozijiaojiangnan/TheOne | 0 | 6184 | <gh_stars>0
import typing as t
from http.server import HTTPServer, BaseHTTPRequestHandler
from . import response as resp
class WsgiServer(HTTPServer):
pass
class WsgiHandel(BaseHTTPRequestHandler):
def handle(self) -> None:
handle_response = SimpleHandler(self.wfile)
handle_response.send()
class SimpleHandler:
def __init__(self, wfile):
self._response = resp.Response.create_empty() # type: resp.Response
self.sender = wfile
def send(self):
"""像浏览器发送包
node: 下面分成了三次发送,因为合在发送会有 bug,不确定问题,暂时先这样
"""
line = f"{self._response.line.version} {self._response.line.code} {self._response.line.code}\r\n"
self.sender.write(bytes(line, 'utf-8'))
self.add_header(key='Content-Length', value=len(self._response.body.content))
headers = "".join(
[f"{h.key}:{h.value}\r\n" for h in self._response.headers]
)
print(f'headers: {headers}')
self.sender.write(bytes(headers, 'utf-8'))
body = f"\r\n{self._response.body.content}"
self.sender.write(bytes(body, 'utf-8'))
def add_header(self, key: str, value: t.Any) -> t.List[resp.Headers]:
"""添加请求头键值对
Args:
key: 键
value: 值
Return:
存在的所有键值对信息
"""
if self._response is None:
self._response = resp.Response.create_empty()
h = resp.Headers(key=key, value=value)
self._response.headers.append(h)
return self._response.headers
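# Minimal usage sketch (not part of the original module): serve on a local
# address; the host and port values below are illustrative.
if __name__ == "__main__":
    server = WsgiServer(("127.0.0.1", 8000), WsgiHandel)
    server.serve_forever()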
| 2.75 | 3 |
pytorch_translate/attention/multihead_attention.py | dzhulgakov/translate | 1 | 6185 | <filename>pytorch_translate/attention/multihead_attention.py
#!/usr/bin/env python3
from fairseq.modules import multihead_attention as fair_multihead
from pytorch_translate.attention import (
BaseAttention,
attention_utils,
register_attention,
)
@register_attention("multihead")
class MultiheadAttention(BaseAttention):
"""
Multiheaded Scaled Dot Product Attention
Implements equation:
MultiHead(Q, K, V) = Concat(head_1,...,head_h)W^O
where head_i = Attention(QW_i^Q, KW_i^K, VW_i^V)
Similarly to the above, d_k = d_v = d_model / h
In this implementation, keys and values are both set to encoder output
Inputs
init:
decoder_hidden_state_dim : dimensionality of decoder hidden state
context_dim : dimensionality of encoder output
kwargs :
nheads : integer # of attention heads
unseen_mask: if True, only attend to previous sequence positions
src_lengths_mask: if True, mask padding based on src_lengths
forward:
decoder_state : [batch size, d_model]
source_hids : [sequence length, batch size, d_model]
src_lengths : [batch size]
forward:
query : [sequence length, batch size, d_model]
key: [sequence length, batch size, d_model]
value: [sequence length, batch size, d_model]
Output
result : [batch_size, d_model]
"""
def __init__(
self,
decoder_hidden_state_dim,
context_dim,
*,
nheads=1,
unseen_mask=False,
src_length_mask=True
):
super().__init__(decoder_hidden_state_dim, context_dim)
assert decoder_hidden_state_dim == context_dim
d_model = decoder_hidden_state_dim # for brevity
assert d_model % nheads == 0
if unseen_mask:
raise NotImplementedError(
"Unseen mask not supported with sequential decoding"
)
self._fair_attn = fair_multihead.MultiheadAttention(d_model, nheads)
self.use_src_length_mask = src_length_mask
def forward(self, decoder_state, source_hids, src_lengths, squeeze=True):
"""
Computes MultiheadAttention with respect to either a vector
or a tensor
Inputs:
decoder_state: (bsz x decoder_hidden_state_dim) or
(bsz x T x decoder_hidden_state_dim)
source_hids: srclen x bsz x context_dim
src_lengths: bsz x 1, actual sequence lengths
squeeze: Whether or not to squeeze on the time dimension.
Even if decoder_state.dim() is 2 dimensional an
explicit time step dimension will be unsqueezed.
Outputs:
[batch_size, max_src_len] if decoder_state.dim() == 2 & squeeze
or
[batch_size, 1, max_src_len] if decoder_state.dim() == 2 & !squeeze
or
[batch_size, T, max_src_len] if decoder_state.dim() == 3 & !squeeze
or
[batch_size, T, max_src_len] if decoder_state.dim() == 3 & squeeze & T != 1
or
[batch_size, max_src_len] if decoder_state.dim() == 3 & squeeze & T == 1
"""
batch_size = decoder_state.shape[0]
if decoder_state.dim() == 3:
query = decoder_state
elif decoder_state.dim() == 2:
query = decoder_state.unsqueeze(1)
else:
raise ValueError("decoder state must be either 2 or 3 dimensional")
query = query.transpose(0, 1)
value = key = source_hids
src_len_mask = None
if src_lengths is not None and self.use_src_length_mask:
# [batch_size, 1, seq_len]
src_len_mask_int = attention_utils.create_src_lengths_mask(
batch_size=batch_size, src_lengths=src_lengths
)
src_len_mask = src_len_mask_int != 1
attn, attn_weights = self._fair_attn.forward(
query, key, value, key_padding_mask=src_len_mask, need_weights=True
)
# attn.shape = T X bsz X embed_dim
# attn_weights.shape = bsz X T X src_len
attn_weights = attn_weights.transpose(0, 2)
# attn_weights.shape = src_len X T X bsz
if squeeze:
attn = attn.squeeze(0)
# attn.shape = squeeze(T) X bsz X embed_dim
attn_weights = attn_weights.squeeze(1)
# attn_weights.shape = src_len X squeeze(T) X bsz
return attn, attn_weights
return attn, attn_weights
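# Minimal usage sketch (illustrative; assumes BaseAttention is an nn.Module so the
# instance is callable, and that torch/fairseq are installed):
# import torch
# attention = MultiheadAttention(decoder_hidden_state_dim=512, context_dim=512, nheads=8)
# decoder_state = torch.rand(4, 512)      # bsz x d_model
# source_hids = torch.rand(7, 4, 512)     # srclen x bsz x d_model
# src_lengths = torch.tensor([7, 5, 6, 7])
# attn, attn_weights = attention(decoder_state, source_hids, src_lengths)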
| 2.625 | 3 |
custom_components/purrsong/__init__.py | RobertD502/home-assistant-lavviebot | 3 | 6186 | """Support for Purrsong LavvieBot S"""
import asyncio
import logging
import voluptuous as vol
from lavviebot import LavvieBotApi
import homeassistant.helpers.config_validation as cv
from homeassistant import config_entries
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.const import (
CONF_PASSWORD,
CONF_USERNAME
)
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
def setup(hass, config):
"""Setup of the component"""
return True
async def async_setup_entry(hass, config_entry):
"""Set up Lavviebot integration from a config entry."""
username = config_entry.data.get(CONF_USERNAME)
password = config_entry.data.get(CONF_PASSWORD)
_LOGGER.info("Initializing the Lavviebot API")
lavviebot = await hass.async_add_executor_job(LavvieBotApi, username, password)
_LOGGER.info("Connected to API")
hass.data[DOMAIN] = lavviebot
hass.async_add_job(
hass.config_entries.async_forward_entry_setup(config_entry, "sensor")
)
return True
| 2.125 | 2 |
MathPainting_OOP/shapes.py | matbocz/kurs-python-udemy | 0 | 6187 | class Rectangle:
"""A rectangle shape that can be drawn on a Canvas object"""
def __init__(self, x, y, width, height, color):
self.x = x
self.y = y
self.width = width
self.height = height
self.color = color
def draw(self, canvas):
"""Draws itself into the Canvas object"""
# Changes a slice of the array with new values
canvas.data[self.x: self.x + self.height, self.y: self.y + self.width] = self.color
class Square:
"""A square shape that can be drawn on a Canvas object"""
def __init__(self, x, y, side, color):
self.x = x
self.y = y
self.side = side
self.color = color
def draw(self, canvas):
"""Draws itself into the Canvas object"""
# Changes a slice of the array with new values
canvas.data[self.x: self.x + self.side, self.y: self.y + self.side] = self.color
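# Minimal usage sketch (illustrative): Canvas is not defined in this module; any
# object exposing a NumPy array in `.data` works.
# import numpy as np
# class Canvas:
#     def __init__(self, height, width):
#         self.data = np.zeros((height, width, 3), dtype=np.uint8)
# canvas = Canvas(100, 100)
# Rectangle(10, 20, width=30, height=15, color=(255, 0, 0)).draw(canvas)
# Square(50, 50, side=20, color=(0, 255, 0)).draw(canvas)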
| 4.09375 | 4 |
problems/Kelvin_Helmholtz/problem.py | sddyates/mars | 1 | 6188 |
from mars import main_loop
import numpy as np
from mars.settings import *
class Problem:
"""
Synopsis
--------
User class for the Kelvin-Helmholtz instability
Args
----
None
Methods
-------
initialise
Set all variables in each cell to initialise the simulation.
internal_bc
Specify the internal boundary for the simulation.
TODO
----
None
"""
def __init__(self):
self.parameter = {
            'Name':'Kelvin-Helmholtz instability.',
'Dimensions':'2D',
'x1 min':-0.5,
'x1 max':0.5,
'x2 min':-0.5,
'x2 max':0.5,
'x3 min':-0.5,
'x3 max':0.5,
'resolution x1':256,
'resolution x2':256,
'resolution x3':0,
'cfl':0.3,
'initial dt':1.0e-5,
'max dt increase':1.5,
'initial t': 0.0,
'max time': 5.0,
'save frequency': 2.5e-2,
'output type': ['numpy'],
'output primitives': True,
'print to file':False,
'profiling': True,
'restart file':None,
'gamma':1.4,
'density unit':1.0,
'length unit':1.0,
'velocity unit':1.0,
'optimisation': 'numba',
'riemann':'hllc',
'reconstruction':'linear',
'limiter':'minmod',
'time stepping':'RK2',
'method':'hydro',
'lower x1 boundary':'reciprocal',
'upper x1 boundary':'reciprocal',
'lower x2 boundary':'reciprocal',
'upper x2 boundary':'reciprocal',
'lower x3 boundary':'reciprocal',
'upper x3 boundary':'reciprocal',
'internal boundary':False
}
def initialise(self, V, g, l):
if self.parameter['Dimensions'] == '2D':
Y, X = np.meshgrid(g.x1, g.x2, indexing='ij')
if self.parameter['Dimensions'] == '3D':
Z, Y, X = np.meshgrid(g.x1, g.x2, g.x3, indexing='ij')
yp = 0.25
dens_1 = 2.0
dens_2 = 1.0
pres = 2.0
vel_1 = 0.5
vel_2 = 0.0
amp = 0.001
vx1_per = (np.random.random(V.shape)*2.0 - 1)*amp
vx2_per = (np.random.random(V.shape)*2.0 - 1)*amp
region_1 = np.absolute(Y) < yp
region_2 = np.absolute(Y) > yp
V[rho, region_1] = dens_1
V[prs, region_1] = pres
V[vx1, region_1] = vel_1 + vx1_per[vx1, region_1]
V[vx2, region_1] = vel_2 + vx2_per[vx2, region_1]
V[rho, region_2] = dens_2
V[prs, region_2] = pres
V[vx1, region_2] = -vel_1 + vx1_per[vx1, region_2]
V[vx2, region_2] = vel_2 + vx2_per[vx2, region_2]
def internal_bc(self):
return None
if __name__ == "__main__":
main_loop(Problem())
| 2.703125 | 3 |
pythainlp/util/thai.py | korkeatw/pythainlp | 0 | 6189 | # -*- coding: utf-8 -*-
"""
Check if it is Thai text
"""
import string
_DEFAULT_IGNORE_CHARS = string.whitespace + string.digits + string.punctuation
def isthaichar(ch: str) -> bool:
"""
    Check if a character is a Thai character.
:param str ch: input character
:return: True or False
"""
ch_val = ord(ch)
if ch_val >= 3584 and ch_val <= 3711:
return True
return False
def isthai(word: str, ignore_chars: str = ".") -> bool:
"""
    Check whether every character in the word is Thai.
:param str word: input text
:param str ignore_chars: characters to be ignored (i.e. will be considered as Thai)
:return: True or False
"""
if not ignore_chars:
ignore_chars = ""
for ch in word:
if ch not in ignore_chars and not isthaichar(ch):
return False
return True
def countthai(text: str, ignore_chars: str = _DEFAULT_IGNORE_CHARS) -> float:
"""
:param str text: input text
:return: float, proportion of characters in the text that is Thai character
"""
if not text or not isinstance(text, str):
return 0
if not ignore_chars:
ignore_chars = ""
num_thai = 0
num_ignore = 0
for ch in text:
if ch in ignore_chars:
num_ignore += 1
elif isthaichar(ch):
num_thai += 1
    num_count = len(text) - num_ignore
    if num_count == 0:
        return 0
    return (num_thai / num_count) * 100
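# Minimal usage examples (illustrative, not part of the original module):
# isthaichar("ก")         # -> True
# isthai("สวัสดี.")         # -> True ('.' is ignored by default)
# isthai("สวัสดีhello")     # -> False
# countthai("ABC สวัสดี")   # -> percentage of Thai characters among counted characters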
| 4.1875 | 4 |
Numpy/tempCodeRunnerFile.py | zharmedia386/Data-Science-Stuff | 0 | 6190 | <reponame>zharmedia386/Data-Science-Stuff
print(b)
print(c)
print(d)
print(e)
print(f)
print(g) | 1.304688 | 1 |
Python/Filter.py | KilroyWasHere-cs-j/savitzky-golay | 0 | 6191 | import numpy as np
from scipy.signal import savgol_filter
import matplotlib.pyplot as plt
import MadDog
x = []
y = []
def generate():
# Generate random data
base = np.linspace(0, 5, 11)
# base = np.random.randint(0, 10, 5)
outliers = np.random.randint(10, 20, 2)
data = np.concatenate((base, outliers))
np.random.shuffle(data)
return data
def fill_data():
# Build random data
return np.concatenate((np.array([0]), MadDog.find_outliers(generate()))), np.concatenate(
(np.array([0]), MadDog.find_outliers(generate()))) # np.sin(x) + np.cos(x) + np.random.random(100)
# np.linspace(0, 2*np.pi, 100)
def savitzky(x, y, poly_nom):
return savgol_filter(x, len(x) - 1, 10), savgol_filter(y, len(y) - 1, 10)
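# Note (illustrative, not from the original script): the poly_nom argument above is
# currently unused -- the polyorder is hard-coded to 10. scipy's savgol_filter also
# requires polyorder < window_length and, depending on the SciPy version, an odd
# window length, so a more conventional call would look like:
# x_smooth = savgol_filter(x, window_length=11, polyorder=3)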
def map(x_filtered, y_filtered, x, y, title="title"):
# Generate some test data
heatmap, xedges, yedges = np.histogram2d(x, y, bins=50)
extent = [xedges[0], xedges[-1], yedges[0], yedges[-1]]
plt.clf()
plt.imshow(heatmap.T, extent=extent, origin='lower')
plt.show()
heatmap, xedges, yedges = np.histogram2d(x_filtered, y_filtered, bins=50)
extent = [xedges[0], xedges[-1], yedges[0], yedges[-1]]
plt.clf()
plt.imshow(heatmap.T, extent=extent, origin='lower')
plt.show()
def show(x_filtered, y_filtered, x, y, title="Lorem ipsum"):
# Plotting
fig = plt.figure()
ax = fig.subplots()
plt.plot(x_filtered, y_filtered, 'red', marker="o")
plt.plot(x, y, 'green', marker="o")
plt.subplots_adjust(bottom=0.25)
plt.xlabel('x')
plt.ylabel('y')
plt.title(title)
plt.legend(["Filter", "Raw"])
plt.show()
# Generating the noisy signal
x, y = fill_data()
print(len(y))
# Savitzky-Golay filter
x_filtered, y_filtered = savitzky(x, y, 2)
print("X unfiltered>> ", x)
print("Y unfiltered>> ", y)
print("X filtered>> ", x_filtered)
print("Y filtered>> ", y_filtered)
show(x_filtered, y_filtered, x, y)
| 2.765625 | 3 |
examples/adwords/v201406/advanced_operations/add_ad_customizer.py | dietrichc/streamline-ppc-reports | 0 | 6192 | #!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Adds an ad customizer feed.
Associates the feed with customer and adds an ad that
uses the feed to populate dynamic data.
Tags: CustomerFeedService.mutate, FeedItemService.mutate
Tags: FeedMappingService.mutate, FeedService.mutate
Tags: AdGroupAdService.mutate
"""
__author__ = ('<EMAIL> (<NAME>)',
'<EMAIL> (<NAME>)')
# Import appropriate classes from the client library.
from googleads import adwords
# See the Placeholder reference page for a list of all the placeholder types
# and fields:
# https://developers.google.com/adwords/api/docs/appendix/placeholders
PLACEHOLDER_AD_CUSTOMIZER = '10'
PLACEHOLDER_FIELD_INTEGER = '1'
PLACEHOLDER_FIELD_FLOAT = '2'
PLACEHOLDER_FIELD_PRICE = '3'
PLACEHOLDER_FIELD_DATE = '4'
PLACEHOLDER_FIELD_STRING = '5'
ADGROUPS = [
'INSERT_ADGROUP_ID_HERE',
'INSERT_ADGROUP_ID_HERE'
]
FEEDNAME = 'INSERT_FEED_NAME_HERE'
def main(client, adgroups):
# Initialize appropriate services.
ad_group_ad_service = client.GetService('AdGroupAdService', version='v201406')
customer_feed_service = client.GetService(
'CustomerFeedService', version='v201406')
feed_item_service = client.GetService('FeedItemService', version='v201406')
feed_mapping_service = client.GetService(
'FeedMappingService', version='v201406')
feed_service = client.GetService('FeedService', version='v201406')
# First, create a customizer feed. One feed per account can be used for all
# ads.
customizer_feed = {
'name': FEEDNAME,
'attributes': [
{'type': 'STRING', 'name': 'Name'},
{'type': 'STRING', 'name': 'Price'},
{'type': 'DATE_TIME', 'name': 'Date'}
]
}
feed_service_operation = {
'operator': 'ADD',
'operand': customizer_feed
}
response = feed_service.mutate([feed_service_operation])
if response and 'value' in response:
feed = response['value'][0]
feed_data = {
'feedId': feed['id'],
'nameId': feed['attributes'][0]['id'],
'priceId': feed['attributes'][1]['id'],
'dateId': feed['attributes'][2]['id']
}
print ('Feed with name \'%s\' and ID %s was added with:'
         '\tName attribute ID %s and price attribute ID %s and date attribute '
'ID %s') % (feed['name'], feed['id'], feed_data['nameId'],
feed_data['priceId'], feed_data['dateId'])
else:
raise Exception('No feeds were added')
# Creating feed mapping to map the fields with customizer IDs.
feed_mapping = {
'placeholderType': PLACEHOLDER_AD_CUSTOMIZER,
'feedId': feed_data['feedId'],
'attributeFieldMappings': [
{
'feedAttributeId': feed_data['nameId'],
'fieldId': PLACEHOLDER_FIELD_STRING
},
{
'feedAttributeId': feed_data['priceId'],
'fieldId': PLACEHOLDER_FIELD_PRICE
},
{
'feedAttributeId': feed_data['dateId'],
'fieldId': PLACEHOLDER_FIELD_DATE
}
]
}
feed_mapping_operation = {
'operator': 'ADD',
'operand': feed_mapping
}
response = feed_mapping_service.mutate([feed_mapping_operation])
if response and 'value' in response:
feed_mapping = response['value'][0]
print ('Feed mapping with ID %s and placeholder type %s was saved for feed'
' with ID %s.') % (feed_mapping['feedMappingId'],
feed_mapping['placeholderType'],
feed_mapping['feedId'])
else:
raise Exception('No feed mappings were added.')
# Now adding feed items -- the values we'd like to place.
items_data = [
{
'name': 'Mars',
'price': '$1234.56',
'date': '20140601 000000',
'adGroupId': adgroups[0]
},
{
'name': 'Venus',
'price': '$1450.00',
'date': '20140615 120000',
'adGroupId': adgroups[1]
}
]
feed_items = [{'feedId': feed_data['feedId'],
'adGroupTargeting': {
'TargetingAdGroupId': item['adGroupId']
},
'attributeValues': [
{
'feedAttributeId': feed_data['nameId'],
'stringValue': item['name']
},
{
'feedAttributeId': feed_data['priceId'],
'stringValue': item['price']
},
{
'feedAttributeId': feed_data['dateId'],
'stringValue': item['date']
}
]} for item in items_data]
feed_item_operations = [{
'operator': 'ADD',
'operand': feed_item
} for feed_item in feed_items]
response = feed_item_service.mutate(feed_item_operations)
if response and 'value' in response:
for feed_item in response['value']:
print 'Feed item with ID %s was added.' % feed_item['feedItemId']
else:
raise Exception('No feed items were added.')
# Finally, creating a customer (account-level) feed with a matching function
# that determines when to use this feed. For this case we use the "IDENTITY"
# matching function that is always 'true' just to associate this feed with
# the customer. The targeting is done within the feed items using the
# :campaign_targeting, :ad_group_targeting, or :keyword_targeting attributes.
matching_function = {
'operator': 'IDENTITY',
'lhsOperand': [
{
'xsi_type': 'ConstantOperand',
'type': 'BOOLEAN',
'booleanValue': 'true'
}
]
}
customer_feed = {
'feedId': feed_data['feedId'],
'matchingFunction': matching_function,
'placeholderTypes': [PLACEHOLDER_AD_CUSTOMIZER]
}
customer_feed_operation = {
'operator': 'ADD',
'operand': customer_feed
}
response = customer_feed_service.mutate([customer_feed_operation])
if response and 'value' in response:
feed = response['value'][0]
print 'Customer feed with ID %s was added.' % feed['feedId']
else:
raise Exception('No customer feeds were added.')
# All set! We can now create ads with customizations.
text_ad = {
'xsi_type': 'TextAd',
'headline': 'Luxury Cruise to {=%s.Name}' % FEEDNAME,
'description1': 'Only {=%s.Price}' % FEEDNAME,
'description2': 'Offer ends in {=countdown(%s.Date)}!' % FEEDNAME,
'url': 'http://www.example.com',
'displayUrl': 'www.example.com'
}
# We add the same ad to both ad groups. When they serve, they will show
# different values, since they match different feed items.
operations = [{
'operator': 'ADD',
'operand': {
'adGroupId': adgroup,
'ad': text_ad
}
} for adgroup in adgroups]
print operations
response = ad_group_ad_service.mutate(operations)
print '===ad group ad service==='
print response
if response and 'value' in response:
for ad in response['value']:
print ('\tCreated an ad with ID \'%s\', type \'%s\', and status \'%s\'.'
% (ad['ad']['id'], ad['ad']['Ad.Type'], ad['status']))
else:
raise Exception('No ads were added.')
if __name__ == '__main__':
# Initialize client object.
adwords_client = adwords.AdWordsClient.LoadFromStorage()
main(adwords_client, ADGROUPS)
| 2.4375 | 2 |
tests/test_minhash.py | azachar/pyminhash | 0 | 6193 | import pytest
from pyminhash import MinHash
from pyminhash.datasets import load_data
def test__sparse_vector():
df = load_data()
myMinHasher = MinHash(10)
res = myMinHasher._sparse_vectorize(df, 'name')
assert res.columns.tolist() == ['name', 'sparse_vector']
assert res['sparse_vector'].dtype == 'object'
def test__create_hashing_parameters():
n_hashes = 10
myMinHasher = MinHash(n_hash_tables=n_hashes)
res = myMinHasher._create_hashing_parameters()
assert len(res) == n_hashes
assert res.dtype == 'int64'
assert min(res) >= 0
assert min(res) <= myMinHasher.max_token_value
def test__create_minhash():
n_hashes = 10
myMinHasher = MinHash(n_hash_tables=n_hashes)
doc = [59, 65, 66, 67, 118, 150, 266]
res = myMinHasher._create_minhash(doc)
assert len(res) == n_hashes
def test__create_minhash_signatures():
df = load_data()
myMinHasher = MinHash(3)
df = myMinHasher._sparse_vectorize(df, 'name')
df = myMinHasher._create_minhash_signatures(df)
for col in ['hash_0', 'hash_1', 'hash_2']:
assert col in df.columns
assert df[col].dtype == 'int64'
def test_fit_predict():
df = load_data()
myMinHasher = MinHash(10)
res = myMinHasher.fit_predict(df, 'name')
assert res.columns.tolist() == ['row_number_1', 'row_number_2', 'name_1', 'name_2', 'jaccard_sim']
assert res['jaccard_sim'].dtype == 'float'
def test_fit_predict_accuracy():
def jaccard(x, y):
x_tokens = set(x.split())
y_tokens = set(y.split())
return len(x_tokens.intersection(y_tokens)) / len(x_tokens.union(y_tokens))
df = load_data()
myMinHasher = MinHash(1000)
res = myMinHasher.fit_predict(df, 'name')
assert len(res) == 1727
res['jaccard_real'] = res.apply(lambda row: jaccard(row['name_1'], row['name_2']), axis=1)
res['diff'] = res['jaccard_real'] - res['jaccard_sim']
assert abs(res['diff'].mean()) < 0.02
assert res['diff'].std() < 0.1
| 2.3125 | 2 |
settings.py | danylo-dudok/youtube-rss | 0 | 6194 | from datetime import datetime, timedelta
from typing import Final
from tools import localize_time
RSS_URL_PREFIX: Final = 'https://www.youtube.com/feeds/videos.xml?channel_id={0}'
LOCATION_ARGUMENT_PREFIX: Final = '--location='
CHANNEL_ARGUMENT_PREFIX: Final = '--channels='
LAST_CHECK_ARGUMENT_PREFIX: Final = '--last-check='
TWO_WEEKS_IN_DAYS: Final = 14
DEFAULT_LAST_CHECK: Final = localize_time(datetime.now() - timedelta(days=TWO_WEEKS_IN_DAYS))
EMPTY: Final = ''
CHANNEL_POSTS_LIMIT: Final = 20
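# Example (illustrative): building a channel feed URL from a channel id placeholder.
# feed_url = RSS_URL_PREFIX.format('CHANNEL_ID_HERE')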
| 2.390625 | 2 |
openpicle/caravel.py | DX-MON/OpenPICle | 0 | 6195 | # SPDX-License-Identifier: BSD-3-Clause
from amaranth import Elaboratable, Module, Signal, ResetInserter, EnableInserter
__all__ = (
'PIC16Caravel',
)
class PIC16Caravel(Elaboratable):
def elaborate(self, platform):
from .pic16 import PIC16
from .soc.busses.qspi import QSPIBus
m = Module()
reset = Signal()
busy_n = Signal(reset = 1)
m.submodules.qspiFlash = qspiFlash = QSPIBus(resourceName = ('spi_flash_4x', 0))
m.submodules.pic = pic = ResetInserter(reset)(EnableInserter(busy_n)(PIC16()))
run = platform.request('run', 0)
pBus = platform.request('p_bus', 0)
addr = pBus.addr.o
dataIn = pBus.data.i
dataOut = pBus.data.o
dataDir = pBus.data.oe
read = pBus.read
write = pBus.write
with m.If(qspiFlash.complete | reset):
m.d.sync += busy_n.eq(1)
with m.Elif(pic.iBus.read):
m.d.sync += busy_n.eq(0)
m.d.comb += [
reset.eq(~qspiFlash.ready),
run.o.eq(qspiFlash.ready & busy_n),
qspiFlash.address[0].eq(0),
qspiFlash.address[1:].eq(pic.iBus.address),
pic.iBus.data.eq(qspiFlash.data),
qspiFlash.read.eq(pic.iBus.read),
addr.eq(pic.pBus.address),
read.eq(pic.pBus.read),
pic.pBus.readData.eq(dataIn),
write.eq(pic.pBus.write),
dataOut.eq(pic.pBus.writeData),
dataDir.eq(pic.pBus.write),
]
return m
def get_ports(self):
return []
| 2.0625 | 2 |
cogs/stats.py | est73/raid-shack | 0 | 6196 | <reponame>est73/raid-shack<gh_stars>0
from discord.ext import commands
import discord
class Stats(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command()
@commands.has_permissions(manage_channels=True)
async def stats(self, ctx):
members = await ctx.guild.fetch_members(limit=None).flatten()
member_count = 0
member_role_count = 0
instinct_count = 0
mystic_count = 0
valor_count = 0
ign_count = 0
tc_count = 0
level_count = 0
country_count = 0
profile_count = 0
for member in members:
if not member.bot:
member_count += 1
for role in member.roles:
if role.name == "Member":
member_role_count += 1
if role.name == "instinct":
instinct_count += 1
if role.name == "mystic":
mystic_count += 1
if role.name == "valor":
valor_count += 1
if role.name == "ign":
ign_count += 1
if role.name == "tc":
tc_count += 1
if role.name == "level":
level_count += 1
if role.name == "country":
country_count += 1
if role.name == "profile":
profile_count += 1
values = [f'Members: {member_count}',
f'Members Role: {member_role_count}',
f'Members on Team Instinct: {instinct_count}',
f'Members on Team Mystic: {mystic_count}',
f'Members on Team Valor: {valor_count}',
f'Members with IGN set: {ign_count}',
f'Members with TC set: {tc_count}',
f'Members with level set: {level_count}',
f'Members with country set: {country_count}',
f'Members with completed Nexus Profiles: {profile_count}']
embed = discord.Embed(color=discord.Color.green())
embed.set_author(name=ctx.guild.name, icon_url=ctx.guild.icon_url)
embed.add_field(name='Server Stats:', value='\n'.join(values), inline=False)
await ctx.send(embed=embed)
@stats.error
async def permission_error(self, ctx, error):
if isinstance(error, commands.MissingPermissions):
await ctx.send("Sorry, you can't run this command")
else:
raise error
def setup(bot):
bot.add_cog(Stats(bot))
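# Usage sketch (illustrative): with a discord.py commands.Bot instance, this cog is
# typically loaded as an extension, e.g.
# bot.load_extension("cogs.stats")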
| 2.328125 | 2 |
tests/bucket/test_bucket.py | WillChilds-Klein/mistress-mapreduce | 2 | 6197 | from mrs.bucket import WriteBucket
from mrs import BinWriter, HexWriter
def test_writebucket():
b = WriteBucket(0, 0)
b.addpair((4, 'test'))
b.collect([(3, 'a'), (1, 'This'), (2, 'is')])
values = ' '.join(value for key, value in b)
assert values == 'test a This is'
b.sort()
values = ' '.join(value for key, value in b)
assert values == 'This is a test'
def test_write_only():
b = WriteBucket(0, 0)
b.addpair((4, 'test'), write_only=True)
b.collect([(3, 'a'), (1, 'This'), (2, 'is')], write_only=True)
values = ' '.join(value for key, value in b)
assert values == ''
readonly_copy = b.readonly_copy()
assert readonly_copy.url is None
def test_writing(tmpdir):
b = WriteBucket(2, 4, dir=tmpdir.strpath, format=BinWriter)
prefix = b.prefix()
assert prefix == 'source_2_split_4_'
listdir = tmpdir.listdir()
assert listdir == []
b.addpair((1, 2))
filename = prefix + '.mrsb'
path = tmpdir.join(filename).strpath
listdir = tmpdir.listdir()
assert listdir == [path]
readonly_copy = b.readonly_copy()
assert readonly_copy.url == path
def test_roundtrip(tmpdir):
b = WriteBucket(2, 4, dir=tmpdir.strpath, format=BinWriter)
prefix = b.prefix()
assert prefix == 'source_2_split_4_'
listdir = tmpdir.listdir()
assert listdir == []
b.addpair((4, 'test'))
b.collect([(3, 'a'), (1, 'This'), (2, 'is')])
values = ' '.join(value for key, value in b)
assert values == 'test a This is'
b.close_writer(do_sync=False)
filename = prefix + '.mrsb'
path = tmpdir.join(filename).strpath
listdir = tmpdir.listdir()
assert listdir == [path]
readonly_copy = b.readonly_copy()
assert readonly_copy.url == path
values = ' '.join(value for key, value in readonly_copy)
assert values == 'test a This is'
values = ' '.join(value for key, value in readonly_copy.stream())
assert values == 'test a This is'
b.clean()
listdir = tmpdir.listdir()
assert listdir == []
def test_roundtrip_write_only(tmpdir):
b = WriteBucket(7, 1, dir=tmpdir.strpath, format=HexWriter)
prefix = b.prefix()
assert prefix == 'source_7_split_1_'
listdir = tmpdir.listdir()
assert listdir == []
b.addpair((4, 'test'), write_only=True)
b.collect([(3, 'a'), (1, 'This'), (2, 'is')], write_only=True)
values = ' '.join(value for key, value in b)
assert values == ''
b.close_writer(do_sync=False)
filename = prefix + '.mrsx'
path = tmpdir.join(filename).strpath
listdir = tmpdir.listdir()
assert listdir == [path]
readonly_copy = b.readonly_copy()
assert readonly_copy.url == path
values = ' '.join(value for key, value in readonly_copy)
assert values == ''
values = ' '.join(value for key, value in readonly_copy.stream())
assert values == 'test a This is'
b.clean()
listdir = tmpdir.listdir()
assert listdir == []
# vim: et sw=4 sts=4
| 2.15625 | 2 |
inquire/agents/dempref.py | HARPLab/inquire | 0 | 6198 | <reponame>HARPLab/inquire
"""
An agent which uses demonstrations and preferences.
Code adapted from Learning Reward Functions
by Integrating Human Demonstrations and Preferences.
"""
import itertools
import os
import time
from pathlib import Path
from typing import Dict, List
import arviz as az
from inquire.agents.agent import Agent
from inquire.environments.environment import Environment
from inquire.interactions.feedback import Query, Trajectory
from inquire.interactions.modalities import Preference
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import pymc3 as pm
import pymc3.distributions.transforms as tr
import scipy.optimize as opt
import theano.tensor as tt
class DemPref(Agent):
"""A preference-querying agent seeded with demonstrations.
Note: We instantiate the agent according to arguments corresponding to
    what the original paper's codebase designates as its main experiment.
"""
def __init__(
self,
weight_sample_count: int,
trajectory_sample_count: int,
trajectory_length: int,
interaction_types: list = [],
w_dim: int = 4,
which_param_csv: int = 0,
visualize: bool = False,
):
"""Initialize the agent.
Note we needn't maintain a domain's start state; that's handled in
inquire/tests/evaluation.py and the respective domain.
"""
self._weight_sample_count = weight_sample_count
self._trajectory_sample_count = trajectory_sample_count
self._trajectory_length = trajectory_length
self._interaction_types = interaction_types
self._visualize = visualize
"""
Get the pre-defined agent parameters
"""
self._dempref_agent_parameters = self.read_param_csv(which_param_csv)
"""
        Instance attributes from the original codebase's 'runner.py' object. Note
        that some variable names are modified to be consistent with the Inquire
        parlance.
"""
self.domain_name = self._dempref_agent_parameters["domain"][0]
self.teacher_type = self._dempref_agent_parameters["teacher_type"][0]
self.n_demos = self._dempref_agent_parameters["n_demos"][0]
self.gen_demos = self._dempref_agent_parameters["gen_demos"][0]
self.opt_iter_count = self._dempref_agent_parameters["opt_iter_count"][
0
]
self.trim_start = self._dempref_agent_parameters["trim_start"][0]
self.query_option_count = self._dempref_agent_parameters[
"query_option_count"
][0]
self.update_func = self._dempref_agent_parameters["update_func"][0]
self.trajectory_length = self._dempref_agent_parameters[
"trajectory_length"
][0]
self.incl_prev_query = self._dempref_agent_parameters[
"incl_prev_query"
][0]
self.gen_scenario = self._dempref_agent_parameters["gen_scenario"][0]
self.n_pref_iters = self._dempref_agent_parameters["n_pref_iters"][0]
self.epsilon = self._dempref_agent_parameters["epsilon"][0]
"""
Instantiate the DemPref-specific sampler and query generator:
"""
self._sampler = None
self._w_samples = None
self._query_generator = None
self._first_q_session = True
self._q_session_index = 0
self._query_index = 0
self._w_dim = w_dim
assert (
self.update_func == "pick_best"
or self.update_func == "approx"
or self.update_func == "rank"
), ("Update" " function must be one of the provided options")
if self.incl_prev_query and self.teacher_type == "term":
assert (
self.n_demos > 0
), "Cannot include previous query if no demonstration is provided"
self.n_samples_summ = self._dempref_agent_parameters["n_samples_summ"][
0
]
self.n_samples_exp = self._dempref_agent_parameters["n_samples_exp"][0]
self.beta_demo = self._dempref_agent_parameters["beta_demo"][0]
self.beta_pref = self._dempref_agent_parameters["beta_pref"][0]
self.beta_teacher = self._dempref_agent_parameters["beta_teacher"][0]
"""If we want to save data as they did in DemPref:"""
self.first_q_session = True
self.q_session_index = 0
self.query_index = 0
self.config = [
self.teacher_type,
self.n_demos,
self.trim_start,
self.query_option_count,
self.update_func,
self.trajectory_length,
self.incl_prev_query,
self.gen_scenario,
self.n_pref_iters,
self.epsilon,
self.n_samples_summ,
self.n_samples_exp,
self.beta_demo,
self.beta_pref,
self.beta_teacher,
]
self.df = pd.DataFrame(columns=["run #", "pref_iter", "type", "value"])
def initialize_weights(self, domain: Environment) -> np.ndarray:
"""Randomly initialize weights for gradient descent."""
self.reset()
return self.w_samples
def reset(self) -> None:
"""Prepare for new query session."""
if self._sampler is not None:
self._sampler.clear_pref()
self._sampler = self.DemPrefSampler(
query_option_count=self.query_option_count,
dim_features=self._w_dim,
update_func=self.update_func,
beta_demo=self.beta_demo,
beta_pref=self.beta_pref,
visualize=self._visualize,
)
self.w_samples = self._sampler.sample(N=self.n_samples_summ)
"""If we want to save data as they did in DemPref:"""
mean_w = np.mean(self.w_samples, axis=0)
mean_w = mean_w / np.linalg.norm(mean_w)
var_w = np.var(self.w_samples, axis=0)
# Make sure to properly index data:
if self.first_q_session:
self.first_q_session = False
else:
self.q_session_index += 1
data = [
[self.q_session_index, 0, "mean", mean_w],
[self.q_session_index, 0, "var", var_w],
]
self.df = self.df.append(
pd.DataFrame(
data, columns=["run #", "pref_iter", "type", "value"]
),
ignore_index=True,
)
def generate_query(
self,
domain: Environment,
query_state: int,
curr_w: np.ndarray,
verbose: bool = False,
) -> list:
"""Generate query using approximate gradients.
Code adapted from DemPref's ApproxQueryGenerator.
"""
if self._query_generator is None:
self._query_generator = self.DemPrefQueryGenerator(
dom=domain,
num_queries=self.query_option_count,
trajectory_length=self.trajectory_length,
num_expectation_samples=self.n_samples_exp,
include_previous_query=self.incl_prev_query,
generate_scenario=self.gen_scenario,
update_func=self.update_func,
beta_pref=self.beta_pref,
)
if self.incl_prev_query:
if len(self.demos) > 0:
self.random_scenario_index = np.random.randint(len(self.demos))
else:
self.random_scenario_index = 0
last_query_choice = self.all_query_choices[
self.random_scenario_index
]
# Generate query_options while ensuring that features of query_options
# are epsilon apart:
query_diff = 0
print("Generating query_options")
while query_diff <= self.epsilon:
if self.incl_prev_query:
if last_query_choice.null:
query_options = self._query_generator.generate_query_options(
self.w_samples, blank_traj=True
)
else:
query_options = self._query_generator.generate_query_options(
self.w_samples, last_query_choice
)
else:
query_options = self._query_generator.generate_query_options(
self.w_samples
)
query_diffs = []
for m in range(len(query_options)):
for n in range(m):
query_diffs.append(
np.linalg.norm(
domain.features_from_trajectory(
query_options[m].trajectory
)
- domain.features_from_trajectory(
query_options[n].trajectory
)
)
)
query_diff = max(query_diffs)
query = Query(
query_type=Preference,
task=None,
start_state=query_state,
trajectories=query_options,
)
return query
def update_weights(
self, current_weights: np.ndarray, domain: Environment, feedback: list
) -> np.ndarray:
"""Update the model's learned weights.
::inputs:
::current_weights: Irrelevant for DemPref; useful to other agents
::domain: The task's environment
::feedback: A list of the human feedback received to this point.
DemPref utilizes only the most recent
"""
if feedback == []:
# No feedback yet received
return self.w_samples
else:
# Use the most recent Choice in feedback:
query_options = feedback[-1].choice.options
choice = feedback[-1].choice.selection
choice_index = query_options.index(choice)
if self.incl_prev_query:
self.all_query_choices[self.random_scenario_index] = choice
# Create dictionary map from rankings to query-option features;
# load into sampler:
features = [
domain.features_from_trajectory(x.trajectory)
for x in query_options
]
phi = {k: features[k] for k in range(len(query_options))}
self._sampler.load_prefs(phi, choice_index)
self.w_samples = self._sampler.sample(N=self.n_samples_summ)
# Return the new weights from the samples:
mean_w = np.mean(self.w_samples, axis=0)
mean_w = mean_w / np.linalg.norm(mean_w)
return np.array(mean_w, copy=True).reshape(1, -1)
def read_param_csv(self, which_csv: int = 0) -> dict:
"""Read an agent-parameterization .csv.
::inputs:
:creation_index: A time-descending .csv file index.
e.g. if creation_index = 0, use the dempref
dempref_agent.csv most recently created.
"""
data_path = Path.cwd() / Path("../inquire/agents/")
# Sort the .csvs in descending order by time of creation:
all_files = np.array(list(Path.iterdir(data_path)))
all_csvs = all_files[
np.argwhere([f.suffix == ".csv" for f in all_files])
]
all_csvs = np.array([str(f[0]).strip() for f in all_csvs])
sorted_csvs = sorted(all_csvs, key=os.path.getmtime)
sorted_csvs = [Path(c) for c in sorted_csvs]
# Select the indicated .csv and convert it to a dictionary:
chosen_csv = sorted_csvs[-which_csv]
df = pd.read_csv(chosen_csv)
params_dict = df.to_dict()
return params_dict
def process_demonstrations(
self, trajectories: list, domain: Environment
) -> None:
"""Generate demonstrations to seed the querying process."""
self.demos = trajectories
phi_demos = [
domain.features_from_trajectory(x.trajectory) for x in self.demos
]
self._sampler.load_demo(np.array(phi_demos))
self.cleaned_demos = self.demos
if self.incl_prev_query:
self.all_query_choices = [d for d in self.cleaned_demos]
class DemPrefSampler:
"""Sample trajectories for querying.
Code adapted from original DemPref agent.
"""
def __init__(
self,
query_option_count: int,
dim_features: int,
update_func: str = "approx",
beta_demo: float = 0.1,
beta_pref: float = 1.0,
visualize: bool = False,
):
"""
Initialize the sampler.
:param query_option_count: Number of queries.
:param dim_features: Dimension of feature vectors.
:param update_func: options are "rank", "pick_best", and
"approx". To use "approx", query_option_count
must be 2; will throw an assertion error
otherwise
:param beta_demo: parameter measuring irrationality of teacher in
providing demonstrations
:param beta_pref: parameter measuring irrationality of teacher in
selecting preferences
"""
self.query_option_count = query_option_count
self.dim_features = dim_features
self.update_func = update_func
self.beta_demo = beta_demo
self.beta_pref = beta_pref
self._visualize = visualize
if self.update_func == "approx":
assert (
self.query_option_count == 2
), "Cannot use approximation to update function if query_option_count > 2"
elif not (
self.update_func == "rank" or self.update_func == "pick_best"
):
raise Exception(
update_func + " is not a valid update function."
)
# feature vectors from demonstrated trajectories
self.phi_demos = np.zeros((1, self.dim_features))
# a list of np.arrays containing feature difference vectors and
# which encode the ranking from the preference
# queries
self.phi_prefs = []
def load_demo(self, phi_demos: np.ndarray):
"""
Load the demonstrations into the Sampler.
:param demos: a Numpy array containing feature vectors for each
demonstration; has dimension
n_dem -by- self.dim_features
"""
self.phi_demos = phi_demos
def load_prefs(self, phi: Dict, rank):
"""
Load the results of a preference query into the Sampler.
:param phi: a dictionary mapping rankings
(0,...,query_option_count-1) to feature vectors
"""
result = []
if self.update_func == "rank":
result = [None] * len(rank)
for i in range(len(rank)):
result[i] = phi[rank[i]]
elif self.update_func == "approx":
result = phi[rank] - phi[1 - rank]
elif self.update_func == "pick_best":
result, tmp = [phi[rank] - phi[rank]], []
for key in sorted(phi.keys()):
if key != rank:
tmp.append(phi[key] - phi[rank])
result.extend(tmp)
self.phi_prefs.append(np.array(result))
def clear_pref(self):
"""Clear all preference information from the sampler."""
self.phi_prefs = []
def sample(self, N: int, T: int = 1, burn: int = 1000) -> np.ndarray:
"""Return N samples from the distribution.
The distribution is defined by applying update_func on the
demonstrations and preferences observed thus far.
:param N: number of w_samples to draw.
:param T: if greater than 1, all samples except each T^{th}
sample are discarded
:param burn: how many samples before the chain converges;
these initial samples are discarded
:return: list of w_samples drawn
"""
"""Define model for MCMC.
NOTE the DemPref codebase creates a sampler via PyMC3 version 3.5;
this codebase adapts their model to PyMC3 version 3.11.2.
We use the NUTS sampling algorithm (an extension of
Hamilitonian Monte Carlo MCMC): https://arxiv.org/abs/1111.4246.
"""
# Define update function:
if self.update_func == "approx":
def update_function(distribution):
result = tt.sum(
[
-tt.nnet.relu(
-self.beta_pref
* tt.dot(self.phi_prefs[i], distribution)
)
for i in range(len(self.phi_prefs))
]
) + tt.sum(
self.beta_demo * tt.dot(self.phi_demos, distribution)
)
return result
elif self.update_func == "pick_best":
def update_function(distribution):
result = tt.sum(
[
-tt.log(
tt.sum(
tt.exp(
self.beta_pref
* tt.dot(
self.phi_prefs[i], distribution
)
)
)
)
for i in range(len(self.phi_prefs))
]
) + tt.sum(
self.beta_demo * tt.dot(self.phi_demos, distribution)
)
return result
elif self.update_func == "rank":
def update_function(distribution):
result = (
tt.sum( # sum across different queries
[
tt.sum( # sum across different terms in PL-update
-tt.log(
[
tt.sum( # sum down different feature-differences in a single term in PL-update
tt.exp(
self.beta_pref
* tt.dot(
self.phi_prefs[i][
j:, :
]
- self.phi_prefs[i][j],
distribution,
)
)
)
for j in range(
self.query_option_count
)
]
)
)
for i in range(len(self.phi_prefs))
]
)
+ tt.sum(
self.beta_demo
* tt.dot(self.phi_demos, distribution)
                        )
)
return result
self.update_function = update_function
while True:
test_value = np.random.uniform(
low=-1, high=1, size=self.dim_features
)
test_value = test_value / np.linalg.norm(test_value)
norm = (test_value ** 2).sum()
if norm <= 1:
break
# Get a sampling trace (and avoid Bad Initial Energy):
while True:
trace = self.get_trace(test_value)
if trace is not None:
break
if self._visualize:
az.plot_trace(trace)
plt.show()
input("Press enter to continue")
az.plot_energy(trace)
plt.show()
input("Press enter to continue")
az.plot_posterior(trace)
plt.show()
input("Press enter to continue")
all_samples = trace.sel(
draw=slice(burn, None)
).posterior.rv_x.values
all_samples = all_samples.reshape(
all_samples.shape[0] * all_samples.shape[1], -1
)
w_samples = np.array([r / np.linalg.norm(r) for r in all_samples])
return w_samples
def get_trace(self, test_val: np.ndarray) -> az.InferenceData:
"""Create an MCMC trace."""
# model accumulates the objects defined within the proceeding
# context:
model = pm.Model()
with model:
# Add random-variable x to model:
rv_x = pm.Uniform(
name="rv_x",
shape=self.dim_features,
lower=-1,
upper=1,
testval=test_val,
)
# Define the prior as the unit ball centered at 0:
def sphere(w):
"""Determine if w is part of the unit ball."""
w_sum = pm.math.sqr(w).sum()
result = tt.switch(
pm.math.gt(w_sum, 1.0),
-100,
# -np.inf,
self.update_function(w),
)
return result
try:
# Potential is a "potential term" defined as an "additional
# tensor...to be added to the model logp"(PyMC3 developer
# guide). In this instance, the potential is effectively
# the model's log-likelihood.
p = pm.Potential("sphere", sphere(rv_x))
trace = pm.sample(
10000,
tune=5000,
return_inferencedata=True,
init="adapt_diag",
)
# except:
except (
pm.SamplingError,
pm.parallel_sampling.ParallelSamplingError,
):
return None
return trace
class DemPrefQueryGenerator:
"""Generate queries.
Code adapted from original DemPref agent.
"""
def __init__(
self,
dom: Environment,
num_queries: int,
trajectory_length: int,
num_expectation_samples: int,
include_previous_query: bool,
generate_scenario: bool,
update_func: str,
beta_pref: float,
) -> None:
"""
Initialize the approx query generation.
Note: this class generates queries using approx gradients.
::original inputs:
:dom: the domain to generate queries on
:num_queries: number of queries to generate at each time step
:trajectory_length: the length of each query
:num_expectation_samples: number of w_samples to use in
approximating the objective
function
:include_previous_query: boolean for whether one of the
queries is the previously selected
query
:generate_scenario: boolean for whether we want to generate
the scenario -- i.e., other agents'
behavior
:update_func: the update_func used; the options are
"pick_best", "approx", and "rank"
:beta_pref: the rationality parameter for the teacher
selecting her query
::Inquire-specific inputs:
:start_state: The state from which a trajectory begins.
"""
assert (
num_queries >= 1
), "QueryGenerator.__init__: num_queries must be at least 1"
assert (
trajectory_length >= 1
), "QueryGenerator.__init__: trajectory_length must be at least 1"
assert (
num_expectation_samples >= 1
), "QueryGenerator.__init__: num_expectation_samples must be \
at least 1"
self.domain = dom
self.num_queries = num_queries
self.trajectory_length = trajectory_length
self.num_expectation_samples = num_expectation_samples
self.include_previous_query = include_previous_query
self.generate_scenario = (
generate_scenario # Currently must be False
)
assert (
self.generate_scenario is False
), "Cannot generate scenario when using approximate gradients"
self.update_func = update_func
self.beta_pref = beta_pref
self.num_new_queries = (
self.num_queries - 1
if self.include_previous_query
else self.num_queries
)
def generate_query_options(
self,
w_samples: np.ndarray,
last_query_choice: Trajectory = None,
blank_traj: bool = False,
) -> List[Trajectory]:
"""
Generate self.num_queries number of queries.
This function produces query options that (locally) maximize the
maximum volume removal objective.
:param w_samples: Samples of w
:param last_query_choice: The previously selected query. Only
required if self.incl_prev_query is
True
:param blank_traj: True is last_query_choice is blank. (Only
True if not using Dempref but using incl_prev_)
:return: a list of trajectories (queries)
"""
start = time.perf_counter()
def func(controls: np.ndarray, *args) -> float:
"""Minimize via L_BFGS.
:param controls: an array, concatenated to contain the control
input for all queries
:param args: the first argument is the domain, and the second
is the samples that will be used to approximate
the objective function
:return: the value of the objective function for the given set
of controls
"""
domain = args[0]
w_samples = args[1]
controls = np.array(controls)
controls_set = [
controls[i * z : (i + 1) * z]
for i in range(self.num_new_queries)
]
features_each_q_option = np.zeros(
(domain.w_dim, self.num_new_queries)
)
for i, c in enumerate(controls_set):
features_each_q_option[
:, i
] = domain.features_from_trajectory(
c, controls_as_input=True
)
if self.include_previous_query and not blank_traj:
features_each_q_option = np.append(
features_each_q_option,
domain.features_from_trajectory(last_query_choice),
axis=1,
)
if self.update_func == "pick_best":
return -objective(features_each_q_option, w_samples)
elif self.update_func == "approx":
return -approx_objective(features_each_q_option, w_samples)
else:
return -rank_objective(features_each_q_option, w_samples)
def objective(features: List, w_samples: np.ndarray) -> float:
"""
Maximize the volume removal objective.
:param features: a list containing the feature values of each
query
:param w_samples: samples of w, used to approximate the
objective
:return: the value of the objective function, evaluated on the
given queries' features
"""
volumes_removed = []
for i in range(len(features)):
feature_diff = np.array(
[f - features[i] for f in features]
) # query_option_count x feature_size
weighted_feature_diff = (
np.sum(np.dot(feature_diff, w_samples.T), axis=1)
/ w_samples.shape[0]
) # query_option_count x 1 -- summed across w_samples
v_removed = 1.0 - 1.0 / np.sum(
np.exp(self.beta_pref * weighted_feature_diff)
)
volumes_removed.append(v_removed)
return np.min(volumes_removed)
def approx_objective(
features: np.ndarray, w_samples: np.ndarray
) -> float:
"""
Approximate the maximum volume removal objective.
:param features: the feature values of each query option
:param w_samples: w_samples of w used to approximate the
objective
:return: the value of the objective function, evaluated on the
given queries' features
"""
if features.shape[0] > features.shape[1]:
features = features.T
volumes_removed = []
for i in range(len(features)):
feature_diff = (
features[i] - features[1 - i]
) # 1 x feature_size
weighted_feature_diff = (
np.sum(np.dot(feature_diff, w_samples.T))
/ w_samples.shape[0]
) # 1 x 1 -- summed across w_samples
v_removed = 1.0 - np.minimum(
1.0, np.exp(self.beta_pref * weighted_feature_diff)
)
volumes_removed.append(v_removed)
return np.min(volumes_removed)
def rank_objective(features, w_samples) -> float:
"""
The ranking maximum volume removal objective function.
Note: This objective uses the Plackett-Luce model of
teacher behavior.
CANNOT BE USED WITH (incl_prev_QUERY AND NO DEMPREF).
:param features: a list containing the feature values of each
query
:param w_samples: samples of w, used to approximate the
objective
:return: the value of the objective function, evaluated on the
given queries' features
"""
# features: query_option_count x feature_size
# w_samples: n_samples x feature_size
exp_rewards = (
np.sum(np.dot(features, w_samples.T), axis=1)
/ w_samples.shape[0]
) # query_option_count x 1 -- summed across w_samples
volumes_removed = []
rankings = itertools.permutations(
list(range(self.num_queries))
) # iterating over all possible rankings
for rank in rankings:
exp_rewards_sorted = [None] * len(rank)
for i in range(len(rank)):
exp_rewards_sorted[rank[i]] = exp_rewards[i]
value, i = 1, 0
for i in range(len(rank) - 1):
value *= 1.0 / np.sum(
np.exp(
self.beta_pref
* (
np.array(exp_rewards_sorted[i:])
- exp_rewards_sorted[i]
)
)
)
volumes_removed.append(1 - value)
return np.min(volumes_removed)
# The following optimization is w.r.t. volume removal; the domain's
# optimization is w.r.t. the linear combination of weights and
# features; this difference is a trait of the DemPref codebase.
z = self.trajectory_length * self.domain.control_size
lower_input_bound = [
x[0] for x in self.domain.control_bounds
] * self.trajectory_length
upper_input_bound = [
x[1] for x in self.domain.control_bounds
] * self.trajectory_length
opt_res = opt.fmin_l_bfgs_b(
func,
x0=np.random.uniform(
low=self.num_new_queries * lower_input_bound,
high=self.num_new_queries * upper_input_bound,
size=(self.num_new_queries * z),
),
args=(self.domain, w_samples),
bounds=self.domain.control_bounds
* self.num_new_queries
* self.trajectory_length,
approx_grad=True,
)
query_options_controls = [
opt_res[0][i * z : (i + 1) * z]
for i in range(self.num_new_queries)
]
end = time.perf_counter()
print(f"Finished computing queries in {end - start}s")
# Note the domain was reset w/ appropriate seed before beginning
# this query session; domain.run(c) will thus reset to appropriate
# state:
raw_trajectories = [
self.domain.run(c) for c in query_options_controls
]
raw_phis = [
self.domain.features_from_trajectory(t)
for t in raw_trajectories
]
query_options_trajectories = [
Trajectory(raw_trajectories[i], raw_phis[i])
for i in range(len(raw_trajectories))
]
if self.include_previous_query and not blank_traj:
return [last_query_choice] + query_options_trajectories
else:
return query_options_trajectories
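# Minimal usage sketch (illustrative; assumes an inquire Environment instance named
# `domain` provided by the surrounding evaluation loop -- the parameter values below
# are placeholders, not the paper's settings):
# agent = DemPref(weight_sample_count=100, trajectory_sample_count=50,
#                 trajectory_length=10, interaction_types=[Preference])
# w_samples = agent.initialize_weights(domain)
# query = agent.generate_query(domain, query_state=0, curr_w=w_samples)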
| 2.703125 | 3 |
coba/learners/__init__.py | mrucker/banditbenchmark | 1 | 6199 | <reponame>mrucker/banditbenchmark<gh_stars>1-10
"""This module contains all public learners and learner interfaces."""
from coba.learners.primitives import Learner, SafeLearner
from coba.learners.bandit import EpsilonBanditLearner, UcbBanditLearner, FixedLearner, RandomLearner
from coba.learners.corral import CorralLearner
from coba.learners.vowpal import VowpalMediator
from coba.learners.vowpal import VowpalArgsLearner, VowpalEpsilonLearner, VowpalSoftmaxLearner, VowpalBagLearner
from coba.learners.vowpal import VowpalCoverLearner, VowpalRegcbLearner, VowpalSquarecbLearner, VowpalOffPolicyLearner
from coba.learners.linucb import LinUCBLearner
__all__ = [
'Learner',
'SafeLearner',
'RandomLearner',
'FixedLearner',
'EpsilonBanditLearner',
'UcbBanditLearner',
'CorralLearner',
'LinUCBLearner',
'VowpalArgsLearner',
'VowpalEpsilonLearner',
'VowpalSoftmaxLearner',
'VowpalBagLearner',
'VowpalCoverLearner',
'VowpalRegcbLearner',
'VowpalSquarecbLearner',
'VowpalOffPolicyLearner',
'VowpalMediator'
] | 1.460938 | 1 |
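# Example (illustrative): these names can be imported directly from the package, e.g.
# from coba.learners import RandomLearner
# learner = RandomLearner()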