code | repo_name | path | language | license | size
---|---|---|---|---|---
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
import mock
from airflow.providers.google.cloud.operators.gcs import (
GCSBucketCreateAclEntryOperator, GCSCreateBucketOperator, GCSDeleteBucketOperator,
GCSDeleteObjectsOperator, GcsFileTransformOperator, GCSListObjectsOperator,
GCSObjectCreateAclEntryOperator, GCSToLocalOperator,
)
TASK_ID = "test-gcs-operator"
TEST_BUCKET = "test-bucket"
TEST_PROJECT = "test-project"
DELIMITER = ".csv"
PREFIX = "TEST"
MOCK_FILES = ["TEST1.csv", "TEST2.csv", "TEST3.csv"]
TEST_OBJECT = "dir1/test-object"
LOCAL_FILE_PATH = "/home/airflow/gcp/test-object"
class TestGoogleCloudStorageCreateBucket(unittest.TestCase):
@mock.patch("airflow.providers.google.cloud.operators.gcs.GCSHook")
def test_execute(self, mock_hook):
operator = GCSCreateBucketOperator(
task_id=TASK_ID,
bucket_name=TEST_BUCKET,
resource={
"lifecycle": {
"rule": [{"action": {"type": "Delete"}, "condition": {"age": 7}}]
}
},
storage_class="MULTI_REGIONAL",
location="EU",
labels={"env": "prod"},
project_id=TEST_PROJECT,
)
operator.execute(None)
mock_hook.return_value.create_bucket.assert_called_once_with(
bucket_name=TEST_BUCKET,
storage_class="MULTI_REGIONAL",
location="EU",
labels={"env": "prod"},
project_id=TEST_PROJECT,
resource={
"lifecycle": {
"rule": [{"action": {"type": "Delete"}, "condition": {"age": 7}}]
}
},
)
class TestGoogleCloudStorageAcl(unittest.TestCase):
@mock.patch("airflow.providers.google.cloud.operators.gcs.GCSHook")
def test_bucket_create_acl(self, mock_hook):
operator = GCSBucketCreateAclEntryOperator(
bucket="test-bucket",
entity="test-entity",
role="test-role",
user_project="test-user-project",
task_id="id",
)
operator.execute(None)
mock_hook.return_value.insert_bucket_acl.assert_called_once_with(
bucket_name="test-bucket",
entity="test-entity",
role="test-role",
user_project="test-user-project",
)
@mock.patch("airflow.providers.google.cloud.operators.gcs.GCSHook")
def test_object_create_acl(self, mock_hook):
operator = GCSObjectCreateAclEntryOperator(
bucket="test-bucket",
object_name="test-object",
entity="test-entity",
generation=42,
role="test-role",
user_project="test-user-project",
task_id="id",
)
operator.execute(None)
mock_hook.return_value.insert_object_acl.assert_called_once_with(
bucket_name="test-bucket",
object_name="test-object",
entity="test-entity",
generation=42,
role="test-role",
user_project="test-user-project",
)
class TestGoogleCloudStorageDeleteOperator(unittest.TestCase):
@mock.patch("airflow.providers.google.cloud.operators.gcs.GCSHook")
def test_delete_objects(self, mock_hook):
operator = GCSDeleteObjectsOperator(
task_id=TASK_ID, bucket_name=TEST_BUCKET, objects=MOCK_FILES[0:2]
)
operator.execute(None)
mock_hook.return_value.list.assert_not_called()
mock_hook.return_value.delete.assert_has_calls(
calls=[
mock.call(bucket_name=TEST_BUCKET, object_name=MOCK_FILES[0]),
mock.call(bucket_name=TEST_BUCKET, object_name=MOCK_FILES[1]),
],
any_order=True,
)
@mock.patch("airflow.providers.google.cloud.operators.gcs.GCSHook")
def test_delete_prefix(self, mock_hook):
mock_hook.return_value.list.return_value = MOCK_FILES[1:3]
operator = GCSDeleteObjectsOperator(
task_id=TASK_ID, bucket_name=TEST_BUCKET, prefix=PREFIX
)
operator.execute(None)
mock_hook.return_value.list.assert_called_once_with(
bucket_name=TEST_BUCKET, prefix=PREFIX
)
mock_hook.return_value.delete.assert_has_calls(
calls=[
mock.call(bucket_name=TEST_BUCKET, object_name=MOCK_FILES[1]),
mock.call(bucket_name=TEST_BUCKET, object_name=MOCK_FILES[2]),
],
any_order=True,
)
class TestGoogleCloudStorageDownloadOperator(unittest.TestCase):
@mock.patch("airflow.providers.google.cloud.operators.gcs.GCSHook")
def test_execute(self, mock_hook):
operator = GCSToLocalOperator(
task_id=TASK_ID,
bucket=TEST_BUCKET,
object_name=TEST_OBJECT,
filename=LOCAL_FILE_PATH,
)
operator.execute(None)
mock_hook.return_value.download.assert_called_once_with(
bucket_name=TEST_BUCKET, object_name=TEST_OBJECT, filename=LOCAL_FILE_PATH
)
class TestGoogleCloudStorageListOperator(unittest.TestCase):
@mock.patch("airflow.providers.google.cloud.operators.gcs.GCSHook")
def test_execute(self, mock_hook):
mock_hook.return_value.list.return_value = MOCK_FILES
operator = GCSListObjectsOperator(
task_id=TASK_ID, bucket=TEST_BUCKET, prefix=PREFIX, delimiter=DELIMITER
)
files = operator.execute(None)
mock_hook.return_value.list.assert_called_once_with(
bucket_name=TEST_BUCKET, prefix=PREFIX, delimiter=DELIMITER
)
self.assertEqual(sorted(files), sorted(MOCK_FILES))
class TestGcsFileTransformOperator(unittest.TestCase):
@mock.patch("airflow.providers.google.cloud.operators.gcs.NamedTemporaryFile")
@mock.patch("airflow.providers.google.cloud.operators.gcs.subprocess")
@mock.patch("airflow.providers.google.cloud.operators.gcs.GCSHook")
def test_execute(self, mock_hook, mock_subprocess, mock_tempfile):
source_bucket = TEST_BUCKET
source_object = "test.txt"
destination_bucket = TEST_BUCKET + "-dest"
destination_object = "transformed_test.txt"
transform_script = "script.py"
source = "source"
destination = "destination"
        # Mock the name attribute (it cannot be passed to the Mock
        # constructor, where `name` is reserved for the mock's repr).
mock1 = mock.Mock()
mock2 = mock.Mock()
mock1.name = source
mock2.name = destination
mock_tempfile.return_value.__enter__.side_effect = [mock1, mock2]
mock_subprocess.PIPE = "pipe"
mock_subprocess.STDOUT = "stdout"
mock_subprocess.Popen.return_value.stdout.readline = lambda: b""
mock_subprocess.Popen.return_value.wait.return_value = None
mock_subprocess.Popen.return_value.returncode = 0
op = GcsFileTransformOperator(
task_id=TASK_ID,
source_bucket=source_bucket,
source_object=source_object,
destination_object=destination_object,
destination_bucket=destination_bucket,
transform_script=transform_script,
)
op.execute(None)
mock_hook.return_value.download.assert_called_once_with(
bucket_name=source_bucket, object_name=source_object, filename=source
)
mock_subprocess.Popen.assert_called_once_with(
args=[transform_script, source, destination],
stdout="pipe",
stderr="stdout",
close_fds=True,
)
mock_hook.return_value.upload.assert_called_with(
bucket_name=destination_bucket,
object_name=destination_object,
filename=destination,
)
class TestGCSDeleteBucketOperator(unittest.TestCase):
@mock.patch("airflow.providers.google.cloud.operators.gcs.GCSHook")
def test_delete_bucket(self, mock_hook):
operator = GCSDeleteBucketOperator(
task_id=TASK_ID, bucket_name=TEST_BUCKET)
operator.execute(None)
mock_hook.return_value.delete_bucket.assert_called_once_with(bucket_name=TEST_BUCKET, force=True)
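# A minimal direct-run hook, added for convenience; the upstream suite
# normally discovers these tests with its own runner, so this block is an
# assumption rather than part of the original file.
if __name__ == "__main__":
    unittest.main()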
| spektom/incubator-airflow | tests/providers/google/cloud/operators/test_gcs.py | Python | apache-2.0 | 8,920 |
from abc import ABCMeta, abstractmethod
class Parser(object):
    __metaclass__ = ABCMeta  # was misspelled `__meta__`, which silently disabled ABC enforcement
def __init__(self, source):
self.source = source
@abstractmethod
def schedule(self):
pass
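# Hypothetical usage sketch (not part of the original module): a concrete
# parser implementing the abstract schedule() hook. The one-job-per-line
# source format assumed here is purely illustrative.
class LineParser(Parser):
    def schedule(self):
        # Return one entry per non-empty line of the raw source.
        return [line.strip() for line in self.source.splitlines() if line.strip()]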
| vtemian/uni-west | second_year/os/exams/round2/scheduler/parsers/base.py | Python | apache-2.0 | 207 |
import cv2
import numpy as np
import os
import settings  # assumed import: the disabled code below references settings.IMAGE_SPEED_*
from database import Data
### TRAINING ###
path = ""
samples = np.loadtxt('%sgeneralsamples.data' % path, np.float32)
responses = np.loadtxt('%sgeneralresponses.data' % path, np.float32)
responses = responses.reshape((responses.size, 1))
model = cv2.ml.KNearest_create()
model.train(samples, cv2.ml.ROW_SAMPLE, responses)
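# Note (added commentary): the training files are assumed to follow the
# usual OpenCV KNN OCR layout -- each row of generalsamples.data is a
# flattened 20x20 glyph (400 float32 columns) and the matching row of
# generalresponses.data is its digit label. get_speed() below reshapes each
# candidate digit to (1, 400) to match that layout.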
def get_speed(frame):
"""
:param frame: Captured image
:return: Speed
"""
    # Speed detection is intentionally disabled: return 0 immediately. The
    # original detection code below is kept for reference but is unreachable.
    return 0
speed = frame[settings.IMAGE_SPEED_Y[0]:settings.IMAGE_SPEED_Y[1], settings.IMAGE_SPEED_X[0]:settings.IMAGE_SPEED_X[1]]
speed_gray = cv2.cvtColor(speed, cv2.COLOR_BGR2GRAY)
# Zoom
rows, cols = speed_gray.shape[:2]
M = np.float32([[2, 0, 0], [0, 2, 0]])
speed_zoom = cv2.warpAffine(speed_gray, M, (cols * 2, rows * 2))
_, speed_threshold = cv2.threshold(speed_zoom, 210, 255, cv2.THRESH_BINARY)
to_detect = speed_threshold[:, 26:]
#cv2.imshow('speed', to_detect)
to_detect = cv2.resize(to_detect, (20, 20))
to_detect = to_detect.reshape((1, 400))
to_detect = np.float32(to_detect)
_, results, _, _ = model.findNearest(to_detect, k=1)
return int((results[0][0]))
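# Hedged usage sketch (not in the original file); the capture device
# index 0 is an assumption:
if __name__ == "__main__":
    cap = cv2.VideoCapture(0)
    ok, frame = cap.read()
    if ok:
        print("Detected speed: %d" % get_speed(frame))
    cap.release()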
| BrunoTh/ETS2Autopilot | speed_detection.py | Python | mit | 1,220 |
# Copyright (C) 2010-2015 Cuckoo Foundation.
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
import os
import sys
import requests
import tempfile
import random
import json
from django.conf import settings
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.http import HttpResponse
from web import until
sys.path.append(settings.CUCKOO_PATH)
from lib.cuckoo.common.config import Config
from lib.cuckoo.common.utils import store_temp_file
from lib.cuckoo.common.quarantine import unquarantine
from lib.cuckoo.core.database import Database
import pprint
pp = pprint.PrettyPrinter()
import pymongo
results_db = pymongo.MongoClient(settings.MONGO_HOST, settings.MONGO_PORT)[settings.MONGO_DB]
def force_int(value):
try:
value = int(value)
except:
value = 0
finally:
return value
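# The submission views below repeatedly grow a comma-separated `options`
# string one flag at a time. The helper below is a hypothetical sketch of
# that pattern (illustrative only; the original views inline it each time):
def append_option(options, flag):
    """Return `options` with `flag` appended, comma-separating as needed."""
    return options + ("," if options else "") + flag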
def index(request):
if request.method == "POST":
package = request.POST.get("package", "")
timeout = min(force_int(request.POST.get("timeout")), 60 * 60 * 24)
options = request.POST.get("options", "")
priority = force_int(request.POST.get("priority"))
machine = request.POST.get("machine", "")
gateway = request.POST.get("gateway", None)
clock = request.POST.get("clock", None)
custom = request.POST.get("custom", "")
memory = bool(request.POST.get("memory", False))
enforce_timeout = bool(request.POST.get("enforce_timeout", False))
user_status = bool(request.POST.get("user_status", False))
tags = request.POST.get("tags", None)
if request.POST.get("free"):
if options:
options += ","
options += "free=yes"
if request.POST.get("nohuman"):
if options:
options += ","
options += "nohuman=yes"
if request.POST.get("tor"):
if options:
options += ","
options += "tor=yes"
if request.POST.get("process_memory"):
if options:
options += ","
options += "procmemdump=yes"
if request.POST.get("kernel_analysis"):
if options:
options += ","
options += "kernel_analysis=yes"
if gateway and gateway in settings.GATEWAYS:
if "," in settings.GATEWAYS[gateway]:
tgateway = random.choice(settings.GATEWAYS[gateway].split(","))
ngateway = settings.GATEWAYS[tgateway]
else:
ngateway = settings.GATEWAYS[gateway]
if options:
options += ","
options += "setgw=%s" % (ngateway)
db = Database()
task_ids = []
task_machines = []
if machine.lower() == "all":
for entry in db.list_machines():
task_machines.append(entry.label)
else:
task_machines.append(machine)
if "sample" in request.FILES:
for sample in request.FILES.getlist("sample"):
if sample.size == 0:
return render_to_response("error.html",
{"error": "You uploaded an empty file."},
context_instance=RequestContext(request))
elif sample.size > settings.MAX_UPLOAD_SIZE:
return render_to_response("error.html",
{"error": "You uploaded a file that exceeds that maximum allowed upload size."},
context_instance=RequestContext(request))
# Moving sample from django temporary file to Cuckoo temporary storage to
# let it persist between reboot (if user like to configure it in that way).
path = store_temp_file(sample.read(),
sample.name)
for entry in task_machines:
task_ids_new = db.demux_sample_and_add_to_db(file_path=path, package=package, timeout=timeout, options=options, priority=priority,
machine=entry, custom=custom, memory=memory, enforce_timeout=enforce_timeout, tags=tags, clock=clock)
task_ids.extend(task_ids_new)
elif "quarantine" in request.FILES:
for sample in request.FILES.getlist("quarantine"):
if sample.size == 0:
return render_to_response("error.html",
{"error": "You uploaded an empty quarantine file."},
context_instance=RequestContext(request))
elif sample.size > settings.MAX_UPLOAD_SIZE:
return render_to_response("error.html",
{"error": "You uploaded a quarantine file that exceeds that maximum allowed upload size."},
context_instance=RequestContext(request))
# Moving sample from django temporary file to Cuckoo temporary storage to
# let it persist between reboot (if user like to configure it in that way).
tmp_path = store_temp_file(sample.read(),
sample.name)
path = unquarantine(tmp_path)
try:
os.remove(tmp_path)
except:
pass
if not path:
return render_to_response("error.html",
{"error": "You uploaded an unsupported quarantine file."},
context_instance=RequestContext(request))
for entry in task_machines:
task_ids_new = db.demux_sample_and_add_to_db(file_path=path, package=package, timeout=timeout, options=options, priority=priority,
machine=entry, custom=custom, memory=memory, enforce_timeout=enforce_timeout, tags=tags, clock=clock)
task_ids.extend(task_ids_new)
elif "url" in request.POST and request.POST.get("url").strip():
url = request.POST.get("url").strip()
if not url:
return render_to_response("error.html",
{"error": "You specified an invalid URL!"},
context_instance=RequestContext(request))
url = url.replace("hxxps://", "https://").replace("hxxp://", "http://").replace("[.]", ".")
for entry in task_machines:
task_id = db.add_url(url=url,
package=package,
timeout=timeout,
options=options,
priority=priority,
machine=entry,
custom=custom,
memory=memory,
enforce_timeout=enforce_timeout,
tags=tags,
clock=clock)
if task_id:
task_ids.append(task_id)
elif settings.VTDL_ENABLED and "vtdl" in request.POST:
vtdl = request.POST.get("vtdl").strip()
if (not settings.VTDL_PRIV_KEY and not settings.VTDL_INTEL_KEY) or not settings.VTDL_PATH:
return render_to_response("error.html",
{"error": "You specified VirusTotal but must edit the file and specify your VTDL_PRIV_KEY or VTDL_INTEL_KEY variable and VTDL_PATH base directory"},
context_instance=RequestContext(request))
else:
base_dir = tempfile.mkdtemp(prefix='cuckoovtdl',dir=settings.VTDL_PATH)
hashlist = []
if "," in vtdl:
hashlist=vtdl.split(",")
else:
hashlist.append(vtdl)
onesuccess = False
for h in hashlist:
filename = base_dir + "/" + h
if settings.VTDL_PRIV_KEY:
url = 'https://www.virustotal.com/vtapi/v2/file/download'
params = {'apikey': settings.VTDL_PRIV_KEY, 'hash': h}
else:
url = 'https://www.virustotal.com/intelligence/download/'
params = {'apikey': settings.VTDL_INTEL_KEY, 'hash': h}
try:
r = requests.get(url, params=params, verify=True)
except requests.exceptions.RequestException as e:
return render_to_response("error.html",
{"error": "Error completing connection to VirusTotal: {0}".format(e)},
context_instance=RequestContext(request))
if r.status_code == 200:
try:
f = open(filename, 'wb')
f.write(r.content)
f.close()
except:
return render_to_response("error.html",
{"error": "Error writing VirusTotal download file to temporary path"},
context_instance=RequestContext(request))
onesuccess = True
for entry in task_machines:
task_ids_new = db.demux_sample_and_add_to_db(file_path=filename, package=package, timeout=timeout, options=options, priority=priority,
machine=entry, custom=custom, memory=memory, enforce_timeout=enforce_timeout, tags=tags, clock=clock)
task_ids.extend(task_ids_new)
elif r.status_code == 403:
return render_to_response("error.html",
{"error": "API key provided is not a valid VirusTotal key or is not authorized for VirusTotal downloads"},
context_instance=RequestContext(request))
if not onesuccess:
return render_to_response("error.html",
{"error": "Provided hash not found on VirusTotal"},
context_instance=RequestContext(request))
tasks_count = len(task_ids)
if tasks_count > 0:
return render_to_response("submission/complete.html",
{"tasks" : task_ids,
"tasks_count" : tasks_count},
context_instance=RequestContext(request))
else:
return render_to_response("error.html",
{"error": "Error adding task to Cuckoo's database."},
context_instance=RequestContext(request))
else:
enabledconf = dict()
enabledconf["vt"] = settings.VTDL_ENABLED
enabledconf["kernel"] = settings.OPT_ZER0M0N
enabledconf["memory"] = Config("processing").memory.get("enabled")
enabledconf["procmemory"] = Config("processing").procmemory.get("enabled")
enabledconf["tor"] = Config("auxiliary").tor.get("enabled")
if Config("auxiliary").gateways:
enabledconf["gateways"] = True
else:
enabledconf["gateways"] = False
enabledconf["tags"] = False
# Get enabled machinery
machinery = Config("cuckoo").cuckoo.get("machinery")
# Get VM names for machinery config elements
vms = [x.strip() for x in getattr(Config(machinery), machinery).get("machines").split(",")]
# Check each VM config element for tags
for vmtag in vms:
if "tags" in getattr(Config(machinery), vmtag).keys():
enabledconf["tags"] = True
files = os.listdir(os.path.join(settings.CUCKOO_PATH, "analyzer", "windows", "modules", "packages"))
packages = []
for name in files:
name = os.path.splitext(name)[0]
if name == "__init__":
continue
packages.append(name)
# Prepare a list of VM names, description label based on tags.
machines = []
for machine in Database().list_machines():
tags = []
for tag in machine.tags:
tags.append(tag.name)
if tags:
label = machine.label + ": " + ", ".join(tags)
else:
label = machine.label
machines.append((machine.label, label))
# Prepend ALL/ANY options.
machines.insert(0, ("", "First available"))
machines.insert(1, ("all", "All"))
return render_to_response("submission/index.html",
{"packages": sorted(packages),
"machines": machines,
"gateways": settings.GATEWAYS,
"config": enabledconf},
context_instance=RequestContext(request))
def submit_url(request):
if request.method == "POST":
package = request.POST.get("package", "")
timeout = min(force_int(request.POST.get("timeout")), 60 * 60 * 24)
options = request.POST.get("options", "")
priority = force_int(request.POST.get("priority"))
machine = request.POST.get("machine", "")
gateway = request.POST.get("gateway", None)
clock = request.POST.get("clock", None)
custom = request.POST.get("custom", "")
memory = bool(request.POST.get("memory", False))
enforce_timeout = bool(request.POST.get("enforce_timeout", False))
status = bool(request.POST.get("user_status", False))
if not status:
user_status=0
else:
user_status=1
        if request.user.id is None:
user_id = 1
else:
user_id = request.user.id
tags = request.POST.get("tags", None)
if request.POST.get("free"):
if options:
options += ","
options += "free=yes"
if request.POST.get("nohuman"):
if options:
options += ","
options += "nohuman=yes"
if request.POST.get("tor"):
if options:
options += ","
options += "tor=yes"
if request.POST.get("process_memory"):
if options:
options += ","
options += "procmemdump=yes"
if request.POST.get("kernel_analysis"):
if options:
options += ","
options += "kernel_analysis=yes"
if gateway and gateway in settings.GATEWAYS:
if "," in settings.GATEWAYS[gateway]:
tgateway = random.choice(settings.GATEWAYS[gateway].split(","))
ngateway = settings.GATEWAYS[tgateway]
else:
ngateway = settings.GATEWAYS[gateway]
if options:
options += ","
options += "setgw=%s" % (ngateway)
db = Database()
task_ids = []
task_machines = []
if machine.lower() == "all":
for entry in db.list_machines():
task_machines.append(entry.label)
else:
task_machines.append(machine)
if "url" in request.POST and request.POST.get("url").strip():
url = request.POST.get("url").strip()
if not url:
return render_to_response("error.html",
{"error": "You specified an invalid URL!"},
context_instance=RequestContext(request))
            previous_analysis = results_db.analysis.find({"target.url": url}).sort([["_id", -1]])
task = []
            for single in previous_analysis:
#pp.pprint(single)
single["info"]["base64"] = until.encrpt(single["info"]["id"])
single["info"]["url"] = single["target"]["url"]
pp.pprint(single["info"])
task.append(single["info"])
second_post = json.dumps({"url":url,"package":package,"timeout":timeout,"options":options,"priority":priority,"custom":custom,"memory":memory,"enforce_timeout":enforce_timeout,"tags":tags,"clock":clock,"user_status":user_status,"user_id":user_id},sort_keys=True)
            if previous_analysis.count() >= 1:
return render_to_response("submission/ShowSimilarUrl.html",
{"tasks" : task, "params" : second_post},
context_instance=RequestContext(request))
url = url.replace("hxxps://", "https://").replace("hxxp://", "http://").replace("[.]", ".")
for entry in task_machines:
task_id = db.add_url(url=url,
package=package,
timeout=timeout,
options=options,
priority=priority,
machine=entry,
custom=custom,
memory=memory,
enforce_timeout=enforce_timeout,
tags=tags,
clock=clock,
user_status=user_status,
user_id=user_id)
if task_id:
#pp.pprint(task_id)
task_ids.append(until.encrpt(task_id))
tasks_count = len(task_ids)
if tasks_count > 0:
return render_to_response("submission/complete.html",
{"tasks" : task_ids,
"tasks_count" : tasks_count},
context_instance=RequestContext(request))
else:
return render_to_response("error.html",
{"error": "Error adding task to Cuckoo's database."},
context_instance=RequestContext(request))
else:
enabledconf = dict()
enabledconf["vt"] = settings.VTDL_ENABLED
enabledconf["kernel"] = settings.OPT_ZER0M0N
enabledconf["memory"] = Config("processing").memory.get("enabled")
enabledconf["procmemory"] = Config("processing").procmemory.get("enabled")
enabledconf["tor"] = Config("auxiliary").tor.get("enabled")
if Config("auxiliary").gateways:
enabledconf["gateways"] = True
else:
enabledconf["gateways"] = False
enabledconf["tags"] = False
# Get enabled machinery
machinery = Config("cuckoo").cuckoo.get("machinery")
# Get VM names for machinery config elements
vms = [x.strip() for x in getattr(Config(machinery), machinery).get("machines").split(",")]
# Check each VM config element for tags
for vmtag in vms:
if "tags" in getattr(Config(machinery), vmtag).keys():
enabledconf["tags"] = True
files = os.listdir(os.path.join(settings.CUCKOO_PATH, "analyzer", "windows", "modules", "packages"))
packages = []
for name in files:
name = os.path.splitext(name)[0]
if name == "__init__":
continue
packages.append(name)
# Prepare a list of VM names, description label based on tags.
machines = []
for machine in Database().list_machines():
tags = []
for tag in machine.tags:
tags.append(tag.name)
if tags:
label = machine.label + ": " + ", ".join(tags)
else:
label = machine.label
machines.append((machine.label, label))
# Prepend ALL/ANY options.
machines.insert(0, ("", "First available"))
machines.insert(1, ("all", "All"))
return render_to_response("submission/submit_url.html",
{"packages": sorted(packages),
"machines": machines,
"gateways": settings.GATEWAYS,
"config": enabledconf},
context_instance=RequestContext(request))
def submit_file(request):
if request.method == "POST":
package = request.POST.get("package", "")
timeout = min(force_int(request.POST.get("timeout")), 60 * 60 * 24)
options = request.POST.get("options", "")
priority = force_int(request.POST.get("priority"))
machine = request.POST.get("machine", "")
gateway = request.POST.get("gateway", None)
clock = request.POST.get("clock", None)
custom = request.POST.get("custom", "")
memory = bool(request.POST.get("memory", False))
enforce_timeout = bool(request.POST.get("enforce_timeout", False))
status = bool(request.POST.get("user_status", False))
if not status:
user_status=0
else:
user_status=1
        if request.user.id is None:
user_id = 1
else:
user_id = request.user.id
tags = request.POST.get("tags", None)
if request.POST.get("free"):
if options:
options += ","
options += "free=yes"
if request.POST.get("nohuman"):
if options:
options += ","
options += "nohuman=yes"
if request.POST.get("tor"):
if options:
options += ","
options += "tor=yes"
if request.POST.get("process_memory"):
if options:
options += ","
options += "procmemdump=yes"
if request.POST.get("kernel_analysis"):
if options:
options += ","
options += "kernel_analysis=yes"
if gateway and gateway in settings.GATEWAYS:
if "," in settings.GATEWAYS[gateway]:
tgateway = random.choice(settings.GATEWAYS[gateway].split(","))
ngateway = settings.GATEWAYS[tgateway]
else:
ngateway = settings.GATEWAYS[gateway]
if options:
options += ","
options += "setgw=%s" % (ngateway)
db = Database()
task_ids = []
task_machines = []
if machine.lower() == "all":
for entry in db.list_machines():
task_machines.append(entry.label)
else:
task_machines.append(machine)
if "sample" in request.FILES:
for sample in request.FILES.getlist("sample"):
if sample.size == 0:
return render_to_response("error.html",
{"error": "You uploaded an empty file."},
context_instance=RequestContext(request))
elif sample.size > settings.MAX_UPLOAD_SIZE:
return render_to_response("error.html",
{"error": "You uploaded a file that exceeds that maximum allowed upload size."},
context_instance=RequestContext(request))
# Moving sample from django temporary file to Cuckoo temporary storage to
# let it persist between reboot (if user like to configure it in that way).
path = store_temp_file(sample.read(),
sample.name)
pp.pprint("\nFile Path is %s\n" % path)
currentMD5 = until.getBigFileMD5(path)
                previous_analysis = results_db.analysis.find({"target.file.md5": currentMD5}).sort([["_id", -1]])
task = []
                for single in previous_analysis:
#pp.pprint(single)
single["info"]["base64"] = until.encrpt(single["info"]["id"])
single["info"]["filename"] = single["target"]["file"]["name"]
pp.pprint(single["info"])
task.append(single["info"])
second_post = json.dumps({"file_path":path,"package":package,"timeout":timeout,"options":options,"machine":machine,"priority":priority,"custom":custom,"memory":memory,"enforce_timeout":enforce_timeout,"tags":tags,"clock":clock,"user_status":user_status,"user_id":user_id}, sort_keys=True)
pp.pprint(second_post)
                if previous_analysis.count() >= 1:
return render_to_response("submission/ShowSimilar.html",
{"tasks" : task, "params" : second_post},
context_instance=RequestContext(request))
else:
#tempfilePath = request.POST.get("file_path", "")
for entry in task_machines:
task_ids_new = db.demux_sample_and_add_to_db(file_path=path, package=package, timeout=timeout, options=options, priority=priority,
machine=entry, custom=custom, memory=memory, enforce_timeout=enforce_timeout, tags=tags, clock=clock, user_status=user_status, user_id=user_id)
pp.pprint(task_ids_new)
final_task_ids=[]
for taskId in task_ids_new:
final_task_ids.append(until.encrpt(taskId))
task_ids.extend(final_task_ids)
tasks_count = len(task_ids)
pp.pprint(task_ids)
if tasks_count > 0:
return render_to_response("submission/complete.html",
{"tasks" : task_ids,
"tasks_count" : tasks_count},
context_instance=RequestContext(request))
else:
return render_to_response("error.html",
{"error": "Error adding task to Cuckoo's database."},
context_instance=RequestContext(request))
else:
enabledconf = dict()
enabledconf["vt"] = settings.VTDL_ENABLED
enabledconf["kernel"] = settings.OPT_ZER0M0N
enabledconf["memory"] = Config("processing").memory.get("enabled")
enabledconf["procmemory"] = Config("processing").procmemory.get("enabled")
enabledconf["tor"] = Config("auxiliary").tor.get("enabled")
if Config("auxiliary").gateways:
enabledconf["gateways"] = True
else:
enabledconf["gateways"] = False
enabledconf["tags"] = False
# Get enabled machinery
machinery = Config("cuckoo").cuckoo.get("machinery")
# Get VM names for machinery config elements
vms = [x.strip() for x in getattr(Config(machinery), machinery).get("machines").split(",")]
# Check each VM config element for tags
for vmtag in vms:
if "tags" in getattr(Config(machinery), vmtag).keys():
enabledconf["tags"] = True
files = os.listdir(os.path.join(settings.CUCKOO_PATH, "analyzer", "windows", "modules", "packages"))
packages = []
for name in files:
name = os.path.splitext(name)[0]
if name == "__init__":
continue
packages.append(name)
# Prepare a list of VM names, description label based on tags.
machines = []
for machine in Database().list_machines():
tags = []
for tag in machine.tags:
tags.append(tag.name)
if tags:
label = machine.label + ": " + ", ".join(tags)
else:
label = machine.label
machines.append((machine.label, label))
# Prepend ALL/ANY options.
machines.insert(0, ("", "First available"))
machines.insert(1, ("all", "All"))
return render_to_response("submission/submit_file.html",
{"packages": sorted(packages),
"machines": machines,
"gateways": settings.GATEWAYS,
"config": enabledconf},
context_instance=RequestContext(request))
def ajax_submit_file(request):
if request.method == "POST":
package = request.POST.get("package", "")
timeout = min(force_int(request.POST.get("timeout")), 60 * 60 * 24)
options = request.POST.get("options", "")
priority = force_int(request.POST.get("priority"))
machine = request.POST.get("machine", "")
gateway = request.POST.get("gateway", None)
clock = request.POST.get("clock", None)
custom = request.POST.get("custom", "")
memory = bool(request.POST.get("memory", False))
enforce_timeout = bool(request.POST.get("enforce_timeout", False))
status = request.POST.get("user_status", False)
print "AJAX SUBMIT FILE USER STATUS %s" % status
#if not status:
# user_status=0
#else:
# user_status=1
        if request.user.id is None:
user_id = 1
else:
user_id = request.user.id
tags = request.POST.get("tags", None)
if request.POST.get("free"):
if options:
options += ","
options += "free=yes"
if request.POST.get("nohuman"):
if options:
options += ","
options += "nohuman=yes"
if request.POST.get("tor"):
if options:
options += ","
options += "tor=yes"
if request.POST.get("process_memory"):
if options:
options += ","
options += "procmemdump=yes"
if request.POST.get("kernel_analysis"):
if options:
options += ","
options += "kernel_analysis=yes"
if gateway and gateway in settings.GATEWAYS:
if "," in settings.GATEWAYS[gateway]:
tgateway = random.choice(settings.GATEWAYS[gateway].split(","))
ngateway = settings.GATEWAYS[tgateway]
else:
ngateway = settings.GATEWAYS[gateway]
if options:
options += ","
options += "setgw=%s" % (ngateway)
db = Database()
task_ids = []
task_machines = []
if machine.lower() == "all":
for entry in db.list_machines():
task_machines.append(entry.label)
else:
task_machines.append(machine)
tempfilePath = request.POST.get("file_path", "")
print "AJAX SUBMIT FILE TAMP FILE PATH %s" % tempfilePath
if tempfilePath:
for entry in task_machines:
print "AJAX LIST MACHINE NAME %s" % entry
task_ids_new = db.demux_sample_and_add_to_db(file_path=tempfilePath, package=package, timeout=timeout, options=options, priority=priority,
machine=entry, custom=custom, memory=memory, enforce_timeout=enforce_timeout, tags=tags, clock=clock, user_status=status, user_id=user_id)
#pp.pprint(task_ids_new)
final_task_ids=[]
for taskId in task_ids_new:
final_task_ids.append(until.encrpt(taskId))
task_ids.extend(final_task_ids)
tasks_count = len(task_ids)
pp.pprint(task_ids)
# task_ids = ["YXNkZmRzZmFkc2YxMTVkc2Zhc2RmYXNkZg=="]
# tasks_count = 1
if tasks_count > 0:
return HttpResponse(json.dumps({"correct": "%s" % task_ids[0]}), content_type="application/json")
else:
return HttpResponse(json.dumps({"error": "Error adding task to Cuckoo's database."}), content_type="application/json")
else:
return HttpResponse(json.dumps({"error": "Error adding task to Cuckoo's database."}), content_type="application/json")
def ajax_submit_url(request):
if request.method == "POST":
package = request.POST.get("package", "")
timeout = min(force_int(request.POST.get("timeout")), 60 * 60 * 24)
options = request.POST.get("options", "")
priority = force_int(request.POST.get("priority"))
machine = request.POST.get("machine", "")
gateway = request.POST.get("gateway", None)
clock = request.POST.get("clock", None)
custom = request.POST.get("custom", "")
memory = bool(request.POST.get("memory", False))
enforce_timeout = bool(request.POST.get("enforce_timeout", False))
status = bool(request.POST.get("user_status", False))
# if not status:
# user_status=0
# else:
# user_status=1
        if request.user.id is None:
user_id = 1
else:
user_id = request.user.id
tags = request.POST.get("tags", None)
if request.POST.get("free"):
if options:
options += ","
options += "free=yes"
if request.POST.get("nohuman"):
if options:
options += ","
options += "nohuman=yes"
if request.POST.get("tor"):
if options:
options += ","
options += "tor=yes"
if request.POST.get("process_memory"):
if options:
options += ","
options += "procmemdump=yes"
if request.POST.get("kernel_analysis"):
if options:
options += ","
options += "kernel_analysis=yes"
if gateway and gateway in settings.GATEWAYS:
if "," in settings.GATEWAYS[gateway]:
tgateway = random.choice(settings.GATEWAYS[gateway].split(","))
ngateway = settings.GATEWAYS[tgateway]
else:
ngateway = settings.GATEWAYS[gateway]
if options:
options += ","
options += "setgw=%s" % (ngateway)
db = Database()
task_ids = []
task_machines = []
if machine.lower() == "all":
for entry in db.list_machines():
task_machines.append(entry.label)
else:
task_machines.append(machine)
if "url" in request.POST and request.POST.get("url").strip():
url = request.POST.get("url").strip()
if not url:
return render_to_response("error.html",
{"error": "You specified an invalid URL!"},
context_instance=RequestContext(request))
url = url.replace("hxxps://", "https://").replace("hxxp://", "http://").replace("[.]", ".")
for entry in task_machines:
task_id = db.add_url(url=url,
package=package,
timeout=timeout,
options=options,
priority=priority,
machine=entry,
custom=custom,
memory=memory,
enforce_timeout=enforce_timeout,
tags=tags,
clock=clock,
user_status=user_status,
user_id=user_id)
if task_id:
#pp.pprint(task_id)
task_ids.append(until.encrpt(task_id))
tasks_count = len(task_ids)
if tasks_count > 0:
return HttpResponse(json.dumps({"correct": "%s" % task_ids[0]}), content_type="application/json")
else:
return HttpResponse(json.dumps({"error": "Error adding task to Cuckoo's database."}), content_type="application/json")
def status(request, task_id):
    print(task_id)
task_id = until.decrpt(task_id)
    print(task_id)
task = Database().view_task(task_id)
if not task:
return render_to_response("error.html",
{"error": "The specified task doesn't seem to exist."},
context_instance=RequestContext(request))
completed = False
if task.status == "reported":
completed = True
status = task.status
if status == "completed":
status = "processing"
return render_to_response("submission/status.html",
{"completed" : completed,
"status" : status,
"task_id" : until.encrpt(task_id)},
context_instance=RequestContext(request))
| lixiangning888/whole_project | web/submission/views.py | Python | lgpl-3.0 | 38,047 |
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import bpy
from bpy.props import IntProperty, EnumProperty
from sverchok.node_tree import SverchCustomTreeNode
from sverchok.data_structure import (match_short, match_long_cycle, updateNode,
match_long_repeat, match_cross2,
SvSetSocketAnyType, SvGetSocketAnyType)
#
# List Match Node by Linus Yng
#
# could be moved to util
class ListMatchNode(bpy.types.Node, SverchCustomTreeNode):
''' Stream Matching node '''
bl_idname = 'ListMatchNode'
bl_label = 'List Match'
bl_icon = 'OUTLINER_OB_EMPTY'
level = IntProperty(name='level', description='Choose level of data (see help)',
default=1, min=1,
update=updateNode)
modes = [("SHORT", "Short", "Shortest List", 1),
("CYCLE", "Cycle", "Longest List", 2),
("REPEAT", "Repeat", "Longest List", 3),
("XREF", "X-Ref", "Cross reference", 4)]
mode = EnumProperty(default='REPEAT', items=modes,
update=updateNode)
mode_final = EnumProperty(default='REPEAT', items=modes,
update=updateNode)
def sv_init(self, context):
self.inputs.new('StringsSocket', 'Data 0', 'Data 0')
self.inputs.new('StringsSocket', 'Data 1', 'Data 1')
self.outputs.new('StringsSocket', 'Data 0', 'Data 0')
self.outputs.new('StringsSocket', 'Data 1', 'Data 1')
def draw_buttons(self, context, layout):
layout.prop(self, "level", text="Level")
layout.label("Recurse/Final")
layout.prop(self, "mode", expand=True)
layout.prop(self, "mode_final", expand=True)
# recursive update of match function, now performs match function until depth
# works for short&long and simple scenarios. respect sub lists
# matches until the chosen level
# f2 is applied to the final level of matching,
# f1 is applied to every level until the final, where f2 is used.
def match(self, lsts, level, f1, f2):
level -= 1
if level:
tmp = [self.match(obj, level, f1, f2) for obj in zip(*f1(lsts))]
return list(map(list, zip(*tmp)))
elif type(lsts) == list:
return f2(lsts)
elif type(lsts) == tuple:
return tuple(f2(list(lsts)))
return None
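    # Worked example of the recursion above (added commentary): with
    # level=1, match([[1, 2], [10]], 1, f1, f2) applies f2 straight to the
    # pair of lists; with level=2 the outer lists are first aligned with f1,
    # then each pair of sub-lists is matched element-wise with f2.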
def update(self):
# inputs
# these functions are in util.py
# socket handling
if self.inputs[-1].links:
name = 'Data '+str(len(self.inputs))
self.inputs.new('StringsSocket', name, name)
self.outputs.new('StringsSocket', name, name)
else:
while len(self.inputs) > 2 and not self.inputs[-2].links:
self.inputs.remove(self.inputs[-1])
self.outputs.remove(self.outputs[-1])
# check number of connections and type match input socket n with output socket n
count_inputs = 0
count_outputs = 0
for idx, socket in enumerate(self.inputs):
if socket.name in self.outputs and self.outputs[socket.name].links:
count_outputs += 1
if socket.links:
count_inputs += 1
if type(socket.links[0].from_socket) != type(self.outputs[socket.name]):
self.outputs.remove(self.outputs[socket.name])
self.outputs.new(socket.links[0].from_socket.bl_idname, socket.name, socket.name)
self.outputs.move(len(self.outputs)-1, idx)
def process(self):
# check inputs and that there is at least one output
func_dict = {
'SHORT': match_short,
'CYCLE': match_long_cycle,
'REPEAT': match_long_repeat,
'XREF': match_cross2
}
count_inputs = sum(s.is_linked for s in self.inputs)
count_outputs = sum(s.is_linked for s in self.outputs)
if count_inputs == len(self.inputs)-1 and count_outputs:
out = []
lsts = []
# get data
for socket in self.inputs:
if socket.is_linked:
lsts.append(SvGetSocketAnyType(self, socket))
out = self.match(lsts, self.level, func_dict[self.mode], func_dict[self.mode_final])
# output into linked sockets s
for i, socket in enumerate(self.outputs):
if i == len(out): # never write to last socket
break
if socket.is_linked:
SvSetSocketAnyType(self, socket.name, out[i])
def register():
bpy.utils.register_class(ListMatchNode)
def unregister():
bpy.utils.unregister_class(ListMatchNode)
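# Hedged illustration (not part of the node): the four matching strategies
# behave roughly like the pure-Python sketch below. The names mirror
# sverchok.data_structure, but these simplified bodies are assumptions.
from itertools import cycle, islice, product

def demo_match_short(lists):
    n = min(len(l) for l in lists)   # trim every list to the shortest
    return [l[:n] for l in lists]

def demo_match_long_cycle(lists):
    n = max(len(l) for l in lists)   # cycle shorter lists to the longest
    return [list(islice(cycle(l), n)) for l in lists]

def demo_match_long_repeat(lists):
    n = max(len(l) for l in lists)   # repeat the last element instead
    return [l + [l[-1]] * (n - len(l)) for l in lists]

def demo_match_cross(lists):
    # full cross reference: every combination, transposed back per input
    return [list(t) for t in zip(*product(*lists))]

# e.g. demo_match_long_repeat([[1, 2, 3], [10]]) -> [[1, 2, 3], [10, 10, 10]]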
| kilon/sverchok | nodes/list_basic/match.py | Python | gpl-3.0 | 5,511 |
# -*- coding: utf-8 -*-
"""
End-to-end tests for the LMS Instructor Dashboard.
"""
from ..helpers import UniqueCourseTest
from ...pages.lms.auto_auth import AutoAuthPage
from ...pages.lms.instructor_dashboard import InstructorDashboardPage
from ...fixtures.course import CourseFixture
class AutoEnrollmentWithCSVTest(UniqueCourseTest):
"""
End-to-end tests for Auto-Registration and enrollment functionality via CSV file.
"""
def setUp(self):
super(AutoEnrollmentWithCSVTest, self).setUp()
self.course_fixture = CourseFixture(**self.course_info).install()
# login as an instructor
AutoAuthPage(self.browser, course_id=self.course_id, staff=True).visit()
# go to the membership page on the instructor dashboard
instructor_dashboard_page = InstructorDashboardPage(self.browser, self.course_id)
instructor_dashboard_page.visit()
self.auto_enroll_section = instructor_dashboard_page.select_membership().select_auto_enroll_section()
def test_browse_and_upload_buttons_are_visible(self):
"""
Scenario: On the Membership tab of the Instructor Dashboard, Auto-Enroll Browse and Upload buttons are visible.
Given that I am on the Membership tab on the Instructor Dashboard
Then I see the 'REGISTER/ENROLL STUDENTS' section on the page with the 'Browse' and 'Upload' buttons
"""
self.assertTrue(self.auto_enroll_section.is_file_attachment_browse_button_visible())
self.assertTrue(self.auto_enroll_section.is_upload_button_visible())
def test_clicking_file_upload_button_without_file_shows_error(self):
"""
Scenario: Clicking on the upload button without specifying a CSV file results in error.
Given that I am on the Membership tab on the Instructor Dashboard
When I click the Upload Button without specifying a CSV file
Then I should be shown an Error Notification
And The Notification message should read 'File is not attached.'
"""
self.auto_enroll_section.click_upload_file_button()
self.assertTrue(self.auto_enroll_section.is_notification_displayed(section_type=self.auto_enroll_section.NOTIFICATION_ERROR))
self.assertEqual(self.auto_enroll_section.first_notification_message(section_type=self.auto_enroll_section.NOTIFICATION_ERROR), "File is not attached.")
def test_uploading_correct_csv_file_results_in_success(self):
"""
Scenario: Uploading a CSV with correct data results in Success.
Given that I am on the Membership tab on the Instructor Dashboard
When I select a csv file with correct data and click the Upload Button
Then I should be shown a Success Notification.
"""
self.auto_enroll_section.upload_correct_csv_file()
self.assertTrue(self.auto_enroll_section.is_notification_displayed(section_type=self.auto_enroll_section.NOTIFICATION_SUCCESS))
def test_uploading_csv_file_with_bad_data_results_in_errors_and_warnings(self):
"""
Scenario: Uploading a CSV with incorrect data results in error and warnings.
Given that I am on the Membership tab on the Instructor Dashboard
When I select a csv file with incorrect data and click the Upload Button
Then I should be shown an Error Notification
And a corresponding Error Message.
And I should be shown a Warning Notification
And a corresponding Warning Message.
"""
self.auto_enroll_section.upload_csv_file_with_errors_warnings()
self.assertTrue(self.auto_enroll_section.is_notification_displayed(section_type=self.auto_enroll_section.NOTIFICATION_ERROR))
self.assertEqual(self.auto_enroll_section.first_notification_message(section_type=self.auto_enroll_section.NOTIFICATION_ERROR), "Data in row #2 must have exactly four columns: email, username, full name, and country")
self.assertTrue(self.auto_enroll_section.is_notification_displayed(section_type=self.auto_enroll_section.NOTIFICATION_WARNING))
self.assertEqual(self.auto_enroll_section.first_notification_message(section_type=self.auto_enroll_section.NOTIFICATION_WARNING), "ename ([email protected]): (An account with email [email protected] exists but the provided username ename is different. Enrolling anyway with [email protected].)")
def test_uploading_non_csv_file_results_in_error(self):
"""
Scenario: Uploading an image file for auto-enrollment results in error.
Given that I am on the Membership tab on the Instructor Dashboard
When I select an image file (a non-csv file) and click the Upload Button
Then I should be shown an Error Notification
And The Notification message should read 'Could not read uploaded file.'
"""
self.auto_enroll_section.upload_non_csv_file()
self.assertTrue(self.auto_enroll_section.is_notification_displayed(section_type=self.auto_enroll_section.NOTIFICATION_ERROR))
self.assertEqual(self.auto_enroll_section.first_notification_message(section_type=self.auto_enroll_section.NOTIFICATION_ERROR), "Could not read uploaded file.")
| OmarIthawi/edx-platform | common/test/acceptance/tests/lms/test_lms_instructor_dashboard.py | Python | agpl-3.0 | 5,227 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class Users(models.Model):
_inherit = 'res.users'
karma = fields.Integer('Karma', default=0)
karma_tracking_ids = fields.One2many('gamification.karma.tracking', 'user_id', string='Karma Changes', groups="base.group_system")
badge_ids = fields.One2many('gamification.badge.user', 'user_id', string='Badges', copy=False)
gold_badge = fields.Integer('Gold badges count', compute="_get_user_badge_level")
silver_badge = fields.Integer('Silver badges count', compute="_get_user_badge_level")
bronze_badge = fields.Integer('Bronze badges count', compute="_get_user_badge_level")
rank_id = fields.Many2one('gamification.karma.rank', 'Rank', index=False)
next_rank_id = fields.Many2one('gamification.karma.rank', 'Next Rank', index=False)
@api.depends('badge_ids')
def _get_user_badge_level(self):
""" Return total badge per level of users
TDE CLEANME: shouldn't check type is forum ? """
for user in self:
user.gold_badge = 0
user.silver_badge = 0
user.bronze_badge = 0
self.env.cr.execute("""
SELECT bu.user_id, b.level, count(1)
FROM gamification_badge_user bu, gamification_badge b
WHERE bu.user_id IN %s
AND bu.badge_id = b.id
AND b.level IS NOT NULL
GROUP BY bu.user_id, b.level
ORDER BY bu.user_id;
""", [tuple(self.ids)])
for (user_id, level, count) in self.env.cr.fetchall():
# levels are gold, silver, bronze but fields have _badge postfix
self.browse(user_id)['{}_badge'.format(level)] = count
@api.model_create_multi
def create(self, values_list):
res = super(Users, self).create(values_list)
karma_trackings = []
for user in res:
if user.karma:
karma_trackings.append({'user_id': user.id, 'old_value': 0, 'new_value': user.karma})
if karma_trackings:
self.env['gamification.karma.tracking'].sudo().create(karma_trackings)
res._recompute_rank()
return res
def write(self, vals):
karma_trackings = []
if 'karma' in vals:
for user in self:
if user.karma != vals['karma']:
karma_trackings.append({'user_id': user.id, 'old_value': user.karma, 'new_value': vals['karma']})
result = super(Users, self).write(vals)
if karma_trackings:
self.env['gamification.karma.tracking'].sudo().create(karma_trackings)
if 'karma' in vals:
self._recompute_rank()
return result
def add_karma(self, karma):
for user in self:
user.karma += karma
return True
def _get_tracking_karma_gain_position(self, user_domain, from_date=None, to_date=None):
""" Get absolute position in term of gained karma for users. First a ranking
of all users is done given a user_domain; then the position of each user
belonging to the current record set is extracted.
Example: in website profile, search users with name containing Norbert. Their
positions should not be 1 to 4 (assuming 4 results), but their actual position
in the karma gain ranking (with example user_domain being karma > 1,
website published True).
:param user_domain: general domain (i.e. active, karma > 1, website, ...)
to compute the absolute position of the current record set
:param from_date: compute karma gained after this date (included) or from
beginning of time;
:param to_date: compute karma gained before this date (included) or until
end of time;
:return list: [{
'user_id': user_id (belonging to current record set),
'karma_gain_total': integer, karma gained in the given timeframe,
'karma_position': integer, ranking position
}, {..}] ordered by karma_position desc
"""
if not self:
return []
where_query = self.env['res.users']._where_calc(user_domain)
user_from_clause, user_where_clause, where_clause_params = where_query.get_sql()
params = []
if from_date:
date_from_condition = 'AND tracking.tracking_date::timestamp >= timestamp %s'
params.append(from_date)
if to_date:
date_to_condition = 'AND tracking.tracking_date::timestamp <= timestamp %s'
params.append(to_date)
params.append(tuple(self.ids))
query = """
SELECT final.user_id, final.karma_gain_total, final.karma_position
FROM (
SELECT intermediate.user_id, intermediate.karma_gain_total, row_number() OVER (ORDER BY intermediate.karma_gain_total DESC) AS karma_position
FROM (
SELECT "res_users".id as user_id, COALESCE(SUM("tracking".new_value - "tracking".old_value), 0) as karma_gain_total
FROM %(user_from_clause)s
LEFT JOIN "gamification_karma_tracking" as "tracking"
ON "res_users".id = "tracking".user_id AND "res_users"."active" = TRUE
WHERE %(user_where_clause)s %(date_from_condition)s %(date_to_condition)s
GROUP BY "res_users".id
ORDER BY karma_gain_total DESC
) intermediate
) final
WHERE final.user_id IN %%s""" % {
'user_from_clause': user_from_clause,
'user_where_clause': user_where_clause or (not from_date and not to_date and 'TRUE') or '',
'date_from_condition': date_from_condition if from_date else '',
'date_to_condition': date_to_condition if to_date else ''
}
self.env.cr.execute(query, tuple(where_clause_params + params))
return self.env.cr.dictfetchall()
def _get_karma_position(self, user_domain):
""" Get absolute position in term of total karma for users. First a ranking
of all users is done given a user_domain; then the position of each user
belonging to the current record set is extracted.
Example: in website profile, search users with name containing Norbert. Their
positions should not be 1 to 4 (assuming 4 results), but their actual position
in the total karma ranking (with example user_domain being karma > 1,
website published True).
:param user_domain: general domain (i.e. active, karma > 1, website, ...)
to compute the absolute position of the current record set
:return list: [{
'user_id': user_id (belonging to current record set),
'karma_position': integer, ranking position
}, {..}] ordered by karma_position desc
"""
if not self:
return {}
where_query = self.env['res.users']._where_calc(user_domain)
user_from_clause, user_where_clause, where_clause_params = where_query.get_sql()
# we search on every user in the DB to get the real positioning (not the one inside the subset)
# then, we filter to get only the subset.
query = """
SELECT sub.user_id, sub.karma_position
FROM (
SELECT "res_users"."id" as user_id, row_number() OVER (ORDER BY res_users.karma DESC) AS karma_position
FROM %(user_from_clause)s
WHERE %(user_where_clause)s
) sub
WHERE sub.user_id IN %%s""" % {
'user_from_clause': user_from_clause,
'user_where_clause': user_where_clause or 'TRUE',
}
self.env.cr.execute(query, tuple(where_clause_params + [tuple(self.ids)]))
return self.env.cr.dictfetchall()
def _rank_changed(self):
"""
Method that can be called on a batch of users with the same new rank
"""
template = self.env.ref('gamification.mail_template_data_new_rank_reached', raise_if_not_found=False)
if template:
for u in self:
if u.rank_id.karma_min > 0:
template.send_mail(u.id, force_send=False, email_layout_xmlid='mail.mail_notification_light')
def _recompute_rank(self):
"""
The caller should filter the users on karma > 0 before calling this method
to avoid looping on every single users
Compute rank of each user by user.
For each user, check the rank of this user
"""
ranks = [{'rank': rank, 'karma_min': rank.karma_min} for rank in
self.env['gamification.karma.rank'].search([], order="karma_min DESC")]
# 3 is the number of search/requests used by rank in _recompute_rank_bulk()
if len(self) > len(ranks) * 3:
self._recompute_rank_bulk()
return
for user in self:
old_rank = user.rank_id
if user.karma == 0 and ranks:
user.write({'next_rank_id': ranks[-1]['rank'].id})
else:
for i in range(0, len(ranks)):
if user.karma >= ranks[i]['karma_min']:
user.write({
'rank_id': ranks[i]['rank'].id,
'next_rank_id': ranks[i - 1]['rank'].id if 0 < i else False
})
break
if old_rank != user.rank_id:
user._rank_changed()
def _recompute_rank_bulk(self):
"""
Compute rank of each user by rank.
For each rank, check which users need to be ranked
"""
ranks = [{'rank': rank, 'karma_min': rank.karma_min} for rank in
self.env['gamification.karma.rank'].search([], order="karma_min DESC")]
users_todo = self
next_rank_id = False
# wtf, next_rank_id should be a related on rank_id.next_rank_id and life might get easier.
# And we only need to recompute next_rank_id on write with min_karma or in the create on rank model.
for r in ranks:
rank_id = r['rank'].id
dom = [
('karma', '>=', r['karma_min']),
('id', 'in', users_todo.ids),
'|', # noqa
'|', ('rank_id', '!=', rank_id), ('rank_id', '=', False),
'|', ('next_rank_id', '!=', next_rank_id), ('next_rank_id', '=', False if next_rank_id else -1),
]
users = self.env['res.users'].search(dom)
if users:
users_to_notify = self.env['res.users'].search([
('karma', '>=', r['karma_min']),
'|', ('rank_id', '!=', rank_id), ('rank_id', '=', False),
('id', 'in', users.ids),
])
users.write({
'rank_id': rank_id,
'next_rank_id': next_rank_id,
})
users_to_notify._rank_changed()
users_todo -= users
nothing_to_do_users = self.env['res.users'].search([
('karma', '>=', r['karma_min']),
'|', ('rank_id', '=', rank_id), ('next_rank_id', '=', next_rank_id),
('id', 'in', users_todo.ids),
])
users_todo -= nothing_to_do_users
next_rank_id = r['rank'].id
if ranks:
lower_rank = ranks[-1]['rank']
users = self.env['res.users'].search([
('karma', '>=', 0),
('karma', '<', lower_rank.karma_min),
'|', ('rank_id', '!=', False), ('next_rank_id', '!=', lower_rank.id),
('id', 'in', users_todo.ids),
])
if users:
users.write({
'rank_id': False,
'next_rank_id': lower_rank.id,
})
def _get_next_rank(self):
""" For fresh users with 0 karma that don't have a rank_id and next_rank_id yet
this method returns the first karma rank (by karma ascending). This acts as a
default value in related views.
TDE FIXME in post-12.4: make next_rank_id a non-stored computed field correctly computed """
if self.next_rank_id:
return self.next_rank_id
elif not self.rank_id:
return self.env['gamification.karma.rank'].search([], order="karma_min ASC", limit=1)
else:
return self.env['gamification.karma.rank']
def get_gamification_redirection_data(self):
"""
        Hook for other modules to add redirect button(s) in the new-rank-reached mail.
        Must return a list of dictionaries, each including a url and a label.
        E.g. return [{'url': '/forum', 'label': 'Go to Forum'}]
"""
self.ensure_one()
return []
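# Hedged usage sketch (odoo shell; not part of the model). The record id
# and karma amount are assumptions for illustration:
#
#   user = env['res.users'].browse(2)
#   user.add_karma(25)   # write() logs a gamification.karma.tracking row
#                        # and _recompute_rank() adjusts rank_id/next_rank_id
#   user._get_karma_position([('karma', '>', 1)])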
| jeremiahyan/odoo | addons/gamification/models/res_users.py | Python | gpl-3.0 | 12,740 |
# $Id$
#
# Copyright (C) 2004-2012 Greg Landrum and Rational Discovery LLC
#
# @@ All Rights Reserved @@
# This file is part of the RDKit.
# The contents are covered by the terms of the BSD license
# which is included in the file license.txt, found at the root
# of the RDKit source tree.
#
""" uses pymol to interact with molecules
"""
from rdkit import Chem
import os, tempfile, sys
# Python3 compatibility
try:
from xmlrpclib import Server
except ImportError:
from xmlrpc.client import Server
_server=None
class MolViewer(object):
def __init__(self,host=None,port=9123,force=0,**kwargs):
global _server
if not force and _server is not None:
self.server=_server
else:
if not host:
host=os.environ.get('PYMOL_RPCHOST','localhost')
_server=None
serv = Server('http://%s:%d'%(host,port))
serv.ping()
_server = serv
self.server=serv
self.InitializePyMol()
def InitializePyMol(self):
""" does some initializations to set up PyMol according to our
tastes
"""
self.server.do('set valence,1')
self.server.do('set stick_rad,0.15')
self.server.do('set mouse_selection_mode,0')
self.server.do('set line_width,2')
self.server.do('set selection_width,10')
self.server.do('set auto_zoom,0')
def DeleteAll(self):
" blows out everything in the viewer "
self.server.deleteAll()
def DeleteAllExcept(self,excludes):
" deletes everything except the items in the provided list of arguments "
allNames = self.server.getNames('*',False)
for nm in allNames:
if nm not in excludes:
self.server.deleteObject(nm)
def LoadFile(self,filename,name,showOnly=False):
""" calls pymol's "load" command on the given filename; the loaded object
is assigned the name "name"
"""
if showOnly:
self.DeleteAll()
id = self.server.loadFile(filename,name)
return id
def ShowMol(self,mol,name='molecule',showOnly=True,highlightFeatures=[],
molB="",confId=-1,zoom=True,forcePDB=False, showSticks=False):
""" special case for displaying a molecule or mol block """
server = self.server
if not zoom:
self.server.do('view rdinterface,store')
if showOnly:
self.DeleteAll()
if not forcePDB and mol.GetNumAtoms()<999 :
if not molB:
molB = Chem.MolToMolBlock(mol,confId=confId)
mid = server.loadMolBlock(molB,name)
else:
if not molB:
molB = Chem.MolToPDBBlock(mol,confId=confId)
mid = server.loadPDB(molB,name)
if highlightFeatures:
nm = name+'-features'
conf = mol.GetConformer(confId)
for feat in highlightFeatures:
pt = [0.0,0.0,0.0]
for idx in feat:
loc = conf.GetAtomPosition(idx)
pt[0] += loc[0]/len(feat)
pt[1] += loc[1]/len(feat)
pt[2] += loc[2]/len(feat)
server.sphere(pt,0.2,(1,1,1),nm)
if zoom:
server.zoom('visible')
else:
self.server.do('view rdinterface,recall')
if showSticks: # show molecule in stick view
self.server.do('show sticks, {}'.format(name))
return mid
def GetSelectedAtoms(self,whichSelection=None):
" returns the selected atoms "
if not whichSelection:
sels = self.server.getNames('selections')
if sels:
whichSelection = sels[-1]
else:
whichSelection=None
if whichSelection:
items = self.server.index(whichSelection)
else:
items = []
return items
def SelectAtoms(self,itemId,atomIndices,selName='selection'):
" selects a set of atoms "
ids = '(id '
ids += ','.join(['%d'%(x+1) for x in atomIndices])
ids += ')'
cmd = 'select %s,%s and %s'%(selName,ids,itemId)
self.server.do(cmd)
def HighlightAtoms(self,indices,where,extraHighlight=False):
" highlights a set of atoms "
if extraHighlight:
idxText = ','.join(['%s and (id %d)'%(where,x) for x in indices])
self.server.do('edit %s'%idxText)
else:
idxText = ' or '.join(['id %d'%x for x in indices])
self.server.do('select selection, %s and (%s)'%(where,idxText))
def SetDisplayStyle(self,obj,style=''):
" change the display style of the specified object "
self.server.do('hide everything,%s'%(obj,))
if style:
self.server.do('show %s,%s'%(style,obj))
def SelectProteinNeighborhood(self,aroundObj,inObj,distance=5.0,
name='neighborhood',showSurface=False):
""" selects the area of a protein around a specified object/selection name;
optionally adds a surface to that """
self.server.do('select %(name)s,byres (%(aroundObj)s around %(distance)f) and %(inObj)s'%locals())
if showSurface:
self.server.do('show surface,%s'%name)
self.server.do('disable %s'%name)
def AddPharmacophore(self,locs,colors,label,sphereRad=0.5):
" adds a set of spheres "
self.server.do('view rdinterface,store')
self.server.resetCGO(label)
for i,loc in enumerate(locs):
self.server.sphere(loc,sphereRad,colors[i],label,1)
self.server.do('enable %s'%label)
self.server.do('view rdinterface,recall')
def SetDisplayUpdate(self,val):
if not val:
self.server.do('set defer_update,1')
else:
self.server.do('set defer_update,0')
def GetAtomCoords(self,sels):
" returns the coordinates of the selected atoms "
res = {}
for label,idx in sels:
coords = self.server.getAtomCoords('(%s and id %d)'%(label,idx))
res[(label,idx)] = coords
return res
def HideAll(self):
self.server.do('disable all')
def HideObject(self,objName):
self.server.do('disable %s'%objName)
def DisplayObject(self,objName):
self.server.do('enable %s'%objName)
def Redraw(self):
self.server.do('refresh')
def Zoom(self,objName):
self.server.zoom(objName)
def DisplayHBonds(self,objName,molName,proteinName,
molSelText='(%(molName)s)',
proteinSelText='(%(proteinName)s and not het)'):
" toggles display of h bonds between the protein and a specified molecule "
cmd = "delete %(objName)s;\n"
cmd += "dist %(objName)s," + molSelText+","+proteinSelText+",mode=2;\n"
cmd += "enable %(objName)s;"
cmd = cmd%locals()
self.server.do(cmd)
def DisplayCollisions(self,objName,molName,proteinName,distCutoff=3.0,
color='red',
molSelText='(%(molName)s)',
proteinSelText='(%(proteinName)s and not het)'):
" toggles display of collisions between the protein and a specified molecule "
cmd = "delete %(objName)s;\n"
cmd += "dist %(objName)s," + molSelText+","+proteinSelText+",%(distCutoff)f,mode=0;\n"
cmd += """enable %(objName)s
color %(color)s, %(objName)s"""
cmd = cmd%locals()
self.server.do(cmd)
def SaveFile(self,filename):
# PyMol will interpret the path to be relative to where it was started
# from. Remedy that.
if not filename:
raise ValueError('empty filename')
filename = os.path.abspath(filename)
self.server.save(filename)
def GetPNG(self,h=None,w=None,preDelay=0):
try:
import Image
except ImportError:
from PIL import Image
import time
if preDelay>0:
time.sleep(preDelay)
fd = tempfile.NamedTemporaryFile(suffix='.png',delete=False)
fd.close()
self.server.do('png %s'%fd.name)
time.sleep(0.2) # <- wait a short period so that PyMol can finish
    for i in range(10):
      try:
        img = Image.open(fd.name)
        break
      except IOError:
        time.sleep(0.1)
    else:
      raise IOError('could not read %s; PyMol may not have finished writing it' % fd.name)
    try:
      os.unlink(fd.name)
    except OSError:
      # PermissionError is a subclass of OSError on Python 3; this happens
      # sometimes on Windows. Not going to worry about it too deeply since
      # the files are in a temp dir anyway. This was github #936
      pass
fd=None
if h is not None or w is not None:
sz = img.size
if h is None:
h=sz[1]
if w is None:
w=sz[0]
if h<sz[1]:
frac = float(h)/sz[1]
w *= frac
w = int(w)
img=img.resize((w,h),True)
elif w<sz[0]:
frac = float(w)/sz[0]
h *= frac
h = int(h)
img=img.resize((w,h),True)
return img
| adalke/rdkit | rdkit/Chem/PyMol.py | Python | bsd-3-clause | 8,287 |
"""
The mailer module contains the Mailer class, a simple way to send emails.
"""
from __future__ import absolute_import
# let people use: from mailer import Mailer
# (instead of: from mailer.mailer import Mailer)
# pylint: disable-msg=W0403
from .mailer import Mailer
# pylint: enable-msg=W0403
| invenia/mailer | mailer/__init__.py | Python | mit | 296 |
# Copyright 2014 Rackspace Inc.
#
# Author: Tim Simmons <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from designate.backend.agent_backend import impl_fake
import designate.tests
from designate.tests.unit.agent import backends
class FakeAgentBackendTestCase(designate.tests.TestCase):
def setUp(self):
super(FakeAgentBackendTestCase, self).setUp()
self.CONF.set_override('listen', ['0.0.0.0:0'], 'service:agent')
self.backend = impl_fake.FakeBackend('foo')
def test_start_backend(self):
self.backend.start()
def test_stop_backend(self):
self.backend.stop()
def test_find_zone_serial(self):
self.backend.find_zone_serial('example.org.')
def test_create_zone(self):
zone = backends.create_dnspy_zone('example.org')
self.backend.create_zone(zone)
def test_update_zone(self):
zone = backends.create_dnspy_zone('example.org')
self.backend.update_zone(zone)
def test_delete_zone(self):
self.backend.delete_zone('example.org.')
| openstack/designate | designate/tests/unit/agent/backends/test_fake.py | Python | apache-2.0 | 1,576 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
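# Codeforces 535B: "lucky" numbers use only the digits 4 and 7. Reading the
# digits from least significant, a 4 contributes 1*2^i and a 7 contributes
# 2*2^i, which yields the number's 1-based rank among all lucky numbers.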
n = int(input().strip())
ans = 0
count = 0
while n:
v = n % 10
n //= 10
if v == 4:
ans += 1 * 2 ** count
else:
ans += 2 * 2 ** count
count += 1
print(ans)
| pybae/etc | CodeForces/535B.py | Python | mit | 240 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from horizon.browsers.base import ResourceBrowser
from horizon.browsers.views import ResourceBrowserView
assert ResourceBrowser
assert ResourceBrowserView
| tuskar/tuskar-ui | horizon/browsers/__init__.py | Python | apache-2.0 | 806 |
#!/usr/bin/env python
#-*- coding: utf-8 -*-
import sys
CONFIG_NAME = ".clang_complete"
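# This script wraps a compiler invocation for clang_complete: it harvests
# -I/-D/-include/-std/-ansi/-pedantic/-W flags from the command line, merges
# them into the .clang_complete file in the current directory, then executes
# the original command unchanged (e.g. cc_args.py gcc -Iinclude -DDEBUG foo.c).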
def readConfiguration():
try:
f = open(CONFIG_NAME, "r")
except IOError:
return []
result = []
for line in f.readlines():
strippedLine = line.strip()
if strippedLine:
result.append(strippedLine)
f.close()
return result
def writeConfiguration(lines):
f = open(CONFIG_NAME, "w")
f.writelines(lines)
f.close()
def parseArguments(arguments):
nextIsInclude = False
nextIsDefine = False
nextIsIncludeFile = False
includes = []
defines = []
include_file = []
options = []
for arg in arguments:
if nextIsInclude:
includes += [arg]
nextIsInclude = False
elif nextIsDefine:
defines += [arg]
nextIsDefine = False
elif nextIsIncludeFile:
include_file += [arg]
nextIsIncludeFile = False
elif arg == "-I":
nextIsInclude = True
elif arg == "-D":
nextIsDefine = True
elif arg[:2] == "-I":
includes += [arg[2:]]
elif arg[:2] == "-D":
defines += [arg[2:]]
elif arg == "-include":
nextIsIncludeFile = True
elif arg.startswith('-std='):
options.append(arg)
elif arg == '-ansi':
options.append(arg)
elif arg.startswith('-pedantic'):
options.append(arg)
elif arg.startswith('-W'):
options.append(arg)
result = list(map(lambda x: "-I" + x, includes))
result.extend(map(lambda x: "-D" + x, defines))
result.extend(map(lambda x: "-include " + x, include_file))
result.extend(options)
return result
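# e.g. parseArguments(['cc', '-Iinclude', '-D', 'DEBUG', '-std=c99', 'foo.c'])
# returns ['-Iinclude', '-DDEBUG', '-std=c99']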
def mergeLists(base, new):
result = list(base)
for newLine in new:
if newLine not in result:
result.append(newLine)
return result
configuration = readConfiguration()
args = parseArguments(sys.argv)
result = mergeLists(configuration, args)
writeConfiguration(map(lambda x: x + "\n", result))
import subprocess
proc = subprocess.Popen(sys.argv[1:])
ret = proc.wait()
if ret is None:
sys.exit(1)
sys.exit(ret)
# vim: set ts=2 sts=2 sw=2 expandtab :
| t0tec/dotfiles | vim/vim.symlink/bin/cc_args.py | Python | mit | 2,045 |
#!/usr/bin/env python
# coding=utf-8
"""
__init__.py
"""
__author__ = 'Rnd495' | SakuraSa/TenhouLoggerX | UI/__init__.py | Python | mit | 80 |
# ---------------------------------------------------------------------
# ------------------------- Plight Rising -----------------------------
# -----------------------------txtsd-----------------------------------
# ---------------------------------------------------------------------
"""Handles the account, login, and connections"""
# Imports -------------------------------------------------------------
import time
import datetime
import pickle
import re
import os
import sys
import requests
from configobj import ConfigObj
from validate import Validator
# End Imports ---------------------------------------------------------
class FRAccount:
configspec = ConfigObj('config.spec', encoding='UTF8', list_values=False)
config = ConfigObj('config.ini', configspec=configspec)
val = Validator()
test = config.validate(val, preserve_errors=True)
domain = 'http://flightrising.com'
headers = {
'User-Agent': config['account']['useragent'],
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
'Accept-Language': 'en-us,en;q=0.8',
'Accept-Encoding': 'gzip,deflate,sdch',
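        # requests omits a header whose value is None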
'DNT': '1' if config['account']['DNT'] else None
}
def __init__(self, un, pw, proxy=""):
self.un = un
self.pw = pw
self.proxy = proxy
self.referrer = None
self.result = None
self.ID = None
self.cookies = None
if os.path.isfile(self.un + '.bin'):
with open(self.un + '.bin', 'rb') as f:
self.session = pickle.load(f)
else:
self.session = requests.Session()
a = requests.adapters.HTTPAdapter(max_retries=0)
self.session.mount('http://', a)
self.session.headers = self.headers
if (self.proxy != ""):
self.session.proxies = {'http': 'http://' + self.proxy + '/'}
    def get(self, url, param={}, referer='', head=None):
        # default to None: the dict is mutated below, and a shared mutable
        # default argument would leak headers between calls
        head = {} if head is None else dict(head)
        if url[0] == '/':
            url = self.domain + url
        if referer != '':
            if referer[0] == '/':
                referer = self.domain + referer
            head['Referer'] = referer
        self.result = self.session.get(url, params=param, headers=head, timeout=90)
        return self.result
    def post(self, url, data={}, param={}, referer='', head=None):
        head = {} if head is None else dict(head)
        head['Origin'] = 'http://flightrising.com'
        if url[0] == '/':
            url = self.domain + url
        if referer != '':
            if referer[0] == '/':
                referer = self.domain + referer
            head['Referer'] = referer
        self.result = self.session.post(url, params=param, data=data, headers=head, timeout=90)
        return self.result
def login(self):
try:
self.result = self.session.get('http://www1.flightrising.com/', timeout=90)
if re.search(self.un, self.result.text):
self.result2 = self.session.get('http://flightrising.com/main.php',
params={
'p': 'hoard',
},
headers={
'Referer': 'http://www1.flightrising.com/'
},
timeout=90
)
if re.search(self.un, self.result2.text):
# self.ID = self.session.cookies['userid']
self.ID = re.search('clan-profile\/(\d+)">Clan Profile', self.result.text).group(1)
print(
'\n[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Already logged in!')
return True
# print('beforepoop')
token = re.search('"hidden" value="(.+?)"', self.result.text).group(1)
# print('afterpoop')
self.result = self.session.post('https://www1.flightrising.com/login/login',
headers={
# 'Referer': 'http://flightrising.com/main.php?p=coliseum',
'Referer': 'http://www1.flightrising.com/',
# 'Accept': '*/*',
# 'X-Requested-With': 'XMLHttpRequest',
'X-Request-Id': None,
# 'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
'Origin': 'http://www1.flightrising.com',
'Cache-Control': 'max-age=0'
},
data={
'_token': token,
'uname': self.un,
'remember': '1',
'pword': self.pw,
# 'dologin': 'Login'
},
timeout=90
)
# self.result2 = self.session.get('http://flightrising.com/main.php?p=coliseum',
# headers={
# 'Referer': 'http://flightrising.com/main.php?p=coliseum'
# },
# timeout=90
# )
# print(self.result.url)
# if re.search('Logging in...', self.result.text):
if re.search('badpw=true', self.result.url):
print('\n[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Bad Password Error.')
return False
if re.search('maint=true', self.result.url):
print('\n[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Maintenance Error.')
return False
if re.search(self.un, self.result.text):
# self.ID = self.result.cookies['userid']
self.ID = re.search('clan-profile\/(\d+)">Clan Profile', self.result.text).group(1)
print('\n[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Logged in!')
if os.path.isfile(self.un + '.bin'):
os.remove(self.un + '.bin')
with open(self.un + '.bin', 'wb') as f:
pickle.dump(self.session, f, pickle.HIGHEST_PROTOCOL)
return True
else:
print(
'\n[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Authorization Error.')
return False
except Exception as e:
print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Network Error.')
print(type(e))
print(e.args)
print(e)
time.sleep(10)
def getID(self):
return self.ID
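# Example usage (hypothetical credentials; expects config.ini alongside):
#     acct = FRAccount('someuser', 'somepass')
#     if acct.login():
#         print(acct.getID())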
| txtsd/Plight-Rising | classes/FRAccount.py | Python | gpl-3.0 | 7,336 |
from django import template
from django.utils.encoding import force_unicode
register = template.Library()
@register.filter
def recommends_geodata(user, geodata):
return user.profile.recommends(geodata)
| linventifatelier/cartoterra | cartoterra/templatetags/recommends_geodata.py | Python | agpl-3.0 | 208 |
""" Unit Tests for Beam Block's core/beam_block_radar.py module. """
import pyart
from numpy.testing import assert_almost_equal
import beam_block
def test_beam_block():
""" Unit test for the beam_block_radar.beam_block function. """
radar = pyart.io.read(beam_block.testing.SAMPLE_RADAR_NC_FILE)
tif_file = beam_block.testing.SAMPLE_TIF_FILE
beam_width = 1.0
radar_bb_data = pyart.io.read(
beam_block.testing.SAMPLE_RADAR_BLOCK_DATA_FILE)
pbb_existing = radar_bb_data.fields['partial_beam_block']['data']
cbb_existing = radar_bb_data.fields['cumulative_beam_block']['data']
pbb_all, cbb_all = beam_block.core.beam_block(
radar, tif_file, beam_width)
assert_almost_equal(pbb_all, pbb_existing, 3)
assert_almost_equal(cbb_all, cbb_existing, 3)
def test_beam_block_flags():
""" Unit test for the beam_block_radar.beam_block_flags function. """
radar_bb_data = pyart.io.read(
beam_block.testing.SAMPLE_RADAR_BLOCK_DATA_FILE)
pbb_existing = radar_bb_data.fields['partial_beam_block']['data']
cbb_existing = radar_bb_data.fields['cumulative_beam_block']['data']
pbb_flags_existing = radar_bb_data.fields[
'partial_beam_block_flags']['data']
cbb_flags_existing = radar_bb_data.fields[
'cumulative_beam_block_flags']['data']
no_block_thresh = 0.01
complete_block_thresh = 0.95
pbb_flags, cbb_flags = beam_block.core.beam_block_flags(
pbb_existing, cbb_existing, no_block_thresh,
complete_block_thresh)
assert_almost_equal(pbb_flags, pbb_flags_existing, 3)
assert_almost_equal(cbb_flags, cbb_flags_existing, 3)
| zssherman/beam_block | beam_block/core/tests/test_beam_block_radar.py | Python | bsd-2-clause | 1,653 |
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from . import framework
from . import core
__all__ = ['L1Decay', 'L2Decay', 'L1DecayRegularizer', 'L2DecayRegularizer']
def append_regularization_ops(parameters_and_grads, regularization=None):
"""Create and add backward regularization Operators
Creates and adds backward regularization operators in the BlockDesc.
This will add gradients of the regularizer function to the gradients
of the parameters and return these modified gradients. This is the
same as implementing weight decay in optimizers for regularization.
Args:
parameters_and_grads: A list of (parameters, gradients) pairs
that need to be regularized.
        regularization: A global regularizer. It is applied to a parameter
            only if that parameter does not set its own regularizer.
Returns:
list[(Variable, Variable)]: list of (parameters, gradients) \
pair with the regularized gradient
Raises:
Exception: Unknown regularization type
"""
params_and_grads = []
for param, grad in parameters_and_grads:
# If no gradient then we don't need to do anything
if grad is None:
params_and_grads.append((param, grad))
continue
with param.block.program._optimized_guard(
[param, grad]), framework.name_scope('regularization'):
regularization_term = None
if param.regularizer is not None:
# Add variable for regularization term in grad block
regularization_term = param.regularizer(param, grad, grad.block)
elif regularization is not None:
regularization_term = regularization(param, grad, grad.block)
# If no regularization specified, then we don't need to do anything
if regularization_term is None:
params_and_grads.append((param, grad))
continue
new_grad = grad
if grad.type == core.VarDesc.VarType.SELECTED_ROWS:
# FIXME(zcd): If the grad is SELECTED_ROWS, after regularization,
# the grad's type and name will be changed. But the gradient's name
# is used in ParallelExecutor Reduce mode, so I add a flag for
# the new_grad here.
new_grad = grad.block.create_var(
name=grad.name + core.kNewGradSuffix(),
dtype=param.dtype,
shape=param.shape,
lod_level=param.lod_level,
type=core.VarDesc.VarType.LOD_TENSOR)
grad.block.append_op(
type='sum',
inputs={"X": [grad, regularization_term]},
outputs={"Out": new_grad})
params_and_grads.append((param, new_grad))
return params_and_grads
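# Usage sketch (hypothetical variables): given (param, grad) pairs, e.g. from
# an optimizer's backward pass, attach an L2 penalty to every gradient:
#     params_grads = append_regularization_ops(params_grads, L2Decay(0.01))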
class WeightDecayRegularizer(object):
"""Base class for weight decay regularizers
Defines the common interface of weight-decay regularizers.
Weight-decay regularizers are added only during the backward
pass for faster regularization. They add operations to the network
that correspond to gradient of the regularization function.
Users should not use this class directly, but need to use one
of its implementations
"""
def __init__(self):
pass
def __call__(self, param, grad, block):
"""Add corresponding weight decay operations to the network
"""
raise NotImplementedError()
def __str__(self):
"""Debug string
"""
raise NotImplementedError()
class L2DecayRegularizer(WeightDecayRegularizer):
"""
    Implement the L2 Weight Decay Regularization, which helps prevent the model from over-fitting.
In the implementation, the formula of L2 Weight Decay Regularization is as follows:
.. math::
L2WeightDecay = reg\_coeff * parameter
Args:
regularization_coeff(float, optional): regularization coeff.
Default:0.0
Examples:
.. code-block:: python
import paddle.fluid as fluid
main_prog = fluid.Program()
startup_prog = fluid.Program()
with fluid.program_guard(main_prog, startup_prog):
data = fluid.layers.data(name='image', shape=[3, 28, 28], dtype='float32')
label = fluid.layers.data(name='label', shape=[1], dtype='int64')
hidden = fluid.layers.fc(input=data, size=128, act='relu')
prediction = fluid.layers.fc(input=hidden, size=10, act='softmax')
loss = fluid.layers.cross_entropy(input=prediction, label=label)
avg_loss = fluid.layers.mean(loss)
optimizer = fluid.optimizer.Adagrad(
learning_rate=1e-4,
regularization=fluid.regularizer.L2DecayRegularizer(
regularization_coeff=0.1))
optimizer.minimize(avg_loss)
"""
def __init__(self, regularization_coeff=0.0):
assert regularization_coeff is not None
super(L2DecayRegularizer, self).__init__()
self._regularization_coeff = regularization_coeff
def __call__(self, param, grad, block):
"""Add L2 weight decay ops to network
Adds L2 weight decay ops.
L2WeightDecay = reg_coeff * parameter
Args:
param: parameter variable for which regularization is applied
block: block in which variable is to be created
Returns:
new variable for weight decay
"""
assert isinstance(param, framework.Parameter)
assert isinstance(block, framework.Block)
if framework.in_dygraph_mode():
decay = block.create_var(dtype=param.dtype, shape=param.shape)
else:
decay = block.create_var(
dtype=param.dtype, shape=param.shape, lod_level=param.lod_level)
# Append Op to calculate decay
block.append_op(
type='scale',
inputs={"X": param},
outputs={"Out": decay},
attrs={"scale": self._regularization_coeff})
return decay
def __str__(self):
return "L2Decay, regularization_coeff=%f" % self._regularization_coeff
class L1DecayRegularizer(WeightDecayRegularizer):
"""
Implement the L1 Weight Decay Regularization, which encourages the weights to be sparse.
In the implementation, the formula of L1 Weight Decay Regularization is as follows:
.. math::
L1WeightDecay = reg\_coeff * sign(parameter)
Args:
regularization_coeff(float, optional): regularization coeff.
Default:0.0.
Examples:
.. code-block:: python
import paddle.fluid as fluid
main_prog = fluid.Program()
startup_prog = fluid.Program()
with fluid.program_guard(main_prog, startup_prog):
data = fluid.layers.data(name='image', shape=[3, 28, 28], dtype='float32')
label = fluid.layers.data(name='label', shape=[1], dtype='int64')
hidden = fluid.layers.fc(input=data, size=128, act='relu')
prediction = fluid.layers.fc(input=hidden, size=10, act='softmax')
loss = fluid.layers.cross_entropy(input=prediction, label=label)
avg_loss = fluid.layers.mean(loss)
optimizer = fluid.optimizer.Adagrad(
learning_rate=1e-4,
regularization=fluid.regularizer.L1DecayRegularizer(
regularization_coeff=0.1))
optimizer.minimize(avg_loss)
"""
def __init__(self, regularization_coeff=0.0):
assert regularization_coeff is not None
super(L1DecayRegularizer, self).__init__()
self._regularization_coeff = regularization_coeff
def __call__(self, param, grad, block):
"""Add L1 weight decay ops to network
Adds L1 weight decay ops.
L1WeightDecay = reg_coeff * sign(parameter)
Args:
param: parameter variable for which regularization is applied
block: block in which variable is to be created
Returns:
new variable for weight decay
"""
assert isinstance(param, framework.Parameter)
assert isinstance(block, framework.Block)
if framework.in_dygraph_mode():
decay = block.create_var(dtype=param.dtype, shape=param.shape)
else:
decay = block.create_var(
dtype=param.dtype, shape=param.shape, lod_level=param.lod_level)
# Append sign op
block.append_op(
type='sign', inputs={"X": param}, outputs={"Out": decay})
# Append scale op to the output of sign op
block.append_op(
type='scale',
inputs={"X": decay},
outputs={"Out": decay},
attrs={"scale": self._regularization_coeff})
return decay
def __str__(self):
return "L1Decay, regularization_coeff=%f" % self._regularization_coeff
# We short the class name, since users will use the regulaizer with the package
# name. The sample code:
#
# import paddle.fluid as fluid
#
# hidden = fluid.layers.fc(...,
# param_attr=fluid.regularizer.Xavier())
#
# It is no need to add a `Regularizer` as the class suffix
L1Decay = L1DecayRegularizer
L2Decay = L2DecayRegularizer
| chengduoZH/Paddle | python/paddle/fluid/regularizer.py | Python | apache-2.0 | 10,063 |
# -*- coding: Latin-1 -*-
"""Heap queue algorithm (a.k.a. priority queue).
Heaps are arrays for which a[k] <= a[2*k+1] and a[k] <= a[2*k+2] for
all k, counting elements from 0. For the sake of comparison,
non-existing elements are considered to be infinite. The interesting
property of a heap is that a[0] is always its smallest element.
Usage:
heap = [] # creates an empty heap
heappush(heap, item) # pushes a new item on the heap
item = heappop(heap) # pops the smallest item from the heap
item = heap[0] # smallest item on the heap without popping it
heapify(x) # transforms list into a heap, in-place, in linear time
item = heapreplace(heap, item) # pops and returns smallest item, and adds
# new item; the heap size is unchanged
Our API differs from textbook heap algorithms as follows:
- We use 0-based indexing. This makes the relationship between the
index for a node and the indexes for its children slightly less
obvious, but is more suitable since Python uses 0-based indexing.
- Our heappop() method returns the smallest item, not the largest.
These two make it possible to view the heap as a regular Python list
without surprises: heap[0] is the smallest item, and heap.sort()
maintains the heap invariant!
"""
# Original code by Kevin O'Connor, augmented by Tim Peters and Raymond Hettinger
__about__ = """Heap queues
[explanation by François Pinard]
Heaps are arrays for which a[k] <= a[2*k+1] and a[k] <= a[2*k+2] for
all k, counting elements from 0. For the sake of comparison,
non-existing elements are considered to be infinite. The interesting
property of a heap is that a[0] is always its smallest element.
The strange invariant above is meant to be an efficient memory
representation for a tournament. The numbers below are `k', not a[k]:
0
1 2
3 4 5 6
7 8 9 10 11 12 13 14
15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30
In the tree above, each cell `k' is topping `2*k+1' and `2*k+2'. In
a usual binary tournament we see in sports, each cell is the winner
over the two cells it tops, and we can trace the winner down the tree
to see all opponents s/he had. However, in many computer applications
of such tournaments, we do not need to trace the history of a winner.
To be more memory efficient, when a winner is promoted, we try to
replace it by something else at a lower level, and the rule becomes
that a cell and the two cells it tops contain three different items,
but the top cell "wins" over the two topped cells.
If this heap invariant is protected at all time, index 0 is clearly
the overall winner. The simplest algorithmic way to remove it and
find the "next" winner is to move some loser (let's say cell 30 in the
diagram above) into the 0 position, and then percolate this new 0 down
the tree, exchanging values, until the invariant is re-established.
This is clearly logarithmic on the total number of items in the tree.
By iterating over all items, you get an O(n ln n) sort.
A nice feature of this sort is that you can efficiently insert new
items while the sort is going on, provided that the inserted items are
not "better" than the last 0'th element you extracted. This is
especially useful in simulation contexts, where the tree holds all
incoming events, and the "win" condition means the smallest scheduled
time. When an event schedules other events for execution, they are
scheduled into the future, so they can easily go into the heap. So, a
heap is a good structure for implementing schedulers (this is what I
used for my MIDI sequencer :-).
Various structures for implementing schedulers have been extensively
studied, and heaps are good for this, as they are reasonably speedy,
the speed is almost constant, and the worst case is not much different
than the average case. However, there are other representations which
are more efficient overall, yet the worst cases might be terrible.
Heaps are also very useful in big disk sorts. You most probably all
know that a big sort implies producing "runs" (which are pre-sorted
sequences, whose size is usually related to the amount of CPU memory),
followed by merging passes for these runs; the merging is often
very cleverly organised[1]. It is very important that the initial
sort produces the longest runs possible. Tournaments are a good way
to achieve that. If, using all the memory available to hold a tournament, you
replace and percolate items that happen to fit the current run, you'll
produce runs which are twice the size of the memory for random input,
and much better for input fuzzily ordered.
Moreover, if you output the 0'th item on disk and get an input which
may not fit in the current tournament (because the value "wins" over
the last output value), it cannot fit in the heap, so the size of the
heap decreases. The freed memory could be cleverly reused immediately
for progressively building a second heap, which grows at exactly the
same rate the first heap is melting. When the first heap completely
vanishes, you switch heaps and start a new run. Clever and quite
effective!
In a word, heaps are useful memory structures to know. I use them in
a few applications, and I think it is good to keep a `heap' module
around. :-)
--------------------
[1] The disk balancing algorithms which are current, nowadays, are
more annoying than clever, and this is a consequence of the seeking
capabilities of the disks. On devices which cannot seek, like big
tape drives, the story was quite different, and one had to be very
clever to ensure (far in advance) that each tape movement will be the
most effective possible (that is, will best participate at
"progressing" the merge). Some tapes were even able to read
backwards, and this was also used to avoid the rewinding time.
Believe me, real good tape sorts were quite spectacular to watch!
From all times, sorting has always been a Great Art! :-)
"""
__all__ = ['heappush', 'heappop', 'heapify', 'heapreplace', 'nlargest',
'nsmallest']
from itertools import islice, repeat, count, imap, izip, tee
from operator import itemgetter
import bisect
def heappush(heap, item):
"""Push item onto heap, maintaining the heap invariant."""
heap.append(item)
_siftdown(heap, 0, len(heap)-1)
def heappop(heap):
"""Pop the smallest item off the heap, maintaining the heap invariant."""
lastelt = heap.pop() # raises appropriate IndexError if heap is empty
if heap:
returnitem = heap[0]
heap[0] = lastelt
_siftup(heap, 0)
else:
returnitem = lastelt
return returnitem
def heapreplace(heap, item):
"""Pop and return the current smallest value, and add the new item.
This is more efficient than heappop() followed by heappush(), and can be
more appropriate when using a fixed-size heap. Note that the value
returned may be larger than item! That constrains reasonable uses of
this routine unless written as part of a conditional replacement:
if item > heap[0]:
item = heapreplace(heap, item)
"""
returnitem = heap[0] # raises appropriate IndexError if heap is empty
heap[0] = item
_siftup(heap, 0)
return returnitem
def heapify(x):
"""Transform list into a heap, in-place, in O(len(heap)) time."""
n = len(x)
# Transform bottom-up. The largest index there's any point to looking at
# is the largest with a child index in-range, so must have 2*i + 1 < n,
# or i < (n-1)/2. If n is even = 2*j, this is (2*j-1)/2 = j-1/2 so
# j-1 is the largest, which is n//2 - 1. If n is odd = 2*j+1, this is
# (2*j+1-1)/2 = j so j-1 is the largest, and that's again n//2-1.
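    # For example, with n == 10 the last parent is index n//2 - 1 == 4,
    # whose only in-range child is index 9.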
for i in reversed(xrange(n//2)):
_siftup(x, i)
def nlargest(n, iterable):
"""Find the n largest elements in a dataset.
Equivalent to: sorted(iterable, reverse=True)[:n]
"""
it = iter(iterable)
result = list(islice(it, n))
if not result:
return result
heapify(result)
_heapreplace = heapreplace
sol = result[0] # sol --> smallest of the nlargest
for elem in it:
if elem <= sol:
continue
_heapreplace(result, elem)
sol = result[0]
result.sort(reverse=True)
return result
def nsmallest(n, iterable):
"""Find the n smallest elements in a dataset.
Equivalent to: sorted(iterable)[:n]
"""
if hasattr(iterable, '__len__') and n * 10 <= len(iterable):
# For smaller values of n, the bisect method is faster than a minheap.
# It is also memory efficient, consuming only n elements of space.
it = iter(iterable)
result = sorted(islice(it, 0, n))
if not result:
return result
insort = bisect.insort
pop = result.pop
los = result[-1] # los --> Largest of the nsmallest
for elem in it:
if los <= elem:
continue
insort(result, elem)
pop()
los = result[-1]
return result
# An alternative approach manifests the whole iterable in memory but
# saves comparisons by heapifying all at once. Also, saves time
# over bisect.insort() which has O(n) data movement time for every
# insertion. Finding the n smallest of an m length iterable requires
# O(m) + O(n log m) comparisons.
h = list(iterable)
heapify(h)
return map(heappop, repeat(h, min(n, len(h))))
# 'heap' is a heap at all indices >= startpos, except possibly for pos. pos
# is the index of a leaf with a possibly out-of-order value. Restore the
# heap invariant.
def _siftdown(heap, startpos, pos):
newitem = heap[pos]
# Follow the path to the root, moving parents down until finding a place
# newitem fits.
while pos > startpos:
parentpos = (pos - 1) >> 1
parent = heap[parentpos]
if parent <= newitem:
break
heap[pos] = parent
pos = parentpos
heap[pos] = newitem
# The child indices of heap index pos are already heaps, and we want to make
# a heap at index pos too. We do this by bubbling the smaller child of
# pos up (and so on with that child's children, etc) until hitting a leaf,
# then using _siftdown to move the oddball originally at index pos into place.
#
# We *could* break out of the loop as soon as we find a pos where newitem <=
# both its children, but turns out that's not a good idea, and despite that
# many books write the algorithm that way. During a heap pop, the last array
# element is sifted in, and that tends to be large, so that comparing it
# against values starting from the root usually doesn't pay (= usually doesn't
# get us out of the loop early). See Knuth, Volume 3, where this is
# explained and quantified in an exercise.
#
# Cutting the # of comparisons is important, since these routines have no
# way to extract "the priority" from an array element, so that intelligence
# is likely to be hiding in custom __cmp__ methods, or in array elements
# storing (priority, record) tuples. Comparisons are thus potentially
# expensive.
#
# On random arrays of length 1000, making this change cut the number of
# comparisons made by heapify() a little, and those made by exhaustive
# heappop() a lot, in accord with theory. Here are typical results from 3
# runs (3 just to demonstrate how small the variance is):
#
# Compares needed by heapify Compares needed by 1000 heappops
# -------------------------- --------------------------------
# 1837 cut to 1663 14996 cut to 8680
# 1855 cut to 1659 14966 cut to 8678
# 1847 cut to 1660 15024 cut to 8703
#
# Building the heap by using heappush() 1000 times instead required
# 2198, 2148, and 2219 compares: heapify() is more efficient, when
# you can use it.
#
# The total compares needed by list.sort() on the same lists were 8627,
# 8627, and 8632 (this should be compared to the sum of heapify() and
# heappop() compares): list.sort() is (unsurprisingly!) more efficient
# for sorting.
def _siftup(heap, pos):
endpos = len(heap)
startpos = pos
newitem = heap[pos]
# Bubble up the smaller child until hitting a leaf.
childpos = 2*pos + 1 # leftmost child position
while childpos < endpos:
# Set childpos to index of smaller child.
rightpos = childpos + 1
if rightpos < endpos and heap[rightpos] <= heap[childpos]:
childpos = rightpos
# Move the smaller child up.
heap[pos] = heap[childpos]
pos = childpos
childpos = 2*pos + 1
# The leaf at pos is empty now. Put newitem there, and bubble it up
# to its final resting place (by sifting its parents down).
heap[pos] = newitem
_siftdown(heap, startpos, pos)
# If available, use C implementation
try:
from _heapq import heappush, heappop, heapify, heapreplace, nlargest, nsmallest
except ImportError:
pass
# Extend the implementations of nsmallest and nlargest to use a key= argument
_nsmallest = nsmallest
def nsmallest(n, iterable, key=None):
"""Find the n smallest elements in a dataset.
Equivalent to: sorted(iterable, key=key)[:n]
"""
if key is None:
return _nsmallest(n, iterable)
in1, in2 = tee(iterable)
it = izip(imap(key, in1), count(), in2) # decorate
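    # count() supplies a unique tie-breaker so the underlying records are
    # never compared directly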
result = _nsmallest(n, it)
return map(itemgetter(2), result) # undecorate
_nlargest = nlargest
def nlargest(n, iterable, key=None):
"""Find the n largest elements in a dataset.
Equivalent to: sorted(iterable, key=key, reverse=True)[:n]
"""
if key is None:
return _nlargest(n, iterable)
in1, in2 = tee(iterable)
it = izip(imap(key, in1), count(), in2) # decorate
result = _nlargest(n, it)
return map(itemgetter(2), result) # undecorate
if __name__ == "__main__":
# Simple sanity test
heap = []
data = [1, 3, 5, 7, 9, 2, 4, 6, 8, 0]
for item in data:
heappush(heap, item)
sort = []
while heap:
sort.append(heappop(heap))
print sort
| MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-2.5/Lib/heapq.py | Python | mit | 14,339 |
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
__version__ = pbr.version.VersionInfo(
'badger').version_string()
| sbauza/badger | badger/__init__.py | Python | apache-2.0 | 662 |
#@PydevCodeAnalysisIgnore
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A binary to train CIFAR-10 using a single GPU.
Accuracy:
cifar10_train.py achieves ~86% accuracy after 100K steps (256 epochs of
data) as judged by cifar10_eval.py.
Speed: With batch_size 128.
System | Step Time (sec/batch) | Accuracy
------------------------------------------------------------------
1 Tesla K20m | 0.35-0.60 | ~86% at 60K steps (5 hours)
1 Tesla K40m | 0.25-0.35 | ~86% at 100K steps (4 hours)
Usage:
Please see the tutorial and website for how to download the CIFAR-10
data set, compile the program and train the model.
http://tensorflow.org/tutorials/deep_cnn/
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from datetime import datetime
import time
import tensorflow as tf
import cifar10
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string('train_dir', '/tmp/cifar10_train',
"""Directory where to write event logs """
"""and checkpoint.""")
tf.app.flags.DEFINE_integer('max_steps', 1000000,
"""Number of batches to run.""")
tf.app.flags.DEFINE_boolean('log_device_placement', False,
"""Whether to log device placement.""")
tf.app.flags.DEFINE_integer('log_frequency', 10,
"""How often to log results to the console.""")
def train():
"""Train CIFAR-10 for a number of steps."""
with tf.Graph().as_default():
global_step = tf.contrib.framework.get_or_create_global_step()
# Get images and labels for CIFAR-10.
# Force input pipeline to CPU:0 to avoid operations sometimes ending up on
# GPU and resulting in a slow down.
with tf.device('/cpu:0'):
images, labels = cifar10.distorted_inputs()
# Build a Graph that computes the logits predictions from the
# inference model.
logits = cifar10.inference(images)
# Calculate loss.
loss = cifar10.loss(logits, labels)
# Build a Graph that trains the model with one batch of examples and
# updates the model parameters.
train_op = cifar10.train(loss, global_step)
class _LoggerHook(tf.train.SessionRunHook):
"""Logs loss and runtime."""
def begin(self):
self._step = -1
self._start_time = time.time()
def before_run(self, run_context):
self._step += 1
return tf.train.SessionRunArgs(loss) # Asks for loss value.
def after_run(self, run_context, run_values):
if self._step % FLAGS.log_frequency == 0:
current_time = time.time()
duration = current_time - self._start_time
self._start_time = current_time
loss_value = run_values.results
examples_per_sec = FLAGS.log_frequency * FLAGS.batch_size / duration
sec_per_batch = float(duration / FLAGS.log_frequency)
format_str = ('%s: step %d, loss = %.2f (%.1f examples/sec; %.3f '
'sec/batch)')
print (format_str % (datetime.now(), self._step, loss_value,
examples_per_sec, sec_per_batch))
with tf.train.MonitoredTrainingSession(
checkpoint_dir=FLAGS.train_dir,
hooks=[tf.train.StopAtStepHook(last_step=FLAGS.max_steps),
tf.train.NanTensorHook(loss),
_LoggerHook()],
config=tf.ConfigProto(
log_device_placement=FLAGS.log_device_placement)) as mon_sess:
while not mon_sess.should_stop():
mon_sess.run(train_op)
def main(argv=None): # pylint: disable=unused-argument
cifar10.maybe_download_and_extract()
if tf.gfile.Exists(FLAGS.train_dir):
tf.gfile.DeleteRecursively(FLAGS.train_dir)
tf.gfile.MakeDirs(FLAGS.train_dir)
train()
if __name__ == '__main__':
tf.app.run()
| rossumai/keras-multi-gpu | keras_tf_multigpu/examples/avolkov1/cifar/tf_examples/cifar10_train.py | Python | mit | 4,529 |
# Copyright (C) 2014 Codethink Limited
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
'Find information about mounts'
import shlex
import subprocess
from .mount_commands import mount_cmd, umount_cmd, findmnt_cmd
__all__ = ('search_fields', 'find_mounts')
search_fields = [
'SOURCE', # source device
'TARGET', # mountpoint
'FSTYPE', # filesystem type
'OPTIONS', # all mount options
'VFS-OPTIONS', # VFS specific mount options
'FS-OPTIONS', # FS specific mount options
'LABEL', # filesystem label
'UUID', # filesystem UUID
'PARTLABEL', # partition label
'PARTUUID', # partition UUID
'MAJ:MIN', # major:minor device number
'FSROOT', # filesystem root
'TID', # task ID
'ID', # mount ID
'OPT-FIELDS', # optional mount fields
'PROPAGATION', # VFS propagation flags
]
def find_mounts(root=None, tab_file=None, task=None, fields=None,
recurse=False, runcmd=findmnt_cmd):
argv = ['--pairs', '--nofsroot']
if task is not None:
argv.extend(('--task', str(task)))
if tab_file is not None:
argv.extend(('--tab-file', str(tab_file)))
if fields is not None:
argv.extend(('--output', ','.join(fields)))
if recurse:
if root is None:
raise ValueError('recurse passed without root')
argv.append('--submounts')
if root is not None:
argv.append(root)
o = runcmd(argv)
mount_list = []
for line in o.splitlines():
matches = dict()
for pair in shlex.split(line):
key, value = pair.split('=', 1)
matches[key] = value.decode('string_escape')
mount_list.append(matches)
return mount_list
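# Usage sketch: list the source and target of every mount below /home:
#     for m in find_mounts(root='/home', fields=['SOURCE', 'TARGET'],
#                          recurse=True):
#         print m['SOURCE'], m['TARGET']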
| CodethinkLabs/online-atomic-update | migratelib/findmnt.py | Python | lgpl-2.1 | 2,345 |
import sys, csv, string
def generate_R_input(report_path, output_path):
confidence_values = []
report_file = open(report_path, 'r')
first_line = True
    for line in report_file:
        if first_line:
            first_line = False
            continue
        line = line.strip().split(',')
        if line[5] == 'Yes':
            confidence_values.append(float(line[6]))
        elif line[5] == 'No':
            confidence_values.append(1 - float(line[6]))
    output_file = open(output_path, 'w')
    output_file.write('Confidence' + '\n')
    for confid in confidence_values:
        output_file.write(str(confid) + '\n')
    output_file.close()
if __name__ =="__main__":
_report_path = sys.argv[1]
_output_path = sys.argv[2]
generate_R_input(_report_path, _output_path)
| emvecchi/mss | src/utils/crowdflower/retrieve_stats.py | Python | apache-2.0 | 925 |
# Top settings file for development
from .base import *
COMPRESS_ENABLED = False
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = ['localhost', '0.0.0.0']
# Including a default secret key since this is just for development
SECRET_KEY = env_or_default('SECRET_KEY', u'dipps!+sq49#e2k#5^@4*^qn#8s83$kawqqxn&_-*xo7twru*8')
| njl/pycon | pycon/settings/dev.py | Python | bsd-3-clause | 325 |
import io, os, re, subprocess, sys
#@snip/WorkDir[
#@requires: mod:os
class WorkDir(object):
'''A context manager for changing to a different working directory. The
original working directory is restored upon exiting the context.'''
def __init__(self, path):
'''Initialize a context for changing the working directory.'''
self.path = path
def __enter__(self):
'''Change the working directory and return the path to the previous
working directory.'''
self.prevdir = os.getcwd()
os.chdir(self.path)
return self.prevdir
def __exit__(self, type, value, traceback):
        '''Restore the original working directory.'''
os.chdir(self.prevdir)
#@]
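# Example:
#     with WorkDir("/tmp") as prevdir:
#         ...  # cwd is now /tmp; restored to prevdir on exit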
#@snip/load_file[
#@requires: mod:io
def load_file(filename, binary=False, encoding=None,
errors=None, newline=None):
'''Read the contents of a file.'''
mode = "r" + ("b" if binary else "")
with io.open(filename, mode, encoding=encoding,
errors=errors, newline=newline) as stream:
return stream.read()
#@]
def pexec(args):
subprocess.check_call(["sh", "-c", 'exec "$@"', "-"] + list(args))
def escape_newlines(s):
return s.replace("\n", " \\\n")
def replace_ident(s, old, new):
return re.sub(r"\b{0}\b".format(re.escape(old)), lambda _: new, s)
def prepend_prefix(s,
prefix_macro="Prefix",
public_pattern="",
exclude_pattern="",
private_subprefix="priv"):
idents = get_global_idents(s)
for ident in idents:
if re.match("({0})$".format(exclude_pattern), ident):
continue
subprefix = private_subprefix
if re.match("({0})$".format(public_pattern), ident):
subprefix = ""
s = replace_ident(
s,
ident,
"cal_cat({0}, {1}_{2})".format(prefix_macro, subprefix, ident),
)
return s
def get_all_files(dir):
for dn, _, bns in os.walk(dir):
for bn in bns:
fn = os.path.join(dn, bn)
if os.path.exists(fn):
yield fn
class FileGenerator(object):
def __init__(self, filename):
self.filename = filename
self.module = {}
with open(filename) as f:
exec(compile(f.read(), filename, "exec"), self.module)
self.deps = self.module["__deps__"]
self.main = self.module["main"]
class Preprocessor(object):
def __init__(self, filename, stream=None):
self.filename = os.path.abspath(filename)
self.stream = stream
self.attributes = {
"deps": [],
}
def preprocess(self):
if self.stream:
write = self.stream.write
else:
write = lambda s: None
s = load_file(self.filename)
with WorkDir(os.path.dirname(self.filename)):
write(self._process_directives(s))
def bench(self):
self.attributes["bench"] = True
def public(self):
self.attributes["public"] = True
def depends(self, deps):
self.attributes["deps"].extend(deps)
def _process_directives_replacement(self, _m):
_body, = _m.groups()
if _body.lstrip().startswith("#"):
return _m.group(0)
stream, sys.stdout = sys.stdout, io.StringIO()
locals = {"self": self}
try:
exec(_body, {}, locals)
finally:
stream, sys.stdout = sys.stdout, stream
return stream.getvalue()
def _process_directives(self, s):
return re.sub(
r"(?s)/\*@(.*?)\*/", self._process_directives_replacement, s)
class TemplateMaker(object):
def __init__(self,
impl_filename,
prefix_macro="Prefix",
public_pattern="",
exclude_pattern="",
private_subprefix="priv",
params=[]):
self.impl_filename = impl_filename
self.prefix_macro = prefix_macro
self.public_pattern = public_pattern
self.exclude_pattern = exclude_pattern
self.private_subprefix = private_subprefix
self.params = params
self.identifiers_filename = "{0}_ids.txt".format(
os.path.splitext(impl_filename)[0])
self.deps = [self.identifiers_filename]
def main(self):
s = []
params = ["Prefix"] + list(self.params)
s.append("/*@self.public()*/\n")
for param in params:
param = param.split("=", 1)
if len(param) == 1:
param, = param
default = None
else:
param, default = param
s.append("#ifndef {0}\n".format(param))
if default is None:
s.append("#error Macro must be defined before "
"including this header: {0}\n".format(param))
else:
s.append("#define {0} {1}\n".format(param, default))
s.append("#endif\n")
idents = load_file(self.identifiers_filename).split()
for ident in idents:
if re.match("({0})$".format(self.exclude_pattern), ident):
continue
subprefix = self.private_subprefix
if re.match("({0})$".format(self.public_pattern), ident):
subprefix = ""
s.append("#define {0} {1}\n".format(
ident,
"cal_cat({0}, {1}_{2})"
.format(self.prefix_macro, subprefix, ident),
))
s.append('#include "{0}"\n'.format(self.impl_filename))
for ident in idents:
if re.match("({0})$".format(self.exclude_pattern), ident):
continue
s.append("#undef {0}\n".format(ident))
for param in params:
s.append("#undef {0}\n".format(param.split("=", 1)[0]))
s = "".join(s).rstrip()
sys.stdout.write(s)
sys.stdout.write("\n")
| Rufflewind/calico | tools/utils.py | Python | mit | 5,989 |
from subprocess import Popen, PIPE
import shlex
import httplib as hlib
import urllib as ulib
import xml.etree.ElementTree as ET
import astropy.io.votable as VT
import warnings
def queryVizier(fitsTableName,format='asu-fits',source='USNO-B1.0',pos='104.9566125,14.2341555',radius=2):
'''
documentation: http://cdsarc.u-strasbg.fr/doc/asu-summary.htx
'''
print 'loading Vizier fits table'
conn2 = hlib.HTTPConnection('vizier.u-strasbg.fr')
params2 = ulib.urlencode({'-source':source,'-c':pos,'-c.rm':radius,'-out':'_RA _DE'})
address = '/viz-bin/'+format
conn2.request('POST',address,params2)
resp2 = conn2.getresponse()
fitsTable = open(fitsTableName,'w')
fitsTable.write(resp2.read())
conn2.close()
fitsTable.close()
print 'done'
def queryFitsImage(fitsImage,voTableName,pos='104.9566125,14.2341555'):
'''
retrieve fits image from 2MASS http://irsa.ipac.caltech.edu/applications/2MASS/IM/docs/siahelp.html
    note that fitsImage has to be a gz file in order to be extracted and opened correctly
'''
print 'loading 2MASS fits images'
conn1 = hlib.HTTPConnection('irsa.ipac.caltech.edu')
params1 = ulib.urlencode({'POS':pos,'INTERSECT':'ENCLOSED'})
conn1.request('POST','/cgi-bin/2MASS/IM/nph-im_sia',params1)
resp1 = conn1.getresponse()
xml = open(voTableName,'w')
xml.write(resp1.read())
conn1.close()
xml.close()
#parsing the votable and download the selected image
tree = ET.parse(voTableName)
root = tree.getroot()
    # map all null values to '0', which fixes the error when opening the votable with astropy
    for name in root.iter('TD'):
        if name.text == 'null':
            name.text = '0'
tree.write(voTableName)
#warning suppression when open votable
print 'downloading fits image...'
warnings.resetwarnings()
warnings.filterwarnings('ignore', category=Warning, append=True)
voTable = VT.parse(voTableName)
warnings.resetwarnings()
warnings.filterwarnings('always', category=Warning, append=True)
#raise warning if the image cannot be found
try:
table = voTable.get_first_table()
except:
raise ValueError('Cannot Locate the fits image!')
imageUrls = table.array['download']
print imageUrls
download = imageUrls[0]
print download
ulib.urlretrieve(download,fitsImage)
print 'done'
if __name__ == '__main__':
pos = 'arp147'
queryVizier('test.fits',pos=pos)
queryFitsImage('test_image.fits','test_vo.xml',pos=pos)
| bmazin/ARCONS-pipeline | astrometry/guide-centroid/catalog.py | Python | gpl-2.0 | 2,390 |
#!/usr/bin/python3
import sys
sys.path.append('/usr/lib/python3.5/site-packages')
import numpy as np
from pylamp_const import *
#from bokeh.plotting import figure, output_file, show, save
import vtk
#import gr
from scipy.interpolate import griddata
import os
import glob
###
# program to convert pylamp output (npz files) to plots or vtk files
#
# usage: python3 pylamp_post.py [SCREEN_TRAC_TEMP|VTKTRAC|VTKGRID] {required filenames ...}
#
# currently only the option "VTKTRAC" works
###
POSTTYPES = {
'SCREEN_TRAC_TEMP': 1,
'VTKTRAC': 2,
'VTKGRID': 3
}
if __name__ == "__main__":
if len(sys.argv) < 2:
raise Exception("Needs at least one argument: type")
posttype = POSTTYPES[sys.argv[1]]
if posttype == POSTTYPES['SCREEN_TRAC_TEMP']:
if len(sys.argv) < 4:
raise Exception("Needed arguments: type, tracsfile, gridfile")
tracsdatafile = sys.argv[3]
#griddatafile = sys.argv[2]
tracsdata = np.load(tracsdatafile)
#griddata = np.load(griddatafile)
tr_x = tracsdata["tr_x"]
tr_f = tracsdata["tr_f"]
#gridx = griddata["gridx"]
#gridz = griddata["gridz"]
stride = 1000
maxtmp = np.max(tr_f[::stride, TR_TMP])
mintmp = np.min(tr_f[::stride, TR_TMP])
gr.setviewport(0.1, 0.95, 0.1, 0.95)
gr.setwindow(np.min(tr_x[::stride, IX]), np.max(tr_x[::stride, IX]), np.min(tr_x[::stride, IZ]), np.max(tr_x[::stride, IZ]))
gr.setspace(np.min(tr_f[::stride, TR_TMP]), np.max(tr_f[::stride, TR_TMP]), 0, 90)
gr.setmarkersize(1)
gr.setmarkertype(gr.MARKERTYPE_SOLID_CIRCLE)
gr.setcharheight(0.024)
gr.settextalign(2, 0)
gr.settextfontprec(3, 0)
x = tr_x[::stride, IX]
y = tr_x[::stride, IZ]
z = tr_f[::stride, TR_TMP]
grid = np.mgrid[0:1:200j, 0:1:66j]
Z = griddata((x, y), z, (grid[0], grid[1]), method='cubic')
X = np.unique(grid[0].flatten())
Y = np.unique(grid[1].flatten())
#X, Y, Z = gr.gridit(x, y, z, 200, 200)
H = np.linspace(mintmp, maxtmp, 20)
gr.surface(X, Y, Z, 5)
gr.contour(X, Y, H, Z, 0)
gr.polymarker(x, y)
gr.axes(50e3, 50e3, 0, 0, 0, 0, 10e3)
gr.updatews()
elif posttype == POSTTYPES['VTKTRAC']:
if len(sys.argv) < 3:
raise Exception("Needed arguments: type, tracfile(s)")
trfields = [TR_TMP, TR_RHO, TR_ETA, TR_MRK, TR_MAT, TR__ID]
trfieldnames = ["temp", "dens", "visc", "mark", "mat", "id"]
if os.path.isfile(sys.argv[2]):
fileslist = [sys.argv[2]]
else:
fileslist = glob.glob(sys.argv[2])
fileslist.sort()
for tracsdatafile in fileslist:
if os.path.isfile(tracsdatafile + ".vtp"):
print("skip " + tracsdatafile)
continue
else:
print(tracsdatafile)
tracsdata = np.load(tracsdatafile)
tr_v_present = True
tr_x = tracsdata["tr_x"]
tr_f = tracsdata["tr_f"]
try:
tr_v = tracsdata["tr_v"]
except KeyError:
tr_v_present = False
N = tr_f[:, TR_TMP].shape[0]
stride = 1
vpoints = vtk.vtkPoints()
vvertices = vtk.vtkCellArray()
for i in range(N):
id = vpoints.InsertNextPoint(tr_x[i, IX], tr_x[i, IZ], 0*tr_x[i, IX])
vvertices.InsertNextCell(1)
vvertices.InsertCellPoint(id)
polydata = vtk.vtkPolyData()
polydata.SetPoints(vpoints)
polydata.SetVerts(vvertices)
for ifield in range(len(trfields)):
trac_array = vtk.vtkDoubleArray()
trac_array.SetNumberOfComponents(1)
trac_array.SetNumberOfTuples(N)
for i in range(N):
trac_array.SetValue(i, tr_f[i, trfields[ifield]])
trac_array.SetName(trfieldnames[ifield])
polydata.GetPointData().AddArray(trac_array)
polydata.Modified()
# special field, velocity
if tr_v_present:
trac_array = vtk.vtkDoubleArray()
trac_array.SetNumberOfComponents(3)
trac_array.SetNumberOfTuples(N)
for i in range(N):
trac_array.SetTuple3(i, tr_v[i, IX], tr_v[i, IZ], tr_v[i, IZ]*0.0)
trac_array.SetName("velo")
polydata.GetPointData().AddArray(trac_array)
polydata.Modified()
if vtk.VTK_MAJOR_VERSION <= 5:
polydata.Update()
trac_writer = vtk.vtkXMLPolyDataWriter()
trac_writer.SetDataModeToBinary()
trac_writer.SetCompressorTypeToZLib()
trac_writer.SetFileName(tracsdatafile + ".vtp")
if vtk.VTK_MAJOR_VERSION <= 5:
trac_writer.SetInput(polydata)
else:
trac_writer.SetInputData(polydata)
trac_writer.Write()
elif posttype == POSTTYPES['VTKGRID']:
if len(sys.argv) < 3:
raise Exception("Needed arguments: type, gridfile(s)")
grfields = ["temp", "velz", "velx", "pres", "rho"]
grfieldnames = ["temp", "velz", "velx", "pres", "rho"]
if os.path.isfile(sys.argv[2]):
fileslist = [sys.argv[2]]
else:
fileslist = glob.glob(sys.argv[2])
fileslist.sort()
for griddatafile in fileslist:
if os.path.isfile(griddatafile + ".vtk"):
print("skip " + griddatafile)
continue
else:
print(griddatafile)
griddata = np.load(griddatafile)
grid = [None, None]
grid[IZ] = griddata["gridz"]
grid[IX] = griddata["gridx"]
N = np.prod(griddata[grfields[0]].shape)
Ng = len(grid[IZ]) * len(grid[IX])
assert N == Ng
stride = 1
# VTK coords xyz are coords xzy in PyLamp (vtk z = pylamp y = 0 always, 2D)
arrCoords = [vtk.vtkDoubleArray() for i in range(3)]
for i in grid[IX]:
arrCoords[IX].InsertNextValue(i)
for i in grid[IZ]:
arrCoords[IZ].InsertNextValue(i)
arrCoords[IY].InsertNextValue(0)
vtkgrid = vtk.vtkRectilinearGrid()
vtkgrid.SetDimensions(len(grid[IX]), len(grid[IZ]), 1)
vtkgrid.SetXCoordinates(arrCoords[IX])
vtkgrid.SetYCoordinates(arrCoords[IZ])
vtkgrid.SetZCoordinates(arrCoords[IY])
for ifield in range(len(grfields)):
try:
dummy = griddata[grfields[ifield]]
except KeyError:
# variable does not exist in output
continue
grid_array = vtk.vtkDoubleArray()
grid_array.SetNumberOfComponents(1)
grid_array.SetNumberOfTuples(N)
for i in range(len(grid[IZ])):
for j in range(len(grid[IX])):
grid_array.SetTuple1(i*len(grid[IX]) + j, griddata[grfields[ifield]][i, j])
grid_array.SetName(grfieldnames[ifield])
vtkgrid.GetPointData().AddArray(grid_array)
grid_writer = vtk.vtkRectilinearGridWriter()
grid_writer.SetFileName(griddatafile + ".vtk")
if vtk.VTK_MAJOR_VERSION <= 5:
grid_writer.SetInput(vtkgrid)
else:
grid_writer.SetInputData(vtkgrid)
grid_writer.Write()
else:
raise Exception("Undefined plot type")
| larskaislaniemi/PyLamp | pylamp_post.py | Python | bsd-2-clause | 7,910 |
import numpy as np
from scipy.interpolate import interp1d
from pandas import DataFrame, read_csv
from shapely.geometry import LineString, Polygon, LinearRing, Point
from warnings import warn
import matplotlib.pyplot as plt
try: # Python 2.7
from itertools import izip
except ImportError: # Python 3.x
izip = zip
def SVF_discretized(azimuth, horizon, aspect, dip, increment=2):
"""Calculate the sky view factor of a tilted plane from discretized horizon data.

Example
-------
az1 = np.array(range(0, 360, 10))
hor1 = az1 * 0 + 50
SVF_discretized(az1, hor1, 130, 35)
"""
# rotate horizon coordinates
rt = rotate_horizon(azimuth, horizon, aspect, dip)
# for overhanging points, flip the azimuth
overhanging = test_overhang(rt[0], rt[1])
rt[0][overhanging] = (180 + rt[0][overhanging]) % 360
rt[1][overhanging] = 180 - rt[1][overhanging]
obs = test_obscured(rt[0], rt[1], increment)
xx = np.append(rt[0], obs)
yy = np.append(rt[1], (obs * 0 + 90))
yy = yy[np.argsort(xx)]
xx = xx[np.argsort(xx)]
# skyview:
# Create spline equation to obtain horizon(azimuth) for any azimuth
# add endpoints on either side of sequence so interpolation is good
xx = np.concatenate((xx[-2:] - 360, xx, xx[:2] + 360))
yy = np.concatenate((yy[-2:], yy, yy[:2]))
FF = interp1d(x=xx, y=yy)
F_sky = svf_helbig_2009(FF, increment)
return(F_sky)
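# Discretized Helbig (2009) relation implemented by svf_helbig_2009() below:
# F_sky = (delta_phi / 360) * sum_i cos^2(theta_h(phi_i)),
# with the sign of cos^2 flipped where theta_h > 90 (overhanging terrain).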
def svf_helbig_2009(f, delta_phi=1):
"""Calculate sky view factor by integrating over all azimuths
Uses the discretized form of the continuum equation
for sky view factor (eq 9.3 from Helbig, 2009). The integral form of the
sky view equation is given in equation 4.41 of the same thesis.
Parameters
----------
f : function
function relating azimuth to horizon angle
delta_phi : int
discretized azimuth width in degrees
Returns
-------
float
Sky view factor between 0 and 1
Examples
--------
>>> delta_phi = 1
>>> f = lambda phi : 0 * phi
>>> svf_helbig_2009(f, delta_phi)
1.0
>>> f = lambda phi : 0 * phi + 30
>>> svf_helbig_2009(f, delta_phi)
0.75
"""
# Measure horizon at evenly spaced interval using spline
phi = np.array(range(0, 360, delta_phi))
theta_h = f(phi)
# Check: don't allow horizons > 90 degrees that are opposite each other
# This might not be a problem.
theta_h = np.array([90 if y > 90 and f((x + 180) % 360) > 90 else y
for (x, y) in izip(phi, theta_h)])
#don't allow negative horizon angles
theta_h = np.max(np.row_stack((theta_h, theta_h * 0)), axis=0)
# calculate cos2(theta)
cos2theta = np.power(np.cos(np.radians(theta_h)), 2)
# To deal with overhanging terrain, take negative cos2() if the horizon
# is greater than 90. This might be wrong... But otherwise overhanging
# terrain increases the skyview factor
S = [y if x <= 90 else -y for (x, y) in izip(theta_h, cos2theta)]
F_sky = (delta_phi / 360.) * np.sum(S)
F_sky = np.round(F_sky, decimals = 5)
return(F_sky)
def annulus(r_in, r_out):
""" Create an annulus
Parameters
----------
r_in, r_out : float
inner and outer radius of annulus
Returns
-------
Shapely Polygon object
Example
-------
>>> A = annulus(1,2)
>>> A.area
9.409645471637816
"""
C1 = Point(0,0).buffer(r_in)
C2 = Point(0,0).buffer(r_out)
return C2.difference(C1)
# Steyn (1980) - method of annuli
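# Annuli relation implemented below:
# F_sky = (pi / (2*n)) * sum_{i=1..n} sin(pi*(2*i - 1) / (2*n)) * (p_i / t_i),
# where p_i is the visible-sky area and t_i the total area of the i-th annulus.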
def svf_steyn_1980(azimuth, horizon, n=36):
""" Calculate sky view factor using method of annuli
Parameters
----------
azimuth : array_like
Array of horizon angles in degrees
horizon : array_like
Array of horizon angles in degrees
Returns
-------
float
Sky view factor between 0 and 1
Examples
--------
>>> import numpy as np
>>> azimuth = np.arange(0, 360, 10)
>>> horizon = azimuth * 0
>>> svf_steyn_1980(azimuth, horizon, n=18)
1.00102
>>> svf_steyn_1980(azimuth, horizon, n=36)
1.00019
>>> svf_steyn_1980(azimuth, horizon, n=100)
1.0
"""
if not (horizon[0] == horizon[-1] and azimuth[0] == azimuth[-1]):
horizon = np.append(horizon, horizon[0])
azimuth = np.append(azimuth, azimuth[0])
sky_x, sky_y = project_horizon_to_equirectangular(azimuth, horizon)
# make sky polygon
P = Polygon(p for p in zip(sky_x, sky_y))
L = list()
for i in np.arange(1, n+1):
# Calculate sky proportion of each annulus
A = annulus((i-1)/n, (i)/n )
ti = A.area
pi = P.intersection(A).area
annular_svf = np.sin(np.pi * (2 * i-1) / (2 * n)) * (pi / ti)
L.append(annular_svf)
F_sky = sum(L) * np.pi/(2*n)
F_sky = np.round(F_sky, 5)
return F_sky
def rotation_matrix(axis, theta):
""" Create a rotation matrix
Return the rotation matrix associated with counterclockwise rotation about
a given axis by theta radians (Euler-Rodrigues formula).
Parameters
----------
axis : array_like
3-d vector specifying axis around which to rotate
theta : float
angle of rotation in radians
Returns
-------
array
3 x 3 rotation matrix
Examples
--------
>>> import numpy as np
>>> v = [[0,2],[0,0],[2,0]] #[[0, 0, 2], [2, 0, 0]] # data to be rotated
>>> axis = [0, 1, 0]
>>> theta = np.radians(90)
>>> np.dot(rotation_matrix(axis,theta), v)
array([[ 2., 0.],
[ 0., 0.],
[ 0., -2.]])
"""
axis = np.asarray(axis)
theta = np.asarray(theta)
axis = axis / np.sqrt(np.dot(axis, axis))
a = np.cos(theta / 2)
b, c, d = -axis * np.sin(theta / 2)
aa, bb, cc, dd = a*a, b*b, c*c, d*d
bc, ad, ac, ab, bd, cd = b*c, a*d, a*c, a*b, b*d, c*d
return np.array([[aa+bb-cc-dd, 2*(bc+ad), 2*(bd-ac)],
[2*(bc-ad), aa+cc-bb-dd, 2*(cd+ab)],
[2*(bd+ac), 2*(cd-ab), aa+dd-bb-cc]])
def sphr_to_carte(theta, phi, r):
""" Convert from spherical coordinates to cartesian coordinates
Parameters
----------
theta : array_like
angle from x axis in XY plane in radians
phi : array_like
angle from z axis (note that horizon = 90 - phi)
r : array_like
radius
Returns
-------
array
coordinates in cartesian space
Examples
--------
>>> sphr_to_carte([0,45,90], [0,30,45], [1,1,1])
array([[0.00000000e+00, 3.53553391e-01, 4.32978028e-17],
[0.00000000e+00, 3.53553391e-01, 7.07106781e-01],
[1.00000000e+00, 8.66025404e-01, 7.07106781e-01]])
"""
theta = np.asarray(theta)
phi = np.asarray(phi)
r = np.asarray(r)
theta = theta % 360
if not all((0 <= phi) * (phi < 180)):
raise ValueError("phi must be between 0 and 180 degrees")
x = r*np.sin(np.radians(phi))*np.cos(np.radians(theta))
y = r*np.sin(np.radians(phi))*np.sin(np.radians(theta))
z = r*np.cos(np.radians(phi))
coords = np.array((x,y,z))
return(coords)
def carte_to_sphr(x, y, z, degrees=True):
"""
Convert from cartesian coordinates to spherical coordinates
Parameters
----------
x,y,z : array_like
coordinates in cartesian space
degrees : whether output should be in degrees
Returns
-------
array
spherical coordinates (theta, phi, r)
"""
r = np.sqrt(np.power(x,2) + np.power(y,2) + np.power(z,2))
theta = np.arctan2(y, x) # angle from the x axis in the XY plane; arctan2 takes (y, x) by convention
#theta = np.arctan(y / x)
phi = np.arccos(z / r)
#phi = np.arctan(np.sqrt(np.power(x, 2) + np.power(y, 2)) / z)
#phi = np.arctan2(z, np.sqrt(np.power(x, 2) + np.power(y, 2)))
if degrees:
theta = np.degrees(theta)
theta = theta % 360
phi = np.degrees(phi)
coords = np.array((theta, phi, r))
return(coords)
def horiz_to_carte(azimuth, horizon):
"""
returns cartesian coordinates (on the unit sphere) from azimuths and horizon angles,
e.g. horiz_to_carte([0, 30], [10, 10])
"""
azimuth = np.asarray(azimuth)
horizon = np.asarray(horizon)
azimuth[horizon > 90] = azimuth[horizon > 90] + 180
horizon[horizon > 90] = 180 - horizon[horizon > 90]
azimuth = azimuth % 360
r = azimuth * 0 + 1 # assume unit radius
theta = 90 - azimuth
phi = 90 - horizon
coords = sphr_to_carte(theta, phi, r)
return(coords)
def carte_to_horiz(x, y, z):
"""
returns horizon angle and azimuth from cartesian coordinates
Parameters
----------
x,y,z :
coordinates in cartesian space
Returns
-------
array
coordinates as azimuth-horizon pairs
Examples
--------
>>> carte_to_horiz([0,30], [10,10], [1,1])
array([[ 0. , 71.56505118],
[ 5.71059314, 1.81124805]])
"""
x = np.asarray(x)
y = np.asarray(y)
z = np.asarray(z)
sph = carte_to_sphr(x, y, z)
azimuth = 90 - sph[0] #sph[0] = theta
azimuth = azimuth % 360
horizon = 90 - sph[1] # sph[1] = phi
coords = np.array((azimuth, horizon))
return(coords)
def rotate_towards(azimuth, rot_angle):
"""
Returns the rotation matrix that tilts by rot_angle towards the azimuthal
dip direction; apply it as np.dot(rotmat, vector).
"""
phi = np.radians(azimuth)
x = -np.cos(phi)
y = np.sin(phi)
ax = np.array([x, y, 0])
# Calculate rotation matrix
rotmat = rotation_matrix(ax, np.radians(rot_angle))
return(rotmat)
def rotate_horizon(azimuth, horizon, aspect, dip):
"""
Calculates rotated horizon angles relative to a plane
Parameters
----------
azimuth : array_like
Array of horizon angles in degrees
horizon : array_like
Array of horizon angles in degrees
aspect : float
azimuth of plane
dip : float
inclination of plane in direction of aspect
Returns
-------
array
Examples
--------
>>> import numpy as np
>>> azimuth = np.array(range(0, 360, 45))
>>> horizon = (azimuth * 0) + 30
>>> rotate_horizon(azimuth, horizon, 135, 30)
array([[3.53123432e+02, 2.88978862e+01, 6.95972227e+01, 1.35000000e+02,
2.00402777e+02, 2.41102114e+02, 2.76876568e+02, 3.15000000e+02],
[7.28624519e+00, 2.56589063e+01, 4.76632205e+01, 6.00000000e+01,
4.76632205e+01, 2.56589063e+01, 7.28624519e+00, 1.42108547e-14]])
"""
# ensure inputs are arrays
azimuth = np.asarray(azimuth)
horizon = np.asarray(horizon)
# change to cartesian coordinates and rotate
cart_coords = horiz_to_carte(azimuth, horizon)
rot_matrix = rotate_towards(aspect, -dip)
rot = np.dot(rot_matrix, cart_coords)
# put back in spherical coordinates
coords = carte_to_horiz(rot[0], rot[1], rot[2])
# put negative horizons at 0 degrees (Assume self-shading)
coords[1] = [x if x>=0 else 0 for x in coords[1]]
return(coords)
def project_horizon_to_equirectangular(azimuth, horizon, r0=1, degrees=True):
""" Project azimuth and horizon onto x,y plane using equirectangular projection
Parameters
----------
azimuth: array_like
azimuthal direction
horizon: array_like
horizon angle (angle between point, origin and projection of point on x,y plane)
r0 : array_like
radius of points
degrees : boolean
Whether azimuth and horizon are input in degrees
Examples
--------
>>> import numpy as np
>>> project_horizon_to_equirectangular(90, 0)
(1.0, 0.0)
>>> project_horizon_to_equirectangular(180, 45)
(0.0, -0.5)
>>> project_horizon_to_equirectangular(0, 90)
(0.0, 0.0)
"""
if degrees:
azimuth = np.radians(azimuth)
horizon = np.radians(horizon)
offset = np.pi / 2
r = (np.pi / 2 - horizon) * r0 / (np.pi/2)
x = np.round(np.cos(offset - azimuth) * r, 10)
y = np.round(np.sin(offset - azimuth) * r, 10)
return x,y
def project_horizon_top_down(azimuth, horizon, r0=1, degrees=True):
""" Project azimuth and horizon onto x,y plane
Parameters
----------
azimuth: array_like
azimuthal direction
horizon: array_like
horizon angle (angle between point, origin and projection of point on x,y plane)
r0 : array_like
radius of points
degrees : boolean
Whether azimuth and horizon are input in degrees
Examples
--------
>>> project_horizon_top_down(90, 0)
(1.0, 0.0)
>>> project_horizon_top_down(180, 45)
(0.0, -0.7071067812)
>>> project_horizon_top_down(0, 90)
(0.0, 0.0)
"""
if degrees:
azimuth = np.radians(azimuth)
horizon = np.radians(horizon)
offset = np.pi / 2
x = np.round(np.cos(horizon) * np.cos(offset - azimuth), 10)
y = np.round(np.cos(horizon) * np.sin(offset - azimuth) , 10)
return x,y
def add_sky_plot(figure, *args, **kwargs):
ax = figure.add_subplot(*args, **kwargs, projection='polar')
ax.set_theta_direction(1)
ax.set_theta_zero_location('N')
ax.yaxis.set_visible(False)
ax.set_ylim(0, 1)
return ax
def plot_rotated_points(azimuth, horizon, aspect, dip, ax):
rt = rotate_horizon(azimuth, horizon, aspect, dip)
x,y = project_horizon_to_equirectangular(rt[0], rt[1])
r = np.sqrt(x**2 + y**2)
azimuth = np.mod(np.arctan2(x,y), 2 * np.pi)
p = ax.plot(azimuth, r, 'r-')
return p
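# Usage sketch (assumes an interactive matplotlib backend):
# fig = plt.figure()
# ax = add_sky_plot(fig, 111)
# plot_rotated_points(np.arange(0, 360, 10), np.full(36, 30.0), aspect=135, dip=30, ax=ax)
# plt.show()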
def test_overhang(theta, horiz):
"""
for a set of horizon points, detects which ones are overhanging and returns
a list of True/False for whether the point is overhanging or not
Args:
theta: array or list of azimuthal directions
horiz: array or list of horizon angles for each theta
Returns:
numpy array of logical values the same length as theta or horiz
"""
# ensure inputs are arrays
theta = np.asarray(theta)
horiz = np.asarray(horiz)
#project the horizon and azimuth onto the x-y plane
xp = [np.cos(np.radians(h)) * np.cos(np.radians(90 - t)) for (t, h) in zip(theta, horiz)]
yp = [np.cos(np.radians(h)) * np.sin(np.radians(90 - t)) for (t, h) in zip(theta, horiz)]
# Draw out the horizon line object (we will test for intersections later)
xy = np.array([[x,y] for (x,y) in zip(xp, yp)])
L = LinearRing(xy) # horizon line
# Make on object for the origin
O = Point([0,0])
# Test each point: does a ray extending from the origin through the point
# intersect the horizon line once? twice? If twice, is the point the nearest
# or the farthest intersection? This tells us whether or not it is overhanging.
ohang = []
for (x,y) in zip(xp, yp):
pt_dist = O.distance(Point([x,y])) # get length of segment
# make line in direction of point that has length 2 (so its sure to hit all other pts)
l = LineString([[0,0], [x*(2/pt_dist),y*(2/pt_dist)]])
# get intersection with horizon
pts = l.intersection(L)
if hasattr(pts, '__len__'): # for directions with more than one horizon value
if len(pts) > 2:
warn("A single azimuth has 3 or more horizon intersections, This"+
"could be due to an overly complex horizon geometry and may lead"+
"to unexpected behaviour", RuntimeWarning)
# if there is another horizon line at a lower angle
intersect_distances = [O.distance(x) for x in pts] #(distance ~ 1/angle)
if not max([pt_dist] + intersect_distances) == pt_dist:
ohang.append(True)
else:
ohang.append(False)
# if only 1 intersection then not overhanging
else:
ohang.append(False)
return(np.array(ohang))
def test_obscured(theta, horiz, increment):
"""
for a set of horizon points, detect which azimuth directions are completely
overhung (i.e. x and (180 - x) both have 90 degree horizon angles)
Args:
theta: array or list of azimuthal directions
horiz: array or list of horizon angles for each theta
increment: spacing in degrees between tested azimuth directions
Returns:
numpy array of azimuth directions (degrees) that are completely obscured
"""
# ensure inputs are arrays
theta = np.asarray(theta)
horiz = np.asarray(horiz)
#project the horizon and azimuth onto the x-y plane
xp, yp = project_horizon_top_down(theta, horiz)
# Draw out the horizon line (we will test for intersections later)
xy = np.array([[x,y] for (x,y) in zip(xp, yp)])
L = LinearRing(xy) # horizon line
obscured_points = []
# make test lines halfway around the circle (each line spans the full diameter)
for angle in range(0,180, increment):
# make line across horizon
x_test = 2 * np.cos(np.radians(90 - angle))
y_test = 2 * np.sin(np.radians(90 - angle))
l = LineString([[x_test, y_test], [-x_test, -y_test]])
# get intersection with horizon
pts = l.intersection(L)
if pts:
pass # intersects horizon at least once
else:
obscured_points.append(angle) # no intersection
obscured_points.append( (180 + angle) % 360)
return(np.array(obscured_points))
if __name__ == "__main__":
import doctest
doctest.testmod()
| geocryology/HorizonPy | horizonpy/skyview.py | Python | gpl-3.0 | 18,037 |
# -*- coding: utf-8 -*-
#
# Copyright 2012 James Thornton (http://jamesthornton.com)
# BSD License (see LICENSE for details)
#
"""
Bulbs supports pluggable clients. This is the Rexster client.
"""
from bulbs.config import Config, DEBUG
from bulbs.registry import Registry
from bulbs.utils import get_logger
# specific to this client
from bulbs.json import JSONTypeSystem
from bulbs.base import Client, Response, Result
from bulbs.rest import Request, RESPONSE_HANDLERS
from bulbs.groovy import GroovyScripts
from bulbs.utils import json, build_path, get_file_path, urlsplit, coerce_id
##### Titan
from bulbs.rexster.client import RexsterClient, \
RexsterResponse, RexsterResult
# The default URIs
TITAN_URI = "http://localhost:8182/graphs/graph"
# The logger defined in Config
log = get_logger(__name__)
# Rexster resource paths
# TODO: local path vars would be faster
vertex_path = "vertices"
edge_path = "edges"
index_path = "indices"
gremlin_path = "tp/gremlin"
transaction_path = "tp/batch/tx"
multi_get_path = "tp/batch"
key_index_path = "keyindices"
class TitanResult(RexsterResult):
"""
Container class for a single result, not a list of results.
:param result: The raw result.
:type result: dict
:param config: The client Config object.
:type config: Config
:ivar raw: The raw result.
:ivar data: The data in the result.
"""
pass
class TitanResponse(RexsterResponse):
"""
Container class for the server response.
:param response: httplib2 response: (headers, content).
:type response: tuple
:param config: Config object.
:type config: bulbs.config.Config
:ivar config: Config object.
:ivar headers: httplib2 response headers, see:
http://httplib2.googlecode.com/hg/doc/html/libhttplib2.html
:ivar content: A dict containing the HTTP response content.
:ivar results: A generator of RexsterResult objects, a single RexsterResult object,
or None, depending on the number of results returned.
:ivar total_size: The number of results returned.
:ivar raw: Raw HTTP response. Only set when log_level is DEBUG.
"""
result_class = TitanResult
class TitanRequest(Request):
"""Makes HTTP requests to Rexster and returns a RexsterResponse."""
response_class = TitanResponse
data_type = dict(string="String",
integer="Integer",
geoshape="Geoshape",)
class TitanClient(RexsterClient):
"""
Low-level client that sends a request to Titan and returns a response.
:param config: Optional Config object. Defaults to default Config.
:type config: bulbs.config.Config
:cvar default_uri: Default URI for the database.
:cvar request_class: Request class for the Client.
:ivar config: Config object.
:ivar registry: Registry object.
:ivar scripts: GroovyScripts object.
:ivar type_system: JSONTypeSystem object.
:ivar request: TitanRequest object.
Example:
>>> from bulbs.titan import TitanClient
>>> client = TitanClient()
>>> script = client.scripts.get("get_vertices")
>>> response = client.gremlin(script, params=None)
>>> result = response.results.next()
"""
#: Default URI for the database.
default_uri = TITAN_URI
request_class = TitanRequest
def __init__(self, config=None, db_name=None):
super(TitanClient, self).__init__(config, db_name)
# override so Rexster create_vertex() method doesn't try to index
self.config.autoindex = False
# GET
# these could replace the Rexster Gremlin version of these methods
def outV(self, _id, label=None, start=None, limit=None, properties=None):
path = build_path(vertex_path, _id, "out")
params = build_params(_label=label, _limit=limit, _properties=properties)
return self.request.get(path, params)
def inV(self, _id, label=None, start=None, limit=None, properties=None):
path = build_path(vertex_path, _id, "in")
params = build_params(_label=label, _limit=limit, _properties=properties)
return self.request.get(path, params)
def bothV(self, _id, label=None, start=None, limit=None, properties=None):
path = build_path(vertex_path, _id, "both")
params = build_params(_label=label, _limit=limit, _properties=properties)
return self.request.get(path, params)
def outV_count(self, _id, label=None, start=None, limit=None, properties=None):
path = build_path(vertex_path, _id, "outCount")
params = build_params(_label=label, _limit=limit, _properties=properties)
return self.request.get(path, params)
def inV_count(self, _id, label=None, start=None, limit=None, properties=None):
path = build_path(vertex_path, _id, "inCount")
params = build_params(_label=label, _limit=limit, _properties=properties)
return self.request.get(path, params)
def bothV_count(self, _id, label=None, start=None, limit=None, properties=None):
path = build_path(vertex_path, _id, "bothCount")
params = build_params(_label=label, _limit=limit, _properties=properties)
return self.request.get(path, params)
def outV_ids(self, _id, label=None, start=None, limit=None, properties=None):
path = build_path(vertex_path, _id, "outIds")
params = build_params(_label=label, _limit=limit, _properties=properties)
return self.request.get(path, params)
def inV_ids(self, _id, label=None, start=None, limit=None, properties=None):
path = build_path(vertex_path, _id, "inIds")
params = build_params(_label=label, _limit=limit, _properties=properties)
return self.request.get(path, params)
def bothV_ids(self, _id, label=None, start=None, limit=None, properties=None):
path = build_path(vertex_path, _id, "bothIds")
params = build_params(_label=label, _limit=limit, _properties=properties)
return self.request.get(path, params)
def outE(self, _id, label=None, start=None, limit=None, properties=None):
path = build_path(vertex_path, _id, "outE")
params = build_params(_label=label, _limit=limit, _properties=properties)
return self.request.get(path, params)
def inE(self, _id, label=None, start=None, limit=None, properties=None):
path = build_path(vertex_path, _id, "inE")
params = build_params(_label=label, _limit=limit, _properties=properties)
return self.request.get(path, params)
def bothE(self, _id, label=None, start=None, limit=None, properties=None):
path = build_path(vertex_path, _id, "bothE")
params = build_params(_label=label, _limit=limit, _properties=properties)
return self.request.get(path, params)
# Key Indices
# Titan-Specific Index Methods
# https://github.com/thinkaurelius/titan/wiki/Indexing-Backend-Overview
# https://github.com/thinkaurelius/titan/wiki/Type-Definition-Overview
def create_edge_label(self, label):
# TODO: custom gremlin method
pass
def create_vertex_property_key(self):
# TODO: custom gremlin method
pass
def create_edge_property_key(self):
# TODO: custom gremlin method
pass
def create_vertex_key_index(self, key):
path = build_path(key_index_path, "vertex", key)
params = None
return self.request.post(path, params)
def create_edge_key_index(self, key):
path = build_path(key_index_path, "edge", key)
params = None
return self.request.post(path, params)
def get_vertex_keys(self):
path = build_path(key_index_path, "vertex")
params = None
return self.request.get(path, params)
def get_edge_keys(self):
path = build_path(key_index_path, "edge")
params = None
return self.request.get(path, params)
def get_all_keys(self):
path = key_index_path
params = None
return self.request.get(path, params)
# Index Proxy - General
def get_all_indices(self):
"""Returns a list of all the element indices."""
raise NotImplementedError
def get_index(self, name):
raise NotImplementedError
def delete_index(self, name):
raise NotImplementedError
# Index Proxy - Vertex
def create_vertex_index(self, index_name, *args, **kwds):
"""
Creates a vertex index with the specified params.
:param index_name: Name of the index to create.
:type index_name: str
:rtype: TitanResponse
"""
raise NotImplementedError
def get_vertex_index(self, index_name):
"""
Returns the vertex index with the index_name.
:param index_name: Name of the index.
:type index_name: str
:rtype: TitanResponse
"""
raise NotImplementedError
def get_or_create_vertex_index(self, index_name, index_params=None):
raise NotImplementedError
def delete_vertex_index(self, name):
"""
Deletes the vertex index with the index_name.
:param index_name: Name of the index.
:type index_name: str
:rtype: TitanResponse
"""
raise NotImplementedError
# Index Proxy - Edge
# Titan does NOT support edge indices
def create_edge_index(self, name, *args, **kwds):
raise NotImplementedError
def get_edge_index(self, name):
"""
Returns the edge index with the index_name.
:param index_name: Name of the index.
:type index_name: str
:rtype: TitanResponse
"""
raise NotImplementedError
def get_or_create_edge_index(self, index_name, index_params=None):
raise NotImplementedError
def delete_edge_index(self, name):
raise NotImplementedError
# Index Container - Vertex
def put_vertex(self, index_name, key, value, _id):
# Titan only supports automatic indices
raise NotImplementedError
def lookup_vertex(self, index_name, key, value):
"""
Returns the vertices indexed with the key and value.
:param index_name: Name of the index.
:type index_name: str
:param key: Name of the key.
:type key: str
:param value: Value of the key.
:type value: str
:rtype: TitanResponse
"""
# NOTE: this is different than Rexster's version
# it uses vertex_path instead of index_path, and
# index_name is N/A
# Keeping method interface the same for practical reasons so
# index_name will be ignored, any value will work.
path = build_path(vertex_path)
params = dict(key=key,value=value)
return self.request.get(path,params)
def query_vertex(self, index_name, params):
"""Queries for an vertex in the index and returns the Response."""
path = build_path(index_path,index_name)
return self.request.get(path,params)
def remove_vertex(self,index_name,_id,key=None,value=None):
# Titan only supports automatic indices
raise NotImplementedError
# Index Container - Edge
# Titan does NOT support edge indices
def put_edge(self, index_name, key, value, _id):
raise NotImplementedError
def lookup_edge(self, index_name, key, value):
"""
Looks up an edge in the index and returns the Response.
"""
# NOTE: this is different than Rexster's version
# it uses edge_path instead of index_path, and
# index_name is N/A
# Keeping method interface the same for practical reasons so
# index_name will be ignored, any value will work.
#path = build_path(edge_path)
#params = dict(key=key,value=value)
#return self.request.get(path,params)
raise NotImplementedError
def query_edge(self, index_name, params):
"""Queries for an edge in the index and returns the Response."""
raise NotImplementedError
def remove_edge(self, index_name, _id, key=None, value=None):
raise NotImplementedError
# Model Proxy - Vertex
def create_indexed_vertex(self, data, index_name, keys=None):
"""
Creates a vertex, indexes it, and returns the Response.
:param data: Property data.
:type data: dict
:param index_name: Name of the index.
:type index_name: str
:param keys: Property keys to index.
:type keys: list
:rtype: TitanResponse
"""
return self.create_vertex(data)
def update_indexed_vertex(self, _id, data, index_name, keys=None):
"""
Updates an indexed vertex and returns the Response.
:param index_name: Name of the index.
:type index_name: str
:param data: Property data.
:type data: dict
:param index_name: Name of the index.
:type index_name: str
:param keys: Property keys to index.
:type keys: list
:rtype: TitanResponse
"""
return self.update_vertex(_id, data)
# Model Proxy - Edge
def create_indexed_edge(self, outV, label, inV, data, index_name, keys=None):
"""
Creates a edge, indexes it, and returns the Response.
:param outV: Outgoing vertex ID.
:type outV: int
:param label: Edge label.
:type label: str
:param inV: Incoming vertex ID.
:type inV: int
:param data: Property data.
:type data: dict
:param index_name: Name of the index.
:type index_name: str
:param keys: Property keys to index. Defaults to None (indexes all properties).
:type keys: list
:rtype: TitanResponse
"""
return self.create_edge(outV, label, inV, data)
def update_indexed_edge(self, _id, data, index_name, keys=None):
"""
Updates an indexed edge and returns the Response.
:param _id: Edge ID.
:type _id: int
:param data: Property data.
:type data: dict
:param index_name: Name of the index.
:type index_name: str
:param keys: Property keys to index. Defaults to None (indexes all properties).
:type keys: list
:rtype: TitanResponse
"""
return self.update_edge(_id, data)
# Utils
def build_params(**kwds):
# Rexster isn't liking None param values
params = dict()
for key in kwds:
value = kwds[key]
if value is not None:
params[key] = value
return params
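# e.g. build_params(_label="knows", _limit=10, _properties=None)
# returns {'_label': 'knows', '_limit': 10}; None values are dropped because
# Rexster rejects them (see comment above).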
| mudbungie/NetExplorer | env/lib/python3.4/site-packages/bulbs/titan/client.py | Python | mit | 14,723 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class LoadBalancerNetworkInterfacesOperations:
"""LoadBalancerNetworkInterfacesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
resource_group_name: str,
load_balancer_name: str,
**kwargs: Any
) -> AsyncIterable["_models.NetworkInterfaceListResult"]:
"""Gets associated load balancer network interfaces.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_12_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/networkInterfaces'} # type: ignore
| Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_12_01/aio/operations/_load_balancer_network_interfaces_operations.py | Python | mit | 5,642 |
#reset.py
#contains url endpoints for setting and resetting user password
import webapp2
import re
import os
import my_db
from google.appengine.ext import db
import html
import validate_email
from webapp2_extras import security
import util
import my_email
IS_DEV = os.environ['SERVER_SOFTWARE'].count('Development') > 0
REDIRECT = "/static/reset-password.html"
hasNumbersAndLetters = re.compile(r'(?=.*[0-9])(?=.*[a-zA-Z])')
correctLength = re.compile(r'^\S{8,30}$')
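# e.g. 'abc12345' passes both checks; 'password' fails hasNumbersAndLetters
# (no digit) and 'a1' fails correctLength (shorter than 8 characters).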
#endPoint for /resetpw
class Reset(webapp2.RequestHandler):
def post(self):
password1 = self.request.get('password1').encode('utf-8')
password2 = self.request.get('password2').encode('utf-8')
key = self.request.get('key').encode('utf-8')
email = self.request.get('email').encode('utf-8')
if not hasNumbersAndLetters.match(password1):
self.response.write(html.password_reset(key, numbers_and_letters=True))
return
if not correctLength.match(password1):
self.response.write(html.password_reset(key, password_length=True))
return
if password1 != password2:
self.response.write(html.password_reset(key, passwords_different=True))
return
passwordReset = db.get(key)
if not passwordReset: # TODO: should also reject an expired passwordReset
self.redirect(REDIRECT + "?token_expired=true&email=" + email)
return
user = my_db.get(passwordReset.email)
if user.password:
user.previous_passwords.append(user.password)
user.password_resets += 1
user.password = security.generate_password_hash(password1)
user.login_attempts = 0
user.put()
passwordReset.delete()
self.response.write('Password reset.')
#endpoint for /submitreset
#the password resetting email links here
class SubmitReset(webapp2.RequestHandler):
def get(self):
key = self.request.get('key').encode('utf-8')
email = self.request.get('email').encode('utf-8')
self.response.write(html.password_reset(key=key, email=email))
#endpoint for /startreset
class StartReset(webapp2.RequestHandler):
def post(self):
email = self.request.get('email').encode('utf-8')
suffix = "&email=" + email
user = my_db.get(email)
if not user:
self.redirect(REDIRECT + "?email_problem=true&default=true" + suffix)
return
#add new one
passwordReset = my_db.new_reset(email)
passwordReset.put()
#send email
link = "https://sample-webstore.appspot.com/submitreset?key=" + str(passwordReset.key())\
+ "&email=" + email
my_email.reset_password_email(email, user.name, link)
self.redirect(REDIRECT + "?sent=true")
| whamtet/gae-webstore | reset.py | Python | epl-1.0 | 2,798 |
# Django settings for homepage project.
import django.conf.global_settings as DEFAULT_SETTINGS
import os
import re
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'homepage.db', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = os.path.join(os.path.split(__file__)[0], 'static') + '/'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/media/admin/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(os.path.split(__file__)[0], 'static'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'j0ee6taz(y5dqbfp3)dh*+7as@pbs_w25%arj4-ds#j5%zf!nj'
TEMPLATE_CONTEXT_PROCESSORS = DEFAULT_SETTINGS.TEMPLATE_CONTEXT_PROCESSORS + (
'homepage.contacts.context_processors.django_settings',
)
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'homepage.requests.middleware.RequestsStoreMiddleware',
)
ROOT_URLCONF = 'homepage.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
'homepage/templates'
)
# Urls which match pattern will be marked as prioritizied
PRIORITY_REQUESTS_PATT = re.compile(r'/contact.*')
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
'django.contrib.admindocs',
'south',
'homepage.contacts',
'homepage.requests',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
| crchemist/test42 | homepage/settings.py | Python | unlicense | 5,589 |
# Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from six import text_type as unicode
from contextlib import contextmanager
import os.path
from robot.output.loggerhelper import LEVELS
from robot.utils import (get_link_path, html_attr_escape, html_escape,
html_format, is_string, is_unicode, timestamp_to_secs)
from .stringcache import StringCache
class JsBuildingContext(object):
def __init__(self, log_path=None, split_log=False, prune_input=False):
# log_path can be a custom object in unit tests
self._log_dir = os.path.dirname(log_path) \
if is_string(log_path) else None
self._split_log = split_log
self._prune_input = prune_input
self._strings = self._top_level_strings = StringCache()
self.basemillis = None
self.split_results = []
self.min_level = 'NONE'
self._msg_links = {}
def string(self, string, escape=True, attr=False):
if escape and string:
if not is_unicode(string):
string = unicode(string)
escaper = html_escape if not attr else html_attr_escape
string = escaper(string)
return self._strings.add(string)
def html(self, string):
return self.string(html_format(string), escape=False)
def relative_source(self, source):
rel_source = get_link_path(source, self._log_dir) \
if self._log_dir and source and os.path.exists(source) else ''
return self.string(rel_source)
def timestamp(self, time):
if not time:
return None
# Note: `long` was needed here on IronPython
# (http://ironpython.codeplex.com/workitem/31549); plain int is fine on Python 3.
millis = int(round(timestamp_to_secs(time) * 1000))
if self.basemillis is None:
self.basemillis = millis
return millis - self.basemillis
def message_level(self, level):
if LEVELS[level] < LEVELS[self.min_level]:
self.min_level = level
def create_link_target(self, msg):
id = self._top_level_strings.add(msg.parent.id)
self._msg_links[self._link_key(msg)] = id
def link(self, msg):
return self._msg_links.get(self._link_key(msg))
def _link_key(self, msg):
return (msg.message, msg.level, msg.timestamp)
@property
def strings(self):
return self._strings.dump()
def start_splitting_if_needed(self, split=False):
if self._split_log and split:
self._strings = StringCache()
return True
return False
def end_splitting(self, model):
self.split_results.append((model, self.strings))
self._strings = self._top_level_strings
return len(self.split_results)
@contextmanager
def prune_input(self, *items):
yield
if self._prune_input:
for item in items:
item.clear()
| userzimmermann/robotframework | src/robot/reporting/jsbuildingcontext.py | Python | apache-2.0 | 3,444 |
from django.utils.translation import ugettext_lazy as _
import horizon
class LogPanels(horizon.PanelGroup):
slug = "logpanels"
name = _("LogPanels")
panels = ('overview','detail')
class LogDashboard(horizon.Dashboard):
name = _("LogDashboard")
slug = "logdashboard"
panels = (LogPanels,) # Add your panels here.
default_panel = 'overview' # Specify the slug of the dashboard's default panel.
permissions = ('openstack.roles.admin',)
horizon.register(LogDashboard)
| TechBK/horizon-dev | openstack_dashboard/dashboards/logdashboard/dashboard.py | Python | apache-2.0 | 503 |
from django.conf.urls import url, include
from django.views.generic import RedirectView
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from nba_py.constants import CURRENT_SEASON
urlpatterns = [
# Redirect to current season ranking view
url(r'^$',
RedirectView.as_view(pattern_name='ranking'),
{'season': CURRENT_SEASON},
name='index'),
# Main application part
url(r'', include('players.urls')),
# Django Admin
url(r'^django_admin/', include(admin.site.urls)),
]
# Serve static files on runserver
if settings.DEBUG:
urlpatterns += static(settings.STATIC_URL,
document_root=settings.STATIC_ROOT)
urlpatterns += static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
| pawelad/nba-rank | src/nba_rank/urls.py | Python | mit | 849 |
from __future__ import division
from collections import Counter
from itertools import chain, combinations
import json
from math import sqrt
import os
from random import randint, random, randrange, uniform
import tarfile
import time
import zipfile
_debug = False
def debug():
global _debug
return _debug
def value_or_default(value, default):
if value is None:
return default
else:
return value
def mode_specific_lists(general, train, validation, test, allow_none=False):
assert (general is None) or (train is None) or (test is None) or (set(general) == set(train) | set(test))
if general is None:
if test is None:
assert allow_none
general = None
train = None
validation = None
test = None
elif train is None:
assert allow_none
general = None
train = None
validation = None if validation is None else list(validation)
test = list(test)
else:
train = list(train)
validation = list(train) if validation is None else list(validation)
test = list(test)
general = list(set(train + validation + test))
elif train is not None:
general = list(general)
train = list(train)
validation = list(train) if validation is None else list(validation)
test = list(n for n in general if n not in train)
elif test is not None:
general = list(general)
train = list(n for n in general if n not in test and (validation is None or n not in validation))
validation = list(train) if validation is None else list(validation)
test = list(test)
else:
general = list(general)
train = list(general)
validation = list(general)
test = list(general)
return general, train, validation, test
def class_name(string):
return ''.join(part[0].upper() + part[1:] for part in string.split('_'))
def real_name(string):
return string[0].lower() + ''.join('_' + char.lower() if char.isupper() else char for char in string[1:])
def negative_response(response):
return response.lower() in ('n', 'no', 'c', 'cancel', 'a', 'abort')
def parse_int_with_factor(string):
assert string
if len(string) < 2:
return int(string)
elif string[-1] == 'k':
return int(string[:-1]) * 1000
elif string[-1] == 'M':
return int(string[:-1]) * 1000000
elif string[-2:] == 'Ki':
return int(string[:-2]) * 1024
elif string[-2:] == 'Mi':
return int(string[:-2]) * 1048576
else:
return int(string)
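# e.g. parse_int_with_factor('4k') == 4000 and parse_int_with_factor('2Mi') == 2097152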
def parse_tuple(parse_item, unary_tuple=True, valid_sizes=None):
def parse(string):
if ',' in string:
if string[0] == '(' and string[-1] == ')':
assert len(string) > 2
xs = string[1:-1].split(',')
assert valid_sizes is None or len(xs) in valid_sizes
return tuple(parse_item(x) for x in xs)
else:
xs = string.split(',')
assert valid_sizes is None or len(xs) in valid_sizes
return tuple(parse_item(x) for x in xs)
elif unary_tuple:
return (parse_item(string),)
else:
return parse_item(string)
return parse
def parse_config(values):
assert len(values) % 2 == 0
config = dict()
for key, value in zip(values[::2], values[1::2]):
if key[0:2] == '--':
key = key[2:].replace('-', '_')
else:
key = key.replace('-', '_')
try:
config[key] = json.loads(value)
except json.decoder.JSONDecodeError:
config[key] = value
return config
def sentence2tokens(sentence):
sentence = sentence[0].lower() + sentence[1:]
return sentence.replace(', ', ' , ').replace('; ', ' ; ').replace('.', ' .').replace('?', ' ?').split()
def tokens2sentence(tokens):
sentence = ' '.join(tokens).replace(' , ', ', ').replace(' ; ', '; ').replace(' .', '.').replace(' ?', '?')
return sentence[0].upper() + sentence[1:]
def alternatives_type(value_type):
if len(value_type) > 5 and value_type[:13] == 'alternatives(' and value_type[-1] == ')':
return value_type[13:-1], True
else:
return value_type, False
def product(xs):
prod = 1
for x in xs:
prod *= x
return prod
def powerset(values, min_num=None, max_num=None):
values = list(values)
min_num = min_num or 0
max_num = max_num or len(values)
return chain.from_iterable(combinations(values, num) for num in range(min_num, max_num + 1))
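# e.g. list(powerset([1, 2])) == [(), (1,), (2,), (1, 2)]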
def merge_dicts(dict1, dict2):
merged = dict1.copy()
merged.update(dict2)
return merged
def all_and_any(xs):
try:
if not next(xs):
return False
except StopIteration:
return False
return all(xs)
def any_or_none(xs):
try:
if next(xs):
return True
except StopIteration:
return True
return any(xs)
def any_not_all(xs):
try:
first = next(xs)
except StopIteration:
return False
if first:
return not all(xs)
else:
return any(xs)
# partial_order is dict: x -> set({>x})
def toposort(partial_order):
result = []
smaller = Counter(y for ys in partial_order.values() for y in ys)
smallest = [x for x in partial_order if not smaller[x]]
while smallest:
x = smallest.pop()
result.append(x)
for y in partial_order[x]:
smaller[y] -= 1
if not smaller[y]:
smallest.append(y)
assert not list(smaller.elements())
return result
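# e.g. toposort({'a': {'b'}, 'b': set()}) == ['a', 'b']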
def quadratic_uniform(a, b):
return sqrt(uniform(a * a, b * b))
def cumulative_distribution(values):
if isinstance(values, int):
assert values > 0
return [n / values for n in range(1, values + 1)]
elif all(isinstance(x, float) or isinstance(x, int) for x in values):
denominator = sum(values)
prob = 0.0
cdf = []
for x in values:
prob += max(x, 0.0) # negative values are zero
cdf.append(prob / denominator)
return cdf
else:
assert False
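# e.g. cumulative_distribution([1, 1, 2]) == [0.25, 0.5, 1.0]
# and cumulative_distribution(4) == [0.25, 0.5, 0.75, 1.0]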
def sample(cumulative_distribution, items=None):
sample = random()
if items:
for item, prob in zip(items, cumulative_distribution):
if sample < prob:
return item
else:
for index, prob in enumerate(cumulative_distribution):
if sample < prob:
return index
# def sample_softmax(logits, temperature=1.0):
# probabilities = [exp(logit / temperature) for logit in logits]
# probabilities /= sum(probabilities)
# return sample(cumulative_distribution=cumulative_distribution(values=probabilities))
def choice(items, num_range, auxiliary=None):
items = list(items)
if isinstance(num_range, int):
num_items = num_range
else:
num_items = randint(*num_range)
if len(items) == num_items:
return items
elif len(items) < num_items:
chosen = items
auxiliary = list(auxiliary)
for _ in range(num_items - len(items)):
pick = randrange(len(auxiliary))
chosen.append(auxiliary.pop(pick))
return chosen
else:
chosen = list()
for _ in range(num_items):
pick = randrange(len(items))
chosen.append(items.pop(pick))
return chosen
class Archive(object):
def __init__(self, path, mode, archive=None):
assert mode == 'r' or mode == 'w'
assert archive in (None, 'zip', 'zip:none', 'zip:deflate', 'zip:bzip2', 'zip:lzma', 'tar', 'tar:none', 'tar:gzip', 'tar:bzip2', 'tar:lzma')
self.archive = path
self.mode = mode
if not os.path.isdir(self.archive[:self.archive.rindex('/')]):
os.mkdir(self.archive[:self.archive.rindex('/')])
try:
if not os.path.isdir('/tmp/shapeworld'):
os.makedirs('/tmp/shapeworld')
self.temp_directory = os.path.join('/tmp/shapeworld', 'temp-' + str(time.time()))
os.mkdir(self.temp_directory)
except PermissionError:
self.temp_directory = os.path.join(self.archive[:self.archive.rindex('/')], 'temp-' + str(time.time()))
os.mkdir(self.temp_directory)
if archive is None:
self.archive_type = None
if not os.path.isdir(self.archive):
os.mkdir(self.archive)
elif archive[:3] == 'zip':
self.archive_type = 'zip'
if len(archive) == 3:
compression = zipfile.ZIP_DEFLATED
elif archive[4:] == 'none':
compression = zipfile.ZIP_STORED
elif archive[4:] == 'deflate':
compression = zipfile.ZIP_DEFLATED
elif archive[4:] == 'bzip2':
compression = zipfile.ZIP_BZIP2
elif archive[4:] == 'lzma':
compression = zipfile.ZIP_LZMA
if not self.archive.endswith('.zip'):
self.archive += '.zip'
self.archive = zipfile.ZipFile(self.archive, mode, compression)
elif archive[:3] == 'tar':
self.archive_type = 'tar'
if len(archive) == 3:
mode += ':gz'
extension = '.gz'
elif archive[4:] == 'none':
extension = ''
elif archive[4:] == 'gzip':
mode += ':gz'
extension = '.gz'
elif archive[4:] == 'bzip2':
mode += ':bz2'
extension = '.bz2'
elif archive[4:] == 'lzma':
mode += ':xz'
extension = '.lzma'
if not self.archive.endswith('.tar' + extension):
self.archive += '.tar' + extension
self.archive = tarfile.open(self.archive, mode)
def close(self):
if self.archive_type is not None:
self.archive.close()
os.rmdir(self.temp_directory)
def __enter__(self):
if self.mode == 'r':
return self.read_file
else:
return self.write_file
def __exit__(self, type, value, traceback):
self.close()
return False
def read_file(self, filename, binary=False):
if self.archive_type is None:
filename = os.path.join(self.archive, filename)
if not os.path.isfile(filename):
return None
with open(filename, 'rb' if binary else 'r') as filehandle:
value = filehandle.read()
return value
elif self.archive_type == 'zip':
try:
fileinfo = self.archive.getinfo(filename)
except KeyError:
return None
value = self.archive.read(fileinfo)
if not binary:
value = value.decode()
return value
elif self.archive_type == 'tar':
try:
fileinfo = self.archive.getmember(filename)
except KeyError:
return None
self.archive.extract(member=fileinfo, path=self.temp_directory)
filepath = os.path.join(self.temp_directory, filename)
with open(filepath, 'rb' if binary else 'r') as filehandle:
value = filehandle.read()
os.remove(filepath)
return value
def write_file(self, filename, value, binary=False):
if self.archive_type is None:
filename = os.path.join(self.archive, filename)
with open(filename, 'wb' if binary else 'w') as filehandle:
filehandle.write(value)
elif self.archive_type == 'zip':
if binary:
filepath = os.path.join(self.temp_directory, filename)
with open(filepath, 'wb') as filehandle:
filehandle.write(value)
self.archive.write(filepath, filename)
os.remove(filepath)
else:
self.archive.writestr(filename, value)
elif self.archive_type == 'tar':
filepath = os.path.join(self.temp_directory, filename)
with open(filepath, 'wb' if binary else 'w') as filehandle:
filehandle.write(value)
self.archive.add(filepath, filename)
os.remove(filepath)
| AlexKuhnle/ShapeWorld | shapeworld/util.py | Python | mit | 12,503 |
# synergy -- mouse and keyboard sharing utility
# Copyright (C) 2012-2016 Symless Ltd.
# Copyright (C) 2009 Nick Bolton
#
# This package is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# found in the file LICENSE that should have accompanied this file.
#
# This package is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
| ommokazza/synergy | ext/toolchain/__init__.py | Python | gpl-2.0 | 704 |
#!/usr/bin/env python
import gtk
class FontSelection(gtk.Window):
def __init__(self):
gtk.Window.__init__(self)
self.set_title("FontSelection")
self.set_border_width(5)
self.connect("destroy", gtk.main_quit)
vbox = gtk.VBox(False, 5)
self.add(vbox)
self.fontselection = gtk.FontSelection()
vbox.pack_start(self.fontselection, True, True, 0)
buttonbox = gtk.HButtonBox()
buttonbox.set_layout(gtk.BUTTONBOX_END)
vbox.pack_end(buttonbox, False, False, 0)
button = gtk.Button("Select")
button.connect("clicked", self.on_button_clicked)
buttonbox.add(button)
def on_button_clicked(self, *args):
print "Font selected:", self.fontselection.get_font_name()
window = FontSelection()
window.show_all()
gtk.main()
| Programmica/pygtk-tutorial | examples/fontselection.py | Python | cc0-1.0 | 840 |
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from perf_tools import smoothness_metrics
from telemetry.core import util
from telemetry.page import page_measurement
class DidNotScrollException(page_measurement.MeasurementFailure):
def __init__(self):
super(DidNotScrollException, self).__init__('Page did not scroll')
class MissingDisplayFrameRate(page_measurement.MeasurementFailure):
def __init__(self, name):
super(MissingDisplayFrameRate, self).__init__(
'Missing display frame rate metrics: ' + name)
def DivideIfPossibleOrZero(numerator, denominator):
if denominator == 0:
return 0
return numerator / denominator
def CalcScrollResults(rendering_stats_deltas, results):
num_frames_sent_to_screen = rendering_stats_deltas['numFramesSentToScreen']
mean_frame_time_seconds = (
rendering_stats_deltas['totalTimeInSeconds'] /
float(num_frames_sent_to_screen))
dropped_percent = (
rendering_stats_deltas['droppedFrameCount'] /
float(num_frames_sent_to_screen))
num_impl_thread_scrolls = rendering_stats_deltas.get(
'numImplThreadScrolls', 0)
num_main_thread_scrolls = rendering_stats_deltas.get(
'numMainThreadScrolls', 0)
percent_impl_scrolled = DivideIfPossibleOrZero(
float(num_impl_thread_scrolls),
num_impl_thread_scrolls + num_main_thread_scrolls)
num_layers = (
rendering_stats_deltas.get('numLayersDrawn', 0) /
float(num_frames_sent_to_screen))
num_missing_tiles = (
rendering_stats_deltas.get('numMissingTiles', 0) /
float(num_frames_sent_to_screen))
results.Add('mean_frame_time', 'ms', round(mean_frame_time_seconds * 1000, 3))
results.Add('dropped_percent', '%', round(dropped_percent * 100, 1),
data_type='unimportant')
results.Add('percent_impl_scrolled', '%',
round(percent_impl_scrolled * 100, 1),
data_type='unimportant')
results.Add('average_num_layers_drawn', '', round(num_layers, 1),
data_type='unimportant')
results.Add('average_num_missing_tiles', '', round(num_missing_tiles, 1),
data_type='unimportant')
def CalcTextureUploadResults(rendering_stats_deltas, results):
if (('totalCommitCount' not in rendering_stats_deltas)
or rendering_stats_deltas['totalCommitCount'] == 0) :
averageCommitTimeMs = 0
else :
averageCommitTimeMs = (
1000 * rendering_stats_deltas['totalCommitTimeInSeconds'] /
rendering_stats_deltas['totalCommitCount'])
results.Add('texture_upload_count', 'count',
rendering_stats_deltas.get('textureUploadCount', 0))
results.Add('total_texture_upload_time', 'seconds',
rendering_stats_deltas.get('totalTextureUploadTimeInSeconds', 0))
results.Add('average_commit_time', 'ms', averageCommitTimeMs,
data_type='unimportant')
def CalcFirstPaintTimeResults(results, tab):
if tab.browser.is_content_shell:
results.Add('first_paint', 'ms', 'unsupported')
return
tab.ExecuteJavaScript("""
window.__rafFired = false;
window.webkitRequestAnimationFrame(function() {
window.__rafFired = true;
});
""")
util.WaitFor(lambda: tab.EvaluateJavaScript('window.__rafFired'), 60)
first_paint_secs = tab.EvaluateJavaScript(
'window.chrome.loadTimes().firstPaintTime - ' +
'window.chrome.loadTimes().startLoadTime')
results.Add('first_paint', 'ms', round(first_paint_secs * 1000, 1))
results.Add('UserAgent', '', tab.EvaluateJavaScript('navigator.userAgent'))
def CalcImageDecodingResults(rendering_stats_deltas, results):
totalDeferredImageDecodeCount = rendering_stats_deltas.get(
'totalDeferredImageDecodeCount', 0)
totalDeferredImageCacheHitCount = rendering_stats_deltas.get(
'totalDeferredImageCacheHitCount', 0)
totalImageGatheringCount = rendering_stats_deltas.get(
'totalImageGatheringCount', 0)
totalDeferredImageDecodeTimeInSeconds = rendering_stats_deltas.get(
'totalDeferredImageDecodeTimeInSeconds', 0)
totalImageGatheringTimeInSeconds = rendering_stats_deltas.get(
'totalImageGatheringTimeInSeconds', 0)
averageImageGatheringTime = DivideIfPossibleOrZero(
(totalImageGatheringTimeInSeconds * 1000), totalImageGatheringCount)
results.Add('total_deferred_image_decode_count', 'count',
totalDeferredImageDecodeCount,
data_type='unimportant')
results.Add('total_image_cache_hit_count', 'count',
totalDeferredImageCacheHitCount,
data_type='unimportant')
results.Add('average_image_gathering_time', 'ms', averageImageGatheringTime,
data_type='unimportant')
results.Add('total_deferred_image_decoding_time', 'seconds',
totalDeferredImageDecodeTimeInSeconds,
data_type='unimportant')
def CalcAnalysisResults(rendering_stats_deltas, results):
totalTilesAnalyzed = rendering_stats_deltas.get(
'totalTilesAnalyzed', 0)
solidColorTilesAnalyzed = rendering_stats_deltas.get(
'solidColorTilesAnalyzed', 0)
totalTileAnalysisTimeInSeconds = rendering_stats_deltas.get(
'totalTileAnalysisTimeInSeconds', 0)
averageAnalysisTimeMS = \
1000 * DivideIfPossibleOrZero(totalTileAnalysisTimeInSeconds,
totalTilesAnalyzed)
results.Add('total_tiles_analyzed', 'count',
totalTilesAnalyzed,
data_type='unimportant')
results.Add('solid_color_tiles_analyzed', 'count',
solidColorTilesAnalyzed,
data_type='unimportant')
results.Add('average_tile_analysis_time', 'ms',
averageAnalysisTimeMS,
data_type='unimportant')
def CalcLatency(rendering_stats_deltas, count_name, total_latency_name,
result_name, results):
eventCount = rendering_stats_deltas.get(count_name, 0)
totalLatencyInSeconds = rendering_stats_deltas.get(total_latency_name, 0)
averageLatency = DivideIfPossibleOrZero(
(totalLatencyInSeconds * 1000), eventCount)
results.Add(result_name, 'ms', averageLatency, data_type='unimportant')
def CalcLatencyResults(rendering_stats_deltas, results):
CalcLatency(rendering_stats_deltas, 'inputEventCount', 'totalInputLatency',
'average_latency', results)
CalcLatency(rendering_stats_deltas, 'touchUICount', 'totalTouchUILatency',
'average_touch_ui_latency', results)
CalcLatency(rendering_stats_deltas, 'touchAckedCount',
'totalTouchAckedLatency',
'average_touch_acked_latency',
results)
CalcLatency(rendering_stats_deltas, 'scrollUpdateCount',
'totalScrollUpdateLatency',
'average_scroll_update_latency', results)
class Smoothness(page_measurement.PageMeasurement):
def __init__(self):
super(Smoothness, self).__init__('smoothness')
self.force_enable_threaded_compositing = False
self.use_gpu_benchmarking_extension = True
self._metrics = None
def AddCommandLineOptions(self, parser):
parser.add_option('--report-all-results', dest='report_all_results',
action='store_true',
help='Reports all data collected, not just FPS')
def CustomizeBrowserOptions(self, options):
if self.use_gpu_benchmarking_extension:
options.extra_browser_args.append('--enable-gpu-benchmarking')
if self.force_enable_threaded_compositing:
options.extra_browser_args.append('--enable-threaded-compositing')
def CanRunForPage(self, page):
return hasattr(page, 'smoothness')
def WillRunAction(self, page, tab, action):
if tab.browser.platform.IsRawDisplayFrameRateSupported():
tab.browser.platform.StartRawDisplayFrameRateMeasurement()
self._metrics = smoothness_metrics.SmoothnessMetrics(tab)
if action.CanBeBound():
self._metrics.BindToAction(action)
else:
self._metrics.Start()
def DidRunAction(self, page, tab, action):
if tab.browser.platform.IsRawDisplayFrameRateSupported():
tab.browser.platform.StopRawDisplayFrameRateMeasurement()
if not action.CanBeBound():
self._metrics.Stop()
def MeasurePage(self, page, tab, results):
rendering_stats_deltas = self._metrics.deltas
if not (rendering_stats_deltas['numFramesSentToScreen'] > 0):
raise DidNotScrollException()
load_timings = tab.EvaluateJavaScript("window.performance.timing")
load_time_seconds = (
float(load_timings['loadEventStart']) -
load_timings['navigationStart']) / 1000
dom_content_loaded_time_seconds = (
float(load_timings['domContentLoadedEventStart']) -
load_timings['navigationStart']) / 1000
results.Add('load_time', 'seconds', load_time_seconds)
results.Add('dom_content_loaded_time', 'seconds',
dom_content_loaded_time_seconds)
CalcFirstPaintTimeResults(results, tab)
CalcScrollResults(rendering_stats_deltas, results)
CalcTextureUploadResults(rendering_stats_deltas, results)
CalcImageDecodingResults(rendering_stats_deltas, results)
CalcAnalysisResults(rendering_stats_deltas, results)
CalcLatencyResults(rendering_stats_deltas, results)
if self.options.report_all_results:
for k, v in rendering_stats_deltas.iteritems():
results.Add(k, '', v)
if tab.browser.platform.IsRawDisplayFrameRateSupported():
for r in tab.browser.platform.GetRawDisplayFrameRateMeasurements():
if r.value is None:
raise MissingDisplayFrameRate(r.name)
results.Add(r.name, r.unit, r.value)
| topcoat/topcoat-grunt-telemetry | src/tools/perf/measurements/smoothness.py | Python | apache-2.0 | 9,662 |
# Copyright 2012 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_log import log as oslo_logging
from cloudbaseinit.utils import classloader
opts = [
cfg.ListOpt(
'plugins',
default=[
'cloudbaseinit.plugins.common.mtu.MTUPlugin',
'cloudbaseinit.plugins.windows.ntpclient.NTPClientPlugin',
'cloudbaseinit.plugins.common.sethostname.SetHostNamePlugin',
'cloudbaseinit.plugins.windows.createuser.CreateUserPlugin',
'cloudbaseinit.plugins.common.networkconfig.NetworkConfigPlugin',
'cloudbaseinit.plugins.windows.licensing.WindowsLicensingPlugin',
'cloudbaseinit.plugins.common.sshpublickeys.'
'SetUserSSHPublicKeysPlugin',
'cloudbaseinit.plugins.windows.extendvolumes.ExtendVolumesPlugin',
'cloudbaseinit.plugins.common.userdata.UserDataPlugin',
'cloudbaseinit.plugins.common.setuserpassword.'
'SetUserPasswordPlugin',
'cloudbaseinit.plugins.windows.winrmlistener.'
'ConfigWinRMListenerPlugin',
'cloudbaseinit.plugins.windows.winrmcertificateauth.'
'ConfigWinRMCertificateAuthPlugin',
'cloudbaseinit.plugins.common.localscripts.LocalScriptsPlugin',
],
help='List of enabled plugin classes, '
'to executed in the provided order'),
]
CONF = cfg.CONF
CONF.register_opts(opts)
LOG = oslo_logging.getLogger(__name__)
# Some plugins were moved to plugins.common, in order to
# better reflect the fact that they are not platform specific.
# Unfortunately, there are a lot of users out there with old
# config files which are using the old plugin names.
# So in order not to crash cloudbaseinit for their cases,
# we provide this explicit mapping. This will be removed
# when we'll reach 1.0 though.
OLD_PLUGINS = {
'cloudbaseinit.plugins.windows.mtu.MTUPlugin':
'cloudbaseinit.plugins.common.mtu.MTUPlugin',
'cloudbaseinit.plugins.windows.sethostname.SetHostNamePlugin':
'cloudbaseinit.plugins.common.sethostname.SetHostNamePlugin',
'cloudbaseinit.plugins.windows.networkconfig.NetworkConfigPlugin':
'cloudbaseinit.plugins.common.networkconfig.NetworkConfigPlugin',
'cloudbaseinit.plugins.windows.sshpublickeys.SetUserSSHPublicKeysPlugin':
'cloudbaseinit.plugins.common.sshpublickeys.SetUserSSHPublicKeysPlugin',
'cloudbaseinit.plugins.windows.userdata.UserDataPlugin':
'cloudbaseinit.plugins.common.userdata.UserDataPlugin',
'cloudbaseinit.plugins.windows.setuserpassword.SetUserPasswordPlugin':
'cloudbaseinit.plugins.common.setuserpassword.SetUserPasswordPlugin',
'cloudbaseinit.plugins.windows.localscripts.LocalScriptsPlugin':
'cloudbaseinit.plugins.common.localscripts.LocalScriptsPlugin',
}
def load_plugins(stage):
plugins = []
cl = classloader.ClassLoader()
for class_path in CONF.plugins:
if class_path in OLD_PLUGINS:
new_class_path = OLD_PLUGINS[class_path]
LOG.warn("Old plugin module %r was found. The new name is %r. "
"The old name will not be supported starting with "
"cloudbaseinit 1.0", class_path, new_class_path)
class_path = new_class_path
try:
plugin_cls = cl.load_class(class_path)
if not stage or plugin_cls.execution_stage == stage:
plugin = plugin_cls()
plugins.append(plugin)
except ImportError:
LOG.error("Could not import plugin module %r", class_path)
continue
return plugins
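# Usage sketch (illustrative; the plugin classes and their `execution_stage`
# attributes are defined elsewhere in cloudbaseinit):
#
#   plugins = load_plugins(stage=None)   # instantiate every configured plugin
#   names = [type(plugin).__name__ for plugin in plugins]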
| cmin764/cloudbase-init | cloudbaseinit/plugins/factory.py | Python | apache-2.0 | 4,203 |
import os.path
import re
import shutil
import struct
import subprocess
import sys
import zipfile2 as zipfile
import bz2
def import_boto():
global Key, S3Connection, bucket_lister, awscreds
try:
from boto.s3.key import Key
from boto.s3.connection import S3Connection
from boto.s3.bucketlistresultset import bucket_lister
    except ImportError:
print("You need boto library (http://code.google.com/p/boto/)")
print("svn checkout http://boto.googlecode.com/svn/trunk/ boto")
print("cd boto; python setup.py install")
raise
    try:
        import awscreds
    except ImportError:
        print("awscreds.py file needed with access and secret globals for aws access")
        sys.exit(1)
def log(s):
print(s)
sys.stdout.flush()
def group(list, size):
i = 0
while list[i:]:
yield list[i:i + size]
i += size
def uniquify(array):
return list(set(array))
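# Scans `args` for `arg` (or for an "arg=value" pair / "arg value" form when
# has_data is True), removing the matched elements from `args` in place.
# Returns True/False for plain flags, the associated value for data flags,
# or None when the flag is present but its value is missing.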
def test_for_flag(args, arg, has_data=False):
if arg not in args:
if not has_data:
return False
for argx in args:
if argx.startswith(arg + "="):
args.remove(argx)
return argx[len(arg) + 1:]
return None
if not has_data:
args.remove(arg)
return True
ix = args.index(arg)
if ix == len(args) - 1:
return None
data = args[ix + 1]
args.pop(ix + 1)
args.pop(ix)
return data
S3_BUCKET = "kjkpub"
g_s3conn = None
def s3connection():
global g_s3conn
if g_s3conn is None:
import_boto()
g_s3conn = S3Connection(awscreds.access, awscreds.secret, True)
return g_s3conn
def s3PubBucket():
return s3connection().get_bucket(S3_BUCKET)
def ul_cb(sofar, total):
print("So far: %d, total: %d" % (sofar , total))
def s3UploadFilePublic(local_file_name, remote_file_name):
log("s3 upload '%s' as '%s'" % (local_file_name, remote_file_name))
bucket = s3PubBucket()
k = Key(bucket)
k.key = remote_file_name
k.set_contents_from_filename(local_file_name, cb=ul_cb)
k.make_public()
def s3UploadDataPublic(data, remote_file_name):
log("s3 upload data as '%s'" % remote_file_name)
bucket = s3PubBucket()
k = Key(bucket)
k.key = remote_file_name
k.set_contents_from_string(data)
k.make_public()
def s3List(s3dir):
bucket = s3PubBucket()
return bucket_lister(bucket, s3dir)
def s3Delete(s3Name):
log("s3 delete '%s'" % s3Name)
bucket = s3PubBucket()
k = Key(bucket, s3Name)
k.delete()
def ensure_s3_doesnt_exist(remote_file_path):
bucket = s3PubBucket()
if not bucket.get_key(remote_file_path):
return
print("'%s' already exists on s3" % remote_file_path)
sys.exit(1)
def ensure_path_exists(path):
if not os.path.exists(path):
print("path '%s' doesn't exist" % path)
sys.exit(1)
def verify_started_in_right_directory():
if os.path.exists(os.path.join("scripts", "build-release.py")): return
if os.path.exists(os.path.join(os.getcwd(), "scripts", "build-release.py")): return
print("This script must be run from top of the source tree")
sys.exit(1)
# like cmdrun() but throws an exception on failure
def run_cmd_throw(*args):
cmd = " ".join(args)
print("\nrun_cmd_throw: '%s'" % cmd)
cmdproc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
res = cmdproc.communicate()
errcode = cmdproc.returncode
if 0 != errcode:
print("Failed with error code %d" % errcode)
print("Stdout:")
print(res[0])
print("Stderr:")
print(res[1])
raise Exception("'%s' failed with error code %d" % (cmd, errcode))
return (res[0], res[1])
# Parse output of svn info and return revision number indicated by
# "Last Changed Rev" field or, if that doesn't exist, by "Revision" field
def parse_svninfo_out(txt):
ver = re.findall(r'(?m)^Last Changed Rev: (\d+)', txt)
if ver: return ver[0]
ver = re.findall(r'(?m)^Revision: (\d+)', txt)
if ver: return ver[0]
raise Exception("parse_svn_info_out() failed to parse '%s'" % txt)
# version line is in the format:
# #define CURR_VERSION 1.1
def extract_sumatra_version(file_path):
content = open(file_path).read()
ver = re.findall(r'CURR_VERSION (\d+(?:\.\d+)*)', content)[0]
return ver
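# e.g. a file containing "#define CURR_VERSION 1.9" yields "1.9"; the regex
# above also accepts dotted versions such as "2.0.1"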
def zip_file(dst_zip_file, src, src_name=None, compress=True, append=False):
mode = "w"
if append: mode = "a"
if compress:
zf = zipfile.ZipFile(dst_zip_file, mode, zipfile.ZIP_DEFLATED)
else:
zf = zipfile.ZipFile(dst_zip_file, mode, zipfile.ZIP_STORED)
if src_name is None:
src_name = os.path.basename(src)
zf.write(src, src_name)
zf.close()
# build the .zip with installer data, will be included as part of
# Installer.exe resources
def build_installer_data(dir):
zf = zipfile.ZipFile(os.path.join(dir, "InstallerData.zip"), "w", zipfile.ZIP_BZIP2)
exe = os.path.join(dir, "SumatraPDF-no-MuPDF.exe")
zf.write(exe, "SumatraPDF.exe")
for f in ["libmupdf.dll", "npPdfViewer.dll", "PdfFilter.dll", "PdfPreview.dll", "uninstall.exe"]:
zf.write(os.path.join(dir, f), f)
font_path = os.path.join("mupdf", "fonts", "droid", "DroidSansFallback.ttf")
zf.write(font_path, "DroidSansFallback.ttf")
zf.close()
| kzmkv/SumatraPDF | scripts/util.py | Python | gpl-3.0 | 5,028 |
# -*- coding: utf-8 -*-
from .speedcurve import SpeedCurve
__author__ = 'Matt Chung'
__copyright__ = 'Copyright 2014 Matt Chung'
__license__ = 'MIT'
__title__ = 'speedcurve'
__version__ = '0.1.0'
__all__ = (
'SpeedCurve',
)
| itsmemattchung/speedcurve.py | speedcurve/__init__.py | Python | mit | 230 |
#!/usr/bin/env python
#
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# This script replaces calls to importScripts with script sources
# in input script file and dumps result into output script file.
from cStringIO import StringIO
import jsmin
import os.path
import re
import sys
def main(argv):
    if len(argv) < 4:
print('usage: %s input_file imports_dir output_file' % argv[0])
return 1
input_file_name = argv[1]
imports_dir = argv[2]
output_file_name = argv[3]
input_file = open(input_file_name, 'r')
input_script = input_file.read()
input_file.close()
def replace(match):
import_file_name = match.group(1)
full_path = os.path.join(imports_dir, import_file_name)
if not os.access(full_path, os.F_OK):
raise Exception('File %s referenced in %s not found on any source paths, '
'check source tree for consistency' %
(import_file_name, input_file_name))
import_file = open(full_path, 'r')
import_script = import_file.read()
import_file.close()
return import_script
output_script = re.sub(r'importScripts\([\'"]([^\'"]+)[\'"]\)', replace, input_script)
output_file = open(output_file_name, 'w')
output_file.write(jsmin.jsmin(output_script))
output_file.close()
# Touch output file directory to make sure that Xcode will copy
# modified resource files.
if sys.platform == 'darwin':
output_dir_name = os.path.dirname(output_file_name)
os.utime(output_dir_name, None)
if __name__ == '__main__':
sys.exit(main(sys.argv))
| mogoweb/webkit_for_android5.1 | webkit/Source/WebKit/chromium/scripts/inline_js_imports.py | Python | apache-2.0 | 3,145 |
import os
import sys
bdLibPath=os.path.abspath(sys.argv[0]+"..")
if not bdLibPath in sys.path: sys.path.append(bdLibPath)
from _lib import *
import unittest
class SmokeTests(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_001_GoogleSearch(self):
LaunchBrowser("chrome", "www.google.com")
type(GoogleMap.google_search_input, "Telerik academy")
wait(GoogleMap.google_telerik_link, 10)
assert exists(GoogleMap.google_telerik_link)
def test_002_DragAndDrop(self):
LaunchBrowser("chrome", "http://www.dhtmlgoodies.com/scripts/drag-drop-custom/demo-drag-drop-3.html")
dragDrop(CapitalsMap.oslo, CapitalsMap.norway)
dragDrop(CapitalsMap.stockholm, CapitalsMap.sweden)
dragDrop(CapitalsMap.washington, CapitalsMap.us)
dragDrop(CapitalsMap.copenhagen, CapitalsMap.denmark)
dragDrop(CapitalsMap.seoul, CapitalsMap.southKorea)
dragDrop(CapitalsMap.rome, CapitalsMap.italy)
dragDrop(CapitalsMap.madrid, CapitalsMap.spain)
assert exists(CapitalsMap.correctRome)
assert exists(CapitalsMap.correctMadrid)
assert exists(CapitalsMap.correctOslo)
assert exists(CapitalsMap.correctCopenhagen)
assert exists(CapitalsMap.correctSeoul)
assert exists(CapitalsMap.correctStockholm)
assert exists(CapitalsMap.correctWashington)
def test_003_CalculatorFunctionsCorrectly(self):
LaunchCalculator();
click(CalculatorMap.two)
click(CalculatorMap.subtract)
click(CalculatorMap.four)
click(CalculatorMap.equals)
assert exists(CalculatorMap.subtractionResult)
click(CalculatorMap.multiply)
click(CalculatorMap.three)
click(CalculatorMap.equals)
assert exists(CalculatorMap.multiplyResult)
click(CalculatorMap.add)
click(CalculatorMap.one)
click(CalculatorMap.one)
click(CalculatorMap.equals)
        assert exists(CalculatorMap.additionResult)
click(CalculatorMap.divide)
click(CalculatorMap.two)
click(CalculatorMap.equals)
        assert exists(CalculatorMap.divisionResult)
click(CalculatorMap.divide)
click(CalculatorMap.zero)
assert exists(CalculatorMap.divisionByZeroMessage)
class Tests(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_003_CalculatorFunctionsCorrectly(self):
LaunchCalculator();
click(CalculatorMap.two)
click(CalculatorMap.subtract)
click(CalculatorMap.four)
click(CalculatorMap.equals)
assert exists(CalculatorMap.subtractionResult)
click(CalculatorMap.multiply)
click(CalculatorMap.three)
click(CalculatorMap.equals)
assert exists(CalculatorMap.multiplyResult)
click(CalculatorMap.add)
click(CalculatorMap.one)
click(CalculatorMap.zero)
click(CalculatorMap.equals)
        assert exists(CalculatorMap.additionResult)
click(CalculatorMap.divide)
click(CalculatorMap.two)
click(CalculatorMap.equals)
assert exists(CalculatorMap.divisionResult)
click(CalculatorMap.divide)
click(CalculatorMap.zero)
click(CalculatorMap.equals)
assert exists(CalculatorMap.divisionByZeroMessage)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(Tests)
outfile = open("report.html", "w")
runner = HTMLTestRunner.HTMLTestRunner(stream=outfile, title='SmokeTests Report')
runner.run(suite)
outfile.close()
| YoTsenkov/TelerikSoftwareAcademyHomeworks | QA/Sikuli/sikuli_tests/smoke_tests.sikuli/smoke_tests.py | Python | mit | 3,679 |
from event import X10Event
from flask import Flask
from flask import g
from flask import jsonify as j
app = Flask(__name__)
daemon = None
def run_api(d, port=5000):
global daemon
daemon = d
app.run(host='0.0.0.0', port=port)
@app.before_request
def before_request():
g._daemon = daemon
@app.route('/on/<house>/<int:unit>', methods=['GET', 'POST'])
def on(house, unit=X10Event.UNIT_ALL):
result = {'success': g._daemon.on(house, unit)}
return j(**result)
@app.route('/off/<house>/<int:unit>', methods=['GET', 'POST'])
def off(house, unit=X10Event.UNIT_ALL):
result = {'success': g._daemon.off(house, unit)}
return j(**result)
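# Example requests (sketch; host and port depend on run_api's arguments):
#   GET http://localhost:5000/on/A/1   -> {"success": true} on success
#   GET http://localhost:5000/off/A/1  -> {"success": false} on failure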
| umbc-hackafe/x10-controller | api.py | Python | unlicense | 662 |
# -*- coding: utf-8 -*-
# Natural Language Toolkit: Interface to the Stanford NER-tagger
#
# Copyright (C) 2001-2015 NLTK Project
# Author: Nitin Madnani <[email protected]>
# Rami Al-Rfou' <[email protected]>
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
"""
A module for interfacing with the Stanford taggers.
"""
import os
import tempfile
from subprocess import PIPE
import warnings
from nltk.internals import find_file, find_jar, config_java, java, _java_options
from nltk.tag.api import TaggerI
from nltk import compat
_stanford_url = 'http://nlp.stanford.edu/software'
class StanfordTagger(TaggerI):
"""
An interface to Stanford taggers. Subclasses must define:
- ``_cmd`` property: A property that returns the command that will be
executed.
- ``_SEPARATOR``: Class constant that represents that character that
is used to separate the tokens from their tags.
- ``_JAR`` file: Class constant that represents the jar file name.
"""
_SEPARATOR = ''
_JAR = ''
def __init__(self, path_to_model, path_to_jar=None, encoding='utf8', verbose=False, java_options='-mx1000m'):
if not self._JAR:
warnings.warn('The StanfordTagger class is not meant to be '
'instantiated directly. Did you mean POS- or NERTagger?')
self._stanford_jar = find_jar(
self._JAR, path_to_jar,
searchpath=(), url=_stanford_url,
verbose=verbose)
self._stanford_model = find_file(path_to_model,
env_vars=('STANFORD_MODELS',), verbose=verbose)
self._encoding = encoding
self.java_options = java_options
@property
def _cmd(self):
raise NotImplementedError
def tag(self, tokens):
return list(self.tag_sents([tokens]))
def tag_sents(self, sentences):
encoding = self._encoding
default_options = ' '.join(_java_options)
config_java(options=self.java_options, verbose=False)
# Create a temporary input file
_input_fh, self._input_file_path = tempfile.mkstemp(text=True)
self._cmd.extend(['-encoding', encoding])
# Write the actual sentences to the temporary input file
_input_fh = os.fdopen(_input_fh, 'wb')
_input = '\n'.join((' '.join(x) for x in sentences))
if isinstance(_input, compat.text_type) and encoding:
_input = _input.encode(encoding)
_input_fh.write(_input)
_input_fh.close()
# Run the tagger and get the output
stanpos_output, _stderr = java(self._cmd,classpath=self._stanford_jar,
stdout=PIPE, stderr=PIPE)
stanpos_output = stanpos_output.decode(encoding)
# Delete the temporary file
os.unlink(self._input_file_path)
# Return java configurations to their default values
config_java(options=default_options, verbose=False)
return self.parse_output(stanpos_output)
def parse_output(self, text):
# Output the tagged sentences
tagged_sentences = []
for tagged_sentence in text.strip().split("\n"):
sentence = []
for tagged_word in tagged_sentence.strip().split():
word_tags = tagged_word.strip().split(self._SEPARATOR)
sentence.append((''.join(word_tags[:-1]), word_tags[-1]))
tagged_sentences.append(sentence)
return tagged_sentences
class POSTagger(StanfordTagger):
"""
A class for pos tagging with Stanford Tagger. The input is the paths to:
- a model trained on training data
- (optionally) the path to the stanford tagger jar file. If not specified here,
      then this jar file must be specified in the CLASSPATH environment variable.
- (optionally) the encoding of the training data (default: ASCII)
Example:
>>> from nltk.tag.stanford import POSTagger
>>> st = POSTagger('/usr/share/stanford-postagger/models/english-bidirectional-distsim.tagger',
... '/usr/share/stanford-postagger/stanford-postagger.jar') # doctest: +SKIP
>>> st.tag('What is the airspeed of an unladen swallow ?'.split()) # doctest: +SKIP
[('What', 'WP'), ('is', 'VBZ'), ('the', 'DT'), ('airspeed', 'NN'), ('of', 'IN'), ('an', 'DT'), ('unladen', 'JJ'), ('swallow', 'VB'), ('?', '.')]
"""
_SEPARATOR = '_'
_JAR = 'stanford-postagger.jar'
def __init__(self, *args, **kwargs):
super(POSTagger, self).__init__(*args, **kwargs)
@property
def _cmd(self):
return ['edu.stanford.nlp.tagger.maxent.MaxentTagger',
'-model', self._stanford_model, '-textFile',
self._input_file_path, '-tokenize', 'false','-outputFormatOptions', 'keepEmptySentences']
class NERTagger(StanfordTagger):
"""
A class for ner tagging with Stanford Tagger. The input is the paths to:
- a model trained on training data
- (optionally) the path to the stanford tagger jar file. If not specified here,
then this jar file must be specified in the CLASSPATH envinroment variable.
- (optionally) the encoding of the training data (default: ASCII)
Example:
>>> from nltk.tag.stanford import NERTagger
>>> st = NERTagger('/usr/share/stanford-ner/classifiers/all.3class.distsim.crf.ser.gz',
... '/usr/share/stanford-ner/stanford-ner.jar') # doctest: +SKIP
>>> st.tag('Rami Eid is studying at Stony Brook University in NY'.split()) # doctest: +SKIP
[('Rami', 'PERSON'), ('Eid', 'PERSON'), ('is', 'O'), ('studying', 'O'),
('at', 'O'), ('Stony', 'ORGANIZATION'), ('Brook', 'ORGANIZATION'),
('University', 'ORGANIZATION'), ('in', 'O'), ('NY', 'LOCATION')]
"""
_SEPARATOR = '/'
_JAR = 'stanford-ner.jar'
_FORMAT = 'slashTags'
def __init__(self, *args, **kwargs):
super(NERTagger, self).__init__(*args, **kwargs)
@property
def _cmd(self):
# Adding -tokenizerFactory edu.stanford.nlp.process.WhitespaceTokenizer -tokenizerOptions tokenizeNLs=false for not using stanford Tokenizer
return ['edu.stanford.nlp.ie.crf.CRFClassifier',
'-loadClassifier', self._stanford_model, '-textFile',
self._input_file_path, '-outputFormat', self._FORMAT, '-tokenizerFactory', 'edu.stanford.nlp.process.WhitespaceTokenizer', '-tokenizerOptions','\"tokenizeNLs=false\"']
def parse_output(self, text):
if self._FORMAT == 'slashTags':
return super(NERTagger, self).parse_output(text)
raise NotImplementedError
if __name__ == "__main__":
import doctest
doctest.testmod(optionflags=doctest.NORMALIZE_WHITESPACE)
| devs4v/devs4v-information-retrieval15 | project/venv/lib/python2.7/site-packages/nltk/tag/stanford.py | Python | mit | 6,820 |
def as_bool(value):
'''
Smart cast value to bool
'''
if isinstance(value, bool):
return value
value = value.strip().lower()
if value in ('y', 'yes', 'on', 't', 'true', '1'):
return True
if value in ('n', 'no', 'off', 'f', 'false', '0'):
return False
raise ValueError('Unrecognised value for bool: %r' % value)
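# e.g. as_bool(' Yes ') -> True, as_bool('0') -> False, and as_bool('maybe')
# raises ValueError (all derived from the accepted values above)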
def as_list(value):
'''
    Smart cast value to list by splitting the input on ",".
'''
if isinstance(value, list):
return value
return [
x.strip()
for x in value.split(',')
if x.strip()
]
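# e.g. as_list('a, b, ,c') -> ['a', 'b', 'c']; empty segments are dropped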
def as_tuple(value):
'''
    Smart cast value to tuple by splitting the input on ",".
'''
if isinstance(value, tuple):
return value
return tuple(as_list(value))
| funkybob/django-classy-settings | cbs/utils.py | Python | bsd-2-clause | 796 |
"""
This part of code is the reinforcement learning brain, which is a brain of the agent.
All decisions are made in here.
Policy Gradient, Reinforcement Learning.
View more on my tutorial page: https://morvanzhou.github.io/tutorials/
Using:
Tensorflow: 1.0
gym: 0.8.0
"""
import numpy as np
import tensorflow as tf
# reproducible
np.random.seed(1)
tf.set_random_seed(1)
class PolicyGradient:
def __init__(
self,
n_actions,
n_features,
learning_rate=0.01,
reward_decay=0.95,
output_graph=False,
):
self.n_actions = n_actions
self.n_features = n_features
self.lr = learning_rate
self.gamma = reward_decay
self.ep_obs, self.ep_as, self.ep_rs = [], [], []
self._build_net()
self.sess = tf.Session()
if output_graph:
# $ tensorboard --logdir=logs
# http://0.0.0.0:6006/
# tf.train.SummaryWriter soon be deprecated, use following
tf.summary.FileWriter("logs/", self.sess.graph)
self.sess.run(tf.global_variables_initializer())
def _build_net(self):
with tf.name_scope('inputs'):
self.tf_obs = tf.placeholder(tf.float32, [None, self.n_features], name="observations")
self.tf_acts = tf.placeholder(tf.int32, [None, ], name="actions_num")
self.tf_vt = tf.placeholder(tf.float32, [None, ], name="actions_value")
# fc1
layer = tf.layers.dense(
inputs=self.tf_obs,
units=10,
activation=tf.nn.tanh, # tanh activation
kernel_initializer=tf.random_normal_initializer(mean=0, stddev=0.3),
bias_initializer=tf.constant_initializer(0.1),
name='fc1'
)
# fc2
all_act = tf.layers.dense(
inputs=layer,
units=self.n_actions,
activation=None,
kernel_initializer=tf.random_normal_initializer(mean=0, stddev=0.3),
bias_initializer=tf.constant_initializer(0.1),
name='fc2'
)
self.all_act_prob = tf.nn.softmax(all_act, name='act_prob') # use softmax to convert to probability
with tf.name_scope('loss'):
neg_log_prob = tf.reduce_sum(-tf.log(self.all_act_prob)*tf.one_hot(self.tf_acts, self.n_actions), axis=1)
loss = tf.reduce_mean(neg_log_prob * self.tf_vt) # reward guided loss
with tf.name_scope('train'):
self.train_op = tf.train.AdamOptimizer(self.lr).minimize(loss)
def choose_action(self, observation):
prob_weights = self.sess.run(self.all_act_prob, feed_dict={self.tf_obs: observation[np.newaxis, :]})
action = np.random.choice(range(prob_weights.shape[1]), p=prob_weights.ravel()) # select action w.r.t the actions prob
return action
def store_transition(self, s, a, r):
self.ep_obs.append(s)
self.ep_as.append(a)
self.ep_rs.append(r)
def learn(self):
# discount and normalize episode reward
discounted_ep_rs_norm = self._discount_and_norm_rewards()
# train on episode
self.sess.run(self.train_op, feed_dict={
self.tf_obs: np.vstack(self.ep_obs), # shape=[None, n_obs]
self.tf_acts: np.array(self.ep_as), # shape=[None, ]
self.tf_vt: discounted_ep_rs_norm, # shape=[None, ]
})
self.ep_obs, self.ep_as, self.ep_rs = [], [], [] # empty episode data
return discounted_ep_rs_norm
def _discount_and_norm_rewards(self):
# discount episode rewards
discounted_ep_rs = np.zeros_like(self.ep_rs)
running_add = 0
for t in reversed(range(0, len(self.ep_rs))):
running_add = running_add * self.gamma + self.ep_rs[t]
discounted_ep_rs[t] = running_add
# normalize episode rewards
discounted_ep_rs -= np.mean(discounted_ep_rs)
discounted_ep_rs /= np.std(discounted_ep_rs)
return discounted_ep_rs
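# Minimal usage sketch (assumes a Gym-style environment; `env` and the episode
# loop below are illustrative, not part of this module):
#
#   RL = PolicyGradient(n_actions=2, n_features=4, learning_rate=0.02)
#   observation = env.reset()
#   while True:
#       action = RL.choose_action(observation)
#       observation_, reward, done, info = env.step(action)
#       RL.store_transition(observation, action, reward)
#       if done:
#           vt = RL.learn()   # train on the finished episode, buffers reset
#           break
#       observation = observation_
#
# Discounting example: with gamma=0.95 and ep_rs=[0.0, 0.0, 1.0], the reversed
# pass in _discount_and_norm_rewards yields [0.9025, 0.95, 1.0] before the
# mean/std normalization.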
| DarainS/texas-holdem-tools | ai/tfpreflop/RL_brain.py | Python | mit | 4,040 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the swig wrapper tf_optimizer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.core.framework import attr_value_pb2
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import meta_graph
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.grappler import item as gitem
from tensorflow.python.grappler import tf_optimizer
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
class PyWrapOptimizeGraphTest(test.TestCase):
@test_util.run_deprecated_v1
def testBasic(self):
"""Make sure arguments can be passed correctly."""
a = constant_op.constant(10, name='a')
b = constant_op.constant(20, name='b')
c = math_ops.add_n([a, b], name='c')
d = math_ops.add_n([b, c], name='d')
train_op = ops.get_collection_ref(ops.GraphKeys.TRAIN_OP)
# Being a train_op will make 'd' to be added as a fetch node.
train_op.append(d)
mg = meta_graph.create_meta_graph_def(graph=ops.get_default_graph())
config = config_pb2.ConfigProto()
rewriter_config = config.graph_options.rewrite_options
rewriter_config.optimizers.append('constfold')
rewriter_config.min_graph_nodes = -1
graph = tf_optimizer.OptimizeGraph(config, mg)
self.assertEqual(len(graph.node), 1)
self.assertItemsEqual([node.name for node in graph.node], ['d'])
@test_util.run_v1_only('b/120545219')
def testKeepNodes(self):
g = ops.Graph()
with g.as_default():
a1 = variables.VariableV1(
1.0) # Must be preserved since it's in the collection 'variables'.
a2 = constant_op.constant(0, shape=[50, 50], name='keep')
ops.add_to_collection('a2', a2) # Explicitly add to collection.
with g._attr_scope(
{'_grappler_do_not_remove': attr_value_pb2.AttrValue(b=True)}):
a3 = constant_op.constant(0, name='keep2')
b = constant_op.constant(1, shape=[100, 10])
c = constant_op.constant(0, shape=[10, 30])
d = math_ops.matmul(b, c)
ops.add_to_collection('train_op', d) # d is the fetch node.
# Optimize the graph.
mg = meta_graph.create_meta_graph_def(graph=g)
config = config_pb2.ConfigProto()
rewriter_config = config.graph_options.rewrite_options
rewriter_config.min_graph_nodes = -1
optimized_graph = tf_optimizer.OptimizeGraph(config, mg)
# Check that the nodes referenced in various collections have been preserved
optimized_graph_nodes = [node.name for node in optimized_graph.node]
expected_nodes = [
d.op.name, a1.op.name, a2.op.name, a3.op.name, 'Variable/initial_value',
'Variable/Assign'
]
self.assertEqual(len(optimized_graph_nodes), len(expected_nodes))
self.assertAllInSet(optimized_graph_nodes, expected_nodes)
@test_util.run_v1_only('b/120545219')
def testLoops(self):
g = ops.Graph()
with g.as_default():
def _Cond(_, counter):
return counter < end
def _Body(buf, counter):
buf = array_ops.concat([buf, [counter]], 0)
counter += 1
return [buf, counter]
start = array_ops.placeholder(shape=[], dtype=dtypes.int32)
end = array_ops.placeholder(shape=[], dtype=dtypes.int32)
init_buf = array_ops.zeros(shape=[0], dtype=dtypes.int32)
loop_vars = [init_buf, start]
shape_inv = [
tensor_shape.TensorShape([None]),
tensor_shape.TensorShape([])
]
buf, _ = control_flow_ops.while_loop(_Cond, _Body, loop_vars, shape_inv)
f = -array_ops.ones_like(buf, optimize=False)
buf_shape = array_ops.shape(buf)
f_shape = array_ops.shape(f)
ops.add_to_collection('train_op', buf_shape)
ops.add_to_collection('train_op', f_shape)
# Optimize the graph.
mg = meta_graph.create_meta_graph_def(graph=g)
config = config_pb2.ConfigProto()
rewriter_config = config.graph_options.rewrite_options
rewriter_config.min_graph_nodes = -1
optimized_graph = tf_optimizer.OptimizeGraph(config, mg)
mg.graph_def.CopyFrom(optimized_graph)
# Check that the nodes referenced in various collections have been preserved
item = gitem.Item(mg)
props = item.GetOpProperties()
buf_prop = props[buf.op.name]
f_prop = props[f.op.name]
self.assertEqual(buf_prop, f_prop)
if __name__ == '__main__':
test.main()
| adit-chandra/tensorflow | tensorflow/python/grappler/tf_optimizer_test.py | Python | apache-2.0 | 5,449 |
#!/usr/local/bin/python
'''
Date July 4, 2012
Author: Justin Jessup
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Disclaimer:
All software provided as is. All software covered under the GPL license and free for public redistribution.
If unintended consequences occur due to utilization of this software, user bears the resultant outcome.
The rule of thumb is to test and validate properly all solutions prior to implementation within a production environment.
All solutions should be subject to public scrutiny, and peer review.
'''
import requests, json, csv, re, datetime
from multiprocessing import Pool
def multi_process(jobname, *args):
    po = Pool()
    po.apply_async(jobname, args)
    # close() and join() must be invoked; the original referenced the bound
    # methods without calling them, so the pool was never closed or awaited
    po.close()
    po.join()
def search_watchlist(watchlist_search):
for line in open(watchlist_search, 'r'):
yield line.strip('\n')
def search_watchlist_url(watchlist_search_url):
for line in open(watchlist_search_url, 'r'):
yield line.strip('\n')
def cull_reddit(csv_filename):
f = csv.writer(open(csv_filename, "wb+"))
f.writerow(["Created","Origin FQDN","Title","URL","Author"])
SEARCH_BASE = 'http://www.reddit.com/.json'
request_url = SEARCH_BASE
request_urlGet = requests.get(request_url)
if request_urlGet.status_code == 200:
if 'json' in (request_urlGet.headers['content-type']):
data = json.loads(request_urlGet.text)
data_list = list(data['data']['children'])
for search_string in search_watchlist(watchlist_search):
for item in data_list:
ipsrch = re.compile(r'\b%s\b' %search_string, re.IGNORECASE)
if ipsrch.findall(item['data']['title']):
created_Time = str(item['data']['created']).split('.')[0]
convert_createdTime = (datetime.datetime.fromtimestamp(int(created_Time)).strftime('%H:%M:%S %Y-%m-%d'))
f.writerow([convert_createdTime,item['data']['domain'].encode('ascii','ignore'),\
item['data']['title'].encode('ascii','ignore'),item['data']['url'].encode('ascii','ignore'),\
item['data']['author'].encode('ascii','ignore')])
def cull_subreddit(csv_filename):
f = csv.writer(open(csv_filename, "wb+"))
f.writerow(["Created","Origin FQDN","Title","URL","Author"])
for search_string_url in search_watchlist_url(watchlist_search_url):
SEARCH_BASE = 'http://www.reddit.com/r/' + search_string_url + '.json'
request_url = SEARCH_BASE
request_urlGet = requests.get(request_url)
if request_urlGet.status_code == 200:
if 'json' in (request_urlGet.headers['content-type']):
data = json.loads(request_urlGet.text)
data_list = list(data['data']['children'])
for search_string in search_watchlist(watchlist_search):
for item in data_list:
ipsrch = re.compile(r'\b%s\b' %search_string, re.IGNORECASE)
if ipsrch.findall(item['data']['title']):
created_Time = str(item['data']['created']).split('.')[0]
convert_createdTime = (datetime.datetime.fromtimestamp(int(created_Time)).strftime('%H:%M:%S %Y-%m-%d'))
f.writerow([convert_createdTime,item['data']['domain'].encode('ascii','ignore'),\
item['data']['title'].encode('ascii','ignore'),item['data']['url'].encode('ascii','ignore'),\
item['data']['author'].encode('ascii','ignore')])
# Global Variable Assignment
watchlist_search = "watchlist/reddit_watchlist.txt"
watchlist_search_url = "watchlist/reddit_watchlist_url.txt"
date_time_one = (str(datetime.datetime.now()).split(' ')[0])
date_time_two = (str(datetime.datetime.now())).split(' ')[1].replace(':','-').split('.')[0]
csv_file_one = "culled_product/search_one/Reddit-Global-WatchList" + '-' + date_time_one + '-' + date_time_two + '.csv'
csv_file_two = "culled_product/search_two/SubReddit-Global-WatchList" + '-' + date_time_one + '-' + date_time_two + '.csv'
# Main Execution
if __name__ == '__main__':
    # Pass each job's function and argument through to the pool instead of
    # calling it here; the original chained assignments invoked the jobs
    # immediately, leaving nothing for multi_process to schedule.
    jobs = []
    jobs.append((cull_reddit, csv_file_one))
    jobs.append((cull_subreddit, csv_file_two))
    for jobname, jobarg in jobs:
        multi_process(jobname, jobarg)
| proactivecndosint2012/OSINTREALTIMECND | reddit/cull_reddit.py | Python | gpl-3.0 | 4,522 |
# pylint: disable=W0401,W0223
import re
from .datatype import *
from .exception import NamingError
from .util import UnicodeMixin
__all__ = (
'Setting',
'StringSetting',
'IntegerSetting',
'FloatSetting',
'BooleanSetting',
'ListSetting',
'ChoiceSetting',
)
class Setting(UnicodeMixin, DataType):
RE_NAME = re.compile(r'^[a-z](?:[a-z0-9]|[_](?![_]))*[a-z0-9]$')
def __init__(self, name, default=None, required=False):
if Setting.RE_NAME.match(name):
self.name = name
else:
raise NamingError(name)
self._value = None
self.default = self.sanitize(default)
self.required = required
self.established = False
@property
def value(self):
return self._value
@value.setter
def value(self, value):
self._value = self.sanitize(value)
self.established = True
def __unicode__(self): # pragma: no cover
return str(self.name)
def __repr__(self): # pragma: no cover
return '<%s(%s=%s)>' % (
self.__class__.__name__,
self.name,
self.value if self.established else '',
)
class StringSetting(Setting, String):
pass
class IntegerSetting(Setting, Integer):
pass
class FloatSetting(Setting, Float):
pass
class BooleanSetting(Setting, Boolean):
pass
class ListSetting(Setting, List):
def __init__(self, name, subtype, **kwargs):
List.__init__(self, subtype)
Setting.__init__(self, name, **kwargs)
class ChoiceSetting(Setting, Choice):
def __init__(self, name, choices, subtype=None, **kwargs):
Choice.__init__(self, choices, subtype=subtype)
Setting.__init__(self, name, **kwargs)
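# Usage sketch (the sanitize() semantics live in .datatype, which is not shown
# here, so the coercion behaviour below is an assumption):
#
#   port = IntegerSetting('port', default=8080)
#   port.value = 9000           # routed through sanitize(); established -> True
#   StringSetting('_bad')       # raises NamingError: '_bad' fails RE_NAME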
| jayclassless/setoptconf | setoptconf/setting.py | Python | mit | 1,755 |
""" putJSONdata.py
Copyright 2016 OSIsoft, LLC.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Call:
python putJSONdata.py rest-ufl rest-external
Parameters:
rest-ufl - The Address specified in the Data Source configuration
rest-external - A third party data source which returns JSON data when receving a get request from this URL.
Example:
python putJSONdata.py https://localhost:5460/connectordata/currency http://api.fixer.io/latest?base=USD
"""
import argparse
import getpass
import json
import sys
from functools import lru_cache
import requests
import time
# Suppress insecure HTTPS warnings, if an untrusted certificate is used by the target endpoint
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
# Process arguments
parser = argparse.ArgumentParser()
parser.description = 'POST file contents to PI Connector for UFL'
parser.add_argument('restufl', help='The UFL rest endpoint address')
parser.add_argument('restexternal', help='The external data source rest end point')
args = parser.parse_args()
@lru_cache(maxsize=1)
def password():
return getpass.getpass()
@lru_cache(maxsize=1)
def username():
return getpass.getpass('Username: ')
s = requests.session()
# To hardcode the username and password, specify them below
# To use anonymous login, use: ("", "")
s.auth = ("pi", "pi")
def getData(url):
# Being very careful when checking for failure when accessing the external site
try:
response = requests.get(url=url)
if response.status_code != requests.codes.ok:
print("The url {0} did not return the expected value back.".format(response.url))
print("Response: {0} {1}".format(response.status_code, response.reason))
sys.exit(0)
try:
return json.dumps(response.json(), indent=4, sort_keys=True)
except ValueError as e:
print(e)
sys.exit(0)
except requests.exceptions.Timeout:
# Maybe set up for a retry, or continue in a retry loop
print("Connection timed out")
sys.exit(0)
except requests.exceptions.TooManyRedirects:
# Tell the user their URL was bad and try a different one
print("Too many redirects")
sys.exit(0)
except requests.exceptions.RequestException as e:
print("There was an issue with requesting the data:")
print(e)
sys.exit(0)
data = getData(args.restexternal)
# remove verify=False if the certificate used is a trusted one
response = s.put(args.restufl, data=data, verify=False)
# If instead of using the put request, you need to use the post request
# use the function as listed below
# response = s.post(args.resturl + '/post', data=data, verify=False)
if response.status_code != 200:
print("Sending data to the UFL connect failed due to error {0} {1}".format(response.status_code, response.reason))
else:
print('The data was sent successfully over https.')
print('Check the PI Connectors event logs for any further information.')
print("SF Bike Data sent at :")
localtime = time.asctime( time.localtime(time.time()) )
print(localtime)
time.sleep(45)
| danielElopez/PI-Connector-for-UFL-Samples | COMPLETE_SOLUTIONS/North American General Bike Feed/putJSONdata_SF_Bikes_service.py | Python | apache-2.0 | 3,727 |
#! /usr/bin/env python
from openturns import *
TESTPREAMBLE()
RandomGenerator().SetSeed(0)
try :
# We create a distribution
meanPoint = NumericalPoint(1)
meanPoint[0] = 1.0
sigma = NumericalPoint(1)
sigma[0] = 1.0
R = CorrelationMatrix(1)
distribution = Normal(meanPoint, sigma, R)
ref_distribution = distribution
print "distribution = " , repr(ref_distribution)
# We create a distribution-based RandomVector
vect = RandomVector(UsualRandomVector(distribution))
print "vect=" , vect
# Check standard methods of class RandomVector
print "vect dimension=" , vect.getDimension()
print "vect realization (first )=" , repr(vect.getRealization())
print "vect realization (second)=" , repr(vect.getRealization())
print "vect realization (third )=" , repr(vect.getRealization())
print "vect sample =" , repr(vect.getSample(5))
except :
import sys
print "t_RandomVector_distribution.py", sys.exc_type, sys.exc_value
| dbarbier/privot | python/test/t_RandomVector_distribution.py | Python | lgpl-3.0 | 995 |
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
'''
Intuition results analyzer
--------------------------
Wraps session results with convenient analyse methods
:copyright (c) 2014 Xavier Bruhiere
:license: Apache 2.0, see LICENSE for more details.
'''
import pytz
import pandas as pd
import numpy as np
import dna.logging
import dna.debug
import dna.utils
from zipline.data.benchmarks import get_benchmark_returns
from intuition.finance import qstk_get_sharpe_ratio
log = dna.logging.logger(__name__)
class Analyze():
    ''' Handle backtest results and performance measurements '''
def __init__(self, params, results, metrics, benchmark='^GSPC'):
# NOTE Temporary
# Simulation parameters
self.sim_params = params
        # Final risk measurements as returned by the backtester
self.results = results
# Simulation rolling performance
self.metrics = metrics
# Market where we traded
self.benchmark = benchmark
def build_report(self, timestamp='one_month', show=False):
# Get daily, cumulative and not, returns of portfolio and benchmark
# NOTE Temporary fix before intuition would be able to get benchmark
# data on live trading
try:
bm_sym = self.benchmark
returns_df = self.get_returns(benchmark=bm_sym)
skip = False
        except Exception:
log.warn('unable to get benchmark data on live trading for now')
skip = True
orders = 0
for order in self.results.orders:
orders += len(order)
final_value = self.results.portfolio_value[-1]
report = {
'portfolio': final_value,
'gain': final_value - self.sim_params.capital_base,
'orders': orders,
'pnl_mean': self.results.pnl.mean(),
'pnl_deviation': self.results.pnl.std(),
}
if not skip:
report['portfolio_perfs'] = returns_df['algo_c_return'][-1] * 100.0
report['benchmark_perfs'] = \
returns_df['benchmark_c_return'][-1] * 100.0
perfs = self.overall_metrics(timestamp)
for k, v in perfs.iteritems():
report[k] = v
# Float values for humans
for key, value in report.iteritems():
report[key] = dna.utils.truncate(value, 3)
log.info('generated report', report=report)
if show:
print
print(dna.debug.emphasis(report, align=True))
print
return report
def _to_perf_array(self, timestamp, key, length):
return np.array([self.metrics[timestamp][i][key] for i in length])
def rolling_performances(self, timestamp='one_month'):
''' Filters self.perfs '''
# TODO Study the impact of month choice
# TODO Check timestamp in an enumeration
# TODO Implement other benchmarks for perf computation
# (zipline issue, maybe expected)
if self.metrics:
perfs = {}
length = range(len(self.metrics[timestamp]))
index = self._get_index(self.metrics[timestamp])
perf_keys = self.metrics[timestamp][0].keys()
perf_keys.pop(perf_keys.index('period_label'))
perfs['period'] = np.array(
[pd.datetime.date(date) for date in index])
for key in perf_keys:
perfs[key] = self._to_perf_array(timestamp, key, length)
else:
# TODO Get it from DB if it exists
raise NotImplementedError()
return pd.DataFrame(perfs, index=index)
def overall_metrics(self, timestamp='one_month', metrics=None):
'''
Use zipline results to compute some performance indicators
'''
perfs = dict()
# If no rolling perfs provided, computes it
if metrics is None:
metrics = self.rolling_performances(timestamp=timestamp)
riskfree = np.mean(metrics['treasury_period_return'])
perfs['sharpe'] = qstk_get_sharpe_ratio(
metrics['algorithm_period_return'].values, risk_free=riskfree)
perfs['algorithm_period_return'] = (
((metrics['algorithm_period_return'] + 1).cumprod()) - 1)[-1]
perfs['max_drawdown'] = max(metrics['max_drawdown'])
perfs['algo_volatility'] = np.mean(metrics['algo_volatility'])
perfs['beta'] = np.mean(metrics['beta'])
perfs['alpha'] = np.mean(metrics['alpha'])
perfs['benchmark_period_return'] = (
((metrics['benchmark_period_return'] + 1).cumprod()) - 1)[-1]
return perfs
def get_returns(self, benchmark=''):
returns = {}
if benchmark:
try:
benchmark_data = (
get_benchmark_returns(benchmark,
self.results.index[0],
self.results.index[-1]))
except Exception as e:
raise KeyError(e)
else:
#TODO Automatic detection given exchange market (on command line) ?
raise NotImplementedError()
# NOTE Could be more efficient. But len(benchmark_data.date) !=
# len(self.results.returns.index). Maybe because of different markets
dates = pd.DatetimeIndex([d.date for d in benchmark_data])
returns['benchmark_return'] = pd.Series(
[d.returns for d in benchmark_data], index=dates)
returns['benchmark_c_return'] = (
(returns['benchmark_return'] + 1).cumprod()) - 1
returns['algo_return'] = pd.Series(
self.results.returns.values, index=dates)
returns['algo_c_return'] = pd.Series(
((self.results.returns.values + 1).cumprod()) - 1, index=dates)
df = pd.DataFrame(returns, index=dates)
if benchmark is None:
df = df.drop(['benchmark_return', 'benchmark_c_return'], axis=1)
return df
def _get_index(self, perfs):
# NOTE No frequency infos or just period number ?
start = pytz.utc.localize(pd.datetime.strptime(
perfs[0]['period_label'] + '-01', '%Y-%m-%d'))
end = pytz.utc.localize(pd.datetime.strptime(
perfs[-1]['period_label'] + '-01', '%Y-%m-%d'))
return pd.date_range(start - pd.datetools.BDay(10),
end,
freq=pd.datetools.MonthBegin())
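# Usage sketch (`params`, `results` and `metrics` come from a zipline backtest
# run by intuition elsewhere; the objects below are placeholders):
#
#   analyzer = Analyze(params, results, metrics, benchmark='^GSPC')
#   report = analyzer.build_report(timestamp='one_month', show=True)
#   perfs = analyzer.overall_metrics('one_month')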
| intuition-io/intuition | intuition/core/analyzes.py | Python | apache-2.0 | 6,451 |
from django.db import models
class CalendarEvent(models.Model):
title = models.CharField(max_length=255)
scheduled_on = models.DateTimeField()
created = models.DateTimeField(auto_now_add=True)
edited = models.DateTimeField(auto_now=True)
def __unicode__(self):
return self.title | RockingRolli/django-event-calendar | event_calendar/models.py | Python | mit | 309 |
from article_metrics import models, utils
import json
from django.conf import settings
import boto3
import logging
LOG = logging.getLogger(__name__)
def sns_topic_arn(**overrides):
"returns an arn path to an AWS event bus. this is used to connect and send/receive events"
vals = {}
vals.update(settings.EVENT_BUS)
vals.update(overrides)
# ll: arn:aws:sns:us-east-1:112634557572:bus-articles--ci
arn = "arn:aws:sns:{region}:{subscriber}:{name}--{env}".format(**vals)
LOG.info("using topic arn: %s", arn)
return arn
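# e.g. with EVENT_BUS = {'region': 'us-east-1', 'subscriber': '112634557572',
# 'name': 'bus-articles', 'env': 'ci'} this returns
# 'arn:aws:sns:us-east-1:112634557572:bus-articles--ci' (the same value as the
# inline example above)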
#
#
#
def event_bus_conn(**overrides):
sns = boto3.resource('sns')
return sns.Topic(sns_topic_arn(**overrides))
def notify(obj, **overrides):
"notify the event bus that this Citation or Metric has changes"
if settings.DEBUG:
LOG.debug("application is in DEBUG mode, not notifying anyone")
return
try:
msg = {
"type": "metrics",
"contentType": "article",
"id": utils.pad_msid(utils.doi2msid(obj.article.doi)),
"metric": "citations" if isinstance(obj, models.Citation) else "views-downloads"
}
msg_json = json.dumps(msg)
LOG.info("writing message to event bus", extra={'bus-message': msg_json})
event_bus_conn(**overrides).publish(Message=msg_json)
    except ValueError as err:
        # most likely a value in the payload failed to serialize
        LOG.error("failed to serialize event bus payload: %s", err)
except Exception as err:
LOG.error("unhandled error attempting to notify event bus of article change: %s", err)
| elifesciences/elife-metrics | src/article_metrics/events.py | Python | gpl-3.0 | 1,616 |
#!/usr/bin/env python
# coding=utf8
import os
import shutil
import sys
reload(sys)
sys.setdefaultencoding('utf-8') # @UndefinedVariable
# Output directory
OUT_DIR = "output"
#
# Delete a directory's contents (recursive)
#
# Delete everything reachable from the directory named in "top",
# assuming there are no symbolic links.
# CAUTION: This is dangerous! For example, if top == '/', it
# could delete all your disk files.
def rmdir(top):
for root, dirs, files in os.walk(top, topdown=False):
for name in files:
os.remove(os.path.join(root, name))
for name in dirs:
os.rmdir(os.path.join(root, name))
#
# Copy a directory tree (recursive)
# assets -> output/assets
# dep -> output/dep
#
def copytree(src_dir, dest_dir, symlinks=False, ignore=None):
for item in os.listdir(src_dir):
s = os.path.join(src_dir, item)
d = os.path.join(dest_dir, item)
if os.path.isdir(s):
shutil.copytree(s, d, symlinks, ignore)
else:
shutil.copy2(s, d)
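# Note: unlike shutil.copytree, copytree above merges into an existing
# dest_dir; subdirectories are still delegated to shutil.copytree, so they
# must not already exist under dest_dir.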
#
# Copy files matching a shell-style pattern into a directory (non-recursive)
#
def copyFilePattern(src_dir, dest_dir, pattern):
import glob
files = glob.iglob(os.path.join(src_dir, pattern))
for file in files:
if os.path.isfile(file):
shutil.copy(file, dest_dir)
#
# Entry point
#
if __name__ == '__main__':
try:
        # Clear the output directory
rmdir(OUT_DIR)
#
if not os.path.exists(OUT_DIR):
os.makedirs(OUT_DIR)
#
copytree("assets", OUT_DIR + "/assets")
copytree("dep", OUT_DIR + "/dep")
copyFilePattern(".", OUT_DIR, "*.html")
except KeyboardInterrupt:
pass
| scwanglijun/disconf | disconf-web/src/main/webapp/build.py | Python | gpl-2.0 | 1,700 |
# coding=utf-8
import logging
# About language detecting logic:
#
# Step 1: if member.l10n is not empty/false, use it as the best choice
#
# Step 2: if Accept-Language header has something interesting, use it as the second choice
#
# Step 3: Fallback to site.l10n
def Getlang(langid):
    if langid == 'en':
        from v2ex.babel.l10n.messages import en as messages
        return messages
    if langid == 'zh-Hans':
        from v2ex.babel.l10n.messages import zhHans as messages
        return messages
    # Fall back to English for unknown language ids rather than returning None
    from v2ex.babel.l10n.messages import en as messages
    return messages
def GetMessages(handler, member=False, site=False):
logging.info(handler.request.headers)
logging.info(site.l10n)
if member:
return Getlang(member.l10n)
else:
return Getlang(site.l10n)
def GetSupportedLanguages():
return ['en', 'zh-Hans']
def GetSupportedLanguagesNames():
return {'en' : 'English', 'zh-Hans' : u'简体中文'}
def GetLanguageSelect(current):
lang = GetSupportedLanguages()
names = GetSupportedLanguagesNames()
s = '<select name="l10n">'
for l in lang:
if l == current:
s = s + '<option value="' + l + '" selected="selected">' + names[l] + '</option>'
else:
s = s + '<option value="' + l + '">' + names[l] + '</option>'
s = s + '</select>'
    return s
| melice/akiraguru | v2ex/babel/l10n/__init__.py | Python | bsd-3-clause | 1,288 |
#
#
# Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011 Google Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
"""Functions used by the node daemon
@var _ALLOWED_UPLOAD_FILES: denotes which files are accepted in
the L{UploadFile} function
@var _ALLOWED_CLEAN_DIRS: denotes which directories are accepted
in the L{_CleanDirectory} function
"""
# pylint: disable-msg=E1103
# E1103: %s %r has no %r member (but some types could not be
# inferred), because the _TryOSFromDisk returns either (True, os_obj)
# or (False, "string") which confuses pylint
import os
import os.path
import shutil
import time
import stat
import errno
import re
import random
import logging
import tempfile
import zlib
import base64
import signal
from ganeti import errors
from ganeti import utils
from ganeti import ssh
from ganeti import hypervisor
from ganeti import constants
from ganeti import bdev
from ganeti import objects
from ganeti import ssconf
from ganeti import serializer
from ganeti import netutils
from ganeti import runtime
_BOOT_ID_PATH = "/proc/sys/kernel/random/boot_id"
_ALLOWED_CLEAN_DIRS = frozenset([
constants.DATA_DIR,
constants.JOB_QUEUE_ARCHIVE_DIR,
constants.QUEUE_DIR,
constants.CRYPTO_KEYS_DIR,
])
_MAX_SSL_CERT_VALIDITY = 7 * 24 * 60 * 60
_X509_KEY_FILE = "key"
_X509_CERT_FILE = "cert"
_IES_STATUS_FILE = "status"
_IES_PID_FILE = "pid"
_IES_CA_FILE = "ca"
#: Valid LVS output line regex
_LVSLINE_REGEX = re.compile("^ *([^|]+)\|([^|]+)\|([0-9.]+)\|([^|]{6})\|?$")
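# e.g. a line matching the regex above (illustrative):
#   "  xenvg|test1|20.06|-wi-ao"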
class RPCFail(Exception):
"""Class denoting RPC failure.
Its argument is the error message.
"""
def _Fail(msg, *args, **kwargs):
"""Log an error and the raise an RPCFail exception.
This exception is then handled specially in the ganeti daemon and
turned into a 'failed' return type. As such, this function is a
useful shortcut for logging the error and returning it to the master
daemon.
@type msg: string
@param msg: the text of the exception
@raise RPCFail
"""
if args:
msg = msg % args
if "log" not in kwargs or kwargs["log"]: # if we should log this error
if "exc" in kwargs and kwargs["exc"]:
logging.exception(msg)
else:
logging.error(msg)
raise RPCFail(msg)
def _GetConfig():
"""Simple wrapper to return a SimpleStore.
@rtype: L{ssconf.SimpleStore}
@return: a SimpleStore instance
"""
return ssconf.SimpleStore()
def _GetSshRunner(cluster_name):
"""Simple wrapper to return an SshRunner.
@type cluster_name: str
@param cluster_name: the cluster name, which is needed
by the SshRunner constructor
@rtype: L{ssh.SshRunner}
@return: an SshRunner instance
"""
return ssh.SshRunner(cluster_name)
def _Decompress(data):
"""Unpacks data compressed by the RPC client.
@type data: list or tuple
@param data: Data sent by RPC client
@rtype: str
@return: Decompressed data
"""
assert isinstance(data, (list, tuple))
assert len(data) == 2
(encoding, content) = data
if encoding == constants.RPC_ENCODING_NONE:
return content
elif encoding == constants.RPC_ENCODING_ZLIB_BASE64:
return zlib.decompress(base64.b64decode(content))
else:
raise AssertionError("Unknown data encoding")
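# Hedged illustration (not part of the original module): the inverse
# transform an RPC client would apply to produce the (encoding, content)
# tuples consumed by _Decompress above; it uses only names already
# imported in this module.
def _ExampleCompressForRpc(data):
  """Sketch of packing payload data for the RPC wire format."""
  return (constants.RPC_ENCODING_ZLIB_BASE64,
          base64.b64encode(zlib.compress(data)))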
def _CleanDirectory(path, exclude=None):
"""Removes all regular files in a directory.
@type path: str
@param path: the directory to clean
@type exclude: list
@param exclude: list of files to be excluded, defaults
to the empty list
"""
if path not in _ALLOWED_CLEAN_DIRS:
_Fail("Path passed to _CleanDirectory not in allowed clean targets: '%s'",
path)
if not os.path.isdir(path):
return
if exclude is None:
exclude = []
else:
# Normalize excluded paths
exclude = [os.path.normpath(i) for i in exclude]
for rel_name in utils.ListVisibleFiles(path):
full_name = utils.PathJoin(path, rel_name)
if full_name in exclude:
continue
if os.path.isfile(full_name) and not os.path.islink(full_name):
utils.RemoveFile(full_name)
def _BuildUploadFileList():
"""Build the list of allowed upload files.
This is abstracted so that it's built only once at module import time.
"""
allowed_files = set([
constants.CLUSTER_CONF_FILE,
constants.ETC_HOSTS,
constants.SSH_KNOWN_HOSTS_FILE,
constants.VNC_PASSWORD_FILE,
constants.RAPI_CERT_FILE,
constants.RAPI_USERS_FILE,
constants.CONFD_HMAC_KEY,
constants.CLUSTER_DOMAIN_SECRET_FILE,
])
for hv_name in constants.HYPER_TYPES:
hv_class = hypervisor.GetHypervisorClass(hv_name)
allowed_files.update(hv_class.GetAncillaryFiles())
return frozenset(allowed_files)
_ALLOWED_UPLOAD_FILES = _BuildUploadFileList()
def JobQueuePurge():
"""Removes job queue files and archived jobs.
  @rtype: None
"""
_CleanDirectory(constants.QUEUE_DIR, exclude=[constants.JOB_QUEUE_LOCK_FILE])
_CleanDirectory(constants.JOB_QUEUE_ARCHIVE_DIR)
def GetMasterInfo():
"""Returns master information.
  This is a utility function to compute master information, either
for consumption here or from the node daemon.
@rtype: tuple
@return: master_netdev, master_ip, master_name, primary_ip_family
@raise RPCFail: in case of errors
"""
try:
cfg = _GetConfig()
master_netdev = cfg.GetMasterNetdev()
master_ip = cfg.GetMasterIP()
master_node = cfg.GetMasterNode()
primary_ip_family = cfg.GetPrimaryIPFamily()
except errors.ConfigurationError, err:
_Fail("Cluster configuration incomplete: %s", err, exc=True)
return (master_netdev, master_ip, master_node, primary_ip_family)
def StartMaster(start_daemons, no_voting):
"""Activate local node as master node.
  The function will either try to activate the IP address of the master
(unless someone else has it) or also start the master daemons, based
on the start_daemons parameter.
@type start_daemons: boolean
@param start_daemons: whether to start the master daemons
(ganeti-masterd and ganeti-rapi), or (if false) activate the
master ip
@type no_voting: boolean
@param no_voting: whether to start ganeti-masterd without a node vote
(if start_daemons is True), but still non-interactively
@rtype: None
"""
# GetMasterInfo will raise an exception if not able to return data
master_netdev, master_ip, _, family = GetMasterInfo()
err_msgs = []
# either start the master and rapi daemons
if start_daemons:
if no_voting:
masterd_args = "--no-voting --yes-do-it"
else:
masterd_args = ""
env = {
"EXTRA_MASTERD_ARGS": masterd_args,
}
result = utils.RunCmd([constants.DAEMON_UTIL, "start-master"], env=env)
if result.failed:
msg = "Can't start Ganeti master: %s" % result.output
logging.error(msg)
err_msgs.append(msg)
# or activate the IP
else:
if netutils.TcpPing(master_ip, constants.DEFAULT_NODED_PORT):
if netutils.IPAddress.Own(master_ip):
# we already have the ip:
logging.debug("Master IP already configured, doing nothing")
else:
msg = "Someone else has the master ip, not activating"
logging.error(msg)
err_msgs.append(msg)
else:
ipcls = netutils.IP4Address
if family == netutils.IP6Address.family:
ipcls = netutils.IP6Address
result = utils.RunCmd(["ip", "address", "add",
"%s/%d" % (master_ip, ipcls.iplen),
"dev", master_netdev, "label",
"%s:0" % master_netdev])
if result.failed:
msg = "Can't activate master IP: %s" % result.output
logging.error(msg)
err_msgs.append(msg)
# we ignore the exit code of the following cmds
if ipcls == netutils.IP4Address:
utils.RunCmd(["arping", "-q", "-U", "-c 3", "-I", master_netdev, "-s",
master_ip, master_ip])
elif ipcls == netutils.IP6Address:
try:
utils.RunCmd(["ndisc6", "-q", "-r 3", master_ip, master_netdev])
except errors.OpExecError:
# TODO: Better error reporting
logging.warning("Can't execute ndisc6, please install if missing")
if err_msgs:
_Fail("; ".join(err_msgs))
def StopMaster(stop_daemons):
"""Deactivate this node as master.
The function will always try to deactivate the IP address of the
master. It will also stop the master daemons depending on the
stop_daemons parameter.
@type stop_daemons: boolean
@param stop_daemons: whether to also stop the master daemons
(ganeti-masterd and ganeti-rapi)
@rtype: None
"""
# TODO: log and report back to the caller the error failures; we
# need to decide in which case we fail the RPC for this
# GetMasterInfo will raise an exception if not able to return data
master_netdev, master_ip, _, family = GetMasterInfo()
ipcls = netutils.IP4Address
if family == netutils.IP6Address.family:
ipcls = netutils.IP6Address
result = utils.RunCmd(["ip", "address", "del",
"%s/%d" % (master_ip, ipcls.iplen),
"dev", master_netdev])
if result.failed:
logging.error("Can't remove the master IP, error: %s", result.output)
# but otherwise ignore the failure
if stop_daemons:
result = utils.RunCmd([constants.DAEMON_UTIL, "stop-master"])
if result.failed:
logging.error("Could not stop Ganeti master, command %s had exitcode %s"
" and error %s",
result.cmd, result.exit_code, result.output)
def EtcHostsModify(mode, host, ip):
"""Modify a host entry in /etc/hosts.
@param mode: The mode to operate. Either add or remove entry
@param host: The host to operate on
@param ip: The ip associated with the entry
"""
  if mode == constants.ETC_HOSTS_ADD:
    if not ip:
      _Fail("Mode 'add' needs 'ip' parameter, but parameter not"
            " present")
    utils.AddHostToEtcHosts(host, ip)
  elif mode == constants.ETC_HOSTS_REMOVE:
    if ip:
      _Fail("Mode 'remove' does not allow 'ip' parameter, but"
            " parameter is present")
    utils.RemoveHostFromEtcHosts(host)
  else:
    _Fail("Mode not supported")
def LeaveCluster(modify_ssh_setup):
"""Cleans up and remove the current node.
This function cleans up and prepares the current node to be removed
from the cluster.
If processing is successful, then it raises an
L{errors.QuitGanetiException} which is used as a special case to
shutdown the node daemon.
@param modify_ssh_setup: boolean
"""
_CleanDirectory(constants.DATA_DIR)
_CleanDirectory(constants.CRYPTO_KEYS_DIR)
JobQueuePurge()
if modify_ssh_setup:
try:
priv_key, pub_key, auth_keys = ssh.GetUserFiles(constants.GANETI_RUNAS)
utils.RemoveAuthorizedKey(auth_keys, utils.ReadFile(pub_key))
utils.RemoveFile(priv_key)
utils.RemoveFile(pub_key)
except errors.OpExecError:
logging.exception("Error while processing ssh files")
try:
utils.RemoveFile(constants.CONFD_HMAC_KEY)
utils.RemoveFile(constants.RAPI_CERT_FILE)
utils.RemoveFile(constants.NODED_CERT_FILE)
except: # pylint: disable-msg=W0702
logging.exception("Error while removing cluster secrets")
result = utils.RunCmd([constants.DAEMON_UTIL, "stop", constants.CONFD])
if result.failed:
logging.error("Command %s failed with exitcode %s and error %s",
result.cmd, result.exit_code, result.output)
# Raise a custom exception (handled in ganeti-noded)
raise errors.QuitGanetiException(True, 'Shutdown scheduled')
def GetNodeInfo(vgname, hypervisor_type):
"""Gives back a hash with different information about the node.
@type vgname: C{string}
@param vgname: the name of the volume group to ask for disk space information
@type hypervisor_type: C{str}
@param hypervisor_type: the name of the hypervisor to ask for
memory information
@rtype: C{dict}
@return: dictionary with the following keys:
- vg_size is the size of the configured volume group in MiB
- vg_free is the free size of the volume group in MiB
- memory_dom0 is the memory allocated for domain0 in MiB
- memory_free is the currently available (free) ram in MiB
      - memory_total is the total amount of memory in MiB
"""
outputarray = {}
if vgname is not None:
vginfo = bdev.LogicalVolume.GetVGInfo([vgname])
vg_free = vg_size = None
if vginfo:
vg_free = int(round(vginfo[0][0], 0))
vg_size = int(round(vginfo[0][1], 0))
outputarray['vg_size'] = vg_size
outputarray['vg_free'] = vg_free
if hypervisor_type is not None:
hyper = hypervisor.GetHypervisor(hypervisor_type)
hyp_info = hyper.GetNodeInfo()
if hyp_info is not None:
outputarray.update(hyp_info)
outputarray["bootid"] = utils.ReadFile(_BOOT_ID_PATH, size=128).rstrip("\n")
return outputarray
def VerifyNode(what, cluster_name):
"""Verify the status of the local node.
Based on the input L{what} parameter, various checks are done on the
local node.
If the I{filelist} key is present, this list of
files is checksummed and the file/checksum pairs are returned.
If the I{nodelist} key is present, we check that we have
connectivity via ssh with the target nodes (and check the hostname
report).
If the I{node-net-test} key is present, we check that we have
connectivity to the given nodes via both primary IP and, if
applicable, secondary IPs.
@type what: C{dict}
@param what: a dictionary of things to check:
- filelist: list of files for which to compute checksums
- nodelist: list of nodes we should check ssh communication with
- node-net-test: list of nodes we should check node daemon port
connectivity with
- hypervisor: list with hypervisors to run the verify for
@rtype: dict
@return: a dictionary with the same keys as the input dict, and
values representing the result of the checks
"""
result = {}
my_name = netutils.Hostname.GetSysName()
port = netutils.GetDaemonPort(constants.NODED)
vm_capable = my_name not in what.get(constants.NV_VMNODES, [])
if constants.NV_HYPERVISOR in what and vm_capable:
result[constants.NV_HYPERVISOR] = tmp = {}
for hv_name in what[constants.NV_HYPERVISOR]:
try:
val = hypervisor.GetHypervisor(hv_name).Verify()
except errors.HypervisorError, err:
val = "Error while checking hypervisor: %s" % str(err)
tmp[hv_name] = val
if constants.NV_HVPARAMS in what and vm_capable:
result[constants.NV_HVPARAMS] = tmp = []
for source, hv_name, hvparms in what[constants.NV_HVPARAMS]:
try:
logging.info("Validating hv %s, %s", hv_name, hvparms)
hypervisor.GetHypervisor(hv_name).ValidateParameters(hvparms)
except errors.HypervisorError, err:
tmp.append((source, hv_name, str(err)))
if constants.NV_FILELIST in what:
result[constants.NV_FILELIST] = utils.FingerprintFiles(
what[constants.NV_FILELIST])
if constants.NV_NODELIST in what:
result[constants.NV_NODELIST] = tmp = {}
random.shuffle(what[constants.NV_NODELIST])
for node in what[constants.NV_NODELIST]:
success, message = _GetSshRunner(cluster_name).VerifyNodeHostname(node)
if not success:
tmp[node] = message
if constants.NV_NODENETTEST in what:
result[constants.NV_NODENETTEST] = tmp = {}
my_pip = my_sip = None
for name, pip, sip in what[constants.NV_NODENETTEST]:
if name == my_name:
my_pip = pip
my_sip = sip
break
if not my_pip:
tmp[my_name] = ("Can't find my own primary/secondary IP"
" in the node list")
else:
for name, pip, sip in what[constants.NV_NODENETTEST]:
fail = []
if not netutils.TcpPing(pip, port, source=my_pip):
fail.append("primary")
if sip != pip:
if not netutils.TcpPing(sip, port, source=my_sip):
fail.append("secondary")
if fail:
tmp[name] = ("failure using the %s interface(s)" %
" and ".join(fail))
if constants.NV_MASTERIP in what:
# FIXME: add checks on incoming data structures (here and in the
# rest of the function)
master_name, master_ip = what[constants.NV_MASTERIP]
if master_name == my_name:
source = constants.IP4_ADDRESS_LOCALHOST
else:
source = None
result[constants.NV_MASTERIP] = netutils.TcpPing(master_ip, port,
source=source)
if constants.NV_OOB_PATHS in what:
result[constants.NV_OOB_PATHS] = tmp = []
for path in what[constants.NV_OOB_PATHS]:
try:
st = os.stat(path)
except OSError, err:
tmp.append("error stating out of band helper: %s" % err)
else:
if stat.S_ISREG(st.st_mode):
if stat.S_IMODE(st.st_mode) & stat.S_IXUSR:
tmp.append(None)
else:
tmp.append("out of band helper %s is not executable" % path)
else:
tmp.append("out of band helper %s is not a file" % path)
if constants.NV_LVLIST in what and vm_capable:
try:
val = GetVolumeList(utils.ListVolumeGroups().keys())
except RPCFail, err:
val = str(err)
result[constants.NV_LVLIST] = val
if constants.NV_INSTANCELIST in what and vm_capable:
# GetInstanceList can fail
try:
val = GetInstanceList(what[constants.NV_INSTANCELIST])
except RPCFail, err:
val = str(err)
result[constants.NV_INSTANCELIST] = val
if constants.NV_VGLIST in what and vm_capable:
result[constants.NV_VGLIST] = utils.ListVolumeGroups()
if constants.NV_PVLIST in what and vm_capable:
result[constants.NV_PVLIST] = \
bdev.LogicalVolume.GetPVInfo(what[constants.NV_PVLIST],
filter_allocatable=False)
if constants.NV_VERSION in what:
result[constants.NV_VERSION] = (constants.PROTOCOL_VERSION,
constants.RELEASE_VERSION)
if constants.NV_HVINFO in what and vm_capable:
hyper = hypervisor.GetHypervisor(what[constants.NV_HVINFO])
result[constants.NV_HVINFO] = hyper.GetNodeInfo()
if constants.NV_DRBDLIST in what and vm_capable:
try:
used_minors = bdev.DRBD8.GetUsedDevs().keys()
except errors.BlockDeviceError, err:
logging.warning("Can't get used minors list", exc_info=True)
used_minors = str(err)
result[constants.NV_DRBDLIST] = used_minors
if constants.NV_DRBDHELPER in what and vm_capable:
status = True
try:
payload = bdev.BaseDRBD.GetUsermodeHelper()
except errors.BlockDeviceError, err:
logging.error("Can't get DRBD usermode helper: %s", str(err))
status = False
payload = str(err)
result[constants.NV_DRBDHELPER] = (status, payload)
if constants.NV_NODESETUP in what:
result[constants.NV_NODESETUP] = tmpr = []
if not os.path.isdir("/sys/block") or not os.path.isdir("/sys/class/net"):
tmpr.append("The sysfs filesytem doesn't seem to be mounted"
" under /sys, missing required directories /sys/block"
" and /sys/class/net")
if (not os.path.isdir("/proc/sys") or
not os.path.isfile("/proc/sysrq-trigger")):
tmpr.append("The procfs filesystem doesn't seem to be mounted"
" under /proc, missing required directory /proc/sys and"
" the file /proc/sysrq-trigger")
if constants.NV_TIME in what:
result[constants.NV_TIME] = utils.SplitTime(time.time())
if constants.NV_OSLIST in what and vm_capable:
result[constants.NV_OSLIST] = DiagnoseOS()
if constants.NV_BRIDGES in what and vm_capable:
result[constants.NV_BRIDGES] = [bridge
for bridge in what[constants.NV_BRIDGES]
if not utils.BridgeExists(bridge)]
return result
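# Hedged example of a minimal "what" dict for VerifyNode (keys are the NV_*
# constants used above; the values shown are illustrative):
#   what = {
#     constants.NV_FILELIST: [constants.CLUSTER_CONF_FILE],
#     constants.NV_NODELIST: ["node2.example.com"],
#     constants.NV_VERSION: None,
#   }
#   result = VerifyNode(what, "cluster.example.com")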
def GetBlockDevSizes(devices):
"""Return the size of the given block devices
@type devices: list
@param devices: list of block device nodes to query
@rtype: dict
@return:
dictionary of all block devices under /dev (key). The value is their
size in MiB.
{'/dev/disk/by-uuid/123456-12321231-312312-312': 124}
"""
DEV_PREFIX = "/dev/"
blockdevs = {}
for devpath in devices:
if os.path.commonprefix([DEV_PREFIX, devpath]) != DEV_PREFIX:
continue
try:
st = os.stat(devpath)
except EnvironmentError, err:
logging.warning("Error stat()'ing device %s: %s", devpath, str(err))
continue
if stat.S_ISBLK(st.st_mode):
result = utils.RunCmd(["blockdev", "--getsize64", devpath])
if result.failed:
# We don't want to fail, just do not list this device as available
logging.warning("Cannot get size for block device %s", devpath)
continue
size = int(result.stdout) / (1024 * 1024)
blockdevs[devpath] = size
return blockdevs
def GetVolumeList(vg_names):
"""Compute list of logical volumes and their size.
@type vg_names: list
@param vg_names: the volume groups whose LVs we should list, or
empty for all volume groups
@rtype: dict
@return:
    dictionary of all partitions (key) with value being a tuple of
their size (in MiB), inactive and online status::
{'xenvg/test1': ('20.06', True, True)}
in case of errors, a string is returned with the error
details.
"""
lvs = {}
sep = '|'
if not vg_names:
vg_names = []
result = utils.RunCmd(["lvs", "--noheadings", "--units=m", "--nosuffix",
"--separator=%s" % sep,
"-ovg_name,lv_name,lv_size,lv_attr"] + vg_names)
if result.failed:
_Fail("Failed to list logical volumes, lvs output: %s", result.output)
for line in result.stdout.splitlines():
line = line.strip()
match = _LVSLINE_REGEX.match(line)
if not match:
logging.error("Invalid line returned from lvs output: '%s'", line)
continue
vg_name, name, size, attr = match.groups()
inactive = attr[4] == '-'
online = attr[5] == 'o'
virtual = attr[0] == 'v'
if virtual:
# we don't want to report such volumes as existing, since they
# don't really hold data
continue
lvs[vg_name+"/"+name] = (size, inactive, online)
return lvs
def ListVolumeGroups():
"""List the volume groups and their size.
@rtype: dict
@return: dictionary with keys volume name and values the
size of the volume
"""
return utils.ListVolumeGroups()
def NodeVolumes():
"""List all volumes on this node.
@rtype: list
@return:
A list of dictionaries, each having four keys:
- name: the logical volume name,
- size: the size of the logical volume
- dev: the physical device on which the LV lives
- vg: the volume group to which it belongs
      In case of errors, an RPCFail is raised after logging the
      error.
Note that since a logical volume can live on multiple physical
volumes, the resulting list might include a logical volume
multiple times.
"""
result = utils.RunCmd(["lvs", "--noheadings", "--units=m", "--nosuffix",
"--separator=|",
"--options=lv_name,lv_size,devices,vg_name"])
if result.failed:
_Fail("Failed to list logical volumes, lvs output: %s",
result.output)
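  # The lvs "devices" field typically looks like
  # "/dev/sda1(0),/dev/sdb1(0)" (illustrative): parse_dev below strips the
  # "(extent)" suffix and handle_dev splits the comma-separated devices.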
def parse_dev(dev):
return dev.split('(')[0]
def handle_dev(dev):
return [parse_dev(x) for x in dev.split(",")]
def map_line(line):
line = [v.strip() for v in line]
return [{'name': line[0], 'size': line[1],
'dev': dev, 'vg': line[3]} for dev in handle_dev(line[2])]
all_devs = []
for line in result.stdout.splitlines():
if line.count('|') >= 3:
all_devs.extend(map_line(line.split('|')))
else:
logging.warning("Strange line in the output from lvs: '%s'", line)
return all_devs
def BridgesExist(bridges_list):
"""Check if a list of bridges exist on the current node.
@rtype: boolean
@return: C{True} if all of them exist, C{False} otherwise
"""
missing = []
for bridge in bridges_list:
if not utils.BridgeExists(bridge):
missing.append(bridge)
if missing:
_Fail("Missing bridges %s", utils.CommaJoin(missing))
def GetInstanceList(hypervisor_list):
"""Provides a list of instances.
@type hypervisor_list: list
@param hypervisor_list: the list of hypervisors to query information
@rtype: list
@return: a list of all running instances on the current node
- instance1.example.com
- instance2.example.com
"""
results = []
for hname in hypervisor_list:
try:
names = hypervisor.GetHypervisor(hname).ListInstances()
results.extend(names)
except errors.HypervisorError, err:
_Fail("Error enumerating instances (hypervisor %s): %s",
hname, err, exc=True)
return results
def GetInstanceInfo(instance, hname):
"""Gives back the information about an instance as a dictionary.
@type instance: string
@param instance: the instance name
@type hname: string
@param hname: the hypervisor type of the instance
@rtype: dict
@return: dictionary with the following keys:
- memory: memory size of instance (int)
- state: xen state of instance (string)
- time: cpu time of instance (float)
"""
output = {}
iinfo = hypervisor.GetHypervisor(hname).GetInstanceInfo(instance)
if iinfo is not None:
output['memory'] = iinfo[2]
output['state'] = iinfo[4]
output['time'] = iinfo[5]
return output
def GetInstanceMigratable(instance):
"""Gives whether an instance can be migrated.
@type instance: L{objects.Instance}
@param instance: object representing the instance to be checked.
  @rtype: None
  @raise RPCFail: if the instance is not running on this node; a
      missing disk symlink is only logged as a warning
  """
hyper = hypervisor.GetHypervisor(instance.hypervisor)
iname = instance.name
if iname not in hyper.ListInstances():
_Fail("Instance %s is not running", iname)
for idx in range(len(instance.disks)):
link_name = _GetBlockDevSymlinkPath(iname, idx)
if not os.path.islink(link_name):
logging.warning("Instance %s is missing symlink %s for disk %d",
iname, link_name, idx)
def GetAllInstancesInfo(hypervisor_list):
"""Gather data about all instances.
This is the equivalent of L{GetInstanceInfo}, except that it
computes data for all instances at once, thus being faster if one
needs data about more than one instance.
@type hypervisor_list: list
@param hypervisor_list: list of hypervisors to query for instance data
@rtype: dict
@return: dictionary of instance: data, with data having the following keys:
- memory: memory size of instance (int)
- state: xen state of instance (string)
- time: cpu time of instance (float)
- vcpus: the number of vcpus
"""
output = {}
for hname in hypervisor_list:
iinfo = hypervisor.GetHypervisor(hname).GetAllInstancesInfo()
if iinfo:
for name, _, memory, vcpus, state, times in iinfo:
value = {
'memory': memory,
'vcpus': vcpus,
'state': state,
'time': times,
}
if name in output:
# we only check static parameters, like memory and vcpus,
# and not state and time which can change between the
# invocations of the different hypervisors
for key in 'memory', 'vcpus':
if value[key] != output[name][key]:
_Fail("Instance %s is running twice"
" with different parameters", name)
output[name] = value
return output
def _InstanceLogName(kind, os_name, instance):
"""Compute the OS log filename for a given instance and operation.
The instance name and os name are passed in as strings since not all
operations have these as part of an instance object.
@type kind: string
@param kind: the operation type (e.g. add, import, etc.)
@type os_name: string
@param os_name: the os name
@type instance: string
@param instance: the name of the instance being imported/added/etc.
"""
# TODO: Use tempfile.mkstemp to create unique filename
base = ("%s-%s-%s-%s.log" %
(kind, os_name, instance, utils.TimestampForFilename()))
return utils.PathJoin(constants.LOG_OS_DIR, base)
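# e.g. (illustrative): "add-debootstrap-instance1.example.com-<timestamp>.log"
# under constants.LOG_OS_DIR, with <timestamp> produced by
# utils.TimestampForFilename().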
def InstanceOsAdd(instance, reinstall, debug):
"""Add an OS to an instance.
@type instance: L{objects.Instance}
@param instance: Instance whose OS is to be installed
@type reinstall: boolean
@param reinstall: whether this is an instance reinstall
@type debug: integer
@param debug: debug level, passed to the OS scripts
@rtype: None
"""
inst_os = OSFromDisk(instance.os)
create_env = OSEnvironment(instance, inst_os, debug)
if reinstall:
create_env['INSTANCE_REINSTALL'] = "1"
logfile = _InstanceLogName("add", instance.os, instance.name)
result = utils.RunCmd([inst_os.create_script], env=create_env,
cwd=inst_os.path, output=logfile,)
if result.failed:
logging.error("os create command '%s' returned error: %s, logfile: %s,"
" output: %s", result.cmd, result.fail_reason, logfile,
result.output)
lines = [utils.SafeEncode(val)
for val in utils.TailFile(logfile, lines=20)]
_Fail("OS create script failed (%s), last lines in the"
" log file:\n%s", result.fail_reason, "\n".join(lines), log=False)
def RunRenameInstance(instance, old_name, debug):
"""Run the OS rename script for an instance.
@type instance: L{objects.Instance}
@param instance: Instance whose OS is to be installed
@type old_name: string
@param old_name: previous instance name
@type debug: integer
@param debug: debug level, passed to the OS scripts
@rtype: boolean
@return: the success of the operation
"""
inst_os = OSFromDisk(instance.os)
rename_env = OSEnvironment(instance, inst_os, debug)
rename_env['OLD_INSTANCE_NAME'] = old_name
logfile = _InstanceLogName("rename", instance.os,
"%s-%s" % (old_name, instance.name))
result = utils.RunCmd([inst_os.rename_script], env=rename_env,
cwd=inst_os.path, output=logfile)
if result.failed:
logging.error("os create command '%s' returned error: %s output: %s",
result.cmd, result.fail_reason, result.output)
lines = [utils.SafeEncode(val)
for val in utils.TailFile(logfile, lines=20)]
_Fail("OS rename script failed (%s), last lines in the"
" log file:\n%s", result.fail_reason, "\n".join(lines), log=False)
def _GetBlockDevSymlinkPath(instance_name, idx):
return utils.PathJoin(constants.DISK_LINKS_DIR, "%s%s%d" %
(instance_name, constants.DISK_SEPARATOR, idx))
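# e.g. (illustrative; the exact values depend on the constants):
#   "/var/run/ganeti/instance-disks/instance1.example.com:0" for disk 0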
def _SymlinkBlockDev(instance_name, device_path, idx):
"""Set up symlinks to a instance's block device.
This is an auxiliary function run when an instance is start (on the primary
node) or when an instance is migrated (on the target node).
@param instance_name: the name of the target instance
@param device_path: path of the physical block device, on the node
@param idx: the disk index
@return: absolute path to the disk's symlink
"""
link_name = _GetBlockDevSymlinkPath(instance_name, idx)
try:
os.symlink(device_path, link_name)
except OSError, err:
if err.errno == errno.EEXIST:
if (not os.path.islink(link_name) or
os.readlink(link_name) != device_path):
os.remove(link_name)
os.symlink(device_path, link_name)
else:
raise
return link_name
def _RemoveBlockDevLinks(instance_name, disks):
"""Remove the block device symlinks belonging to the given instance.
"""
for idx, _ in enumerate(disks):
link_name = _GetBlockDevSymlinkPath(instance_name, idx)
if os.path.islink(link_name):
try:
os.remove(link_name)
except OSError:
logging.exception("Can't remove symlink '%s'", link_name)
def _GatherAndLinkBlockDevs(instance):
"""Set up an instance's block device(s).
This is run on the primary node at instance startup. The block
devices must be already assembled.
@type instance: L{objects.Instance}
  @param instance: the instance whose disks we should assemble
@rtype: list
@return: list of (disk_object, device_path)
"""
block_devices = []
for idx, disk in enumerate(instance.disks):
device = _RecursiveFindBD(disk)
if device is None:
raise errors.BlockDeviceError("Block device '%s' is not set up." %
str(disk))
device.Open()
try:
link_name = _SymlinkBlockDev(instance.name, device.dev_path, idx)
except OSError, e:
raise errors.BlockDeviceError("Cannot create block device symlink: %s" %
e.strerror)
block_devices.append((disk, link_name))
return block_devices
def StartInstance(instance):
"""Start an instance.
@type instance: L{objects.Instance}
@param instance: the instance object
@rtype: None
"""
running_instances = GetInstanceList([instance.hypervisor])
if instance.name in running_instances:
logging.info("Instance %s already running, not starting", instance.name)
return
try:
block_devices = _GatherAndLinkBlockDevs(instance)
hyper = hypervisor.GetHypervisor(instance.hypervisor)
hyper.StartInstance(instance, block_devices)
except errors.BlockDeviceError, err:
_Fail("Block device error: %s", err, exc=True)
except errors.HypervisorError, err:
_RemoveBlockDevLinks(instance.name, instance.disks)
_Fail("Hypervisor error: %s", err, exc=True)
def InstanceShutdown(instance, timeout):
"""Shut an instance down.
  @note: this function uses polling with a hardcoded timeout.
@type instance: L{objects.Instance}
@param instance: the instance object
@type timeout: integer
@param timeout: maximum timeout for soft shutdown
@rtype: None
"""
hv_name = instance.hypervisor
hyper = hypervisor.GetHypervisor(hv_name)
iname = instance.name
if instance.name not in hyper.ListInstances():
logging.info("Instance %s not running, doing nothing", iname)
return
class _TryShutdown:
def __init__(self):
self.tried_once = False
def __call__(self):
if iname not in hyper.ListInstances():
return
try:
hyper.StopInstance(instance, retry=self.tried_once)
except errors.HypervisorError, err:
if iname not in hyper.ListInstances():
          # if the instance no longer exists, consider this a
          # success and proceed to cleanup
return
_Fail("Failed to stop instance %s: %s", iname, err)
self.tried_once = True
raise utils.RetryAgain()
try:
utils.Retry(_TryShutdown(), 5, timeout)
except utils.RetryTimeout:
# the shutdown did not succeed
logging.error("Shutdown of '%s' unsuccessful, forcing", iname)
try:
hyper.StopInstance(instance, force=True)
except errors.HypervisorError, err:
if iname in hyper.ListInstances():
# only raise an error if the instance still exists, otherwise
# the error could simply be "instance ... unknown"!
_Fail("Failed to force stop instance %s: %s", iname, err)
time.sleep(1)
if iname in hyper.ListInstances():
_Fail("Could not shutdown instance %s even by destroy", iname)
try:
hyper.CleanupInstance(instance.name)
except errors.HypervisorError, err:
logging.warning("Failed to execute post-shutdown cleanup step: %s", err)
_RemoveBlockDevLinks(iname, instance.disks)
def InstanceReboot(instance, reboot_type, shutdown_timeout):
"""Reboot an instance.
@type instance: L{objects.Instance}
@param instance: the instance object to reboot
@type reboot_type: str
@param reboot_type: the type of reboot, one the following
constants:
- L{constants.INSTANCE_REBOOT_SOFT}: only reboot the
instance OS, do not recreate the VM
- L{constants.INSTANCE_REBOOT_HARD}: tear down and
restart the VM (at the hypervisor level)
- the other reboot type (L{constants.INSTANCE_REBOOT_FULL}) is
not accepted here, since that mode is handled differently, in
cmdlib, and translates into full stop and start of the
instance (instead of a call_instance_reboot RPC)
@type shutdown_timeout: integer
@param shutdown_timeout: maximum timeout for soft shutdown
@rtype: None
"""
running_instances = GetInstanceList([instance.hypervisor])
if instance.name not in running_instances:
_Fail("Cannot reboot instance %s that is not running", instance.name)
hyper = hypervisor.GetHypervisor(instance.hypervisor)
if reboot_type == constants.INSTANCE_REBOOT_SOFT:
try:
hyper.RebootInstance(instance)
except errors.HypervisorError, err:
_Fail("Failed to soft reboot instance %s: %s", instance.name, err)
elif reboot_type == constants.INSTANCE_REBOOT_HARD:
try:
InstanceShutdown(instance, shutdown_timeout)
return StartInstance(instance)
except errors.HypervisorError, err:
_Fail("Failed to hard reboot instance %s: %s", instance.name, err)
else:
_Fail("Invalid reboot_type received: %s", reboot_type)
def MigrationInfo(instance):
"""Gather information about an instance to be migrated.
@type instance: L{objects.Instance}
@param instance: the instance definition
"""
hyper = hypervisor.GetHypervisor(instance.hypervisor)
try:
info = hyper.MigrationInfo(instance)
except errors.HypervisorError, err:
_Fail("Failed to fetch migration information: %s", err, exc=True)
return info
def AcceptInstance(instance, info, target):
"""Prepare the node to accept an instance.
@type instance: L{objects.Instance}
@param instance: the instance definition
@type info: string/data (opaque)
@param info: migration information, from the source node
@type target: string
@param target: target host (usually ip), on this node
"""
# TODO: why is this required only for DTS_EXT_MIRROR?
if instance.disk_template in constants.DTS_EXT_MIRROR:
# Create the symlinks, as the disks are not active
# in any way
try:
_GatherAndLinkBlockDevs(instance)
except errors.BlockDeviceError, err:
_Fail("Block device error: %s", err, exc=True)
hyper = hypervisor.GetHypervisor(instance.hypervisor)
try:
hyper.AcceptInstance(instance, info, target)
except errors.HypervisorError, err:
if instance.disk_template in constants.DTS_EXT_MIRROR:
_RemoveBlockDevLinks(instance.name, instance.disks)
_Fail("Failed to accept instance: %s", err, exc=True)
def FinalizeMigration(instance, info, success):
"""Finalize any preparation to accept an instance.
@type instance: L{objects.Instance}
@param instance: the instance definition
@type info: string/data (opaque)
@param info: migration information, from the source node
@type success: boolean
@param success: whether the migration was a success or a failure
"""
hyper = hypervisor.GetHypervisor(instance.hypervisor)
try:
hyper.FinalizeMigration(instance, info, success)
except errors.HypervisorError, err:
_Fail("Failed to finalize migration: %s", err, exc=True)
def MigrateInstance(instance, target, live):
"""Migrates an instance to another node.
@type instance: L{objects.Instance}
@param instance: the instance definition
@type target: string
@param target: the target node name
@type live: boolean
@param live: whether the migration should be done live or not (the
interpretation of this parameter is left to the hypervisor)
@rtype: tuple
@return: a tuple of (success, msg) where:
      - success is a boolean denoting the success/failure of the operation
- msg is a string with details in case of failure
"""
hyper = hypervisor.GetHypervisor(instance.hypervisor)
try:
hyper.MigrateInstance(instance, target, live)
except errors.HypervisorError, err:
_Fail("Failed to migrate instance: %s", err, exc=True)
def BlockdevCreate(disk, size, owner, on_primary, info):
"""Creates a block device for an instance.
@type disk: L{objects.Disk}
@param disk: the object describing the disk we should create
@type size: int
@param size: the size of the physical underlying device, in MiB
@type owner: str
@param owner: the name of the instance for which disk is created,
used for device cache data
@type on_primary: boolean
@param on_primary: indicates if it is the primary node or not
@type info: string
@param info: string that will be sent to the physical device
creation, used for example to set (LVM) tags on LVs
  @return: the new unique_id of the device (this can sometimes be
computed only after creation), or None. On secondary nodes,
it's not required to return anything.
"""
# TODO: remove the obsolete 'size' argument
# pylint: disable-msg=W0613
clist = []
if disk.children:
for child in disk.children:
try:
crdev = _RecursiveAssembleBD(child, owner, on_primary)
except errors.BlockDeviceError, err:
_Fail("Can't assemble device %s: %s", child, err)
if on_primary or disk.AssembleOnSecondary():
# we need the children open in case the device itself has to
# be assembled
try:
# pylint: disable-msg=E1103
crdev.Open()
except errors.BlockDeviceError, err:
_Fail("Can't make child '%s' read-write: %s", child, err)
clist.append(crdev)
try:
device = bdev.Create(disk.dev_type, disk.physical_id, clist, disk.size)
except errors.BlockDeviceError, err:
_Fail("Can't create block device: %s", err)
if on_primary or disk.AssembleOnSecondary():
try:
device.Assemble()
except errors.BlockDeviceError, err:
_Fail("Can't assemble device after creation, unusual event: %s", err)
device.SetSyncSpeed(constants.SYNC_SPEED)
if on_primary or disk.OpenOnSecondary():
try:
device.Open(force=True)
except errors.BlockDeviceError, err:
_Fail("Can't make device r/w after creation, unusual event: %s", err)
DevCacheManager.UpdateCache(device.dev_path, owner,
on_primary, disk.iv_name)
device.SetInfo(info)
return device.unique_id
def _WipeDevice(path, offset, size):
"""This function actually wipes the device.
@param path: The path to the device to wipe
@param offset: The offset in MiB in the file
@param size: The size in MiB to write
"""
cmd = [constants.DD_CMD, "if=/dev/zero", "seek=%d" % offset,
"bs=%d" % constants.WIPE_BLOCK_SIZE, "oflag=direct", "of=%s" % path,
"count=%d" % size]
result = utils.RunCmd(cmd)
if result.failed:
_Fail("Wipe command '%s' exited with error: %s; output: %s", result.cmd,
result.fail_reason, result.output)
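# Illustrative shape of the resulting command (block size comes from
# constants.WIPE_BLOCK_SIZE):
#   dd if=/dev/zero seek=<offset> bs=<block_size> oflag=direct \
#     of=<path> count=<size>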
def BlockdevWipe(disk, offset, size):
"""Wipes a block device.
@type disk: L{objects.Disk}
@param disk: the disk object we want to wipe
@type offset: int
@param offset: The offset in MiB in the file
@type size: int
@param size: The size in MiB to write
"""
try:
rdev = _RecursiveFindBD(disk)
except errors.BlockDeviceError:
rdev = None
if not rdev:
_Fail("Cannot execute wipe for device %s: device not found", disk.iv_name)
# Do cross verify some of the parameters
if offset > rdev.size:
_Fail("Offset is bigger than device size")
  if (offset + size) > rdev.size:
    _Fail("The requested wipe (offset plus size) exceeds the device size")
_WipeDevice(rdev.dev_path, offset, size)
def BlockdevPauseResumeSync(disks, pause):
"""Pause or resume the sync of the block device.
@type disks: list of L{objects.Disk}
@param disks: the disks object we want to pause/resume
@type pause: bool
  @param pause: Whether to pause or resume
"""
success = []
for disk in disks:
try:
rdev = _RecursiveFindBD(disk)
except errors.BlockDeviceError:
rdev = None
if not rdev:
success.append((False, ("Cannot change sync for device %s:"
" device not found" % disk.iv_name)))
continue
result = rdev.PauseResumeSync(pause)
if result:
success.append((result, None))
else:
if pause:
msg = "Pause"
else:
msg = "Resume"
success.append((result, "%s for device %s failed" % (msg, disk.iv_name)))
return success
def BlockdevRemove(disk):
"""Remove a block device.
@note: This is intended to be called recursively.
@type disk: L{objects.Disk}
@param disk: the disk object we should remove
@rtype: boolean
@return: the success of the operation
"""
msgs = []
try:
rdev = _RecursiveFindBD(disk)
except errors.BlockDeviceError, err:
# probably can't attach
logging.info("Can't attach to device %s in remove", disk)
rdev = None
if rdev is not None:
r_path = rdev.dev_path
try:
rdev.Remove()
except errors.BlockDeviceError, err:
msgs.append(str(err))
if not msgs:
DevCacheManager.RemoveCache(r_path)
if disk.children:
for child in disk.children:
try:
BlockdevRemove(child)
except RPCFail, err:
msgs.append(str(err))
if msgs:
_Fail("; ".join(msgs))
def _RecursiveAssembleBD(disk, owner, as_primary):
"""Activate a block device for an instance.
This is run on the primary and secondary nodes for an instance.
@note: this function is called recursively.
@type disk: L{objects.Disk}
@param disk: the disk we try to assemble
@type owner: str
@param owner: the name of the instance which owns the disk
@type as_primary: boolean
@param as_primary: if we should make the block device
read/write
  @return: the assembled device, or C{True} in case no assembly
      was needed
@raise errors.BlockDeviceError: in case there is an error
during the activation of the children or the device
itself
"""
children = []
if disk.children:
mcn = disk.ChildrenNeeded()
if mcn == -1:
mcn = 0 # max number of Nones allowed
else:
mcn = len(disk.children) - mcn # max number of Nones
for chld_disk in disk.children:
try:
cdev = _RecursiveAssembleBD(chld_disk, owner, as_primary)
except errors.BlockDeviceError, err:
if children.count(None) >= mcn:
raise
cdev = None
logging.error("Error in child activation (but continuing): %s",
str(err))
children.append(cdev)
if as_primary or disk.AssembleOnSecondary():
r_dev = bdev.Assemble(disk.dev_type, disk.physical_id, children, disk.size)
r_dev.SetSyncSpeed(constants.SYNC_SPEED)
result = r_dev
if as_primary or disk.OpenOnSecondary():
r_dev.Open()
DevCacheManager.UpdateCache(r_dev.dev_path, owner,
as_primary, disk.iv_name)
else:
result = True
return result
def BlockdevAssemble(disk, owner, as_primary, idx):
"""Activate a block device for an instance.
This is a wrapper over _RecursiveAssembleBD.
@rtype: str or boolean
@return: a C{/dev/...} path for primary nodes, and
C{True} for secondary nodes
"""
try:
result = _RecursiveAssembleBD(disk, owner, as_primary)
if isinstance(result, bdev.BlockDev):
# pylint: disable-msg=E1103
result = result.dev_path
if as_primary:
_SymlinkBlockDev(owner, result, idx)
except errors.BlockDeviceError, err:
_Fail("Error while assembling disk: %s", err, exc=True)
except OSError, err:
_Fail("Error while symlinking disk: %s", err, exc=True)
return result
def BlockdevShutdown(disk):
"""Shut down a block device.
First, if the device is assembled (Attach() is successful), then
the device is shutdown. Then the children of the device are
shutdown.
  This function is called recursively. Note that we don't cache the
  children (as opposed to assemble), since shutting down one device
  doesn't require the device above it to be active.
@type disk: L{objects.Disk}
@param disk: the description of the disk we should
shutdown
@rtype: None
"""
msgs = []
r_dev = _RecursiveFindBD(disk)
if r_dev is not None:
r_path = r_dev.dev_path
try:
r_dev.Shutdown()
DevCacheManager.RemoveCache(r_path)
except errors.BlockDeviceError, err:
msgs.append(str(err))
if disk.children:
for child in disk.children:
try:
BlockdevShutdown(child)
except RPCFail, err:
msgs.append(str(err))
if msgs:
_Fail("; ".join(msgs))
def BlockdevAddchildren(parent_cdev, new_cdevs):
"""Extend a mirrored block device.
@type parent_cdev: L{objects.Disk}
@param parent_cdev: the disk to which we should add children
@type new_cdevs: list of L{objects.Disk}
@param new_cdevs: the list of children which we should add
@rtype: None
"""
parent_bdev = _RecursiveFindBD(parent_cdev)
if parent_bdev is None:
_Fail("Can't find parent device '%s' in add children", parent_cdev)
new_bdevs = [_RecursiveFindBD(disk) for disk in new_cdevs]
if new_bdevs.count(None) > 0:
_Fail("Can't find new device(s) to add: %s:%s", new_bdevs, new_cdevs)
parent_bdev.AddChildren(new_bdevs)
def BlockdevRemovechildren(parent_cdev, new_cdevs):
"""Shrink a mirrored block device.
@type parent_cdev: L{objects.Disk}
@param parent_cdev: the disk from which we should remove children
@type new_cdevs: list of L{objects.Disk}
@param new_cdevs: the list of children which we should remove
@rtype: None
"""
parent_bdev = _RecursiveFindBD(parent_cdev)
if parent_bdev is None:
_Fail("Can't find parent device '%s' in remove children", parent_cdev)
devs = []
for disk in new_cdevs:
rpath = disk.StaticDevPath()
if rpath is None:
bd = _RecursiveFindBD(disk)
if bd is None:
_Fail("Can't find device %s while removing children", disk)
else:
devs.append(bd.dev_path)
else:
if not utils.IsNormAbsPath(rpath):
_Fail("Strange path returned from StaticDevPath: '%s'", rpath)
devs.append(rpath)
parent_bdev.RemoveChildren(devs)
def BlockdevGetmirrorstatus(disks):
"""Get the mirroring status of a list of devices.
@type disks: list of L{objects.Disk}
@param disks: the list of disks which we should query
  @rtype: list
@return: List of L{objects.BlockDevStatus}, one for each disk
@raise errors.BlockDeviceError: if any of the disks cannot be
found
"""
stats = []
for dsk in disks:
rbd = _RecursiveFindBD(dsk)
if rbd is None:
_Fail("Can't find device %s", dsk)
stats.append(rbd.CombinedSyncStatus())
return stats
def BlockdevGetmirrorstatusMulti(disks):
"""Get the mirroring status of a list of devices.
@type disks: list of L{objects.Disk}
@param disks: the list of disks which we should query
  @rtype: list
@return: List of tuples, (bool, status), one for each disk; bool denotes
success/failure, status is L{objects.BlockDevStatus} on success, string
otherwise
"""
result = []
for disk in disks:
try:
rbd = _RecursiveFindBD(disk)
if rbd is None:
result.append((False, "Can't find device %s" % disk))
continue
status = rbd.CombinedSyncStatus()
except errors.BlockDeviceError, err:
logging.exception("Error while getting disk status")
result.append((False, str(err)))
else:
result.append((True, status))
assert len(disks) == len(result)
return result
def _RecursiveFindBD(disk):
"""Check if a device is activated.
If so, return information about the real device.
@type disk: L{objects.Disk}
@param disk: the disk object we need to find
@return: None if the device can't be found,
otherwise the device instance
"""
children = []
if disk.children:
for chdisk in disk.children:
children.append(_RecursiveFindBD(chdisk))
return bdev.FindDevice(disk.dev_type, disk.physical_id, children, disk.size)
def _OpenRealBD(disk):
"""Opens the underlying block device of a disk.
@type disk: L{objects.Disk}
@param disk: the disk object we want to open
"""
real_disk = _RecursiveFindBD(disk)
if real_disk is None:
_Fail("Block device '%s' is not set up", disk)
real_disk.Open()
return real_disk
def BlockdevFind(disk):
"""Check if a device is activated.
If it is, return information about the real device.
@type disk: L{objects.Disk}
@param disk: the disk to find
@rtype: None or objects.BlockDevStatus
  @return: None if the disk cannot be found, otherwise the current
      sync status information
"""
try:
rbd = _RecursiveFindBD(disk)
except errors.BlockDeviceError, err:
_Fail("Failed to find device: %s", err, exc=True)
if rbd is None:
return None
return rbd.GetSyncStatus()
def BlockdevGetsize(disks):
"""Computes the size of the given disks.
If a disk is not found, returns None instead.
@type disks: list of L{objects.Disk}
@param disks: the list of disk to compute the size for
@rtype: list
@return: list with elements None if the disk cannot be found,
otherwise the size
"""
result = []
for cf in disks:
try:
rbd = _RecursiveFindBD(cf)
except errors.BlockDeviceError:
result.append(None)
continue
if rbd is None:
result.append(None)
else:
result.append(rbd.GetActualSize())
return result
def BlockdevExport(disk, dest_node, dest_path, cluster_name):
"""Export a block device to a remote node.
@type disk: L{objects.Disk}
@param disk: the description of the disk to export
@type dest_node: str
@param dest_node: the destination node to export to
@type dest_path: str
@param dest_path: the destination path on the target node
@type cluster_name: str
@param cluster_name: the cluster name, needed for SSH hostalias
@rtype: None
"""
real_disk = _OpenRealBD(disk)
# the block size on the read dd is 1MiB to match our units
expcmd = utils.BuildShellCmd("set -e; set -o pipefail; "
"dd if=%s bs=1048576 count=%s",
real_disk.dev_path, str(disk.size))
  # we set here a smaller block size as, due to ssh buffering, more
  # than 64-128k will mostly be ignored; we use nocreat to fail if the
  # device is not already there or we pass a wrong path; we use
  # notrunc to not attempt truncation on an LV device; we use oflag=dsync
  # so as not to buffer too much memory; this means that at best we flush
  # every 64k, which will not be very fast
destcmd = utils.BuildShellCmd("dd of=%s conv=nocreat,notrunc bs=65536"
" oflag=dsync", dest_path)
remotecmd = _GetSshRunner(cluster_name).BuildCmd(dest_node,
constants.GANETI_RUNAS,
destcmd)
# all commands have been checked, so we're safe to combine them
command = '|'.join([expcmd, utils.ShellQuoteArgs(remotecmd)])
result = utils.RunCmd(["bash", "-c", command])
if result.failed:
_Fail("Disk copy command '%s' returned error: %s"
" output: %s", command, result.fail_reason, result.output)
def UploadFile(file_name, data, mode, uid, gid, atime, mtime):
"""Write a file to the filesystem.
This allows the master to overwrite(!) a file. It will only perform
the operation if the file belongs to a list of configuration files.
@type file_name: str
@param file_name: the target file name
@type data: str
@param data: the new contents of the file
@type mode: int
@param mode: the mode to give the file (can be None)
@type uid: string
@param uid: the owner of the file
@type gid: string
@param gid: the group of the file
@type atime: float
@param atime: the atime to set on the file (can be None)
@type mtime: float
@param mtime: the mtime to set on the file (can be None)
@rtype: None
"""
if not os.path.isabs(file_name):
_Fail("Filename passed to UploadFile is not absolute: '%s'", file_name)
if file_name not in _ALLOWED_UPLOAD_FILES:
_Fail("Filename passed to UploadFile not in allowed upload targets: '%s'",
file_name)
raw_data = _Decompress(data)
if not (isinstance(uid, basestring) and isinstance(gid, basestring)):
_Fail("Invalid username/groupname type")
getents = runtime.GetEnts()
uid = getents.LookupUser(uid)
gid = getents.LookupGroup(gid)
utils.SafeWriteFile(file_name, None,
data=raw_data, mode=mode, uid=uid, gid=gid,
atime=atime, mtime=mtime)
def RunOob(oob_program, command, node, timeout):
"""Executes oob_program with given command on given node.
@param oob_program: The path to the executable oob_program
@param command: The command to invoke on oob_program
@param node: The node given as an argument to the program
@param timeout: Timeout after which we kill the oob program
@return: stdout
@raise RPCFail: If execution fails for some reason
"""
result = utils.RunCmd([oob_program, command, node], timeout=timeout)
if result.failed:
_Fail("'%s' failed with reason '%s'; output: %s", result.cmd,
result.fail_reason, result.output)
return result.stdout
def WriteSsconfFiles(values):
"""Update all ssconf files.
Wrapper around the SimpleStore.WriteFiles.
"""
ssconf.SimpleStore().WriteFiles(values)
def _ErrnoOrStr(err):
"""Format an EnvironmentError exception.
If the L{err} argument has an errno attribute, it will be looked up
and converted into a textual C{E...} description. Otherwise the
string representation of the error will be returned.
@type err: L{EnvironmentError}
@param err: the exception to format
"""
if hasattr(err, 'errno'):
detail = errno.errorcode[err.errno]
else:
detail = str(err)
return detail
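# e.g. an EnvironmentError carrying errno 2 is formatted as "ENOENT", while
# an exception without an errno attribute falls back to str(err).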
def _OSOndiskAPIVersion(os_dir):
"""Compute and return the API version of a given OS.
This function will try to read the API version of the OS residing in
the 'os_dir' directory.
@type os_dir: str
@param os_dir: the directory in which we should look for the OS
@rtype: tuple
@return: tuple (status, data) with status denoting the validity and
      data holding either the valid versions or an error message
"""
api_file = utils.PathJoin(os_dir, constants.OS_API_FILE)
try:
st = os.stat(api_file)
except EnvironmentError, err:
return False, ("Required file '%s' not found under path %s: %s" %
(constants.OS_API_FILE, os_dir, _ErrnoOrStr(err)))
if not stat.S_ISREG(stat.S_IFMT(st.st_mode)):
return False, ("File '%s' in %s is not a regular file" %
(constants.OS_API_FILE, os_dir))
try:
api_versions = utils.ReadFile(api_file).splitlines()
except EnvironmentError, err:
return False, ("Error while reading the API version file at %s: %s" %
(api_file, _ErrnoOrStr(err)))
try:
api_versions = [int(version.strip()) for version in api_versions]
except (TypeError, ValueError), err:
return False, ("API version(s) can't be converted to integer: %s" %
str(err))
return True, api_versions
def DiagnoseOS(top_dirs=None):
"""Compute the validity for all OSes.
@type top_dirs: list
@param top_dirs: the list of directories in which to
search (if not given defaults to
L{constants.OS_SEARCH_PATH})
@rtype: list of L{objects.OS}
@return: a list of tuples (name, path, status, diagnose, variants,
      parameters, api_versions) for all (potential) OSes under all
search paths, where:
- name is the (potential) OS name
- path is the full path to the OS
- status True/False is the validity of the OS
- diagnose is the error message for an invalid OS, otherwise empty
- variants is a list of supported OS variants, if any
- parameters is a list of (name, help) parameters, if any
          - api_versions is a list of supported OS API versions
"""
if top_dirs is None:
top_dirs = constants.OS_SEARCH_PATH
result = []
for dir_name in top_dirs:
if os.path.isdir(dir_name):
try:
f_names = utils.ListVisibleFiles(dir_name)
except EnvironmentError, err:
logging.exception("Can't list the OS directory %s: %s", dir_name, err)
break
for name in f_names:
os_path = utils.PathJoin(dir_name, name)
status, os_inst = _TryOSFromDisk(name, base_dir=dir_name)
if status:
diagnose = ""
variants = os_inst.supported_variants
parameters = os_inst.supported_parameters
api_versions = os_inst.api_versions
else:
diagnose = os_inst
variants = parameters = api_versions = []
result.append((name, os_path, status, diagnose, variants,
parameters, api_versions))
return result
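# Usage sketch: summarising the per-OS diagnostics collected above.
#
#   for (name, os_path, op_status, diagnose, variants,
#        parameters, api_versions) in DiagnoseOS():
#     if op_status:
#       print "%s at %s: variants=%s api=%s" % (name, os_path, variants,
#                                               api_versions)
#     else:
#       print "%s at %s is invalid: %s" % (name, os_path, diagnose)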
def _TryOSFromDisk(name, base_dir=None):
"""Create an OS instance from disk.
This function will return an OS instance if the given name is a
valid OS name.
@type base_dir: string
@keyword base_dir: Base directory containing OS installations.
Defaults to a search in all the OS_SEARCH_PATH dirs.
@rtype: tuple
@return: success and either the OS instance if we find a valid one,
or error message
"""
if base_dir is None:
os_dir = utils.FindFile(name, constants.OS_SEARCH_PATH, os.path.isdir)
else:
os_dir = utils.FindFile(name, [base_dir], os.path.isdir)
if os_dir is None:
return False, "Directory for OS %s not found in search path" % name
status, api_versions = _OSOndiskAPIVersion(os_dir)
if not status:
# push the error up
return status, api_versions
if not constants.OS_API_VERSIONS.intersection(api_versions):
return False, ("API version mismatch for path '%s': found %s, want %s." %
(os_dir, api_versions, constants.OS_API_VERSIONS))
  # OS files dictionary; we will populate it with the absolute path names
os_files = dict.fromkeys(constants.OS_SCRIPTS)
if max(api_versions) >= constants.OS_API_V15:
os_files[constants.OS_VARIANTS_FILE] = ''
if max(api_versions) >= constants.OS_API_V20:
os_files[constants.OS_PARAMETERS_FILE] = ''
else:
del os_files[constants.OS_SCRIPT_VERIFY]
for filename in os_files:
os_files[filename] = utils.PathJoin(os_dir, filename)
try:
st = os.stat(os_files[filename])
except EnvironmentError, err:
return False, ("File '%s' under path '%s' is missing (%s)" %
(filename, os_dir, _ErrnoOrStr(err)))
if not stat.S_ISREG(stat.S_IFMT(st.st_mode)):
return False, ("File '%s' under path '%s' is not a regular file" %
(filename, os_dir))
if filename in constants.OS_SCRIPTS:
if stat.S_IMODE(st.st_mode) & stat.S_IXUSR != stat.S_IXUSR:
return False, ("File '%s' under path '%s' is not executable" %
(filename, os_dir))
variants = []
if constants.OS_VARIANTS_FILE in os_files:
variants_file = os_files[constants.OS_VARIANTS_FILE]
try:
variants = utils.ReadFile(variants_file).splitlines()
except EnvironmentError, err:
return False, ("Error while reading the OS variants file at %s: %s" %
(variants_file, _ErrnoOrStr(err)))
if not variants:
return False, ("No supported os variant found")
parameters = []
if constants.OS_PARAMETERS_FILE in os_files:
parameters_file = os_files[constants.OS_PARAMETERS_FILE]
try:
parameters = utils.ReadFile(parameters_file).splitlines()
except EnvironmentError, err:
return False, ("Error while reading the OS parameters file at %s: %s" %
(parameters_file, _ErrnoOrStr(err)))
parameters = [v.split(None, 1) for v in parameters]
os_obj = objects.OS(name=name, path=os_dir,
create_script=os_files[constants.OS_SCRIPT_CREATE],
export_script=os_files[constants.OS_SCRIPT_EXPORT],
import_script=os_files[constants.OS_SCRIPT_IMPORT],
rename_script=os_files[constants.OS_SCRIPT_RENAME],
verify_script=os_files.get(constants.OS_SCRIPT_VERIFY,
None),
supported_variants=variants,
supported_parameters=parameters,
api_versions=api_versions)
return True, os_obj
def OSFromDisk(name, base_dir=None):
"""Create an OS instance from disk.
This function will return an OS instance if the given name is a
valid OS name. Otherwise, it will raise an appropriate
L{RPCFail} exception, detailing why this is not a valid OS.
This is just a wrapper over L{_TryOSFromDisk}, which doesn't raise
an exception but returns true/false status data.
@type base_dir: string
@keyword base_dir: Base directory containing OS installations.
Defaults to a search in all the OS_SEARCH_PATH dirs.
@rtype: L{objects.OS}
@return: the OS instance if we find a valid one
@raise RPCFail: if we don't find a valid OS
"""
name_only = objects.OS.GetName(name)
status, payload = _TryOSFromDisk(name_only, base_dir)
if not status:
_Fail(payload)
return payload
def OSCoreEnv(os_name, inst_os, os_params, debug=0):
"""Calculate the basic environment for an os script.
@type os_name: str
@param os_name: full operating system name (including variant)
@type inst_os: L{objects.OS}
@param inst_os: operating system for which the environment is being built
@type os_params: dict
@param os_params: the OS parameters
@type debug: integer
@param debug: debug level (0 or 1, for OS Api 10)
@rtype: dict
@return: dict of environment variables
"""
result = {}
api_version = \
max(constants.OS_API_VERSIONS.intersection(inst_os.api_versions))
result['OS_API_VERSION'] = '%d' % api_version
result['OS_NAME'] = inst_os.name
result['DEBUG_LEVEL'] = '%d' % debug
# OS variants
if api_version >= constants.OS_API_V15:
variant = objects.OS.GetVariant(os_name)
if not variant:
variant = inst_os.supported_variants[0]
result['OS_VARIANT'] = variant
# OS params
for pname, pvalue in os_params.items():
result['OSP_%s' % pname.upper()] = pvalue
return result
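# Result sketch for a hypothetical inst_os whose name is "debootstrap" and
# whose supported API versions include 20:
#
#   OSCoreEnv("debootstrap+default", inst_os, {"dhcp": "yes"})
#   # -> {'OS_API_VERSION': '20', 'OS_NAME': 'debootstrap',
#   #     'DEBUG_LEVEL': '0', 'OS_VARIANT': 'default', 'OSP_DHCP': 'yes'}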
def OSEnvironment(instance, inst_os, debug=0):
"""Calculate the environment for an os script.
@type instance: L{objects.Instance}
@param instance: target instance for the os script run
@type inst_os: L{objects.OS}
@param inst_os: operating system for which the environment is being built
@type debug: integer
@param debug: debug level (0 or 1, for OS Api 10)
@rtype: dict
@return: dict of environment variables
@raise errors.BlockDeviceError: if the block device
cannot be found
"""
result = OSCoreEnv(instance.os, inst_os, instance.osparams, debug=debug)
for attr in ["name", "os", "uuid", "ctime", "mtime", "primary_node"]:
result["INSTANCE_%s" % attr.upper()] = str(getattr(instance, attr))
result['HYPERVISOR'] = instance.hypervisor
result['DISK_COUNT'] = '%d' % len(instance.disks)
result['NIC_COUNT'] = '%d' % len(instance.nics)
result['INSTANCE_SECONDARY_NODES'] = \
('%s' % " ".join(instance.secondary_nodes))
# Disks
for idx, disk in enumerate(instance.disks):
real_disk = _OpenRealBD(disk)
result['DISK_%d_PATH' % idx] = real_disk.dev_path
result['DISK_%d_ACCESS' % idx] = disk.mode
if constants.HV_DISK_TYPE in instance.hvparams:
result['DISK_%d_FRONTEND_TYPE' % idx] = \
instance.hvparams[constants.HV_DISK_TYPE]
if disk.dev_type in constants.LDS_BLOCK:
result['DISK_%d_BACKEND_TYPE' % idx] = 'block'
elif disk.dev_type == constants.LD_FILE:
result['DISK_%d_BACKEND_TYPE' % idx] = \
'file:%s' % disk.physical_id[0]
# NICs
for idx, nic in enumerate(instance.nics):
result['NIC_%d_MAC' % idx] = nic.mac
if nic.ip:
result['NIC_%d_IP' % idx] = nic.ip
result['NIC_%d_MODE' % idx] = nic.nicparams[constants.NIC_MODE]
if nic.nicparams[constants.NIC_MODE] == constants.NIC_MODE_BRIDGED:
result['NIC_%d_BRIDGE' % idx] = nic.nicparams[constants.NIC_LINK]
if nic.nicparams[constants.NIC_LINK]:
result['NIC_%d_LINK' % idx] = nic.nicparams[constants.NIC_LINK]
if constants.HV_NIC_TYPE in instance.hvparams:
result['NIC_%d_FRONTEND_TYPE' % idx] = \
instance.hvparams[constants.HV_NIC_TYPE]
# HV/BE params
for source, kind in [(instance.beparams, "BE"), (instance.hvparams, "HV")]:
for key, value in source.items():
result["INSTANCE_%s_%s" % (kind, key)] = str(value)
return result
def BlockdevGrow(disk, amount, dryrun):
"""Grow a stack of block devices.
  This function is called recursively, with the children being
  resized first.
@type disk: L{objects.Disk}
@param disk: the disk to be grown
@type amount: integer
@param amount: the amount (in mebibytes) to grow with
@type dryrun: boolean
@param dryrun: whether to execute the operation in simulation mode
only, without actually increasing the size
  @rtype: None
  @raise RPCFail: if the device cannot be found or the grow
      operation fails
"""
r_dev = _RecursiveFindBD(disk)
if r_dev is None:
_Fail("Cannot find block device %s", disk)
try:
r_dev.Grow(amount, dryrun)
except errors.BlockDeviceError, err:
_Fail("Failed to grow block device: %s", err, exc=True)
def BlockdevSnapshot(disk):
"""Create a snapshot copy of a block device.
This function is called recursively, and the snapshot is actually created
just for the leaf lvm backend device.
@type disk: L{objects.Disk}
@param disk: the disk to be snapshotted
@rtype: string
@return: snapshot disk ID as (vg, lv)
"""
if disk.dev_type == constants.LD_DRBD8:
if not disk.children:
_Fail("DRBD device '%s' without backing storage cannot be snapshotted",
disk.unique_id)
return BlockdevSnapshot(disk.children[0])
elif disk.dev_type == constants.LD_LV:
r_dev = _RecursiveFindBD(disk)
if r_dev is not None:
# FIXME: choose a saner value for the snapshot size
# let's stay on the safe side and ask for the full size, for now
return r_dev.Snapshot(disk.size)
else:
_Fail("Cannot find block device %s", disk)
else:
_Fail("Cannot snapshot non-lvm block device '%s' of type '%s'",
disk.unique_id, disk.dev_type)
def FinalizeExport(instance, snap_disks):
"""Write out the export configuration information.
@type instance: L{objects.Instance}
@param instance: the instance which we export, used for
saving configuration
@type snap_disks: list of L{objects.Disk}
@param snap_disks: list of snapshot block devices, which
will be used to get the actual name of the dump file
@rtype: None
"""
destdir = utils.PathJoin(constants.EXPORT_DIR, instance.name + ".new")
finaldestdir = utils.PathJoin(constants.EXPORT_DIR, instance.name)
config = objects.SerializableConfigParser()
config.add_section(constants.INISECT_EXP)
config.set(constants.INISECT_EXP, 'version', '0')
config.set(constants.INISECT_EXP, 'timestamp', '%d' % int(time.time()))
config.set(constants.INISECT_EXP, 'source', instance.primary_node)
config.set(constants.INISECT_EXP, 'os', instance.os)
config.set(constants.INISECT_EXP, "compression", "none")
config.add_section(constants.INISECT_INS)
config.set(constants.INISECT_INS, 'name', instance.name)
config.set(constants.INISECT_INS, 'memory', '%d' %
instance.beparams[constants.BE_MEMORY])
config.set(constants.INISECT_INS, 'vcpus', '%d' %
instance.beparams[constants.BE_VCPUS])
config.set(constants.INISECT_INS, 'disk_template', instance.disk_template)
config.set(constants.INISECT_INS, 'hypervisor', instance.hypervisor)
config.set(constants.INISECT_INS, "tags", " ".join(instance.GetTags()))
nic_total = 0
for nic_count, nic in enumerate(instance.nics):
nic_total += 1
config.set(constants.INISECT_INS, 'nic%d_mac' %
nic_count, '%s' % nic.mac)
config.set(constants.INISECT_INS, 'nic%d_ip' % nic_count, '%s' % nic.ip)
for param in constants.NICS_PARAMETER_TYPES:
config.set(constants.INISECT_INS, 'nic%d_%s' % (nic_count, param),
'%s' % nic.nicparams.get(param, None))
# TODO: redundant: on load can read nics until it doesn't exist
  config.set(constants.INISECT_INS, 'nic_count', '%d' % nic_total)
disk_total = 0
for disk_count, disk in enumerate(snap_disks):
if disk:
disk_total += 1
config.set(constants.INISECT_INS, 'disk%d_ivname' % disk_count,
('%s' % disk.iv_name))
config.set(constants.INISECT_INS, 'disk%d_dump' % disk_count,
('%s' % disk.physical_id[1]))
config.set(constants.INISECT_INS, 'disk%d_size' % disk_count,
('%d' % disk.size))
  config.set(constants.INISECT_INS, 'disk_count', '%d' % disk_total)
# New-style hypervisor/backend parameters
config.add_section(constants.INISECT_HYP)
for name, value in instance.hvparams.items():
if name not in constants.HVC_GLOBALS:
config.set(constants.INISECT_HYP, name, str(value))
config.add_section(constants.INISECT_BEP)
for name, value in instance.beparams.items():
config.set(constants.INISECT_BEP, name, str(value))
config.add_section(constants.INISECT_OSP)
for name, value in instance.osparams.items():
config.set(constants.INISECT_OSP, name, str(value))
utils.WriteFile(utils.PathJoin(destdir, constants.EXPORT_CONF_FILE),
data=config.Dumps())
shutil.rmtree(finaldestdir, ignore_errors=True)
shutil.move(destdir, finaldestdir)
def ExportInfo(dest):
"""Get export configuration information.
@type dest: str
@param dest: directory containing the export
  @rtype: string
  @return: the export configuration serialized via
      L{objects.SerializableConfigParser.Dumps}
"""
cff = utils.PathJoin(dest, constants.EXPORT_CONF_FILE)
config = objects.SerializableConfigParser()
config.read(cff)
if (not config.has_section(constants.INISECT_EXP) or
not config.has_section(constants.INISECT_INS)):
_Fail("Export info file doesn't have the required fields")
return config.Dumps()
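# Usage sketch: the returned string can be re-parsed on the caller's side,
# assuming the Loads counterpart of SerializableConfigParser.Dumps:
#
#   dump = ExportInfo("/var/lib/ganeti/export/instance1.example.com")
#   config = objects.SerializableConfigParser.Loads(dump)
#   print config.get(constants.INISECT_EXP, "timestamp")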
def ListExports():
"""Return a list of exports currently available on this machine.
@rtype: list
@return: list of the exports
"""
if os.path.isdir(constants.EXPORT_DIR):
return sorted(utils.ListVisibleFiles(constants.EXPORT_DIR))
else:
_Fail("No exports directory")
def RemoveExport(export):
"""Remove an existing export from the node.
@type export: str
@param export: the name of the export to remove
@rtype: None
"""
target = utils.PathJoin(constants.EXPORT_DIR, export)
try:
shutil.rmtree(target)
except EnvironmentError, err:
_Fail("Error while removing the export: %s", err, exc=True)
def BlockdevRename(devlist):
"""Rename a list of block devices.
@type devlist: list of tuples
@param devlist: list of tuples of the form (disk,
new_logical_id, new_physical_id); disk is an
L{objects.Disk} object describing the current disk,
and new logical_id/physical_id is the name we
rename it to
  @rtype: None
  @raise RPCFail: if any of the renames failed
"""
msgs = []
result = True
for disk, unique_id in devlist:
dev = _RecursiveFindBD(disk)
if dev is None:
msgs.append("Can't find device %s in rename" % str(disk))
result = False
continue
try:
old_rpath = dev.dev_path
dev.Rename(unique_id)
new_rpath = dev.dev_path
if old_rpath != new_rpath:
DevCacheManager.RemoveCache(old_rpath)
# FIXME: we should add the new cache information here, like:
# DevCacheManager.UpdateCache(new_rpath, owner, ...)
# but we don't have the owner here - maybe parse from existing
# cache? for now, we only lose lvm data when we rename, which
# is less critical than DRBD or MD
except errors.BlockDeviceError, err:
msgs.append("Can't rename device '%s' to '%s': %s" %
(dev, unique_id, err))
logging.exception("Can't rename device '%s' to '%s'", dev, unique_id)
result = False
if not result:
_Fail("; ".join(msgs))
def _TransformFileStorageDir(fs_dir):
"""Checks whether given file_storage_dir is valid.
Checks wheter the given fs_dir is within the cluster-wide default
file_storage_dir or the shared_file_storage_dir, which are stored in
SimpleStore. Only paths under those directories are allowed.
@type fs_dir: str
@param fs_dir: the path to check
@return: the normalized path if valid, None otherwise
"""
if not constants.ENABLE_FILE_STORAGE:
_Fail("File storage disabled at configure time")
cfg = _GetConfig()
fs_dir = os.path.normpath(fs_dir)
base_fstore = cfg.GetFileStorageDir()
base_shared = cfg.GetSharedFileStorageDir()
if ((os.path.commonprefix([fs_dir, base_fstore]) != base_fstore) and
(os.path.commonprefix([fs_dir, base_shared]) != base_shared)):
_Fail("File storage directory '%s' is not under base file"
" storage directory '%s' or shared storage directory '%s'",
fs_dir, base_fstore, base_shared)
return fs_dir
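# Behaviour sketch, assuming a default directory of "/srv/ganeti/file-storage";
# note that os.path.commonprefix() compares strings character-wise, so sibling
# paths sharing the textual prefix would also pass the check:
#
#   _TransformFileStorageDir("/srv/ganeti/file-storage/inst1")  # accepted
#   _TransformFileStorageDir("/srv/elsewhere/inst1")            # raises RPCFail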
def CreateFileStorageDir(file_storage_dir):
"""Create file storage directory.
@type file_storage_dir: str
@param file_storage_dir: directory to create
  @rtype: None
  @raise RPCFail: if the path exists but is not a directory, or if
      the directory cannot be created
"""
file_storage_dir = _TransformFileStorageDir(file_storage_dir)
if os.path.exists(file_storage_dir):
if not os.path.isdir(file_storage_dir):
_Fail("Specified storage dir '%s' is not a directory",
file_storage_dir)
else:
try:
os.makedirs(file_storage_dir, 0750)
except OSError, err:
_Fail("Cannot create file storage directory '%s': %s",
file_storage_dir, err, exc=True)
def RemoveFileStorageDir(file_storage_dir):
"""Remove file storage directory.
  Remove it only if it's empty; otherwise the RPC call is failed.
  @type file_storage_dir: str
  @param file_storage_dir: the directory we should clean up
  @rtype: None
  @raise RPCFail: if the path is not a directory, is not empty, or
      cannot be removed
"""
file_storage_dir = _TransformFileStorageDir(file_storage_dir)
if os.path.exists(file_storage_dir):
if not os.path.isdir(file_storage_dir):
_Fail("Specified Storage directory '%s' is not a directory",
file_storage_dir)
# deletes dir only if empty, otherwise we want to fail the rpc call
try:
os.rmdir(file_storage_dir)
except OSError, err:
_Fail("Cannot remove file storage directory '%s': %s",
file_storage_dir, err)
def RenameFileStorageDir(old_file_storage_dir, new_file_storage_dir):
"""Rename the file storage directory.
@type old_file_storage_dir: str
@param old_file_storage_dir: the current path
@type new_file_storage_dir: str
@param new_file_storage_dir: the name we should rename to
  @rtype: None
  @raise RPCFail: if the rename cannot be performed
"""
old_file_storage_dir = _TransformFileStorageDir(old_file_storage_dir)
new_file_storage_dir = _TransformFileStorageDir(new_file_storage_dir)
if not os.path.exists(new_file_storage_dir):
if os.path.isdir(old_file_storage_dir):
try:
os.rename(old_file_storage_dir, new_file_storage_dir)
except OSError, err:
_Fail("Cannot rename '%s' to '%s': %s",
old_file_storage_dir, new_file_storage_dir, err)
else:
_Fail("Specified storage dir '%s' is not a directory",
old_file_storage_dir)
else:
if os.path.exists(old_file_storage_dir):
_Fail("Cannot rename '%s' to '%s': both locations exist",
old_file_storage_dir, new_file_storage_dir)
def _EnsureJobQueueFile(file_name):
"""Checks whether the given filename is in the queue directory.
@type file_name: str
@param file_name: the file name we should check
@rtype: None
@raises RPCFail: if the file is not valid
"""
queue_dir = os.path.normpath(constants.QUEUE_DIR)
result = (os.path.commonprefix([queue_dir, file_name]) == queue_dir)
if not result:
_Fail("Passed job queue file '%s' does not belong to"
" the queue directory '%s'", file_name, queue_dir)
def JobQueueUpdate(file_name, content):
"""Updates a file in the queue directory.
This is just a wrapper over L{utils.io.WriteFile}, with proper
checking.
@type file_name: str
@param file_name: the job file name
@type content: str
@param content: the new job contents
  @rtype: None
"""
_EnsureJobQueueFile(file_name)
getents = runtime.GetEnts()
# Write and replace the file atomically
utils.WriteFile(file_name, data=_Decompress(content), uid=getents.masterd_uid,
gid=getents.masterd_gid)
def JobQueueRename(old, new):
"""Renames a job queue file.
This is just a wrapper over os.rename with proper checking.
@type old: str
@param old: the old (actual) file name
@type new: str
@param new: the desired file name
  @rtype: None
"""
_EnsureJobQueueFile(old)
_EnsureJobQueueFile(new)
utils.RenameFile(old, new, mkdir=True)
def BlockdevClose(instance_name, disks):
"""Closes the given block devices.
This means they will be switched to secondary mode (in case of
DRBD).
@param instance_name: if the argument is not empty, the symlinks
of this instance will be removed
@type disks: list of L{objects.Disk}
@param disks: the list of disks to be closed
  @rtype: None
  @raise RPCFail: if a device cannot be found or cannot be
      switched to secondary mode
"""
bdevs = []
for cf in disks:
rd = _RecursiveFindBD(cf)
if rd is None:
_Fail("Can't find device %s", cf)
bdevs.append(rd)
msg = []
for rd in bdevs:
try:
rd.Close()
except errors.BlockDeviceError, err:
msg.append(str(err))
if msg:
_Fail("Can't make devices secondary: %s", ",".join(msg))
else:
if instance_name:
_RemoveBlockDevLinks(instance_name, disks)
def ValidateHVParams(hvname, hvparams):
"""Validates the given hypervisor parameters.
@type hvname: string
@param hvname: the hypervisor name
@type hvparams: dict
@param hvparams: the hypervisor parameters to be validated
@rtype: None
"""
try:
hv_type = hypervisor.GetHypervisor(hvname)
hv_type.ValidateParameters(hvparams)
except errors.HypervisorError, err:
_Fail(str(err), log=False)
def _CheckOSPList(os_obj, parameters):
"""Check whether a list of parameters is supported by the OS.
@type os_obj: L{objects.OS}
@param os_obj: OS object to check
@type parameters: list
@param parameters: the list of parameters to check
"""
supported = [v[0] for v in os_obj.supported_parameters]
delta = frozenset(parameters).difference(supported)
if delta:
_Fail("The following parameters are not supported"
" by the OS %s: %s" % (os_obj.name, utils.CommaJoin(delta)))
def ValidateOS(required, osname, checks, osparams):
"""Validate the given OS' parameters.
@type required: boolean
@param required: whether absence of the OS should translate into
failure or not
@type osname: string
@param osname: the OS to be validated
@type checks: list
@param checks: list of the checks to run (currently only 'parameters')
@type osparams: dict
@param osparams: dictionary with OS parameters
@rtype: boolean
@return: True if the validation passed, or False if the OS was not
found and L{required} was false
"""
if not constants.OS_VALIDATE_CALLS.issuperset(checks):
_Fail("Unknown checks required for OS %s: %s", osname,
set(checks).difference(constants.OS_VALIDATE_CALLS))
name_only = objects.OS.GetName(osname)
status, tbv = _TryOSFromDisk(name_only, None)
if not status:
if required:
_Fail(tbv)
else:
return False
if max(tbv.api_versions) < constants.OS_API_V20:
return True
if constants.OS_VALIDATE_PARAMETERS in checks:
_CheckOSPList(tbv, osparams.keys())
validate_env = OSCoreEnv(osname, tbv, osparams)
result = utils.RunCmd([tbv.verify_script] + checks, env=validate_env,
cwd=tbv.path)
if result.failed:
logging.error("os validate command '%s' returned error: %s output: %s",
result.cmd, result.fail_reason, result.output)
_Fail("OS validation script failed (%s), output: %s",
result.fail_reason, result.output, log=False)
return True
def DemoteFromMC():
"""Demotes the current node from master candidate role.
"""
# try to ensure we're not the master by mistake
master, myself = ssconf.GetMasterAndMyself()
if master == myself:
_Fail("ssconf status shows I'm the master node, will not demote")
result = utils.RunCmd([constants.DAEMON_UTIL, "check", constants.MASTERD])
if not result.failed:
_Fail("The master daemon is running, will not demote")
try:
if os.path.isfile(constants.CLUSTER_CONF_FILE):
utils.CreateBackup(constants.CLUSTER_CONF_FILE)
except EnvironmentError, err:
if err.errno != errno.ENOENT:
_Fail("Error while backing up cluster file: %s", err, exc=True)
utils.RemoveFile(constants.CLUSTER_CONF_FILE)
def _GetX509Filenames(cryptodir, name):
"""Returns the full paths for the private key and certificate.
"""
return (utils.PathJoin(cryptodir, name),
utils.PathJoin(cryptodir, name, _X509_KEY_FILE),
utils.PathJoin(cryptodir, name, _X509_CERT_FILE))
def CreateX509Certificate(validity, cryptodir=constants.CRYPTO_KEYS_DIR):
"""Creates a new X509 certificate for SSL/TLS.
@type validity: int
@param validity: Validity in seconds
@rtype: tuple; (string, string)
@return: Certificate name and public part
"""
(key_pem, cert_pem) = \
utils.GenerateSelfSignedX509Cert(netutils.Hostname.GetSysName(),
min(validity, _MAX_SSL_CERT_VALIDITY))
cert_dir = tempfile.mkdtemp(dir=cryptodir,
prefix="x509-%s-" % utils.TimestampForFilename())
try:
name = os.path.basename(cert_dir)
assert len(name) > 5
(_, key_file, cert_file) = _GetX509Filenames(cryptodir, name)
utils.WriteFile(key_file, mode=0400, data=key_pem)
utils.WriteFile(cert_file, mode=0400, data=cert_pem)
# Never return private key as it shouldn't leave the node
return (name, cert_pem)
except Exception:
shutil.rmtree(cert_dir, ignore_errors=True)
raise
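# Usage sketch (one-day validity): the returned name identifies the on-disk
# certificate directory, while only the public part is handed back.
#
#   (name, cert_pem) = CreateX509Certificate(24 * 60 * 60)
#   print "created certificate %s" % name
#   RemoveX509Certificate(name)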
def RemoveX509Certificate(name, cryptodir=constants.CRYPTO_KEYS_DIR):
"""Removes a X509 certificate.
@type name: string
@param name: Certificate name
"""
(cert_dir, key_file, cert_file) = _GetX509Filenames(cryptodir, name)
utils.RemoveFile(key_file)
utils.RemoveFile(cert_file)
try:
os.rmdir(cert_dir)
except EnvironmentError, err:
_Fail("Cannot remove certificate directory '%s': %s",
cert_dir, err)
def _GetImportExportIoCommand(instance, mode, ieio, ieargs):
"""Returns the command for the requested input/output.
@type instance: L{objects.Instance}
@param instance: The instance object
@param mode: Import/export mode
@param ieio: Input/output type
@param ieargs: Input/output arguments
"""
assert mode in (constants.IEM_IMPORT, constants.IEM_EXPORT)
env = None
prefix = None
suffix = None
exp_size = None
if ieio == constants.IEIO_FILE:
(filename, ) = ieargs
if not utils.IsNormAbsPath(filename):
_Fail("Path '%s' is not normalized or absolute", filename)
directory = os.path.normpath(os.path.dirname(filename))
if (os.path.commonprefix([constants.EXPORT_DIR, directory]) !=
constants.EXPORT_DIR):
_Fail("File '%s' is not under exports directory '%s'",
filename, constants.EXPORT_DIR)
# Create directory
utils.Makedirs(directory, mode=0750)
quoted_filename = utils.ShellQuote(filename)
if mode == constants.IEM_IMPORT:
suffix = "> %s" % quoted_filename
elif mode == constants.IEM_EXPORT:
suffix = "< %s" % quoted_filename
# Retrieve file size
try:
st = os.stat(filename)
except EnvironmentError, err:
logging.error("Can't stat(2) %s: %s", filename, err)
else:
exp_size = utils.BytesToMebibyte(st.st_size)
elif ieio == constants.IEIO_RAW_DISK:
(disk, ) = ieargs
real_disk = _OpenRealBD(disk)
if mode == constants.IEM_IMPORT:
      # we set a smaller block size here because, due to transport
      # buffering, more than 64-128k would mostly be ignored; we use
      # nocreat to fail if the device is not already there or we pass a
      # wrong path; we use notrunc to avoid truncating an LV device; we
      # use oflag=dsync to avoid buffering too much in memory; this
      # means that, at best, we flush every 64k, which will not be very
      # fast
suffix = utils.BuildShellCmd(("| dd of=%s conv=nocreat,notrunc"
" bs=%s oflag=dsync"),
real_disk.dev_path,
str(64 * 1024))
elif mode == constants.IEM_EXPORT:
# the block size on the read dd is 1MiB to match our units
prefix = utils.BuildShellCmd("dd if=%s bs=%s count=%s |",
real_disk.dev_path,
str(1024 * 1024), # 1 MB
str(disk.size))
exp_size = disk.size
elif ieio == constants.IEIO_SCRIPT:
(disk, disk_index, ) = ieargs
assert isinstance(disk_index, (int, long))
real_disk = _OpenRealBD(disk)
inst_os = OSFromDisk(instance.os)
env = OSEnvironment(instance, inst_os)
if mode == constants.IEM_IMPORT:
env["IMPORT_DEVICE"] = env["DISK_%d_PATH" % disk_index]
env["IMPORT_INDEX"] = str(disk_index)
script = inst_os.import_script
elif mode == constants.IEM_EXPORT:
env["EXPORT_DEVICE"] = real_disk.dev_path
env["EXPORT_INDEX"] = str(disk_index)
script = inst_os.export_script
# TODO: Pass special environment only to script
script_cmd = utils.BuildShellCmd("( cd %s && %s; )", inst_os.path, script)
if mode == constants.IEM_IMPORT:
suffix = "| %s" % script_cmd
elif mode == constants.IEM_EXPORT:
prefix = "%s |" % script_cmd
# Let script predict size
exp_size = constants.IE_CUSTOM_SIZE
else:
_Fail("Invalid %s I/O mode %r", mode, ieio)
return (env, prefix, suffix, exp_size)
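# Result sketch for a raw-disk export of a hypothetical 1024 MiB LV
# (mode=IEM_EXPORT, ieio=IEIO_RAW_DISK); the caller later glues the prefix
# and suffix around the transport command:
#
#   env = None
#   prefix = "dd if=/dev/xenvg/disk0 bs=1048576 count=1024 |"
#   suffix = None
#   exp_size = 1024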
def _CreateImportExportStatusDir(prefix):
"""Creates status directory for import/export.
"""
return tempfile.mkdtemp(dir=constants.IMPORT_EXPORT_DIR,
prefix=("%s-%s-" %
(prefix, utils.TimestampForFilename())))
def StartImportExportDaemon(mode, opts, host, port, instance, ieio, ieioargs):
"""Starts an import or export daemon.
  @param mode: Import/export mode
@type opts: L{objects.ImportExportOptions}
@param opts: Daemon options
@type host: string
@param host: Remote host for export (None for import)
@type port: int
@param port: Remote port for export (None for import)
@type instance: L{objects.Instance}
@param instance: Instance object
@param ieio: Input/output type
@param ieioargs: Input/output arguments
"""
if mode == constants.IEM_IMPORT:
prefix = "import"
if not (host is None and port is None):
_Fail("Can not specify host or port on import")
elif mode == constants.IEM_EXPORT:
prefix = "export"
if host is None or port is None:
_Fail("Host and port must be specified for an export")
else:
_Fail("Invalid mode %r", mode)
if (opts.key_name is None) ^ (opts.ca_pem is None):
_Fail("Cluster certificate can only be used for both key and CA")
(cmd_env, cmd_prefix, cmd_suffix, exp_size) = \
_GetImportExportIoCommand(instance, mode, ieio, ieioargs)
if opts.key_name is None:
# Use server.pem
key_path = constants.NODED_CERT_FILE
cert_path = constants.NODED_CERT_FILE
assert opts.ca_pem is None
else:
(_, key_path, cert_path) = _GetX509Filenames(constants.CRYPTO_KEYS_DIR,
opts.key_name)
assert opts.ca_pem is not None
for i in [key_path, cert_path]:
if not os.path.exists(i):
_Fail("File '%s' does not exist" % i)
status_dir = _CreateImportExportStatusDir(prefix)
try:
status_file = utils.PathJoin(status_dir, _IES_STATUS_FILE)
pid_file = utils.PathJoin(status_dir, _IES_PID_FILE)
ca_file = utils.PathJoin(status_dir, _IES_CA_FILE)
if opts.ca_pem is None:
# Use server.pem
ca = utils.ReadFile(constants.NODED_CERT_FILE)
else:
ca = opts.ca_pem
# Write CA file
utils.WriteFile(ca_file, data=ca, mode=0400)
cmd = [
constants.IMPORT_EXPORT_DAEMON,
status_file, mode,
"--key=%s" % key_path,
"--cert=%s" % cert_path,
"--ca=%s" % ca_file,
]
if host:
cmd.append("--host=%s" % host)
if port:
cmd.append("--port=%s" % port)
if opts.ipv6:
cmd.append("--ipv6")
else:
cmd.append("--ipv4")
if opts.compress:
cmd.append("--compress=%s" % opts.compress)
if opts.magic:
cmd.append("--magic=%s" % opts.magic)
if exp_size is not None:
cmd.append("--expected-size=%s" % exp_size)
if cmd_prefix:
cmd.append("--cmd-prefix=%s" % cmd_prefix)
if cmd_suffix:
cmd.append("--cmd-suffix=%s" % cmd_suffix)
if mode == constants.IEM_EXPORT:
# Retry connection a few times when connecting to remote peer
cmd.append("--connect-retries=%s" % constants.RIE_CONNECT_RETRIES)
cmd.append("--connect-timeout=%s" % constants.RIE_CONNECT_ATTEMPT_TIMEOUT)
elif opts.connect_timeout is not None:
assert mode == constants.IEM_IMPORT
# Overall timeout for establishing connection while listening
cmd.append("--connect-timeout=%s" % opts.connect_timeout)
logfile = _InstanceLogName(prefix, instance.os, instance.name)
# TODO: Once _InstanceLogName uses tempfile.mkstemp, StartDaemon has
# support for receiving a file descriptor for output
utils.StartDaemon(cmd, env=cmd_env, pidfile=pid_file,
output=logfile)
# The import/export name is simply the status directory name
return os.path.basename(status_dir)
except Exception:
shutil.rmtree(status_dir, ignore_errors=True)
raise
def GetImportExportStatus(names):
"""Returns import/export daemon status.
@type names: sequence
@param names: List of names
@rtype: List of dicts
  @return: a list with the state of each named import/export, or None
      where a status couldn't be read
"""
result = []
for name in names:
status_file = utils.PathJoin(constants.IMPORT_EXPORT_DIR, name,
_IES_STATUS_FILE)
try:
data = utils.ReadFile(status_file)
except EnvironmentError, err:
if err.errno != errno.ENOENT:
raise
data = None
if not data:
result.append(None)
continue
result.append(serializer.LoadJson(data))
return result
def AbortImportExport(name):
"""Sends SIGTERM to a running import/export daemon.
"""
logging.info("Abort import/export %s", name)
status_dir = utils.PathJoin(constants.IMPORT_EXPORT_DIR, name)
pid = utils.ReadLockedPidFile(utils.PathJoin(status_dir, _IES_PID_FILE))
if pid:
logging.info("Import/export %s is running with PID %s, sending SIGTERM",
name, pid)
utils.IgnoreProcessNotFound(os.kill, pid, signal.SIGTERM)
def CleanupImportExport(name):
"""Cleanup after an import or export.
  If the import/export daemon is still running, it's killed. Afterwards the
whole status directory is removed.
"""
logging.info("Finalizing import/export %s", name)
status_dir = utils.PathJoin(constants.IMPORT_EXPORT_DIR, name)
pid = utils.ReadLockedPidFile(utils.PathJoin(status_dir, _IES_PID_FILE))
if pid:
logging.info("Import/export %s is still running with PID %s",
name, pid)
utils.KillProcess(pid, waitpid=False)
shutil.rmtree(status_dir, ignore_errors=True)
def _FindDisks(nodes_ip, disks):
"""Sets the physical ID on disks and returns the block devices.
"""
# set the correct physical ID
my_name = netutils.Hostname.GetSysName()
for cf in disks:
cf.SetPhysicalID(my_name, nodes_ip)
bdevs = []
for cf in disks:
rd = _RecursiveFindBD(cf)
if rd is None:
_Fail("Can't find device %s", cf)
bdevs.append(rd)
return bdevs
def DrbdDisconnectNet(nodes_ip, disks):
"""Disconnects the network on a list of drbd devices.
"""
bdevs = _FindDisks(nodes_ip, disks)
# disconnect disks
for rd in bdevs:
try:
rd.DisconnectNet()
except errors.BlockDeviceError, err:
_Fail("Can't change network configuration to standalone mode: %s",
err, exc=True)
def DrbdAttachNet(nodes_ip, disks, instance_name, multimaster):
"""Attaches the network on a list of drbd devices.
"""
bdevs = _FindDisks(nodes_ip, disks)
if multimaster:
for idx, rd in enumerate(bdevs):
try:
_SymlinkBlockDev(instance_name, rd.dev_path, idx)
except EnvironmentError, err:
_Fail("Can't create symlink: %s", err)
# reconnect disks, switch to new master configuration and if
# needed primary mode
for rd in bdevs:
try:
rd.AttachNet(multimaster)
except errors.BlockDeviceError, err:
_Fail("Can't change network configuration: %s", err)
# wait until the disks are connected; we need to retry the re-attach
# if the device becomes standalone, as this might happen if the one
# node disconnects and reconnects in a different mode before the
# other node reconnects; in this case, one or both of the nodes will
# decide it has wrong configuration and switch to standalone
def _Attach():
all_connected = True
for rd in bdevs:
stats = rd.GetProcStatus()
all_connected = (all_connected and
(stats.is_connected or stats.is_in_resync))
if stats.is_standalone:
# peer had different config info and this node became
# standalone, even though this should not happen with the
# new staged way of changing disk configs
try:
rd.AttachNet(multimaster)
except errors.BlockDeviceError, err:
_Fail("Can't change network configuration: %s", err)
if not all_connected:
raise utils.RetryAgain()
try:
    # Start with a delay of 100 milliseconds and go up to 5 seconds
utils.Retry(_Attach, (0.1, 1.5, 5.0), 2 * 60)
except utils.RetryTimeout:
_Fail("Timeout in disk reconnecting")
if multimaster:
# change to primary mode
for rd in bdevs:
try:
rd.Open()
except errors.BlockDeviceError, err:
_Fail("Can't change to primary mode: %s", err)
def DrbdWaitSync(nodes_ip, disks):
"""Wait until DRBDs have synchronized.
"""
def _helper(rd):
stats = rd.GetProcStatus()
if not (stats.is_connected or stats.is_in_resync):
raise utils.RetryAgain()
return stats
bdevs = _FindDisks(nodes_ip, disks)
min_resync = 100
alldone = True
for rd in bdevs:
try:
# poll each second for 15 seconds
stats = utils.Retry(_helper, 1, 15, args=[rd])
except utils.RetryTimeout:
stats = rd.GetProcStatus()
# last check
if not (stats.is_connected or stats.is_in_resync):
_Fail("DRBD device %s is not in sync: stats=%s", rd, stats)
alldone = alldone and (not stats.is_in_resync)
if stats.sync_percent is not None:
min_resync = min(min_resync, stats.sync_percent)
return (alldone, min_resync)
def GetDrbdUsermodeHelper():
"""Returns DRBD usermode helper currently configured.
"""
try:
return bdev.BaseDRBD.GetUsermodeHelper()
except errors.BlockDeviceError, err:
_Fail(str(err))
def PowercycleNode(hypervisor_type):
"""Hard-powercycle the node.
Because we need to return first, and schedule the powercycle in the
background, we won't be able to report failures nicely.
"""
hyper = hypervisor.GetHypervisor(hypervisor_type)
try:
pid = os.fork()
except OSError:
# if we can't fork, we'll pretend that we're in the child process
pid = 0
if pid > 0:
return "Reboot scheduled in 5 seconds"
  # ensure the child is running in RAM
try:
utils.Mlockall()
except Exception: # pylint: disable-msg=W0703
pass
time.sleep(5)
hyper.PowercycleNode()
class HooksRunner(object):
"""Hook runner.
This class is instantiated on the node side (ganeti-noded) and not
on the master side.
"""
def __init__(self, hooks_base_dir=None):
"""Constructor for hooks runner.
@type hooks_base_dir: str or None
@param hooks_base_dir: if not None, this overrides the
L{constants.HOOKS_BASE_DIR} (useful for unittests)
"""
if hooks_base_dir is None:
hooks_base_dir = constants.HOOKS_BASE_DIR
# yeah, _BASE_DIR is not valid for attributes, we use it like a
# constant
self._BASE_DIR = hooks_base_dir # pylint: disable-msg=C0103
def RunHooks(self, hpath, phase, env):
"""Run the scripts in the hooks directory.
@type hpath: str
@param hpath: the path to the hooks directory which
holds the scripts
@type phase: str
@param phase: either L{constants.HOOKS_PHASE_PRE} or
L{constants.HOOKS_PHASE_POST}
@type env: dict
@param env: dictionary with the environment for the hook
@rtype: list
@return: list of 3-element tuples:
- script path
- script result, either L{constants.HKR_SUCCESS} or
L{constants.HKR_FAIL}
- output of the script
@raise errors.ProgrammerError: for invalid input
parameters
"""
if phase == constants.HOOKS_PHASE_PRE:
suffix = "pre"
elif phase == constants.HOOKS_PHASE_POST:
suffix = "post"
else:
_Fail("Unknown hooks phase '%s'", phase)
subdir = "%s-%s.d" % (hpath, suffix)
dir_name = utils.PathJoin(self._BASE_DIR, subdir)
results = []
if not os.path.isdir(dir_name):
# for non-existing/non-dirs, we simply exit instead of logging a
# warning at every operation
return results
runparts_results = utils.RunParts(dir_name, env=env, reset_env=True)
for (relname, relstatus, runresult) in runparts_results:
if relstatus == constants.RUNPARTS_SKIP:
rrval = constants.HKR_SKIP
output = ""
elif relstatus == constants.RUNPARTS_ERR:
rrval = constants.HKR_FAIL
output = "Hook script execution error: %s" % runresult
elif relstatus == constants.RUNPARTS_RUN:
if runresult.failed:
rrval = constants.HKR_FAIL
else:
rrval = constants.HKR_SUCCESS
output = utils.SafeEncode(runresult.output.strip())
results.append(("%s/%s" % (subdir, relname), rrval, output))
return results
class IAllocatorRunner(object):
"""IAllocator runner.
This class is instantiated on the node side (ganeti-noded) and not on
the master side.
"""
@staticmethod
def Run(name, idata):
"""Run an iallocator script.
@type name: str
@param name: the iallocator script name
@type idata: str
@param idata: the allocator input data
    @rtype: string
    @return: the stdout of the allocator script
    @raise RPCFail: if the allocator is not found or its execution fails
"""
alloc_script = utils.FindFile(name, constants.IALLOCATOR_SEARCH_PATH,
os.path.isfile)
if alloc_script is None:
_Fail("iallocator module '%s' not found in the search path", name)
fd, fin_name = tempfile.mkstemp(prefix="ganeti-iallocator.")
try:
os.write(fd, idata)
os.close(fd)
result = utils.RunCmd([alloc_script, fin_name])
if result.failed:
_Fail("iallocator module '%s' failed: %s, output '%s'",
name, result.fail_reason, result.output)
finally:
os.unlink(fin_name)
return result.stdout
class DevCacheManager(object):
"""Simple class for managing a cache of block device information.
"""
_DEV_PREFIX = "/dev/"
_ROOT_DIR = constants.BDEV_CACHE_DIR
@classmethod
def _ConvertPath(cls, dev_path):
"""Converts a /dev/name path to the cache file name.
This replaces slashes with underscores and strips the /dev
prefix. It then returns the full path to the cache file.
@type dev_path: str
@param dev_path: the C{/dev/} path name
@rtype: str
@return: the converted path name
"""
if dev_path.startswith(cls._DEV_PREFIX):
dev_path = dev_path[len(cls._DEV_PREFIX):]
dev_path = dev_path.replace("/", "_")
fpath = utils.PathJoin(cls._ROOT_DIR, "bdev_%s" % dev_path)
return fpath
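  # Conversion sketch (BDEV_CACHE_DIR shown as a placeholder):
  #
  #   DevCacheManager._ConvertPath("/dev/xenvg/disk0")
  #   # -> "<BDEV_CACHE_DIR>/bdev_xenvg_disk0"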
@classmethod
def UpdateCache(cls, dev_path, owner, on_primary, iv_name):
"""Updates the cache information for a given device.
@type dev_path: str
@param dev_path: the pathname of the device
@type owner: str
@param owner: the owner (instance name) of the device
@type on_primary: bool
@param on_primary: whether this is the primary
        node or not
@type iv_name: str
@param iv_name: the instance-visible name of the
device, as in objects.Disk.iv_name
@rtype: None
"""
if dev_path is None:
logging.error("DevCacheManager.UpdateCache got a None dev_path")
return
fpath = cls._ConvertPath(dev_path)
if on_primary:
state = "primary"
else:
state = "secondary"
if iv_name is None:
iv_name = "not_visible"
fdata = "%s %s %s\n" % (str(owner), state, iv_name)
try:
utils.WriteFile(fpath, data=fdata)
except EnvironmentError, err:
logging.exception("Can't update bdev cache for %s: %s", dev_path, err)
@classmethod
def RemoveCache(cls, dev_path):
"""Remove data for a dev_path.
This is just a wrapper over L{utils.io.RemoveFile} with a converted
path name and logging.
@type dev_path: str
@param dev_path: the pathname of the device
@rtype: None
"""
if dev_path is None:
logging.error("DevCacheManager.RemoveCache got a None dev_path")
return
fpath = cls._ConvertPath(dev_path)
try:
utils.RemoveFile(fpath)
except EnvironmentError, err:
logging.exception("Can't update bdev cache for %s: %s", dev_path, err)
#!/usr/bin/env python
"""
Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import httplib
import json
import logging
import re
import socket
import string
import time
import traceback
import urllib2
import urlparse
from extra.safe2bin.safe2bin import safecharencode
from lib.core.agent import agent
from lib.core.common import asciifyUrl
from lib.core.common import calculateDeltaSeconds
from lib.core.common import clearConsoleLine
from lib.core.common import cpuThrottle
from lib.core.common import dataToStdout
from lib.core.common import evaluateCode
from lib.core.common import extractRegexResult
from lib.core.common import findMultipartPostBoundary
from lib.core.common import getCurrentThreadData
from lib.core.common import getHostHeader
from lib.core.common import getRequestHeader
from lib.core.common import getUnicode
from lib.core.common import logHTTPTraffic
from lib.core.common import pushValue
from lib.core.common import popValue
from lib.core.common import randomizeParameterValue
from lib.core.common import randomInt
from lib.core.common import randomStr
from lib.core.common import readInput
from lib.core.common import removeReflectiveValues
from lib.core.common import singleTimeLogMessage
from lib.core.common import singleTimeWarnMessage
from lib.core.common import stdev
from lib.core.common import wasLastResponseDelayed
from lib.core.common import unicodeencode
from lib.core.common import urldecode
from lib.core.common import urlencode
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.dicts import POST_HINT_CONTENT_TYPES
from lib.core.enums import ADJUST_TIME_DELAY
from lib.core.enums import AUTH_TYPE
from lib.core.enums import CUSTOM_LOGGING
from lib.core.enums import HTTP_HEADER
from lib.core.enums import HTTPMETHOD
from lib.core.enums import NULLCONNECTION
from lib.core.enums import PAYLOAD
from lib.core.enums import PLACE
from lib.core.enums import POST_HINT
from lib.core.enums import REDIRECTION
from lib.core.enums import WEB_API
from lib.core.exception import SqlmapCompressionException
from lib.core.exception import SqlmapConnectionException
from lib.core.exception import SqlmapGenericException
from lib.core.exception import SqlmapSyntaxException
from lib.core.exception import SqlmapTokenException
from lib.core.exception import SqlmapValueException
from lib.core.settings import ASTERISK_MARKER
from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
from lib.core.settings import DEFAULT_CONTENT_TYPE
from lib.core.settings import DEFAULT_COOKIE_DELIMITER
from lib.core.settings import DEFAULT_GET_POST_DELIMITER
from lib.core.settings import HTTP_ACCEPT_HEADER_VALUE
from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE
from lib.core.settings import MAX_CONNECTION_CHUNK_SIZE
from lib.core.settings import MAX_CONNECTIONS_REGEX
from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE
from lib.core.settings import META_REFRESH_REGEX
from lib.core.settings import MIN_TIME_RESPONSES
from lib.core.settings import IS_WIN
from lib.core.settings import LARGE_CHUNK_TRIM_MARKER
from lib.core.settings import PAYLOAD_DELIMITER
from lib.core.settings import PERMISSION_DENIED_REGEX
from lib.core.settings import PLAIN_TEXT_CONTENT_TYPE
from lib.core.settings import REPLACEMENT_MARKER
from lib.core.settings import TEXT_CONTENT_TYPE_REGEX
from lib.core.settings import UNENCODED_ORIGINAL_VALUE
from lib.core.settings import URI_HTTP_HEADER
from lib.core.settings import WARN_TIME_STDEV
from lib.request.basic import decodePage
from lib.request.basic import forgeHeaders
from lib.request.basic import processResponse
from lib.request.direct import direct
from lib.request.comparison import comparison
from lib.request.methodrequest import MethodRequest
from thirdparty.multipart import multipartpost
from thirdparty.odict.odict import OrderedDict
from thirdparty.socks.socks import ProxyError
class Connect(object):
"""
This class defines methods used to perform HTTP requests
"""
@staticmethod
def _getPageProxy(**kwargs):
return Connect.getPage(**kwargs)
@staticmethod
def _retryProxy(**kwargs):
threadData = getCurrentThreadData()
threadData.retriesCount += 1
if conf.proxyList and threadData.retriesCount >= conf.retries:
warnMsg = "changing proxy"
logger.warn(warnMsg)
conf.proxy = None
setHTTPProxy()
if kb.testMode and kb.previousMethod == PAYLOAD.METHOD.TIME:
            # time-based payloads can cause web server unresponsiveness
            # if the injectable piece of code is some kind of JOIN-like query
            warnMsg = "most probably the web server instance hasn't recovered yet "
            warnMsg += "from the previous time-based payload. If the problem "
            warnMsg += "persists please wait a few minutes and rerun "
            warnMsg += "without flag 'T' in option '--technique' "
            warnMsg += "(e.g. '--flush-session --technique=BEUS') or try to "
            warnMsg += "lower the value of option '--time-sec' (e.g. '--time-sec=2')"
singleTimeWarnMessage(warnMsg)
elif kb.originalPage is None:
if conf.tor:
warnMsg = "please make sure that you have "
warnMsg += "Tor installed and running so "
warnMsg += "you could successfully use "
warnMsg += "switch '--tor' "
if IS_WIN:
warnMsg += "(e.g. 'https://www.torproject.org/download/download.html.en')"
else:
warnMsg += "(e.g. 'https://help.ubuntu.com/community/Tor')"
else:
warnMsg = "if the problem persists please check that the provided "
warnMsg += "target URL is valid. In case that it is, you can try to rerun "
warnMsg += "with the switch '--random-agent' turned on "
warnMsg += "and/or proxy switches ('--ignore-proxy', '--proxy',...)"
singleTimeWarnMessage(warnMsg)
elif conf.threads > 1:
warnMsg = "if the problem persists please try to lower "
warnMsg += "the number of used threads (option '--threads')"
singleTimeWarnMessage(warnMsg)
time.sleep(1)
kwargs['retrying'] = True
return Connect._getPageProxy(**kwargs)
@staticmethod
def _connReadProxy(conn):
retVal = ""
if not kb.dnsMode and conn:
headers = conn.info()
if headers and (headers.getheader(HTTP_HEADER.CONTENT_ENCODING, "").lower() in ("gzip", "deflate")\
or "text" not in headers.getheader(HTTP_HEADER.CONTENT_TYPE, "").lower()):
retVal = conn.read(MAX_CONNECTION_TOTAL_SIZE)
if len(retVal) == MAX_CONNECTION_TOTAL_SIZE:
warnMsg = "large compressed response detected. Disabling compression"
singleTimeWarnMessage(warnMsg)
kb.pageCompress = False
else:
while True:
_ = conn.read(MAX_CONNECTION_CHUNK_SIZE)
if len(_) == MAX_CONNECTION_CHUNK_SIZE:
warnMsg = "large response detected. This could take a while"
singleTimeWarnMessage(warnMsg)
_ = re.sub(r"(?si)%s.+?%s" % (kb.chars.stop, kb.chars.start), "%s%s%s" % (kb.chars.stop, LARGE_CHUNK_TRIM_MARKER, kb.chars.start), _)
retVal += _
else:
retVal += _
break
if len(retVal) > MAX_CONNECTION_TOTAL_SIZE:
warnMsg = "too large response detected. Automatically trimming it"
singleTimeWarnMessage(warnMsg)
break
return retVal
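    # Simplified reading-heuristic sketch (standalone illustration, without
    # the trimming logic above): a full-size chunk implies more data is
    # pending, a short read marks the end of the response.
    #
    #   retVal = ""
    #   while True:
    #       chunk = conn.read(MAX_CONNECTION_CHUNK_SIZE)
    #       retVal += chunk
    #       if len(chunk) < MAX_CONNECTION_CHUNK_SIZE:
    #           break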
@staticmethod
def getPage(**kwargs):
"""
This method connects to the target URL or proxy and returns
the target URL page content
"""
if isinstance(conf.delay, (int, float)) and conf.delay > 0:
time.sleep(conf.delay)
elif conf.cpuThrottle:
cpuThrottle(conf.cpuThrottle)
if conf.dummy:
return randomStr(int(randomInt()), alphabet=[chr(_) for _ in xrange(256)]), {}, int(randomInt())
threadData = getCurrentThreadData()
with kb.locks.request:
kb.requestCounter += 1
threadData.lastRequestUID = kb.requestCounter
url = kwargs.get("url", None) or conf.url
get = kwargs.get("get", None)
post = kwargs.get("post", None)
method = kwargs.get("method", None)
cookie = kwargs.get("cookie", None)
ua = kwargs.get("ua", None) or conf.agent
referer = kwargs.get("referer", None) or conf.referer
host = kwargs.get("host", None) or conf.host
direct_ = kwargs.get("direct", False)
multipart = kwargs.get("multipart", False)
silent = kwargs.get("silent", False)
raise404 = kwargs.get("raise404", True)
timeout = kwargs.get("timeout", None) or conf.timeout
auxHeaders = kwargs.get("auxHeaders", None)
response = kwargs.get("response", False)
ignoreTimeout = kwargs.get("ignoreTimeout", False) or kb.ignoreTimeout
refreshing = kwargs.get("refreshing", False)
retrying = kwargs.get("retrying", False)
crawling = kwargs.get("crawling", False)
skipRead = kwargs.get("skipRead", False)
if not urlparse.urlsplit(url).netloc:
url = urlparse.urljoin(conf.url, url)
# flag to know if we are dealing with the same target host
target = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], [url, conf.url or ""]))
if not retrying:
# Reset the number of connection retries
threadData.retriesCount = 0
        # fix for a known issue where urllib2 simply skips the remainder of a
        # provided URL split by a space character when urlencoding it in a
        # later phase
url = url.replace(" ", "%20")
conn = None
code = None
page = None
_ = urlparse.urlsplit(url)
requestMsg = u"HTTP request [#%d]:\n%s " % (threadData.lastRequestUID, method or (HTTPMETHOD.POST if post is not None else HTTPMETHOD.GET))
requestMsg += ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) if not any((refreshing, crawling)) else url
responseMsg = u"HTTP response "
requestHeaders = u""
responseHeaders = None
logHeaders = u""
skipLogTraffic = False
raise404 = raise404 and not kb.ignoreNotFound
# support for non-latin (e.g. cyrillic) URLs as urllib/urllib2 doesn't
# support those by default
url = asciifyUrl(url)
# fix for known issues when using url in unicode format
# (e.g. UnicodeDecodeError: "url = url + '?' + query" in redirect case)
url = unicodeencode(url)
try:
socket.setdefaulttimeout(timeout)
if direct_:
if '?' in url:
url, params = url.split('?', 1)
params = urlencode(params)
url = "%s?%s" % (url, params)
elif multipart:
                # Needed in this form because of a potential circular
                # dependency problem (option -> update -> connect -> option)
from lib.core.option import proxyHandler
multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
conn = multipartOpener.open(unicodeencode(url), multipart)
page = Connect._connReadProxy(conn) if not skipRead else None
responseHeaders = conn.info()
responseHeaders[URI_HTTP_HEADER] = conn.geturl()
page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
return page
elif any((refreshing, crawling)):
pass
elif target:
if conf.forceSSL and urlparse.urlparse(url).scheme != "https":
url = re.sub("\Ahttp:", "https:", url, re.I)
url = re.sub(":80/", ":443/", url, re.I)
if PLACE.GET in conf.parameters and not get:
get = conf.parameters[PLACE.GET]
if not conf.skipUrlEncode:
get = urlencode(get, limit=True)
if get:
url = "%s?%s" % (url, get)
requestMsg += "?%s" % get
if PLACE.POST in conf.parameters and not post and method in (None, HTTPMETHOD.POST):
post = conf.parameters[PLACE.POST]
elif get:
url = "%s?%s" % (url, get)
requestMsg += "?%s" % get
requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str
# Prepare HTTP headers
headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer})
if kb.authHeader:
headers[HTTP_HEADER.AUTHORIZATION] = kb.authHeader
if kb.proxyAuthHeader:
headers[HTTP_HEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader
headers[HTTP_HEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE
headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity"
headers[HTTP_HEADER.HOST] = host or getHostHeader(url)
if post is not None and HTTP_HEADER.CONTENT_TYPE not in headers:
headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE)
if headers.get(HTTP_HEADER.CONTENT_TYPE) == POST_HINT_CONTENT_TYPES[POST_HINT.MULTIPART]:
warnMsg = "missing 'boundary parameter' in '%s' header. " % HTTP_HEADER.CONTENT_TYPE
warnMsg += "Will try to reconstruct"
singleTimeWarnMessage(warnMsg)
boundary = findMultipartPostBoundary(conf.data)
if boundary:
headers[HTTP_HEADER.CONTENT_TYPE] = "%s; boundary=%s" % (headers[HTTP_HEADER.CONTENT_TYPE], boundary)
if auxHeaders:
for key, item in auxHeaders.items():
for _ in headers.keys():
if _.upper() == key.upper():
del headers[_]
headers[key] = item
for key, item in headers.items():
del headers[key]
headers[unicodeencode(key, kb.pageEncoding)] = unicodeencode(item, kb.pageEncoding)
post = unicodeencode(post, kb.pageEncoding)
if method:
req = MethodRequest(url, post, headers)
req.set_method(method)
else:
req = urllib2.Request(url, post, headers)
requestHeaders += "\n".join("%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in req.header_items())
if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj:
conf.cj._policy._now = conf.cj._now = int(time.time())
cookies = conf.cj._cookies_for_request(req)
requestHeaders += "\n%s" % ("Cookie: %s" % ";".join("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value)) for cookie in cookies))
if post is not None:
if not getRequestHeader(req, HTTP_HEADER.CONTENT_LENGTH):
requestHeaders += "\n%s: %d" % (string.capwords(HTTP_HEADER.CONTENT_LENGTH), len(post))
if not getRequestHeader(req, HTTP_HEADER.CONNECTION):
requestHeaders += "\n%s: close" % HTTP_HEADER.CONNECTION
requestMsg += "\n%s" % requestHeaders
if post is not None:
requestMsg += "\n\n%s" % getUnicode(post)
requestMsg += "\n"
threadData.lastRequestMsg = requestMsg
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
conn = urllib2.urlopen(req)
if not kb.authHeader and getRequestHeader(req, HTTP_HEADER.AUTHORIZATION) and (conf.authType or "").lower() == AUTH_TYPE.BASIC.lower():
kb.authHeader = getRequestHeader(req, HTTP_HEADER.AUTHORIZATION)
if not kb.proxyAuthHeader and getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION):
kb.proxyAuthHeader = getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION)
# Return response object
if response:
return conn, None, None
# Get HTTP response
if hasattr(conn, 'redurl'):
page = (threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO\
else Connect._connReadProxy(conn)) if not skipRead else None
skipLogTraffic = kb.redirectChoice == REDIRECTION.NO
code = conn.redcode
else:
page = Connect._connReadProxy(conn) if not skipRead else None
code = code or conn.code
responseHeaders = conn.info()
responseHeaders[URI_HTTP_HEADER] = conn.geturl()
page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
status = getUnicode(conn.msg)
if extractRegexResult(META_REFRESH_REGEX, page) and not refreshing:
url = extractRegexResult(META_REFRESH_REGEX, page)
debugMsg = "got HTML meta refresh header"
logger.debug(debugMsg)
if kb.alwaysRefresh is None:
msg = "sqlmap got a refresh request "
msg += "(redirect like response common to login pages). "
msg += "Do you want to apply the refresh "
msg += "from now on (or stay on the original page)? [Y/n]"
choice = readInput(msg, default="Y")
kb.alwaysRefresh = choice not in ("n", "N")
if kb.alwaysRefresh:
if url.lower().startswith('http://'):
kwargs['url'] = url
else:
kwargs['url'] = conf.url[:conf.url.rfind('/') + 1] + url
threadData.lastRedirectMsg = (threadData.lastRequestUID, page)
kwargs['refreshing'] = True
kwargs['get'] = None
kwargs['post'] = None
try:
return Connect._getPageProxy(**kwargs)
except SqlmapSyntaxException:
pass
# Explicit closing of connection object
if not conf.keepAlive:
try:
if hasattr(conn.fp, '_sock'):
conn.fp._sock.close()
conn.close()
except Exception, msg:
warnMsg = "problem occurred during connection closing ('%s')" % msg
logger.warn(warnMsg)
except urllib2.HTTPError, e:
page = None
responseHeaders = None
try:
page = e.read() if not skipRead else None
responseHeaders = e.info()
responseHeaders[URI_HTTP_HEADER] = e.geturl()
page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
except socket.timeout:
warnMsg = "connection timed out while trying "
warnMsg += "to get error page information (%d)" % e.code
logger.warn(warnMsg)
return None, None, None
except KeyboardInterrupt:
raise
except:
pass
finally:
page = page if isinstance(page, unicode) else getUnicode(page)
code = e.code
threadData.lastHTTPError = (threadData.lastRequestUID, code)
kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1
status = getUnicode(e.msg)
responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)
if responseHeaders:
logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items())
logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))
skipLogTraffic = True
if conf.verbose <= 5:
responseMsg += getUnicode(logHeaders)
elif conf.verbose > 5:
responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
if e.code == httplib.UNAUTHORIZED and not conf.ignore401:
errMsg = "not authorized, try to provide right HTTP "
errMsg += "authentication type and valid credentials (%d)" % code
raise SqlmapConnectionException(errMsg)
elif e.code == httplib.NOT_FOUND:
if raise404:
errMsg = "page not found (%d)" % code
raise SqlmapConnectionException(errMsg)
else:
debugMsg = "page not found (%d)" % code
singleTimeLogMessage(debugMsg, logging.DEBUG)
processResponse(page, responseHeaders)
elif e.code == httplib.GATEWAY_TIMEOUT:
if ignoreTimeout:
return None, None, None
else:
warnMsg = "unable to connect to the target URL (%d - %s)" % (e.code, httplib.responses[e.code])
if threadData.retriesCount < conf.retries and not kb.threadException:
warnMsg += ". sqlmap is going to retry the request"
logger.critical(warnMsg)
return Connect._retryProxy(**kwargs)
elif kb.testMode:
logger.critical(warnMsg)
return None, None, None
else:
raise SqlmapConnectionException(warnMsg)
else:
debugMsg = "got HTTP error code: %d (%s)" % (code, status)
logger.debug(debugMsg)
except (urllib2.URLError, socket.error, socket.timeout, httplib.BadStatusLine, httplib.IncompleteRead, ProxyError, SqlmapCompressionException), e:
tbMsg = traceback.format_exc()
if "no host given" in tbMsg:
warnMsg = "invalid URL address used (%s)" % repr(url)
raise SqlmapSyntaxException(warnMsg)
elif "forcibly closed" in tbMsg:
warnMsg = "connection was forcibly closed by the target URL"
elif "timed out" in tbMsg:
if kb.testMode and kb.testType not in (None, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED):
singleTimeWarnMessage("there is a possibility that the target (or WAF) is dropping 'suspicious' requests")
warnMsg = "connection timed out to the target URL"
elif "URLError" in tbMsg or "error" in tbMsg:
warnMsg = "unable to connect to the target URL"
elif "BadStatusLine" in tbMsg:
warnMsg = "connection dropped or unknown HTTP "
warnMsg += "status code received"
if not conf.agent and not conf.randomAgent:
warnMsg += ". Try to force the HTTP User-Agent "
warnMsg += "header with option '--user-agent' or switch '--random-agent'"
elif "IncompleteRead" in tbMsg:
warnMsg = "there was an incomplete read error while retrieving data "
warnMsg += "from the target URL"
else:
warnMsg = "unable to connect to the target URL"
if "BadStatusLine" not in tbMsg:
warnMsg += " or proxy"
if silent:
return None, None, None
elif "forcibly closed" in tbMsg:
logger.critical(warnMsg)
return None, None, None
elif ignoreTimeout and any(_ in tbMsg for _ in ("timed out", "IncompleteRead")):
return None, None, None
elif threadData.retriesCount < conf.retries and not kb.threadException:
warnMsg += ". sqlmap is going to retry the request"
logger.critical(warnMsg)
return Connect._retryProxy(**kwargs)
elif kb.testMode:
logger.critical(warnMsg)
return None, None, None
else:
raise SqlmapConnectionException(warnMsg)
finally:
if not isinstance(page, unicode):
if HTTP_HEADER.CONTENT_TYPE in (responseHeaders or {}) and not re.search(TEXT_CONTENT_TYPE_REGEX, responseHeaders[HTTP_HEADER.CONTENT_TYPE]):
page = unicode(page, errors="ignore")
else:
page = getUnicode(page)
socket.setdefaulttimeout(conf.timeout)
processResponse(page, responseHeaders)
if conn and getattr(conn, "redurl", None):
_ = urlparse.urlsplit(conn.redurl)
_ = ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else ""))
requestMsg = re.sub("(\n[A-Z]+ ).+?( HTTP/\d)", "\g<1>%s\g<2>" % getUnicode(_), requestMsg, 1)
responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, conn.code, status)
else:
responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)
if responseHeaders:
logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items())
if not skipLogTraffic:
logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))
if conf.verbose <= 5:
responseMsg += getUnicode(logHeaders)
elif conf.verbose > 5:
responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
return page, responseHeaders, code
@staticmethod
def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True):
"""
This method calls a function to get the target URL page content
and returns its page MD5 hash or a boolean value in case of
string match check ('--string' command line parameter)
"""
if conf.direct:
return direct(value, content)
get = None
post = None
cookie = None
ua = None
referer = None
host = None
page = None
pageLength = None
uri = None
code = None
if not place:
place = kb.injection.place or PLACE.GET
if not auxHeaders:
auxHeaders = {}
raise404 = place != PLACE.URI if raise404 is None else raise404
value = agent.adjustLateValues(value)
payload = agent.extractPayload(value)
threadData = getCurrentThreadData()
if conf.httpHeaders:
headers = OrderedDict(conf.httpHeaders)
contentType = max(headers[_] if _.upper() == HTTP_HEADER.CONTENT_TYPE.upper() else None for _ in headers.keys())
if (kb.postHint or conf.skipUrlEncode) and kb.postUrlEncode:
kb.postUrlEncode = False
conf.httpHeaders = [_ for _ in conf.httpHeaders if _[1] != contentType]
contentType = POST_HINT_CONTENT_TYPES.get(kb.postHint, PLAIN_TEXT_CONTENT_TYPE)
conf.httpHeaders.append((HTTP_HEADER.CONTENT_TYPE, contentType))
if payload:
if kb.tamperFunctions:
for function in kb.tamperFunctions:
try:
payload = function(payload=payload, headers=auxHeaders)
except Exception, ex:
errMsg = "error occurred while running tamper "
errMsg += "function '%s' ('%s')" % (function.func_name, ex)
raise SqlmapGenericException(errMsg)
if not isinstance(payload, basestring):
errMsg = "tamper function '%s' returns " % function.func_name
errMsg += "invalid payload type ('%s')" % type(payload)
raise SqlmapValueException(errMsg)
value = agent.replacePayload(value, payload)
logger.log(CUSTOM_LOGGING.PAYLOAD, safecharencode(payload))
if place == PLACE.CUSTOM_POST and kb.postHint:
if kb.postHint in (POST_HINT.SOAP, POST_HINT.XML):
# payloads in SOAP/XML should have chars > and < replaced
# with their HTML encoded counterparts
payload = payload.replace('>', "&gt;").replace('<', "&lt;")
elif kb.postHint == POST_HINT.JSON:
if payload.startswith('"') and payload.endswith('"'):
payload = json.dumps(payload[1:-1])
else:
payload = json.dumps(payload)[1:-1]
elif kb.postHint == POST_HINT.JSON_LIKE:
payload = payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"')
if payload.startswith('"') and payload.endswith('"'):
payload = json.dumps(payload[1:-1])
else:
payload = json.dumps(payload)[1:-1]
payload = payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"')
value = agent.replacePayload(value, payload)
else:
# GET, POST, URI and Cookie payload needs to be thoroughly URL encoded
if place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) and not conf.skipUrlEncode or place in (PLACE.POST, PLACE.CUSTOM_POST) and kb.postUrlEncode:
payload = urlencode(payload, '%', False, place != PLACE.URI) # spaceplus is handled down below
value = agent.replacePayload(value, payload)
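# HTTP parameter pollution (HPP): the block below splits the payload
# across repeated occurrences of the same parameter, wrapping the pieces
# in inline comment markers ("/*", "*/") so that backends which
# concatenate duplicate parameters reassemble the original payload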
if conf.hpp:
if not any(conf.url.lower().endswith(_.lower()) for _ in (WEB_API.ASP, WEB_API.ASPX)):
warnMsg = "HTTP parameter pollution should work only against "
warnMsg += "ASP(.NET) targets"
singleTimeWarnMessage(warnMsg)
if place in (PLACE.GET, PLACE.POST):
_ = re.escape(PAYLOAD_DELIMITER)
match = re.search("(?P<name>\w+)=%s(?P<value>.+?)%s" % (_, _), value)
if match:
payload = match.group("value")
for splitter in (urlencode(' '), ' '):
if splitter in payload:
prefix, suffix = ("*/", "/*") if splitter == ' ' else (urlencode(_) for _ in ("*/", "/*"))
parts = payload.split(splitter)
parts[0] = "%s%s" % (parts[0], suffix)
parts[-1] = "%s%s=%s%s" % (DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[-1])
for i in xrange(1, len(parts) - 1):
parts[i] = "%s%s=%s%s%s" % (DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[i], suffix)
payload = "".join(parts)
for splitter in (urlencode(','), ','):
payload = payload.replace(splitter, "%s%s=" % (DEFAULT_GET_POST_DELIMITER, match.group("name")))
value = agent.replacePayload(value, payload)
else:
warnMsg = "HTTP parameter pollution works only with regular "
warnMsg += "GET and POST parameters"
singleTimeWarnMessage(warnMsg)
if place:
value = agent.removePayloadDelimiters(value)
if PLACE.GET in conf.parameters:
get = conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value
if PLACE.POST in conf.parameters:
post = conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value
if PLACE.CUSTOM_POST in conf.parameters:
post = conf.parameters[PLACE.CUSTOM_POST].replace(CUSTOM_INJECTION_MARK_CHAR, "") if place != PLACE.CUSTOM_POST or not value else value
post = post.replace(ASTERISK_MARKER, '*') if post else post
if PLACE.COOKIE in conf.parameters:
cookie = conf.parameters[PLACE.COOKIE] if place != PLACE.COOKIE or not value else value
if PLACE.USER_AGENT in conf.parameters:
ua = conf.parameters[PLACE.USER_AGENT] if place != PLACE.USER_AGENT or not value else value
if PLACE.REFERER in conf.parameters:
referer = conf.parameters[PLACE.REFERER] if place != PLACE.REFERER or not value else value
if PLACE.HOST in conf.parameters:
host = conf.parameters[PLACE.HOST] if place != PLACE.HOST or not value else value
if PLACE.URI in conf.parameters:
uri = conf.url if place != PLACE.URI or not value else value
else:
uri = conf.url
if value and place == PLACE.CUSTOM_HEADER:
auxHeaders[value.split(',')[0]] = value.split(',', 1)[1]
if conf.csrfToken:
def _adjustParameter(paramString, parameter, newValue):
retVal = paramString
match = re.search("%s=(?P<value>[^&]*)" % re.escape(parameter), paramString)
if match:
origValue = match.group("value")
retVal = re.sub("%s=[^&]*" % re.escape(parameter), "%s=%s" % (parameter, newValue), paramString)
return retVal
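# illustrative (hypothetical values): _adjustParameter("id=1&csrf=old",
# "csrf", "new") returns "id=1&csrf=new"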
page, headers, code = Connect.getPage(url=conf.csrfUrl or conf.url, cookie=conf.parameters.get(PLACE.COOKIE), direct=True, silent=True, ua=conf.parameters.get(PLACE.USER_AGENT), referer=conf.parameters.get(PLACE.REFERER), host=conf.parameters.get(PLACE.HOST))
match = re.search(r"<input[^>]+name=[\"']?%s[\"']?\s[^>]*value=(\"([^\"]+)|'([^']+)|([^ >]+))" % re.escape(conf.csrfToken), page or "")
token = (match.group(2) or match.group(3) or match.group(4)) if match else None
if not token:
if conf.csrfUrl != conf.url and code == httplib.OK:
if headers and "text/plain" in headers.get(HTTP_HEADER.CONTENT_TYPE, ""):
token = page
if not token and any(_.name == conf.csrfToken for _ in conf.cj):
for _ in conf.cj:
if _.name == conf.csrfToken:
token = _.value
if not any(conf.csrfToken in _ for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))):
if post:
post = "%s%s%s=%s" % (post, conf.paramDel or DEFAULT_GET_POST_DELIMITER, conf.csrfToken, token)
elif get:
get = "%s%s%s=%s" % (get, conf.paramDel or DEFAULT_GET_POST_DELIMITER, conf.csrfToken, token)
else:
get = "%s=%s" % (conf.csrfToken, token)
break
if not token:
errMsg = "CSRF protection token '%s' can't be found at '%s'" % (conf.csrfToken, conf.csrfUrl or conf.url)
if not conf.csrfUrl:
errMsg += ". You can try to rerun by providing "
errMsg += "a valid value for option '--csrf-url'"
raise SqlmapTokenException(errMsg)
if token:
for place in (PLACE.GET, PLACE.POST):
if place in conf.parameters:
if place == PLACE.GET and get:
get = _adjustParameter(get, conf.csrfToken, token)
elif place == PLACE.POST and post:
post = _adjustParameter(post, conf.csrfToken, token)
for i in xrange(len(conf.httpHeaders)):
if conf.httpHeaders[i][0].lower() == conf.csrfToken.lower():
conf.httpHeaders[i] = (conf.httpHeaders[i][0], token)
if conf.rParam:
def _randomizeParameter(paramString, randomParameter):
retVal = paramString
match = re.search("%s=(?P<value>[^&;]+)" % re.escape(randomParameter), paramString)
if match:
origValue = match.group("value")
retVal = re.sub("%s=[^&;]+" % re.escape(randomParameter), "%s=%s" % (randomParameter, randomizeParameterValue(origValue)), paramString)
return retVal
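# illustrative (hypothetical values): _randomizeParameter("id=1&rnd=100",
# "rnd") rewrites the 'rnd' value via randomizeParameterValue(origValue)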
for randomParameter in conf.rParam:
for item in (PLACE.GET, PLACE.POST, PLACE.COOKIE):
if item in conf.parameters:
if item == PLACE.GET and get:
get = _randomizeParameter(get, randomParameter)
elif item == PLACE.POST and post:
post = _randomizeParameter(post, randomParameter)
elif item == PLACE.COOKIE and cookie:
cookie = _randomizeParameter(cookie, randomParameter)
if conf.evalCode:
delimiter = conf.paramDel or DEFAULT_GET_POST_DELIMITER
variables = {"uri": uri}
originals = {}
for item in filter(None, (get, post if not kb.postHint else None)):
for part in item.split(delimiter):
if '=' in part:
name, value = part.split('=', 1)
value = urldecode(value, convall=True, plusspace=(item==post and kb.postSpaceToPlus))
evaluateCode("%s=%s" % (name.strip(), repr(value)), variables)
if cookie:
for part in cookie.split(conf.cookieDel or DEFAULT_COOKIE_DELIMITER):
if '=' in part:
name, value = part.split('=', 1)
value = urldecode(value, convall=True)
evaluateCode("%s=%s" % (name.strip(), repr(value)), variables)
originals.update(variables)
evaluateCode(conf.evalCode, variables)
uri = variables["uri"]
for name, value in variables.items():
if name != "__builtins__" and originals.get(name, "") != value:
if isinstance(value, (basestring, int)):
found = False
value = unicode(value)
regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(delimiter), re.escape(name), re.escape(delimiter))
if re.search(regex, (get or "")):
found = True
get = re.sub(regex, "\g<1>%s\g<3>" % value, get)
if re.search(regex, (post or "")):
found = True
post = re.sub(regex, "\g<1>%s\g<3>" % value, post)
regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER), name, re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER))
if re.search(regex, (cookie or "")):
found = True
cookie = re.sub(regex, "\g<1>%s\g<3>" % value, cookie)
if not found:
if post is not None:
post += "%s%s=%s" % (delimiter, name, value)
elif get is not None:
get += "%s%s=%s" % (delimiter, name, value)
elif cookie is not None:
cookie += "%s%s=%s" % (conf.cookieDel or DEFAULT_COOKIE_DELIMITER, name, value)
if not conf.skipUrlEncode:
get = urlencode(get, limit=True)
if post is not None:
if place not in (PLACE.POST, PLACE.CUSTOM_POST) and hasattr(post, UNENCODED_ORIGINAL_VALUE):
post = getattr(post, UNENCODED_ORIGINAL_VALUE)
elif kb.postUrlEncode:
post = urlencode(post, spaceplus=kb.postSpaceToPlus)
if timeBasedCompare:
if len(kb.responseTimes) < MIN_TIME_RESPONSES:
clearConsoleLine()
if conf.tor:
warnMsg = "it's highly recommended to avoid usage of switch '--tor' for "
warnMsg += "time-based injections because of its high latency time"
singleTimeWarnMessage(warnMsg)
warnMsg = "[%s] [WARNING] time-based comparison requires " % time.strftime("%X")
warnMsg += "larger statistical model, please wait"
dataToStdout(warnMsg)
while len(kb.responseTimes) < MIN_TIME_RESPONSES:
Connect.queryPage(content=True)
dataToStdout('.')
dataToStdout("\n")
elif not kb.testMode:
warnMsg = "it is very important not to stress the network adapter "
warnMsg += "during usage of time-based payloads to prevent potential "
warnMsg += "errors "
singleTimeWarnMessage(warnMsg)
if not kb.laggingChecked:
kb.laggingChecked = True
deviation = stdev(kb.responseTimes)
if deviation > WARN_TIME_STDEV:
kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE
warnMsg = "considerable lagging has been detected "
warnMsg += "in connection response(s). Please use as high "
warnMsg += "value for option '--time-sec' as possible (e.g. "
warnMsg += "10 or more)"
logger.critical(warnMsg)
if conf.safUrl and conf.saFreq > 0:
kb.queryCounter += 1
if kb.queryCounter % conf.saFreq == 0:
Connect.getPage(url=conf.safUrl, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer, host=host)
start = time.time()
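# "null connection" optimization: when only the response size matters,
# request metadata alone (HEAD method, one-byte Range, or skipped read)
# and derive the page length from Content-Length/Content-Range below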
if kb.nullConnection and not content and not response and not timeBasedCompare:
noteResponseTime = False
pushValue(kb.pageCompress)
kb.pageCompress = False
if kb.nullConnection == NULLCONNECTION.HEAD:
method = HTTPMETHOD.HEAD
elif kb.nullConnection == NULLCONNECTION.RANGE:
auxHeaders[HTTP_HEADER.RANGE] = "bytes=-1"
_, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404, skipRead=(kb.nullConnection == NULLCONNECTION.SKIP_READ))
if headers:
if kb.nullConnection in (NULLCONNECTION.HEAD, NULLCONNECTION.SKIP_READ) and HTTP_HEADER.CONTENT_LENGTH in headers:
pageLength = int(headers[HTTP_HEADER.CONTENT_LENGTH])
elif kb.nullConnection == NULLCONNECTION.RANGE and HTTP_HEADER.CONTENT_RANGE in headers:
pageLength = int(headers[HTTP_HEADER.CONTENT_RANGE][headers[HTTP_HEADER.CONTENT_RANGE].find('/') + 1:])
kb.pageCompress = popValue()
if not pageLength:
try:
page, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare)
except MemoryError:
page, headers, code = None, None, None
warnMsg = "site returned insanely large response"
if kb.testMode:
warnMsg += " in testing phase. This is a common "
warnMsg += "behavior in custom WAF/IDS/IPS solutions"
singleTimeWarnMessage(warnMsg)
if conf.secondOrder:
page, headers, code = Connect.getPage(url=conf.secondOrder, cookie=cookie, ua=ua, silent=silent, auxHeaders=auxHeaders, response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True)
threadData.lastQueryDuration = calculateDeltaSeconds(start)
kb.originalCode = kb.originalCode or code
if kb.testMode:
kb.testQueryCount += 1
if timeBasedCompare:
return wasLastResponseDelayed()
elif noteResponseTime:
kb.responseTimes.append(threadData.lastQueryDuration)
if not response and removeReflection:
page = removeReflectiveValues(page, payload)
kb.maxConnectionsFlag = re.search(MAX_CONNECTIONS_REGEX, page or "", re.I) is not None
kb.permissionFlag = re.search(PERMISSION_DENIED_REGEX, page or "", re.I) is not None
if content or response:
return page, headers
if getRatioValue:
return comparison(page, headers, code, getRatioValue=False, pageLength=pageLength), comparison(page, headers, code, getRatioValue=True, pageLength=pageLength)
else:
return comparison(page, headers, code, getRatioValue, pageLength)
def setHTTPProxy(): # Cross-linked function
raise NotImplementedError
| pwnieexpress/raspberry_pwn | src/pentest/sqlmap/lib/request/connect.py | Python | gpl-3.0 | 46,892 |
import os, sys, inspect
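# loader shim: extend sys.path with the per-interpreter binary directory
# (e.g. "binaries-python2.7-win64") so that the compiled liblsl wrapper
# matching this Python version and pointer size can be imported below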
try:
binary_path = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile(inspect.currentframe()))[0],"binaries-python" + str(sys.version_info[0]) + "." + str(sys.version_info[1]) + '-' + ('win64' if sys.maxsize>2**32 else 'win32'))))
if binary_path not in sys.path:
sys.path.append(binary_path)
except:
raise Exception("The pylsl module has not been compiled for your Python version.")
from liblsl import *
| sccn/SNAP | src/pylsl/pylsl.py | Python | bsd-3-clause | 473 |
import requests
from instagram.client import InstagramAPI
from flask import Flask, request, render_template, session, redirect, abort, flash, jsonify
import sys
import logging
import os
app = Flask(__name__)
app.logger.addHandler(logging.StreamHandler(sys.stdout))
app.logger.setLevel(logging.ERROR)
app.secret_key = os.environ['FLASK_SECRET_KEY']
# configure Instagram API
instaConfig = {
'client_id':os.environ['INSTA_CLIENT_ID'],
'client_secret':os.environ['INSTA_CLIENT_SECRET'],
'redirect_uri' : os.environ['INSTA_REDIRECT_URI']
}
api = InstagramAPI(**instaConfig)
'''
@app.route('/')
def index():
error_message = 'Username not found'
username = request.values.get('username')
if not username:
return render_template('index.html')
user_id = get_user_id(username)
result = get_unique(user_id)
if result:
return render_template('result.html', username=username,
result=result)
return render_template('index.html', error_message=error_message)
'''
@app.route('/')
def index():
# if instagram info is in session variables, then display user photos
if 'instagram_access_token' in session and 'instagram_user' in session:
userAPI = InstagramAPI(access_token=session['instagram_access_token'])
follows = []
follows, next_ = userAPI.user_follows(user_id=session['instagram_user'].get('id'))
while next_:
more_follows, next_ = userAPI.user_follows(with_next_url=next_)
follows.extend(more_follows)
followed_by = []
followed_by, _ = userAPI.user_followed_by(user_id=session['instagram_user'].get('id'))
while _:
more_followed_by, _ = userAPI.user_followed_by(with_next_url=_)
followed_by.extend(more_followed_by)
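# set difference: accounts the user follows minus accounts following
# back (compared via their "User: <name>" string form), i.e. the people
# who don't follow the user back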
followers_names = list(map(str, follows))
followed_by_names = list(map(str, followed_by))
unique_people = list(set(followers_names) - set(followed_by_names))
clean_list = [i.replace("User: ", "") for i in unique_people]
result = [i for i in follows if i.username in clean_list]
resultattr = {}
for i in result:
resultattr[i.username] = i.profile_picture
return render_template('result.html', result = resultattr)
else:
return render_template('index.html')
@app.route('/connect')
def main():
url = api.get_authorize_url(scope=["follower_list"])
return redirect(url)
@app.route('/instagram_callback')
def instagram_callback():
code = request.args.get('code')
if code:
access_token, user = api.exchange_code_for_access_token(code)
if not access_token:
return 'Could not get access token'
app.logger.debug('got an access token')
app.logger.debug(access_token)
# Sessions are used to keep this data
session['instagram_access_token'] = access_token
session['instagram_user'] = user
return redirect('/') # redirect back to main page
else:
return "Uhoh no code provided"
@app.errorhandler(404)
def page_not_found(error):
return render_template('404.html'), 404
if __name__ == '__main__':
app.run(debug=True) | mr-karan/Insta-notFollow | main.py | Python | mit | 3,243 |
# Copyright (C) 2011, 2012 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2011, 2012 Isaku Yamahata <yamahata at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The main component of the OpenFlow controller.
- Handle connections from switches
- Generate and route events to appropriate entities like Ryu applications
"""
import contextlib
from ryu import cfg
import logging
from ryu.lib import hub
from ryu.lib.hub import StreamServer
import traceback
import random
import ssl
from socket import IPPROTO_TCP, TCP_NODELAY
import ryu.base.app_manager
from ryu.ofproto import ofproto_common
from ryu.ofproto import ofproto_parser
from ryu.ofproto import ofproto_protocol
from ryu.ofproto import ofproto_v1_0
from ryu.ofproto import nx_match
from ryu.controller import handler
from ryu.controller import ofp_event
from ryu.lib.dpid import dpid_to_str
LOG = logging.getLogger('ryu.controller.controller')
CONF = cfg.CONF
CONF.register_cli_opts([
cfg.StrOpt('ofp-listen-host', default='', help='openflow listen host'),
cfg.IntOpt('ofp-tcp-listen-port', default=ofproto_common.OFP_TCP_PORT,
help='openflow tcp listen port'),
cfg.IntOpt('ofp-ssl-listen-port', default=ofproto_common.OFP_SSL_PORT,
help='openflow ssl listen port'),
cfg.StrOpt('ctl-privkey', default=None, help='controller private key'),
cfg.StrOpt('ctl-cert', default=None, help='controller certificate'),
cfg.StrOpt('ca-certs', default=None, help='CA certificates')
])
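# These options surface on the ryu-manager command line, e.g. (sketch,
# port value illustrative): ryu-manager --ofp-tcp-listen-port 6633 app.py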
class OpenFlowController(object):
def __init__(self):
super(OpenFlowController, self).__init__()
# entry point
def __call__(self):
# LOG.debug('call')
self.server_loop()
def server_loop(self):
if CONF.ctl_privkey is not None and CONF.ctl_cert is not None:
if CONF.ca_certs is not None:
server = StreamServer((CONF.ofp_listen_host,
CONF.ofp_ssl_listen_port),
datapath_connection_factory,
keyfile=CONF.ctl_privkey,
certfile=CONF.ctl_cert,
cert_reqs=ssl.CERT_REQUIRED,
ca_certs=CONF.ca_certs,
ssl_version=ssl.PROTOCOL_TLSv1)
else:
server = StreamServer((CONF.ofp_listen_host,
CONF.ofp_ssl_listen_port),
datapath_connection_factory,
keyfile=CONF.ctl_privkey,
certfile=CONF.ctl_cert,
ssl_version=ssl.PROTOCOL_TLSv1)
else:
server = StreamServer((CONF.ofp_listen_host,
CONF.ofp_tcp_listen_port),
datapath_connection_factory)
# LOG.debug('loop')
server.serve_forever()
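# decorator: however the wrapped loop method exits, mark the datapath as
# inactive so that the peer send/recv loop terminates as well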
def _deactivate(method):
def deactivate(self):
try:
method(self)
finally:
self.is_active = False
return deactivate
class Datapath(ofproto_protocol.ProtocolDesc):
def __init__(self, socket, address):
super(Datapath, self).__init__()
self.socket = socket
self.socket.setsockopt(IPPROTO_TCP, TCP_NODELAY, 1)
self.address = address
self.is_active = True
# The limit is arbitrary. We need to limit queue size to
# prevent it from eating memory up
self.send_q = hub.Queue(16)
self.xid = random.randint(0, self.ofproto.MAX_XID)
self.id = None # datapath_id is unknown yet
self.ports = None
self.flow_format = ofproto_v1_0.NXFF_OPENFLOW10
self.ofp_brick = ryu.base.app_manager.lookup_service_brick('ofp_event')
self.set_state(handler.HANDSHAKE_DISPATCHER)
def close(self):
self.set_state(handler.DEAD_DISPATCHER)
def set_state(self, state):
self.state = state
ev = ofp_event.EventOFPStateChange(self)
ev.state = state
self.ofp_brick.send_event_to_observers(ev, state)
# Low level socket handling layer
@_deactivate
def _recv_loop(self):
buf = bytearray()
required_len = ofproto_common.OFP_HEADER_SIZE
count = 0
while self.is_active:
ret = self.socket.recv(required_len)
if len(ret) == 0:
self.is_active = False
break
buf += ret
while len(buf) >= required_len:
(version, msg_type, msg_len, xid) = ofproto_parser.header(buf)
required_len = msg_len
if len(buf) < required_len:
break
msg = ofproto_parser.msg(self,
version, msg_type, msg_len, xid, buf)
# LOG.debug('queue msg %s cls %s', msg, msg.__class__)
if msg:
ev = ofp_event.ofp_msg_to_ev(msg)
self.ofp_brick.send_event_to_observers(ev, self.state)
dispatchers = lambda x: x.callers[ev.__class__].dispatchers
handlers = [handler for handler in
self.ofp_brick.get_handlers(ev) if
self.state in dispatchers(handler)]
for handler in handlers:
handler(ev)
buf = buf[required_len:]
required_len = ofproto_common.OFP_HEADER_SIZE
# We need to schedule other greenlets. Otherwise, ryu
# can't accept new switches or handle the existing
# switches. The limit is arbitrary. We need a better
# approach in the future.
count += 1
if count > 2048:
count = 0
hub.sleep(0)
@_deactivate
def _send_loop(self):
try:
while self.is_active:
buf = self.send_q.get()
self.socket.sendall(buf)
finally:
q = self.send_q
# first, clear self.send_q to prevent new references.
self.send_q = None
# there might be threads currently blocking in send_q.put().
# unblock them by draining the queue.
try:
while q.get(block=False):
pass
except hub.QueueEmpty:
pass
def send(self, buf):
if self.send_q:
self.send_q.put(buf)
def set_xid(self, msg):
self.xid += 1
self.xid &= self.ofproto.MAX_XID
msg.set_xid(self.xid)
return self.xid
def send_msg(self, msg):
assert isinstance(msg, self.ofproto_parser.MsgBase)
if msg.xid is None:
self.set_xid(msg)
msg.serialize()
# LOG.debug('send_msg %s', msg)
self.send(msg.buf)
def serve(self):
send_thr = hub.spawn(self._send_loop)
# send hello message immediately
hello = self.ofproto_parser.OFPHello(self)
self.send_msg(hello)
try:
self._recv_loop()
finally:
hub.kill(send_thr)
hub.joinall([send_thr])
#
# Utility methods for convenience
#
def send_packet_out(self, buffer_id=0xffffffff, in_port=None,
actions=None, data=None):
if in_port is None:
in_port = self.ofproto.OFPP_NONE
packet_out = self.ofproto_parser.OFPPacketOut(
self, buffer_id, in_port, actions, data)
self.send_msg(packet_out)
def send_flow_mod(self, rule, cookie, command, idle_timeout, hard_timeout,
priority=None, buffer_id=0xffffffff,
out_port=None, flags=0, actions=None):
if priority is None:
priority = self.ofproto.OFP_DEFAULT_PRIORITY
if out_port is None:
out_port = self.ofproto.OFPP_NONE
flow_format = rule.flow_format()
assert (flow_format == ofproto_v1_0.NXFF_OPENFLOW10 or
flow_format == ofproto_v1_0.NXFF_NXM)
if self.flow_format < flow_format:
self.send_nxt_set_flow_format(flow_format)
if flow_format == ofproto_v1_0.NXFF_OPENFLOW10:
match_tuple = rule.match_tuple()
match = self.ofproto_parser.OFPMatch(*match_tuple)
flow_mod = self.ofproto_parser.OFPFlowMod(
self, match, cookie, command, idle_timeout, hard_timeout,
priority, buffer_id, out_port, flags, actions)
else:
flow_mod = self.ofproto_parser.NXTFlowMod(
self, cookie, command, idle_timeout, hard_timeout,
priority, buffer_id, out_port, flags, rule, actions)
self.send_msg(flow_mod)
def send_flow_del(self, rule, cookie, out_port=None):
self.send_flow_mod(rule=rule, cookie=cookie,
command=self.ofproto.OFPFC_DELETE,
idle_timeout=0, hard_timeout=0, priority=0,
out_port=out_port)
def send_delete_all_flows(self):
rule = nx_match.ClsRule()
self.send_flow_mod(
rule=rule, cookie=0, command=self.ofproto.OFPFC_DELETE,
idle_timeout=0, hard_timeout=0, priority=0, buffer_id=0,
out_port=self.ofproto.OFPP_NONE, flags=0, actions=None)
def send_barrier(self):
barrier_request = self.ofproto_parser.OFPBarrierRequest(self)
self.send_msg(barrier_request)
def send_nxt_set_flow_format(self, flow_format):
assert (flow_format == ofproto_v1_0.NXFF_OPENFLOW10 or
flow_format == ofproto_v1_0.NXFF_NXM)
if self.flow_format == flow_format:
# Nothing to do
return
self.flow_format = flow_format
set_format = self.ofproto_parser.NXTSetFlowFormat(self, flow_format)
# FIXME: If NXT_SET_FLOW_FORMAT or NXFF_NXM is not supported by
# the switch then an error message will be received. It may be
# handled by setting self.flow_format to
# ofproto_v1_0.NXFF_OPENFLOW10 but currently isn't.
self.send_msg(set_format)
self.send_barrier()
def is_reserved_port(self, port_no):
return port_no > self.ofproto.OFPP_MAX
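# Example (sketch, hypothetical handler): in a Ryu application event
# handler, 'ev.msg.datapath' is a Datapath instance, so the convenience
# helpers above can be called directly:
# datapath = ev.msg.datapath
# datapath.send_delete_all_flows()
# datapath.send_barrier()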
def datapath_connection_factory(socket, address):
LOG.debug('connected socket:%s address:%s', socket, address)
with contextlib.closing(Datapath(socket, address)) as datapath:
try:
datapath.serve()
except:
# Something went wrong.
# In particular, a malicious switch can send a malformed
# packet that makes the parser raise an exception.
# Can we do anything more graceful?
if datapath.id is None:
dpid_str = "%s" % datapath.id
else:
dpid_str = dpid_to_str(datapath.id)
LOG.error("Error in the datapath %s from %s", dpid_str, address)
raise
| netgroup/dreamer-ryu | ryu/controller/controller.py | Python | apache-2.0 | 11,676 |
#!/usr/bin/python
# Copyright (c) 2009, Purdue University
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice, this
# list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
#
# Neither the name of the Purdue University nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""This module is an API to access the dnsManagement database.
This module should only be run by servers with authentication layers
that are active. This module does not include authentication, but does
include authorization.
The api that will be exposed by this module is meant for use in a web
application or rpc server. This module is not for use in command line tools.
The two primary uses of this class are:
1. to use convenience functions to get large amounts of data out of the db
without large amounts of db queries. For usage on this consult the pydoc
on the individual functions.
2. to Make/Remove/List rows in the database. The method that is used in this
class is based on generic Make/Remove/List functions that take specific
dictionaries that correspond to the table that is being referenced.
Here is an example of how to remove rows from the acls table:
acls_dict = db_instance.GetEmptyRowDict('acls')
acls_dict['acl_name'] = 'test_acl'
db_instance.StartTransaction()
try:
matching_rows = db_instance.ListRow('acls', acls_dict)
for row in matching_rows:
db_instance.RemoveRow('acls', row)
except Exception:
db_instance.EndTransaction(rollback=True)
else:
db_instance.EndTransaction()
Note: MySQLdb.Error can be raised in almost any function in this module. Please
keep that in mind when using this module.
"""
__copyright__ = 'Copyright (C) 2009, Purdue University'
__license__ = 'BSD'
__version__ = '#TRUNK#'
import Queue
import threading
import time
import uuid
import warnings
import MySQLdb
import constants
import data_validation
import embedded_files
import errors
import helpers_lib
import codecs
class dbAccess(object):
"""This class provides the primary interface for connecting and interacting
with the roster database.
"""
def __init__(self, db_host, db_user, db_passwd, db_name, big_lock_timeout,
big_lock_wait, thread_safe=True, ssl=False, ssl_ca=None,
ssl_cert=None, ssl_key=None, ssl_capath=None, ssl_cipher=None,
db_debug=False, db_debug_log=None):
"""Instantiates the db_access class.
Inputs:
db_host: string of the database host name
db_user: string of the user name used to connect to mysql
db_passwd: string of password used to connect to mysql
db_name: string of name of database in mysql server to use
big_lock_timeout: integer of how long the big lock should be valid for
big_lock_wait: integer of how long to wait for processes to finish
before locking the database
thread_safe: boolean of if db_acceess should be thread safe
"""
# Do some better checking of these args
self.db_host = db_host
self.db_user = db_user
self.db_passwd = db_passwd
self.db_name = db_name
self.big_lock_timeout = big_lock_timeout
self.big_lock_wait = big_lock_wait
self.ssl = ssl
self.ssl_ca = ssl_ca
self.ssl_settings = {}
self.db_debug = db_debug
self.db_debug_log = db_debug_log
if( self.ssl ):
if( self.ssl_ca ):
self.ssl_settings['ca'] = ssl_ca
else:
raise errors.ConfigError('ssl_ca not specified in config file.')
self.transaction_init = False
self.connection = None
self.cursor = None
# This is generated only when ListRow is called and is then cached for
# the life of the object.
self.foreign_keys = []
self.data_validation_instance = None
self.locked_db = False
self.thread_safe = thread_safe
self.queue = Queue.Queue()
self.now_serving = None
self.queue_update_lock = threading.Lock()
def close(self):
"""Closes a connection that has been opened.
A new connection will be created on StartTransaction.
"""
if( self.connection is not None ):
self.connection.close()
self.connection = None
def cursor_execute(self, execution_string, values={}):
"""This function allows for the capture of every mysql command that
is run in this class.
Inputs:
execution_string: mysql command string
values: dictionary of values for mysql command
"""
if( self.db_debug ):
if( self.db_debug_log ):
#If the execution_string contains a unicode character we must account
#for it. So we need to use the codecs package to write to a utf-8 log
#file, instead of ASCII like the 'normal' open() results in.
debug_log_handle = codecs.open(self.db_debug_log, encoding='utf-8',
mode='a')
debug_log_handle.write(execution_string % values)
debug_log_handle.write('\n')
debug_log_handle.close()
else:
print execution_string % values
try:
self.cursor.execute(execution_string, values)
except MySQLdb.ProgrammingError:
raise
except MySQLdb.Error, e:
if( e[0] in errors.PARSABLE_MYSQL_ERRORS ):
raise errors.DatabaseError(e)
else:
raise
def StartTransaction(self):
"""Starts a transaction.
Also it starts a db connection if none exists or it times out.
Always creates a new cursor.
This function also serializes all requests on this object and if the
big lock has been activated will wait for it to be released.
Raises:
TransactionError: Cannot start new transaction; last transaction not
committed or rolled back.
"""
if( self.thread_safe ):
unique_id = uuid.uuid4()
self.queue.put(unique_id)
while_sleep = 0
while( unique_id != self.now_serving ):
time.sleep(while_sleep)
self.queue_update_lock.acquire()
if( self.now_serving is None ):
self.now_serving = self.queue.get()
self.queue_update_lock.release()
while_sleep = 0.005
else:
if( self.transaction_init ):
raise errors.TransactionError('Cannot start new transaction; last '
'transaction not committed or '
'rolled back.')
if( self.connection is not None ):
try:
self.cursor = self.connection.cursor(MySQLdb.cursors.DictCursor)
self.cursor_execute('DO 0') # NOOP to test connection
except MySQLdb.OperationalError:
self.connection = None
if( self.connection is None ):
if( self.ssl ):
self.connection = MySQLdb.connect(
host=self.db_host, user=self.db_user, passwd=self.db_passwd,
db=self.db_name, use_unicode=True, charset='utf8',
ssl=self.ssl_settings)
else:
self.connection = MySQLdb.connect(
host=self.db_host, user=self.db_user, passwd=self.db_passwd,
db=self.db_name, use_unicode=True, charset='utf8')
self.cursor = self.connection.cursor(MySQLdb.cursors.DictCursor)
while_sleep = 0
db_lock_locked = 1
while( db_lock_locked ):
time.sleep(while_sleep)
try:
self.cursor_execute('SELECT `locked`, `lock_last_updated`, '
'NOW() as `now` from `locks` WHERE '
'`lock_name`="db_lock_lock"')
rows = self.cursor.fetchall()
except MySQLdb.ProgrammingError:
break
if( not rows ):
break
lock_last_updated = rows[0]['lock_last_updated']
db_lock_locked = rows[0]['locked']
now = rows[0]['now']
if( (now - lock_last_updated).seconds > self.big_lock_timeout ):
break
while_sleep = 1
self.transaction_init = True
def EndTransaction(self, rollback=False):
"""Ends a transaction.
Also does some simple checking to make sure a connection was open first
and releases itself from the current queue.
Inputs:
rollback: boolean of if the transaction should be rolled back
Raises:
TransactionError: Must run StartTransaction before EndTransaction.
"""
if( not self.thread_safe ):
if( not self.transaction_init ):
raise errors.TransactionError('Must run StartTransaction before '
'EndTransaction.')
try:
self.cursor.close()
if( rollback ):
self.connection.rollback()
else:
self.connection.commit()
finally:
self.transaction_init = False
if( self.thread_safe ):
if( not self.queue.empty() ):
self.now_serving = self.queue.get()
else:
self.now_serving = None
def CheckMaintenanceFlag(self):
"""Checks the maintenance flag in the database.
Outputs:
bool: boolean of maintenance mode
"""
row = self.ListRow('locks', {'lock_name': u'maintenance', 'locked': None})
return bool(row[0]['locked'])
def LockDb(self):
"""This function is to lock the whole database for consistent data
retrevial.
This function expects for self.db_instance.cursor to be instantiated and
valid.
Raises:
TransactionError: Must unlock tables before re-locking them.
"""
if( self.locked_db is True ):
raise errors.TransactionError('Must unlock tables before re-locking them')
self.cursor_execute('UPDATE `locks` SET `locked`=1 WHERE '
'`lock_name`="db_lock_lock"')
time.sleep(self.big_lock_wait)
self.cursor_execute(
'LOCK TABLES %s READ' % ' READ, '.join(self.ListTableNames()))
self.locked_db = True
def UnlockDb(self):
"""This function is to unlock the whole database.
This function expects for self.db_instance.cursor to be instantiated and
valid. It also expects all tables to be locked.
Raises:
TransactionError: Must lock tables before unlocking them.
"""
if( self.locked_db is False ):
raise errors.TransactionError('Must lock tables before unlocking them')
self.cursor_execute('UNLOCK TABLES')
self.cursor_execute('UPDATE `locks` SET `locked`=0 WHERE '
'`lock_name`="db_lock_lock"')
self.locked_db = False
def InitDataValidation(self):
"""Get all reserved words and group permissions and init the
data_validation_instance
"""
cursor = self.connection.cursor()
try:
if( self.db_debug ):
if( self.db_debug_log ):
#If the execution_string contains a unicode character we must account
#for it. So we need to use the codecs package to write to a utf-8 log
#file, instead of ASCII like the 'normal' open() results in.
debug_log_handle = codecs.open(self.db_debug_log, encoding='utf-8',
mode='a')
debug_log_handle.write('SELECT reserved_word FROM reserved_words')
debug_log_handle.write('\n')
debug_log_handle.close()
else:
print 'SELECT reserved_word FROM reserved_words'
cursor.execute('SELECT reserved_word FROM reserved_words')
reserved_words_rows = cursor.fetchall()
if( self.db_debug ):
if( self.db_debug_log ):
debug_log_handle = codecs.open(self.db_debug_log, encoding='utf-8',
mode='a')
debug_log_handle.write('SELECT record_type FROM record_types')
debug_log_handle.write('\n')
debug_log_handle.close()
else:
print 'SELECT record_type FROM record_types'
cursor.execute('SELECT record_type FROM record_types')
record_types_rows = cursor.fetchall()
finally:
cursor.close()
words = [row[0] for row in reserved_words_rows]
record_types = [row[0] for row in record_types_rows]
self.data_validation_instance = data_validation.DataValidation(
words, record_types)
def MakeRow(self, table_name, row_dict):
"""Creates a row in the database using the table name and row dict
Inputs:
table_name: string of valid table name from constants
row_dict: dictionary that corresponds to table_name
Raises:
InvalidInputError: Table name not valid
TransactionError: Must run StartTransaction before inserting
Outputs:
int: last insert id
"""
if( not table_name in helpers_lib.GetValidTables() ):
raise errors.InvalidInputError('Table name not valid: %s' % table_name)
if( not self.transaction_init ):
raise errors.TransactionError('Must run StartTransaction before '
'inserting.')
if( self.data_validation_instance is None ):
self.InitDataValidation()
self.data_validation_instance.ValidateRowDict(table_name, row_dict)
column_names = []
column_assignments = []
for k in row_dict.iterkeys():
column_names.append(k)
column_assignments.append('%s%s%s' % ('%(', k, ')s'))
query = 'INSERT INTO %s (%s) VALUES (%s)' % (table_name,
','.join(column_names),
','.join(column_assignments))
self.cursor_execute(query, row_dict)
return self.cursor.lastrowid
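# Example (sketch, hypothetical values): creating a row, following the
# transaction pattern from the module docstring; the 'acls' table and
# its columns come from the GetEmptyRowDict example further below.
# acls_dict = db_instance.GetEmptyRowDict('acls')
# acls_dict['acl_name'] = u'test_acl'
# acls_dict['acl_range_allowed'] = 1
# acls_dict['acl_cidr_block'] = u'192.168.0.0/24'
# db_instance.StartTransaction()
# try:
#   db_instance.MakeRow('acls', acls_dict)
# except Exception:
#   db_instance.EndTransaction(rollback=True)
# else:
#   db_instance.EndTransaction()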
def TableRowCount(self, table_name):
"""Counts the amount of records in a table and returns it.
Inputs:
table_name: string of valid table name from constants
Raises:
InvalidInputError: Table name not valid
TransactionError: Must run StartTransaction before getting row count.
Outputs:
int: number of rows found
"""
if( not table_name in helpers_lib.GetValidTables() ):
raise errors.InvalidInputError('Table name not valid: %s' % table_name)
if( not self.transaction_init ):
raise errors.TransactionError('Must run StartTransaction before getting '
'row count.')
self.cursor_execute('SELECT COUNT(*) FROM %s' % table_name)
row_count = self.cursor.fetchone()
return row_count['COUNT(*)']
def RemoveRow(self, table_name, row_dict):
"""Removes a row in the database using the table name and row dict
Inputs:
table_name: string of valid table name from constants
row_dict: dictionary that corresponds to table_name
Raises:
InvalidInputError: Table name not valid
TransactionError: Must run StartTransaction before deleting
Outputs:
int: number of rows affected
"""
if( not table_name in helpers_lib.GetValidTables() ):
raise errors.InvalidInputError('Table name not valid: %s' % table_name)
if( not self.transaction_init ):
raise errors.TransactionError('Must run StartTransaction before deleting.')
if( self.data_validation_instance is None ):
self.InitDataValidation()
self.data_validation_instance.ValidateRowDict(table_name, row_dict)
where_list = []
for k in row_dict.iterkeys():
where_list.append('%s=%s%s%s' % (k, '%(', k, ')s'))
query = 'DELETE FROM %s WHERE %s' % (table_name, ' AND '.join(where_list))
self.cursor_execute(query, row_dict)
return self.cursor.rowcount
def UpdateRow(self, table_name, search_row_dict, update_row_dict):
"""Updates a row in the database using search and update dictionaries.
Inputs:
table_name: string of valid table name from constants
search_row_dict: dictionary that corresponds to table_name containing
search args
update_row_dict: dictionary that corresponds to table_name containing
update args
Raises:
InvalidInputError: Table name not valid
TransactionError: Must run StartTransaction before updating
Outputs:
int: number of rows affected
"""
if( not table_name in helpers_lib.GetValidTables() ):
raise errors.InvalidInputError('Table name not valid: %s' % table_name)
if( not self.transaction_init ):
raise errors.TransactionError('Must run StartTransaction before updating.')
if( self.data_validation_instance is None ):
self.InitDataValidation()
self.data_validation_instance.ValidateRowDict(table_name, search_row_dict,
none_ok=True)
self.data_validation_instance.ValidateRowDict(table_name, update_row_dict,
none_ok=True)
query_updates = []
query_searches = []
combined_dict = {}
for k, v in update_row_dict.iteritems():
if( v is not None ):
query_updates.append('%s%s%s%s' % (k, '=%(update_', k, ')s'))
combined_dict['update_%s' % k] = v
for k, v in search_row_dict.iteritems():
if( v is not None ):
query_searches.append('%s=%s%s%s' % (k, '%(search_', k, ')s'))
combined_dict['search_%s' % k] = v
query = 'UPDATE %s SET %s WHERE %s' % (table_name, ','.join(query_updates),
' AND '.join(query_searches))
self.cursor_execute(query, combined_dict)
return self.cursor.rowcount
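# Example (sketch, hypothetical values): renaming an acl; only non-None
# keys in each dict take part in the generated query.
# search_dict = db_instance.GetEmptyRowDict('acls')
# update_dict = db_instance.GetEmptyRowDict('acls')
# search_dict['acl_name'] = u'test_acl'
# update_dict['acl_name'] = u'new_acl'
# db_instance.UpdateRow('acls', search_dict, update_dict)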
def ListRow(self, *args, **kwargs):
"""Lists rows in the database using a dictionary of tables. Then returns
the rows found. Joins are auto generated on the fly based on foreign keys
in the database.
Inputs:
args: pairs of string of table name and dict of rows
kwargs: lock_rows: default False
column: column to search range on, if using multiple
tables, the column must be in the first table
in args.
range_values: range tuple of values to search within for on column
is_date: boolean of if range is of dates
example usage: ListRow('users', user_row_dict,
'user_group_assignments', user_assign_row_dict,
lock_rows=True)
Raises:
TransactionError: Must run StartTransaction before getting data
UnexpectedDataError: If is_date is specified you must specify column and range
UnexpectedDataError: If column or range is specified both are needed
InvalidInputError: Found unknown option(s)
UnexpectedDataError: No args given, must at least have a pair of table name and row dict
UnexpectedDataError: Number of unnamed args is not even.
Args should be entered in pairs of table name and row dict.
InvalidInputError: Table name not valid
InvalidInputError: Column not found in row
UnexpectedDataError: Column in table is not a DateTime type
UnexpectedDataError: Date from range is not a valid datetime object
InvalidInputError: Range must be int if is_date is not set
InvalidInputError: Multiple tables were passed in but no joins were found
Outputs:
tuple of row dicts consisting of all the tables that were in the input.
all column names in the db are unique so no collisions occur
example: ({'user_name': 'sharrell', 'access_level': 10,
'user_group_assignments_group_name: 'cs',
'user_group_assignments_user_name: 'sharrell'},
{'user_name': 'sharrell', 'access_level': 10,
'user_group_assignments_group_name: 'eas',
'user_group_assignments_user_name: 'sharrell'})
"""
if( not self.transaction_init ):
raise errors.TransactionError('Must run StartTransaction before getting '
'data.')
if( self.data_validation_instance is None ):
self.InitDataValidation()
valid_tables = helpers_lib.GetValidTables()
tables = {}
table_names = []
lock_rows = False
column = None
range_values = ()
is_date = None
if( kwargs ):
if( 'lock_rows' in kwargs ):
lock_rows = kwargs['lock_rows']
del kwargs['lock_rows']
if( 'column' in kwargs ):
column = kwargs['column']
del kwargs['column']
if( 'range_values' in kwargs ):
range_values = kwargs['range_values']
del kwargs['range_values']
if( 'is_date' in kwargs ):
is_date = kwargs['is_date']
del kwargs['is_date']
if( column is None and is_date is not None ):
raise errors.UnexpectedDataError('If is_date is specified you must '
'specify column and range')
if( bool(column) ^ bool(range_values) ):
raise errors.UnexpectedDataError('If column or range is specified '
'both are needed')
if( kwargs ):
raise errors.InvalidInputError('Found unknown option(s): '
'%s' % kwargs.keys())
if( not args ):
raise errors.UnexpectedDataError('No args given, must at least have a '
'pair of table name and row dict')
if( len(args) % 2 ):
raise errors.UnexpectedDataError(
'Number of unnamed args is not even. Args '
'should be entered in pairs of table name '
'and row dict.')
count = 0
for arg in args:
count += 1
if( count % 2 ):
if( not arg in valid_tables ):
raise errors.InvalidInputError('Table name not valid: %s' % arg)
current_table_name = arg
else:
# do checking in validate row dict to check if it is a dict
self.data_validation_instance.ValidateRowDict(current_table_name, arg,
none_ok=True,
all_none_ok=True)
tables[current_table_name] = arg
table_names.append(current_table_name)
if( range_values ):
if( column not in args[1] ):
raise errors.InvalidInputError('Column %s not found in row '
'dictionary: %s' % (column, args[1]))
if( is_date ):
if( constants.TABLES[args[0]][column] != 'DateTime' ):
raise errors.UnexpectedDataError('column: %s in table %s is not a '
'DateTime type' % (column, args[0]))
for date in range_values:
if( not self.data_validation_instance.isDateTime(date) ):
raise errors.UnexpectedDataError(
'Date: %s from range is not a valid '
'datetime object' % date)
else:
for value in range_values:
if( not self.data_validation_instance.isUnsignedInt(value) ):
raise errors.InvalidInputError('Range must be int if is_date '
'is not set')
query_where = []
if( len(tables) > 1 ):
if( not self.foreign_keys ):
self.cursor_execute('SELECT table_name, column_name, '
'referenced_table_name, referenced_column_name '
'FROM information_schema.key_column_usage WHERE '
'referenced_table_name IS NOT NULL AND '
'referenced_table_schema="%s"' % self.db_name)
self.foreign_keys = self.cursor.fetchall()
for key in self.foreign_keys:
if( key['table_name'] in table_names and
key['referenced_table_name'] in table_names ):
query_where.append('(%(table_name)s.%(column_name)s='
'%(referenced_table_name)s.'
'%(referenced_column_name)s)' % key)
if( not query_where ):
raise errors.InvalidInputError('Multiple tables were passed in but no '
'joins were found')
column_names = []
search_dict = {}
for table_name, row_dict in tables.iteritems():
for key, value in row_dict.iteritems():
column_names.append('%s.%s' % (table_name, key))
if( value is not None ):
search_dict[key] = value
query_where.append('%s%s%s%s' % (key, '=%(', key, ')s'))
if( range_values ):
search_dict['start'] = range_values[0]
search_dict['end'] = range_values[1]
query_where.append('%s%s%s%s' % (column, '>=%(start)s AND ',
column, '<=%(end)s'))
query_end = ''
if( query_where ):
query_end = 'WHERE %s' % ' AND '.join(query_where)
if( lock_rows ):
query_end = '%s FOR UPDATE' % query_end
query = 'SELECT %s FROM %s %s' % (','.join(column_names),
','.join(table_names),
query_end)
self.cursor_execute(query, search_dict)
return self.cursor.fetchall()
def GetEmptyRowDict(self, table_name):
"""Gives a dict that has all the members needed to interact with the
the given table using the Make/Remove/ListRow functions.
Inputs:
table_name: string of valid table name from constants
Raises:
InvalidInputError: Table name not valid
Outputs:
dictionary: of empty row for specified table.
example acls dict:
{'acl_name': None
'acl_range_allowed: None,
'acl_cidr_block': None }
"""
row_dict = helpers_lib.GetRowDict(table_name)
if( not row_dict ):
raise errors.InvalidInputError('Table name not valid: %s' % table_name)
for key in row_dict.iterkeys():
row_dict[key] = None
return row_dict
# Not sure this is needed, buuuuut.
def GetValidTables(self):
"""Export this function to the top level of the db_access stuff so
it can be used without importing un-needed classes.
Outputs:
list: valid table names
"""
helpers_lib.GetValidTables()
def GetRecordArgsDict(self, record_type):
"""Get args for a specific record type from the db and shove them into
a dictionary.
Inputs:
record_type: string of record type
Raises:
InvalidInputError: Unknown record type
Outputs:
dictionary: keyed by argument name with values of data type of that arg
example: {'mail_host': 'Hostname'
'priority': 'UnsignedInt'}
"""
search_record_arguments_dict = self.GetEmptyRowDict('record_arguments')
search_record_arguments_dict['record_arguments_type'] = record_type
self.StartTransaction()
try:
record_arguments = self.ListRow('record_arguments',
search_record_arguments_dict)
finally:
self.EndTransaction()
record_arguments_dict = {}
if( not record_arguments ):
raise errors.InvalidInputError('Unknown record type: %s' % record_type)
for record_argument in record_arguments:
record_arguments_dict[record_argument['argument_name']] = (
record_argument['argument_data_type'])
return record_arguments_dict
def GetEmptyRecordArgsDict(self, record_type):
"""Gets empty args dict for a specific record type
Inputs:
record_type: string of record type
Outputs:
dictionary: keyed by argument name with values of None
example: {'mail_host': None
'priority': None}
"""
args_dict = self.GetRecordArgsDict(record_type)
for k in args_dict.iterkeys():
args_dict[k] = None
return args_dict
def ValidateRecordArgsDict(self, record_type, record_args_dict,
none_ok=False):
"""Type checks record args dynamically.
Inputs:
record_type: string of record_type
record_args_dict: dictionary for args keyed by arg name.
a filled out dict from GetEmptyRecordArgsDict()
      none_ok: boolean of whether None values should be accepted.
Raises:
InvalidInputError: dict for record type should have these keys
FucntionError: No function to check data type
UnexpectedDataError: Invalid data type
"""
record_type_dict = self.GetRecordArgsDict(record_type)
if( not set(record_type_dict.keys()) == set(record_args_dict.keys()) ):
raise errors.InvalidInputError('dict for record type %s should have '
'these keys: %s' % (record_type,
record_type_dict))
if( self.data_validation_instance is None ):
self.InitDataValidation()
data_validation_methods = dir(data_validation.DataValidation([], []))
for record_arg_name in record_args_dict.keys():
if( not 'is%s' % record_type_dict[record_arg_name] in
data_validation_methods ):
raise errors.FucntionError('No function to check data type %s' %
record_type_dict[record_arg_name])
if( none_ok and record_args_dict[record_arg_name] is None ):
continue
if( not getattr(self.data_validation_instance, 'is%s' %
record_type_dict[record_arg_name])(
record_args_dict[record_arg_name]) ):
raise errors.UnexpectedDataError('Invalid data type %s: %s' % (
record_type_dict[record_arg_name],
record_args_dict[record_arg_name]))
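  # Example flow (hypothetical 'mx' record type and db instance; argument
  # names follow the GetRecordArgsDict() docstring above, illustration only):
  #   args = db_instance.GetEmptyRecordArgsDict('mx')  # {'mail_host': None, 'priority': None}
  #   args['mail_host'] = 'mail.university.edu.'
  #   args['priority'] = 10
  #   db_instance.ValidateRecordArgsDict('mx', args)   # raises on a type mismatch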
def ListTableNames(self):
"""Lists all tables in the database.
Outputs:
List: List of tables
"""
query = 'SHOW TABLES'
self.cursor_execute(query)
tables = self.cursor.fetchall()
table_list = []
for table_dict in tables:
for table in table_dict:
table_list.append(table_dict[table])
return table_list
def GetCurrentTime(self):
"""Returns datetime object of current time in database.
Outputs:
datetime: current time in the database
"""
self.cursor_execute('SELECT NOW()')
return self.cursor.fetchone()['NOW()']
def CreateRosterDatabase(self, schema=None):
"""Destroys existing table structure in database and replaces it
with schema that is passed in(or default schema).
DO NOT RUN THIS AGAINST A DATABASE THAT IS NOT READY TO BE CLEARED
This function is used because of a poorly understood bug in MySQLdb
that does not allow our schema to be executed as one big query. The
work around is splitting the whole thing up and commiting each piece
separately.
Inputs:
schema: string of sql schema
"""
if( schema is None ):
schema = embedded_files.SCHEMA_FILE
schema_lines = schema.split('\n')
execute_lines = []
continued_line = []
for line in schema_lines:
if( line.lstrip().startswith('#') ):
continue
if( line.endswith(';') ):
continued_line.append(line)
execute_lines.append('\n'.join(continued_line))
continued_line = []
else:
continued_line.append(line)
warnings.filterwarnings('ignore', 'Unknown table.*')
for line in execute_lines:
self.StartTransaction()
try:
self.cursor_execute(line)
finally:
self.EndTransaction()
def DumpDatabase(self):
"""This will dump the entire database to memory.
This would be done by mysqldump but it needs to be done in the same lock
as other processes. So this is a simple mysqldump function.
Outputs:
Dictionary: Dictionary with keys of table name and schema/data for each
table as values.
"""
table_data = {}
self.cursor_execute('SHOW TABLES')
table_names = self.cursor.fetchall()
self.cursor_execute('SET OPTION SQL_QUOTE_SHOW_CREATE=1')
for table_name in table_names:
table_name = table_name.values()[0]
table_data[table_name] = {}
self.cursor_execute('SHOW CREATE TABLE %s' % table_name)
table_data[table_name]['schema'] = self.cursor.fetchone()['Create Table']
self.cursor_execute('DESCRIBE %s' % table_name)
table_data[table_name]['columns'] = []
table_descriptions = self.cursor.fetchall()
for table_description in table_descriptions:
table_data[table_name]['columns'].append(table_description['Field'])
self.cursor_execute('SELECT %s FROM %s' %
(','.join(table_data[table_name]['columns']),
table_name))
table_rows = self.cursor.fetchall()
table_data[table_name]['rows'] = []
for row in table_rows:
row_dict = {}
for key, value in row.iteritems():
row_dict[key] = self.connection.literal(value)
if( isinstance(row_dict[key], str) ):
row_dict[key] = unicode(row_dict[key], 'utf-8')
table_data[table_name]['rows'].append(row_dict)
return table_data
### These functions are for the user class
def GetUserAuthorizationInfo(self, user):
"""Grabs authorization data from the db and returns a dict.
This function does two selects on the db, one for forward and one for
reverse zones. It also parses the data into a dict for ease of use.
Inputs:
user: string of username
Raises:
UnexpectedDataError: Row did not contain
reverse_range_permissions or
forward_zone_permissions
Outputs:
dict: dict with all the relevant information
example:
{'user_access_level': '2',
'user_name': 'shuey',
'forward_zones': [
{'zone_name': 'cs.university.edu',
'group_permission': ['a', 'aaaa']},
{'zone_name': 'eas.university.edu',
'group_permission': ['a', 'aaaa', 'cname']},
{'zone_name': 'bio.university.edu',
             'group_permission': ['a', 'ns']}],
'groups': ['cs', 'bio'],
'reverse_ranges': [
{'cidr_block': '192.168.0.0/24',
'group_permission': ['ptr', 'cname']},
{'cidr_block': '192.168.0.0/24',
'group_permission': ['ptr']},
{'cidr_block': '192.168.1.0/24',
'group_permission': ['ptr', 'cname']}]}
"""
auth_info_dict = {}
db_data = []
users_dict = self.GetEmptyRowDict('users')
users_dict['user_name'] = user
groups_dict = self.GetEmptyRowDict('groups')
user_group_assignments_dict = self.GetEmptyRowDict('user_group_assignments')
forward_zone_permissions_dict = self.GetEmptyRowDict(
'forward_zone_permissions')
reverse_range_permissions_dict = self.GetEmptyRowDict(
'reverse_range_permissions')
group_forward_permissions_dict = self.GetEmptyRowDict(
'group_forward_permissions')
group_reverse_permissions_dict = self.GetEmptyRowDict(
'group_reverse_permissions')
auth_info_dict['user_name'] = user
auth_info_dict['groups'] = []
auth_info_dict['forward_zones'] = []
auth_info_dict['reverse_ranges'] = []
self.StartTransaction()
try:
db_data.extend(self.ListRow('users', users_dict,
'groups', groups_dict,
'user_group_assignments',
user_group_assignments_dict,
'forward_zone_permissions',
forward_zone_permissions_dict,
'group_forward_permissions',
group_forward_permissions_dict))
db_data.extend(self.ListRow('users', users_dict,
'groups', groups_dict,
'user_group_assignments',
user_group_assignments_dict,
'reverse_range_permissions',
reverse_range_permissions_dict,
'group_reverse_permissions',
group_reverse_permissions_dict))
if( not db_data ):
self.cursor_execute('SELECT access_level FROM users '
'WHERE user_name="%s"' % user)
db_data.extend(self.cursor.fetchall())
if( db_data ):
auth_info_dict['user_access_level'] = db_data[0]['access_level']
return auth_info_dict
else:
return {}
finally:
self.EndTransaction()
auth_info_dict['user_access_level'] = db_data[0]['access_level']
for row in db_data:
if( row.has_key('group_forward_permissions_group_permission') ):
if( not row['user_group_assignments_group_name'] in
auth_info_dict['groups'] ):
auth_info_dict['groups'].append(
row['user_group_assignments_group_name'])
if( not {'zone_name': row['forward_zone_permissions_zone_name'],
'group_permission': row[
'group_forward_permissions_group_permission']}
in (auth_info_dict['forward_zones']) ):
auth_info_dict['forward_zones'].append(
{'zone_name': row['forward_zone_permissions_zone_name'],
'group_permission': row[
'group_forward_permissions_group_permission']})
elif( row.has_key('group_reverse_permissions_group_permission') ):
if( not row['user_group_assignments_group_name'] in
auth_info_dict['groups'] ):
auth_info_dict['groups'].append(
row['user_group_assignments_group_name'])
if( not {'cidr_block': row['reverse_range_permissions_cidr_block'],
'group_permission': row[
'group_reverse_permissions_group_permission']}
in auth_info_dict['reverse_ranges'] ):
auth_info_dict['reverse_ranges'].append(
{'cidr_block': row['reverse_range_permissions_cidr_block'],
'group_permission': row[
'group_reverse_permissions_group_permission']})
elif( auth_info_dict.has_key('forward_zones') and auth_info_dict[
'user_access_level'] >= 64 ):
if( {'zone_name': row['forward_zone_permissions_zone_name'],
'group_permission': []} not in auth_info_dict['forward_zones'] ):
auth_info_dict['forward_zones'].append(
{'zone_name': row['forward_zone_permissions_zone_name'],
'group_permission': []})
elif( auth_info_dict.has_key('reverse_ranges') and auth_info_dict[
'user_access_level'] >= 64 ):
        if( {'cidr_block': row['reverse_range_permissions_cidr_block'],
'group_permission': []} not in auth_info_dict['reverse_ranges'] ):
auth_info_dict['reverse_ranges'].append(
{'cidr_block': row['reverse_range_permissions_cidr_block'],
'group_permission': []})
else:
raise errors.RecordError('Returned row is corrupt.')
return auth_info_dict
def GetZoneOrigins(self, zone_name, view_name):
"""Returns zone origins of zone_name that is passed in.
If no zone origins are found, return None.
If None is passed for view_name the output may contain multiple origins
per zone name.
Inputs:
zone_name: string of zone_name
view_name: string of view_name or None
Outputs:
a dictionary keyed by zone name with values of lists of origins
Example:
{'test_zone': ['192.168.0.in-addr.arpa.', '10.0.1.in-addr.arpa.'],
'example_rev': ['10.0.in-addr.arpa.']}
"""
zone_view_assignments_dict = self.GetEmptyRowDict(
'zone_view_assignments')
zone_view_assignments_dict['zone_view_assignments_zone_name'] = zone_name
zone_view_assignments_dict[
'zone_view_assignments_view_dependency'] = view_name
zone_view_assignment_rows = self.ListRow(
'zone_view_assignments', zone_view_assignments_dict)
origins = {}
if( zone_view_assignment_rows ):
for row in zone_view_assignment_rows:
if( row['zone_view_assignments_zone_name'] not in origins ):
origins[row['zone_view_assignments_zone_name']] = []
if( row['zone_origin'] not in origins[
row['zone_view_assignments_zone_name']] ):
origins[row['zone_view_assignments_zone_name']].append(
row['zone_origin'])
else:
return None
return origins
# vi: set ai aw sw=2:
| stephenlienharrell/roster-dns-management | roster-core/roster_core/db_access.py | Python | bsd-3-clause | 41,336 |
#!/usr/bin/python
import serial
import time
import random
import sys
s = None
num_leds = 93
play_time = 0
def flush_input():
s.flushInput()
def wait_for_ack():
while s.inWaiting() <= 0:
pass
s.read(s.inWaiting())
def command(cmd_text):
s.write((cmd_text + ':').encode())
wait_for_ack()
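# Protocol note: commands are plain text terminated by ':'; the device
# acknowledges with at least one byte, which wait_for_ack() drains.
# Examples drawn from the calls later in this file:
#   command("6:zon:rot")  # rotate the pattern in zone 6
#   command("flu")        # flush buffered changes to the LED strip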
def setup():
    global s, play_time
s = serial.Serial("/dev/ttyS0", 115200)
flush_input()
choose_colors()
command(":::pau:clr")
if len(sys.argv) > 1:
command(sys.argv[1])
if len(sys.argv) > 2:
play_time = float(sys.argv[2])
command("6:zon:blk:red:6:rep:blk:grn:6:rep:blk:org:6:rep:blk:blu:6:rep")
command("5:zon:blk:red:4:rep:blk:grn:4:rep:blk:org:4:rep:blk:blu:4:rep")
command("4:zon:blk:red:2:rep:blk:grn:2:rep:blk:org:2:rep:blk:blu:2:rep")
command("3:zon:blk:red:1:rep:blk:grn:1:rep:blk:org:1:rep:blk:blu:1:rep")
command("2:zon:blk:red:blk:grn:blk:org:blk:blu")
command("1:zon:pur")
num_colors = 12  # random_color() draws only from the first 12 entries (excludes "black" and "random")
colors = [ "red", "orange", "yellow", "ltgreen", "green", "seafoam", "cyan", "ltblue", "blue", "purple", "magenta", "pink", "black", "random" ]
effects = ['blink1','blink2','blink3','blink4','blink5','blink6']
effect_index = 0
chosen_colors = [0,1,2,3,4,5]
def random_color():
r = random.randrange(0, num_colors)
return colors[r]
def choose_colors():
global chosen_colors
for i in range(0, 6):
chosen_colors[i] = random_color()
def shift_colors():
global chosen_colors
for i in xrange(5, 0, -1):
chosen_colors[i] = chosen_colors[i-1]
def clear_colors():
for j in range(0,6):
chosen_colors[j] = "black"
def place_color(zone, color):
command(str(zone) + ":zone:" + color + ":blink" + str(zone) + ":flood")
def place_colors():
place_color(6, chosen_colors[0])
place_color(5, chosen_colors[1])
place_color(4, chosen_colors[2])
place_color(3, chosen_colors[3])
place_color(2, chosen_colors[4])
place_color(1, chosen_colors[5])
def display():
place_colors()
command("flush")
def do_zone(zone):
command(str(zone) + ":zon:rot")
def do_zones():
for i in range(2, 7):
do_zone(i)
command("flush")
idx = -1
def loop():
global idx
do_flush = False
idx = idx + 1
if (idx % 15 == 0):
command("6:zon:rot")
do_flush = True
if (idx % 20 == 0):
command("5:zon:rot")
do_flush = True
if (idx % 30 == 0):
command("4:zon:rot")
do_flush = True
if (idx % 40 == 0):
command("3:zon:rot")
do_flush = True
if (idx % 60 == 0):
command("2:zon:rot")
do_flush = True
if (idx % 8 == 0):
command("1:zon:rot")
do_flush = True
    if do_flush:
command("flu")
time.sleep(play_time)
if __name__ == '__main__':
setup()
while True:
loop()
| jhogsett/linkit | python/windmill.py | Python | mit | 5,144 |
# Copyright (c) 2013, Intel Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""testsuite module."""
import bits
import bits.pause
import bits.pyfs
from collections import namedtuple
import functools
import itertools
import os
import textwrap
import ttypager
# Set to true when the most recently run test failed, to support the default
# verbosity level, which shows detail only for failures.
last_test_failed = False
V_NONE, V_FAIL, V_DETAIL, V_PASS = range(4)
verbose = V_DETAIL
def set_verbose(value):
global verbose
if value < V_NONE or value > V_PASS:
raise ValueError("set_verbose: value out of range: {}".format(value))
verbose = value
def show_verbose():
ttypager.ttypager(text=
"""Current test verbosity level: {}
Test verbosity levels
0 = Summary of PASS / FAIL counts only
1 = Test string output for FAIL only
2 = Detailed output for FAIL only (default)
3 = Detailed output for PASS / FAIL
""".format(verbose))
pass_count = fail_count = 0
def passed():
global last_test_failed, pass_count
pass_count += 1
last_test_failed = False
def failed():
global last_test_failed, fail_count
fail_count += 1
last_test_failed = True
def reset():
global last_test_failed, pass_count, fail_count
pass_count = fail_count = 0
last_test_failed = False
def CSR(name, uncore_bus_num, dev, fun, reg, bytes=4, highbit=63, lowbit=0):
if bytes == 4:
highbit = min(highbit, 31)
elif bytes == 2:
highbit = min(highbit, 15)
elif bytes == 1:
highbit = min(highbit, 7)
value = (bits.pcie_read(uncore_bus_num, dev, fun, reg, bytes) & ((1 << (highbit + 1)) - 1)) >> lowbit
detail = "{0} (CSR B{1:x}h:D{2}:F{3}:{4:x}h [{5:d}:{6:d}])".format(name, uncore_bus_num, dev, fun, reg, highbit, lowbit)
detail += " = 0x{0:x}".format(value)
return value, detail
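# Example call (hypothetical CSR coordinates; bits.pcie_read is only
# available when running under BITS, so this is illustrative only):
#   value, detail = CSR("MY_REG", uncore_bus_num=0xff, dev=0x10, fun=0,
#                       reg=0x84, bytes=4, highbit=15, lowbit=8)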
def get_summary_count():
return pass_count, fail_count
def test(desc, value):
"""Test a condition; pass if bool(value) is True. Returns bool(value)."""
condition = bool(value)
passed() if condition else failed()
if verbose == V_PASS or (verbose >= V_FAIL and not(condition)):
print "[assert] {0} {1}".format(desc, pass_fail_str(condition))
return condition
def pass_fail_str(condition):
if condition:
return 'PASS'
return 'FAIL'
_wrapper = textwrap.TextWrapper(width=78, initial_indent=' ', subsequent_indent=' ')
def print_info(text):
"""Print informative text"""
if verbose == V_PASS:
print "[info] {}".format(text)
def show_detail():
return verbose == V_PASS or (verbose == V_DETAIL and last_test_failed)
def format_detail(data):
return "\n".join(_wrapper.fill(line) for line in data.splitlines(True))
def print_detail(data):
if show_detail():
print format_detail(data)
def summary():
print 'Summary: {} passed, {} failed'.format(pass_count, fail_count)
reset()
tests = {}
submenus = []
test_cfg = ""
test_submenu_cfgs = []
class _Test(namedtuple("_Test", ("name", "func", "runall", "runsub"))):
__slots__ = ()
def __str__(self):
tags = []
if not self.runall:
tags.append("!all")
if not self.runsub:
tags.append("!sub")
if tags:
tagstr = " ({})".format(",".join(tags))
else:
tagstr = ""
return self.name + tagstr
def add_test(name, func, submenu=None, runall=True, runsub=None):
"""Add a new test to the test menu.
Set submenu to a string to put the test in a submenu with that name. Set
runall=False to exclude the test from the top-level "Run all tests"; runall
defaults to True. Set runsub=False to exclude the test from "Run all
tests" in its submenu; runsub defaults to the same as runall."""
if runsub is None:
runsub = runall
if submenu not in tests:
tests[submenu] = []
if submenu is not None:
submenus.append(submenu)
tests[submenu].append(_Test(name, func, runall, runsub))
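# Registration sketch (hypothetical test function; mirrors the add_test()
# contract documented above):
#   def my_test():
#       test("Example condition holds", 1 + 1 == 2)
#   add_test("Example test", my_test, submenu="Examples", runsub=False)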
def generate_test_cfg():
global test_cfg, test_submenu_cfgs
if not tests:
return ""
test_cfg = textwrap.dedent('''
py 'import testsuite'
menuentry "Run all tests (excluding tests marked !all)" {
py 'testsuite.test_cfg_callback_all()'
}''')
for i, name in enumerate(submenus):
test_cfg += textwrap.dedent('''
menuentry "{}" {{
configfile (python)/test.{}.cfg
}}'''.format(name, i))
test_cfg += generate_submenu_config(None, None)
test_submenu_cfgs = [generate_submenu_config(i, submenu) for i, submenu in enumerate(submenus)]
def generate_submenu_config(submenu_index, submenu):
cfg = ""
if submenu is not None:
cfg += textwrap.dedent('''
menuentry "Run all tests (excluding tests marked !sub)" {{
py 'testsuite.test_cfg_callback_suball({})'
}}'''.format(submenu_index))
for i, t in enumerate(tests.get(submenu, [])):
cfg += textwrap.dedent('''
menuentry "{}" {{
py 'testsuite.test_cfg_callback({}, {})'
}}'''.format(str(t), submenu_index, i))
return cfg
def test_cfg_callback(submenu_index, test_index):
try:
if submenu_index is None:
t = tests[None][test_index]
else:
t = tests[submenus[submenu_index]][test_index]
os.putenv("pager", "1")
print '\n==== {} ===='.format(t.name)
reset()
t.func()
except Exception as e:
test("Internal error; test threw exception", False)
import traceback
traceback.print_exc()
finally:
summary()
bits.pause.pause()
os.putenv("pager", "0")
def test_cfg_callback_suball(submenu_index):
total_passed = total_failed = 0
submenu = submenus[submenu_index]
try:
os.putenv("pager", "1")
print '\n==== {} ===='.format(submenu)
reset()
for t in tests[submenu]:
if not t.runsub:
continue
print '---- {} ----'.format(t.name)
try:
t.func()
except Exception as e:
test("Internal error; test threw exception", False)
import traceback
traceback.print_exc()
total_passed += pass_count
total_failed += fail_count
summary()
finally:
print '\n==== Overall summary: {} passed, {} failed ===='.format(total_passed, total_failed)
bits.pause.pause()
os.putenv("pager", "0")
def test_cfg_callback_all():
try:
os.putenv("pager", "1")
run_all_tests()
finally:
bits.pause.pause()
os.putenv("pager", "0")
def run_all_tests():
total_passed = total_failed = 0
try:
print "\nRunning all tests"
reset()
for submenu in itertools.chain(submenus, [None]):
heading_printed = False
for t in tests[submenu]:
if not t.runall:
continue
if not heading_printed and submenu is not None:
print '\n==== {} ===='.format(submenu)
heading_printed = True
try:
if submenu is None:
print '\n==== {} ===='.format(t.name)
else:
print '---- {} ----'.format(t.name)
t.func()
except Exception as e:
test("Internal error; test threw exception", False)
import traceback
traceback.print_exc()
total_passed += pass_count
total_failed += fail_count
summary()
finally:
print '\n==== Overall summary: {} passed, {} failed ===='.format(total_passed, total_failed)
def finalize_cfgs():
generate_test_cfg()
bits.pyfs.add_static("test.cfg", test_cfg)
for i in range(len(submenus)):
bits.pyfs.add_static("test.{}.cfg".format(i), test_submenu_cfgs[i])
| ii0/bits | python/testsuite.py | Python | bsd-3-clause | 9,610 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-12-04 02:49
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('djangosourcecontrol', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='project',
options={'permissions': (('can_run_project', '[DjangoSourceControl]: Can run projects'), ('can_add_project', '[DjangoSourceControl]: Can add projects'))},
),
]
| kull2222/DjangoSourceControl | dsc/djangosourcecontrol/migrations/0002_auto_20161203_1849.py | Python | gpl-3.0 | 535 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 McKinsey Academy
#
# Authors:
# Jonathan Piacenti <[email protected]>
#
# This software's license gives you freedom; you can copy, convey,
# propagate, redistribute and/or modify this program under the terms of
# the GNU Affero General Public License (AGPL) as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version of the AGPL published by the FSF.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
# General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program in a file in the toplevel directory called
# "AGPLv3". If not, see <http://www.gnu.org/licenses/>.
#
"""
Runs tests for the studio views.
"""
from __future__ import absolute_import
from ddt import ddt, unpack, data
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.support.wait import WebDriverWait
from .base_test import PollBaseTest
from .studio_scenarios import ddt_scenarios
@ddt
class StudioTest(PollBaseTest):
"""
Mixin class for poll studio tests.
"""
default_css_selector = '#settings-tab'
def studio_save(self):
self.browser.find_element_by_css_selector('#poll-submit-options').click()
@data(*ddt_scenarios)
@unpack
def test_add_items(self, page_name, item_type, num_existing_items, answer_css_selector):
"""
Verify we can add more than one item and they both save.
"""
self.go_to_page(page_name, view_name='studio_view')
add_item_button = self.browser.find_element_by_css_selector('#poll-add-%s' % item_type)
# Add two answers
add_item_button.click()
add_item_button.click()
# Make sure we get forms for both.
wait = WebDriverWait(self.browser, self.timeout)
selector = '.poll-%s-studio-item' % item_type
total = num_existing_items + 2
def get_last_item(driver):
items = driver.find_elements_by_css_selector(selector)
try:
# Start from 0
return items[total - 1]
except IndexError:
raise NoSuchElementException
wait.until(get_last_item, u"{}th copy of selector '{}' should exist.".format(total, selector))
answers = self.browser.find_elements_by_css_selector('.poll-%s-studio-item' % item_type)
results = []
for index, element in enumerate(answers[-2:]):
# First input is the label, which should always be there.
label = element.find_element_by_css_selector('input')
text = 'Test %s %s' % (item_type, index)
label.send_keys(text)
results.append(text)
self.studio_save()
self.go_to_page(page_name, css_selector='div.poll-block')
answers = [element.text for element in self.browser.find_elements_by_css_selector(answer_css_selector)]
self.assertEqual(answers[-2:], results)
| open-craft/xblock-poll | tests/integration/test_studio.py | Python | agpl-3.0 | 3,214 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for SavedModel."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tensorflow as tf
from tensorflow.core.protobuf import config_pb2
from tensorflow.core.protobuf import meta_graph_pb2
from tensorflow.python.framework import errors
from tensorflow.python.lib.io import file_io
from tensorflow.python.saved_model import builder as saved_model_builder
from tensorflow.python.saved_model import constants
from tensorflow.python.saved_model import loader
from tensorflow.python.saved_model import utils
from tensorflow.python.util import compat
def tearDownModule():
file_io.delete_recursively(tf.test.get_temp_dir())
class SavedModelTest(tf.test.TestCase):
def testSequence(self):
export_dir = os.path.join(tf.test.get_temp_dir(), "test_sequence")
builder = saved_model_builder.SavedModelBuilder(export_dir)
# Expect an assertion error since add_meta_graph_and_variables() should be
# invoked before any add_meta_graph() calls.
with self.test_session(graph=tf.Graph()) as sess:
self.assertRaises(AssertionError, builder.add_meta_graph, ["foo"])
# Expect an assertion error for multiple calls of
# add_meta_graph_and_variables() since weights should be saved exactly once.
with self.test_session(graph=tf.Graph()) as sess:
v = tf.Variable(42, name="v")
sess.run(tf.initialize_all_variables())
self.assertEqual(42, v.eval())
builder.add_meta_graph_and_variables(sess, ["bar"])
self.assertRaises(AssertionError, builder.add_meta_graph_and_variables,
sess, ["baz"])
def testTags(self):
export_dir = os.path.join(tf.test.get_temp_dir(), "test_tags")
builder = saved_model_builder.SavedModelBuilder(export_dir)
# Graph with a single variable. SavedModel invoked to:
# - add with weights.
# - a single tag (from predefined constants).
with self.test_session(graph=tf.Graph()) as sess:
v = tf.Variable(42, name="v")
sess.run(tf.initialize_all_variables())
self.assertEqual(42, v.eval())
builder.add_meta_graph_and_variables(sess, [constants.TAG_TRAINING])
# Graph that updates the single variable. SavedModel invoked to:
# - simply add the model (weights are not updated).
# - a single tag (from predefined constants).
with self.test_session(graph=tf.Graph()) as sess:
v = tf.Variable(43, name="v")
sess.run(tf.initialize_all_variables())
self.assertEqual(43, v.eval())
builder.add_meta_graph([constants.TAG_SERVING])
# Graph that updates the single variable. SavedModel is invoked:
# - to add the model (weights are not updated).
# - multiple custom tags.
with self.test_session(graph=tf.Graph()) as sess:
v = tf.Variable(44, name="v")
sess.run(tf.initialize_all_variables())
self.assertEqual(44, v.eval())
builder.add_meta_graph(["foo", "bar"])
# Save the SavedModel to disk.
builder.save()
# Restore the graph with a single predefined tag whose variables were saved.
with self.test_session(graph=tf.Graph()) as sess:
loader.load(sess, [constants.TAG_TRAINING], export_dir)
self.assertEqual(42, tf.get_collection(tf.GraphKeys.VARIABLES)[0].eval())
# Restore the graph with a single predefined tag whose variables were not
# saved.
with self.test_session(graph=tf.Graph()) as sess:
loader.load(sess, [constants.TAG_SERVING], export_dir)
self.assertEqual(42, tf.get_collection(tf.GraphKeys.VARIABLES)[0].eval())
# Restore the graph with multiple tags. Provide duplicate tags to test set
# semantics.
with self.test_session(graph=tf.Graph()) as sess:
loader.load(sess, ["foo", "bar", "foo"], export_dir)
self.assertEqual(42, tf.get_collection(tf.GraphKeys.VARIABLES)[0].eval())
# Try restoring a graph with a non-existent tag. This should yield a runtime
# error.
with self.test_session(graph=tf.Graph()) as sess:
self.assertRaises(RuntimeError, loader.load, sess, ["INVALID"],
export_dir)
# Try restoring a graph where a subset of the tags match. Since tag matching
# for meta graph defs follows "all" semantics, this should yield a runtime
# error.
with self.test_session(graph=tf.Graph()) as sess:
self.assertRaises(RuntimeError, loader.load, sess, ["foo", "baz"],
export_dir)
def testVariables(self):
export_dir = os.path.join(tf.test.get_temp_dir(), "test_variables")
builder = saved_model_builder.SavedModelBuilder(export_dir)
# Graph with two variables. SavedModel invoked to:
# - add with weights.
with self.test_session(graph=tf.Graph()) as sess:
v1 = tf.Variable(1, name="v1")
v2 = tf.Variable(2, name="v2")
sess.run(tf.initialize_all_variables())
self.assertEqual(1, v1.eval())
self.assertEqual(2, v2.eval())
builder.add_meta_graph_and_variables(sess, ["foo"])
# Graph with a single variable (subset of the variables from the previous
# graph whose weights were saved). SavedModel invoked to:
# - simply add the model (weights are not updated).
with self.test_session(graph=tf.Graph()) as sess:
v2 = tf.Variable(3, name="v2")
sess.run(tf.initialize_all_variables())
self.assertEqual(3, v2.eval())
builder.add_meta_graph(["bar"])
# Graph with a single variable (disjoint set of variables from the previous
# graph whose weights were saved). SavedModel invoked to:
# - simply add the model (weights are not updated).
with self.test_session(graph=tf.Graph()) as sess:
v3 = tf.Variable(4, name="v3")
sess.run(tf.initialize_all_variables())
self.assertEqual(4, v3.eval())
builder.add_meta_graph(["baz"])
# Save the SavedModel to disk.
builder.save()
# Restore the graph with tag "foo", whose variables were saved.
with self.test_session(graph=tf.Graph()) as sess:
loader.load(sess, ["foo"], export_dir)
collection_vars = tf.get_collection(tf.GraphKeys.VARIABLES)
self.assertEqual(len(collection_vars), 2)
self.assertEqual(1, collection_vars[0].eval())
self.assertEqual(2, collection_vars[1].eval())
# Restore the graph with tag "bar", whose variables were not saved. Only the
# subset of the variables added to the graph will be restored with the
# checkpointed value.
with self.test_session(graph=tf.Graph()) as sess:
loader.load(sess, ["bar"], export_dir)
collection_vars = tf.get_collection(tf.GraphKeys.VARIABLES)
self.assertEqual(len(collection_vars), 1)
self.assertEqual(2, collection_vars[0].eval())
# Try restoring the graph with tag "baz", whose variables were not saved.
# Since this graph has a disjoint set of variables from the set that was
# saved, this should raise an error.
with self.test_session(graph=tf.Graph()) as sess:
self.assertRaises(errors.NotFoundError, loader.load, sess, ["baz"],
export_dir)
def testSaveAsText(self):
export_dir = os.path.join(tf.test.get_temp_dir(), "test_astext")
builder = saved_model_builder.SavedModelBuilder(export_dir)
# Graph with a single variable. SavedModel invoked to:
# - add with weights.
with self.test_session(graph=tf.Graph()) as sess:
v = tf.Variable(42, name="v")
sess.run(tf.initialize_all_variables())
self.assertEqual(42, v.eval())
builder.add_meta_graph_and_variables(sess, ["foo"])
# Graph with the same single variable. SavedModel invoked to:
# - simply add the model (weights are not updated).
with self.test_session(graph=tf.Graph()) as sess:
v = tf.Variable(43, name="v")
sess.run(tf.initialize_all_variables())
self.assertEqual(43, v.eval())
builder.add_meta_graph(["bar"])
# Save the SavedModel to disk in text format.
builder.save(as_text=True)
# Restore the graph with tag "foo", whose variables were saved.
with self.test_session(graph=tf.Graph()) as sess:
loader.load(sess, ["foo"], export_dir)
self.assertEqual(42, tf.get_collection(tf.GraphKeys.VARIABLES)[0].eval())
# Restore the graph with tag "bar", whose variables were not saved.
with self.test_session(graph=tf.Graph()) as sess:
loader.load(sess, ["bar"], export_dir)
self.assertEqual(42, tf.get_collection(tf.GraphKeys.VARIABLES)[0].eval())
def testCollections(self):
export_dir = os.path.join(tf.test.get_temp_dir(), "test_collections")
builder = saved_model_builder.SavedModelBuilder(export_dir)
# Graph with a single variable added to a collection. SavedModel invoked to:
# - add with weights.
with self.test_session(graph=tf.Graph()) as sess:
v = tf.Variable(42, name="v")
tf.add_to_collection("foo_vars", v)
sess.run(tf.initialize_all_variables())
self.assertEqual(42, v.eval())
builder.add_meta_graph_and_variables(sess, ["foo"])
# Graph with the same single variable added to a different collection.
# SavedModel invoked to:
# - simply add the model (weights are not updated).
with self.test_session(graph=tf.Graph()) as sess:
v = tf.Variable(43, name="v")
tf.add_to_collection("bar_vars", v)
sess.run(tf.initialize_all_variables())
self.assertEqual(43, v.eval())
builder.add_meta_graph(["bar"])
# Save the SavedModel to disk.
builder.save()
# Restore the graph with tag "foo", whose variables were saved. The
# collection 'foo_vars' should contain a single element. The collection
# 'bar_vars' should not be found.
with self.test_session(graph=tf.Graph()) as sess:
loader.load(sess, ["foo"], export_dir)
collection_foo_vars = tf.get_collection("foo_vars")
self.assertEqual(len(collection_foo_vars), 1)
self.assertEqual(42, collection_foo_vars[0].eval())
self.assertEqual(len(tf.get_collection("bar_vars")), 0)
# Restore the graph with tag "bar", whose variables were not saved. The
# collection-def exported as part of the meta graph def is updated to
# reflect the new collection. The value of the variable in the
# collection-def corresponds to the saved value (from the previous graph
# with tag "foo").
with self.test_session(graph=tf.Graph()) as sess:
loader.load(sess, ["bar"], export_dir)
collection_bar_vars = tf.get_collection("bar_vars")
self.assertEqual(len(collection_bar_vars), 1)
self.assertEqual(42, collection_bar_vars[0].eval())
self.assertEqual(len(tf.get_collection("foo_vars")), 0)
def testSignatureDefs(self):
export_dir = os.path.join(tf.test.get_temp_dir(), "test_signature_defs")
builder = saved_model_builder.SavedModelBuilder(export_dir)
# Graph with a single variable and a single entry in the signature def map.
# SavedModel is invoked to add with weights.
with self.test_session(graph=tf.Graph()) as sess:
v = tf.Variable(42, name="v")
sess.run(tf.initialize_all_variables())
self.assertEqual(42, v.eval())
# Build and populate an empty SignatureDef for testing.
foo_signature = utils.build_signature_def(dict(), dict(), "foo")
builder.add_meta_graph_and_variables(
sess, ["foo"], signature_def_map={"foo_key": foo_signature})
# Graph with the same single variable and multiple entries in the signature
# def map. No weights are saved by SavedModel.
with self.test_session(graph=tf.Graph()) as sess:
v = tf.Variable(43, name="v")
sess.run(tf.initialize_all_variables())
self.assertEqual(43, v.eval())
# Build and populate a different SignatureDef for testing.
bar_signature = utils.build_signature_def(dict(), dict(), "bar")
# Also, build a different SignatureDef corresponding to "foo_key" defined
# in the previous graph.
foo_new_signature = utils.build_signature_def(dict(), dict(), "foo_new")
builder.add_meta_graph(
["bar"],
signature_def_map={"bar_key": bar_signature,
"foo_key": foo_new_signature})
# Save the SavedModel to disk.
builder.save()
# Restore the graph with tag "foo". The single entry in the SignatureDef map
# corresponding to "foo_key" should exist.
with self.test_session(graph=tf.Graph()) as sess:
foo_graph = loader.load(sess, ["foo"], export_dir)
self.assertEqual(42, tf.get_collection(tf.GraphKeys.VARIABLES)[0].eval())
foo_signature = foo_graph.signature_def
self.assertEqual(len(foo_signature), 1)
self.assertEqual("foo", foo_signature["foo_key"].method_name)
# Restore the graph with tag "bar". The SignatureDef map should have two
# entries. One corresponding to "bar_key" and another corresponding to the
# new value of "foo_key".
with self.test_session(graph=tf.Graph()) as sess:
bar_graph = loader.load(sess, ["bar"], export_dir)
self.assertEqual(42, tf.get_collection(tf.GraphKeys.VARIABLES)[0].eval())
bar_signature = bar_graph.signature_def
self.assertEqual(len(bar_signature), 2)
self.assertEqual("bar", bar_signature["bar_key"].method_name)
self.assertEqual("foo_new", bar_signature["foo_key"].method_name)
def testAssets(self):
export_dir = os.path.join(tf.test.get_temp_dir(), "test_assets")
builder = saved_model_builder.SavedModelBuilder(export_dir)
with self.test_session(graph=tf.Graph()) as sess:
v = tf.Variable(42, name="v")
sess.run(tf.initialize_all_variables())
self.assertEqual(42, v.eval())
# Build an asset collection.
asset_filepath = os.path.join(
compat.as_bytes(tf.test.get_temp_dir()),
compat.as_bytes("hello42.txt"))
file_io.write_string_to_file(asset_filepath, "foo bar baz")
asset_file_tensor = tf.constant(asset_filepath, name="asset_file_tensor")
tf.add_to_collection(tf.GraphKeys.ASSET_FILEPATHS, asset_file_tensor)
ignored_filepath = os.path.join(
compat.as_bytes(tf.test.get_temp_dir()),
compat.as_bytes("ignored.txt"))
file_io.write_string_to_file(ignored_filepath, "will be ignored")
asset_collection = tf.get_collection(tf.GraphKeys.ASSET_FILEPATHS)
builder.add_meta_graph_and_variables(
sess, ["foo"], assets_collection=asset_collection)
# Save the SavedModel to disk.
builder.save()
with self.test_session(graph=tf.Graph()) as sess:
foo_graph = loader.load(sess, ["foo"], export_dir)
# Validate the assets.
collection_def = foo_graph.collection_def
assets_any = collection_def[constants.ASSETS_KEY].any_list.value
self.assertEqual(len(assets_any), 1)
asset = meta_graph_pb2.AssetFileDef()
assets_any[0].Unpack(asset)
assets_path = os.path.join(
compat.as_bytes(export_dir),
compat.as_bytes(constants.ASSETS_DIRECTORY),
compat.as_bytes("hello42.txt"))
asset_contents = file_io.read_file_to_string(assets_path)
self.assertEqual("foo bar baz", compat.as_text(asset_contents))
self.assertEqual("hello42.txt", asset.filename)
self.assertEqual("asset_file_tensor:0", asset.tensor_info.name)
ignored_asset_path = os.path.join(
compat.as_bytes(export_dir),
compat.as_bytes(constants.ASSETS_DIRECTORY),
compat.as_bytes("ignored.txt"))
self.assertFalse(file_io.file_exists(ignored_asset_path))
def testOp(self):
export_dir = os.path.join(tf.test.get_temp_dir(), "test_op")
builder = saved_model_builder.SavedModelBuilder(export_dir)
with tf.Session(
graph=tf.Graph(),
config=config_pb2.ConfigProto(device_count={"CPU": 2})) as sess:
with sess.graph.device("/cpu:0"):
v1 = tf.Variable(1, name="v1")
with sess.graph.device("/cpu:1"):
v2 = tf.Variable(2, name="v2")
# v3 is an unsaved variable derived from v1 and v2. It is used to
# exercise the ability to run an init op when restoring a graph.
v3 = tf.Variable(1, name="v3", trainable=False, collections=[])
assign_v3 = tf.assign(v3, tf.add(v1, v2))
init_op = tf.group(assign_v3, name="init_op")
tf.add_to_collection("v", v1)
tf.add_to_collection("v", v2)
tf.add_to_collection("v", v3)
tf.add_to_collection("init_op", init_op)
sess.run(tf.initialize_all_variables())
self.assertEqual(1, tf.get_collection("v")[0].eval())
self.assertEqual(2, tf.get_collection("v")[1].eval())
builder.add_meta_graph_and_variables(sess, ["foo"])
# Save the SavedModel to disk.
builder.save()
with tf.Session(
graph=tf.Graph(),
config=config_pb2.ConfigProto(device_count={"CPU": 2})) as sess:
loader.load(sess, ["foo"], export_dir)
# Validate variables, run the init op and verify result.
self.assertEqual(1, tf.get_collection("v")[0].eval())
self.assertEqual(2, tf.get_collection("v")[1].eval())
tf.get_collection("init_op")[0].run()
self.assertEqual(3, tf.get_collection("v")[2].eval())
if __name__ == "__main__":
tf.test.main()
| mrry/tensorflow | tensorflow/python/saved_model/saved_model_test.py | Python | apache-2.0 | 17,915 |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
from .api import asex
class ParallelExec:
def __init__(self, cmd_str_lst):
self.cmd_str_lst = cmd_str_lst
self.runcmd_lst = [asex(cmd) for cmd in cmd_str_lst]
def wait(self):
for p in self.runcmd_lst:
p.wait()
return self
def poll(self):
all_done = True
for p in self.runcmd_lst:
if p.poll() is None:
all_done = False
break
if all_done:
return self.runcmd_lst
else:
return None
def cmds(self):
return self.runcmd_lst
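# Usage sketch (assumes asex() returns objects with a subprocess-style
# wait()/poll() interface, as the methods above rely on):
#   pe = ParallelExec(['sleep 1', 'echo done'])
#   pe.wait()          # block until every command finishes
#   done = pe.poll()   # list of finished commands, or None if still running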
| houqp/shell.py | shell/parallel_exec.py | Python | mit | 636 |
import math
import pygame
import time
from random import uniform, choice
from itertools import cycle
import binball_game.collision as collision
import binball_game.events as events
class Point():
def __init__(self, x, y=None):
self.x = x
self.y = y
def copy(self):
return Point(self.x, self.y)
def ints(self):
return (int(self.x), int(self.y))
def dot(self, v):
return self.x*v.x + self.y*v.y
def __add__(self, v):
return Point(self.x+v.x, self.y+v.y)
def __sub__(self, v):
return Point(self.x-v.x, self.y-v.y)
def __mul__(self, v):
try:
return Point(self.x*v.x, self.y*v.y)
except AttributeError:
return Point(self.x*v, self.y*v)
def __truediv__(self, v):
try:
return Point(self.x/v.x, self.y/v.y)
except AttributeError:
return Point(self.x/v, self.y/v)
def __neg__(self):
return Point(self.x, self.y) * Point(-1, -1)
def __str__(self):
return "Point({:.3f}, {:.3f})".format(self.x, self.y)
def __repr__(self):
return str(self)
class Segment():
"""Line segment with which ball can interact
Parameters
----------
a : tuple
Location of beginning of segment
b : tuple
Location of ending of segment
Attributes
----------
angle : float
angle of segment in radians, where a horizontal segment is 0 or pi
"""
def __init__(self, a, b, value=0, noise='seg2', color=(0,0,0)):
self.a = Point(*a)
self.b = Point(*b)
self.angle = (math.atan2(self.b.x-self.a.x, self.b.y-self.a.y) + math.pi/2) % (2*math.pi)
self.value = value
self.noise = noise
self.color = color
self.thickness = 10
def __repr__(self):
base = '{}({}\n{}\nAngle: {:.2f})\n'
return base.format(self.__class__.__name__, self.a, self.b, self.angle)
class Platforms():
""" """
def __init__(self, start_pt1, start_pt2, noise=''):
self.seg_1 = Segment(start_pt1, (start_pt1[0]+50, start_pt1[1]))
self.seg_2 = Segment(start_pt2,
(start_pt2[0]+50, start_pt2[1]),
color=(184, 199, 224))
self.distance = 600-41-200-50
range_ = range(start_pt1[0], start_pt1[0]+self.distance, 2)
self.pos_gen = cycle((*range_, *range_[::-1]))
def update(self):
new_pos = next(self.pos_gen)
self.seg_1.a.x = new_pos
self.seg_1.b.x = new_pos + 50
self.seg_2.a.x = new_pos
self.seg_2.b.x = new_pos + 50
class Particle():
""" A circular object with a velocity, size and mass """
def __init__(self, x, y, size, value=0, noise='jump', bin_gravity=0.01):
self.x = x
self.y = y
self.size = size
self.noise = noise
self.value = value
self.pos = Point(x, y)
self.color = (0, 0, 255)
self.thickness = 0
self.max_speed = 25
self._speed = 0
self.angle = math.pi/2
self.mass = 1
        self.drag = 1  # no drag (a value like 0.998 would slow the ball each frame)
self.elasticity = 0.82
self.original_gravity = (3/2*math.pi, 0.065)
self.bin_gravity = (3/2*math.pi, bin_gravity)
self.gravity = self.original_gravity
self.score = 0
self.collision_partner = None
def __repr__(self):
return 'Particle({})'.format(self.pos)
@property
def speed(self):
return self._speed
@speed.setter
def speed(self, val):
"""Limit speed so ball can't pass through objects or move too fast"""
#self._speed = min(.5*self.size-1, val)
self._speed = min(self.max_speed, val)
def move(self):
self.angle, self.speed = self.addVectors(self.angle,
self.speed,
self.gravity[0],
self.gravity[1])
self.x += math.cos(self.angle) * self.speed
self.y -= math.sin(self.angle) * self.speed
self.pos = Point(self.x, self.y)
self.speed *= self.drag
def wall_bounce(self, width, height):
if self.x > width - self.size:
self.x = 2*(width - self.size) - self.x
self.angle = (math.pi - self.angle) % (2*math.pi)
self.speed *= self.elasticity
elif self.x < self.size:
self.x = 2*self.size - self.x
self.angle = (math.pi - self.angle) % (2*math.pi)
self.speed *= self.elasticity
if self.y > height - self.size:
self.y = 2*(height - self.size) - self.y
self.angle = -self.angle % (2*math.pi)
self.speed *= self.elasticity
elif self.y < self.size:
self.y = 2*self.size - self.y
self.angle = - self.angle % (2*math.pi)
self.speed *= self.elasticity
def seg_bounce(self, segment_list):
"""Check for collision with all segments. Update attributes appropriately.
Parameters
----------
segment_list : [Segment]
All segments in the model
"""
for seg in segment_list:
did_collide = collision.segment_particle(seg, self)
if did_collide:
self.collision_partner = seg
self.angle = (2*seg.angle - self.angle) % (2*math.pi)
self.speed *= self.elasticity
while collision.segment_particle(seg, self):
self.x += math.cos(self.angle)
self.y -= math.sin(self.angle)
self.pos = Point(self.x, self.y)
def particle_bounce(self, particle_list):
"""Check for collision with all particles. Update attributes appropriately.
Parameters
----------
segment_list : [Particle]
All particles in the model
"""
for particle in particle_list:
collision_occurs = collision.ball_circle(self, particle, True)
if collision_occurs:
self.collision_partner = particle
self.speed *= self.elasticity
def bounce(self, width, height, segment_list, particle_list):
self.wall_bounce(width, height)
self.seg_bounce(segment_list)
self.particle_bounce(particle_list)
def addVectors(self,angle1, length1, angle2, length2):
""" Returns the sum of two vectors """
x = math.sin(angle1) * length1 + math.sin(angle2) * length2
y = math.cos(angle1) * length1 + math.cos(angle2) * length2
angle = math.atan2(x, y) % (2*math.pi)
length = math.hypot(x, y)
return (angle, length)
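    # e.g. addVectors(math.pi/2, 1, math.pi/2, 1) == (math.pi/2, 2.0) up to
    # float rounding: two parallel unit vectors sum to twice the length.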
class Coin(Particle):
"""An circular object with a value """
def __init__(self, x, y, size, value, noise='coins'):
super().__init__(x, y, size, value=value, noise=noise)
self.color = (255,215,0)
self.coin_timer = 0
def pressed_bonus(self):
        self.coin_timer = time.time()
class Tube(Particle):
def __init__(self, x, y, size, drop_spot, ejection_angle,
value=85, noise='suck'):
super().__init__(x, y, size, value=value, noise=noise)
self.drop_spot = drop_spot
self.ejection_angle = ejection_angle
self.color = (22, 153, 19)
class TubeManager():
"""Repsonsible for controlling and updating Tube components
Notes
-----
This departs from the style of the rest of the components.
Usually collision detection and updating is handled by the Model.
Because the tubes are 'connected', this is a good opportunity to test this style.
Parameters
----------
tube_list
"""
def __init__(self, tube_list):
self.tube_list = tube_list
def teleport_ball(self, ball, tube):
"""Eject the ball from the drop spot of a different tube
Parameters
----------
ball : Particle
Player ball
tube : Tube
Tube with which the ball originally collided
"""
other_tubes = [t for t in self.tube_list if t is not tube]
new_tube = choice(other_tubes)
ball.x, ball.y = new_tube.drop_spot
ball.angle = new_tube.ejection_angle + uniform(-.05, .05)
def update(self, ball):
"""Checks for ball collisions and updates state appropriately.
Parameters
----------
ball : Particle
Player ball
Returns
-------
did_collide : bool
True if ball interacted with one of the tubes
points : int
Value of tube doing the transporting
"""
points = 0
for tube in self.tube_list:
did_collide = collision.ball_circle(ball, tube)
if did_collide:
points = tube.value
self.teleport_ball(ball, tube)
break
return did_collide, points
class Bin():
reload_time = 3
last_pressed = 0
reloaded = True
def __init__(self, num, rekt, color, noise):
self.num = num
self.rekt = rekt
self.color = color
self.noise = noise
self.out_noise = 'flipper'
self._active = False
self.active_color = (0, 0, 0)
self.locked_color = (255, 255, 255)
self.original_color = color
@property
def active(self):
return self._active
@active.setter
def active(self, value):
self._active = value
if self.active:
self.color = self.active_color
else:
self.color = self.original_color
def pressed_event(self, ball):
"""Key press is only valid if the Bins are currently reloaded"""
message = None
if Bin.reloaded:
Bin.last_pressed = time.time()
message = self.do_key_press(ball)
else:
message = events.PressedBinEval(self.num, False)
return message
def do_key_press(self, ball):
message = events.PressedBinEval(self.num, True)
if self.rekt.collidepoint(ball.x, ball.y):
Bin.last_pressed = 0
message = events.PressedBinEval(self.num, 'collide')
ball.speed = ball.max_speed * .75
frac_of_bin = ((ball.y-self.rekt.top)/self.rekt.height)
ball.angle = (0.25 + frac_of_bin*0.5)*math.pi
ball.gravity = ball.original_gravity
ball.y = self.rekt.top - 15
self.active = False
return message
def update(self, bin_list):
"""Change the color if reload state changes"""
#TODO This can be cleaner.
#Not sure how to do this with @property since Bin.reloaded is a class attribute
old_state = Bin.reloaded
Bin.reloaded = time.time() >= Bin.reload_time + Bin.last_pressed
switched = old_state != Bin.reloaded
if switched and Bin.reloaded:
for bin_ in bin_list:
bin_.color = bin_.original_color
elif switched:
for bin_ in bin_list:
bin_.color = bin_.locked_color
class Spinner():
"""Component that spins and flashes when activated by ball.
Spinners are found in tunnels and freeze the ball while they're spinning.
Parameters
----------
rekt : pygame.Rect
Location of spinner
value : int (default=70)
Points scored if ball interacts with component
noise : str (default=spin)
Name of mp3 to play when spinning
Attributes
----------
original_color : (int)
Color of rekt when not spinning
color : (int)
Current color of rekt. Flashes when activated
spinning : bool
True if spinner has collided with ball and is currently activate
spin_counter : int
Number of frames spent spinning
spin_left : int
Number of frames left to spin
"""
def __init__(self, rekt, value=75, noise='spin'):
self.rekt = rekt
self.value = value
self.noise = noise
self.original_color = (50, 100, 150)
self.color = self.original_color
self.spinning = False
self.spin_counter = 100
self.spin_left = self.spin_counter
def update(self):
if self.spinning:
self.spin_left -= 1
if self.spin_left % 10 == 0:
if self.color == self.original_color:
self.color = (150, 100, 50)
else:
self.color = self.original_color
if self.spin_left == 0:
self.spin_left = 100
self.spinning = False
class Flipper():
"""Creates left and right flippers the player controls to hit the ball
Parameters
----------
a : Point
Location of the base of flipper
b : Point
Location of the rotation end of flipper
on_angle : float
radian angle of flipper at the top of rotation when user flippers
side : str (default='l')
Indicates if flipper is on left or right side of board
Attributes
----------
rot : int
Makes flipper rotate clockwise (-1) or counter-clockwise (1)
len : float
Length of flipper
angle : float
Current angle of flipper.
off_angle : float
radian angle of flipper at the bottom of rotation when user flippers
flip_up : bool
Is True after user 'flips', until angle ~= on_angle
flip_down : bool
Is True after angle ~= on_angle, until angle ~= off_angle
thickness : int
Visual thinkness of line
"""
def __init__(self, a, b, on_angle, side='l'):
self.a = a
self.b = b
self.on_angle = on_angle
self.rot = 1 if side == 'l' else -1
self.len = math.hypot(self.b.x - self.a.x, self.b.y - self.a.y)
self.angle = (math.atan2(a.x-b.x, a.y-b.y) + math.pi/2) % (2*math.pi)
self.off_angle = self.angle
self.flip_up = False
self.flip_down = False
self.thickness = 1
self.value = 0
self.noise = 'flipper'
def move(self):
"""change flipper end position while flipping"""
if self.flip_up:
self.angle += (.09 * self.rot)
elif self.flip_down:
self.angle -= (.09 * self.rot)
self.angle %= 2*math.pi
self.b.x = self.a.x + math.cos(self.angle) * self.len
self.b.y = self.a.y - math.sin(self.angle) * self.len
    def test_flip_limit(self):
        pass
def update(self):
"""Check flipping state and adjust angle and state accordingly"""
delta = .15
if self.flip_up:
self.move()
if self.on_angle - delta <= self.angle <= self.on_angle + delta:
self.flip_up = False
self.flip_down = True
elif self.flip_down:
self.move()
if self.off_angle - delta <= self.angle <= self.off_angle + delta:
self.flip_down = False
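    # Flip sketch (uses init_left_flipper() defined later in this module):
    #   flipper = init_left_flipper()
    #   flipper.flip_up = True   # set on player key press
    #   flipper.update()         # called once per frame; sweeps toward on_angle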
class CurveBall(Particle):
"""Slowly increments the balls angle while in effect field
"""
def __init__(self, x, y, size, curve=.075, value=2, noise='chimes'):
super().__init__(x, y, size, value=value, noise=noise)
self.curve = curve
self.color = (142, 19, 214)
def init_coin_list(width, height):
coin_list = [
# Coin(width-20, 200,9,50), #test coin
# Coin(width-20, 600,9,50) #test coin
Coin(80,810,9,25), #lt.1
Coin(112,822,9,25), #lt.4
Coin(95,777,9,25), #lt.2
Coin(110,740,9,25), #lt.3
Coin(144,835,9,25), #lt.6
Coin(125,790,9,25), #lt.5
Coin(width-41-80,810,9,25), #lrt.1
Coin(width-41-112,822,9,25), #rt.4
Coin(width-41-95,777,9,25), #rt.2
Coin(width-41-110,740,9,25), #rt.3
Coin(width-41-144,835,9,25), #rt.6
Coin(width-41-125,790,9,25), #rt.5
Coin(30,20,15,100),
Coin(540,323,12,100),
#around main curver
Coin(188,500,9,25),
Coin(312,500,9,25),
Coin(250,438,9,25),
Coin(250,562,9,25),
Coin(280,552,9,25),
Coin(302,530,9,25),
Coin(280,448,9,25),
Coin(302,470,9,25),
Coin(198,470,9,25),
Coin(198,530,9,25),
Coin(220,552,9,25),
Coin(220,448,9,25),
Coin(250,500,12,100) #middle coin curver
]
for c in range(110,490,38):
coin_list.append(Coin(c,85,9,25))
return coin_list
def init_launch_runway(width, height):
return pygame.Rect(width-1-40,150,40,height-150)
def init_ball(bin_gravity):
return Particle(599-16,1000-15,15,bin_gravity=bin_gravity)
# return Particle(200, 50, 15,bin_gravity=bin_gravity) #testing platforms
def init_bin_list():
bins = [Bin(0, pygame.Rect(150,912,40,48), (255, 0, 255), 'note1'),
Bin(1, pygame.Rect(150+40,912,80,48), (0, 255, 0), 'note2'),
Bin(2, pygame.Rect(290,912,80,48), (255, 0, 0), 'note3'),
Bin(3, pygame.Rect(290+80,912,40,48), (0, 255, 255), 'note4')]
return bins
def init_spinner_list():
spin = [Spinner(pygame.Rect(482, 400, 25, 25)), #left
Spinner(pygame.Rect(5, 275, 25, 25)), #top
Spinner(pygame.Rect(88, 0, 25, 25))] #right
return spin
def init_tube_list(width):
tube_list = [Tube(17, 50, 7, (17, 20), .25*math.pi), #top left corner
Tube(width - 60, 425, 7, (width-75, 440), 1.4*math.pi), # middle right
Tube(140, 15, 7, (111, 35), 1.5*math.pi)]
return tube_list
def init_curver_list():
curver_list = [CurveBall(250, 500, 50),
CurveBall(525, 250, 25),
CurveBall(520, 200, 20),
CurveBall(490, 290, 20)]
return curver_list
def init_platforms():
return Platforms((100,100),(100,650))
def init_left_flipper():
flipper_left = Flipper(Point(150, 912),
Point(245, 960),
1.57)
return flipper_left
def init_right_flipper():
flipper_right = Flipper(Point(410, 912),
Point(315, 960),
1.57, 'r')
return flipper_right
def init_segment_list(width, height):
segment_data = [((width-1-40, height-1), (width-1-40,150)), #shooter line
((width-1, 25), (width-1-25,0),1), #top right corner
((75, 0), (0,100),10), #top left corner
((width-1-40,837), (410,912)), #right funnel
((0,837), (150,912)), #left funnel
((260, 370), (310, 390),20), #Middle
((55,820), (100,700)), #left triangle pt1
((55,820), (150,860)), #left triangle pt2
((410,860), (width-100,820)), #right triangle pt2
((width-1-141,700), (width-100,820)),#right triangle pt3
((width-1-40, 250), (width-1-150, 450)), #right tunnel top
((width-1-40, 325), (width-1-150, 550)), #right tunnel bottom
((35, 275), (100, 400)), #left tunnel top
((0, 300), (75, 440)), #left tunnel bottom
((80, 0), (78, 25)), # small top tunnel left
((120, 0), (122, 25)), # small top tunnel right
]
segment_list = [Segment(*d) for d in segment_data]
return segment_list
def init_particle_list():
particle_data = [(295, 355, 25,10), #2
(245, 285, 25,10), #1
(345, 270, 25,10), #3
(50, 520, 10,10), #1
(100, 550, 10,10), #3
(55, 585, 10,10) #2
]
particle_list = [Particle(*d) for d in particle_data]
return particle_list
def cap(width):
launch_cap = Segment((width-1-40,150),(width-1,125))
return launch_cap
def init_components(width, height, bin_gravity):
"""Set all the pieces of the game board to their proper locations
Parameters
----------
width : int
width of screen
height : int
height of screen
Returns
-------
components_dict : dict
wrapper around all different types of components
"""
components_dict = {}
components_dict['launch_runway'] = init_launch_runway(width,height)
components_dict['ball'] = init_ball(bin_gravity)
components_dict['bin_list'] = init_bin_list()
components_dict['spinner_list'] = init_spinner_list()
components_dict['tube_manager'] = TubeManager(init_tube_list(width))
components_dict['curver_list'] = init_curver_list()
components_dict['coin_list'] = init_coin_list(width,height)
components_dict['platforms'] = init_platforms()
components_dict['segment_list'] = init_segment_list(width,height)
components_dict['particle_list'] = init_particle_list()
return components_dict
| Jessime/MazeDay2017 | src/binball_game/components.py | Python | mit | 21,274 |
# -*- coding: utf-8 -*-
"""
***************************************************************************
ResultsDialog.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from PyQt4 import uic
from PyQt4.QtCore import QUrl
from PyQt4.QtGui import QIcon, QStyle, QTreeWidgetItem
from processing.core.ProcessingResults import ProcessingResults
pluginPath = os.path.split(os.path.dirname(__file__))[0]
WIDGET, BASE = uic.loadUiType(
os.path.join(pluginPath, 'ui', 'DlgResults.ui'))
class ResultsDialog(BASE, WIDGET):
def __init__(self):
super(ResultsDialog, self).__init__(None)
self.setupUi(self)
self.keyIcon = QIcon()
self.keyIcon.addPixmap(self.style().standardPixmap(QStyle.SP_FileIcon))
self.tree.itemClicked.connect(self.changeResult)
self.fillTree()
if self.lastUrl:
self.webView.load(self.lastUrl)
def fillTree(self):
elements = ProcessingResults.getResults()
if len(elements) == 0:
self.lastUrl = None
return
for element in elements:
item = TreeResultItem(element)
item.setIcon(0, self.keyIcon)
self.tree.addTopLevelItem(item)
self.lastUrl = QUrl(elements[-1].filename)
def changeResult(self):
item = self.tree.currentItem()
if isinstance(item, TreeResultItem):
url = QUrl(item.filename)
self.webView.load(url)
class TreeResultItem(QTreeWidgetItem):
def __init__(self, result):
QTreeWidgetItem.__init__(self)
self.filename = result.filename
self.setText(0, result.name)
| sebastic/QGIS | python/plugins/processing/gui/ResultsDialog.py | Python | gpl-2.0 | 2,576 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#windowController.py
from PyQt4 import QtGui
import sys, multiprocessing
import mainWindow, windowListerner
class QWindowsController(multiprocessing.Process):
def __init__(self, messageBox):
super(QWindowsController, self).__init__()
self.messageBox = messageBox
def run(self):
app = QtGui.QApplication(sys.argv)
QMain = mainWindow.QMainWindow()
# Thread that handles UI messages
wListerner = windowListerner.QWindowListerner(QMain, self.messageBox)
wListerner.start()
# Show the main window
QMain.show()
sys.exit(app.exec_())
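# Example usage (an illustrative sketch, not part of the original module;
# assumes the parent process owns the shared message queue):
#
#   if __name__ == '__main__':
#       messageBox = multiprocessing.Queue()
#       controller = QWindowsController(messageBox)
#       controller.start()   # runs the Qt event loop in a child process
#       controller.join()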
| sharmaking/CoIntegrationAnalysis | windowController.py | Python | mit | 579 |
from .rpc import *
from .pcic import *
| cfreundl/o3d3xx-python | o3d3xx/__init__.py | Python | mit | 40 |
# coding=utf-8
# Copyright 2021 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for CrownIBP."""
import functools
from absl.testing import absltest
from absl.testing import parameterized
import haiku as hk
import jax
import jax.numpy as jnp
import jax_verify
class CrownIBPBoundTest(parameterized.TestCase):
def assertArrayAlmostEqual(self, lhs, rhs):
diff = jnp.abs(lhs - rhs).max()
self.assertAlmostEqual(diff, 0., delta=1e-5)
def test_fc_crownibp(self):
@hk.without_apply_rng
@hk.transform
def linear_model(inp):
return hk.Linear(1)(inp)
z = jnp.array([[1., 2., 3.]])
params = {'linear':
{'w': jnp.ones((3, 1), dtype=jnp.float32),
'b': jnp.array([2.])}}
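# With w = [1, 1, 1], b = 2 and inputs in [z-1, z+1] = [[0, 1, 2], [2, 3, 4]],
# interval arithmetic gives output bounds [0+1+2+2, 2+3+4+2] = [5, 11],
# matching the assertions below.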
input_bounds = jax_verify.IntervalBound(z-1., z+1.)
fun = functools.partial(linear_model.apply, params)
output_bounds = jax_verify.crownibp_bound_propagation(
fun, input_bounds)
self.assertAlmostEqual(5., output_bounds.lower)
self.assertAlmostEqual(11., output_bounds.upper)
def test_conv2d_crownibp(self):
@hk.without_apply_rng
@hk.transform
def conv2d_model(inp):
return hk.Conv2D(output_channels=1, kernel_shape=(2, 2),
padding='VALID', stride=1, with_bias=True)(inp)
z = jnp.array([1., 2., 3., 4.])
z = jnp.reshape(z, [1, 2, 2, 1])
params = {'conv2_d':
{'w': jnp.ones((2, 2, 1, 1), dtype=jnp.float32),
'b': jnp.array([2.])}}
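# The 2x2 kernel of ones sums all four inputs; with b = 2 and inputs in
# [z-1, z+1], the bounds are [0+1+2+3+2, 2+3+4+5+2] = [8, 16].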
fun = functools.partial(conv2d_model.apply, params)
input_bounds = jax_verify.IntervalBound(z - 1., z + 1.)
output_bounds = jax_verify.crownibp_bound_propagation(
fun, input_bounds)
self.assertAlmostEqual(8., output_bounds.lower)
self.assertAlmostEqual(16., output_bounds.upper)
def test_relu_crownibp(self):
def relu_model(inp):
return jax.nn.relu(inp)
z = jnp.array([[-2., 3.]])
input_bounds = jax_verify.IntervalBound(z - 1., z + 1.)
output_bounds = jax_verify.crownibp_bound_propagation(
relu_model, input_bounds)
self.assertArrayAlmostEqual(jnp.array([[0., 2.]]), output_bounds.lower)
self.assertArrayAlmostEqual(jnp.array([[0., 4.]]), output_bounds.upper)
if __name__ == '__main__':
absltest.main()
| deepmind/jax_verify | jax_verify/tests/crownibp_test.py | Python | apache-2.0 | 2,800 |
#
# Notification Originator
#
# Send SNMP INFORM notification using the following options:
#
# * SNMPv3
# * with user 'usr-md5-none', auth: MD5, priv NONE
# * over IPv4/UDP
# * using Trollius framework for network transport
# * to a Manager at 127.0.0.1:162
# * send INFORM notification
# * with TRAP ID 'coldStart' specified as an OID
# * include managed object information 1.3.6.1.2.1.1.5.0 = 'system name'
#
# Requires Trollius framework!
#
from pysnmp.entity import engine, config
from pysnmp.entity.rfc3413 import context
from pysnmp.entity.rfc3413.asyncio import ntforg
from pysnmp.carrier.asyncio.dgram import udp
from pysnmp.proto import rfc1902
import trollius
# Get the event loop for this thread
loop = trollius.get_event_loop()
# Create SNMP engine instance
snmpEngine = engine.SnmpEngine()
# SNMPv3/USM setup
# Add USM user
config.addV3User(
snmpEngine, 'usr-md5-none',
config.usmHMACMD5AuthProtocol, 'authkey1'
)
config.addTargetParams(snmpEngine, 'my-creds', 'usr-md5-none', 'authNoPriv')
# Transport setup
#
# Setup transport endpoint and bind it with security settings yielding
# a target name. Since Notifications could be sent to multiple Managers
# at once, more than one target entry may be configured (and tagged).
#
config.addTransport(
snmpEngine,
udp.domainName,
udp.UdpTransport().openClientMode()
)
config.addTargetAddr(
snmpEngine, 'my-nms',
udp.domainName, ('127.0.0.1', 162),
'my-creds',
tagList='all-my-managers'
)
# Specify what kind of notification should be sent (TRAP or INFORM),
# to what targets (chosen by tag) and what filter should apply to
# the set of targets (selected by tag)
config.addNotificationTarget(
snmpEngine, 'my-notification', 'my-filter', 'all-my-managers', 'inform'
)
# Allow NOTIFY access to Agent's MIB by this SNMP model (3), securityLevel
# and SecurityName
config.addContext(snmpEngine, '')
config.addVacmUser(snmpEngine, 3, 'usr-md5-none', 'authNoPriv', (), (), (1,3,6))
@trollius.coroutine
def snmpOperation(snmpEngine, target, snmpContext, contextName,
notificationName, instanceIndex, additionalVarBinds):
( snmpEngine,
errorIndication,
errorStatus,
errorIndex,
varBinds ) = yield trollius.From(
ntforg.NotificationOriginator().sendVarBinds(
snmpEngine,
target,
snmpContext,
contextName,
notificationName,
instanceIndex,
additionalVarBinds
)
)
print('Notification status - %s' % (
errorIndication and errorIndication or 'delivered'
)
)
# This also terminates internal timer
config.delTransport(
snmpEngine,
udp.domainName
).closeTransport()
# Initiate sending SNMP message
loop.run_until_complete(
snmpOperation(
snmpEngine,
# Notification targets
'my-notification',
# Default SNMP context where contextEngineId == SnmpEngineId
context.SnmpContext(snmpEngine),
# contextName
'',
# notification name (SNMPv2-MIB::coldStart)
(1,3,6,1,6,3,1,1,5,1),
# notification objects instance index
None,
# additional var-binds: ( (oid, value), ... )
[ ((1,3,6,1,2,1,1,5,0), rfc1902.OctetString('system name')) ]
)
)
# Clear the event loop
loop.close()
| ww9rivers/pysnmp | examples/v3arch/trollius/agent/ntforg/inform-v3.py | Python | bsd-2-clause | 3,361 |
# Copyright 2014 DreamHost, LLC
#
# Author: DreamHost, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext as _
from django.core.urlresolvers import reverse
from horizon import forms
from horizon import messages
from horizon import exceptions
from akanda.horizon import common
from akanda.horizon.api import neutron_extensions_client
from akanda.horizon.tabs import alias_tab_redirect
class BasePortAliasForm(forms.SelfHandlingForm):
"""
"""
id = forms.CharField(label=_("Id"),
widget=forms.HiddenInput, required=False)
alias_name = forms.CharField(label=_("Name"), max_length=255)
protocol = forms.ChoiceField(label=_("Protocol"),
choices=common.NEW_PROTOCOL_CHOICES)
port = forms.IntegerField(label=_("Port Number"), min_value=0,
max_value=65536)
class CreatePortAliasForm(BasePortAliasForm):
"""
"""
def handle(self, request, data):
try:
result = self._create_port_alias(request, data)
messages.success(
request,
_('Successfully created port alias: %s') % data['alias_name'])
return result
except:
redirect = "%s?tab=%s" % (
reverse("horizon:project:networking:index"),
alias_tab_redirect())
exceptions.handle(request, _('Unable to create port alias.'),
redirect=redirect)
def _create_port_alias(self, request, data):
return neutron_extensions_client.portalias_create(request, data)
class EditPortAliasForm(BasePortAliasForm):
"""
"""
def handle(self, request, data):
try:
result = self._update_port_alias(request, data)
messages.success(
request,
_('Successfully updated port alias: %s') % data['alias_name'])
return result
except:
redirect = "%s?tab=%s" % (
reverse("horizon:project:networking:index"),
alias_tab_redirect())
exceptions.handle(request, _('Unable to edit port alias.'),
redirect=redirect)
def _update_port_alias(self, request, data):
return neutron_extensions_client.portalias_update(request, data)
| dreamhost/akanda-horizon | akanda/horizon/alias/forms/ports.py | Python | apache-2.0 | 2,876 |
# -*- test-case-name: txorm.test.test_property -*-
# Copyright (c) 2014 Oscar Campos <[email protected]>
# See LICENSE for details
from __future__ import unicode_literals
from .base import SimpleProperty
from ..variable._fraction import FractionVariable
class Fraction(SimpleProperty):
variable_class = FractionVariable
| DamnWidget/txorm | txorm/property/_fraction.py | Python | lgpl-3.0 | 338 |
"""
ldapurl - handling of LDAP URLs as described in RFC 2255
written by Michael Stroeder <[email protected]>
See http://python-ldap.sourceforge.net for details.
\$Id: ldapurl.py,v 1.32 2003/05/26 07:49:07 stroeder Exp $
Python compatibility note:
This module only works with Python 2.0+ since
1. string methods are used instead of module string and
2. list comprehensions are used.
"""
__version__ = '0.5.1'
__all__ = [
# constants
'SEARCH_SCOPE','SEARCH_SCOPE_STR',
'LDAP_SCOPE_BASE','LDAP_SCOPE_ONELEVEL','LDAP_SCOPE_SUBTREE',
# functions
'isLDAPUrl',
# classes
'LDAPUrlExtension','LDAPUrlExtensions','LDAPUrl'
]
import UserDict
from urllib import quote,unquote
LDAP_SCOPE_BASE = 0
LDAP_SCOPE_ONELEVEL = 1
LDAP_SCOPE_SUBTREE = 2
SEARCH_SCOPE_STR = {None:'',0:'base',1:'one',2:'sub'}
SEARCH_SCOPE = {
'':None,
# the search scope strings defined in RFC2255
'base':LDAP_SCOPE_BASE,
'one':LDAP_SCOPE_ONELEVEL,
'sub':LDAP_SCOPE_SUBTREE,
}
# Some widely used types
StringType = type('')
TupleType=type(())
def isLDAPUrl(s):
"""
Returns 1 if s is a LDAP URL, 0 else
"""
s_lower = s.lower()
return \
s_lower.startswith('ldap://') or \
s_lower.startswith('ldaps://') or \
s_lower.startswith('ldapi://')
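# Illustrative examples (the return value is truthy or falsy):
#   isLDAPUrl('ldap://localhost')  -> true value
#   isLDAPUrl('http://localhost')  -> false value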
def ldapUrlEscape(s):
"""Returns URL encoding of string s"""
return quote(s).replace(',','%2C').replace('/','%2F')
class LDAPUrlExtension:
"""
Class for parsing and unparsing LDAP URL extensions
as described in RFC 2255.
BNF definition of LDAP URL extensions:
extensions = extension *("," extension)
extension = ["!"] extype ["=" exvalue]
extype = token / xtoken
exvalue = LDAPString from section 4.1.2 of [2]
token = oid from section 4.1 of [3]
xtoken = ("X-" / "x-") token
Usable class attributes:
critical
Boolean integer marking the extension as critical
extype
Type of extension
exvalue
Value of extension
"""
def __init__(self,extensionStr=None,critical=0,extype=None,exvalue=None):
self.critical = critical
self.extype = extype
self.exvalue = exvalue
if extensionStr:
self._parse(extensionStr)
def _parse(self,extension):
extension = extension.strip()
if not extension:
# Don't parse empty strings
self.extype,self.exvalue = None,None
return
self.critical = extension[0]=='!'
if extension[0]=='!':
extension = extension[1:].strip()
self.extype,self.exvalue = extension.split('=',1)
self.extype = self.extype.strip()
self.exvalue = unquote(self.exvalue.strip())
def unparse(self):
return '%s%s=%s' % (
'!'*(self.critical>0),
self.extype,self.exvalue.replace(',',r'%2C')
)
def __str__(self):
return self.unparse()
def __repr__(self):
return '<%s.%s instance at %s: %s>' % (
self.__class__.__module__,
self.__class__.__name__,
hex(id(self)),
self.__dict__
)
def __eq__(self,other):
return \
(self.critical==other.critical) and \
(self.extype==other.extype) and \
(self.exvalue==other.exvalue)
def __ne__(self,other):
return not self.__eq__(other)
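# Example (an illustrative sketch; the bind DN below is hypothetical):
#   e = LDAPUrlExtension('!bindname=cn=admin,dc=example,dc=com')
#   e.critical, e.extype, e.exvalue
#   -> (True, 'bindname', 'cn=admin,dc=example,dc=com')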
class LDAPUrlExtensions(UserDict.UserDict):
"""
Models a collection of LDAP URL extensions as
dictionary type
"""
def __init__(self,default=None):
UserDict.UserDict.__init__(self)
for k,v in (default or {}).items():
self[k]=v
def __setitem__(self,name,value):
"""
value
Either LDAPUrlExtension instance, (critical,exvalue)
or string'ed exvalue
"""
assert isinstance(value,LDAPUrlExtension)
assert name==value.extype
self.data[name] = value
def values(self):
return [
self[k]
for k in self.keys()
]
def __str__(self):
return ','.join(map(str,self.values()))
def __repr__(self):
return '<%s.%s instance at %s: %s>' % (
self.__class__.__module__,
self.__class__.__name__,
hex(id(self)),
self.data
)
def __eq__(self,other):
assert isinstance(other,self.__class__),TypeError(
"other has to be instance of %s" % (self.__class__)
)
return self.data==other.data
def parse(self,extListStr):
for extension_str in extListStr.strip().split(','):
e = LDAPUrlExtension(extension_str)
self[e.extype] = e
def unparse(self):
return ','.join([ v.unparse() for v in self.values() ])
class LDAPUrl:
"""
Class for parsing and unparsing LDAP URLs
as described in RFC 2255.
BNF definition of LDAP URL:
hostport host:port
dn distinguished name
attributes list with attributes
scope search scope string
filter LDAP search filter
ldapurl = scheme "://" [hostport] ["/"
[dn ["?" [attrs] ["?" [scope]
["?" [filter] ["?" extensions]]]]]]
Usable class attributes:
urlscheme
URL scheme (either ldap, ldaps or ldapi)
hostport
LDAP host (default '')
dn
String holding distinguished name (default '')
attrs
list of attribute types (default None)
scope
integer search scope for ldap-module
filterstr
String representation of LDAP Search Filters
(see RFC 2254)
extensions
Dictionary used as extensions store
who
Maps automagically to bindname LDAP URL extension
cred
Maps automagically to X-BINDPW LDAP URL extension
"""
attr2extype = {'who':'bindname','cred':'X-BINDPW'}
def __init__(
self,
ldapUrl=None,
urlscheme='ldap',
hostport='',dn='',attrs=None,scope=None,filterstr=None,
extensions=None,
who=None,cred=None
):
self.urlscheme=urlscheme
self.hostport=hostport
self.dn=dn
self.attrs=attrs
self.scope=scope
self.filterstr=filterstr
self.extensions=(extensions or LDAPUrlExtensions({}))
if ldapUrl!=None:
self._parse(ldapUrl)
if who!=None:
self.who = who
if cred!=None:
self.cred = cred
def __eq__(self,other):
return \
self.urlscheme==other.urlscheme and \
self.hostport==other.hostport and \
self.dn==other.dn and \
self.attrs==other.attrs and \
self.scope==other.scope and \
self.filterstr==other.filterstr and \
self.extensions==other.extensions
def __ne__(self,other):
return not self.__eq__(other)
def _parse(self,ldap_url):
"""
parse a LDAP URL and set the class attributes
urlscheme,host,dn,attrs,scope,filterstr,extensions
"""
if not isLDAPUrl(ldap_url):
raise ValueError,'Parameter ldap_url does not seem to be a LDAP URL.'
scheme,rest = ldap_url.split('://',1)
self.urlscheme = scheme.strip()
if not self.urlscheme in ['ldap','ldaps','ldapi']:
raise ValueError,'LDAP URL contains unsupported URL scheme %s.' % (self.urlscheme)
slash_pos = rest.find('/')
qemark_pos = rest.find('?')
if (slash_pos==-1) and (qemark_pos==-1):
# No / and ? found at all
self.hostport = unquote(rest)
self.dn = ''
return
else:
if slash_pos!=-1 and (qemark_pos==-1 or (slash_pos<qemark_pos)):
# Slash separates DN from hostport
self.hostport = unquote(rest[:slash_pos])
# Eat the slash from rest
rest = rest[slash_pos+1:]
elif qemark_pos!=-1 and (slash_pos==-1 or (slash_pos>qemark_pos)):
# Question mark separates hostport from rest, DN is assumed to be empty
self.hostport = unquote(rest[:qemark_pos])
# Do not eat question mark
rest = rest[qemark_pos:]
else:
raise ValueError,'Something completely weird happened!'
paramlist=rest.split('?')
paramlist_len = len(paramlist)
if paramlist_len>=1:
self.dn = unquote(paramlist[0]).strip()
if (paramlist_len>=2) and (paramlist[1]):
self.attrs = unquote(paramlist[1].strip()).split(',')
if paramlist_len>=3:
scope = paramlist[2].strip()
try:
self.scope = SEARCH_SCOPE[scope]
except KeyError:
raise ValueError,"Search scope must be either one of base, one or sub. LDAP URL contained %s" % (repr(scope))
if paramlist_len>=4:
filterstr = paramlist[3].strip()
if not filterstr:
self.filterstr = None
else:
self.filterstr = unquote(filterstr)
if paramlist_len>=5:
self.extensions = LDAPUrlExtensions()
self.extensions.parse(paramlist[4])
return
def applyDefaults(self,defaults):
"""
Apply defaults to all class attributes which are None.
defaults
Dictionary containing a mapping from class attributes
to default values
"""
for k in defaults.keys():
if getattr(self,k) is None:
setattr(self,k,defaults[k])
def initializeUrl(self):
"""
Returns LDAP URL suitable to be passed to ldap.initialize()
"""
if self.urlscheme=='ldapi':
# hostport part might contain slashes when ldapi:// is used
hostport = ldapUrlEscape(self.hostport)
else:
hostport = self.hostport
return '%s://%s' % (self.urlscheme,hostport)
def unparse(self):
"""
Returns LDAP URL depending on class attributes set.
"""
if self.attrs is None:
attrs_str = ''
else:
attrs_str = ','.join(self.attrs)
scope_str = SEARCH_SCOPE_STR[self.scope]
if self.filterstr is None:
filterstr = ''
else:
filterstr = ldapUrlEscape(self.filterstr)
dn = ldapUrlEscape(self.dn)
if self.urlscheme=='ldapi':
# hostport part might contain slashes when ldapi:// is used
hostport = ldapUrlEscape(self.hostport)
else:
hostport = self.hostport
ldap_url = '%s://%s/%s?%s?%s?%s' % (
self.urlscheme,
hostport,dn,attrs_str,scope_str,filterstr
)
if self.extensions:
ldap_url = ldap_url+'?'+self.extensions.unparse()
return ldap_url
def htmlHREF(self,urlPrefix='',hrefText=None,hrefTarget=None):
"""Complete """
assert type(urlPrefix)==StringType, "urlPrefix must be StringType"
if hrefText is None:
hrefText = self.unparse()
assert type(hrefText)==StringType, "hrefText must be StringType"
if hrefTarget is None:
target = ''
else:
assert type(hrefTarget)==StringType, "hrefTarget must be StringType"
target = ' target="%s"' % hrefTarget
return '<a%s href="%s%s">%s</a>' % (
target,urlPrefix,self.unparse(),hrefText
)
def __str__(self):
return self.unparse()
def __repr__(self):
return '<%s.%s instance at %s: %s>' % (
self.__class__.__module__,
self.__class__.__name__,
hex(id(self)),
self.__dict__
)
def __getattr__(self,name):
if self.attr2extype.has_key(name):
extype = self.attr2extype[name]
if self.extensions.has_key(extype):
result = unquote(self.extensions[extype].exvalue)
else:
return None
else:
raise AttributeError,"%s has no attribute %s" % (
self.__class__.__name__,name
)
return result # __getattr__()
def __setattr__(self,name,value):
if self.attr2extype.has_key(name):
extype = self.attr2extype[name]
if value is None:
# A value of None means that extension is deleted
delattr(self,name)
elif value!=None:
# Add appropriate extension
self.extensions[extype] = LDAPUrlExtension(
extype=extype,exvalue=unquote(value)
)
else:
self.__dict__[name] = value
def __delattr__(self,name):
if self.attr2extype.has_key(name):
extype = self.attr2extype[name]
try:
del self.extensions[extype]
except KeyError:
pass
else:
del self.__dict__[name]
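# Example (an illustrative sketch; host and DN are hypothetical):
#   u = LDAPUrl('ldap://localhost:1389/dc=example,dc=com?cn,mail?sub?(objectClass=*)')
#   u.hostport  -> 'localhost:1389'
#   u.dn        -> 'dc=example,dc=com'
#   u.attrs     -> ['cn', 'mail']
#   u.scope     -> 2 (LDAP_SCOPE_SUBTREE)
#   u.filterstr -> '(objectClass=*)'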
| fxia22/ASM_xf | PythonD/lib/python2.4/site-packages/link/pyldap/ldapurl.py | Python | gpl-2.0 | 11,775 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os, codecs, shutil, string, unicodedata
from PyQt4 import QtCore, QtGui
from peewee import *
from views.newproject import Ui_newProjectWizard
from models.corpus import *
from controllers.mainwindow import project
class newProjectWizard(QtGui.QWizard, Ui_newProjectWizard):
def __init__(self, parent=None):
super (newProjectWizard, self).__init__(parent)
self.setupUi(self)
self.connectActions()
def connectActions(self):
self.projectPathButton.clicked.connect(self.openProjectPath)
self.corpusFilePathButton.clicked.connect(self.openCorpusFilePath)
self.button(newProjectWizard.FinishButton).clicked.connect(self.createProject)
self.wizardPage2.nextId = self.corpusSettingNextId
def openProjectPath(self):
directoryName = QtGui.QFileDialog.getExistingDirectory(self, u"Répertoire", QtCore.QDir.homePath())
if directoryName:
self.folderPath.setText(directoryName)
def openCorpusFilePath(self):
corpusFile = QtGui.QFileDialog.getOpenFileName(self, u"Corpus", QtCore.QDir.homePath(), "Corpus (*.xml)")
if corpusFile:
self.corpusFilePath.setText(corpusFile)
def corpusSettingNextId(self):
self.projectNameValue.setText(self.projectNameEdit.text())
self.projectFolderValue.setText(self.folderPath.text())
if self.newCorpusButton.isChecked():
self.corpusValue.setText(u'new corpus')
return 3
else:
self.corpusValue.setText(u'loaded from an existing file')
return 2
def sanitize(self, filename):
validFilenameChars = "-_.%s%s" % (string.ascii_letters, string.digits)
cleanedFilename = unicodedata.normalize('NFKD', filename)
cleanedFilename = cleanedFilename.replace(' ', '_')
return ''.join(c for c in cleanedFilename if c in validFilenameChars)
def createProject(self):
directoryName = self.folderPath.text()
project['name'] = unicode(self.projectNameEdit.text())
if directoryName and project['name']:
project['folder'] = unicode(directoryName) + "/" + self.sanitize(project['name'])
if not os.path.exists(project['folder']):
os.mkdir(project['folder'])
if self.newCorpusButton.isChecked():
corpusManager = CorpusManager()
corpus = corpusManager.createCorpus()
corpusFile = codecs.open(project['folder'] + "/" + self.sanitize(project['name']) + ".xml", 'w', 'utf-8')
corpusFile.write(corpus.toprettyxml())
elif self.existingCorpusButton.isChecked():
existingCorpus = unicode(self.corpusFilePath.text())
shutil.copyfile(existingCorpus, project['folder'] + "/" + self.sanitize(project['name']) + ".xml")
project['database'] = project['folder'] + "/" + self.sanitize(project['name']) + ".db"
exec 'from models.models import *'
Meta.create_table()
Video.create_table()
Sequence.create_table()
Tag.create_table()
Meta.create(projectName=project['name'])
if self.newCorpusButton.isChecked():
self.parent().settingsViewUi()
else :
self.parent().directViewUi()
self.parent().setWindowTitle('Flying Squirrel - ' + project['name'])
self.parent().activateMenuLinks()
else:
reply = QtGui.QMessageBox()
reply.setText(u"Ce répertoire existe déjà. Supprimez-le ou changez de nom de projet.")
reply.exec_()
| TopazaPyra/flying_squirrel | controllers/newprojectwizard.py | Python | gpl-2.0 | 4,097 |
"""
Utils for unittest
"""
class MockStorage(object):
def event(self, *args, **kwargs):
return {
"tasks": [],
"workers": [],
"tasks_average": {
"test1": 1,
"test2": 2,
"test3": 3,
},
"workers_average": {
"test1": 1,
"test2": 2,
"test3": 3,
}
}
| ikeikeikeike/celery-tracker | tests/utils.py | Python | mit | 436 |
#!/usr/bin/python2
import matplotlib, sys
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy
matplotlib.rc('font', **{'family': 'serif', 'serif': ['Computer Modern']})
matplotlib.rc('text', usetex=True)
import styles
import readandcompute
readandcompute.absolute_f('../data/scrunched-ww1.30-L2.83/i0/N003/absolute/')
| droundy/deft | papers/renormalization/figs/test.py | Python | gpl-2.0 | 336 |
#!/usr/bin/env python
import argparse
import csv
import random
import sys
import math
from math import inf
from textwrap import dedent
from editCIGAR import CIGAR
from textwrap import dedent
# Don't throw an error if output is piped into a program that doesn't
# read all of its input, like `head`.
from signal import signal, SIGPIPE, SIG_DFL
signal(SIGPIPE, SIG_DFL)
description = dedent("""
Randomly insert insertions and deletions into nucleotide sequences received
via standard input.
Expected input format: For every input line: A string of nucleotides
followed by a tab character, followed by a corresponding CIGAR
string. Example:
–––––––––––––––––––––––
seq cigar
GGTGACATAAAGGC 8M5I
TTCCGCAGGG 10M
CTCGTGGAGT 5M2D5M
....
–––––––––––––––––––––––
(white space stands for one tab character)
If no CIGAR strings are available for the nucleotides, then use the
parameter --cigar-new. In that case, only a nucleotide string per line is
expected. Every read is expected to have complete (mis)match to the
reference (CIGAR character M).
""")
def parse_arguments(argv):
p = argparse.ArgumentParser(description=description,
formatter_class= argparse.RawTextHelpFormatter)
for s,l in [('in','insert'),('del','deletion')]:
p.add_argument('--{}-prob'.format(s), default=0,
type = checkPositive(float), metavar='P', help=
"Per-base probability of a seqence {}".format(l))
p.add_argument('--{}-exp'.format(s), default=None, metavar = 'L',
type = float, help=dedent("""\
Length distribution of {}s shall be exponentially
distributed, with 50%% of reads longer than L""")\
.format(l))
p.add_argument('--cigar-new', default = False,
action='store_true', help=dedent("""\
Do not read CIGAR strings from standard input, but assume a
complete (mis)match (no indels, CIGAR character M) for every
nucleotide string."""))
p.add_argument('--col-seq', default = 'seq', type = str,
help = "Column name of the nucleotide strings")
p.add_argument('--col-cigar', default = 'cigar', type = str,
help = "Column name of the CIGAR strings")
# p.add_argument('--input-fmt', default = ['lines'],
# nargs='+', metavar='',
# help =dedent("""\
# Format of the file containing the CIGAR strings.
# Usage: --input-fmt lines
# --input-fmt tab COL-NUCL COL-CIGAR
# Choices:
# 'lines': One nucleotide and CIGAR string per input line
# (use --cigar-new to assume only M instead of giving
# CIGAR input)
#
# 'tab COL-NUCL COL-CIGAR': The file is in a tabular format.
# The first line contains the column names, the following
# files contain the content. Only the contents of the
# columns named COL-NUCL and COL-CIGAR are used. Columns
# are separated by a tab character (\\t), unless another
# character is specified by the --sep argument
# """))
# p.add_argument('--change-coords', nargs=2,
# help=dedent("""\
# If an output CIGAR string begins or ends with a deletion, change
# the true read coordinates instead. Else, such reads would not be
# assigned to their true position."""))
p.add_argument('--no-header',default = False, action = 'store_true',
help="""Do not expect a table header. Nucleotide strings
are expected as first input column and CIGAR strings as
second column. If no CIGAR strings are available, use
--cigar-new in addition to this option.""")
p.add_argument('--sep', default='\t', help=dedent("""\
Character separating the input columns if the input is
in tabular format (see --input-fmt). Common choices
are '\\t' (default), ',' or ';'."""))
p.add_argument('--seed', default = None, type=int,
help = dedent("""\
Set the random number generator seed to
this value. Calls with the same input files
and seed always produce the same result."""))
args = p.parse_args(argv)
if args.sep == '\\t':
args.sep = '\t'
if len(args.sep) != 1:
raise ValueError('--sep must be followed by only one character')
if args.in_exp is not None and (args.in_exp >= 1 or args.in_exp < 0):
raise ValueError('--in-exp must be >=0 and < 1')
if args.del_exp is not None and (args.del_exp >= 1 or args.del_exp < 0):
raise ValueError('--del-exp must be >=0 and < 1')
if args.in_prob < 0 or args.in_prob > 1:
raise ValueError('--in-prob must be >= 0 and <= 1')
if args.del_prob < 0 or args.del_prob > 1:
raise ValueError('--del-prob must be >= 0 and <= 1')
return args
def main(argv):
# --- Argument checks ----------------------------------------
args = parse_arguments(argv[1:])
if args.in_prob != 0:
if args.in_exp is None:
raise ValueError("--in-prob requires --in-exp")
if args.del_prob != 0:
if args.del_exp is None:
raise ValueError("--del-prob requires --del-exp")
if args.seed is not None:
random.seed(args.seed, version=2)
# --- Input parsing --------------------------------------------
input = sys.stdin
def safe_index(l, what):
try: return l.index(what)
except ValueError: return None
if not args.no_header:
header = next(input).rstrip().split(args.sep)
i_nucl, i_cigar = \
(safe_index(header, x) for x in \
(args.col_seq, args.col_cigar))#, cn_start, cn_stop))
if i_nucl is None:
raise ValueError(('The specified nucleotide column {} does '+
'not exist. Use the --col-seq parameter to set an '+
'existing column name.').format(args.col_seq))
if i_cigar is None and not args.cigar_new:
raise ValueError(('The specified CIGAR column {} does '+
'not exist. Use the --col-cigar parameter to set an '+
'existing column name or use the --cigar-new parameter '+
'to create new CIGAR strings.').format(args.col_cigar))
#i_rest = [i for i in range(0,len(header)) if i not in (i_nucl, i_cigar)]
else:
i_nucl, i_cigar = 0, 1
#i_rest = []
rows = (s.rstrip() for s in input)
fields = (s.split(args.sep) for s in rows if s != '')
if args.cigar_new:
if not args.no_header:
if not args.col_cigar in header:
header = header + [args.col_cigar]
else:
raise ValueError((
"The column name {} for the new CIGAR column "+
"already exists in the input. Choose a new column name "+
"using the --col-cigar option or omit the --cigar-new "+
"option if CIGAR strings exist already in the input.")
.format(args.col_cigar))
step1 = ( r + [addCIGAR(r[i_nucl])] for r in fields )
i_cigar = -1 # The last element of the row
else:
step1 = fields
step3 = mutatorExpLen(step1, args.in_prob
, args.in_exp, args.del_prob, args.del_exp, (i_nucl,i_cigar))
if not args.no_header:
print(args.sep.join(header))
for x in step3:
print(args.sep.join(x))
def splitter(tuples, idxs):
"""idxs: list of tuples of indices (integer). Returns a tuple with one
element for each element i in `idxs`. Return tuple i contains all tuples[j]
for all j in idxs[i].
>>> l = [(1,2,3), (4,5,6), (7,8,9)]
>>> list(splitter(l, [(1), (2,3)]))
[((1), (2, 3)), ((4), (5, 6)), ((7), (8, 9))]
"""
for n in tuples:
yield tuple(tuple(n[j] for j in idx) for idx in idxs)
def inputNormalizer(strings, sep):
"""Make sure no invalid data format (too many columns)
is specified and remove newlines"""
iLine = 0
ncols = None
while True:
s = next(strings)
iLine += 1
s.rstrip("\n")
s = s.split(sep)
if ncols is None: ncols = len(s)
if len(s) != ncols:
raise ValueError(("Invalid input in line {}: {} columns "+
"expected on every input line. Got: {}. (Wrong column separator?)")
.format(iLine, ncols, len(s)))
if any(c in x for c in [' ','\t'] for x in s):
raise ValueError(("Invalid input in line {}: Illegal Whitespace"+
"found in Nucleotide or CIGAR strings.").format(iLine))
yield s
def addCIGAR(nucl):
return str(len(nucl))+'M'
def mutatorExpLen(inputTuples, i_prob, i_len, d_prob, d_len, idxs=(0,1) ):
"""`inputTuples` is a interable on string tuples `(nucl, cigar)`.
`cigar` may also be of class CIGAR, this way unnessecary conversions
to and from strings can be avoided if this method is applied multiple
times on the same stream of data.
Insert/delete parts of the nucleotide strings `nucl` (the short read
nucleotide strings) and document the result by changing the CIGAR
strings provided by `cigar`. Operations are triggered with a constant
per-character (per-base) probability, and operation lengths are
geometrically distributed, parameterized by `i_len` and `d_len`,
respectively (see rGeom below).
idxs: use these elements of the inputTuples as (nucl, cigar). The
rest is copied to the output as-is.
"""
ln2 = math.log(2)
#rndPos = lambda p: math.floor(random.expovariate(p))+1 if p != 0 else inf
#rndLen = lambda s: math.floor(random.expovariate(ln2/s))+ 1 \
# if s is not None else None
rndPos = lambda p: rGeom(p)+1 if p != 0 else inf
rndLen = lambda s: rGeom(1-s)+1 if s is not None else None
toCIGAR = lambda x: CIGAR.fromString(x) if isinstance(x,str) else x
# State automaton:
# ================
# -- Possible states:
NEXT_STRING = 'next_str' # Optain a new nucleotide string
DET_NEXT_OP = 'det_op' # Determine if in/del takes place in this string
INSERT = 'insert' # Insert a sequence and get a new insert position
DELETE = 'delete' # Delete a sequence and get a new delete position
YIELD = 'yield' # Return current, possibly modified string to the caller
# -- Queue of next states (new states can be inserted at front and at
# end)
todo = [NEXT_STRING]
# -- Current state:
inputRecord = None
l = 0
nucl, cigar = "", ""
bpToNextIns = rndPos(i_prob)-1 # only for the first time may this also be 0
bpInsLen = rndLen(i_len)
bpToNextDel = rndPos(d_prob) # same as bpToNextIns
bpDelLen = rndLen(d_len)
# -- State transitions:
# The loop ends if StopIteration is thrown by next(.)
while True:
# Corner case: if all mutation probabilities are 0, return input
# unchanged
if bpToNextIns == inf and bpToNextDel == inf:
yield next(inputTuples)
continue
#print(",".join(todo))
do = todo.pop(0) if len(todo) > 0 else YIELD
# Tie break by random choice of one of the two actions (insert or
# delete. The other action is skipped => another exp-distributed
# number is added to it.
if do == NEXT_STRING:
bpToNextIns -= l
bpToNextDel -= l
inputRecord = next(inputTuples)
nucl = inputRecord[idxs[0]]
cigar = inputRecord[idxs[1]]
l = len(nucl)
todo.append(DET_NEXT_OP)
elif do == DET_NEXT_OP:
todo.clear()
if bpToNextIns < bpToNextDel:
# Check/queue insert operation first
if bpToNextIns < l: todo.append(INSERT)
if bpToNextDel < l: todo.append(DELETE)
elif bpToNextDel < bpToNextIns:
# Check/queue delete operation first
if bpToNextDel < l: todo.append(DELETE)
if bpToNextIns < l: todo.append(INSERT)
elif bpToNextIns == bpToNextDel:
assert not (bpToNextIns == inf and bpToNextDel == inf)
# Skip one of the two operations, randomly
if random.choice([True, False]):
bpToNextIns += rndPos(i_prob)
else:
bpToNextDel += rndPos(d_prob)
todo.insert(0, DET_NEXT_OP)
else: assert False
elif do == INSERT:
nucl = insertRandom(nucl, bpToNextIns, bpInsLen)
cigar = toCIGAR(cigar)
cigar.operationAt('I',bpInsLen, bpToNextIns)
l = len(nucl) # String gets longer
# Skip the insert when calculating the bp to the next operation
bpToNextDel += bpInsLen
bpToNextIns += rndPos(i_prob) + bpInsLen
bpInsLen = rndLen(i_len)
todo.insert(0, DET_NEXT_OP)
elif do == DELETE:
# Deletion stops at end of string if delete position is
# nearer at `nucl` string end than bpDelLen
nucl = nucl[:bpToNextDel] + nucl[(bpToNextDel+bpDelLen):]
cigar = toCIGAR(cigar)
effDelLen = min(l - bpToNextDel, bpDelLen)
cigar.operationAt('D',effDelLen, bpToNextDel)
l = len(nucl) # String gets shorter
# If an insert operation is pending, it must be recalculated if
# it is still on this nucleotide string, as that just got
# shorter.
bpToNextDel += rndPos(d_prob)
bpDelLen = rndLen(d_len)
todo.insert(0, DET_NEXT_OP)
elif do == YIELD:
inputRecord[idxs[0]] = nucl
inputRecord[idxs[1]] = str(cigar)
yield tuple(inputRecord)
todo.append(NEXT_STRING)
#print((nucl, str(cigar), f'I={bpToNextIns}/{bpInsLen}, D={bpToNextDel}/{bpDelLen}'))
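# Example usage (an illustrative sketch; output depends on the RNG state):
#   random.seed(42)
#   rows = [["ACGTACGT", "8M"], ["TTTTGGGG", "8M"]]
#   for row in mutatorExpLen(iter(rows), i_prob=0.1, i_len=0.5,
#                            d_prob=0.1, d_len=0.5):
#       print(row)  # -> (possibly mutated nucleotides, adjusted CIGAR)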
def insertRandom(string, pos, length):
"""Insert a random sequence into a string
>>> s = insertRandom('AAAAA', 2, 3)
>>> [s[0:2], s[5:8]]
['AA', 'AAA']
>>> all(x in 'ATCG' for x in s[2:5])
True
"""
s = "".join(random.choice(['A','T','G','C']) for _ in range(0,length))
string = string[:pos] + s + string[pos:]
return string
def rGeom(p):
"""Generate a geometrically distributed random number. p is the success
probability. The numbers are in the range {0, 1, 2,...}"""
# CDF = 1-(1-p)^(k+1) (CDF of geometric distribution)
# (1-p)^(k+1) = 1-CDF (... solve for k ...)
# k+1 = log(1-CDF)/log(1-p)
# k = (log(1-CDF)/log(1-p)) - 1
# insert a uniform random number in [0;1] for CDF to
# obtain geometrically distributed numbers
u = random.random()
if p == 1 : return 0
return math.ceil( (math.log(1-u,1-p))-1 )
def zip_samelen(*streams):
"""`streams` are multiple iterables. Does the same as zip, except if
it throws ValueError if not all streams throw StopIteration at the
same time.
>>> list(zip_samelen([1,2,3],[4,5,6]))
[(1, 4), (2, 5), (3, 6)]
>>> list(zip_samelen([1,2,3],[4,5,6,7]))
Traceback (most recent call last):
...
ValueError: The supplied inputs are of different lengths
"""
iters = [iter(s) for s in streams]
sentinel = object()
while True:
ret = tuple(next(s,sentinel) for s in iters)
if all(s is sentinel for s in ret):
return  # all inputs exhausted together; raising StopIteration inside a generator is a RuntimeError under PEP 479
elif any(s is sentinel for s in ret):
# Not all streams have ended
raise ValueError("The supplied inputs are of different lengths")
else:
yield ret
def checkPositive(expectedType):
"For use in parse_arguments"
def check(value):
try:
value = expectedType(value)
if value <= 0: raise ValueError()
except ValueError:
raise argparse.ArgumentTypeError(
("{} is an invalid positive {}"+
+"value").format(value,str(expectedType)))
else:
return value
return check
if __name__ == "__main__": main(sys.argv)
# vim:tw=75
| mlell/tapas | scripts/src/indel.py | Python | mit | 16,326 |
__author__ = "Ole Weidner"
__copyright__ = "Copyright 2012-2013, The SAGA Project"
__license__ = "MIT"
""" This examples shows how to run a job on a remote PBSPRO cluster
using the 'PBSPRO' job adaptor.
More information about the saga-python job API can be found at:
http://saga-project.github.com/saga-python/doc/library/job/index.html
"""
import sys
import saga
# ----------------------------------------------------------------------------
# This is an example for a callback function. Callback functions can be
# registered with a saga.Job object and get 'fired' asynchronously on
# certain conditions.
def job_state_change_cb(src_obj, fire_on, value):
print "Callback : job state changed to '%s'\n" % value
return True
# ----------------------------------------------------------------------------
#
def main():
try:
# Your ssh identity on the remote machine.
ctx = saga.Context("ssh")
# Change this e.g. if you have a different username on the remote machine
#ctx.user_id = "your_ssh_username"
session = saga.Session()
session.add_context(ctx)
# Create a job service object that represent a remote pbs cluster.
# The keyword 'pbs' in the url scheme triggers the PBS adaptors
# and '+ssh' enables PBS remote access via SSH.
js = saga.job.Service("pbspro+ssh://login.archer.ac.uk/",
session=session)
# Next, we describe the job we want to run. A complete set of job
# description attributes can be found in the API documentation.
jd = saga.job.Description()
jd.wall_time_limit = 1 # minutes
jd.executable = '/bin/date'
#jd.total_cpu_count = 12 # for lonestar this has to be a multiple of 12
#jd.spmd_variation = '12way' # translates to the qsub -pe flag
#jd.queue = "batch"
jd.project = "e291"
jd.output = "examplejob.out"
jd.error = "examplejob.err"
# Create a new job from the job description. The initial state of
# the job is 'New'.
job = js.create_job(jd)
# Register our callback. We want it to 'fire' on job state change
job.add_callback(saga.STATE, job_state_change_cb)
# Check our job's id and state
print "Job ID : %s" % (job.id)
print "Job State : %s" % (job.state)
# Now we can start our job.
print "\n...starting job...\n"
job.run()
print "Job ID : %s" % (job.id)
# List all jobs that are known by the adaptor.
# This should show our job as well.
print "\nListing active jobs: "
for jid in js.list():
print " * %s" % jid
# wait for our job to complete
print "\n...waiting for job...\n"
job.wait()
print "Job State : %s" % (job.state)
print "Exitcode : %s" % (job.exit_code)
print "Exec. hosts : %s" % (job.execution_hosts)
print "Create time : %s" % (job.created)
print "Start time : %s" % (job.started)
print "End time : %s" % (job.finished)
js.close()
return 0
except saga.SagaException, ex:
# Catch all saga exceptions
print "An exception occured: (%s) %s " % (ex.type, (str(ex)))
# Get the whole traceback in case of an exception -
# this can be helpful for debugging the problem
print " \n*** Backtrace:\n %s" % ex.traceback
return -1
if __name__ == "__main__":
sys.exit(main())
| telamonian/saga-python | examples/jobs/pbsprojob.py | Python | mit | 3,616 |
from setuptools import setup
setup(name='YT-Watcher', version='0.0.1', packages=['ytw'], url='', license='MIT', author='jrg', author_email='',
description='Youtube multi search tool', install_requires=['PySide', 'urllib3', 'certifi'])
| jr-garcia/YT-Watcher | setup.py | Python | mit | 242 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011-2014 Cubic ERP - Teradata SAC (<http://cubicerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv, fields
import openerp.addons.decimal_precision as dp
class account_journal(osv.Model):
_name = "account.journal"
_inherit = "account.journal"
_columns = {
'parent_invoice': fields.boolean('Has Parent Invoice'),
}
_defaults = {
'parent_invoice': False,
}
def create(self, cr, uid, values, context=None):
if not values.has_key('code'):
values['code'] = self.pool.get('ir.sequence').get(cr, uid, 'account.journal.code')
return super(account_journal, self).create(cr, uid, values, context=context)
class account_invoice(osv.osv):
_name = 'account.invoice'
_inherit = 'account.invoice'
_columns = {
'parent_id' : fields.many2one('account.invoice',string="Related Invoice",
help="This field must be used to register the related invoice from a refound invoice"),
'parent_invoice': fields.related('journal_id','parent_invoice', type='boolean', readonly=True,
string="Has Parent Invoice"),
}
def _prepare_refund(self, cr, uid, invoice, date=None, period_id=None, description=None, journal_id=None, context=None):
res = super(account_invoice,self)._prepare_refund(cr, uid, invoice, date=date, period_id=period_id, description=description, journal_id=journal_id, context=context)
res['parent_id'] = invoice.id
return res
def onchange_journal_id(self, cr, uid, ids, journal_id=False, context=None):
res = super(account_invoice,self).onchange_journal_id(cr, uid, ids, journal_id=journal_id, context=context)
if journal_id:
journal = self.pool.get('account.journal').browse(cr, uid, journal_id, context=context)
res['value']['parent_invoice'] = journal.parent_invoice
return res
class account_account(osv.osv):
def _compute_customize(self, cr, uid, ids, field_names, arg=None, context=None):
if context is None:
context ={}
res = {}
initial = self.__compute(cr, uid, ids, ['balance', 'debit', 'credit'], context=context)
current = self.__compute(cr, uid, ids, ['balance', 'debit', 'credit'], context=context)
for account in self.browse(cr, uid, ids, context=context):
res[account.id] = {'debit_initial': 0.0, 'credit_initial': 0.0, 'balance_initial': 0.0,
'debit_current': 0.0, 'credit_current': 0.0, 'balance_current': 0.0}
res[account.id]['debit_initial'] = initial[account.id]['debit']
res[account.id]['credit_initial'] = initial[account.id]['credit']
res[account.id]['balance_initial'] = initial[account.id]['balance']
res[account.id]['debit_current'] = current[account.id]['debit']
res[account.id]['credit_current'] = current[account.id]['credit']
res[account.id]['balance_current'] = current[account.id]['balance']
return res
def _get_parent_id(self, cr, uid, code, company_id, context=None):
res=False
parent_code = code[:-1]
while parent_code:
parent_ids = self.search(cr, uid, [('company_id','=',company_id),('code','=',parent_code)], context=context)
if parent_ids:
parent = self.browse(cr, uid, parent_ids[0], context=context)
if parent.type == 'view':
res = parent.id
else:
res = parent.parent_id.id
break
parent_code = parent_code[:-1]
return res
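# Example (illustrative): for an account coded '5111', the lookup tries
# parent codes '511', '51', then '5' and returns the id of the first
# existing view-type account (or that account's own parent).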
_name = 'account.account'
_inherit = 'account.account'
_columns = {
'balance_initial': fields.function(_compute_customize, digits_compute=dp.get_precision('Account'), string='Initial Balance', multi='customize'),
'credit_initial': fields.function(_compute_customize, digits_compute=dp.get_precision('Account'), string='Initial Credit', multi='customize'),
'debit_initial': fields.function(_compute_customize, digits_compute=dp.get_precision('Account'), string='Initial Debit', multi='customize'),
'balance_current': fields.function(_compute_customize, digits_compute=dp.get_precision('Account'), string='Current Balance', multi='customize'),
'debit_current': fields.function(_compute_customize, digits_compute=dp.get_precision('Account'), string='Current Debit', multi='customize'),
'credit_current': fields.function(_compute_customize, digits_compute=dp.get_precision('Account'), string='Current Credit', multi='customize'),
}
def create(self, cr, uid, values, context=None):
if not values.has_key('parent_id'):
values['parent_id'] = self._get_parent_id(cr, uid, values.get('code'), values.get('company_id'), context=context)
return super(account_account, self).create(cr, uid, values, context=context)
| miltonruelas/cursotecnico | branch/customize/account.py | Python | agpl-3.0 | 5,997 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""nginx config file formatter/beautifier with no additional dependencies.
Originally published under https://github.com/1connect/nginx-config-formatter,
then moved to https://github.com/slomkowski/nginx-config-formatter.
"""
import argparse
import codecs
import contextlib
import io
import logging
import pathlib
import re
import sys
__author__ = "Michał Słomkowski"
__license__ = "Apache 2.0"
__version__ = "1.2.2"
class FormatterOptions:
"""Class holds the formatting options. For now, only indentation supported."""
indentation = 4
class Formatter:
"""nginx formatter. Can format config loaded from file or string."""
_TEMPLATE_VARIABLE_OPENING_TAG = '___TEMPLATE_VARIABLE_OPENING_TAG___'
_TEMPLATE_VARIABLE_CLOSING_TAG = '___TEMPLATE_VARIABLE_CLOSING_TAG___'
_TEMPLATE_BRACKET_OPENING_TAG = '___TEMPLATE_BRACKET_OPENING_TAG___'
_TEMPLATE_BRACKET_CLOSING_TAG = '___TEMPLATE_BRACKET_CLOSING_TAG___'
def __init__(self,
options: FormatterOptions = FormatterOptions(),
logger: logging.Logger = None):
self.logger = logger if logger is not None else logging.getLogger(__name__)
self.options = options
def format_string(self,
contents: str) -> str:
"""Accepts the string containing nginx configuration and returns formatted one. Adds newline at the end."""
lines = contents.splitlines()
lines = self._apply_bracket_template_tags(lines)
lines = self._clean_lines(lines)
lines = self._join_opening_bracket(lines)
lines = self._perform_indentation(lines)
text = '\n'.join(lines)
text = self._strip_bracket_template_tags(text)
for pattern, substitute in ((r'\n{3,}', '\n\n\n'), (r'^\n', ''), (r'\n$', '')):
text = re.sub(pattern, substitute, text, flags=re.MULTILINE)  # the 4th positional argument of re.sub is 'count', not flags
return text + '\n'
def get_formatted_string_from_file(self,
file_path: pathlib.Path) -> str:
"""Loads nginx config from file, performs formatting and returns contents as string.
:param file_path: path to original nginx configuration file."""
_, original_file_content = self._load_file_content(file_path)
return self.format_string(original_file_content)
def format_file(self,
file_path: pathlib.Path,
original_backup_file_path: pathlib.Path = None):
"""Performs the formatting on the given file. The function tries to detect file encoding first.
:param file_path: path to original nginx configuration file. This file will be overridden.
:param original_backup_file_path: optional path, where original file will be backed up."""
chosen_encoding, original_file_content = self._load_file_content(file_path)
with codecs.open(file_path, 'w', encoding=chosen_encoding) as wfp:
wfp.write(self.format_string(original_file_content))
self.logger.info("Formatted content written to original file.")
if original_backup_file_path:
with codecs.open(original_backup_file_path, 'w', encoding=chosen_encoding) as wfp:
wfp.write(original_file_content)
self.logger.info("Original content saved to '%s'.", original_backup_file_path)
def _load_file_content(self,
file_path: pathlib.Path) -> (str, str):
"""Determines the encoding of the input file and loads its content to string.
:param file_path: path to original nginx configuration file."""
encodings = ('utf-8', 'latin1')
encoding_failures = []
chosen_encoding = None
original_file_content = None
for enc in encodings:
try:
with codecs.open(file_path, 'r', encoding=enc) as rfp:
original_file_content = rfp.read()
chosen_encoding = enc
break
except ValueError as e:
encoding_failures.append(e)
if chosen_encoding is None:
raise Exception('none of encodings %s are valid for file %s. Errors: %s'
% (encodings, file_path, [e.message for e in encoding_failures]))
self.logger.info("Loaded file '%s' (detected encoding %s).", file_path, chosen_encoding)
assert original_file_content is not None
return chosen_encoding, original_file_content
@staticmethod
def _strip_line(single_line):
"""Strips the line and replaces neighbouring whitespaces with single space (except when within quotation
marks). """
single_line = single_line.strip()
if single_line.startswith('#'):
return single_line
within_quotes = False
parts = []
for part in re.split('"', single_line):
if within_quotes:
parts.append(part)
else:
parts.append(re.sub(r'[\s]+', ' ', part))
within_quotes = not within_quotes
return '"'.join(parts)
@staticmethod
def _count_multi_semicolon(single_line):
"""Count multi semicolon (except when within quotation marks)."""
single_line = single_line.strip()
if single_line.startswith('#'):
return 0, 0
within_quotes = False
q = 0
c = 0
for part in re.split('"', single_line):
if within_quotes:
q = 1
else:
c += part.count(';')
within_quotes = not within_quotes
return q, c
@staticmethod
def _multi_semicolon(single_line):
"""Break multi semicolon into multiline (except when within quotation marks)."""
single_line = single_line.strip()
if single_line.startswith('#'):
return single_line
within_quotes = False
parts = []
for part in re.split('"', single_line):
if within_quotes:
parts.append(part)
else:
parts.append(part.replace(";", ";\n"))
within_quotes = not within_quotes
return '"'.join(parts)
def _apply_variable_template_tags(self, line: str) -> str:
"""Replaces variable indicators ${ and } with tags, so subsequent formatting is easier."""
return re.sub(r'\${\s*(\w+)\s*}',
self._TEMPLATE_VARIABLE_OPENING_TAG + r"\1" + self._TEMPLATE_VARIABLE_CLOSING_TAG,
line,
flags=re.UNICODE)
def _strip_variable_template_tags(self, line: str) -> str:
"""Replaces tags back with ${ and } respectively."""
return re.sub(self._TEMPLATE_VARIABLE_OPENING_TAG + r'\s*(\w+)\s*' + self._TEMPLATE_VARIABLE_CLOSING_TAG,
r'${\1}',
line,
flags=re.UNICODE)
def _apply_bracket_template_tags(self, lines):
""" Replaces bracket { and } with tags, so subsequent formatting is easier."""
formatted_lines = []
for line in lines:
formatted_line = ""
in_quotes = False
last_char = ""
if line.startswith('#'):
formatted_line += line
else:
for char in line:
if (char == "\'" or char == "\"") and last_char != "\\":
in_quotes = self._reverse_in_quotes_status(in_quotes)
if in_quotes:
if char == "{":
formatted_line += self._TEMPLATE_BRACKET_OPENING_TAG
elif char == "}":
formatted_line += self._TEMPLATE_BRACKET_CLOSING_TAG
else:
formatted_line += char
else:
formatted_line += char
last_char = char
formatted_lines.append(formatted_line)
return formatted_lines
@staticmethod
def _reverse_in_quotes_status(status: bool) -> bool:
return not status
def _strip_bracket_template_tags(self, content: str) -> str:
""" Replaces tags back with { and } respectively."""
content = content.replace(self._TEMPLATE_BRACKET_OPENING_TAG, "{", -1)
content = content.replace(self._TEMPLATE_BRACKET_CLOSING_TAG, "}", -1)
return content
def _clean_lines(self, orig_lines) -> list:
"""Strips the lines and splits them if they contain curly brackets."""
cleaned_lines = []
for line in orig_lines:
line = self._strip_line(line)
line = self._apply_variable_template_tags(line)
if line == "":
cleaned_lines.append("")
elif line == "};":
cleaned_lines.append("}")
elif line.startswith("#"):
cleaned_lines.append(self._strip_variable_template_tags(line))
else:
q, c = self._count_multi_semicolon(line)
if q == 1 and c > 1:
ml = self._multi_semicolon(line)
cleaned_lines.extend(self._clean_lines(ml.splitlines()))
elif q != 1 and c > 1:
newlines = line.split(";")
lines_to_add = self._clean_lines(["".join([ln, ";"]) for ln in newlines if ln != ""])
cleaned_lines.extend(lines_to_add)
else:
if line.startswith("rewrite"):
cleaned_lines.append(self._strip_variable_template_tags(line))
else:
cleaned_lines.extend(
[self._strip_variable_template_tags(ln).strip() for ln in re.split(r"([{}])", line) if
ln != ""])
return cleaned_lines
@staticmethod
def _join_opening_bracket(lines):
"""When opening curly bracket is in it's own line (K&R convention), it's joined with precluding line (Java)."""
modified_lines = []
for i in range(len(lines)):
if i > 0 and lines[i] == "{":
modified_lines[-1] += " {"
else:
modified_lines.append(lines[i])
return modified_lines
def _perform_indentation(self, lines):
"""Indents the lines according to their nesting level determined by curly brackets."""
indented_lines = []
current_indent = 0
indentation_str = ' ' * self.options.indentation
for line in lines:
if not line.startswith("#") and line.endswith('}') and current_indent > 0:
current_indent -= 1
if line != "":
indented_lines.append(current_indent * indentation_str + line)
else:
indented_lines.append("")
if not line.startswith("#") and line.endswith('{'):
current_indent += 1
return indented_lines
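# Example (an illustrative sketch): format a config snippet held in a string.
#   f = Formatter()
#   print(f.format_string('server {\nlisten 80;\nlocation / {\nreturn 200;\n}\n}'))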
@contextlib.contextmanager
def _redirect_stdout_to_stderr():
"""Redirects stdout to stderr for argument parsing. This is to don't pollute the stdout
when --print-result is used."""
old_stdout = sys.stdout
sys.stdout = sys.stderr
try:
yield
finally:
sys.stdout = old_stdout
def _aname(action) -> str:
"""Converts argument name to string to be consistent with argparse."""
return argparse._get_action_name(action)
def _standalone_run(program_arguments):
    arg_parser = argparse.ArgumentParser(description="Formats nginx configuration files in a consistent way.")
arg_parser.add_argument("-v", "--verbose", action="store_true", help="show formatted file names")
pipe_arg = arg_parser.add_argument("-", "--pipe",
action="store_true",
help="reads content from standard input, prints result to stdout")
pipe_xor_backup_group = arg_parser.add_mutually_exclusive_group()
print_result_arg = pipe_xor_backup_group.add_argument("-p", "--print-result",
action="store_true",
help="prints result to stdout, original file is not changed")
pipe_xor_backup_group.add_argument("-b", "--backup-original",
action="store_true",
help="backup original config file as filename.conf~")
arg_parser.add_argument("config_files",
nargs='*',
help="configuration files to format")
formatter_options_group = arg_parser.add_argument_group("formatting options")
formatter_options_group.add_argument("-i", "--indent", action="store", default=4, type=int,
help="specify number of spaces for indentation")
with _redirect_stdout_to_stderr():
args = arg_parser.parse_args(program_arguments)
logging.basicConfig(
level=logging.INFO if args.verbose else logging.ERROR,
format='%(levelname)s: %(message)s')
try:
if args.pipe and len(args.config_files) != 0:
raise Exception("if %s is enabled, no file can be passed as input" % _aname(pipe_arg))
if args.pipe and args.backup_original:
raise Exception("cannot create backup file when %s is enabled" % _aname(pipe_arg))
if args.print_result and len(args.config_files) > 1:
raise Exception("if %s is enabled, only one file can be passed as input" % _aname(print_result_arg))
if len(args.config_files) == 0 and not args.pipe:
raise Exception("no input files provided, specify at least one file or use %s" % _aname(pipe_arg))
except Exception as e:
arg_parser.error(str(e))
format_options = FormatterOptions()
format_options.indentation = args.indent
formatter = Formatter(format_options)
if args.pipe:
original_content = io.TextIOWrapper(sys.stdin.buffer, encoding='utf-8')
print(formatter.format_string(original_content.read()))
elif args.print_result:
print(formatter.get_formatted_string_from_file(args.config_files[0]))
else:
for config_file_path in args.config_files:
backup_file_path = config_file_path + '~' if args.backup_original else None
formatter.format_file(config_file_path, backup_file_path)
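# (Added sketch, not part of the original module) Programmatic counterpart of
# the CLI flow above; Formatter, FormatterOptions and format_string() are the
# same names _standalone_run() uses.
def _example_format_string() -> str:
    options = FormatterOptions()
    options.indentation = 2  # two-space indents instead of the default four
    formatter = Formatter(options)
    return formatter.format_string("server {\nlisten   80;\n}\n")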
def main():
_standalone_run(sys.argv[1:])
if __name__ == "__main__":
main()
| 1connect/nginx-config-formatter | nginxfmt.py | Python | apache-2.0 | 14,643 |
import unittest
from typing import List
import utils
# DP, 2D prefix sums.
class NumMatrix:
    # O(mn) time. O(mn) space.
def __init__(self, matrix: List[List[int]]):
sums = [[0] * len(matrix[0]) for _ in range(len(matrix))]
for r, row in enumerate(matrix):
for c, val in enumerate(row):
s = val
if r >= 1:
s += sums[r - 1][c]
if c >= 1:
s += sums[r][c - 1]
if r >= 1:
s -= sums[r - 1][c - 1]
sums[r][c] = s
self.sums = sums
# O(1) time. O(1) space.
def sumRegion(self, row1: int, col1: int, row2: int, col2: int) -> int:
s = self.sums[row2][col2]
if row1 >= 1:
s -= self.sums[row1 - 1][col2]
if col1 >= 1:
s -= self.sums[row2][col1 - 1]
if row1 >= 1:
s += self.sums[row1 - 1][col1 - 1]
return s
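# (Added illustration) sums[r][c] holds the inclusive prefix sum of
# matrix[0..r][0..c], so any rectangle is recovered by inclusion-exclusion:
# S(r2, c2) - S(r1 - 1, c2) - S(r2, c1 - 1) + S(r1 - 1, c1 - 1).
def _demo_num_matrix():
    nm = NumMatrix([[3, 0, 1],
                    [5, 6, 3],
                    [1, 2, 0]])
    assert nm.sumRegion(1, 1, 2, 2) == 11  # 6 + 3 + 2 + 0
    assert nm.sumRegion(0, 0, 2, 2) == 21  # sum of the whole matrix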
class Test(unittest.TestCase):
def test(self):
cls = NumMatrix
cases = utils.load_test_json(__file__).test_cases
for case in cases:
args = str(case.args)
obj = None
for func, parameters, expected in zip(case.functions, case.args, case.expected):
if func == cls.__name__:
obj = cls(*parameters)
else:
actual = getattr(obj, func)(*parameters)
self.assertEqual(expected, actual, msg=args)
if __name__ == '__main__':
unittest.main()
| chrisxue815/leetcode_python | problems/test_0304.py | Python | unlicense | 1,569 |
# ######################################################################
# Copyright (c) 2014, Brookhaven Science Associates, Brookhaven #
# National Laboratory. All rights reserved. #
# #
# Redistribution and use in source and binary forms, with or without #
# modification, are permitted provided that the following conditions #
# are met: #
# #
# * Redistributions of source code must retain the above copyright #
# notice, this list of conditions and the following disclaimer. #
# #
# * Redistributions in binary form must reproduce the above copyright #
# notice this list of conditions and the following disclaimer in #
# the documentation and/or other materials provided with the #
# distribution. #
# #
# * Neither the name of the Brookhaven Science Associates, Brookhaven #
# National Laboratory nor the names of its contributors may be used #
# to endorse or promote products derived from this software without #
# specific prior written permission. #
# #
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS #
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT #
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS #
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE #
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, #
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES #
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR #
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) #
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, #
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OTHERWISE) ARISING #
# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE #
# POSSIBILITY OF SUCH DAMAGE. #
########################################################################
from __future__ import absolute_import, division, print_function
import logging
import numpy as np
from numpy.testing import assert_array_almost_equal
from nose.tools import assert_raises, assert_equal
import skbeam.core.utils as utils
from skbeam.core.correlation import (multi_tau_auto_corr,
auto_corr_scat_factor,
lazy_one_time,
lazy_two_time, two_time_corr,
two_time_state_to_results,
one_time_from_two_time,
CrossCorrelator)
from skbeam.core.mask import bad_to_nan_gen
from skbeam.core.roi import ring_edges, segmented_rings
logger = logging.getLogger(__name__)
def setup():
global num_levels, num_bufs, xdim, ydim, stack_size, img_stack, rois
num_levels = 6
num_bufs = 4 # must be even
xdim = 256
ydim = 512
stack_size = 100
img_stack = np.random.randint(1, 3, (stack_size, xdim, ydim))
rois = np.zeros_like(img_stack[0])
# make sure that the ROIs can be any integers greater than 1.
# They do not have to start at 1 and be continuous
rois[0:xdim//10, 0:ydim//10] = 5
rois[xdim//10:xdim//5, ydim//10:ydim//5] = 3
def test_lazy_vs_original():
setup()
# run the correlation on the full stack
full_gen_one = lazy_one_time(
img_stack, num_levels, num_bufs, rois)
for gen_state_one in full_gen_one:
pass
g2, lag_steps = multi_tau_auto_corr(num_levels, num_bufs,
rois, img_stack)
assert np.all(g2 == gen_state_one.g2)
assert np.all(lag_steps == gen_state_one.lag_steps)
full_gen_two = lazy_two_time(rois, img_stack, stack_size,
num_bufs, num_levels)
for gen_state_two in full_gen_two:
pass
final_gen_result_two = two_time_state_to_results(gen_state_two)
two_time = two_time_corr(rois, img_stack, stack_size,
num_bufs, num_levels)
assert np.all(two_time[0] == final_gen_result_two.g2)
assert np.all(two_time[1] == final_gen_result_two.lag_steps)
def test_lazy_two_time():
setup()
# run the correlation on the full stack
full_gen = lazy_two_time(rois, img_stack, stack_size,
stack_size, 1)
for full_state in full_gen:
pass
final_result = two_time_state_to_results(full_state)
# make sure we have essentially zero correlation in the images,
# since they are random integers
assert np.average(final_result.g2-1) < 0.01
# run the correlation on the first half
gen_first_half = lazy_two_time(rois, img_stack[:stack_size//2], stack_size,
num_bufs=stack_size, num_levels=1)
for first_half_state in gen_first_half:
pass
# run the correlation on the second half by passing in the state from the
# first half
gen_second_half = lazy_two_time(rois, img_stack[stack_size//2:],
stack_size, num_bufs=stack_size,
num_levels=1,
two_time_internal_state=first_half_state)
for second_half_state in gen_second_half:
pass
result = two_time_state_to_results(second_half_state)
assert np.all(full_state.g2 == result.g2)
def test_lazy_one_time():
setup()
# run the correlation on the full stack
full_gen = lazy_one_time(img_stack, num_levels, num_bufs, rois)
for full_result in full_gen:
pass
# make sure we have essentially zero correlation in the images,
# since they are random integers
assert np.average(full_result.g2-1) < 0.01
# run the correlation on the first half
gen_first_half = lazy_one_time(
img_stack[:stack_size//2], num_levels, num_bufs, rois)
for first_half_result in gen_first_half:
pass
# run the correlation on the second half by passing in the state from the
# first half
gen_second_half = lazy_one_time(
img_stack[stack_size//2:], num_levels, num_bufs, rois,
internal_state=first_half_result.internal_state
)
for second_half_result in gen_second_half:
pass
assert np.all(full_result.g2 ==
second_half_result.g2)
def test_two_time_corr():
setup()
y = []
for i in range(50):
y.append(img_stack[0])
two_time = two_time_corr(rois, np.asarray(y), 50,
num_bufs=50, num_levels=1)
assert np.all(two_time[0])
    # num_bufs must be even; an odd value should raise ValueError
assert_raises(ValueError, two_time_corr, rois, np.asarray(y), 50,
num_bufs=25, num_levels=1)
def test_auto_corr_scat_factor():
num_levels, num_bufs = 3, 4
tot_channels, lags, dict_lags = utils.multi_tau_lags(num_levels, num_bufs)
beta = 0.5
relaxation_rate = 10.0
baseline = 1.0
g2 = auto_corr_scat_factor(lags, beta, relaxation_rate, baseline)
assert_array_almost_equal(g2, np.array([1.5, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0]), decimal=8)
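    # (Added note) The expected values follow from the single-exponential model
    # implemented by auto_corr_scat_factor,
    #     g2(tau) = baseline + beta * exp(-2 * relaxation_rate * tau):
    # at tau = 0 this gives 1.0 + 0.5 = 1.5, while for every lag >= 1 the term
    # exp(-20 * tau) is zero to 8 decimal places, leaving only the baseline.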
def test_bad_images():
setup()
g2, lag_steps = multi_tau_auto_corr(4, num_bufs,
rois, img_stack)
# introduce bad images
bad_img_list = [3, 21, 35, 48]
# convert each bad image to np.nan array
images = bad_to_nan_gen(img_stack, bad_img_list)
# then use new images (including bad images)
g2_n, lag_steps_n = multi_tau_auto_corr(4, num_bufs,
rois, images)
assert_array_almost_equal(g2[:, 0], g2_n[:, 0], decimal=3)
assert_array_almost_equal(g2[:, 1], g2_n[:, 1], decimal=3)
def test_one_time_from_two_time():
num_lev = 1
num_buf = 10 # must be even
x_dim = 10
y_dim = 10
stack = 10
imgs = np.random.randint(1, 3, (stack, x_dim, y_dim))
roi = np.zeros_like(imgs[0])
# make sure that the ROIs can be any integers greater than 1.
# They do not have to start at 1 and be continuous
roi[0:x_dim//10, 0:y_dim//10] = 5
roi[x_dim//10:x_dim//5, y_dim//10:y_dim//5] = 3
g2, lag_steps, _state = two_time_corr(roi, imgs, stack,
num_buf, num_lev)
one_time = one_time_from_two_time(g2)
assert_array_almost_equal(one_time[0, :], np.array([1.0, 0.9, 0.8, 0.7,
0.6, 0.5, 0.4, 0.3,
0.2, 0.1]))
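    # (Added note, assuming the standard construction) one_time_from_two_time
    # collapses the two-time matrix along diagonals of constant lag; only
    # stack - tau frame pairs exist at lag tau, which is why the expected
    # values fall off linearly as (stack - tau) / stack = 1.0, 0.9, ..., 0.1.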
def test_CrossCorrelator1d():
''' Test the 1d version of the cross correlator with these methods:
-method='regular', no mask
-method='regular', masked
-method='symavg', no mask
-method='symavg', masked
'''
np.random.seed(123)
# test 1D data
Npoints = 100
x = np.linspace(-10, 10, Npoints)
sigma = .2
# purposely have sparsely filled values (with lots of zeros)
peak_positions = (np.random.random(10)-.5)*20
y = np.zeros_like(x)
for peak_position in peak_positions:
y += np.exp(-(x-peak_position)**2/2./sigma**2)
mask_1D = np.ones_like(y)
mask_1D[10:20] = 0
mask_1D[60:90] = 0
mask_1D[111:137] = 0
mask_1D[211:237] = 0
mask_1D[411:537] = 0
mask_1D *= mask_1D[::-1]
cc1D = CrossCorrelator(mask_1D.shape)
cc1D_symavg = CrossCorrelator(mask_1D.shape, normalization='symavg')
cc1D_masked = CrossCorrelator(mask_1D.shape, mask=mask_1D)
cc1D_masked_symavg = CrossCorrelator(mask_1D.shape, mask=mask_1D,
normalization='symavg')
assert_equal(cc1D.nids, 1)
ycorr_1D = cc1D(y)
ycorr_1D_masked = cc1D_masked(y*mask_1D)
ycorr_1D_symavg = cc1D_symavg(y)
ycorr_1D_masked_symavg = cc1D_masked_symavg(y*mask_1D)
assert_array_almost_equal(ycorr_1D[::20],
np.array([-1.155123e-14, 6.750373e-03,
6.221636e-01, 7.105527e-01,
1.187275e+00, 2.984563e+00,
1.092725e+00, 1.198341e+00,
1.045922e-01, 5.451511e-06]))
assert_array_almost_equal(ycorr_1D_masked[::20],
np.array([-5.172377e-16, np.nan, 7.481473e-01,
6.066887e-02, 4.470989e-04,
2.330335e+00, np.nan, 7.109758e-01,
np.nan, 2.275846e-14]))
assert_array_almost_equal(ycorr_1D_symavg[::20],
np.array([-5.3002753, 1.54268227, 0.86220476,
0.57715207, 0.86503802, 2.94383202,
0.7587901, 0.99763715, 0.16800951,
1.23506293]))
assert_array_almost_equal(ycorr_1D_masked_symavg[::20][:-1],
np.array([-5.30027530e-01, np.nan,
1.99940257e+00, 7.33127871e-02,
1.00000000e+00, 2.15887870e+00,
np.nan, 9.12832602e-01,
np.nan]))
def testCrossCorrelator2d():
''' Test the 2D case of the cross correlator.
With non-binary labels.
'''
np.random.seed(123)
# test 2D data
Npoints2 = 10
x2 = np.linspace(-10, 10, Npoints2)
X, Y = np.meshgrid(x2, x2)
Z = np.random.random((Npoints2, Npoints2))
np.random.seed(123)
sigma = .2
# purposely have sparsely filled values (with lots of zeros)
# place peaks in random positions
peak_positions = (np.random.random((2, 10))-.5)*20
for peak_position in peak_positions:
Z += np.exp(-((X - peak_position[0])**2 +
(Y - peak_position[1])**2)/2./sigma**2)
mask_2D = np.ones_like(Z)
mask_2D[1:2, 1:2] = 0
mask_2D[7:9, 4:6] = 0
mask_2D[1:2, 9:] = 0
# Compute with segmented rings
edges = ring_edges(1, 3, num_rings=2)
segments = 5
x0, y0 = np.array(mask_2D.shape)//2
maskids = segmented_rings(edges, segments, (y0, x0), mask_2D.shape)
cc2D_ids = CrossCorrelator(mask_2D.shape, mask=maskids)
cc2D_ids_symavg = CrossCorrelator(mask_2D.shape, mask=maskids,
normalization='symavg')
# 10 ids
assert_equal(cc2D_ids.nids, 10)
ycorr_ids_2D = cc2D_ids(Z)
ycorr_ids_2D_symavg = cc2D_ids_symavg(Z)
index = 0
ycorr_ids_2D[index][ycorr_ids_2D[index].shape[0]//2]
assert_array_almost_equal(ycorr_ids_2D[index]
[ycorr_ids_2D[index].shape[0]//2],
np.array([1.22195059, 1.08685771,
1.43246508, 1.08685771, 1.22195059
])
)
index = 1
ycorr_ids_2D[index][ycorr_ids_2D[index].shape[0]//2]
assert_array_almost_equal(ycorr_ids_2D[index]
[ycorr_ids_2D[index].shape[0]//2],
np.array([1.24324268, 0.80748997,
1.35790022, 0.80748997, 1.24324268
])
)
index = 0
ycorr_ids_2D_symavg[index][ycorr_ids_2D[index].shape[0]//2]
assert_array_almost_equal(ycorr_ids_2D_symavg[index]
[ycorr_ids_2D[index].shape[0]//2],
np.array([0.84532695, 1.16405848, 1.43246508,
1.16405848, 0.84532695])
)
index = 1
ycorr_ids_2D_symavg[index][ycorr_ids_2D[index].shape[0]//2]
assert_array_almost_equal(ycorr_ids_2D_symavg[index]
[ycorr_ids_2D[index].shape[0]//2],
np.array([0.94823482, 0.8629459, 1.35790022,
0.8629459, 0.94823482])
)
def test_CrossCorrelator_badinputs():
with assert_raises(ValueError):
CrossCorrelator((1, 1, 1))
with assert_raises(ValueError):
cc = CrossCorrelator((10, 10))
a = np.ones((10, 11))
cc(a)
with assert_raises(ValueError):
cc = CrossCorrelator((10, 10))
a = np.ones((10, 10))
a2 = np.ones((10, 11))
cc(a, a2)
if __name__ == '__main__':
import nose
nose.runmodule(argv=['-s', '--with-doctest'], exit=False)
| danielballan/scikit-xray | skbeam/core/tests/test_correlation.py | Python | bsd-3-clause | 15,181 |
from __future__ import print_function
from cms.sitemaps import CMSSitemap
from django.conf.urls import * # NOQA
from django.conf.urls.i18n import i18n_patterns
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.conf import settings
from ImmOrbit.api import router
admin.autodiscover()
urlpatterns = i18n_patterns('',
url(r'^admin/', include(admin.site.urls)), # NOQA
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap',
{'sitemaps': {'cmspages': CMSSitemap}}),
url(r'^select2/', include('django_select2.urls')),
url(r'^api/', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^', include('cms.urls')),
)
# This is only needed when using runserver.
if settings.DEBUG:
urlpatterns = patterns('',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', # NOQA
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
) + staticfiles_urlpatterns() + urlpatterns # NOQA
| RedMap/Django-ImmOrbit | example/src/Demo/urls.py | Python | apache-2.0 | 1,077 |
# coding: utf-8
"""
Stakeholder engagement API
This API enables Intelligent Engagement for your Business. iEngage is a platform that combines process, augmented intelligence and rewards to help you intelligently engage customers.
OpenAPI spec version: 1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class QuestionApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
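    # (Added note) In swagger-codegen clients of this vintage, Configuration()
    # returns a shared default instance, so when no api_client is supplied the
    # ApiClient created here is cached on that instance and reused by every
    # generated *Api class in the process.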
def add_answer(self, question_id, answer, logged_in_user_id, access_token, client_token, **kwargs):
"""
Answer the specified question
Allows the user to answer the question
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_answer(question_id, answer, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int question_id: questionId (required)
:param str answer: answer (required)
:param str logged_in_user_id: User id of logged / authenticated user (required)
:param str access_token: Unique session token for user. To get access token user will have to authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/><b>B) Available values -</b><br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/>4)questionId<br/>5)answeringUser<br/>6)isMarkedAnswer<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseAnswer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.add_answer_with_http_info(question_id, answer, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.add_answer_with_http_info(question_id, answer, logged_in_user_id, access_token, client_token, **kwargs)
return data
def add_answer_with_http_info(self, question_id, answer, logged_in_user_id, access_token, client_token, **kwargs):
"""
Answer the specified question
Allows the user to answer the question
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_answer_with_http_info(question_id, answer, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int question_id: questionId (required)
:param str answer: answer (required)
:param str logged_in_user_id: User id of logged / authenticated user (required)
:param str access_token: Unique session token for user. To get access token user will have to authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/><b>B) Available values -</b><br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/>4)questionId<br/>5)answeringUser<br/>6)isMarkedAnswer<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseAnswer
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['question_id', 'answer', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_answer" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'question_id' is set
if ('question_id' not in params) or (params['question_id'] is None):
raise ValueError("Missing the required parameter `question_id` when calling `add_answer`")
# verify the required parameter 'answer' is set
if ('answer' not in params) or (params['answer'] is None):
raise ValueError("Missing the required parameter `answer` when calling `add_answer`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `add_answer`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `add_answer`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `add_answer`")
collection_formats = {}
resource_path = '/questions/{questionId}/answers'.replace('{format}', 'json')
path_params = {}
if 'question_id' in params:
path_params['questionId'] = params['question_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'answer' in params:
form_params.append(('answer', params['answer']))
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseAnswer',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
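    # (Added usage sketch; all identifiers and token values below are
    # placeholders, not part of the generated client)
    #
    #     api = QuestionApi()
    #     resp = api.add_answer(question_id=42,
    #                           answer="Covered in the FAQ.",
    #                           logged_in_user_id="user-1",
    #                           access_token="ACCESS_TOKEN",
    #                           client_token="CLIENT_TOKEN")
    #     print(resp)  # returns a VerveResponseAnswer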
def add_question(self, category_id, question_title, question_description, logged_in_user_id, access_token, client_token, **kwargs):
"""
Share question without attachment
        Allows the user to share a question without an attachment. Returns the question object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_question(category_id, question_title, question_description, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int category_id: categoryId (required)
:param str question_title: Question Title (required)
:param str question_description: Describe question (required)
:param str logged_in_user_id: User id of logged / authenticated user (required)
:param str access_token: Unique session token for user. To get access token user will have to authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:return: VerveResponseQuestion
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.add_question_with_http_info(category_id, question_title, question_description, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.add_question_with_http_info(category_id, question_title, question_description, logged_in_user_id, access_token, client_token, **kwargs)
return data
def add_question_with_http_info(self, category_id, question_title, question_description, logged_in_user_id, access_token, client_token, **kwargs):
"""
Share question without attachment
        Allows the user to share a question without an attachment. Returns the question object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_question_with_http_info(category_id, question_title, question_description, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int category_id: categoryId (required)
:param str question_title: Question Title (required)
:param str question_description: Describe question (required)
:param str logged_in_user_id: User id of logged / authenticated user (required)
:param str access_token: Unique session token for user. To get access token user will have to authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:return: VerveResponseQuestion
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['category_id', 'question_title', 'question_description', 'logged_in_user_id', 'access_token', 'client_token']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_question" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'category_id' is set
if ('category_id' not in params) or (params['category_id'] is None):
raise ValueError("Missing the required parameter `category_id` when calling `add_question`")
# verify the required parameter 'question_title' is set
if ('question_title' not in params) or (params['question_title'] is None):
raise ValueError("Missing the required parameter `question_title` when calling `add_question`")
# verify the required parameter 'question_description' is set
if ('question_description' not in params) or (params['question_description'] is None):
raise ValueError("Missing the required parameter `question_description` when calling `add_question`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `add_question`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `add_question`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `add_question`")
collection_formats = {}
resource_path = '/questions'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'category_id' in params:
form_params.append(('categoryId', params['category_id']))
if 'question_title' in params:
form_params.append(('questionTitle', params['question_title']))
if 'question_description' in params:
form_params.append(('questionDescription', params['question_description']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseQuestion',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_question_0(self, body, body2, body3, logged_in_user_id, access_token, client_token, **kwargs):
"""
Share question with attachment
        Allows the user to share a question with an attachment. Returns the question object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_question_0(body, body2, body3, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int body: categoryId (required)
:param str body2: questionTitle (required)
:param str body3: questionDescription (required)
:param str logged_in_user_id: User id of logged / authenticated user (required)
:param str access_token: Unique session token for user. To get access token user will have to authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param list[Attachment] body4:
:return: VerveResponseQuestion
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.add_question_0_with_http_info(body, body2, body3, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.add_question_0_with_http_info(body, body2, body3, logged_in_user_id, access_token, client_token, **kwargs)
return data
def add_question_0_with_http_info(self, body, body2, body3, logged_in_user_id, access_token, client_token, **kwargs):
"""
Share question with attachment
        Allows the user to share a question with an attachment. Returns the question object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_question_0_with_http_info(body, body2, body3, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int body: categoryId (required)
:param str body2: questionTitle (required)
:param str body3: questionDescription (required)
:param str logged_in_user_id: User id of logged / authenticated user (required)
:param str access_token: Unique session token for user. To get access token user will have to authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param list[Attachment] body4:
:return: VerveResponseQuestion
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'body2', 'body3', 'logged_in_user_id', 'access_token', 'client_token', 'body4']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_question_0" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_question_0`")
# verify the required parameter 'body2' is set
if ('body2' not in params) or (params['body2'] is None):
raise ValueError("Missing the required parameter `body2` when calling `add_question_0`")
# verify the required parameter 'body3' is set
if ('body3' not in params) or (params['body3'] is None):
raise ValueError("Missing the required parameter `body3` when calling `add_question_0`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `add_question_0`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `add_question_0`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `add_question_0`")
collection_formats = {}
resource_path = '/questions/attachment'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
body_params = None
if 'body4' in params:
body_params = params['body4']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['multipart/form-data'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseQuestion',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_question_category(self, name, description, logged_in_user_id, access_token, client_token, **kwargs):
"""
Create question category
Creates a question category. Returns the created question category
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_question_category(name, description, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: Name (required)
:param str description: description (required)
:param str logged_in_user_id: User id of logged / authenticated user (required)
:param str access_token: Unique session token for user. To get access token user will have to authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param int organization_id: OrganizationId
        :param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2)categoryName<br/><b>B) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseQuestionCategory
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_question_category_with_http_info(name, description, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.create_question_category_with_http_info(name, description, logged_in_user_id, access_token, client_token, **kwargs)
return data
def create_question_category_with_http_info(self, name, description, logged_in_user_id, access_token, client_token, **kwargs):
"""
Create question category
Creates a question category. Returns the created question category
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_question_category_with_http_info(name, description, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: Name (required)
:param str description: description (required)
:param str logged_in_user_id: User id of logged / authenticated user (required)
:param str access_token: Unique session token for user. To get access token user will have to authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
:param int organization_id: OrganizationId
        :param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2)categoryName<br/><b>B) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseQuestionCategory
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'description', 'logged_in_user_id', 'access_token', 'client_token', 'organization_id', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_question_category" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `create_question_category`")
# verify the required parameter 'description' is set
if ('description' not in params) or (params['description'] is None):
raise ValueError("Missing the required parameter `description` when calling `create_question_category`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `create_question_category`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `create_question_category`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `create_question_category`")
collection_formats = {}
resource_path = '/questions/categories'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'organization_id' in params:
form_params.append(('OrganizationId', params['organization_id']))
if 'name' in params:
form_params.append(('name', params['name']))
if 'description' in params:
form_params.append(('description', params['description']))
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseQuestionCategory',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_answer(self, answer_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Delete answer
Allows the user to delete an answer. Returns the deleted answer
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_answer(answer_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int answer_id: answerId (required)
:param str logged_in_user_id: User id of logged / authenticated user (required)
:param str access_token: Unique session token for user. To get access token user will have to authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/><b>B) Available values -</b><br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/>4)questionId<br/>5)answeringUser<br/>6)isMarkedAnswer<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseAnswer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_answer_with_http_info(answer_id, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.delete_answer_with_http_info(answer_id, logged_in_user_id, access_token, client_token, **kwargs)
return data
def delete_answer_with_http_info(self, answer_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Delete answer
Allows the user to delete an answer. Returns the deleted answer
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_answer_with_http_info(answer_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int answer_id: answerId (required)
:param str logged_in_user_id: User id of logged / authenticated user (required)
:param str access_token: Unique session token for user. To get access token user will have to authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/><b>B) Available values -</b><br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/>4)questionId<br/>5)answeringUser<br/>6)isMarkedAnswer<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseAnswer
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['answer_id', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_answer" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'answer_id' is set
if ('answer_id' not in params) or (params['answer_id'] is None):
raise ValueError("Missing the required parameter `answer_id` when calling `delete_answer`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `delete_answer`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `delete_answer`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `delete_answer`")
collection_formats = {}
resource_path = '/questions/answers/{answerId}'.replace('{format}', 'json')
path_params = {}
if 'answer_id' in params:
path_params['answerId'] = params['answer_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseAnswer',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_question(self, question_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Delete question
        Allows the user to delete a question. Returns the deleted question
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_question(question_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int question_id: questionId (required)
:param str logged_in_user_id: User id of logged / authenticated user (required)
:param str access_token: Unique session token for user. To get access token user will have to authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)issuer<br/>5)noOfAnswers<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseQuestion
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_question_with_http_info(question_id, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.delete_question_with_http_info(question_id, logged_in_user_id, access_token, client_token, **kwargs)
return data
def delete_question_with_http_info(self, question_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Delete question
        Allows the user to delete a question. Returns the deleted question
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_question_with_http_info(question_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int question_id: questionId (required)
:param str logged_in_user_id: User id of logged / authenticated user (required)
:param str access_token: Unique session token for user. To get access token user will have to authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)issuer<br/>5)noOfAnswers<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseQuestion
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['question_id', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_question" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'question_id' is set
if ('question_id' not in params) or (params['question_id'] is None):
raise ValueError("Missing the required parameter `question_id` when calling `delete_question`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `delete_question`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `delete_question`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `delete_question`")
collection_formats = {}
resource_path = '/questions/{questionId}'.replace('{format}', 'json')
path_params = {}
if 'question_id' in params:
path_params['questionId'] = params['question_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseQuestion',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_question_category(self, category_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Delete question category
Allows the user to delete the question category. Returns the deleted question category
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_question_category(category_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int category_id: categoryId (required)
:param str logged_in_user_id: User id of logged / authenticated user (required)
:param str access_token: Unique session token for user. To get access token user will have to authenticate (required)
:param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2)categoryName<br/><b>B) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseQuestionCategory
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_question_category_with_http_info(category_id, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.delete_question_category_with_http_info(category_id, logged_in_user_id, access_token, client_token, **kwargs)
return data
def delete_question_category_with_http_info(self, category_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Delete question category
Allows the user to delete the question category. Returns the deleted question category
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_question_category_with_http_info(category_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param int category_id: categoryId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2)categoryName<br/><b>B) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseQuestionCategory
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['category_id', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_question_category" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'category_id' is set
if ('category_id' not in params) or (params['category_id'] is None):
raise ValueError("Missing the required parameter `category_id` when calling `delete_question_category`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `delete_question_category`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `delete_question_category`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `delete_question_category`")
collection_formats = {}
resource_path = '/questions/categories/{categoryId}'.replace('{format}', 'json')
path_params = {}
if 'category_id' in params:
path_params['categoryId'] = params['category_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseQuestionCategory',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def dislike_answer(self, question_id, answer_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Dislike answer
Allows the user to dislike the answer.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.dislike_answer(question_id, answer_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param int question_id: questionId (required)
:param int answer_id: answerId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/><b>B) Available values -</b><br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/>4)questionId<br/>5)answeringUser<br/>6)isMarkedAnswer<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseAnswer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.dislike_answer_with_http_info(question_id, answer_id, logged_in_user_id, access_token, client_token, **kwargs)
else:
data = self.dislike_answer_with_http_info(question_id, answer_id, logged_in_user_id, access_token, client_token, **kwargs)
return data
def dislike_answer_with_http_info(self, question_id, answer_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Dislike answer
Allows the user to dislike the answer.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.dislike_answer_with_http_info(question_id, answer_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param int question_id: questionId (required)
:param int answer_id: answerId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/><b>B) Available values -</b><br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/>4)questionId<br/>5)answeringUser<br/>6)isMarkedAnswer<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseAnswer
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['question_id', 'answer_id', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method dislike_answer" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'question_id' is set
if ('question_id' not in params) or (params['question_id'] is None):
raise ValueError("Missing the required parameter `question_id` when calling `dislike_answer`")
# verify the required parameter 'answer_id' is set
if ('answer_id' not in params) or (params['answer_id'] is None):
raise ValueError("Missing the required parameter `answer_id` when calling `dislike_answer`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `dislike_answer`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `dislike_answer`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `dislike_answer`")
collection_formats = {}
resource_path = '/questions/{questionId}/answers/{answerId}/dislike'.replace('{format}', 'json')
path_params = {}
if 'question_id' in params:
path_params['questionId'] = params['question_id']
if 'answer_id' in params:
path_params['answerId'] = params['answer_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseAnswer',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
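# Asynchronous usage sketch for the callback convention documented above
# (illustrative only; `api` and all argument values are assumptions).
# When `callback` is supplied, the wrapper returns the request thread
# instead of the deserialized response:
#
#   def on_dislike(response):
#       # `response` is the deserialized VerveResponseAnswer
#       print(response)
#
#   thread = api.dislike_answer(question_id=101, answer_id=7,
#                               logged_in_user_id='42',
#                               access_token='<access-token>',
#                               client_token='<client-token>',
#                               callback=on_dislike)
#   # the returned object can be used to wait for completion; see the
#   # ApiClient implementation for the exact interface it exposes.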
def get_answers(self, question_id, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of answers by questionId
Returns the list of answers by questionId
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_answers(question_id, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param int question_id: questionId (required)
:param int start: start index; pagination starts from 0 (required)
:param int end: end index (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/><b>B) Available values -</b><br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/>4)questionId<br/>5)answeringUser<br/>6)isMarkedAnswer<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseAnswerList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_answers_with_http_info(question_id, start, end, logged_in_user_id, access_token, client_token, **kwargs)
else:
data = self.get_answers_with_http_info(question_id, start, end, logged_in_user_id, access_token, client_token, **kwargs)
return data
def get_answers_with_http_info(self, question_id, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of answers by questionId
Returns the list of answers by questionId
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_answers_with_http_info(question_id, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param int question_id: questionId (required)
:param int start: start index; pagination starts from 0 (required)
:param int end: end index (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/><b>B) Available values -</b><br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/>4)questionId<br/>5)answeringUser<br/>6)isMarkedAnswer<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseAnswerList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['question_id', 'start', 'end', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_answers" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'question_id' is set
if ('question_id' not in params) or (params['question_id'] is None):
raise ValueError("Missing the required parameter `question_id` when calling `get_answers`")
# verify the required parameter 'start' is set
if ('start' not in params) or (params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `get_answers`")
# verify the required parameter 'end' is set
if ('end' not in params) or (params['end'] is None):
raise ValueError("Missing the required parameter `end` when calling `get_answers`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `get_answers`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_answers`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `get_answers`")
collection_formats = {}
resource_path = '/questions/{questionId}/answers'.replace('{format}', 'json')
path_params = {}
if 'question_id' in params:
path_params['questionId'] = params['question_id']
query_params = {}
if 'start' in params:
query_params['start'] = params['start']
if 'end' in params:
query_params['end'] = params['end']
if 'fields' in params:
query_params['fields'] = params['fields']
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseAnswerList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
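# Pagination sketch for `get_answers` (illustrative only; assumes `api` as
# above and that `start`/`end` are 0-based offsets into the answer list,
# as the parameter docs describe; the response attribute name `answers`
# is a hypothetical placeholder):
#
#   page_size = 20
#   offset = 0
#   while True:
#       page = api.get_answers(question_id=101, start=offset,
#                              end=offset + page_size,
#                              logged_in_user_id='42',
#                              access_token='<access-token>',
#                              client_token='<client-token>')
#       batch = getattr(page, 'answers', None) or []
#       if not batch:
#           break
#       offset += page_size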
def get_friends_questions(self, question_status, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of questions shared by friends
Returns the list of questions shared by friends
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_friends_questions(question_status, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str question_status: Question status <br/> 1) ALL <br/> 2) UNREPLIED <br/> 3) REPLIED <br/> 4) CLOSED (required)
:param int start: start index; pagination starts from 0 (required)
:param int end: end index (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param int category_id: categoryId (optional)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)issuer<br/>5)noOfAnswers<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseQuestionList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_friends_questions_with_http_info(question_status, start, end, logged_in_user_id, access_token, client_token, **kwargs)
else:
data = self.get_friends_questions_with_http_info(question_status, start, end, logged_in_user_id, access_token, client_token, **kwargs)
return data
def get_friends_questions_with_http_info(self, question_status, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of questions shared by friends
Returns the list of questions shared by friends
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_friends_questions_with_http_info(question_status, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str question_status: Question status <br/> 1) ALL <br/> 2) UNREPLIED <br/> 3) REPLIED <br/> 4) CLOSED (required)
:param int start: start index; pagination starts from 0 (required)
:param int end: end index (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param int category_id: categoryId (optional)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)issuer<br/>5)noOfAnswers<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseQuestionList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['question_status', 'start', 'end', 'logged_in_user_id', 'access_token', 'client_token', 'category_id', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_friends_questions" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'question_status' is set
if ('question_status' not in params) or (params['question_status'] is None):
raise ValueError("Missing the required parameter `question_status` when calling `get_friends_questions`")
# verify the required parameter 'start' is set
if ('start' not in params) or (params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `get_friends_questions`")
# verify the required parameter 'end' is set
if ('end' not in params) or (params['end'] is None):
raise ValueError("Missing the required parameter `end` when calling `get_friends_questions`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `get_friends_questions`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_friends_questions`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `get_friends_questions`")
collection_formats = {}
resource_path = '/questions/friends'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'question_status' in params:
query_params['questionStatus'] = params['question_status']
if 'category_id' in params:
query_params['categoryId'] = params['category_id']
if 'start' in params:
query_params['start'] = params['start']
if 'end' in params:
query_params['end'] = params['end']
if 'fields' in params:
query_params['fields'] = params['fields']
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseQuestionList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_question(self, question_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get question by id
Returns the question by id
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_question(question_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param int question_id: questionId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)issuer<br/>5)noOfAnswers<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseQuestion
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_question_with_http_info(question_id, logged_in_user_id, access_token, client_token, **kwargs)
else:
data = self.get_question_with_http_info(question_id, logged_in_user_id, access_token, client_token, **kwargs)
return data
def get_question_with_http_info(self, question_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get question by id
Returns the question by id
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_question_with_http_info(question_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param int question_id: questionId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)issuer<br/>5)noOfAnswers<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseQuestion
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['question_id', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_question" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'question_id' is set
if ('question_id' not in params) or (params['question_id'] is None):
raise ValueError("Missing the required parameter `question_id` when calling `get_question`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `get_question`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_question`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `get_question`")
collection_formats = {}
resource_path = '/questions/{questionId}'.replace('{format}', 'json')
path_params = {}
if 'question_id' in params:
path_params['questionId'] = params['question_id']
query_params = {}
if 'fields' in params:
query_params['fields'] = params['fields']
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseQuestion',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
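# `fields` filter sketch for `get_question` (illustrative only): the
# optional `fields` string is passed straight through as the `fields`
# query parameter to request values beyond the defaults. The separator
# format is dictated by the server; a comma-separated list is assumed
# here:
#
#   question = api.get_question(
#       question_id=101, logged_in_user_id='42',
#       access_token='<access-token>', client_token='<client-token>',
#       fields='questionId,questionTitle,noOfAnswers,isClosed')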
def get_question_categories(self, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get the list of question categories
Returns the list of question categories
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_question_categories(start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param int start: start index; pagination starts from 0 (required)
:param int end: end index (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2)categoryName<br/><b>B) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseQuestionCategoryList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_question_categories_with_http_info(start, end, logged_in_user_id, access_token, client_token, **kwargs)
else:
data = self.get_question_categories_with_http_info(start, end, logged_in_user_id, access_token, client_token, **kwargs)
return data
def get_question_categories_with_http_info(self, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get the list of question categories
Returns the list of question categories
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_question_categories_with_http_info(start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param int start: start index; pagination starts from 0 (required)
:param int end: end index (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2)categoryName<br/><b>B) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseQuestionCategoryList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start', 'end', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_question_categories" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'start' is set
if ('start' not in params) or (params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `get_question_categories`")
# verify the required parameter 'end' is set
if ('end' not in params) or (params['end'] is None):
raise ValueError("Missing the required parameter `end` when calling `get_question_categories`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `get_question_categories`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_question_categories`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `get_question_categories`")
collection_formats = {}
resource_path = '/questions/categories'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'start' in params:
query_params['start'] = params['start']
if 'end' in params:
query_params['end'] = params['end']
if 'fields' in params:
query_params['fields'] = params['fields']
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseQuestionCategoryList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_questions_for_user(self, question_status, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of all questions visible to the user
Returns the list of all questions visible to the user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_questions_for_user(question_status, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str question_status: Question status <br/> 1) ALL <br/> 2) UNREPLIED <br/> 3) REPLIED <br/> 4) CLOSED (required)
:param int start: start index; pagination starts from 0 (required)
:param int end: end index (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param int category_id: categoryId (optional)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)issuer<br/>5)noOfAnswers<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseQuestionList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_questions_for_user_with_http_info(question_status, start, end, logged_in_user_id, access_token, client_token, **kwargs)
else:
data = self.get_questions_for_user_with_http_info(question_status, start, end, logged_in_user_id, access_token, client_token, **kwargs)
return data
def get_questions_for_user_with_http_info(self, question_status, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of all questions visible to the user
Returns the list of all questions visible to the user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_questions_for_user_with_http_info(question_status, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param str question_status: Question status <br/> 1) ALL <br/> 2) UNREPLIED <br/> 3) REPLIED <br/> 4) CLOSED (required)
:param int start: start index; pagination starts from 0 (required)
:param int end: end index (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param int category_id: categoryId (optional)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)issuer<br/>5)noOfAnswers<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseQuestionList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['question_status', 'start', 'end', 'logged_in_user_id', 'access_token', 'client_token', 'category_id', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_questions_for_user" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'question_status' is set
if ('question_status' not in params) or (params['question_status'] is None):
raise ValueError("Missing the required parameter `question_status` when calling `get_questions_for_user`")
# verify the required parameter 'start' is set
if ('start' not in params) or (params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `get_questions_for_user`")
# verify the required parameter 'end' is set
if ('end' not in params) or (params['end'] is None):
raise ValueError("Missing the required parameter `end` when calling `get_questions_for_user`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `get_questions_for_user`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_questions_for_user`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `get_questions_for_user`")
collection_formats = {}
resource_path = '/questions'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'question_status' in params:
query_params['questionStatus'] = params['question_status']
if 'category_id' in params:
query_params['categoryId'] = params['category_id']
if 'start' in params:
query_params['start'] = params['start']
if 'end' in params:
query_params['end'] = params['end']
if 'fields' in params:
query_params['fields'] = params['fields']
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseQuestionList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
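# Filtering sketch for `get_questions_for_user` (illustrative only; all
# values are placeholders): `question_status` is required and must be one
# of ALL / UNREPLIED / REPLIED / CLOSED, while `category_id` is an
# optional keyword argument that narrows the listing to one category:
#
#   open_questions = api.get_questions_for_user(
#       question_status='UNREPLIED', start=0, end=10,
#       logged_in_user_id='42', access_token='<access-token>',
#       client_token='<client-token>', category_id=5)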
def get_recommend_question(self, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of recommended questions
Returns the list of recommended questions
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_recommend_question(start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param int start: start index; pagination starts from 0 (required)
:param int end: end index (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)issuer<br/>5)noOfAnswers<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseQuestionList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_recommend_question_with_http_info(start, end, logged_in_user_id, access_token, client_token, **kwargs)
else:
data = self.get_recommend_question_with_http_info(start, end, logged_in_user_id, access_token, client_token, **kwargs)
return data
def get_recommend_question_with_http_info(self, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of recommended questions
Returns the list of recommended questions
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_recommend_question_with_http_info(start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param int start: start index; pagination starts from 0 (required)
:param int end: end index (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)issuer<br/>5)noOfAnswers<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseQuestionList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start', 'end', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_recommend_question" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'start' is set
if ('start' not in params) or (params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `get_recommend_question`")
# verify the required parameter 'end' is set
if ('end' not in params) or (params['end'] is None):
raise ValueError("Missing the required parameter `end` when calling `get_recommend_question`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `get_recommend_question`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_recommend_question`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `get_recommend_question`")
collection_formats = {}
resource_path = '/questions/recommend'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'start' in params:
query_params['start'] = params['start']
if 'end' in params:
query_params['end'] = params['end']
if 'fields' in params:
query_params['fields'] = params['fields']
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseQuestionList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_recommended_questions_from_db(self, user_id, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of recommended questions from DB
Returns the list of recommended questions from DB
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_recommended_questions_from_db(user_id, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param int user_id: userId (required)
:param int start: start index; pagination starts from 0 (required)
:param int end: end index (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)issuer<br/>5)noOfAnswers<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseQuestionList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_recommended_questions_from_db_with_http_info(user_id, start, end, logged_in_user_id, access_token, client_token, **kwargs)
else:
data = self.get_recommended_questions_from_db_with_http_info(user_id, start, end, logged_in_user_id, access_token, client_token, **kwargs)
return data
def get_recommended_questions_from_db_with_http_info(self, user_id, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of recommended questions from DB
Returns the list of recommended questions from DB
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_recommended_questions_from_db_with_http_info(user_id, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function
for asynchronous request. (optional)
:param int user_id: userId (required)
:param int start: start index; pagination starts from 0 (required)
:param int end: end index (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)issuer<br/>5)noOfAnswers<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseQuestionList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'start', 'end', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_recommended_questions_from_db" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `get_recommended_questions_from_db`")
# verify the required parameter 'start' is set
if ('start' not in params) or (params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `get_recommended_questions_from_db`")
# verify the required parameter 'end' is set
if ('end' not in params) or (params['end'] is None):
raise ValueError("Missing the required parameter `end` when calling `get_recommended_questions_from_db`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `get_recommended_questions_from_db`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_recommended_questions_from_db`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `get_recommended_questions_from_db`")
collection_formats = {}
resource_path = '/questions/{userId}/recommendedQuestions'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'start' in params:
query_params['start'] = params['start']
if 'end' in params:
query_params['end'] = params['end']
if 'fields' in params:
query_params['fields'] = params['fields']
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseQuestionList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
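# Usage sketch (illustrative, not part of the generated client): assumes `api`
# is a configured instance of this API class and that the token values below
# are placeholders obtained through the normal authentication flow.
#
#   result = api.get_recommended_questions_from_db(
#       user_id=101, start=0, end=9,
#       logged_in_user_id='101',
#       access_token='ACCESS_TOKEN', client_token='CLIENT_TOKEN')
#   # Synchronous call: `result` is a VerveResponseQuestionList.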
def get_recommended_users_from_db(self, question_id, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of recommended users from DB
Returns the list of recommended users from DB
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_recommended_users_from_db(question_id, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function for asynchronous request. (optional)
:param int question_id: questionId (required)
:param int start: start index; the initial value starts from 0 (required)
:param int end: end index (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)userId<br/>2)firstName<br/>3)lastName<br/>4)profileImage<br/><b>B) Available values -</b><br/>1)userId<br/>2)firstName<br/>3)lastName<br/>4)emailId<br/>5)profileImage<br/>6)birthDate<br/>7)currentUserFollowing<br/>8)currentUserFriend<br/>9)equityScore
:return: VerveResponseUserList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_recommended_users_from_db_with_http_info(question_id, start, end, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.get_recommended_users_from_db_with_http_info(question_id, start, end, logged_in_user_id, access_token, client_token, **kwargs)
return data
def get_recommended_users_from_db_with_http_info(self, question_id, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of recommended users from DB
Returns the list of recommended users from DB
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_recommended_users_from_db_with_http_info(question_id, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function for asynchronous request. (optional)
:param int question_id: questionId (required)
:param int start: start index; the initial value starts from 0 (required)
:param int end: end index (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)userId<br/>2)firstName<br/>3)lastName<br/>4)profileImage<br/><b>B) Available values -</b><br/>1)userId<br/>2)firstName<br/>3)lastName<br/>4)emailId<br/>5)profileImage<br/>6)birthDate<br/>7)currentUserFollowing<br/>8)currentUserFriend<br/>9)equityScore
:return: VerveResponseUserList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['question_id', 'start', 'end', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_recommended_users_from_db" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'question_id' is set
if ('question_id' not in params) or (params['question_id'] is None):
raise ValueError("Missing the required parameter `question_id` when calling `get_recommended_users_from_db`")
# verify the required parameter 'start' is set
if ('start' not in params) or (params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `get_recommended_users_from_db`")
# verify the required parameter 'end' is set
if ('end' not in params) or (params['end'] is None):
raise ValueError("Missing the required parameter `end` when calling `get_recommended_users_from_db`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `get_recommended_users_from_db`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_recommended_users_from_db`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `get_recommended_users_from_db`")
collection_formats = {}
resource_path = '/questions/{questionId}/recommendedUsers'.replace('{format}', 'json')
path_params = {}
if 'question_id' in params:
path_params['questionId'] = params['question_id']
query_params = {}
if 'start' in params:
query_params['start'] = params['start']
if 'end' in params:
query_params['end'] = params['end']
if 'fields' in params:
query_params['fields'] = params['fields']
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseUserList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
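# Async usage sketch (illustrative): per the docstring above, passing a
# `callback` makes the method return the request thread instead of the
# response object; all identifier and token values below are placeholders.
#
#   def on_users(response):
#       print(response)  # VerveResponseUserList
#
#   thread = api.get_recommended_users_from_db(
#       question_id=42, start=0, end=9,
#       logged_in_user_id='101',
#       access_token='ACCESS_TOKEN', client_token='CLIENT_TOKEN',
#       callback=on_users)
#   thread.join()  # assuming a standard thread object, wait for completion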
def get_user_questions(self, user_id, question_status, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of questions shared by user
Returns the list of questions shared by a specific user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_user_questions(user_id, question_status, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function for asynchronous request. (optional)
:param int user_id: userId (required)
:param str question_status: Question status <br/> 1) ALL <br/> 2) UNREPLIED <br/> 3) REPLIED <br/> 4) CLOSED (required)
:param int start: start index; the initial value starts from 0 (required)
:param int end: end index (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param int category_id: categoryId (optional)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)issuer<br/>5)noOfAnswers<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseQuestionList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_user_questions_with_http_info(user_id, question_status, start, end, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.get_user_questions_with_http_info(user_id, question_status, start, end, logged_in_user_id, access_token, client_token, **kwargs)
return data
def get_user_questions_with_http_info(self, user_id, question_status, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of questions shared by user
Returns the list of questions shared by a specific user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_user_questions_with_http_info(user_id, question_status, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function for asynchronous request. (optional)
:param int user_id: userId (required)
:param str question_status: Question status <br/> 1) ALL <br/> 2) UNREPLIED <br/> 3) REPLIED <br/> 4) CLOSED (required)
:param int start: start index; the initial value starts from 0 (required)
:param int end: end index (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param int category_id: categoryId (optional)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)issuer<br/>5)noOfAnswers<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseQuestionList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'question_status', 'start', 'end', 'logged_in_user_id', 'access_token', 'client_token', 'category_id', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_user_questions" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `get_user_questions`")
# verify the required parameter 'question_status' is set
if ('question_status' not in params) or (params['question_status'] is None):
raise ValueError("Missing the required parameter `question_status` when calling `get_user_questions`")
# verify the required parameter 'start' is set
if ('start' not in params) or (params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `get_user_questions`")
# verify the required parameter 'end' is set
if ('end' not in params) or (params['end'] is None):
raise ValueError("Missing the required parameter `end` when calling `get_user_questions`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `get_user_questions`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_user_questions`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `get_user_questions`")
collection_formats = {}
resource_path = '/questions/{userId}/shared'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'question_status' in params:
query_params['questionStatus'] = params['question_status']
if 'category_id' in params:
query_params['categoryId'] = params['category_id']
if 'start' in params:
query_params['start'] = params['start']
if 'end' in params:
query_params['end'] = params['end']
if 'fields' in params:
query_params['fields'] = params['fields']
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseQuestionList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
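# Usage sketch (illustrative, placeholder values): `question_status` must be
# one of ALL, UNREPLIED, REPLIED, CLOSED; the optional `category_id` narrows
# the result to one category.
#
#   replied = api.get_user_questions(
#       user_id=101, question_status='REPLIED', start=0, end=9,
#       logged_in_user_id='101',
#       access_token='ACCESS_TOKEN', client_token='CLIENT_TOKEN',
#       category_id=7)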
def get_user_subscribed_question_categories(self, user_id, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of question categories subscribed by the user
Returns the list of question categories subscribed by the user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_user_subscribed_question_categories(user_id, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function for asynchronous request. (optional)
:param int user_id: userId (required)
:param int start: start index; the initial value starts from 0 (required)
:param int end: end index (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2)categoryName<br/><b>B) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseQuestionCategoryList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_user_subscribed_question_categories_with_http_info(user_id, start, end, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.get_user_subscribed_question_categories_with_http_info(user_id, start, end, logged_in_user_id, access_token, client_token, **kwargs)
return data
def get_user_subscribed_question_categories_with_http_info(self, user_id, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of question categories subscribed by the user
Returns the list of question categories subscribed by the user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_user_subscribed_question_categories_with_http_info(user_id, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function for asynchronous request. (optional)
:param int user_id: userId (required)
:param int start: start index; the initial value starts from 0 (required)
:param int end: end index (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2)categoryName<br/><b>B) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseQuestionCategoryList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'start', 'end', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_user_subscribed_question_categories" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `get_user_subscribed_question_categories`")
# verify the required parameter 'start' is set
if ('start' not in params) or (params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `get_user_subscribed_question_categories`")
# verify the required parameter 'end' is set
if ('end' not in params) or (params['end'] is None):
raise ValueError("Missing the required parameter `end` when calling `get_user_subscribed_question_categories`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `get_user_subscribed_question_categories`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_user_subscribed_question_categories`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `get_user_subscribed_question_categories`")
collection_formats = {}
resource_path = '/questions/categories/{userId}/subscribe'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'start' in params:
query_params['start'] = params['start']
if 'end' in params:
query_params['end'] = params['end']
if 'fields' in params:
query_params['fields'] = params['fields']
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseQuestionCategoryList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
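# Usage sketch (illustrative, placeholder values): `_request_timeout` is one
# of the generic kwargs accepted by every method in this client (see
# `all_params` above); its exact semantics are defined by the api_client.
#
#   categories = api.get_user_subscribed_question_categories(
#       user_id=101, start=0, end=24,
#       logged_in_user_id='101',
#       access_token='ACCESS_TOKEN', client_token='CLIENT_TOKEN',
#       _request_timeout=10)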
def get_user_subscribed_questions(self, user_id, question_status, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of questions subscribed by user
Returns the list of questions subscribed by a specific user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_user_subscribed_questions(user_id, question_status, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function for asynchronous request. (optional)
:param int user_id: userId (required)
:param str question_status: Question status <br/> 1) ALL <br/> 2) UNREPLIED <br/> 3) REPLIED <br/> 4) CLOSED (required)
:param int start: start index; the initial value starts from 0 (required)
:param int end: end index (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param int category_id: categoryId (optional)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)issuer<br/>5)noOfAnswers<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseQuestionList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_user_subscribed_questions_with_http_info(user_id, question_status, start, end, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.get_user_subscribed_questions_with_http_info(user_id, question_status, start, end, logged_in_user_id, access_token, client_token, **kwargs)
return data
def get_user_subscribed_questions_with_http_info(self, user_id, question_status, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of questions subscribed by user
Returns the list of questions subscribed by a specific user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_user_subscribed_questions_with_http_info(user_id, question_status, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function for asynchronous request. (optional)
:param int user_id: userId (required)
:param str question_status: Question status <br/> 1) ALL <br/> 2) UNREPLIED <br/> 3) REPLIED <br/> 4) CLOSED (required)
:param int start: start index; the initial value starts from 0 (required)
:param int end: end index (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param int category_id: categoryId (optional)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)issuer<br/>5)noOfAnswers<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseQuestionList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'question_status', 'start', 'end', 'logged_in_user_id', 'access_token', 'client_token', 'category_id', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_user_subscribed_questions" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `get_user_subscribed_questions`")
# verify the required parameter 'question_status' is set
if ('question_status' not in params) or (params['question_status'] is None):
raise ValueError("Missing the required parameter `question_status` when calling `get_user_subscribed_questions`")
# verify the required parameter 'start' is set
if ('start' not in params) or (params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `get_user_subscribed_questions`")
# verify the required parameter 'end' is set
if ('end' not in params) or (params['end'] is None):
raise ValueError("Missing the required parameter `end` when calling `get_user_subscribed_questions`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `get_user_subscribed_questions`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_user_subscribed_questions`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `get_user_subscribed_questions`")
collection_formats = {}
resource_path = '/questions/{userId}/subscribe'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'question_status' in params:
query_params['questionStatus'] = params['question_status']
if 'category_id' in params:
query_params['categoryId'] = params['category_id']
if 'start' in params:
query_params['start'] = params['start']
if 'end' in params:
query_params['end'] = params['end']
if 'fields' in params:
query_params['fields'] = params['fields']
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseQuestionList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
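# Usage sketch (illustrative, placeholder values): the `fields` filter is a
# plain string forwarded as a query parameter; the accepted format is defined
# by the server-side API, so the value below is only an assumption.
#
#   subscribed = api.get_user_subscribed_questions(
#       user_id=101, question_status='ALL', start=0, end=9,
#       logged_in_user_id='101',
#       access_token='ACCESS_TOKEN', client_token='CLIENT_TOKEN')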
def like_answer(self, question_id, answer_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Like answer
Allows the user to like the answer.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.like_answer(question_id, answer_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function for asynchronous request. (optional)
:param int question_id: questionId (required)
:param int answer_id: answerId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/><b>B) Available values -</b><br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/>4)questionId<br/>5)answeringUser<br/>6)isMarkedAnswer<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseAnswer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.like_answer_with_http_info(question_id, answer_id, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.like_answer_with_http_info(question_id, answer_id, logged_in_user_id, access_token, client_token, **kwargs)
return data
def like_answer_with_http_info(self, question_id, answer_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Like answer
Allows the user to like the answer.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.like_answer_with_http_info(question_id, answer_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function for asynchronous request. (optional)
:param int question_id: questionId (required)
:param int answer_id: answerId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/><b>B) Available values -</b><br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/>4)questionId<br/>5)answeringUser<br/>6)isMarkedAnswer<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseAnswer
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['question_id', 'answer_id', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method like_answer" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'question_id' is set
if ('question_id' not in params) or (params['question_id'] is None):
raise ValueError("Missing the required parameter `question_id` when calling `like_answer`")
# verify the required parameter 'answer_id' is set
if ('answer_id' not in params) or (params['answer_id'] is None):
raise ValueError("Missing the required parameter `answer_id` when calling `like_answer`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `like_answer`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `like_answer`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `like_answer`")
collection_formats = {}
resource_path = '/questions/{questionId}/answers/{answerId}/like'.replace('{format}', 'json')
path_params = {}
if 'question_id' in params:
path_params['questionId'] = params['question_id']
if 'answer_id' in params:
path_params['answerId'] = params['answer_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseAnswer',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
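# Usage sketch (illustrative, placeholder values): note that like_answer is a
# POST and, unlike the GET endpoints above, sends `fields` as a form
# parameter rather than a query parameter.
#
#   liked = api.like_answer(
#       question_id=42, answer_id=7,
#       logged_in_user_id='101',
#       access_token='ACCESS_TOKEN', client_token='CLIENT_TOKEN')
#   # `liked` is a VerveResponseAnswer.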
def mark_as_an_answer(self, question_id, answer_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Mark an answer as the accepted answer
Marks the answer as accepted, meaning the user is satisfied with the answer; the question then goes into the closed state
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.mark_as_an_answer(question_id, answer_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function for asynchronous request. (optional)
:param int question_id: questionId (required)
:param int answer_id: answerId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/><b>B) Available values -</b><br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/>4)questionId<br/>5)answeringUser<br/>6)isMarkedAnswer<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseAnswer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.mark_as_an_answer_with_http_info(question_id, answer_id, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.mark_as_an_answer_with_http_info(question_id, answer_id, logged_in_user_id, access_token, client_token, **kwargs)
return data
def mark_as_an_answer_with_http_info(self, question_id, answer_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Mark an answer as the accepted answer
Marks the answer as accepted, meaning the user is satisfied with the answer; the question then goes into the closed state
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.mark_as_an_answer_with_http_info(question_id, answer_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function for asynchronous request. (optional)
:param int question_id: questionId (required)
:param int answer_id: answerId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/><b>B) Available values -</b><br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/>4)questionId<br/>5)answeringUser<br/>6)isMarkedAnswer<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseAnswer
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['question_id', 'answer_id', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method mark_as_an_answer" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'question_id' is set
if ('question_id' not in params) or (params['question_id'] is None):
raise ValueError("Missing the required parameter `question_id` when calling `mark_as_an_answer`")
# verify the required parameter 'answer_id' is set
if ('answer_id' not in params) or (params['answer_id'] is None):
raise ValueError("Missing the required parameter `answer_id` when calling `mark_as_an_answer`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `mark_as_an_answer`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `mark_as_an_answer`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `mark_as_an_answer`")
collection_formats = {}
resource_path = '/questions/{questionId}/answers/{answerId}/mark'.replace('{format}', 'json')
path_params = {}
if 'question_id' in params:
path_params['questionId'] = params['question_id']
if 'answer_id' in params:
path_params['answerId'] = params['answer_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseAnswer',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
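# Usage sketch (illustrative, placeholder values): per the docstring above,
# accepting an answer moves the question into the closed state.
#
#   accepted = api.mark_as_an_answer(
#       question_id=42, answer_id=7,
#       logged_in_user_id='101',
#       access_token='ACCESS_TOKEN', client_token='CLIENT_TOKEN')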
def search_questions(self, search_text, question_status, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of matching questions
Returns the list of matching questions
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.search_questions(search_text, question_status, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function for asynchronous request. (optional)
:param str search_text: Search text; keywords to search for (required)
:param str question_status: Question status <br/> 1) ALL <br/> 2) UNREPLIED <br/> 3) REPLIED <br/> 4) CLOSED (required)
:param int start: start index; the initial value starts from 0 (required)
:param int end: end index (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)issuer<br/>5)noOfAnswers<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseQuestionList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.search_questions_with_http_info(search_text, question_status, start, end, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.search_questions_with_http_info(search_text, question_status, start, end, logged_in_user_id, access_token, client_token, **kwargs)
return data
def search_questions_with_http_info(self, search_text, question_status, start, end, logged_in_user_id, access_token, client_token, **kwargs):
"""
Get list of matching questions
Returns the list of matching questions
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.search_questions_with_http_info(search_text, question_status, start, end, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function for asynchronous request. (optional)
:param str search_text: Search text; keywords to search for (required)
:param str question_status: Question status <br/> 1) ALL <br/> 2) UNREPLIED <br/> 3) REPLIED <br/> 4) CLOSED (required)
:param int start: start index; the initial value starts from 0 (required)
:param int end: end index (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)issuer<br/>5)noOfAnswers<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseQuestionList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['search_text', 'question_status', 'start', 'end', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_questions" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'search_text' is set
if ('search_text' not in params) or (params['search_text'] is None):
raise ValueError("Missing the required parameter `search_text` when calling `search_questions`")
# verify the required parameter 'question_status' is set
if ('question_status' not in params) or (params['question_status'] is None):
raise ValueError("Missing the required parameter `question_status` when calling `search_questions`")
# verify the required parameter 'start' is set
if ('start' not in params) or (params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `search_questions`")
# verify the required parameter 'end' is set
if ('end' not in params) or (params['end'] is None):
raise ValueError("Missing the required parameter `end` when calling `search_questions`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `search_questions`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `search_questions`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `search_questions`")
collection_formats = {}
resource_path = '/questions/search'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'search_text' in params:
query_params['searchText'] = params['search_text']
if 'question_status' in params:
query_params['questionStatus'] = params['question_status']
if 'start' in params:
query_params['start'] = params['start']
if 'end' in params:
query_params['end'] = params['end']
if 'fields' in params:
query_params['fields'] = params['fields']
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseQuestionList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
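# Usage sketch (illustrative, placeholder values): the search term is sent in
# the `searchText` query parameter built above.
#
#   results = api.search_questions(
#       search_text='payments', question_status='ALL', start=0, end=9,
#       logged_in_user_id='101',
#       access_token='ACCESS_TOKEN', client_token='CLIENT_TOKEN')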
def subscribe_question(self, question_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Subscribe question
Allows the user to subscribe to a question
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.subscribe_question(question_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function for asynchronous request. (optional)
:param int question_id: questionId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)issuer<br/>5)noOfAnswers<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseQuestion
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.subscribe_question_with_http_info(question_id, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.subscribe_question_with_http_info(question_id, logged_in_user_id, access_token, client_token, **kwargs)
return data
def subscribe_question_with_http_info(self, question_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Subscribe question
Allows the user to subscribe to a question
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.subscribe_question_with_http_info(question_id, logged_in_user_id, access_token, client_token, callback=callback_function)
:param function callback: The callback function for asynchronous request. (optional)
:param int question_id: questionId (required)
:param str logged_in_user_id: User id of the logged-in / authenticated user (required)
:param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
:param str client_token: Client Token. Generate it from the Applications section under the Production & Sandbox tabs (required)
:param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)issuer<br/>5)noOfAnswers<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseQuestion
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['question_id', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method subscribe_question" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'question_id' is set
if ('question_id' not in params) or (params['question_id'] is None):
raise ValueError("Missing the required parameter `question_id` when calling `subscribe_question`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `subscribe_question`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `subscribe_question`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `subscribe_question`")
collection_formats = {}
resource_path = '/questions/{questionId}/subscribe'.replace('{format}', 'json')
path_params = {}
if 'question_id' in params:
path_params['questionId'] = params['question_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseQuestion',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def subscribe_question_category(self, category_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Subscribe question category
Returns the subscribed question category
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.subscribe_question_category(category_id, logged_in_user_id, access_token, client_token, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
        :param int category_id: categoryId (required)
        :param str logged_in_user_id: User id of the logged-in / authenticated user (required)
        :param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
        :param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2)categoryName<br/><b>B) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseQuestionCategory
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.subscribe_question_category_with_http_info(category_id, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.subscribe_question_category_with_http_info(category_id, logged_in_user_id, access_token, client_token, **kwargs)
return data
def subscribe_question_category_with_http_info(self, category_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Subscribe question category
Returns the subscribed question category
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.subscribe_question_category_with_http_info(category_id, logged_in_user_id, access_token, client_token, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
        :param int category_id: categoryId (required)
        :param str logged_in_user_id: User id of the logged-in / authenticated user (required)
        :param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
        :param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2)categoryName<br/><b>B) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseQuestionCategory
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['category_id', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method subscribe_question_category" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'category_id' is set
if ('category_id' not in params) or (params['category_id'] is None):
raise ValueError("Missing the required parameter `category_id` when calling `subscribe_question_category`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `subscribe_question_category`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `subscribe_question_category`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `subscribe_question_category`")
collection_formats = {}
resource_path = '/questions/categories/{categoryId}/subscribe'.replace('{format}', 'json')
path_params = {}
if 'category_id' in params:
path_params['categoryId'] = params['category_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseQuestionCategory',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def unmark_as_an_answer(self, question_id, answer_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
        Unmark answer as an answer
Unmarks the answer. This will remove the marked answer.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.unmark_as_an_answer(question_id, answer_id, logged_in_user_id, access_token, client_token, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
        :param int question_id: questionId (required)
        :param int answer_id: answerId (required)
        :param str logged_in_user_id: User id of the logged-in / authenticated user (required)
        :param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
        :param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/><b>B) Available values -</b><br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/>4)questionId<br/>5)answeringUser<br/>6)isMarkedAnswer<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseAnswer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.unmark_as_an_answer_with_http_info(question_id, answer_id, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.unmark_as_an_answer_with_http_info(question_id, answer_id, logged_in_user_id, access_token, client_token, **kwargs)
return data
def unmark_as_an_answer_with_http_info(self, question_id, answer_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
        Unmark answer as an answer
Unmarks the answer. This will remove the marked answer.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.unmark_as_an_answer_with_http_info(question_id, answer_id, logged_in_user_id, access_token, client_token, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
        :param int question_id: questionId (required)
        :param int answer_id: answerId (required)
        :param str logged_in_user_id: User id of the logged-in / authenticated user (required)
        :param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
        :param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/><b>B) Available values -</b><br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/>4)questionId<br/>5)answeringUser<br/>6)isMarkedAnswer<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseAnswer
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['question_id', 'answer_id', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method unmark_as_an_answer" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'question_id' is set
if ('question_id' not in params) or (params['question_id'] is None):
raise ValueError("Missing the required parameter `question_id` when calling `unmark_as_an_answer`")
# verify the required parameter 'answer_id' is set
if ('answer_id' not in params) or (params['answer_id'] is None):
raise ValueError("Missing the required parameter `answer_id` when calling `unmark_as_an_answer`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `unmark_as_an_answer`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `unmark_as_an_answer`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `unmark_as_an_answer`")
collection_formats = {}
resource_path = '/questions/{questionId}/answers/{answerId}/unmark'.replace('{format}', 'json')
path_params = {}
if 'question_id' in params:
path_params['questionId'] = params['question_id']
if 'answer_id' in params:
path_params['answerId'] = params['answer_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseAnswer',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def unsubscribe_question(self, question_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Unsubscribe question
        Allows the user to unsubscribe from a question
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.unsubscribe_question(question_id, logged_in_user_id, access_token, client_token, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
        :param int question_id: questionId (required)
        :param str logged_in_user_id: User id of the logged-in / authenticated user (required)
        :param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
        :param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)issuer<br/>5)noOfAnswers<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseQuestion
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.unsubscribe_question_with_http_info(question_id, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.unsubscribe_question_with_http_info(question_id, logged_in_user_id, access_token, client_token, **kwargs)
return data
def unsubscribe_question_with_http_info(self, question_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Unsubscribe question
        Allows the user to unsubscribe from a question
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.unsubscribe_question_with_http_info(question_id, logged_in_user_id, access_token, client_token, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
        :param int question_id: questionId (required)
        :param str logged_in_user_id: User id of the logged-in / authenticated user (required)
        :param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
        :param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)issuer<br/>5)noOfAnswers<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseQuestion
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['question_id', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method unsubscribe_question" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'question_id' is set
if ('question_id' not in params) or (params['question_id'] is None):
raise ValueError("Missing the required parameter `question_id` when calling `unsubscribe_question`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `unsubscribe_question`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `unsubscribe_question`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `unsubscribe_question`")
collection_formats = {}
resource_path = '/questions/{questionId}/unsubscribe'.replace('{format}', 'json')
path_params = {}
if 'question_id' in params:
path_params['questionId'] = params['question_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseQuestion',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def unsubscribe_question_category(self, category_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Unsubscribe question category
Returns the unsubscribed question category
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.unsubscribe_question_category(category_id, logged_in_user_id, access_token, client_token, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
        :param int category_id: categoryId (required)
        :param str logged_in_user_id: User id of the logged-in / authenticated user (required)
        :param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
        :param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2)categoryName<br/><b>B) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseQuestionCategory
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.unsubscribe_question_category_with_http_info(category_id, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.unsubscribe_question_category_with_http_info(category_id, logged_in_user_id, access_token, client_token, **kwargs)
return data
def unsubscribe_question_category_with_http_info(self, category_id, logged_in_user_id, access_token, client_token, **kwargs):
"""
Unsubscribe question category
Returns the unsubscribed question category
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.unsubscribe_question_category_with_http_info(category_id, logged_in_user_id, access_token, client_token, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
        :param int category_id: categoryId (required)
        :param str logged_in_user_id: User id of the logged-in / authenticated user (required)
        :param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
        :param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2)categoryName<br/><b>B) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseQuestionCategory
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['category_id', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method unsubscribe_question_category" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'category_id' is set
if ('category_id' not in params) or (params['category_id'] is None):
raise ValueError("Missing the required parameter `category_id` when calling `unsubscribe_question_category`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `unsubscribe_question_category`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `unsubscribe_question_category`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `unsubscribe_question_category`")
collection_formats = {}
resource_path = '/questions/categories/{categoryId}/unsubscribe'.replace('{format}', 'json')
path_params = {}
if 'category_id' in params:
path_params['categoryId'] = params['category_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseQuestionCategory',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_answer(self, answer_id, answer, logged_in_user_id, access_token, client_token, **kwargs):
"""
Update answer
Allows the user to update an answer. Returns the updated answer
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_answer(answer_id, answer, logged_in_user_id, access_token, client_token, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
        :param int answer_id: answerId (required)
        :param str answer: Answer text (required)
        :param str logged_in_user_id: User id of the logged-in / authenticated user (required)
        :param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
        :param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/><b>B) Available values -</b><br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/>4)questionId<br/>5)answeringUser<br/>6)isMarkedAnswer<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseAnswer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_answer_with_http_info(answer_id, answer, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.update_answer_with_http_info(answer_id, answer, logged_in_user_id, access_token, client_token, **kwargs)
return data
def update_answer_with_http_info(self, answer_id, answer, logged_in_user_id, access_token, client_token, **kwargs):
"""
Update answer
Allows the user to update an answer. Returns the updated answer
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_answer_with_http_info(answer_id, answer, logged_in_user_id, access_token, client_token, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
        :param int answer_id: answerId (required)
        :param str answer: Answer text (required)
        :param str logged_in_user_id: User id of the logged-in / authenticated user (required)
        :param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
        :param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/><b>B) Available values -</b><br/>1)answerId<br/>2)answerDescription<br/>3)createdDate<br/>4)questionId<br/>5)answeringUser<br/>6)isMarkedAnswer<br/>7)noOfLikes<br/>8)noOfDislikes<br/>9)replyCount<br/>10)isLiked<br/>11)isDisliked
:return: VerveResponseAnswer
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['answer_id', 'answer', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_answer" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'answer_id' is set
if ('answer_id' not in params) or (params['answer_id'] is None):
raise ValueError("Missing the required parameter `answer_id` when calling `update_answer`")
# verify the required parameter 'answer' is set
if ('answer' not in params) or (params['answer'] is None):
raise ValueError("Missing the required parameter `answer` when calling `update_answer`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `update_answer`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `update_answer`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `update_answer`")
collection_formats = {}
resource_path = '/questions/answers/{answerId}'.replace('{format}', 'json')
path_params = {}
if 'answer_id' in params:
path_params['answerId'] = params['answer_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'answer' in params:
form_params.append(('answer', params['answer']))
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseAnswer',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_question(self, question_id, question_title, question_description, logged_in_user_id, access_token, client_token, **kwargs):
"""
Update question
        Allows the user to update a question. Returns the updated question
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_question(question_id, question_title, question_description, logged_in_user_id, access_token, client_token, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
        :param int question_id: questionId (required)
        :param str question_title: Question title (required)
        :param str question_description: Question description (required)
        :param str logged_in_user_id: User id of the logged-in / authenticated user (required)
        :param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
        :param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)issuer<br/>5)noOfAnswers<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseQuestion
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_question_with_http_info(question_id, question_title, question_description, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.update_question_with_http_info(question_id, question_title, question_description, logged_in_user_id, access_token, client_token, **kwargs)
return data
def update_question_with_http_info(self, question_id, question_title, question_description, logged_in_user_id, access_token, client_token, **kwargs):
"""
Update question
        Allows the user to update a question. Returns the updated question
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_question_with_http_info(question_id, question_title, question_description, logged_in_user_id, access_token, client_token, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
        :param int question_id: questionId (required)
        :param str question_title: Question title (required)
        :param str question_description: Question description (required)
        :param str logged_in_user_id: User id of the logged-in / authenticated user (required)
        :param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
        :param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)createdDate<br/><b>B) Available values -</b><br/>1)questionId<br/>2)questionTitle<br/>3)questionDescription<br/>4)issuer<br/>5)noOfAnswers<br/>6)isClosed<br/>7)createdDate<br/>8)lastUpdatedDate<br/>9)videoId<br/>10)fileURL<br/>11)isSubscribed<br/>12)sentiment<br/>13)entity
:return: VerveResponseQuestion
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['question_id', 'question_title', 'question_description', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_question" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'question_id' is set
if ('question_id' not in params) or (params['question_id'] is None):
raise ValueError("Missing the required parameter `question_id` when calling `update_question`")
# verify the required parameter 'question_title' is set
if ('question_title' not in params) or (params['question_title'] is None):
raise ValueError("Missing the required parameter `question_title` when calling `update_question`")
# verify the required parameter 'question_description' is set
if ('question_description' not in params) or (params['question_description'] is None):
raise ValueError("Missing the required parameter `question_description` when calling `update_question`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `update_question`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `update_question`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `update_question`")
collection_formats = {}
resource_path = '/questions/{questionId}'.replace('{format}', 'json')
path_params = {}
if 'question_id' in params:
path_params['questionId'] = params['question_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'question_title' in params:
form_params.append(('questionTitle', params['question_title']))
if 'question_description' in params:
form_params.append(('questionDescription', params['question_description']))
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseQuestion',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_question_category(self, category_id, category_name, category_description, logged_in_user_id, access_token, client_token, **kwargs):
"""
Update question category
Allows the user to update the question category. Returns the updated question category
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_question_category(category_id, category_name, category_description, logged_in_user_id, access_token, client_token, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
        :param int category_id: categoryId (required)
        :param str category_name: Category name (required)
        :param str category_description: Category description (required)
        :param str logged_in_user_id: User id of the logged-in / authenticated user (required)
        :param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
        :param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2)categoryName<br/><b>B) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseQuestionCategory
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_question_category_with_http_info(category_id, category_name, category_description, logged_in_user_id, access_token, client_token, **kwargs)
else:
(data) = self.update_question_category_with_http_info(category_id, category_name, category_description, logged_in_user_id, access_token, client_token, **kwargs)
return data
def update_question_category_with_http_info(self, category_id, category_name, category_description, logged_in_user_id, access_token, client_token, **kwargs):
"""
Update question category
Allows the user to update the question category. Returns the updated question category
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_question_category_with_http_info(category_id, category_name, category_description, logged_in_user_id, access_token, client_token, callback=callback_function)
        :param function callback: The callback function
            for asynchronous request. (optional)
        :param int category_id: categoryId (required)
        :param str category_name: Category name (required)
        :param str category_description: Category description (required)
        :param str logged_in_user_id: User id of the logged-in / authenticated user (required)
        :param str access_token: Unique session token for the user. To get an access token, the user must authenticate (required)
        :param str client_token: Client token. Generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list<br/> <b>A) Default values -</b> <br/>1)categoryId<br/>2)categoryName<br/><b>B) Available values -</b> <br/>1)categoryId<br/>2)categoryName<br/>3)categoryDescription<br/>4)createdDate<br/>5)isSubscribed
:return: VerveResponseQuestionCategory
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['category_id', 'category_name', 'category_description', 'logged_in_user_id', 'access_token', 'client_token', 'fields']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_question_category" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'category_id' is set
if ('category_id' not in params) or (params['category_id'] is None):
raise ValueError("Missing the required parameter `category_id` when calling `update_question_category`")
# verify the required parameter 'category_name' is set
if ('category_name' not in params) or (params['category_name'] is None):
raise ValueError("Missing the required parameter `category_name` when calling `update_question_category`")
# verify the required parameter 'category_description' is set
if ('category_description' not in params) or (params['category_description'] is None):
raise ValueError("Missing the required parameter `category_description` when calling `update_question_category`")
# verify the required parameter 'logged_in_user_id' is set
if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None):
raise ValueError("Missing the required parameter `logged_in_user_id` when calling `update_question_category`")
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `update_question_category`")
# verify the required parameter 'client_token' is set
if ('client_token' not in params) or (params['client_token'] is None):
raise ValueError("Missing the required parameter `client_token` when calling `update_question_category`")
collection_formats = {}
resource_path = '/questions/categories/{categoryId}'.replace('{format}', 'json')
path_params = {}
if 'category_id' in params:
path_params['categoryId'] = params['category_id']
query_params = {}
header_params = {}
if 'logged_in_user_id' in params:
header_params['loggedInUserId'] = params['logged_in_user_id']
if 'access_token' in params:
header_params['accessToken'] = params['access_token']
if 'client_token' in params:
header_params['clientToken'] = params['client_token']
form_params = []
local_var_files = {}
if 'category_name' in params:
form_params.append(('categoryName', params['category_name']))
if 'category_description' in params:
form_params.append(('categoryDescription', params['category_description']))
if 'fields' in params:
form_params.append(('fields', params['fields']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['default']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VerveResponseQuestionCategory',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
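# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not generated code). The names ``QuestionApi``
# and ``ApiClient`` follow the usual swagger-codegen layout and are
# assumptions here, as are all argument values:
#
#     from pprint import pprint
#     from iengage_client import ApiClient
#     from iengage_client.apis.question_api import QuestionApi
#
#     api = QuestionApi(ApiClient())
#
#     # Synchronous by default: returns the deserialized response object
#     question = api.subscribe_question(
#         42, 'user-1', access_token, client_token,
#         fields='questionId,isSubscribed')
#
#     # Passing callback= makes the call asynchronous: the method returns
#     # the request thread and hands the response to the callback
#     thread = api.unsubscribe_question(
#         42, 'user-1', access_token, client_token, callback=pprint)
# ---------------------------------------------------------------------------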
| iEngage/python-sdk | iengage_client/apis/question_api.py | Python | apache-2.0 | 248,935 |
# -*- coding: ascii -*-
#
# Copyright 2007, 2008, 2009, 2010, 2011
# Andr\xe9 Malo or his licensors, as applicable
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
===================
C extension tools
===================
C extension tools.
"""
__author__ = "Andr\xe9 Malo"
__docformat__ = "restructuredtext en"
__test__ = False
from distutils import core as _core
from distutils import errors as _distutils_errors
from distutils import log
import os as _os
import posixpath as _posixpath
import shutil as _shutil
import tempfile as _tempfile
from _setup import commands as _commands
def _install_finalizer(installer):
if installer.without_c_extensions:
installer.distribution.ext_modules = []
def _build_finalizer(builder):
if builder.without_c_extensions:
builder.extensions = []
class Extension(_core.Extension):
"""
Extension with prerequisite check interface
If your check is cacheable (during the setup run), override
`cached_check_prerequisites`, `check_prerequisites` otherwise.
:IVariables:
`cached_check` : ``bool``
The cached check result
"""
cached_check = None
def __init__(self, *args, **kwargs):
""" Initialization """
if 'depends' in kwargs:
self.depends = kwargs['depends'] or []
else:
self.depends = []
_core.Extension.__init__(self, *args, **kwargs)
# add include path
included = _posixpath.join('_setup', 'include')
if included not in self.include_dirs:
self.include_dirs.append(included)
# add cext.h to the dependencies
cext_h = _posixpath.join(included, 'cext.h')
if cext_h not in self.depends:
self.depends.append(cext_h)
_commands.add_option('install_lib', 'without-c-extensions',
help_text='Don\'t install C extensions',
inherit='install',
)
_commands.add_finalizer('install_lib', 'c-extensions',
_install_finalizer
)
_commands.add_option('build_ext', 'without-c-extensions',
help_text='Don\'t build C extensions',
inherit=('build', 'install_lib'),
)
_commands.add_finalizer('build_ext', 'c-extensions', _build_finalizer)
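    # Effect of the registrations above (sketch; assumes the project's
    # setup.py routes its commands through ``_setup.commands``): both the
    # build and the install step gain a switch that drops the extension
    # list via the finalizers defined at module level, e.g.
    #
    #     python setup.py build_ext --without-c-extensions
    #     python setup.py install --without-c-extensions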
def check_prerequisites(self, build):
"""
Check prerequisites
The check should cover all dependencies needed for the extension to
be built and run. The method can do the following:
- return a false value: the extension will be built
- return a true value: the extension will be skipped. This is useful
for optional extensions
- raise an exception. This is useful for mandatory extensions
If the check result is cacheable (during the setup run), override
`cached_check_prerequisites` instead.
:Parameters:
`build` : `BuildExt`
The extension builder
:Return: Skip the extension?
:Rtype: ``bool``
"""
if self.cached_check is None:
log.debug("PREREQ check for %s" % self.name)
self.cached_check = self.cached_check_prerequisites(build)
else:
log.debug("PREREQ check for %s (cached)" % self.name)
return self.cached_check
def cached_check_prerequisites(self, build):
"""
Check prerequisites
The check should cover all dependencies needed for the extension to
be built and run. The method can do the following:
- return a false value: the extension will be built
- return a true value: the extension will be skipped. This is useful
for optional extensions
- raise an exception. This is useful for mandatory extensions
If the check result is *not* cacheable (during the setup run),
override `check_prerequisites` instead.
:Parameters:
`build` : `BuildExt`
The extension builder
:Return: Skip the extension?
:Rtype: ``bool``
"""
# pylint: disable = W0613
log.debug("Nothing to check for %s!" % self.name)
return False
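# Illustrative sketch (not part of the original module): a minimal subclass
# using the cacheable hook described above. The platform policy is a
# made-up example, not a real prerequisite check.
class ExamplePosixOnlyExtension(Extension):
    """ Optional extension that is skipped on non-POSIX platforms (sketch) """
    def cached_check_prerequisites(self, build):
        """ Return a true value (= skip) everywhere except POSIX platforms """
        # pylint: disable = W0613
        return _os.name != 'posix'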
class ConfTest(object):
"""
Single conftest abstraction
:IVariables:
`_tempdir` : ``str``
The tempdir created for this test
`src` : ``str``
Name of the source file
`target` : ``str``
Target filename
`compiler` : ``CCompiler``
compiler instance
`obj` : ``list``
List of object filenames (``[str, ...]``)
"""
_tempdir = None
def __init__(self, build, source):
"""
Initialization
:Parameters:
            `build` : ``distutils.command.build_ext.build_ext``
builder instance
`source` : ``str``
Source of the file to compile
"""
self._tempdir = tempdir = _tempfile.mkdtemp()
src = _os.path.join(tempdir, 'conftest.c')
fp = open(src, 'w')
try:
fp.write(source)
finally:
fp.close()
self.src = src
self.compiler = compiler = build.compiler
self.target = _os.path.join(tempdir, 'conftest')
self.obj = compiler.object_filenames([src], output_dir=tempdir)
def __del__(self):
""" Destruction """
self.destroy()
def destroy(self):
""" Destroy the conftest leftovers on disk """
tempdir, self._tempdir = self._tempdir, None
if tempdir is not None:
_shutil.rmtree(tempdir)
def compile(self, **kwargs):
"""
Compile the conftest
:Parameters:
`kwargs` : ``dict``
Optional keyword parameters for the compiler call
:Return: Was the compilation successful?
:Rtype: ``bool``
"""
kwargs['output_dir'] = self._tempdir
try:
self.compiler.compile([self.src], **kwargs)
except _distutils_errors.CompileError:
return False
return True
def link(self, **kwargs):
r"""
Link the conftest
Before you can link the conftest objects they need to be `compile`\d.
:Parameters:
`kwargs` : ``dict``
Optional keyword parameters for the linker call
:Return: Was the linking successful?
:Rtype: ``bool``
"""
try:
self.compiler.link_executable(self.obj, self.target, **kwargs)
except _distutils_errors.LinkError:
return False
return True
def pipe(self, mode="r"):
r"""
Execute the conftest binary and connect to it using a pipe
Before you can pipe to or from the conftest binary it needs to
be `link`\ed.
:Parameters:
`mode` : ``str``
Pipe mode - r/w
:Return: The open pipe
:Rtype: ``file``
"""
return _os.popen(self.compiler.executable_filename(self.target), mode)
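# Illustrative sketch (not part of the original module): how a
# ``cached_check_prerequisites`` implementation might drive ConfTest. The
# probed function (strlcpy) is an arbitrary example; ``build`` is the
# build_ext command instance that the check receives.
def example_have_strlcpy(build):
    """ Probe for strlcpy(3) by compiling and linking a conftest (sketch) """
    test = ConfTest(build, (
        '#include <string.h>\n'
        'int main(void)\n'
        '{\n'
        '    char buf[8];\n'
        '    (void) strlcpy(buf, "x", sizeof buf);\n'
        '    return 0;\n'
        '}\n'
    ))
    try:
        # compile() and link() return booleans; failure at either step
        # means the function is unavailable on this toolchain
        return test.compile() and test.link()
    finally:
        test.destroy()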
| wontfix-org/wtf | _setup/py3/ext.py | Python | apache-2.0 | 7,479 |
# Generated by Django 1.9.13 on 2017-08-31 03:58
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('community', '0002_auto_20150416_1853'),
]
operations = [
migrations.AlterField(
model_name='post',
name='meta',
field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, default={}),
),
]
| manhhomienbienthuy/pythondotorg | community/migrations/0003_auto_20170831_0358.py | Python | apache-2.0 | 460 |
# pylint: disable=missing-docstring
import unittest
from housecanary.object import Property
from housecanary.object import Block
from housecanary.object import ZipCode
from housecanary.object import Msa
class PropertyTestCase(unittest.TestCase):
def setUp(self):
self.test_json = {
'property/value': {
'api_code_description': 'ok',
'api_code': 0,
'result': {
'price_pr': 2938.0, 'price_lwr': 2160.0, 'price_mean': 2296.0, 'fsd': 0.17
}
},
'address_info': {
'city': 'Palos Verdes Estates', 'county_fips': '06037', 'geo_precision': 'rooftop',
'block_id': '060376703241005', 'zipcode': '90274',
'address_full': '43 Valmonte Plz Palos Verdes Estates CA 90274',
'state': 'CA', 'zipcode_plus4': '1444', 'address': '43 Valmonte Plz',
'lat': 33.79814, 'lng': -118.36455,
'slug': '43-Valmonte-Plz-Palos-Verdes-Estates-CA-90274', 'unit': None
},
'meta': 'Test Meta'
}
self.prop = Property.create_from_json(self.test_json)
def test_create_from_json(self):
self.assertEqual(self.prop.address, "43 Valmonte Plz")
self.assertEqual(self.prop.county_fips, "06037")
self.assertEqual(self.prop.geo_precision, "rooftop")
self.assertEqual(self.prop.block_id, "060376703241005")
self.assertEqual(self.prop.zipcode, "90274")
self.assertEqual(self.prop.address_full, "43 Valmonte Plz Palos Verdes Estates CA 90274")
self.assertEqual(self.prop.state, "CA")
self.assertEqual(self.prop.zipcode_plus4, "1444")
self.assertEqual(self.prop.city, "Palos Verdes Estates")
self.assertEqual(self.prop.lat, 33.79814)
self.assertEqual(self.prop.lng, -118.36455)
self.assertEqual(self.prop.slug, "43-Valmonte-Plz-Palos-Verdes-Estates-CA-90274")
self.assertEqual(self.prop.unit, None)
self.assertEqual(self.prop.meta, 'Test Meta')
self.assertEqual(len(self.prop.component_results), 1)
self.assertEqual(self.prop.component_results[0].api_code, 0)
self.assertEqual(self.prop.component_results[0].api_code_description, 'ok')
self.assertEqual(self.prop.component_results[0].json_data, {
'price_pr': 2938.0, 'price_lwr': 2160.0, 'price_mean': 2296.0, 'fsd': 0.17
})
def test_create_from_json_with_multiple_components(self):
test_json2 = {
'property/value': {
'api_code_description': 'ok',
'api_code': 0,
'result': {
'price_pr': 2938.0, 'price_lwr': 2160.0, 'price_mean': 2296.0, 'fsd': 0.17
}
},
'property/census': {
'api_code_description': 'ok',
'api_code': 0,
'result': 'dummy data'
},
'address_info': {
'city': 'Palos Verdes Estates', 'county_fips': '06037', 'geo_precision': 'rooftop',
'block_id': '060376703241005', 'zipcode': '90274',
'address_full': '43 Valmonte Plz Palos Verdes Estates CA 90274',
'state': 'CA', 'zipcode_plus4': '1444', 'address': '43 Valmonte Plz',
'lat': 33.79814, 'lng': -118.36455,
'slug': '43-Valmonte-Plz-Palos-Verdes-Estates-CA-90274', 'unit': None
},
'meta': 'Test Meta'
}
prop2 = Property.create_from_json(test_json2)
self.assertEqual(prop2.address, "43 Valmonte Plz")
self.assertEqual(len(prop2.component_results), 2)
value_result = next(
(cr for cr in prop2.component_results if cr.component_name == "property/value"),
None
)
self.assertIsNotNone(value_result)
census_result = next(
(cr for cr in prop2.component_results if cr.component_name == "property/census"),
None
)
self.assertEqual(census_result.json_data, "dummy data")
def test_has_error(self):
self.assertFalse(self.prop.has_error())
def test_has_error_with_error(self):
self.test_json['property/value']['api_code'] = 1001
prop2 = Property.create_from_json(self.test_json)
self.assertTrue(prop2.has_error())
def test_get_errors(self):
self.assertEqual(self.prop.get_errors(), [])
def test_get_errors_with_errors(self):
self.test_json['property/value']['api_code'] = 1001
self.test_json['property/value']['api_code_description'] = "test error"
prop2 = Property.create_from_json(self.test_json)
self.assertEqual(prop2.get_errors(), [{"property/value": "test error"}])
class BlockTestCase(unittest.TestCase):
def setUp(self):
self.test_json = {
'block/value_ts': {
'api_code_description': 'ok',
'api_code': 0,
'result': {
'value_sqft_median': 303.36
}
},
'block_info': {
'property_type': 'SFD',
'block_id': '060376703241005'
},
'meta': 'Test Meta'
}
self.block = Block.create_from_json(self.test_json)
def test_create_from_json(self):
self.assertEqual(self.block.block_id, "060376703241005")
self.assertEqual(self.block.property_type, "SFD")
self.assertEqual(self.block.num_bins, None)
self.assertEqual(self.block.meta, "Test Meta")
self.assertEqual(len(self.block.component_results), 1)
self.assertEqual(self.block.component_results[0].api_code, 0)
self.assertEqual(self.block.component_results[0].api_code_description, 'ok')
self.assertEqual(self.block.component_results[0].json_data, {'value_sqft_median': 303.36})
def test_create_from_json_with_num_bins(self):
        json_copy = self.test_json.copy()
        json_copy["block_info"] = {
            "block_id": "060376703241005",
            "num_bins": "5"
        }
        self.block = Block.create_from_json(json_copy)
self.assertEqual(self.block.block_id, "060376703241005")
self.assertEqual(self.block.property_type, None)
self.assertEqual(self.block.num_bins, "5")
def test_create_from_json_with_multiple_components(self):
test_json2 = {
'block/value_ts': {
'api_code_description': 'ok',
'api_code': 0,
'result': {
'value_sqft_median': 303.36
}
},
'block/histogram_beds': {
'api_code_description': 'ok',
'api_code': 0,
'result': 'dummy data'
},
'block_info': {
'property_type': 'SFD',
'block_id': '060376703241005'
},
'meta': 'Test Meta'
}
block2 = Block.create_from_json(test_json2)
self.assertEqual(block2.block_id, "060376703241005")
self.assertEqual(len(block2.component_results), 2)
result1 = next(
(cr for cr in block2.component_results if cr.component_name == "block/value_ts"),
None
)
self.assertIsNotNone(result1)
result2 = next(
(cr for cr in block2.component_results if cr.component_name == "block/histogram_beds"),
None
)
self.assertEqual(result2.json_data, "dummy data")
def test_has_error(self):
self.assertFalse(self.block.has_error())
def test_has_error_with_error(self):
self.test_json['block/value_ts']['api_code'] = 1001
block2 = Block.create_from_json(self.test_json)
self.assertTrue(block2.has_error())
def test_get_errors(self):
self.assertEqual(self.block.get_errors(), [])
def test_get_errors_with_errors(self):
self.test_json['block/value_ts']['api_code'] = 1001
self.test_json['block/value_ts']['api_code_description'] = "test error"
block2 = Block.create_from_json(self.test_json)
self.assertEqual(block2.get_errors(), [{"block/value_ts": "test error"}])
class ZipTestCase(unittest.TestCase):
def setUp(self):
self.test_json = {
'zip/details': {
'api_code_description': 'ok',
'api_code': 0,
'result': 'some result'
},
'zipcode_info': {
'zipcode': '90274'
},
'meta': 'Test Meta'
}
self.zip = ZipCode.create_from_json(self.test_json)
def test_create_from_json(self):
self.assertEqual(self.zip.zipcode, "90274")
self.assertEqual(self.zip.meta, "Test Meta")
self.assertEqual(len(self.zip.component_results), 1)
self.assertEqual(self.zip.component_results[0].api_code, 0)
self.assertEqual(self.zip.component_results[0].api_code_description, 'ok')
self.assertEqual(self.zip.component_results[0].json_data, 'some result')
def test_create_from_json_with_multiple_components(self):
test_json2 = {
'zip/details': {
'api_code_description': 'ok',
'api_code': 0,
'result': 'details result'
},
'zip/volatility': {
'api_code_description': 'ok',
'api_code': 0,
'result': 'dummy data'
},
'zipcode_info': {
'zipcode': '90274',
},
'meta': 'Test Meta'
}
zip2 = ZipCode.create_from_json(test_json2)
self.assertEqual(zip2.zipcode, "90274")
self.assertEqual(len(zip2.component_results), 2)
result1 = next(
(cr for cr in zip2.component_results if cr.component_name == "zip/details"),
None
)
self.assertEqual(result1.json_data, "details result")
result2 = next(
(cr for cr in zip2.component_results if cr.component_name == "zip/volatility"),
None
)
self.assertEqual(result2.json_data, "dummy data")
def test_has_error(self):
self.assertFalse(self.zip.has_error())
def test_has_error_with_error(self):
self.test_json['zip/details']['api_code'] = 1001
zip2 = ZipCode.create_from_json(self.test_json)
self.assertTrue(zip2.has_error())
def test_get_errors(self):
self.assertEqual(self.zip.get_errors(), [])
def test_get_errors_with_errors(self):
self.test_json['zip/details']['api_code'] = 1001
self.test_json['zip/details']['api_code_description'] = "test error"
zip2 = ZipCode.create_from_json(self.test_json)
self.assertEqual(zip2.get_errors(), [{"zip/details": "test error"}])
class MsaTestCase(unittest.TestCase):
def setUp(self):
self.test_json = {
'msa/details': {
'api_code_description': 'ok',
'api_code': 0,
'result': 'some result'
},
'msa_info': {
'msa': '41860'
},
'meta': 'Test Meta'
}
self.msa = Msa.create_from_json(self.test_json)
def test_create_from_json(self):
self.assertEqual(self.msa.msa, "41860")
self.assertEqual(self.msa.meta, "Test Meta")
self.assertEqual(len(self.msa.component_results), 1)
self.assertEqual(self.msa.component_results[0].api_code, 0)
self.assertEqual(self.msa.component_results[0].api_code_description, 'ok')
self.assertEqual(self.msa.component_results[0].json_data, 'some result')
def test_create_from_json_with_multiple_components(self):
test_json2 = {
'msa/details': {
'api_code_description': 'ok',
'api_code': 0,
'result': 'details result'
},
'msa/hpi_ts': {
'api_code_description': 'ok',
'api_code': 0,
'result': 'dummy data'
},
'msa_info': {
'msa': '41860',
},
'meta': 'Test Meta'
}
msa2 = Msa.create_from_json(test_json2)
self.assertEqual(msa2.msa, "41860")
self.assertEqual(len(msa2.component_results), 2)
result1 = next(
(cr for cr in msa2.component_results if cr.component_name == "msa/details"),
None
)
self.assertEqual(result1.json_data, "details result")
result2 = next(
(cr for cr in msa2.component_results if cr.component_name == "msa/hpi_ts"),
None
)
self.assertEqual(result2.json_data, "dummy data")
def test_has_error(self):
self.assertFalse(self.msa.has_error())
def test_has_error_with_error(self):
self.test_json['msa/details']['api_code'] = 1001
msa2 = Msa.create_from_json(self.test_json)
self.assertTrue(msa2.has_error())
def test_get_errors(self):
self.assertEqual(self.msa.get_errors(), [])
def test_get_errors_with_errors(self):
self.test_json['msa/details']['api_code'] = 1001
self.test_json['msa/details']['api_code_description'] = "test error"
msa2 = Msa.create_from_json(self.test_json)
self.assertEqual(msa2.get_errors(), [{"msa/details": "test error"}])
if __name__ == "__main__":
unittest.main()
| housecanary/hc-api-python | tests/test_object.py | Python | mit | 13,530 |
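# Hedged usage sketch (a companion to the tests above, not part of the
# package): Property.create_from_json parses one element of an API response.
# The payload below is trimmed from the test fixtures; the real API returns
# the full address_info block shown in the tests, so missing keys here are
# an assumption about what the parser tolerates.
from housecanary.object import Property

response_item = {
    'property/value': {
        'api_code': 0,
        'api_code_description': 'ok',
        'result': {'price_mean': 2296.0},
    },
    'address_info': {
        'address': '43 Valmonte Plz',
        'zipcode': '90274',
    },
    'meta': None,
}

prop = Property.create_from_json(response_item)
if prop.has_error():
    print(prop.get_errors())  # [{component_name: description}, ...]
else:
    print(prop.component_results[0].json_data)  # {'price_mean': 2296.0}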
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RNumderiv(RPackage):
"""Methods for calculating (usually) accurate numerical first and
second order derivatives."""
homepage = "https://cran.r-project.org/package=numDeriv"
url = "https://cran.r-project.org/src/contrib/numDeriv_2016.8-1.tar.gz"
list_url = "https://cran.r-project.org/src/contrib/Archive/numDeriv"
version('2016.8-1', '30e486298d5126d86560095be8e8aac1')
depends_on('[email protected]:')
| EmreAtes/spack | var/spack/repos/builtin/packages/r-numderiv/package.py | Python | lgpl-2.1 | 1,693 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from distutils.core import setup
setup (name='igep_qa',
version='0.1',
description=u'IGEP Quality Assurance scripts',
long_description=u'This is overall test software for IGEP Technology '
u'based devices, which defines test scope, test strategy, '
u'test configurations as well as test execution cycles. It '
u'will give readers an overview of validation activities done '
u'in any IGEP Technology based devices.',
author='Enric Balletbo i Serra',
author_email='[email protected]',
url='',
packages = ['igep_qa', 'igep_qa.helpers', 'igep_qa.runners', 'igep_qa.suites', 'igep_qa.tests'],
data_files = [('igep_qa/contrib', ['contrib/dtmf.wav']),
('igep_qa/contrib', ['contrib/test.wav'])],
scripts = ['scripts/igep-qa.sh'],
license='LICENSE',
classifiers = [
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Quality Assurance',
],
)
| eballetbo/igep_qa | setup.py | Python | mit | 1,324 |
import os
import sys
import pytest
# Hacky way to import application/
my_path = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, my_path + '/../')
from application.api_service import create_app
@pytest.fixture
def app():
app = create_app()
return app
| Tri-Try/REST | tests/conftest.py | Python | gpl-2.0 | 276 |
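# Hedged companion sketch (a hypothetical test module, not from the
# repository): pytest injects the `app` fixture defined above by matching
# the parameter name, so a test in the same directory can use it directly.
def test_create_app_returns_application(app):
    # create_app() should hand back a usable application object.
    assert app is not None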
#!/usr/bin/env python
# -*- coding: utf-8; py-indent-offset:4 -*-
###############################################################################
#
# Copyright (C) 2015 Daniel Rodriguez
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from .. import Observer
class Cash(Observer):
lines = ('cash',)
plotinfo = dict(plot=True, subplot=True)
def next(self):
self.lines[0][0] = self._owner.broker.getcash()
class Value(Observer):
lines = ('value',)
plotinfo = dict(plot=True, subplot=True)
def next(self):
self.lines[0][0] = self._owner.broker.getvalue()
class Broker(Observer):
alias = ('CashValue',)
lines = ('cash', 'value')
plotinfo = dict(plot=True, subplot=True)
def next(self):
self.lines.cash[0] = self._owner.broker.getcash()
self.lines.value[0] = value = self._owner.broker.getvalue()
| YuepengGuo/backtrader | backtrader/observers/broker.py | Python | gpl-3.0 | 1,655 |
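# Hedged usage sketch (assumes backtrader's public Cerebro API): the
# observers defined above sample broker state once per bar after being
# attached to a Cerebro instance.
import backtrader as bt

cerebro = bt.Cerebro(stdstats=False)  # stdstats=False skips the default observers
cerebro.addobserver(bt.observers.Broker)  # record cash and value together, per bar
# ... add data feeds and strategies here, then cerebro.run() / cerebro.plot() ...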
from math import sqrt

# NOTE: Line, Point, Primitive and Primitives are used throughout this file
# but never imported here; they are assumed to come from the qtlayout
# project's geometry module.
inf = 1000  # pseudo-"infinity": large coordinate used to build effectively infinite construction lines
def Bridge4x4(width1=5.0, width2=10.0, spacing=3.01/1.1):
angle = -45.0
#
y0 = 0.0
y1 = y0 + 0.5*spacing + 0.5*width1
y2 = y1 + 0.5*(width1+width2) + spacing
y_1 = y0 - 0.5*spacing - 0.5*width2
y_2 = y_1 - 0.5*(width1+width2) - spacing
#
line1 = Line(Point(-inf, y1), Point(inf, y1)).Rotate(angle=angle)
line2 = Line(Point(-inf, y2), Point(inf, y2)).Rotate(angle=angle)
line_1 = Line(Point(-inf, y_1), Point(inf, y_1)).Rotate(angle=angle)
line_2 = Line(Point(-inf, y_2), Point(inf, y_2)).Rotate(angle=angle)
#
return line1, line2, line_2, line_1
def Bridge2x2():
angle = -45.0
#
line1 = Line(Point(-inf, 0), Point(inf, 0))
line1 = line1.Rotate(angle=angle)
#
return line1
def Bridge1x1(width1=10, width2=9, spacing=3.01/1.1, angle=45):
# left side
y1l = 0.5*(width2+spacing+width1)
y2l = y1l - width1
y3l = y2l - spacing
y4l = y3l - width2
# bridge side
y1m = 0.5*width1
y2m = y1m - width1
y3m = 0.5*width2
y4m = y3m - width2
# right side
y1r = 0.5*(width2+spacing+width1)
y2r = y1r - width2
y3r = y2r - spacing
y4r = y3r - width1
#
line1 = Line(Point(-inf, y1l), Point(inf,y1l))
line2 = Line(Point(-inf, y2l), Point(inf, y2l))
line3 = Line(Point(-inf, y1m), Point(inf,y1m)).Rotate(angle=-angle)
line4 = Line(Point(-inf, y2m), Point(inf, y2m)).Rotate(angle=-angle)
line5 = Line(Point(-inf, y3r), Point(inf, y3r))
line6 = Line(Point(-inf, y4r), Point(inf, y4r))
point1 = line1.Intersect(line3)
point2 = line3.Intersect(line5)
point3 = line4.Intersect(line6)
point4 = line2.Intersect(line4)
bridge1 = Primitive( )
bridge1.append( point4 )
bridge1.append( Point(point4.x-3, point4.y) )
bridge1.append( Point(point4.x-3, point1.y) )
bridge1.append( point1 )
bridge1.append( point2 )
bridge1.append( Point(point2.x+3, point2.y) )
bridge1.append( Point(point2.x+3, point3.y) )
bridge1.append( point3 )
# left side
y1l = 0.5*(width2+spacing+width1)
y2l = y1l - width2
y3l = y2l - spacing
y4l = y3l - width1
# bridge side
y1m = 0.5*width2
y2m = y1m - width2
y3m = 0.5*width1
y4m = y3m - width1
# right side
y1r = 0.5*(width2+spacing+width1)
y2r = y1r - width1
y3r = y2r - spacing
y4r = y3r - width2
line1 = Line(Point(-inf, y1l), Point(inf,y1l))
line2 = Line(Point(-inf, y2l), Point(inf, y2l))
line3 = Line(Point(-inf, y1m), Point(inf,y1m)).Rotate(angle=-angle)
line4 = Line(Point(-inf, y2m), Point(inf, y2m)).Rotate(angle=-angle)
line5 = Line(Point(-inf, y3r), Point(inf, y3r))
line6 = Line(Point(-inf, y4r), Point(inf, y4r))
point1 = line1.Intersect(line3)
point2 = line3.Intersect(line5)
point3 = line4.Intersect(line6)
point4 = line2.Intersect(line4)
bridge2 = Primitive( )
bridge2.append( point4 )
bridge2.append( Point(point4.x-3, point4.y) )
bridge2.append( Point(point4.x-3, point1.y) )
bridge2.append( point1 )
bridge2.append( point2 )
bridge2.append( Point(point2.x+3, point2.y) )
bridge2.append( Point(point2.x+3, point3.y) )
bridge2.append( point3 )
return bridge1, bridge2.Mirror(planenormal=(1,0))
def Bridge1x1_2(width1=10, width2=9, spacing=3.01/1.1, angle=45):
    # non-symmetric + no termination
# left side
y1l = 0.5*(width2+spacing+width1)
y2l = y1l - width1
y3l = y2l - spacing
y4l = y3l - width2
# bridge side
y1m = 0.5*width1
y2m = y1m - width1
y3m = 0.5*width2
y4m = y3m - width2
# right side
y1r = 0.5*(width2+spacing+width1)
y2r = y1r - width2
y3r = y2r - spacing
y4r = y3r - width1
#
line1 = Line(Point(-inf, y1l), Point(inf,y1l))
line2 = Line(Point(-inf, y2l), Point(inf, y2l))
line3 = Line(Point(-inf, y1m), Point(inf,y1m)).Rotate(angle=-angle)
line4 = Line(Point(-inf, y2m), Point(inf, y2m)).Rotate(angle=-angle)
line5 = Line(Point(-inf, y3r), Point(inf, y3r))
line6 = Line(Point(-inf, y4r), Point(inf, y4r))
_line3 = line4
point1 = line1.Intersect(line3)
point2 = line3.Intersect(line5)
point3 = line4.Intersect(line6)
point4 = line2.Intersect(line4)
bridge1 = Primitive( )
bridge1.append( point4 )
bridge1.append( Point(point4.x-3, point4.y) )
bridge1.append( Point(point4.x-3, point1.y) )
bridge1.append( point1 )
bridge1.append( point2 )
bridge1.append( Point(point2.x+3, point2.y) )
bridge1.append( Point(point2.x+3, point3.y) )
bridge1.append( point3 )
# left side
y1l = 0.5*(width2+spacing+width1)
y2l = y1l - width2
y3l = y2l - spacing
y4l = y3l - width1
# bridge side
y1m = 0.5*width2
y2m = y1m - width2
y3m = 0.5*width1
y4m = y3m - width1
# right side
y1r = 0.5*(width2+spacing+width1)
y2r = y1r - width1
y3r = y2r - spacing
y4r = y3r - width2
line1 = Line(Point(-inf, y1l), Point(inf,y1l))
line2 = Line(Point(-inf, y2l), Point(inf, y2l))
line3 = Line(Point(-inf, y1m), Point(inf,y1m)).Rotate(angle=-angle)
line4 = Line(Point(-inf, y2m), Point(inf, y2m)).Rotate(angle=-angle)
line5 = Line(Point(-inf, y3r), Point(inf, y3r))
line6 = Line(Point(-inf, y4r), Point(inf, y4r))
point1 = line1.Intersect(line3)
point2 = line3.Intersect(line5)
point3 = line4.Intersect(line6)
point4 = line2.Intersect(line4)
bridge2 = Primitive( )
line = Line(bridge1[3], bridge1[4]).Mirror(planenormal=(1,0))
d = point4.distance(line)
bridge2.append( point4-(spacing-d)*Point(1,-1)/sqrt(2) )
bridge2.append( point4+width2/sqrt(2)*Point(1,1)-(spacing-d)*Point(1,-1)/sqrt(2) )
bridge2.append( point2 )
bridge2.append( Point(point2.x+3, point2.y) )
bridge2.append( Point(point2.x+3, point3.y) )
bridge2.append( point3 )
return bridge1, bridge2.Mirror(planenormal=(1,0))
def Bridge4x4b(width1=10.0, width2=10.0, width3=10.0, width4=10.0, spacing=3.01/1.1, vector=None):
    angle = 45.0
    if vector is None:
        vector = Point(0, 0)
#
y1l = 0.5*spacing+width4+spacing+width3+spacing+width2+spacing+width1
y2l = y1l - width1
y3l = y2l - spacing
y4l = y3l - width2
y5l = y4l - spacing
y6l = y5l - width3
y7l = y6l - spacing
y8l = y7l - width4
#
y1m = 0.5*(width4+spacing+width3+spacing+width2+spacing+width1)
y2m = y1m - width1
y3m = y2m - spacing
y4m = y3m - width2
y5m = y4m - spacing
y6m = y5m - width3
y7m = y6m - spacing
y8m = y7m - width4
#
y1r = -0.5*spacing
y2r = y1r - width1
y3r = y2r - spacing
y4r = y3r - width2
y5r = y4r - spacing
y6r = y5r - width3
y7r = y6r - spacing
y8r = y7r - width4
#
line1l = Line(Point(-inf, y1l), Point(inf, y1l))
line2l = Line(Point(-inf, y2l), Point(inf, y2l))
line3l = Line(Point(-inf, y3l), Point(inf, y3l))
line4l = Line(Point(-inf, y4l), Point(inf, y4l))
line5l = Line(Point(-inf, y5l), Point(inf, y5l))
line6l = Line(Point(-inf, y6l), Point(inf, y6l))
line7l = Line(Point(-inf, y7l), Point(inf, y7l))
line8l = Line(Point(-inf, y8l), Point(inf, y8l))
#
line1m = Line(Point(-inf, y1m), Point(inf, y1m)).Rotate(angle=-angle)
line2m = Line(Point(-inf, y2m), Point(inf, y2m)).Rotate(angle=-angle)
line3m = Line(Point(-inf, y3m), Point(inf, y3m)).Rotate(angle=-angle)
line4m = Line(Point(-inf, y4m), Point(inf, y4m)).Rotate(angle=-angle)
line5m = Line(Point(-inf, y5m), Point(inf, y5m)).Rotate(angle=-angle)
line6m = Line(Point(-inf, y6m), Point(inf, y6m)).Rotate(angle=-angle)
line7m = Line(Point(-inf, y7m), Point(inf, y7m)).Rotate(angle=-angle)
line8m = Line(Point(-inf, y8m), Point(inf, y8m)).Rotate(angle=-angle)
#
line1r = Line(Point(-inf, y1r), Point(inf, y1r))
line2r = Line(Point(-inf, y2r), Point(inf, y2r))
line3r = Line(Point(-inf, y3r), Point(inf, y3r))
line4r = Line(Point(-inf, y4r), Point(inf, y4r))
line5r = Line(Point(-inf, y5r), Point(inf, y5r))
line6r = Line(Point(-inf, y6r), Point(inf, y6r))
line7r = Line(Point(-inf, y7r), Point(inf, y7r))
line8r = Line(Point(-inf, y8r), Point(inf, y8r))
point1l = line1l.Intersect(line1m)
point2l = line2l.Intersect(line2m)
point3l = line3l.Intersect(line3m)
point4l = line4l.Intersect(line4m)
point5l = line5l.Intersect(line5m)
point6l = line6l.Intersect(line6m)
point7l = line7l.Intersect(line7m)
point8l = line8l.Intersect(line8m)
point1r = line1r.Intersect(line1m)
point2r = line2r.Intersect(line2m)
point3r = line3r.Intersect(line3m)
point4r = line4r.Intersect(line4m)
point5r = line5r.Intersect(line5m)
point6r = line6r.Intersect(line6m)
point7r = line7r.Intersect(line7m)
point8r = line8r.Intersect(line8m)
layer1 = Primitives()
layer2 = Primitives()
bridge = Primitive( )
bridge.append( point2l )
bridge.append( Point(point2l.x-3, point2l.y) )
bridge.append( Point(point2l.x-3, point1l.y) )
bridge.append( point1l )
bridge.append( point1r )
bridge.append( Point(point1r.x+3, point1r.y) )
bridge.append( Point(point1r.x+3, point2r.y) )
bridge.append( point2r )
layer1.append(bridge.Translate(vector=vector))
layer2.append(bridge.Translate(vector=vector).Mirror(planenormal=(1,0)))
bridge = Primitive( )
bridge.append( point4l )
bridge.append( Point(point4l.x-3, point4l.y) )
bridge.append( Point(point4l.x-3, point3l.y) )
bridge.append( point3l )
bridge.append( point3r )
bridge.append( Point(point3r.x+3, point3r.y) )
bridge.append( Point(point3r.x+3, point4r.y) )
bridge.append( point4r )
layer1.append(bridge.Translate(vector=vector))
layer2.append(bridge.Translate(vector=vector).Mirror(planenormal=(1,0)))
bridge = Primitive( )
bridge.append( point6l )
bridge.append( Point(point6l.x-3, point6l.y) )
bridge.append( Point(point6l.x-3, point5l.y) )
bridge.append( point5l )
bridge.append( point5r )
bridge.append( Point(point5r.x+3, point5r.y) )
bridge.append( Point(point5r.x+3, point6r.y) )
bridge.append( point6r )
layer1.append(bridge.Translate(vector=vector))
layer2.append(bridge.Translate(vector=vector).Mirror(planenormal=(1,0)))
bridge = Primitive( )
bridge.append( point8l )
bridge.append( Point(point8l.x-3, point8l.y) )
bridge.append( Point(point8l.x-3, point7l.y) )
bridge.append( point7l )
spacing = 2*spacing
bridge.append( Line(Point(-0.5*spacing,-inf), Point(-0.5*spacing,inf)).Intersect(line7m) )
bridge.append( Line(Point(-0.5*spacing,-inf), Point(-0.5*spacing,inf)).Intersect(line7m) - Point(0, 3) )
bridge.append( Point(-0.5*spacing,layer1.ymin) )
bridge.append( Point(-0.5*spacing-width4,layer1.ymin) )
bridge.append( Line(Point(-0.5*spacing-width4,-inf), Point(-0.5*spacing-width4,inf)).Intersect(line8m) )
layer1.append(bridge.Translate(vector=vector))
layer2.append(bridge.Translate(vector=vector).Mirror(planenormal=(1,0)))
#
return layer1, layer2
#layer1 = Primitives()
#layer2 = Primitives()
#_layer1, _layer2 = Bridge4x4b()
#layer1.extend(_layer1)
#layer2.extend(_layer2)
| raphaelvalentin/qtlayout | Projects/Bridges/bridge5.py | Python | gpl-2.0 | 11,681 |
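# Hedged self-contained sketch of the geometry primitives bridge5.py relies
# on (Point, Line.Rotate, Line.Intersect); names and behavior are inferred
# from their usage above, and the real implementations live elsewhere in the
# qtlayout project (Primitive, Primitives, Mirror and Translate are omitted).
from math import cos, radians, sin


class Point(object):
    def __init__(self, x, y):
        self.x, self.y = x, y


class Line(object):
    def __init__(self, p1, p2):
        self.p1, self.p2 = p1, p2

    def Rotate(self, angle):
        # Rotate both endpoints about the origin by `angle` degrees.
        a = radians(angle)

        def rot(p):
            return Point(p.x * cos(a) - p.y * sin(a),
                         p.x * sin(a) + p.y * cos(a))

        return Line(rot(self.p1), rot(self.p2))

    def Intersect(self, other):
        # Standard two-point form of the infinite line-line intersection.
        x1, y1, x2, y2 = self.p1.x, self.p1.y, self.p2.x, self.p2.y
        x3, y3, x4, y4 = other.p1.x, other.p1.y, other.p2.x, other.p2.y
        d = (x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4)
        px = ((x1 * y2 - y1 * x2) * (x3 - x4) - (x1 - x2) * (x3 * y4 - y3 * x4)) / d
        py = ((x1 * y2 - y1 * x2) * (y3 - y4) - (y1 - y2) * (x3 * y4 - y3 * x4)) / d
        return Point(px, py)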
# Copyright 2021 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import time
from typing import Tuple
from absl import flags
from absl.testing import absltest
import grpc
from framework import xds_url_map_testcase
from framework.rpc import grpc_testing
from framework.test_app import client_app
# Type aliases
HostRule = xds_url_map_testcase.HostRule
PathMatcher = xds_url_map_testcase.PathMatcher
GcpResourceManager = xds_url_map_testcase.GcpResourceManager
DumpedXdsConfig = xds_url_map_testcase.DumpedXdsConfig
RpcTypeUnaryCall = xds_url_map_testcase.RpcTypeUnaryCall
RpcTypeEmptyCall = xds_url_map_testcase.RpcTypeEmptyCall
XdsTestClient = client_app.XdsTestClient
ExpectedResult = xds_url_map_testcase.ExpectedResult
logger = logging.getLogger(__name__)
flags.adopt_module_key_flags(xds_url_map_testcase)
# The first batch of RPCs doesn't count towards the result of the test case;
# it is only meant to prove that communication between the driver and the
# client works.
_NUM_RPCS = 10
_LENGTH_OF_RPC_SENDING_SEC = 16
# We use sleep to synchronize the test driver and the client. Even though the
# client sends at the configured QPS rate, we can't assert that exactly
# QPS * SLEEP_DURATION RPCs have finished; the final completed count may be
# slightly higher or lower.
_NON_RANDOM_ERROR_TOLERANCE = 0.01
# For test cases that involve a random generator, we want to be looser about
# the final result. Otherwise we would need a longer test duration (sleep
# duration) and a more accurate communication mechanism. Verifying the
# accuracy of the random number generator is not the intention of this test.
_ERROR_TOLERANCE = 0.2
_DELAY_CASE_APPLICATION_TIMEOUT_SEC = 1
_BACKLOG_WAIT_TIME_SEC = 20
def _build_fault_injection_route_rule(abort_percentage: int = 0,
delay_percentage: int = 0):
return {
'priority': 0,
'matchRules': [{
'fullPathMatch': '/grpc.testing.TestService/UnaryCall'
}],
'service': GcpResourceManager().default_backend_service(),
'routeAction': {
'faultInjectionPolicy': {
'abort': {
'httpStatus': 401,
'percentage': abort_percentage,
},
'delay': {
'fixedDelay': {
'seconds': '20'
},
'percentage': delay_percentage,
}
}
},
}
def _wait_until_backlog_cleared(test_client: XdsTestClient,
timeout: int = _BACKLOG_WAIT_TIME_SEC):
""" Wait until the completed RPC is close to started RPC.
For delay injected test cases, there might be a backlog of RPCs due to slow
initialization of the client. E.g., if initialization took 20s and qps is
25, then there will be a backlog of 500 RPCs. In normal test cases, this is
fine, because RPCs will fail immediately. But for delay injected test cases,
the RPC might linger much longer and affect the stability of test results.
"""
logger.info('Waiting for RPC backlog to clear for %d seconds', timeout)
deadline = time.time() + timeout
while time.time() < deadline:
stats = test_client.get_load_balancer_accumulated_stats()
ok = True
for rpc_type in [RpcTypeUnaryCall, RpcTypeEmptyCall]:
started = stats.num_rpcs_started_by_method.get(rpc_type, 0)
completed = stats.num_rpcs_succeeded_by_method.get(
rpc_type, 0) + stats.num_rpcs_failed_by_method.get(rpc_type, 0)
            # We consider the backlog healthy if the diff between started
            # RPCs and completed RPCs is less than 1.1x the configured QPS.
if abs(started - completed) > xds_url_map_testcase.QPS.value * 1.1:
logger.info(
'RPC backlog exist: rpc_type=%s started=%s completed=%s',
rpc_type, started, completed)
time.sleep(_DELAY_CASE_APPLICATION_TIMEOUT_SEC)
ok = False
else:
logger.info(
'RPC backlog clear: rpc_type=%s started=%s completed=%s',
rpc_type, started, completed)
if ok:
# Both backlog of both types of RPCs is clear, success, return.
return
    raise RuntimeError('failed to clear RPC backlog in %s seconds' % timeout)
class TestZeroPercentFaultInjection(xds_url_map_testcase.XdsUrlMapTestCase):
@staticmethod
def url_map_change(
host_rule: HostRule,
path_matcher: PathMatcher) -> Tuple[HostRule, PathMatcher]:
path_matcher["routeRules"] = [
_build_fault_injection_route_rule(abort_percentage=0,
delay_percentage=0)
]
return host_rule, path_matcher
def xds_config_validate(self, xds_config: DumpedXdsConfig):
self.assertNumEndpoints(xds_config, 1)
filter_config = xds_config.rds['virtualHosts'][0]['routes'][0][
'typedPerFilterConfig']['envoy.filters.http.fault']
self.assertEqual('20s', filter_config['delay']['fixedDelay'])
self.assertEqual(
0, filter_config['delay']['percentage'].get('numerator', 0))
self.assertEqual('MILLION',
filter_config['delay']['percentage']['denominator'])
self.assertEqual(401, filter_config['abort']['httpStatus'])
self.assertEqual(
0, filter_config['abort']['percentage'].get('numerator', 0))
self.assertEqual('MILLION',
filter_config['abort']['percentage']['denominator'])
def rpc_distribution_validate(self, test_client: XdsTestClient):
rpc_distribution = self.configure_and_send(test_client,
rpc_types=[RpcTypeUnaryCall],
num_rpcs=_NUM_RPCS)
self.assertRpcStatusCode(test_client,
expected=(ExpectedResult(
rpc_type=RpcTypeUnaryCall,
status_code=grpc.StatusCode.OK,
ratio=1),),
length=_LENGTH_OF_RPC_SENDING_SEC,
tolerance=_NON_RANDOM_ERROR_TOLERANCE)
class TestNonMatchingFaultInjection(xds_url_map_testcase.XdsUrlMapTestCase):
"""EMPTY_CALL is not fault injected, so it should succeed."""
@staticmethod
def url_map_change(
host_rule: HostRule,
path_matcher: PathMatcher) -> Tuple[HostRule, PathMatcher]:
path_matcher["routeRules"] = [
_build_fault_injection_route_rule(abort_percentage=100,
delay_percentage=100)
]
return host_rule, path_matcher
def xds_config_validate(self, xds_config: DumpedXdsConfig):
self.assertNumEndpoints(xds_config, 1)
# The first route rule for UNARY_CALL is fault injected
self.assertEqual(
"/grpc.testing.TestService/UnaryCall",
xds_config.rds['virtualHosts'][0]['routes'][0]['match']['path'])
filter_config = xds_config.rds['virtualHosts'][0]['routes'][0][
'typedPerFilterConfig']['envoy.filters.http.fault']
self.assertEqual('20s', filter_config['delay']['fixedDelay'])
self.assertEqual(1000000,
filter_config['delay']['percentage']['numerator'])
self.assertEqual('MILLION',
filter_config['delay']['percentage']['denominator'])
self.assertEqual(401, filter_config['abort']['httpStatus'])
self.assertEqual(1000000,
filter_config['abort']['percentage']['numerator'])
self.assertEqual('MILLION',
filter_config['abort']['percentage']['denominator'])
# The second route rule for all other RPCs is untouched
self.assertNotIn(
'envoy.filters.http.fault',
xds_config.rds['virtualHosts'][0]['routes'][1].get(
'typedPerFilterConfig', {}))
def rpc_distribution_validate(self, test_client: XdsTestClient):
rpc_distribution = self.configure_and_send(test_client,
rpc_types=[RpcTypeEmptyCall],
num_rpcs=_NUM_RPCS)
self.assertRpcStatusCode(test_client,
expected=(ExpectedResult(
rpc_type=RpcTypeEmptyCall,
status_code=grpc.StatusCode.OK,
ratio=1),),
length=_LENGTH_OF_RPC_SENDING_SEC,
tolerance=_NON_RANDOM_ERROR_TOLERANCE)
@absltest.skip('20% RPC might pass immediately, reason unknown')
class TestAlwaysDelay(xds_url_map_testcase.XdsUrlMapTestCase):
@staticmethod
def url_map_change(
host_rule: HostRule,
path_matcher: PathMatcher) -> Tuple[HostRule, PathMatcher]:
path_matcher["routeRules"] = [
_build_fault_injection_route_rule(abort_percentage=0,
delay_percentage=100)
]
return host_rule, path_matcher
def xds_config_validate(self, xds_config: DumpedXdsConfig):
self.assertNumEndpoints(xds_config, 1)
filter_config = xds_config.rds['virtualHosts'][0]['routes'][0][
'typedPerFilterConfig']['envoy.filters.http.fault']
self.assertEqual('20s', filter_config['delay']['fixedDelay'])
self.assertEqual(1000000,
filter_config['delay']['percentage']['numerator'])
self.assertEqual('MILLION',
filter_config['delay']['percentage']['denominator'])
def rpc_distribution_validate(self, test_client: XdsTestClient):
rpc_distribution = self.configure_and_send(
test_client,
rpc_types=[RpcTypeUnaryCall],
num_rpcs=_NUM_RPCS,
app_timeout=_DELAY_CASE_APPLICATION_TIMEOUT_SEC)
_wait_until_backlog_cleared(test_client)
self.assertRpcStatusCode(
test_client,
expected=(ExpectedResult(
rpc_type=RpcTypeUnaryCall,
status_code=grpc.StatusCode.DEADLINE_EXCEEDED,
ratio=1),),
length=_LENGTH_OF_RPC_SENDING_SEC,
tolerance=_NON_RANDOM_ERROR_TOLERANCE)
class TestAlwaysAbort(xds_url_map_testcase.XdsUrlMapTestCase):
@staticmethod
def url_map_change(
host_rule: HostRule,
path_matcher: PathMatcher) -> Tuple[HostRule, PathMatcher]:
path_matcher["routeRules"] = [
_build_fault_injection_route_rule(abort_percentage=100,
delay_percentage=0)
]
return host_rule, path_matcher
def xds_config_validate(self, xds_config: DumpedXdsConfig):
self.assertNumEndpoints(xds_config, 1)
filter_config = xds_config.rds['virtualHosts'][0]['routes'][0][
'typedPerFilterConfig']['envoy.filters.http.fault']
self.assertEqual(401, filter_config['abort']['httpStatus'])
self.assertEqual(1000000,
filter_config['abort']['percentage']['numerator'])
self.assertEqual('MILLION',
filter_config['abort']['percentage']['denominator'])
def rpc_distribution_validate(self, test_client: XdsTestClient):
rpc_distribution = self.configure_and_send(test_client,
rpc_types=[RpcTypeUnaryCall],
num_rpcs=_NUM_RPCS)
self.assertRpcStatusCode(
test_client,
expected=(ExpectedResult(
rpc_type=RpcTypeUnaryCall,
status_code=grpc.StatusCode.UNAUTHENTICATED,
ratio=1),),
length=_LENGTH_OF_RPC_SENDING_SEC,
tolerance=_NON_RANDOM_ERROR_TOLERANCE)
class TestDelayHalf(xds_url_map_testcase.XdsUrlMapTestCase):
@staticmethod
def url_map_change(
host_rule: HostRule,
path_matcher: PathMatcher) -> Tuple[HostRule, PathMatcher]:
path_matcher["routeRules"] = [
_build_fault_injection_route_rule(abort_percentage=0,
delay_percentage=50)
]
return host_rule, path_matcher
def xds_config_validate(self, xds_config: DumpedXdsConfig):
self.assertNumEndpoints(xds_config, 1)
filter_config = xds_config.rds['virtualHosts'][0]['routes'][0][
'typedPerFilterConfig']['envoy.filters.http.fault']
self.assertEqual('20s', filter_config['delay']['fixedDelay'])
self.assertEqual(500000,
filter_config['delay']['percentage']['numerator'])
self.assertEqual('MILLION',
filter_config['delay']['percentage']['denominator'])
def rpc_distribution_validate(self, test_client: XdsTestClient):
rpc_distribution = self.configure_and_send(
test_client,
rpc_types=[RpcTypeUnaryCall],
num_rpcs=_NUM_RPCS,
app_timeout=_DELAY_CASE_APPLICATION_TIMEOUT_SEC)
_wait_until_backlog_cleared(test_client)
self.assertRpcStatusCode(
test_client,
expected=(ExpectedResult(
rpc_type=RpcTypeUnaryCall,
status_code=grpc.StatusCode.DEADLINE_EXCEEDED,
ratio=0.5),),
length=_LENGTH_OF_RPC_SENDING_SEC,
tolerance=_ERROR_TOLERANCE)
class TestAbortHalf(xds_url_map_testcase.XdsUrlMapTestCase):
@staticmethod
def url_map_change(
host_rule: HostRule,
path_matcher: PathMatcher) -> Tuple[HostRule, PathMatcher]:
path_matcher["routeRules"] = [
_build_fault_injection_route_rule(abort_percentage=50,
delay_percentage=0)
]
return host_rule, path_matcher
def xds_config_validate(self, xds_config: DumpedXdsConfig):
self.assertNumEndpoints(xds_config, 1)
filter_config = xds_config.rds['virtualHosts'][0]['routes'][0][
'typedPerFilterConfig']['envoy.filters.http.fault']
self.assertEqual(401, filter_config['abort']['httpStatus'])
self.assertEqual(500000,
filter_config['abort']['percentage']['numerator'])
self.assertEqual('MILLION',
filter_config['abort']['percentage']['denominator'])
def rpc_distribution_validate(self, test_client: XdsTestClient):
rpc_distribution = self.configure_and_send(test_client,
rpc_types=[RpcTypeUnaryCall],
num_rpcs=_NUM_RPCS)
self.assertRpcStatusCode(
test_client,
expected=(ExpectedResult(
rpc_type=RpcTypeUnaryCall,
status_code=grpc.StatusCode.UNAUTHENTICATED,
ratio=0.5),),
length=_LENGTH_OF_RPC_SENDING_SEC,
tolerance=_ERROR_TOLERANCE)
if __name__ == '__main__':
absltest.main()
| vjpai/grpc | tools/run_tests/xds_k8s_test_driver/tests/url_map/fault_injection_test.py | Python | apache-2.0 | 15,937 |
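# Hedged sketch of the tolerance semantics assumed by the assertions above
# (`ratio_ok` is a hypothetical helper, not part of the test framework): a
# ratio assertion passes when the observed ratio of a status code falls
# within `tolerance` of the expected ratio.
def ratio_ok(observed, expected, tolerance):
    return abs(observed - expected) <= tolerance

# e.g. with a 50% abort policy and _ERROR_TOLERANCE = 0.2, failure ratios
# anywhere in [0.3, 0.7] are accepted, while the deterministic 0%/100% cases
# use the much tighter _NON_RANDOM_ERROR_TOLERANCE = 0.01.
assert ratio_ok(0.45, 0.5, 0.2)
assert not ratio_ok(0.75, 0.5, 0.2)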