import requests as req
from requests.adapters import HTTPAdapter, Response
from urllib3 import Retry
from sdk.default_http_client_config import DefaultHTTPClientConfig
from sdk.http_client import HTTPClient
class DefaultHTTPClient(HTTPClient):
def __init__(self, config: DefaultHTTPClientConfig):
self.http_client = req.Session()
retry = Retry(total=config.max_retries, read=config.max_retries,
connect=config.max_retries,
backoff_factor=config.retry_interval,
status_forcelist=(502, 503),
allowed_methods=frozenset(['GET', 'POST']))
self.http_client.mount(
'http://', HTTPAdapter(max_retries=retry,
pool_maxsize=200,
pool_connections=20))
self.http_client.mount(
'https://', HTTPAdapter(max_retries=retry,
pool_maxsize=200,
pool_connections=20))
self.timeout = config.connection_timeout
self.request_timeout = config.connection_request_timeout
def get(self,
url: str,
query: dict,
headers: dict) -> Response:
return self.http_client.get(url,
params=query,
headers=headers,
timeout=(self.timeout,
self.request_timeout))
def put(self,
url: str,
query: dict,
headers: dict,
body: bytearray) -> Response:
headers.update({'Content-type': 'application/json'})
return self.http_client.put(url,
data=bytes(body),
params=query,
headers=headers,
timeout=(self.timeout,
self.request_timeout))
def post(self,
url: str,
query: dict,
headers: dict,
body: bytearray) -> Response:
headers.update({'Content-type': 'application/json'})
        return self.http_client.post(url,
data=bytes(body),
params=query,
headers=headers,
timeout=(self.timeout,
                                              self.request_timeout))
| ABSmartly | /ABSmartly-0.1.4-py3-none-any.whl/sdk/default_http_client.py | default_http_client.py |
from concurrent.futures import Future
from typing import Optional
from sdk.absmartly_config import ABSmartlyConfig
from sdk.audience_matcher import AudienceMatcher
from sdk.context import Context
from sdk.context_config import ContextConfig
from sdk.default_audience_deserializer import DefaultAudienceDeserializer
from sdk.default_context_data_provider import DefaultContextDataProvider
from sdk.default_context_event_handler import DefaultContextEventHandler
from sdk.default_variable_parser import DefaultVariableParser
from sdk.json.context_data import ContextData
from sdk.time.system_clock_utc import SystemClockUTC
class ABSmartly:
def __init__(self, config: ABSmartlyConfig):
self.context_data_provider = config.context_data_provider
self.context_event_handler = config.context_event_handler
self.context_event_logger = config.context_event_logger
self.variable_parser = config.variable_parser
self.audience_deserializer = config.audience_deserializer
if self.context_data_provider is None or \
self.context_event_handler is None:
self.client = config.client
if self.context_data_provider is None:
self.context_data_provider = \
DefaultContextDataProvider(self.client)
if self.context_event_handler is None:
self.context_event_handler = \
DefaultContextEventHandler(self.client)
if self.variable_parser is None:
self.variable_parser = DefaultVariableParser()
if self.audience_deserializer is None:
self.audience_deserializer = DefaultAudienceDeserializer()
def get_context_data(self) -> Future[Optional[ContextData]]:
return self.context_data_provider.get_context_data()
def create_context(self, config: ContextConfig) -> Context:
return Context(SystemClockUTC(),
config,
self.context_data_provider.get_context_data(),
self.context_data_provider,
self.context_event_handler,
self.context_event_logger,
self.variable_parser,
AudienceMatcher(self.audience_deserializer))
def create_context_with(self,
config: ContextConfig,
data: ContextData) -> Context:
future_data = Future()
future_data.set_result(data)
return Context(SystemClockUTC(), config,
future_data,
self.context_data_provider,
self.context_event_handler,
self.context_event_logger,
self.variable_parser,
                       AudienceMatcher(self.audience_deserializer))
| ABSmartly | /ABSmartly-0.1.4-py3-none-any.whl/sdk/absmartly.py | absmartly.py |
import decimal
from decimal import Decimal
from sdk.jsonexpr.evaluator import Evaluator
def compare_to(this, that):
if this > that:
return 1
elif this < that:
return -1
elif this == that:
return 0
class ExprEvaluator(Evaluator):
def __init__(self, operators: dict, vars: dict):
self.vars = vars
self.operators = operators
    def evaluate(self, expr: object):
        if type(expr) is list:
            return self.operators["and"].evaluate(self, expr)
        elif type(expr) is dict:
            # evaluate the first operator found; an unknown key yields None
            for key, value in expr.items():
                if key not in self.operators:
                    return None
                return self.operators[key].evaluate(self, value)
        return None
def boolean_convert(self, x: object):
if type(x) is bool:
return x
elif type(x) is str:
return x != "False" and x != "0" and x != ""
elif type(x) is int or type(x) is float or type(x) is complex:
return x != 0
return x is not None
def number_convert(self, x: object):
if type(x) is int or type(x) is float or type(x) is complex:
return x
elif type(x) is bool:
return 1.0 if x is True else 0.0
elif type(x) is str:
try:
return Decimal(x)
except decimal.InvalidOperation:
return None
return None
def string_convert(self, x: object):
if type(x) is str:
return x
elif type(x) is bool:
return str(x)
elif type(x) is int or type(x) is float or type(x) is complex:
return str(x)
return None
def extract_var(self, path: str):
frags = path.split("/")
target = self.vars if self.vars is not None else {}
for frag in frags:
value = None
if type(target) is list:
                try:
                    value = target[int(frag)]
                except (ValueError, IndexError) as err:
                    print(err)
elif type(target) is dict:
if frag not in target:
return None
value = target[frag]
if value is not None:
target = value
continue
return None
return target
def compare(self, lhs: object, rhs: object):
if lhs is None:
return 0 if rhs is None else None
elif rhs is None:
return None
if type(lhs) is int or type(lhs) is float or type(lhs) is complex:
rvalue = self.number_convert(rhs)
if rvalue is not None:
return compare_to(lhs, rvalue)
elif type(lhs) is str:
rvalue = self.string_convert(rhs)
if rvalue is not None:
return compare_to(lhs, rvalue)
elif type(lhs) is bool:
rvalue = self.boolean_convert(rhs)
if rvalue is not None:
return compare_to(lhs, rvalue)
elif type(lhs) == type(rhs) and lhs == rhs:
return 0
        return None
| ABSmartly | /ABSmartly-0.1.4-py3-none-any.whl/sdk/jsonexpr/expr_evaluator.py | expr_evaluator.py |
import time
import numpy as np
import os
from pprogress import ProgressBar
import json
from diversipy import lhd_matrix
from diversipy import transform_spread_out
import plotly.graph_objects as go
import plotly.offline
class clock:
start_t = 0
end_t = 0
@staticmethod
def start():
clock.start_t = time.time()
@staticmethod
def end():
clock.end_t = time.time()
print('Elapsed time: ',clock.end_t - clock.start_t)
def box_plot(scaled_posteriors, path_to_save):
fig = go.Figure()
ii = 0
    for key, value in scaled_posteriors.items():
fig.add_trace(go.Box(
y=value,
name=key,
boxpoints='all',
jitter=0,
marker_size=5,
whiskerwidth=0.2,
line_width=2)
)
ii += 1
fig.update_layout(yaxis=dict(
# autorange=True,
# showgrid=False,
dtick=0.2,
zeroline = False,range= [-0.1,1.1]
),
margin=dict(
l=40,
r=30,
b=80,
t=100
),
showlegend=False,
paper_bgcolor='rgb(243, 243, 243)',
plot_bgcolor='rgb(243, 243, 243)',
)
fig.write_html(path_to_save+'/box_plot.html')
class ABC:
""" Contains essential function for ABC
Attributes:
comm : MPI communication object
rank (int): ID of each processor
free_params (dict): Content of free parameteres including their tags and bounds
free_params_bounds (narray): Bounds for each free parameter
free_params_keys (array): Names of free parameters
param_sets (list): The list of pararameter sets created during sampling
settings (dict): Settings of the analysis
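    Example:
        A hypothetical configuration, for illustration only; `my_distance_func`
        is assumed to accept (param_set, args) and return a distance in [0, 1],
        or None on failure:
            free_params = {"k1": [0, 1], "k2": [0.5, 2]}
            settings = {"MPI_flag": True, "sample_n": 1000, "top_n": 100,
                        "output_path": "./outputs", "run_func": my_distance_func,
                        "args": {}}
            abc = ABC(free_params, settings)
            abc.sample()
            abc.run()
            abc.postprocessing()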
"""
def __init__(self,free_params,settings):
"""Generates ABM object. Receives free paramatere lists and settings.
Args:
free_params (dict): Content of free parameteres including their tags and bounds
settings (dict): Settings of the analysis
"""
self.settings = settings
if self.settings["MPI_flag"]:
from mpi4py import MPI
self.comm = MPI.COMM_WORLD
self.rank = self.comm.Get_rank()
else:
self.rank = 0
if self.rank == 0:
print("Number of CPUs assigned: ",self.comm.Get_size())
print("Sample number: ",settings['sample_n'])
self.free_params = free_params
self.free_params_keys = list(free_params.keys())
self.free_params_bounds = list(free_params.values())
print("The list of free parameters: ",self.free_params_keys)
try:
os.makedirs(self.settings["output_path"])
except OSError:
print("Creation of the directory %s failed" % self.settings["output_path"])
else:
print("Successfully created the directory %s " % self.settings["output_path"])
def sample(self):
"""Conducts
- Uniform sampling from n-dimensional space of parameters within the bounds given as ABC.free_params.
- Creates parameter sets and outputs them
"""
if self.rank == 0:
# python version > 3.6
            non_scaled_samples = transform_spread_out(lhd_matrix(self.settings["sample_n"], len(self.free_params))).transpose()
scaled_samples = []
ii = 0
for bounds in self.free_params_bounds:
low = bounds[0]
high = bounds[1]
                pre_samples_param = non_scaled_samples[ii]
                samples_param = list(map(lambda x: x*(high-low)+low, pre_samples_param))
scaled_samples.append(samples_param)
ii+=1
priors = {key:value for key,value in zip(self.free_params_keys,scaled_samples)}
samples = np.array(scaled_samples).transpose()
np.savetxt(self.settings["output_path"]+'/samples.txt', samples, fmt='%f')
##### create parameter sets
param_sets = []
for sample in samples:
param_set = {}
for i in range(len(sample)):
sample_p = sample[i]
key = self.free_params_keys[i]
param_set.update({key:sample_p})
param_sets.append(param_set)
with open(self.settings["output_path"]+'/param_sets.json','w') as file:
file.write(json.dumps({"param_sets":param_sets}))
self.param_sets = param_sets
def run(self):
"""Runs the user given model for the parameter sets.
"""
if self.rank == 0:
# reload
with open(self.settings["output_path"]+'/param_sets.json') as file:
self.param_sets = json.load(file)["param_sets"]
CPU_n = self.comm.Get_size()
            shares = np.ones(CPU_n,dtype=int)*int(len(self.param_sets)/CPU_n)
            plus = len(self.param_sets)%CPU_n
            for i in range(plus):
                shares[i] += 1
            # cumulative partition: each CPU receives a contiguous [start, end) slice
            portions = []
            start = 0
            for i in range(CPU_n):
                end = start + shares[i]
                portions.append([start, end])
                start = end
paramsets = self.param_sets
else:
portions = None
paramsets = None
portion = self.comm.scatter(portions,root = 0)
paramsets = self.comm.bcast(paramsets,root = 0)
def run_model(start,end):
pb = ProgressBar(end-start)
distances = []
for i in range(start,end):
distance = self.settings["run_func"](paramsets[i],self.settings["args"])
distances.append(distance)
pb.update()
pb.done()
return distances
distances_perCore = run_model(portion[0],portion[1])
distances_stacks = self.comm.gather(distances_perCore,root = 0)
if self.rank == 0:
distances = np.array([])
for stack in distances_stacks:
distances = np.concatenate([distances,stack],axis = 0)
np.savetxt(self.settings["output_path"]+'/distances.txt',np.array(distances),fmt='%s')
def postprocessing(self):
"""Conducts post processing tasks. Currently it extracts top fits and posteriors and also plots scaled posteriors.
"""
if self.rank == 0:
# reload
distances = []
with open(self.settings["output_path"]+'/distances.txt') as file:
                for line in file:
                    line = line.strip()
                    try:
                        value = float(line)
                    except ValueError:
                        value = None
                    distances.append(value)
samples = np.loadtxt(self.settings["output_path"]+'/samples.txt', dtype=float)
# top fitnesses
top_n = self.settings["top_n"]
fitness_values = np.array([])
for item in distances:
                if item is None:
fitness = 0
else:
fitness = 1 - item
fitness_values = np.append(fitness_values,fitness)
top_ind = np.argpartition(fitness_values, -top_n)[-top_n:]
            top_fitness_values = fitness_values[top_ind]
            np.savetxt(self.settings["output_path"]+'/top_fitness.txt',top_fitness_values,fmt='%f')
# extract posteriors
top_fit_samples = samples[top_ind].transpose()
try:
posteriors = {key:list(value) for key,value in zip(self.free_params_keys,top_fit_samples)}
except TypeError:
posteriors = {self.free_params_keys[0]:list(top_fit_samples)}
with open(self.settings["output_path"]+'/posterior.json', 'w') as file:
file.write(json.dumps({'posteriors': posteriors}))
# box plot
            scaled_posteriors = {}
            for key, values in posteriors.items():
                min_v = self.free_params[key][0]
                max_v = self.free_params[key][1]
                scaled = list(map(lambda x: (x-min_v)/(max_v-min_v), values))
                scaled_posteriors.update({key: scaled})
            box_plot(scaled_posteriors, self.settings["output_path"])
settings = 0
comm = 0
rank = 0
    param_sets = 0
| ABayesianC | /ABayesianC-1.0.7-py3-none-any.whl/ABC/tools.py | tools.py |
# ABlooper
Antibodies are a key component of the immune system and have been extensively used as biotherapeutics. Accurate knowledge of their structure is central to understanding their antigen binding function. The key area for antigen binding and the main area of structural variation in antibodies is concentrated in the six complementarity determining regions (CDRs), with the most important for binding and most variable being the CDR-H3 loop. The sequence and structural variability of CDR-H3 make it particularly challenging to model. Recently deep learning methods have offered a step change in our ability to predict protein structures. In this work we present ABlooper, an end-to-end equivariant deep-learning based CDR loop structure prediction tool. ABlooper rapidly predicts the structure of CDR loops with high accuracy and provides a confidence estimate for each of its predictions. On the models of the Rosetta Antibody Benchmark, ABlooper makes predictions with an average CDR-H3 RMSD of 2.49Å, which drops to 2.05Å when considering only its 76% most confident predictions.
## Install
To install via PyPi:
```bash
$ pip install ABlooper
```
To download and install the latest version from github:
```bash
$ git clone https://github.com/brennanaba/ABlooper.git
$ pip install ABlooper/
```
This package requires PyTorch. If you do not already have PyTorch installed, you can do so following these <a href="https://pytorch.org/get-started/locally/">instructions</a>.
Either OpenMM or PyRosetta are required for the optional refinement and side-chain prediction steps.
OpenMM and pdbfixer can be installed via conda using:
```bash
$ conda install -c conda-forge openmm pdbfixer
```
If you want to use PyRosetta for refinement and do not have it installed, it can be obtained from <a href="https://www.pyrosetta.org/">here</a>.
## Usage
To use ABlooper, you will need an IMGT numbered antibody model. If you do not already have an antibody model, you can generate one using <a href="http://opig.stats.ox.ac.uk/webapps/newsabdab/sabpred/abodybuilder/">ABodyBuilder</a>.
To remodel the CDRs of an existing antibody model using the command line:
```bash
$ ABlooper my_antibody_model.pdb --output ABlooper_model.pdb --heavy_chain H --light_chain L
```
To remodel the CDRs of an existing model using the python API:
```python
from ABlooper import CDR_Predictor
input_path = "my_antibody_model.pdb"
output_path = "ABlooper_model.pdb"
pred = CDR_Predictor(input_path, chains = ("H", "L"))
pred.write_predictions_in_pdb_format(output_path)
```
Two pretrained models are available. The default predicts CDRs defined by the Chothia numbering scheme (this is the model described in the paper). To get predictions of CDRs defined by the IMGT numbering scheme use:
```python
pred = CDR_Predictor(input_path, chains = ("H", "L"), model = 'imgt')
pred.write_predictions_in_pdb_format(output_path)
```
I would recommend using the command line if you just want a quick antibody model. If speed is a priority, it is probably best to just use the trained pytorch model. The python class will work best if you want to incorporate CDR prediction into a pipeline or access other details such as the confidence score or the RMSD to the original model, both of which can be obtained as follows:
```python
rmsd_from_input = pred.calculate_BB_rmsd_wrt_input()
confidence_score = pred.decoy_diversity
```
I have been made aware that ABlooper will occasionally generate abnormal geometries. To fix this, and to generate side-chains, you can run the following (only works if you have PyRosetta or OpenMM installed):
```bash
$ ABlooper my_antibody_model.pdb --output ABlooper_model.pdb --model chothia --side_chains
```
By default, this will use OpenMM if it is installed.
## Citing this work
The code and data in this package is based on the following paper <a href="https://academic.oup.com/bioinformatics/article/38/7/1877/6517780">ABlooper</a>. If you use it, please cite:
```tex
@article{10.1093/bioinformatics/btac016,
author = {Abanades, Brennan and Georges, Guy and Bujotzek, Alexander and Deane, Charlotte M},
title = {ABlooper: fast accurate antibody CDR loop structure prediction with accuracy estimation},
journal = {Bioinformatics},
volume = {38},
number = {7},
pages = {1877-1880},
year = {2022},
month = {01},
issn = {1367-4803},
doi = {10.1093/bioinformatics/btac016},
url = {https://doi.org/10.1093/bioinformatics/btac016},
}
```
| ABlooper | /ABlooper-1.1.2.tar.gz/ABlooper-1.1.2/README.md | README.md |
# What is this?
A Python [Flask](http://flask.pocoo.org/)-based library for building [HipChat Connect add-ons](https://www.hipchat.com/docs/apiv2/addons). This is an early, alpha-quality release,
but can be used to build real add-ons today. Future versions may include backward-incompatible changes.
# Getting started
For a simple alternative to the following set up instructions, you may consider using the [Vagrant starter project](https://bitbucket.org/atlassianlabs/ac-flask-hipchat-vagrant) to get up and running quickly.
## Dependencies
In addition to Python 2.7 or later, `ac-flask-hipchat` expects Redis to be available for temporary persistence of
authentication tokens, and MongoDB for a permanent data store.
## A first add-on
Writing basic HipChat add-ons with `ac-flask-hipchat` requires very little code to get up and running. Here's an
example of a simple yet complete add-on, in two files:
### web.py
```
from ac_flask.hipchat import Addon, room_client, sender
from flask import Flask
addon = Addon(app=Flask(__name__),
key="ac-flask-hipchat-greeter",
name="HipChat Greeter Example Add-on",
allow_room=True,
scopes=['send_notification'])
@addon.webhook(event="room_enter")
def room_entered():
room_client.send_notification('hi: %s' % sender.name)
return '', 204
if __name__ == '__main__':
addon.run()
```
### requirements.txt
```
AC-Flask-HipChat
```
## Running the server
To run this example yourself, add these files to a new directory and run the following commands there:
```
$ pip install -r requirements.txt
$ python web.py
```
If the server started as expected, you'll see something like the following emitted:
```
--------------------------------------
Public descriptor base URL: http://localhost:5000
--------------------------------------
INFO:werkzeug: * Running on http://127.0.0.1:5000/
INFO:werkzeug: * Restarting with reloader
```
To double check that the server is running correctly, try requesting its add-on descriptor:
```
$ curl http://localhost:5000/
```
A successful request will return a HipChat descriptor for the add-on.
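For the greeter example above, the descriptor will look roughly like the following sketch (URLs reflect your configured base URL, and field order may differ):
```
{
  "key": "ac-flask-hipchat-greeter",
  "name": "HipChat Greeter Example Add-on",
  "description": "",
  "links": {
    "self": "http://localhost:5000/addon/descriptor"
  },
  "capabilities": {
    "installable": {
      "allowRoom": true,
      "allowGlobal": false,
      "callbackUrl": "http://localhost:5000/addon/installable"
    },
    "hipchatApiConsumer": {
      "scopes": ["send_notification"],
      "avatar": {
        "url": "https://abotars.hipch.at/bot/ac-flask-hipchat-greeter.png",
        "url@2x": "https://abotars.hipch.at/bot/ac-flask-hipchat-greeter.png"
      }
    },
    "webhook": [
      {
        "event": "room_enter",
        "url": "http://localhost:5000/event/room_enter",
        "authentication": "jwt"
      }
    ]
  },
  "vendor": {
    "name": "",
    "url": ""
  }
}
```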
## Preparing the add-on for installation
Now that you have a server running, you'll want to try it somehow. The next step is different depending on whether
you're going to be developing with hipchat.com or a private HipChat instance being hosted behind your corporate firewall.
### Developing with HipChat.com
The easiest way to test with hipchat.com while developing on your local machine is to use [ngrok](https://ngrok.com).
Download and install it now if you need to -- it's an amazing tool that will change the way you develop and share web applications.
Start the ngrok tunnel in another terminal window or if using the [Vagrant starter project](https://bitbucket.org/atlassianlabs/ac-flask-hipchat-vagrant),
you should already have ngrok running, and the URL should be printed to the screen when starting the VM. For the
purposes of this tutorial, we'll assume your domain is `https://asdf123.ngrok.com`.
While ngrok will forward both HTTP and HTTPS, for the protection of you and your HipChat group members, you should
always use HTTPS when running your add-on on the public internet.
### Developing with a private server
To install your add-on on a private HipChat server, both the add-on server and HipChat server need to be able to connect
to each other via HTTP or HTTPS on your local network. Simply determine an HTTP url that your HipChat server can use to
connect to your locally running add-on, and use that as the value of your "local base url" needed by the Installation step.
If all goes well, you won't have to change anything from the defaults, as `ac-flask-hipchat` will simply attempt to
use the OS's hostname to build the local base url, which may already be good enough for your private network.
## Installation
### Configuring the add-on's local base url
Now, we need to tell the add-on server where it's running so that it can successfully be installed. By default,
it'll assume your local computer name, but for installation into HipChat, especially if using ngrok,
you'll likely want to set it explicitly.
You can do that by setting the `AC_BASE_URL` environment variable when you start the server:
```
$ AC_BASE_URL=https://asdf123.ngrok.com python web.py
```
When properly configured, you'll see the server report the new local base url when it starts up:
```
--------------------------------------
Public descriptor base URL: https://asdf123.ngrok.com
--------------------------------------
INFO:werkzeug: * Running on http://127.0.0.1:5000/
INFO:werkzeug: * Restarting with reloader
```
__Note__: by signing up for an ngrok account, you can specify a generally stable, custom subdomain for even easier
iterative development. See [ngrok](http://ngrok.com) for more information.
### Manually installing the add-on using HipChat's admin UI
To install your add-on into HipChat, you have to register your add-on's descriptor.
HipChat add-ons can operate inside a room or within the entire account. When developing, you should probably register
your add-on inside a room you've created just for testing. Also, you can only register add-ons inside a room where you
are an administrator.
To register your add-on descriptor, navigate to the rooms administration page at
`https://www.hipchat.com/rooms` (or whatever url your private server is running at,
if appropriate). Then select one of your rooms in the list. In the following page, select `Integrations` in the
sidebar, and then click the "Build and install your own integration" link at the bottom of the page:

Paste your descriptor url in the `Integration URL` field of the opened dialog and then click `Add integration`. This
will initiate the installation of your add-on for that room.
# Library Features
This library provides help with many aspects of add-on development, such as:
* Choice of programmatic HipChat add-on descriptor builder or providing a full or partial descriptor object literal
* High-level conveniences for mounting webhook handlers and configuration pages
* A REST API client with built-in OAuth2 token acquisition and refresh
* JWT authentication validation, refresh, and token generation for web UI routes (e.g. the `configurable` capability)
See `test.py` for a very simple example add-on.
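As a rough sketch of how these conveniences fit together (hypothetical keys, paths and icon URL; the decorator signatures come from this package's `addon.py`):
```
@addon.configure_page(path='/configure', methods=['GET'])
def configure():
    # rendered in an iframe when a user opens the add-on's configuration
    return 'configuration page', 200

@addon.glance(key='sample-glance', name='Sample Glance',
              target='sample-panel', icon='/static/icon.png')
def glance():
    # HipChat polls this endpoint for the glance label
    return {'label': {'type': 'html', 'value': 'Hello!'}}

@addon.webpanel(key='sample-panel', name='Sample Panel')
def panel():
    return 'panel content', 200
```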
### Authenticating requests from the iframe to the add-on
Add-ons typically can't use sessions, because browsers treat cookies set by the add-on as third-party cookies.
You can still make an authenticated call to an endpoint in your add-on, however:
Say there is an endpoint like this:
```
@addon.route(rule='/data')
@addon.json_output
def data():
return {'some': 'data'}
```
You want to call this endpoint from the iframe with the full authentication context. This can be done by rendering
a token into the iframe:
```
@addon.webpanel(key='webpanel.key', name='Panel')
def web_panel():
token = tenant.sign_jwt(sender.id)
return render_template('panel.html', token=token)
```
The template can then render the token into the desired location:
```
var url = '/data?signed_request={{ token }}'
```
or
```
<meta name='token' content='{{ token }}'>
```
You can also include the full context of the original request from HipChat by using:
```
token = tenant.sign_jwt(sender.id, {
'context': dict(context)
})
```
| AC-Flask-HipChat | /AC-Flask-HipChat-0.2.12.tar.gz/AC-Flask-HipChat-0.2.12/readme.md | readme.md |
import json
import logging
from ac_flask.hipchat.events import events
from ac_flask.hipchat.db import mongo, redis
from .tenant import Tenant
from flask import request
import requests
_log = logging.getLogger(__name__)
def _invalid_install(msg):
_log.error("Installation failed: %s" % msg)
return msg, 400
def init(addon, allow_global, allow_room, send_events=True, db_name='clients', require_group_id=False):
# noinspection PyUnusedLocal
@addon.app.route('/addon/installable', methods=['POST'])
def on_install():
clients = mongo[db_name]
data = json.loads(request.data)
if not data.get('roomId', None) and not allow_global:
return _invalid_install("This add-on can only be installed in individual rooms. Please visit the " +
"'Add-ons' link in a room's administration area and install from there.")
if data.get('roomId', None) and not allow_room:
return _invalid_install("This add-on cannot be installed in an individual room. Please visit the " +
"'Add-ons' tab in the 'Group Admin' area and install from there.")
_log.info("Retrieving capabilities doc at %s" % data['capabilitiesUrl'])
capdoc = requests.get(data['capabilitiesUrl'], timeout=10).json()
if capdoc['links'].get('self', None) != data['capabilitiesUrl']:
return _invalid_install("The capabilities URL %s doesn't match the resource's self link %s" %
(data['capabilitiesUrl'], capdoc['links'].get('self', None)))
client = Tenant(data['oauthId'], data['oauthSecret'], room_id=data.get('roomId', None), capdoc=capdoc)
try:
session = client.get_token(redis, token_only=False,
scopes=addon.descriptor['capabilities']['hipchatApiConsumer']['scopes'])
except Exception as e:
_log.warn("Error validating installation by receiving token: %s" % e)
return _invalid_install("Unable to retrieve token using the new OAuth information")
_log.info("session: %s" % json.dumps(session))
if require_group_id and int(require_group_id) != int(session['group_id']):
_log.error("Attempted to install for group %s when group %s is only allowed" %
(session['group_id'], require_group_id))
return _invalid_install("Only group %s is allowed to install this add-on" % require_group_id)
client.group_id = session['group_id']
client.group_name = session['group_name']
clients.remove(client.id_query)
clients.insert(client.to_map())
if send_events:
events.fire_event('install', {"client": client})
return '', 201
# noinspection PyUnusedLocal
@addon.app.route('/addon/installable/<string:oauth_id>', methods=['DELETE'])
def on_uninstall(oauth_id):
uninstall_client(oauth_id, db_name, send_events)
return '', 204
addon.descriptor['capabilities']['installable']['callbackUrl'] = "{base}/addon/installable".format(
base=addon.app.config['BASE_URL']
)
def uninstall_client(oauth_id, db_name='clients', send_events=True):
client = Tenant.load(oauth_id)
clients = mongo[db_name]
client_filter = {"id": oauth_id}
clients.remove(client_filter)
if send_events:
        events.fire_event('uninstall', {"client": client})
| AC-Flask-HipChat | /AC-Flask-HipChat-0.2.12.tar.gz/AC-Flask-HipChat-0.2.12/ac_flask/hipchat/installable.py | installable.py |
from ac_flask.hipchat.tenant import Tenant
from flask import _request_ctx_stack as stack, request
from flask import abort
import jwt
from werkzeug.local import LocalProxy
from functools import wraps
def require_tenant(func):
@wraps(func)
def inner(*args, **kwargs):
if not tenant:
abort(401)
return func(*args, **kwargs)
return inner
def _validate_jwt(req):
if 'signed_request' in req.form:
jwt_data = req.form['signed_request']
else:
jwt_data = req.args.get('signed_request', None)
if not jwt_data:
header = req.headers.get('authorization', '')
jwt_data = header[4:] if header.startswith('JWT ') else None
if not jwt_data:
abort(401)
try:
oauth_id = jwt.decode(jwt_data, verify=False)['iss']
client = Tenant.load(oauth_id)
data = jwt.decode(jwt_data, client.secret, leeway=10)
return client, data
except jwt.DecodeError:
abort(400)
except jwt.ExpiredSignature:
abort(401)
def _get_tenant():
ctx = stack.top
if ctx is not None:
if not hasattr(ctx, 'tenant'):
body = request.json
cur_sender = cur_context = None
if request.args.get('signed_request', None) or 'authorization' in request.headers:
cur_tenant, data = _validate_jwt(request)
cur_sender = User(data['sub'])
cur_context = data.get('context', None)
elif body and 'oauth_client_id' in body:
tenant_id = body['oauth_client_id']
cur_tenant = Tenant.load(tenant_id)
else:
cur_tenant = None
if body and 'item' in body:
sent_by = _extract_sender(body['item'])
if sent_by:
user = User(user_id=sent_by['id'], name=sent_by['name'], mention_name=sent_by['mention_name'])
# Check if the sender in the webhook matches the one provided in the JWT
if cur_sender and str(cur_sender.id) != str(user.id):
abort(400)
cur_sender = user
ctx.tenant = cur_tenant
ctx.sender = cur_sender
ctx.context = cur_context
return ctx.tenant
def _extract_sender(item):
if 'sender' in item:
return item['sender']
if 'message' in item and 'from' in item['message']:
return item['message']['from']
return None
def _get_sender():
_get_tenant()
if hasattr(stack.top, 'sender'):
return stack.top.sender
else:
return None
def _get_context():
_get_tenant()
if hasattr(stack.top, 'context'):
return stack.top.context
else:
return None
tenant = LocalProxy(_get_tenant)
sender = LocalProxy(_get_sender)
context = LocalProxy(_get_context)
class User(object):
def __init__(self, user_id, name=None, mention_name=None):
super(User, self).__init__()
self.id = user_id
self.name = name
        self.mention_name = mention_name
| AC-Flask-HipChat | /AC-Flask-HipChat-0.2.12.tar.gz/AC-Flask-HipChat-0.2.12/ac_flask/hipchat/auth.py | auth.py |
from datetime import timedelta
import time
from ac_flask.hipchat.db import mongo
import jwt
import logging
import requests
from requests.auth import HTTPBasicAuth
from werkzeug.exceptions import abort
from urlparse import urlparse
_log = logging.getLogger(__name__)
ACCESS_TOKEN_CACHE = "hipchat-tokens:{oauth_id}"
def base_url(url):
if not url:
return None
result = urlparse(url)
return "{scheme}://{netloc}".format(scheme=result.scheme, netloc=result.netloc)
class Tenant:
def __init__(self, id, secret=None, homepage=None, capabilities_url=None, room_id=None, token_url=None,
group_id=None, group_name=None, capdoc=None):
self.id = id
self.room_id = room_id
self.secret = secret
self.group_id = group_id
self.group_name = None if not group_name else group_name
self.homepage = homepage or None if not capdoc else capdoc['links']['homepage']
self.token_url = token_url or None if not capdoc else capdoc['capabilities']['oauth2Provider']['tokenUrl']
self.capabilities_url = capabilities_url or None if not capdoc else capdoc['links']['self']
self.api_base_url = capdoc['capabilities']['hipchatApiProvider']['url'] if capdoc \
else self.capabilities_url[0:self.capabilities_url.rfind('/')] if self.capabilities_url else None
self.installed_from = base_url(self.token_url)
def to_map(self):
return {
"id": self.id,
"secret": self.secret,
"room_id": self.room_id,
"group_id": self.group_id,
"group_name": self.group_name,
"homepage": self.homepage,
"token_url": self.token_url,
"capabilities_url": self.capabilities_url
}
@staticmethod
def from_map(data):
filtered = {key: val for key, val in data.items() if not key.startswith('_')}
return Tenant(**filtered)
@staticmethod
def load(client_id):
client_data = mongo.clients.find_one(Tenant(client_id).id_query)
if client_data:
return Tenant.from_map(client_data)
else:
_log.warn("Cannot find client: %s" % client_id)
abort(400)
@property
def id_query(self):
return {"id": self.id}
def get_token(self, cache, token_only=True, scopes=None):
if scopes is None:
scopes = ["send_notification"]
cache_key = ACCESS_TOKEN_CACHE.format(oauth_id=self.id)
cache_key += ":" + ",".join(scopes)
def gen_token():
resp = requests.post(self.token_url, data={"grant_type": "client_credentials", "scope": " ".join(scopes)},
auth=HTTPBasicAuth(self.id, self.secret), timeout=10)
if resp.status_code == 200:
_log.debug("Token request response: " + resp.text)
return resp.json()
elif resp.status_code == 401:
_log.error("Client %s is invalid but we weren't notified. Uninstalling" % self.id)
raise OauthClientInvalidError(self)
else:
raise Exception("Invalid token: %s" % resp.text)
if token_only:
token = cache.get(cache_key)
if not token:
data = gen_token()
token = data['access_token']
cache.setex(cache_key, token, data['expires_in'] - 20)
return token
else:
return gen_token()
def sign_jwt(self, user_id, data=None):
if data is None:
data = {}
now = int(time.time())
exp = now + timedelta(hours=1).total_seconds()
jwt_data = {"iss": self.id,
"iat": now,
"exp": exp}
if user_id:
jwt_data['sub'] = user_id
data.update(jwt_data)
return jwt.encode(data, self.secret)
class OauthClientInvalidError(Exception):
def __init__(self, client, *args, **kwargs):
super(OauthClientInvalidError, self).__init__(*args, **kwargs)
        self.client = client
| AC-Flask-HipChat | /AC-Flask-HipChat-0.2.12.tar.gz/AC-Flask-HipChat-0.2.12/ac_flask/hipchat/tenant.py | tenant.py |
from functools import wraps
import httplib
import logging
from ac_flask.hipchat import installable
from ac_flask.hipchat.auth import require_tenant, tenant
import os
from flask import jsonify, request
from urlparse import urlparse
_log = logging.getLogger(__name__)
def _not_none(app, name, default):
val = app.config.get(name, default)
if val is not None:
return val
else:
raise ValueError("Missing '{key}' configuration property".format(key=name))
class Addon(object):
def __init__(self, app, key=None, name=None, description=None, config=None,
env_prefix="AC_", allow_room=True, allow_global=False,
scopes=None, vendor_name=None, vendor_url=None, avatar=None):
if scopes is None:
scopes = ['send_notification']
if avatar is None:
avatar_url = "https://abotars.hipch.at/bot/" + _not_none(app, 'ADDON_KEY', key) + ".png"
avatar = {
"url": avatar_url,
"url@2x": avatar_url
}
self.app = app
self._init_app(app, config, env_prefix)
self.descriptor = {
"key": _not_none(app, 'ADDON_KEY', key),
"name": _not_none(app, 'ADDON_NAME', name),
"description": app.config.get('ADDON_DESCRIPTION', description) or "",
"links": {
"self": self._relative_to_base("/addon/descriptor")
},
"capabilities": {
"installable": {
"allowRoom": allow_room,
"allowGlobal": allow_global
},
"hipchatApiConsumer": {
"scopes": scopes,
"avatar": avatar
}
},
"vendor": {
"url": app.config.get('ADDON_VENDOR_URL', vendor_url) or "",
"name": app.config.get('ADDON_VENDOR_NAME', vendor_name) or ""
}
}
if app.config.get('BASE_URL') is not None and app.config.get('AVATAR_URL') is not None:
self.descriptor['capabilities']['hipchatApiConsumer']['avatar'] = {
'url': app.config.get('BASE_URL') + app.config.get('AVATAR_URL')
}
installable.init(addon=self,
allow_global=allow_global,
allow_room=allow_room)
@self.app.route("/addon/descriptor")
def descriptor():
return jsonify(self.descriptor)
self.app.route("/")(descriptor)
@staticmethod
def _init_app(app, config, env_prefix):
app.config.from_object('ac_flask.hipchat.default_settings')
if config is not None:
app.config.from_object(config)
if env_prefix is not None:
env_vars = {key[len(env_prefix):]: val for key, val in os.environ.items()}
app.config.update(env_vars)
if app.config['DEBUG']:
# These two lines enable debugging at httplib level (requests->urllib3->httplib)
# You will see the REQUEST, including HEADERS and DATA, and RESPONSE with HEADERS but without DATA.
# The only thing missing will be the response.body which is not logged.
httplib.HTTPConnection.debuglevel = 1
# You must initialize logging, otherwise you'll not see debug output.
logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG)
requests_log = logging.getLogger("requests.packages.urllib3")
requests_log.setLevel(logging.DEBUG)
requests_log.propagate = True
else:
logging.basicConfig()
logging.getLogger().setLevel(logging.WARN)
app.events = {}
def configure_page(self, path="/configure", **kwargs):
self.descriptor['capabilities'].setdefault('configurable', {})['url'] = self._relative_to_base(path)
def inner(func):
return self.app.route(rule=path, **kwargs)(require_tenant(func))
return inner
def webhook(self, event, name=None, pattern=None, path=None, auth="jwt", **kwargs):
if path is None:
path = "/event/" + event
wh = {
"event": event,
"url": self._relative_to_base(path),
"authentication": auth
}
if name is not None:
wh['name'] = name
if pattern is not None:
wh['pattern'] = pattern
self.descriptor['capabilities'].setdefault('webhook', []).append(wh)
def inner(func):
return self.app.route(rule=path, methods=['POST'], **kwargs)(require_tenant(func))
return inner
def route(self, anonymous=False, *args, **kwargs):
"""
        Decorator for routes that, by default, require an authenticated tenant
"""
def inner(func):
if not anonymous:
func = require_tenant(func)
func = self.app.route(*args, **kwargs)(func)
return func
return inner
def glance(self, key, name, target, icon, icon2x=None, conditions=None, anonymous=False, path=None, **kwargs):
if path is None:
path = "/glance/" + key
if icon2x is None:
icon2x = icon
glance_capability = {
"key": key,
"name": {
"value": name
},
"queryUrl": self._relative_to_base(path),
"target": target,
"icon": {
"url": self._relative_to_base(icon),
"url@2x": self._relative_to_base(icon2x)
},
"conditions": conditions or []
}
self.descriptor['capabilities'].setdefault('glance', []).append(glance_capability)
def inner(func):
return self.route(anonymous, rule=path, **kwargs)(self.cors(self.json_output(func)))
return inner
def webpanel(self, key, name, location="hipchat.sidebar.right", anonymous=False, path=None, **kwargs):
if path is None:
path = "/webpanel/" + key
webpanel_capability = {
"key": key,
"name": {
"value": name
},
"url": self._relative_to_base(path),
"location": location
}
self.descriptor['capabilities'].setdefault('webPanel', []).append(webpanel_capability)
def inner(func):
return self.route(anonymous, rule=path, **kwargs)(func)
return inner
def cors(self, func):
@wraps(func)
def inner(*args, **kwargs):
whitelisted_origin = self._get_white_listed_origin()
installed_from = tenant.installed_from if tenant else None
response = self.app.make_response(func(*args, **kwargs))
response.headers['Access-Control-Allow-Origin'] = whitelisted_origin or installed_from or '*'
return response
return inner
def json_output(self, func):
@wraps(func)
def inner(*args, **kwargs):
res = func(*args, **kwargs)
return jsonify(res) if isinstance(res, dict) else res
return inner
def _relative_to_base(self, path):
base = self.app.config['BASE_URL']
path = '/' + path if not path.startswith('/') else path
return base + path
def _get_white_listed_origin(self):
try:
origin = request.headers['origin']
if origin:
origin_url = urlparse(origin)
if origin_url and origin_url.hostname.endswith(self.app.config['CORS_WHITELIST']):
return origin
return None
except KeyError:
return None
def run(self, *args, **kwargs):
if os.environ.get('WERKZEUG_RUN_MAIN') != 'true':
print("")
print("--------------------------------------")
print("Public descriptor base URL: %s" % self.app.config['BASE_URL'])
print("--------------------------------------")
print("")
        self.app.run(*args, **kwargs)
| AC-Flask-HipChat | /AC-Flask-HipChat-0.2.12.tar.gz/AC-Flask-HipChat-0.2.12/ac_flask/hipchat/addon.py | addon.py |
# About
ACAutomaton Python Package
High-performance multi-string lookup data structure
# Notice
1. If you want to insert unicode strings, please encode them to byte strings first.
2. Once you insert a new word into the ACAutomaton, please remember to call the build method. You can call the build method multiple times.
# Install
pip install ACAutomaton
# Usage
>>> from ACAutomaton import ACAutomaton
>>> a = ACAutomaton()
>>> a.insert('11')
>>> a.insert('22')
>>> a.insert('33')
>>> a.build()
>>> a.matchOne('0011222333')
(2, '11')
>>> a.matchOne('00')
(-1, None)
>>> a.matchAll('0011222333')
[(2, '11'), (4, '22'), (5, '22'), (7, '33'), (7, '33'), (8, '33'), (8, '33')]
An example for unicode strings:
>>> from ACAutomaton import ACAutomaton
>>> a = ACAutomaton()
>>> a.insert('你好')
>>> a.insert('你坏')
>>> a.insert('你')
>>> a.build()
>>> a.matchOne('你好你坏你')
(0, '\xe4\xbd\xa0')
>>> a.matchAll('你好你坏你不存在')
[(0, '\xe4\xbd\xa0'), (0, '\xe4\xbd\xa0\xe5\xa5\xbd'), (6, '\xe4\xbd\xa0'), (6, '\xe4\xbd\xa0\xe5\x9d\x8f'), (12, '\xe4\xbd\xa0')]
>>> a.matchAll('不存在')
[]
>>> a.insert('不存在')
>>> a.build()
>>> a.matchAll('不存在')
[(0, '\xe4\xb8\x8d\xe5\xad\x98\xe5\x9c\xa8')]
| ACAutomaton | /ACAutomaton-1.0.3.tar.gz/ACAutomaton-1.0.3/README.md | README.md |
Python implementation of a very basic language compiler that never throws errors while compiling. (almost)
It's not a big and complex compiler, and its implementation is somewhat awful.
Some links are given below.
## Errors
The only errors raised by the compiler are:
- the source code contains characters not present in the provided alphabet;
- the provided vocabulary doesn't follow its writing conventions;
If these conditions are respected, whatever you give to the __ACCC__, it will always return something valid.
(but it can be an empty code)
## Bias
If the compiled source code is too short, or made of lots of repetitions,
some biases can appear:
- always same values in object code
- lots of neutral values
The bigger the vocabulary and the longer the list of lexems,
the less bias will appear.
## Interests
A compilable source code is a string of characters.
Valid characters are provided at Compiler instantiation.
For example, if you have the alphabet *'01'*,
any string exclusively composed of *'0'* and *'1'* is compilable and will produce something.
Any little modification of the string can lead to heavy or no modification of object code.
In fact, with ACCC you can generate mutations of a source code without any compilation errors.
Writing code with lots of parameters is another way to do almost the same thing.
## Object code
Currently, current object langage is __very simple__: you can compare things, and do things.
That's all. No loops, variables, functions, objects,… Just conditions and actions.
This is an example of code, not totally illogic, created one time with a source code size of 60 and the alphabet '01':
(indentation can miss)
if parameter1 == parameter2 and haveThat:
do_that
if have_that:
say_this
do_that
if know_that and have_many_things:
do_that
say_this
do_that
if have_many_things:
say_this
Please have a look at the docstring of the *Compiler* class for more details about that. (notably the used vocabulary)
## I/O speaking
Inputs:
- iterable of characters (doublons are unexpected) that compose the source code
- vocabulary used for compiling
Outputs:
- a python compilable code, according to vocabulary
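For example, here is a minimal, untested sketch of a compilation run, assuming the imports match this package's layout (`accc/compiler/compiler.py` and `accc/langspec/langspec.py`) and reusing the vocabulary from the example above:

    from accc.compiler.compiler import Compiler
    from accc.langspec.langspec import python_spec

    compiler = Compiler(
        alphabet='01',
        target_language_spec=python_spec,
        comparables=('parameter1', 'parameter2'),
        predicats=('have_that', 'know_that'),
        actions=('do_that', 'say_this'),
        operators=('==', '<'),
    )
    source = '010011000110101100010010'
    print(compiler.compile(source))
    # any mutation of the source still compiles to something valid
    print(compiler.compile(source.replace('0', '1', 1)))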
## Next improvements
In random-priority order:
- [ ] allow lexems to have arguments;
- [ ] create before convert in any langage;
- [ ] allow configuration of output langage;
- [ ] unit tests;
- [ ] usage example;
- [ ] base tables on source code instead of only vocabulary;
- [X] upload on pypi and github (see links below);
## Why don't you use…
Someone does the same thing? Or better?
Give me the link, I want to see that!
## Why do that ?
1. It's fun
2. I need it to test something in another project (an Evolution simulation named [EvolAcc](http://www.github.com/Aluriak/EvolAcc); no surprise)
## Links
- ACCC on [github](http://www.github.com/Aluriak/ACCC);
- ACCC on [pypi](https://pypi.python.org/pypi/ACCC);
| ACCC | /ACCC-0.0.3.tar.gz/ACCC-0.0.3/README.mkd | README.mkd |
#########################
# IMPORTS #
#########################
from accc.lexems import *
#########################
# PRE-DECLARATIONS #
#########################
# keys of
INDENTATION = 'indent'
BEG_BLOCK = 'begin_block'
END_BLOCK = 'end_block'
BEG_LINE = 'begin line'
END_LINE = 'end line'
BEG_ACTION = 'begin action'
END_ACTION = 'end action'
BEG_CONDITION = 'begin condition'
END_CONDITION = 'end condition'
LOGICAL_AND = 'logical and'
LOGICAL_OR = 'logical or'
#########################
# CONSTRUCTION FUNCTION #
#########################
def constructSpec(indentation, begin_block, end_block, begin_line, end_line,
begin_action, end_action,
begin_condition, end_condition,
logical_and, logical_or):
"""Return a language specification based on parameters."""
return {
INDENTATION : indentation,
BEG_BLOCK : begin_block,
END_BLOCK : end_block,
BEG_LINE : begin_line,
END_LINE : end_line,
BEG_ACTION : begin_action,
END_ACTION : end_action,
BEG_CONDITION : begin_condition,
END_CONDITION : end_condition,
LOGICAL_AND : logical_and,
LOGICAL_OR : logical_or
}
#########################
# TRANSLATED FUNCTION #
#########################
def translated(structure, values, lang_spec):
"""Return code associated to given structure and values,
translate with given language specification."""
# LANGUAGE SPECS
indentation = '\t'
endline = '\n'
object_code = ""
stack = []
# define shortcuts to behavior
push = lambda x: stack.append(x)
pop = lambda : stack.pop()
last = lambda : stack[-1] if len(stack) > 0 else ' '
def indented_code(s, level, end):
return lang_spec[INDENTATION]*level + s + end
# recreate python structure, and replace type by value
level = 0
CONDITIONS = [LEXEM_TYPE_PREDICAT, LEXEM_TYPE_CONDITION]
ACTION = LEXEM_TYPE_ACTION
DOWNLEVEL = LEXEM_TYPE_DOWNLEVEL
for lexem_type in structure:
        if lexem_type == ACTION:
# place previous conditions if necessary
if last() in CONDITIONS:
# construct conditions lines
value, values = values[0:len(stack)], values[len(stack):]
object_code += (indented_code(lang_spec[BEG_CONDITION]
+ lang_spec[LOGICAL_AND].join(value)
+ lang_spec[END_CONDITION],
level,
lang_spec[END_LINE]
))
# if provided, print the begin block token on a new line
if len(lang_spec[BEG_BLOCK]) > 0:
object_code += indented_code(
lang_spec[BEG_BLOCK],
level,
lang_spec[END_LINE]
)
stack = []
level += 1
# and place the action
object_code += indented_code(
lang_spec[BEG_ACTION] + values[0],
level,
lang_spec[END_ACTION]+lang_spec[END_LINE]
)
values = values[1:]
elif lexem_type in CONDITIONS:
push(lexem_type)
        elif lexem_type == DOWNLEVEL:
if last() not in CONDITIONS:
# down level, and add a END_BLOCK only if needed
level -= 1
if level >= 0:
object_code += indented_code(
lang_spec[END_BLOCK], level,
lang_spec[END_LINE]
)
else:
level = 0
# add END_BLOCK while needed for reach level 0
while level > 0:
level -= 1
if level >= 0:
object_code += indented_code(
lang_spec[END_BLOCK], level,
lang_spec[END_LINE]
)
else:
level = 0
# Finished !
return object_code
#########################
# C++ #
#########################
def cpp_spec():
"""C++ specification, provided for example, and java compatible."""
return {
INDENTATION : '\t',
BEG_BLOCK : '{',
END_BLOCK : '}',
BEG_LINE : '',
END_LINE : '\n',
BEG_ACTION : '',
END_ACTION : ';',
BEG_CONDITION : 'if(',
END_CONDITION : ')',
LOGICAL_AND : ' && ',
LOGICAL_OR : ' || '
}
#########################
# ADA #
#########################
def ada_spec():
"""Ada specification, provided for example"""
return {
INDENTATION : '\t',
BEG_BLOCK : '',
END_BLOCK : 'end if;',
BEG_LINE : '',
END_LINE : '\n',
BEG_ACTION : '',
END_ACTION : ';',
BEG_CONDITION : 'if ',
END_CONDITION : ' then',
LOGICAL_AND : ' and ',
LOGICAL_OR : ' or '
}
#########################
# PYTHON #
#########################
def python_spec():
"""Python specification, provided for use"""
return {
INDENTATION : '\t',
BEG_BLOCK : '',
END_BLOCK : '',
BEG_LINE : '',
END_LINE : '\n',
BEG_ACTION : '',
END_ACTION : '',
BEG_CONDITION : 'if ',
END_CONDITION : ':',
LOGICAL_AND : ' and ',
LOGICAL_OR : ' or '
    }
| ACCC | /ACCC-0.0.3.tar.gz/ACCC-0.0.3/accc/langspec/langspec.py | langspec.py |
from math import log, ceil
from itertools import zip_longest
from functools import partial, lru_cache
import itertools
import accc.langspec as langspec
#########################
# PRE-DECLARATIONS #
#########################
# lexems seens in structure
from accc.lexems import LEXEM_TYPE_CONDITION, LEXEM_TYPE_ACTION
from accc.lexems import LEXEM_TYPE_PREDICAT, LEXEM_TYPE_DOWNLEVEL
# lexems only seen in values
from accc.lexems import LEXEM_TYPE_COMPARISON, LEXEM_TYPE_OPERATOR
from accc.lexems import LEXEM_TYPE_UINTEGER
# all lexems
from accc.lexems import ALL as ALL_LEXEMS
#########################
# COMPILER CLASS #
#########################
class Compiler():
"""
Compiler of code writed with any vocabulary. ('01', 'ATGC', 'whatevr',…)
A source code is an ordered list of vocabulary elements
('10011010000101', 'AGGATGATCAGATA', 'wtrvwhttera'…).
Whatever the given source_code, it's always compilable. (but can return empty object code)
Also, it can be totally illogic (do many times the same test, do nothing,…)
The source code is readed entirely for determine STRUCTURE,
and then re-readed for determines effectives VALUES.
The STRUCTURE defines:
- logic of the code
- lexems type that will be used
The VALUES defines:
- what are the exact value of each lexem
- values of integers used as function parameters
Example of prettified STRUCTURE:
if C:
A
if C:
A
A
if P and P:
A
A
A
if P:
A
VALUES will describes which is the lexem effectively used for each
word, C, A or P. (condition, action, predicat)
NB: D is the char that indicate a indent level decrease
The dictionnary values vocabulary, given at compiler creation, define lexems :
vocabulary_values = {
LEXEM_TYPE_COMPARISON: ('parameter1', 'parameter2', 'parameter3', 'parameter4'),
LEXEM_TYPE_PREDICAT : ('have_that', 'is_this', 'have_many_things', 'know_that'),
LEXEM_TYPE_ACTION : ('do_that', 'say_this'),
LEXEM_TYPE_OPERATOR : ('>', '==', '<', 'is', '!='),
}
Then, compiled code can be something like:
if parameter1 == parameter2 and have_that:
do_that
if have_that:
say_this
do_that
if know_that and have_many_things:
do_that
say_this
do_that
if have_many_things:
say_this
Modification of provided lexems types is not supported at this time.
"""
# CONSTRUCTOR #################################################################
def __init__(self, alphabet, target_language_spec, comparables, predicats, actions, operators,
neutral_value_condition='True', neutral_value_action='pass'):
"""
Wait for alphabet ('01', 'ATGC',…), language specification and vocabularies of
structure and values parts.
        Neutral values are used when no value can be found.
        Set them to something that passes in all cases.
        NB: a small source code leads to lots of neutral values.
"""
self.alphabet = alphabet
self.voc_structure = ALL_LEXEMS
self.target_lang_spec = target_language_spec()
self.voc_values = {
LEXEM_TYPE_COMPARISON: comparables,
LEXEM_TYPE_PREDICAT : predicats,
LEXEM_TYPE_ACTION : actions,
LEXEM_TYPE_OPERATOR : operators,
}
self.neutral_value_action = neutral_value_action
self.neutral_value_condition = neutral_value_condition
# verifications
assert(issubclass(neutral_value_action.__class__, str)
and issubclass(neutral_value_condition.__class__, str)
)
# prepare tables of words->lexems
self._initialize_tables()
# PUBLIC METHODS ###############################################################
def compile(self, source_code, post_treatment=''.join):
"""Compile given source code.
Return object code, modified by given post treatment.
"""
# read structure
structure = self._structure(source_code)
values = self._struct_to_values(structure, source_code)
# create object code, translated in targeted language
obj_code = langspec.translated(
structure, values,
self.target_lang_spec
)
# apply post treatment and return
return obj_code if post_treatment is None else post_treatment(obj_code)
# PRIVATE METHODS ##############################################################
def _initialize_tables(self):
"""Create tables for structure and values, word->vocabulary"""
# structure table
self.table_struct, self.idnt_struct_size = self._create_struct_table()
# values table
self.table_values, self.idnt_values_size = self._create_values_table()
# debug print
#print(self.table_struct)
#print(self.idnt_struct_size)
#print(self.table_values)
#print(self.idnt_values_size)
def _structure(self, source_code):
"""return structure in ACDP format."""
# define cutter as a per block reader
def cutter(seq, block_size):
for index in range(0, len(seq), block_size):
lexem = seq[index:index+block_size]
if len(lexem) == block_size:
yield self.table_struct[seq[index:index+block_size]]
return tuple(cutter(source_code, self.idnt_struct_size))
    def _next_lexem(self, lexem_type, source_code, source_code_size):
        """Return next readable lexem of given type in source_code.
        If no value can be found, None is returned and the caller
        substitutes the associated neutral value."""
        # define reader as a lexem extractor
        def reader(seq, block_size):
            identificator = ''
            for char in seq:
                if len(identificator) == block_size:
                    yield self.table_values[lexem_type][identificator]
                    identificator = ''
                identificator += char
        lexem_reader = reader(source_code, self.idnt_values_size[lexem_type])
        lexem = None
        time_out = 0
        while lexem is None and time_out < 2*source_code_size:
            lexem = next(lexem_reader)
            time_out += 1
        # here we have found a lexem, or given up after too many reads
        return lexem
def _next_condition_lexems(self, source_code, source_code_size):
"""Return condition lexem readed in source_code"""
# find three lexems
lexems = tuple((
self._next_lexem(LEXEM_TYPE_COMPARISON, source_code, source_code_size),
self._next_lexem(LEXEM_TYPE_OPERATOR , source_code, source_code_size),
self._next_lexem(LEXEM_TYPE_COMPARISON, source_code, source_code_size)
))
# verify integrity
if None in lexems: # one of the condition lexem was not found in source code
return None
else: # all lexems are valid
return ' '.join(lexems)
@lru_cache(maxsize = 100)
def _string_to_int(self, s):
"""Read an integer in s, in Little Indian. """
base = len(self.alphabet)
return sum((self._letter_to_int(l) * base**lsb
for lsb, l in enumerate(s)
))
@lru_cache(maxsize = None)
def _letter_to_int(self, l):
return self.alphabet.index(l)
@lru_cache(maxsize = 127) # source code is potentially largely variable on length
def _integer_size_for(self, source_code_size):
"""Find and return the optimal integer size.
A perfect integer can address all indexes of
a string of size source_code_size.
"""
return ceil(log(source_code_size, len(self.alphabet)))
def _struct_to_values(self, structure, source_code):
"""Return list of values readed in source_code,
according to given structure.
"""
        # iterate on source code until all values are found
        # if a value is not findable
        # (ie its identificator is not in source code),
        # it will be replaced by the associated neutral value
iter_source_code = itertools.cycle(source_code)
values = []
        for lexem_type in (l for l in structure if l != LEXEM_TYPE_DOWNLEVEL):
if lexem_type is LEXEM_TYPE_CONDITION:
new_value = self._next_condition_lexems(
iter_source_code, len(source_code)
)
else:
new_value = self._next_lexem(
lexem_type, iter_source_code, len(source_code)
)
# if values is unvalid:
# association with the right neutral value
if new_value is None:
if lexem_type in (LEXEM_TYPE_PREDICAT, LEXEM_TYPE_CONDITION):
new_value = self.neutral_value_condition
else:
new_value = self.neutral_value_action
values.append(new_value)
return values
# TABLE METHODS ################################################################
def _create_struct_table(self):
"""Create table identificator->vocabulary,
and return it with size of an identificator"""
len_alph = len(self.alphabet)
len_vocb = len(self.voc_structure)
identificator_size = ceil(log(len_vocb, len_alph))
# create list of lexems
num2alph = lambda x, n: self.alphabet[(x // len_alph**n) % len_alph]
identificators = [[str(num2alph(x, n))
for n in range(identificator_size)
]
for x in range(len_vocb)
]
# initialize table and iterable
identificators_table = {}
zip_id_voc = zip_longest(
identificators, self.voc_structure,
fillvalue=None
)
# create dict identificator:word
for idt, word in zip_id_voc:
identificators_table[''.join(idt)] = word
return identificators_table, identificator_size
def _create_values_table(self):
"""Create table lexem_type->{identificator->vocabulary},
and return it with sizes of an identificator as lexem_type->identificator_size"""
# number of existing character, and returned dicts
len_alph = len(self.alphabet)
identificators_table = {k:{} for k in self.voc_values.keys()}
identificators_sizes = {k:-1 for k in self.voc_values.keys()}
for lexem_type, vocabulary in self.voc_values.items():
# find number of different values that can be found,
# and size of an identificator.
len_vocb = len(vocabulary)
identificators_sizes[lexem_type] = ceil(log(len_vocb, len_alph))
# create list of possible identificators
num2alph = lambda x, n: self.alphabet[(x // len_alph**n) % len_alph]
identificators = [[str(num2alph(x, n))
for n in range(identificators_sizes[lexem_type])
] # this list is an identificator
for x in range(len_alph**identificators_sizes[lexem_type])
] # this one is a list of identificator
# initialize iterable
zip_id_voc = zip_longest(
identificators, vocabulary,
fillvalue=None
)
# create dict {identificator:word}
for idt, voc in zip_id_voc:
identificators_table[lexem_type][''.join(idt)] = voc
# return all
return identificators_table, identificators_sizes
# PREDICATS ###################################################################
# ACCESSORS ###################################################################
# CONVERSION ##################################################################
# OPERATORS ################################################################### | ACCC | /ACCC-0.0.3.tar.gz/ACCC-0.0.3/accc/compiler/compiler.py | compiler.py |
import yaml
import warnings
import getpass
import pandas as pd
import sys
import re
import pickle
def ask_user_password(prompt):
return getpass.getpass(prompt + ": ")
def create_mssql_connection(username='cranedra', host='clarityprod.uphs.upenn.edu', database='clarity_snapshot_db',
domain='UPHS',
port='1433', timeout=600, password=None):
from sqlalchemy import create_engine
if password is None:
password = ask_user_password("PW")
user = domain + '\\' + username
return create_engine('mssql+pymssql://{}:{}@{}:{}/{}?timeout={}'. \
format(user, password, host, port, database, timeout))
def get_clarity_conn(path_to_clarity_creds=None):
if path_to_clarity_creds is None:
print("put your creds in a yaml file somewhere safeish and then rerun this function with the path as argument")
return
with open(path_to_clarity_creds) as f:
creds = yaml.safe_load(f)
return create_mssql_connection(password=creds['pass'])
def get_res_dict(q, conn, params = None):
res = conn.execute(q, params)
data = res.fetchall()
data_d = [dict(zip(res.keys(), r)) for r in data]
return data_d
def SQLquery2df(q, conn, params=None):
return pd.DataFrame(get_res_dict(q, conn, params))
# function to get data
def get_from_clarity_then_save(query=None, clar_conn=None, save_path=None):
"""function to get data from clarity and then save it, or to pull saved data """
# make sure that you're not accidentally saving to the cloud
if save_path is not None:
# make sure you're not saving it to box or dropbox
assert ("Dropbox" or "Box") not in save_path, "don't save PHI to the cloud, you goofus"
# now get the data
try:
db_out = get_res_dict(query, clar_conn)
except Exception as e:
print(e)
# print("error: problem with query or connection")
return
# move it to a df
df = pd.DataFrame(db_out)
# save it
if save_path is not None:
try:
df.to_json(save_path)
except Exception:
print("error: problem saving the file")
return df
def get_res_with_values(q, values, conn):
res = conn.execute(q, values)
data = res.fetchall()
data_d = [dict(r.items()) for r in data]
return data_d
def chunks(l, n):
"""Yield successive n-sized chunks from l."""
for i in range(0, len(l), n):
yield l[i:i + n]
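# e.g. list(chunks([1, 2, 3, 4, 5], 2)) -> [[1, 2], [3, 4], [5]]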
def chunk_res_with_values(query, ids, conn, chunk_size=10000, params=None):
if params is None:
params = {}
res = []
for sub_ids in chunks(ids, chunk_size):
print('.', end='')
params.update({'ids': sub_ids})
res.append(pd.DataFrame(get_res_with_values(query, params, conn)))
print('')
return pd.concat(res, ignore_index=True)
def combine_notes(df, grouper="PAT_ENC_CSN_ID"):
full_notes = []
for g, dfi in df.groupby(grouper):
full_note = '\n'.join(
' '.join(list(dfi.sort_values(['NOTE_ENTRY_TIME', 'NOTE_LINE'])['NOTE_TEXT'])).split(' '))
row = dfi.iloc[0].to_dict()
_ = row.pop('NOTE_TEXT')
_ = row.pop('NOTE_LINE')
row['NOTE_TEXT'] = full_note
full_notes.append(row)
return pd.DataFrame(full_notes)
def combine_notes_by_type(df, CSN="PAT_ENC_CSN_ID", note_type="IP_NOTE_TYPE"):
full_notes = []
for g, dfi in df.groupby(CSN):
dfis = dfi.sort_values(['NOTE_ENTRY_TIME', 'NOTE_LINE'])[['NOTE_TEXT', note_type, 'NOTE_ENTRY_TIME',
'NOTE_ID', 'NOTE_STATUS']]
full_note = ""
current_type = "YARR, HERE BE ERRORS IF YE SCRY THIS IN ANY OUTPUT, MATEY"
for i in range(nrow(dfis)):
if current_type != dfis[note_type].iloc[i]: # prepend separator
full_note += f"\n\n#### {dfis[note_type].iloc[i]}, {dfis['NOTE_ENTRY_TIME'].iloc[i]}, " \
f"ID: {dfis['NOTE_ID'].iloc[i]}, " \
f"Status: {note_status_mapper(dfis['NOTE_STATUS'].iloc[i])} ####\n"
current_type = dfis[note_type].iloc[i]
full_note += '\n'.join(dfis['NOTE_TEXT'].iloc[i].split(' '))
row = dfi.iloc[0].to_dict()
_ = row.pop('NOTE_TEXT')
_ = row.pop('NOTE_LINE')
_ = row.pop(note_type)
row['NOTE_TEXT'] = full_note
full_notes.append(row)
return pd.DataFrame(full_notes)
def combine_all_notes(df, cohort):
d = df.sort_values(['NOTE_ID', 'CONTACT_NUM']).drop_duplicates(['NOTE_ID', 'NOTE_LINE'], keep='last')
d = d.merge(cohort, on='PAT_ENC_CSN_ID', how='left')
f = combine_notes(d)
del d
return f
def make_sql_string(lst, dtype="str", mode="wherelist"):
assert dtype in ["str", 'int']
assert mode in ["wherelist", 'vallist']
if dtype == "int":
lst = [str(i) for i in lst]
if mode == "wherelist":
if dtype == "str":
out = "('" + "','".join(lst) + "')"
elif dtype == "int":
out = "(" + ",".join(lst) + ")"
elif mode == "vallist":
if dtype == "str":
out = "('" + "'),('".join(lst) + "')"
elif dtype == "int":
out = "(" + "),(".join(lst) + ")"
return out
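# e.g. make_sql_string(["a", "b"], dtype="str", mode="wherelist") -> "('a','b')"
#      make_sql_string([1, 2], dtype="int", mode="vallist")       -> "(1),(2)"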
def write_txt(txt, path):
with open(path, "w") as text_file:
text_file.write(txt)
def write_pickle(x, path):
with open(path, 'wb') as handle:
pickle.dump(x, handle, protocol=pickle.HIGHEST_PROTOCOL)
def query_filtered_with_temp_tables(q, fdict, rstring=""):
"""
The q is the query
the fdict contains the info on how to filter, and what the foreign table is
the rstring is some random crap to append to the filter table when making lots of temp tables through multiprocessing
"""
base_temptab = """
IF OBJECT_ID('tempdb..#filter_n') IS NOT NULL BEGIN DROP TABLE #filter_n END
CREATE TABLE #filter_n (
:idname :type NOT NULL,
PRIMARY KEY (:idname)
);
INSERT INTO #filter_n
(:idname)
VALUES
:ids;
"""
# added Aug 10: if the foreign_key isn't in each fdict dict entry, input the name of the base fdict key by default:
for k in fdict.keys():
try:
_ = fdict[k]['foreign_key']
except KeyError:
fdict[k]['foreign_key'] = k
# tally ho:
base_footer = "join #filter_n on #filter_n.:idname = :ftab.:fkey \n"
filter_header = ""
filter_footer = ""
for i in range(len(fdict)):
tti = re.sub(":idname", list(fdict.keys())[i], base_temptab)
dtype = list(set(type(j).__name__ for j in fdict[list(fdict.keys())[i]]['vals']))
assert len(dtype) == 1
dtype = dtype[0]
valstring = make_sql_string(fdict[list(fdict.keys())[i]]['vals'], dtype=dtype, mode='vallist')
tti = re.sub(":ids", valstring, tti)
if dtype == "str":
tti = re.sub(":type", "VARCHAR(255)", tti)
elif dtype == "int":
tti = re.sub(":type", "INT", tti)
tti = re.sub("filter_n", f"filter_{i}_{rstring}", tti)
filter_header += tti
fi = re.sub(":idname", list(fdict.keys())[i], base_footer)
fi = re.sub(":fkey", fdict[list(fdict.keys())[i]]['foreign_key'], fi)
fi = re.sub("filter_n", f"filter_{i}_{rstring}", fi)
fi = re.sub(":ftab", fdict[list(fdict.keys())[i]]['foreign_table'], fi)
filter_footer += fi
outq = filter_header + "\n" + q + "\n" + filter_footer
return outq
def read_txt(path):
with open(path, 'r') as f:
return f.read()
def nrow(x):
return x.shape[0]
def ncol(x):
return x.shape[1]
def note_status_mapper(x):
d = {
1: "Incomplete",
2: "Signed",
3: "Addendum",
4: "Deleted",
5: "Revised",
6: "Cosigned",
7: "Finalized",
8: "Unsigned",
9: "Cosign Needed",
10: "Incomplete Revision",
11: "Cosign Needed Addendum",
12: "Shared"
}
if type(x).__name__ == "str":
return d[int(x)]
elif x is None:
return "None"
elif type(x).__name__ == "int":
return d[x]
else:
raise Exception("feeding note mapper something it didn't like")
def get_csn_from_har(csns, clar_conn):
'''input is a list of csns'''
csnstring = ','.join(["'" + str(i) + "'" for i in csns])
q = '''
with HAR as (
select peh.HSP_ACCOUNT_ID
from PAT_ENC_HSP as peh
where peh.PAT_ENC_CSN_ID in
(:csns)
)
select peh.PAT_ENC_CSN_ID
from PAT_ENC_HSP as peh
inner join HAR on peh.HSP_ACCOUNT_ID = HAR.HSP_ACCOUNT_ID
'''
q = re.sub(":csns", csnstring, q)
newcsns = get_from_clarity_then_save(q, clar_conn = clar_conn)
return newcsns.PAT_ENC_CSN_ID.astype(str).tolist()
def sheepish_mkdir(path):
import os
try:
os.mkdir(path)
except FileExistsError:
pass
if __name__ == "__main__":
print("Special message from the department of redundant verbosity department:") | ACD-helpers | /ACD_helpers-0.1.2-py3-none-any.whl/ACD_helpers/ACD_shared_functions.py | ACD_shared_functions.py |
from collections import defaultdict
# pylint: disable=too-few-public-methods
class Pedigree(dict):
"""
Parses a pedigree file and allows different views
"""
def __init__(self, file_name):
super(Pedigree, self).__init__()
self.samples = self.keys
self.families = defaultdict(list)
with open(file_name, 'r') as fh:
for line in fh:
if line.startswith("#"):
continue
data = line.strip().split('\t')
# first five columns are the core fields; any remaining columns are phenotype data
n_ped = _PedSample(*data[:5], phenotype=data[5:])
self.families[n_ped.fam_id].append(n_ped)
if n_ped.ind_id in self:
raise KeyError("Duplicate Individual Id %s" % n_ped.ind_id)
self[n_ped.ind_id] = n_ped
# Give parents a presence in the ped, even if they didn't have a line
for ind in self.values():
if ind.pat_id not in self and ind.pat_id != "0":
self[ind.pat_id] = _PedSample(ind.fam_id, ind.pat_id, "0", "0", "1", "0")
if ind.mat_id not in self and ind.mat_id != "0":
self[ind.mat_id] = _PedSample(ind.fam_id, ind.mat_id, "0", "0", "2", "0")
# Set parent's offspring
for n_ped in self.values():
if n_ped.pat_id in self:
self[n_ped.pat_id].offspring.append(n_ped)
n_ped.father = self[n_ped.pat_id]
if n_ped.mat_id in self:
self[n_ped.mat_id].offspring.append(n_ped)
n_ped.mother = self[n_ped.mat_id]
def filter(self, inc_fam=None, exc_fam=None, inc_indiv=None, exc_indiv=None):
"""
Exclude anything that's exc in the pedigree.
Include only anything that's inc in the pedigree.
"""
if inc_fam is not None:
for i in list(self.keys()):  # copy the keys so entries can be deleted safely
if self[i].fam_id not in inc_fam:
del self[i]
if inc_indiv is not None:
for i in list(self.keys()):
if self[i].ind_id not in inc_indiv:
del self[i]
if exc_fam is not None:
for i in list(self.keys()):
if self[i].fam_id in exc_fam:
del self[i]
if exc_indiv is not None:
for i in list(self.keys()):
if self[i].ind_id in exc_indiv:
del self[i]
def all_male(self):
"""
Returns all male individuals
"""
for i in self:
if self[i].sex == "1":
yield self[i]
def all_female(self):
"""
Returns all female individuals
"""
for i in self:
if self[i].sex == "2":
yield self[i]
def all_affected(self):
"""
Returns all affected individuals
"""
for i in self:
if self[i].phenotype == "2":
yield self[i]
def all_unaffected(self):
"""
Returns all unaffected individuals
"""
for i in self:
if self[i].phenotype == "1":
yield self[i]
def get_siblings(self, indiv):
"""
Returns the siblings of an individual
"""
for i in self:
if self[i].pat_id == self[indiv].pat_id or self[i].mat_id == self[indiv].mat_id:
yield self[i]
def get_trio_probands(self):
"""
Yields _PedSample probands that are part of a trio, i.e. neither parent is 0
"""
for indiv in self.values():
if indiv.mat_id != '0' and indiv.pat_id != '0':
yield indiv
def get_quad_probands(self):
"""
Yields _PedSample proband tuples that are part of an exact quad.
"""
for fam in self.families:
already_yielded = {}
for indiv in self.families[fam]:
if indiv.ind_id in already_yielded:
continue
if indiv.mat_id != "0" and indiv.pat_id != "0":
siblings = set(self[indiv.mat_id].offspring).intersection(set(self[indiv.pat_id].offspring))
if len(siblings) == 2:
# yield the pair once and remember the sibling so it isn't yielded again
for sib in siblings:
if indiv != sib:
already_yielded[sib.ind_id] = 1
yield (indiv, sib)
class _PedSample(object):
"""
An individual in a pedigree
Family ID
Individual ID
Paternal ID
Maternal ID
Sex (1=male; 2=female; other=unknown)
Phenotype
"""
def __init__(self, fam_id, ind_id, pat_id, mat_id, sex, phenotype):
self.fam_id = fam_id
self.ind_id = ind_id
self.pat_id = pat_id
self.mat_id = mat_id
self.sex = sex
self.phenotype = phenotype
self.father = None
self.mother = None
self.offspring = []
def __hash__(self):
return hash(self.ind_id)
def __repr__(self):
return "PedigreeSample<%s:%s %s>" % (self.fam_id, self.ind_id, self.sex)
def __str__(self):
return "\t".join([self.fam_id, self.ind_id, self.pat_id,
self.mat_id, self.pat_id, self.sex,
"\t".join(self.phenotype)]) | ACEBinf | /ACEBinf-1.0.2.tar.gz/ACEBinf-1.0.2/acebinf/pedigree.py | pedigree.py |
from __future__ import print_function
from io import StringIO
import swalign
def aligner(reference, assembly, s_anchor, e_anchor, match=2, mismatch=-1, gap_penalty=-2, gap_extension_decay=0.5):
"""
Given two anchor reference ranges map a query sequence align using swalign
returns the alignment
"""
positions = [s_anchor.start, s_anchor.end, e_anchor.start, e_anchor.end]
start = min(positions)
end = max(positions)
# We're assuming non tloc
chrom = s_anchor.chromosome
scoring = swalign.NucleotideScoringMatrix(match, mismatch)
sw = swalign.LocalAlignment(scoring, gap_penalty=gap_penalty, gap_extension_decay=gap_extension_decay)
aln = sw.align(str(reference.make_range(chrom, start, end).sequence), str(assembly))
return aln
def aln_to_vcf(anchor, aln, header=True):
"""
Turns an swalign into a vcf written to a StringIO
this can be parsed by pyvcf directly or written to a file
It currently has no INFO and a fake FORMAT of GT=0/1
if header is true, turn write a vcf header
"""
ret = BytesIO()
if header:
ret.write(('##INFO=<ID=NS,Number=1,Type=Integer,Description="Number of smaples">\n'
'##FORMAT=<ID=GT,Number=1,Type=String,Description="Genotype">\n'
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT\tSAMPLE\n'))
tovcf = lambda chrom, pos, ref, alt: ret.write("{chrom}\t{pos}\t.\t{ref}\t{alt}\t.\tPASS\tNS=1\tGT\t0/1\n".format(
chrom=chrom, pos=pos, ref=ref, alt=alt))
# Position in the ref/query sequence
# this will be off if the assembly maps to a sub-sequence of the anchor region
base_position = anchor.start + 1
rpos = 0
qpos = 0
chrom = anchor.chromosome
query = aln.query
reference = aln.ref
for size, code in aln.cigar:
if code == "M":
for _ in range(size):
if query[qpos] != reference[rpos]:
# SNP. Can't handle MNPs yet
tovcf(chrom, base_position, reference[rpos], query[qpos])
rpos += 1
qpos += 1
base_position += 1
elif code == 'I': # ins
tovcf(chrom, base_position - 1, reference[rpos - 1], reference[rpos - 1] + query[qpos:qpos + size])
qpos += size
elif code == 'D': # del
tovcf(chrom, base_position - 1, reference[rpos - 1:rpos + size], reference[rpos - 1])
rpos += size
base_position += size
ret.seek(0)
return ret | ACEBinf | /ACEBinf-1.0.2.tar.gz/ACEBinf-1.0.2/acebinf/aligner.py | aligner.py |
import sys
import traceback
import multiprocessing
import logging
class Consumer(multiprocessing.Process):
"""
Basic Consumer. Pulls tasks from task_queue, runs them via __call__,
and puts the finished tasks onto result_queue.
NOTE! tasks can't hold anything that isn't pickle-able for the subprocess
task_queue, result_queue
Should add timeout functionality - I know I have it somewhere
"""
def __init__(self, task_queue, result_queue):
multiprocessing.Process.__init__(self)
self.task_queue = task_queue
self.result_queue = result_queue
def run(self):
try:
while True:
next_task = self.task_queue.get()
if next_task is None:
# Poison pill means shutdown
self.task_queue.task_done()
break
try:
next_task()
except Exception as e: # pylint: disable=broad-except
logging.error("Exception raised in task - %s", str(e))
exc_type, exc_value, exc_traceback = sys.exc_info()
logging.error("Dumping Traceback:")
traceback.print_exception(exc_type, exc_value, exc_traceback, file=sys.stderr)
next_task.failed = True
next_task.errMessage = str(e)
self.result_queue.put(next_task)
self.task_queue.task_done()
return
except Exception as e: # pylint: disable=broad-except
logging.error("Consumer %s Died\nERROR: %s", self.name, e)
return
class ConsumerPool(object):
"""
A resource for making a pool of consumer multiprocesses
The tasks passed in via put must be callable (__call__)
finished tasks are then yielded back.
Usage:
>>> procs = ConsumerPool(THREADS)
>>> procs.start_pool()
>>> for stuff in things:
>>> procs.put_task(MyTask(stuff, Param1, Param2))
>>> procs.put_poison()
>>>
>>> for pcmp_result in procs.get_tasks():
>>> pass # Do work on your MyTasks
"""
def __init__(self, threads=1):
"""
Does all the work
"""
self.threads = threads
self.input_queue = multiprocessing.JoinableQueue()
self.output_queue = multiprocessing.Queue()
self.processes = [Consumer(self.input_queue, self.output_queue)
for i in range(self.threads)] # pylint: disable=unused-variable
self.task_count = 0
def start_pool(self):
"""
run start on all processes
"""
for proc in self.processes:
proc.start()
def put_task(self, task):
"""
Add a callable task to the input_queue
"""
self.task_count += 1
self.input_queue.put(task)
def put_poison(self):
"""
For each process, add a poison pill so that it will close
once the input_queue is depleted
"""
for i in range(self.threads):
logging.debug("Putting poison %d", i)
self.input_queue.put(None)
def get_tasks(self):
"""
Yields the finished tasks
"""
remaining = self.task_count
while remaining:
ret_task = self.output_queue.get()
remaining -= 1
yield ret_task | ACEBinf | /ACEBinf-1.0.2.tar.gz/ACEBinf-1.0.2/acebinf/multiprocess_utils.py | multiprocess_utils.py |
# A python wrapper for the Answer Constraint Engine
This package is a wrapper for [ACE](http://sweaglesw.org/linguistics/ace)
## Installation
```bash
pip install ACEngine
```
## Quick Start
### English Grammar Resource
```python
from ace.paraphrase import generate_paraphrase
text = "The quick brown fox that jumped over the lazy dog took a nap."
paraphrase_list = generate_paraphrase(text)
for paraphrase in paraphrase_list:
print(paraphrase)
# The brown quick fox which jumped over the lazy dog took a nap.
# The brown quick fox that jumped over the lazy dog took a nap.
# A nap was taken by the brown quick fox which jumped over the lazy dog.
# The brown quick fox who jumped over the lazy dog took a nap.
# A nap was taken by the brown quick fox that jumped over the lazy dog.
# A nap was taken by the brown quick fox, which jumped over the lazy dog.
# A nap was taken by the quick brown fox, which jumped over the lazy dog.
# The brown quick fox, which jumped over the lazy dog, took a nap.
# The quick brown fox which jumped over the lazy dog took a nap.
# The quick brown fox, which jumped over the lazy dog, took a nap.
# A nap was taken by the brown quick fox who jumped over the lazy dog.
# A nap was taken by the brown quick fox, who jumped over the lazy dog.
# The quick brown fox that jumped over the lazy dog took a nap.
# A nap was taken by the quick brown fox, who jumped over the lazy dog.
# The brown quick fox, who jumped over the lazy dog, took a nap.
# The quick brown fox, who jumped over the lazy dog, took a nap.
# A nap was taken by the brown quick fox, that jumped over the lazy dog.
# A nap was taken by the quick brown fox, that jumped over the lazy dog.
# The brown quick fox, that jumped over the lazy dog, took a nap.
# The quick brown fox, that jumped over the lazy dog, took a nap.
# A nap was taken by the quick brown fox which jumped over the lazy dog.
# The quick brown fox who jumped over the lazy dog took a nap.
# A nap was taken by the quick brown fox that jumped over the lazy dog.
# A nap was taken by the quick brown fox who jumped over the lazy dog.
```
### The Jacy Japanese Grammar
```python
from ace.paraphrase import generate_paraphrase
text = "太郎 が 次郎 に 本 を 渡し た"
paraphrase_list = generate_paraphrase(text, grammar='jacy')
for paraphrase in paraphrase_list:
print(paraphrase)
# 太郎 が 次郎 に 本 を 渡し た
# 次郎 に 太郎 が 本 を 渡し た
# 次郎 に 本 を 太郎 が 渡し た
```
| ACEngine | /ACEngine-0.0.5.tar.gz/ACEngine-0.0.5/README.md | README.md |
import bz2
import os
import os.path as osp
import platform
import gdown
download_base_dir = osp.expanduser('~/.ace')
download_bin_dir = osp.join(download_base_dir, 'bin')
download_grammar_dir = osp.join(download_base_dir, 'grammar')
ace_versions = (
'0.9.31',
)
default_ace_version = '0.9.31'
english_resource_grammar_versions = (
'1214',
'2018',
)
default_english_resource_grammar_version = '2018'
jacy_urls = {
'2017': {
'osx':
'https://drive.google.com/uc?id=1YdHii_0NNpi_e-Xi_Oa3vL4MQ3yS-b2f',
'x86-64':
'https://drive.google.com/uc?id=1-vG00-IsTX1RxJaCcQSKVLK-7wAzEqEe',
},
}
default_jacy_version = '2017'
def get_ace(ace_version=default_ace_version):
if ace_version not in ace_versions:
raise RuntimeError('Could not find a version {} (from versions: {})'
.format(ace_version, ", ".join(ace_versions)))
pf = platform.system()
base_url = 'http://sweaglesw.org/linguistics/ace/download/ace-{}-{}.tar.gz'
if pf == 'Windows':
raise NotImplementedError('Not supported in Windows.')
elif pf == 'Darwin':
url = base_url.format(ace_version, 'osx')
bin_filename = 'ace-{}-{}'.format(ace_version, 'osx')
else:
url = base_url.format(ace_version, 'x86-64')
bin_filename = 'ace-{}-{}'.format(ace_version, 'x86-64')
bin_filename = osp.join(download_bin_dir, bin_filename)
name = osp.splitext(osp.basename(url))[0]
if not osp.exists(bin_filename):
gdown.cached_download(
url=url,
path=osp.join(download_bin_dir, name),
postprocess=gdown.extractall,
quiet=True,
)
os.rename(
osp.join(download_bin_dir, 'ace-{}'.format(ace_version), 'ace'),
bin_filename)
return bin_filename
def get_english_resource_grammar(
ace_version=default_ace_version,
erg_version=default_english_resource_grammar_version):
"""Get Precompiled grammar images.
"""
if ace_version not in ace_versions:
raise RuntimeError(
'Could not find an ACE version {} (from versions: {})'
.format(ace_version, ", ".join(ace_versions)))
if erg_version not in english_resource_grammar_versions:
raise RuntimeError(
'Could not find an ERG version {} (from versions: {})'
.format(erg_version, ", ".join(english_resource_grammar_versions)))
pf = platform.system()
base_url = 'http://sweaglesw.org/linguistics/ace/download/' \
'erg-{}-{}-{}.dat.bz2'
if pf == 'Windows':
raise NotImplementedError('Not supported in Windows.')
elif pf == 'Darwin':
url = base_url.format(erg_version, 'osx', ace_version)
name = 'erg-{}-{}-{}.dat'.format(erg_version, 'osx', ace_version)
else:
url = base_url.format(erg_version, 'x86-64', ace_version)
name = 'erg-{}-{}-{}.dat'.format(erg_version, 'x86-64', ace_version)
dat_filename = osp.join(download_grammar_dir, name)
bz2_file = osp.join(download_grammar_dir, name + '.bz2')
if not osp.exists(dat_filename):
gdown.cached_download(
url=url,
path=bz2_file,
quiet=True,
)
with open(bz2_file, 'rb') as f:
data = f.read()
with open(dat_filename, 'wb') as fw:
fw.write(bz2.decompress(data))
return dat_filename
def get_jacy_grammar(ace_version=default_ace_version,
jacy_version=default_jacy_version):
"""Get Precompiled grammar images.
https://github.com/delph-in/jacy
"""
if ace_version not in ace_versions:
raise RuntimeError(
'Could not find an ACE version {} (from versions: {})'
.format(ace_version, ", ".join(ace_versions)))
if jacy_version not in jacy_urls.keys():
raise RuntimeError(
'Could not find a jacy version {} (from versions: {})'
.format(jacy_version, ", ".join(jacy_urls.keys())))
pf = platform.system()
if pf == 'Windows':
raise NotImplementedError('Not supported in Windows.')
elif pf == 'Darwin':
url = jacy_urls[jacy_version]['osx']
name = 'jacy-{}-{}-{}.dat'.format(jacy_version, 'osx', ace_version)
else:
url = jacy_urls[jacy_version]['x86-64']
name = 'jacy-{}-{}-{}.dat'.format(jacy_version, 'x86-64', ace_version)
dat_filename = osp.join(download_grammar_dir, name)
bz2_file = osp.join(download_grammar_dir, name + '.bz2')
if not osp.exists(dat_filename):
gdown.cached_download(
url=url,
path=bz2_file,
quiet=True,
)
with open(bz2_file, 'rb') as f:
data = f.read()
with open(dat_filename, 'wb') as fw:
fw.write(bz2.decompress(data))
return dat_filename | ACEngine | /ACEngine-0.0.5.tar.gz/ACEngine-0.0.5/ace/data/ace_utils.py | ace_utils.py |
ACIOps
==============
Description
--------------
ACIOps is a collection of my personal methods/functions used in my programs. The module will return all the requested
information for you unformatted. Within this module you will find the following
tools:
+ APIC Login
+ Subnet Finder
+ View Tenants
+ Vlans Pools
+ Encapsulation Finder
+ Access Policy Mappings
+ Tenant vrfs
+ Application Profiles
+ Endpoint Groups
+ Bridge Domains
+ Endpoint Finder
**Version 2.0 additions
+ Create Tenant
+ Create App Profile
+ Create EPG
+ Create BD (l3/l2)
+ Routing Scope
+ Create VRF
+ Enable Unicast
Dependency Modules
__________
+ xml.etree.ElementTree
+ ipaddress
+ collections
+ json
+ warnings
+ request
+ re
Usage
_____
**Import**
>>>import ACIOperations.ACIOps as ops
Examples
---
Some methods can be run without any arguments and some cannot. The seed method is always login(), which produces the session
**Example 1 (Authentication: )**
>>> call_class = ops.AciOps()
>>> login = call_class.login(apic="192.168.1.1", username="JoeSmo", password="helpme!")
>>> print(call_class.session)
<requests.sessions.Session object at 0x00000253743CFB48>
>>>
**Example 2 (Fetch VLAN Pools: )**
>>>call_class.vlan_pools()
defaultdict(<class 'list'>, {'Pool1': 'vlan-10-vlan-20', 'Pool2': 'vlan-1000-vlan-2000'}
>>> pools = call_class.vlan_pools()
>>> for k, v in pools.items():
print("Pool: {} Range: {}".format(k, v))
Pool: Pool1 Range: vlan-10-vlan-20
Pool: Pool2 Range: vlan-1000-vlan-2000
**Example 3 (Find Encap: )**
>>>find_encap = call_class.find_encap(vlan="2000")
* Output omitted due to length
This will produce all access policies associated with an external fabric encapsulation
**Example 4 (Policy Mappings:)**
>>> policy_maps = call_class.policy_mappings()
* Output omitted due to length
This will map vlan pools, AAEP, phydoms, routeddoms, vmmdoms and return the mappings to the user.
**Example 5 (Infrastructure Info: )**
>>> infra = call_class.infr(pod=1)
>>> print(infra)
['Leaf101', 'N9K-C93180YC-EX', 'FDO21eddfrr', 'Leaf102', 'N9K-C93108TC-EX', 'FDO21rfeff', 'Spine101', 'N9K-C9336PQ', 'FDO2rffere']
**Example 6 (Find Subnet: )**
>>> find_subnet = call_class.subnet_finder(subnet="10.1.1.1/24")
>>> print(find_subnet)
('10.1.1.1/24', 'Customer1', 'BD-VL100', 'Customer1-VRF', 'Customer1-l3out', 'yes', 'public,shared', 'flood', ['ANP-Web'], ['EPG-WebServer'])
**Example 7 (View Tenants: )**
>>> tenants = call_class.view_tenants()
>>> print(tenants)
['infra', 'Customer-1', 'common', 'Customer-2']
>>>
**Example 8 (View Vrf: )**
>>> vrf = call_class.tenant_vrf(tenant="Customer-1")
>>> print(vrf)
defaultdict(<class 'list'>, {'vrf': ['Customer-1']})
>>>
**Example 9 (View Bridge Domains: )**
>>>call_class.view_bd(tenant="Example")
['L3BD', 'BDL3']
>>>
**Example 9 (View App Profiles: )**
>>>call_class.view_app_profiles(tenant="Example")
['Web', 'None']
**Example 10 (View EPG: )**
>>>call_class.view_epgs(tenant="Example", app="Web")
['Servers']
>>>
**Example 11 (Endpoint Tracker: )**
>>> endpoint = call_class.enpoint_tracker(endpoint="10.1.1.10")
>>> print(endpoint)
Name: 00:50:56:A0:77:88
EP: 00:50:56:A0:77:88
Encapsulation: vlan-200
Location: uni/tn-Customer-1/ap-ANP-WEB/epg-EPG-WEB/cep-00:50:56:A0:77:88
IP: 10.1.1.10
>>>
Send Operations
=====
Description
----
**The AciOpsSend class enables you to send configurations to ACI. You can run it from your own program or just use**
**the python console. Simple and easy methods inherited from our parent class in v1.0.0**
**Example 1 (Create Tenant: )**
>>> call_class = ops.AciOpsSend(apic="192.168.1.1", username="JoeSmo", password="Help!")
>>> create_tenant = call_class.create_tenant(tenant="Example")
>>> call_class.view_tenants()
['Example']
>>>
**Example 2 (Create App Profile: )**
>>> create_app = call_class.create_app_profile(tenant="Example", app="Web")
>>> call_class.create_app_profile(tenant="Example")
(<Response [200]>, defaultdict(<class 'list'>, {'name': ['Web', 'None']}))
>>>
**Example 3 (Create EPG: )**
>>> call_class.create_epg(tenant="Example", app="Web", epg="Servers")
(<Response [200]>, defaultdict(<class 'list'>, {'name': ['Servers']}))
>>>
**Example 4 (Create BD: )**
>>> call_class.create_bd_l3(tenant="Example", bd="L3BD", subnet="4.4.4.4/32")
(<Response [200]>, defaultdict(<class 'list'>, {'name': ['L3BD']}))
>>> call_class.subnet_finder(subnet="4.4.4.4/32")
('4.4.4.4/32', 'Example', 'L3BD', 'vrf', 'None', 'yes', 'private', 'proxy', 'None', 'None')
>>>
**Example 5 (Create vrf: )**
>>> call_class.create_vrf(tenant="Example", vrf="vrf-1")
(<Response [200]>, defaultdict(<class 'list'>, {'vrf': ['vrf-1']}))
>>>
**Example 6 (Enable Unicast Route: )**
>>> call_class.enable_unicast(tenant="Example", bd="L3BD", enable="no") **yes/no**
(<Response [200]>, '{"fvBD":{"attributes": {"name": "L3BD", "unicastRoute": "no"}}}')
>>>
**Example 7 (Assign Vrf to BridgeDomain: )**
>>>call_class.vrf_to_bd(tenant="Example", bd="BDL3", vrf="vrf-1")
(<Response [200]>, defaultdict(<class 'list'>, {'vrf': ['vrf-1']}))
>>>
**Example 8 (Routing Scope: )**
>>> call_class.routing_scope(tenant="Example", bd="BDL3", scope="private", subnet="4.4.4.4/32") **private|public|shared**
(<Response [200]>, defaultdict(<class 'list'>, {'name': ['L3BD', 'BDL3']}), {'IP': 'uni/tn-Example/BD-BDL3/subnet-[4.4.4.4/32]',
'Tenant': 'Example', 'BD': 'BDL3', 'vrf': 'vrf-1', 'L3Out': 'None', 'Route Enable': 'yes', 'Scope': 'private', 'Uni Flood': 'proxy',
'APs': 'None', 'EPGs': 'None'})
>>> | ACIOps | /ACIOps-2.0.0.tar.gz/ACIOps-2.0.0/README.rst | README.rst |
import requests
import json
import warnings
import xml.etree.ElementTree as ET
import re
import collections
import ipaddress
class AciOps:
"""Collects authentication information from user, returns session if successfull, or response if not"""
def __init__(self):
self.session = None
self.response = None
self.apic = None
self.vlan_dict = collections.defaultdict(list)
self.policies_dict = collections.defaultdict(list)
self.device_info = []
self.tenant_array = []
self.bd_array = []
self.ap_array = []
self.epg_array = []
self.vrf_array = []
self.json_header = {'content-type': 'application/json'}
def login(self, apic=None, username=None, password=None):
"""APIC authentication method. Takes username, password, apic kwargs and returns session"""
ignore_warning = warnings.filterwarnings('ignore', message='Unverified HTTPS request')
uri = "https://%s/api/aaaLogin.json" % apic
json_auth = {'aaaUser': {'attributes': {'name': username, 'pwd': password}}}
json_credentials = json.dumps(json_auth)
self.session = requests.Session()
self.apic = apic
try:
request = self.session.post(uri, data=json_credentials, verify=False)
self.response = json.loads(request.text)
except (requests.exceptions.ConnectionError, requests.exceptions.InvalidURL):
raise ConnectionError("Login Failed, Verify APIC Address")
try:
if self.response["imdata"][0]["error"]["attributes"]["code"] == "401":
raise ValueError("Login Failed, please verify credentials | Credential Submitted:\n{}\n{}"
.format(username, password))
except TypeError:
raise TypeError("Something Went Wrong")
except KeyError:
pass
else:
return self.session
def vlan_pools(self):
"""Get fabric vlan pools, return pool in a dictionary data structure"""
vlan_range_dict = collections.defaultdict(list)
uri = "https://" + self.apic + "/api/node/mo/uni/infra.xml?query-target=subtree&target-subtree-class=fvnsVlanInstP&target-subtree-class=fvnsEncapBlk&query-target=subtree&rsp-subtree=full&rsp-subtree-class=tagAliasInst"
request = self.session.get(uri, verify=False)
root = ET.fromstring(request.text)
for fvnsEncapBlk in root.iter("fvnsEncapBlk"):
vlan_pool_array = []
if "vxlan" in fvnsEncapBlk.get("from"):
continue
else:
vlan_pool_array.append(fvnsEncapBlk.get("from"))
vlan_pool_array.append(fvnsEncapBlk.get("to"))
start_range = fvnsEncapBlk.get("from")
end_range = fvnsEncapBlk.get("to")
dn = fvnsEncapBlk.get("dn")
parse_dn = re.findall(r'(?<=vlanns-\[).*?(?=]-[a-z])', dn)
vlan_range_dict[parse_dn[0]] = start_range + "-" + end_range
vlans = []
for vlan in vlan_pool_array:
remove_vlan = vlan.replace("vlan-", "")
vlan_range = remove_vlan.split("-")
vlans.append(vlan_range[0])
if "vxlan" in vlans:
pass
else:
vlans_unpacked = []
vlan_start = int(vlans[0])
vlan_end = int(vlans[1]) + 1
if vlan_start == vlan_end:
vlans_unpacked.append(str(vlan_end))
else:
begin = vlan_start
for i in range(vlan_start, vlan_end):
vlans_unpacked.append(str(begin))
begin = begin + 1
self.vlan_dict[parse_dn[0]].append(vlans_unpacked)
return request, vlan_range_dict
def find_encap(self, encap=None):
"""Takes in the vlan encapsulation, intiaates vlan_pool(0 and policy_mappings. Calls _find_encap_comiple to fin fabric information about the encapsulation
Returns a series of list to the caller"""
vlan_pool = collections.defaultdict(list)
phys_doms = collections.defaultdict(list)
aaeps = collections.defaultdict(list)
location = collections.defaultdict(list)
path = collections.defaultdict(list)
self.vlan_pools()
self.policy_mappings()
pools = self._find_encap_compile(encap)
vlan_pool[encap].append(pools[0])
phys_doms[encap].append(pools[1])
aaeps[encap].append(pools[2])
location[encap].append(pools[3])
path[encap].append(pools[4])
unpacked_vlan_pools = [v for k, v in vlan_pool.items() for v in v for v in v]
unpacked_phys_doms = [v for k, v in phys_doms.items() for v in v for v in v]
unpacked_aaep = [v for k, v in aaeps.items() for v in v for v in v]
unpacked_location = [v for k, v in location.items() for v in v for v in v]
unpacked_path = [v for k, v in path.items() for v in v for v in v]
return unpacked_vlan_pools, unpacked_phys_doms, unpacked_aaep, unpacked_location, unpacked_path
def _find_encap_compile(self, encap=None):
""" This method is for local use only. It works with vlan_pool() to produce a series of list and return them
to the call find_encap"""
pools = []
phy_doms = []
aaep = []
location = []
path = []
uri = "https://{}/api/class/fvRsPathAtt.xml?query-target-filter=eq(fvRsPathAtt.encap,\"vlan-{}\")".format(self.apic, encap)
request = self.session.get(uri, verify=False)
root = ET.fromstring(request.text)
if "\"0\"" in request.text:
print("Encap not Found")
else:
for fvRsPathAtt in root.iter("fvRsPathAtt"):
string = fvRsPathAtt.get("dn")
tenant = re.findall(r'(?<=tn-).*(?=/ap)', string)
location.append("Tenant: " + tenant[0])
ap = re.findall(r'(?<=ap-).*(?=/ep)', string)
location.append("App Profile: " + ap[0])
epg = re.findall(r'(?<=epg-).*(?=/rsp)', string)
location.append("EPG: " + epg[0])
if re.findall(r'(?<=protpaths-).*(?=/pat)', string):
path_1 = re.findall(r'(?<=protpaths-).*(?=/pat)', string)
elif re.findall(r'(?<=paths-).*(?=/pat)', string):
path_1 = re.findall(r'(?<=paths-).*(?=/pat)', string)
profile = re.findall(r'(?<=pathep-\[).*(?=]])', string)
path.append("Path: " + path_1[0] + ": " + profile[0])
for key_1, value_1 in self.vlan_dict.items():
for v in value_1:
for v in v:
if encap == v:
pools.append(key_1)
for key_2, value_2 in self.policies_dict.items():
if key_2 == key_1:
for v in value_2:
phy_doms.append(v)
for key_3, value_3 in self.policies_dict.items():
for dom in phy_doms:
if dom in value_3:
if "AAEP" in key_3:
aaep.append(key_3)
else:
continue
else:
pass
else:
continue
dup_pools = list(dict.fromkeys(pools))
dup_location = list(dict.fromkeys(location))
dup_aaep = list(dict.fromkeys(aaep))
return dup_pools, phy_doms, dup_aaep, dup_location, path
def policy_mappings(self):
"""Maps AAEPS, Vlan Pools, and phys/vmm/routed domain. Return dictionary data structure"""
uri = "https://" + self.apic + "/api/node/mo/uni.xml?query-target=subtree&target-subtree-class=physDomP&target-subtree-class=infraRsVlanNs,infraRtDomP&query-target=subtree"
headers = {'content-type': 'text/xml'}
request = self.session.get(uri, verify=False, headers=headers)
root = ET.fromstring(request.text)
for infraRtDomP in root.iter("infraRtDomP"):
string = infraRtDomP.get("dn")
if re.findall(r'phys-.*?[/]\b', string):
aaeps = re.findall(r'(?<=attentp-).*(?=])', string)
phys_dom = re.findall(r'(?<=phys-).*(?=/rt)', string)
self.policies_dict["AAEP " + aaeps[0]].append(phys_dom)
elif re.findall(r'l3dom-.*?[/]\b', string):
aaeps = re.findall(r'(?<=attentp-).*(?=])', string)
l3_dom = re.findall(r'(?<=l3dom-).*(?=/rt)', string)
self.policies_dict["AAEP " + aaeps[0]].append(l3_dom)
elif re.findall(r'vmmp-.*?[/]\b', string):
aaeps = re.findall(r'(?<=attentp-).*(?=])', string)
vmm_dom = re.findall(r'(?<=vmmp-).*(?=/rt)', string)
self.policies_dict["AAEP " + aaeps[0]].append(vmm_dom[0])
else:
continue
for infraRsVlanNs in root.iter("infraRsVlanNs"):
vl_pool_dn = infraRsVlanNs.get("tDn")
phys_dom_dn = infraRsVlanNs.get("dn")
if re.findall(r'(?<=phys-).*(?=/)', phys_dom_dn):
phys_dom = re.findall(r'(?<=phys-).*(?=/)', phys_dom_dn)
vlan_pool = re.findall(r'(?<=vlanns-\[).*(?=])', vl_pool_dn)
self.policies_dict[vlan_pool[0]].append(phys_dom)
elif re.findall(r'(?<=ledom-).*(?=/)', phys_dom_dn):
l3_dom = re.findall(r'(?<=l3dom-).*(?=/)', phys_dom_dn)
vlan_pool = re.findall(r'(?<=vlanns-\[).*(?=])', vl_pool_dn)
self.policies_dict[vlan_pool[0]].append(l3_dom)
elif re.findall(r'(?<=vmmp-).*(?=/)', phys_dom_dn):
vmm_dom = re.findall(r'(?<=vmmp-).*(?=/)', phys_dom_dn)
vlan_pool = re.findall(r'(?<=vlanns-\[).*(?=])', vl_pool_dn)
self.policies_dict[vlan_pool[0]].append(vmm_dom[0])
else:
continue
return request, self.policies_dict
def infr(self, pod=None):
"""Takes in pod number , and return all information about the fabric hardware. Greate for TAC use"""
pod_num = pod
pod_number = "pod-{}".format(pod_num)
uri = "https://{}/api/node/mo/topology/{}.xml?query-target=children".format(self.apic, pod_number)
request = self.session.get(uri, verify=False)
root = ET.fromstring(request.text)
for fabricNode in root.iter("fabricNode"):
fabric_node = fabricNode.get("name")
model_node = fabricNode.get("model")
serial_node = fabricNode.get("serial")
self.device_info.append(fabric_node)
self.device_info.append(model_node)
self.device_info.append(serial_node)
if not self.device_info:
return "No Infrastructor Information"
else:
return self.device_info
def view_tenants(self):
"""Returns ACI Tenant from the arbitrary APIC"""
uri = "https://{}/api/class/fvTenant.json".format(self.apic)
request = self.session.get(uri, verify=False)
response_dict = request.json()
total_count = int(response_dict["totalCount"])
try:
index = 0
self.tenant_array.clear()
for i in range(0, total_count):
self.tenant_array.append(response_dict["imdata"][index]["fvTenant"]["attributes"]["name"])
index = index + 1
except IndexError:
pass
return self.tenant_array
def subnet_finder(self, subnet=None):
""" Takes in kwarg subnet and finds all details about the subnet (BD, Tenant, scope etc."""
endpoint_dict = {}
uri = "https://{}/api/class/fvBD.xml?query-target=subtree".format(self.apic)
request = self.session.get(uri, verify=False)
root = ET.fromstring(request.text)
for fvSubnet in root.iter("fvSubnet"):
location = fvSubnet.get("dn")
ip = fvSubnet.get("ip")
if subnet in ip:
gps = location
gps_ip = ip
scope = fvSubnet.get("scope")
try:
for fvBD in root.iter("fvBD"):
bridge_domain = fvBD.get("name")
if re.findall('(?<=/BD-)' + bridge_domain + '(?=/)', gps):
gps_bd = bridge_domain
uni_route = fvBD.get("unicastRoute")
unkwn_uni = fvBD.get("unkMacUcastAct")
for fvRsCtx in root.iter("fvRsCtx"):
vrf = fvRsCtx.get("tnFvCtxName")
location = fvRsCtx.get("dn")
if re.findall('(?<=/BD-)' + gps_bd + '(?=/)', location):
gps_vrf = vrf
aps = []
epgs = []
for fvRtBd in root.iter("fvRtBd"):
dn = fvRtBd.get("dn")
if re.findall('(?<=/BD-)' + gps_bd + '(?=/)', dn):
ap = re.findall(r'(?<=ap-).*(?=/ep)', dn)
aps.append(ap[0])
epg = re.findall(r'(?<=epg-).*(?=\])', dn)
epgs.append(epg[0])
else:
pass
l3out = "None"  # default when no matching L3Out association is found
for fvRsBDToOut in root.iter("fvRsBDToOut"):
dn = fvRsBDToOut.get("dn")
if re.findall('(?<=/BD-)' + gps_bd + '(?=/)', dn):
if not fvRsBDToOut.get("tnL3extOutName"):
l3out = "N/A"
else:
l3out = fvRsBDToOut.get("tnL3extOutName")
for tenant in self.tenant_array:
if tenant in gps:
gps_tenant = tenant
else:
continue
unpack_ap = [i for i in aps]
if not unpack_ap:
unpack_ap = "None"
unpack_epg = [i for i in epgs]
if not unpack_epg:
unpack_epg = "None"
endpoint_dict["IP"] = gps
endpoint_dict["Tenant"] = gps_tenant
endpoint_dict["BD"] = gps_bd
endpoint_dict["vrf"] = gps_vrf
endpoint_dict["L3Out"] = l3out
endpoint_dict["Route Enable"] = uni_route
endpoint_dict["Scope"] = scope
endpoint_dict["Uni Flood"] = unkwn_uni
endpoint_dict["APs"] = unpack_ap
endpoint_dict["EPGs"] = unpack_epg
return endpoint_dict
except UnboundLocalError:
return "Subnet not found"
def view_tenant_vrf(self, tenant=None):
"""View Tenant vrf, return Tenant vrf names"""
uri = "https://{}/api/node/mo/uni/tn-{}.json?query-target=children&target-subtree-class=fvCtx"\
.format(self.apic, tenant)
request = self.session.get(uri, verify=False)
response = json.loads(request.text)
try:
index = 0
self.vrf_array.clear()
for i in range(0, 100):
self.vrf_array.append(response["imdata"][index]["fvCtx"]["attributes"]["name"])
index = index + 1
except IndexError:
pass
return self.vrf_array
def view_bd(self, tenant=None):
"""View Bridge domains of a Tenant, returns bridge domain names"""
uri = "https://{}/api/node/mo/uni/tn-{}.json?query-target=children&target-subtree-class=fvBD"\
.format(self.apic, tenant)
request = self.session.get(uri, verify=False)
response = json.loads(request.text)
total_count = int(response["totalCount"])
index = 0
self.bd_array.clear()
for i in range(0, total_count):
self.bd_array.append(response["imdata"][index]["fvBD"]["attributes"]["name"])
index = index + 1
return self.bd_array
def view_app_profiles(self, tenant=None):
"""View Application profiles of a particular Tenant, return App profiles"""
uri = "https://{}/api/node/mo/uni/tn-{}.json?query-target=children&target-subtree-class=fvAp"\
.format(self.apic, tenant)
request = self.session.get(uri, verify=False)
response = json.loads(request.text)
total_count = int(response["totalCount"])
index = 0
self.ap_array.clear()
for i in range(0, total_count):
self.ap_array.append(response["imdata"][index]["fvAp"]["attributes"]["name"])
index = index + 1
return self.ap_array
def view_epgs(self, tenant=None, app=None):
"""View endpoint groups of a particular Tenant-App profile, returns EPG names"""
uri = "https://{}/api/node/mo/uni/tn-{}/ap-{}.json?query-target=children&target-subtree-class=fvAEPg"\
.format(self.apic, tenant, app)
request = self.session.get(uri, verify=False)
response = json.loads(request.text)
total_count = int(response["totalCount"])
index = 0
self.epg_array.clear()
for i in range(0, total_count):
self.epg_array.append(response["imdata"][index]["fvAEPg"]["attributes"]["name"])
index = index + 1
return self.epg_array
def enpoint_tracker(self, endpoint=None):
"""This method take in a IP or MAC address and returns the endpoint data. Return string if no endpoint
is found"""
try:
ipaddress.IPv4Address(endpoint)
uri = "https://%s" % self.apic + "/api/node/class/fvCEp.xml?rsp-subtree=full&rsp-subtree-include=" \
"required&rsp-subtree-filter=eq(fvIp.addr," + "\"%s\"" % endpoint
except ValueError:
uri = "https://%s" % self.apic + "/api/node/class/fvCEp.xml?rsp-subtree=full&rsp-subtree-class=" \
"fvCEp,fvRsCEpToPathEp,fvIp,fvRsHyper,fvRsToNic,fvRsToVm&query-target-filter=eq(fvCEp.mac," \
+ "\"%s\"" % endpoint
request = self.session.get(uri, verify=False)
root = ET.fromstring(request.text)
result = None
for fvCEp in root.iter("fvCEp"):
ep_name = fvCEp.get("name")
ep_mac = fvCEp.get("mac")
encap = fvCEp.get("encap")
ep_loc = fvCEp.get("dn")
ep_ip = fvCEp.get("ip")
result = ("Name: {0:20}\nEP: {1:<20}\nEncapsulation: {2:<20}\nLocation: {3:<20}\nIP: {4:<20}"
.format(ep_name, ep_mac, encap, ep_loc, ep_ip))
if result is None:
return "Endpoint Not Found"
return result
class AciOpsSend(AciOps):
"""ACI send basic configs. Return value will be APIC response in dictionary structure, or string notify the caller of
and error"""
def __init__(self, **kwargs):
""" Import * from AciOps class. Use AciOps login method to create a http session. Once session has been
intiated, call AciOps view_tenants method. The AciOps self.session and self.tenant_array will be used
throughout"""
super().__init__()
self.login(apic=kwargs["apic"], username=kwargs["username"], password=kwargs["password"])
self.view_tenants()
def create_tenant(self, tenant=None):
"""Create tenant, arg supplied will be tenants name. Conditional check will be done o insure no duplicates"""
uri = """https://{}/api/mo/uni.json""".format(self.apic)
if tenant not in self.tenant_array:
tenants = """{"fvTenant" : { "attributes" : { "name" : "%s"}}}""" % tenant
request = self.session.post(uri, verify=False, data=tenants, headers=self.json_header)
tenants = self.view_tenants()
return request, tenants
else:
return "Tenant: %s Exist" % tenant
def create_app_profile(self, tenant=None, app=None):
"""Create app prof, args supplied will be tenant, and app prof name.
Conditional check will be done to insure no duplicates"""
app_profiles = self.view_app_profiles(tenant=tenant)
if app not in app_profiles:
uri = "https://" + self.apic + "/api/mo/uni/tn-" + tenant + ".json"
app_profile = "{\"fvAp\": " \
"{\"attributes\": " \
"{\"name\": \"%s\"}}}}" % app
request = self.session.post(uri, verify=False, data=app_profile, headers=self.json_header)
app_profiles = self.view_app_profiles(tenant=tenant)
return request, app_profiles
else:
return "App Profile: %s Exist " % app
def create_epg(self, tenant=None, app=None, epg=None):
"""Create epg, args supplied will be tenant, and app prof name, and epg name
Conditional check will be done to insure no duplicates"""
epgs = self.view_epgs(tenant=tenant, app=app)
if epg not in epgs:
uri = "https://" + self.apic + "/api/mo/uni/tn-" + tenant + "/ap-" + app + ".json"
epg = "{\"fvAEPg\":" \
"{\"attributes\": " \
"{\"name\": \"%s\"}}}}" % epg
request = self.session.post(uri, verify=False, data=epg, headers=self.json_header)
epgs = self.view_epgs(tenant=tenant, app=app)
return request, epgs
else:
return "EPG: %s Exist" % epg
def create_bd_l3(self, tenant=None, bd=None, subnet=None, scope=None):
"""Create bd, args supplied will be tenant. Conditional check will be done to insure no duplicates"""
bds = self.view_bd(tenant=tenant)
if bd not in bds:
uri = "https://" + self.apic + "/api/mo/uni/tn-" + tenant + ".json"
bridge_dom = "{\"fvBD\":" \
"{\"attributes\": " \
"{\"name\": \"%s\"" % bd + "}," \
"\"children:[" \
"{\"fvSubnet\": " \
"{\"attributes\":" \
"{\"ip\": \"%s\"" % subnet + "," \
"{\"scope\": \"%s\"" % scope + "}}}}]}}}"
request = self.session.post(uri, verify=False, data=bridge_dom, headers=self.json_header)
bds = self.view_bd(tenant=tenant)
bd_info = self.subnet_finder(subnet=subnet)
return request, bds, bd_info
else:
return "BD: %s Exist" % bd
def routing_scope(self, tenant=None, bd=None, subnet=None, scope=None):
"""Configuring routing scope (shared, private, external). First we split the scope to check for validity
if valid, use the orignal scope arg for the variable"""
split_scope = scope.split(",")
scope_list = ["private", "public", "shared"]
bds = self.view_bd(tenant=tenant)
for scope in split_scope:
if scope not in scope_list:
raise ValueError("Invalid Scope \"{}\" - Expecting private|public|shared".format(scope))
else:
pass
if bd in bds:
uri = "https://" + self.apic + "/api/mo/uni/tn-" + tenant + "/BD-" + bd + "/subnet-[" + subnet + "].json"
bridge_dom = "{\"fvSubnet\": " \
"{\"attributes\":" \
"{\"scope\": \"%s\"" % scope + "}}}"
request = self.session.post(uri, verify=False, data=bridge_dom, headers=self.json_header)
bds = self.view_bd(tenant=tenant)
bd_info = self.subnet_finder(subnet=subnet)
return request, bds, bd_info
else:
return "BD: %s Exist" % bd
def enable_unicast(self, tenant=None, bd=None, enable=None):
"""Create bd, args supplied will be tenant Conditional check will be done to insure no duplicates,
require yes/no input"""
bds = self.view_bd(tenant=tenant)
yes_no = ["yes", "no"]
if enable not in yes_no:
raise ValueError("Invalid arg \"{}\" - Expecting yes/no".format(enable))
if bd in bds:
uri = "https://" + self.apic + "/api/mo/uni/tn-" + tenant + ".json"
bridge_dom = "{\"fvBD\":" \
"{\"attributes\": " \
"{\"name\": \"%s\"" % bd + ", \"" \
"unicastRoute\": \"%s\"" % enable + "}}}"
request = self.session.post(uri, verify=False, data=bridge_dom, headers=self.json_header)
return request, bridge_dom
else:
return "BD: %s Exist" % bd
def create_bd_l2(self, tenant=None, bd=None):
"""Create L2 bd, args supplied will be tenant Conditional check will be done to insure no duplicates"""
bds = self.view_bd(tenant=tenant)
if bd not in bds:
uri = "https://" + self.apic + "/api/mo/uni/tn-" + tenant + ".json"
bridge_dom = "{\"fvBD\":" \
"{\"attributes\": " \
"{\"name\": \"%s\"" % bd + "}}}"
request = self.session.post(uri, verify=False, data=bridge_dom, headers=self.json_header)
bds = self.view_bd(tenant=tenant)
return request, bds
else:
return "BD: %s Exist" % bd
def create_vrf(self, tenant=None, vrf=None):
"""Create tenant vrf, args supplied will be tenant Conditional check will be done to insure no duplicates"""
vrfs = self.view_tenant_vrf(tenant=tenant)
if vrf not in vrfs:
uri = "https://" + self.apic + "/api/mo/uni/tn-" + tenant + ".json"
vrf = "{\"fvCtx\":" \
"{\"attributes\": " \
"{\"name\": \"%s\"" % vrf + "}}}"
request = self.session.post(uri, verify=False, data=vrf, headers=self.json_header)
vrfs = self.view_tenant_vrf(tenant=tenant)
return request, vrfs
else:
return "Vrf: %s Exist" % vrf
def vrf_to_bd(self, tenant=None, bd=None, vrf=None):
"""Assign vrf to bd, args supplied will be tenant, bd name, vrf name
Conditional check will be done to insure vrf has been configured"""
vrfs = self.view_tenant_vrf(tenant=tenant)
if vrf in vrfs:
uri = "https://" + self.apic + "/api/mo/uni/tn-" + tenant + ".json"
vrf_bd = "{\"fvBD\":" \
"{\"attributes\": " \
"{\"name\": \"%s\"" % bd + "}," \
"\"children:[" \
"{\"fvRsCtx \": " \
"{\"attributes\":" \
"{\"tnFvCtxName\": \"%s\"" % vrf + "}}}]}}}"
request = self.session.post(uri, verify=False, data=vrf_bd, headers=self.json_header)
vrfs = self.view_tenant_vrf(tenant=tenant)
return request, vrfs
else:
return "VRF: %s Doesn't Exist " % vrf | ACIOps | /ACIOps-2.0.0.tar.gz/ACIOps-2.0.0/ACIOperations/ACIOps.py | ACIOps.py |
# 
# ACME oneM2M CSE
An open source CSE Middleware for Education.
Version 0.3.0
## Introduction
This CSE implements a subset of the oneM2M standard specializations (see [http://www.onem2m.org](http://www.onem2m.org)). The intention is to provide an easy to install, extensible, and easy to use and maintain CSE for educational purposes. Also see the discussion on [Limitations](#limitations) below.

## Prerequisites
In order to run the CSE the following prerequisites must be fulfilled:
- **Python 3.8** : Install this or a newer version of Python with your favorite package manager.
- You may consider to use a virtual environment manager like pyenv + virtualenv (see, for example, [this tutorial](https://realpython.com/python-virtual-environments-a-primer/)).
- **flask**: The CSE uses the [Flask](https://flask.palletsprojects.com/) web framework. Install it by running the pip command:
pip3 install flask
- **psutil**: The [psutil](https://pypi.org/project/psutil/)
package is used to gather various system information for the CSE's hosting node resource. Install it by running the pip command:
pip3 install psutil
- **requests**: The CSE uses the [Requests](https://requests.readthedocs.io) HTTP Library to send requests vi http. Install it by running the pip command:
pip3 install requests
- **tinydb** : To store resources the CSE uses the lightweight [TinyDB](https://github.com/msiemens/tinydb) document database. Install it by running the pip command:
pip3 install tinydb
## Installation and Configuration
Install the ACME CSE by copying the whole distribution to a new directory. You also need to copy the configuration file [acme.ini.default](acme.ini.default) to a new file *acme.ini* and make adjustments to that new file.
cp acme.ini.default acme.ini
Please have a look at the configuration file. All the CSE's settings are read from this file.
There are a lot of individual things to configure here. Mostly, the defaults should be sufficient, but individual settings can be applied to each of the sections.
## Running
### Running the Notifications Server
If you want to work with subscriptions and notifications, you might want to have a Notifications Server running before starting the CSE. The Notification Server provided with the CSE in the [tools/notificationServer](tools/notificationServer) directory provides a very simple implementation that receives and answers notification requests.
See the [README](tools/notificationServer/README.md) file for further details.
### Running the CSE
You can start the CSE by simply running it from a command line:
python3 acme.py
In this case the configuration file *acme.ini* must be in the same directory.
In addition, you can provide command line arguments that will override the respective settings from the configuration file (see the example invocation after this list):
- **-h**, **--help** : show a help message and exit.
- **--apps**, **--noapps**: Enable or disable the built-in applications. This overrides the settings in the configuration file.
- **--config CONFIGFILE**: Specify a configuration file that is used instead of the default (*acme.ini*) one.
- **--db-reset**: Reset and clear the database when starting the CSE.
- **--db-storage {memory,disk}**: Specify the DB's storage mode.
- **--log-level {info, error, warn, debug, off}**: Set the log level, or turn logging off.
- **--import-directory IMPORTDIRECTORY**: Specify the import directory.
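For example, to start the CSE with a custom configuration file (the file name *myconfig.ini* here is just a placeholder), an in-memory database, and debug logging:

```bash
python3 acme.py --config myconfig.ini --db-storage memory --log-level debug
```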
### Stopping the CSE
The CSE can be stopped by pressing *CTRL-C* **once** on the command line.
Please note, that the shutdown might take a moment (e.g. gracefully terminating background processes, writing database caches, sending notifications etc).
**Being impatient and hitting *CTRL-C* twice might lead to data corruption.**
### Downloading and Running a Docker Image
A Docker image with reasonable defaults is available on Docker Hub: [https://hub.docker.com/repository/docker/ankraft/acme-onem2m-cse](https://hub.docker.com/repository/docker/ankraft/acme-onem2m-cse) .
You can download and run it with the following shell command:
```bash
$ docker run -p 8080:8080 --rm --name acme-onem2m-cse ankraft/acme-onem2m-cse
```
#### Build Your Own Docker Image
You can adapt (ie. configure a new Docker Hub ID) the build script and *Dockerfile* in the [tools/Docker](tools/Docker) directory. The build script takes the current scripts, configuration, resources etc, builds a new Docker image, and uploads the image to the configured Docker Hub repository.
### Importing Resources
During startup it is possible to import resources into to CSE. Each resource is read from a single file in the [init](./init) resource directory specified in the configuration file.
Not much validation, access control, or registration procedures are performed for imported resources.
#### Importing Mandatory Resources
**Please note** that importing is required for creating the CSEBase resource and at least two (admin) ACP resources. Those are imported before all other resources, so that the CSEBase resource can act as the root for the resource tree. The *admin* ACP is used to access resources with the administrator originator. The *default* ACP resource is the one that is assigned for resources that don't specify an ACP on their own.
The filenames for these resources must be:
- [csebase.json](init/csebase.json) for the CSEBase.
- [acp.admin.json](init/acp.admin.json) for the admin ACP.
- [acp.default.json](init/acp.default.json) for the default ACP.
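As a purely illustrative sketch (not taken from this repository), a minimal *csebase.json* could look roughly like the following, using the common oneM2M short names `m2m:cb` (CSEBase), `rn` (resource name), and `csi` (CSE-ID). The files shipped in the [init](./init) directory define the authoritative attribute set:

```json
{
    "m2m:cb": {
        "rn": "cse-in",
        "csi": "id-in"
    }
}
```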
#### Importing Other Resources
After importing the mandatory resources all other resources in the [init](./init) directory are read in alphabetical order and are added (created) to the CSE's resource tree. Imported resources must have a valid *acpi* attribute, because no default *acpi* is assigned during importing.
#### Updating Resources
If the filename contains the substring *update*, then the resource specified by the resource's *ri* attribute is updated instead of created.
#### Examples & Templates
A minimal set of resources is provided in the [init](./init) directory. Definitions for a more sophisticated setup can be found in the [tools/init.example](tools/init.example) directory. To use these examples, you can either copy the resources to the *init* directory or change the "cse -> resourcesPath" entry in the *acme.ini* configuration file.
The directory [tools/resourceTemplates](tools/resourceTemplates) contains templates for supported resource types. Please see the [README](tools/resourceTemplates/README.md) there for further details.
## Web UI
The Web UI is enabled by default and reachable under the (configurable) path `<host>/webui`.
- To log in you need to specify a valid originator. The default "admin" originator is *CAdmin*.
- Besides the default *CSEBase* resource you can specify a different resource identifier as the root of the resource tree.
- You can navigate the resource tree with arrow keys.
- You can switch between short and long attribute names (press CTRL-H).
### REST UI
The web UI also provides a REST UI where you can send REST requests directed at resources on the CSE.

## Operation
### Remote CSE
When a CSE is configured as an MN-CSE or ASN-CSE it can connect to a remote CSE; conversely, an IN-CSE or MN-CSE can receive connection requests from those CSE types. A *remoteCSE* resource is created in case of a successful connection. A CSE regularly checks the connection to other remote CSEs and removes the *remoteCSE* resource if the connection could not be established.
Announced resources are currently **not** supported by this implementation. But you can issue transit requests to a remote CSE via its *remoteCSE* resource. These requests are forwarded by the CSE.
You must configure the details of the remote CSE in the configuration file.
### CSE Originator Assignment
Whenever a new *ACP* resource is created, the CSE's admin *originator* is assigned to that resource automatically. This way resources can always be accessed by this originator.
This behaviour can be configured in the *[cse.resource.acp]* section of the configuration file.
### AE Registration
Whenever a new *AE* registers itself with the CSE (using the originator *C* or *S*), a new originator for that *AE* is created. The CSE also automatically creates a new *ACP* resource for that new originator.
Be aware that this *ACP* resource is also removed when the *AE* is deleted.
The operations for the *ACP* resource can be configured in the *[cse.resource.acp]* section of the configuration file.
## Nodes and Applications
Currently, two component implementations are provided in addition to the main CSE. They serve as examples of how to implement components that are hosted by the CSE itself.
### CSE Node
This component implements a *node* resource that provides additional information about the actual node (system) the CSE is running on. The information is provided as *mgmtObj* specializations, namely battery, memory, and device information.
It can be enabled/disabled and configured in the **[app.csenode]** section of the configuration file.
### Statistics AE
This component implements an *AE* resource that provides statistical information about the CSE. It defines a proprietary *flexContainer* specialization that contains custom attributes for various statistics, and which is updated every few seconds.
It can be enabled/disabled and configured in the **[app.statistics]** section of the configuration file.
### Developing Nodes and AEs
You can develop your own components that run inside the CSE itself by following the pattern of those two components:
- Implement a class with either *AEBase* or *NodeBase* as a base class. This will create an *AE* or *node* resource for you.
- Implement a worker method and start it in the *\_\_init\_\_()* method. This worker method is called regularly in the background and can implement the main functionality of the *AE* or *node*.
- Implement a *shutdown()* method that is called when the CSE shuts down.
- Add your new component to the following methods in [acme/CSE.py](acme/CSE.py):
- *startApps()*: starting your component.
- *stopApps()*: shutting down your component.
There are more helper methods provided by the common *AppBase* and *AEBase* base classes, e.g. to send requests to the CSE via Mca, to store AE data persistently, etc.
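The following sketch only illustrates the pattern. The import path, the *AEBase* constructor arguments, and the *startWorker()* helper are placeholders; please take the actual names and signatures from the two provided components:
```python
# A sketch only: the import path, constructor arguments, and the
# worker-scheduling helper are placeholders; see the CSE Node and
# Statistics AE components for the actual names and signatures.
from AEBase import AEBase


class MyAE(AEBase):

    def __init__(self):
        super().__init__()              # placeholder arguments; creates the AE resource
        self.startWorker(self.worker)   # placeholder helper; runs worker() regularly in the background

    def worker(self):
        # Implement the main functionality of the AE here,
        # e.g. update a resource via the Mca helper methods.
        pass

    def shutdown(self):
        # Called when the CSE shuts down.
        super().shutdown()
```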
## Integration Into Other Applications
It is possible to integrate the CSE into other applications, e.g. a Jupyter Notebook. In this case you may want to provide startup arguments, for example the path of the configuration file or the logging level, directly instead of getting them from *argparse*.
You can take the starter file [acme.py](acme.py) as an example and replace the line:
```python
CSE.startup(parseArgs())
```
with a call to the CSE's *startup()* function:
```python
CSE.startup(None, configfile=defaultConfigFile, loglevel='error')
```
Please note that when you provide the arguments directly, the first argument needs to be `None`.
The names of the *argparse* variables can be used here, and you may provide all or only some of the arguments. Please note that you need to keep or copy the `import` and `sys.path` statements at the top of that file.
## URL Mappings
As a convenience to access resources on a CSE, and to let requests look more like "normal" REST requests, you can define mappings. A mapping maps one path to another path plus arguments. When a request is issued to one of those mapped paths, the HTTP server redirects it to the target path.
For example, the path */access/v1/devices* can be mapped to */cse-mn?ty=14&fu=1&fo=2&rcn=8* to easily retrieve all nodes from the CSE.
See the configuration file for more examples.
## Limitations
- **This is by no means a fully compliant, secure or stable CSE! Don't use it in production.**
- This CSE is intended for educational purposes. The underlying database system is not optimized in any way for high-volume or high-availability use cases.
- No support for https yet.
- Security: None. Please contact me if you have suggestions to improve this.
- Unsupported resource types are just stored, but no check or functionality is provided for those resources. The same is true for unknown resource attributes. Only a few attributes are validated.
## Supported Resource Types and Functionalities
### Resources
The CSE supports the following oneM2M resource types:
- **CSEBase (CB)**
- **Access Control Policy (ACP)**
- **Remote CSE (CSR)**
Announced resources are not yet supported. Transit requests to resources on the remote CSE, though, are supported.
- **Application Entity (AE)**
- **Container (CNT)**
- **Content Instance (CIN)**
- **Subscription (SUB)**
Notifications via HTTP to a direct URL or an AE's Point-of-Access (POA) are supported as well.
- **Group (GRP)**
The support includes requests via the *fopt* (fan-out-point) virtual resource.
- **Node (NOD)**
The support includes the following **Management Object (mgmtObj)** specializations:
- **Firmware (FWR)**
- **Software (SWR)**
- **Memory (MEM)**
- **AreaNwkInfo (ANI)**
- **AreaNwkDeviceInfo (ANDI)**
- **Battery (BAT)**
- **DeviceInfo (DVI)**
- **DeviceCapability (DVC)**
- **Reboot (REB)**
- **EventLog (EVL)**
- **FlexContainer Specializations**
Any specialization is supported. No check is performed against a schema (e.g. via the *cnd* attribute).
Resources of any other type are stored in the CSE but not processed further, and no checks are performed on them. Their type is marked as *unknown*.
### Discovery
The following result contents are implemented for Discovery:
- attributes + child-resources (rcn=4)
- attributes + child-resource-references (rcn=5)
- child-resource-references (rcn=6)
- child-resources (rcn=8)
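As an example, the following request discovers all *Container* resources and returns only their references (rcn=6). Host, port, CSE base path, and request identifier are placeholders here; the headers are the same ones the REST UI uses:
```bash
$ curl 'http://localhost:8080/<cse-base>?fu=1&ty=3&rcn=6' \
      -H 'X-M2M-Origin: CAdmin' \
      -H 'X-M2M-RI: 1234' \
      -H 'Accept: application/json'
```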
## Third-Party Components
### CSE
- Flask: [https://flask.palletsprojects.com/](https://flask.palletsprojects.com/), BSD 3-Clause License
- Requests: [https://requests.readthedocs.io/en/master/](https://requests.readthedocs.io/en/master/), Apache2 License
- TinyDB: [https://github.com/msiemens/tinydb](https://github.com/msiemens/tinydb), MIT License
- PSUtil: [https://github.com/giampaolo/psutil](https://github.com/giampaolo/psutil), BSD 3-Clause License
### UI Components
- TreeJS: [https://github.com/m-thalmann/treejs](https://github.com/m-thalmann/treejs), MIT License
- Picnic CSS : [https://picnicss.com](https://picnicss.com), MIT License
## Roadmap & Backlog
- CSE: Announcements
- CSE: Better resource validations
- CSE: Timeseries
- CSE: Support discovery also for other request types
- UI: Support for resource specific actions (e.g. latest, oldest)
- UI: Graph for Container resources
- Importer: Automatically import/update resources when the CSE is running
- App development: support more specializations
## The Messy Details

## License
BSD 3-Clause License for the CSE and its native components and modules. Please see the individual licenses of the used third-party components.
| ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/README.md | README.md |
# Changelog
## Unreleased 0.3.0 - 2020-04-20
- [CSE] Discovery supports "attributes + children" return content.
- [CSE] Changed command line argument --reset-db to --db-reset.
- [CSE] Added command line argument --db-storage.
- [CSE] Added support for FlexContainerInstance.
- [CSE] Fixed discovery results: ignore latest, oldest, and fixed result format.
- [CSE] Added command line arguments --apps / --no-apps to enable and disable internal applications. Also added entry in config file.
- [CSE] Added sorting of discovery results. Configurable.
## 0.2.1 - 2020-03-06
- [APPS] Fixed wrong originator handling for already registered AEs.
- [APPS] Added persistent storage support for AEs.
## 0.2.0 - 2020-03-02
- [CSE] Checking and setting "creator" attribute when creating new resources.
- [ACP] Always add "admin" originator to newly created ACPs (configurable).
- [ACP] Improved default ACP. Any new resource without ACP gets the default ACP assigned.
- [AE] Added proper AE registration. An ACP is automatically created for a new AE, and also removed when the corresponding AE is removed.
- [LOGGING] Added option to enable/disable logging to a log file (Logging:enableFileLogging). If disabled, log-messages are only written to the console.
- [LOGGING] Possibility to disable logging on the command line.
- [IMPORTING] Added default ACP.
- [WEB] Browser requests to "/" will now redirect to the web UI's URL.
- [WEB] REST UI will not refresh anymore when automatic refresh is on.
- [WEB] Added LOGO & favicon.
- [MISC] Various fixes and improvements.
## 0.1.0 - 2020-02-09
- First release
| ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/CHANGELOG.md | CHANGELOG.md |
function getChildren(node, errorCallback) {
resource = node.resource
// get children
var ri = resource['ri'] + "?fu=1&lvl=1&rcn=6" // TODO move this to the getchildren request
var client = new HttpClient();
addr = cseid + "/" + ri
// addr = "/" + ri
	client.getChildren(addr, node, function(response) { // TODO
	//client.getChildren(cseid + "/" + ri, node, function(response) { // TODO
// remove all children, if any
removeChildren(node)
resource = JSON.parse(response)
ris = resource["m2m:uril"]
for (ri of ris) {
// TODO in extra function createNode()
var childNode = new TreeNode(ri);
childNode.on("click", clickOnNode)
childNode.on("expand", expandNode)
childNode.on("collapse", collapseNode)
childNode.on("contextmenu", function(e,n) { showContextMenu(e, n) })
childNode.ri = ri
childNode.wasExpanded = false
childNode.setExpanded(false)
childNode.resolved = false
node.addChild(childNode)
}
if (node != root) {
if (node.wasExpanded) {
node.setExpanded(true)
clickOnNode(null, node)
expandNode(node)
} else {
node.setExpanded(false)
}
} else { // Display the root node expanded and show attributes etc
expandNode(root)
root.setSelected(true)
clickOnNode(null, root)
}
// add short info in front of name
ty = node.resource['ty']
pfx = shortTypes[ty]
if (ty == 13) {
var mgd = node.resource['mgd']
if (mgd == undefined) {
pfx = "MGO"
} else {
pfx = mgdShortTypes[mgd]
}
}
if (pfx == undefined) {
pfx = "unknown"
}
node.setUserObject(pfx + ": " + node.getUserObject())
if (tree != null) {
tree.reload()
}
}, function() {
typeof errorCallback === 'function' && errorCallback();
});
}
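// Retrieve a single resource and update the UI's connection state. On success
// the connect button is marked as connected; on errors the tree and info areas
// are cleared (empty "la"/"ol" virtual resources are handled gracefully).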
function getResource(ri, node, callback) {
_getResource(ri, node, function(node) {
document.getElementById("connectButton").className = "button success"
document.getElementById("connectButton").text = "Connected"
typeof callback === 'function' && callback(node);
}, function() { // error callback
if (node.ri.endsWith("/la") || node.ri.endsWith("/ol")) { // special handling for empty la or ol
node.setUserObject(node.ri.slice(-2))
node.resolved = true
tree.reload()
return
}
document.getElementById("connectButton").className = "button error"
document.getElementById("connectButton").text = "Reconnect"
showAppArea(false)
var x = document.getElementById("treeContainer");
x.innerHTML = "";
tree = null;
root = null;
clearResourceInfo()
clearRootResourceName()
clearAttributesTable()
clearJSONArea()
// TODO Display Error message
})
}
function _getResource(ri, node, callback, errorCallback) {
var client = new HttpClient();
client.get(ri, node, function(response) { // TODO
resource = JSON.parse(response)
var k = Object.keys(resource)[0]
var oldUserObject = node.getUserObject()
node.hasDetails = true
if (oldUserObject.endsWith("/la")) {
node.setUserObject("la")
} else if (oldUserObject.endsWith("/ol")) {
node.setUserObject("ol")
} else if (oldUserObject.endsWith("/fopt")) {
node.setUserObject("fopt")
node.hasDetails = false
} else {
node.setUserObject(resource[k].rn)
}
node.resource = resource[k]
node.resourceFull = resource
node.resolved = true
node.ri = ri
//node.wasExpanded = false
getChildren(node, null)
typeof callback === 'function' && callback(node);
}, function(response, status) {
typeof errorCallback === 'function' && errorCallback(status);
});
}
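// (Re-)connect to the CSE: read the originator and base resource ID from the
// input fields, build a new tree starting at the base resource, and show the
// application area on success.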
function connectToCSE() {
clearAttributesTable()
clearJSONArea()
clearResourceInfo()
clearRootResourceName()
delete nodeClicked
// Get input fields
originator = document.getElementById("originator").value;
rootri = document.getElementById("baseri").value;
root = new TreeNode("");
root.on("click", clickOnNode)
root.on("expand", expandNode)
root.on("collapse", collapseNode)
root.on("contextmenu", function(e,n) { showContextMenu(e, n) })
tree = new TreeView(root, "#treeContainer");
getResource(rootri, root, function(node) {
showAppArea(true)
setRootResourceName(node.resource.rn)
// remove the focus from the input field
document.activeElement.blur();
var x = document.getElementById("appArea")
x.focus()
})
}
function toggleRefresh() {
if (typeof refreshTimer !== "undefined") {
document.getElementById("refreshButton").className = "button"
cancelRefreshResource()
} else {
document.getElementById("refreshButton").className = "button success"
setupRefreshResource(5)
}
}
function showAppArea(state) {
var x = document.getElementById("appArea")
var f = document.getElementById("originator")
if (state) {
x.style.display = "block";
} else {
x.style.display = "none";
// inputfield focus
f.focus()
// f.select()
}
}
var cursorEnabled = true;
// callback for info tabs
function tabTo( number) {
switch(number) {
case 1: cursorEnabled = true; break;
case 2: cursorEnabled = true; break;
case 3: cursorEnabled = false; break;
}
}
function setup() {
// document.body.style.zoom=0.6;
this.blur();
var x = document.getElementById("baseri");
cseid = getUrlParameterByName("ri")
x.value = cseid
document.title = "ACME CSE - " + cseid
// hide when not connected
showAppArea(false)
setupContextMenu()
// add key event listener for refresh
document.onkeypress = function(e) {
		let key = e.key.toUpperCase();	// use the handler's own event parameter, not the global 'event'
if (key == 'R' && e.ctrlKey) {
refreshNode()
} else if (key == 'H' && e.ctrlKey) {
printLongNames = !printLongNames
clearAttributesTable()
if (nodeClicked.hasDetails) {
fillAttributesTable(nodeClicked.resource)
}
} else if (key == 'C' && e.ctrlKey) {
connectToCSE();
}
}
document.onkeydown = function(e) {
		let keyCode = e.keyCode
if (cursorEnabled == false) {
return
}
if (typeof nodeClicked === "undefined") {
return
}
p = nodeClicked.parent
if (typeof p !== "undefined") {
index = p.getIndexOfChild(nodeClicked)
count = p.getChildCount()
}
if (keyCode == 40 && typeof p !== "undefined") { // down
index = (index + 1) % count
newnode = p.getChildren()[index]
clickOnNode(null, newnode)
} else if (keyCode == 38 && typeof p !== "undefined") { // up
index = (index + count - 1) % count
newnode = p.getChildren()[index]
clickOnNode(null, newnode)
} else if (keyCode == 39) { // right or open an unexpanded subtree
if (nodeClicked.isLeaf()) {
return
}
if (nodeClicked.isExpanded() == false) {
nodeClicked.setExpanded(true)
tree.reload()
return
}
clickOnNode(null, nodeClicked.getChildren()[0])
} else if (keyCode == 37) { // left or close an expanded subtree
if (nodeClicked.isLeaf() == false && nodeClicked.isExpanded()) {
nodeClicked.setExpanded(false)
tree.reload()
return
}
if (typeof p !== "undefined") {
clickOnNode(null, p)
}
} else if (keyCode == 13) { // return
nodeClicked.toggleExpanded()
tree.reload()
} else if (keyCode == 9) {
e.preventDefault();
e.stopPropagation();
}
}
initRestUI();
} | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/webui/js/main.js | main.js |
var currentRestRequestMethod = "GET"
var currentResource = null
var currentResourceType = null
var btnGet
var btnPost
var btnPut
var btnDelete
var spanGet
var spanPost
var spanPut
var spanDelete
var requestbody
var requestarea
var sendbutton
function initRestUI() {
requestbody = document.getElementById("rest-requestbody");
requestarea = document.getElementById("rest-requestarea");
sendbutton = document.getElementById("sendButton");
var rad = document.getElementsByName("rest-method");
for(var i = 0; i < rad.length; i++) {
rad[i].onclick = function() {
currentRestRequestMethod = this.value
fillRequestArea()
};
}
btnGet = document.getElementById("methodget")
btnPost = document.getElementById("methodpost")
btnPut = document.getElementById("methodput")
btnDelete = document.getElementById("methoddelete")
spanGet = document.getElementById("spanget")
spanPost = document.getElementById("spanpost")
spanPut = document.getElementById("spanput")
spanDelete = document.getElementById("spandelete")
}
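// Update the REST UI for the given resource: set the request URL, enable only
// the HTTP methods that are applicable to the resource type, and pre-fill the
// header and request areas.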
function setRestUI(resourceFull) {
currentResourceType = Object.keys(resourceFull)[0];
currentResource = resourceFull[currentResourceType]
bri = document.getElementById("baseri").value
cri = "/" + currentResource.ri
if (bri == cri) {
document.getElementById("rest-url").value=bri
} else {
document.getElementById("rest-url").value=bri + cri
}
// check requests for this resource type
// First enable all buttons
btnGet.disabled = false
btnPost.disabled = false
btnPut.disabled = false
btnDelete.disabled = false
spanGet.style.display = "inline-block"
spanPost.style.display = "inline-block"
spanPut.style.display = "inline-block"
spanDelete.style.display = "inline-block"
if (currentResourceType == "m2m:cb") { // CSE
disableButton(btnDelete, spanDelete)
} else if (currentResourceType == "m2m:acp") { // ACP
disableButton(btnPost, spanPost)
} else if (currentResourceType == "m2m:cin") { // CIN
disableButton(btnPost, spanPost)
disableButton(btnPut, spanPut)
} else if (currentResourceType == "m2m:sub") { // SUB
disableButton(btnPost, spanPost)
}
fillHeaderArea(currentResource.ty)
fillRequestArea()
}
// disable a button and hide it. If it is selected, then select the GET button
function disableButton(btn, spn) {
btn.disabled = true
spn.style.display = "none"
if (btn.checked) {
btn.checked = false
btnGet.checked = true
currentRestRequestMethod = "GET"
}
}
function restSendForm() {
restSendData(document.querySelector('input[name="rest-method"]:checked').value,
document.getElementById("rest-url").value,
document.getElementById("rest-headers").value,
requestarea.value)
}
function restSendData(method, url, headers, data) {
var XHR = new XMLHttpRequest();
XHR.addEventListener('error', function(event) {
document.getElementById("restui-error").checked = true;
});
XHR.onreadystatechange = function() {
if (this.readyState == 4) {
			var s = this.status + '';	// fallback text for status codes not mapped below
			switch (this.status) {
case 200: s = '200 - OK'; break;
case 201: s = '201 - Created'; break;
case 204: s = '204 - Updated'; break;
case 400: s = '400 - Bad Request'; break;
case 403: s = '403 - Forbidden'; break;
case 404: s = '404 - Not Found'; break;
case 405: s = '405 - Method Not Allowed'; break;
case 409: s = '409 - Conflict'; break;
}
document.getElementById("rest-status").value = s;
if (this.status == 200 || this.status == 201 || this.status == 204) {
if (this.responseText.length > 0) {
document.getElementById("rest-result-body").value = JSON.stringify(JSON.parse(this.responseText), null, 4);
}
if (method == "DELETE") {
document.getElementById("rest-result-body").value = "";
connectToCSE();
} else {
refreshNode()
}
} else {
document.getElementById("rest-result-body").value = "";
}
document.getElementById("rest-result-headers").value = this.getAllResponseHeaders()
}
};
XHR.open(method, url);
var headerLines = headers.split("\n");
for (line of headerLines) {
x = line.split(":")
if (x.length == 2) {
XHR.setRequestHeader(x[0], x[1]);
}
}
// Add the required HTTP header for form data POST requests
//XHR.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded');
// Finally, send our data.
XHR.send(data);
}
// Callback and function to clear the status and REST result fields/areas.
function restClearResult() {
document.getElementById("rest-status").value = '';
document.getElementById("rest-result-headers").value = '';
document.getElementById("rest-result-body").value = '';
}
// fill the header fields. Depending on the type and the currently selected
// method this will change, for example, the Content-Type field.
function fillHeaderArea(ty) {
if (ty != null && currentRestRequestMethod == "POST") {
text = "Content-Type: application/json;ty=" + ty + "\n"
} else {
text = "Content-Type: application/json\n"
}
text += "Accept: application/json\n"
text += "X-M2M-Origin: " + document.getElementById("originator").value + "\n"
text += "X-M2M-RI: " + Math.random().toString(36).slice(2)
document.getElementById("rest-headers").value = text;
}
/////////////////////////////////////////////////////////////////////////////
tplAE = {
"m2m:ae": {
"acpi": [ "==> fill or remove <==" ],
"api": "==> fill <==",
"nl": "==> fill <==",
"poa": [ "==> fill or remove <==" ],
"rn": "==> fill <==",
"rr": false
}
}
tplACP = {
"m2m:acp": {
"pv": { "acr": { "acop": 63, "acor": [ "==> fill <==" ] } },
"pvs": { "acr": { "acop": 51, "acor": [ "==> fill <==" ] } },
"rn": "==> fill <=="
}
}
tplContainer = {
"m2m:cnt" : {
"acpi": [ "==> fill or remove <==" ],
"mbs": 10000,
"mni": 10,
"rn": "==> fill <=="
}
}
tplContentInstance = {
"m2m:cin": {
"cnf": "text/plain:0",
"con": "==> fill <==",
"rn": "==> fill <=="
}
}
tplGroup = {
"m2m:grp": {
"acpi": [ "==> fill or remove <==" ],
"csy": 1,
"gn": "==> fill <==",
"mid": [ "==> Add members <==" ],
"mnm": 10,
"mt": 3,
"rn": "==> fill <=="
}
}
tplSubscription = {
"m2m:sub": {
"acpi": [ "==> fill or remove <==" ],
"enc": { "net": [ 1, 2, 3, 4 ] },
"nu": [ "==> fill <==" ],
"rn": "==> fill <=="
}
}
tplFlexContainer = {
"==> fill <==": {
"acpi": [ "==> fill or remove <==" ],
"cnd": "==> fill <==",
"rn": "==> fill <==",
"==> custom attributes <==": "==> fill <=="
}
}
tplNode = {
"m2m:nod": {
"acpi": [ "==> fill or remove <==" ],
"ni": "==> fill <==",
"nid": "==> fill <==",
"rn": "==> fill <=="
}
}
tplAreaNwkDeviceInfo = {
"m2m:andi": {
"acpi": [ "==> fill or remove <==" ],
"awi": "==> fill <==",
"dc": "==> fill <==",
"dvd": "==> fill <==",
"dvt": "==> fill <==",
"lnh": [ "==> fill <==" ],
"mgd": 1005,
"rn": "==> fill <==",
"sld": 0,
"sli": 0
}
}
tplAreaNwkType = {
"m2m:ani": {
"acpi": [ "==> fill or remove <==" ],
"ant": "==> fill <==",
"dc": "==> fill <==",
"ldv": [ "==> fill <==" ],
"mgd": 1004,
"rn": "==> fill <=="
}
}
tplBattery = {
"m2m:bat": {
"acpi": [ "==> fill or remove <==" ],
"btl": 23,
"bts": 7,
"dc": "==> fill <==",
"mgd": 1006,
"rn": "==> fill <=="
}
}
tplDeviceCapability = {
"m2m:dvc": {
"acpi": [ "==> fill or remove <==" ],
"att": true,
"can": "==> fill <==",
"cas": {
"acn": "==> fill <==",
"sus": 1
},
"cus": true,
"dc": "==> fill <==",
"mgd": 1008,
"rn": "==> fill <=="
}
}
tplDeviceInfo = {
"m2m:dvi": {
"acpi": [ "==> fill or remove <==" ],
"cnty": "==> fill <==",
"dc": "==> fill <==",
"dlb": [
"==> label:value <=="
],
"dty": "==> fill <==",
"dvnm": "==> fill <==",
"fwv": "==> fill <==",
"hwv": "==> fill <==",
"loc": "==> fill <==",
"man": "==> fill <==",
"mfd": "==> fill timestamp <==",
"mfdl": "==> fill <==",
"mgd": 1007,
"mod": "==> fill <==",
"osv": "==> fill <==",
"ptl": [ "==> fill <==" ],
"purl": "==> fill <==",
"rn": "==> fill <==",
"smod": "==> fill <==",
"spur": "==> fill <==",
"swv": "==> fill <==",
"syst": "==> fill timestamp <=="
}
}
tplEventLog = {
"m2m:evl": {
"acpi": [ "==> fill or remove <==" ],
"dc": "==> fill <==",
"lga": false,
"lgd": "==> fill <==",
"lgo": false,
"lgst": 1,
"lgt": 0,
"mgd": 1010,
"rn": "==> fill <=="
}
}
tplFirmware = {
"m2m:fwr": {
"acpi": [ "==> fill or remove <==" ],
"dc": "==> fill <==",
"fwn": "==> fill <==",
"mgd": 1001,
"rn": "==> fill <==",
"ud": false,
"uds": {
"acn": "==> fill <==",
"sus": 0
},
"url": "==> fill <==",
"vr": "==> fill <=="
}
}
tplMemory = {
"m2m:mem": {
"acpi": [ "==> fill or remove <==" ],
"dc": "==> fill <==",
"mgd": 1003,
"mma": 0,
"mmt": 0,
"rn": "==> fill <=="
}
}
tplReboot = {
"m2m:rbo": {
"acpi": [ "==> fill or remove <==" ],
"dc": "==> fill <==",
"far": false,
"mgd": 1009,
"rbo": false,
"rn": "==> fill <=="
}
}
tplSoftware = {
"m2m:swr": {
"acpi": [ "==> fill or remove <==" ],
"act": false,
"acts": {
"acn": "==> fill <==",
"sus": 0
},
"dc": "==> fill <==",
"dea": false,
"in": false,
"ins": {
"acn": "==> fill <==",
"sus": 0
},
"mgd": 1002,
"rn": "==> fill <==",
"swn": "==> fill <==",
"un": false,
"url": "==> fill <==",
"vr": "==> fill <=="
}
}
// 11 template slots (indexes 0..10), matching the template buttons used in fillRequestArea()
var templates = ["", "", "", "", "", "", "", "", "", "", ""]
var templateTypes = [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
var templateButtons = [null, null, null, null, null, null, null, null, null, null, null]
function fillTemplate(nr) {
requestarea.value = templates[nr]
fillHeaderArea(templateTypes[nr])
}
function fillRequestArea() {
templateButtons[0] = document.getElementById("tplButton0")
templateButtons[1] = document.getElementById("tplButton1")
templateButtons[2] = document.getElementById("tplButton2")
templateButtons[3] = document.getElementById("tplButton3")
templateButtons[4] = document.getElementById("tplButton4")
templateButtons[5] = document.getElementById("tplButton5")
templateButtons[6] = document.getElementById("tplButton6")
templateButtons[7] = document.getElementById("tplButton7")
templateButtons[8] = document.getElementById("tplButton8")
templateButtons[9] = document.getElementById("tplButton9")
templateButtons[10] = document.getElementById("tplButton10")
// enable / disable the area depending on the currently selected method
if (currentRestRequestMethod == "POST" || currentRestRequestMethod == "PUT") {
requestarea.readOnly = false;
requestbody.style.display = 'block';
} else {
requestarea.readOnly = true;
requestbody.style.display = 'none';
}
if (currentRestRequestMethod == "DELETE") {
sendButton.className = "error"
} else {
sendButton.className = "button success"
}
// hide buttons and fill with resource for PUT
if (currentRestRequestMethod == "GET" || currentRestRequestMethod == "DELETE") {
hideTemplateButtons()
return
} else if (currentRestRequestMethod == "PUT") {
hideTemplateButtons()
requestarea.value = JSON.stringify(prepareNodeForPUT(currentResource, currentResourceType), null, 4)
return
}
// only POST from here
// add templates and buttons
requestarea.value = ""
hideTemplateButtons()
if (currentResourceType == "m2m:ae") { // AE
showTemplateButton(0, "Container", tplContainer, 3)
showTemplateButton(1, "FlexContainer", tplFlexContainer, 28)
showTemplateButton(2, "Group", tplGroup, 9)
showTemplateButton(3, "Subscription", tplSubscription, 23)
} else if (currentResourceType == "m2m:cnt") { // Container
showTemplateButton(0, "Container", tplContainer, 3)
showTemplateButton(1, "ContentInstance", tplContentInstance, 4)
showTemplateButton(2, "Subscription", tplSubscription, 23)
} else if (currentResourceType == "m2m:cb") { // CSEBase
showTemplateButton(0, "ACP", tplACP, 1)
showTemplateButton(1, "AE", tplAE, 2)
showTemplateButton(2, "Container", tplContainer, 3)
showTemplateButton(3, "FlexContainer", tplFlexContainer, 28)
showTemplateButton(4, "Group", tplGroup, 9)
showTemplateButton(5, "Node", tplNode, 14)
showTemplateButton(6, "Subscription", tplSubscription, 23)
} else if (currentResourceType == "m2m:nod") { // Node
showTemplateButton(0, "AreaNwkDeviceInfo", tplAreaNwkDeviceInfo, 13)
showTemplateButton(1, "AreaNwkType", tplAreaNwkType, 13)
showTemplateButton(2, "Battery", tplBattery, 13)
showTemplateButton(3, "Firmware", tplFirmware, 13)
showTemplateButton(4, "DeviceCapability", tplDeviceCapability, 13)
showTemplateButton(5, "DeviceInfo", tplDeviceInfo, 13)
showTemplateButton(6, "EventLog", tplEventLog, 13)
showTemplateButton(7, "Memory", tplMemory, 13)
showTemplateButton(8, "Reboot", tplReboot, 13)
showTemplateButton(9, "Software", tplSoftware, 13)
showTemplateButton(10, "Subscription", tplSubscription, 23)
} else if (currentResourceType == "m2m:grp") { // Group
showTemplateButton(0, "Subscription", tplSubscription, 23)
} else if (currentResource.ty == 28) { // FlexContainer
showTemplateButton(0, "Container", tplContainer, 3)
showTemplateButton(1, "FlexContainer", tplFlexContainer, 28)
showTemplateButton(2, "Subscription", tplSubscription, 23)
	} else if (currentResource.ty == 13) { // mgmtObj
showTemplateButton(0, "Subscription", tplSubscription, 23)
}
}
function hideTemplateButtons() {
for (b in templateButtons) {
templateButtons[b].style.display = "none"
}
for (var i = templateTypes.length - 1; i >= 0; i--) {
templateTypes[i] = 0
}
}
function showTemplateButton(idx, text, template, ty) {
templateButtons[idx].text = text
templateButtons[idx].style.display = 'inline-block'
templates[idx] = JSON.stringify(template, null, 4)
templateTypes[idx] = ty
}
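// Return a copy of the resource, wrapped in its type name, with those
// attributes removed that must not be sent in an UPDATE request.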
function prepareNodeForPUT(resource, tpe) {
let r = Object.assign({}, resource);
delete r["ct"]
delete r["lt"]
delete r["ri"]
delete r["pi"]
delete r["rn"]
delete r["st"]
delete r["ty"]
delete r["cbs"]
delete r["cni"]
delete r["acpi"]
delete r["mgd"]
delete r["srt"]
delete r["csi"]
let result = {}
result[tpe] = r
return result
} | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/webui/js/restui.js | restui.js |
function ContextMenu(menu, options){
var self = this;
var num = ContextMenu.count++;
this.menu = menu;
this.contextTarget = null;
if(!(menu instanceof Array)){
throw new Error("Parameter 1 must be of type Array");
}
if(typeof options !== "undefined"){
if(typeof options !== "object"){
throw new Error("Parameter 2 must be of type object");
}
}else{
options = {};
}
window.addEventListener("resize", function(){
if(ContextUtil.getProperty(options, "close_on_resize", true)){
self.hide();
}
});
this.setOptions = function(_options){
if(typeof _options === "object"){
options = _options;
}else{
throw new Error("Parameter 1 must be of type object")
}
}
this.changeOption = function(option, value){
if(typeof option === "string"){
if(typeof value !== "undefined"){
options[option] = value;
}else{
throw new Error("Parameter 2 must be set");
}
}else{
throw new Error("Parameter 1 must be of type string");
}
}
this.getOptions = function(){
return options;
}
this.reload = function(){
if(document.getElementById('cm_' + num) == null){
var cnt = document.createElement("div");
cnt.className = "cm_container";
cnt.id = "cm_" + num;
document.body.appendChild(cnt);
}
var container = document.getElementById('cm_' + num);
container.innerHTML = "";
container.appendChild(renderLevel(menu));
}
function renderLevel(level){
var ul_outer = document.createElement("ul");
level.forEach(function(item){
var li = document.createElement("li");
li.menu = self;
if(typeof item.type === "undefined"){
var icon_span = document.createElement("span");
icon_span.className = 'cm_icon_span';
if(ContextUtil.getProperty(item, "icon", "") != ""){
icon_span.innerHTML = ContextUtil.getProperty(item, "icon", "");
}else{
icon_span.innerHTML = ContextUtil.getProperty(options, "default_icon", "");
}
var text_span = document.createElement("span");
text_span.className = 'cm_text';
if(ContextUtil.getProperty(item, "text", "") != ""){
text_span.innerHTML = ContextUtil.getProperty(item, "text", "");
}else{
text_span.innerHTML = ContextUtil.getProperty(options, "default_text", "item");
}
var sub_span = document.createElement("span");
sub_span.className = 'cm_sub_span';
if(typeof item.sub !== "undefined"){
if(ContextUtil.getProperty(options, "sub_icon", "") != ""){
sub_span.innerHTML = ContextUtil.getProperty(options, "sub_icon", "");
}else{
sub_span.innerHTML = '›';
}
}
li.appendChild(icon_span);
li.appendChild(text_span);
li.appendChild(sub_span);
if(!ContextUtil.getProperty(item, "enabled", true)){
li.setAttribute("disabled", "");
}else{
if(typeof item.events === "object"){
var keys = Object.keys(item.events);
for(var i = 0; i < keys.length; i++){
li.addEventListener(keys[i], item.events[keys[i]]);
}
}
if(typeof item.sub !== "undefined"){
li.appendChild(renderLevel(item.sub));
}
}
}else{
if(item.type == ContextMenu.DIVIDER){
li.className = "cm_divider";
}
}
ul_outer.appendChild(li);
});
return ul_outer;
}
this.display = function(e, target){
if(typeof target !== "undefined"){
self.contextTarget = target;
}else{
self.contextTarget = e.target;
}
var menu = document.getElementById('cm_' + num);
var clickCoords = {x: e.clientX, y: e.clientY};
var clickCoordsX = clickCoords.x;
var clickCoordsY = clickCoords.y;
var menuWidth = menu.offsetWidth + 4;
var menuHeight = menu.offsetHeight + 4;
var windowWidth = window.innerWidth;
var windowHeight = window.innerHeight;
var mouseOffset = parseInt(ContextUtil.getProperty(options, "mouse_offset", 2));
if((windowWidth - clickCoordsX) < menuWidth){
menu.style.left = windowWidth - menuWidth + "px";
}else{
menu.style.left = (clickCoordsX + mouseOffset) + "px";
}
if((windowHeight - clickCoordsY) < menuHeight){
menu.style.top = windowHeight - menuHeight + "px";
}else{
menu.style.top = (clickCoordsY + mouseOffset) + "px";
}
var sizes = ContextUtil.getSizes(menu);
if((windowWidth - clickCoordsX) < sizes.width){
menu.classList.add("cm_border_right");
}else{
menu.classList.remove("cm_border_right");
}
if((windowHeight - clickCoordsY) < sizes.height){
menu.classList.add("cm_border_bottom");
}else{
menu.classList.remove("cm_border_bottom");
}
menu.classList.add("display");
if(ContextUtil.getProperty(options, "close_on_click", true)){
window.addEventListener("click", documentClick);
}
e.preventDefault();
}
this.hide = function(){
document.getElementById('cm_' + num).classList.remove("display");
window.removeEventListener("click", documentClick);
}
function documentClick(){
self.hide();
}
this.reload();
}
ContextMenu.count = 0;
ContextMenu.DIVIDER = "cm_divider";
const ContextUtil = {
getProperty: function(options, opt, def){
if(typeof options[opt] !== "undefined"){
return options[opt];
}else{
return def;
}
},
getSizes: function(obj){
var lis = obj.getElementsByTagName('li');
var width_def = 0;
var height_def = 0;
for(var i = 0; i < lis.length; i++){
var li = lis[i];
if(li.offsetWidth > width_def){
width_def = li.offsetWidth;
}
if(li.offsetHeight > height_def){
height_def = li.offsetHeight;
}
}
var width = width_def;
var height = height_def;
for(var i = 0; i < lis.length; i++){
var li = lis[i];
var ul = li.getElementsByTagName('ul');
if(typeof ul[0] !== "undefined"){
var ul_size = ContextUtil.getSizes(ul[0]);
if(width_def + ul_size.width > width){
width = width_def + ul_size.width;
}
if(height_def + ul_size.height > height){
height = height_def + ul_size.height;
}
}
}
return {
"width": width,
"height": height
};
}
}; | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/webui/js/contextmenu.js | contextmenu.js |
// There are basically 4 types of attributes:
// - common & universal : same as oneM2M
// - custom : from flexContainer and mgmtObj specializations
// - all others
const shortNames = {
"aa" : { "ln" : "announcedAttribute", "type" : "common" },
"acn" : { "ln" : "action", "type": "" },
"act" : { "ln" : "activate", "type": "" },
"acts" : { "ln" : "activeStatus", "type": "" },
"acpi" : { "ln" : "accessControlPolicyIDs", "type": "common" },
"acn" : { "ln" : "action", "type": "" },
"aei" : { "ln" : "AE-ID", "type": "" },
"ant" : { "ln" : "areaNwkType", "type": "custom" },
"ape" : { "ln" : "activityPatternElements", "type": "" },
"api" : { "ln" : "App-ID", "type": "" },
"apn" : { "ln" : "AppName", "type": "" },
"at" : { "ln" : "announcedTo", "type" : "common" },
"att" : { "ln" : "attached", "type": "custom" },
"awi" : { "ln" : "areaNwkId", "type": "custom" },
"btl" : { "ln" : "batteryLevel", "type": "custom" },
"bts" : { "ln" : "batteryStatus", "type": "custom" },
"can" : { "ln" : "capabilityName", "type": "custom" },
"cas" : { "ln" : "capabilityActionStatus", "type": "custom" },
"cbs" : { "ln" : "currentByteSize", "type": "" },
"cnd" : { "ln" : "containerDefinition", "type": "" },
"cnf" : { "ln" : "contentInfo", "type": "custom" },
"cni" : { "ln" : "currentNrOfInstances", "type": "" },
"cnm" : { "ln" : "currentNrOfMembers", "type": "" },
"cnty" : { "ln" : "country", "type": "custom" },
"con" : { "ln" : "content", "type": "custom" },
"cr" : { "ln" : "creator", "type": "common" },
"cs" : { "ln" : "contentSize", "type": "" },
"csi" : { "ln" : "CSE-ID", "type": "" },
"cst" : { "ln" : "cseType", "type": "" },
"csy" : { "ln" : "consistencyStrategy", "type": "" },
"csz" : { "ln" : "contentSerialization", "type": "" },
"ct" : { "ln" : "creationTime", "type": "universal" },
"cus" : { "ln" : "currentState", "type": "custom" },
"daci" : { "ln" : "dynamicAuthorizationConsultationIDs", "type": "common" },
"dc" : { "ln" : "description", "type": "" },
"dea" : { "ln" : "deactivate", "type": "" },
"dis" : { "ln" : "disable", "type": "" },
"dlb" : { "ln" : "deviceLabel", "type": "custom" },
"dty" : { "ln" : "deviceType", "type": "custom" },
"dvd" : { "ln" : "devId", "type": "custo" },
"dvi" : { "ln" : "deviceInfo", "type": "" },
"dvnm" : { "ln" : "deviceName", "type": "custom" },
"dvt" : { "ln" : "devType", "type": "custom" },
"egid" : { "ln" : "externalGroupID", "type": "" },
"ena" : { "ln" : "enable", "type": "" },
"enc" : { "ln" : "eventNotificationCriteria", "type": "" },
"esi" : { "ln" : "e2eSecInfo", "type": "common" },
"et" : { "ln" : "expirationTime", "type": "common" },
"far" : { "ln" : "factoryReset", "type": "" },
"fwn" : { "ln" : "firmwareName", "type": "custom" },
"fwv" : { "ln" : "fwVersion", "type": "custom" },
"gn" : { "ln" : "groupName", "type": "" },
"hael" : { "ln" : "hostedAELinks", "type": "" },
"hcl" : { "ln" : "hostedCSELink", "type": "" },
"hsl" : { "ln" : "hostedServiceLink", "type": "" },
"hwv" : { "ln" : "hwVersion", "type": "custom" },
"in" : { "ln" : "install", "type": "" },
"ins" : { "ln" : "installStatus", "type": "" },
"lbl" : { "ln" : "labels", "type": "common" },
"ldv" : { "ln" : "listOfDevices", "type": "custom" },
"lga" : { "ln" : "logStart", "type": "custom" },
"lgd" : { "ln" : "logData", "type": "custom" },
"lgo" : { "ln" : "logStop", "type": "custom" },
"lgst" : { "ln" : "logStatus", "type": "custom" },
"lgt" : { "ln" : "logTypeId", "type": "custom" },
"lnh" : { "ln" : "listOfNeighbors", "type": "custom" },
"loc" : { "ln" : "location", "type": "custom" },
"lt" : { "ln" : "lastModifiedTime", "type": "universal" },
"macp" : { "ln" : "membersAccessControlPolicyIDs", "type": "" },
"man" : { "ln" : "manufacturer", "type": "custom" },
"mbs" : { "ln" : "maxByteSize", "type": "" },
"mei" : { "ln" : "M2M-Ext-ID", "type": "" },
"mfd" : { "ln" : "manufacturingDate", "type": "custom" },
"mfdl" : { "ln" : "manufacturerDetailsLink", "type": "custom" },
"mgca" : { "ln" : "mgmtClientAddress", "type": "" },
"mgd" : { "ln" : "mgmtDefinition", "type": "" },
"mid" : { "ln" : "memberIDs", "type": "" },
"mma" : { "ln" : "memAvailable", "type": "custom" },
"mmt" : { "ln" : "memTotal", "type": "custom" },
"mni" : { "ln" : "maxNrOfInstances", "type": "" },
"mnm" : { "ln" : "maxNrOfMembers", "type": "" },
"mod" : { "ln" : "model", "type": "custom" },
"mt" : { "ln" : "memberType", "type": "" },
"mtv" : { "ln" : "memberTypeValidated", "type": "" },
"nar" : { "ln" : "notifyAggregation", "type": "" },
"nct" : { "ln" : "notificationContentType", "type": "" },
"ni" : { "ln" : "nodeID", "type": "" },
"nid" : { "ln" : "networkID", "type": "" },
"nl" : { "ln" : "nodeLink", "type": "" },
"nu" : { "ln" : "notificationURI", "type": "" },
"or" : { "ln" : "ontologyRef", "type" : "" },
"osv" : { "ln" : "osVersion", "type": "custom" },
"pi" : { "ln" : "parentID", "type": "universal" },
"poa" : { "ln" : "pointOfAccess", "type": "" },
"ptl" : { "ln" : "protocol", "type": "custom" },
"purl" : { "ln" : "presentationURL", "type": "custom" },
"pv" : { "ln" : "privileges", "type": "" },
"pvs" : { "ln" : "selfPrivileges", "type": "" },
"rbo" : { "ln" : "reboot", "type": "" },
"regs" : { "ln" : "registrationStatus", "type": "" },
"ri" : { "ln" : "resourceID", "type": "universal" },
"rms" : { "ln" : "roamingStatus", "type": "" },
"rn" : { "ln" : "resourceName", "type": "universal" },
"rr" : { "ln" : "requestReachability", "type": "" },
"scp" : { "ln" : "sessionCapabilities", "type": "" },
"sld" : { "ln" : "sleepDuration", "type": "custom" },
"sli" : { "ln" : "sleepInterval", "type": "custom" },
"smod" : { "ln" : "subModel", "type": "custom" },
"spty" : { "ln" : "specializationType", "type": "" },
"spur" : { "ln" : "supportURL", "type": "custom" },
"srt" : { "ln" : "supportedResourceType", "type": "" },
"srv" : { "ln" : "supportedReleaseVersions", "type": "" },
"ssi" : { "ln" : "semanticSupportIndicator", "type": "" },
"st" : { "ln" : "stateTag", "type": "common" },
"sus" : { "ln" : "status", "type": "" },
"swn" : { "ln" : "softwareName", "type": "" },
"swr" : { "ln" : "software", "type": "" },
"swv" : { "ln" : "swVersion", "type": "custom" },
"syst" : { "ln" : "systemTime", "type": "custom" },
"tri" : { "ln" : "trigger-Recipient-ID", "type": "" },
"tren" : { "ln" : "triggerEnable", "type": "" },
"trn" : { "ln" : "triggerReferenceNumber", "type": "" },
"trps" : { "ln" : "trackRegistrationPoints", "type": "" },
"ty" : { "ln" : "resourceType", "type": "universal" },
"ud" : { "ln" : "update", "type": "" },
"uds" : { "ln" : "updateStatus", "type": "" },
"un" : { "ln" : "uninstall", "type": "" },
"url" : { "ln" : "URL", "type": "custom" },
"vr" : { "ln" : "version", "type": "custom" },
// proprietary custom attributes
"crRes" : { "ln" : "createdResources", "type": "custom" },
"cseSU" : { "ln" : "cseStartUpTime", "type": "custom" },
"cseUT" : { "ln" : "cseUptime", "type": "custom" },
"ctRes" : { "ln" : "resourceCount", "type": "custom" },
"htCre" : { "ln" : "httpCreates", "type": "custom" },
"htDel" : { "ln" : "httpDeletes", "type": "custom" },
"htRet" : { "ln" : "httpRetrieves", "type": "custom" },
"htUpd" : { "ln" : "httpUpdates", "type": "custom" },
"lgErr" : { "ln" : "logErrors", "type": "custom" },
"lgWrn" : { "ln" : "logWarnings", "type": "custom" },
"rmRes" : { "ln" : "deletedResources", "type": "custom" }
}
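// Map a short attribute name to its long name, if long names are enabled and the name is known.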
function shortToLongname(sn) {
if (printLongNames && sn in shortNames) {
return shortNames[sn].ln
}
return sn
}
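// Return the role of an attribute (universal, common, custom, or other);
// unknown attributes are treated as custom.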
function attributeRole(sn) {
if (sn in shortNames) {
return shortNames[sn].type
}
return "custom"
} | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/webui/js/attributes.js | attributes.js |
var tree
var cseid = ''
var root = null
var rootri = null
var originator = ""
var printLongNames = false
var nodeClicked = undefined
// hack: if this is set to false then the REST UI will not be refreshed.
// useful with auto refresh.
var refreshRESTUI = true
// TODO Clickable references. For each node add ("ri" : TreePath). expand via TreeView.expandPath.
// Select the entry and display
const types = {
1 : "ACP",
2 : "AE",
3 : "Container",
4 : "ContentInstance",
5 : "CSEBase",
9 : "Group",
14 : "Node",
16 : "RemoteCSE",
23 : "Subscription",
28 : "FlexContainer",
52 : "FlexContainerInstance"
}
const shortTypes = {
1 : "ACP",
2 : "AE",
3 : "CNT",
4 : "CIN",
5 : "CSE",
9 : "GRP",
14 : "NOD",
16 : "CSR",
23 : "SUB",
28 : "FCNT",
52 : "FCI"
}
const mgdTypes = {
1001 : "Firmware",
1002 : "Software",
1003 : "Memory",
1004 : "AreaNwkInfo",
1005 : "AreaNwkDeviceInfo",
1006 : "Battery",
1007 : "DeviceInfo",
1008 : "DeviceCapability",
1009 : "Reboot",
1010 : "EventLog"
}
const mgdShortTypes = {
1001 : "FWR",
1002 : "SWR",
1003 : "MEM",
1004 : "ANI",
1005 : "ANDI",
1006 : "BAT",
1007 : "DVI",
1008 : "DVC",
1009 : "REB",
1010 : "EVL"
}
function clickOnNode(e, node) {
if (typeof nodeClicked !== "undefined") {
nodeClicked.setSelected(false)
}
node.setSelected(true)
nodeClicked = node
tree.reload()
resource = node.resource
clearAttributesTable()
fillAttributesTable(resource)
fillJSONArea(node)
setResourceInfo(resource)
if (refreshRESTUI) {
setRestUI(node.resourceFull)
} else {
refreshRESTUI = true
}
}
//////////////////////////////////////////////////////////////////////////////
//
// Tree handling
//
function expandNode(node) {
for (ch of node.getChildren()) {
if (ch.resolved == false) {
getResource(ch.ri, ch)
}
}
}
function collapseNode(node) {
for (ch of node.getChildren()) {
ch.setExpanded(false)
}
}
function removeChildren(node) {
var chc = node.getChildCount()
for (i = 0; i < chc; i++) {
node.removeChildPos(0)
}
}
function clearAttributesTable() {
var table = document.getElementById("details");
var tb = table.getElementsByTagName('tbody')[0]
tb.innerHTML = " "
}
function fillAttributesTable(resource) {
// fill attribute table with resource attributes
var table = document.getElementById("details");
var tb = table.getElementsByTagName('tbody')[0]
for (var key in resource) {
var newRow = tb.insertRow()
var keyCell = newRow.insertCell(0)
var valueCell = newRow.insertCell(1);
// Colorful attributes
switch (attributeRole(key)) {
case "universal": keyCell.innerHTML = "<font color=\"#e67e00\">" + shortToLongname(key) + "</font>";
break;
case "common": keyCell.innerHTML = "<font color=\"#0040ff\">" + shortToLongname(key) + "</font>";
break;
case "custom": keyCell.innerHTML = "<font color=\"#239023\">" + shortToLongname(key) + "</font>";
break;
default: keyCell.innerHTML = "<font color=\"black\">" + shortToLongname(key) + "</font>";
break;
}
valueCell.innerText = JSON.stringify(resource[key])
}
}
function fillJSONArea(node) {
// fill JSON text area
document.getElementById("resource").value = JSON.stringify(node.resourceFull, null, 4)
}
function clearJSONArea() {
// fill JSON text area
document.getElementById("resource").value = ""
}
function setRootResourceName(name) {
document.getElementById("rootResourceName").innerText = name
}
function clearRootResourceName() {
document.getElementById("rootResourceName").innerHTML = " "
}
function setResourceInfo(resource) {
if (typeof resource === "undefined") {
return
}
// extra infos in the header
var d = document.getElementById("resourceType");
var ty = resource['ty']
var t = types[ty]
if (ty == 13) {
var mgd = resource['mgd']
if (mgd == undefined) {
t = "mgmtObj"
} else {
t = mgdTypes[mgd]
}
}
if (t == undefined) {
t = "Unknown"
}
var ri = "/" + resource["ri"]
if (ri == cseid) {
d.innerText = t + ": " + cseid
} else {
d.innerText = t + ": " + cseid + "/" + resource["ri"]
}
}
function clearResourceInfo() {
document.getElementById("resourceType").innerHTML = " "
}
function refreshNode() {
if (typeof nodeClicked !== "undefined") {
nodeClicked.wasExpanded = nodeClicked.isExpanded()
removeChildren(nodeClicked)
getResource(nodeClicked.resource.ri, nodeClicked)
}
}
//////////////////////////////////////////////////////////////////////////////
//
// Utilities
//
function getUrlParameterByName(name, url) {
if (!url)
url = window.location.href;
name = name.replace(/[\[\]]/g, "\\$&");
var regex = new RegExp("[?&]" + name + "(=([^&#]*)|&|#|$)"), results = regex.exec(url);
if (!results)
return null;
if (!results[2])
return '';
return decodeURIComponent(results[2].replace(/\+/g, " "));
}
//////////////////////////////////////////////////////////////////////////////
//
// Refresh
//
var refreshTimer = undefined
function setupRefreshResource(seconds) {
refreshTimer = setInterval(function() {
refreshRESTUI = false
refreshNode()
}, seconds*1000)
}
function cancelRefreshResource() {
clearInterval(refreshTimer);
refreshTimer = undefined
} | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/webui/js/resourceTree.js | resourceTree.js |
function TreeView(root, container, options){
var self = this;
/*
	 * Constructor
*/
if(typeof root === "undefined"){
throw new Error("Parameter 1 must be set (root)");
}
if(!(root instanceof TreeNode)){
throw new Error("Parameter 1 must be of type TreeNode");
}
if(container){
if(!TreeUtil.isDOM(container)){
container = document.querySelector(container);
if(container instanceof Array){
container = container[0];
}
if(!TreeUtil.isDOM(container)){
throw new Error("Parameter 2 must be either DOM-Object or CSS-QuerySelector (#, .)");
}
}
}else{
container = null;
}
if(!options || typeof options !== "object"){
options = {};
}
/*
* Methods
*/
this.setRoot = function(_root){
if(root instanceof TreeNode){
root = _root;
}
}
this.getRoot = function(){
return root;
}
this.expandAllNodes = function(){
root.setExpanded(true);
root.getChildren().forEach(function(child){
TreeUtil.expandNode(child);
});
}
this.expandPath = function(path){
if(!(path instanceof TreePath)){
throw new Error("Parameter 1 must be of type TreePath");
}
path.getPath().forEach(function(node){
node.setExpanded(true);
});
}
this.collapseAllNodes = function(){
root.setExpanded(false);
root.getChildren().forEach(function(child){
TreeUtil.collapseNode(child);
});
}
this.setContainer = function(_container){
if(TreeUtil.isDOM(_container)){
container = _container;
}else{
_container = document.querySelector(_container);
if(_container instanceof Array){
_container = _container[0];
}
if(!TreeUtil.isDOM(_container)){
throw new Error("Parameter 1 must be either DOM-Object or CSS-QuerySelector (#, .)");
}
}
}
this.getContainer = function(){
return container;
}
this.setOptions = function(_options){
if(typeof _options === "object"){
options = _options;
}
}
this.changeOption = function(option, value){
options[option] = value;
}
this.getOptions = function(){
return options;
}
	// TODO: keyboard handling for the selected node: up/down to move, right to expand, left to collapse, enter to open
this.getSelectedNodes = function(){
return TreeUtil.getSelectedNodesForNode(root);
}
this.reload = function(){
if(container == null){
console.warn("No container specified");
return;
}
container.classList.add("tj_container");
var cnt = document.createElement("ul");
cnt.appendChild(renderNode(root));
container.innerHTML = "";
container.appendChild(cnt);
}
function renderNode(node){
var li_outer = document.createElement("li");
var span_desc = document.createElement("span");
span_desc.className = "tj_description";
span_desc.tj_node = node;
if(!node.isEnabled()){
li_outer.setAttribute("disabled", "");
node.setExpanded(false);
node.setSelected(false);
}
if(node.isSelected()){
span_desc.classList.add("selected");
}
span_desc.addEventListener("click", function(e){
var cur_el = e.target;
clickRelative = e.clientX - getOffset(this).left // akr
while(typeof cur_el.tj_node === "undefined" || cur_el.classList.contains("tj_container")){
cur_el = cur_el.parentElement;
}
var node_cur = cur_el.tj_node;
if(typeof node_cur === "undefined"){
return;
}
if(node_cur.isEnabled()){
if(e.ctrlKey == false){
if(!node_cur.isLeaf()){
if (clickRelative < 27) { // akr start
node_cur.toggleExpanded();
} else if (!node_cur.isExpanded()) {
node_cur.setExpanded(true)
} // akr end
self.reload();
}else{
node_cur.open();
}
node_cur.on("click")(e, node_cur);
}
if(e.ctrlKey == true){
node_cur.toggleSelected();
self.reload();
}else{
var rt = node_cur.getRoot();
if(rt instanceof TreeNode){
TreeUtil.getSelectedNodesForNode(rt).forEach(function(_nd){
_nd.setSelected(false);
});
}
node_cur.setSelected(true);
self.reload();
}
}
});
span_desc.addEventListener("contextmenu", function(e){
var cur_el = e.target;
while(typeof cur_el.tj_node === "undefined" || cur_el.classList.contains("tj_container")){
cur_el = cur_el.parentElement;
}
var node_cur = cur_el.tj_node;
if(typeof node_cur === "undefined"){
return;
}
if(typeof node_cur.getListener("contextmenu") !== "undefined"){
node_cur.on("contextmenu")(e, node_cur);
e.preventDefault();
}else if(typeof TreeConfig.context_menu === "function"){
TreeConfig.context_menu(e, node_cur);
e.preventDefault();
}
});
if(node.isLeaf()){
var ret = '';
var icon = TreeUtil.getProperty(node.getOptions(), "icon", "");
if(icon != ""){
ret += '<span class="tj_icon">' + icon + '</span>';
}else if((icon = TreeUtil.getProperty(options, "leaf_icon", "")) != ""){
ret += '<span class="tj_icon">' + icon + '</span>';
}else{
ret += '<span class="tj_icon">' + TreeConfig.leaf_icon + '</span>';
}
span_desc.innerHTML = ret + node.toString() + "</span>";
span_desc.classList.add("tj_leaf");
li_outer.appendChild(span_desc);
}else{
var ret = '';
if(node.isExpanded()){
ret += '<span class="tj_mod_icon">' + TreeConfig.open_icon + '</span>';
}else{
ret += '<span class="tj_mod_icon">' + TreeConfig.close_icon + '</span>';
}
// var icon = TreeUtil.getProperty(node.getOptions(), "icon", "");
// if(icon != ""){
// ret += '<span class="tj_icon">' + icon + '</span>';
// }else if((icon = TreeUtil.getProperty(options, "parent_icon", "")) != ""){
// ret += '<span class="tj_icon">' + icon + '</span>';
// }else{
// ret += '<span class="tj_icon">' + TreeConfig.parent_icon + '</span>';
// }
span_desc.innerHTML = ret + node.toString() + '</span>';
li_outer.appendChild(span_desc);
if(node.isExpanded()){
var ul_container = document.createElement("ul");
node.getChildren().forEach(function(child){
ul_container.appendChild(renderNode(child));
});
li_outer.appendChild(ul_container)
}
}
return li_outer;
}
if(typeof container !== "undefined")
this.reload();
}
function TreeNode(userObject, options){
var children = new Array();
var self = this;
var events = new Array();
var expanded = true;
var enabled = true;
var selected = false;
/*
	 * Constructor
*/
if(userObject){
if(!(typeof userObject === "string") || typeof userObject.toString !== "function"){
throw new Error("Parameter 1 must be of type String or Object, where it must have the function toString()");
}
}else{
userObject = "";
}
if(!options || typeof options !== "object"){
options = {};
}else{
expanded = TreeUtil.getProperty(options, "expanded", true);
enabled = TreeUtil.getProperty(options, "enabled", true);
selected = TreeUtil.getProperty(options, "selected", false);
}
/*
* Methods
*/
this.addChild = function(node){
if(!TreeUtil.getProperty(options, "allowsChildren", true)){
console.warn("Option allowsChildren is set to false, no child added");
return;
}
if(node instanceof TreeNode){
children.push(node);
				// Add a read-only "parent" property (workaround)
Object.defineProperty(node, "parent", {
value: this,
writable: false,
enumerable: true,
configurable: true
});
}else{
throw new Error("Parameter 1 must be of type TreeNode");
}
}
this.removeChildPos = function(pos){
if(typeof children[pos] !== "undefined"){
if(typeof children[pos] !== "undefined"){
children.splice(pos, 1);
}
}
}
this.removeChild = function(node){
if(!(node instanceof TreeNode)){
throw new Error("Parameter 1 must be of type TreeNode");
}
this.removeChildPos(this.getIndexOfChild(node));
}
this.getChildren = function(){
return children;
}
this.getChildCount = function(){
return children.length;
}
this.getIndexOfChild = function(node){
for(var i = 0; i < children.length; i++){
if(children[i].equals(node)){
return i;
}
}
return -1;
}
this.getRoot = function(){
var node = this;
while(typeof node.parent !== "undefined"){
node = node.parent;
}
return node;
}
this.setUserObject = function(_userObject){
if(!(typeof _userObject === "string") || typeof _userObject.toString !== "function"){
throw new Error("Parameter 1 must be of type String or Object, where it must have the function toString()");
}else{
userObject = _userObject;
}
}
this.getUserObject = function(){
return userObject;
}
this.setOptions = function(_options){
if(typeof _options === "object"){
options = _options;
}
}
this.changeOption = function(option, value){
options[option] = value;
}
this.getOptions = function(){
return options;
}
this.isLeaf = function(){
return (children.length == 0);
}
this.setExpanded = function(_expanded){
if(this.isLeaf()){
return;
}
if(typeof _expanded === "boolean"){
if(expanded == _expanded){
return;
}
expanded = _expanded;
if(_expanded){
this.on("expand")(this);
}else{
this.on("collapse")(this);
}
this.on("toggle_expanded")(this);
}
}
this.toggleExpanded = function(){
if(expanded){
this.setExpanded(false);
}else{
this.setExpanded(true);
}
};
this.isExpanded = function(){
if(this.isLeaf()){
return true;
}else{
return expanded;
}
}
this.setEnabled = function(_enabled){
if(typeof _enabled === "boolean"){
if(enabled == _enabled){
return;
}
enabled = _enabled;
if(_enabled){
this.on("enable")(this);
}else{
this.on("disable")(this);
}
this.on("toggle_enabled")(this);
}
}
this.toggleEnabled = function(){
if(enabled){
this.setEnabled(false);
}else{
this.setEnabled(true);
}
}
this.isEnabled = function(){
return enabled;
}
this.setSelected = function(_selected){
if(typeof _selected !== "boolean"){
return;
}
if(selected == _selected){
return;
}
selected = _selected;
if(_selected){
this.on("select")(this);
}else{
this.on("deselect")(this);
}
this.on("toggle_selected")(this);
}
this.toggleSelected = function(){
if(selected){
this.setSelected(false);
}else{
this.setSelected(true);
}
}
this.isSelected = function(){
return selected;
}
this.open = function(){
if(!this.isLeaf()){
this.on("open")(this);
}
}
this.on = function(ev, callback){
if(typeof callback === "undefined"){
if(typeof events[ev] !== "function"){
return function(){};
}else{
return events[ev];
}
}
if(typeof callback !== 'function'){
throw new Error("Argument 2 must be of type function");
}
events[ev] = callback;
}
this.getListener = function(ev){
return events[ev];
}
this.equals = function(node){
if(node instanceof TreeNode){
if(node.getUserObject() == userObject){
return true;
}
}
return false;
}
this.toString = function(){
if(typeof userObject === "string"){
return userObject;
}else{
return userObject.toString();
}
}
}
function TreePath(root, node){
var nodes = new Array();
this.setPath = function(root, node){
nodes = new Array();
while(typeof node !== "undefined" && !node.equals(root)){
nodes.push(node);
node = node.parent;
}
if(typeof node !== "undefined" && node.equals(root)){
nodes.push(root);
}else{
nodes = new Array();
throw new Error("Node is not contained in the tree of root");
}
nodes = nodes.reverse();
return nodes;
}
this.getPath = function(){
return nodes;
}
this.toString = function(){
return nodes.join(" - ");
}
if(root instanceof TreeNode && node instanceof TreeNode){
this.setPath(root, node);
}
}
/*
* Util-Methods
*/
const TreeUtil = {
// default_leaf_icon: "<span>🖹</span>",
// default_parent_icon: "<span>🗁</span>",
// default_open_icon: "<span>◢</span>",
// default_close_icon: "<span>▶</span>",
// default_leaf_icon: "<span>▷</span>",
default_leaf_icon: "<span>▱</span>",
default_parent_icon: "<span></span>",
default_open_icon: "<span>▼</span>",
default_close_icon: "<span>▶</span>",
isDOM: function(obj){
try {
return obj instanceof HTMLElement;
}
catch(e){
return (typeof obj==="object") &&
(obj.nodeType===1) && (typeof obj.style === "object") &&
(typeof obj.ownerDocument ==="object");
}
},
getProperty: function(options, opt, def){
if(typeof options[opt] === "undefined"){
return def;
}
return options[opt];
},
expandNode: function(node){
node.setExpanded(true);
if(!node.isLeaf()){
node.getChildren().forEach(function(child){
TreeUtil.expandNode(child);
});
}
},
collapseNode: function(node){
node.setExpanded(false);
if(!node.isLeaf()){
node.getChildren().forEach(function(child){
TreeUtil.collapseNode(child);
});
}
},
getSelectedNodesForNode: function(node){
if(!(node instanceof TreeNode)){
throw new Error("Parameter 1 must be of type TreeNode");
}
var ret = new Array();
if(node.isSelected()){
ret.push(node);
}
node.getChildren().forEach(function(child){
if(child.isSelected()){
if(ret.indexOf(child) == -1){
ret.push(child);
}
}
if(!child.isLeaf()){
TreeUtil.getSelectedNodesForNode(child).forEach(function(_node){
if(ret.indexOf(_node) == -1){
ret.push(_node);
}
});
}
});
return ret;
}
};
var TreeConfig = {
leaf_icon: TreeUtil.default_leaf_icon,
parent_icon: TreeUtil.default_parent_icon,
open_icon: TreeUtil.default_open_icon,
close_icon: TreeUtil.default_close_icon,
context_menu: undefined
};
function getOffset(el) {
const rect = el.getBoundingClientRect();
return {
left: rect.left + window.scrollX,
top: rect.top + window.scrollY
};
} | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/webui/js/tree.js | tree.js |
import threading
from Logging import Logging
from Constants import Constants as C
import CSE
# TODO: create/delete each resource to count! resourceCreate(ty)
# TODO move event creations from here to the resp modules.
class EventManager(object):
def __init__(self):
self.addEvent('httpRetrieve')
self.addEvent('httpCreate')
self.addEvent('httpDelete')
self.addEvent('httpUpdate')
self.addEvent('httpRedirect')
self.addEvent('createResource')
self.addEvent('deleteResource')
self.addEvent('cseStartup')
self.addEvent('logError')
self.addEvent('logWarning')
Logging.log('EventManager initialized')
def shutdown(self):
Logging.log('EventManager shut down')
#########################################################################
# Event topics are added as new methods of the handler class with the
# given name and can be raised by calling those new methods, e.g.
#
# manager.addEvent("someName") # add new event topic
# manager.addHandler(manager.someName, handlerFunction) # add an event handler
# handler.someName() # raises the event
def addEvent(self, name):
if not hasattr(self, name):
setattr(self, name, Event())
return getattr(self, name)
def removeEvent(self, name):
if hasattr(self, name):
delattr(self, name)
def hasEvent(self, name):
return name in self.__dict__
def addHandler(self, event, func):
event.append(func)
def removeHandler(self, event, func):
try:
event.remove(func)
except ValueError:
pass
#########################################################################
#
# Event class.
#
class Event(list):
"""Event subscription.
A list of callable methods. Calling an instance of Event will cause a
call to each function in the list in ascending order by index.
It supports all methods from its base class (list), so use append() and remove()
to add and remove functions.
An event is raised by calling the event: anEvent(anArgument). It may have an
arbitrary number of arguments which are passed to the functions.
The functions will be called in a separate thread in order to prevent waiting
for them to return. This might lead to race conditions, so any necessary
synchronization must be done inside the handler functions.
"""
def __call__(self, *args, **kwargs):
# Call the handlers in a thread so that we don't block everything
thrd = threading.Thread(target=self._callThread, args=args, kwargs=kwargs)
thrd.daemon = True # Make the thread a daemon of the main thread
thrd.start()
def _callThread(self, *args, **kwargs):
for function in self:
function(*args, **kwargs)
def __repr__(self):
return "Event(%s)" % list.__repr__(self) | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/acme/EventManager.py | EventManager.py |
from Logging import Logging
from Constants import Constants as C
import CSE, Utils
from resources import FCNT, MgmtObj
class GroupManager(object):
def __init__(self):
# Add delete event handler because we want to monitor the resources in a group's mid attribute
CSE.event.addHandler(CSE.event.deleteResource, self.handleDeleteEvent)
Logging.log('GroupManager initialized')
def shutdown(self):
Logging.log('GroupManager shut down')
#########################################################################
def validateGroup(self, group, originator):
# Get consistencyStrategy
csy = group.csy
# Check member types and group set type
# Recursive for sub groups, if .../fopt. Check privileges of originator
if not (res := self._checkMembersAndPrivileges(group, group.mt, group.csy, group.spty, originator))[0]:
return res
# Check for max members
if group.hasAttribute('mnm'): # only if mnm attribute is set
try: # mnm may not be a number
if len(group.mid) > int(group.mnm):
return (False, C.rcMaxNumberOfMemberExceeded)
except ValueError:
return (False, C.rcInvalidArguments)
# TODO: check virtual resources
return (True, C.rcOK)
def _checkMembersAndPrivileges(self, group, mt, csy, spty, originator):
# check for duplicates and remove them
midsList = [] # contains the real mid list
for mid in group['mid']:
# get the resource and check it
id = mid[:-5] if (hasFopt := mid.endswith('/fopt')) else mid # remove /fopt to retrieve the resource
if (r := CSE.dispatcher.retrieveResource(id))[0] is None:
return (False, C.rcNotFound)
resource = r[0]
# skip if ri is already in the list
if (ri := resource.ri) in midsList:
continue
# check privileges
if not CSE.security.hasAccess(originator, resource, C.permRETRIEVE):
return (False, C.rcReceiverHasNoPrivileges)
# if it is a group + fopt, then recursively check members
if (ty := resource.ty) == C.tGRP and hasFopt:
if not (res := self._checkMembersAndPrivileges(resource, mt, csy, spty, originator))[0]:
return res
ty = resource.mt # set the member type to the group's member type
# check specializationType spty
if spty is not None:
if isinstance(spty, int): # mgmtobj type
if isinstance(resource, MgmtObj.MgmtObj) and ty != spty:
return (False, C.rcGroupMemberTypeInconsistent)
elif isinstance(spty, str): # fcnt specialization
if isinstance(resource, FCNT.FCNT) and resource.cnd != spty:
return (False, C.rcGroupMemberTypeInconsistent)
# check type of resource and member type of group
if not (mt == C.tMIXED or ty == mt): # types don't match
if csy == C.csyAbandonMember: # abandon member
continue
elif csy == C.csySetMixed: # change group's member type
mt = C.tMIXED
group['mt'] = C.tMIXED
else: # abandon group
return (False, C.rcGroupMemberTypeInconsistent)
# member seems to be ok, so add ri to the list
midsList.append(ri if not hasFopt else ri + '/fopt') # restore fopt for ri
group['mid'] = midsList # replace with a cleaned up mid
group['cnm'] = len(midsList)
return (True, C.rcOK)
def foptRequest(self, operation, fopt, request, id, originator, ct=None, ty=None):
""" Handle requests to a fanOutPoint.
This method might be called recursivly, when there are groups in groups."""
# get parent / group
group = fopt.retrieveParentResource()
if group is None:
return (None, C.rcNotFound)
# get the rqi header field
(_, _, _, rqi, _) = Utils.getRequestHeaders(request)
# check whether there is something after the /fopt ...
tail = id.partition('/fopt/')[2] if '/fopt/' in id else ''
Logging.logDebug('Adding additional path elements: %s' % tail)
# walk through all members
result = []
tail = '/' + tail if len(tail) > 0 else '' # add remaining path, if any
for mid in group.mid:
# Try to get the SRN and add the tail
if (srn := Utils.structuredPathFromRI(mid)) is not None:
mid = srn + tail
else:
mid = mid + tail
# Invoke the request
if operation == C.opRETRIEVE:
if (res := CSE.dispatcher.handleRetrieveRequest(request, mid, originator))[0] is None:
return res
elif operation == C.opCREATE:
if (res := CSE.dispatcher.handleCreateRequest(request, mid, originator, ct, ty))[0] is None:
return res
elif operation == C.opUPDATE:
if (res := CSE.dispatcher.handleUpdateRequest(request, mid, originator, ct))[0] is None:
return res
elif operation == C.opDELETE:
if (res := CSE.dispatcher.handleDeleteRequest(request, mid, originator))[1] != C.rcDeleted:
return res
else:
return (None, C.rcOperationNotAllowed)
result.append(res)
# construct aggregated response
if len(result) > 0:
items = []
for r in result:
item = { 'rsc' : r[1],
'rqi' : rqi,
'pc' : r[0].asJSON(),
'to' : r[0].__srn__
}
items.append(item)
rsp = { 'm2m:rsp' : items}
agr = { 'm2m:agr' : rsp }
else:
agr = {}
# Different "ok" results per operation
return (agr, [ C.rcOK, C.rcCreated, C.rcUpdated, C.rcDeleted ][operation])
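# For illustration, an aggregated fanout response as constructed above has roughly
# this shape (values are invented, not taken from a real request):
#
#   { "m2m:agr": { "m2m:rsp": [
#       { "rsc": 2000, "rqi": "1234", "pc": { ... }, "to": "cse/grp/member1" },
#       { "rsc": 2000, "rqi": "1234", "pc": { ... }, "to": "cse/grp/member2" }
#   ] } }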
#########################################################################
def handleDeleteEvent(self, deletedResource):
"""Handle a delete event. Check whether the deleted resource is a member
of group. If yes, remove the member."""
ri = deletedResource.ri
groups = CSE.storage.searchByTypeFieldValue(C.tGRP, 'mid', ri)
for group in groups:
group['mid'].remove(ri)
group['cnm'] = group.cnm - 1
CSE.storage.updateResource(group) | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/acme/GroupManager.py | GroupManager.py |
import datetime, random, string, sys, re
from resources import ACP, AE, ANDI, ANI, BAT, CIN, CNT, CNT_LA, CNT_OL, CSEBase, CSR, DVC
from resources import DVI, EVL, FCI, FCNT, FCNT_LA, FCNT_OL, FWR, GRP, GRP_FOPT, MEM, NOD, RBO, SUB, SWR, Unknown
from Constants import Constants as C
from Configuration import Configuration
from Logging import Logging
import CSE
def uniqueRI(prefix=''):
p = prefix.split(':')
p = p[1] if len(p) == 2 else p[0]
return p + uniqueID()
def isUniqueRI(ri):
return len(CSE.storage.identifier(ri)) == 0
def uniqueRN(prefix='un'):
p = prefix.split(':')
p = p[1] if len(p) == 2 else p[0]
return "%s_%s" % (p, ''.join(random.choices(string.ascii_uppercase + string.digits + string.ascii_lowercase, k=C.maxIDLength)))
# create a unique aei, M2M-SP type
def uniqueAEI(prefix='S'):
return prefix + ''.join(random.choices(string.ascii_uppercase + string.digits + string.ascii_lowercase, k=C.maxIDLength))
def fullRI(ri):
return '/' + Configuration.get('cse.csi') + '/' + ri
def uniqueID():
return str(random.randint(1,sys.maxsize))
def isVirtualResource(resource):
return (ty := resource.ty) is not None and ty in C.tVirtualResources
# Check for valid ID
def isValidID(id):
#return len(id) > 0 and '/' not in id # pi might be ""
return '/' not in id
def getResourceDate(delta=0):
return toISO8601Date(datetime.datetime.utcnow() + datetime.timedelta(seconds=delta))
def toISO8601Date(ts):
if isinstance(ts, float):
ts = datetime.datetime.utcfromtimestamp(ts)
return ts.strftime('%Y%m%dT%H%M%S,%f')
def structuredPath(resource):
rn = resource.rn
if resource.ty == C.tCSEBase: # if CSE
return rn
# retrieve identifier record of the parent
if (pi := resource.pi) is None:
Logging.logErr('PI is None')
return rn
rpi = CSE.storage.identifier(pi)
if len(rpi) == 1:
return rpi[0]['srn'] + '/' + rn
Logging.logErr('Parent not found in DB')
return rn # fallback
def structuredPathFromRI(ri):
if len((identifiers := CSE.storage.identifier(ri))) == 1:
return identifiers[0]['srn']
return None
def resourceFromJSON(jsn, pi=None, acpi=None, tpe=None, create=False):
(jsn, root) = pureResource(jsn) # remove optional "m2m:xxx" level
ty = jsn['ty'] if 'ty' in jsn else tpe
if ty is not None and tpe is not None and ty != tpe:
return None
mgd = jsn['mgd'] if 'mgd' in jsn else None # for mgmtObj
# Add extra acpi
if acpi is not None:
jsn['acpi'] = acpi if type(acpi) is list else [ acpi ]
# sorted by assumed frequency (small optimization)
if ty == C.tCIN or root == C.tsCIN:
return CIN.CIN(jsn, pi=pi, create=create)
elif ty == C.tCNT or root == C.tsCNT:
return CNT.CNT(jsn, pi=pi, create=create)
elif ty == C.tGRP or root == C.tsGRP:
return GRP.GRP(jsn, pi=pi, create=create)
elif ty == C.tGRP_FOPT or root == C.tsGRP_FOPT:
return GRP_FOPT.GRP_FOPT(jsn, pi=pi, create=create)
elif ty == C.tACP or root == C.tsACP:
return ACP.ACP(jsn, pi=pi, create=create)
elif ty == C.tFCNT:
return FCNT.FCNT(jsn, pi=pi, fcntType=root, create=create)
elif ty == C.tFCI:
return FCI.FCI(jsn, pi=pi, fcntType=root, create=create)
elif ty == C.tAE or root == C.tsAE:
return AE.AE(jsn, pi=pi, create=create)
elif ty == C.tSUB or root == C.tsSUB:
return SUB.SUB(jsn, pi=pi, create=create)
elif ty == C.tCSR or root == C.tsCSR:
return CSR.CSR(jsn, pi=pi, create=create)
elif ty == C.tNOD or root == C.tsNOD:
return NOD.NOD(jsn, pi=pi, create=create)
elif (ty == C.tMGMTOBJ and mgd == C.mgdFWR) or root == C.tsFWR:
return FWR.FWR(jsn, pi=pi, create=create)
elif (ty == C.tMGMTOBJ and mgd == C.mgdSWR) or root == C.tsSWR:
return SWR.SWR(jsn, pi=pi, create=create)
elif (ty == C.tMGMTOBJ and mgd == C.mgdMEM) or root == C.tsMEM:
return MEM.MEM(jsn, pi=pi, create=create)
elif (ty == C.tMGMTOBJ and mgd == C.mgdANI) or root == C.tsANI:
return ANI.ANI(jsn, pi=pi, create=create)
elif (ty == C.tMGMTOBJ and mgd == C.mgdANDI) or root == C.tsANDI:
return ANDI.ANDI(jsn, pi=pi, create=create)
elif (ty == C.tMGMTOBJ and mgd == C.mgdBAT) or root == C.tsBAT:
return BAT.BAT(jsn, pi=pi, create=create)
elif (ty == C.tMGMTOBJ and mgd == C.mgdDVI) or root == C.tsDVI:
return DVI.DVI(jsn, pi=pi, create=create)
elif (ty == C.tMGMTOBJ and mgd == C.mgdDVC) or root == C.tsDVC:
return DVC.DVC(jsn, pi=pi, create=create)
elif (ty == C.tMGMTOBJ and mgd == C.mgdRBO) or root == C.tsRBO:
return RBO.RBO(jsn, pi=pi, create=create)
elif (ty == C.tMGMTOBJ and mgd == C.mgdEVL) or root == C.tsEVL:
return EVL.EVL(jsn, pi=pi, create=create)
elif ty == C.tCNT_LA or root == C.tsCNT_LA:
return CNT_LA.CNT_LA(jsn, pi=pi, create=create)
elif ty == C.tCNT_OL or root == C.tsCNT_OL:
return CNT_OL.CNT_OL(jsn, pi=pi, create=create)
elif ty == C.tFCNT_LA:
return FCNT_LA.FCNT_LA(jsn, pi=pi, create=create)
elif ty == C.tFCNT_OL:
return FCNT_OL.FCNT_OL(jsn, pi=pi, create=create)
elif ty == C.tCSEBase or root == C.tsCSEBase:
return CSEBase.CSEBase(jsn, create=create)
else:
return Unknown.Unknown(jsn, ty, root, pi=pi, create=create) # Capture-All resource
# return the "pure" json without the "m2m:xxx" resource specifier
excludeFromRoot = [ 'pi' ]
def pureResource(jsn):
rootKeys = list(jsn.keys())
if len(rootKeys) == 1 and rootKeys[0] not in excludeFromRoot:
return (jsn[rootKeys[0]], rootKeys[0])
return (jsn, None)
# find a structured element in JSON
def findXPath(jsn, element, default=None):
paths = element.split("/")
data = jsn
for i in range(0,len(paths)):
if paths[i] not in data:
return default
data = data[paths[i]]
return data
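# Example (sketch): findXPath() walks a '/'-separated path through nested dictionaries:
#
#   findXPath({'m2m:sgn': {'nev': {'net': 3}}}, 'm2m:sgn/nev/net')    # -> 3
#   findXPath({'m2m:sgn': {}}, 'm2m:sgn/nev/net', default=-1)         # -> -1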
# set a structured element in JSON. Create if necessary, and observce the overwrite option
def setXPath(jsn, element, value, overwrite=True):
paths = element.split("/")
ln = len(paths)
data = jsn
for i in range(0,ln-1):
if paths[i] not in data:
data[paths[i]] = {}
data = data[paths[i]]
if paths[ln-1] in data and not overwrite:
return # don't overwrite
data[paths[ln-1]] = value
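# Example (sketch): setXPath() creates missing intermediate dictionaries on the way:
#
#   jsn = {}
#   setXPath(jsn, 'm2m:sgn/nev/net', 1)                  # jsn == {'m2m:sgn': {'nev': {'net': 1}}}
#   setXPath(jsn, 'm2m:sgn/nev/net', 2, overwrite=False) # value stays 1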
urlregex = re.compile(
r'^(?:http|ftp)s?://' # http://, https://, ftp://, ftps://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain
r'(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9]))|' # localhost or single name w/o domain
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ipv4
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE) # optional path
def isURL(url):
return url is not None and re.match(urlregex, url) is not None
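# Example (sketch):
#
#   isURL('https://127.0.0.1:8080/cse')   # -> True
#   isURL('not a url')                    # -> False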
# Compare an old and a new resource. Keywords and values. Ignore internal __XYZ__ keys
# Return a dictionary.
def resourceDiff(old, new):
res = {}
for k,v in new.items():
if k.startswith('__'): # ignore all internal attributes
continue
if not k in old: # Key not in old
res[k] = v
elif v != old[k]: # Value different
res[k] = v
return res
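# Example (sketch): only new or changed attributes are reported, internal
# '__xyz__' attributes are ignored:
#
#   resourceDiff({'rn': 'cnt', 'mni': 10, '__srn__': 'a'},
#                {'rn': 'cnt', 'mni': 20, '__srn__': 'b'})   # -> {'mni': 20}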
def getCSE():
return CSE.dispatcher.retrieveResource(Configuration.get('cse.ri'))
# Check whether the target contains a fanoutPoint in between or as the target
def fanoutPointResource(id):
nid = None
if id.endswith('/fopt'):
nid = id
elif '/fopt/' in id:
(head, sep, tail) = id.partition('/fopt/')
nid = head + '/fopt'
if nid is not None:
if (result := CSE.dispatcher.retrieveResource(nid))[0] is not None:
return result[0]
return None
#
# HTTP request helper functions
#
def requestID(request, rootPath):
p = request.path
if p.startswith(rootPath):
p = p[len(rootPath):]
if p.startswith('/'):
p = p[1:]
return p
def requestHeaderField(request, field):
if field not in request.headers:
return None
return request.headers.get(field)
def getRequestHeaders(request):
originator = requestHeaderField(request, C.hfOrigin)
rqi = requestHeaderField(request, C.hfRI)
# content-type
ty = None
if (ct := request.content_type) is not None:
if not ct.startswith(tuple(C.supportedContentSerializations)):
ct = None
else:
p = ct.partition(';')
ct = p[0] # content-type
t = p[2].partition('=')[2]
ty = int(t) if t.isdigit() else C.tUNKNOWN # resource type
return (originator, ct, ty, rqi, C.rcOK) | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/acme/Utils.py | Utils.py |
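# For illustration: a Content-Type header of 'application/json;ty=3' yields
# ct='application/json' and ty=3 (container), while a missing or unsupported
# content type yields ct=None and ty=None.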
import json, os, fnmatch
from Utils import *
from Configuration import Configuration
from Constants import Constants as C
import CSE
from Logging import Logging
from resources import Resource
class Importer(object):
# List of "priority" resources that must be imported first for correct CSE operation
_firstImporters = [ 'csebase.json', 'acp.admin.json', 'acp.default.json' ]
def __init__(self):
Logging.log('Importer initialized')
def importResources(self, path=None):
# Import only when the DB is empty; otherwise skip importing
if CSE.dispatcher.countResources() > 0:
Logging.log('Resources already imported, skipping importing')
# But we still need the CSI etc of the CSE
rss = CSE.dispatcher.retrieveResourcesByType(C.tCSEBase)
if rss is not None:
Configuration.set('cse.csi', rss[0]['csi'])
Configuration.set('cse.ri', rss[0]['ri'])
Configuration.set('cse.rn', rss[0]['rn'])
return True
Logging.logErr('CSE not found')
return False
# get the originator for the creator attribute of imported resources
originator = Configuration.get('cse.originator')
# Import
if path is None:
if Configuration.has('cse.resourcesPath'):
path = Configuration.get('cse.resourcesPath')
else:
Logging.logErr('cse.resourcesPath not set')
raise RuntimeError('cse.resourcesPath not set')
if not os.path.exists(path):
Logging.logWarn('Import directory does not exist: %s' % path)
return False
Logging.log('Importing resources from directory: %s' % path)
self._prepareImporting()
# first import the priority resources, like CSE, Admin ACP, Default ACP
hasCSE = False
hasACP = False
for rn in self._firstImporters:
fn = path + '/' + rn
if os.path.exists(fn):
Logging.log('Importing resource: %s ' % fn)
with open(fn) as jfile:
r = resourceFromJSON(json.load(jfile), create=True)
# Check resource creation
if not CSE.registration.checkResourceCreation(r, originator):
continue
CSE.dispatcher.createResource(r)
ty = r.ty
if ty == C.tCSEBase:
Configuration.set('cse.csi', r.csi)
Configuration.set('cse.ri', r.ri)
Configuration.set('cse.rn', r.rn)
hasCSE = True
elif ty == C.tACP:
hasACP = True
# Check presence of CSE and at least one ACP
if not (hasCSE and hasACP):
Logging.logErr('CSE and/or default ACP missing during import')
self._finishImporting()
return False
# then get the filenames of all other files and sort them. Process them in order
filenames = sorted(os.listdir(path))
for fn in filenames:
if fn not in self._firstImporters:
Logging.log('Importing resource from file: %s' % fn)
with open(path + '/' + fn) as jfile:
# update an existing resource
if 'update' in fn:
j = json.load(jfile)
keys = list(j.keys())
if len(keys) == 1 and (k := keys[0]) and 'ri' in j[k] and (ri := j[k]['ri']) is not None:
(r, _) = CSE.dispatcher.retrieveResource(ri)
if r is not None:
CSE.dispatcher.updateResource(r, j)
# create a new resource
else:
r = resourceFromJSON(json.load(jfile), create=True)
# Try to get parent resource
if r is not None:
parent = None
if (pi := r.pi) is not None:
(parent, _) = CSE.dispatcher.retrieveResource(pi)
# Check resource creation
if not CSE.registration.checkResourceCreation(r, originator):
continue
# Add the resource
CSE.dispatcher.createResource(r, parent)
else:
Logging.logWarn('Unknown resource in file: %s' % fn)
self._finishImporting()
return True
def _prepareImporting(self):
# temporarily disable access control
self._oldacp = Configuration.get('cse.enableACPChecks')
Configuration.set('cse.enableACPChecks', False)
def _finishImporting(self):
Configuration.set('cse.enableACPChecks', self._oldacp) | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/acme/Importer.py | Importer.py |
import logging, logging.handlers, os, inspect, re, sys, datetime
from logging import StreamHandler
from Configuration import Configuration
levelName = {
logging.INFO : 'ℹ️ I',
logging.DEBUG : '🐞 D',
logging.ERROR : '🔥 E',
logging.WARNING : '⚠️ W'
# logging.INFO : 'INFO ',
# logging.DEBUG : 'DEBUG ',
# logging.ERROR : 'ERROR ',
# logging.WARNING : 'WARNING'
}
class Logging:
""" Wrapper class for the logging subsystem. This class wraps the
initialization of the logging subsystem and provides convenience
methods for printing log, error and warning messages to a
logfile and to the console.
"""
logger = None
logLevel = logging.INFO
loggingEnabled = True
enableFileLogging = True
@staticmethod
def init():
"""Init the logging system.
"""
if Logging.logger is not None:
return
Logging.enableFileLogging = Configuration.get('logging.enableFileLogging')
Logging.logLevel = Configuration.get('logging.level')
Logging.loggingEnabled = Configuration.get('logging.enable')
Logging.logger = logging.getLogger('logging')
# Log to file only when file logging is enabled
if Logging.enableFileLogging:
logfile = Configuration.get('logging.file')
os.makedirs(os.path.dirname(logfile), exist_ok=True)# create log directory if necessary
logfp = logging.handlers.RotatingFileHandler( logfile,
maxBytes=Configuration.get('logging.size'),
backupCount=Configuration.get('logging.count'))
logfp.setLevel(Logging.logLevel)
logfp.setFormatter(logging.Formatter('%(levelname)s %(asctime)s %(message)s'))
Logging.logger.addHandler(logfp)
Logging.logger.setLevel(Logging.logLevel)
@staticmethod
def log(msg, withPath=True):
"""Print a log message with level INFO.
"""
Logging._log(logging.INFO, msg, withPath)
@staticmethod
def logDebug(msg, withPath=True):
"""Print a log message with level DEBUG.
"""
Logging._log(logging.DEBUG, msg, withPath)
@staticmethod
def logErr(msg, withPath=True):
"""Print a log message with level ERROR.
"""
import CSE
CSE.event.logError() # raise logError event
Logging._log(logging.ERROR, msg, withPath)
@staticmethod
def logWarn(msg, withPath=True):
"""Print a log message with level WARNING.
"""
import CSE
CSE.event.logWarning() # raise logWarning event
Logging._log(logging.WARNING, msg, withPath)
@staticmethod
def _log(level, msg, withPath):
try:
if Logging.loggingEnabled and Logging.logLevel <= level:
caller = inspect.getframeinfo(inspect.stack()[2][0])
if withPath:
msg = '(%s:%d) %s' % (os.path.basename(caller.filename), caller.lineno, msg)
#print( "(" + time.ctime(time.time()) + ") " + msg)
print('%s %s %s' % (levelName[level], datetime.datetime.now().isoformat(sep=' ', timespec='milliseconds'), msg))
Logging.logger.log(level, msg)
except:
pass
#
# Redirect handler to redirect other log output to our log
#
class RedirectHandler(StreamHandler):
def __init__(self, topic):
StreamHandler.__init__(self)
self.topic = topic
def emit(self, record):
msg = '(%s) %s' % (self.topic, record.getMessage())
msg = re.sub(r'\[.+?\] ', '', msg) # clean up (remove superflous date and time)
if record.levelno == logging.DEBUG:
Logging.logDebug(msg, False)
elif record.levelno == logging.INFO:
Logging.log(msg, False)
elif record.levelno == logging.WARNING:
Logging.logWarn(msg, False)
elif record.levelno == logging.ERROR:
Logging.logErr(msg, False) | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/acme/Logging.py | Logging.py |
class Constants(object):
# Type constants
tUNKNOWN = -1
tMIXED = 0
tsMIXED = 'mixed'
tACP = 1
tsACP = 'm2m:acp'
tAE = 2
tsAE = 'm2m:ae'
tCNT = 3
tsCNT = 'm2m:cnt'
tCIN = 4
tsCIN = 'm2m:cin'
tCSEBase = 5
tsCSEBase = 'm2m:cb'
tGRP = 9
tsGRP = 'm2m:grp'
tMGMTOBJ = 13
tsMGMTOBJ = 'm2m:mgo' # not an official shortname
tNOD = 14
tsNOD = 'm2m:nod'
tCSR = 16
tsCSR = 'm2m:csr'
tSUB = 23
tsSUB = 'm2m:sub'
tFCNT = 28
tsFCNT = 'm2m:fcnt' # not an official shortname
tFCI = 52
tsFCI = 'm2m:fci' # not an official shortname
# Virtual resources (proprietary resource types)
tCNT_OL = -20001
tsCNT_OL = 'm2m:ol'
tCNT_LA = -20002
tsCNT_LA = 'm2m:la'
tGRP_FOPT = -20003
tsGRP_FOPT = 'm2m:fopt'
tFCNT_OL = -20004
tsFCNT_OL = 'm2m:ol'
tFCNT_LA = -20005
tsFCNT_LA = 'm2m:la'
# <mgmtObj> Specializations
mgdFWR = 1001
tsFWR = 'm2m:fwr'
mgdSWR = 1002
tsSWR = 'm2m:swr'
mgdMEM = 1003
tsMEM = 'm2m:mem'
mgdANI = 1004
tsANI = 'm2m:ani'
mgdANDI = 1005
tsANDI = 'm2m:andi'
mgdBAT = 1006
tsBAT = 'm2m:bat'
mgdDVI = 1007
tsDVI = 'm2m:dvi'
mgdDVC = 1008
tsDVC = 'm2m:dvc'
mgdRBO = 1009
tsRBO = 'm2m:rbo'
mgdEVL = 1010
tsEVL = 'm2m:evl'
# List of virtual resources
tVirtualResources = [ tCNT_LA, tCNT_OL, tGRP_FOPT ]
# Supported by this CSE
supportedResourceTypes = [ tACP, tAE, tCNT, tCIN, tCSEBase, tGRP, tMGMTOBJ, tNOD, tCSR, tSUB, tFCNT, tFCI ]
supportedContentSerializations = [ 'application/json', 'application/vnd.onem2m-res+json' ]
supportedReleaseVersions = [ '3' ]
# List of resource types for which "creator" is allowed
# Also add later: eventConfig, pollingChannel, statsCollect, statsConfig, semanticDescriptor,
# notificationTargetPolicy, timeSeries, crossResourceSubscription, backgroundDataTransfer
tCreatorAllowed = [ tCIN, tCNT, tGRP, tSUB, tFCNT ]
# max length of identifiers
maxIDLength = 10
# Response codes
rcOK = 2000
rcCreated = 2001
rcDeleted = 2002
rcUpdated = 2004
rcBadRequest = 4000
rcNotFound = 4004
rcOperationNotAllowed = 4005
rcContentsUnacceptable = 4102
rcOriginatorHasNoPrivilege = 4103
rcInvalidChildResourceType = 4108
rcGroupMemberTypeInconsistent = 4110
rcInternalServerError = 5000
rcNotImplemented = 5001
rcTargetNotReachable = 5103
rcReceiverHasNoPrivileges = 5105
rcAlreadyExists = 5106
rcTargetNotSubscribable = 5203
rcMaxNumberOfMemberExceeded = 6010
rcInvalidArguments = 6023
rcInsufficientArguments = 6024
# Operations
opRETRIEVE = 0
opCREATE = 1
opUPDATE = 2
opDELETE = 3
# Permissions
permNONE = 0
permCREATE = 1
permRETRIEVE = 2
permUPDATE = 4
permDELETE = 8
permNOTIFY = 16
permDISCOVERY = 32
permALL = 63
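# The permissions above form a bitmask and may be combined, e.g. (illustrative):
# permRETRIEVE | permDISCOVERY == 34; permALL (63) grants all operations.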
# CSE Types
cseTypeIN = 1
cseTypeMN = 2
cseTypeASN = 3
cseTypes = [ '', 'IN', 'MN', 'ASN' ]
# Header Fields
hfOrigin = 'X-M2M-Origin'
hfRI = 'X-M2M-RI'
hfvContentType = 'application/json'
# Subscription-related
# notificationContentTypes
nctAll = 1
nctModifiedAttributes = 2
nctRI = 3
# eventNotificationCriteria/NotificationEventTypes
netResourceUpdate = 1 # default
netResourceDelete = 2
netCreateDirectChild = 3
netDeleteDirectChild = 4
netRetrieveCNTNoChild = 5 # TODO not supported yet
# Result Content types
rcnNothing = 0
rcnAttributes = 1
rcnAttributesAndChildResources = 4
rcnAttributesAndChildResourceReferences = 5
rcnChildResourceReferences = 6
rcnChildResources = 8
rcnModifiedAttributes = 9
# TODO support other RCN
# Desired Identifier Result Type
drtStructured = 1 # default
drtUnstructured = 2
# Filter Usage
fuDiscoveryCriteria = 1
fuConditionalRetrieval = 2 # default
fuIPEOnDemandDiscovery = 3
# Group related
# consistencyStrategy
csyAbandonMember = 1 # default
csyAbandonGroup = 2
csySetMixed = 3 | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/acme/Constants.py | Constants.py |
import logging, configparser
from Constants import Constants as C
defaultConfigFile = 'acme.ini'
defaultImportDirectory = './init'
class Configuration(object):
_configuration = {}
@staticmethod
def init(args = None):
# resolve the args, of any
argsConfigfile = args.configfile if args is not None else defaultConfigFile
argsLoglevel = args.loglevel if args is not None else None
argsDBReset = args.dbreset if args is not None else False
argsDBStorageMode = args.dbstoragemode if args is not None else None
argsImportDirectory = args.importdirectory if args is not None else None
argsAppsEnabled = args.appsenabled if args is not None else None
config = configparser.ConfigParser(interpolation=configparser.ExtendedInterpolation())
config.read(argsConfigfile)
try:
Configuration._configuration = {
'configfile' : argsConfigfile,
#
# HTTP Server
#
'http.listenIF' : config.get('server.http', 'listenIF', fallback='127.0.0.1'),
'http.port' : config.getint('server.http', 'port', fallback=8080),
'http.root' : config.get('server.http', 'root', fallback='/'),
'http.address' : config.get('server.http', 'address', fallback='http://127.0.0.1:8080'),
'http.multiThread' : config.getboolean('server.http', 'multiThread', fallback=True),
#
# Database
#
'db.path' : config.get('database', 'path', fallback='./data'),
'db.inMemory' : config.getboolean('database', 'inMemory', fallback=False),
'db.cacheSize' : config.getint('database', 'cacheSize', fallback=0), # Default: no caching
'db.resetAtStartup' : config.getboolean('database', 'resetAtStartup', fallback=False),
#
# Logging
#
'logging.enable' : config.getboolean('logging', 'enable', fallback=True),
'logging.enableFileLogging' : config.getboolean('logging', 'enableFileLogging', fallback=True),
'logging.file' : config.get('logging', 'file', fallback='./logs/cse.log'),
'logging.level' : config.get('logging', 'level', fallback='debug'),
'logging.size' : config.getint('logging', 'size', fallback=100000),
'logging.count' : config.getint('logging', 'count', fallback=10), # Number of log files
#
# CSE
#
'cse.type' : config.get('cse', 'type', fallback='IN'), # IN, MN, ASN
'cse.resourcesPath' : config.get('cse', 'resourcesPath', fallback=defaultImportDirectory),
'cse.expirationDelta' : config.getint('cse', 'expirationDelta', fallback=60*60*24*365), # 1 year, in seconds
'cse.enableACPChecks' : config.getboolean('cse', 'enableACPChecks', fallback=True),
'cse.adminACPI' : config.get('cse', 'adminACPI', fallback='acpAdmin'),
'cse.defaultACPI' : config.get('cse', 'defaultACPI', fallback='acpDefault'),
'cse.originator' : config.get('cse', 'originator', fallback='CAdmin'),
'cse.csi' : '(not set yet)', # will be set by importer
'cse.ri' : '(not set yet)', # will be set by importer
'cse.rn' : '(not set yet)', # will be set by importer
'cse.enableApplications' : config.getboolean('cse', 'enableApplications', fallback=True),
'cse.enableNotifications' : config.getboolean('cse', 'enableNotifications', fallback=True),
'cse.enableRemoteCSE' : config.getboolean('cse', 'enableRemoteCSE', fallback=True),
'cse.enableTransitRequests' : config.getboolean('cse', 'enableTransitRequests', fallback=True),
'cse.sortDiscoveredResources' : config.getboolean('cse', 'sortDiscoveredResources', fallback=True),
#
# Remote CSE
#
'cse.remote.address' : config.get('cse.remote', 'address', fallback=''),
'cse.remote.root' : config.get('cse.remote', 'root', fallback='/'),
'cse.remote.cseid' : config.get('cse.remote', 'cseid', fallback=''),
'cse.remote.originator' : config.get('cse.remote', 'originator', fallback='CAdmin'),
'cse.remote.checkInterval' : config.getint('cse.remote', 'checkInterval', fallback=30), # Seconds
#
# Statistics
#
'cse.statistics.writeIntervall' : config.getint('cse.statistics', 'writeIntervall', fallback=60), # Seconds
#
# Defaults for Container Resources
#
'cse.cnt.mni' : config.getint('cse.resource.cnt', 'mni', fallback=10),
'cse.cnt.mbs' : config.getint('cse.resource.cnt', 'mbs', fallback=10000),
#
# Defaults for Access Control Policies
#
'cse.acp.pv.acop' : config.getint('cse.resource.acp', 'permission', fallback=63),
'cse.acp.pvs.acop' : config.getint('cse.resource.acp', 'selfPermission', fallback=51),
'cse.acp.addAdminOrignator' : config.getboolean('cse.resource.acp', 'addAdminOrignator', fallback=True),
#
# Defaults for Application Entities
#
'cse.ae.createACP' : config.getboolean('cse.resource.ae', 'createACP', fallback=True),
'cse.ae.removeACP' : config.getboolean('cse.resource.ae', 'removeACP', fallback=True),
#
# Web UI
#
'cse.webui.enable' : config.getboolean('cse.webui', 'enable', fallback=True),
'cse.webui.root' : config.get('cse.webui', 'root', fallback='/webui'),
#
# App: Statistics AE
#
'app.statistics.enable' : config.getboolean('app.statistics', 'enable', fallback=True),
'app.statistics.aeRN' : config.get('app.statistics', 'aeRN', fallback='statistics'),
'app.statistics.aeAPI' : config.get('app.statistics', 'aeAPI', fallback='ae-statistics'),
'app.statistics.fcntRN' : config.get('app.statistics', 'fcntRN', fallback='statistics'),
'app.statistics.fcntCND' : config.get('app.statistics', 'fcntCND', fallback='acme.statistics'),
'app.statistics.fcntType' : config.get('app.statistics', 'fcntType', fallback='acme:csest'),
'app.statistics.originator' : config.get('app.statistics', 'originator', fallback='C'),
'app.statistics.intervall' : config.getint('app.statistics', 'intervall', fallback=10), # seconds
#
# App: CSE Node
#
'app.csenode.enable' : config.getboolean('app.csenode', 'enable', fallback=True),
'app.csenode.nodeRN' : config.get('app.csenode', 'nodeRN', fallback='cse-node'),
'app.csenode.nodeID' : config.get('app.csenode', 'nodeID', fallback='cse-node'),
'app.csenode.originator' : config.get('app.csenode', 'originator', fallback='CAdmin'),
'app.csenode.batteryLowLevel' : config.getint('app.csenode', 'batteryLowLevel', fallback=20), # percent
'app.csenode.batteryChargedLevel' : config.getint('app.csenode', 'batteryChargedLevel', fallback=100), # percent
'app.csenode.intervall' : config.getint('app.csenode', 'updateIntervall', fallback=60), # seconds
}
except Exception as e: # abort when finding errors in the configuration
print('Error in configuration file: %s - %s' % (argsConfigfile, str(e)))
return False
# Read id-mappings
if config.has_section('server.http.mappings'):
Configuration._configuration['server.http.mappings'] = config.items('server.http.mappings')
#print(config.items('server.http.mappings'))
# Some clean-ups and overrides
# CSE type
cseType = Configuration._configuration['cse.type'].lower()
if cseType == 'asn':
Configuration._configuration['cse.type'] = C.cseTypeASN
elif cseType == 'mn':
Configuration._configuration['cse.type'] = C.cseTypeMN
else:
Configuration._configuration['cse.type'] = C.cseTypeIN
# Loglevel from command line
logLevel = Configuration._configuration['logging.level'].lower()
logLevel = argsLoglevel if argsLoglevel is not None else logLevel # command line args override config
if logLevel == 'off':
Configuration._configuration['logging.enable'] = False
Configuration._configuration['logging.level'] = logging.DEBUG
elif logLevel == 'info':
Configuration._configuration['logging.level'] = logging.INFO
elif logLevel == 'warn':
Configuration._configuration['logging.level'] = logging.WARNING
elif logLevel == 'error':
Configuration._configuration['logging.level'] = logging.ERROR
else:
Configuration._configuration['logging.level'] = logging.DEBUG
# Override DB reset from command line
if argsDBReset is True:
Configuration._configuration['db.resetAtStartup'] = True
# Override DB storage mode from command line
if argsDBStorageMode is not None:
Configuration._configuration['db.inMemory'] = argsDBStorageMode == 'memory'
# Override import directory from command line
if argsImportDirectory is not None:
Configuration._configuration['cse.resourcesPath'] = argsImportDirectory
# Override app enablement
if argsAppsEnabled is not None:
Configuration._configuration['cse.enableApplications'] = argsAppsEnabled
return True
@staticmethod
def print():
result = 'Configuration:\n'
for kv in Configuration._configuration.items():
result += ' %s = %s\n' % kv
return result
@staticmethod
def all():
return Configuration._configuration
@staticmethod
def get(key):
if not Configuration.has(key):
return None
return Configuration._configuration[key]
@staticmethod
def set(key, value):
Configuration._configuration[key] = value
@staticmethod
def has(key):
return key in Configuration._configuration | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/acme/Configuration.py | Configuration.py |
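# A minimal example configuration file (sketch; values are illustrative, and any
# omitted setting falls back to the defaults above):
#
#   [server.http]
#   port=8080
#   root=/
#
#   [database]
#   path=./data
#   inMemory=False
#
#   [logging]
#   level=debug
#
#   [cse]
#   type=IN
#   originator=CAdmin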
import atexit, argparse, os, threading, time
from Constants import Constants as C
from AnnouncementManager import AnnouncementManager
from Configuration import Configuration, defaultConfigFile
from Dispatcher import Dispatcher
from EventManager import EventManager
from GroupManager import GroupManager
from HttpServer import HttpServer
from Importer import Importer
from Logging import Logging
from NotificationManager import NotificationManager
from RegistrationManager import RegistrationManager
from RemoteCSEManager import RemoteCSEManager
from SecurityManager import SecurityManager
from Statistics import Statistics
from Storage import Storage
from AEStatistics import AEStatistics
from CSENode import CSENode
# singleton main components. These variables will hold all the various manager
# components that are used throughout the CSE implementation.
announce = None
dispatcher = None
event = None
group = None
httpServer = None
notification = None
registration = None
remote = None
security = None
statistics = None
storage = None
rootDirectory = None
aeCSENode = None
aeStatistics = None
appsStarted = False
aeStartupDelay = 5 # seconds
# TODO make AE registering a bit more generic
##############################################################################
#def startup(args=None, configfile=None, resetdb=None, loglevel=None):
def startup(args, **kwargs):
global announce, dispatcher, group, httpServer, notification, registration, remote, security, statistics, storage, event
global rootDirectory
global aeStatistics
rootDirectory = os.getcwd() # get the root directory
# Handle command line arguments and load the configuration
if args is None:
args = argparse.Namespace() # In case args is None create a new args object and populate it with defaults
args.configfile = defaultConfigFile
args.dbreset = False # Note: Configuration.init() reads 'dbreset', not 'resetdb'
args.dbstoragemode = None
args.importdirectory = None
args.appsenabled = None
args.loglevel = None
for key, value in kwargs.items():
args.__setattr__(key, value)
if not Configuration.init(args):
return
# init Logging
Logging.init()
Logging.log('============')
Logging.log('Starting CSE')
Logging.log('CSE-Type: %s' % C.cseTypes[Configuration.get('cse.type')])
Logging.log(Configuration.print())
# Initialize the resource storage
storage = Storage()
# Initialize the event manager
event = EventManager()
# Initialize the statistics system
statistics = Statistics()
# Initialize the registration manager
registration = RegistrationManager()
# Initialize the resource dispatcher
dispatcher = Dispatcher()
# Initialize the security manager
security = SecurityManager()
# Initialize the HTTP server
httpServer = HttpServer()
# Initialize the notification manager
notification = NotificationManager()
# Initialize the announcement manager
announce = AnnouncementManager()
# Initialize the group manager
group = GroupManager()
# Import a default set of resources, e.g. the CSE, first ACP or resource structure
importer = Importer()
if not importer.importResources():
return
# Initialize the remote CSE manager
remote = RemoteCSEManager()
remote.start()
# Start AEs
startAppsDelayed() # the Apps are actually started after the CSE finished the startup
# Start the HTTP server
event.cseStartup()
Logging.log('CSE started')
httpServer.run() # This does NOT return
# Gracefully shutdown the CSE, e.g. when receiving a keyboard interrupt
@atexit.register
def shutdown():
if appsStarted:
stopApps()
if remote is not None:
remote.shutdown()
if group is not None:
group.shutdown()
if announce is not None:
announce.shutdown()
if notification is not None:
notification.shutdown()
if dispatcher is not None:
dispatcher.shutdown()
if security is not None:
security.shutdown()
if registration is not None:
registration.shutdown()
if statistics is not None:
statistics.shutdown()
if event is not None:
event.shutdown()
if storage is not None:
storage.shutdown()
# Delay starting the AEs in the backround. This is needed because the CSE
# has not yet started. This will be called when the cseStartup event is raised.
def startAppsDelayed():
event.addHandler(event.cseStartup, startApps)
def startApps():
global appsStarted, aeStatistics, aeCSENode
if not Configuration.get('cse.enableApplications'):
return
time.sleep(aeStartupDelay)
Logging.log('Starting Apps')
appsStarted = True
if Configuration.get('app.csenode.enable'):
aeCSENode = CSENode()
if Configuration.get('app.statistics.enable'):
aeStatistics = AEStatistics()
# Add more apps here
def stopApps():
global appsStarted
if appsStarted:
Logging.log('Stopping Apps')
appsStarted = False
if aeStatistics is not None:
aeStatistics.shutdown()
if aeCSENode is not None:
aeCSENode.shutdown() | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/acme/CSE.py | CSE.py |
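# Usage sketch (illustrative): start the CSE from a script. Keyword arguments
# are copied onto the args namespace and override the configuration file.
#
#   import CSE
#   CSE.startup(None, configfile='acme.ini', loglevel='info')  # blocks in httpServer.run()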
from Logging import Logging
from Constants import Constants as C
import CSE
from Configuration import Configuration
class SecurityManager(object):
def __init__(self):
Logging.log('SecurityManager initialized')
if Configuration.get('cse.enableACPChecks'):
Logging.log('ACP checking ENABLED')
else:
Logging.log('ACP checking DISABLED')
def shutdown(self):
Logging.log('SecurityManager shut down')
def hasAccess(self, originator, resource, requestedPermission, checkSelf=False, ty=None, isCreateRequest=False):
if not Configuration.get('cse.enableACPChecks'): # check or ignore the check
return True
# originator may be None or empty or C or S.
# That is okay if type is AE and this is a create request
if originator is None or len(originator) == 0 or originator in ['C', 'S']:
if ty is not None and ty == C.tAE and isCreateRequest:
Logging.logDebug("Empty originator for AE CREATE. OK.")
return True
# Check parameters
if resource is None:
Logging.logWarn("Resource must not be None")
return False
if requestedPermission is None or not (0 <= requestedPermission <= C.permALL):
Logging.logWarn("RequestedPermission must not be None, and between 0 and 63")
return False
Logging.logDebug("Checking permission for originator: %s, ri: %s, permission: %d, selfPrivileges: %r" % (originator, resource.ri, requestedPermission, checkSelf))
if resource.ty == C.tACP: # target is an ACP resource
if resource.checkSelfPermission(originator, requestedPermission):
Logging.logDebug('Permission granted')
return True
else: # target is not an ACP resource
if (acpi := resource.acpi) is None or len(acpi) == 0:
if resource.inheritACP:
(parentResource, _) = CSE.dispatcher.retrieveResource(resource.pi)
return self.hasAccess(originator, parentResource, requestedPermission, checkSelf)
Logging.logDebug("Missing acpi in resource")
return False
for a in acpi:
(acp, _) = CSE.dispatcher.retrieveResource(a)
if acp is None:
continue
if checkSelf: # forced check for self permissions
if acp.checkSelfPermission(originator, requestedPermission):
Logging.logDebug('Permission granted')
return True
else:
if acp.checkPermission(originator, requestedPermission):
Logging.logDebug('Permission granted')
return True
# no fitting permission identified
Logging.logDebug('Permission NOT granted')
return False | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/acme/SecurityManager.py | SecurityManager.py |
from Logging import Logging
from Constants import Constants as C
from Configuration import Configuration
import CSE, Utils
from resources import ACP
acpPrefix = 'acp_'
class RegistrationManager(object):
def __init__(self):
Logging.log('RegistrationManager initialized')
def shutdown(self):
Logging.log('RegistrationManager shut down')
#########################################################################
#
# Handle new resources in general
#
def checkResourceCreation(self, resource, originator, parentResource=None):
if resource.ty in [ C.tAE ]:
if (originator := self.handleAERegistration(resource, originator, parentResource)) is None:
return (originator, C.rcOK)
# Test and set creator attribute.
if (rc := self.handleCreator(resource, originator)) != C.rcOK:
return (None, rc)
return (originator, C.rcOK)
# Check for (wrongly) set creator attribute as well as assign it to allowed resources.
def handleCreator(self, resource, originator):
# Check whether cr is set. This is wrong
if resource.cr is not None:
Logging.logWarn('Setting "creator" attribute is not allowed.')
return C.rcBadRequest
# Set cr for some of the resource types
if resource.ty in C.tCreatorAllowed:
resource['cr'] = Configuration.get('cse.originator') if originator in ['C', 'S', '', None ] else originator
return C.rcOK
def checkResourceDeletion(self, resource, originator):
if resource.ty in [ C.tAE ]:
if not self.handleAEDeRegistration(resource):
return (False, originator)
return (True, originator)
#########################################################################
#
# Handle AE registration
#
def handleAERegistration(self, ae, originator, parentResource):
if originator == 'C':
originator = Utils.uniqueAEI('C')
elif originator == 'S':
originator = Utils.uniqueAEI('S')
elif originator is None or len(originator) == 0:
originator = Utils.uniqueAEI('S')
Logging.logDebug('Registering AE. aei: %s ' % originator)
# set the aei to the originator
ae['aei'] = originator
# Verify that parent is the CSEBase, else this is an error
if parentResource is None or parentResource.ty != C.tCSEBase:
return None
# Create an ACP for this AE-ID if there is none set
if Configuration.get("cse.ae.createACP"):
if ae.acpi is None or len(ae.acpi) == 0:
Logging.logDebug('Adding ACP for AE')
cseOriginator = Configuration.get('cse.originator')
acp = ACP.ACP(pi=parentResource.ri, rn=acpPrefix + ae.rn)
acp.addPermissionOriginator(originator)
acp.addPermissionOriginator(cseOriginator)
acp.setPermissionOperation(Configuration.get('cse.acp.pv.acop'))
acp.addSelfPermissionOriginator(cseOriginator)
acp.setSelfPermissionOperation(Configuration.get('cse.acp.pvs.acop'))
if not (res := self.checkResourceCreation(acp, originator, parentResource))[0]:
return None
CSE.dispatcher.createResource(acp, parentResource=parentResource, originator=originator)
# Set ACPI (anew)
ae['acpi'] = [ acp.ri ]
else:
ae['acpi'] = [ Configuration.get('cse.defaultACPI') ]
return originator
#
# Handle AE deregistration
#
def handleAEDeRegistration(self, resource):
# remove the before created ACP, if it exist
Logging.logDebug('DeRegistering AE. aei: %s' % resource.aei)
if Configuration.get("cse.ae.removeACP"):
Logging.logDebug('Removing ACP for AE')
acpi = '%s/%s%s' % (Configuration.get("cse.rn"), acpPrefix, resource.rn)
if (res := CSE.dispatcher.retrieveResource(acpi))[1] != C.rcOK:
Logging.logWarn('Could not find ACP: %s' % acpi)
return False
CSE.dispatcher.deleteResource(res[0])
return True | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/acme/RegistrationManager.py | RegistrationManager.py |
from Logging import Logging
from Configuration import Configuration
import CSE, Utils
import datetime
from threading import Lock
from helpers import BackgroundWorker
deletedResources = 'rmRes'
createdresources = 'crRes'
httpRetrieves = 'htRet'
httpCreates = 'htCre'
httpUpdates = 'htUpd'
httpDeletes = 'htDel'
logErrors = 'lgErr'
logWarnings = 'lgWrn'
cseStartUpTime = 'cseSU'
cseUpTime = 'cseUT'
resourceCount = 'ctRes'
# TODO startup, uptime, restartcount, errors, warnings
class Statistics(object):
def __init__(self):
# create lock
self.statLock = Lock()
# retrieve or create the statistics record
self.stats = self.setupStats()
# Start background worker to handle writing to the DB
Logging.log('Starting statistics DB thread')
self.worker = BackgroundWorker.BackgroundWorker(Configuration.get('cse.statistics.writeIntervall'), self.statisticsDBWorker)
self.worker.start()
# subscribe to various events
CSE.event.addHandler(CSE.event.createResource, self.handleCreateEvent)
CSE.event.addHandler(CSE.event.deleteResource, self.handleDeleteEvent)
CSE.event.addHandler(CSE.event.httpRetrieve, self.handleHttpRetrieveEvent)
CSE.event.addHandler(CSE.event.httpCreate, self.handleHttpCreateEvent)
CSE.event.addHandler(CSE.event.httpUpdate, self.handleHttpUpdateEvent)
CSE.event.addHandler(CSE.event.httpDelete, self.handleHttpDeleteEvent)
CSE.event.addHandler(CSE.event.cseStartup, self.handleCseStartup)
CSE.event.addHandler(CSE.event.logError, self.handleLogError)
CSE.event.addHandler(CSE.event.logWarning, self.handleLogWarning)
Logging.log('Statistics initialized')
def shutdown(self):
# Stop the worker
Logging.log('Stopping statistics DB thread')
self.worker.stop()
# One final write
self.storeDBStatistics()
Logging.log('Statistics shut down')
def setupStats(self):
result = self.retrieveDBStatistics()
if result is not None:
return result
return {
deletedResources : 0,
createdresources : 0,
httpRetrieves : 0,
httpCreates : 0,
httpUpdates : 0,
httpDeletes : 0,
cseStartUpTime : 0.0,
logErrors : 0,
logWarnings : 0
}
# Return stats
def getStats(self):
s = self.stats.copy()
# Calculate some stats
s[cseUpTime] = str(datetime.timedelta(seconds=int(datetime.datetime.utcnow().timestamp() - s[cseStartUpTime])))
s[cseStartUpTime] = Utils.toISO8601Date(s[cseStartUpTime])
s[resourceCount] = s[createdresources] - s[deletedResources]
return s
#########################################################################
#
# Event handlers
#
def handleCreateEvent(self, resource):
with self.statLock:
self.stats[createdresources] += 1
def handleDeleteEvent(self, resource):
with self.statLock:
self.stats[deletedResources] += 1
def handleHttpRetrieveEvent(self):
with self.statLock:
self.stats[httpRetrieves] += 1
def handleHttpCreateEvent(self):
with self.statLock:
self.stats[httpCreates] += 1
def handleHttpUpdateEvent(self):
with self.statLock:
self.stats[httpUpdates] += 1
def handleHttpDeleteEvent(self):
with self.statLock:
self.stats[httpDeletes] += 1
def handleCseStartup(self):
with self.statLock:
self.stats[cseStartUpTime] = datetime.datetime.utcnow().timestamp()
def handleLogError(self):
with self.statLock:
self.stats[logErrors] += 1
def handleLogWarning(self):
with self.statLock:
self.stats[logWarnings] += 1
#########################################################################
#
# Store statistics handling
# Called by the background worker
def statisticsDBWorker(self):
Logging.logDebug('Writing statistics DB')
try:
self.storeDBStatistics()
except Exception as e:
Logging.logErr('Exception: %s' % e)
return False
return True
def retrieveDBStatistics(self):
with self.statLock:
return CSE.storage.getStatistics()
def storeDBStatistics(self):
with self.statLock:
return CSE.storage.updateStatistics(self.stats) | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/acme/Statistics.py | Statistics.py |
from Logging import Logging
from Constants import Constants as C
from Configuration import Configuration
import Utils, CSE
import requests, json
# TODO: removal policy (e.g. unsuccessful tries)
# TODO: no async notifications yet, no batches etc
class NotificationManager(object):
def __init__(self):
Logging.log('NotificationManager initialized')
if Configuration.get('cse.enableNotifications'):
Logging.log('Notifications ENABLED')
else:
Logging.log('Notifications DISABLED')
def shutdown(self):
Logging.log('NotificationManager shut down')
def addSubscription(self, subscription):
if Configuration.get('cse.enableNotifications') is not True:
return False
Logging.logDebug('Adding subscription')
if self._getAndCheckNUS(subscription) is None: # verification requests happen here
return False
return CSE.storage.addSubscription(subscription)
def removeSubscription(self, subscription):
Logging.logDebug('Removing subscription')
# Deletion notifications are only sent when notifications are enabled; the subscription itself is removed regardless
if Configuration.get('cse.enableNotifications'):
for nu in self._getNotificationURLs(subscription.nu):
if not self._sendDeletionNotification(nu, subscription):
Logging.logDebug('Deletion request failed') # but ignore the error
return CSE.storage.removeSubscription(subscription)
def updateSubscription(self, subscription):
Logging.logDebug('Updating subscription')
previousSub = CSE.storage.getSubscription(subscription.ri)
if self._getAndCheckNUS(subscription, previousSub['nus']) is None: # verification/delete requests happen here
return False
return CSE.storage.updateSubscription(subscription)
def checkSubscriptions(self, resource, reason, childResource=None):
if Configuration.get('cse.enableNotifications') is not True:
return
ri = resource.ri
subs = CSE.storage.getSubscriptionsForParent(ri)
if subs is None or len(subs) == 0:
return
Logging.logDebug('Checking subscription for: %s, reason: %d' % (ri, reason))
for sub in subs:
# Prevent own notifications for subscriptions
if childResource is not None and \
sub['ri'] == childResource.ri and \
reason in [C.netCreateDirectChild, C.netDeleteDirectChild]:
continue
if reason not in sub['net']: # check whether reason is actually included in the subscription
continue
if reason in [C.netCreateDirectChild, C.netDeleteDirectChild]: # reasons for child resources
for nu in self._getNotificationURLs(sub['nus']):
if not self._sendNotification(sub, nu, reason, childResource):
pass
else: # all other reasons that target the resource
for nu in self._getNotificationURLs(sub['nus']):
if not self._sendNotification(sub, nu, reason, resource):
pass
#########################################################################
# Return resolved notification URLs, so also POA from referenced AE's etc
def _getNotificationURLs(self, nus):
result = []
for nu in nus:
# check if it is a direct URL
if Utils.isURL(nu):
result.append(nu)
else:
(r, _) = CSE.dispatcher.retrieveResource(nu)
if r is None:
continue
if not CSE.security.hasAccess('', r, C.permNOTIFY): # check whether AE/CSE may receive Notifications
continue
if (poa := r['poa']) is not None and isinstance(poa, list): #TODO? check whether AE or CSEBase
result += poa
return result
def _getAndCheckNUS(self, subscription, previousNus=None):
newNus = self._getNotificationURLs(subscription['nu'])
# notify removed nus (deletion notification)
if previousNus is not None:
for nu in previousNus:
if nu not in newNus:
if not self._sendDeletionNotification(nu, subscription):
Logging.logDebug('Deletion request failed') # but ignore the error
# notify new nus (verification request)
for nu in newNus:
if previousNus is None or (previousNus and nu not in previousNus):
if not self._sendVerificationRequest(nu, subscription):
Logging.logDebug('Verification request failed: %s' % nu)
return None
return newNus
#########################################################################
_verificationRequest = {
'm2m:sgn' : {
'vrq' : True,
'sur' : ''
}
}
def _sendVerificationRequest(self, nu, subscription):
Logging.logDebug('Sending verification request to: %s' % nu)
return self._sendRequest(nu, subscription['ri'], self._verificationRequest)
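	# Example (illustrative; the exact sur format depends on Utils.fullRI()): for a
	# subscription with ri 'sub1' on a CSE with csi 'id-in' the serialized
	# verification request would look similar to:
	# { "m2m:sgn" : { "vrq" : true, "sur" : "/id-in/sub1" } }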
_deletionNotification = {
'm2m:sgn' : {
'sud' : True,
'sur' : ''
}
}
def _sendDeletionNotification(self, nu, subscription):
Logging.logDebug('Sending deletion notification to: %s' % nu)
return self._sendRequest(nu, subscription['ri'], self._deletionNotification)
_notificationRequest = {
'm2m:sgn' : {
'nev' : {
'rep' : {},
'net' : 0
},
'sur' : ''
}
}
def _sendNotification(self, subscription, nu, reason, resource):
Logging.logDebug('Sending notification to: %s, reason: %d' % (nu, reason))
return self._sendRequest(nu, subscription['ri'], self._notificationRequest, reason, resource)
def _sendRequest(self, nu, ri, jsn, reason=None, resource=None):
Utils.setXPath(jsn, 'm2m:sgn/sur', Utils.fullRI(ri))
if reason is not None:
Utils.setXPath(jsn, 'm2m:sgn/nev/net', reason)
if resource is not None:
Utils.setXPath(jsn, 'm2m:sgn/nev/rep', resource.asJSON())
(_, rc) = CSE.httpServer.sendCreateRequest(nu, Configuration.get('cse.csi'), data=json.dumps(jsn))
return rc in [C.rcOK] | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/acme/NotificationManager.py | NotificationManager.py |
from flask import Flask, request, make_response
import flask
from Configuration import Configuration
from Constants import Constants as C
import CSE, Utils
from Logging import Logging, RedirectHandler
from resources.Resource import Resource
import json, requests, logging, os
from werkzeug.serving import WSGIRequestHandler
class HttpServer(object):
def __init__(self):
		# Initialize the http server.
		# Meaningful defaults are provided automatically for anything not configured.
self.flaskApp = Flask(Configuration.get('cse.csi'))
self.rootPath = Configuration.get('http.root')
Logging.log('Registering http server root at: %s' % self.rootPath)
while self.rootPath.endswith('/'):
self.rootPath = self.rootPath[:-1]
# Add endpoints
# self.addEndpoint(self.rootPath + '/', handler=self.handleGET, methods=['GET'])
self.addEndpoint(self.rootPath + '/<path:path>', handler=self.handleGET, methods=['GET'])
# self.addEndpoint(self.rootPath + '/', handler=self.handlePOST, methods=['POST'])
self.addEndpoint(self.rootPath + '/<path:path>', handler=self.handlePOST, methods=['POST'])
# self.addEndpoint(self.rootPath + '/', handler=self.handlePUT, methods=['PUT'])
self.addEndpoint(self.rootPath + '/<path:path>', handler=self.handlePUT, methods=['PUT'])
# self.addEndpoint(self.rootPath + '/', handler=self.handleDELETE, methods=['DELETE'])
self.addEndpoint(self.rootPath + '/<path:path>', handler=self.handleDELETE, methods=['DELETE'])
# Register the endpoint for the web UI
if Configuration.get('cse.webui.enable'):
self.webuiRoot = Configuration.get('cse.webui.root')
self.webuiDirectory = '%s/webui' % CSE.rootDirectory
Logging.log('Registering web ui at: %s, serving from %s' % (self.webuiRoot, self.webuiDirectory))
self.addEndpoint(self.webuiRoot, handler=self.handleWebUIGET, methods=['GET'])
self.addEndpoint(self.webuiRoot + '/<path:path>', handler=self.handleWebUIGET, methods=['GET'])
self.addEndpoint('/', handler=self.redirectRoot, methods=['GET'])
# Add mapping / macro endpoints
self.mappings = {}
if (mappings := Configuration.get('server.http.mappings')) is not None:
# mappings is a list of tuples
for (k, v) in mappings:
Logging.log('Registering mapping: %s%s -> %s%s' % (self.rootPath, k, self.rootPath, v))
self.addEndpoint(self.rootPath + k, handler=self.requestRedirect, methods=['GET', 'POST', 'PUT', 'DELETE'])
self.mappings = dict(mappings)
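			# Example (illustrative): a configured mapping ('/myapp', '/cse-in/ae1')
			# registers the endpoint <http.root>/myapp and answers requests to it
			# with a 307 redirect to the mapped target (see requestRedirect() below).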
def run(self):
# Redirect the http server (Flask) log output to the CSE logs
werkzeugLog = logging.getLogger('werkzeug')
werkzeugLog.addHandler(RedirectHandler("httpServer"))
WSGIRequestHandler.protocol_version = "HTTP/1.1"
# Run the http server. This runs forever.
		# The server may need to run single-threaded, since some of the underlying
		# components (e.g. TinyDB) may run into problems otherwise.
if self.flaskApp is not None:
try:
self.flaskApp.run(host=Configuration.get('http.listenIF'),
port=Configuration.get('http.port'),
threaded=Configuration.get('http.multiThread'))
except Exception as e:
Logging.logErr(e)
def addEndpoint(self, endpoint=None, endpoint_name=None, handler=None, methods=None):
self.flaskApp.add_url_rule(endpoint, endpoint_name, handler, methods=methods)
def handleGET(self, path=None):
Logging.logDebug('==> Retrieve: %s' % request.path)
Logging.logDebug('Headers: \n' + str(request.headers))
CSE.event.httpRetrieve()
(resource, rc) = CSE.dispatcher.retrieveRequest(request)
return self._prepareResponse(request, resource, rc)
def handlePOST(self, path=None):
Logging.logDebug('==> Create: %s' % request.path)
Logging.logDebug('Headers: \n' + str(request.headers))
Logging.logDebug('Body: \n' + str(request.data))
CSE.event.httpCreate()
(resource, rc) = CSE.dispatcher.createRequest(request)
return self._prepareResponse(request, resource, rc)
def handlePUT(self, path=None):
Logging.logDebug('==> Update: %s' % request.path)
Logging.logDebug('Headers: \n' + str(request.headers))
Logging.logDebug('Body: \n' + str(request.data))
CSE.event.httpUpdate()
(resource, rc) = CSE.dispatcher.updateRequest(request)
return self._prepareResponse(request, resource, rc)
def handleDELETE(self, path=None):
Logging.logDebug('==> Delete: %s' % request.path)
Logging.logDebug('Headers: \n' + str(request.headers))
CSE.event.httpDelete()
(resource, rc) = CSE.dispatcher.deleteRequest(request)
return self._prepareResponse(request, resource, rc)
#########################################################################
# Handle requests to mapped paths
def requestRedirect(self):
path = request.path[len(self.rootPath):] if request.path.startswith(self.rootPath) else request.path
if path in self.mappings:
Logging.logDebug('==> Redirecting to: %s' % path)
CSE.event.httpRedirect()
return flask.redirect(self.mappings[path], code=307)
return '', 404
#########################################################################
# Redirect request to / to webui
def redirectRoot(self):
return flask.redirect(Configuration.get('cse.webui.root'), code=302)
def handleWebUIGET(self, path=None):
		# security check whether the requested path is located under the web root
		if not os.path.realpath(CSE.rootDirectory + request.path).startswith(CSE.rootDirectory):
			return '', 404
# Redirect to index file. Also include base / cse RI
		if path is None or len(path) == 0 or (path.endswith('index.html') and len(request.args) != 1):
return flask.redirect('%s/index.html?ri=/%s' % (self.webuiRoot, Configuration.get('cse.ri')), code=302)
else:
filename = '%s/%s' % (self.webuiDirectory, path) # return any file in the web directory
try:
return flask.send_file(filename)
except Exception as e:
flask.abort(404)
#########################################################################
#
# Send various types of HTTP requests
#
def sendRetrieveRequest(self, url, originator):
return self.sendRequest(requests.get, url, originator)
def sendCreateRequest(self, url, originator, ty=None, data=None):
return self.sendRequest(requests.post, url, originator, ty, data)
def sendUpdateRequest(self, url, originator, data):
return self.sendRequest(requests.put, url, originator, data=data)
def sendDeleteRequest(self, url, originator):
return self.sendRequest(requests.delete, url, originator)
def sendRequest(self, method, url, originator, ty=None, data=None, ct='application/json'):
headers = { 'Content-Type' : '%s%s' % (ct, ';ty=%d' % ty if ty is not None else ''),
'X-M2M-Origin' : originator,
'X-M2M-RI' : Utils.uniqueRI()
}
try:
r = method(url, data=data, headers=headers)
except Exception as e:
Logging.logWarn('Failed to send request: %s' % str(e))
return (None, C.rcTargetNotReachable)
rc = int(r.headers['X-M2M-RSC']) if 'X-M2M-RSC' in r.headers else C.rcInternalServerError
return (r.json() if len(r.content) > 0 else None, rc)
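	# Example (illustrative): a CREATE request for an <AE> (ty=2) from originator
	# 'CAdmin' is sent with headers like:
	#   Content-Type: application/json;ty=2
	#   X-M2M-Origin: CAdmin
	#   X-M2M-RI:     <a unique request identifier>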
#########################################################################
def _prepareResponse(self, request, resource, returnCode):
if resource is None or returnCode == C.rcDeleted:
r = ''
elif isinstance(resource, dict):
r = json.dumps(resource)
else:
if (r := resource.asJSON() if isinstance(resource, Resource) else resource) is None:
r = ''
returnCode = C.rcNotFound
Logging.logDebug('Response: \n' + str(r))
resp = make_response(r)
# headers
resp.headers['X-M2M-RSC'] = str(returnCode)
if 'X-M2M-RI' in request.headers:
resp.headers['X-M2M-RI'] = request.headers['X-M2M-RI']
if 'X-M2M-RVI' in request.headers:
resp.headers['X-M2M-RVI'] = request.headers['X-M2M-RVI']
resp.status_code = self._statusCode(returnCode)
resp.content_type = C.hfvContentType
return resp
#
# Mapping of oneM2M return codes to http status codes
#
_codes = {
C.rcOK : 200, # OK
C.rcDeleted : 200, # DELETED
C.rcUpdated : 200, # UPDATED
C.rcCreated : 201, # CREATED
C.rcBadRequest : 400, # BAD REQUEST
C.rcContentsUnacceptable : 400, # NOT ACCEPTABLE
C.rcInsufficientArguments : 400, # INSUFFICIENT ARGUMENTS
C.rcInvalidArguments : 400, # INVALID ARGUMENTS
C.rcMaxNumberOfMemberExceeded : 400, # MAX NUMBER OF MEMBER EXCEEDED
C.rcGroupMemberTypeInconsistent : 400, # GROUP MEMBER TYPE INCONSISTENT
C.rcOriginatorHasNoPrivilege : 403, # ORIGINATOR HAS NO PRIVILEGE
C.rcInvalidChildResourceType : 403, # INVALID CHILD RESOURCE TYPE
C.rcTargetNotReachable : 403, # TARGET NOT REACHABLE
		C.rcAlreadyExists : 403, # ALREADY EXISTS
C.rcTargetNotSubscribable : 403, # TARGET NOT SUBSCRIBABLE
C.rcReceiverHasNoPrivileges : 403, # RECEIVER HAS NO PRIVILEGE
C.rcNotFound : 404, # NOT FOUND
C.rcOperationNotAllowed : 405, # OPERATION NOT ALLOWED
C.rcInternalServerError : 500, # INTERNAL SERVER ERROR
C.rcNotImplemented : 501, # NOT IMPLEMENTED
}
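	# Example (illustrative): a failed retrieve yields C.rcNotFound, which
	# _statusCode() below maps to HTTP status 404, while the original oneM2M
	# result code is still returned in the X-M2M-RSC header (see _prepareResponse()).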
	def _statusCode(self, sc):
		# map unknown result codes to HTTP 500 instead of raising a KeyError
		return self._codes.get(sc, 500) | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/acme/HttpServer.py | HttpServer.py
from tinydb import TinyDB, Query, where
from tinydb.storages import MemoryStorage
from tinydb.operations import delete
import os, json, re
from threading import Lock
from Configuration import Configuration
from Constants import Constants as C
from Logging import Logging
from resources.Resource import Resource
import Utils
class Storage(object):
def __init__(self):
# create data directory
path = None
if not Configuration.get('db.inMemory'):
if Configuration.has('db.path'):
path = Configuration.get('db.path')
Logging.log('Using data directory: ' + path)
os.makedirs(path, exist_ok=True)
else:
Logging.logErr('db.path not set')
raise RuntimeError('db.path not set')
self.db = TinyDBBinding(path)
self.db.openDB()
# Reset dbs?
if Configuration.get('db.resetAtStartup') is True:
self.db.purgeDB()
Logging.log('Storage initialized')
def shutdown(self):
self.db.closeDB()
Logging.log('Storage shut down')
#########################################################################
##
## Resources
##
def createResource(self, resource, overwrite=True):
if resource is None:
Logging.logErr('resource is None')
raise RuntimeError('resource is None')
ri = resource.ri
# Logging.logDebug('Adding resource (ty: %d, ri: %s, rn: %s)' % (resource['ty'], resource['ri'], resource['rn']))
did = None
srn = resource.__srn__
if overwrite:
Logging.logDebug('Resource enforced overwrite')
self.db.upsertResource(resource)
else:
			# if not self.db.hasResource(ri=ri) and not self.db.hasResource(srn=srn):	# only if the resource doesn't exist yet
			if not self.hasResource(ri, srn): # only if the resource doesn't exist yet
self.db.insertResource(resource)
else:
Logging.logWarn('Resource already exists (Skipping)')
return (False, C.rcAlreadyExists)
# Add path to identifiers db
self.db.insertIdentifier(resource, ri, srn)
return (True, C.rcCreated)
# Check whether a resource with either the ri or the srn already exists
def hasResource(self, ri, srn):
return self.db.hasResource(ri=ri) or self.db.hasResource(srn=srn)
# Return a resource via different addressing methods
def retrieveResource(self, ri=None, csi=None, srn=None, ty=-1):
resources = []
if ri is not None: # get a resource by its ri
# Logging.logDebug('Retrieving resource ri: %s' % ri)
resources = self.db.searchResources(ri=ri)
elif srn is not None: # get a resource by its structured rn
# Logging.logDebug('Retrieving resource srn: %s' % srn)
# get the ri via the srn from the identifers table
resources = self.db.searchResources(srn=srn)
elif csi is not None: # get the CSE by its csi
# Logging.logDebug('Retrieving resource csi: %s' % csi)
resources = self.db.searchResources(csi=csi)
elif ty != -1: # get all resources of a specific type
# Logging.logDebug('Retrieving all resources ty: %d' % ty)
return self.db.searchResources(ty=ty)
return Utils.resourceFromJSON(resources[0].copy()) if len(resources) == 1 else None
def discoverResources(self, rootResource, handling, conditions, attributes, fo):
# preparations
rootSRN = rootResource.__srn__
handling['__returned__'] = 0
handling['__matched__'] = 0
if 'lvl' in handling:
handling['__lvl__'] = rootSRN.count('/') + handling['lvl']
		# A bit of optimization: pre-calculate the total number of filter criteria once.
		# 'ty' and 'cty' are always lists, so each replaces its single dictionary key
		# in len(conditions) with the number of its entries (hence the two "- 1").
		# This length stays the same for the whole discovery run.
allLen = ((len(conditions) if conditions is not None else 0) +
(len(attributes) if attributes is not None else 0) +
(len(conditions['ty']) if conditions is not None else 0) - 1 +
(len(conditions['cty']) if conditions is not None else 0) - 1
)
rs = self.db.discoverResources(lambda r: _testDiscovery(r,
rootSRN,
handling,
conditions,
attributes,
fo,
handling['lim'] if 'lim' in handling else None,
handling['ofst'] if 'ofst' in handling else None,
allLen))
# transform JSONs to resources
result = []
for r in rs:
result.append(Utils.resourceFromJSON(r))
# sort resources by type and then by lowercase rn
if Configuration.get('cse.sortDiscoveredResources'):
result.sort(key=lambda x:(x.ty, x.rn.lower()))
return result
def updateResource(self, resource):
if resource is None:
Logging.logErr('resource is None')
raise RuntimeError('resource is None')
ri = resource.ri
# Logging.logDebug('Updating resource (ty: %d, ri: %s, rn: %s)' % (resource['ty'], ri, resource['rn']))
resource = self.db.updateResource(resource)
return (resource, C.rcUpdated)
def deleteResource(self, resource):
if resource is None:
Logging.logErr('resource is None')
raise RuntimeError('resource is None')
# Logging.logDebug('Removing resource (ty: %d, ri: %s, rn: %s)' % (resource['ty'], ri, resource['rn']))
self.db.deleteResource(resource)
self.db.deleteIdentifier(resource)
return (True, C.rcDeleted)
def subResources(self, pi, ty=None):
rs = self.db.searchResources(pi=pi, ty=ty)
# if ty is not None:
# rs = self.tabResources.search((Query().pi == pi) & (Query().ty == ty))
# else:
# rs = self.tabResources.search(Query().pi == pi)
result = []
for r in rs:
result.append(Utils.resourceFromJSON(r.copy()))
return result
def countResources(self):
return self.db.countResources()
def identifier(self, ri):
return self.db.searchIdentifiers(ri=ri)
def searchByTypeFieldValue(self, ty, field, value):
"""Search and return all resources of a specific type and a value in a field,
and return them in an array."""
result = []
for j in self.db.searchByTypeFieldValue(ty, field, value):
result.append(Utils.resourceFromJSON(j))
return result
#########################################################################
##
## Subscriptions
##
def getSubscription(self, ri):
# Logging.logDebug('Retrieving subscription: %s' % ri)
subs = self.db.searchSubscriptions(ri=ri)
if subs is None or len(subs) != 1:
return None
return subs[0]
def getSubscriptionsForParent(self, pi):
# Logging.logDebug('Retrieving subscriptions for parent: %s' % pi)
return self.db.searchSubscriptions(pi=pi)
def addSubscription(self, subscription):
# Logging.logDebug('Adding subscription: %s' % ri)
return self.db.upsertSubscription(subscription)
def removeSubscription(self, subscription):
# Logging.logDebug('Removing subscription: %s' % subscription.ri)
return self.db.removeSubscription(subscription)
def updateSubscription(self, subscription):
# Logging.logDebug('Updating subscription: %s' % ri)
return self.db.upsertSubscription(subscription)
#########################################################################
##
## Statistics
##
def getStatistics(self):
return self.db.searchStatistics()
def updateStatistics(self, stats):
return self.db.upsertStatistics(stats)
#########################################################################
##
## App Support
##
def getAppData(self, id):
return self.db.searchAppData(id)
def updateAppData(self, data):
return self.db.upsertAppData(data)
def removeAppData(self, data):
return self.db.removeData(data)
#########################################################################
##
## internal utilities
##
# handler function for discovery search and matching resources
def _testDiscovery(r, rootSRN, handling, conditions, attributes, fo, lim, ofst, allLen):
# check limits
# TinyDB doesn't support pagination. So we need to implement it here. See also offset below.
if lim is not None and handling['__returned__'] >= lim:
return False
# check for SRN first
# Add / to the "startswith" check to terminate the search string
	if (srn := r['__srn__']) is None or rootSRN.count('/') >= srn.count('/') or not srn.startswith(rootSRN+'/') or srn == rootSRN:
return False
# Ignore virtual resources TODO: correct?
# if (ty := r.get('ty')) and ty in C.tVirtualResources:
# return False
ty = r.get('ty')
# ignore some resource types
if ty in [ C.tGRP_FOPT ]:
return False
# check level
if (h_lvl := handling.get('__lvl__')) is not None and srn.count('/') > h_lvl:
return False
# check conditions
if conditions is not None:
found = 0
# found += 1 if (c_ty := conditions.get('ty')) is not None and (str(ty) == c_ty) else 0
if (ct := r.get('ct')) is not None:
found += 1 if (c_crb := conditions.get('crb')) is not None and (ct < c_crb) else 0
found += 1 if (c_cra := conditions.get('cra')) is not None and (ct > c_cra) else 0
if (lt := r.get('lt')) is not None:
found += 1 if (c_ms := conditions.get('ms')) is not None and (lt > c_ms) else 0
found += 1 if (c_us := conditions.get('us')) is not None and (lt < c_us) else 0
		if (st := r.get('st')) is not None:	# compare stateTags numerically
			found += 1 if (c_sts := conditions.get('sts')) is not None and (int(st) < int(c_sts)) else 0
			found += 1 if (c_stb := conditions.get('stb')) is not None and (int(st) > int(c_stb)) else 0
if (et := r.get('et')) is not None:
found += 1 if (c_exb := conditions.get('exb')) is not None and (et < c_exb) else 0
found += 1 if (c_exa := conditions.get('exa')) is not None and (et > c_exa) else 0
# special handling of label-list
if (lbl := r.get('lbl')) is not None and (c_lbl := conditions.get('lbl')) is not None:
lbla = c_lbl.split()
fnd = 0
for l in lbla:
fnd += 1 if l in lbl else 0
found += 1 if (fo == 1 and fnd == len(lbl)) or (fo == 2 and fnd > 0) else 0 # fo==or -> find any label
if ty in [ C.tCIN, C.tFCNT ]: # special handling for CIN, FCNT
			if (cs := r.get('cs')) is not None:	# compare content sizes numerically
				found += 1 if (sza := conditions.get('sza')) is not None and (int(cs) >= int(sza)) else 0
				found += 1 if (szb := conditions.get('szb')) is not None and (int(cs) < int(szb)) else 0
if ty in [ C.tCIN ]: # special handling for CIN
if (cnf := r.get('cnf')) is not None:
found += 1 if cnf in conditions['cty'] else 0
# TODO labelsQuery
# TODO childLabels
# TODO parentLabels
# TODO childResourceType
# TODO parentResourceType
# Attributes:
if attributes is not None:
for name in attributes:
val = attributes[name]
if '*' in val:
val = val.replace('*', '.*')
found += 1 if (rval := r.get(name)) is not None and re.match(val, str(rval)) else 0
else:
found += 1 if (rval := r.get(name)) is not None and str(val) == str(rval) else 0
# TODO childAttribute
# TODO parentAttribute
# Test Types
found += 1 if str(ty) in conditions['ty'] else 0
		# Test whether the OR or AND criteria are fulfilled
if not ((fo == 2 and found > 0) or # OR and found something
(fo == 1 and allLen == found) # AND and found everything
):
return False
	# Check offset. Don't match if the offset is not yet reached
handling['__matched__'] += 1
if ofst is not None and handling['__matched__'] <= ofst:
return False
handling['__returned__'] += 1
return True
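# Example (illustrative): with conditions == { 'ty' : ['3'], 'cty' : [], 'lbl' : 'tag1' }
# and fo == 2 (OR), a resource matches when it either is a <container> (ty=3) or
# carries the label 'tag1'; with fo == 1 (AND) it must fulfill every given criterion
# (here: allLen == 2, so both the type and the label have to match).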
#########################################################################
#
# DB class that implements the TinyDB binding
#
# This class may be moved later to an own module.
class TinyDBBinding(object):
def __init__(self, path=None):
self.path = path
self.cacheSize = Configuration.get('db.cacheSize')
Logging.log('Cache Size: %s' % self.cacheSize)
# create transaction locks
self.lockResources = Lock()
self.lockIdentifiers = Lock()
self.lockSubscriptions = Lock()
self.lockStatistics = Lock()
self.lockAppData = Lock()
def openDB(self):
if Configuration.get('db.inMemory'):
Logging.log('DB in memory')
self.dbResources = TinyDB(storage=MemoryStorage)
self.dbIdentifiers = TinyDB(storage=MemoryStorage)
self.dbSubscriptions = TinyDB(storage=MemoryStorage)
self.dbStatistics = TinyDB(storage=MemoryStorage)
self.dbAppData = TinyDB(storage=MemoryStorage)
else:
Logging.log('DB in file system')
self.dbResources = TinyDB(self.path + '/resources.json')
self.dbIdentifiers = TinyDB(self.path + '/identifiers.json')
self.dbSubscriptions = TinyDB(self.path + '/subscriptions.json')
self.dbStatistics = TinyDB(self.path + '/statistics.json')
self.dbAppData = TinyDB(self.path + '/appdata.json')
self.tabResources = self.dbResources.table('resources', cache_size=self.cacheSize)
self.tabIdentifiers = self.dbIdentifiers.table('identifiers', cache_size=self.cacheSize)
		self.tabSubscriptions = self.dbSubscriptions.table('subscriptions', cache_size=self.cacheSize)
self.tabStatistics = self.dbStatistics.table('statistics', cache_size=self.cacheSize)
self.tabAppData = self.dbAppData.table('appdata', cache_size=self.cacheSize)
def closeDB(self):
Logging.log('Closing DBs')
self.dbResources.close()
self.dbIdentifiers.close()
self.dbSubscriptions.close()
self.dbStatistics.close()
self.dbAppData.close()
def purgeDB(self):
Logging.log('Purging DBs')
self.tabResources.purge()
self.tabIdentifiers.purge()
self.tabSubscriptions.purge()
self.tabStatistics.purge()
self.tabAppData.purge()
#
# Resources
#
def insertResource(self, resource):
with self.lockResources:
self.tabResources.insert(resource.json)
def upsertResource(self, resource):
with self.lockResources:
self.tabResources.upsert(resource.json, Query().ri == resource.ri) # Update existing or insert new when overwriting
def updateResource(self, resource):
ri = resource.ri
with self.lockResources:
self.tabResources.update(resource.json, Query().ri == ri)
# remove nullified fields from db and resource
for k in list(resource.json):
if resource.json[k] is None:
self.tabResources.update(delete(k), Query().ri == ri)
del resource.json[k]
return resource
def deleteResource(self, resource):
with self.lockResources:
self.tabResources.remove(Query().ri == resource.ri)
def searchResources(self, ri=None, csi=None, srn=None, pi=None, ty=None):
# find the ri first and then try again recursively
if srn is not None:
if len((identifiers := self.searchIdentifiers(srn=srn))) == 1:
return self.searchResources(ri=identifiers[0]['ri'])
return []
with self.lockResources:
if ri is not None:
r = self.tabResources.search(Query().ri == ri)
elif csi is not None:
r = self.tabResources.search(Query().csi == csi)
elif pi is not None and ty is not None:
r = self.tabResources.search((Query().pi == pi) & (Query().ty == ty))
elif pi is not None:
r = self.tabResources.search(Query().pi == pi)
elif ty is not None:
r = self.tabResources.search(Query().ty == ty)
return r
def discoverResources(self, func):
with self.lockResources:
rs = self.tabResources.search(func)
return rs
def hasResource(self, ri=None, csi=None, srn=None, ty=None):
# find the ri first and then try again recursively
if srn is not None:
if len((identifiers := self.searchIdentifiers(srn=srn))) == 1:
return self.hasResource(ri=identifiers[0]['ri'])
ret = False
with self.lockResources:
if ri is not None:
ret = self.tabResources.contains(Query().ri == ri)
elif csi is not None:
ret = self.tabResources.contains(Query().csi == csi)
elif ty is not None:
ret = self.tabResources.contains(Query().ty == ty)
return ret
def countResources(self):
with self.lockResources:
result = len(self.tabResources)
return result
def searchByTypeFieldValue(self, ty, field, value):
"""Search and return all resources of a specific type and a value in a field,
and return them in an array."""
with self.lockResources:
result = self.tabResources.search((Query().ty == ty) & (where(field).any(value)))
return result
#
# Identifiers
#
def insertIdentifier(self, resource, ri, srn):
with self.lockIdentifiers:
self.tabIdentifiers.upsert(
# ri, rn, srn
{'ri' : ri, 'rn' : resource.rn, 'srn' : srn, 'ty' : resource.ty},
Query().ri == ri)
def deleteIdentifier(self, resource):
with self.lockIdentifiers:
self.tabIdentifiers.remove(Query().ri == resource.ri)
def searchIdentifiers(self, ri=None, srn=None):
with self.lockIdentifiers:
if srn is not None:
r = self.tabIdentifiers.search(Query().srn == srn)
elif ri is not None:
r = self.tabIdentifiers.search(Query().ri == ri)
return r
#
# Subscriptions
#
def searchSubscriptions(self, ri=None, pi=None):
subs = None
with self.lockSubscriptions:
if ri is not None:
subs = self.tabSubscriptions.search(Query().ri == ri)
if pi is not None:
subs = self.tabSubscriptions.search(Query().pi == pi)
return subs
def upsertSubscription(self, subscription):
ri = subscription.ri
with self.lockSubscriptions:
result = self.tabSubscriptions.upsert(
{ 'ri' : ri,
'pi' : subscription.pi,
'nct' : subscription.nct,
'net' : subscription['enc/net'],
'nus' : subscription.nu
},
Query().ri == ri)
return result is not None
def removeSubscription(self, subscription):
with self.lockSubscriptions:
result = self.tabSubscriptions.remove(Query().ri == subscription.ri)
return result
#
# Statistics
#
def searchStatistics(self):
stats = None
with self.lockStatistics:
stats = self.tabStatistics.get(doc_id=1)
return stats if stats is not None and len(stats) > 0 else None
def upsertStatistics(self, stats):
with self.lockStatistics:
if len(self.tabStatistics) > 0:
result = self.tabStatistics.update(stats, doc_ids=[1])
else:
result = self.tabStatistics.insert(stats)
return result is not None
#
# App Data
#
def searchAppData(self, id):
data = None
with self.lockAppData:
data = self.tabAppData.get(Query().id == id)
return data if data is not None and len(data) > 0 else None
def upsertAppData(self, data):
if 'id' not in data:
return None
with self.lockAppData:
if len(self.tabAppData) > 0:
result = self.tabAppData.update(data, Query().id == data['id'])
else:
result = self.tabAppData.insert(data)
return result is not None
def removeAppData(self, data):
if 'id' not in data:
return None
with self.lockAppData:
result = self.tabAppData.remove(Query().id == data['id'])
return result | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/acme/Storage.py | Storage.py |
from Logging import Logging
from Configuration import Configuration
from Constants import Constants as C
import CSE, Utils
class Dispatcher(object):
def __init__(self):
self.rootPath = Configuration.get('http.root')
self.enableTransit = Configuration.get('cse.enableTransitRequests')
Logging.log('Dispatcher initialized')
def shutdown(self):
Logging.log('Dispatcher shut down')
# The "xxxRequest" methods handle http requests while the "xxxResource"
# methods handle actions on the resources. Security/permission checking
# is done for requests, not on resource actions.
#
# Retrieve resources
#
def retrieveRequest(self, request):
(originator, _, _, _, _) = Utils.getRequestHeaders(request)
id = Utils.requestID(request, self.rootPath)
Logging.logDebug('ID: %s, originator: %s' % (id, originator))
# handle transit requests
if CSE.remote.isTransitID(id):
return CSE.remote.handleTransitRetrieveRequest(request, id, originator) if self.enableTransit else (None, C.rcOperationNotAllowed)
# handle fanoutPoint requests
if (fanoutPointResource := Utils.fanoutPointResource(id)) is not None and fanoutPointResource.ty == C.tGRP_FOPT:
Logging.logDebug('Redirecting request to fanout point: %s' % fanoutPointResource.__srn__)
return fanoutPointResource.retrieveRequest(request, id, originator)
# just a normal retrieve request
return self.handleRetrieveRequest(request, id, originator)
def handleRetrieveRequest(self, request, id, originator):
try:
attrs = self._getArguments(request)
fu = attrs.get('fu')
drt = attrs.get('drt')
handling = attrs.get('__handling__')
			conditions = attrs.get('__conditions__')
attributes = attrs.get('__attrs__')
fo = attrs.get('fo')
rcn = attrs.get('rcn')
except Exception as e:
return (None, C.rcInvalidArguments)
if fu == 1 and rcn != C.rcnAttributes: # discovery. rcn == Attributes is actually "normal retrieval"
Logging.logDebug('Discover resources (fu: %s, drt: %s, handling: %s, conditions: %s, resultContent: %d, attributes: %s)' % (fu, drt, handling, conditions, rcn, str(attributes)))
			if rcn not in [C.rcnAttributesAndChildResourceReferences, C.rcnChildResourceReferences, C.rcnChildResources, C.rcnAttributesAndChildResources]: # only allow these result content types for discovery
return (None, C.rcInvalidArguments)
# do discovery
(rs, _) = self.discoverResources(id, handling, conditions, attributes, fo)
if rs is not None:
# check and filter by ACP
allowedResources = []
for r in rs:
if CSE.security.hasAccess(originator, r, C.permDISCOVERY):
allowedResources.append(r)
if rcn == C.rcnChildResourceReferences: # child resource references
return (self._resourcesToURIList(allowedResources, drt), C.rcOK)
				# quite strange for discovery, since children might not be direct descendants...
elif rcn == C.rcnAttributesAndChildResourceReferences:
(resource, res) = self.retrieveResource(id)
if resource is None:
return (None, res)
					self._resourceTreeReferences(allowedResources, resource, drt) # this call adds attributes to the result resource
return (resource, C.rcOK)
# resource and child resources, full attributes
elif rcn == C.rcnAttributesAndChildResources:
(resource, res) = self.retrieveResource(id)
if resource is None:
return (None, res)
					self._childResourceTree(allowedResources, resource) # this call adds attributes to the result resource
return (resource, C.rcOK)
# direct child resources, NOT the root resource
elif rcn == C.rcnChildResources:
resource = { } # empty
self._resourceTreeJSON(allowedResources, resource)
return (resource, C.rcOK)
# return (self._childResources(allowedResources), C.rcOK)
return (None, C.rcNotFound)
elif fu == 2 or rcn == C.rcnAttributes: # normal retrieval
Logging.logDebug('Get resource: %s' % id)
(resource, res) = self.retrieveResource(id)
if resource is None:
return (None, res)
if not CSE.security.hasAccess(originator, resource, C.permRETRIEVE):
return (None, C.rcOriginatorHasNoPrivilege)
if rcn == C.rcnAttributes: # Just the resource & attributes
return (resource, res)
(rs, rc) = self.discoverResources(id, handling, rootResource=resource)
if rs is None:
return (None, rc)
# check and filter by ACP
result = []
for r in rs:
if CSE.security.hasAccess(originator, r, C.permRETRIEVE):
result.append(r)
# Handle more sophisticated result content types
if rcn == C.rcnAttributesAndChildResources:
				self._resourceTreeJSON(result, resource) # this call adds attributes to the result resource
return (resource, C.rcOK)
elif rcn == C.rcnAttributesAndChildResourceReferences:
				self._resourceTreeReferences(result, resource, drt) # this call adds attributes to the result resource
return (resource, C.rcOK)
elif rcn == C.rcnChildResourceReferences: # child resource references
return (self._resourcesToURIList(result, drt), C.rcOK)
return (None, C.rcInvalidArguments)
		# TODO check rcn. Allowed are only 1, 4, 5. 1 = as now; for 4 and 5 also check lim etc.
else:
return (None, C.rcInvalidArguments)
def retrieveResource(self, id):
Logging.logDebug('Retrieve resource: %s' % id)
if id is None:
return (None, C.rcNotFound)
oid = id
csi = Configuration.get('cse.csi')
if '/' in id:
# when the id is in the format <cse RI>/<resource RI>
if id.startswith(csi):
id = id[len(csi)+1:]
if not '/' in id:
return self.retrieveResource(id)
# elif id.startswith('-') or id.startswith('~'): # remove shortcut (== csi) (Also the ~ makes it om2m compatible)
if id.startswith('-') or id.startswith('~'): # remove shortcut (== csi) (Also the ~ makes it om2m compatible)
id = "%s/%s" % (csi, id[2:])
return self.retrieveResource(id)
# Check whether it is Unstructured-CSE-relativeResource-ID
s = id.split('/')
if len(s) == 2 and s[0] == Configuration.get('cse.ri'):
# Logging.logDebug('Resource via Unstructured-CSE-relativeResource-ID')
r = CSE.storage.retrieveResource(ri=s[1])
else:
# Assume it is a Structured-CSE-relativeResource-ID
# Logging.logDebug('Resource via Structured-CSE-relativeResource-ID')
r = CSE.storage.retrieveResource(srn=id)
else: # only the cseid or ri
if id == csi:
# SP-relative-CSE-ID
# Logging.logDebug('Resource via SP-relative-CSE-ID')
r = CSE.storage.retrieveResource(csi=id)
else:
# Unstructured-CSE-relativeResource-ID
# Logging.logDebug('Resource via Unstructured-CSE-relativeResource-ID')
r = CSE.storage.retrieveResource(ri=id)
if r is None: # special handling for CSE. ID could be ri or srn...
r = CSE.storage.retrieveResource(srn=id)
if r is not None:
return (r, C.rcOK)
Logging.logDebug('Resource not found: %s' % oid)
return (None, C.rcNotFound)
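	# Example (illustrative): an ID of the form '<csi>/<ri>' is reduced to the
	# unstructured ri, a leading '-' or '~' is expanded with the CSE's csi,
	# '<cse.ri>/...' is treated as an Unstructured-CSE-relativeResource-ID, and any
	# other path containing '/' is resolved as a structured resource name. IDs
	# without a '/' are looked up as unstructured resource IDs (with a fallback
	# to structured names for the CSE itself).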
def discoverResources(self, id, handling, conditions=None, attributes=None, fo=None, rootResource=None):
if rootResource is None:
(rootResource, _) = self.retrieveResource(id)
if rootResource is None:
return (None, C.rcNotFound)
return (CSE.storage.discoverResources(rootResource, handling, conditions, attributes, fo), C.rcOK)
#
# Add resources
#
def createRequest(self, request):
(originator, ct, ty, _, _) = Utils.getRequestHeaders(request)
id = Utils.requestID(request, self.rootPath)
Logging.logDebug('ID: %s, originator: %s' % (id, originator))
# handle transit requests
if CSE.remote.isTransitID(id):
return CSE.remote.handleTransitCreateRequest(request, id, originator, ty) if self.enableTransit else (None, C.rcOperationNotAllowed)
# handle fanoutPoint requests
if (fanoutPointResource := Utils.fanoutPointResource(id)) is not None and fanoutPointResource.ty == C.tGRP_FOPT:
Logging.logDebug('Redirecting request to fanout point: %s' % fanoutPointResource.__srn__)
return fanoutPointResource.createRequest(request, id, originator, ct, ty)
# just a normal create request
return self.handleCreateRequest(request, id, originator, ct, ty)
def handleCreateRequest(self, request, id, originator, ct, ty):
Logging.logDebug('Adding new resource')
		if ct is None or ty is None:
return (None, C.rcBadRequest)
# Check whether the target contains a fanoutPoint in between or as the target
# TODO: Is this called twice (here + in createRequest)?
if (fanoutPointResource := Utils.fanoutPointResource(id)) is not None:
Logging.logDebug('Redirecting request to fanout point: %s' % fanoutPointResource.__srn__)
return fanoutPointResource.createRequest(request, id, originator, ct, ty)
# Get parent resource and check permissions
(pr, res) = self.retrieveResource(id)
if pr is None:
Logging.log('Parent resource not found')
return (None, C.rcNotFound)
		if not CSE.security.hasAccess(originator, pr, C.permCREATE, ty=ty, isCreateRequest=True):
return (None, C.rcOriginatorHasNoPrivilege)
# Add new resource
#nr = resourceFromJSON(request.json, pi=pr['ri'], tpe=ty) # Add pi
if (nr := Utils.resourceFromJSON(request.json, pi=pr.ri, tpe=ty)) is None: # something wrong, perhaps wrong type
return (None, C.rcBadRequest)
# # determine and add the srn
# nr[nr._srn] = Utils.structuredPath(nr)
# check whether the resource already exists
if CSE.storage.hasResource(nr.ri, nr.__srn__):
Logging.logWarn('Resource already registered')
return (None, C.rcAlreadyExists)
# Check resource creation
if (res := CSE.registration.checkResourceCreation(nr, originator, pr))[1] != C.rcOK:
return (None, res[1])
originator = res[0]
return self.createResource(nr, pr, originator)
def createResource(self, resource, parentResource=None, originator=None):
Logging.logDebug('Adding resource ri: %s, type: %d' % (resource.ri, resource.ty))
if parentResource is not None:
Logging.logDebug('Parent ri: %s' % parentResource.ri)
if not parentResource.canHaveChild(resource):
Logging.logWarn('Invalid child resource type')
return (None, C.rcInvalidChildResourceType)
# if not already set: determine and add the srn
if resource.__srn__ is None:
resource[resource._srn] = Utils.structuredPath(resource)
# add the resource to storage
if (res := CSE.storage.createResource(resource, overwrite=False))[1] != C.rcCreated:
return (None, res[1])
# Activate the resource
# This is done *after* writing it to the DB, because in activate the resource might create or access other
# resources that will try to read the resource from the DB.
if not (res := resource.activate(originator))[0]: # activate the new resource
CSE.storage.deleteResource(resource)
return res
# Could be that we changed the resource in the activate, therefore write it again
if (res := CSE.storage.updateResource(resource))[0] is None:
CSE.storage.deleteResource(resource)
return res
if parentResource is not None:
parentResource.childAdded(resource, originator) # notify the parent resource
CSE.event.createResource(resource) # send a create event
return (resource, C.rcCreated) # everything is fine. resource created.
#
# Update resources
#
def updateRequest(self, request):
(originator, ct, _, _, _) = Utils.getRequestHeaders(request)
id = Utils.requestID(request, self.rootPath)
Logging.logDebug('ID: %s, originator: %s' % (id, originator))
# handle transit requests
if CSE.remote.isTransitID(id):
return CSE.remote.handleTransitUpdateRequest(request, id, originator) if self.enableTransit else (None, C.rcOperationNotAllowed)
# handle fanoutPoint requests
if (fanoutPointResource := Utils.fanoutPointResource(id)) is not None and fanoutPointResource.ty == C.tGRP_FOPT:
Logging.logDebug('Redirecting request to fanout point: %s' % fanoutPointResource.__srn__)
return fanoutPointResource.updateRequest(request, id, originator, ct)
# just a normal retrieve request
return self.handleUpdateRequest(request, id, originator, ct)
def handleUpdateRequest(self, request, id, originator, ct):
# get arguments
try:
attrs = self._getArguments(request)
rcn = attrs.get('rcn')
except Exception as e:
return (None, C.rcInvalidArguments)
Logging.logDebug('Updating resource')
		if ct is None:
return (None, C.rcBadRequest)
# Get resource to update
(r, _) = self.retrieveResource(id)
if r is None:
Logging.log('Resource not found')
return (None, C.rcNotFound)
if r.readOnly:
return (None, C.rcOperationNotAllowed)
# check permissions
jsn = request.json
acpi = Utils.findXPath(jsn, list(jsn.keys())[0] + '/acpi')
		if acpi is not None: # an update of the acpi attribute requires a check against the self-privileges
			if not CSE.security.hasAccess(originator, r, C.permUPDATE, checkSelf=True):
				return (None, C.rcOriginatorHasNoPrivilege)
		if not CSE.security.hasAccess(originator, r, C.permUPDATE):
return (None, C.rcOriginatorHasNoPrivilege)
jsonOrg = r.json.copy()
if (result := self.updateResource(r, jsn, originator=originator))[0] is None:
return (None, result[1])
(r, rc) = result
		if rcn == C.rcnAttributes:
			return result
		if rcn == C.rcnModifiedAttributes: # only send back the diff of the modified attributes
jsonNew = r.json.copy()
result = { r.tpe : Utils.resourceDiff(jsonOrg, jsonNew) }
return ( result if rc == C.rcUpdated else None, rc)
return (None, C.rcNotImplemented)
def updateResource(self, resource, json=None, doUpdateCheck=True, originator=None):
Logging.logDebug('Updating resource ri: %s, type: %d' % (resource.ri, resource.ty))
if doUpdateCheck:
if not (res := resource.update(json, originator))[0]:
return (None, res[1])
else:
Logging.logDebug('No check, skipping resource update')
return CSE.storage.updateResource(resource)
#
# Remove resources
#
def deleteRequest(self, request):
(originator, _, _, _, _) = Utils.getRequestHeaders(request)
id = Utils.requestID(request, self.rootPath)
Logging.logDebug('ID: %s, originator: %s' % (id, originator))
# handle transit requests
if CSE.remote.isTransitID(id):
return CSE.remote.handleTransitDeleteRequest(id, originator) if self.enableTransit else (None, C.rcOperationNotAllowed)
# handle fanoutPoint requests
if (fanoutPointResource := Utils.fanoutPointResource(id)) is not None and fanoutPointResource.ty == C.tGRP_FOPT:
Logging.logDebug('Redirecting request to fanout point: %s' % fanoutPointResource.__srn__)
return fanoutPointResource.deleteRequest(request, id, originator)
# just a normal delete request
return self.handleDeleteRequest(request, id, originator)
def handleDeleteRequest(self, request, id, originator):
Logging.logDebug('Removing resource')
# get resource to be removed and check permissions
(r, _) = self.retrieveResource(id)
if r is None:
Logging.logDebug('Resource not found')
return (None, C.rcNotFound)
# if r.readOnly:
# return (None, C.rcOperationNotAllowed)
		if not CSE.security.hasAccess(originator, r, C.permDELETE):
return (None, C.rcOriginatorHasNoPrivilege)
# Check resource deletion
if not (res := CSE.registration.checkResourceDeletion(r, originator))[0]:
return (None, C.rcBadRequest)
# remove resource
return self.deleteResource(r, originator)
	def deleteResource(self, resource, originator=None):
		if resource is None:
			Logging.log('Resource not found')
			return (None, C.rcNotFound)
		Logging.logDebug('Removing resource ri: %s, type: %d' % (resource.ri, resource.ty))
		resource.deactivate(originator) # deactivate it first
# notify the parent resource
parentResource = resource.retrieveParentResource()
# (parentResource, _) = self.retrieveResource(resource['pi'])
(_, rc) = CSE.storage.deleteResource(resource)
CSE.event.deleteResource(resource) # send a delete event
if parentResource is not None:
parentResource.childRemoved(resource, originator)
return (resource, rc)
#
# Utility methods
#
def subResources(self, pi, ty=None):
return CSE.storage.subResources(pi, ty)
def countResources(self):
return CSE.storage.countResources()
# All resources of a type
def retrieveResourcesByType(self, ty):
return CSE.storage.retrieveResource(ty=ty)
#########################################################################
#
# Internal methods
#
# Get the request arguments, or meaningful defaults.
# Only a small subset is supported yet
def _getArguments(self, request):
result = { }
args = request.args.copy() # copy for greedy attributes checking
# basic attributes
if (fu := args.get('fu')) is not None:
fu = int(fu)
del args['fu']
else:
fu = C.fuConditionalRetrieval
result['fu'] = fu
if (drt := args.get('drt')) is not None: # 1=strucured, 2=unstructured
drt = int(drt)
del args['drt']
else:
drt = C.drtStructured
result['drt'] = drt
if (rcn := args.get('rcn')) is not None:
rcn = int(rcn)
del args['rcn']
else:
rcn = C.rcnAttributes if fu == C.fuConditionalRetrieval else C.rcnChildResourceReferences
result['rcn'] = rcn
# handling conditions
handling = {}
for c in ['lim', 'lvl', 'ofst']: # integer parameters
if c in args:
handling[c] = int(args[c])
del args[c]
for c in ['arp']:
if c in args:
handling[c] = args[c]
del args[c]
result['__handling__'] = handling
# conditions
conditions = {}
# TODO Check ty multiple times. Then -> "ty" : array?
# also contentType
# Extra dictionary! as in attributes
for c in ['crb', 'cra', 'ms', 'us', 'sts', 'stb', 'exb', 'exa', 'lbl', 'lbq', 'sza', 'szb', 'catr', 'patr']:
if (x:= args.get(c)) is not None:
conditions[c] = x
del args[c]
# get types (multi)
conditions['ty'] = args.getlist('ty')
args.poplist('ty')
# get contentTypes (multi)
conditions['cty'] = args.getlist('cty')
args.poplist('cty')
		result['__conditions__'] = conditions
# filter operation
if (fo := args.get('fo')) is not None: # 1=AND, 2=OR
fo = int(fo)
del args['fo']
else:
fo = 1 # default
result['fo'] = fo
# all remaining arguments are treated as matching attributes
result['__attrs__'] = args.copy()
return result
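	# Example (illustrative): a discovery request like
	#   GET /<root>/<cse>?fu=1&rcn=6&ty=3&lbl=tag1&lim=10
	# would yield { 'fu': 1, 'drt': 1, 'rcn': 6, 'fo': 1,
	# '__handling__': {'lim': 10},
	# '__conditions__': {'lbl': 'tag1', 'ty': ['3'], 'cty': []}, '__attrs__': {} }.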
# Create a m2m:uril structure from a list of resources
def _resourcesToURIList(self, resources, drt):
cseid = '/' + Configuration.get('cse.csi') + '/'
lst = []
for r in resources:
lst.append(Utils.structuredPath(r) if drt == C.drtStructured else cseid + r.ri)
return { 'm2m:uril' : lst }
# def _attributesAndChildResources(self, parentResource, resources):
# result = parentResource.asJSON()
# ch = []
# for r in resources:
# ch.append(r.asJSON(embedded=False))
# result[parentResource.tpe]['ch'] = ch
# return result
# Recursively walk the results and build a sub-resource tree for each resource type
def _resourceTreeJSON(self, rs, rootResource):
rri = rootResource['ri'] if 'ri' in rootResource else None
while True: # go multiple times per level through the resources until the list is empty
result = []
handledTy = None
idx = 0
while idx < len(rs):
r = rs[idx]
if rri is not None and r.pi != rri: # only direct children
idx += 1
continue
if r.ty in [ C.tCNT_OL, C.tCNT_LA, C.tFCNT_OL, C.tFCNT_LA ]: # Skip latest, oldest virtual resources
idx += 1
continue
if handledTy is None:
handledTy = r.ty # this round we check this type
if r.ty == handledTy: # handle only resources of the currently handled type
result.append(r) # append the found resource
rs.remove(r) # remove resource from the original list (greedy), but don't increment the idx
rs = self._resourceTreeJSON(rs, r) # check recursively whether this resource has children
else:
idx += 1 # next resource
# add all found resources under the same type tag to the rootResource
if len(result) > 0:
rootResource[result[0].tpe] = [r.asJSON(embedded=False) for r in result]
# TODO not all child resources are lists [...] Handle just to-1 relations
else:
break # end of list, leave while loop
return rs # Return the remaining list
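	# Example (illustrative): for a root <CSEBase> with two <AE> children and one
	# <container> child, the root JSON gains 'm2m:ae' : [ {...}, {...} ] and
	# 'm2m:cnt' : [ {...} ] entries, with each child expanded recursively in the
	# same way.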
	# Retrieve the child resource references of a resource and add them to a new target resource as "children"
def _resourceTreeReferences(self, resources, targetResource, drt):
if len(resources) == 0:
return
t = []
for r in resources:
if r.ty in [ C.tCNT_OL, C.tCNT_LA, C.tFCNT_OL, C.tFCNT_LA ]: # Skip latest, oldest virtual resources
continue
t.append({ 'nm' : r['rn'], 'typ' : r['ty'], 'val' : Utils.structuredPath(r) if drt == C.drtStructured else r.ri})
targetResource['ch'] = t
# Retrieve full child resources of a resource and add them to a new target resource
	def _childResourceTree(self, resources, targetResource):
		if len(resources) == 0:
			return
		result = {}
		self._resourceTreeJSON(resources, result) # rootResource is filled with the result
for k,v in result.items(): # copy child resources to result resource
targetResource[k] = v | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/acme/Dispatcher.py | Dispatcher.py |
import requests, json, urllib
from Configuration import Configuration
from Logging import Logging
from Constants import Constants as C
import Utils, CSE
from resources import CSR, CSEBase
from helpers import BackgroundWorker
class RemoteCSEManager(object):
def __init__(self):
self.csetype = Configuration.get('cse.type')
self.isConnected = False
self.remoteAddress = Configuration.get('cse.remote.address')
self.remoteRoot = Configuration.get('cse.remote.root')
self.remoteCseid = Configuration.get('cse.remote.cseid')
self.originator = Configuration.get('cse.remote.originator')
self.worker = None
self.checkInterval = Configuration.get('cse.remote.checkInterval')
self.cseCsi = Configuration.get('cse.csi')
self.remoteCSEURL = self.remoteAddress + self.remoteRoot + self.remoteCseid
self.remoteCSRURL = self.remoteCSEURL + '/' + self.cseCsi
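		# Example (illustrative): with address 'http://remote:8080', root '/',
		# remote cseid 'id-in' and an own csi of 'id-mn', this results in
		# remoteCSEURL == 'http://remote:8080/id-in' and
		# remoteCSRURL == 'http://remote:8080/id-in/id-mn'.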
Logging.log('RemoteCSEManager initialized')
def shutdown(self):
self.stop()
Logging.log('RemoteCSEManager shut down')
#
# Connection Monitor
#
# Start the monitor in a thread.
def start(self):
if not Configuration.get('cse.enableRemoteCSE'):
			return
Logging.log('Starting remote CSE connection monitor')
self.worker = BackgroundWorker.BackgroundWorker(self.checkInterval, self.connectionMonitorWorker)
self.worker.start()
# Stop the monitor. Also delete the CSR resources on both sides
def stop(self):
if not Configuration.get('cse.enableRemoteCSE'):
			return
Logging.log('Stopping remote CSE connection monitor')
# Stop the thread
if self.worker is not None:
self.worker.stop()
# Remove resources
if self.csetype in [ C.cseTypeASN, C.cseTypeMN ]:
(_, rc) = self._deleteRemoteCSR() # delete remote CSR
(csr, rc) = self._retrieveLocalCSR() # retrieve local CSR
if rc == C.rcOK:
self._deleteLocalCSR(csr[0]) # delete local CSR
#
# Check the connection, and presence and absence of CSE and CSR in a
# thread periodically.
#
# It works like this for connections for an ASN or MN to the remote CSE:
#
# Is there is a local <remoteCSE> for a remote <CSEBase>?
# - Yes: Is there a remote <remoteCSE>?
# - Yes:
# - Retrieve the remote <CSEBase>.
# - Has the remote <CSEBase> been modified?
# - Yes:
# - Update the local <remoteCSE>
# - Retrieve the local <CSEBase>
# - Has the local <CSEBase> been modified?
# - Yes:
# -Update the remote <remoteCSE>
# - No:
# - Delete a potential local <remoteCSE>
# - Create a remote <remoteCSE>
# - Success:
# - Retrieve the remote <CSEBase>
# - Create a local <remoteCSE> for it
# - No:
# - Delete a potential remote <remoteCSE>
# - Create a new remote <remoteCSE>
# - Success:
# - Retrieve the remote <CSEBase>
# - Create a local <remoteCSE> for it
#
def connectionMonitorWorker(self):
Logging.logDebug('Checking connections to remote CSEs')
try:
# Check the current state of the connection to the "upstream" CSEs
if self.csetype in [ C.cseTypeASN, C.cseTypeMN ]:
self._checkOwnConnection()
# Check the liveliness of other CSR connections
if self.csetype in [ C.cseTypeMN, C.cseTypeIN ]:
self._checkCSRLiveliness()
except Exception as e:
Logging.logErr('Exception: %s' % e)
return False
return True
# Check the connection for this CSE to the remote CSE.
def _checkOwnConnection(self):
# first check whether there is already a local CSR
(localCSR, rc) = self._retrieveLocalCSR()
		localCSR = localCSR[0] # hopefully, there is only one upstream CSR
if rc == C.rcOK:
(remoteCSR, rc) = self._retrieveRemoteCSR() # retrieve own
if rc == C.rcOK:
# check for changes in remote CSE
(remoteCSE, rc) = self._retrieveRemoteCSE()
if rc == C.rcOK:
if remoteCSE.isModifiedSince(localCSR): # remote CSE modified
self._updateLocalCSR(localCSR, remoteCSE)
Logging.log('Local CSR updated')
(localCSE, _) = Utils.getCSE()
if localCSE.isModifiedSince(remoteCSR): # local CSE modified
self._updateRemoteCSR(localCSE)
Logging.log('Remote CSR updated')
else:
# Potential disconnect
(_, rc) = self._deleteLocalCSR(localCSR)
(remoteCSR, rc) = self._createRemoteCSR()
if rc == C.rcCreated:
(remoteCSE, rc) = self._retrieveRemoteCSE()
if rc == C.rcOK:
self._createLocalCSR(remoteCSE)
Logging.log('Remote CSE connected')
else:
Logging.log('Remote CSE disconnected')
else:
# No local CSR, so try to delete an optional remote one and re-create everything.
(_, rc) = self._deleteRemoteCSR()
if rc in [C.rcDeleted, C.rcNotFound]:
(_, rc) = self._createRemoteCSR()
if rc == C.rcCreated:
(remoteCSE, rc) = self._retrieveRemoteCSE()
if rc == C.rcOK:
self._createLocalCSR(remoteCSE)
Logging.log('Remote CSE connected')
	# Check the liveliness of all remote CSEs that are connected to this CSE.
	# This is done by trying to retrieve the remote CSR. If it cannot be retrieved
	# then the related local CSR is removed.
def _checkCSRLiveliness(self):
(csrs, rc) = self._retrieveLocalCSR(own=False)
for csr in csrs:
for url in csr.poa:
if Utils.isURL(url):
(cse, rc) = self._retrieveRemoteCSE(url='%s/%s' % (url, csr.csi ))
if rc != C.rcOK:
Logging.logWarn('Remote CSE unreachable. Removing CSR: %s' % csr.rn)
CSE.dispatcher.deleteResource(csr)
#
# Local CSR
#
def _retrieveLocalCSR(self, csi=None, own=True):
#Logging.logDebug('Retrieving local CSR: %s' % csi)
csrs = CSE.dispatcher.subResources(pi=Configuration.get('cse.ri'), ty=C.tCSR)
if csi is None:
csi = self.remoteCseid
if own:
for csr in csrs:
if (c := csr.csi) is not None and c == csi:
return ([csr], C.rcOK)
return ([None], C.rcBadRequest)
else:
result = []
for csr in csrs:
if (c := csr.csi) is not None and c == csi:
continue
result.append(csr)
return (result, C.rcOK)
def _createLocalCSR(self, remoteCSE):
Logging.logDebug('Creating local CSR: %s' % remoteCSE.ri)
# copy attributes
(localCSE, _) = Utils.getCSE()
csr = CSR.CSR()
# csr['pi'] = localCSE['ri']
csr['pi'] = Configuration.get('cse.ri')
self._copyCSE2CSE(csr, remoteCSE)
csr['ri'] = remoteCSE.ri
# add local CSR
return CSE.dispatcher.createResource(csr, localCSE)
def _updateLocalCSR(self, localCSR, remoteCSE):
Logging.logDebug('Updating local CSR: %s' % localCSR.rn)
# copy attributes
self._copyCSE2CSE(localCSR, remoteCSE)
return CSE.dispatcher.updateResource(localCSR)
def _deleteLocalCSR(self, resource):
Logging.logDebug('Deleting local CSR: %s' % resource.ri)
return CSE.dispatcher.deleteResource(resource)
#
# Remote CSR
#
def _retrieveRemoteCSR(self):
#Logging.logDebug('Retrieving remote CSR: %s' % self.remoteCseid)
(jsn, rc) = CSE.httpServer.sendRetrieveRequest(self.remoteCSRURL, self.originator)
if rc not in [C.rcOK]:
return (None, rc)
return (CSR.CSR(jsn), C.rcOK)
def _createRemoteCSR(self):
Logging.logDebug('Creating remote CSR: %s' % self.remoteCseid)
# get local CSEBase and copy relevant attributes
(localCSE, _) = Utils.getCSE()
csr = CSR.CSR()
self._copyCSE2CSE(csr, localCSE)
csr['ri'] = self.cseCsi
data = json.dumps(csr.asJSON())
(jsn, rc) = CSE.httpServer.sendCreateRequest(self.remoteCSEURL, self.originator, ty=C.tCSR, data=data)
if rc not in [C.rcCreated, C.rcOK]:
if rc != C.rcAlreadyExists:
Logging.logDebug('Error creating remote CSR: %d' % rc)
return (None, rc)
Logging.logDebug('Remote CSR created: %s' % self.remoteCseid)
return (CSR.CSR(jsn), C.rcCreated)
def _updateRemoteCSR(self, localCSE):
		Logging.logDebug('Updating remote CSR: %s' % self.remoteCseid)
csr = CSR.CSR()
self._copyCSE2CSE(csr, localCSE)
		del csr['acpi'] # remove the ACPI attribute (don't provide ACPI in updates)
data = json.dumps(csr.asJSON())
(jsn, rc) = CSE.httpServer.sendUpdateRequest(self.remoteCSRURL, self.originator, data=data)
if rc not in [C.rcUpdated, C.rcOK]:
if rc != C.rcAlreadyExists:
Logging.logDebug('Error updating remote CSR: %d' % rc)
return (None, rc)
Logging.logDebug('Remote CSR updated: %s' % self.remoteCseid)
return (CSR.CSR(jsn), C.rcUpdated)
def _deleteRemoteCSR(self):
Logging.logDebug('Deleting remote CSR: %s' % self.remoteCseid)
(jsn, rc) = CSE.httpServer.sendDeleteRequest(self.remoteCSRURL, self.originator)
if rc not in [C.rcDeleted, C.rcOK]:
return (None, rc)
Logging.log('Remote CSR deleted: %s' % self.remoteCseid)
return (None, C.rcDeleted)
#
# Remote CSE
#
# Retrieve the remote CSE
def _retrieveRemoteCSE(self, url=None):
#Logging.logDebug('Retrieving remote CSE: %s' % self.remoteCseid)
(jsn, rc) = CSE.httpServer.sendRetrieveRequest(url if url is not None else self.remoteCSEURL, self.originator)
if rc not in [C.rcOK]:
return (None, rc)
return (CSEBase.CSEBase(jsn), C.rcOK)
#########################################################################
#
# Handling of Transit requests. Forward requests to the resp. remote CSE's.
#
# Forward a Retrieve request to a remote CSE
def handleTransitRetrieveRequest(self, request, id, origin):
if (url := self._getForwardURL(id)) is None:
return (None, C.rcNotFound)
if len(request.args) > 0: # pass on other arguments, for discovery
url += '?' + urllib.parse.urlencode(request.args)
Logging.log('Forwarding Retrieve/Discovery request to: %s' % url)
return CSE.httpServer.sendRetrieveRequest(url, origin)
# Forward a Create request to a remote CSE
def handleTransitCreateRequest(self, request, id, origin, ty):
if (url := self._getForwardURL(id)) is None:
return (None, C.rcNotFound)
Logging.log('Forwarding Create request to: %s' % url)
return CSE.httpServer.sendCreateRequest(url, origin, data=request.data, ty=ty)
# Forward a Update request to a remote CSE
def handleTransitUpdateRequest(self, request, id, origin):
if (url := self._getForwardURL(id)) is None:
return (None, C.rcNotFound)
Logging.log('Forwarding Update request to: %s' % url)
return CSE.httpServer.sendUpdateRequest(url, origin, data=request.data)
# Forward a Delete request to a remote CSE
def handleTransitDeleteRequest(self, id, origin):
if (url := self._getForwardURL(id)) is None:
return (None, C.rcNotFound)
Logging.log('Forwarding Delete request to: %s' % url)
return CSE.httpServer.sendDeleteRequest(url, origin)
# Check whether an ID is a targeting a remote CSE via a CSR
def isTransitID(self, id):
(r, _) = self._getCSRFromPath(id)
return r is not None and r.ty == C.tCSR
# Get the new target URL when forwarding
def _getForwardURL(self, path):
(r, pe) = self._getCSRFromPath(path)
if r is not None:
return '%s/-/%s' % (r.poa[0], '/'.join(pe[1:]))
return None
# try to get a CSR even from a longer path (only the first 2 path elements are relevant)
def _getCSRFromPath(self, id):
pathElements = id.split('/')
if len(pathElements) <= 2:
return (None, None)
id = '%s/%s' % (pathElements[0], pathElements[1])
(r, rc) = CSE.dispatcher.retrieveResource(id)
return (r, pathElements)
#########################################################################
def _copyCSE2CSE(self, target, source):
if 'csb' in source:
target['csb'] = self.remoteCSEURL
if 'csi' in source:
target['csi'] = source.csi
if 'cst' in source:
target['cst'] = source.cst
if 'csz' in source:
target['csz'] = source.csz
if 'lbl' in source:
target['lbl'] = source.lbl
if 'nl' in source:
target['nl'] = source.nl
if 'poa' in source:
target['poa'] = source.poa
if 'rn' in source:
target['rn'] = source.rn
if 'rr' in source:
target['rr'] = source.rr
if 'srt' in source:
target['srt'] = source.srt
if 'srv' in source:
target['srv'] = source.srv
if 'st' in source:
target['st'] = source.st | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/acme/RemoteCSEManager.py | RemoteCSEManager.py |
from Constants import Constants as C
from .Resource import *
import Utils
class ACP(Resource):
def __init__(self, jsn=None, pi=None, rn=None, create=False):
super().__init__(C.tsACP, jsn, pi, C.tACP, create=create, inheritACP=True, rn=rn)
# store permissions for easier access
self._storePermissions()
def validate(self, originator, create=False):
if (res := super().validate(originator, create))[0] == False:
return res
# add admin originator
if Configuration.get('cse.acp.addAdminOrignator'):
cseOriginator = Configuration.get('cse.originator')
if cseOriginator not in self.pv_acor:
self.addPermissionOriginator(cseOriginator)
if cseOriginator not in self.pvs_acor:
self.addSelfPermissionOriginator(cseOriginator)
self._storePermissions()
return (True, C.rcOK)
#########################################################################
#
# Permission handling
#
def addPermissionOriginator(self, originator):
if originator not in self.pv_acor:
self.pv_acor.append(originator)
self.setAttribute('pv/acr/acor', self.pv_acor)
def setPermissionOperation(self, operation):
self.pv_acop = operation
self.setAttribute('pv/acr/acop', self.pv_acop)
def addSelfPermissionOriginator(self, originator):
if originator not in self.pvs_acor:
self.pvs_acor.append(originator)
self.setAttribute('pvs/acr/acor', self.pvs_acor)
def setSelfPermissionOperation(self, operation):
self.pvs_acop = operation
self.setAttribute('pvs/acr/acop', self.pvs_acop)
def checkPermission(self, origin, requestedPermission):
if requestedPermission & self.pv_acop == 0: # permission not fitting at all
return False
return 'all' in self.pv_acor or origin in self.pv_acor or requestedPermission == C.permNOTIFY
def checkSelfPermission(self, origin, requestedPermission):
if requestedPermission & self.pvs_acop == 0: # permission not fitting at all
return False
return 'all' in self.pvs_acor or origin in self.pvs_acor
def _storePermissions(self):
self.pv_acop = self.attribute('pv/acr/acop', 0)
self.pv_acor = self.attribute('pv/acr/acor', [])
self.pvs_acop = self.attribute('pvs/acr/acop', 0)
self.pvs_acor = self.attribute('pvs/acr/acor', []) | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/acme/resources/ACP.py | ACP.py |
import sys
from Constants import Constants as C
import Utils
from .Resource import *
class FCNT(Resource):
def __init__(self, jsn=None, pi=None, fcntType=None, create=False):
super().__init__(fcntType, jsn, pi, C.tFCNT, create=create)
if self.json is not None:
self.setAttribute('cs', 0, overwrite=False)
# "current" attributes are added when necessary in the validate() method
# Indicates whether this FC has flexContainerInstances.
# Might change during the lifetime of a resource. Used for optimization
self.hasInstances = False
self.ignoreAttributes = [ self._rtype, self._srn, self._node, 'acpi', 'cbs', 'cni', 'cnd', 'cs', 'cr', 'ct', 'et', 'lt', 'mbs', 'mia', 'mni', 'or', 'pi', 'ri', 'rn', 'st', 'ty' ]
# Enable check for allowed sub-resources
def canHaveChild(self, resource):
return super()._canHaveChild(resource,
[ C.tCNT,
C.tFCNT,
C.tSUB
])
def activate(self, originator):
super().activate(originator)
# TODO Error checking above
# register latest and oldest virtual resources
Logging.logDebug('Registering latest and oldest virtual resources for: %s' % self.ri)
if self.hasInstances:
# add latest
r = Utils.resourceFromJSON({}, pi=self.ri, acpi=self.acpi, tpe=C.tFCNT_LA)
CSE.dispatcher.createResource(r)
# add oldest
r = Utils.resourceFromJSON({}, pi=self.ri, acpi=self.acpi, tpe=C.tFCNT_OL)
CSE.dispatcher.createResource(r)
return (True, C.rcOK)
# Check the presence of cnd and calculate the resource size
def validate(self, originator, create=False):
if (res := super().validate(originator, create))[0] == False:
return res
# No CND?
if (cnd := self.cnd) is None or len(cnd) == 0:
return (False, C.rcContentsUnacceptable)
# Calculate contentSize
# This is not at all realistic since this is the in-memory representation
# TODO better implementation needed
cs = 0
for attr in self.json:
if attr in self.ignoreAttributes:
continue
cs += sys.getsizeof(self[attr])
self['cs'] = cs
#
# Handle flexContainerInstances
#
# TODO When cni and cbs is set to 0, then delete mni, mbs, la, ol, and all children
if self.mni is not None or self.mbs is not None:
self.hasInstances = True # Change the internal flag whether this FC has flexContainerInstances
self.addFlexContainerInstance(originator)
fci = self.flexContainerInstances()
# check mni
if self.mni is not None:
mni = self.mni
fcii = len(fci)
i = 0
l = fcii
while fcii > mni and i < l:
# remove oldest
CSE.dispatcher.deleteResource(fci[i])
fcii -= 1
i += 1
self['cni'] = fcii
# Add "current" atribute, if it is not there
self.setAttribute('cni', 0, overwrite=False)
# check size
if self.mbs is not None:
fci = self.flexContainerInstances() # get FCIs again (the list may have changed)
mbs = self.mbs
cbs = 0
for f in fci: # Calculate cbs
cbs += f.cs
i = 0
l = len(fci)
while cbs > mbs and i < l:
# remove oldest
cbs -= fci[i].cs
CSE.dispatcher.deleteResource(fci[i])
i += 1
self['cbs'] = cbs
# Add "current" atribute, if it is not there
self.setAttribute('cbs', 0, overwrite=False)
# TODO Remove la, ol, existing FCI when mni etc are not present anymore.
# TODO support maxInstanceAge
# May have been changed, so store the resource
CSE.dispatcher.updateResource(self, doUpdateCheck=False) # To avoid recursion, don't do an update check
return (True, C.rcOK)
# Get all flexContainerInstances of a resource and return a sorted (by ct) list
def flexContainerInstances(self):
return sorted(CSE.dispatcher.subResources(self.ri, C.tFCI), key=lambda x: (x.ct))
# Add a new FlexContainerInstance for this flexContainer
def addFlexContainerInstance(self, originator):
Logging.logDebug('Adding flexContainerInstance')
jsn = { 'rn' : '%s_%d' % (self.rn, self.st),
#'cnd' : self.cnd,
'lbl' : self.lbl,
'ct' : self.lt,
'et' : self.et,
'cs' : self.cs,
'or' : originator
}
for attr in self.json:
if attr not in self.ignoreAttributes:
jsn[attr] = self[attr]
fci = Utils.resourceFromJSON(jsn = { self.tpe : jsn },
pi = self.ri,
tpe = C.tFCI) # no ACPI
CSE.dispatcher.createResource(fci) | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/acme/resources/FCNT.py | FCNT.py |
from Logging import Logging
from Configuration import Configuration
from Constants import Constants as C
import Utils, CSE
from .Resource import *
class CNT(Resource):
def __init__(self, jsn=None, pi=None, create=False):
super().__init__(C.tsCNT, jsn, pi, C.tCNT, create=create)
if self.json is not None:
self.setAttribute('mni', Configuration.get('cse.cnt.mni'), overwrite=False)
self.setAttribute('mbs', Configuration.get('cse.cnt.mbs'), overwrite=False)
self.setAttribute('cni', 0, overwrite=False)
self.setAttribute('cbs', 0, overwrite=False)
# Enable check for allowed sub-resources
def canHaveChild(self, resource):
return super()._canHaveChild(resource,
[ C.tCNT,
C.tCIN,
C.tFCNT,
C.tSUB
])
def activate(self, originator):
super().activate(originator)
# register latest and oldest virtual resources
Logging.logDebug('Registering latest and oldest virtual resources for: %s' % self.ri)
# add latest
r = Utils.resourceFromJSON({}, pi=self.ri, acpi=self.acpi, tpe=C.tCNT_LA)
CSE.dispatcher.createResource(r)
# add oldest
r = Utils.resourceFromJSON({}, pi=self.ri, acpi=self.acpi, tpe=C.tCNT_OL)
CSE.dispatcher.createResource(r)
# TODO Error checking above
return (True, C.rcOK)
# Get all content instances of a resource and return a sorted (by ct) list
def contentInstances(self):
return sorted(CSE.dispatcher.subResources(self.ri, C.tCIN), key=lambda x: (x.ct))
# Handle the addition of new CIN. Basically, get rid of old ones.
def childAdded(self, childResource, originator):
super().childAdded(childResource, originator)
if childResource.ty == C.tCIN: # Validate if child is CIN
self.validate(originator)
# Handle the removal of a CIN.
def childRemoved(self, childResource, originator):
super().childRemoved(childResource, originator)
if childResource.ty == C.tCIN: # Validate if child was CIN
self.validate(originator)
# Validating the Container. This means recalculating cni, cbs as well as
# removing ContentInstances when the limits are met.
def validate(self, originator, create=False):
if (res := super().validate(originator, create))[0] == False:
return res
# retrieve all children
cs = self.contentInstances()
# Check number of instances
mni = self.mni
cni = len(cs)
i = 0
l = cni
while cni > mni and i < l:
# remove oldest
CSE.dispatcher.deleteResource(cs[i])
cni -= 1
i += 1
self['cni'] = cni
# check size
cs = self.contentInstances() # get CINs again
mbs = self.mbs
cbs = 0
for c in cs: # Calculate cbs
cbs += c['cs']
i = 0
l = len(cs)
while cbs > mbs and i < l:
# remove oldest
cbs -= cs[i]['cs']
CSE.dispatcher.deleteResource(cs[i])
i += 1
self['cbs'] = cbs
# TODO: support maxInstanceAge
# Some CNT resource may have been updated, so store the resource
CSE.dispatcher.updateResource(self, doUpdateCheck=False) # To avoid recursion, don't do an update check
return (True, C.rcOK) | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/acme/resources/CNT.py | CNT.py |
from Logging import Logging
from Constants import Constants as C
from Configuration import Configuration
import Utils, CSE
import datetime, random
# Future TODO: Check RO/WO etc for attributes (list of attributes per resource?)
class Resource(object):
_rtype = '__rtype__'
_srn = '__srn__'
_node = '__node__'
def __init__(self, tpe, jsn=None, pi=None, ty=None, create=False, inheritACP=False, readOnly=False, rn=None):
self.tpe = tpe
self.readOnly = readOnly
self.inheritACP = inheritACP
self.json = {}
if jsn is not None:
if tpe in jsn:
self.json = jsn[tpe].copy()
else:
self.json = jsn.copy()
else:
pass
# TODO Exception?
if self.json is not None:
if self.tpe is None: # and self._rtype in self:
self.tpe = self.__rtype__
self.setAttribute('ri', Utils.uniqueRI(self.tpe), overwrite=False)
# override rn if given
if rn is not None:
self.setAttribute('rn', rn, overwrite=True)
# Check uniqueness of ri. otherwise generate a new one. Only when creating
# TODO: could be a BAD REQUEST?
if create:
while Utils.isUniqueRI(ri := self.attribute('ri')) == False:
Logging.logWarn("RI: %s is already assigned. Generating new RI." % ri)
self.setAttribute('ri', Utils.uniqueRI(self.tpe), overwrite=True)
# Create an RN if there is none
self.setAttribute('rn', Utils.uniqueRN(self.tpe), overwrite=False)
# Set some more attributes
ts = Utils.getResourceDate()
self.setAttribute('ct', ts, overwrite=False)
self.setAttribute('lt', ts, overwrite=False)
self.setAttribute('et', Utils.getResourceDate(Configuration.get('cse.expirationDelta')), overwrite=False)
self.setAttribute('st', 0, overwrite=False)
if pi is not None:
self.setAttribute('pi', pi, overwrite=False)
if ty is not None:
self.setAttribute('ty', ty)
#
## Note: ACPI is set in activate()
#
# Remove empty / null attributes from json
self.json = {k: v for (k, v) in self.json.items() if v is not None }
# determine and add the srn
self[self._srn] = Utils.structuredPath(self)
self[self._rtype] = self.tpe
# Default encoding implementation. Overwrite in subclasses
def asJSON(self, embedded=True, update=False, noACP=False):
# remove (from a copy) all internal attributes before printing
jsn = self.json.copy()
for k in [ self._rtype, self._srn, self._node]:
if k in jsn:
del jsn[k]
if noACP:
if 'acpi' in jsn:
del jsn['acpi']
if update:
for k in [ 'ri', 'ty', 'pi', 'ct', 'lt', 'st', 'rn', 'mgd']:
if k in jsn: # guard against attributes that are not present
del jsn[k]
return { self.tpe : jsn } if embedded else jsn
# This method is called to activate a resource. This does not always
# happen, e.g. when a resource object is only used temporarily.
# NO notification on activation/creation!
# Implemented in sub-classes.
def activate(self, originator):
Logging.logDebug('Activating resource: %s' % self.ri)
if not (result := self.validate(originator, create=True))[0]:
return result
# Note: CR is set in RegistrationManager
# Handle ACPI assignments here
if self.inheritACP:
self.delAttribute('acpi')
else:
if self.ty != C.tAE: # Don't handle AE's here. This is done in the RegistrationManager
#adminACPIRI = Configuration.get('cse.adminACPI')
defaultACPIRI = Configuration.get('cse.defaultACPI')
if self.acpi is None:
self.setAttribute('acpi', [ defaultACPIRI ]) # Set default ACPIRIs
#self.setAttribute('acpi', [ adminACPIRI, defaultACPIRI ]) # Set admin and default ACPIRIs
# else:
# if not adminACPIRI in self.acpi:
# self.acpi.append(adminACPIRI)
self.setAttribute(self._rtype, self.tpe, overwrite=False)
return (True, C.rcOK)
# Deactivate an active resource.
# Send notification on deletion
def deactivate(self, originator):
Logging.logDebug('Deactivating and removing sub-resources: %s' % self.ri)
# First check notification because the subscription will be removed
# when the subresources are removed
CSE.notification.checkSubscriptions(self, C.netResourceDelete)
# Remove subresources
rs = CSE.dispatcher.subResources(self.ri)
for r in rs:
self.childRemoved(r, originator)
CSE.dispatcher.deleteResource(r, originator)
# Update this resource with (new) fields.
# Call validate() afterward to react on changes.
def update(self, jsn=None, originator=None):
if jsn is not None:
if self.tpe not in jsn:
Logging.logWarn("Update types don't match")
return (False, C.rcContentsUnacceptable)
j = jsn[self.tpe] # get structure under the resource type specifier
for key in j:
# Leave out some attributes
if key in ['ct', 'lt', 'pi', 'ri', 'rn', 'st', 'ty']:
continue
self[key] = j[key] # copy new value
# - state and lt
if 'st' in self.json: # Update the state
self['st'] += 1
if 'lt' in self.json: # Update the lastModifiedTime
self['lt'] = Utils.getResourceDate()
# Do some extra validations, if necessary
if not (res := self.validate(originator))[0]:
return res
# Check subscriptions
CSE.notification.checkSubscriptions(self, C.netResourceUpdate)
return (True, C.rcOK)
# Child was added to the resource.
def childAdded(self, childResource, originator):
CSE.notification.checkSubscriptions(self, C.netCreateDirectChild, childResource)
# Child was removed from the resource.
def childRemoved(self, childResource, originator):
CSE.notification.checkSubscriptions(self, C.netDeleteDirectChild, childResource)
# MUST be implemented by each class
def canHaveChild(self, resource):
raise NotImplementedError('canHaveChild()')
# Is called from a child class
def _canHaveChild(self, resource, allowedChildResourceTypes):
from .Unknown import Unknown # Unknown imports this class, therefore import only here
return resource['ty'] in allowedChildResourceTypes or isinstance(resource, Unknown)
# Validate a resource. Usually called within activate() or
# update() methods.
def validate(self, originator=None, create=False):
Logging.logDebug('Validating resource: %s' % self.ri)
if (not Utils.isValidID(self.ri) or
not Utils.isValidID(self.pi) or
not Utils.isValidID(self.rn)):
Logging.logDebug('Invalid ID (ri: %s, pi: %s, rn: %s)' % (self.ri, self.pi, self.rn))
return (False, C.rcContentsUnacceptable)
return (True, C.rcOK)
#########################################################################
#
# Attribute handling
#
def setAttribute(self, name, value, overwrite=True):
Utils.setXPath(self.json, name, value, overwrite)
def attribute(self, key, default=None):
if '/' in key: # search in path
return Utils.findXPath(self.json, key, default)
if self.hasAttribute(key):
return self.json[key]
return default
def hasAttribute(self, key):
# TODO check sub-elements as well
return key in self.json
def delAttribute(self, key):
if self.hasAttribute(key):
del self.json[key]
def __setitem__(self, key, value):
self.setAttribute(key, value)
def __getitem__(self, key):
return self.attribute(key)
def __delitem__(self, key):
self.delAttribute(key)
def __contains__(self, key):
return self.hasAttribute(key)
def __getattr__(self, name):
return self.attribute(name)
#########################################################################
#
# Misc utilities
#
def __str__(self):
return str(self.asJSON())
def __eq__(self, other):
return self.ri == other.ri
def isModifiedSince(self, other):
return self.lt > other.lt
def retrieveParentResource(self):
(parentResource, _) = CSE.dispatcher.retrieveResource(self.pi)
return parentResource | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/acme/resources/Resource.py | Resource.py |
from AppBase import AppBase
from NodeBase import NodeBase
from Configuration import Configuration
from Constants import Constants as C
import CSE, Utils
import json
class AEBase(AppBase):
def __init__(self, rn, api, originator=None, nodeRN=None, nodeID=None, nodeOriginator=None):
super().__init__(rn, originator)
self.rn = rn
self.originator = originator
self.ae = None
self.aeNodeBase = None
self.appData = None
# Get or create the hosting node
if nodeRN is not None and nodeID is not None:
self.aeNode = NodeBase(nodeRN, nodeID, nodeOriginator)
# Try to get the application data and the originator
self.originator = self.getAppData('_originator', originator)
# Get or create the AE resource
self.ae = self.retrieveCreate( srn=self.srn,
jsn={ C.tsAE : {
'rn' : self.rn,
'api' : api,
'nl' : self.aeNode.node.ri if self.aeNode.node is not None else None,
'poa' : Configuration.get('http.address')
}
},
ty=C.tAE)
# Use the AE's assigned 'aei' attribute as the originator
self.originator = Utils.findXPath(self.ae, "aei")
# Store updated application data
self.setAppData('_originator', self.originator)
# Use the first assigned ACP as the acpi
self.acpi = Utils.findXPath(self.ae, "acpi")[0]
def shutdown(self):
super().shutdown()
def clean(self):
self.shutdown()
self.removeAppData()
#########################################################################
#
# Persistent Application Data
#
# retrieve application data. If not found, initialize and store a record
def retrieveAppData(self):
if (result := CSE.storage.getAppData(self.rn)) is None:
self.appData = { 'id': self.rn,
'_originator': self.originator
}
self.storeAppData()
else:
self.appData = result
return self.appData
def storeAppData(self):
CSE.storage.updateAppData(self.appData)
def removeAppData(self):
CSE.storage.removeAppData()
def setAppData(self, key, value):
self.appData[key] = value
self.storeAppData()
def getAppData(self, key, default=None):
if self.appData is None:
self.retrieveAppData()
return self.appData[key] if key in self.appData else default | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/apps/AEBase.py | AEBase.py |
from AEBase import *
from Logging import Logging
from Configuration import Configuration
import Statistics
import threading, time
class AEStatistics(AEBase):
def __init__(self):
super().__init__( rn=Configuration.get('app.statistics.aeRN'),
api=Configuration.get('app.statistics.aeAPI'),
originator=Configuration.get('app.statistics.originator'),
nodeRN=Configuration.get('app.csenode.nodeRN'), # From CSE-Node
nodeID=Configuration.get('app.csenode.nodeID'), # From CSE-Node
nodeOriginator=Configuration.get('app.csenode.originator') # From CSE-Node
)
self.fcsrn = self.srn + '/' + Configuration.get('app.statistics.fcntRN')
self.fcntType = Configuration.get('app.statistics.fcntType')
# Create structure beneath the AE resource
self.fc = self.retrieveCreate( srn=self.fcsrn,
jsn={ self.fcntType : {
'rn' : Configuration.get('app.statistics.fcntRN'),
'cnd' : Configuration.get('app.statistics.fcntCND'),
'acpi': [ self.acpi ], # assigned by the CSE
'mni' : 10,
Statistics.deletedResources : 0,
Statistics.createdresources : 0,
Statistics.httpRetrieves : 0,
Statistics.httpCreates : 0,
Statistics.httpUpdates : 0,
Statistics.httpDeletes : 0,
Statistics.logErrors : 0,
Statistics.logWarnings : 0,
Statistics.cseStartUpTime : '',
Statistics.cseUpTime : '',
Statistics.resourceCount: 0
}
},
ty=C.tFCNT)
# Update the statistic resource from time to time
self.startWorker(Configuration.get('app.statistics.intervall'), self.statisticsWorker)
Logging.log('AEStatistics AE registered')
def shutdown(self):
super().shutdown()
Logging.log('AEStatistics AE shut down')
#########################################################################
#
# Update statistics in a worker thread
#
def statisticsWorker(self):
Logging.logDebug('Updating statistics')
# Update statistics
stats = CSE.statistics.getStats()
self.updateResource(srn=self.fcsrn, jsn={ self.fcntType : stats })
return True | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/apps/AEStatistics.py | AEStatistics.py |
from AppBase import AppBase
from Configuration import Configuration
from Constants import Constants as C
import CSE, Utils
from resources import BAT
# TODO support further specializations
class NodeBase(AppBase):
def __init__(self, rn, nodeID, originator):
super().__init__(rn, originator)
self.batRn = self.srn + '/battery'
self.memRn = self.srn + '/memory'
self.dviRn = self.srn + '/deviceinfo'
self.node = None
self.battery = None
self.memory = None
self.deviceInfo = None
# First check whether node exists and create it if necessary
self.node = self.retrieveCreate(srn=self.srn,
jsn={ C.tsNOD : {
'rn' : self.rn,
'ni' : nodeID
}
},
ty=C.tNOD)
def shutdown(self):
super().shutdown()
#########################################################################
#
# MgmtObj: Battery
#
def createBattery(self):
self.battery = self.retrieveCreate( srn=self.batRn,
jsn={ 'm2m:bat' : {
'mgd' : C.mgdBAT,
'dc' : 'battery',
'rn' : 'battery',
'btl': 0,
'bts': BAT.btsUNKNOWN
}
}
)
def updateBattery(self):
if self.battery is not None:
(n, rc) = self.updateResource(ri=self.battery.ri, jsn=self.battery.asJSON(update=True, noACP=True))
#########################################################################
#
# MgmtObj: Memory
#
def createMemory(self):
self.memory = self.retrieveCreate( srn=self.memRn,
jsn={ 'm2m:mem' : {
'mgd' : C.mgdMEM,
'dc' : 'memory',
'rn' : 'memory',
'mma': 0,
'mmt': 0
}
}
)
def updateMemory(self):
if self.memory is not None:
(n, rc) = self.updateResource(ri=self.memory.ri, jsn=self.memory.asJSON(update=True, noACP=True))
#########################################################################
#
# MgmtObj: DeviceInfo
#
def createDeviceInfo(self):
self.deviceInfo = self.retrieveCreate( srn=self.dviRn,
jsn={ 'm2m:dvi' : {
'mgd' : C.mgdDVI,
'dc' : 'deviceInfo',
'rn' : 'deviceinfo',
'dlb': [],
'dvnm': '',
'osv': '',
'syst': Utils.getResourceDate()
}
}
)
def updateDeviceInfo(self):
if self.memory is not None:
(n, rc) = self.updateResource(ri=self.deviceInfo.ri, jsn=self.deviceInfo.asJSON(update=True, noACP=True)) | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/apps/NodeBase.py | NodeBase.py |
from Configuration import Configuration
from Logging import Logging
from Constants import Constants as C
import CSE, Utils
from helpers import BackgroundWorker
import json, os
class AppBase(object):
def __init__(self, rn, originator):
self.rn = rn
self.originator = originator
self.cseri = Configuration.get('cse.ri')
self.csern = Configuration.get('cse.rn')
self.srn = self.csern + '/' + self.rn
self.url = Configuration.get('http.address') + Configuration.get('http.root')
self.worker = None
def shutdown(self):
self.stopWorker()
#########################################################################
#
# Requests
#
def retrieveResource(self, ri=None, srn=None):
return CSE.httpServer.sendRetrieveRequest(self._id(ri, srn), self.originator)
def createResource(self, ri=None, srn=None, ty=None, jsn=None):
return CSE.httpServer.sendCreateRequest(self._id(ri, srn), self.originator, ty, json.dumps(jsn))
def updateResource(self, ri=None, srn=None, jsn=None):
return CSE.httpServer.sendUpdateRequest(self._id(ri, srn), self.originator, json.dumps(jsn))
def deleteResource(self, ri=None, srn=None):
return CSE.httpServer.sendDeleteRequest(self._id(ri, srn), self.originator)
def _id(self, ri, srn):
if ri is not None:
return self.url + self.cseri + '/' + ri
elif srn is not None:
return self.url + srn
return None
def retrieveCreate(self, srn=None, jsn=None, ty=C.tMGMTOBJ):
# First check whether the resource exists and create it if necessary
if (result := self.retrieveResource(srn=srn))[1] != C.rcOK:
# No, so create mgmtObj specialization
srn = os.path.split(srn)[0] if srn.count('/') > 0 else ''
(n, rc) = self.createResource(srn=srn, ty=ty, jsn=jsn)
if n is not None:
return Utils.resourceFromJSON(n)
else: # just retrieve
return Utils.resourceFromJSON(result[0])
return None
#########################################################################
def startWorker(self, updateInterval, worker):
self.stopWorker()
self.worker = BackgroundWorker.BackgroundWorker(updateInterval, worker)
self.worker.start()
def stopWorker(self):
if self.worker is not None:
self.worker.stop()
self.worker = None | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/apps/AppBase.py | AppBase.py |
from NodeBase import *
from Logging import Logging
from Configuration import Configuration
from resources import BAT
import psutil, socket, platform, re, uuid
class CSENode(NodeBase):
def __init__(self):
super().__init__(rn=Configuration.get('app.csenode.nodeRN'),
nodeID=Configuration.get('app.csenode.nodeID'),
originator=Configuration.get('app.csenode.originator'))
if self.node is None:
Logging.logErr('CSENode: no node')
return
self.lastBTL = -1
self.lastMMA = -1
self.batteryLowLevel = 20
self.batteryChargedLevel = 100
self.updateCSEBase()
self.createBattery()
self.createMemory()
self.createDeviceInfo()
# Add a thread to read and update the content from time to time
self.startWorker(Configuration.get('app.csenode.intervall'), self.nodeWorker)
Logging.log('CSENode registered')
def shutdown(self):
super().shutdown()
Logging.log('CSENode shut down')
# Set this node as the hosting node for the CSE Base
def updateCSEBase(self):
if (result := self.retrieveResource(ri=self.cseri))[1] != C.rcOK:
Logging.logErr('CSENode: cannot retrieve CSEBase')
return
jsn = { 'm2m:cb' : {
'nl' : self.node.ri
}
}
(n, rc) = self.updateResource(ri=self.cseri, jsn=jsn)
#########################################################################
#
# Node capabilities monitoring handling
#
def nodeWorker(self):
Logging.logDebug('Updating node data')
try:
self._checkBattery()
self._checkMemory()
self._checkDeviceInfo()
except Exception as e:
Logging.logErr('Exception: %s' % e)
return False
return True
#########################################################################
#
# Update Management Objects of the node
#
def _checkBattery(self):
if self.battery is not None:
if (sensorBat := psutil.sensors_battery()) is not None:
(percent, _, plugged) = sensorBat
if percent == self.lastBTL:
return
self.lastBTL = percent
self.battery['btl'] = percent
self.battery['bts'] = BAT.btsNORMAL
if percent <= self.batteryLowLevel:
self.battery['bts'] = BAT.btsLOW_BATTERY
if plugged is not None and plugged:
self.battery['bts'] = BAT.btsCHARGING_COMPLETE if percent >= self.batteryChargedLevel else BAT.btsCHARGING
else:
self.battery['bts'] = BAT.btsNOT_INSTALLED
self.updateBattery()
def _checkMemory(self):
if self.memory is not None:
mmt = psutil.virtual_memory().total
mma = psutil.virtual_memory().available
if mma != self.lastMMA:
self.lastMMA = mma
self.memory['mmt'] = mmt
self.memory['mma'] = mma
self.updateMemory()
def _checkDeviceInfo(self):
if self.deviceInfo is not None:
self.deviceInfo['dvnm'] = socket.gethostname()
self.deviceInfo['osv'] = '%s %s %s' % (platform.system(), platform.release(), platform.machine())
self.deviceInfo['syst'] = Utils.getResourceDate()
self.deviceInfo['dlb'] = [ '%s:%s' % ('IP', socket.gethostbyname(socket.gethostname())),
'%s:%s' % ('MAC', ':'.join(re.findall('..', '%012x' % uuid.getnode())))
]
self.updateDeviceInfo() | ACME-oneM2M-CSE | /ACME%20oneM2M%20CSE-0.3.0.tar.gz/ACME oneM2M CSE-0.3.0/apps/CSENode.py | CSENode.py |
| Metrics | Master | Develop |
|:-------------:|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------:|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------:|
| CI status | [](https://gitlab.com/redmic-project/device/oag-buoy/acmplus/commits/master) | [](https://gitlab.com/redmic-project/device/oag-buoy/acmplus/commits/dev) |
| Test coverage | [](https://gitlab.com/redmic-project/device/oag-buoy/acmplus/commits/master) | [](https://gitlab.com/redmic-project/device/oag-buoy/acmplus/commits/dev) |
# Falmouth Scientific, Inc. ACM-Plus

* Reads data from the ACM-Plus current meter made by Falmouth Scientific, Inc.
* Stores the data in a PostgreSQL database
* Publishes the data using the MQTT protocol (see the example below)
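Any MQTT client can consume the published measurements. As an illustrative
sketch with the paho-mqtt 1.x client (the broker, topic, and credentials are
the sample values from the configuration section below):
```python
import paho.mqtt.client as mqtt  # pip install "paho-mqtt<2"

def on_message(client, userdata, message):
    print(message.topic, message.payload.decode())

client = mqtt.Client()
client.username_pw_set("username", "changeme")
client.on_message = on_message
client.connect("iot.eclipse.org")
client.subscribe("topic_name")
client.loop_forever()
```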
## Device detection
FTDI converters are used to connect the current meter to the host, which makes
it easy to identify the device with UDEV rules. There are two rules because
there are two connectors: one for testing/replacement and the one in operation.
```
ACTION=="add", SUBSYSTEM=="tty", ATTRS{idProduct}=="6001", ATTRS{idVendor}=="0403", ATTRS{serial}=="FT0I1IP5", SYMLINK+="current_meter_acmplus"
ACTION=="add", SUBSYSTEM=="tty", ATTRS{idProduct}=="6001", ATTRS{idVendor}=="0403", ATTRS{serial}=="FT0I104U", SYMLINK+="current_meter_acmplus"
```
## Configuration
The service configuration must be provided in a YAML file; by default it must
live in /etc/buoy/acmplus and be named device.yaml.
```yaml
service:
path_pidfile: /var/run/buoy/
start_timeout: 1
database:
database: database
user: username
password: password
host: localhost
serial:
port: /dev/current_meter_acmplus
baudrate: 115200
stopbits: 1
parity: N
bytesize: 8
timeout: 0
mqtt:
broker_url: iot.eclipse.org
client_id: client_id
topic_data: topic_name
username: username
password: changeme
```
There must also be a logging.yaml file that defines the log level and where the
log output is written.
```yaml
version: 1
disable_existing_loggers: False
formatters:
simple:
format: '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
detail:
format: '%(asctime)s - %(levelname)s - File: %(filename)s - %(funcName)s() - Line: %(lineno)d - %(message)s'
handlers:
console:
class: logging.StreamHandler
level: INFO
formatter: simple
stream: ext://sys.stdout
root:
level: INFO
handlers: [console]
```
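As an illustrative sketch (the real service uses the helpers in
`buoy.base.utils.config`), both files can be loaded with PyYAML:
```python
import logging.config
import yaml  # pip install PyYAML

# Load the device configuration from its default location
with open('/etc/buoy/acmplus/device.yaml') as f:
    config = yaml.safe_load(f)

# Configure logging from the dictionary in logging.yaml
with open('/etc/buoy/acmplus/logging.yaml') as f:
    logging.config.dictConfig(yaml.safe_load(f))

print(config['serial']['port'])  # /dev/current_meter_acmplus
```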
## Service startup
Using the UDEV rules, the presence of the device can be detected and the service
that collects, stores, and publishes the data can then be started automatically.
To do so, create a systemd unit that is started by the presence of the device.
```ini
[Unit]
Description=Current Meter - ACMPlus
After=multi-user.target dev-current_meter_acmplus.device
BindsTo=dev-current_meter_acmplus.device
[Service]
Type=idle
ExecStart=/usr/local/bin/current-meter-acmplus
PIDFile=/var/run/buoy/acmplus.pid
ExecStop=/usr/bin/pkill -15 /var/run/buoy/acmplus.pid
TimeoutStopSec=5
Restart=on-failure
[Install]
WantedBy=dev-current_meter_acmplus.device
```
Copy the file to /etc/systemd/system/, then reload systemd and enable the service.
```
sudo cp current-meter-acmplus.service /etc/systemd/system/
sudo systemctl daemon-reload
sudo systemctl enable current-meter-acmplus.service
```
Finally, start the service.
```
sudo systemctl start current-meter-acmplus.service
``` | ACMPlus | /ACMPlus-0.0.3.tar.gz/ACMPlus-0.0.3/README.md | README.md |
import math
from buoy.base.data.item import BaseItem
class ACMPlusItem(BaseItem):
def __init__(self, **kwargs):
self.vx = kwargs.pop('vx', None)
self.vy = kwargs.pop('vy', None)
self.speed = kwargs.pop('speed', None)
self.direction = kwargs.pop('direction', None)
self.water_temp = kwargs.pop('water_temp', None)
super(ACMPlusItem, self).__init__(**kwargs)
@property
def vx(self):
"""
:return: The X component of the current velocity in cm/sec relative to the direction indicator arrow on the
velocity head of instrument
:rtype: Decimal
"""
return self._vx
@vx.setter
def vx(self, value):
self._vx = self._convert_string_to_decimal(value)
@property
def vy(self):
"""
:return: The Y component of the current velocity in cm/sec relative to the direction indicator arrow on the
velocity head of instrument
:rtype: Decimal
"""
return self._vy
@vy.setter
def vy(self, value):
self._vy = self._convert_string_to_decimal(value)
@property
def speed(self):
if not self._speed and self.is_fulled:
self.speed = math.sqrt(math.pow(self._vx, 2) + math.pow(self._vy, 2))
return self._speed
@speed.setter
def speed(self, value):
self._speed = self._convert_string_to_decimal(value)
@property
def direction(self):
if not self._direction and self.is_fulled:
dir_current = math.degrees(math.atan2(self.vy, self.vx))
if (self.vy >= 0) and (self.vx >= 0): # Cuadrante entre 0º y 90º
dir_current = 90 - dir_current
elif (self.vy <= 0) and (self.vx >= 0): # Cuadrante entre 90º y 180º
dir_current = math.fabs(dir_current) + 90
elif (self.vy <= 0) and (self.vx <= 0): # Cuadrante entre 180º y 270º
dir_current = math.fabs(dir_current) + 90
elif (self.vy >= 0) and (self.vx <= 0): # Cuadrante entre 270º y 360º
dir_current = 360 - (dir_current - 90)
self.direction = dir_current
return self._direction
@direction.setter
def direction(self, value):
self._direction = self._convert_string_to_decimal(value)
@property
def water_temp(self):
"""
:return: The water temperature in °C
:rtype: Decimal
"""
return self._water_temp
@water_temp.setter
def water_temp(self, value):
self._water_temp = self._convert_string_to_decimal(value)
@property
def is_fulled(self):
# Both velocity components must be present (zero is a valid reading);
# as a property, the unparenthesized uses in speed/direction above work
return self._vx is not None and self._vy is not None
def __str__(self, pretty=False):
return ("Uuid: {uuid}\n"
"Date: {date}\n"
"Vx: {vx} cm/s\n"
"Vy: {vy} cm/s\n"
"Speed: {speed} cm/s\n"
"Direction: {direction} %\n"
"Water temperature: {water_temp} ºC\n").format(**dict(self)) | ACMPlus | /ACMPlus-0.0.3.tar.gz/ACMPlus-0.0.3/acmplus/item.py | item.py |
import logging.config
import re
from datetime import datetime, timezone
from buoy.base.device.device import Device
from buoy.base.device.threads.reader import DeviceReader
from buoy.base.service.daemon import Daemon
from buoy.base.utils.config import *
from buoy.base.database import DeviceDB
from acmplus.item import ACMPlusItem
from buoy.base.utils.argsparse import parse_args
logger = logging.getLogger(__name__)
DEVICE_NAME = "acmplus"
DAEMON_NAME = "acmplus"
class ACMPlusReader(DeviceReader):
def __init__(self, **kwargs):
super(ACMPlusReader, self).__init__(**kwargs)
# Raw strings avoid invalid escape sequences; \. matches the decimal point
self.pattern = (r"\s*(?P<vy>-?\d{1,}\.\d{1,}),\s{1,}(?P<vx>-?\d{1,}\.\d{1,}),\s{1,}(?P<time>\d{2}:\d{2}:\d{2})"
r",\s{1,}(?P<date>\d{2}-\d{2}-\d{4}),\s{1,}(?P<waterTemperature>-?\d{1,}\.\d{1,}).*")
def parser(self, data):
result = re.match(self.pattern, data)
if result:
measurement = ACMPlusItem(
date=datetime.now(tz=timezone.utc),
vx=result.group("vx"),
vy=result.group("vy"),
water_temp=result.group("waterTemperature")
)
return measurement
class ACMPlus(Device):
def __init__(self, *args, **kwargs):
device_name = kwargs.pop('device_name', 'ACMPlus')
super(ACMPlus, self).__init__(device_name=device_name, cls_reader=ACMPlusReader, *args, **kwargs)
class ACMPlusDaemon(ACMPlus, Daemon):
def __init__(self, name, **kwargs):
db_conf = kwargs.pop('database')
service_conf = kwargs.pop('service')
db = DeviceDB(db_config=db_conf, db_tablename=name, cls_item=ACMPlusItem)
Daemon.__init__(self, daemon_name=DAEMON_NAME, daemon_config=service_conf)
ACMPlus.__init__(self, db=db, **kwargs)
def before_stop(self):
self.disconnect()
def run(config_buoy: str, config_log_file: str):
logging.config.dictConfig(load_config_logger(path_config=config_log_file))
buoy_config = load_config(path_config=config_buoy)
daemon = ACMPlusDaemon(name=DEVICE_NAME, **buoy_config)
daemon.start()
def main():
args = parse_args(path_config='/etc/buoy/acmplus')
run(config_buoy=args.config_file, config_log_file=args.config_log_file)
if __name__ == "__main__":
main() | ACMPlus | /ACMPlus-0.0.3.tar.gz/ACMPlus-0.0.3/acmplus/acmplus.py | acmplus.py |
import logging
import os
import inspect
from html import escape
class ACNLogger:
def check(self, message):
# Outside of DEV, sanitize the message: strip relative-path sequences
# and HTML-escape it to mitigate log injection
if self.ENV == "DEV":
return message
else:
newstr = message.replace("./", "")
newstr = newstr.replace("..", "")
return escape(newstr)
def debug(self, message):
self.logger.debug("["+os.path.basename(inspect.stack()[1].filename)+"] ["+self.ENV+"] ["+self.oid+"] ["+self.session+"] ["+self.correlationId+"] " + self.check(message))
def info(self, message):
self.logger.info("["+os.path.basename(inspect.stack()[1].filename)+"] ["+self.ENV+"] ["+self.oid+"] ["+self.session+"] ["+self.correlationId+"] " + self.check(message))
def warning(self, message):
self.logger.warning("["+os.path.basename(inspect.stack()[1].filename)+"] ["+self.ENV+"] ["+self.oid+"] ["+self.session+"] ["+self.correlationId+"] " + self.check(message))
def error(self, e):
self.logger.error("["+os.path.basename(inspect.stack()[1].filename)+"] ["+self.ENV+"] ["+self.oid+"] ["+self.session+"] ["+self.correlationId+"] " + self.check(str(e)))
def critical(self, e):
self.logger.critical("["+os.path.basename(inspect.stack()[1].filename)+"] ["+self.ENV+"] ["+self.oid+"] ["+self.session+"] ["+self.correlationId+"] " + self.check(str(e)))
def exception(self, e):
if self.ENV != "PRO":
self.logger.exception("["+os.path.basename(inspect.stack()[1].filename)+"] ["+self.ENV+"] ["+self.oid+"] ["+self.session+"] ["+self.correlationId+"] " + self.check(str(e)))
else:
self.error(e)
def setSession(self, session):
self.session = session
def setCorrelationId(self, correlationId):
self.correlationId = correlationId
def setOId(self, oid):
self.oid = oid
def __init__(self,name,file=None,console_level="debug",logfile_level="debug"):
#file = file or name+".log"
_logLevelMap = {
"debug": logging.DEBUG,
"info": logging.INFO,
"warning": logging.WARNING,
"error": logging.ERROR,
"critical":logging.CRITICAL
}
acn_logger=logging.getLogger(name) # Creating the new logger
acn_logger.setLevel(logging.DEBUG) # Setting new logger level to INFO or above
acn_logger.propagate = False
console_handler=logging.StreamHandler()
console_handler.setLevel(_logLevelMap[console_level])
#file_handler=logging.FileHandler(file)
#file_handler.setLevel(_logLevelMap[logfile_level])
#acn_logger.addHandler(file_handler) #Adding file handler to the new logger
acn_logger.addHandler(console_handler)
formatter=logging.Formatter('[%(asctime)s] [%(levelname)s] [%(process)d] %(message)s') #Creating a formatter
#file_handler.setFormatter(formatter) #Setting handler format
console_handler.setFormatter(formatter)
self.session = "UNDEFINED"
self.correlationId = "UNDEFINED"
self.oid = "UNDEFINED"
self.logger=acn_logger
try:
self.ENV = os.environ["ENV"]
except KeyError:
self.ENV = "ENV NOT SET"
self.warning("Environment variable ENV not set")
self.info("STARTING MICROSERVICE") | ACNLogger2 | /ACNLogger2-1.1.0-py3-none-any.whl/ACNLogger/__init__.py | __init__.py |
import logging
import time
import re
from logging.handlers import TimedRotatingFileHandler
import uuid
import os
class ACNLogger:
# Each logging method builds the log line and tries to emit it as UTF-8
# bytes, falling back to the plain string and then to a decoded string
# (Python 2/3 compatibility)
def debug(self, session, message):
try:
self.logger.debug(("["+session+"] ["+self._service_name+"] [DEBUG] "+message).encode('utf8'))
except:
try:
self.logger.debug("["+session+"] ["+self._service_name+"] [DEBUG] "+message)
except:
self.logger.debug(("["+session+"] ["+self._service_name+"] [DEBUG] "+message).decode('utf8'))
def info(self, session, message):
try:
self.logger.info(("["+session+"] ["+self._service_name+"] [INFO] "+message).encode('utf8'))
except:
try:
self.logger.info("["+session+"] ["+self._service_name+"] [INFO] "+message)
except:
self.logger.info(("["+session+"] ["+self._service_name+"] [INFO] "+message).decode('utf8'))
def warning(self, session, message):
try:
self.logger.warning(("["+session+"] ["+self._service_name+"] [WARNING] "+message).encode('utf8'))
except:
try:
self.logger.warning("["+session+"] ["+self._service_name+"] [WARNING] "+message)
except:
self.logger.warning(("["+session+"] ["+self._service_name+"] [WARNING] "+message).decode('utf8'))
def error(self, session, e):
try:
self.logger.error(("["+session+"] ["+self._service_name+"] [ERROR] "+str(e.__class__.__name__)+" "+str(e)).encode('utf8'))
except:
try:
self.logger.error("["+session+"] ["+self._service_name+"] [ERROR] "+str(e.__class__.__name__)+" "+str(e))
except:
self.logger.error(("["+session+"] ["+self._service_name+"] [ERROR] "+str(e.__class__.__name__)+" "+str(e)).decode('utf8'))
def critical(self, session, e):
try:
self.logger.critical(("["+session+"] ["+self._service_name+"] [CRITICAL] "+str(e.__class__.__name__)+" "+str(e)).encode('utf8'))
except:
try:
self.logger.critical("["+session+"] ["+self._service_name+"] [CRITICAL] "+str(e.__class__.__name__)+" "+str(e))
except:
self.logger.critical(("["+session+"] ["+self._service_name+"] [CRITICAL] "+str(e.__class__.__name__)+" "+str(e)).decode('utf8'))
def exception(self, session, e):
try:
self.logger.error(("["+session+"] ["+self._service_name+"] [ERROR] "+str(e.__class__.__name__)+" "+str(e)).encode('utf8'))
except:
try:
self.logger.error("["+session+"] ["+self._service_name+"] [ERROR] "+str(e.__class__.__name__)+" "+str(e))
except:
self.logger.error(("["+session+"] ["+self._service_name+"] [ERROR] "+str(e.__class__.__name__)+" "+str(e)).decode('utf8'))
def __init__(self,name,file,console_level="debug",logfile_level="debug"):
# Compute the local UTC offset (e.g. "+0100") to append to each timestamp
offset_s = time.timezone if (time.localtime().tm_isdst == 0) else time.altzone
offset = int(offset_s / 60 / 60 * -1)
timezone = "{:+03d}00".format(offset)
acn_logger=logging.getLogger(name) # Creating the new logger
acn_logger.setLevel(logging.DEBUG) # Setting new logger level to INFO or above
acn_logger.propagate=False
console_handler=logging.StreamHandler()
if console_level == "info":
console_handler.setLevel(logging.INFO)
else:
console_handler.setLevel(logging.DEBUG)
file_handler=TimedRotatingFileHandler(file, when="midnight", interval=1)
file_handler.suffix = "%Y%m%d"
file_handler.extMatch = re.compile(r"^\d{8}$")
if logfile_level == "info":
file_handler.setLevel(logging.INFO)
else:
file_handler.setLevel(logging.DEBUG)
acn_logger.addHandler(file_handler) #Adding file handler to the new logger
acn_logger.addHandler(console_handler)
formatter=logging.Formatter('[%(asctime)s'+timezone+'] %(message)s') #Creating a formatter
file_handler.setFormatter(formatter) #Setting handler format
console_handler.setFormatter(formatter)
self.logger=acn_logger
self._service_name=name
self.info("UNDEFINED","STARTING MICROSERVICE")
def get_unique_logger(name, path):
unique_id = str(uuid.uuid4())
logger_name = name + "_" + unique_id
logger_path = os.path.join(path, logger_name + ".log")
return ACNLogger(name=logger_name, file=logger_path) | ACNLogger3 | /ACNLogger3-1.1.0.tar.gz/ACNLogger3-1.1.0/ACNLogger/__init__.py | __init__.py |
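# Usage sketch (illustrative only; the path is an assumption):
#
# logger = get_unique_logger("acn", "/var/log/acn")
# logger.info("session-id", "service started")
#
# get_unique_logger() appends a random UUID to both the logger name and the
# log file name, so concurrent processes never share a handler or a file.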
=====
Pants
=====
A Python3 implementation of the Ant Colony Optimization Meta-Heuristic
--------
Overview
--------
**Pants** provides you with the ability to quickly determine how to
visit a collection of interconnected nodes such that the work done is
minimized. Nodes can be any arbitrary collection of data while the edges
represent the amount of "work" required to travel between two nodes.
Thus, **Pants** is a tool for solving traveling salesman problems.
The world is built from a list of nodes and a function responsible for
returning the length of the edge between any two given nodes. The length
function need not return actual length. Instead, "length" refers to the
amount of "work" involved in moving from the first node to the second
node - whatever that "work" may be. For a silly, random example, it could
even be the number of dishes one must wash before moving to the next
station in a competition to wash the fewest dishes.
Solutions are found through an iterative process. In each iteration,
several ants are allowed to find a solution that "visits" every node of
the world. The amount of pheromone on each edge is updated according to
the length of the solutions in which it was used. The ant that traveled the
least distance is considered to be the local best solution. If the local
solution has a shorter distance than the best from any previous
iteration, it then becomes the global best solution. The elite ant(s)
then deposit their pheromone along the path of the global best solution
to strengthen it further, and the process repeats.
You can read more about `Ant Colony Optimization on
Wikipedia <http://en.wikipedia.org/wiki/Ant_colony_optimization_algorithms>`_.
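For reference, the per-edge pheromone update performed at the end of each
iteration looks roughly like the sketch below (``rho``, ``q``, and ``t0`` are
the solver parameters described later; ``edge`` and ``solution_distance`` are
illustrative names):

.. code-block:: python

    # evaporate, then deposit pheromone proportional to solution quality
    p = q / solution_distance
    edge.pheromone = max(t0, (1 - rho) * edge.pheromone + p)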
------------
Installation
------------
Installation via ``pip``
.. code-block:: console
$ pip3 install ACO-Pants
-----
Usage
-----
Using **Pants** is simple. The example here uses Euclidean distance
between 2D nodes with ``(x, y)`` coordinates, but there are no real
requirements for node data of any sort.
1) Import **Pants** (along with any other packages you'll need).
.. code-block:: python
import pants
import math
import random
2) Create your data points; these become the nodes. Here we create some
random 2D points. The only requirement for a node is that it is
distinguishable from all of the other nodes.
.. code-block:: python
nodes = []
for _ in range(20):
x = random.uniform(-10, 10)
y = random.uniform(-10, 10)
nodes.append((x, y))
3) Define your length function. This function must accept two nodes and
return the amount of "work" between them. In this case, Euclidean
distance works well.
.. code-block:: python
def euclidean(a, b):
return math.sqrt(pow(a[1] - b[1], 2) + pow(a[0] - b[0], 2))
4) Create the ``World`` from the nodes and the length function.
.. code-block:: python
world = pants.World(nodes, euclidean)
5) Create the ``Solver``.
.. code-block:: python
solver = pants.Solver()
6) Solve the ``World`` with the ``Solver``. Two methods are provided for
finding solutions: ``solve()`` and ``solutions()``. The former
returns the best solution found, whereas the latter returns each
solution found if it is the best thus far.
.. code-block:: python
solution = solver.solve(world)
# or
solutions = solver.solutions(world)
7) Inspect the solution(s).
.. code-block:: python
print(solution.distance)
print(solution.tour) # Nodes visited in order
print(solution.path) # Edges taken in order
# or
best = float("inf")
for solution in solutions:
assert solution.distance < best
best = solution.distance
Run the Demo
------------
Included is a 33 "city" demo script that can be run from the command line.
.. code-block:: console
user@host:~$ pants-demo -h
usage: pants-demo [-h] [-V] [-a A] [-b B] [-l L] [-p P] [-e E] [-q Q] [-t T]
[-c N] [-d D]
Script that demos the ACO-Pants package.
optional arguments:
-h, --help show this help message and exit
-V, --version show program's version number and exit
-a A, --alpha A relative importance placed on pheromones; default=1
-b B, --beta B relative importance placed on distances; default=3
-l L, --limit L number of iterations to perform; default=100
-p P, --rho P ratio of evaporated pheromone (0 <= P <= 1); default=0.8
-e E, --elite E ratio of elite ant's pheromone; default=0.5
-q Q, --Q Q total pheromone capacity of each ant (Q > 0); default=1
-t T, --t0 T initial amount of pheromone on every edge (T > 0);
default=0.01
-c N, --count N number of ants used in each iteration (N > 0); default=10
-d D, --dataset D specify a particular set of demo data; default=33
For best results:
* 0.5 <= A <= 1
* 1.0 <= B <= 5
* A < B
* L >= 2000
* N > 1
For more information, please visit https://github.com/rhgrant10/Pants.
user@host:~$ pants-demo
Solver settings:
limit=100
rho=0.8, Q=1
alpha=1, beta=3
elite=0.5
Time Elapsed Distance
--------------------------------------------------
0:00:00.017490 0.7981182992833705
0:00:00.034784 0.738147755518648
0:00:00.069041 0.694362159048816
0:00:00.276027 0.6818083968312925
0:00:00.379039 0.6669398280432167
0:00:00.465924 0.6463548571712562
0:00:00.585685 0.6416519698864324
0:00:01.563389 0.6349308484274142
--------------------------------------------------
Best solution:
0 = (34.02115, -84.267249)
9 = (34.048194, -84.262126)
6 = (34.044915, -84.255772)
22 = (34.061518, -84.243566)
23 = (34.062461, -84.240155)
18 = (34.060461, -84.237402)
17 = (34.060164, -84.242514)
12 = (34.04951, -84.226327)
11 = (34.048679, -84.224917)
8 = (34.046006, -84.225258)
7 = (34.045483, -84.221723)
13 = (34.051529, -84.218865)
14 = (34.055487, -84.217882)
16 = (34.059412, -84.216757)
25 = (34.066471, -84.217717)
24 = (34.064489, -84.22506)
20 = (34.063814, -84.225499)
10 = (34.048312, -84.208885)
15 = (34.056326, -84.20058)
5 = (34.024302, -84.16382)
32 = (34.118162, -84.163304)
31 = (34.116852, -84.163971)
30 = (34.109645, -84.177031)
29 = (34.10584, -84.21667)
28 = (34.071628, -84.265784)
27 = (34.068647, -84.283569)
26 = (34.068455, -84.283782)
19 = (34.061281, -84.334798)
21 = (34.061468, -84.33483)
2 = (34.022585, -84.36215)
3 = (34.022718, -84.361903)
4 = (34.023101, -84.36298)
1 = (34.021342, -84.363437)
Solution length: 0.6349308484274142
Found at 0:00:01.563389 out of 0:00:01.698616 seconds.
user@host:~$
Known Bugs
----------
None of which I am currently aware. Please let me know if you find
otherwise.
Troubleshooting
---------------
Credits
-------
- Robert Grant [email protected]
License
-------
GPL
| ACO-Pants | /ACO-Pants-0.5.2.tar.gz/ACO-Pants-0.5.2/README.rst | README.rst |
import random
from copy import copy
from .world import World
from .ant import Ant
class Solver:
"""This class contains the functionality for finding one or more solutions
for a given :class:`World`.
:param float alpha: relative importance of pheromone (default=1)
:param float beta: relative importance of distance (default=3)
:param float rho: percent evaporation of pheromone (0..1, default=0.8)
:param float q: total pheromone deposited by each :class:`Ant` after
each iteration is complete (>0, default=1)
:param float t0: initial pheromone level along each :class:`Edge` of the
:class:`World` (>0, default=0.01)
:param int limit: number of iterations to perform (default=100)
:param float ant_count: how many :class:`Ant`\s will be used
(default=10)
:param float elite: multiplier of the pheromone deposited by the elite
:class:`Ant` (default=0.5)
"""
def __init__(self, **kwargs):
self.alpha = kwargs.get('alpha', 1)
self.beta = kwargs.get('beta', 3)
self.rho = kwargs.get('rho', 0.8)
self.q = kwargs.get('Q', 1)
self.t0 = kwargs.get('t0', .01)
self.limit = kwargs.get('limit', 100)
self.ant_count = kwargs.get('ant_count', 10)
self.elite = kwargs.get('elite', .5)
def create_colony(self, world):
"""Create a set of :class:`Ant`\s and initialize them to the given
*world*.
If the *ant_count* is less than `1`, :func:`round_robin_ants` are
used and the number of :class:`Ant`\s will be equal to the number of
nodes. Otherwise, :func:`random_ants` are created instead, and the
number of :class:`Ant`\s will be equal to the *ant_count*.
:param World world: the world from which the :class:`Ant`\s will be
given starting nodes.
:return: list of :class:`Ant`\s
:rtype: list
"""
if self.ant_count < 1:
return self.round_robin_ants(world, len(world.nodes))
return self.random_ants(world, self.ant_count)
def reset_colony(self, colony):
"""Reset the *colony* of :class:`Ant`\s such that each :class:`Ant` is
ready to find a new solution.
Essentially, this method re-initializes all :class:`Ant`\s in the
colony to the :class:`World` that they were initialized to last.
Internally, this method is called after each iteration of the
:class:`Solver`.
:param list colony: the :class:`Ant`\s to reset
"""
for ant in colony:
ant.initialize(ant.world)
def aco(self, colony):
"""Return the best solution by performing the ACO meta-heuristic.
This method lets every :class:`Ant` in the colony find a solution,
updates the pheromone levels according to the solutions found, and
returns the `Ant` with the best solution.
This method is not meant to be called directly. Instead, call either
:func:`solve` or :func:`solutions`.
:param list colony: the `Ant`\s to use in finding a solution
:return: the best solution found
:rtype: :class:`Ant`
"""
self.find_solutions(colony)
self.global_update(colony)
return sorted(colony)[0]
def solve(self, world):
"""Return the single shortest path found through the given *world*.
:param World world: the :class:`World` to solve
:return: the single best solution found
:rtype: :class:`Ant`
"""
world.reset_pheromone(self.t0)
global_best = None
colony = self.create_colony(world)
for i in range(self.limit):
self.reset_colony(colony)
local_best = self.aco(colony)
if global_best is None or local_best < global_best:
global_best = copy(local_best)
self.trace_elite(global_best)
return global_best
def solutions(self, world):
"""Return successively shorter paths through the given *world*.
Unlike :func:`solve`, this method returns one solution for each
improvement of the best solution found thus far.
:param World world: the :class:`World` to solve
:return: successively shorter solutions as :class:`Ant`\s
:rtype: list
"""
world.reset_pheromone(self.t0)
global_best = None
colony = self.create_colony(world)
for i in range(self.limit):
self.reset_colony(colony)
local_best = self.aco(colony)
if global_best is None or local_best < global_best:
global_best = copy(local_best)
yield global_best
self.trace_elite(global_best)
def round_robin_ants(self, world, count):
"""Returns a list of :class:`Ant`\s distributed to the nodes of the
world in a round-robin fashion.
Note that this does not ensure at least one :class:`Ant` begins at each
node unless there are exactly as many :class:`Ant`\s as there are
nodes. However, if *ant_count* is ``0`` then *ant_count* is set to the
number of nodes in the :class:`World` and this method is used to create
the :class:`Ant`\s before solving.
:param World world: the :class:`World` in which to create the
:class:`Ant`\s
:param int count: the number of :class:`Ant`\s to create
:return: the :class:`Ant`\s initialized to nodes in the :class:`World`
:rtype: list
"""
starts = world.nodes
n = len(starts)
return [
Ant(self.alpha, self.beta).initialize(
world, start=starts[i % n])
for i in range(count)
]
def random_ants(self, world, count, even=False):
"""Returns a list of :class:`Ant`\s distributed to the nodes of the
world in a random fashion.
Note that this does not ensure at least one :class:`Ant` begins at each
node unless there are exactly as many :class:`Ant`\s as there are
nodes. This method is used to create the :class:`Ant`\s before solving
if *ant_count* is **not** ``0``.
:param World world: the :class:`World` in which to create the ants.
:param int count: the number of :class:`Ant`\s to create
:param bool even: ``True`` if :func:`random.random` should avoid
choosing the same starting node multiple times
(default is ``False``)
:return: the :class:`Ant`\s initialized to nodes in the :class:`World`
:rtype: list
"""
ants = []
starts = world.nodes
n = len(starts)
if even:
# Since the caller wants an even distribution, use a round-robin
# method until the number of ants left to create is less than the
# number of nodes.
if count > n:
for i in range(count // n):
ants.extend([
Ant(self.alpha, self.beta).initialize(
world, start=starts[j])
for j in range(n)
])
# Now (without choosing the same node twice) choose the remaining
# starts randomly.
ants.extend([
Ant(self.alpha, self.beta).initialize(
world, start=starts.pop(random.randrange(n - i)))
for i in range(count % n)
])
else:
# Just pick random nodes.
ants.extend([
Ant(self.alpha, self.beta).initialize(
world, start=starts[random.randrange(n)])
for i in range(count)
])
return ants
def find_solutions(self, ants):
"""Let each :class:`Ant` find a solution.
Makes each :class:`Ant` move until each can no longer move.
.. todo::
Make the local pheromone update optional and configurable.
:param list ants: the ants to use for solving
"""
# This loop occurs exactly as many times as there are ants times nodes,
# but that is only because every ant must visit every node. It may be
# more efficient to convert it to a counting loop... but what
# flexibility would we lose in terms of extending the solver class?
ants_done = 0
while ants_done < len(ants):
ants_done = 0
for ant in ants:
if ant.can_move():
edge = ant.move()
self.local_update(edge)
else:
ants_done += 1
def local_update(self, edge):
"""Evaporate some of the pheromone on the given *edge*.
.. note::
This method should never let the pheromone on an edge decrease to
less than its initial level.
:param Edge edge: the :class:`Edge` to be updated
"""
edge.pheromone = max(self.t0, edge.pheromone * self.rho)
def global_update(self, ants):
"""Update the amount of pheromone on each edge according to the fitness
of solutions that use it.
This accomplishes the global update performed at the end of each
solving iteration.
.. note::
This method should never let the pheromone on an edge decrease to
less than its initial level.
:param list ants: the ants to use for solving
"""
ants = sorted(ants)[:len(ants) // 2]
for a in ants:
p = self.q / a.distance
for edge in a.path:
edge.pheromone = max(
self.t0,
(1 - self.rho) * edge.pheromone + p)
def trace_elite(self, ant):
"""Deposit pheromone along the path of a particular ant.
This method is used to deposit the pheromone of the elite :class:`Ant`
at the end of each iteration.
.. note::
This method should never let the pheromone on an edge decrease to
less than its initial level.
:param Ant ant: the elite :class:`Ant`
"""
if self.elite:
p = self.elite * self.q / ant.distance
for edge in ant.path:
edge.pheromone += p | ACO-Pants | /ACO-Pants-0.5.2.tar.gz/ACO-Pants-0.5.2/pants/solver.py | solver.py |
import json
class World:
"""The nodes and edges of a particular problem.
Each :class:`World` is created from a list of nodes, a length function, and
optionally, a name and a description. Additionally, each :class:`World` has
a UID. The length function must accept nodes as its first two parameters,
and is responsible for returning the distance between them. It is the
responsibility of the :func:`create_edges` to generate the required
:class:`Edge`\s and initialize them with the correct *length* as returned
by the length function.
Once created, :class:`World` objects convert the actual nodes into node
IDs, since solving does not rely on the actual data in the nodes. These are
accessible via the :attr:`nodes` property. To access the actual nodes,
simply pass an ID obtained from :attr:`nodes` to the :func:`data` method,
which will return the node associated with the specified ID.
:class:`Edge`\s are accessible in much the same way, except two node IDs
must be passed to the :func:`data` method to indicate which nodes start and
end the :class:`Edge`. For example:
.. code-block:: python
ids = world.nodes
assert len(ids) > 1
node0 = world.data(ids[0])
node1 = world.data(ids[1])
edge01 = world.data(ids[0], ids[1])
assert edge01.start == node0
assert edge01.end == node1
The :func:`reset_pheromone` method provides an easy way to reset the
pheromone levels of every :class:`Edge` contained in a :class:`World` to a
given *level*. It should be invoked before attempting to solve a
:class:`World` unless a "blank slate" is not desired. Also note that it
should *not* be called between iterations of the :class:`Solver` because it
effectively erases the memory of the :class:`Ant` colony solving it.
:param list nodes: a list of nodes
:param callable lfunc: a function that calculates the distance between
two nodes
:param str name: the name of the world (default is "world#", where
"#" is the ``uid`` of the world)
:param str description: a description of the world (default is None)
"""
uid = 0
def __init__(self, nodes, lfunc, **kwargs):
self.uid = self.__class__.uid
self.__class__.uid += 1
self.name = kwargs.get('name', 'world{}'.format(self.uid))
self.description = kwargs.get('description', None)
self._nodes = nodes
self.lfunc = lfunc
self.edges = self.create_edges()
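# Illustrative length function and construction (a sketch; assumes
# 2-tuple coordinate nodes, not part of the original source):
#
#   import math
#   def euclidean(a, b):
#       return math.sqrt((a[0] - b[0]) ** 2 + (a[1] - b[1]) ** 2)
#
#   world = World([(0, 0), (3, 4), (6, 0)], euclidean)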
@property
def nodes(self):
"""Node IDs."""
return list(range(len(self._nodes)))
def create_edges(self):
"""Create edges from the nodes.
The job of this method is to map node ID pairs to :class:`Edge`
instances that describe the edge between the nodes at the given
indices. Note that all of the :class:`Edge`\s are created within this
method.
:return: a mapping of node ID pairs to :class:`Edge` instances.
:rtype: :class:`dict`
"""
edges = {}
for m in self.nodes:
for n in self.nodes:
a, b = self.data(m), self.data(n)
if a != b:
edge = Edge(a, b, length=self.lfunc(a, b))
edges[m, n] = edge
return edges
def reset_pheromone(self, level=0.01):
"""Reset the amount of pheromone on every edge to some base *level*.
Each time a new set of solutions is to be found, the amount of
pheromone on every edge should be equalized to ensure un-biased initial
conditions.
:param float level: amount of pheromone to set on each edge
(default=0.01)
"""
for edge in self.edges.values():
edge.pheromone = level
def data(self, idx, idy=None):
"""Return the node data of a single id or the edge data of two ids.
If only *idx* is specified, return the node with the ID *idx*. If *idy*
is also specified, return the :class:`Edge` between nodes with indices
*idx* and *idy*.
:param int idx: the id of the first node
:param int idy: the id of the second node (default is None)
:return: the node with ID *idx* or the :class:`Edge` between nodes
with IDs *idx* and *idy*.
:rtype: node or :class:`Edge`
"""
try:
if idy is None:
return self._nodes[idx]
else:
return self.edges[idx, idy]
# node lookups raise IndexError; edge lookups raise KeyError
except (IndexError, KeyError):
return None
class Edge:
"""This class represents the link between starting and ending nodes.
In addition to *start* and *end* nodes, every :class:`Edge` has *length*
and *pheromone* properties. *length* represents the static, *a priori*
information, whereas *pheromone* level represents the dynamic, *a
posteriori* information.
:param node start: the node at the start of the :class:`Edge`
:param node end: the node at the end of the :class:`Edge`
:param float length: the length of the :class:`Edge` (default=1)
:param float pheromone: the amount of pheromone on the :class:`Edge`
(default=0.1)
"""
def __init__(self, start, end, length=None, pheromone=None):
self.start = start
self.end = end
self.length = 1 if length is None else length
self.pheromone = 0.1 if pheromone is None else pheromone
def __eq__(self, other):
if isinstance(other, self.__class__):
return self.__dict__ == other.__dict__
return False | ACO-Pants | /ACO-Pants-0.5.2.tar.gz/ACO-Pants-0.5.2/pants/world.py | world.py |
import sys
import random
import bisect
import itertools
import functools
from .world import World
@functools.total_ordering
class Ant:
"""
A single independent finder of solutions to a :class:`World`.
Each :class:`Ant` finds a solution to a world one move at a time. They
also represent the solution they find, and are capable of reporting which
nodes and edges they visited, in what order they were visited, and the
total length of the solution.
Two properties govern the decisions each :class:`Ant` makes while finding
a solution: *alpha* and *beta*. *alpha* controls the importance placed on
pheromone while *beta* controls the importance placed on distance. In
general, *beta* should be greater than *alpha* for best results.
:class:`Ant`\s also have a *uid* property that can be used to identify a
particular instance.
Using the :func:`initialize` method, each :class:`Ant` *must be
initialized* to a particular :class:`World`, and optionally may be given an
initial node from which to start finding a solution. If a starting node is
not given, one is chosen at random. Thus a few examples of instantiation
and initialization might look like:
.. code-block:: python
ant = Ant()
ant.initialize(world)
.. code-block:: python
ant = Ant().initialize(world)
.. code-block:: python
ant = Ant(alpha=0.5, beta=2.25)
ant.initialize(world, start=world.nodes[0])
.. note::
The examples above assume the world has already been created!
Once an :class:`Ant` has found a solution (or at any time), the solution
may be obtained and inspected by accessing its ``tour`` property, which
returns the nodes visited in order, or its ``path`` property, which
returns the edges visited in order. Also, the total distance of the
solution can be accessed through its ``distance`` property. :class:`Ant`\s
are even sortable by their distance:
.. code-block:: python
ants = [Ant() for ...]
# ... have each ant in the list solve a world
ants = sorted(ants)
for i in range(1, len(ants)):
assert ants[i - 1].distance <= ants[i].distance
:class:`Ant`\s may be cloned, which will return a shallow copy while not
preserving the *uid* property. If this behavior is not desired, simply use
the :func:`copy.copy` or :func:`copy.deepcopy` methods as necessary.
The remaining methods mainly govern the mechanics of making each move.
:func:`can_move` determines whether all possible moves have been made,
:func:`remaining_moves` returns the moves not yet made, :func:`choose_move`
returns a single move from a list of moves, :func:`make_move` actually
performs the move, and :func:`weigh` returns the weight of a given move.
The :func:`move` method governs the move-making process by gathering the
remaining moves, choosing one of them, making the chosen move, and
returning the move that was made.
"""
uid = 0
def __init__(self, alpha=1, beta=3):
"""Create a new Ant for the given world.
:param float alpha: the relative importance of pheromone (default=1)
:param float beta: the relative importance of distance (default=3)
"""
self.uid = self.__class__.uid
self.__class__.uid += 1
self.world = None
self.alpha = alpha
self.beta = beta
self.start = None
self.distance = 0
self.visited = []
self.unvisited = []
self.traveled = []
def initialize(self, world, start=None):
"""Reset everything so that a new solution can be found.
:param World world: the world to solve
:param Node start: the starting node (default is chosen randomly)
:return: `self`
:rtype: :class:`Ant`
"""
self.world = world
if start is None:
self.start = random.randrange(len(self.world.nodes))
else:
self.start = start
self.distance = 0
self.visited = [self.start]
self.unvisited = [n for n in self.world.nodes if n != self.start]
self.traveled = []
return self
def clone(self):
"""Return a shallow copy with a new UID.
If an exact copy (including the uid) is desired, use the
:func:`copy.copy` method.
:return: a clone
:rtype: :class:`Ant`
"""
ant = Ant(self.alpha, self.beta)
ant.world = self.world
ant.start = self.start
ant.visited = self.visited[:]
ant.unvisited = self.unvisited[:]
ant.traveled = self.traveled[:]
ant.distance = self.distance
return ant
@property
def node(self):
"""Most recently visited node."""
try:
return self.visited[-1]
except IndexError:
return None
@property
def tour(self):
"""Nodes visited by the :class:`Ant` in order."""
return [self.world.data(i) for i in self.visited]
@property
def path(self):
"""Edges traveled by the :class:`Ant` in order."""
return [edge for edge in self.traveled]
def __eq__(self, other):
"""Return ``True`` if the distance is equal to the other distance.
:param Ant other: right-hand argument
:rtype: bool
"""
return self.distance == other.distance
def __lt__(self, other):
"""Return ``True`` if the distance is less than the other distance.
:param Ant other: right-hand argument
:rtype: bool
"""
return self.distance < other.distance
def can_move(self):
"""Return ``True`` if there are moves that have not yet been made.
:rtype: bool
"""
# The two lengths are only equal after we have made the final move
# back to the starting node, so unequal lengths mean moves remain.
return len(self.traveled) != len(self.visited)
def move(self):
"""Choose, make, and return a move from the remaining moves.
:return: the :class:`Edge` taken to make the move chosen
:rtype: :class:`Edge`
"""
remaining = self.remaining_moves()
choice = self.choose_move(remaining)
return self.make_move(choice)
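# A minimal driving loop (sketch; mirrors what Solver.find_solutions
# does for a whole colony):
#
#   ant = Ant().initialize(world)
#   while ant.can_move():
#       ant.move()
#   print(ant.distance, ant.tour)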
def remaining_moves(self):
"""Return the moves that remain to be made.
:rtype: list
"""
return self.unvisited
def choose_move(self, choices):
"""Choose a move from all possible moves.
:param list choices: a list of all possible moves
:return: the chosen element from *choices*
:rtype: node
"""
if len(choices) == 0:
return None
if len(choices) == 1:
return choices[0]
# Find the weight of the edges that take us to each of the choices.
weights = []
for move in choices:
edge = self.world.edges[self.node, move]
weights.append(self.weigh(edge))
# Choose one of them using a weighted probability.
total = sum(weights)
cumdist = list(itertools.accumulate(weights)) + [total]
return choices[bisect.bisect(cumdist, random.random() * total)]
def make_move(self, dest):
"""Move to the *dest* node and return the edge traveled.
When *dest* is ``None``, an attempt to take the final move back to the
starting node is made. If that is not possible (because it has
previously been done), then ``None`` is returned.
:param node dest: the destination node for the move
:return: the edge taken to get to *dest*
:rtype: :class:`Edge`
"""
# Since self.node simply refers to self.visited[-1], which will be
# changed before we return to calling code, store a reference now.
ori = self.node
# When dest is None, all nodes have been visited but we may not
# have returned to the node on which we started. If we have, then
# just do nothing and return None. Otherwise, set the dest to the
# node on which we started and don't try to move it from unvisited
# to visited because it was the first one to be moved.
if dest is None:
if self.can_move() is False:
return None
dest = self.start # last move is back to the start
else:
self.visited.append(dest)
self.unvisited.remove(dest)
edge = self.world.edges[ori, dest]
self.traveled.append(edge)
self.distance += edge.length
return edge
def weigh(self, edge):
"""Calculate the weight of the given *edge*.
The weight of an edge is simply a representation of its perceived value
in finding a shorter solution. Larger weights increase the odds of the
edge being taken, whereas smaller weights decrease those odds.
:param Edge edge: the edge to weigh
:return: the weight of *edge*
:rtype: float
"""
pre = 1 / (edge.length or 1)
post = edge.pheromone
return post ** self.alpha * pre ** self.beta | ACO-Pants | /ACO-Pants-0.5.2.tar.gz/ACO-Pants-0.5.2/pants/ant.py | ant.py |
from neo4j import GraphDatabase
from string import Template
from .node import Node
NEO4J_URL = 'bolt://localhost:7687'
NEO4J_ACCOUNT = 'neo4j'
NEO4J_PASSWORD = 'neo4jj'
class Neo4j:
DRIVER = GraphDatabase.driver(NEO4J_URL, auth=(NEO4J_ACCOUNT, NEO4J_PASSWORD))
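# Illustrative usage (a sketch; assumes a Neo4j server reachable at
# NEO4J_URL with the credentials above, and a populated Node object):
#
#   if not Neo4j.is_already_node(node.url):
#       Neo4j.create_node(node)
#   Neo4j.add_value_node(node.url, 'visit_times', 1)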
@classmethod
def is_already_node(cls, url):
""" すでにノードがあるか?
Args:
url(str):
Returns:
"""
query = f'MATCH (n:Node{{url:"{url}"}}) return COUNT(n)'
with cls.DRIVER.session() as session:
for record in session.run(query):
return record[0] > 0
@classmethod
def is_already_edge(cls, from_url, to_url):
"""すでにエッジがあるか?
Args:
from_url:
to_url:
Returns:
"""
query = f'MATCH (n1:Node{{url:"{from_url}"}}), (n2:Node{{url:"{to_url}"}}), (n1)-[r:Link]->(n2) return COUNT(r);'
with cls.DRIVER.session() as session:
for record in session.run(query):
return record[0] > 0
@classmethod
def create_node(cls, node):
"""ノードを作る
Args:
node(Node):
Returns:
"""
anchors = node.anchors
anchors_text = ['"' + anchor + '"' for anchor in anchors]
anchors_text = ','.join(anchors_text)
query = f'MERGE (:Node{{' \
f'url:"{node.url}", ' \
f'domain:"{node.domain}", ' \
f'title:"{node.title}", ' \
f'anchors: [{anchors_text}], ' \
f'body: "{node.body}", ' \
f'wakati:"{node.wakati}", ' \
f'score:{node.score}, ' \
f'is_expanded:{node.is_expanded}, ' \
f'visit_times: {node.visit_times}, ' \
f'created: {node.created}, ' \
f'updated: {node.updated}' \
f'}})'
with cls.DRIVER.session() as session:
session.run(query)
@classmethod
def create_edge(cls, from_url, to_url, pheromone, heuristic, similarity):
"""エッジを作る
Args:
from_url(str):
to_url(str):
pheromone(float): フェロモン値。外側で計算しておく。
heuristic(float): ヒューリスティック値。外側で計算しておく。
similarity(float): fromとtoの文書間類似度。
Returns:
"""
template = Template(
'MATCH (n1:Node{url:"${from_url}"}), (n2:Node{url:"${to_url}"}) CREATE (n1)-[:Link{edge_id: "${edge_id}", pheromone:${pheromone}, heuristic: ${heuristic}, similarity: ${similarity}, from_url:"${from_url}", to_url: "${to_url}", pass_times: ${pass_times}}]->(n2);')
query = template.substitute(from_url=from_url, to_url=to_url, edge_id=from_url + to_url, pheromone=pheromone,
heuristic=heuristic, similarity=similarity, pass_times=0)
with cls.DRIVER.session() as session:
session.run(query)
@classmethod
def set_node(cls, url, key, value):
"""ノードのキーにプロパティをセットする
Args:
url:
key:
value:
Returns:
"""
query = f'MATCH (n:Node{{url:"{url}"}}) SET n.{key} = {value}'
with cls.DRIVER.session() as session:
session.run(query)
@classmethod
def add_value_node(cls, url, key, value):
"""ノードのキーの値を加算する
Args:
url:
key:
value:
Returns:
"""
query = f'MATCH (n:Node{{url:"{url}"}}) SET n.{key} = n.{key} + {value}'
with cls.DRIVER.session() as session:
session.run(query)
@classmethod
def add_value_edge(cls, from_url, to_url, key, value):
"""エッジのキーの値を加算する
Args:
url:
key:
value:
Returns:
"""
query = f'MATCH (n1:Node{{url:"{from_url}"}}), (n2:Node{{url:"{to_url}"}}), (n1)-[r:Link]->(n2) SET r.{key} = r.{key} + {value}'
with cls.DRIVER.session() as session:
session.run(query)
@classmethod
def get_node(cls, url):
"""ノードを取り出す
Args:
url:
Returns:
Node: ノードを返す
"""
query = f'MATCH (n:Node{{url:"{url}"}}) return n'
with cls.DRIVER.session() as session:
for record in session.run(query):
node = Node(**record[0])
return node
@classmethod
def get_anchors(cls, url):
"""urlの文書に含まれるアンカー配列を返す
Args:
url:
Returns:
list(str): URL配列を返す
"""
query = f'MATCH (n:Node{{url:"{url}"}}) MATCH (n)-[r:Link]->(to:Node) return r, to'
anchors = []
with cls.DRIVER.session() as session:
for record in session.run(query):
data = {
'to_url': record[0].nodes[1]['url'],
'heuristic': record[0]['heuristic'],
'pheromone': record[0]['pheromone']
}
anchors.append(data)
return anchors
@classmethod
def get_nodes_properties(cls, *args):
"""すべてのレコードで、キーで選んだプロパティを辞書配列で返す
Args:
*args: 取り出したいキーの文字列配列
Returns:
辞書配列
"""
query = f'MATCH(n:Node) RETURN n order by n.created ASC;'
results = []
with cls.DRIVER.session() as session:
for record in session.run(query):
props = {}
for key in args:
props[key] = record[0][key]
results.append(props)
return results
@classmethod
def get_records_properties(cls, *args):
"""すべてのエッジレコードで、キーで選んだプロパティを辞書配列で返す
Args:
*args: 取り出したいキーの文字列配列
Returns:
dict: 辞書配列
"""
query = f'MATCH (n:Node) , (n)-[r:Link]->() return r;'
results = []
with cls.DRIVER.session() as session:
for record in session.run(query):
props = {}
for key in args:
props[key] = record[0][key]
results.append(props)
return results
@classmethod
def count_nodes(cls):
query = f'MATCH(n:Node) return COUNT(n);'
with cls.DRIVER.session() as session:
for record in session.run(query):
return record[0]
@classmethod
def count_edges(cls):
query = f'MATCH (n:Node)-[r:Link]->() return COUNT(r);'
with cls.DRIVER.session() as session:
for record in session.run(query):
return record[0]
@classmethod
def set_mul_all_edges(cls, key, value):
"""エッジのキーにvalueをかける
Args:
key(str):
value(float):
Returns:
"""
query = f'MATCH ()-[r:Link]->() SET r.{key} = r.{key} * {value}'
with cls.DRIVER.session() as session:
session.run(query)
@classmethod
def set_clamp_all_edges(cls, key, lower, upper):
"""エッジすべてにClampをする
Args:
key:
lower:
upper:
Returns:
"""
query = f'MATCH ()-[r:Link]->() SET r.{key} = CASE WHEN r.{key} < {lower} THEN {lower} WHEN r.{key} > {upper} THEN {upper} ELSE r.{key} end'
with cls.DRIVER.session() as session:
session.run(query) | ACOCrawler | /components/neo4J.py | neo4J.py |
import json
import time
from urllib import parse
import requests
from bs4 import BeautifulSoup
SLEEP_TIME = 2
class Google:
def __init__(self):
self.GOOGLE_SEARCH_URL = 'https://www.google.co.jp/search'
self.session = requests.session()
self.session.headers.update({
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:57.0) Gecko/20100101 Firefox/57.0'})
def Search(self, keyword, type='text', maximum=100):
"""Google検索
Args:
keyword(str):
type(str):
maximum(int):
Returns:
list: 検索結果
"""
print('Google', type.capitalize(), 'Search :', keyword)
result, total = [], 0
query = self.query_gen(keyword, type)
while True:
# Run the search
html = self.session.get(next(query)).text
links = self.get_links(html, type)
# Append the search results
if not len(links):
print('-> No more links')
break
elif len(links) > maximum - total:
result += links[:maximum - total]
break
else:
result += links
total += len(links)
print('-> Finally got', str(len(result)), 'links')
return result
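# Illustrative usage (a sketch; scraping Google may breach its terms of
# service, and the CSS selectors used below are fragile):
#
#   google = Google()
#   seeds = google.Search('ant colony optimization', maximum=30)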
def query_gen(self, keyword, type):
"""検索クエリを作り出す
ジェネレータ
Args:
keyword:
type:
Yields:
str: 検索クエリ
"""
page = 0
while True:
if type == 'text':
params = parse.urlencode({
'q': keyword,
'num': '10',
'filter': '0',
'start': str(page * 10)})
elif type == 'image':
params = parse.urlencode({
'q': keyword,
'tbm': 'isch',
'filter': '0',
'ijn': str(page)})
yield self.GOOGLE_SEARCH_URL + '?' + params
time.sleep(SLEEP_TIME)
page += 1
def get_links(self, html, type):
"""リンクを求める
あるHTMLに入っているリンクをすべて洗い出す
Args:
html:
type:
Returns:
list: Googleの検索ページに含まれるURLを返す
"""
soup = BeautifulSoup(html, 'lxml')
if type == 'text':
elements = soup.select('.rc > .r > a')
links = [e['href'] for e in elements]
elif type == 'image':
elements = soup.select('.rg_meta.notranslate')
jsons = [json.loads(e.get_text()) for e in elements]
links = [js['ou'] for js in jsons]
return links | ACOCrawler | /components/google.py | google.py |
import itertools
import bisect
import random
from .neo4J import Neo4j
class AcoNeo4j(Neo4j):
"""Neo4jを継承したACOに関連するNeo4j
Neo4jより派生したクラス
"""
@classmethod
def calc_next_url_aco(cls, anchors, alpha, beta):
"""次に移動するURLを確率選択して返す
ACOの確率選択式を利用して
確率選択して選んだ次に移動するべきURLを返す
Args:
anchors(list(dict)): 次に移動できるURL候補配列
alpha(float): フェロモン
beta(float): ヒューリスティック呈す
Returns:
str: 次のURL
"""
tau_arr = []
for anchor in anchors:
p = anchor['pheromone']
h = anchor['heuristic']
# standard ACO transition weight: tau^alpha * eta^beta
tau = p ** alpha * h ** beta
tau_arr.append(tau)
sum_v = sum(tau_arr)
prob_arr = list(itertools.accumulate(tau_arr))
next_url = anchors[bisect.bisect_left(prob_arr, sum_v * random.random())]['to_url']
return next_url
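# Illustrative call (a sketch; each candidate gets weight
# tau_i = pheromone_i**alpha * heuristic_i**beta and is drawn with
# probability tau_i / sum(tau)):
#
#   anchors = [
#       {'to_url': 'https://a.example', 'pheromone': 1.0, 'heuristic': 0.2},
#       {'to_url': 'https://b.example', 'pheromone': 0.1, 'heuristic': 0.9},
#   ]
#   next_url = AcoNeo4j.calc_next_url_aco(anchors, alpha=1.0, beta=2.0)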
@classmethod
def calc_next_link_aco_tabu(cls, anchors, alpha, beta, path):
"""次に移動するURLを確率選択して返す
ACOの確率選択式を利用して
確率選択して選んだ次に移動するべきURLを返す
Tabu処理をする
Args:
anchors(list(dict)): 次に移動できるURL候補配列
alpha(float): フェロモン
beta(float): ヒューリスティック呈す
path(list): アリの通った経路
Returns:
str: 次のURL
"""
tau_arr = []
for anchor in anchors:
p = anchor['pheromone']
h = anchor['heuristic']
# standard ACO transition weight: tau^alpha * eta^beta
tau = p ** alpha * h ** beta
tau_arr.append(tau)
sum_v = sum(tau_arr)
prob_arr = list(itertools.accumulate(tau_arr))
next_url = None
for i in range(10000):
next_url = anchors[bisect.bisect_left(prob_arr, sum_v * random.random())]['to_url']
if next_url not in path:
return next_url
return next_url
@classmethod
def mul_all_edges_pheromone(cls, rho):
"""エッジにrhoをかけて蒸発させる
エッジのフェロモンを蒸発させる
ただ、残留率といったほうが正しい
Args:
rho(float): 蒸発率
Returns:
"""
cls.set_mul_all_edges('pheromone', rho)
@classmethod
def calc_path_scores(cls, path):
"""経路のパスのスコア(フェロモン)を計算する
蟻のパスを引数として
各ノードのスコアを合計しその平均(フェロモン)を返す
Args:
path(list(str)): アリが通ったパス
Returns:
float: 各エッジに加算することになるフェロモン
"""
score = 0
for url in path:
node = cls.get_node(url)
score += node.score
if len(path) == 0:
return 0
return score / len(path)
@classmethod
def splay_pheromone_by_ant_k(cls, add_pheromone, path):
"""アリkが分泌したフェロモンをパスに添付する
calc_path_scoresで計算したフェロモンを
通ったpathのエッジに加算する
Args:
add_pheromone(float): 一つあたりのエッジに加算するフェロモン
path: アリが通ったパス
Returns:
"""
with cls.DRIVER.session() as session:
for i in range(len(path) - 1):
query = f'MATCH (n1:Node{{url: "{path[i]}"}}) MATCH (n2:Node{{url: ' \
f'"{path[i + 1]}"}}) MATCH (n1)-[r:Link]->(n2) SET r.pheromone = r.pheromone + {add_pheromone}'
session.run(query)
@classmethod
def clamp_pheromones(cls, lower, upper):
"""フェロモンのClamp
Args:
lower:
upper:
Returns:
"""
cls.set_clamp_all_edges('pheromone', lower, upper) | ACOCrawler | /components/aconeo4J.py | aconeo4J.py |
import os
import time
import urllib3
import re
import requests
from bs4 import BeautifulSoup, Comment
from reppy.cache import RobotsCache
from requests.exceptions import Timeout
from urllib3.exceptions import InsecureRequestWarning
from .url import Url
from .wakati import Wakati
from .score import Score
urllib3.disable_warnings(InsecureRequestWarning)
PAGE_CAPACITY = 200
TIME_OUT = 2
SLEEP_TIME = 1
URL_REGEX = re.compile(
r'^(?:http|ftp)s?://'
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'
r'localhost|'
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'
r'(?::\d+)?'
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
BODY_URL_REGEX = re.compile(r'https?://[\w/:%#\$&\?\(\)~\.=\+\-…]+')
BODY_WWW_REGEX = re.compile(r'[/:%#\$&\?\(\)~\.=\+\-…]+')
SPACE_REGEX = re.compile(r'\s+')
NUMBER_REGEX = re.compile(r'\d+')
TAG_REGEX = re.compile(r'<[^>]*?>')
SPECIAL_REGEX = re.compile('[!"#$%&\'\\\\()*,-./⁄:;<=>?@[\\]^_`{|}~「」〔〕“”〈〉『』【】&*・()$#@。、≒◉?!`╱+¥%〻〳〴〵〼ヿ«»〽]')
BAD_EXTENSIONS = ['.pdf', '.jpg', '.png', '.css', '.js', '.mp4', '.jpeg', '.gif', '.bmp', '.mpg', '.avi', '.mp3',
'.wma', '.wav', '.exe', '.zip', '.bat', '.bak', '.doc', '.fon', '.fnt', '.ini', '.lzh', '.gz', '.tar',
'.tgz', '.wav', '.xls']
class Scrape:
"""スクレイピングクラス
URLのページをスクレイピングする
Raises:
Exception: 拡張子・robots.txt・TLEに関して発生する
"""
robot = RobotsCache(PAGE_CAPACITY, TIME_OUT)
@classmethod
def page_scrape(cls, url, keyword, **kwargs):
"""ページをスクレイピングする
urlの文書をスクレイピングする
Args:
url(str): 文書URL
keyword(str): キーワード
**kwargs(dict): NLPの際使用する
Returns:
dict: スクレイピング結果
"""
url = Url.erase_url_parameters(url)
extension = os.path.splitext(url)[1].lower()
if extension in BAD_EXTENSIONS:
raise Exception
if not cls.robot.allowed(url, 'python program'):
print('robots.txt not allowed: ', url)
raise Exception
try:
result = requests.get(url, verify=False, allow_redirects=True, timeout=TIME_OUT)
time.sleep(SLEEP_TIME)
except Timeout:
print("TLE: ", url)
raise Exception
content = result.content
soup = BeautifulSoup(content, 'lxml')
title = cls.__get_title(soup)
title = cls.__clean_body(title)
anchors = cls.__get_anchors(url, soup)
domain = Url.get_url_domain(url)
body = cls.__get_body(soup)
body = cls.__clean_body(body)
wakati = Wakati.wakati_text(body)
score = Score.calc_score_by_simple_counts(body, wakati, keyword, **kwargs)
return {
'url': url,
'title': title,
'anchors': anchors,
'domain': domain,
'body': body,
'wakati': wakati,
'score': score
}
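# Illustrative usage (a sketch; performs live network access and may
# raise on disallowed extensions, robots.txt denial or timeouts):
#
#   result = Scrape.page_scrape('https://example.com/', 'ants')
#   print(result['title'], result['score'], len(result['anchors']))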
@classmethod
def __get_title(cls, soup):
title = ""
if hasattr(soup, 'title') and hasattr(soup.title, 'string'):
title = soup.title.string
return title
@classmethod
def __get_anchors(cls, url, soup):
anchors = [a.get('href') for a in soup('a')
if a.get('href') is not None
and len(a.get('href'))
and a.get('href')[0:10] != 'javascript'
and re.match(URL_REGEX, a.get('href')) is not None]
anchors = [Url.get_absolute_url(url, anchor) for anchor in anchors]
anchors = [Url.erase_url_parameters(anchor) for anchor in anchors]
anchors = [Url.erase_url_special_characters(anchor) for anchor in anchors]
anchors = list(set(anchors))
return anchors
@classmethod
def __get_body(cls, soup):
for comment in soup(text=lambda x: isinstance(x, Comment)): comment.extract()
[script.decompose() for script in soup('script')]
[style.decompose() for style in soup('style')]
body = ' '.join([text.strip() for text in soup.find_all(text=True)[1:]])
return body
@classmethod
def __clean_body(cls, body):
if body is None:
body = ''
body = body.lower()
body = TAG_REGEX.sub('', body)
body = NUMBER_REGEX.sub('', body)
body = BODY_URL_REGEX.sub('', body)
body = BODY_WWW_REGEX.sub('', body)
body = SPECIAL_REGEX.sub(' ', body)
body = SPACE_REGEX.sub(' ', body)
return body | ACOCrawler | /components/scrape.py | scrape.py |
from datetime import datetime, timedelta
import os.path as osp
import re
import warnings
from operator import attrgetter
from functools import reduce
import numpy as np
from memoized_property import memoized_property
import pydub
from .sound import Sound
class _FileLoader:
resolution = np.int32
time_code = '%Y-%m-%d--%H.%M'
@classmethod
def load_ACO_from_file(cls, basedir, relpath):
time_stamp = cls._date_from_filename(relpath)
filename = osp.join(basedir, relpath)
fs = cls._frames_per_second(filename)
data = cls._data_from_file(filename)
return ACO(time_stamp, fs, data, True)
@classmethod
def _data_from_file(cls, filename):
raise NotImplementedError
@classmethod
def _date_from_filename(cls, filename):
# 2016-02-15--05.00.HYD24BBpk
name = osp.basename(filename)
dts, _ = name.rsplit('.', 1)
time_stamp = datetime.strptime(dts, cls.time_code)
return time_stamp
@classmethod
def _frames_per_second(cls, filename):
raise NotImplementedError
class _FileMp3Loader(_FileLoader):
extension = 'mp3'
@classmethod
def _data_from_file(cls, filename):
a = pydub.AudioSegment.from_mp3(filename)
y = np.array(a.get_array_of_samples())
return y
@classmethod
def _frames_per_second(cls, filename):
a = pydub.AudioSegment.from_mp3(filename)
return a.frame_rate
class _FileACOLoader(_FileLoader):
extension = 'HYD24BBpk'
header_dtype = np.dtype(
[('Record', '<u4'),
('Decimation', '<u1'),
('StartofFile', '<u1'),
('Sync1', '<u1'),
('Sync2', '<u1'),
('Statusbyte1', '<u1'),
('Statusbyte2', '<u1'),
('pad1', '<u1'),
('LeftRightFlag', '<u1'),
('tSec', '<u4'),
('tuSec', '<u4'),
('timecount', '<u4'),
('Year', '<i2'),
('yDay', '<i2'),
('Hour', '<u1'),
('Min', '<u1'),
('Sec', '<u1'),
('Allignment', '<u1'),
('sSec', '<i2'),
('dynrange', '<u1'),
('bits', '<u1')])
@classmethod
def _ACO_to_int(cls, databytes, nbits):
'''
Convert the block of bytes to an array of int32.
We need to use int32 because there can be 17 bits.
'''
nbits = int(nbits)
# Fast path for special case of 16 bits:
if nbits == 16:
return databytes.view(np.int16).astype(cls.resolution)
# Put the bits in order from LSB to MSB:
bits = np.unpackbits(databytes).reshape(-1, 8)[:, ::-1]
# Group by the number of bits in the int:
bits = bits.reshape(-1, nbits)
# Reassemble the integers:
pows = 2 ** np.arange(nbits, dtype=cls.resolution)
num = (bits * pows).sum(axis=1).astype(cls.resolution)
# Handle twos-complement negative integers:
neg = num >= 2**(nbits-1)
num[neg] -= 2**nbits
return num
@classmethod
def _frames_per_second(cls, path):
name = osp.basename(path)
_, encs = name.rsplit('.', 1)
fs = int(re.findall(r'\d+', encs).pop()) * 1000
return fs
@classmethod
def _params_from_filename(cls, filename):
timestamp = cls._date_from_filename(filename)
fs = cls._frames_per_second(filename)
return timestamp, fs
@classmethod
def _data_from_file(cls, filename):
headerlist = []
datalist = []
with open(filename, 'rb') as fid:
fid.seek(0, 2)
eof = fid.tell()
fid.seek(0, 0)
while fid.tell() < eof:
header = np.fromfile(fid, count=1, dtype=cls.header_dtype)[0]
headerlist.append(header)
nbits = int(header['bits'])
count = (4096//8) * nbits
databytes = np.fromfile(fid, count=count, dtype='<u1')
data = cls._ACO_to_int(databytes, nbits)
datalist.append(data)
# headers = np.array(headerlist)
# Keeping the blocks separate, matching the headers:
data = np.vstack(datalist)
# But we can also view it as a single time series:
alldata = data.reshape(-1)
return alldata
class _DatetimeLoader:
expected_file_length = timedelta(minutes=5)
@classmethod
def __floor_dt(cls, dt):
src = timedelta(hours=dt.hour, minutes=dt.minute, seconds=dt.second)
offset = src.total_seconds() % cls.expected_file_length.total_seconds()
return dt - timedelta(seconds=offset)
@classmethod
def _filename_from_date(cls, index_datetime):
dts = datetime.strftime(index_datetime, cls.time_code)
return '.'.join([dts, cls.extension])
@classmethod
def _path_from_date(cls, index_datetime, normdir):
info = [index_datetime.year, index_datetime.month, index_datetime.day]
dirname = osp.join(*map(lambda i: str(i).zfill(2), info)) \
if normdir is None else normdir
basename = cls._filename_from_date(index_datetime)
return osp.join(dirname, basename)
@classmethod
def path_from_date(cls, search_datetime, normdir=None):
floor_datetime = cls.__floor_dt(search_datetime)
return cls._path_from_date(floor_datetime, normdir=normdir)
@classmethod
def _load_full_ACO_from_base_datetime(
cls,
basedir, floor_datetime
):
aco = cls.load_ACO_from_file(
basedir, cls._path_from_date(floor_datetime)
)
return aco
@classmethod
def load_span_ACO_from_datetime(
cls,
basedir, index_datetime,
durration
):
result = []
floor_datetime = cls.__floor_dt(index_datetime)
start = index_datetime - floor_datetime
end = start + durration
local_end = end
while local_end.total_seconds() > 0:
try:
_ = cls._load_full_ACO_from_base_datetime(
basedir,
floor_datetime
)
except FileNotFoundError:
warnings.warn(
'index-range not continuous in local storage',
UserWarning
)
break
floor_datetime = cls.__floor_dt(
floor_datetime + start + cls.expected_file_length
)
local_end = local_end - _._durration
result.append(_)
if not result:
raise FileNotFoundError(cls._path_from_date(floor_datetime, None), "Check Your Basedir")
aco = reduce(ACO.__matmul__, result).squash_nan()
return aco[start:end]
class _DatetimeACOLoader(_DatetimeLoader, _FileACOLoader):
pass
class _DatetimeMp3Loader(_DatetimeLoader, _FileMp3Loader):
pass
class Loader:
def __init__(self, basedir):
self.basedir = basedir
def _path_loader(self, target):
raise NotImplementedError
def _date_loader(self, target, durration):
raise NotImplementedError
def load(self, target, durration=None):
if isinstance(target, str):
return self._path_loader(target, durration)
elif isinstance(target, datetime):
if durration is None:
durration = timedelta(minutes=5)
return self._date_loader(target, durration)
else:
raise TypeError
class ACOLoader(Loader):
def _path_loader(self, target):
return _DatetimeACOLoader.load_ACO_from_file(self.basedir, target)
def _date_loader(self, target, durration):
return _DatetimeACOLoader.load_span_ACO_from_datetime(
self.basedir, target, durration)
class Mp3Loader(Loader):
def _path_loader(self, target):
return _DatetimeMp3Loader.load_ACO_from_file(self.basedir, target)
def _date_loader(self, target, durration):
return _DatetimeMp3Loader.load_span_ACO_from_datetime(
self.basedir, target, durration)
class ACOio:
def __init__(self, basedir, Loader=ACOLoader):
self.loader = Loader(basedir)
def load(self, target, durration=None):
return self.loader.load(target, durration)
class ACO(Sound):
def __init__(self, time_stamp, fs, data, raw=False):
super().__init__(fs, data)
self.start_datetime = time_stamp
self.raw = raw
def copy(self):
return ACO(
self.start_datetime,
self._fs,
self._data.copy(),
self.raw
)
@memoized_property
def end_datetime(self):
return self.date_offset(self._durration)
def date_offset(self, durration):
return self.start_datetime + durration
def _date_difference(self, d):
return self.durration_to_index(d - self.start_datetime)
def __oolb(self, slice_):
return (slice_.start < timedelta(0))
def __ooub(self, slice_):
return (self.date_offset(slice_.stop) > self.end_datetime)
def _oob(self, slice_):
return self.__oolb(slice_) or self.__ooub(slice_)
@classmethod
def _reversed_indexing(cls, slice_):
return (slice_.stop < slice_.start)
def __getitem__(self, slice_):
idx = timedelta(seconds=0) if slice_.start is None else slice_.start
jdx = self._durration if slice_.stop is None else slice_.stop
slice_ = slice(idx, jdx)
if self._reversed_indexing(slice_):
raise IndexError("Does not support reverse indexing")
if self._oob(slice_):
warnings.warn('Slice Out of Bounds', UserWarning)
result = self.copy()
start = slice_.start
timestamp = self.start_datetime + (
timedelta(0) if start is None else start
)
idx, jdx = self._getitem__indicies(slice_)
data = self._data[idx:jdx]
result._data = data
result.timestamp = timestamp
return result
def __matmul__(self, other):
'''
allows date-time respecting joins of tracks
'''
assert(self.raw)
assert(other.raw)
A, B = self.copy(), other.copy()
ordered = (A, B) # wlg
if self._fs != other._fs:
ordered = sorted((self, other), key=attrgetter('_fs'))
ordered[-1] = ordered[-1].resample_fs(ordered[0]._fs)
ordered = sorted(ordered, key=attrgetter('start_datetime'))
durration = ordered[-1].end_datetime - ordered[0].start_datetime
space = max(
ordered[0].durration_to_index(durration),
len(A._data), len(B._data))
data = np.full(space, np.NAN)
idx = ~np.isnan(ordered[0]._data)
data[:len(ordered[0]._data)][idx] = ordered[0]._data[idx]
durration = ordered[-1].start_datetime - ordered[0].start_datetime
start = ordered[0].durration_to_index(durration)
idx = ~np.isnan(ordered[-1]._data)
overlap_count = np.sum(~np.isnan(data[start:][idx]))
data[start:][idx] = ordered[-1]._data[idx]
if overlap_count > 0:
warnings.warn(f'Overlaps {overlap_count} samples', UserWarning)
result = self.__class__(
ordered[0].start_datetime,
ordered[0]._fs,
data,
ordered[0].raw
)
return result
if __name__ == '__main__':
loader = ACOio('./dst/', Mp3Loader)
target = datetime(
day=1, month=2, year=2016
)
aco = loader.load(target) | ACOio | /ACOio-0.2.2-py3-none-any.whl/aco.py | aco.py |
from collections import namedtuple
from datetime import datetime, timedelta
import warnings
import io
import numpy as np
import scipy.signal as signal
from scipy.io.wavfile import write as wavwrite
import scipy
# from python_speech_features import logfbank
from memoized_property import memoized_property
PlotInfo = namedtuple('PlotInfo', ['data', 'xaxis', 'interval', 'shift'])
class NoViewMethodError(Exception):
pass
class UnsupportedViewDimmensions(Exception):
pass
class Sound:
BULLSHITWAVNUMBER = 24000
def __init__(self, fs, data):
self._fs = fs
self._data = data.astype(np.float64)
def copy(self):
return Sound(self._fs, self._data.copy())
@classmethod
def _resample_fs(cls, data, new_fs, old_fs):
fs_ratio = new_fs/old_fs
new_length = int(np.round(len(data) * fs_ratio))
return signal.resample(data, new_length)
def squash_nan(self):
result = self.copy()
idx = ~np.isnan(result._data)
result._data = result._data[idx]
return result
def resample_fs(self, fs):
''' returns a track resampled to a specific frames per second '''
result = self.copy()
result._data = self._resample_fs(self._data, fs, self._fs)
result._fs = fs
return result
def resample(self, n):
''' returns a track resampled to a specific number of data points '''
result = self.copy()
if len(self) != n:
fs_ratio = n/len(self._data)
warnings.warn(
f'Only {fs_ratio:.3f} of signal represented',
UserWarning
)
result._data = signal.resample(self._data, n)
result._fs = int(np.round(self._fs * fs_ratio))
return result
def random_sample(self, durration):
t = np.random.uniform(0, (self._durration - durration).total_seconds())
start = timedelta(seconds=t)
return self[start:start+durration]
def chunk(self, durration, step):
''' returns generator to step over track for durration by step '''
start = timedelta(seconds=0)
stop = start + durration
while stop.total_seconds() < self._durration.total_seconds():
yield self[start:stop]
start += step
stop += step
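# Illustrative usage (a sketch): step over a track in one-second
# windows with half-second overlap:
#
#   for clip in snd.chunk(timedelta(seconds=1), timedelta(seconds=0.5)):
#       print(len(clip))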
@classmethod
def _pre_emphasis(cls, data, pre_emphasis):
return np.append(data[0], data[1:] - pre_emphasis * data[:-1])
def _getitem__indicies(self, slice_):
i, j = slice_.start, slice_.stop
new_start \
= timedelta(0) if i is None else i
new_end \
= self._durration if j is None else j
idx, jdx = \
self.durration_to_index(new_start), \
self.durration_to_index(new_end)
return idx, jdx
def __getitem__(self, slice_):
idx, jdx = self._getitem__indicies(slice_)
result = self.copy()
result._data = result._data[idx:jdx]
return result
@memoized_property
def _durration(self):
return timedelta(seconds=float((self._data.size / self._fs)))
@classmethod
def _to_frame_count(cls, fs, seconds):
return int(np.round(seconds * fs))
def to_frame_count(self, seconds):
''' given a seconds count, return the index offsed count for track '''
return self._to_frame_count(self._fs, seconds)
def __len__(self):
return len(self._data)
def durration_to_index(self, t):
''' returns the idx offset reached when stepping `t` seconds '''
return int(t.total_seconds() * self._fs)
@memoized_property
def _max_value(self):
return np.max(np.abs(self._data))
def normdata(self, dtype=np.int32):
''' safe normalization of `._data` to the specified bit precision '''
data = self._data.copy().astype(np.float64)
max_value = self._max_value
data = ((data/max_value) * (np.iinfo(dtype).max - 1)).astype(dtype)
return data
@classmethod
def _lowpass(cls, data, BUTTER_ORDER, sampling_rate, cut_off):
Wn = float(cut_off) / (float(sampling_rate) / 2.0)
b, a = signal.butter(BUTTER_ORDER, Wn, btype='low')
return signal.filtfilt(b, a, data)
def lowpass(self, BUTTER_ORDER=6, cut_off=3000.0):
''' returns track after application of low-pass buttworth filter '''
result = self.copy()
result._data = self._lowpass(
self._data,
BUTTER_ORDER=BUTTER_ORDER,
sampling_rate=self._fs,
cut_off=cut_off)
return result
@classmethod
def _highpass(cls, data, BUTTER_ORDER, sampling_rate, cut_off):
Wn = float(cut_off) / (float(sampling_rate) / 2.0)
b, a = signal.butter(BUTTER_ORDER, Wn, 'high')
return signal.filtfilt(b, a, data)
def highpass(self, BUTTER_ORDER=6, cut_off=30.0):
''' returns track after application of high-pass buttworth filter '''
result = self.copy()
result._data = self._highpass(
self._data,
BUTTER_ORDER=BUTTER_ORDER,
sampling_rate=self._fs,
cut_off=cut_off)
return result
def stft(self):
''' short term fourier transform, as implemented by `signal.stft` '''
return signal.stft(self._data, self._fs)
def power(self, frame_duration=.08, frame_shift=.02, wtype='boxcar'):
num_overlap = self.to_frame_count(frame_duration - frame_shift)
frame_size = self.to_frame_count(frame_duration)
window = signal.get_window(wtype, frame_size)
_, power = signal.welch(
self._data,
window=window,
return_onesided=False,
scaling='spectrum',
noverlap=num_overlap
)
return power * window.sum()**2
@classmethod
def _overlap_add(cls, frames, shift, norm=True):
count, size = frames.shape
assert(shift < size)
store = np.full((count, (size + (shift * (count - 1)))), np.NAN)
for i in range(count):
store[i][shift*i:shift*i+size] = frames[i]
out = np.nansum(store, axis=0)
if norm:
out = out/np.sum(~np.isnan(store), axis=0)
return out
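# Sketch of the overlap-add above: two frames of size 4 with shift 2,
#   frames = [[a0, a1, a2, a3],
#             [b0, b1, b2, b3]]
# yield a length-6 signal in which the overlapping middle samples
# (a2 + b0, a3 + b1) are averaged when norm=True.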
# def periodogram(self):
# return signal.periodogram(self._data, fs=self._fs)
def autocorr(self, mode='full'):
x = self._data
n = len(x)
return np.correlate(x, x, mode=mode)[n - 1:]
def logspectrogram(
self, frame_duration=.08, frame_shift=.02, wtype='hanning'
):
unit = self.spectrogram(frame_duration, frame_shift, wtype)
return unit._replace(data=(20 * np.log10(np.abs(unit.data))))
def cepstrum(self, frame_duration=.08, frame_shift=.02, wtype='hanning'):
unit = self.spectrogram(frame_duration, frame_shift, wtype)
return unit._replace(
data=(np.fft.irfft(np.log(np.abs(unit.data))).real)
)
def spectrogram(
self, frame_duration=.08, frame_shift=.02, wtype='hanning'
):
unit = self._Frame(frame_duration, frame_shift)
mat = unit.data * signal.get_window(wtype, unit.data.shape[1])
N = 2 ** int(np.ceil(np.log2(mat.shape[0])))
return unit._replace(data=np.fft.rfft(mat, n=N))
def _Frame(self, frame_duration=.08, frame_shift=.02):
'''
returns a sliding frame view, turning the track into a 2d array,
specified by frame_duration and frame_shift; usually precedes
the application of windowing, then filtering
'''
n = self.to_frame_count(frame_duration)
s = self.to_frame_count(frame_shift)
total_frames = (len(self._data) - n) // s + 1
zero = self._time_stamp if hasattr(self, '_time_stamp') \
else datetime(1, 1, 1)
time = (zero + (timedelta(seconds=frame_shift) * i)
for i in range(total_frames))
# dom = np.arange(total_frames) * s + n // 2
mat = np.empty((total_frames, n))
mat[:, :] = np.NAN
start = 0
for i in range(total_frames):
idx = slice(start, (start+n))
mat[i, :] = self._data[idx]
start += s
return PlotInfo(mat, time, frame_duration, frame_shift)
def _spectral_subtraction(
self, other, α, β,
frame_duration=.08, frame_shift=.02, wtype='boxcar'
):
Frames = self._Frame(frame_duration, frame_shift).data
power = other.power(frame_duration, frame_shift, wtype)
window = signal.get_window(wtype, self.to_frame_count(frame_duration))
spectrum = np.fft.fft(Frames * window)
amplitude = np.abs(spectrum)
phase = np.angle(spectrum)
# Spectral subtraction: remove the scaled noise power from the
# signal power, floor the result at beta times the signal power,
# then restore the original phase.
power_sub = amplitude ** 2.0 - α * power
power_floor = β * amplitude ** 2.0
new_amplitude = np.sqrt(np.maximum(power_sub, power_floor))
return new_amplitude * np.exp(1j * phase)
def _subtract_data(
self, other, α=5.0, β=.02,
frame_duration=.08, frame_shift=.02, wtype='boxcar'
):
assert(self._fs == other._fs)
new_spectrum = self._spectral_subtraction(
other, α, β, frame_duration, frame_shift, wtype
)
frames = np.fft.ifft(new_spectrum).real
data = self._overlap_add(frames, self.to_frame_count(frame_shift))
return data
def subtract(
self, other, α=5.0, β=.02,
frame_duration=.08, frame_shift=.02, wtype='boxcar'
):
''' returns new track after application of spectral subtraction '''
result = self.copy()
result._data = self._subtract_data(
other, α, β, frame_duration, frame_shift, wtype
)
return result
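# Illustrative denoising sketch (assumes `noise` is a Sound holding a
# noise-only segment at the same sample rate as `track`):
#
#   clean = track.subtract(noise, α=5.0, β=.02)
#   clean.View(itype='logspectrogram')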
def Listen(self, data=None):
'''
creates a jupyter compliant audio component, always resampled to
`self.BULLSHITWAVNUMBER` frames per second. This is required in order
to play the track in a browser. For most accurate listening, consider
saving out the content and using `sox` audio player.
'''
if data is None:
data = self._data.copy()
# cannot resample values with nan
idx = np.isnan(data)
data[idx] = 0
# bug in IPython.Audio, only handles common fs
data = self._resample_fs(data, self.BULLSHITWAVNUMBER, self._fs)
from IPython.display import Audio
return Audio(data=data, rate=self.BULLSHITWAVNUMBER)
def View(self, itype=None, **kwargs):
'''
Shortcut method to jupyter compliant viewing of track under supported
methods. `itype` defines which visualization to provide.
Supported methods are transforms that yield a 1d or 2d numpy array.
None => wav files
'spectrogram', 'logspectrogram', 'power', 'periodogram',
'cepstrum', ...
`**kwargs` specify the parameters to the `itype` method. See associated
method signatures. Sane defaults are selected.
'''
if itype is None:
unit = self._data
elif hasattr(self, itype):
attr = getattr(self, itype)
unit = attr(**kwargs) if callable(attr) else attr
else:
raise NoViewMethodError
from matplotlib import pyplot as plt
fig, ax = plt.subplots()
name = 'wave' if itype is None else itype
_ = plt.title(f'{name}') # @ {self._time_stamp}')
if isinstance(unit, PlotInfo):
_ = plt.imshow(X=unit.data.T.real, interpolation=None)
_ = plt.yticks([])
_ = plt.ylabel(
f'{unit.interval:.3f} interval, {unit.shift:.3f} '
f'shift, {self._fs} f/s'
)
elif len(unit.shape) == 1:
_ = plt.plot(unit)
elif len(unit.shape) == 2:
_ = plt.imshow(X=unit.T.real, interpolation=None)
else:
raise UnsupportedViewDimmensions
@property
def header(self):
return 'audio/x-wav'
def get_wav(self, *, resample=True):
'''Retrieves io.BytesIO() packed with `.wav` contents'''
result = self.resample_fs(self.BULLSHITWAVNUMBER) if resample \
else self.copy()
data = result.normdata(dtype=np.int16)
bytes_io = io.BytesIO()
wavwrite(bytes_io, result._fs, data)
return bytes_io
def save(self, label, note="", store='saved.csv'):
data = self._data.copy()
data.flags.writeable = False
filename = f'{np.abs(hash(data.tobytes()))}.wav'
scipy.io.wavfile.write(filename, self._fs, self.normdata(np.int16))
with open(store, mode='a') as fd:
_ = note.replace(',', ':comma:')
print(filename, label, _, sep=',', file=fd) | ACOio | /ACOio-0.2.2-py3-none-any.whl/aco/sound.py | sound.py |
from datetime import datetime, timedelta
import os.path as osp
import re
import warnings
from operator import attrgetter
from functools import reduce
import numpy as np
from memoized_property import memoized_property
import pydub
from . import sound
class _FileLoader:
resolution = np.int32
time_code = '%Y-%m-%d--%H.%M'
@classmethod
def load_ACO_from_file(cls, basedir, relpath):
time_stamp = cls._date_from_filename(relpath)
filename = osp.join(basedir, relpath)
fs = cls._frames_per_second(filename)
data = cls._data_from_file(filename)
return ACO(time_stamp, fs, data, True)
@classmethod
def _data_from_file(cls, filename):
raise NotImplementedError
@classmethod
def _date_from_filename(cls, filename):
# 2016-02-15--05.00.HYD24BBpk
name = osp.basename(filename)
dts, _ = name.rsplit('.', 1)
time_stamp = datetime.strptime(dts, cls.time_code)
return time_stamp
@classmethod
def _frames_per_second(cls, filename):
raise NotImplementedError
class _FileMp3Loader(_FileLoader):
extension = 'mp3'
@classmethod
def _data_from_file(cls, filename):
a = pydub.AudioSegment.from_mp3(filename)
y = np.array(a.get_array_of_samples())
return y
@classmethod
def _frames_per_second(cls, filename):
a = pydub.AudioSegment.from_mp3(filename)
return a.frame_rate
class _FileACOLoader(_FileLoader):
extension = 'HYD24BBpk'
header_dtype = np.dtype(
[('Record', '<u4'),
('Decimation', '<u1'),
('StartofFile', '<u1'),
('Sync1', '<u1'),
('Sync2', '<u1'),
('Statusbyte1', '<u1'),
('Statusbyte2', '<u1'),
('pad1', '<u1'),
('LeftRightFlag', '<u1'),
('tSec', '<u4'),
('tuSec', '<u4'),
('timecount', '<u4'),
('Year', '<i2'),
('yDay', '<i2'),
('Hour', '<u1'),
('Min', '<u1'),
('Sec', '<u1'),
('Allignment', '<u1'),
('sSec', '<i2'),
('dynrange', '<u1'),
('bits', '<u1')])
@classmethod
def _ACO_to_int(cls, databytes, nbits):
'''
Convert the block of bytes to an array of int32.
We need to use int32 because there can be 17 bits.
'''
nbits = int(nbits)
# Fast path for special case of 16 bits:
if nbits == 16:
return databytes.view(np.int16).astype(cls.resolution)
# Put the bits in order from LSB to MSB:
bits = np.unpackbits(databytes).reshape(-1, 8)[:, ::-1]
# Group by the number of bits in the int:
bits = bits.reshape(-1, nbits)
# Reassemble the integers:
pows = 2 ** np.arange(nbits, dtype=cls.resolution)
num = (bits * pows).sum(axis=1).astype(cls.resolution)
# Handle twos-complement negative integers:
neg = num >= 2**(nbits-1)
num[neg] -= 2**nbits
return num
@classmethod
def _frames_per_second(cls, path):
name = osp.basename(path)
_, encs = name.rsplit('.', 1)
fs = int(re.findall(r'\d+', encs).pop()) * 1000
return fs
@classmethod
def _params_from_filename(cls, filename):
timestamp = cls._date_from_filename(filename)
fs = cls._frames_per_second(filename)
return timestamp, fs
@classmethod
def _data_from_file(cls, filename):
headerlist = []
datalist = []
with open(filename, 'rb') as fid:
fid.seek(0, 2)
eof = fid.tell()
fid.seek(0, 0)
while fid.tell() < eof:
header = np.fromfile(fid, count=1, dtype=cls.header_dtype)[0]
headerlist.append(header)
nbits = int(header['bits'])
count = (4096//8) * nbits
databytes = np.fromfile(fid, count=count, dtype='<u1')
data = cls._ACO_to_int(databytes, nbits)
datalist.append(data)
# headers = np.array(headerlist)
# Keeping the blocks separate, matching the headers:
data = np.vstack(datalist)
# But we can also view it as a single time series:
alldata = data.reshape(-1)
return alldata
class _DatetimeLoader:
expected_file_length = timedelta(minutes=5)
@classmethod
def __floor_dt(cls, dt):
src = timedelta(hours=dt.hour, minutes=dt.minute, seconds=dt.second)
offset = src.total_seconds() % cls.expected_file_length.total_seconds()
return dt - timedelta(seconds=offset)
@classmethod
def _filename_from_date(cls, index_datetime):
dts = datetime.strftime(index_datetime, cls.time_code)
return '.'.join([dts, cls.extension])
@classmethod
def _path_from_date(cls, index_datetime, normdir):
info = [index_datetime.year, index_datetime.month, index_datetime.day]
dirname = osp.join(*map(lambda i: str(i).zfill(2), info)) \
if normdir is None else normdir
basename = cls._filename_from_date(index_datetime)
return osp.join(dirname, basename)
@classmethod
def path_from_date(cls, search_datetime, normdir=None):
floor_datetime = cls.__floor_dt(search_datetime)
return cls._path_from_date(floor_datetime, normdir=normdir)
@classmethod
def _load_full_ACO_from_base_datetime(
cls,
basedir, floor_datetime, normdir=None
):
aco = cls.load_ACO_from_file(
basedir, cls._path_from_date(floor_datetime, normdir)
)
return aco
@classmethod
def load_span_ACO_from_datetime(
cls,
basedir, index_datetime,
durration
):
result = []
floor_datetime = cls.__floor_dt(index_datetime)
start = index_datetime - floor_datetime
end = start + durration
local_end = end
while local_end.total_seconds() > 0:
try:
_ = cls._load_full_ACO_from_base_datetime(
basedir,
floor_datetime
)
except FileNotFoundError:
warnings.warn(
'index-range not continuous in local storage',
UserWarning
)
break
floor_datetime = cls.__floor_dt(
floor_datetime + start + cls.expected_file_length
)
local_end = local_end - _._durration
result.append(_)
if not result:
raise FileNotFoundError(cls._path_from_date(floor_datetime, None), "Check Your Basedir")
aco = reduce(ACO.__matmul__, result).squash_nan()
return aco[start:end]
class _DatetimeACOLoader(_DatetimeLoader, _FileACOLoader):
pass
class _DatetimeMp3Loader(_DatetimeLoader, _FileMp3Loader):
pass
class Loader:
def __init__(self, basedir):
self.basedir = basedir
def _path_loader(self, target):
raise NotImplementedError
def _date_loader(self, target, durration):
raise NotImplementedError
def load(self, target, durration=None):
if isinstance(target, str):
return self._path_loader(target, durration)
elif isinstance(target, datetime):
if durration is None:
durration = timedelta(minutes=5)
return self._date_loader(target, durration)
else:
raise TypeError
class ACOLoader(Loader):
def _path_loader(self, target):
return _DatetimeACOLoader.load_ACO_from_file(self.basedir, target)
def _date_loader(self, target, durration):
return _DatetimeACOLoader.load_span_ACO_from_datetime(
self.basedir, target, durration)
class Mp3Loader(Loader):
def _path_loader(self, target):
return _DatetimeMp3Loader.load_ACO_from_file(self.basedir, target)
def _date_loader(self, target, durration):
return _DatetimeMp3Loader.load_span_ACO_from_datetime(
self.basedir, target, durration)
class ACOio:
def __init__(self, basedir, Loader=ACOLoader):
self.loader = Loader(basedir)
def load(self, target, durration=None):
return self.loader.load(target, durration)
class ACO(sound.Sound):
def __init__(self, time_stamp, fs, data, raw=False):
super().__init__(fs, data)
self.start_datetime = time_stamp
self.raw = raw
def copy(self):
return ACO(
self.start_datetime,
self._fs,
self._data.copy(),
self.raw
)
@memoized_property
def end_datetime(self):
return self.date_offset(self._durration)
def date_offset(self, durration):
return self.start_datetime + durration
def _date_difference(self, d):
return self.durration_to_index(d - self.start_datetime)
def __oolb(self, slice_):
return (slice_.start < timedelta(0))
def __ooub(self, slice_):
return (self.date_offset(slice_.stop) > self.end_datetime)
def _oob(self, slice_):
return self.__oolb(slice_) or self.__ooub(slice_)
@classmethod
def _reversed_indexing(cls, slice_):
return (slice_.stop < slice_.start)
def __getitem__(self, slice_):
idx = timedelta(seconds=0) if slice_.start is None else slice_.start
jdx = self._durration if slice_.stop is None else slice_.stop
slice_ = slice(idx, jdx)
if self._reversed_indexing(slice_):
raise IndexError("Does not support reverse indexing")
if self._oob(slice_):
warnings.warn('Slice Out of Bounds', UserWarning)
result = self.copy()
start = slice_.start
timestamp = self.start_datetime + (
timedelta(0) if start is None else start
)
idx, jdx = self._getitem__indicies(slice_)
data = self._data[idx:jdx]
result._data = data
result.timestamp = timestamp
return result
def __matmul__(self, other):
'''
allows date-time respecting joins of tracks
'''
assert(self.raw)
assert(other.raw)
A, B = self.copy(), other.copy()
ordered = (A, B) # wlg
if self._fs != other._fs:
ordered = sorted((self, other), key=attrgetter('_fs'))
ordered[-1] = ordered[-1].resample_fs(ordered[0]._fs)
ordered = sorted(ordered, key=attrgetter('start_datetime'))
durration = ordered[-1].end_datetime - ordered[0].start_datetime
space = max(
ordered[0].durration_to_index(durration),
len(A._data), len(B._data))
data = np.full(space, np.NAN)
idx = ~np.isnan(ordered[0]._data)
data[:len(ordered[0]._data)][idx] = ordered[0]._data[idx]
durration = ordered[-1].start_datetime - ordered[0].start_datetime
start = ordered[0].durration_to_index(durration)
idx = ~np.isnan(ordered[-1]._data)
overlap_count = np.sum(~np.isnan(data[start:][idx]))
data[start:][idx] = ordered[-1]._data[idx]
if overlap_count > 0:
warnings.warn(f'Overlaps {overlap_count} samples', UserWarning)
result = self.__class__(
ordered[0].start_datetime,
ordered[0]._fs,
data,
ordered[0].raw
)
return result
if __name__ == '__main__':
loader = ACOio('./dst/', Mp3Loader)
target = datetime(
day=1, month=2, year=2016
)
aco = loader.load(target) | ACOio | /ACOio-0.2.2-py3-none-any.whl/aco/aco.py | aco.py |
from math import log10, floor, ceil
# numpy
import numpy as np
# matplotlib
import matplotlib.pyplot as plt
from matplotlib.ticker import FixedLocator, FixedFormatter
# warnings
import warnings
# obs
from acropolis.obs import pdg2020
# pprint
from acropolis.pprint import print_info
# params
from acropolis.params import NY
# Set the general style of the plot
plt.rc('text', usetex=True)
plt.rc('font', family='serif', size=14)
# Include additional latex packages
plt.rcParams['text.latex.preamble'] = r'\usepackage{amsmath}\usepackage{mathpazo}'
# A global variable counting the
# number of created plots, in order
# to provide unique plot identifiers
_plot_number = 0
# The number of sigmas at which a
# point is considered excluded
_95cl = 1.95996 # 95% C.L.
# DATA EXTRACTION ###################################################
def _get_abundance(data, i):
# Add + 2 for the two parameters in the first two columns
i0 = i + 2
# Extract the different abundances...
mean, high, low = data[:,i0], data[:,i0+NY], data[:,i0+2*NY]
# ...and calculate an estimate for the error
diff = np.minimum( np.abs( mean - high ), np.abs( mean - low ) )
return mean, diff
def _get_deviations(data, obs):
# Extract and sum up neutrons and protons
mn, en = _get_abundance(data, 0)
mp, ep = _get_abundance(data, 1)
mH, eH = mn + mp, np.sqrt( en**2. + ep**2. )
# Extract and sum up lithium-7 and berylium-7
mLi7, eLi7 = _get_abundance(data, 7)
mBe7, eBe7 = _get_abundance(data, 8)
m7, e7 = mLi7 + mBe7, np.sqrt( eLi7**2. + eBe7**2. )
# Extract deuterium
mD , eD = _get_abundance(data, 2)
# Extract and sum up tritium and helium-3
mT , eT = _get_abundance(data, 3)
mHe3, eHe3 = _get_abundance(data, 4)
m3 , e3 = mT + mHe3, np.sqrt( eT**2. + eHe3**2. )
# Extract helium-4
mHe4, eHe4 = _get_abundance(data, 5)
# Calculate the actual deviations
with warnings.catch_warnings(record=True) as w:
# Calculate the relevant abundance ratios
mYp , eYp = 4.*mHe4, 4.*eHe4
mDH , eDH = mD/mH, (mD/mH)*np.sqrt( (eD/mD)**2. + (eH/mH)**2. )
mHeD, eHeD = m3/mD, (m3/mD)*np.sqrt( (e3/m3)**2. + (eD/mD)**2. )
mLiH, eLiH = m7/mH, (m7/mH)*np.sqrt( (e7/m7)**2. + (eH/mH)**2. )
# Calculate the corresponding deviations
Yp = (mYp - obs[ 'Yp'].mean) / np.sqrt( obs[ 'Yp'].err**2. + eYp**2. )
DH = (mDH - obs[ 'DH'].mean) / np.sqrt( obs[ 'DH'].err**2. + eDH**2. )
HeD = (mHeD - obs['HeD'].mean) / np.sqrt( obs['HeD'].err**2. + eHeD**2. )
LiH = (mLiH - obs['LiH'].mean) / np.sqrt( obs['LiH'].err**2. + eLiH**2. )
if len(w) == 1 and issubclass(w[0].category, RuntimeWarning):
# Nothing to do here
pass
# Take care of potential NaNs
HeD[ mDH < obs['DH'].err ] = 10
DH [ np.isnan(DH) ] = -10
# Return (without reshaping)
return Yp, DH, HeD, LiH
# LATEX INFORMATION #################################################
_tex_data = {
# DecayModel
'mphi' : (r'm_\phi' , r'\mathrm{MeV}' ),
'tau' : (r'\tau_\phi' , r'\mathrm{s}' ),
'temp0' : (r'T_0' , r'\mathrm{MeV}' ),
'n0a' : (r'(n_\phi/n_\gamma)|_{T=T_0}' , r'' ),
# AnnihilationModel
'braa' : (r'\text{BR}_{\gamma\gamma} = 1-\text{BR}_{e^+e^-}', r'' ),
'mchi' : (r'm_\chi' , r'\mathrm{MeV}' ),
'a' : (r'a' , r'\mathrm{cm^3/s}'),
'b' : (r'b' , r'\mathrm{cm^3/s}'),
'tempkd': (r'T_\text{kd}' , r'\mathrm{MeV}' ),
}
def add_tex_data(key, tex, unit):
global _tex_data
_tex_data[key] = (tex, unit)
def tex_title(**kwargs):
global _tex_data
if _tex_data is None:
return
eof = r',\;'
# Define a function to handle values
# that need to be printed in scientific
# notation
def _val_to_string(val):
if type(val) == float:
power = log10( val )
if power != int(power):
# TODO
pass
return r'10^' + str( int(power) )
return str( val )
title = r''
for key in kwargs.keys():
# Extract the numerical value
val = kwargs[ key ]
val_str = _val_to_string( val )
# Extract the latex representation
# of the parameter and its unit
tex, unit = _tex_data[ key ]
# If the value is 0, do not print units
unit = '\,' + unit if val != 0 else r''
title += tex + '=' + val_str + unit + eof
if title.endswith(eof):
title = title[:-len(eof)]
return r'$' + title + r'$'
def tex_label(key):
if key not in _tex_data.keys():
return ''
tex, unit = _tex_data[ key ]
if unit != r'':
unit = r'\;[' + unit + r']'
return r'$' + tex + unit + r'$'
def tex_labels(key_x, key_y):
return ( tex_label(key_x), tex_label(key_y) )
# FIGURE HANDLING ###################################################
def _init_figure():
fig = plt.figure(figsize=(4.8, 4.4), dpi=150, edgecolor='white')
ax = fig.add_subplot(1, 1, 1)
ax.tick_params(axis='both', which='both', labelsize=11, direction='in', width=0.5)
ax.xaxis.set_ticks_position('both')
ax.yaxis.set_ticks_position('both')
for axis in ['top','bottom','left','right']:
ax.spines[axis].set_linewidth(0.5)
return fig, ax
def _set_tick_labels(ax, x, y):
nint = lambda val: ceil(val) if val >= 0 else floor(val)
xmin, xmax = np.min(x), np.max(x)
ymin, ymax = np.min(y), np.max(y)
xmin_log = nint( log10(xmin) )
xmax_log = nint( log10(xmax) )
ymin_log = nint( log10(ymin) )
ymax_log = nint( log10(ymax) )
nx = abs( xmax_log - xmin_log ) + 1
ny = abs( ymax_log - ymin_log ) + 1
# Set the ticks on the x-axis
xticks_major = np.linspace(xmin_log, xmax_log, nx)
xticks_minor = [ log10(i*10**j) for i in range(1, 10) for j in xticks_major ]
xlabels = [ r'$10^{' + f'{int(i)}' + '}$' for i in xticks_major ]
xticks_major_locator = FixedLocator(xticks_major)
xticks_minor_locator = FixedLocator(xticks_minor)
xlabels_formatter = FixedFormatter(xlabels)
ax.xaxis.set_major_locator(xticks_major_locator)
ax.xaxis.set_minor_locator(xticks_minor_locator)
ax.xaxis.set_major_formatter(xlabels_formatter)
ax.set_xlim(xmin_log, xmax_log)
# Set the ticks on the y-axis
yticks_major = np.linspace(ymin_log, ymax_log, ny)
yticks_minor = [ log10(i*10**j) for i in range(1, 10) for j in yticks_major ]
ylabels = [ r'$10^{' + f'{int(i)}' + '}$' for i in yticks_major ]
yticks_major_locator = FixedLocator(yticks_major)
yticks_minor_locator = FixedLocator(yticks_minor)
ylabels_formatter = FixedFormatter(ylabels)
ax.yaxis.set_major_locator(yticks_major_locator)
ax.yaxis.set_minor_locator(yticks_minor_locator)
ax.yaxis.set_major_formatter(ylabels_formatter)
ax.set_ylim(ymin_log, ymax_log)
def save_figure(output_file=None):
global _plot_number
# If no name for the output file is given
# simply enumerate the different plots
if output_file is None:
output_file = 'acropolis_plot_{}.pdf'.format(_plot_number)
_plot_number += 1
plt.savefig(output_file)
print_info(
"Figure has been saved as '{}'".format(output_file),
"acropolis.plot.save_figure"
)
def plot_scan_results(data, output_file=None, title='', labels=('', ''), save_pdf=True, show_fig=False, obs=pdg2020):
# If data is a filename, load the data first
if type(data) == str:
data = np.loadtxt(data)
# Get the set of input parameters...
x, y = data[:,0], data[:,1]
# ...and determine the shape of the data
N = len(x)
Ny = (x == x[0]).sum()
Nx = N//Ny
shape = (Nx, Ny)
# Calculate the abundance deviations
Yp, DH, HeD, LiH = _get_deviations(data, obs)
# Reshape the input data...
x = x.reshape(shape)
y = y.reshape(shape)
# ...and the deviation arrays
Yp = Yp.reshape(shape)
DH = DH.reshape(shape)
HeD = HeD.reshape(shape)
LiH = LiH.reshape(shape)
# Extract the overall exclusion limit
max = np.maximum( np.abs(DH), np.abs(Yp) )
max = np.maximum( max, HeD )
# Init the figure and...
fig, ax = _init_figure()
# ...set the tick labels
_set_tick_labels(ax, x, y)
# Plot the actual data
cut = 1e10
# Deuterium (filled)
ax.contourf(np.log10(x), np.log10(y), DH,
levels=[-cut, -_95cl, _95cl, cut],
colors=['0.6','white', 'tomato'],
alpha=0.2
)
# Helium-4 (filled)
ax.contourf(np.log10(x), np.log10(y), Yp,
levels=[-cut, -_95cl, _95cl, cut],
colors=['dodgerblue','white', 'lightcoral'],
alpha=0.2
)
# Helium-3 (filled)
ax.contourf(np.log10(x), np.log10(y), HeD,
levels=[_95cl, cut], # Only use He3/D as an upper limit
colors=['mediumseagreen'],
alpha=0.2
)
# Deuterium low (line)
ax.contour(np.log10(x), np.log10(y), DH,
levels=[-_95cl], colors='0.6', linestyles='-'
)
# Deuterium high (line)
ax.contour(np.log10(x), np.log10(y), DH,
levels=[_95cl], colors='tomato', linestyles='-'
)
# Helium-4 low (line)
ax.contour(np.log10(x), np.log10(y), Yp,
levels=[-_95cl], colors='dodgerblue', linestyles='-'
)
# Helium-3 high (line)
ax.contour(np.log10(x), np.log10(y), HeD,
levels=[_95cl], colors='mediumseagreen', linestyles='-'
)
# Overall high/low (line)
ax.contour(np.log10(x), np.log10(y), max,
levels=[_95cl], colors='black', linestyles='-'
)
# Set the title...
ax.set_title( title, fontsize=11 )
# ...and the axis labels
ax.set_xlabel( labels[0] )
ax.set_ylabel( labels[1] )
# Set tight layout
plt.tight_layout()
if save_pdf == True:
save_figure(output_file)
if show_fig == True:
plt.show()
# Return figure and axis in case
# further manipulation is desired
return fig, ax | ACROPOLIS | /ACROPOLIS-1.2.2-py3-none-any.whl/acropolis/plots.py | plots.py |
import numpy as np
# time
from time import time, sleep
# itertools
from itertools import product
# multiprocessing
from multiprocessing import Pool, cpu_count
# pprint
from acropolis.pprint import print_info, print_error
# params
from acropolis.params import NY
# models
from acropolis.models import AbstractModel
class _Batch(object):
def __init__(self, length, is_fast):
self.length = length
self.is_fast = is_fast
class ScanParameter(object):
def __init__(self, ivalue, fvalue, num, spacing="log", fast=False):
self._sInitialValue = ivalue
self._sFinalValue = fvalue
self._sSpacingMode = spacing
self._sFastParameter = fast
self._sNumPoints = num
def get_range(self):
if self._sSpacingMode == "log":
return np.logspace( self._sInitialValue, self._sFinalValue, self._sNumPoints )
elif self._sSpacingMode == "lin":
return np.linspace( self._sInitialValue, self._sFinalValue, self._sNumPoints )
def is_fast(self):
return self._sFastParameter
class BufferedScanner(object):
def __init__(self, model, **kwargs):
# Store the requested model
# self._sModel(...) afterwards creates
# a new instance of the requested model
if not issubclass(model, AbstractModel):
print_error(
model.__name__ + " is not a subclass of AbstractModel",
"acropolis.scans.BufferedScanner.__init__"
)
self._sModel = model
#######################################################################
# Initialize the various sets
self._sFixed = {} # Fixed parameter
self._sScanp = {} # Scan parameters...
self._sFastf = {} # ...w/o fast scanning
# Initialize the number of scan parameters...
self._sNP = 0 # (all)
self._sNP_fast = 0 # (only fast)
# Parse the keyword arguments and build up the
# sets 'self._sFixed' and 'self._sScanp'
self._parse_arguments(**kwargs)
#######################################################################
# Generate the keys for the scan parameters
self._sScanp_id = list( self._sScanp.keys() )
# Determine the parameter for the parallelisation
# In case there is a 'fast' parameter, this whould be
# one of the 'non-fast' parameters
#
# Sort the keys in order for the fast parameters
# to be at he beginning of the array
list.sort( self._sScanp_id, key=lambda id: self._sFastf[id], reverse=True )
# Choose the last parameter, which in any case is not the
# 'fast' parameter and therefore can be calculated in parallel
self._sId_pp = self._sScanp_id[-1]
#######################################################################
# Extract the dimension of parallel/sequential jobs
self._sDp, self._sDs = 0, 0
for id in self._sScanp_id:
if id == self._sId_pp:
self._sDp += len( self._sScanp[id] )
else:
self._sDs += len( self._sScanp[id] )
def _parse_arguments(self, **kwargs):
# Loop over the different parameters
for key in kwargs.keys():
param = kwargs[key]
# Extract the fixed values
if type(param) in [int, float]:
self._sFixed[key] = float(param)
# Extract the scan parameters
elif isinstance(param, ScanParameter):
self._sNP += 1
# Save the relevant range of all paremeters
self._sScanp[key] = param.get_range()
# Save the 'is_fast' status of all parameters
self._sFastf[key] = param.is_fast()
else:
print_error(
"All parameters must either be 'int', 'float' or an instance of 'ScanParameter'",
"acropolis.scans.BufferedScanner._parse_arguments"
)
# Get the number of 'fast' parameters (Np_fast <= Np - 1)
self._sNP_fast = list( self._sFastf.values() ).count(True)
# ERRORS for not-yet-implemented features (TODO) ################################
if self._sNP_fast > 1 or self._sNP != 2:
print_error(
"Currently only exactly 2 scan parameters with <= 1 fast parameter are supported!",
"acropolis.scans.BufferedScanner._parse_arguments"
)
# TODO!!!
def _build_batches(self):
# Generate all possible parameter combinations, thereby
# NOT! including the parameter used for the parallelisation
scanp_ls = product( *[self._sScanp[id] for id in self._sScanp_id[:-1]] )
# Right now: One sequential parameter, which is either fast or not
scanp_bt = [ _Batch(self._sDs, self._sNP_fast != 0), ]
return scanp_ls, scanp_bt
def rescale_matp_buffer(self, buffer, factor):
return (factor*buffer[0], buffer[1])
def _perform_non_parallel_scan(self, pp):
# Build the relevant batches
scanp_ls, scanp_bt = self._build_batches()
# Determine the dimensions of the 'result grid'
dx = self._sDs # rows
dy = self._sNP + 3*NY # columns
results = np.zeros( ( dx, dy ) )
# Initialize the buffer
matpb = None
nb, ib = 0, 0
# Loop over the non-parallel parameter(s)
for i, scanp in enumerate(scanp_ls):
# Store the current batch
batch = scanp_bt[nb]
# Check if a reset is required
reset_required = (ib == 0)
# Define the set that contains only scan parameters
scanp_set = dict( zip(self._sScanp_id, scanp) )
scanp_set.update( {self._sId_pp: pp} )
# Define the set that contains all parameters
fullp_set = scanp_set.copy()
fullp_set.update( self._sFixed )
# Initialize the model of choice
model = self._sModel(**fullp_set)
scanp_set_id_0 = scanp_set[self._sScanp_id[0]]
# Rescale the rates with the 'fast' parameter
# but only if the current parameter is 'fast'
if batch.is_fast and (not reset_required):
if matpb is not None:
# matpb might still be None if E0 < Emin
# save, since parameters determining the
# injection energy, should never be fast
factor = scanp_set_id_0/fastp
model.set_matp_buffer( self.rescale_matp_buffer(matpb, factor) )
##############################################################
Yb = model.run_disintegration()
##############################################################
# Reset the buffer/rescaling
if batch.is_fast and reset_required:
matpb = model.get_matp_buffer()
fastp = scanp_set_id_0
# For the output, use the following format
# 1. The 'non fast' parameters
# 3. The 'fast' parameters
sortp_ls = list( zip( scanp_set.keys(), scanp_set.values() ) )
list.sort(sortp_ls, key=lambda el: self._sFastf[ el[0] ]) # False...True
sortp_ls = [ el[1] for el in sortp_ls ]
results[i] = [*sortp_ls, *Yb.transpose().reshape(Yb.size)]
# Update the batch index
if ib == batch.length - 1: # next batch
ib = 0
nb += 1
else:
ib += 1
return results
def perform_scan(self, cores=1):
num_cpus = cpu_count() if cores == -1 else cores
start_time = time()
print_info(
"Running scan for {} on {} cores.".format(self._sModel.__name__, num_cpus),
"acropolis.scans.BufferedScanner.perform_scan",
verbose_level=3
)
with Pool(processes=num_cpus) as pool:
# Loop over all possible combinations, by...
# ...1. looping over the 'parallel' parameter (map)
# ...2. looping over all parameter combinations,
# thereby exclusing the 'parallel' parameter (perform_non_parallel_scan)
async_results = pool.map_async(
self._perform_non_parallel_scan, self._sScanp[self._sId_pp], 1
)
progress = 0
while ( progress < 100 ) or ( not async_results.ready() ):
progress = 100*( self._sDp - async_results._number_left )/self._sDp
print_info(
"Progress: {:.1f}%".format(progress),
"acropolis.scans.BufferedScanner.perform_scan",
eol="\r", verbose_level=3
)
sleep(1)
parallel_results = async_results.get()
pool.terminate()
parallel_results = np.array(parallel_results)
old_shape = parallel_results.shape
parallel_results.shape = (old_shape[0]*old_shape[1], len( self._sScanp_id ) + 3*NY) # + 1)
end_time = time()
print_info(
"Finished after {:.1f}min.".format( (end_time - start_time)/60 ),
"acropolis.scans.BufferedScanner.perform_scan",
verbose_level=3
)
return parallel_results | ACROPOLIS | /ACROPOLIS-1.2.2-py3-none-any.whl/acropolis/scans.py | scans.py |
from sys import stdout, stderr
# params
from acropolis.params import verbose, debug
# info
from acropolis.info import version, dev_version, url
_max_verbose_level = 1
def print_version():
if verbose == True:
# Differentiate between stable and dev version
version_str = ""
# Stable version
if version == dev_version:
version_str = "v{}".format(version)
# Development version
else:
version_str = "v{} [dev]".format(dev_version)
stdout.write( "\x1B[38;5;209mACROPOLIS {} ({})\x1B[0m\n\n".format(version_str, url) )
def print_Yf(Yf, header=["mean", "high", "low"]):
# If not verbose, simply print one line
# including all abundances
if not verbose:
print(*Yf.transpose().reshape(1, Yf.size)[0,:])
return
# Fill potentially missing header entries
NYf = Yf.shape[1]
header.extend( [""] * ( NYf - len(header) ) )
# Set small values to zero to guarantee the same
# width for all abundances in the output
Yf[Yf <= 1e-99] = 0
# Define the set of all possible labels
labels = ['n', 'p', 'H2', 'H3', 'He3', 'He4', 'Li6', 'Li7', 'Be7']
# Print the header
header_str = "\n{:^4}"
for i in range(NYf):
header_str += " | \x1B[35m{:^11}\x1B[0m"
print( header_str.format("", *header) )
print("----------------------------------------------")
# Print the different abundances
for j, l in enumerate(labels):
line = "\x1B[34m{:>4}\x1B[0m"
for i in range(NYf):
line += " | {:11.5e}"
if l in ['n', 'H3', 'Be7']:
line += " [\x1B[36m{:7}\x1B[0m]"
print( line.format(l, *Yf[j], 'decayed') )
def print_error(error, loc="", eol="\n"):
locf = ""
if debug == True and loc != "":
locf = " \x1B[1;35m(" + loc + ")\x1B[0m"
stderr.write("\x1B[1;31mERROR \x1B[0m: " + error + locf + eol)
exit(1)
def print_warning(warning, loc="", eol="\n"):
locf = ""
if debug == True and loc != "":
locf = " \x1B[1;35m(" + loc + ")\x1B[0m"
stdout.write("\x1B[1;33mWARNING\x1B[0m: " + warning + locf + eol)
def print_info(info, loc="", eol="\n", verbose_level=None):
global _max_verbose_level
if verbose_level is None:
verbose_level = _max_verbose_level
_max_verbose_level = max( _max_verbose_level, verbose_level )
locf = ""
if debug == True and loc != "":
locf = " \x1B[1;35m(" + loc + ")\x1B[0m"
if verbose and verbose_level >= _max_verbose_level:
stdout.write("\x1B[1;32mINFO \x1B[0m: " + info + locf + eol)
def set_max_verbose_level(max_verbose_level=None):
global _max_verbose_level
if max_verbose_level is None:
max_verbose_level = 1
_max_verbose_level = max_verbose_level | ACROPOLIS | /ACROPOLIS-1.2.2-py3-none-any.whl/acropolis/pprint.py | pprint.py |
from math import log, pow
# numpy
import numpy as np
class LogInterp(object):
def __init__(self, x_grid, y_grid, base=np.e, fill_value=None):
self._sBase = base
self._sLogBase = log(self._sBase)
self._sFillValue = fill_value
self._sXLog = np.log(x_grid)/self._sLogBase
self._sYLog = np.log(y_grid)/self._sLogBase
self._sXminLog = self._sXLog[ 0]
self._sXmaxLog = self._sXLog[-1]
if self._sXmaxLog <= self._sXminLog:
raise ValueError(
"The values in x_grid need to be in ascending order."
)
self._sN = len(self._sXLog)
self._sCache = {}
def _perform_interp(self, x):
x_log = log(x)/self._sLogBase
if not (self._sXminLog <= x_log <= self._sXmaxLog):
if self._sFillValue is None:
raise ValueError(
"The given value does not lie within the interpolation range."
)
return self._sFillValue
ix = int( ( x_log - self._sXminLog )*( self._sN - 1 )/( self._sXmaxLog - self._sXminLog ) )
# Handle the case for which ix+1 is out-of-bounds
if ix == self._sN - 1: ix -= 1
x1_log, x2_log = self._sXLog[ix], self._sXLog[ix+1]
y1_log, y2_log = self._sYLog[ix], self._sYLog[ix+1]
m = ( y2_log - y1_log )/( x2_log - x1_log )
b = y2_log - m*x2_log
return pow( self._sBase, m*x_log + b )
def __call__(self, x):
if x not in self._sCache:
self._sCache[x] = self._perform_interp(x)
return self._sCache[x]
# Cummulative numerical Simpson integration
def cumsimp(x_grid, y_grid):
n = len(x_grid)
delta_z = log( x_grid[-1]/x_grid[0] )/( n-1 )
g_grid = x_grid*y_grid
i_grid = np.zeros( n )
last_even_int = 0.
for i in range(1, n//2 + 1):
ie = 2 * i
io = 2 * i - 1
i_grid[io] = last_even_int + 0.5 * delta_z * (g_grid[io-1] + g_grid[io])
if ie < n:
i_grid[ie] = last_even_int + delta_z * (g_grid[ie-2] + 4.*g_grid[ie-1] + g_grid[ie])/3.
last_even_int = i_grid[ie]
return i_grid | ACROPOLIS | /ACROPOLIS-1.2.2-py3-none-any.whl/acropolis/utils.py | utils.py |
import gzip
# pickle
import pickle
# os
from os import path
# numba
import numba as nb
# time
from time import time
# pprint
from acropolis.pprint import print_info
# params
from acropolis.params import usedb
from acropolis.params import Emin_log, Emax_log, Enum
from acropolis.params import Tmin_log, Tmax_log, Tnum
def import_data_from_db():
pkg_dir, _ = path.split(__file__)
db_file = path.join(pkg_dir, "data", "rates.db.gz")
ratedb = None
if not usedb or not path.exists(db_file):
return ratedb
start_time = time()
print_info(
"Extracting and reading database files.",
"acropolis.db.import_data_from_db",
verbose_level=1
)
ratefl = gzip.open(db_file, "rb")
ratedb = pickle.load(ratefl)
ratefl.close()
end_time = time()
print_info(
"Finished after {:.1f}ms.".format( 1e3*(end_time - start_time) ),
"acropolis.db.import_data_from_db",
verbose_level=1
)
return ratedb
def in_rate_db(E_log, T_log):
if (Emin_log <= E_log <= Emax_log) and (Tmin_log <= T_log <= Tmax_log):
return True
return False
def in_kernel_db(E_log, Ep_log, T_log):
if (Emin_log <= E_log <= Emax_log) \
and (Emin_log <= Ep_log <= Emax_log) \
and (Tmin_log <= T_log <= Tmax_log):
return True
return False
@nb.jit(cache=True)
def _get_E_log(i):
return Emin_log + (Emax_log - Emin_log)*i/(Enum - 1)
@nb.jit(cache=True)
def _get_T_log(i):
return Tmin_log + (Tmax_log - Tmin_log)*i/(Tnum - 1)
@nb.jit(cache=True)
def _get_E_index(E_log):
index = int( ( Enum - 1 ) * ( E_log - Emin_log ) / ( Emax_log - Emin_log ) )
# For points at the upper boundary, i+1 does not exist
return index if index != Enum - 1 else index - 1
@nb.jit(cache=True)
def _get_T_index(T_log):
index = int( ( Tnum - 1 ) * ( T_log - Tmin_log ) / ( Tmax_log - Tmin_log ) )
# For points at the upper boundary, i+1 does not exist
return index if index != Tnum - 1 else index - 1
@nb.jit(cache=True)
def interp_rate_db(rate_db, id, E_log, T_log):
# Extract the correct index for the datafile
c = {
'ph:rate_pair_creation' : 0,
'el:rate_inverse_compton': 1
}[id]
# Calculate the respective indices in the interpolation file
iE, iT = _get_E_index(E_log), _get_T_index(T_log)
# Perform the interpolation according to the wikipedia page:
# https://en.wikipedia.org/wiki/Bilinear_interpolation
x , y = T_log, E_log
x0, y0 = _get_T_log(iT ), _get_E_log(iE )
x1, y1 = _get_T_log(iT+1), _get_E_log(iE+1)
xd, yd = (x-x0)/(x1-x0), (y-y0)/(y1-y0)
# Define the index function
k = lambda jT, jE: jT*Enum + jE
c00 = rate_db[ k(iT , iE ) ][c]
c10 = rate_db[ k(iT+1, iE ) ][c]
c01 = rate_db[ k(iT , iE+1) ][c]
c11 = rate_db[ k(iT+1, iE+1) ][c]
d = (x0-x1)*(y0-y1)
a0 = ( c00*x1*y1 - c01*x1*y0 - c10*x0*y1 + c11*x0*y0 )/d
a1 = ( -c00*y1 + c01*y0 + c10*y1 - c11*y0 )/d
a2 = ( -c00*x1 + c01*x1 + c10*x0 - c11*x0 )/d
a3 = ( c00 - c01 - c10 + c11 )/d
return 10.**( a0 + a1*x + a2*y + a3*x*y )
@nb.jit(cache=True)
def interp_kernel_db(kernel_db, id, E_log, Ep_log, T_log):
c = {
'ph:kernel_inverse_compton': 0,
'el:kernel_pair_creation' : 1,
'el:kernel_inverse_compton': 2
}[id]
# Calculate the respective indices in the interpolation file
iE, iEp, iT = _get_E_index(E_log), _get_E_index(Ep_log), _get_T_index(T_log)
# Perform the interpolation according to the wikipedia page:
# https://en.wikipedia.org/wiki/Trilinear_interpolation
x , y , z = T_log, E_log, Ep_log
x0, y0, z0, = _get_T_log(iT ), _get_E_log(iE ), _get_E_log(iEp )
x1, y1, z1, = _get_T_log(iT+1), _get_E_log(iE+1), _get_E_log(iEp+1)
xd, yd, zd = (x-x0)/(x1-x0), (y-y0)/(y1-y0), (z-z0)/(z1-z0)
# Define the index function
k = lambda jT, jE, jEp: jT*Enum*(Enum+1)//2 + jE*Enum - (jE-1)*jE//2 + (jEp - jE)
c000 = kernel_db[ k(iT , iE , iEp ) ][c]
c100 = kernel_db[ k(iT+1, iE , iEp ) ][c]
c010 = kernel_db[ k(iT , iE+1, iEp ) ][c]
c001 = kernel_db[ k(iT , iE , iEp+1) ][c]
c110 = kernel_db[ k(iT+1, iE+1, iEp ) ][c]
c101 = kernel_db[ k(iT+1, iE , iEp+1) ][c]
c111 = kernel_db[ k(iT+1, iE+1, iEp+1) ][c]
c011 = kernel_db[ k(iT , iE+1, iEp+1) ][c]
c00 = c000*(1.-xd) + c100*xd
c01 = c001*(1.-xd) + c101*xd
c10 = c010*(1.-xd) + c110*xd
c11 = c011*(1.-xd) + c111*xd
c0 = c00*(1.-yd) + c10*yd
c1 = c01*(1.-yd) + c11*yd
c = c0*(1.-zd) + c1*zd
d = (x0-x1)*(y0-y1)*(z0-z1)
a0 = ( -c000*x1*y1*z1 + c001*x1*y1*z0 + c010*x1*y0*z1 - c011*x1*y0*z0 + \
c100*x0*y1*z1 - c101*x0*y1*z0 - c110*x0*y0*z1 + c111*x0*y0*z0)/d
a1 = ( c000*y1*z1 - c001*y1*z0 - c010*y0*z1 + c011*y0*z0 + \
-c100*y1*z1 + c101*y1*z0 + c110*y0*z1 - c111*y0*z0)/d
a2 = ( c000*x1*z1 - c001*x1*z0 - c010*x1*z1 + c011*x1*z0 + \
-c100*x0*z1 + c101*x0*z0 + c110*x0*z1 - c111*x0*z0)/d
a3 = ( c000*x1*y1 - c001*x1*y1 - c010*x1*y0 + c011*x1*y0 + \
-c100*x0*y1 + c101*x0*y1 + c110*x0*y0 - c111*x0*y0)/d
a4 = ( -c000*z1 + c001*z0 + c010*z1 - c011*z0 + c100*z1 - c101*z0 - c110*z1 + c111*z0 )/d
a5 = ( -c000*y1 + c001*y1 + c010*y0 - c011*y0 + c100*y1 - c101*y1 - c110*y0 + c111*y0 )/d
a6 = ( -c000*x1 + c001*x1 + c010*x1 - c011*x1 + c100*x0 - c101*x0 - c110*x0 + c111*x0 )/d
a7 = ( c000 - c001 - c010 + c011 - c100 + c101 + c110 - c111 )/d
return 10.**( a0 + a1*x + a2*y + a3*z + a4*x*y + a5*x*z + a6*y*z + a7*x*y*z ) | ACROPOLIS | /ACROPOLIS-1.2.2-py3-none-any.whl/acropolis/db.py | db.py |
from functools import wraps
# math
from math import sqrt, log10, log, exp
# numpy
import numpy as np
# scipy
from scipy.integrate import quad
from scipy.integrate import IntegrationWarning
from scipy.linalg import expm
# time
from time import time
# warnings
import warnings
# util
from acropolis.utils import LogInterp
# pprint
from acropolis.pprint import print_error, print_warning, print_info
# params
from acropolis.params import me, me2, hbar, tau_n, tau_t
from acropolis.params import approx_zero, eps, E_EC_max
from acropolis.params import NT_pd, NY
from acropolis.params import universal
# cascade
from acropolis.cascade import SpectrumGenerator
# A dictionary containing all relevant nuclei, or more precisely
# all nuclei that appear in the reactions specified in '_reactions'
# or in the decays specified in '_decays'
_nuclei = {
"n" : 0,
"p" : 1,
"d" : 2,
"t" : 3,
"He3": 4,
"He4": 5,
"Li6": 6,
"Li7": 7,
"Be7": 8
}
# A dictionary containing all relevant pdi reactions
# This dict can be modified if new reactions are added
# In this case, also remember to modify the function
# 'NuclearReactor.get_cross_section(reaction_id, E)'
_reactions = {
1 : "d+a>n+p",
2 : "t+a>n+d",
3 : "t+a>n+p+n",
4 : "He3+a>p+d",
5 : "He3+a>n+p+p",
6 : "He4+a>p+t",
7 : "He4+a>n+He3",
8 : "He4+a>d+d",
9 : "He4+a>n+p+d",
10: "Li6+a>n+p+He4",
11: "Li6+a>X",
12: "Li7+a>t+He4",
13: "Li7+a>n+Li6",
14: "Li7+a>n+n+p+He4",
15: "Be7+a>He3+He4",
16: "Be7+a>p+Li6",
17: "Be7+a>p+p+n+He4"
}
# A dictionary containing all accociated threshold
# energies. All energies are given in MeV
_eth = {
1 : 2.224573,
2 : 6.257248,
3 : 8.481821,
4 : 5.493485,
5 : 7.718058,
6 : 19.813852,
7 : 20.577615,
8 : 23.846527,
9 : 26.071100,
10: 3.698892,
11: 15.794685,
12: 2.467032,
13: 7.249962,
14: 10.948850,
15: 1.586627,
16: 5.605794,
17: 9.304680
}
# A dictionary containing the theoretical errors for
# the different reaction rates (taken from 2006.14803)
# in terms of a relative deviation from the mean value
# 8 (He4->d+d); 10, 11; (Li6->...); 12, 14 (Li7->...)
_rdev = {
1: 0.00,
2: 0.00,
3: 0.00,
4: 0.00,
5: 0.00,
6: 0.00,
7: 0.00,
8: 0.00,
9: 0.00,
10: 0.00,
11: 0.00,
12: 0.00,
13: 0.00,
14: 0.00,
15: 0.00,
16: 0.00,
17: 0.00
}
# A dictionary containing all relevant decays
_decays = {
1: "n>p",
2: "t>He3"
#3: "Be7>Li7"
}
# A dictionary containing all accociated lifetimes.
# All lifetimes are given in s
_tau = {
1: tau_n,
2: tau_t
#3: 6.634e6 # T_(1/2) = 4.598e6
}
# The number of relevant nucleons
_nnuc = len( _nuclei )
# The number of relevant reaction
_nrec = len( _reactions )
# The number of relevant decays
_ndec = len( _decays )
# A list containing all reactions id's
_lrid = list( _reactions.keys() )
# A list containing all decay id's
_ldid = list( _decays.keys() )
def _extract_signature(reaction_str):
sp = reaction_str.split(">")
# Extract the inital state
istate = _nuclei[ sp[0].split("+")[0] ]
# Extract the final state
#
# Set up a default dictionary (to store the final nucleons)
fstate = { i:0 for i in range( _nnuc ) }
# Fill the dictionary; Afterwards this variable stores
# the number of all nuleids in the final state
# The result looks somewhat like
# {0:<number_of_n_in_fs>, 1:<number_of_p_in_fs, 3:...}
for Nstr in sp[1].split("+"):
if Nstr in _nuclei: # Do not consider particles like X in reaction 11
fstate[ _nuclei[Nstr] ] += 1
return istate, fstate
# A dictionary containing the signatures
# for the different pdi reactions
_rsig = { rid:_extract_signature( _reactions[rid] ) for rid in _lrid }
# A dictionary containing the signatures
# for the different decays
_dsig = { did:_extract_signature( _decays[ did] ) for did in _ldid }
def _convert_mb_to_iMeV2(f_in_mb):
# Define the conversion factor
cf = 2.56819e-6
# Define the wrapper function
@wraps(f_in_mb)
def f_in_iMeV2(*args, **kwargs):
return cf * f_in_mb(*args, **kwargs)
return f_in_iMeV2
class NuclearReactor(object):
def __init__(self, s0, sc, temp_rg, e0, ii):
self._sII = ii
# A dictionary containing the BBN parameters
self._sY0 = self._sII.bbn_abundances_0()
# The injection energy
self._sE0 = e0
# The baryon-to-photon ratio at the time of the CMB
self._sEta = self._sII.parameter("eta")
# The source terms without the detla function
self._sS0 = s0
# The FSR source terms
self._sSc = sc
# The approximate decay temperature of the mediator
self._sTrg = temp_rg
# An instance of 'Spectrum_Generator' in order to calculate
# the photon spectrum in the function 'get_reaction_rate(reaction_id, T)'
self._sGen = SpectrumGenerator(self._sY0, self._sEta)
# BEGIN REACTIONS ###############################################
def _generic_expr(self, E, Q, N, p1, p2, p3):
# Below threshold, return 0
if E < Q:
return 0.
return N * (Q**p1) * (E-Q)**p2 / (E**p3)
def _da_np(self, E):
Q = _eth[1]
# Below threshold, return 0.
if E < Q:
return 0.
return 18.75 * ( ( sqrt( Q*(E-Q) )/E )**3. + 0.007947*( sqrt( Q*(E-Q) )/E )**2. * ( (sqrt(Q) - sqrt(0.037))**2./( E - Q + 0.037 ) ) )
def _ta_nd(self, E):
return self._generic_expr(E, _eth[2], 9.8, 1.95, 1.65, 3.6)
def _ta_npn(self, E):
return self._generic_expr(E, _eth[3], 26.0, 2.6, 2.3, 4.9)
def _He3a_pd(self, E):
return self._generic_expr(E, _eth[4], 8.88, 1.75, 1.65, 3.4)
def _He3a_npp(self, E):
return self._generic_expr(E, _eth[5], 16.7, 1.95, 2.3, 4.25)
def _He4a_pt(self, E):
return self._generic_expr(E, _eth[6], 19.5, 3.5, 1.0, 4.5)
def _He4a_nHe3(self, E):
# Prefactor changed from 17.1mb to 20.7mb to account for EXFOR data
# cf. 'hep-ph/0604251 [38]' for more details
return self._generic_expr(E, _eth[7], 20.7, 3.5, 1.0, 4.5)
def _He4a_dd(self, E):
return self._generic_expr(E, _eth[8], 10.7, 10.2, 3.4, 13.6)
def _He4a_npd(self, E):
return self._generic_expr(E, _eth[9], 21.7, 4.0, 3.0, 7.0)
def _Li6a_npHe4(self, E):
# Prefactor changed from 104.0mb to 143.0mb to account for EXFOR data
# cf. 'hep-ph/0604251 [39]' for more details
return self._generic_expr(E, _eth[10], 143.0, 2.3, 4.7, 7.0)
def _Li6a_XA3(self, E):
Q = _eth[11]
# Template for the exponential term of the form
# -- N * exp( -1/2*( (E - Eb)/Ed )^2 ) --
# E, Eb, Ed in MeV, N unitless, result unitless
def exp_term(E, N, Eb, Ed):
return N * exp( -(1./2.)*( (E - Eb)/Ed )**2. )
# __genereic_expr returns 0. below threshold
return self._generic_expr(E, Q, 38.1, 3.0, 2.0, 5.0) * ( exp_term(E, 3.7, 19.0, 3.5) + exp_term(E, 2.75, 30.0, 3.0) + exp_term(E, 2.2, 43.0, 5.0) )
def _Li7a_tHe4(self, E):
Q = _eth[12]
# Below threshold, return 0.
if E < Q:
return 0.
# Define the excess energy relative to the threshold
Ecm = E - Q
# Define the closing polynomial in Ecm...
pEcm = 1. + 2.2875*(Ecm**2.) - 1.1798*(Ecm**3.) + 2.5279*(Ecm**4.)
# ...and for pEcm < 0, return 0. (This should be a continuous transition)
# For this reaction, however, there should not be any problems
# The roots of pEcm are all imaginary according to 'WolframAlpha'
# Therefore, do not perform the check in order not to loose performance
#if pEcm < 0.:
# return 0.
return 0.105 * ( 2371./(E**2) ) * exp( -2.5954/sqrt(Ecm) ) * exp(-2.056*Ecm) * pEcm
def _Li7a_nLi6(self, E):
Q = _eth[13]
# Below threshold, return 0.
if E < Q:
return 0.
return self._generic_expr(E, Q, 0.176, 1.51, 0.49, 2.0) + self._generic_expr(E, Q, 1205.0, 5.5, 5.0, 10.5) + 0.06/( 1. + ( (E - Q - 7.46)/0.188 )**2. )
def _Li7a_nnpHe4(self, E):
return self._generic_expr(E, _eth[14], 122.0, 4.0, 3.0, 7.0)
def _Be7a_He3He4(self, E):
Q = _eth[15]
# Below threshold, return 0.
if E < Q:
return 0.
# Define the excess energy relative to the threshold
Ecm = E - Q
# Define the closing polynomial in Ecm...
pEcm = 1. - 0.428*(Ecm**2.) + 0.534*(Ecm**3.) - 0.115*(Ecm**4.)
# ... and for pEcm < 0, return 0. (This should be a continuous transition)
# In fact, pEcm has a root at Ecm ~ 3.92599MeV > Q according to 'WolframAlpha',
# This root lies above threshold, which is why we have to explicitly perform
# the corresponding check for this reaction
if pEcm < 0.:
return 0.
return 0.504 * ( 2371./(E**2.) ) * exp( -5.1909/sqrt(Ecm) ) * exp(-0.548*Ecm) * pEcm
def _Be7a_pLi6(self, E):
Q = _eth[16]
return self._generic_expr(E, Q, 32.6, 10.0, 2.0, 12.0) + self._generic_expr(E, Q, 2.27e6, 8.8335, 13.0, 21.8335)
def _Be7a_ppnHe4(self, E):
return self._generic_expr(E, _eth[17], 133.0, 4.0, 3.0, 7.0)
# END REACTIONS #################################################
@_convert_mb_to_iMeV2
def get_cross_section(self, reaction_id, E):
# There is no switch statement in python :(
if reaction_id == 1: return self._da_np(E) # 1. d + a -> n + p
if reaction_id == 2: return self._ta_nd(E) # 2. t + a -> n + d
if reaction_id == 3: return self._ta_npn(E) # 3. t + a -> 2n + p
if reaction_id == 4: return self._He3a_pd(E) # 4. He3 + a -> p + d
if reaction_id == 5: return self._He3a_npp(E) # 5. He3 + a -> n + 2p
if reaction_id == 6: return self._He4a_pt(E) # 6. He4 + a -> p + t
if reaction_id == 7: return self._He4a_nHe3(E) # 7. He4 + a -> n + He3
if reaction_id == 8: return self._He4a_dd(E) # 8. He4 + a -> 2d
if reaction_id == 9: return self._He4a_npd(E) # 9. He4 + a -> n + p + d
if reaction_id == 10: return self._Li6a_npHe4(E) # 10. Li6 + a -> n + p + He4
if reaction_id == 11: return self._Li6a_XA3(E) # 11. Li7 + a -> X + A3
if reaction_id == 12: return self._Li7a_tHe4(E) # 12. Li7 + a -> t + He4
if reaction_id == 13: return self._Li7a_nLi6(E) # 13. Li7 + a -> n + Li6
if reaction_id == 14: return self._Li7a_nnpHe4(E) # 14. Li7 + a -> 2n + p + He4
if reaction_id == 15: return self._Be7a_He3He4(E) # 15. Be7 + a -> He3 + He4
if reaction_id == 16: return self._Be7a_pLi6(E) # 16. Be7 + a -> p + Li6
if reaction_id == 17: return self._Be7a_ppnHe4(E) # 17. Be7 + a -> 2p + n + He4
# If no match is found, return 0.
print_error(
"Reaction with reaction_id" + str(reaction_id) + "does not exist.",
"acropolis.nucl.NuclearReactor.get_cross_section"
)
def _pdi_rates(self, T):
EC = me2/(22.*T)
# Set the maximal energy, serving
# as a cutoff for the integration
Emax = min( self._sE0, E_EC_max*EC )
# For E > me2/T >> EC, the spectrum
# is strongly suppressed
# Define a dict containing the rates
# for the photodisintegration reactions
# key = reaction_id (from _reactions)
pdi_rates = {rid:approx_zero for rid in _lrid}
# Calculate the spectra for the given temperature
if not universal:
xsp, ysp = self._sGen.get_spectrum(
self._sE0, self._sS0, self._sSc, T
)
else:
xsp, ysp = self._sGen.get_universal_spectrum(
self._sE0, self._sS0, self._sSc, T, offset=5e-2
)
# For performance reasons, also
# cut the energy at threshold
Emax = min(self._sE0, EC)
# Interpolate the photon spectrum (in log-log space)
# With this procedure it should be sufficient to perform
# a linear interpolation, which also has less side effects
Fph = LogInterp(xsp, ysp) # Interpolation on: Emin -> E0
# Calculate the kernel for the integration in log-space
def Fph_s(log_E, rid):
E = exp( log_E ); return Fph( E ) * E * self.get_cross_section(rid, E)
# Define the total rate of interactions altering the photon spectrum,
# evaluated at the relevant injection energy E0
rate_photon_E0 = self._sGen.rate_photon(self._sE0, T)
# Calculate the different rates by looping over all available reaction_id's
for rid in _lrid:
# Calculate the 'delta-term'...
I_dt = self._sS0[0](T)*self.get_cross_section(rid, self._sE0)/rate_photon_E0
# ... and use it as an initial value
pdi_rates[rid] = I_dt # might be zero due to exp. suppression!
# Only perform the integral for energies above threshold,
# i.e. do not consider strongly suppressed spectra
if Emax > _eth[rid]:
# Perform the integration from the threshold energy to Emax
with warnings.catch_warnings(record=True) as w:
log_Emin, log_Emax = log(_eth[rid]), log(Emax)
I_Fs = quad(Fph_s, log_Emin, log_Emax, epsrel=eps, epsabs=0, args=(rid,))
if len(w) == 1 and issubclass(w[0].category, IntegrationWarning):
print_warning(
"Slow convergence when calculating the pdi rates " +
"@ rid = %i, T = %.3e, E0 = %.3e, Eth = %.3e" % (rid, T, self._sE0, _eth[rid]),
"acropolis.nucl.NuclearReactor._thermal_rates_at"
)
# Add the result of the integral to the 'delta' term
pdi_rates[rid] += I_Fs[0]
# Avoid potential zeros
pdi_rates[rid] = max(approx_zero, pdi_rates[rid])
# Go home and play
return pdi_rates
def get_pdi_grids(self):
(Tmin, Tmax) = self._sTrg
NT = int(log10(Tmax/Tmin)*NT_pd)
# Create an array containing all
# temperature points ('log spacing')
Tr = np.logspace( log10(Tmin), log10(Tmax), NT )
# Create a dictionary to store the pdi
# rates for all reactions and temperatures
pdi_grids = {rid:np.zeros(NT) for rid in _lrid}
start_time = time()
print_info(
"Calculating non-thermal spectra and reaction rates.",
"acropolis.nucl.NuclearReactor.get_pdi_grids",
verbose_level=1
)
# Loop over all the temperatures and
# calculate the corresponding thermal rates
for i, Ti in enumerate(Tr):
progress = 100*i/NT
print_info(
"Progress: {:.1f}%".format(progress),
"acropolis.nucl.NuclearReactor.get_pdi_grids",
eol="\r", verbose_level=1
)
rates_at_i = self._pdi_rates(Ti)
# Loop over the different reactions
for rid in _lrid:
pdi_grids[rid][i] = rates_at_i[rid]
end_time = time()
print_info(
"Finished after {:.1f}s.".format(end_time - start_time),
"acropolis.nucl.NuclearReactor.get_pdi_grids",
verbose_level=1
)
# Go get some sun
return (Tr, pdi_grids)
class MatrixGenerator(object):
def __init__(self, temp, pdi_grids, ii):
self._sII = ii
# Save the thermal rates
self._sTemp = temp
self._sPdiGrids = pdi_grids
# Save the appropriate temperature range
(self._sTmin, self._sTmax) = temp[0], temp[-1]
# Interpolate the thermal rates
self._sPdiIp = self._interp_pdi_grids()
def _interp_pdi_grids(self):
# A dict containing all interp. rates; key = reaction_id (from _sReactions)
interp_grids = {}
for rid in _lrid:
# Interpolate the rates between
# Tmin and Tmax in log-log space
interp_grids[rid] = LogInterp(
self._sTemp, self._sPdiGrids[rid], base=10. # fill_value=0.
)
return interp_grids
def _pref_ij(self, state, i, j):
ris = state[0] # initial state of the reaction
rfs = state[1] # final state of the reaction
# Find reactions/decays that have
# 1. the nucleid 'nr=i' in the final state
# 2. the nucleid 'nc=j' in the initial state
if ris == j and rfs[i] != 0:
return rfs[i]
# Find reactions/decays that have
# nc = nr in the initial state
# (diagonal entries)
if i == j and ris == j:
return -1.
return 0.
def _pdi_kernel_ij(self, i, j, T):
matij = 0.
for rid in _lrid:
matij += self._pref_ij(_rsig[rid], i, j) * self._sPdiIp[rid](T)
# Incorporate the time-temperature relation and return
return matij/( self._sII.dTdt(T) )
def _dcy_kernel_ij(self, i, j, T):
matij = 0.
for did in _ldid:
matij += self._pref_ij(_dsig[did], i, j) * hbar/_tau[did]
# Incorporate the time-temperature relation and return
return matij/( self._sII.dTdt(T) )
def get_matp(self, T):
# Generate empty matrices
mpdi, mdcy = np.zeros( (_nnuc, _nnuc) ), np.zeros( (_nnuc, _nnuc) )
start_time = time()
print_info(
"Calculating final transfer matrix.",
"acropolis.nucl.MatrixGenerator.get_matp",
verbose_level=1
)
nt = 0
# Rows: Loop over all relevant nuclei
for nr in range(_nnuc):
# Columns: Loop over all relevant nuclei
for nc in range(_nnuc):
nt += 1
progress = 100*nt/_nnuc**2
print_info(
"Progress: {:.1f}%".format(progress),
"acropolis.nucl.MatrixGenerator.get_matp",
eol="\r", verbose_level=1
)
# Define the kernels for the integration in log-log space
Ik_pdi = lambda y: self._pdi_kernel_ij( nr, nc, exp(y) ) * exp(y)
Ik_dcy = lambda y: self._dcy_kernel_ij( nr, nc, exp(y) ) * exp(y)
# Perform the integration (in log-log space)
mpdi[nr, nc] = quad(Ik_pdi, log(self._sTmax), log(T), epsrel=eps, epsabs=0)[0]
mdcy[nr, nc] = quad(Ik_dcy, log(self._sTmax), log(T), epsrel=eps, epsabs=0)[0]
end_time = time()
print_info(
"Finished after {:.1f}ms.".format( 1e3*(end_time - start_time) ),
"acropolis.nucl.MatrixGenerator.get_matp",
verbose_level=1
)
return (mpdi, mdcy)
def get_all_matp(self):
NT = len(self._sTemp)
start_time = time()
print_info(
"Calculating transfer matrices for all temperatures.",
"acropolis.nucl.MatrixGenerator.get_all_matp",
verbose_level=2
)
all_mpdi = np.zeros( (NT, NY, NY) )
all_mdcy = np.zeros( (NT, NY, NY) )
for i, temp in enumerate(self._sTemp):
progress = 100*i/NT
print_info(
"Progress: {:.1f}%".format(progress),
"acropolis.nucl.MatrixGenerator.get_all_matp",
eol="\r", verbose_level=2
)
all_mpdi[i, :, :], all_mdcy[i, :, :] = self.get_matp(temp)
end_time = time()
print_info(
"Finished after {:.1f}s.".format(end_time - start_time),
"acropolis.nucl.MatrixGenerator.get_all_matp",
verbose_level=2
)
return self._sTemp, (all_mpdi, all_mdcy)
def get_final_matp(self):
return self.get_matp( self._sTmin ) | ACROPOLIS | /ACROPOLIS-1.2.2-py3-none-any.whl/acropolis/nucl.py | nucl.py |
from os import path
# math
from math import log10
# numpy
import numpy as np
# tarfilfe
import tarfile
# abc
from abc import ABC, abstractmethod
# util
from acropolis.utils import cumsimp
# pprint
from acropolis.pprint import print_error
# params
from acropolis.params import hbar
from acropolis.params import NY, NC
def locate_sm_file():
pkg_dir, _ = path.split(__file__)
sm_file = path.join(pkg_dir, "data", "sm.tar.gz")
return sm_file
def data_from_file(filename):
# Read the input file
tf, tc = tarfile.open(filename, "r:gz"), {}
# Extract the different files and
# store them in a dictionary
for m in tf.getmembers(): tc[m.name] = tf.extractfile(m)
# READ THE PREVIOUSLY GENERATED DATA
cosmo_data = np.genfromtxt(tc["cosmo_file.dat"] )
abund_data = np.genfromtxt(tc["abundance_file.dat"])
param_data = np.genfromtxt(tc["param_file.dat"],
delimiter="=",
dtype=None,
encoding=None
)
return InputData(cosmo_data, abund_data, param_data)
class AbstractData(ABC):
@abstractmethod
def get_cosmo_data(self):
pass
@abstractmethod
def get_abund_data(self):
pass
@abstractmethod
def get_param_data(self):
pass
class InputData(AbstractData):
def __init__(self, cosmo_data, abund_data, param_data):
self._sCosmoData = cosmo_data
self._sAbundData = abund_data
self._sParamData = param_data
def get_cosmo_data(self):
return self._sCosmoData
def get_abund_data(self):
return self._sAbundData
def get_param_data(self):
return self._sParamData
class InputInterface(object):
def __init__(self, input_data):
# If input_data is a filename, extract the data first
if type(input_data) == str:
input_data = data_from_file(input_data)
# Extract the provided input data
self._sCosmoData = input_data.get_cosmo_data()
self._sAbundData = input_data.get_abund_data()
self._sParamData = input_data.get_param_data()
# Calculate the scale factor and add it
sf = np.exp( cumsimp(self._sCosmoData[:,0]/hbar, self._sCosmoData[:,4]) )
self._sCosmoData = np.column_stack( [self._sCosmoData, sf] )
# Log the cosmo data for the interpolation
# ATTENTION: At this point we have to take the
# absolute value, because dT/dt is negative
self._sCosmoDataLog = np.log10( np.abs(self._sCosmoData) )
self._sCosmoDataShp = self._sCosmoData.shape
# Reshape the abundance data
self._sAbundData = self._sAbundData.reshape(
(NY, self._sAbundData.size//NY)
)
# Reshape the param data and
# turn it into a dictionary
self._sParamData = dict( self._sParamData.reshape(self._sParamData.size) )
# Check if the data is consistent
self._check_data()
def _check_data(self):
# Check if param_file.dat includes the required parameters
req_param = ( "eta" in self._sParamData )
if not req_param:
print_error(
"The mandatory variable 'eta' could not be found in 'param_file.dat'",
"acropolis.input.InputInterface::_check_data"
)
# Check if abundance_file.dat has the correct dimensions
abund_shape = ( self._sAbundData.shape[0] == NY )
if not abund_shape:
print_error(
"The content of 'abundance_file.dat' does not have the required shape.",
"acropolis.input.InputInterface::_check_data"
)
# Check if cosmo_file.dat has the correct number of columns
cosmo_shape = ( self._sCosmoDataShp[1] >= NC )
if not cosmo_shape:
print_error(
"The content of 'cosmo_file.dat' does not have the required shape.",
"acropolis.input.InputInterface::_check_data"
)
# 1. COSMO_DATA ###########################################################
def _find_index(self, x, x0):
# Returns an index ix such that x0
# lies between x[ix] and x[ix+1]
ix = np.argmin( np.abs( x - x0 ) )
# Check the edge of the array
if ix == self._sCosmoDataShp[0] - 1:
# In this case, the condition
# below is always False
# --> No additional -1
ix -= 1
# If x0 is not between ix and ix+1,...
if not (x[ix] <= x0 <= x[ix+1] or x[ix] >= x0 >= x[ix+1]):
# ...it must be between ix-1 and ix
ix -= 1
return ix
def _interp_cosmo_data(self, val, xc, yc):
# ATTENTION: To ensure maximal performance,
# it is assumed that x is already sorted in
# either increasing or decreasing order
x = self._sCosmoDataLog[:,xc]
y = self._sCosmoDataLog[:,yc]
val_log = log10(val)
# Extract the index closest to 'val_log'
ix = self._find_index(x, val_log)
m = (y[ix+1] - y[ix])/(x[ix+1] - x[ix])
b = y[ix] - m*x[ix]
return 10.**(m*val_log + b)
def temperature(self, t):
return self._interp_cosmo_data(t, 0, 1)
def time(self, T):
return self._interp_cosmo_data(T, 1, 0)
def dTdt(self, T):
return -self._interp_cosmo_data(T, 1, 2)
def neutrino_temperature(self, T):
return self._interp_cosmo_data(T, 1, 3)
def hubble_rate(self, T):
return self._interp_cosmo_data(T, 1, 4)
def scale_factor(self, T):
return self._interp_cosmo_data(T, 1, -1)
def cosmo_column(self, yc, val, xc=1):
return self._interp_cosmo_data(val, xc, yc)
def cosmo_range(self):
return ( min(self._sCosmoData[:,1]), max(self._sCosmoData[:,1]) )
# 2. ABUNDANCE_DATA #######################################################
def bbn_abundances(self):
return self._sAbundData
def bbn_abundances_0(self):
return self._sAbundData[:,0]
# 3. PARAM_DATA ###########################################################
def parameter(self, key):
return self._sParamData[key] | ACROPOLIS | /ACROPOLIS-1.2.2-py3-none-any.whl/acropolis/input.py | input.py |
from math import pi, exp, log, log10
# numpy
import numpy as np
# scipy
from scipy.linalg import expm
# abc
from abc import ABC, abstractmethod
# input
from acropolis.input import InputInterface, locate_sm_file
# nucl
from acropolis.nucl import NuclearReactor, MatrixGenerator
# params
from acropolis.params import zeta3
from acropolis.params import hbar, c_si, me2, alpha, tau_t
from acropolis.params import Emin, NY
from acropolis.params import universal
# pprint
from acropolis.pprint import print_info, print_warning
class AbstractModel(ABC):
def __init__(self, e0, ii):
# Initialize the input interface
self._sII = ii
# The injection energy
self._sE0 = e0
# The temperature range that is used for the calculation
self._sTrg = self._temperature_range()
# The relevant source terms
(self._sS0, self._sSc) = self.get_source_terms()
# A buffer for high-performance scans
self._sMatpBuffer = None
def run_disintegration(self):
# Print a warning if the injection energy
# is larger than 1GeV, as this might lead
# to wrong results
if not universal and int( self._sE0 ) > 1e3:
print_warning(
"Injection energy > 1 GeV. Results cannot be trusted.",
"acropolis.models.AbstractMode.run_disintegration"
)
# Print a warning if the temperature range
# of the model is not covered by the data
# in cosmo_file.dat
cf_temp_rg = self._sII.cosmo_range()
if not (cf_temp_rg[0] <= self._sTrg[0] <= self._sTrg[1] <= cf_temp_rg[1]):
print_warning(
"Temperature range not covered by input data. Results cannot be trusted.",
"acropolis.models.AbstractMode.run_disintegration"
)
# If the energy is below all thresholds,
# simply return the initial abundances
if self._sE0 <= Emin:
print_info(
"Injection energy is below all thresholds. No calculation required.",
"acropolis.models.AbstractModel.run_disintegration",
verbose_level=1
)
return self._squeeze_decays( self._sII.bbn_abundances() )
# Calculate the different transfer matrices
###########################################
# 1. pre-decay
pred_mat = self._pred_matrix()
# 2. photodisintegration
pdi_mat = self._pdi_matrix()
# 3. post-decay
postd_mat = self._postd_matrix()
# Combine
transf_mat = postd_mat.dot( pdi_mat.dot( pred_mat ) )
# Calculate the final abundances
Yf = np.column_stack(
list( transf_mat.dot( Y0i ) for Y0i in self._sII.bbn_abundances().transpose() )
)
return Yf
def get_source_terms(self):
# Collect the different source terms, i.e. ...
# ...the 'delta' source terms and...
s0 = [
self._source_photon_0 ,
self._source_electron_0,
self._source_positron_0
]
# ...the continous source terms
sc = [
self._source_photon_c ,
self._source_electron_c,
self._source_positron_c
]
return (s0, sc)
def _pdi_matrix(self):
if self._sMatpBuffer is None:
# Initialize the NuclearReactor
nr = NuclearReactor(self._sS0, self._sSc, self._sTrg, self._sE0, self._sII)
# Calculate the thermal rates
(temp, pdi_grids) = nr.get_pdi_grids()
# Initialize the MatrixGenerator
mg = MatrixGenerator(temp, pdi_grids, self._sII)
# Calculate the final matrices and set the buffer
self._sMatpBuffer = mg.get_final_matp()
# Calculate the final matrices
matp = self._sMatpBuffer
# Calculate the final matrix and return
return expm( sum(m for m in matp) )
def _pred_matrix(self):
dmat = np.identity(NY)
# n > p
dmat[0,0], dmat[1,0] = 0., 1.
# t > He3
Tmax = max( self._sTrg )
tmax = self._sII.time( Tmax )
expf = exp( -tmax/tau_t )
dmat[3,3], dmat[4, 3] = expf, 1. - expf
return dmat
def _postd_matrix(self):
dmat = np.identity(NY)
dmat[0,0], dmat[1,0] = 0., 1. # n > p
dmat[3,3], dmat[4,3] = 0., 1. # t > He3
dmat[8,8], dmat[7,8] = 0., 1. # Be7 > Li7
return dmat
def _squeeze_decays(self, Yf):
dmat = self._postd_matrix()
return np.column_stack(
list( dmat.dot( Yi ) for Yi in Yf.transpose() )
)
def get_matp_buffer(self):
return self._sMatpBuffer
def set_matp_buffer(self, matp):
self._sMatpBuffer = matp
# ABSTRACT METHODS ##############################################
@abstractmethod
def _temperature_range(self):
pass
@abstractmethod
def _source_photon_0(self, T):
pass
@abstractmethod
def _source_electron_0(self, T):
pass
def _source_positron_0(self, T):
return self._source_electron_0(T)
def _source_photon_c(self, E, T):
return 0.
def _source_electron_c(self, E, T):
return 0.
def _source_positron_c(self, E, T):
return self._source_electron_c(E, T)
class DecayModel(AbstractModel):
def __init__(self, mphi, tau, temp0, n0a, bree, braa):
# Initialize the Input_Interface
self._sII = InputInterface( locate_sm_file() )
# The mass of the decaying particle
self._sMphi = mphi # in MeV
# The lifetime of the decaying particle
self._sTau = tau # in s
# The injection energy
self._sE0 = self._sMphi/2. # in MeV
# The number density of the mediator
# (relative to photons) ...
self._sN0a = n0a
# ... at T = temp0 ...
self._sT0 = temp0 # in MeV
# ... corresponding to t = t(temp0)
self._st0 = self._sII.time(self._sT0)
# The branching ratio into electron-positron pairs
self._sBRee = bree
# The branching ratio into two photons
self._sBRaa = braa
# Call the super constructor
super(DecayModel, self).__init__(self._sE0, self._sII)
# DEPENDENT QUANTITIES ##############################################################
def _number_density(self, T):
sf_ratio = self._sII.scale_factor(self._sT0)/self._sII.scale_factor(T)
delta_t = self._sII.time(T) - self._st0
n_gamma = (2.*zeta3)*(self._sT0**3.)/(pi**2.)
return self._sN0a * n_gamma * sf_ratio**3. * exp( -delta_t/self._sTau )
# ABSTRACT METHODS ##################################################################
def _temperature_range(self):
# The number of degrees-of-freedom to span
mag = 2.
# Calculate the approximate decay temperature
Td = self._sII.temperature( self._sTau )
# Calculate Tmin and Tmax from Td
Td_ofm = log10(Td)
# Here we choose -1.5 (+0.5) orders of magnitude
# below (above) the approx. decay temperature,
# since the main part happens after t = \tau
Tmin = 10.**(Td_ofm - 3.*mag/4.)
Tmax = 10.**(Td_ofm + 1.*mag/4.)
return (Tmin, Tmax)
def _source_photon_0(self, T):
return self._sBRaa * 2. * self._number_density(T) * (hbar/self._sTau)
def _source_electron_0(self, T):
return self._sBRee * self._number_density(T) * (hbar/self._sTau)
def _source_photon_c(self, E, T):
EX = self._sE0
x = E/EX
y = me2/(4.*EX**2.)
if 1. - y < x:
return 0.
_sp = self._source_electron_0(T)
return (_sp/EX) * (alpha/pi) * ( 1. + (1.-x)**2. )/x * log( (1.-x)/y )
class AnnihilationModel(AbstractModel):
def __init__(self, mchi, a, b, tempkd, bree, braa, omegah2=0.12):
# Initialize the Input_Interface
self._sII = InputInterface( locate_sm_file() )
# The mass of the dark-matter particle
self._sMchi = mchi # in MeV
# The s-wave and p-wave parts of <sigma v>
self._sSwave = a # in cm^3/s
self._sPwave = b # in cm^3/s
# The dark matter decoupling temperature in MeV
# For Tkd=0, the dark matter partices stays in
# kinetic equilibrium with the SM heat bath
self._sTkd = tempkd # in MeV
# The injection energy
self._sE0 = self._sMchi # in MeV
# The branching ratio into electron-positron pairs
self._sBRee = bree
# The branching ratio into two photons
self._sBRaa = braa
# The density parameter of dark matter
self._sOmgh2 = omegah2
# Call the super constructor
super(AnnihilationModel, self).__init__(self._sE0, self._sII)
# DEPENDENT QUANTITIES ##############################################################
def _number_density(self, T):
rho_d0 = 8.095894680377574e-35 * self._sOmgh2 # DM density today in MeV^4
T0 = 2.72548*8.6173324e-11 # CMB temperature today in MeV
sf_ratio = self._sII.scale_factor(T0) / self._sII.scale_factor(T)
return rho_d0 * sf_ratio**3. / self._sMchi
def _dm_temperature(self, T):
if T >= self._sTkd:
return T
sf_ratio = self._sII.scale_factor(self._sTkd) / self._sII.scale_factor(T)
return self._sTkd * sf_ratio**2.
def _sigma_v(self, T):
swave_nu = self._sSwave/( (hbar**2.)*(c_si**3.) )
pwave_nu = self._sPwave/( (hbar**2.)*(c_si**3.) )
v2 = 6.*self._dm_temperature(T)/self._sMchi
return swave_nu + pwave_nu*v2
# ABSTRACT METHODS ##################################################################
def _temperature_range(self):
# The number of degrees-of-freedom to span
mag = 4.
# Tmax is determined by the Be7 threshold
# (The factor 0.5 takes into account effects
# of the high-energy tail)
Tmax = me2/(22.*.5*Emin)
# For smaller T the annihilation rate is suppressed
# --> falls off at least with T^(-6)
Tmin = 10.**(log10(Tmax) - mag)
return (Tmin, Tmax)
def _source_photon_0(self, T):
return self._sBRaa * (self._number_density(T)**2.) * self._sigma_v(T)
def _source_electron_0(self, T):
return self._sBRee * .5 * (self._number_density(T)**2.) * self._sigma_v(T)
def _source_photon_c(self, E, T):
EX = self._sE0
x = E/EX
y = me2/(4.*EX**2.)
if 1. - y < x:
return 0.
_sp = self._source_electron_0(T)
return (_sp/EX) * (alpha/pi) * ( 1. + (1.-x)**2. )/x * log( (1.-x)/y ) | ACROPOLIS | /ACROPOLIS-1.2.2-py3-none-any.whl/acropolis/models.py | models.py |
from math import pi, log, log10, exp, sqrt
# numpy
import numpy as np
# scipy
from scipy.integrate import quad, dblquad
from scipy.integrate import IntegrationWarning
# numba
import numba as nb
# warnings
import warnings
# db
from acropolis.db import import_data_from_db
from acropolis.db import in_rate_db, interp_rate_db
# cache
from acropolis.cache import cached_member
# pprint
from acropolis.pprint import print_warning, print_error
# params
from acropolis.params import me, me2, mm, mm2, alpha, re, hbar, tau_m
from acropolis.params import zeta3, pi2
from acropolis.params import FX
from acropolis.params import Emin, approx_zero, eps, Ephb_T_max
from acropolis.params import NE_pd, NE_min
# _ReactionWrapperScaffold ####################################################
@nb.jit(cache=True)
def _JIT_F(Eph, Ee, Ephb):
# ATTENTION: Here we use the range given in '10.1103/PhysRev.167.1159',
# because the translation to 0 < q < 1 is questionable
if not ( Ephb <= Eph <= 4.*Ephb*Ee*Ee/( me2 + 4.*Ephb*Ee ) ):
        # CHECKED to never happen, since the integration
        # limits are always chosen appropriately (below)
return 0.
G = 4.*Ephb*Ee/me2 # \Gamma_\epsilon
q = Eph/( G*(Ee - Eph) ) # q
# ATTENTION:
# If the last term is (2.-2.*G*q) , Kawasaki
# If the last term is (2.+2.*G*q) , correct
return 2.*q*log(q) + (1.+2.*q)*(1.-q) + (G*q)**2. * (1.-q)/(2.+2.*G*q)
@nb.jit(cache=True)
def _JIT_G(Ee, Eph, Ephb):
# Define the energy of the positron
Eep = Eph + Ephb - Ee
# Calculate the valid range for Ee
# ATTENTION: This range is absent in 'astro-ph/9412055'
# Here we adopt the original result from
# 'link.springer.com/content/pdf/10.1007/BF01005624.pdf'
dE_sqrt = (Eph - Ephb)*sqrt( 1. - me2/( Eph*Ephb ) )
Ee_lim_m = ( Eph + Ephb - dE_sqrt )/2.
Ee_lim_p = ( Eph + Ephb + dE_sqrt )/2.
# ATTENTION: White et al. impose the range in the soft
# photon limit, which is more difficult to handle but
# should lead to the same results, since the pair production
# kernel ensures that Ephb ~ T << Eph ~ O(MeV)
if not ( me < Ee_lim_m <= Ee <= Ee_lim_p ):
        # CHECKED to never happen, since the integration
        # limits are always chosen appropriately (below)
return 0.
# Split the function into four summands
# and calculate all of them separately
# Ee + Eep = Eph + Ephb
sud = 0.
sud += 4.*( (Ee + Eep)**2. )*log( (4.*Ephb*Ee*Eep)/( me2*(Ee + Eep) ) )/( Ee*Eep )
sud += ( me2/( Ephb*(Ee + Eep) ) - 1. ) * ( (Ee + Eep)**4. )/( (Ee**2.)*(Eep**2.) )
# ATTENTION: no additional minus sign in sud[2]
    # It is unclear whether it is a typo or an artifact
# of the scan (in the original paper)
sud += 2.*( 2.*Ephb*(Ee + Eep) - me2 ) * ( (Ee + Eep)**2. )/( me2*Ee*Eep )
sud += -8.*Ephb*(Ee + Eep)/me2
return sud
# _PhotonReactionWrapper ######################################################
@nb.jit(cache=True)
def _JIT_ph_rate_pair_creation(logy, logx, T):
# Return the integrand for the 2d integral in log-space
x, y = exp(logx), exp(logy)
# Define beta as a function of y
b = sqrt(1. - 4.*me2/y)
# Define the kernel for the 2d-integral; y = s, x = epsilon_bar
# f/E^2 s \sigma_DP
# ATTENTION: There is an error in 'astro-ph/9412055.pdf'
# In the integration for \bar{\epsilon}_\gamma the lower
# limit of integration should be me^2/\epsilon_\gamma
# (the written limit is unitless, which must be wrong)
# This limit is a consequence of the constraint on
# the center-of-mass energy
sig_pc = .5*pi*(re**2.)*(1.-b**2.)*( (3.-b**4.)*log( (1.+b)/(1.-b) ) - 2.*b*(2.-b**2.) )
return ( 1./(pi**2) )/( exp(x/T) - 1. ) * y * sig_pc * (x*y)
@nb.jit(cache=True)
def _JIT_ph_kernel_inverse_compton(logx, E, Ep, T):
# Return the integrand for the 1d-integral in log-space; x = Ephb
x = exp(logx)
return _JIT_F(E, Ep, x)*x/( pi2*(exp(x/T) - 1.) ) * x
# _ElectronReactionWrapper ####################################################
@nb.jit(cache=True)
def _JIT_el_rate_inverse_compton(y, x, E, T):
# Return the integrand for the 2d-integral; y = Eph, x = Ephb
return _JIT_F(y, E, x)*x/( (pi**2.)*(exp(x/T) - 1.) )
@nb.jit(cache=True)
def _JIT_el_kernel_inverse_compton(logx, E, Ep, T):
# Define the integrand for the 1d-integral in log-space; x = Ephb
x = exp(logx)
return _JIT_F(Ep+x-E, Ep, x)*( x/(pi**2) )/( exp(x/T) - 1. ) * x
@nb.jit(cache=True)
def _JIT_el_kernel_pair_creation(logx, E, Ep, T):
# Define the integrand for the 1d-integral in log-space; x = Ephb
x = exp(logx)
return _JIT_G(E, Ep, x)/( (pi**2.)*(exp(x/T) - 1.) ) * x
@nb.jit(cache=True)
def _JIT_dsdE_Z2(Ee, Eph):
# Define the energies (here: nucleon is at rest)
Em = Ee # E_-
Ep = Eph - Ee # E_+
# Define the various parameters that enter the x-section
pm = sqrt(Em*Em - me2) # p_-
pp = sqrt(Ep*Ep - me2) # p_+
L = log( (Ep*Em + pp*pm + me2)/(Ep*Em - pp*pm + me2) ) # L
lm = log( (Em + pm)/(Em - pm) ) # l_-
lp = log( (Ep + pp)/(Ep - pp) ) # l_+
# Define the prefactor
pref = alpha*(re**2.)*pp*pm/(Eph**3.)
# Calculate the infamous 'lengthy expression'
# Therefore, split the sum into four summands
sud = 0.
sud += -4./3. - 2.*Ep*Em*(pp*pp + pm*pm)/( (pp**2.)*(pm**2.) )
sud += me2*( lm*Ep/(pm**3.) + lp*Em/(pp**3.) - lp*lm/(pp*pm) )
sud += L*( -8.*Ep*Em/(3.*pp*pm) + Eph*Eph*((Ep*Em)**2. + (pp*pm)**2. - me2*Ep*Em)/( (pp**3.)*(pm**3.) ) )
sud += -L*me2*Eph*( lp*(Ep*Em - pp*pp)/(pp**3.) + lm*(Ep*Em - pm*pm)/(pm**3.) )/(2.*pp*pm)
return pref * sud
# SpectrumGenerator ###########################################################
@nb.jit(cache=True)
def _JIT_set_spectra(F, i, Fi, cond=False):
F[:, i] = Fi
# In the strongly compressed regime, manually
# set the photon spectrum to zero in order to
# avoid floating-point errors
if cond: F[0, i] = 0.
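# The solver below integrates the discretized cascade equation
#     F_X(E_i) = [ Sc_X(E_i) + sum_Xp int_{E_i}^{E0} dEp K_{X,Xp}(E_i, Ep) F_Xp(Ep)
#                  + sum_Xp K_{X,Xp}(E_i, E0) S0_Xp / G_Xp(E0) ] / G_X(E_i)
# on a log-spaced energy grid. Since the kernels vanish for Ep < E, the system is
# triangular in energy and can be solved by marching downwards from E = E0; the
# factors .5*dy*E and dy*E implement the trapezoidal rule in log-space (dEp = Ep*dlogEp)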
@nb.jit(cache=True)
def _JIT_solve_cascade_equation(E_rt, G, K, E0, S0, Sc, T):
# Extract the number of particle species...
NX = len(G)
# ...and the number of points in energy.
NE = len(E_rt)
dy = log(E_rt[-1]/Emin)/(NE-1)
# Generate the grid for the different spectra
# First index: X = photon, electron, positron
F_rt = np.zeros( (NX, NE) )
# Calculate F_X(E_S), NE-1
_JIT_set_spectra(F_rt, -1, np.array([
Sc[X,-1]/G[X,-1] + np.sum(K[X,:,-1,-1]*S0[:]/(G[:,-1]*G[X,-1])) for X in range(NX)
]))
# Loop over all energies
i = (NE - 1) - 1 # start at the second to last index, NE-2
while i >= 0:
B = np.zeros( (NX, NX) )
a = np.zeros( (NX, ) )
# Calculate the matrix B and the vector a
for X in range(NX):
# Calculate B
B[X,:] = .5*dy*E_rt[i]*K[X,:,i,i]/G[X,i]
# Calculate a
a[X] = Sc[X,i]/G[X,i]
a0 = K[X,:,i,-1]*S0[:]/G[:,-1] + .5*dy*E_rt[-1]*K[X,:,i,-1]*F_rt[:,-1]
for j in range(i+1, NE-1): # Goes to NE-2
a0 += dy*E_rt[j]*K[X,:,i,j]*F_rt[:,j]
for a0X in a0:
a[X] += a0X/G[X,i]
# Solve the system of linear equations for F
_JIT_set_spectra(F_rt, i,
np.linalg.solve(np.identity(NX)-B, a)
)
i -= 1
# Remove potential zeros
F_rt = F_rt.reshape( NX*NE )
for i, f in enumerate(F_rt):
if f < approx_zero:
F_rt[i] = approx_zero
F_rt = F_rt.reshape( (NX, NE) )
# Define the result array...
res = np.zeros( (NX+1, NE) )
# ...and fill it
res[0 , :] = E_rt
res[1:NX+1, :] = F_rt
return res
###############################################################################
class _ReactionWrapperScaffold(object):
def __init__(self, Y0, eta, db):
self._sY0 = Y0
self._sEta = eta
self._sRateDb = db
# NUMBER DENSITIES of baryons, electrons and nucleons #####################
def _nb(self, T):
        # gs no longer changes in the relevant temperature range,
# hence (R0/R)^3 = gs(T)T^3/( gs(T0)T0^3) = (T/T0)^3
return self._sEta * ( 2.*zeta3/pi2 ) * (T**3.)
def _ne(self, T):
# 1: p; 5: He4 (see 'NuclearReactor._nuclei' for all identifiers)
return ( self._sY0[1] + 2.*self._sY0[5] ) * self._nb(T)
def _nNZ2(self, T):
# 1: p; 5: He4 (see 'NuclearReactor._nuclei' for all identifiers)
return ( self._sY0[1] + 4.*self._sY0[5] ) * self._nb(T)
class _PhotonReactionWrapper(_ReactionWrapperScaffold):
def __init__(self, Y0, eta, db):
super(_PhotonReactionWrapper, self).__init__(Y0, eta, db)
# CONTINUOUS ENERGY LOSS ##################################################
    # E is the energy of the losing particle
# T is the temperature of the background photons
# TOTAL CONTINUOUS ENERGY LOSS ############################################
    def total_eloss(self, E, T):
return 0.
# RATES ###################################################################
# E is the energy of the incoming particle
# T is the temperature of the background photons
# PHOTON-PHOTON SCATTERING ################################################
def _rate_photon_photon(self, E, T):
#if E > me2/T:
# return 0.
expf = exp( -E*T/me2 )
return 0.151348 * (alpha**4.) * me * (E/me)**3. * (T/me)**6. * expf
# COMPTON SCATTERING ######################################################
def _rate_compton(self, E, T):
x = 2.*E/me
return ( 2.*pi*(re**2.)/x ) * self._ne(T) * ( (1. - 4./x - 8./(x**2.))*log(1.+x) + .5 + 8./x - 1./(2.*(1.+x)**2.) )
# BETHE-HEITLER PAIR PRODUCTION ###########################################
def _rate_bethe_heitler(self, E, T):
# For small energies, the rate can be approximated by a constant
# (cf. 'hep-ph/0604251') --- NOT USED HERE
#if E < 4.: E = 4.
k = E/me
# Below threshold, the rate vanishes
# This case never happens since Emin = 1.5 > 2me
# (see 'acropolis.params')
if k < 2:
return 0.
# Approximation for SMALL energies
if 2 <= k <= 4:
r = ( 2.*k - 4. )/( k + 2. + 2.*sqrt(2.*k) )
return ( alpha**3./me2 ) * self._nNZ2(T) * (2.*pi/3.) * ( (k-2.)/k )**3. * ( \
1 + r/2. + (23./40.)*(r**2.) + (11./60.)*(r**3.) + (29./960.)*(r**4.) \
)
# Approximation for LARGE energies
log2k = log(2.*k)
# We implement corrections up to order (2./k)**6 ('astro-ph/9412055')
# This is relevant in order to ensure a smooth transition at k = 4
return ( alpha**3./me2 ) * self._nNZ2(T) * ( \
(28./9.)*log2k - 218./27. \
+ (2./k)**2. * ( (2./3.)*log2k**3. - log2k**2. + (6. - pi2/3.)*log2k + 2.*zeta3 + pi2/6. - 7./2. ) \
- (2./k)**4. * ( (3./16.)*log2k + 1./8. ) \
- (2./k)**6. * ( (29./2304.)*log2k - 77./13824. ) \
)
# DOUBLE PHOTON PAIR PRODUCTION ###########################################
def _rate_pair_creation(self, E, T):
# In general, the threshold is E ~ me^2/(22*T)
        # However, here we use a slightly smaller threshold
# in order to guarantee a smooth transition
if E < me2/(50.*T):
return 0.
# Define the integration limits from the
# constraint on the center-of-mass energy
llim = me2/E # < 50*T (see above)
ulim = Ephb_T_max*T # ~ 200*T
# ulim > llim, since me2/E < 50*T
# CHECKED!
# Perform the integration in log-log space
# The limits for s are always in ascending order,
# i.e. 4*me2 < 4*E*x, since x > me2/E
I_fso_E2 = dblquad(_JIT_ph_rate_pair_creation, log(llim), log(ulim), \
lambda logx: log(4.*me2), lambda logx: log(4.*E) + logx, \
epsrel=eps, epsabs=0, args=(T,)
)
return I_fso_E2[0]/( 8.*E**2. )
def _rate_pair_creation_db(self, E, T):
if E < me2/(50.*T):
return 0.
E_log, T_log = log10(E), log10(T)
if ( self._sRateDb is None ) or ( not in_rate_db(E_log, T_log) ):
return self._rate_pair_creation(E, T)
return interp_rate_db(self._sRateDb, 'ph:rate_pair_creation', E_log, T_log)
# TOTAL RATE ##############################################################
def total_rate(self, E, T):
return self._rate_photon_photon(E, T) + self._rate_compton(E, T) + self._rate_bethe_heitler(E, T) + self._rate_pair_creation_db(E, T)
# INTEGRAL KERNELS ########################################################
# E is the energy of the outgoing particle
# Ep is the energy of the incoming particle
# T is the temperature of the background photons
# PHOTON-PHOTON SCATTERING ################################################
def _kernel_photon_photon(self, E, Ep, T):
#if Ep > me2/T:
# return 0.
expf = exp( -Ep*T/me2 )
return 1112./(10125.*pi) * (alpha**4.)/(me**8.) * 8.*(pi**4.)*(T**6.)/63. \
* Ep**2. * ( 1. - E/Ep + (E/Ep)**2. )**2. * expf
# COMPTON SCATTERING ######################################################
def _kernel_compton(self, E, Ep, T):
        # Check that the energies do not exceed the 'Compton edge'
# ATTENTION: This constraint is missing in '1503.04852'
if Ep/(1. + 2.*Ep/me) > E:
return 0.
# ATTENTION:
# If the last term is + 2.*me*(1./E - 1./Ep) , Serpico
# If the last term is - 2.*me*(1./E - 1./Ep) , correct
return pi*(re**2.)*me/(Ep**2.) * self._ne(T) * ( Ep/E + E/Ep + (me/E - me/Ep)**2. - 2.*me*(1./E - 1./Ep) )
# INVERSE COMPTON SCATTERING ##############################################
@cached_member
def _kernel_inverse_compton(self, E, Ep, T):
# Incorporate the non-generic integration limit as
# the algorithm requires Ep > E and not Ep > E + me
if Ep < E + me:
return 0.
# This also ensures that Ep != E (!!!)
# Define the integration limits from
# the range that is specified in '_JIT_F'
llim = .25*me2*E/( Ep*Ep - Ep*E ) # with Ep != E (see above)
ulim = min( E, Ep-me2/(4.*Ep), Ephb_T_max*T )
# Here, the second condition is redundant, since
# Ep-me2/(4.*Ep) > E for Ep > E + me (see above)
# However, we include it anyways in order to
# provide a better documentation
# CHECKED!
# If the lower limit exceeds the upper limit,
# simply return 0. This also helps to avoid
        # overflow if llim > Ephb_T_max*T
if ulim <= llim:
return 0.
# Perform the integration in log space
I_fF_E = quad(_JIT_ph_kernel_inverse_compton, log(llim), log(ulim), epsrel=eps, epsabs=0, args=(E, Ep, T))
# ATTENTION: Kawasaki considers a combined e^+/e^- spectrum
# Therefore the factor 2 should not be there in our case
return 2.*pi*(alpha**2.)*I_fF_E[0]/(Ep**2.)
# TOTAL INTEGRAL KERNEL ####################################################
def total_kernel_x(self, E, Ep, T, X):
if X == 0: return self._kernel_photon_photon(E, Ep, T) + self._kernel_compton(E, Ep, T)
# Photon -> Photon
if X == 1: return self._kernel_inverse_compton(E, Ep, T)
# Electron -> Photon
if X == 2: return self._kernel_inverse_compton(E, Ep, T)
# Positron -> Photon
print_error(
"Particle with identifier X =" + str(X) + "does not exist.",
"acropolis.cascade._PhotonReactionWrapper.total_kernel_x"
)
class _ElectronReactionWrapper(_ReactionWrapperScaffold):
def __init__(self, Y0, eta, db):
super(_ElectronReactionWrapper, self).__init__(Y0, eta, db)
# RATES ###################################################################
# E is the energy of the incoming particle
# T is the temperature of the background photons
# INVERSE COMPTON SCATTERING ##############################################
@cached_member
def _rate_inverse_compton(self, E, T):
# Define the upper limit for the integration over x
ulim = min( E - me2/(4.*E), Ephb_T_max*T )
# The condition x <= E-me2/(4.*E) ensures
# E- <= E+ in E- <= E <= E+ (range for y)
# CHECKED!
# Perform the two-dimensional integration
# with limits that are calculated from the
# range that is specified in '_JIT_F'
# ATTENTION:
# The integral over \epsilon_\gamma should start at 0.
# In fact, for \epsilon_\gamma > \epsilon_e, we have q < 0.
I_fF_E = dblquad(_JIT_el_rate_inverse_compton, 0., ulim, lambda x: x, lambda x: 4.*x*E*E/( me2 + 4.*x*E ), epsrel=eps, epsabs=0, args=(E, T))
return 2.*pi*(alpha**2.)*I_fF_E[0]/(E**2.)
def _rate_inverse_compton_db(self, E, T):
E_log, T_log = log10(E), log10(T)
if ( self._sRateDb is None ) or ( not in_rate_db(E_log, T_log) ):
return self._rate_inverse_compton(E, T)
return interp_rate_db(self._sRateDb, 'el:rate_inverse_compton', E_log, T_log)
# TOTAL RATE ##############################################################
def total_rate(self, E, T):
return self._rate_inverse_compton_db(E, T)
# INTEGRAL KERNELS ########################################################
# E is the energy of the outgoing particle
# Ep is the energy of the incoming particle
# T is the temperature of the background photons
# INVERSE COMPTON SCATTERING ##############################################
@cached_member
def _kernel_inverse_compton(self, E, Ep, T):
# E == Ep leads to a divergence in
# the Bose-Einstein distribution
# TODO ???
if E == Ep:
return 0.
# Calculate appropriate integration limits
pf = .25*me2/Ep - E # <= 0.
qf = .25*me2*(Ep-E)/Ep # >= 0.
sqrt_d = sqrt( (pf/2.)**2. - qf )
z1 = -pf/2. - sqrt_d # smaller
z2 = -pf/2. + sqrt_d # larger
# Define the integration limits from
# the range that is specified in '_JIT_F'
llim = z1
ulim = min( z2, Ep - me2/(4.*Ep), Ephb_T_max*T )
# CHECKED!
# For the check, remember to use the correct
# '_JIT_F', i.e. '_JIT_F(Ep+x-E, Ep, x)'
# If the lower limit exceeds the upper limit,
# simply return 0. This also helps to avoid
        # overflow if llim > Ephb_T_max*T
if ulim <= llim:
return 0.
# Perform the integration in log space
I_fF_E = quad(_JIT_el_kernel_inverse_compton, log(llim), log(ulim), epsrel=eps, epsabs=0, args=(E, Ep, T))
return 2.*pi*(alpha**2.)*I_fF_E[0]/(Ep**2.)
# COMPTON SCATTERING ######################################################
def _kernel_compton(self, E, Ep, T):
        # Perform a substitution of the parameters.
# Compared to the formula for photons, only
# the arguments of the cross-section are different
E_s = Ep + me - E # E , substituted
Ep_s = Ep # Ep, substituted
# Use the same formula as in case of photons with
# E -> E_s
# Ep -> Ep_s
# Check that the energies do not exceed the 'Compton edge'
# ATTENTION: This condition is missing in some other papers
if Ep_s/(1. + 2.*Ep_s/me) > E_s:
return 0.
# ATTENTION:
# If the last term is + 2.*me*(1./E_s - 1./Ep_s), Serpico
# If the last term is - 2.*me*(1./E_s - 1./Ep_s), correct
return pi*(re**2.)*me/(Ep_s**2.) * self._ne(T) * ( Ep_s/E_s + E_s/Ep_s + (me/E_s - me/Ep_s)**2. - 2.*me*(1./E_s - 1./Ep_s) )
# BETHE_HEITLER PAIR CREATION #############################################
@cached_member
def _kernel_bethe_heitler(self, E, Ep, T):
# Incorporate the non-generic integration limit as
# the algorithm requires Ep > E and not Ep > E + me
if Ep < E + me:
return 0.
# Multiply by the nucleon density and return
return self._nNZ2(T)*_JIT_dsdE_Z2(E, Ep)
# DOUBLE PHOTON PAIR CREATION #############################################
@cached_member
def _kernel_pair_creation(self, E, Ep, T):
# In general, the threshold is Ep >~ me^2/(22*T)
        # However, here we use a slightly smaller threshold
        # in accordance with the implementation we use in
# '_PhotonReactionWrapper._rate_pair_creation'
if Ep < me2/(50.*T):
return 0.
# Ep is the incoming(!) energy
dE, E2 = Ep - E, E**2.
z1 = Ep*( me2 - 2.*dE*( sqrt(E2 - me2) - E ) )/( 4*Ep*dE + me2 )
z2 = Ep*( me2 + 2.*dE*( sqrt(E2 - me2) + E ) )/( 4*Ep*dE + me2 )
# Define the integration limits from
# the range that is specified in '_JIT_G'
# and the constraint on the center-of-mass
# energy, i.e. Eph*Ephb > me^2
llim = max( me2/Ep, z1 )
ulim = min( z2, Ep, Ephb_T_max*T )
# The me < ... condition is fulfiled by
# default since all energies are larger
# than Emin > 2me
# The reference paper also states that
# x < Ep, which is also incorporated here
# CHECKED!
# If the lower limit exceeds the upper limit,
# simply return 0. This also helps to avoid
        # overflow if llim > Ephb_T_max*T
if ulim <= llim:
return 0.
# Perform the integration in log space
I_fG_E2 = quad(_JIT_el_kernel_pair_creation, log(llim), log(ulim), epsrel=eps, epsabs=0, args=(E, Ep, T))
return 0.25*pi*(alpha**2.)*me2*I_fG_E2[0]/(Ep**3.)
# TOTAL INTEGRAL KERNEL ####################################################
def total_kernel_x(self, E, Ep, T, X):
if X == 0: return self._kernel_compton(E, Ep, T) + self._kernel_bethe_heitler(E, Ep, T) + self._kernel_pair_creation(E, Ep, T)
# Photon -> Electron
if X == 1: return self._kernel_inverse_compton(E, Ep, T)
# Electron -> Electron
if X == 2: return 0.
# Positron -> Electron
print_error(
"Particle with identifier X =" + str(X) + "does not exist.",
"acropolis.cascade._ElectronReactionWrapper.total_kernel_x"
)
class _PositronReactionWrapper(object):
def __init__(self, Y0, eta, db):
self._sER = _ElectronReactionWrapper(Y0, eta, db)
# RATES ###################################################################
# E is the energy of the incoming particle
# T is the temperature of the background photons
# INVERSE COMPTON SCATTERING ##############################################
def _rate_inverse_compton_db(self, E, T):
return self._sER._rate_inverse_compton_db(E, T)
# TOTAL RATE ##############################################################
def total_rate(self, E, T):
return self._rate_inverse_compton_db(E, T)
# INTEGRAL KERNELS ########################################################
# E is the energy of the outgoing particle
# Ep is the energy of the incoming particle
# T is the temperature of the background photons
# INVERSE COMPTON SCATTERING ##############################################
def _kernel_inverse_compton(self, E, Ep, T):
return self._sER._kernel_inverse_compton(E, Ep, T)
# COMPTON SCATTERING ######################################################
def _kernel_compton(self, E, Ep, T):
# There are no thermal positrons
return 0.
# BETHE_HEITLER PAIR CREATION #############################################
def _kernel_bethe_heitler(self, E, Ep, T):
return self._sER._kernel_bethe_heitler(E, Ep, T)
# DOUBLE PHOTON PAIR CREATION #############################################
def _kernel_pair_creation(self, E, Ep, T):
return self._sER._kernel_pair_creation(E, Ep, T)
# TOTAL INTEGRAL KERNEL ####################################################
def total_kernel_x(self, E, Ep, T, X):
if X == 0: return self._kernel_compton(E, Ep, T) + self._kernel_bethe_heitler(E, Ep, T) + self._kernel_pair_creation(E, Ep, T)
# Photon -> Positron
if X == 1: return 0.
# Electron -> Positron
if X == 2: return self._kernel_inverse_compton(E, Ep, T)
# Positron -> Positron
print_error(
"Particle with identifier X =" + str(X) + "does not exist.",
"acropolis.cascade._PositronReactionWrapper.total_kernel_x"
)
# TODO: Not yet fully implemented
# Goal is ACROPOLIS v1.3
class _MuonReactionWrapper(_ReactionWrapperScaffold):
# RATES ###################################################################
# E is the energy of the incoming particle
# T is the temperature of the background photons
# MUON DECAY ##############################################################
def _rate_muon_decay(self, E, T):
return hbar*mm/(tau_m*E)
# INVERSE COMPTON SCATTERING ##############################################
def _rate_inverse_compton(self, E, T):
return 0.
# TOTAL RATE ##############################################################
def total_rate(self, E, T):
return self._rate_inverse_compton(E, T) + self._rate_muon_decay(E, T)
class SpectrumGenerator(object):
def __init__(self, Y0, eta):
# Extract the data from the databases; If there is
# no data in the folder 'data/', db = (None, None)
db = import_data_from_db()
        # Define a dictionary containing the BBN abundances
self._sY0 = Y0
# Define a dictionary containing all reaction wrappers
self._sRW = {
0: _PhotonReactionWrapper (self._sY0, eta, db),
1: _ElectronReactionWrapper(self._sY0, eta, db),
2: _PositronReactionWrapper(self._sY0, eta, db)
}
# Set the number of particle species (in the cascade)
self._sNX = 1 + 2*FX
def _rate_x(self, X, E, T):
return self._sRW[X].total_rate(E, T)
def _kernel_x_xp(self, X, Xp, E, Ep, T):
return self._sRW[X].total_kernel_x(E, Ep, T, Xp)
def rate_photon(self, E, T):
return self._rate_x(0, E, T)
def get_spectrum(self, E0, S0, Sc, T, allX=False):
# Define the dimension of the grid
# as defined in 'params.py'...
NE = int(log10(E0/Emin)*NE_pd)
# ... but not less than NE_min points
NE = max(NE, NE_min)
# Generate the grid for the energy
E_rt = np.logspace(log(Emin), log(E0), NE, base=np.e)
# Generate the grid for the rates
G = np.array([[self._rate_x(X, E, T) for E in E_rt] for X in range(self._sNX)])
# first index: X, second index according to energy E
# Generate the grid for the kernels
K = np.array([[[[self._kernel_x_xp(X, Xp, E, Ep, T) if Ep >= E else 0. for Ep in E_rt] for E in E_rt] for Xp in range(self._sNX)] for X in range(self._sNX)])
# first index: X, second index: Xp
# third index according to energy E
# fourth index according to energy Ep;
# For Ep < E, the kernel is simply 0.
# Generate the grids for the source terms
# injection + final-state radiation
S0 = np.array([S(T) for S in S0])
Sc = np.array([[ScX(E, T) for E in E_rt] for ScX in Sc])
# Calculate the spectra by solving
# the cascade equation
res = _JIT_solve_cascade_equation(E_rt, G, K, E0, S0, Sc, T)
# 'res' always has at least two columns
return res[0:2,:] if allX == False else res
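    # Usage sketch (illustrative; Y0, eta and the source terms are assumptions):
    #   sg  = SpectrumGenerator(Y0, eta)      # Y0: BBN abundances, eta: baryon-to-photon ratio
    #   res = sg.get_spectrum(E0, S0, Sc, T)  # res[0]: energy grid, res[1]: photon spectrum
    # Here, S0 is a list of monochromatic source terms S0_X(T) and Sc a list of
    # continuous source terms Sc_X(E, T), one entry per species (photon, e-, e+)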
def get_universal_spectrum(self, E0, S0, Sc, T, offset=0.):
# Define EC and EX as in 'astro-ph/0211258'
EC = me2/(22.*T)
EX = me2/(80.*T)
# Define the normalization K0 as in 'astro-ph/0211258'
K0 = E0/( (EX**2.) * ( 2. + log( EC/EX ) ) )
# Define the dimension of the grid
# as defined in 'params.py'...
NE = int(log10(E0/Emin)*NE_pd)
# ... but not less than NE_min points
NE = max(NE, NE_min)
# Generate the grid for the energy
E_rt = np.logspace(log(Emin), log(E0), NE, base=np.e)
# Generate the grid for the photon spectrum
F_rt = np.zeros(NE)
# Calculate the spectrum for the different energies
# TODO: Perform integration
S0N = lambda T: sum(S0X(T) for S0X in S0)
for i, E in enumerate(E_rt):
if E < EX:
F_rt[i] = S0N(T) * K0 * (EX/E)**1.5/self.rate_photon(E, T)
elif E >= EX and E <= (1. + offset)*EC: # an offset enables better interpolation
F_rt[i] = S0N(T) * K0 * (EX/E)**2.0/self.rate_photon(E, T)
# Remove potential zeros
F_rt[F_rt < approx_zero] = approx_zero
# Define the result array...
res = np.zeros( (2, NE) )
# ...and fill it
res[0, :] = E_rt
res[1, :] = F_rt
        return res
| ACROPOLIS | /ACROPOLIS-1.2.2-py3-none-any.whl/acropolis/cascade.py | cascade.py |
# ACSConv
Reinventing 2D Convolutions for 3D Images ([arXiv](https://arxiv.org/abs/1911.10477))
IEEE Journal of Biomedical and Health Informatics (IEEE JBHI), 2021 ([DOI](http://doi.org/10.1109/JBHI.2021.3049452))
**News**:
- 2022.01.26 - ACS [ConvNeXt](acsconv/models/convnext.py) supported.
- 2021.12.17 - torch 1.10 supported & pip installation supported.
- 2021.4.19 - torch 1.8 supported.
## Key contributions
* ACS convolution aims at a **plug-and-play replacement** of standard 3D convolution, for 3D medical images.
* ACS convolution enables **2D-to-3D transfer learning**, which consistently provides significant performance boost in our experiments.
* Even without pretraining, ACS convolution is **comparable to or even better than** 3D convolution, with **smaller model size** and **less computation**.
## Package Installation
If you want to use this package, you have two options:
A) Install ACSConv as a standard Python package from PyPI:
```bash
pip install ACSConv
```
B) Simply copy and paste it into your project.
You can run `test.py` to validate the installation. (If you want to test the validity of the pip installation, please move `test.py` outside of this git project directory first; otherwise you will be testing the code inside the project rather than the pip installation.)
## Requirements
### PyTorch requirements
```python
torch>=1.0.0 and torch<=1.10.0
```
You can install it by following the instructions on the [official homepage](https://pytorch.org/docs/stable/index.html).
### Other requirements
All libraries needed to run the included experiments (base requirements included).
```python
fire
jupyterlab
matplotlib
pandas
tqdm
sklearn
tensorboardx
```
## Code structure
* ``acsconv``
the core implementation of ACS convolution, including the operators, models, and 2D-to-3D/ACS model converters.
* ``operators``: include ACSConv, SoftACSConv and Conv2_5d.
* ``converters``: include converters which convert 2D models to 3d/ACS/Conv2_5d counterparts.
* ``models``: Native ACS models.
* ``experiments``
the scripts to run experiments.
* ``mylib``: the lib for running the experiments.
* ``poc``: the scripts to run proof-of-concept experiments.
* ``lidc``: the scripts to run LIDC-IDRI experiments.
## Convert a 2D model into 3D with a single line of code
```python
import torch
from torchvision.models import resnet18
from acsconv.converters import ACSConverter
# model_2d is a standard pytorch 2D model
model_2d = resnet18(pretrained=True)
B, C_in, H, W = (1, 3, 64, 64)
input_2d = torch.rand(B, C_in, H, W)
output_2d = model_2d(input_2d)
model_3d = ACSConverter(model_2d)
# once converted, model_3d is using ACSConv and capable of processing 3D volumes.
B, C_in, D, H, W = (1, 3, 64, 64, 64)
input_3d = torch.rand(B, C_in, D, H, W)
output_3d = model_3d(input_3d)
```
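Besides `ACSConverter`, the `converters` submodule also provides `Conv3dConverter` and `Conv2_5dConverter` (see the code structure above). A minimal sketch with the same call pattern (the names are taken from the package layout; treat this as an illustration rather than the full API):

```python
import torch
from torchvision.models import resnet18
from acsconv.converters import Conv3dConverter, Conv2_5dConverter

model_3d = Conv3dConverter(resnet18(pretrained=True))      # plain 3D counterpart
model_2_5d = Conv2_5dConverter(resnet18(pretrained=True))  # 2.5D counterpart
output_3d = model_3d(torch.rand(1, 3, 64, 64, 64))
```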
## Usage of ACS operators
```python
import torch
from acsconv.operators import ACSConv, SoftACSConv
B, C_in, D, H, W = (1, 3, 64, 64, 64)
x = torch.rand(B, C_in, D, H, W)
# ACSConv to process 3D volumes
conv = ACSConv(in_channels=3, out_channels=10, kernel_size=3, padding=1)
out = conv(x)
# SoftACSConv to process 3D volumes
conv = SoftACSConv(in_channels=3, out_channels=10, kernel_size=3, padding=1)
out = conv(x)
```
## Usage of native ACS models
```python
import torch
from acsconv.models.acsunet import ACSUNet
unet_3d = ACSUNet(num_classes=3)
B, C_in, D, H, W = (1, 1, 64, 64, 64)
input_3d = torch.rand(B, C_in, D, H, W)
output_3d = unet_3d(input_3d)
```
## How to run the experiments
* [Proof-of-Concept Segmentation](./experiments/poc/README.md)
* [Lung Nodule Classification and Segmentation](./experiments/lidc/README.md)
* ...
| ACSConv | /ACSConv-0.1.1.tar.gz/ACSConv-0.1.1/README.md | README.md |
import torch
import torch.nn.functional as F
import math
def conv3D_output_shape_f(i, input_shape, kernel_size, dilation, padding, stride):
"""
Calculate the original output size assuming the convolution is nn.Conv3d based on
input size, kernel size, dilation, padding and stride.
"""
return math.floor((input_shape[i]-kernel_size[i]-(dilation[i]-1)*
(kernel_size[i]-1)+2*padding[i])
/stride[i])+1
def acs_conv_f(x, weight, bias, kernel_size, dilation, padding, stride, groups, out_channels, acs_kernel_split):
B, C_in, *input_shape = x.shape
C_out = weight.shape[0]
assert groups==1 or groups==C_in==C_out, "only support standard or depthwise conv"
conv3D_output_shape = (conv3D_output_shape_f(0, input_shape, kernel_size, dilation, padding, stride),
conv3D_output_shape_f(1, input_shape, kernel_size, dilation, padding, stride),
conv3D_output_shape_f(2, input_shape, kernel_size, dilation, padding, stride))
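    # Split the kernel along the output-channel dimension into three groups and view
    # each 2D (k x k) kernel as a degenerate 3D kernel: (1, k, k) for the axial group,
    # (k, 1, k) for the coronal group and (k, k, 1) for the sagittal group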
weight_a = weight[0:acs_kernel_split[0]].unsqueeze(2)
weight_c = weight[acs_kernel_split[0]:(acs_kernel_split[0]+acs_kernel_split[1])].unsqueeze(3)
weight_s = weight[(acs_kernel_split[0]+acs_kernel_split[1]):].unsqueeze(4)
if groups==C_in==C_out:
# depth-wise
x_a = x[:, 0:acs_kernel_split[0]]
x_c = x[:, acs_kernel_split[0]:(acs_kernel_split[0]+acs_kernel_split[1])]
x_s = x[:, (acs_kernel_split[0]+acs_kernel_split[1]):]
group_a = acs_kernel_split[0]
group_c = acs_kernel_split[1]
group_s = acs_kernel_split[2]
else:
# groups=1
x_a = x_c = x_s = x
group_a = group_c = group_s = 1
f_out = []
if acs_kernel_split[0]>0:
a = F.conv3d(x_a if conv3D_output_shape[0]==input_shape[0] or 2*conv3D_output_shape[0]==input_shape[0] else F.pad(x, (0,0,0,0,padding[0],padding[0]),'constant',0)[:,:,
kernel_size[0]//2:kernel_size[0]//2+(conv3D_output_shape[0]-1)*stride[0]+1,
:,:],
weight=weight_a, bias=None,
stride=stride,
padding=(0,padding[1],padding[2]),
dilation=dilation,
groups=group_a)
f_out.append(a)
if acs_kernel_split[1]>0:
c = F.conv3d(x_c if conv3D_output_shape[1]==input_shape[1] or 2*conv3D_output_shape[1]==input_shape[1] else F.pad(x, (0,0,padding[1],padding[1]),'constant',0)[:,:,:,
kernel_size[1]//2:kernel_size[1]//2+stride[1]*(conv3D_output_shape[1]-1)+1,
:],
weight=weight_c, bias=None,
stride=stride,
padding=(padding[0],0,padding[2]),
dilation=dilation,
groups=group_c)
f_out.append(c)
if acs_kernel_split[2]>0:
s = F.conv3d(x_s if conv3D_output_shape[2]==input_shape[2] or 2*conv3D_output_shape[2]==input_shape[2] else F.pad(x, (padding[2],padding[2]),'constant',0)[:,:,:,:,
kernel_size[2]//2:kernel_size[2]//2+stride[2]*(conv3D_output_shape[2]-1)+1
],
weight=weight_s,
bias=None,
stride=stride,
padding=(padding[0],padding[1],0),
dilation=dilation,
groups=group_s)
f_out.append(s)
f = torch.cat(f_out, dim=1)
if bias is not None:
f += bias.view(1,out_channels,1,1,1)
    return f
| ACSConv | /ACSConv-0.1.1.tar.gz/ACSConv-0.1.1/acsconv/operators/functional.py | functional.py |
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn import init
import math
from collections import OrderedDict
from ..utils import _to_triple, _triple_same, _pair_same
from .base_acsconv import _ACSConv
class SoftACSConv(_ACSConv):
"""
Decorator class for soft ACS Convolution
Args:
mean: *bool*, optional, the default value is False. If True, it changes to a mean ACS Convolution.
Other arguments are the same as torch.nn.Conv3d.
Examples:
>>> import SoftACSConv
>>> x = torch.rand(batch_size, 3, D, H, W)
>>> # soft ACS Convolution
>>> conv = SoftACSConv(3, 10, 1)
>>> out = conv(x)
>>> # mean ACS Convolution
>>> conv = SoftACSConv(3, 10, 1, mean=Ture)
>>> out = conv(x)
"""
def __init__(self, in_channels, out_channels, kernel_size, stride=1,
padding=0, dilation=1, groups=1, mean=False,
bias=True, padding_mode='zeros'):
super().__init__(
in_channels, out_channels, kernel_size, stride, padding, dilation,
False, 0, groups, bias, padding_mode)
if not mean:
self.soft_w_core = nn.Parameter(torch.rand(out_channels,3)) #TODO: init
self.mean = mean
def conv3D_output_shape_f(self,i, input_shape):
"""
Calculate the original output size assuming the convolution is nn.Conv3d based on
input size, kernel size, dilation, padding and stride.
"""
return math.floor((input_shape[i]-self.kernel_size[i]-(self.dilation[i]-1)*
(self.kernel_size[i]-1)+2*self.padding[i])
/self.stride[i])+1
def forward(self, x):
"""
Convolution forward function
        Conduct convolutions along the three directions separately and then
        aggregate the three parts of feature maps in a *soft* or *mean* way.
Bias is added at last.
"""
B, C_in, *input_shape = x.shape
conv3D_output_shape = (self.conv3D_output_shape_f(0, input_shape),
self.conv3D_output_shape_f(1, input_shape),
self.conv3D_output_shape_f(2, input_shape))
f_a = F.conv3d(x if conv3D_output_shape[0]==input_shape[0] or 2*conv3D_output_shape[0]==input_shape[0] else F.pad(x, (0,0,0,0,self.padding[0],self.padding[0]),'constant',0)[:,:,
self.kernel_size[0]//2:self.kernel_size[0]//2+(conv3D_output_shape[0]-1)*self.stride[0]+1,
:,:],
weight=self.weight.unsqueeze(2), bias=None,
stride=self.stride,
padding=(0,self.padding[1],self.padding[2]),
dilation=self.dilation,
groups=self.groups)
f_c = F.conv3d(x if conv3D_output_shape[1]==input_shape[1] or 2*conv3D_output_shape[1]==input_shape[1] else F.pad(x, (0,0,self.padding[1],self.padding[1]),'constant',0)[:,:,:,
self.kernel_size[1]//2:self.kernel_size[1]//2+self.stride[1]*(conv3D_output_shape[1]-1)+1,
:],
weight=self.weight.unsqueeze(3), bias=None,
stride=self.stride,
padding=(self.padding[0],0,self.padding[2]),
dilation=self.dilation,
groups=self.groups)
f_s = F.conv3d(x if conv3D_output_shape[2]==input_shape[2] or 2*conv3D_output_shape[2]==input_shape[2] else F.pad(x, (self.padding[2],self.padding[2]),'constant',0)[:,:,:,:,
self.kernel_size[2]//2:self.kernel_size[2]//2+self.stride[2]*(conv3D_output_shape[2]-1)+1
],
weight=self.weight.unsqueeze(4), bias=None,
stride=self.stride,
padding=(self.padding[0],self.padding[1],0),
dilation=self.dilation,
groups=self.groups)
if self.mean:
f = (f_a + f_c + f_s) / 3
else:
soft_w = self.soft_w_core.softmax(-1)
f = f_a*soft_w[:,0].view(1,self.out_channels,1,1,1)+\
f_c*soft_w[:,1].view(1,self.out_channels,1,1,1)+\
f_s*soft_w[:,2].view(1,self.out_channels,1,1,1)
if self.bias is not None:
f += self.bias.view(1,self.out_channels,1,1,1)
return f
def extra_repr(self):
s = super().extra_repr() + ', mean={mean}'
        return s.format(**self.__dict__)
| ACSConv | /ACSConv-0.1.1.tar.gz/ACSConv-0.1.1/acsconv/operators/soft_acsconv.py | soft_acsconv.py |
from .functional import acs_conv_f
from .base_acsconv import _ACSConv
class ACSConv(_ACSConv):
"""
Vallina ACS Convolution
Args:
        acs_kernel_split: optional, equally split if not specified.
Other arguments are the same as torch.nn.Conv3d.
Examples:
>>> import ACSConv
>>> x = torch.rand(batch_size, 3, D, H, W)
>>> conv = ACSConv(3, 10, kernel_size=3, padding=1)
>>> out = conv(x)
>>> conv = ACSConv(3, 10, acs_kernel_split=(4, 3, 3))
>>> out = conv(x)
"""
def __init__(self, in_channels, out_channels, kernel_size, stride=1,
padding=0, dilation=1, groups=1, acs_kernel_split=None,
bias=True, padding_mode='zeros'):
super().__init__(
in_channels, out_channels, kernel_size, stride, padding, dilation,
False, 0, groups, bias, padding_mode)
if acs_kernel_split is None:
if self.out_channels%3==0:
self.acs_kernel_split = (self.out_channels//3,self.out_channels//3,self.out_channels//3)
if self.out_channels%3==1:
self.acs_kernel_split = (self.out_channels//3+1,self.out_channels//3,self.out_channels//3)
if self.out_channels%3==2:
self.acs_kernel_split = (self.out_channels//3+1,self.out_channels//3+1,self.out_channels//3)
else:
self.acs_kernel_split = acs_kernel_split
def forward(self, x):
"""
Convolution forward function
        Divide the kernel into three parts along the output channels based on acs_kernel_split,
        and conduct convolutions along the three directions separately. Bias is added at last.
"""
return acs_conv_f(x, self.weight, self.bias, self.kernel_size, self.dilation, self.padding, self.stride,
self.groups, self.out_channels, self.acs_kernel_split)
def extra_repr(self):
s = super().extra_repr() + ', acs_kernel_split={acs_kernel_split}'
        return s.format(**self.__dict__)
| ACSConv | /ACSConv-0.1.1.tar.gz/ACSConv-0.1.1/acsconv/operators/acsconv.py | acsconv.py |
import torch
import torch.nn as nn
from torch.nn import init
import math
from ..utils import _to_triple, _triple_same, _pair_same
class _ACSConv(nn.Module):
"""
Base class for ACS Convolution
Basically the same with _ConvNd in torch.nn.
Warnings:
The kernel size should be the same in the three directions under this implementation.
"""
def __init__(self, in_channels, out_channels, kernel_size, stride,
padding, dilation, transposed, output_padding,
groups, bias, padding_mode):
super().__init__()
assert padding_mode!='circular', 'circular padding is not supported yet.'
stride = _to_triple(stride)
padding = _to_triple(padding)
dilation = _to_triple(dilation)
output_padding = _to_triple(output_padding)
if in_channels % groups != 0:
raise ValueError('in_channels must be divisible by groups')
if out_channels % groups != 0:
raise ValueError('out_channels must be divisible by groups')
self.in_channels = in_channels
self.out_channels = out_channels
self.stride = stride
self.padding = padding
self.dilation = dilation
self.transposed = transposed
self.output_padding = output_padding
self.groups = groups
self.padding_mode = padding_mode
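        # NOTE: the weight is stored with a 2D kernel shape (k, k) via _pair_same;
        # this is what allows 2D (e.g. ImageNet-pretrained) state dicts to be
        # loaded directly into the ACS counterparts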
if self.transposed:
self.weight = nn.Parameter(torch.Tensor(
in_channels, out_channels // groups, *_pair_same(kernel_size) ))
else:
self.weight = nn.Parameter(torch.Tensor(
out_channels, in_channels // groups, *_pair_same(kernel_size) ))
if bias:
self.bias = nn.Parameter(torch.Tensor(out_channels))
else:
self.register_parameter('bias', None)
self.kernel_size = _triple_same(kernel_size)
self.reset_parameters()
def reset_parameters(self):
init.kaiming_uniform_(self.weight, a=math.sqrt(5))
if self.bias is not None:
fan_in, _ = init._calculate_fan_in_and_fan_out(self.weight)
bound = 1 / math.sqrt(fan_in)
init.uniform_(self.bias, -bound, bound)
def extra_repr(self):
s = ('{in_channels}, {out_channels}, kernel_size={kernel_size}'
', stride={stride}')
if self.padding != (0,) * len(self.padding):
s += ', padding={padding}'
if self.dilation != (1,) * len(self.dilation):
s += ', dilation={dilation}'
if self.output_padding != (0,) * len(self.output_padding):
s += ', output_padding={output_padding}'
if self.groups != 1:
s += ', groups={groups}'
if self.bias is None:
s += ', bias=False'
return s.format(**self.__dict__)
def __setstate__(self, state):
super().__setstate__(state)
if not hasattr(self, 'padding_mode'):
            self.padding_mode = 'zeros'
| ACSConv | /ACSConv-0.1.1.tar.gz/ACSConv-0.1.1/acsconv/operators/base_acsconv.py | base_acsconv.py |
import torch
import torch.nn as nn
import torch.nn.functional as F
from timm.models.layers import trunc_normal_, DropPath
# suggested by this [issue](https://github.com/facebookresearch/ConvNeXt/issues/37) (not compared on ACS)
import torch.backends.cudnn as cudnn
cudnn.benchmark = True
from ..operators import ACSConv
model_urls = {
"convnext_tiny_1k": "https://dl.fbaipublicfiles.com/convnext/convnext_tiny_1k_224_ema.pth",
"convnext_small_1k": "https://dl.fbaipublicfiles.com/convnext/convnext_small_1k_224_ema.pth",
"convnext_base_1k": "https://dl.fbaipublicfiles.com/convnext/convnext_base_1k_224_ema.pth",
"convnext_large_1k": "https://dl.fbaipublicfiles.com/convnext/convnext_large_1k_224_ema.pth",
"convnext_base_22k": "https://dl.fbaipublicfiles.com/convnext/convnext_base_22k_224.pth",
"convnext_large_22k": "https://dl.fbaipublicfiles.com/convnext/convnext_large_22k_224.pth",
"convnext_xlarge_22k": "https://dl.fbaipublicfiles.com/convnext/convnext_xlarge_22k_224.pth",
}
ConvOp = ACSConv
class LayerNorm(nn.Module):
r""" modified into 3D.
LayerNorm that supports two data formats: channels_last (default) or channels_first.
    The data_format argument specifies the ordering of the dimensions in the inputs: channels_last corresponds to inputs with
    shape (batch_size, depth, height, width, channels) while channels_first corresponds to inputs
with shape (batch_size, channels, depth, height, width).
"""
def __init__(self, normalized_shape, eps=1e-6, data_format="channels_last"):
super().__init__()
self.weight = nn.Parameter(torch.ones(normalized_shape))
self.bias = nn.Parameter(torch.zeros(normalized_shape))
self.eps = eps
self.data_format = data_format
if self.data_format not in ["channels_last", "channels_first"]:
raise NotImplementedError
self.normalized_shape = (normalized_shape, )
def forward(self, x):
if self.data_format == "channels_last":
return F.layer_norm(x, self.normalized_shape, self.weight, self.bias, self.eps)
elif self.data_format == "channels_first":
u = x.mean(1, keepdim=True)
s = (x - u).pow(2).mean(1, keepdim=True)
x = (x - u) / torch.sqrt(s + self.eps)
x = self.weight[:, None, None, None] * x + self.bias[:, None, None, None]
return x
class Block(nn.Module):
r""" modified into 3D.
ConvNeXt Block. There are two equivalent implementations:
(1) DwConv -> LayerNorm (channels_first) -> 1x1x1 Conv -> GELU -> 1x1x1 Conv; all in (N, C, D, H, W)
(2) DwConv -> Permute to (N, H, D, W, C); LayerNorm (channels_last) -> Linear -> GELU -> Linear; Permute back
We use (2) as we find it slightly faster in PyTorch
Args:
dim (int): Number of input channels.
drop_path (float): Stochastic depth rate. Default: 0.0
layer_scale_init_value (float): Init value for Layer Scale. Default: 1e-6.
"""
def __init__(self, dim, drop_path=0., layer_scale_init_value=1e-6):
super().__init__()
self.dwconv = ConvOp(dim, dim, kernel_size=7, padding=3, groups=dim) # depthwise conv
self.norm = LayerNorm(dim, eps=1e-6)
self.pwconv1 = nn.Linear(dim, 4 * dim) # pointwise/1x1 convs, implemented with linear layers
self.act = nn.GELU()
self.pwconv2 = nn.Linear(4 * dim, dim)
self.gamma = nn.Parameter(layer_scale_init_value * torch.ones((dim)),
requires_grad=True) if layer_scale_init_value > 0 else None
self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()
def forward(self, x):
input = x
x = self.dwconv(x)
x = x.permute(0, 2, 3, 4, 1) # (N, C, D, H, W) -> (N, D, H, W, C)
x = self.norm(x)
x = self.pwconv1(x)
x = self.act(x)
x = self.pwconv2(x)
if self.gamma is not None:
x = self.gamma * x
x = x.permute(0, 4, 1, 2, 3) # (N, D, H, W, C) -> (N, C, D, H, W)
x = input + self.drop_path(x)
return x
class ConvNeXt(nn.Module):
r""" modified into 3D.
ConvNeXt
A PyTorch impl of : `A ConvNet for the 2020s` -
https://arxiv.org/pdf/2201.03545.pdf
Args:
in_chans (int): Number of input image channels. Default: 3
num_classes (int): Number of classes for classification head. Default: 1000
depths (tuple(int)): Number of blocks at each stage. Default: [3, 3, 9, 3]
dims (int): Feature dimension at each stage. Default: [96, 192, 384, 768]
drop_path_rate (float): Stochastic depth rate. Default: 0.
layer_scale_init_value (float): Init value for Layer Scale. Default: 1e-6.
head_init_scale (float): Init scaling value for classifier weights and biases. Default: 1.
"""
def __init__(self, in_chans=3, num_classes=1000,
depths=[3, 3, 9, 3], dims=[96, 192, 384, 768], drop_path_rate=0.,
layer_scale_init_value=1e-6, head_init_scale=1.,
):
super().__init__()
self.downsample_layers = nn.ModuleList() # stem and 3 intermediate downsampling conv layers
stem = nn.Sequential(
ConvOp(in_chans, dims[0], kernel_size=4, stride=4),
LayerNorm(dims[0], eps=1e-6, data_format="channels_first")
)
self.downsample_layers.append(stem)
for i in range(3):
downsample_layer = nn.Sequential(
LayerNorm(dims[i], eps=1e-6, data_format="channels_first"),
ConvOp(dims[i], dims[i+1], kernel_size=2, stride=2),
)
self.downsample_layers.append(downsample_layer)
self.stages = nn.ModuleList() # 4 feature resolution stages, each consisting of multiple residual blocks
dp_rates=[x.item() for x in torch.linspace(0, drop_path_rate, sum(depths))]
cur = 0
for i in range(4):
stage = nn.Sequential(
*[Block(dim=dims[i], drop_path=dp_rates[cur + j],
layer_scale_init_value=layer_scale_init_value) for j in range(depths[i])]
)
self.stages.append(stage)
cur += depths[i]
self.norm = nn.LayerNorm(dims[-1], eps=1e-6) # final norm layer
self.head = nn.Linear(dims[-1], num_classes)
self.apply(self._init_weights)
self.head.weight.data.mul_(head_init_scale)
self.head.bias.data.mul_(head_init_scale)
def _init_weights(self, m):
        if isinstance(m, (ConvOp, nn.Linear)):  # ConvOp (ACSConv) replaces nn.Conv2d in this 3D variant
trunc_normal_(m.weight, std=.02)
nn.init.constant_(m.bias, 0)
def forward_features(self, x):
for i in range(4):
x = self.downsample_layers[i](x)
x = self.stages[i](x)
return self.norm(x.mean([-3, -2, -1])) # global average pooling, (N, C, D, H, W) -> (N, C)
def forward(self, x):
x = self.forward_features(x)
x = self.head(x)
return x
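# Usage sketch for the factory functions below (illustrative; the argument values
# are assumptions, not defaults). The resulting models use ACSConv throughout and
# therefore expect 5D inputs; spatial sizes should ideally be multiples of 32
# (stem stride 4, followed by three stride-2 downsampling layers):
#   model = convnext_tiny(pretrained=False, in_chans=3, num_classes=2)
#   out = model(torch.rand(1, 3, 32, 64, 64))  # -> (1, 2)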
def convnext_tiny(pretrained=False, **kwargs):
model = ConvNeXt(depths=[3, 3, 9, 3], dims=[96, 192, 384, 768], **kwargs)
if pretrained:
url = model_urls['convnext_tiny_1k']
checkpoint = torch.hub.load_state_dict_from_url(url=url, map_location="cpu", check_hash=True)
model.load_state_dict(checkpoint["model"])
return model
def convnext_small(pretrained=False, **kwargs):
model = ConvNeXt(depths=[3, 3, 27, 3], dims=[96, 192, 384, 768], **kwargs)
if pretrained:
url = model_urls['convnext_small_1k']
checkpoint = torch.hub.load_state_dict_from_url(url=url, map_location="cpu")
model.load_state_dict(checkpoint["model"])
return model
def convnext_base(pretrained=False, in_22k=False, **kwargs):
model = ConvNeXt(depths=[3, 3, 27, 3], dims=[128, 256, 512, 1024], **kwargs)
if pretrained:
url = model_urls['convnext_base_22k'] if in_22k else model_urls['convnext_base_1k']
checkpoint = torch.hub.load_state_dict_from_url(url=url, map_location="cpu")
model.load_state_dict(checkpoint["model"])
return model
def convnext_large(pretrained=False, in_22k=False, **kwargs):
model = ConvNeXt(depths=[3, 3, 27, 3], dims=[192, 384, 768, 1536], **kwargs)
if pretrained:
url = model_urls['convnext_large_22k'] if in_22k else model_urls['convnext_large_1k']
checkpoint = torch.hub.load_state_dict_from_url(url=url, map_location="cpu")
model.load_state_dict(checkpoint["model"])
return model
def convnext_xlarge(pretrained=False, in_22k=False, **kwargs):
model = ConvNeXt(depths=[3, 3, 27, 3], dims=[256, 512, 1024, 2048], **kwargs)
if pretrained:
assert in_22k, "only ImageNet-22K pre-trained ConvNeXt-XL is available; please set in_22k=True"
url = model_urls['convnext_xlarge_22k']
checkpoint = torch.hub.load_state_dict_from_url(url=url, map_location="cpu")
model.load_state_dict(checkpoint["model"])
    return model
| ACSConv | /ACSConv-0.1.1.tar.gz/ACSConv-0.1.1/acsconv/models/convnext.py | convnext.py |
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.utils.model_zoo as model_zoo
from collections import OrderedDict
from ..operators import ACSConv
model_urls = {
'densenet121': 'https://download.pytorch.org/models/densenet121-a639ec97.pth',
'densenet169': 'https://download.pytorch.org/models/densenet169-b2777c0a.pth',
'densenet201': 'https://download.pytorch.org/models/densenet201-c1103571.pth',
'densenet161': 'https://download.pytorch.org/models/densenet161-8d451a50.pth',
}
def densenet121(pretrained=False, **kwargs):
r"""Densenet-121 model from
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = DenseNet(num_init_features=64, growth_rate=32, block_config=(6, 12, 24, 16),
**kwargs)
if pretrained:
state_dict = model_zoo.load_url(model_urls['densenet121'])
model_state_dict = model.state_dict()
online_sd = list(state_dict.items())
count = 0
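        # Copy the 2D checkpoint tensors into the 3D model in order of appearance;
        # 'num_batches_tracked' entries are skipped because the (old-format)
        # torchvision checkpoint does not contain them. This works since ACS
        # weights keep the 2D kernel shape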
for i, k in enumerate(model_state_dict.keys()):
if 'num_batches_tracked' not in k:
print(i, count, k, online_sd[count][0])
model_state_dict[k] = online_sd[count][1]
count += 1
model.load_state_dict(model_state_dict)
print('densenet loaded imagenet pretrained weights')
else:
print('densenet without imagenet pretrained weights')
return model
class _DenseLayer(nn.Sequential):
def __init__(self, num_input_features, growth_rate, bn_size, drop_rate):
super(_DenseLayer, self).__init__()
self.add_module('norm1', nn.BatchNorm3d(num_input_features)),
self.add_module('relu1', nn.ReLU(inplace=True)),
self.add_module('conv1', ACSConv(num_input_features, bn_size *
growth_rate, kernel_size=1, stride=1, bias=False)),
self.add_module('norm2', nn.BatchNorm3d(bn_size * growth_rate)),
self.add_module('relu2', nn.ReLU(inplace=True)),
self.add_module('conv2', ACSConv(bn_size * growth_rate, growth_rate,
kernel_size=3, stride=1, padding=1, bias=False)),
self.drop_rate = drop_rate
def forward(self, x):
new_features = super(_DenseLayer, self).forward(x)
if self.drop_rate > 0:
new_features = F.dropout(new_features, p=self.drop_rate, training=self.training)
return torch.cat([x, new_features], 1)
class _DenseBlock(nn.Sequential):
def __init__(self, num_layers, num_input_features, bn_size, growth_rate, drop_rate):
super(_DenseBlock, self).__init__()
for i in range(num_layers):
layer = _DenseLayer(num_input_features + i * growth_rate, growth_rate, bn_size, drop_rate)
self.add_module('denselayer%d' % (i + 1), layer)
class _Transition(nn.Sequential):
def __init__(self, num_input_features, num_output_features, downsample=True):
super(_Transition, self).__init__()
self.add_module('norm', nn.BatchNorm3d(num_input_features))
self.add_module('relu', nn.ReLU(inplace=True))
self.add_module('conv', ACSConv(num_input_features, num_output_features,
kernel_size=1, stride=1, bias=False))
if downsample:
self.add_module('pool', nn.AvgPool3d(kernel_size=2, stride=2))
class DenseNet(nn.Module):
r"""Densenet-BC model class, based on
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_
Args:
growth_rate (int) - how many filters to add each layer (`k` in paper)
block_config (list of 4 ints) - how many layers in each pooling block
num_init_features (int) - the number of filters to learn in the first convolution layer
bn_size (int) - multiplicative factor for number of bottle neck layers
(i.e. bn_size * k features in the bottleneck layer)
drop_rate (float) - dropout rate after each dense layer
num_classes (int) - number of classification classes
"""
def __init__(self, growth_rate=32, block_config=(6, 12, 24, 16),
num_init_features=64, bn_size=4, drop_rate=0, num_classes=1000):
super(DenseNet, self).__init__()
# First convolution
self.features = nn.Sequential(OrderedDict([
('conv0', ACSConv(3, num_init_features, kernel_size=7, stride=1,
padding=3, bias=False)),
('norm0', nn.BatchNorm3d(num_init_features)),
('relu0', nn.ReLU(inplace=True)),
]))
num_features = num_init_features
for i, num_layers in enumerate(block_config):
setattr(self, 'layer{}'.format(i+1), nn.Sequential(OrderedDict([
('denseblock%d' % (i + 1), _DenseBlock(
num_layers=num_layers,
num_input_features=num_features,
bn_size=bn_size,
growth_rate=growth_rate,
drop_rate=drop_rate)),
])))
num_features = num_features + num_layers * growth_rate
if i != len(block_config) - 1:
if i in [0,1,2]:
downsample = True
else:
downsample = False
trans = _Transition(num_input_features=num_features,
num_output_features=num_features // 2, downsample=downsample)
getattr(self, 'layer{}'.format(i+1)).add_module('transition%d' % (i + 1), trans)
num_features = num_features // 2
for m in self.modules():
if isinstance(m, ACSConv):
nn.init.kaiming_normal_(m.weight)
elif isinstance(m, nn.BatchNorm3d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.Linear):
nn.init.constant_(m.bias, 0)
def forward(self, x):
x = self.features(x)
x2 = x.clone()
x = self.layer1(x)
x = self.layer2(x)
x1 = x.clone()
x = self.layer3(x)
x = self.layer4(x)
return x, x1, x2
class FCNHead(nn.Sequential):
def __init__(self, in_channels, channels):
inter_channels = in_channels // 4
layers = [
ACSConv(in_channels, inter_channels, 3, padding=1, bias=False),
nn.BatchNorm3d(inter_channels),
nn.ReLU(),
ACSConv(inter_channels, channels, 1)
]
super(FCNHead, self).__init__(*layers)
class FCNDenseNet(nn.Module):
def __init__(self, pretrained, num_classes, backbone='densenet121'):
super().__init__()
self.backbone = globals()[backbone](pretrained=pretrained)
self.conv1 = ACSConv(1024+256, 256, kernel_size=1, stride=1,
padding=0, bias=False)
self.conv2 = ACSConv(256+64, 64, kernel_size=1, stride=1,
padding=0, bias=False)
self.classifier = FCNHead(in_channels=64, channels=num_classes)
def forward(self, x):
features, features1, features2 = self.backbone(x)
# print(features.shape, features1.shape, features2.shape)
features_cat1 = torch.cat([features1, F.interpolate(features, scale_factor=2, mode='trilinear')], dim=1)
features_cat1 = self.conv1(features_cat1)
features_cat2 = torch.cat([features2, F.interpolate(features_cat1, scale_factor=4, mode='trilinear')], dim=1)
features_cat2 = self.conv2(features_cat2)
features = features_cat2
out = self.classifier(features)
return out
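# Usage sketch for FCNDenseNet above (illustrative): the decoder upsamples the
# backbone features (x2, then x4) back to the input resolution, so the input
# spatial size should be divisible by 8:
#   net = FCNDenseNet(pretrained=False, num_classes=2)
#   logits = net(torch.rand(1, 3, 32, 32, 32))  # -> (1, 2, 32, 32, 32)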
class ClsDenseNet(nn.Module):
def __init__(self, pretrained, num_classes, backbone='densenet121'):
super().__init__()
self.backbone = globals()[backbone](pretrained=pretrained)
self.fc = nn.Linear(1024, num_classes, bias=True)
def forward(self, x):
features = self.backbone(x)[0]
features = F.adaptive_avg_pool3d(features, output_size=1).view(features.shape[0], -1)
out = self.fc(features)
        return out
| ACSConv | /ACSConv-0.1.1.tar.gz/ACSConv-0.1.1/acsconv/models/densenet.py | densenet.py |
import torch.nn as nn
import math
import torch
import torch.utils.model_zoo as model_zoo
import torch.nn.functional as F
from ..operators import ACSConv
model_urls = {
'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth',
'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth',
'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',
'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth',
}
def resnet18(pretrained=False, **kwargs):
"""Constructs a ResNet-18 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(BasicBlock, [2, 2, 2, 2], **kwargs)
if pretrained:
state_dict = model_zoo.load_url(model_urls['resnet18'])
for key in list(state_dict.keys()):
if 'fc' in key:
del state_dict[key]
model.load_state_dict(state_dict,strict=False)
print('resnet18 loaded imagenet pretrained weights')
else:
print('resnet18 without imagenet pretrained weights')
return model
def conv3x3(in_planes, out_planes, stride=1):
"""3x3 convolution with padding"""
return ACSConv(in_planes, out_planes, kernel_size=3, stride=stride,
padding=1, bias=False)
class BasicBlock(nn.Module):
expansion = 1
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(BasicBlock, self).__init__()
self.conv1 = conv3x3(inplanes, planes, stride)
self.bn1 = nn.BatchNorm3d(planes)
self.relu = nn.ReLU(inplace=True)
self.conv2 = conv3x3(planes, planes)
self.bn2 = nn.BatchNorm3d(planes)
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
class Bottleneck(nn.Module):
expansion = 4
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(Bottleneck, self).__init__()
self.conv1 = ACSConv(inplanes, planes, kernel_size=1, bias=False)
self.bn1 = nn.BatchNorm3d(planes)
self.conv2 = ACSConv(planes, planes, kernel_size=3, stride=stride,
padding=1, bias=False)
self.bn2 = nn.BatchNorm3d(planes)
self.conv3 = ACSConv(planes, planes * 4, kernel_size=1, bias=False)
self.bn3 = nn.BatchNorm3d(planes * 4)
self.relu = nn.ReLU(inplace=True)
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.bn3(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
class ResNet(nn.Module):
def __init__(self, block, layers):
self.inplanes = 64
super(ResNet, self).__init__()
self.conv1 = ACSConv(3, 64, kernel_size=7, stride=1, padding=3,
bias=False)
self.bn1 = nn.BatchNorm3d(64)
self.relu = nn.ReLU(inplace=True)
self.layer1 = self._make_layer(block, 64, layers[0])
self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
self.layer3 = self._make_layer(block, 256, layers[2])
self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
for m in self.modules():
if isinstance(m, ACSConv):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
elif isinstance(m, nn.BatchNorm3d):
m.weight.data.fill_(1)
m.bias.data.zero_()
def _make_layer(self, block, planes, blocks, stride=1):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
ACSConv(self.inplanes, planes * block.expansion,
kernel_size=1, stride=stride, bias=False),
nn.BatchNorm3d(planes * block.expansion),
)
layers = []
layers.append(block(self.inplanes, planes, stride, downsample))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes))
return nn.Sequential(*layers)
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x2 = x.clone()
x = self.layer1(x)
x = self.layer2(x)
x1 = x.clone()
x = self.layer3(x)
x = self.layer4(x)
return x, x1, x2
class FCNHead(nn.Sequential):
def __init__(self, in_channels, channels):
inter_channels = in_channels // 4
layers = [
ACSConv(in_channels, inter_channels, 3, padding=1, bias=False),
nn.BatchNorm3d(inter_channels),
nn.ReLU(),
ACSConv(inter_channels, channels, 1)
]
super(FCNHead, self).__init__(*layers)
class FCNResNet(nn.Module):
def __init__(self, pretrained, num_classes, backbone='resnet18'):
super().__init__()
self.backbone = globals()[backbone](pretrained=pretrained)
self.conv1 = ACSConv((128+512), 512, kernel_size=1, stride=1, padding=0, bias=False)
self.conv2 = ACSConv(64+512, 512, kernel_size=1, stride=1, padding=0, bias=False)
self.classifier = FCNHead(in_channels=512, channels=num_classes)
def forward(self, x):
features, features1, features2 = self.backbone(x)
features_cat1 = torch.cat([features1, F.interpolate(features, scale_factor=2, mode='trilinear')], dim=1)
features_cat1 = self.conv1(features_cat1)
features_cat2 = torch.cat([features2, F.interpolate(features_cat1, scale_factor=2, mode='trilinear')], dim=1)
features_cat2 = self.conv2(features_cat2)
features = features_cat2
out = self.classifier(features)
return out
class ClsResNet(nn.Module):
def __init__(self, pretrained, num_classes, backbone='resnet18'):
super().__init__()
self.backbone = globals()[backbone](pretrained=pretrained)
self.fc = nn.Linear(512, num_classes, bias=True)
def forward(self, x):
features = self.backbone(x)[0]
features = F.adaptive_avg_pool3d(features, output_size=1).view(features.shape[0], -1)
out = self.fc(features)
return out | ACSConv | /ACSConv-0.1.1.tar.gz/ACSConv-0.1.1/acsconv/models/resnet.py | resnet.py |
import torch
import torch.nn as nn
import torch.utils.model_zoo as model_zoo
import math
try:
from torch.hub import load_state_dict_from_url
except ImportError:
from torch.utils.model_zoo import load_url as load_state_dict_from_url
from collections import OrderedDict
from torch.nn import functional as F
from ..operators import ACSConv
model_urls = {
'vgg11': 'https://download.pytorch.org/models/vgg11-bbd30ac9.pth',
'vgg13': 'https://download.pytorch.org/models/vgg13-c768596a.pth',
'vgg16': 'https://download.pytorch.org/models/vgg16-397923af.pth',
'vgg19': 'https://download.pytorch.org/models/vgg19-dcbb9e9d.pth',
'vgg11_bn': 'https://download.pytorch.org/models/vgg11_bn-6002323d.pth',
'vgg13_bn': 'https://download.pytorch.org/models/vgg13_bn-abd245e5.pth',
'vgg16_bn': 'https://download.pytorch.org/models/vgg16_bn-6c64b313.pth',
'vgg19_bn': 'https://download.pytorch.org/models/vgg19_bn-c79401a0.pth',
}
class VGG16_bn(nn.Module):
def __init__(self, pretrained=False, num_classes=1000, init_weights=True):
super(VGG16_bn, self).__init__()
self.layer0 = nn.Sequential(OrderedDict([
('conv0', ACSConv(3, 64, kernel_size=3, padding=1)),
('bn0', nn.BatchNorm3d(64)),
('relu0', nn.ReLU(inplace=True)),
('conv1', ACSConv(64, 64, kernel_size=3, padding=1)),
('bn1', nn.BatchNorm3d(64)),
('relu1', nn.ReLU(inplace=True))
]))
self.layer1 = nn.Sequential(OrderedDict([
('conv0', ACSConv(64, 128, kernel_size=3, padding=1)),
('bn0', nn.BatchNorm3d(128)),
('relu0', nn.ReLU(inplace=True)),
('conv1', ACSConv(128, 128, kernel_size=3, padding=1)),
('bn1', nn.BatchNorm3d(128)),
('relu1', nn.ReLU(inplace=True))
]))
self.layer2 = nn.Sequential(OrderedDict([
('maxpool0', nn.MaxPool3d(kernel_size=2, stride=2)),
('conv0', ACSConv(128, 256, kernel_size=3, padding=1)),
('bn0', nn.BatchNorm3d(256)),
('relu0', nn.ReLU(inplace=True)),
('conv1', ACSConv(256, 256, kernel_size=3, padding=1)),
('bn1', nn.BatchNorm3d(256)),
('relu1', nn.ReLU(inplace=True)),
('conv2', ACSConv(256, 256, kernel_size=3, padding=1)),
('bn2', nn.BatchNorm3d(256)),
('relu2', nn.ReLU(inplace=True)),
]))
self.layer3 = nn.Sequential(OrderedDict([
('conv0', ACSConv(256, 512, kernel_size=3, padding=1)),
('bn0', nn.BatchNorm3d(512)),
('relu0', nn.ReLU(inplace=True)),
('conv1', ACSConv(512, 512, kernel_size=3, padding=1)),
('bn1', nn.BatchNorm3d(512)),
('relu1', nn.ReLU(inplace=True)),
('conv2', ACSConv(512, 512, kernel_size=3, padding=1)),
('bn2', nn.BatchNorm3d(512)),
('relu2', nn.ReLU(inplace=True)),
]))
self.layer4 = nn.Sequential(OrderedDict([
('maxpool0', nn.MaxPool3d(kernel_size=2, stride=2)),
('conv0', ACSConv(512, 512, kernel_size=3, padding=1)),
('bn0', nn.BatchNorm3d(512)),
('relu0', nn.ReLU(inplace=True)),
('conv1', ACSConv(512, 512, kernel_size=3, padding=1)),
('bn1', nn.BatchNorm3d(512)),
('relu1', nn.ReLU(inplace=True)),
('conv2', ACSConv(512, 512, kernel_size=3, padding=1)),
('bn2', nn.BatchNorm3d(512)),
('relu2', nn.ReLU(inplace=True)),
]))
self._initialize_weights()
if pretrained:
model_state_dict = self.state_dict()
online_sd = list(load_state_dict_from_url(model_urls['vgg16_bn']).items())
count = 0
for i, k in enumerate(model_state_dict.keys()):
if 'num_batches_tracked' not in k:
print(i, count, k, online_sd[count][0])
model_state_dict[k] = online_sd[count][1]
count += 1
self.load_state_dict(model_state_dict)
print('vgg loaded imagenet pretrained weights')
else:
print('vgg without imagenet pretrained weights')
def forward(self, x):
x = self.layer0(x)
x2 = x.clone()
x = self.layer1(x)
x = self.layer2(x)
x1 = x.clone()
x = self.layer3(x)
x = self.layer4(x)
return x, x1, x2
def _initialize_weights(self):
for m in self.modules():
if isinstance(m, ACSConv):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
if m.bias is not None:
m.bias.data.zero_()
elif isinstance(m, nn.BatchNorm3d):
m.weight.data.fill_(1)
m.bias.data.zero_()
elif isinstance(m, nn.Linear):
m.weight.data.normal_(0, 0.01)
m.bias.data.zero_()
def vgg16(*args, **kwargs):
return VGG16_bn(*args, **kwargs)
class FCNHead(nn.Sequential):
def __init__(self, in_channels, channels):
inter_channels = in_channels // 4
layers = [
ACSConv(in_channels, inter_channels, 3, padding=1, bias=False),
nn.BatchNorm3d(inter_channels),
nn.ReLU(),
ACSConv(inter_channels, channels, 1)
]
super(FCNHead, self).__init__(*layers)
class FCNVGG(nn.Module):
def __init__(self, pretrained, num_classes, backbone='VGG16_bn'):
super().__init__()
self.backbone = globals()[backbone](pretrained=pretrained)
self.conv1 = ACSConv(512+256, 256, kernel_size=1, stride=1,
padding=0, bias=False)
self.conv2 = ACSConv(256+64, 64, kernel_size=1, stride=1,
padding=0, bias=False)
self.classifier = FCNHead(in_channels=64, channels=num_classes)
def forward(self, x):
features, features1, features2 = self.backbone(x)
# print(features.shape, features1.shape, features2.shape)
features_cat1 = torch.cat([features1, F.interpolate(features, scale_factor=2, mode='trilinear')], dim=1)
features_cat1 = self.conv1(features_cat1)
features_cat2 = torch.cat([features2, F.interpolate(features_cat1, scale_factor=2, mode='trilinear')], dim=1)
features_cat2 = self.conv2(features_cat2)
features = features_cat2
out = self.classifier(features)
return out
class ClsVGG(nn.Module):
def __init__(self, pretrained, num_classes, backbone='vgg16'):
super().__init__()
self.backbone = globals()[backbone](pretrained=pretrained)
self.fc = nn.Linear(512, num_classes, bias=True)
def forward(self, x):
features = self.backbone(x)[0]
features = F.adaptive_avg_pool3d(features, output_size=1).view(features.shape[0], -1)
out = self.fc(features)
return out | ACSConv | /ACSConv-0.1.1.tar.gz/ACSConv-0.1.1/acsconv/models/vgg.py | vgg.py |
import torch.nn as nn
from ..utils import _triple_same
class BaseConverter(object):
"""
base class for converters
"""
converter_attributes = []
target_conv = None
def __init__(self, model):
""" Convert the model to its corresponding counterparts and deal with original weights if necessary """
pass
def convert_module(self, module):
"""
A recursive function.
Treat the entire model as a tree and convert each leaf module to
target_conv if it's Conv2d,
3d counterparts if it's a pooling or normalization module,
trilinear mode if it's a Upsample module.
"""
for child_name, child in module.named_children():
if isinstance(child, nn.Conv2d):
arguments = nn.Conv2d.__init__.__code__.co_varnames[1:]
# cleaning the tuple due to new PyTorch namming schema (or added new variables)
arguments = [a for a in arguments if a not in ['device', 'dtype', 'factory_kwargs','kernel_size_', 'stride_', 'padding_', 'dilation_']]
kwargs = {k: getattr(child, k) for k in arguments}
kwargs = self.convert_conv_kwargs(kwargs)
setattr(module, child_name, self.__class__.target_conv(**kwargs))
elif hasattr(nn, child.__class__.__name__) and \
('pool' in child.__class__.__name__.lower() or
'norm' in child.__class__.__name__.lower()):
if hasattr(nn, child.__class__.__name__.replace('2d', '3d')):
TargetClass = getattr(nn, child.__class__.__name__.replace('2d', '3d'))
arguments = TargetClass.__init__.__code__.co_varnames[1:]
arguments = [a for a in arguments if a not in ['device', 'dtype', 'factory_kwargs']]
kwargs = {k: getattr(child, k) for k in arguments}
if 'adaptive' in child.__class__.__name__.lower():
for k in kwargs.keys():
kwargs[k] = _triple_same(kwargs[k])
setattr(module, child_name, TargetClass(**kwargs))
else:
raise Exception('No corresponding module in 3D for 2d module {}'.format(child.__class__.__name__))
elif isinstance(child, nn.Upsample):
arguments = nn.Upsample.__init__.__code__.co_varnames[1:]
kwargs = {k: getattr(child, k) for k in arguments}
kwargs['mode'] = 'trilinear' if kwargs['mode']=='bilinear' else kwargs['mode']
setattr(module, child_name, nn.Upsample(**kwargs))
else:
self.convert_module(child)
return module
def convert_conv_kwargs(self, kwargs):
"""
Called by self.convert_module. Transform the original Conv2d arguments
to meet the arguments requirements of target_conv.
"""
raise NotImplementedError
def __getattr__(self, attr):
return getattr(self.model, attr)
def __setattr__(self, name, value):
if name in self.__class__.converter_attributes:
return object.__setattr__(self, name, value)
else:
return setattr(self.model, name, value)
def __call__(self, x):
return self.model(x)
def __repr__(self):
return self.__class__.__name__ + '(\n' + self.model.__repr__() + '\n)' | ACSConv | /ACSConv-0.1.1.tar.gz/ACSConv-0.1.1/acsconv/converters/base_converter.py | base_converter.py |
from .base_converter import BaseConverter
from ..operators import Conv2_5d
from ..utils import _pair_same
class Conv2_5dConverter(BaseConverter):
"""
Decorator class for converting 2d convolution modules
to corresponding 3d version in any networks.
Args:
model (torch.nn.module): model that needs to be converted
Warnings:
Functions in torch.nn.functional involved in data dimension are not supported
Examples:
>>> import Conv2_5DWrapper
>>> import torchvision
>>> # m is a standard pytorch model
>>> m = torchvision.models.resnet18(True)
>>> m = Conv2_5DWrapper(m)
>>> # after converted, m is using ACSConv and capable of processing 3D volumes
>>> x = torch.rand(batch_size, in_channels, D, H, W)
>>> out = m(x)
"""
converter_attributes = ['model', 'unsqueeze_axis']
target_conv = Conv2_5d
def __init__(self, model, unsqueeze_axis=-3):
preserve_state_dict = model.state_dict()
self.model = model
self.unsqueeze_axis = unsqueeze_axis
self.model = self.convert_module(self.model)
self.load_state_dict(preserve_state_dict, strict=True)
def convert_conv_kwargs(self, kwargs):
kwargs['bias'] = True if kwargs['bias'] is not None else False
for k in ['kernel_size','stride','padding','dilation']:
kwargs[k] = _pair_same(kwargs[k])[0]
kwargs['unsqueeze_axis'] = self.unsqueeze_axis
return kwargs
def load_state_dict(self, state_dict, strict=True, unsqueeze_axis=-3):
load_state_dict_from_2d_to_2_5d(self.model, state_dict, strict=strict, unsqueeze_axis=unsqueeze_axis)
def load_state_dict_from_2d_to_2_5d(model_2_5d, state_dict_2d, strict=True, unsqueeze_axis=-3):
for key in list(state_dict_2d.keys()):
if state_dict_2d[key].dim()==4:
state_dict_2d[key] = state_dict_2d[key].unsqueeze(unsqueeze_axis)
model_2_5d.load_state_dict(state_dict_2d, strict=strict) | ACSConv | /ACSConv-0.1.1.tar.gz/ACSConv-0.1.1/acsconv/converters/conv2_5d_converter.py | conv2_5d_converter.py |
import torch
import torch.nn as nn
from .base_converter import BaseConverter
from ..utils import _triple_same
class Conv3dConverter(BaseConverter):
"""
Decorator class for converting 2d convolution modules
to corresponding 3d version in any networks.
Args:
model (torch.nn.module): model that needs to be converted
Warnings:
Functions in torch.nn.functional involved in data dimension are not supported
Examples:
>>> import Conv3dConverter
>>> import torchvision
>>> # m is a standard pytorch model
>>> m = torchvision.models.resnet18(True)
>>> m = Conv3dConverter(m)
>>> # after converted, m is using Conv3d and capable of processing 3D volumes
>>> x = torch.rand(batch_size, in_channels, D, H, W)
>>> out = m(x)
"""
converter_attributes = ['model']
target_conv = nn.Conv3d
def __init__(self, model, i3d_repeat_axis=None):
if i3d_repeat_axis is not None:
preserve_state_dict = model.state_dict()
self.model = model
self.model = self.convert_module(self.model)
if i3d_repeat_axis is not None:
self.load_state_dict(preserve_state_dict, strict=True, i3d_repeat_axis=i3d_repeat_axis)
def convert_conv_kwargs(self, kwargs):
kwargs['bias'] = True if kwargs['bias'] is not None else False
for k in ['kernel_size','stride','padding','dilation']:
kwargs[k] = _triple_same(kwargs[k])
return kwargs
def load_state_dict(self, state_dict, strict=True, i3d_repeat_axis=None):
if i3d_repeat_axis is not None:
return load_state_dict_from_2d_to_i3d(self.model, state_dict, strict, repeat_axis=i3d_repeat_axis)
else:
return self.model.load_state_dict(state_dict, strict)
def load_state_dict_from_2d_to_i3d(model_3d, state_dict_2d, strict=True, repeat_axis=-1):
present_dict = model_3d.state_dict()
for key in list(state_dict_2d.keys()):
if state_dict_2d[key].dim()==4:
repeat_times = present_dict[key].shape[repeat_axis]
state_dict_2d[key] = torch.stack([state_dict_2d[key]]*repeat_times, dim=repeat_axis) / repeat_times
return model_3d.load_state_dict(state_dict_2d, strict=strict) | ACSConv | /ACSConv-0.1.1.tar.gz/ACSConv-0.1.1/acsconv/converters/conv3d_converter.py | conv3d_converter.py |
# ACSNI
Automatic context-specific network inference
Determining tissue- and disease-specific circuit of biological pathways remains a fundamental goal of molecular biology.
Many components of these biological pathways still remain unknown, hindering the full and accurate characterisation of
biological processes of interest. ACSNI leverages artificial intelligence for the reconstruction of a biological pathway,
aids the discovery of pathway components and classification of the crosstalk between pathways in specific tissues.

This tool is built in python3.8 with tensorflow backend and keras functional API.
# Installation and running the tool
The best way to get ACSNI along with all the dependencies is to install the release from python package installer (pip)
```pip install ACSNI```
This will add four command line scripts:
| Script | Context | Usage |
| --- | --- | --- |
| ACSNI-run | Gene set analysis | ```ACSNI-run -h``` |
| ACSNI-derive | Single gene analysis | ```ACSNI-derive -h``` |
| ACSNI-get | Link pathway trait | ```ACSNI-get -h``` |
| ACSNI-split | Split expression data | ```ACSNI-split -h``` |
Utility functions can be imported using conventional python system like ```from ACSNI.dbs import ACSNIResults```
# Input ACSNI-run
Expression Matrix - The expression file (.csv), specified by ```-i```, where columns are samples and rows are genes.
The expression values should be normalised (eg. TPM, CPM, RSEM). Make sure the column name of the 1st column is "gene".
| gene | Sample1 | Sample2 | Sample3 |
| --- | --- | --- | --- |
| Foxp1 | 123.2 | 274.1 | 852.6 |
| PD1 | 324.2 | 494.1 | 452.6 |
| CD8 | 523.6 | 624.1 | 252.6 |
This input should not be transformed in any way (e.g. log, z-scale)
Gene set matrix - The prior matrix (.csv) file, specified by ```-t```, where rows are genes and column is a binary
pathway membership. Where "1" means that a gene is in the pathway and "0" means that the gene is not know a priori.
The standard prior looks like below. Make sure the column name of the 1st column is "gene".
| gene | Pathway |
| --- | --- |
| Foxp1 | 0 |
| PD1 | 0 |
| CD8 | 1 |
You can also supply gene IDs instead of gene symbols.
The tool can handle multiple pathway columns in the ```-t``` file as below.
| gene | Pathway1 | Pathway2 | Pathway3 |
| --- | --- | --- | --- |
| Foxp1 | 0 | 0 | 0 |
| PD1 | 0 | 1 | 0 |
| CD8 | 1 | 0 | 1 |
Note: Each pathway above is analysed independently, and the outputs have no in-built relationship.
The tool is designed to get a granular view of a single pathway at a time.
# Output ACSNI-run
Database (.ptl)
| Content | Information |
| --- | --- |
| co | Pathway Code|
| w | Subprocess space |
| n | Interaction scores |
| p | Score classification |
| d | Interaction direction |
| run_info | Run parameters |
| methods | Extractor functions |
Predicted Network (.csv)
| Content | Meaning |
| --- | --- |
| name | Gene |
| sub | Subprocess |
| direction | Direction of interactions with subprocess |
Null (.csv) {Shuffled expression matrix}
# Input ACSNI-derive
Expression Matrix - See ``-i``` description above.
Note - We recommend removing any un-desirable genes (eg. MT, RPL) from the expression
matrix prior to running ACSNI-derive as they usually interfere during initial prior matrix generation steps.
For TCR/BCR genes, counts of alpha, beta and gamma chains can be combined into a single count.
Biotype file (Optional) - The biotype file (.csv) specified by ```-f```, given if the generation of gene set should be
based on a particular biotype specified by ```-b```.
| gene | biotype |
| --- | --- |
| Foxp1 | protein_coding |
| PD1 | protein_coding |
| MALAT1 | lncRNA |
| SNHG12 | lncRNA |
| RNU1-114P | snRNA |
Correlation file (Optional) - The correlation file (.csv) specified by ```-u```, given if the user wishes to replace
"some" specific genes with other genes to be used as a prior for the first iteration of ACSNI-run (internally).
| gene | cor |
| --- | --- |
| Foxp1 | 0.9 |
| PD1 | 0.89 |
| MALAT1 | 0.85 |
| SNHG12 | 0.80 |
| RNU1-114P | 0.72 |
# Output ACSNI-derive
Database (.ptl)
| Content | Information |
| --- | --- |
| co | Pathway Code|
| n | Interaction scores |
| d | Interaction direction |
| ac | Correlation and T test results |
| fd | Unfiltered prediction data |
| run_info | Run parameters |
| methods | Extractor functions |
Predicted (.csv)
| Content | Meaning |
| --- | --- |
| name | Gene |
| predict | Classification of genes|
Null (.csv) {Shuffled expression matrix}
# Input ACSNI-get
ACSNI database - Output of ACSNI-run (.ptl) specified by ```-r```.
Target phenotype - Biological phenotype file (.csv) to link ACSNI subprocesses, specified by ```-v```.
The sample IDs should match the IDs in the ```-i``` analysed by ACSNI-run.
Variable type - The type of phenotype i.e "numeric" or "character", specified by ```-c```.
Outputs the strength of the associations across the subprocesses (.csv).
# Input ACSNI-split
Expression Matrix - See ``-i``` description above.
Number of splits - The number of independent cohorts to generate from `-i```.
Outputs the data splits in the current working directory.
# Extras
R functions to reproduce the downstream analyses reported in the paper are inside the folder "R".
Example runs are inside the folder "sh".
# Tutorial
An extensive tutorial on how to use ACSNI commands can be found inside the Tutorial folder.
# To clone the source repository
git clone https://github.com/caanene1/ACSNI
# Citation
ACSNI: An unsupervised machine-learning tool for prediction of tissue-specific pathway components using gene expression profiles
Chinedu Anthony Anene, Faraz Khan, Findlay Bewicke-Copley, Eleni Maniati and Jun Wang
| ACSNI | /ACSNI-1.0.6.tar.gz/ACSNI-1.0.6/README.md | README.md |
ACSpy
=====
A Python package for working with ACS motion controllers.
Installation
------------
Execute
pip install acspy
Usage
-----
### Using the `acsc` module
The `acsc` module is designed to mimic the syntax of the ACS C library that it
wraps.
```python
>>> from acspy import acsc
>>> hcomm = acsc.openCommDirect()
>>> acsc.enable(hcomm, 0)
>>> acsc.getMotorState(hcomm, 0)
{'moving': False, 'enabled': True, 'in position': True, 'accelerating': False}
>>> acsc.closeComm(hcomm)
```
### Using the `Controller` object
The `control` module provides an object-oriented interface to the controller,
making code development more intuitive. An example of its use:
```python
>>> from acspy.control import Controller
>>> controller = Controller(contype="simulator", n_axes=4)
>>> controller.connect()
>>> axis0 = controller.axes[0]
>>> axis0.enable()
>>> axis0.enabled
True
>>> axis0.ptp(500.5)
>>> axis0.rpos
500.5
>>> axis0.disable()
>>> controller.disconnect()
```
| ACSpy | /ACSpy-0.0.4.zip/ACSpy-0.0.4/README.md | README.md |
from __future__ import division, print_function
from acspy import acsc
class Controller(object):
def __init__(self, contype="simulator", n_axes=8):
self.contype = contype
self.axes = []
for n in range(n_axes):
self.axes.append(Axis(self, n))
def connect(self, address="10.0.0.100", port=701):
if self.contype == "simulator":
self.hc = acsc.openCommDirect()
elif self.contype == "ethernet":
self.hc = acsc.openCommEthernetTCP(address=address, port=port)
def enable_all(self, wait=acsc.SYNCHRONOUS):
"""Enables all axes."""
for a in self.axes:
a.enable()
def disable_all(self, wait=acsc.SYNCHRONOUS):
"""Disables all axes."""
for a in self.axes:
a.disable()
def disconnect(self):
acsc.closeComm(self.hc)
class Axis(object):
def __init__(self, controller, axisno, name=None):
if isinstance(controller, Controller):
self.controller = controller
else:
raise TypeError("Controller is not a valid Controller object")
self.axisno = axisno
if name:
controller.axisdefs[name] = axisno
def enable(self, wait=acsc.SYNCHRONOUS):
acsc.enable(self.controller.hc, self.axisno, wait)
def disable(self, wait=acsc.SYNCHRONOUS):
acsc.disable(self.controller.hc, self.axisno, wait)
def ptp(self, target, coordinates="absolute", wait=acsc.SYNCHRONOUS):
"""Performs a point to point move in either relative or absolute
(default) coordinates."""
if coordinates == "relative":
flags = acsc.AMF_RELATIVE
else:
flags = None
acsc.toPoint(self.controller.hc, flags, self.axisno, target, wait)
def ptpr(self, distance, wait=acsc.SYNCHRONOUS):
"""Performance a point to point move in relative coordinates."""
self.ptp(distance, coordinates="relative", wait=wait)
@property
def axis_state(self):
"""Returns axis state dict."""
return acsc.getAxisState(self.controller.hc, self.axisno)
@property
def motor_state(self):
"""Returns motor state dict."""
return acsc.getMotorState(self.controller.hc, self.axisno)
@property
def moving(self):
return self.motor_state["moving"]
@property
def enabled(self):
return self.motor_state["enabled"]
@property
def in_position(self):
return self.motor_state["in position"]
@property
def accelerating(self):
return self.motor_state["accelerating"]
@property
def rpos(self):
return acsc.getRPosition(self.controller.hc, self.axisno)
@property
def fpos(self):
return acsc.getFPosition(self.controller.hc, self.axisno)
@property
def rvel(self):
return acsc.getRVelocity(self.controller.hc, self.axisno)
@property
def fvel(self):
return acsc.getFVelocity(self.controller.hc, self.axisno)
@property
def vel(self):
return acsc.getVelocity(self.controller.hc, self.axisno)
@vel.setter
def vel(self, velocity):
"""Sets axis velocity."""
acsc.setVelocity(self.controller.hc, self.axisno, velocity)
@property
def acc(self):
return acsc.getAcceleration(self.controller.hc, self.axisno)
@acc.setter
def acc(self, accel):
"""Sets axis velocity."""
acsc.setAcceleration(self.controller.hc, self.axisno, accel)
@property
def dec(self):
return acsc.getDeceleration(self.controller.hc, self.axisno)
@dec.setter
def dec(self, decel):
"""Sets axis velocity."""
acsc.setDeceleration(self.controller.hc, self.axisno, decel) | ACSpy | /ACSpy-0.0.4.zip/ACSpy-0.0.4/acspy/control.py | control.py |
from __future__ import division, print_function
import ctypes
from ctypes import byref
from acspy.errors import errors
import numpy as np
import platform
# Import the ACS C library DLL
if platform.architecture()[0] == "32bit":
acs = ctypes.windll.LoadLibrary('ACSCL_x86.dll')
if platform.architecture()[0] == "64bit":
acs = ctypes.windll.LoadLibrary('ACSCL_x64.dll')
int32 = ctypes.c_long
uInt32 = ctypes.c_ulong
uInt64 = ctypes.c_ulonglong
double = ctypes.c_double
char = ctypes.c_char
p = ctypes.pointer
# Define motion flags and constants
AMF_WAIT = 0x00000001
AMF_RELATIVE = 0x00000002
AMF_VELOCITY = 0x00000004
AMF_CYCLIC = 0x00000100
AMF_CUBIC = 0x00000400
# Axis states
AST_LEAD = 0x00000001
AST_DC = 0x00000002
AST_PEG = 0x00000004
AST_PEGREADY = 0x00000010
AST_MOVE = 0x00000020
AST_ACC = 0x00000040
AST_SEGMENT = 0x00000080
AST_VELLOCK = 0x00000100
AST_POSLOCK = 0x00000200
# Motor states
MST_ENABLE = 0x00000001
MST_INPOS = 0x00000010
MST_MOVE = 0x00000020
MST_ACC = 0x00000040
SYNCHRONOUS = None
INVALID = -1
IGNORE = -1
ASYNCHRONOUS = -2
NONE = -1
COUNTERCLOCKWISE = 1
CLOCKWISE = -1
INT_BINARY = 4
REAL_BINARY = 8
INT_TYPE = 1
REAL_TYPE = 2
def openCommDirect():
"""Open simulator. Returns communication handle."""
hcomm = acs.acsc_OpenCommDirect()
if hcomm == -1:
error = getLastError()
if error in errors:
print("ACS SPiiPlus Error", str(error) + ":", errors[error])
else: print("ACS SPiiPlus Error", error)
return hcomm
def openCommEthernetTCP(address="10.0.0.100", port=701):
"""Address is a string. Port is an int.
Returns communication handle."""
hcomm = acs.acsc_OpenCommEthernetTCP(address, port)
return hcomm
def setVelocity(hcomm, axis, vel, wait=SYNCHRONOUS):
"""Sets axis velocity."""
acs.acsc_SetVelocity(hcomm, axis, double(vel), wait)
def setAcceleration(hcomm, axis, acc, wait=SYNCHRONOUS):
"""Sets axis acceleration."""
acs.acsc_SetAcceleration(hcomm, axis, double(acc), wait)
def setDeceleration(hcomm, axis, dec, wait=SYNCHRONOUS):
"""Sets axis deceleration."""
acs.acsc_SetDeceleration(hcomm, axis, double(dec), wait)
def setJerk(hcomm, axis, jerk, wait=SYNCHRONOUS):
acs.acsc_SetJerk(hcomm, axis, double(jerk), wait)
def getMotorEnabled(hcomm, axis, wait=SYNCHRONOUS):
"""Checks if motor is enabled."""
state = ctypes.c_int()
acs.acsc_GetMotorState(hcomm, axis, byref(state), wait)
state = state.value
return hex(state)[-1] == "1"
def getMotorState(hcomm, axis, wait=SYNCHRONOUS):
"""Gets the motor state. Returns a dictionary with the following keys:
* "enabled"
* "in position"
* "moving"
* "accelerating"
"""
state = ctypes.c_int()
acs.acsc_GetMotorState(hcomm, axis, byref(state), wait)
state = state.value
mst = {"enabled" : hex(state)[-1] == "1",
"in position" : hex(state)[-2] == "1",
"moving" : hex(state)[-2] == "2",
"accelerating" : hex(state)[-2] == "4"}
return mst
def getAxisState(hcomm, axis, wait=SYNCHRONOUS):
"""Gets the axis state. Returns a dictionary with the following keys
* "lead"
* "DC"
* "PEG"
* "PEGREADY"
* "moving"
* "accelerating"
* "segment"
* "vel lock"
* "pos lock"
"""
state = ctypes.c_int()
acs.acsc_GetAxisState(hcomm, axis, byref(state), wait)
state = state.value
ast = {"lead" : hex(state)[-1] == "1",
"DC" : hex(state)[-1] == "2",
"PEG" : hex(state)[-1] == "4",
"PEGREADY" : hex(state)[-2] == "1",
"moving" : hex(state)[-2] == "2",
"accelerating" : hex(state)[-2] == "4",
"segment" : hex(state)[-2] == "8",
"vel lock" : hex(state)[-3] == "1",
"pos lock" : hex(state)[-3] == "2"}
return ast
def registerEmergencyStop():
"""Register the software emergency stop."""
acs.acsc_RegisterEmergencyStop()
def jog(hcomm, flags, axis, vel, wait=SYNCHRONOUS):
"""Jog move."""
acs.acsc_Jog(hcomm, flags, axis, double(vel), wait)
def toPoint(hcomm, flags, axis, target, wait=SYNCHRONOUS):
"""Point to point move."""
acs.acsc_ToPoint(hcomm, flags, axis, double(target), wait)
def toPointM(hcomm, flags, axes, target, wait=SYNCHRONOUS):
"""Initiates a multi-axis move to the specified target. Axes and target
are entered as tuples. Set flags as None for absolute coordinates."""
if len(axes) != len(target):
print("Number of axes and coordinates don't match!")
else:
target_array = double*len(axes)
axes_array = ctypes.c_int*(len(axes) + 1)
target_c = target_array()
axes_c = axes_array()
for n in range(len(axes)):
target_c[n] = target[n]
axes_c[n] = axes[n]
axes_c[-1] = -1
errorHandling(acs.acsc_ToPointM(hcomm, flags, axes_c, target_c, wait))
def enable(hcomm, axis, wait=SYNCHRONOUS):
acs.acsc_Enable(hcomm, int32(axis), wait)
def disable(hcomm, axis, wait=SYNCHRONOUS):
acs.acsc_Disable(hcomm, int32(axis), wait)
def getRPosition(hcomm, axis, wait=SYNCHRONOUS):
pos = double()
acs.acsc_GetRPosition(hcomm, axis, p(pos), wait)
return pos.value
def getFPosition(hcomm, axis, wait=SYNCHRONOUS):
pos = double()
acs.acsc_GetFPosition(hcomm, axis, byref(pos), wait)
return pos.value
def getRVelocity(hcomm, axis, wait=SYNCHRONOUS):
rvel = double()
acs.acsc_GetRVelocity(hcomm, axis, byref(rvel), wait)
return rvel.value
def getFVelocity(hcomm, axis, wait=SYNCHRONOUS):
vel = double()
acs.acsc_GetFVelocity(hcomm, axis, byref(vel), wait)
return vel.value
def getVelocity(hcomm, axis, wait=SYNCHRONOUS):
"""Returns current velocity for specified axis."""
vel = double()
acs.acsc_GetVelocity(hcomm, axis, byref(vel), wait)
return vel.value
def getAcceleration(hcomm, axis, wait=SYNCHRONOUS):
"""Returns current acceleration for specified axis."""
val = double()
acs.acsc_GetAcceleration(hcomm, axis, byref(val), wait)
return val.value
def getDeceleration(hcomm, axis, wait=SYNCHRONOUS):
"""Returns current deceleration for specified axis."""
val = double()
acs.acsc_GetDeceleration(hcomm, axis, byref(val), wait)
return val.value
def closeComm(hcomm):
"""Closes communication with the controller."""
acs.acsc_CloseComm(hcomm)
def unregisterEmergencyStop():
acs.acsc_UnregisterEmergencyStop()
def getLastError():
return acs.acsc_GetLastError()
def runBuffer(hcomm, buffno, label=None, wait=SYNCHRONOUS):
"""Runs a buffer in the controller."""
if label is not None:
label=label.encode()
acs.acsc_RunBuffer(hcomm, int32(buffno), label, wait)
def stopBuffer(hcomm, buffno, wait=SYNCHRONOUS):
"""Stops a buffer running in the controller."""
acs.acsc_StopBuffer(hcomm, int32(buffno), wait)
def getProgramState(hc, nbuf, wait=SYNCHRONOUS):
"""Returns program state"""
state = ctypes.c_int()
acs.acsc_GetProgramState(hc, nbuf, byref(state), wait)
return state.value
def halt(hcomm, axis, wait=SYNCHRONOUS):
"""Halts motion on specified axis."""
acs.acsc_Halt(hcomm, axis, wait)
def declareVariable(hcomm, vartype, varname, wait=SYNCHRONOUS):
"""Declare a variable in the controller."""
acs.acsc_DeclareVariable(hcomm, vartype, varname.encode(), wait)
def readInteger(hcomm, buffno, varname, from1=None, to1=None, from2=None,
to2=None, wait=SYNCHRONOUS):
"""Reads an integer(s) in the controller."""
intread = ctypes.c_int()
acs.acsc_ReadInteger(hcomm, buffno, varname.encode(), from1, to1, from2,
to2, p(intread), wait)
return intread.value
def writeInteger(hcomm, variable, val_to_write, nbuff=NONE, from1=NONE,
to1=NONE, from2=NONE, to2=NONE, wait=SYNCHRONOUS):
"""Writes an integer variable to the controller."""
val = ctypes.c_int(val_to_write)
acs.acsc_WriteInteger(hcomm, nbuff, variable.encode(), from1, to1,
from2, to2, p(val), wait)
def readReal(hcomm, buffno, varname, from1=NONE, to1=NONE, from2=NONE,
to2=NONE, wait=SYNCHRONOUS):
"""Read real variable (scalar or array) from the controller."""
if from2 == NONE and to2 == NONE and from1 != NONE:
values = np.zeros((to1-from1+1), dtype=np.float64)
pointer = values.ctypes.data
elif from2 != NONE:
values = np.zeros((to1-from1+1, to2-from2+1), dtype=np.float64)
pointer = values.ctypes.data
else:
values = double()
pointer = byref(values)
acs.acsc_ReadReal(hcomm, buffno, varname.encode(), from1, to1, from2, to2,
pointer, wait)
if from1 != NONE:
return values
else:
return values.value
def writeReal(hcomm, varname, val_to_write, nbuff=NONE, from1=NONE, to1=NONE,
from2=NONE, to2=NONE, wait=SYNCHRONOUS):
"""Writes a real value to the controller."""
val = ctypes.c_double(val_to_write)
acs.acsc_WriteReal(hcomm, nbuff, varname.encode(), from1, to1,
from2, to2, p(val), wait)
def uploadDataFromController(hcomm, src, srcname, srcnumformat, from1, to1,
from2, to2, destfilename, destnumformat, btranspose, wait=0):
acs.acsc_UploadDataFromController(hcomm, src, srcname, srcnumformat,
from1, to1, from2, to2, destfilename, destnumformat,
btranspose, wait)
def loadBuffer(hcomm, buffnumber, program, count=512, wait=SYNCHRONOUS):
"""Load a buffer into the ACS controller."""
prgbuff = ctypes.create_string_buffer(str(program).encode(), count)
rv = acs.acsc_LoadBuffer(hcomm, buffnumber, byref(prgbuff), count, wait)
errorHandling(rv)
def loadBuffersFromFile(hcomm, filename, wait=SYNCHRONOUS):
rv = acs.acsc_LoadBuffersFromFile(hcomm, filename.encode(), wait)
errorHandling(rv)
def spline(hcomm, flags, axis, period, wait=SYNCHRONOUS):
rv = acs.acsc_Spline(hcomm, flags, axis, double(period), wait)
errorHandling(rv)
def addPVPoint(hcomm, axis, point, velocity, wait=SYNCHRONOUS):
acs.acsc_AddPVPoint(hcomm, axis, double(point), double(velocity), wait)
def addPVTPoint(hcomm, axis, point, velocity, dt, wait=SYNCHRONOUS):
acs.acsc_AddPVTPoint(hcomm, axis, double(point), double(velocity),
double(dt), wait)
def multiPoint(hcomm, flags, axis, dwell, wait=SYNCHRONOUS):
acs.acsc_MultiPoint(hcomm, flags, axis, double(dwell), wait)
def addPoint(hcomm, axis, point, wait=SYNCHRONOUS):
acs.acsc_AddPoint(hcomm, axis, double(point), wait)
def extAddPoint(hcomm, axis, point, rate, wait=SYNCHRONOUS):
acs.acsc_ExtAddPoint(hcomm, axis, double(point), double(rate), wait)
def endSequence(hcomm, axis, wait=SYNCHRONOUS):
return acs.acsc_EndSequence(hcomm, axis, wait)
def go(hcomm, axis, wait=SYNCHRONOUS):
acs.acsc_Go(hcomm, axis, wait)
def getOutput(hcomm, port, bit, wait=SYNCHRONOUS):
"""Returns the value of a digital output."""
val = int32()
acs.acsc_GetOutput(hcomm, port, bit, byref(val), wait)
return val.value
def setOutput(hcomm, port, bit, val, wait=SYNCHRONOUS):
"""Sets the value of a digital output."""
acs.acsc_SetOutput(hcomm, port, bit, val, wait)
def errorHandling(returnvalue):
if returnvalue == 0:
error = getLastError()
if error in errors:
print("Error", error, errors[error])
else: print("Error", error)
def printLastError():
error = getLastError()
if error != 0:
if error in errors:
print(errors[error])
else:
print("ACS SPiiPlus Error", error)
if __name__ == "__main__":
"""Some testing can go here"""
hc = openCommEthernetTCP()
print(getOutput(hc, 1, 16))
closeComm(hc) | ACSpy | /ACSpy-0.0.4.zip/ACSpy-0.0.4/acspy/acsc.py | acsc.py |
from __future__ import annotations
from abc import ABC, abstractmethod
from typing import List, TypedDict, Union
from .finite import (FiniteAdjunction, FiniteDP, FiniteFunctor, FiniteGroup, FiniteMap, FiniteMonoid,
FiniteNaturalTransformation, FinitePoset, FiniteProfunctor, FiniteRelation,
FiniteSemigroup, FiniteSet)
__all__ = [
"FiniteSet_desc",
"FiniteMap_desc",
"FiniteSetUnion_desc",
"FiniteSetProduct_desc",
"FiniteSetDisjointUnion_desc",
"DirectElements_desc",
"FiniteProfunctor_desc",
"IOHelper",
"FiniteRelation_desc",
"FiniteFunctor_desc",
"FiniteSemigroup_desc",
"FiniteGroup_desc",
"FiniteMonoid_desc",
"FiniteSetRepresentation",
"FiniteMapRepresentation",
"FiniteAdjunction_desc",
"FiniteGroupRepresentation",
"FiniteAdjunctionRepresentation",
"FinitePosetRepresentation",
"FiniteNaturalTransformationRepresentation",
"FiniteFunctorRepresentation",
"FiniteRelationRepresentation",
"FiniteProfunctorRepresentation",
"FiniteMonoidRepresentation",
"FiniteSemigroupRepresentation",
"FiniteDPRepresentation",
]
from .helper import IOHelper
from .types import ConcreteRepr
class DirectElements_desc(TypedDict):
elements: List[ConcreteRepr]
class FiniteSetProduct_desc(TypedDict):
product: List[FiniteSet_desc]
class FiniteSetDisjointUnion_desc(TypedDict):
disunion: List[FiniteSet_desc]
class FiniteSetUnion_desc(TypedDict):
union: List[FiniteSet_desc]
FiniteSet_desc = Union[
DirectElements_desc,
FiniteSetProduct_desc,
FiniteSetUnion_desc,
FiniteSetDisjointUnion_desc,
]
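# Illustrative sketch (not part of the interface): the "direct elements"
# description of a finite set simply lists the concrete representations of
# its elements. The dict below is an assumed example of that shape.
_EXAMPLE_SET_DESC: FiniteSet_desc = {"elements": [1, 2, 3]}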
class FiniteMap_desc(TypedDict):
source: FiniteSet_desc
target: FiniteSet_desc
values: List[List[ConcreteRepr]]
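# Illustrative sketch (assumed encoding): a map {"a" -> "x", "b" -> "y"}
# described by its source, target, and a list of [input, output] pairs,
# matching the fields of FiniteMap_desc above.
_EXAMPLE_MAP_DESC: FiniteMap_desc = {
    "source": {"elements": ["a", "b"]},
    "target": {"elements": ["x", "y"]},
    "values": [["a", "x"], ["b", "y"]],
}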
class FiniteMapRepresentation(ABC):
@abstractmethod
def load(self, h: IOHelper, s: FiniteMap_desc) -> FiniteMap:
...
@abstractmethod
def save(self, h: IOHelper, m: FiniteMap) -> FiniteMap_desc:
...
class FiniteSetRepresentation(ABC):
@abstractmethod
def load(self, h: IOHelper, data: FiniteSet_desc) -> FiniteSet:
""" Load a finite set from data structure.
Throw InvalidFormat if the format is incorrect.
"""
@abstractmethod
def save(self, h: IOHelper, f: FiniteSet) -> FiniteSet_desc:
""" Serializes into a data structure """
class FiniteSemigroup_desc(TypedDict):
carrier: FiniteSet_desc
compose: FiniteMap_desc
class FiniteSemigroupRepresentation(ABC):
@abstractmethod
def load(self, h: IOHelper, s: FiniteSemigroup_desc) -> FiniteSemigroup:
""" Load the data """
@abstractmethod
def save(self, h: IOHelper, m: FiniteSemigroup) -> FiniteSemigroup_desc:
""" Save the data """
class FiniteNaturalTransformation_desc(TypedDict):
pass
class FiniteMonoid_desc(FiniteSemigroup_desc):
# carrier: FiniteSet_desc
# compose: FiniteMap_desc
neutral: object
class FiniteMonoidRepresentation(ABC):
@abstractmethod
def load(self, h: IOHelper, s: FiniteMonoid_desc) -> FiniteMonoid:
""" Load the data """
@abstractmethod
def save(self, h: IOHelper, m: FiniteMonoid) -> FiniteMonoid_desc:
""" Save the data """
class FiniteGroup_desc(FiniteMonoid_desc):
# carrier: FiniteSet_desc
# compose: FiniteMap_desc
# neutral: object
inv: FiniteMap_desc
class FinitePoset_desc(TypedDict):
pass
class FiniteGroupRepresentation(ABC):
@abstractmethod
def load(self, h: IOHelper, s: FiniteGroup_desc) -> FiniteGroup:
""" Load the data """
@abstractmethod
def save(self, h: IOHelper, m: FiniteGroup) -> FiniteGroup_desc:
""" Save the data """
class FinitePosetRepresentation(ABC):
@abstractmethod
def load(self, h: IOHelper, s: FinitePoset_desc) -> FinitePoset:
""" Load the data """
@abstractmethod
def save(self, h: IOHelper, m: FinitePoset) -> FinitePoset_desc:
""" Save the data """
class FiniteRelation_desc(TypedDict):
source: FiniteSet_desc
target: FiniteSet_desc
values: List[List[object]]
class FiniteRelationRepresentation(ABC):
@abstractmethod
def load(self, h: IOHelper, data: FiniteRelation_desc) -> FiniteRelation:
""" Load a finite set from given YAML data"""
@abstractmethod
def save(self, h: IOHelper, f: FiniteRelation) -> FiniteRelation_desc:
""" Load a finite set from given YAML data"""
class FiniteCategory_desc(TypedDict):
...
class FiniteFunctor_desc(TypedDict):
source: FiniteCategory_desc
target: FiniteCategory_desc
    f_ob: FiniteMap_desc
    f_mor: FiniteMap_desc
class FiniteFunctorRepresentation(ABC):
@abstractmethod
def load(self, h: IOHelper, data: FiniteFunctor_desc) -> FiniteFunctor:
...
@abstractmethod
def save(self, h: IOHelper, f: FiniteFunctor) -> FiniteFunctor_desc:
...
class FiniteNaturalTransformationRepresentation(ABC):
@abstractmethod
def load(self, h: IOHelper, data: FiniteNaturalTransformation_desc) -> FiniteNaturalTransformation:
...
@abstractmethod
def save(self, h: IOHelper, f: FiniteNaturalTransformation) -> FiniteNaturalTransformation_desc:
...
class FiniteAdjunction_desc(TypedDict):
...
class FiniteAdjunctionRepresentation(ABC):
@abstractmethod
def load(self, h: IOHelper, data: FiniteAdjunction_desc) -> FiniteAdjunction:
...
@abstractmethod
def save(self, h: IOHelper, f: FiniteAdjunction) -> FiniteAdjunction_desc:
...
class FiniteProfunctor_desc(TypedDict):
...
class FiniteProfunctorRepresentation(ABC):
@abstractmethod
def load(self, h: IOHelper, data: FiniteProfunctor_desc) -> FiniteProfunctor:
...
@abstractmethod
def save(self, h: IOHelper, f: FiniteProfunctor) -> FiniteProfunctor_desc:
...
class FiniteDP_desc(TypedDict):
pass
class FiniteDPRepresentation(ABC):
@abstractmethod
def load(self, yaml_data: FiniteDP_desc) -> FiniteDP:
...
@abstractmethod
def save(self, f: FiniteDP) -> FiniteDP_desc:
... | ACT4E-exercises | /ACT4E-exercises-2021.1.2103061245.tar.gz/ACT4E-exercises-2021.1.2103061245/src/act4e_interfaces/representations.py | representations.py |
from __future__ import annotations
from abc import ABC, abstractmethod
from typing import Callable, Iterator, List, Optional, overload, Set, Tuple
from .helper import IOHelper
from .types import ConcreteRepr, Element, Morphism, Object
__all__ = [
"InvalidFormat",
"InvalidValue",
"EnumerableSetsOperations",
"EnumerableSet",
"FiniteSetUnion",
"Setoid",
"SetoidOperations",
"SetProduct",
"SetDisjointUnion",
"MakeSetUnion",
"MakeSetDisjointUnion",
"MakeSetIntersection",
"MakeSetProduct",
"MonotoneMap",
"Mapping",
"Monoid",
"MonoidalCategory",
"MonoidalCategory",
"FiniteMap",
"FiniteDP",
"FiniteSet",
"FiniteSetProperties",
"FiniteSetProduct",
"FiniteProfunctor",
"FiniteSetDisjointUnion",
"FiniteFunctor",
"FiniteSemigroupConstruct",
"FiniteSemigroup",
"FinitePoset",
"FiniteAdjunction",
"FiniteMonoidalPoset",
"FiniteMonoidalCategory",
"FiniteCategory",
"FiniteEnrichedCategory",
"FiniteRelation",
"FiniteSemiCategory",
"FiniteMonoid",
"FiniteCategoryOperations",
"FiniteDPOperations",
"FiniteAdjunctionsOperations",
"FiniteProfunctorOperations",
"FiniteGroup",
"FiniteLattice",
"FiniteRelationProperties",
"FiniteRelationOperations",
"FiniteEndorelationOperations",
"FiniteMonotoneMapProperties",
"FiniteEndorelationProperties",
"FinitePosetOperations",
"FinitePosetConstructors",
"FinitePosetSubsetProperties",
"FinitePosetProperties",
"FiniteMapOperations",
"FinitePosetSubsetOperations",
"FiniteNaturalTransformation",
"Semigroup",
"Lattice", "JoinSemilattice", "Adjunction",
'MeetSemilattice', "MonoidalPosetOperations", "SemiCategory",
"SemiBiCategory", "Poset", "Category", "CategoryOperations", 'DP', 'DPCategory', 'DPI',
"DPConstructors", "InvalidFormat",
]
class Setoid(ABC):
"""
A setoid is something to which elements may belong,
has a way of distinguishing elements,
and is able to (de)serialize its elements.
"""
@abstractmethod
def contains(self, x: Element) -> bool:
""" Returns true if the element is in the set. """
def equal(self, x: Element, y: Element) -> bool:
""" Returns True if the two elements are to be considered equal. """
return x == y # default is to use the Python equality
def apart(self, x: Element, y: Element) -> bool:
return not self.equal(x, y)
@abstractmethod
def save(self, h: IOHelper, x: Element) -> ConcreteRepr:
...
@abstractmethod
def load(self, h: IOHelper, o: ConcreteRepr) -> Element:
...
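# Minimal sketch (not part of the interface) of a concrete Setoid, assuming
# that elements are plain Python integers and that an integer is its own
# concrete representation. InvalidFormat is defined below in this module.
class _IntSetoidSketch(Setoid):
    def contains(self, x: Element) -> bool:
        return isinstance(x, int)
    def save(self, h: IOHelper, x: Element) -> ConcreteRepr:
        return x
    def load(self, h: IOHelper, o: ConcreteRepr) -> Element:
        if not isinstance(o, int):
            raise InvalidFormat(f"expected an int, got {o!r}")
        return o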
class Mapping(ABC):
@abstractmethod
def source(self) -> Setoid:
...
@abstractmethod
def target(self) -> Setoid:
...
@abstractmethod
def __call__(self, a: Element) -> Element:
...
class EnumerableSet(Setoid, ABC):
@abstractmethod
def elements(self) -> Iterator[Element]:
""" Note: possibly non-terminating. """
class FiniteSet(EnumerableSet, ABC):
""" A finite set has a finite size. """
@abstractmethod
def size(self) -> int:
""" Return the size of the finite set. """
class FiniteMap(Mapping, ABC):
@abstractmethod
def source(self) -> FiniteSet:
...
@abstractmethod
def target(self) -> FiniteSet:
...
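# Sketch (illustrative only): a FiniteMap given by an explicit lookup table.
# The source, target, and table are supplied by the caller, e.g. using the
# list-backed set sketch above; nothing here is prescribed by the interface.
class _TableFiniteMapSketch(FiniteMap):
    def __init__(self, source: FiniteSet, target: FiniteSet, table: dict):
        self._source = source
        self._target = target
        self._table = table
    def source(self) -> FiniteSet:
        return self._source
    def target(self) -> FiniteSet:
        return self._target
    def __call__(self, a: Element) -> Element:
        return self._table[a]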
class InvalidFormat(Exception):
""" Raise this if the input data to parse is invalid. """
class InvalidValue(ValueError):
""" Raise this if the input does not make sense. """
class FiniteSetProperties(ABC):
@abstractmethod
def is_subset(self, a: FiniteSet, b: FiniteSet) -> bool:
""" True if `a` is a subset of `b`. """
def is_equal(self, a: FiniteSet, b: FiniteSet) -> bool:
return self.is_subset(a, b) and self.is_subset(b, a)
def is_strict_subset(self, a: FiniteSet, b: FiniteSet) -> bool:
return self.is_subset(a, b) and not self.is_subset(b, a)
class SetProduct(Setoid, ABC):
""" A set product is a setoid that can be factorized. """
@abstractmethod
def components(self) -> List[Setoid]:
""" Returns the components of the product"""
@abstractmethod
def pack(self, *args: Element) -> Element:
""" Packs an element of each setoid into an element of the mapping"""
@abstractmethod
def projections(self) -> List[Mapping]:
""" Returns the projection mappings. """
class FiniteSetProduct(FiniteSet, SetProduct, ABC):
""" Specialization of SetProduct where we deal with FiniteSets"""
@abstractmethod
def components(self) -> List[FiniteSet]:
""" Returns the components """
@abstractmethod
def projections(self) -> List[FiniteMap]:
""" Returns the projection mappings. """
class SetUnion(Setoid, ABC):
""" A set product is a setoid that can be factorized. """
@abstractmethod
def components(self) -> List[Setoid]:
""" Returns the components of the union"""
class EnumerableSetUnion(EnumerableSet, SetUnion, ABC):
""" Specialization of SetUnion where we deal with FiniteSets"""
@abstractmethod
def components(self) -> List[EnumerableSet]:
""" Returns the components of the union """
class FiniteSetUnion(FiniteSet, EnumerableSetUnion, ABC):
""" Specialization of SetUnion where we deal with FiniteSets"""
@abstractmethod
def components(self) -> List[FiniteSet]:
""" Returns the components of the union """
class SetDisjointUnion(Setoid, ABC):
@abstractmethod
def components(self) -> List[Setoid]:
""" Returns the components of the union """
@abstractmethod
def injections(self) -> List[Mapping]:
""" Returns the projection mappings. """
class FiniteSetDisjointUnion(FiniteSet, SetDisjointUnion, ABC):
""" Specialization of SetProduct where we deal with FiniteSets"""
@abstractmethod
def components(self) -> List[FiniteSet]:
...
@abstractmethod
def injections(self) -> List[FiniteMap]:
...
class MakeSetProduct(ABC):
    @overload
    def product(self, components: List[FiniteSet]) -> FiniteSetProduct:
        ...
    @overload
    def product(self, components: List[Setoid]) -> SetProduct:
        ...
    @abstractmethod
    def product(self, components: List[Setoid]) -> SetProduct:
        ...
class MakeSetIntersection(ABC):
@abstractmethod
def intersection(self, components: List[FiniteSet]) -> FiniteSet:
...
class MakeSetUnion(ABC):
@overload
def union(self, components: List[FiniteSet]) -> FiniteSetUnion:
...
@overload
def union(self, components: List[EnumerableSet]) -> EnumerableSetUnion:
...
@abstractmethod
def union(self, components: List[Setoid]) -> SetUnion:
...
class MakeSetDisjointUnion(ABC):
    @overload
    def compute_disjoint_union(self, components: List[FiniteSet]) -> FiniteSetDisjointUnion:
        ...
    @overload
    def compute_disjoint_union(self, components: List[Setoid]) -> SetDisjointUnion:
        ...
    @abstractmethod
    def compute_disjoint_union(self, components: List[Setoid]) -> SetDisjointUnion:
        ...
class Relation(ABC):
@abstractmethod
def source(self) -> Setoid:
""" Returns a setoid """
@abstractmethod
def target(self) -> Setoid:
""" Returns a setoid """
@abstractmethod
def holds(self, a: Element, b: Element) -> bool:
""" Returns true if the two elements are related """
class FiniteRelation(Relation, ABC):
@abstractmethod
def source(self) -> FiniteSet:
""" Returns a finite set"""
@abstractmethod
def target(self) -> FiniteSet:
""" Returns a finite set"""
class FiniteRelationProperties(ABC):
@abstractmethod
def is_surjective(self, fr: FiniteRelation) -> bool:
""" Return True if the relation is surjective. """
@abstractmethod
def is_injective(self, fr: FiniteRelation) -> bool:
""" Return True if the relation is injective. """
@abstractmethod
def is_defined_everywhere(self, fr: FiniteRelation) -> bool:
""" Return True if the relation is defined everywhere. """
@abstractmethod
def is_single_valued(self, fr: FiniteRelation) -> bool:
""" Return True if the relation is single-valued """
class FiniteRelationOperations(ABC):
@abstractmethod
def transpose(self, fr: FiniteRelation) -> FiniteRelation:
""" Create the transposed of a relation """
class FiniteEndorelationProperties(ABC):
@abstractmethod
def is_reflexive(self, fr: FiniteRelation) -> bool:
""" Return True if the relation is reflexive. """
@abstractmethod
def is_irreflexive(self, fr: FiniteRelation) -> bool:
""" Return True if the relation is irreflexive. """
@abstractmethod
def is_transitive(self, fr: FiniteRelation) -> bool:
""" Return True if the relation is transitive. """
@abstractmethod
def is_symmetric(self, fr: FiniteRelation) -> bool:
""" Return True if the relation is symmetric """
@abstractmethod
def is_antisymmetric(self, fr: FiniteRelation) -> bool:
""" Return True if the relation is antisymmetric """
@abstractmethod
def is_asymmetric(self, fr: FiniteRelation) -> bool:
""" Return True if the relation is asymmetric """
class FiniteEndorelationOperations(ABC):
@abstractmethod
def transitive_closure(self, fr: FiniteRelation) -> FiniteRelation:
""" Returns transitive closure """
class FiniteMapOperations(ABC):
@abstractmethod
def compose(self, f: FiniteMap, g: FiniteMap) -> FiniteMap:
""" compose two functions"""
@abstractmethod
def as_relation(self, f: FiniteMap) -> FiniteRelation:
""" Load the data """
class Semigroup(ABC):
@abstractmethod
def carrier(self) -> Setoid:
...
@abstractmethod
def compose(self, a: Element, b: Element) -> Element:
...
class FiniteSemigroup(Semigroup, ABC):
@abstractmethod
def carrier(self) -> FiniteSet:
...
class FiniteSemigroupConstruct(ABC):
@abstractmethod
def free(self, fs: FiniteSet) -> FiniteSemigroup:
""" Construct the free semigroup on a set. """
class Monoid(Semigroup, ABC):
@abstractmethod
def identity(self) -> Element:
...
class Group(Monoid, ABC):
@abstractmethod
def inverse(self, e: Element) -> Element:
""" Returns the inverse of an element"""
class FiniteMonoid(Monoid, FiniteSemigroup, ABC):
""""""
class FiniteGroup(Group, FiniteMonoid, ABC):
...
# TODO: equational theories
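# Sketch (illustrative only): the cyclic group Z_n, i.e. {0, ..., n-1} under
# addition modulo n, reusing the list-backed set sketch above.
# For example, _CyclicGroupSketch(4).compose(3, 2) == 1.
class _CyclicGroupSketch(FiniteGroup):
    def __init__(self, n: int):
        self._n = n
    def carrier(self) -> FiniteSet:
        return _ListFiniteSetSketch(list(range(self._n)))
    def compose(self, a: Element, b: Element) -> Element:
        return (a + b) % self._n
    def identity(self) -> Element:
        return 0
    def inverse(self, e: Element) -> Element:
        return (-e) % self._n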
class Poset(ABC):
@abstractmethod
def carrier(self) -> Setoid:
...
@abstractmethod
def leq(self, a: Element, b: Element) -> bool:
...
class FinitePoset(Poset, ABC):
""" Implementation of finite posets. """
@abstractmethod
def carrier(self) -> FiniteSet:
...
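# Sketch (illustrative only): the divisibility poset on {1, 2, 3, 6}, where
# a <= b iff a divides b; e.g. leq(2, 6) is True while leq(2, 3) is False.
class _DividesPosetSketch(FinitePoset):
    def carrier(self) -> FiniteSet:
        return _ListFiniteSetSketch([1, 2, 3, 6])
    def leq(self, a: Element, b: Element) -> bool:
        return b % a == 0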
class FinitePosetProperties(ABC):
@abstractmethod
def width(self, fp: FinitePoset) -> int:
""" Return the width of the poset. """
@abstractmethod
def height(self, fp: FinitePoset) -> int:
""" Return the height of the poset. """
class FinitePosetConstructors(ABC):
@abstractmethod
def discrete(self, s: FiniteSet) -> FinitePoset:
""" Creates the discrete poset from any set. """
@abstractmethod
def powerset(self, s: FiniteSet) -> FinitePoset:
""" Creates the powerset poset """
@abstractmethod
def uppersets(self, s: FinitePoset) -> FinitePoset:
""" Creates the upperset poset """
@abstractmethod
def lowersets(self, s: FinitePoset) -> FinitePoset:
""" Creates the lowersets poset """
@abstractmethod
def antichains(self, s: FinitePoset) -> FiniteSet:
""" Creates the antichain set """
@abstractmethod
def intervals(self, s: FinitePoset) -> FinitePoset:
""" Computes the poset of intervals. """
@abstractmethod
def intervals2(self, s: FinitePoset) -> FinitePoset:
""" Computes the other of intervals. """
class FinitePosetSubsetProperties(ABC):
@abstractmethod
def is_chain(self, fp: FinitePoset, s: FiniteSet) -> bool:
""" Computes if the subset is a chain. """
@abstractmethod
def is_antichain(self, fp: FinitePoset, s: FiniteSet) -> bool:
""" Computes if the subset is an antichain. """
class FinitePosetSubsetOperations(ABC):
@abstractmethod
def upperclosure(self, fp: FinitePoset, s: Set[Element]) -> Set[Element]:
""" Computes the upper closure of an element"""
@abstractmethod
def lowerclosure(self, fp: FinitePoset, s: Set[Element]) -> Set[Element]:
""" Computes the lower closure of an element"""
@abstractmethod
def maximal(self, fp: FinitePoset, s: Set[Element]) -> Set[Element]:
""" Computes the maximal elements in a subset of the poset"""
@abstractmethod
def minimal(self, fp: FinitePoset, s: Set[Element]) -> Set[Element]:
""" Computes the minimal elements in a subset of the poset"""
@abstractmethod
def infimum(self, fp: FinitePoset, s: Set[Element]) -> Optional[Element]:
""" Computes the infimum for the subset, or None if one does not exist. """
@abstractmethod
def supremum(self, fp: FinitePoset, s: Set[Element]) -> Optional[Element]:
""" Computes the supremum for the subset, or None if one does not exist. """
@abstractmethod
def meet(self, fp: FinitePoset, s: Set[Element]) -> Optional[Element]:
""" Computes the meet for the subset, or None if one does not exist. """
@abstractmethod
def join(self, fp: FinitePoset, s: Set[Element]) -> Optional[Element]:
""" Computes the join for the subset, or None if one does not exist. """
class FinitePosetOperations(ABC):
@abstractmethod
    def opposite(self, p: FinitePoset) -> FinitePoset:
...
@abstractmethod
def product(self, p1: FinitePoset, p2: FinitePoset) -> FinitePoset:
...
@abstractmethod
def disjoint_union(self, p1: FinitePoset, p2: FinitePoset) -> FinitePoset:
...
class MonotoneMap(Mapping, ABC):
    @abstractmethod
    def source_poset(self) -> Poset:
        ...
    @abstractmethod
    def target_poset(self) -> Poset:
        ...
class FiniteMonotoneMapProperties(ABC):
@abstractmethod
def is_monotone(self, p1: FinitePoset, p2: FinitePoset, m: FiniteMap) -> bool:
""" Check if a map is monotone. """
@abstractmethod
def is_antitone(self, p1: FinitePoset, p2: FinitePoset, m: FiniteMap) -> bool:
""" Check if a map is antitone. """
class FiniteMonoidalPoset(ABC):
""" Implementation of finite posets. """
@abstractmethod
def poset(self) -> FinitePoset:
...
@abstractmethod
def monoid(self) -> FiniteMonoid:
...
class MonoidalPosetOperations(ABC):
""" Implementation of finite posets. """
@abstractmethod
def is_monoidal_poset(self, fp: FinitePoset, fm: FiniteMonoid) -> bool:
""" Check that the pair of poset and monoid make together a monoidal poset."""
class MeetSemilattice(Poset, ABC):
@abstractmethod
def meet(self, x: Element, y: Element) -> Element:
...
@abstractmethod
def top(self) -> Element:
...
class JoinSemilattice(Poset, ABC):
@abstractmethod
def join(self, x: Element, y: Element) -> Element:
...
@abstractmethod
def bottom(self) -> Element:
...
class Lattice(JoinSemilattice, MeetSemilattice, ABC):
pass
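# Sketch (illustrative only): the two-element Boolean lattice on
# {False, True}, with False <= True, meet = "and", join = "or".
class _BoolLatticeSketch(Lattice):
    def carrier(self) -> Setoid:
        return _ListFiniteSetSketch([False, True])
    def leq(self, a: Element, b: Element) -> bool:
        return (not a) or b
    def meet(self, x: Element, y: Element) -> Element:
        return x and y
    def top(self) -> Element:
        return True
    def join(self, x: Element, y: Element) -> Element:
        return x or y
    def bottom(self) -> Element:
        return False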
class FiniteLattice(ABC):
@abstractmethod
def carrier(self) -> FiniteSet:
...
class SemiBiCategory(ABC):
@abstractmethod
def objects(self) -> Setoid:
...
@abstractmethod
def homs(self, ob1: Object, ob2: Object) -> Setoid:
...
@abstractmethod
def legs(self, m: Morphism) -> Tuple[Object, Object]:
""" Return source and target of the morphism """
class SemiCategory(SemiBiCategory, ABC):
...
class Category(SemiCategory, ABC):
@abstractmethod
def identity(self, ob: Object) -> Morphism:
""" Identity for the object """
class FiniteSemiCategory(SemiCategory, ABC):
@abstractmethod
def objects(self) -> FiniteSet:
...
@abstractmethod
def homs(self, ob1: Object, ob2: Object) -> FiniteSet:
...
class FiniteCategory(FiniteSemiCategory, Category, ABC):
...
class CategoryOperations(ABC):
@abstractmethod
def product(self, c1: Category, c2: Category) -> Category:
""" Product of two categories. """
@abstractmethod
def disjoint_union(self, c1: Category, c2: Category) -> FiniteCategory:
""" Disjoint union for the categories """
@abstractmethod
def arrow(self, c1: Category) -> Category:
""" Computes the arrow category """
@abstractmethod
def twisted_arrow(self, c1: Category) -> Category:
""" Computes the twisted arrow category """
class FiniteCategoryOperations(ABC):
@abstractmethod
def product(self, c1: FiniteCategory, c2: FiniteCategory) -> FiniteCategory:
""" Product of two categories. """
@abstractmethod
def disjoint_union(self, c1: FiniteCategory, c2: FiniteCategory) -> FiniteCategory:
""" Disjoint union for the categories """
@abstractmethod
def arrow(self, c1: FiniteCategory) -> FiniteCategory:
""" Computes the arrow category """
@abstractmethod
def twisted_arrow(self, c1: FiniteCategory) -> FiniteCategory:
""" Computes the twisted arrow category """
class Functor(ABC):
@abstractmethod
def source(self) -> Category:
...
@abstractmethod
def target(self) -> Category:
...
@abstractmethod
def f_ob(self, ob: Object) -> Object:
""" Effect on objects """
@abstractmethod
def f_mor(self, m: Morphism) -> Morphism:
""" Effect on morphisms """
class FiniteFunctor(Functor, ABC):
@abstractmethod
def source(self) -> FiniteCategory:
...
@abstractmethod
def target(self) -> FiniteCategory:
...
class MonoidalCategory(Category, ABC):
@abstractmethod
def monoidal_unit(self) -> Object:
""" Return the product functor. """
@abstractmethod
def monoidal_product(self) -> Functor:
""" Return the product functor. """
class FiniteMonoidalCategory(MonoidalCategory, FiniteCategory, ABC):
...
class NaturalTransformation(ABC):
@abstractmethod
def cat1(self) -> Category:
...
@abstractmethod
def cat2(self) -> Category:
...
@abstractmethod
def component(self, ob: Object) -> Morphism:
"""Returns the component for a particular object in the first category.
This is a morphism in the second category.
"""
class FiniteNaturalTransformation(NaturalTransformation, ABC):
@abstractmethod
def cat1(self) -> FiniteCategory:
...
@abstractmethod
def cat2(self) -> FiniteCategory:
...
class Adjunction(ABC):
@abstractmethod
def source(self) -> Category:
...
@abstractmethod
def target(self) -> Category:
...
@abstractmethod
def left(self) -> Functor:
pass
@abstractmethod
def right(self) -> Functor:
pass
class FiniteAdjunction(Adjunction, ABC):
@abstractmethod
def source(self) -> FiniteCategory:
...
@abstractmethod
def target(self) -> FiniteCategory:
...
@abstractmethod
def left(self) -> FiniteFunctor:
pass
@abstractmethod
def right(self) -> FiniteFunctor:
pass
class FiniteAdjunctionsOperations(ABC):
@abstractmethod
def is_adjunction(self, left: FiniteFunctor, right: FiniteFunctor) -> bool:
""" check the pair is an adjunction """
@abstractmethod
def compose(
self, adj1: FiniteAdjunction, adj2: FiniteAdjunction
) -> FiniteAdjunction:
""" compose two compatible adjunctions"""
@abstractmethod
def from_relation(self, f: FiniteRelation) -> FiniteAdjunction:
...
class DPI(ABC):
@abstractmethod
def functionality(self) -> Poset:
...
@abstractmethod
def implementations(self) -> Setoid:
...
@abstractmethod
def costs(self) -> Poset:
...
@abstractmethod
def requires(self) -> Mapping:
...
@abstractmethod
def provides(self) -> Mapping:
...
class DPCategory(Category, ABC):
pass
class DP(ABC):
pass
class FiniteDP(ABC):
pass
class DPConstructors(ABC):
@abstractmethod
def companion(self, f: MonotoneMap) -> DP:
pass
@abstractmethod
def conjoint(self, f: MonotoneMap) -> DP:
pass
class FiniteDPOperations(ABC):
@abstractmethod
def series(self, dp1: FiniteDP, dp2: FiniteDP) -> FiniteDP:
pass
@abstractmethod
def union(self, dp1: FiniteDP, dp2: FiniteDP) -> FiniteDP:
pass
@abstractmethod
def intersection(self, dp1: FiniteDP, dp2: FiniteDP) -> FiniteDP:
pass
@abstractmethod
def from_relation(self, f: FiniteRelation) -> FiniteDP:
...
class Profunctor(ABC):
def source(self) -> Category:
...
def target(self) -> Category:
...
def functor(self) -> Functor:
...
class FiniteProfunctor(ABC):
def cat1(self) -> FiniteCategory:
...
def cat2(self) -> FiniteCategory:
...
def functor(self) -> FiniteFunctor:
...
class FiniteProfunctorOperations(ABC):
@abstractmethod
def series(self, p1: FiniteProfunctor, p2: FiniteProfunctor) -> FiniteProfunctor:
...
class FiniteEnrichedCategory(FiniteCategory, ABC):
def enrichment(self) -> FiniteMonoidalCategory:
...
class SetoidOperations(ABC):
@classmethod
@abstractmethod
def union_setoids(cls, a: Setoid, b: Setoid) -> Setoid:
""" Creates the union of two Setoids. """
@classmethod
@abstractmethod
def intersection_setoids(cls, a: Setoid, b: Setoid) -> Setoid:
""" Creates the intersection of two Setoids. """
class EnumerableSetsOperations(ABC):
@classmethod
@abstractmethod
def make_set_sequence(cls, f: Callable[[int], object]) -> EnumerableSet:
"""Creates an EnumerableSet from a function that gives the
i-th element."""
@classmethod
@abstractmethod
def union_esets(cls, a: EnumerableSet, b: EnumerableSet) -> EnumerableSet:
""" Creates the union of two EnumerableSet. """ | ACT4E-exercises | /ACT4E-exercises-2021.1.2103061245.tar.gz/ACT4E-exercises-2021.1.2103061245/src/act4e_interfaces/finite.py | finite.py |
# This branch is currently non-functional due to changes to core. Please use the R branch for the time being.
# Installation
### Setting Up the Environment (Preinstallation)
**For Linux Users**
Verify that the CMake version you are using is >= 3.19.
For optimal performance on Intel-based architectures, installing the [Intel Math Kernel Library (MKL)](https://software.intel.com/content/www/us/en/develop/articles/intel-math-kernel-library-intel-mkl-2020-install-guide.html) is **highly** recommended. After installation, make sure `MKLROOT` is defined by running the [setvars](https://software.intel.com/content/www/us/en/develop/documentation/using-configuration-file-for-setvars-sh/top.html) script.
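For example, on a typical oneAPI installation the environment can be initialized as follows (the install path below is an assumption and may differ on your system):
```bash
# Default oneAPI location on Linux; adjust the path to your installation.
source /opt/intel/oneapi/setvars.sh
echo "$MKLROOT"  # should now print the MKL root directory
```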
**Install library dependencies**
To install the `ACTIONet` dependencies on Debian-based Linux machines, run:
```bash
sudo apt-get install libhdf5-dev libsuitesparse-dev libnss3 xvfb
```
For macOS systems, you can use [Homebrew](https://brew.sh/) instead:
```bash
brew install hdf5 suite-sparse c-blosc
```
### Installing ACTIONet Python Package
Use `pip` to install ACTIONet directly from this repository:
```bash
pip install git+https://github.com/shmohammadi86/ACTIONet@python-devel
```
To install from source:
```bash
git clone --recurse-submodules https://github.com/shmohammadi86/ACTIONet.git
cd ACTIONet
git submodule update --init
python setup.py build
python setup.py develop
```
# Running ACTIONet
**Note:** If you are using `MKL`, make sure to properly [set the number of threads](https://software.intel.com/content/www/us/en/develop/documentation/mkl-macos-developer-guide/top/managing-performance-and-memory/improving-performance-with-threading/techniques-to-set-the-number-of-threads.html) before running `ACTIONet`.
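For example, one common approach (an assumption on our part; the linked guide covers the alternatives) is to set the `MKL_NUM_THREADS` environment variable before any MKL-backed library is imported:
```python
import os

# Must be set before importing numpy/scipy/ACTIONet for MKL to pick it up.
os.environ['MKL_NUM_THREADS'] = '8'  # tune to the number of physical cores
```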
## Example Run
Here is a simple example to get you started:
```python
import urllib.request
import ACTIONet as an
import scanpy as sc
# Download example dataset from the 10X Genomics website
opener = urllib.request.build_opener()
opener.addheaders = [('User-agent', 'Mozilla/5.0')]
urllib.request.install_opener(opener)
urllib.request.urlretrieve('http://cf.10xgenomics.com/samples/cell-exp/3.0.0/pbmc_10k_v3/pbmc_10k_v3_filtered_feature_bc_matrix.h5', 'pbmc_10k_v3.h5')
# Read and filter the data
adata = sc.read_10x_h5('pbmc_10k_v3.h5')
adata.var_names_make_unique(join='.')
an.pp.filter_adata(adata, min_cells_per_feature=0.01, min_features_per_cell=1000)
sc.pp.normalize_total(adata)
sc.pp.log1p(adata)
# Run ACTIONet
an.pp.reduce_kernel(adata)
an.run_ACTIONet(adata)
# Annotate cell-types
marker_genes, directions, names = an.tl.load_markers('PBMC_Monaco2019_12celltypes')
cell_labels, confidences, Z = an.tl.annotate_cells_using_markers(adata, marker_genes, directions, names)
adata.obs['celltypes'] = cell_labels
# Visualize output
an.pl.plot_ACTIONet(adata, 'celltypes', transparency_key='node_centrality')
# Export results
adata.write('pbmc_10k_v3.h5ad')
```
## Visualizing results using cellxgene
In the Python implementation, ACTIONet stores its output internally as an `AnnData` object. R `ACE` objects can be imported from and exported to `AnnData` using the `AnnData2ACE()` and `ACE2AnnData()` functions, respectively. `AnnData` objects can be loaded directly into [cellxgene](https://github.com/chanzuckerberg/cellxgene), an open-source viewer for interactive single-cell data visualization. `cellxgene` can be installed as:
```bash
pip install cellxgene
```
Then to visualize the results of ACTIONet, run:
```bash
cellxgene launch pbmc_10k_v3.h5ad
```
where *pbmc_10k_v3.h5ad* is the file we exported earlier using the `adata.write()` function.
# Additional tutorials
You can access the ACTIONet tutorials here:
1. [ACTIONet framework at a glance (human PBMC 3k dataset)](http://compbio.mit.edu/ACTIONet/tutorials/mini_intro.html)
2. [Introduction to the ACTIONet framework (human PBMC Granja et al. dataset)](http://compbio.mit.edu/ACTIONet/tutorials/intro.html)
3. [Introduction to cluster-centric analysis using the ACTIONet framework](http://compbio.mit.edu/ACTIONet/tutorials/clustering.html)
4. [To batch correct or not to batch correct, that is the question!](http://compbio.mit.edu/ACTIONet/tutorials/batch.html)
5. [PortingData: Import/export options in the ACTIONet framework](http://compbio.mit.edu/ACTIONet/tutorials/porting_data.html)
6. [Interactive visualization, annotation, and exploration](http://compbio.mit.edu/ACTIONet/tutorials/annotation.html)
7. [Constructing cell-type/cell-state-specific networks using SCINET](http://compbio.mit.edu/ACTIONet/tutorials/scinet.html)
You can also find a [step-by-step guide](http://compbio.mit.edu/ACTIONet/tutorials/guide.html) to learning the core functionalities of the ACTIONet framework.
| ACTIONet | /ACTIONet-0.1.1.tar.gz/ACTIONet-0.1.1/README.md | README.md |
Make sure you've completed the following steps before submitting your issue -- thank you!
1. Check if your question has already been answered in the [FAQ](http://pybind11.readthedocs.io/en/latest/faq.html) section.
2. Make sure you've read the [documentation](http://pybind11.readthedocs.io/en/latest/). Your issue may be addressed there.
3. If those resources didn't help and you only have a short question (not a bug report), consider asking in the [Gitter chat room](https://gitter.im/pybind/Lobby).
4. If you have a genuine bug report or a more complex question which is not answered in the previous items (or not suitable for chat), please fill in the details below.
5. Include a self-contained and minimal piece of code that reproduces the problem. If that's not possible, try to make the description as clear as possible.
*After reading, remove this checklist and the template text in parentheses below.*
## Issue description
(Provide a short description, state the expected behavior and what actually happens.)
## Reproducible example code
(The code should be minimal, have no external dependencies, isolate the function(s) that cause breakage. Submit matched and complete C++ and Python snippets that can be easily compiled and run to diagnose the issue.)
| ACTIONet | /ACTIONet-0.1.1.tar.gz/ACTIONet-0.1.1/pybind11/ISSUE_TEMPLATE.md | ISSUE_TEMPLATE.md |