file_name (large_stringlengths 4-140) | prefix (large_stringlengths 0-39k) | suffix (large_stringlengths 0-36.1k) | middle (large_stringlengths 0-29.4k) | fim_type (large_stringclasses, 4 values)
---|---|---|---|---|
binder.rs | // Copyright 2021 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
crate::{
capability::{
CapabilityProvider, CapabilitySource, ComponentCapability, InternalCapability,
OptionalTask,
},
channel,
model::{
component::{BindReason, WeakComponentInstance},
error::ModelError,
hooks::{Event, EventPayload, EventType, Hook, HooksRegistration},
model::Model,
routing::report_routing_failure,
},
},
async_trait::async_trait,
cm_rust::{CapabilityName, CapabilityPath, ProtocolDecl},
fuchsia_async as fasync, fuchsia_zircon as zx,
lazy_static::lazy_static,
moniker::{AbsoluteMoniker, AbsoluteMonikerBase, ExtendedMoniker},
std::{
path::PathBuf,
sync::{Arc, Weak},
},
};
lazy_static! {
pub static ref BINDER_SERVICE: CapabilityName = "fuchsia.component.Binder".into();
pub static ref BINDER_CAPABILITY: ComponentCapability =
ComponentCapability::Protocol(ProtocolDecl {
name: BINDER_SERVICE.clone(),
source_path: Some(CapabilityPath {
basename: "fuchsia.component.Binder".into(),
dirname: "svc".into()
}),
});
}
/// Implementation of `fuchsia.component.Binder` FIDL protocol.
pub struct BinderCapabilityProvider {
source: WeakComponentInstance,
target: WeakComponentInstance,
host: Arc<BinderCapabilityHost>,
}
impl BinderCapabilityProvider {
pub fn new(
source: WeakComponentInstance,
target: WeakComponentInstance,
host: Arc<BinderCapabilityHost>,
) -> Self {
Self { source, target, host }
}
}
#[async_trait]
impl CapabilityProvider for BinderCapabilityProvider {
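// Spawns a task that binds (starts) the source component; if binding fails,
// the routing failure is reported to the target component over `server_end`.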
async fn open(
self: Box<Self>,
_flags: u32,
_open_mode: u32,
_relative_path: PathBuf,
server_end: &mut zx::Channel,
) -> Result<OptionalTask, ModelError> {
let host = self.host.clone();
let target = self.target.clone();
let source = self.source.clone();
let server_end = channel::take_channel(server_end);
Ok(fasync::Task::spawn(async move {
if let Err(err) = host.bind(source).await {
let res = target.upgrade().map_err(|e| ModelError::from(e));
match res {
Ok(target) => {
report_routing_failure(&target, &*BINDER_CAPABILITY, &err, server_end)
.await;
}
Err(err) => {
log::warn!("failed to upgrade reference to {}: {}", target.moniker, err);
}
}
}
})
.into())
}
}
// A `Hook` that serves the `fuchsia.component.Binder` FIDL protocol.
#[derive(Clone)]
pub struct BinderCapabilityHost {
model: Weak<Model>,
}
impl BinderCapabilityHost {
pub fn new(model: Weak<Model>) -> Self {
Self { model }
}
pub fn hooks(self: &Arc<Self>) -> Vec<HooksRegistration> {
vec![HooksRegistration::new(
"BinderCapabilityHost",
vec![EventType::CapabilityRouted],
Arc::downgrade(self) as Weak<dyn Hook>,
)]
}
pub async fn bind(&self, source: WeakComponentInstance) -> Result<(), ModelError> {
let source = source.upgrade().map_err(|e| ModelError::from(e))?;
source.bind(&BindReason::Binder).await?;
Ok(())
}
async fn on_scoped_framework_capability_routed_async<'a>(
self: Arc<Self>,
source: WeakComponentInstance,
target_moniker: AbsoluteMoniker,
capability: &'a InternalCapability,
capability_provider: Option<Box<dyn CapabilityProvider>>,
) -> Result<Option<Box<dyn CapabilityProvider>>, ModelError> {
// If some other capability has already been installed, then there's nothing to
// do here.
if capability_provider.is_none() && capability.matches_protocol(&BINDER_SERVICE) {
let model = self.model.upgrade().ok_or(ModelError::ModelNotAvailable)?;
let target =
WeakComponentInstance::new(&model.look_up(&target_moniker.to_partial()).await?);
Ok(Some(Box::new(BinderCapabilityProvider::new(source, target, self.clone()))
as Box<dyn CapabilityProvider>))
} else {
Ok(capability_provider)
}
}
}
#[async_trait]
impl Hook for BinderCapabilityHost {
async fn on(self: Arc<Self>, event: &Event) -> Result<(), ModelError> {
if let Ok(EventPayload::CapabilityRouted {
source: CapabilitySource::Framework { capability, component },
capability_provider,
}) = &event.result
{
let target_moniker = match &event.target_moniker {
ExtendedMoniker::ComponentManager => {
Err(ModelError::UnexpectedComponentManagerMoniker)
}
ExtendedMoniker::ComponentInstance(moniker) => Ok(moniker),
}?;
let mut capability_provider = capability_provider.lock().await;
*capability_provider = self
.on_scoped_framework_capability_routed_async(
component.clone(),
target_moniker.clone(),
&capability,
capability_provider.take(),
)
.await?;
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use {
super::*,
crate::{
builtin_environment::BuiltinEnvironment,
capability::CapabilityProvider,
model::{
events::{source::EventSource, stream::EventStream},
testing::test_helpers::*,
},
},
cm_rust::{self, CapabilityName, ComponentDecl, EventMode},
cm_rust_testing::*,
fidl::{client::Client, handle::AsyncChannel},
fuchsia_zircon as zx,
futures::{lock::Mutex, StreamExt},
matches::assert_matches,
moniker::AbsoluteMoniker,
std::path::PathBuf,
};
struct BinderCapabilityTestFixture {
builtin_environment: Arc<Mutex<BuiltinEnvironment>>,
}
impl BinderCapabilityTestFixture {
async fn | (components: Vec<(&'static str, ComponentDecl)>) -> Self {
let TestModelResult { builtin_environment, .. } =
TestEnvironmentBuilder::new().set_components(components).build().await;
BinderCapabilityTestFixture { builtin_environment }
}
async fn new_event_stream(
&self,
events: Vec<CapabilityName>,
mode: EventMode,
) -> (EventSource, EventStream) {
new_event_stream(self.builtin_environment.clone(), events, mode).await
}
async fn provider(
&self,
source: AbsoluteMoniker,
target: AbsoluteMoniker,
) -> Box<BinderCapabilityProvider> {
let builtin_environment = self.builtin_environment.lock().await;
let host = builtin_environment.binder_capability_host.clone();
let source = builtin_environment
.model
.look_up(&source.to_partial())
.await
.expect("failed to look up source moniker");
let target = builtin_environment
.model
.look_up(&target.to_partial())
.await
.expect("failed to look up target moniker");
Box::new(BinderCapabilityProvider::new(
WeakComponentInstance::new(&source),
WeakComponentInstance::new(&target),
host,
))
}
}
#[fuchsia::test]
async fn component_starts_on_open() {
let fixture = BinderCapabilityTestFixture::new(vec![
(
"root",
ComponentDeclBuilder::new()
.add_lazy_child("source")
.add_lazy_child("target")
.build(),
),
("source", component_decl_with_test_runner()),
("target", component_decl_with_test_runner()),
])
.await;
let (_event_source, mut event_stream) = fixture
.new_event_stream(
vec![EventType::Resolved.into(), EventType::Started.into()],
EventMode::Async,
)
.await;
let (_client_end, mut server_end) =
zx::Channel::create().expect("failed to create channels");
let moniker: AbsoluteMoniker = vec!["source:0"].into();
let () = fixture
.provider(moniker.clone(), vec!["target:0"].into())
.await
.open(0, 0, PathBuf::new(), &mut server_end)
.await
.expect("failed to call open()")
.take()
.expect("task is empty")
.await;
assert!(event_stream.wait_until(EventType::Resolved, moniker.clone()).await.is_some());
assert!(event_stream.wait_until(EventType::Started, moniker.clone()).await.is_some());
}
// TODO(yaneury): Figure out a way to test this behavior.
#[ignore]
#[fuchsia::test]
async fn channel_is_closed_if_component_does_not_exist() {
let fixture = BinderCapabilityTestFixture::new(vec![(
"root",
ComponentDeclBuilder::new()
.add_lazy_child("target")
.add_lazy_child("unresolvable")
.build(),
)])
.await;
let (client_end, mut server_end) =
zx::Channel::create().expect("failed to create channels");
let moniker: AbsoluteMoniker = AbsoluteMoniker::from(vec!["foo:0"]);
let () = fixture
.provider(moniker, vec![].into())
.await
.open(0, 0, PathBuf::new(), &mut server_end)
.await
.expect("failed to call open()")
.take()
.expect("task is empty")
.await;
let client_end =
AsyncChannel::from_channel(client_end).expect("failed to create AsyncChannel");
let client = Client::new(client_end, "binder_service");
let mut event_receiver = client.take_event_receiver();
assert_matches!(
event_receiver.next().await,
Some(Err(fidl::Error::ClientChannelClosed {
status: zx::Status::NOT_FOUND,
protocol_name: "binder_service"
}))
);
assert_matches!(event_receiver.next().await, None);
}
}
| new | identifier_name |
consolidate_data.py | import argparse
import glob
import logging
import os
import h5py
from joblib import Parallel, delayed
from natsort import natsorted
import numpy as np
import scipy.ndimage
import scipy.stats
import skimage.io as io
import tifffile
import zarr
logger = logging.getLogger(__name__)
def load_array(filename):
if filename.endswith(".tif") or \
filename.endswith(".tiff") or \
filename.endswith(".TIF") or \
filename.endswith(".TIFF"):
image = tifffile.imread(filename)
elif filename.endswith(".png"):
image = io.imread(filename, plugin="simpleitk")
else:
raise ValueError("invalid input file type", filename)
logger.debug("%s shape %s", filename, image.shape)
if len(image.shape) > 2 and image.shape[-1] > 1:
image = rgb2gray(image,
np.iinfo(image.dtype).min, np.iinfo(image.dtype).max)
logger.info("rgb2gray %s shape %s", filename, image.shape)
return image
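# Collapse an RGB(A) image to grayscale by averaging the first three channels,
# clipping the result to [mi, mx].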
def rgb2gray(rgb, mi, mx):
return np.clip(np.dot(rgb[..., :3], [1.0/3.0, 1.0/3.0, 1.0/3.0]), mi, mx)
# https://github.com/CSBDeep/CSBDeep/blob/master/csbdeep/utils/utils.py
def normalize_percentile(x, pmin=3, pmax=99.8, axis=None, clip=False,
eps=1e-20, dtype=np.float32):
mi = np.percentile(x, pmin, axis=axis, keepdims=True)
ma = np.percentile(x, pmax, axis=axis, keepdims=True)
logger.debug("min/max %s %s %s %s", mi, ma, np.min(x), np.max(x))
return normalize_min_max(x, mi, ma, clip=clip, eps=eps, dtype=dtype)
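# Linearly rescale x so that [mi, ma] maps to [0, 1] (with a small eps to avoid
# division by zero); uses numexpr when available, plain numpy otherwise.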
def normalize_min_max(x, mi, ma, clip=False, eps=1e-20, dtype=np.float32):
if mi is None:
mi = np.min(x)
if ma is None:
ma = np.max(x)
if dtype is not None:
x = x.astype(dtype, copy=False)
mi = dtype(mi) if np.isscalar(mi) else mi.astype(dtype, copy=False)
ma = dtype(ma) if np.isscalar(ma) else ma.astype(dtype, copy=False)
eps = dtype(eps)
try:
import numexpr
x = numexpr.evaluate("(x - mi) / ( ma - mi + eps )")
except ImportError:
x = (x - mi) / ( ma - mi + eps )
if clip:
x = np.clip(x, 0, 1)
return x
def normalize(args, raw, sample, mn, mx):
logger.debug(
"%s before norm %s: min %s, max %s, mean %s, std %s, median %s",
sample, args.normalize, np.min(raw), np.max(raw), np.mean(raw),
np.std(raw), np.median(raw))
if args.normalize == "minmax":
|
elif args.normalize == "percentile":
raw = normalize_percentile(raw, mn, mx)
logger.debug(
"%s after norm %s: min %s, max %s, mean %s, std %s, median %s",
sample, args.normalize, np.min(raw), np.max(raw), np.mean(raw),
np.std(raw), np.median(raw))
return raw
def preprocess(args, raw, sample):
logger.debug("%s before preproc %s: skew %s",
sample, args.preprocess, scipy.stats.skew(raw.ravel()))
if args.preprocess is None or args.preprocess == "no":
pass
elif args.preprocess == "square":
raw = np.square(raw)
elif args.preprocess == "cuberoot":
raw = np.cbrt(raw)
logger.debug("%s after preproc %s: skew %s",
sample, args.preprocess, scipy.stats.skew(raw.ravel()))
return raw
def pad(args, array, mode):
if args.padding != 0:
array = np.pad(array,
((args.padding, args.padding),
(args.padding, args.padding),
(args.padding, args.padding)),
mode)
return array
def get_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--in-dir', dest='in_dir', required=True,
help='location of input files')
parser.add_argument('-o', '--out-dir', dest='out_dir', required=True,
help='where to place output files')
parser.add_argument('--out-format', dest='out_format', default="hdf",
help='format of output files')
parser.add_argument('-p', '--parallel', default=1, type=int)
parser.add_argument('--raw-gfp-min', dest='raw_gfp_min', type=int)
parser.add_argument('--raw-gfp-max', dest='raw_gfp_max', type=int)
parser.add_argument('--raw-bf-min', dest='raw_bf_min', type=int)
parser.add_argument('--raw-bf-max', dest='raw_bf_max', type=int)
parser.add_argument('--normalize', default='minmax',
choices=['minmax', 'percentile', 'meanstd'])
parser.add_argument('--preprocess', default='no',
choices=['no', 'square', 'cuberoot'])
parser.add_argument('--padding', default=0, type=int)
args = parser.parse_args()
os.makedirs(args.out_dir, exist_ok=True)
return args
def main():
logging.basicConfig(level='INFO')
args = get_arguments()
files = map(
lambda fn: fn.split("/")[-1].split(".")[0].split("_")[6],
glob.glob(os.path.join(args.in_dir, 'BBBC010_v2_images/*.tif')))
files = sorted(list(set(files)))
print(files)
if args.parallel > 1:
Parallel(n_jobs=args.parallel, verbose=1, backend='multiprocessing') \
(delayed(work)(args, f) for f in files)
else:
for f in files:
work(args, f)
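# Process a single BBBC010 sample (well): load the GFP and brightfield images,
# preprocess/normalize/pad them, build per-worm binary masks, an overlap-aware
# normalized label volume, an instance map, per-pixel instance counts and a
# foreground mask, then write all volumes to HDF5 or zarr.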
def work(args, sample):
logger.info("Processing %s, %s", args.in_dir, sample)
out_fn = os.path.join(args.out_dir, sample)
raw_fns = natsorted(glob.glob(
os.path.join(args.in_dir,
"BBBC010_v2_images", "*_" + sample + "_*.tif")))
# print(raw_fns)
raw_gfp = load_array(raw_fns[0]).astype(np.float32)
# print(raw_fns[0], np.min(raw_gfp), np.max(raw_gfp))
raw_gfp = preprocess(args, raw_gfp, sample)
raw_gfp = normalize(args, raw_gfp, sample,
args.raw_gfp_min, args.raw_gfp_max)
raw_gfp = pad(args, raw_gfp, 'constant')
raw_bf = load_array(raw_fns[1]).astype(np.float32)
# print(raw_fns[1], np.min(raw_bf), np.max(raw_bf))
raw_bf = preprocess(args, raw_bf, sample)
raw_bf = normalize(args, raw_bf, sample,
args.raw_bf_min, args.raw_bf_max)
raw_bf = pad(args, raw_bf, 'constant')
files = natsorted(
glob.glob(os.path.join(args.in_dir, "BBBC010_v1_foreground_eachworm",
sample + "*" + "_ground_truth.png")))
# print(files)
logger.info("number files: %s", len(files))
gt_labels = np.zeros((len(files),) + raw_gfp.shape, dtype=np.uint16)
gt_instances = np.zeros(raw_gfp.shape, dtype=np.uint16)
for idx, f in enumerate(files):
gt_label = load_array(f).astype(np.uint16)
gt_labels[idx, ...] = 1*(gt_label!=0)
gt_instances[gt_label != 0] = idx+1
gt_numinst = np.sum(gt_labels, axis=0)
tmp = np.sum(gt_labels, axis=0, keepdims=True)
tmp[tmp==0] = 1
gt_labels_norm = (gt_labels.astype(np.float32) /
tmp.astype(np.float32)).astype(np.float32)
gt_fgbg = np.zeros(gt_labels.shape[1:], dtype=np.uint8)
gt_fgbg[np.sum(gt_labels, axis=0) > 0] = 1
# is slightly displaced +1,+0
# gt_fgbg2 = load_array(
# os.path.join(args.in_dir, "BBBC010_v1_foreground",
# sample + "_binary.png"))
# gt_fgbg2[0:-1,1:] = gt_fgbg2[1:,1:]
# print(np.count_nonzero(gt_fgbg2 != gt_fgbg))
# gt_fgbg2 = pad(args, gt_fgbg2, 'constant')
gt_labels = pad(args, gt_labels, 'constant')
gt_instances = pad(args, gt_instances, 'constant')
gt_labels_norm = pad(args, gt_labels_norm, 'constant')
gt_numinst = pad(args, gt_numinst, 'constant')
gt_fgbg = pad(args, gt_fgbg, 'constant')
if raw_gfp.shape[0] != 1:
raw_gfp = np.expand_dims(raw_gfp, 0)
if raw_bf.shape[0] != 1:
raw_bf = np.expand_dims(raw_bf, 0)
raw = np.concatenate((raw_gfp, raw_bf), axis=0)
if gt_instances.shape[0] != 1:
gt_instances = np.expand_dims(gt_instances, 0)
gt_numinst = np.expand_dims(gt_numinst, 0)
gt_fgbg = np.expand_dims(gt_fgbg, 0)
gt_labels_padded = np.pad(gt_labels, (0, 20-len(files)), 'constant')
gt_labels_norm_padded = np.pad(gt_labels_norm, (0, 20-len(files)),
'constant')
if args.out_format == "hdf":
f = h5py.File(out_fn + '.hdf', 'w')
elif args.out_format == "zarr":
f = zarr.open(out_fn + '.zarr', 'w')
f.create_dataset(
'volumes/raw',
data=raw,
chunks=(2, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/raw_gfp',
data=raw_gfp,
chunks=(1, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/raw_bf',
data=raw_bf,
chunks=(1, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_labels',
data=gt_labels,
chunks=(len(files), 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_labels_norm',
data=gt_labels_norm,
chunks=(len(files), 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_labels_padded',
data=gt_labels_padded,
chunks=(20, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_labels_norm_padded',
data=gt_labels_norm_padded,
chunks=(20, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_instances',
data=gt_instances,
chunks=(1, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_numinst',
data=gt_numinst,
chunks=(1, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_fgbg',
data=gt_fgbg,
chunks=(1, 256, 256),
compression='gzip')
for dataset in ['volumes/raw',
'volumes/raw_gfp',
'volumes/raw_bf',
'volumes/gt_labels',
'volumes/gt_labels_norm',
'volumes/gt_instances',
'volumes/gt_numinst',
'volumes/gt_fgbg']:
f[dataset].attrs['offset'] = (0, 0)
f[dataset].attrs['resolution'] = (1, 1)
if args.out_format == "hdf":
f.close()
if __name__ == "__main__":
main()
| raw = normalize_min_max(raw, mn, mx) | conditional_block |
consolidate_data.py | import argparse
import glob
import logging
import os
import h5py
from joblib import Parallel, delayed
from natsort import natsorted
import numpy as np
import scipy.ndimage
import scipy.stats
import skimage.io as io
import tifffile
import zarr
logger = logging.getLogger(__name__)
def load_array(filename):
if filename.endswith(".tif") or \
filename.endswith(".tiff") or \
filename.endswith(".TIF") or \
filename.endswith(".TIFF"):
image = tifffile.imread(filename)
elif filename.endswith(".png"):
image = io.imread(filename, plugin="simpleitk")
else:
raise ValueError("invalid input file type", filename)
logger.debug("%s shape %s", filename, image.shape)
if len(image.shape) > 2 and image.shape[-1] > 1:
image = rgb2gray(image,
np.iinfo(image.dtype).min, np.iinfo(image.dtype).max)
logger.info("rgb2gray %s shape %s", filename, image.shape)
return image
def rgb2gray(rgb, mi, mx):
return np.clip(np.dot(rgb[..., :3], [1.0/3.0, 1.0/3.0, 1.0/3.0]), mi, mx)
# https://github.com/CSBDeep/CSBDeep/blob/master/csbdeep/utils/utils.py
def normalize_percentile(x, pmin=3, pmax=99.8, axis=None, clip=False,
eps=1e-20, dtype=np.float32):
mi = np.percentile(x, pmin, axis=axis, keepdims=True)
ma = np.percentile(x, pmax, axis=axis, keepdims=True)
logger.debug("min/max %s %s %s %s", mi, ma, np.min(x), np.max(x))
return normalize_min_max(x, mi, ma, clip=clip, eps=eps, dtype=dtype)
def normalize_min_max(x, mi, ma, clip=False, eps=1e-20, dtype=np.float32):
if mi is None:
mi = np.min(x)
if ma is None:
ma = np.max(x)
if dtype is not None:
x = x.astype(dtype, copy=False)
mi = dtype(mi) if np.isscalar(mi) else mi.astype(dtype, copy=False)
ma = dtype(ma) if np.isscalar(ma) else ma.astype(dtype, copy=False)
eps = dtype(eps)
try:
import numexpr
x = numexpr.evaluate("(x - mi) / ( ma - mi + eps )")
except ImportError:
x = (x - mi) / ( ma - mi + eps )
if clip:
x = np.clip(x, 0, 1)
return x
def normalize(args, raw, sample, mn, mx):
logger.debug(
"%s before norm %s: min %s, max %s, mean %s, std %s, median %s",
sample, args.normalize, np.min(raw), np.max(raw), np.mean(raw),
np.std(raw), np.median(raw))
if args.normalize == "minmax":
raw = normalize_min_max(raw, mn, mx)
elif args.normalize == "percentile":
raw = normalize_percentile(raw, mn, mx)
logger.debug(
"%s after norm %s: min %s, max %s, mean %s, std %s, median %s",
sample, args.normalize, np.min(raw), np.max(raw), np.mean(raw),
np.std(raw), np.median(raw))
return raw
def preprocess(args, raw, sample):
logger.debug("%s before preproc %s: skew %s",
sample, args.preprocess, scipy.stats.skew(raw.ravel()))
if args.preprocess is None or args.preprocess == "no":
pass
elif args.preprocess == "square":
raw = np.square(raw)
elif args.preprocess == "cuberoot":
raw = np.cbrt(raw)
logger.debug("%s after preproc %s: skew %s",
sample, args.preprocess, scipy.stats.skew(raw.ravel()))
return raw
def | (args, array, mode):
if args.padding != 0:
array = np.pad(array,
((args.padding, args.padding),
(args.padding, args.padding),
(args.padding, args.padding)),
mode)
return array
def get_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--in-dir', dest='in_dir', required=True,
help='location of input files')
parser.add_argument('-o', '--out-dir', dest='out_dir', required=True,
help='where to place output files')
parser.add_argument('--out-format', dest='out_format', default="hdf",
help='format of output files')
parser.add_argument('-p', '--parallel', default=1, type=int)
parser.add_argument('--raw-gfp-min', dest='raw_gfp_min', type=int)
parser.add_argument('--raw-gfp-max', dest='raw_gfp_max', type=int)
parser.add_argument('--raw-bf-min', dest='raw_bf_min', type=int)
parser.add_argument('--raw-bf-max', dest='raw_bf_max', type=int)
parser.add_argument('--normalize', default='minmax',
choices=['minmax', 'percentile', 'meanstd'])
parser.add_argument('--preprocess', default='no',
choices=['no', 'square', 'cuberoot'])
parser.add_argument('--padding', default=0, type=int)
args = parser.parse_args()
os.makedirs(args.out_dir, exist_ok=True)
return args
def main():
logging.basicConfig(level='INFO')
args = get_arguments()
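# Derive sample identifiers from the image filenames (underscore-separated
# field 6, which appears to be the well ID in BBBC010's naming) and de-duplicate.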
files = map(
lambda fn: fn.split("/")[-1].split(".")[0].split("_")[6],
glob.glob(os.path.join(args.in_dir, 'BBBC010_v2_images/*.tif')))
files = sorted(list(set(files)))
print(files)
if args.parallel > 1:
Parallel(n_jobs=args.parallel, verbose=1, backend='multiprocessing') \
(delayed(work)(args, f) for f in files)
else:
for f in files:
work(args, f)
def work(args, sample):
logger.info("Processing %s, %s", args.in_dir, sample)
out_fn = os.path.join(args.out_dir, sample)
raw_fns = natsorted(glob.glob(
os.path.join(args.in_dir,
"BBBC010_v2_images", "*_" + sample + "_*.tif")))
# print(raw_fns)
raw_gfp = load_array(raw_fns[0]).astype(np.float32)
# print(raw_fns[0], np.min(raw_gfp), np.max(raw_gfp))
raw_gfp = preprocess(args, raw_gfp, sample)
raw_gfp = normalize(args, raw_gfp, sample,
args.raw_gfp_min, args.raw_gfp_max)
raw_gfp = pad(args, raw_gfp, 'constant')
raw_bf = load_array(raw_fns[1]).astype(np.float32)
# print(raw_fns[1], np.min(raw_bf), np.max(raw_bf))
raw_bf = preprocess(args, raw_bf, sample)
raw_bf = normalize(args, raw_bf, sample,
args.raw_bf_min, args.raw_bf_max)
raw_bf = pad(args, raw_bf, 'constant')
files = natsorted(
glob.glob(os.path.join(args.in_dir, "BBBC010_v1_foreground_eachworm",
sample + "*" + "_ground_truth.png")))
# print(files)
logger.info("number files: %s", len(files))
gt_labels = np.zeros((len(files),) + raw_gfp.shape, dtype=np.uint16)
gt_instances = np.zeros(raw_gfp.shape, dtype=np.uint16)
for idx, f in enumerate(files):
gt_label = load_array(f).astype(np.uint16)
gt_labels[idx, ...] = 1*(gt_label!=0)
gt_instances[gt_label != 0] = idx+1
gt_numinst = np.sum(gt_labels, axis=0)
tmp = np.sum(gt_labels, axis=0, keepdims=True)
tmp[tmp==0] = 1
gt_labels_norm = (gt_labels.astype(np.float32) /
tmp.astype(np.float32)).astype(np.float32)
gt_fgbg = np.zeros(gt_labels.shape[1:], dtype=np.uint8)
gt_fgbg[np.sum(gt_labels, axis=0) > 0] = 1
# is slightly displaced +1,+0
# gt_fgbg2 = load_array(
# os.path.join(args.in_dir, "BBBC010_v1_foreground",
# sample + "_binary.png"))
# gt_fgbg2[0:-1,1:] = gt_fgbg2[1:,1:]
# print(np.count_nonzero(gt_fgbg2 != gt_fgbg))
# gt_fgbg2 = pad(args, gt_fgbg2, 'constant')
gt_labels = pad(args, gt_labels, 'constant')
gt_instances = pad(args, gt_instances, 'constant')
gt_labels_norm = pad(args, gt_labels_norm, 'constant')
gt_numinst = pad(args, gt_numinst, 'constant')
gt_fgbg = pad(args, gt_fgbg, 'constant')
if raw_gfp.shape[0] != 1:
raw_gfp = np.expand_dims(raw_gfp, 0)
if raw_bf.shape[0] != 1:
raw_bf = np.expand_dims(raw_bf, 0)
raw = np.concatenate((raw_gfp, raw_bf), axis=0)
if gt_instances.shape[0] != 1:
gt_instances = np.expand_dims(gt_instances, 0)
gt_numinst = np.expand_dims(gt_numinst, 0)
gt_fgbg = np.expand_dims(gt_fgbg, 0)
gt_labels_padded = np.pad(gt_labels, (0, 20-len(files)), 'constant')
gt_labels_norm_padded = np.pad(gt_labels_norm, (0, 20-len(files)),
'constant')
if args.out_format == "hdf":
f = h5py.File(out_fn + '.hdf', 'w')
elif args.out_format == "zarr":
f = zarr.open(out_fn + '.zarr', 'w')
f.create_dataset(
'volumes/raw',
data=raw,
chunks=(2, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/raw_gfp',
data=raw_gfp,
chunks=(1, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/raw_bf',
data=raw_bf,
chunks=(1, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_labels',
data=gt_labels,
chunks=(len(files), 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_labels_norm',
data=gt_labels_norm,
chunks=(len(files), 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_labels_padded',
data=gt_labels_padded,
chunks=(20, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_labels_norm_padded',
data=gt_labels_norm_padded,
chunks=(20, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_instances',
data=gt_instances,
chunks=(1, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_numinst',
data=gt_numinst,
chunks=(1, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_fgbg',
data=gt_fgbg,
chunks=(1, 256, 256),
compression='gzip')
for dataset in ['volumes/raw',
'volumes/raw_gfp',
'volumes/raw_bf',
'volumes/gt_labels',
'volumes/gt_labels_norm',
'volumes/gt_instances',
'volumes/gt_numinst',
'volumes/gt_fgbg']:
f[dataset].attrs['offset'] = (0, 0)
f[dataset].attrs['resolution'] = (1, 1)
if args.out_format == "hdf":
f.close()
if __name__ == "__main__":
main()
| pad | identifier_name |
consolidate_data.py | import argparse
import glob
import logging
import os
import h5py
from joblib import Parallel, delayed
from natsort import natsorted
import numpy as np
import scipy.ndimage
import scipy.stats
import skimage.io as io
import tifffile
import zarr
logger = logging.getLogger(__name__)
def load_array(filename):
if filename.endswith(".tif") or \
filename.endswith(".tiff") or \
filename.endswith(".TIF") or \
filename.endswith(".TIFF"):
image = tifffile.imread(filename)
elif filename.endswith(".png"):
image = io.imread(filename, plugin="simpleitk")
else:
raise ValueError("invalid input file type", filename)
logger.debug("%s shape %s", filename, image.shape)
if len(image.shape) > 2 and image.shape[-1] > 1:
image = rgb2gray(image,
np.iinfo(image.dtype).min, np.iinfo(image.dtype).max)
logger.info("rgb2gray %s shape %s", filename, image.shape)
return image
def rgb2gray(rgb, mi, mx):
return np.clip(np.dot(rgb[..., :3], [1.0/3.0, 1.0/3.0, 1.0/3.0]), mi, mx)
# https://github.com/CSBDeep/CSBDeep/blob/master/csbdeep/utils/utils.py
def normalize_percentile(x, pmin=3, pmax=99.8, axis=None, clip=False,
eps=1e-20, dtype=np.float32):
mi = np.percentile(x, pmin, axis=axis, keepdims=True)
ma = np.percentile(x, pmax, axis=axis, keepdims=True)
logger.debug("min/max %s %s %s %s", mi, ma, np.min(x), np.max(x))
return normalize_min_max(x, mi, ma, clip=clip, eps=eps, dtype=dtype)
def normalize_min_max(x, mi, ma, clip=False, eps=1e-20, dtype=np.float32):
if mi is None:
mi = np.min(x)
if ma is None:
ma = np.max(x)
if dtype is not None:
x = x.astype(dtype, copy=False)
mi = dtype(mi) if np.isscalar(mi) else mi.astype(dtype, copy=False)
ma = dtype(ma) if np.isscalar(ma) else ma.astype(dtype, copy=False)
eps = dtype(eps)
try:
import numexpr
x = numexpr.evaluate("(x - mi) / ( ma - mi + eps )")
except ImportError:
x = (x - mi) / ( ma - mi + eps )
if clip:
x = np.clip(x, 0, 1)
return x
def normalize(args, raw, sample, mn, mx):
logger.debug(
"%s before norm %s: min %s, max %s, mean %s, std %s, median %s",
sample, args.normalize, np.min(raw), np.max(raw), np.mean(raw),
np.std(raw), np.median(raw))
if args.normalize == "minmax":
raw = normalize_min_max(raw, mn, mx)
elif args.normalize == "percentile":
raw = normalize_percentile(raw, mn, mx)
logger.debug(
"%s after norm %s: min %s, max %s, mean %s, std %s, median %s",
sample, args.normalize, np.min(raw), np.max(raw), np.mean(raw),
np.std(raw), np.median(raw))
return raw
def preprocess(args, raw, sample):
logger.debug("%s before preproc %s: skew %s",
sample, args.preprocess, scipy.stats.skew(raw.ravel()))
if args.preprocess is None or args.preprocess == "no":
pass
elif args.preprocess == "square":
raw = np.square(raw)
elif args.preprocess == "cuberoot":
raw = np.cbrt(raw)
logger.debug("%s after preproc %s: skew %s",
sample, args.preprocess, scipy.stats.skew(raw.ravel()))
return raw
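# Symmetrically pad every axis by args.padding; note that the pad widths assume
# a 3-D array.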
def pad(args, array, mode):
if args.padding != 0:
array = np.pad(array,
((args.padding, args.padding),
(args.padding, args.padding),
(args.padding, args.padding)),
mode)
return array
def get_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--in-dir', dest='in_dir', required=True,
help='location of input files')
parser.add_argument('-o', '--out-dir', dest='out_dir', required=True,
help='where to place output files')
parser.add_argument('--out-format', dest='out_format', default="hdf",
help='format of output files')
parser.add_argument('-p', '--parallel', default=1, type=int)
parser.add_argument('--raw-gfp-min', dest='raw_gfp_min', type=int)
parser.add_argument('--raw-gfp-max', dest='raw_gfp_max', type=int)
parser.add_argument('--raw-bf-min', dest='raw_bf_min', type=int)
parser.add_argument('--raw-bf-max', dest='raw_bf_max', type=int)
parser.add_argument('--normalize', default='minmax',
choices=['minmax', 'percentile', 'meanstd'])
parser.add_argument('--preprocess', default='no',
choices=['no', 'square', 'cuberoot'])
parser.add_argument('--padding', default=0, type=int)
args = parser.parse_args()
os.makedirs(args.out_dir, exist_ok=True)
return args
def main():
logging.basicConfig(level='INFO')
args = get_arguments()
files = map(
lambda fn: fn.split("/")[-1].split(".")[0].split("_")[6],
glob.glob(os.path.join(args.in_dir, 'BBBC010_v2_images/*.tif')))
files = sorted(list(set(files)))
print(files)
if args.parallel > 1:
Parallel(n_jobs=args.parallel, verbose=1, backend='multiprocessing') \
(delayed(work)(args, f) for f in files)
else:
for f in files:
work(args, f)
def work(args, sample):
logger.info("Processing %s, %s", args.in_dir, sample)
out_fn = os.path.join(args.out_dir, sample)
raw_fns = natsorted(glob.glob(
os.path.join(args.in_dir,
"BBBC010_v2_images", "*_" + sample + "_*.tif")))
# print(raw_fns)
raw_gfp = load_array(raw_fns[0]).astype(np.float32)
# print(raw_fns[0], np.min(raw_gfp), np.max(raw_gfp))
raw_gfp = preprocess(args, raw_gfp, sample)
raw_gfp = normalize(args, raw_gfp, sample,
args.raw_gfp_min, args.raw_gfp_max)
raw_gfp = pad(args, raw_gfp, 'constant')
raw_bf = load_array(raw_fns[1]).astype(np.float32)
# print(raw_fns[1], np.min(raw_bf), np.max(raw_bf))
raw_bf = preprocess(args, raw_bf, sample)
raw_bf = normalize(args, raw_bf, sample,
args.raw_bf_min, args.raw_bf_max)
raw_bf = pad(args, raw_bf, 'constant')
files = natsorted(
glob.glob(os.path.join(args.in_dir, "BBBC010_v1_foreground_eachworm",
sample + "*" + "_ground_truth.png")))
# print(files)
logger.info("number files: %s", len(files))
gt_labels = np.zeros((len(files),) + raw_gfp.shape, dtype=np.uint16)
gt_instances = np.zeros(raw_gfp.shape, dtype=np.uint16)
for idx, f in enumerate(files):
gt_label = load_array(f).astype(np.uint16)
gt_labels[idx, ...] = 1*(gt_label!=0)
gt_instances[gt_label != 0] = idx+1
gt_numinst = np.sum(gt_labels, axis=0)
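# Overlap-aware normalization: divide each worm's binary mask by the per-pixel
# worm count (clamped to >= 1) so overlapping pixels sum to 1 across worms.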
tmp = np.sum(gt_labels, axis=0, keepdims=True)
tmp[tmp==0] = 1
gt_labels_norm = (gt_labels.astype(np.float32) /
tmp.astype(np.float32)).astype(np.float32)
gt_fgbg = np.zeros(gt_labels.shape[1:], dtype=np.uint8)
gt_fgbg[np.sum(gt_labels, axis=0) > 0] = 1
# is slightly displaced +1,+0
# gt_fgbg2 = load_array(
# os.path.join(args.in_dir, "BBBC010_v1_foreground",
# sample + "_binary.png"))
# gt_fgbg2[0:-1,1:] = gt_fgbg2[1:,1:]
# print(np.count_nonzero(gt_fgbg2 != gt_fgbg))
# gt_fgbg2 = pad(args, gt_fgbg2, 'constant')
gt_labels = pad(args, gt_labels, 'constant')
gt_instances = pad(args, gt_instances, 'constant')
gt_labels_norm = pad(args, gt_labels_norm, 'constant')
gt_numinst = pad(args, gt_numinst, 'constant')
gt_fgbg = pad(args, gt_fgbg, 'constant')
if raw_gfp.shape[0] != 1:
raw_gfp = np.expand_dims(raw_gfp, 0)
if raw_bf.shape[0] != 1:
raw_bf = np.expand_dims(raw_bf, 0)
raw = np.concatenate((raw_gfp, raw_bf), axis=0)
if gt_instances.shape[0] != 1:
gt_instances = np.expand_dims(gt_instances, 0)
gt_numinst = np.expand_dims(gt_numinst, 0)
gt_fgbg = np.expand_dims(gt_fgbg, 0)
gt_labels_padded = np.pad(gt_labels, (0, 20-len(files)), 'constant')
gt_labels_norm_padded = np.pad(gt_labels_norm, (0, 20-len(files)),
'constant') | f = zarr.open(out_fn + '.zarr', 'w')
f.create_dataset(
'volumes/raw',
data=raw,
chunks=(2, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/raw_gfp',
data=raw_gfp,
chunks=(1, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/raw_bf',
data=raw_bf,
chunks=(1, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_labels',
data=gt_labels,
chunks=(len(files), 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_labels_norm',
data=gt_labels_norm,
chunks=(len(files), 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_labels_padded',
data=gt_labels_padded,
chunks=(20, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_labels_norm_padded',
data=gt_labels_norm_padded,
chunks=(20, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_instances',
data=gt_instances,
chunks=(1, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_numinst',
data=gt_numinst,
chunks=(1, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_fgbg',
data=gt_fgbg,
chunks=(1, 256, 256),
compression='gzip')
for dataset in ['volumes/raw',
'volumes/raw_gfp',
'volumes/raw_bf',
'volumes/gt_labels',
'volumes/gt_labels_norm',
'volumes/gt_instances',
'volumes/gt_numinst',
'volumes/gt_fgbg']:
f[dataset].attrs['offset'] = (0, 0)
f[dataset].attrs['resolution'] = (1, 1)
if args.out_format == "hdf":
f.close()
if __name__ == "__main__":
main() |
if args.out_format == "hdf":
f = h5py.File(out_fn + '.hdf', 'w')
elif args.out_format == "zarr": | random_line_split |
consolidate_data.py | import argparse
import glob
import logging
import os
import h5py
from joblib import Parallel, delayed
from natsort import natsorted
import numpy as np
import scipy.ndimage
import scipy.stats
import skimage.io as io
import tifffile
import zarr
logger = logging.getLogger(__name__)
def load_array(filename):
if filename.endswith(".tif") or \
filename.endswith(".tiff") or \
filename.endswith(".TIF") or \
filename.endswith(".TIFF"):
image = tifffile.imread(filename)
elif filename.endswith(".png"):
image = io.imread(filename, plugin="simpleitk")
else:
raise ValueError("invalid input file type", filename)
logger.debug("%s shape %s", filename, image.shape)
if len(image.shape) > 2 and image.shape[-1] > 1:
image = rgb2gray(image,
np.iinfo(image.dtype).min, np.iinfo(image.dtype).max)
logger.info("rgb2gray %s shape %s", filename, image.shape)
return image
def rgb2gray(rgb, mi, mx):
return np.clip(np.dot(rgb[..., :3], [1.0/3.0, 1.0/3.0, 1.0/3.0]), mi, mx)
# https://github.com/CSBDeep/CSBDeep/blob/master/csbdeep/utils/utils.py
def normalize_percentile(x, pmin=3, pmax=99.8, axis=None, clip=False,
eps=1e-20, dtype=np.float32):
mi = np.percentile(x, pmin, axis=axis, keepdims=True)
ma = np.percentile(x, pmax, axis=axis, keepdims=True)
logger.debug("min/max %s %s %s %s", mi, ma, np.min(x), np.max(x))
return normalize_min_max(x, mi, ma, clip=clip, eps=eps, dtype=dtype)
def normalize_min_max(x, mi, ma, clip=False, eps=1e-20, dtype=np.float32):
if mi is None:
mi = np.min(x)
if ma is None:
ma = np.max(x)
if dtype is not None:
x = x.astype(dtype, copy=False)
mi = dtype(mi) if np.isscalar(mi) else mi.astype(dtype, copy=False)
ma = dtype(ma) if np.isscalar(ma) else ma.astype(dtype, copy=False)
eps = dtype(eps)
try:
import numexpr
x = numexpr.evaluate("(x - mi) / ( ma - mi + eps )")
except ImportError:
x = (x - mi) / ( ma - mi + eps )
if clip:
x = np.clip(x, 0, 1)
return x
def normalize(args, raw, sample, mn, mx):
logger.debug(
"%s before norm %s: min %s, max %s, mean %s, std %s, median %s",
sample, args.normalize, np.min(raw), np.max(raw), np.mean(raw),
np.std(raw), np.median(raw))
if args.normalize == "minmax":
raw = normalize_min_max(raw, mn, mx)
elif args.normalize == "percentile":
raw = normalize_percentile(raw, mn, mx)
logger.debug(
"%s after norm %s: min %s, max %s, mean %s, std %s, median %s",
sample, args.normalize, np.min(raw), np.max(raw), np.mean(raw),
np.std(raw), np.median(raw))
return raw
def preprocess(args, raw, sample):
logger.debug("%s before preproc %s: skew %s",
sample, args.preprocess, scipy.stats.skew(raw.ravel()))
if args.preprocess is None or args.preprocess == "no":
pass
elif args.preprocess == "square":
raw = np.square(raw)
elif args.preprocess == "cuberoot":
raw = np.cbrt(raw)
logger.debug("%s after preproc %s: skew %s",
sample, args.preprocess, scipy.stats.skew(raw.ravel()))
return raw
def pad(args, array, mode):
if args.padding != 0:
array = np.pad(array,
((args.padding, args.padding),
(args.padding, args.padding),
(args.padding, args.padding)),
mode)
return array
def get_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--in-dir', dest='in_dir', required=True,
help='location of input files')
parser.add_argument('-o', '--out-dir', dest='out_dir', required=True,
help='where to place output files')
parser.add_argument('--out-format', dest='out_format', default="hdf",
help='format of output files')
parser.add_argument('-p', '--parallel', default=1, type=int)
parser.add_argument('--raw-gfp-min', dest='raw_gfp_min', type=int)
parser.add_argument('--raw-gfp-max', dest='raw_gfp_max', type=int)
parser.add_argument('--raw-bf-min', dest='raw_bf_min', type=int)
parser.add_argument('--raw-bf-max', dest='raw_bf_max', type=int)
parser.add_argument('--normalize', default='minmax',
choices=['minmax', 'percentile', 'meanstd'])
parser.add_argument('--preprocess', default='no',
choices=['no', 'square', 'cuberoot'])
parser.add_argument('--padding', default=0, type=int)
args = parser.parse_args()
os.makedirs(args.out_dir, exist_ok=True)
return args
def main():
|
def work(args, sample):
logger.info("Processing %s, %s", args.in_dir, sample)
out_fn = os.path.join(args.out_dir, sample)
raw_fns = natsorted(glob.glob(
os.path.join(args.in_dir,
"BBBC010_v2_images", "*_" + sample + "_*.tif")))
# print(raw_fns)
raw_gfp = load_array(raw_fns[0]).astype(np.float32)
# print(raw_fns[0], np.min(raw_gfp), np.max(raw_gfp))
raw_gfp = preprocess(args, raw_gfp, sample)
raw_gfp = normalize(args, raw_gfp, sample,
args.raw_gfp_min, args.raw_gfp_max)
raw_gfp = pad(args, raw_gfp, 'constant')
raw_bf = load_array(raw_fns[1]).astype(np.float32)
# print(raw_fns[1], np.min(raw_bf), np.max(raw_bf))
raw_bf = preprocess(args, raw_bf, sample)
raw_bf = normalize(args, raw_bf, sample,
args.raw_bf_min, args.raw_bf_max)
raw_bf = pad(args, raw_bf, 'constant')
files = natsorted(
glob.glob(os.path.join(args.in_dir, "BBBC010_v1_foreground_eachworm",
sample + "*" + "_ground_truth.png")))
# print(files)
logger.info("number files: %s", len(files))
gt_labels = np.zeros((len(files),) + raw_gfp.shape, dtype=np.uint16)
gt_instances = np.zeros(raw_gfp.shape, dtype=np.uint16)
for idx, f in enumerate(files):
gt_label = load_array(f).astype(np.uint16)
gt_labels[idx, ...] = 1*(gt_label!=0)
gt_instances[gt_label != 0] = idx+1
gt_numinst = np.sum(gt_labels, axis=0)
tmp = np.sum(gt_labels, axis=0, keepdims=True)
tmp[tmp==0] = 1
gt_labels_norm = (gt_labels.astype(np.float32) /
tmp.astype(np.float32)).astype(np.float32)
gt_fgbg = np.zeros(gt_labels.shape[1:], dtype=np.uint8)
gt_fgbg[np.sum(gt_labels, axis=0) > 0] = 1
# is slightly displaced +1,+0
# gt_fgbg2 = load_array(
# os.path.join(args.in_dir, "BBBC010_v1_foreground",
# sample + "_binary.png"))
# gt_fgbg2[0:-1,1:] = gt_fgbg2[1:,1:]
# print(np.count_nonzero(gt_fgbg2 != gt_fgbg))
# gt_fgbg2 = pad(args, gt_fgbg2, 'constant')
gt_labels = pad(args, gt_labels, 'constant')
gt_instances = pad(args, gt_instances, 'constant')
gt_labels_norm = pad(args, gt_labels_norm, 'constant')
gt_numinst = pad(args, gt_numinst, 'constant')
gt_fgbg = pad(args, gt_fgbg, 'constant')
if raw_gfp.shape[0] != 1:
raw_gfp = np.expand_dims(raw_gfp, 0)
if raw_bf.shape[0] != 1:
raw_bf = np.expand_dims(raw_bf, 0)
raw = np.concatenate((raw_gfp, raw_bf), axis=0)
if gt_instances.shape[0] != 1:
gt_instances = np.expand_dims(gt_instances, 0)
gt_numinst = np.expand_dims(gt_numinst, 0)
gt_fgbg = np.expand_dims(gt_fgbg, 0)
gt_labels_padded = np.pad(gt_labels, (0, 20-len(files)), 'constant')
gt_labels_norm_padded = np.pad(gt_labels_norm, (0, 20-len(files)),
'constant')
if args.out_format == "hdf":
f = h5py.File(out_fn + '.hdf', 'w')
elif args.out_format == "zarr":
f = zarr.open(out_fn + '.zarr', 'w')
f.create_dataset(
'volumes/raw',
data=raw,
chunks=(2, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/raw_gfp',
data=raw_gfp,
chunks=(1, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/raw_bf',
data=raw_bf,
chunks=(1, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_labels',
data=gt_labels,
chunks=(len(files), 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_labels_norm',
data=gt_labels_norm,
chunks=(len(files), 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_labels_padded',
data=gt_labels_padded,
chunks=(20, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_labels_norm_padded',
data=gt_labels_norm_padded,
chunks=(20, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_instances',
data=gt_instances,
chunks=(1, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_numinst',
data=gt_numinst,
chunks=(1, 256, 256),
compression='gzip')
f.create_dataset(
'volumes/gt_fgbg',
data=gt_fgbg,
chunks=(1, 256, 256),
compression='gzip')
for dataset in ['volumes/raw',
'volumes/raw_gfp',
'volumes/raw_bf',
'volumes/gt_labels',
'volumes/gt_labels_norm',
'volumes/gt_instances',
'volumes/gt_numinst',
'volumes/gt_fgbg']:
f[dataset].attrs['offset'] = (0, 0)
f[dataset].attrs['resolution'] = (1, 1)
if args.out_format == "hdf":
f.close()
if __name__ == "__main__":
main()
| logging.basicConfig(level='INFO')
args = get_arguments()
files = map(
lambda fn: fn.split("/")[-1].split(".")[0].split("_")[6],
glob.glob(os.path.join(args.in_dir, 'BBBC010_v2_images/*.tif')))
files = sorted(list(set(files)))
print(files)
if args.parallel > 1:
Parallel(n_jobs=args.parallel, verbose=1, backend='multiprocessing') \
(delayed(work)(args, f) for f in files)
else:
for f in files:
work(args, f) | identifier_body |
lib.rs | #![allow(unused_doc_comment)]
/// # swiggen
///
/// The `swiggen` library is used to generate `extern "C"` definitions and
/// SWIG wrapper code from Rust functions.
///
/// This basically does two things: generates the `extern "C"` methods by
/// applying typemaps from cbindgen, or some fairly crude heuristics -
/// such as converting an opaque `Foo` into a `*mut Foo`, and running
/// `Box::into_raw(Box::new(foo))` to convert it into a pointer.
///
/// These exported functions all have mangled names like `__SWIG_INJECT_new_Foo`.
/// The code also generates SWIG wrapper code which wraps these functions sp
/// that `Foo` behaves like a native object with methods like `Foo.new`.
/// The SWIG code is injected into the expanded Rust source code through doc
/// comments on various structs/functions.
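// Illustrative sketch only (hypothetical type `Foo`, not output of this crate):
// for a struct deriving `Swig` with `swig_derive(Default)`, the injected
// externs generated below look roughly like
//
//     #[no_mangle]
//     pub extern "C" fn __SWIG_INJECT_free_Foo(arg: *mut Foo) { /* ... */ }
//
//     #[no_mangle]
//     pub extern "C" fn __SWIG_INJECT_default_Foo() -> *mut Foo {
//         Box::into_raw(Box::new(Foo::default()))
//     }
//
// while the SWIG side gains a `class Foo` wrapper holding the raw pointer and
// calling these functions from its constructor/destructor.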
extern crate cbindgen;
#[macro_use]
extern crate log;
extern crate proc_macro2;
#[macro_use]
extern crate quote;
#[macro_use]
extern crate syn;
use proc_macro2::{Span, TokenStream};
use quote::ToTokens;
use quote::TokenStreamExt;
use std::fmt;
use std::fs::File;
use std::io::Write;
use std::str;
use cbindgen::ir::ty;
use cbindgen::utilities::SynAbiHelpers;
use cbindgen::writer::{Source, SourceWriter};
/// Tags used to indicate swig binding code injected into the Rust source.
enum SwigTag {
CodeStart,
CodeEnd,
HdrStart,
HdrEnd,
SwigInject,
}
impl fmt::Display for SwigTag {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let tag = self.to_str();
write!(f, "{}", tag)
}
}
impl SwigTag {
fn to_str(&self) -> &'static str {
match self {
SwigTag::CodeStart => "__SWIG_CODE\n",
SwigTag::CodeEnd => "__SWIG_END_CODE\n",
SwigTag::HdrStart => "__SWIG_HDR\n",
SwigTag::HdrEnd => "__SWIG_END_HDR\n",
SwigTag::SwigInject=> "__SWIG_INJECT_",
}
}
#[inline]
fn len(&self) -> usize {
match self {
SwigTag::CodeStart => "__SWIG_CODE\n",
SwigTag::CodeEnd => "__SWIG_END_CODE\n",
SwigTag::HdrStart => "__SWIG_HDR\n",
SwigTag::HdrEnd => "__SWIG_END_HDR\n",
SwigTag::SwigInject=> "__SWIG_INJECT_",
}.len()
}
}
pub trait ToSwig {
fn to_swig(&self) -> String;
}
/// A type implementing `AsExtern` can be converted into an type compatible with
/// `extern "C"` functions.
pub trait AsExtern {
fn as_extern(&self) -> TokenStream;
}
impl AsExtern for syn::DeriveInput {
fn as_extern(&self) -> TokenStream {
let name = &self.ident;
let free_name = swig_free(&name);
// For an stuct we want to derive Swig for, we add a `free_Foo`
// method so we can free it from SWIG code.
let mut tokens = quote! {
#[allow(non_snake_case)]
#[no_mangle]
pub extern "C" fn #free_name(arg: *mut #name) {
unsafe {
assert!(!arg.is_null());
&*arg;
}
}
};
let default_name = swig_fn(&name, "default");
// TODO: Add more derive capabilities
// Extracting the derived methods from `#[swig_derive(...)]`.
// We need to automatically add the SWIG code since we cant somehow
// add the `#[swiggen(Foo)]` attribute to the derived methods.
let derivs = get_derives(&self.attrs);
let new_toks = derivs.iter().filter_map(|w| {
match w.as_str() {
"Default" => {
Some(quote! {
#[allow(non_snake_case)]
#[no_mangle]
pub extern "C" fn #default_name() -> *mut #name {
Box::into_raw(Box::new(#name::default()))
}
})
},
_ => None
}
});
tokens.append_all(new_toks);
tokens
}
}
/// A method definition inside an impl block has an additional
/// `base` variable corresponding to the name of the type.
struct InternalFn<'a> {
base: &'a Option<syn::Ident>,
fn_def: &'a syn::ItemFn,
}
/// Convenience method to use cbindgen to convert types into C-compat types.
/// e.g. "input: u32" -> `cbindgen_write((input, u32))` might output `uint32 input`.
fn cbindgen_write<S: Source>(s: &S) -> String {
let mut buf = Vec::new();
{
let cfg = cbindgen::Config::default();
let mut sw = SourceWriter::new(&mut buf, &cfg);
s.write(&cfg, &mut sw);
}
String::from_utf8(buf).unwrap().replace("str", "char")
}
/// Hacky method to take a `&self` or `self` function argument and produce
/// something compatible with `extern "C"` method. Since we can't use `self`,
/// we coerce this to a pointer, and call the arg `wrapped_self`.
fn convert_self_type(arg: &syn::FnArg, base: &Option<syn::Ident>) -> syn::FnArg {
let base = base.clone().expect("Cannot convert `self` arg without provided base name.
Try: `#[swiggen(Foo)]` in macro");
let mut arg = arg.clone().into_token_stream().to_string();
arg = if arg.starts_with('&') {
arg.replace("&", "*const ")
} else {
"*mut ".to_string() + &arg
};
arg = format!("wrapped_self: {}", arg.replace("self", &base.to_string()));
syn::parse_str(&arg).unwrap()
}
/// For inputs, if the type is a primitive (as defined by cbindgen), we don't
/// do anything. Otherwise, assume we will take it in as a pointer.
fn convert_arg_type(syn::ArgCaptured { ref pat, ref ty, .. }: &syn::ArgCaptured) -> syn::FnArg {
if ty.clone().into_token_stream().to_string().ends_with("str") {
parse_quote!(#pat: *const c_char)
} else {
if needs_ref(ty) {
parse_quote!(#pat: *const #ty)
} else {
parse_quote!(#pat: #ty)
}
}
}
/// Similar to above, make sure that we return primitives when
/// recognised
fn convert_ret_type(rty: &syn::ReturnType, base: &Option<syn::Ident>) -> syn::ReturnType {
match rty {
syn::ReturnType::Default => syn::ReturnType::Default,
syn::ReturnType::Type(_, ty) => {
if needs_ref(ty) {
if ty.clone().into_token_stream().to_string() == "Self" {
let base = base.clone().expect("Cannot convert `Self` return type without provided base name.
Try: `#[swiggen(Foo)]` in macro");
parse_quote!(-> *mut #base)
} else if ty.clone().into_token_stream().to_string() == "String" {
parse_quote!(-> *mut c_char)
} else {
parse_quote!(-> *mut #ty)
}
} else {
parse_quote!(-> #ty)
}
}
}
}
/// For paths, assume we can convert to an opaque pointer.
fn needs_ref(ty: &syn::Type) -> bool {
match ty::Type::load(ty) {
Ok(Some(ty::Type::Primitive(_))) => false,
Ok(Some(ty::Type::Path(_)))=> true,
_ => false,
}
}
impl<'a> AsExtern for InternalFn<'a> {
fn as_extern(&self) -> TokenStream {
// Messy blob of code to convert function name, arguments, types,
// return type and generate appropriate code.
// Should be extracted out into smaller functions.
let name = &self.fn_def.ident;
let ext_name = swig_fn(&name, "ffi");
let mut args = Vec::<TokenStream>::new();
let mut caller = Vec::<syn::Ident>::new();
let mut caller_ref = Vec::<TokenStream>::new();
self.fn_def.decl.inputs.iter().for_each(|ref arg| {
match arg {
syn::FnArg::SelfRef(_) | syn::FnArg::SelfValue(_) => {
// For self methods, we do some extra work to wrap the
// function so that `impl Foo { fn bar(&self); }`
// becomes `Foo_bar(wrapped_self: *const Foo)`.
let wrapped_self = convert_self_type(&arg, self.base);
args.push(wrapped_self.into_token_stream());
let ws = syn::Ident::new("wrapped_self", Span::call_site());
caller.push(ws.clone());
caller_ref.push(quote!{@ref #ws});
}
syn::FnArg::Captured(ref ac) => {
let id = match &ac.pat {
syn::Pat::Ident(pi) => {
&pi.ident
},
_ => unimplemented!(),
};
args.push(convert_arg_type(ac).into_token_stream());
caller.push(id.clone());
// this later calls the appropriate macro function as to
// whether we need to do some pointer/box stuff
if ac.ty.clone().into_token_stream().to_string().ends_with("str") {
caller_ref.push(quote!{@str #id});
} else if let syn::Type::Reference(_) = ac.ty {
caller_ref.push(quote!{@ref #id});
} else {
caller_ref.push(quote!{@prim #id});
}
},
_ => ()
}
});
let base = self.base;
let out = convert_ret_type(&self.fn_def.decl.output, self.base);
// Similar to the above, this later calls the appropriate macro function
// as to whether we need to do some pointer/box stuff
let res_ref = if let syn::ReturnType::Type(_, ref ty) = self.fn_def.decl.output {
if ty.clone().into_token_stream().to_string() == "String" {
quote!{@str res}
} else if needs_ref(&ty) {
quote!{res}
} else {
quote!{@prim res}
}
} else {
quote!{@prim res}
};
/// Generate the function. We also inject some macro
/// definitions to help with converting pointers into types and types
/// into pointers.
let tokens = quote! {
#[allow(non_snake_case)]
#[no_mangle]
pub extern "C" fn #ext_name(#(#args),*) #out {
#(ffi_ref!(#caller_ref);)*
let res = #base::#name(#(#caller),*);
box_ptr!(#res_ref)
}
};
tokens
}
}
/// Helper function to define the exported/mangled names.
fn swig_fn(name: &syn::Ident, fn_name: &str) -> syn::Ident {
syn::Ident::new(&format!("{}{}_{}", SwigTag::SwigInject, fn_name, name), Span::call_site())
}
fn swig_free(name: &syn::Ident) -> syn::Ident {
swig_fn(name, "free")
}
impl ToSwig for syn::DeriveInput {
fn to_swig(&self) -> String {
/// Generate the SWIG wrapper code as a string.
/// Basically, a class for the Rust struct `Foo` is just a wrapper
/// class called `Foo` which contains a pointer to the actual Rust
/// object.
// prefix with tag
let mut swigged = SwigTag::CodeStart.to_string();
let mut swigged_h = SwigTag::HdrStart.to_string();
let name = &self.ident;
match &self.data {
syn::Data::Struct(ref _ds) => {
// simple wrapper definition to wrap opaque pointer.
// methods get added elsewhere
swigged.push_str(&format!("\
// Wrapper for Rust class {name}
class {name} {{
public:
ffi::{name} *self;
{name}(ffi::{name} *ptr) {{
self = ptr;
}};
~{name}(){{
ffi::{free_name}(self);
self = NULL;
}};
", name=name, free_name=swig_free(&name))
);
swigged_h.push_str(&format!("\
// Wrapper for Rust class {name}
class {name} {{
ffi::{name} *self;
public:
~{name}();
", name=name)
);
// pull out any derive implementations we want to wrap
// TODO: do this in a less ad-hoc way
get_derives(&self.attrs).iter().for_each(|w| {
match w.as_str() {
"Default" => {
swigged.push_str(&format!(
"{name}() {{ self = {def_name}(); }};\n",
name=name, def_name=swig_fn(&name, "default")
));
swigged_h.push_str(&format!("{}();\n",name));
},
_ => (),
}
});
swigged.push_str("};\n");
swigged_h.push_str("};\n");
},
_ => unimplemented!(),
}
swigged.push_str(&SwigTag::CodeEnd.to_str());
swigged_h.push_str(&SwigTag::HdrEnd.to_str());
swigged.push_str(&swigged_h);
swigged
}
}
impl<'a> ToSwig for InternalFn<'a> {
fn to_swig(&self) -> String {
// Generate SWIG wrapper for methods.
// Main complication is making sure that namespaces are correct since
// we are basically overwriting names.
// Also a bit of magic to take an impl method, and add it back into
// being a class method.
// prefix with tag
let mut swigged = SwigTag::CodeStart.to_string();
let mut swigged_h = SwigTag::HdrStart.to_string();
let name = &self.fn_def.ident;
let cb_fn = cbindgen::ir::Function::load(name.to_string(),
&self.fn_def.decl,
true,
&[],
&None).unwrap();
let mut args = String::new();
let mut caller = String::new();
// Convert function arguments
cb_fn.args.iter().for_each(|arg| {
if args.len() > 0 {
args += ", ";
}
if caller.len() > 0 {
caller += ", ";
}
if arg.0 == "self" {
caller += "$self->self";
} else {
args += &cbindgen_write(arg);
caller += &arg.0;
}
});
// Convert return type
let mut out = cbindgen_write(&cb_fn.ret);
if out == "Self" {
out = self.base.clone().expect("Cannot convert `Self` return type without provided base name.
Try: `#[swiggen(Foo)]` in macro").to_string();
} else if out == "String" {
out = "char *".to_string()
}
let mut ret_out = out.clone();
// Convert function name.
let name = if name.to_string() == "new" {
// Custom format for new functions
ret_out = "".to_string();
out = "new PKG_NAME::".to_string() + &out;
self.base.clone().expect("Cannot convert `Self` return type without provided base name.
Try: `#[swiggen(Foo)]` in macro").to_string()
} else {
name.to_string()
};
// Get the mangled name exported by Rust
let ext_name = swig_fn(&self.fn_def.ident, "ffi");
// The following code generates the function definitions and the header
// Code needed for SWIG to generate bindings.
if self.base.is_none() {
swigged.push_str(&format!("\
{ret_out} {name}({args}) {{
return ({out})(ffi::{ext_name}({caller}));
}}"
, name=name, ext_name=ext_name, out=out, ret_out=ret_out, args=args, caller=caller));
}
if let Some(base) = self.base {
// Note the %extend is used by SWIG to make this a class method for
// `base`.
swigged_h.push_str(&format!("
%extend {base_name} {{
{ret_out} {name}({args}) {{
return ({out})(ffi::{ext_name}({caller}));
}}
}};\n"
,name=name, ext_name=ext_name, base_name=base, ret_out=ret_out, out=out, args=args, caller=caller));
} else {
swigged_h.push_str(&format!("\
{out} {name}({args});"
, name=name, out=out, args=args));
}
swigged.push_str(&SwigTag::CodeEnd.to_str());
swigged_h.push_str(&SwigTag::HdrEnd.to_str());
swigged.push_str(&swigged_h);
swigged
}
}
/// Generate extern and SWIG code for a `#[derive(Swig)]` annotated item.
pub fn impl_extern_it(ast: &syn::DeriveInput) -> TokenStream {
let comment = ast.to_swig();
let comment = format!("#[doc=\"{}\"] #[allow(non_camel_case_types)] struct {}{};", comment, SwigTag::SwigInject, ast.ident);
let doc_comment: syn::ItemStruct = syn::parse_str(&comment).expect("failed to generate SWIG code correctly");
let mut tokens: TokenStream = doc_comment.into_token_stream();
tokens.append_all(ast.as_extern().into_iter());
tokens
}
/// Generate extern and SWIG code for a `#[swiggen]` annotated method.
pub fn impl_extern_fn(base_name: &Option<syn::Ident>, ast: &syn::ItemFn) -> TokenStream {
let ifn = InternalFn {
base: base_name,
fn_def: ast,
};
let tok = ifn.as_extern();
let comment = ifn.to_swig();
let hidden = swig_fn(&ast.ident, "hidden_ffi");
quote! {
#[allow(non_snake_case)]
#[doc=#comment]
fn #hidden(){}
#tok
}
}
/// Write the swig code (injected via doc comments) into `swig.i`.
/// This parses expanded Rust code, and writes the SWIG code to a file.
pub fn gen_swig(pkg_name: &str, src: &str) {
let mut tmp_file = File::create("swig.i").unwrap();
tmp_file.write_all(format!("\
%module {name}
#define PKG_NAME {name}
%include <std_vector.i>
%include <stdint.i>
%include <std_string.i>
%typemap(newfree) char * \"free_string($1);\";
%{{
namespace ffi {{
#include \"bindings.h\"
}}
using namespace ffi;
namespace {name} {{
", name=pkg_name).as_bytes()).unwrap();
let syntax = syn::parse_file(&src).expect("Unable to parse file");
trace!("Syntax: {:#?}", syntax);
let mut hdr = String::new();
// SWIG code is inside doc comments:
// #[doc = "<swig code here>"]
// struct __SWIG_INJECT_Foo;
//
// So we extract this out.
syntax.items.iter().flat_map(|i| {
// Extract out all of the attributes which are attached to structs/functions
// starting with "__SWIG_INJECT"
match i {
syn::Item::Impl(ii) => {
ii.items.iter().fold(Vec::new(), |mut acc, ref ii| {
match ii {
syn::ImplItem::Method(iim) => { | }
acc
},
_ => Vec::new(),
}
})
},
syn::Item::Struct(syn::ItemStruct { attrs, ident, .. }) |
syn::Item::Fn(syn::ItemFn { attrs, ident, ..}) => {
if ident.to_string().starts_with(SwigTag::SwigInject.to_str()) {
debug!("{:#?}", attrs);
attrs.clone()
} else {
Vec::new()
}
},
_ => Vec::new()
}
}).for_each(|ref attr| {
match attr.interpret_meta() {
Some(syn::Meta::NameValue(ref mnv)) if &mnv.ident.to_string() == "doc" => {
// Extract out the doc comment for these attributes
if let syn::Lit::Str(ref ls) = mnv.lit {
let mut swig_class = ls.value().replace("\\n", "\n");
let prefix_offset = swig_class.find(SwigTag::CodeStart.to_str()).expect("no code prefix") + SwigTag::CodeStart.len();
let suffix_offset = swig_class.find(SwigTag::CodeEnd.to_str()).expect("no code suffix");
let final_class = &swig_class[prefix_offset..suffix_offset];
let prefix_offset = swig_class.find(SwigTag::HdrStart.to_str()).expect("no header prefix") + SwigTag::HdrStart.len();
let suffix_offset = swig_class.find(SwigTag::HdrEnd.to_str()).expect("no header suffix");
let final_hdr = &swig_class[prefix_offset..suffix_offset];
tmp_file.write_all(&final_class.replace("\\n", "\n").as_bytes()).unwrap();
hdr += &final_hdr.replace("\\n", "\n");
debug!("{}", final_hdr);
debug!("{}", final_class);
}
},
_ => ()
}
});
tmp_file.write_all(format!("\
}}
%}}
namespace {name} {{
{header}
}}
%ignore {inject};
%include \"bindings.h\";
", name=pkg_name, header=hdr, inject=SwigTag::SwigInject).as_bytes()).unwrap();
}
/// Extract out any `derive(Foo)` attributes.
fn get_derives(attrs: &[syn::Attribute]) -> Vec<String> {
attrs.iter().filter_map(|a| a.interpret_meta())
.filter_map(|a| {
if let syn::Meta::List(ml) = a {
Some(ml)
} else {
None
}
}).filter(|ml| ml.ident.to_string() == "swig_derive")
.flat_map(|ml| ml.nested)
.filter_map(|nm| {
if let syn::NestedMeta::Meta(m) = nm {
if let syn::Meta::Word(w) = m {
Some(w.to_string())
} else {
None
}
} else {
None
}
}).collect()
}
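// For example (illustrative input, not taken from this crate's tests): an attribute
// written as `#[swig_derive(Default, Clone)]` yields `vec!["Default", "Clone"]` here.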
/// Parse a Rust file to extract any extern "C" functions or
/// `#[swiggen]`-annotated methods and move these out of the impl block.
pub fn split_out_externs(ast: &syn::ItemImpl) -> TokenStream {
let mut tokens = TokenStream::new();
tokens.append_all(ast.items.iter().filter_map(|item| {
match item {
syn::ImplItem::Method(iim) => {
if iim.sig.abi.is_c(){
Some(item.into_token_stream())
} else {
let mut ret = None;
for attr in iim.attrs.iter().filter_map(|a| a.interpret_meta()) {
match attr {
syn::Meta::List(ml) => if ml.ident == syn::Ident::new("swiggen", Span::call_site()) {
if let Some(v) = ml.nested.first().map(|p| p.into_value()) {
match v {
syn::NestedMeta::Meta(m) => {
let base_name = Some(m.name());
ret = Some(impl_extern_fn(&base_name, &iim_to_itemfn(iim.clone())));
},
_ => {}
}
}
},
_ => {}
}
}
ret
}
},
_ => None,
}
}));
quote!{
#ast
#tokens
}
}
#[derive(Debug)]
pub struct Args(pub Option<syn::Ident>);
// Extract an `Option<Ident>` from `(T)` or `""`.
impl syn::synom::Synom for Args {
named!(parse -> Self, map!(option!(map!(
parens!(syn!(syn::Ident)),
|(_parens, id)| id
)), |o| Args(o)));
}
fn iim_to_itemfn(iim: syn::ImplItemMethod) -> syn::ItemFn {
syn::ItemFn {
attrs: iim.attrs,
vis: iim.vis,
constness: iim.sig.constness,
unsafety: iim.sig.unsafety,
abi: iim.sig.abi,
ident: iim.sig.ident,
decl: Box::new(iim.sig.decl),
block: Box::new(iim.block),
}
} | debug!("{:#?}", iim);
if iim.sig.ident.to_string().starts_with(SwigTag::SwigInject.to_str()) {
acc.extend_from_slice(&iim.attrs[..]); | random_line_split |
lib.rs | #![allow(unused_doc_comment)]
/// # swiggen
///
/// The `swiggen` library is used to generate `extern "C"` definitions and
/// SWIG wrapper code from Rust functions.
///
/// This basically does two things: generates the `extern "C"` methods by
/// applying typemaps from cbindgen, or some fairly crude heuristics -
/// such as converting an opaque `Foo` into a `*mut Foo`, and running
/// `Box::into_raw(Box::new(foo))` to convert it into a pointer.
///
/// These exported functions all have mangled names like `__SWIG_INJECT_new_Foo`.
/// The code also generates SWIG wrapper code which wraps these functions so
/// that `Foo` behaves like a native object with methods like `Foo.new`.
/// The SWIG code is injected into the expanded Rust source code through doc
/// comments on various structs/functions.
extern crate cbindgen;
#[macro_use]
extern crate log;
extern crate proc_macro2;
#[macro_use]
extern crate quote;
#[macro_use]
extern crate syn;
use proc_macro2::{Span, TokenStream};
use quote::ToTokens;
use quote::TokenStreamExt;
use std::fmt;
use std::fs::File;
use std::io::Write;
use std::str;
use cbindgen::ir::ty;
use cbindgen::utilities::SynAbiHelpers;
use cbindgen::writer::{Source, SourceWriter};
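// Illustrative usage sketch of the attributes this library backs (hypothetical
// user crate; `Swig`, `swig_derive` and `swiggen` are assumed to be re-exported
// by a companion proc-macro crate):
//
// #[derive(Swig)]
// #[swig_derive(Default)]
// struct Counter { value: u32 }
//
// impl Counter {
//     #[swiggen(Counter)]
//     fn value(&self) -> u32 { self.value }
// }
//
// Expansion emits mangled `__SWIG_INJECT_*` extern "C" functions plus SWIG
// wrapper text carried in doc comments, which `gen_swig` later extracts.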
/// Tags used to indicate swig binding code injected into the Rust source.
enum SwigTag {
CodeStart,
CodeEnd,
HdrStart,
HdrEnd,
SwigInject,
}
impl fmt::Display for SwigTag {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let tag = self.to_str();
write!(f, "{}", tag)
}
}
impl SwigTag {
fn to_str(&self) -> &'static str {
match self {
SwigTag::CodeStart => "__SWIG_CODE\n",
SwigTag::CodeEnd => "__SWIG_END_CODE\n",
SwigTag::HdrStart => "__SWIG_HDR\n",
SwigTag::HdrEnd => "__SWIG_END_HDR\n",
SwigTag::SwigInject=> "__SWIG_INJECT_",
}
}
#[inline]
fn len(&self) -> usize {
match self {
SwigTag::CodeStart => "__SWIG_CODE\n",
SwigTag::CodeEnd => "__SWIG_END_CODE\n",
SwigTag::HdrStart => "__SWIG_HDR\n",
SwigTag::HdrEnd => "__SWIG_END_HDR\n",
SwigTag::SwigInject=> "__SWIG_INJECT_",
}.len()
}
}
pub trait ToSwig {
fn to_swig(&self) -> String;
}
/// A type implementing `AsExtern` can be converted into a type compatible with
/// `extern "C"` functions.
pub trait AsExtern {
fn as_extern(&self) -> TokenStream;
}
impl AsExtern for syn::DeriveInput {
fn as_extern(&self) -> TokenStream {
let name = &self.ident;
let free_name = swig_free(&name);
// For a struct we want to derive Swig for, we add a `free_Foo`
// method so we can free it from SWIG code.
let mut tokens = quote! {
#[allow(non_snake_case)]
#[no_mangle]
pub extern "C" fn #free_name(arg: *mut #name) {
unsafe {
assert!(!arg.is_null());
// reclaim the Box so the value is actually dropped
drop(Box::from_raw(arg));
}
}
};
let default_name = swig_fn(&name, "default");
// TODO: Add more derive capabilities
// Extracting the derived methods from `#[swig_derive(...)]`.
// We need to automatically add the SWIG code since we have no way to
// add the `#[swiggen(Foo)]` attribute to the derived methods.
let derivs = get_derives(&self.attrs);
let new_toks = derivs.iter().filter_map(|w| {
match w.as_str() {
"Default" => {
Some(quote! {
#[allow(non_snake_case)]
#[no_mangle]
pub extern "C" fn #default_name() -> *mut #name {
Box::into_raw(Box::new(#name::default()))
}
})
},
_ => None
}
});
tokens.append_all(new_toks);
tokens
}
}
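// For illustration (hypothetical struct, not emitted verbatim anywhere in this
// file): a `Foo` annotated with `#[swig_derive(Default)]` expands roughly to
//
// pub extern "C" fn __SWIG_INJECT_free_Foo(arg: *mut Foo) { /* null-check + drop */ }
// pub extern "C" fn __SWIG_INJECT_default_Foo() -> *mut Foo {
//     Box::into_raw(Box::new(Foo::default()))
// }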
/// A method definition inside an impl block has an additional
/// `base` variable corresponding to the name of the type.
struct InternalFn<'a> {
base: &'a Option<syn::Ident>,
fn_def: &'a syn::ItemFn,
}
/// Convenience method to use cbindgen to convert types into C-compat types.
/// e.g. "input: u32" -> `cbindgen_write((input, u32))` might output `uint32 input`.
fn cbindgen_write<S: Source>(s: &S) -> String {
let mut buf = Vec::new();
{
let cfg = cbindgen::Config::default();
let mut sw = SourceWriter::new(&mut buf, &cfg);
s.write(&cfg, &mut sw);
}
String::from_utf8(buf).unwrap().replace("str", "char")
}
/// Hacky method to take a `&self` or `self` function argument and produce
/// something compatible with an `extern "C"` method. Since we can't use `self`,
/// we coerce this to a pointer, and call the arg `wrapped_self`.
fn convert_self_type(arg: &syn::FnArg, base: &Option<syn::Ident>) -> syn::FnArg {
let base = base.clone().expect("Cannot convert `self` arg without provided base name.
Try: `#[swiggen(Foo)]` in macro");
let mut arg = arg.clone().into_token_stream().to_string();
arg = if arg.starts_with('&') {
arg.replace("&", "*const ")
} else {
"*mut ".to_string() + &arg
};
arg = format!("wrapped_self: {}", arg.replace("self", &base.to_string()));
syn::parse_str(&arg).unwrap()
}
/// For inputs, if the type is a primitive (as defined by cbindgen), we don't
/// do anything. Otherwise, assume we will take it in as a pointer.
fn convert_arg_type(syn::ArgCaptured { ref pat, ref ty, .. }: &syn::ArgCaptured) -> syn::FnArg {
if ty.clone().into_token_stream().to_string().ends_with("str") {
parse_quote!(#pat: *const c_char)
} else {
if needs_ref(ty) {
parse_quote!(#pat: *const #ty)
} else {
parse_quote!(#pat: #ty)
}
}
}
/// Similar to the above, make sure that we return primitives when
/// they are recognised
fn convert_ret_type(rty: &syn::ReturnType, base: &Option<syn::Ident>) -> syn::ReturnType {
match rty {
syn::ReturnType::Default => syn::ReturnType::Default,
syn::ReturnType::Type(_, ty) => {
if needs_ref(ty) {
if ty.clone().into_token_stream().to_string() == "Self" {
let base = base.clone().expect("Cannot convert `Self` return type without provided base name.
Try: `#[swiggen(Foo)]` in macro");
parse_quote!(-> *mut #base)
} else if ty.clone().into_token_stream().to_string() == "String" {
parse_quote!(-> *mut c_char)
} else {
parse_quote!(-> *mut #ty)
}
} else {
parse_quote!(-> #ty)
}
}
}
}
/// For paths, assume we can convert to an opaque pointer.
fn needs_ref(ty: &syn::Type) -> bool {
match ty::Type::load(ty) {
Ok(Some(ty::Type::Primitive(_))) => false,
Ok(Some(ty::Type::Path(_)))=> true,
_ => false,
}
}
impl<'a> AsExtern for InternalFn<'a> {
fn as_extern(&self) -> TokenStream {
// Messy blob of code to convert function name, arguments, types,
// return type and generate appropriate code.
// Should be extracted out into smaller functions.
let name = &self.fn_def.ident;
let ext_name = swig_fn(&name, "ffi");
let mut args = Vec::<TokenStream>::new();
let mut caller = Vec::<syn::Ident>::new();
let mut caller_ref = Vec::<TokenStream>::new();
self.fn_def.decl.inputs.iter().for_each(|ref arg| {
match arg {
syn::FnArg::SelfRef(_) | syn::FnArg::SelfValue(_) => {
// For self methods, we do some extra work to wrap the
// function so that `impl Foo { fn bar(&self); }`
// becomes `Foo_bar(wrapped_self: *const Foo)`.
let wrapped_self = convert_self_type(&arg, self.base);
args.push(wrapped_self.into_token_stream());
let ws = syn::Ident::new("wrapped_self", Span::call_site());
caller.push(ws.clone());
caller_ref.push(quote!{@ref #ws});
}
syn::FnArg::Captured(ref ac) => {
let id = match &ac.pat {
syn::Pat::Ident(pi) => {
&pi.ident
},
_ => unimplemented!(),
};
args.push(convert_arg_type(ac).into_token_stream());
caller.push(id.clone());
// this later picks the appropriate macro arm depending on whether
// we need to do some pointer/box conversion
if ac.ty.clone().into_token_stream().to_string().ends_with("str") {
caller_ref.push(quote!{@str #id});
} else if let syn::Type::Reference(_) = ac.ty {
caller_ref.push(quote!{@ref #id});
} else {
caller_ref.push(quote!{@prim #id});
}
},
_ => ()
}
});
let base = self.base;
let out = convert_ret_type(&self.fn_def.decl.output, self.base);
// Similar to the above, this later picks the appropriate macro arm
// depending on whether we need to do some pointer/box conversion
let res_ref = if let syn::ReturnType::Type(_, ref ty) = self.fn_def.decl.output {
if ty.clone().into_token_stream().to_string() == "String" {
quote!{@str res}
} else if needs_ref(&ty) {
quote!{res}
} else {
quote!{@prim res}
}
} else {
quote!{@prim res}
};
/// Generate the function. We also inject some macro
/// definitions to help with converting pointers into types and types
/// into pointers.
let tokens = quote! {
#[allow(non_snake_case)]
#[no_mangle]
pub extern "C" fn #ext_name(#(#args),*) #out {
#(ffi_ref!(#caller_ref);)*
let res = #base::#name(#(#caller),*);
box_ptr!(#res_ref)
}
};
tokens
}
}
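// Rough shape of the expansion for a hypothetical `impl Foo { fn get(&self) -> u32 }`
// tagged `#[swiggen(Foo)]` (the `ffi_ref!`/`box_ptr!` helpers are assumed to be
// provided elsewhere by the crate consuming this expansion):
//
// #[allow(non_snake_case)]
// #[no_mangle]
// pub extern "C" fn __SWIG_INJECT_ffi_get(wrapped_self: *const Foo) -> u32 {
//     ffi_ref!(@ref wrapped_self);
//     let res = Foo::get(wrapped_self);
//     box_ptr!(@prim res)
// }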
/// Helper function to define the exported/mangled names.
fn swig_fn(name: &syn::Ident, fn_name: &str) -> syn::Ident {
syn::Ident::new(&format!("{}{}_{}", SwigTag::SwigInject, fn_name, name), Span::call_site())
}
fn swig_free(name: &syn::Ident) -> syn::Ident {
swig_fn(name, "free")
}
impl ToSwig for syn::DeriveInput {
fn to_swig(&self) -> String {
/// Generate the SWIG wrapper code as a string.
/// Basically, a class for the Rust struct `Foo` is just a wrapper
/// class called `Foo` which contains a pointer to the actual Rust
/// object.
// prefix with tag
let mut swigged = SwigTag::CodeStart.to_string();
let mut swigged_h = SwigTag::HdrStart.to_string();
let name = &self.ident;
match &self.data {
syn::Data::Struct(ref _ds) => {
// simple wrapper definition to wrap opaque pointer.
// methods get added elsewhere
swigged.push_str(&format!("\
// Wrapper for Rust class {name}
class {name} {{
public:
ffi::{name} *self;
{name}(ffi::{name} *ptr) {{
self = ptr;
}};
~{name}(){{
ffi::{free_name}(self);
self = NULL;
}};
", name=name, free_name=swig_free(&name))
);
swigged_h.push_str(&format!("\
// Wrapper for Rust class {name}
class {name} {{
ffi::{name} *self;
public:
~{name}();
", name=name)
);
// pull out any derive implementations we want to wrap
// TODO: do this in a less ad-hoc way
get_derives(&self.attrs).iter().for_each(|w| {
match w.as_str() {
"Default" => {
swigged.push_str(&format!(
"{name}() {{ self = {def_name}(); }};\n",
name=name, def_name=swig_fn(&name, "default")
));
swigged_h.push_str(&format!("{}();\n",name));
},
_ => (),
}
});
swigged.push_str("};\n");
swigged_h.push_str("};\n");
},
_ => unimplemented!(),
}
swigged.push_str(&SwigTag::CodeEnd.to_str());
swigged_h.push_str(&SwigTag::HdrEnd.to_str());
swigged.push_str(&swigged_h);
swigged
}
}
impl<'a> ToSwig for InternalFn<'a> {
fn to_swig(&self) -> String {
// Generate SWIG wrapper for methods.
// Main complication is making sure that namespaces are correct since
// we are basically overwriting names.
// Also a bit of magic to take an impl method, and add it back into
// being a class method.
// prefix with tag
let mut swigged = SwigTag::CodeStart.to_string();
let mut swigged_h = SwigTag::HdrStart.to_string();
let name = &self.fn_def.ident;
let cb_fn = cbindgen::ir::Function::load(name.to_string(),
&self.fn_def.decl,
true,
&[],
&None).unwrap();
let mut args = String::new();
let mut caller = String::new();
// Convert function arguments
cb_fn.args.iter().for_each(|arg| {
if args.len() > 0 {
args += ", ";
}
if caller.len() > 0 {
caller += ", ";
}
if arg.0 == "self" {
caller += "$self->self";
} else {
args += &cbindgen_write(arg);
caller += &arg.0;
}
});
// Convert return type
let mut out = cbindgen_write(&cb_fn.ret);
if out == "Self" {
out = self.base.clone().expect("Cannot convert `Self` return type without provided base name.
Try: `#[swiggen(Foo)]` in macro").to_string();
} else if out == "String" {
out = "char *".to_string()
}
let mut ret_out = out.clone();
// Convert function name.
let name = if name.to_string() == "new" {
// Custom format for new functions
ret_out = "".to_string();
out = "new PKG_NAME::".to_string() + &out;
self.base.clone().expect("Cannot convert `Self` return type without provided base name.
Try: `#[swiggen(Foo)]` in macro").to_string()
} else {
name.to_string()
};
// Get the mangled name exported by Rust
let ext_name = swig_fn(&self.fn_def.ident, "ffi");
// The following code generates the function definitions and the header
// code needed for SWIG to generate bindings.
if self.base.is_none() {
swigged.push_str(&format!("\
{ret_out} {name}({args}) {{
return ({out})(ffi::{ext_name}({caller}));
}}"
, name=name, ext_name=ext_name, out=out, ret_out=ret_out, args=args, caller=caller));
}
if let Some(base) = self.base {
// Note the %extend is used by SWIG to make this a class method for
// `base`.
swigged_h.push_str(&format!("
%extend {base_name} {{
{ret_out} {name}({args}) {{
return ({out})(ffi::{ext_name}({caller}));
}}
}};\n"
,name=name, ext_name=ext_name, base_name=base, ret_out=ret_out, out=out, args=args, caller=caller));
} else {
swigged_h.push_str(&format!("\
{out} {name}({args});"
, name=name, out=out, args=args));
}
swigged.push_str(&SwigTag::CodeEnd.to_str());
swigged_h.push_str(&SwigTag::HdrEnd.to_str());
swigged.push_str(&swigged_h);
swigged
}
}
/// Generate extern and SWIG code for a `#[derive(Swig)]` annotated item.
pub fn impl_extern_it(ast: &syn::DeriveInput) -> TokenStream {
let comment = ast.to_swig();
let comment = format!("#[doc=\"{}\"] #[allow(non_camel_case_types)] struct {}{};", comment, SwigTag::SwigInject, ast.ident);
let doc_comment: syn::ItemStruct = syn::parse_str(&comment).expect("failed to generate SWIG code correctly");
let mut tokens: TokenStream = doc_comment.into_token_stream();
tokens.append_all(ast.as_extern().into_iter());
tokens
}
/// Generate extern and SWIG code for a `#[swiggen]` annotated method.
pub fn impl_extern_fn(base_name: &Option<syn::Ident>, ast: &syn::ItemFn) -> TokenStream |
/// Write the swig code (injected via doc comments) into `swig.i`.
/// This parses expanded Rust code, and writes the SWIG code to a file.
pub fn gen_swig(pkg_name: &str, src: &str) {
let mut tmp_file = File::create("swig.i").unwrap();
tmp_file.write_all(format!("\
%module {name}
#define PKG_NAME {name}
%include <std_vector.i>
%include <stdint.i>
%include <std_string.i>
%typemap(newfree) char * \"free_string($1);\";
%{{
namespace ffi {{
#include \"bindings.h\"
}}
using namespace ffi;
namespace {name} {{
", name=pkg_name).as_bytes()).unwrap();
let syntax = syn::parse_file(&src).expect("Unable to parse file");
trace!("Syntax: {:#?}", syntax);
let mut hdr = String::new();
// SWIG code is inside doc comments:
// #[doc = "<swig code here>"]
// struct __SWIG_INJECT_Foo;
//
// So we extract this out.
syntax.items.iter().flat_map(|i| {
// Extract out all of the attributes which are attached to structs/functions
// starting with "__SWIG_INJECT"
match i {
syn::Item::Impl(ii) => {
ii.items.iter().fold(Vec::new(), |mut acc, ref ii| {
match ii {
syn::ImplItem::Method(iim) => {
debug!("{:#?}", iim);
if iim.sig.ident.to_string().starts_with(SwigTag::SwigInject.to_str()) {
acc.extend_from_slice(&iim.attrs[..]);
}
acc
},
_ => Vec::new(),
}
})
},
syn::Item::Struct(syn::ItemStruct { attrs, ident, .. }) |
syn::Item::Fn(syn::ItemFn { attrs, ident, ..}) => {
if ident.to_string().starts_with(SwigTag::SwigInject.to_str()) {
debug!("{:#?}", attrs);
attrs.clone()
} else {
Vec::new()
}
},
_ => Vec::new()
}
}).for_each(|ref attr| {
match attr.interpret_meta() {
Some(syn::Meta::NameValue(ref mnv)) if &mnv.ident.to_string() == "doc" => {
// Extract out the doc comment for these attributes
if let syn::Lit::Str(ref ls) = mnv.lit {
let mut swig_class = ls.value().replace("\\n", "\n");
let prefix_offset = swig_class.find(SwigTag::CodeStart.to_str()).expect("no code prefix") + SwigTag::CodeStart.len();
let suffix_offset = swig_class.find(SwigTag::CodeEnd.to_str()).expect("no code suffix");
let final_class = &swig_class[prefix_offset..suffix_offset];
let prefix_offset = swig_class.find(SwigTag::HdrStart.to_str()).expect("no header prefix") + SwigTag::HdrStart.len();
let suffix_offset = swig_class.find(SwigTag::HdrEnd.to_str()).expect("no header suffix");
let final_hdr = &swig_class[prefix_offset..suffix_offset];
tmp_file.write_all(&final_class.replace("\\n", "\n").as_bytes()).unwrap();
hdr += &final_hdr.replace("\\n", "\n");
debug!("{}", final_hdr);
debug!("{}", final_class);
}
},
_ => ()
}
});
tmp_file.write_all(format!("\
}}
%}}
namespace {name} {{
{header}
}}
%ignore {inject};
%include \"bindings.h\";
", name=pkg_name, header=hdr, inject=SwigTag::SwigInject).as_bytes()).unwrap();
}
/// Extract out any `derive(Foo)` attributes.
fn get_derives(attrs: &[syn::Attribute]) -> Vec<String> {
attrs.iter().filter_map(|a| a.interpret_meta())
.filter_map(|a| {
if let syn::Meta::List(ml) = a {
Some(ml)
} else {
None
}
}).filter(|ml| ml.ident.to_string() == "swig_derive")
.flat_map(|ml| ml.nested)
.filter_map(|nm| {
if let syn::NestedMeta::Meta(m) = nm {
if let syn::Meta::Word(w) = m {
Some(w.to_string())
} else {
None
}
} else {
None
}
}).collect()
}
/// Parse a Rust file to extract any extern "C" functions or
/// `#[swiggen]`-annotated methods and move these out of the impl block.
pub fn split_out_externs(ast: &syn::ItemImpl) -> TokenStream {
let mut tokens = TokenStream::new();
tokens.append_all(ast.items.iter().filter_map(|item| {
match item {
syn::ImplItem::Method(iim) => {
if iim.sig.abi.is_c(){
Some(item.into_token_stream())
} else {
let mut ret = None;
for attr in iim.attrs.iter().filter_map(|a| a.interpret_meta()) {
match attr {
syn::Meta::List(ml) => if ml.ident == syn::Ident::new("swiggen", Span::call_site()) {
if let Some(v) = ml.nested.first().map(|p| p.into_value()) {
match v {
syn::NestedMeta::Meta(m) => {
let base_name = Some(m.name());
ret = Some(impl_extern_fn(&base_name, &iim_to_itemfn(iim.clone())));
},
_ => {}
}
}
},
_ => {}
}
}
ret
}
},
_ => None,
}
}));
quote!{
#ast
#tokens
}
}
#[derive(Debug)]
pub struct Args(pub Option<syn::Ident>);
// Extract an `Option<Ident>` from `(T)` or `""`.
impl syn::synom::Synom for Args {
named!(parse -> Self, map!(option!(map!(
parens!(syn!(syn::Ident)),
|(_parens, id)| id
)), |o| Args(o)));
}
fn iim_to_itemfn(iim: syn::ImplItemMethod) -> syn::ItemFn {
syn::ItemFn {
attrs: iim.attrs,
vis: iim.vis,
constness: iim.sig.constness,
unsafety: iim.sig.unsafety,
abi: iim.sig.abi,
ident: iim.sig.ident,
decl: Box::new(iim.sig.decl),
block: Box::new(iim.block),
}
}
| {
let ifn = InternalFn {
base: base_name,
fn_def: ast,
};
let tok = ifn.as_extern();
let comment = ifn.to_swig();
let hidden = swig_fn(&ast.ident, "hidden_ffi");
quote! {
#[allow(non_snake_case)]
#[doc=#comment]
fn #hidden(){}
#tok
}
} | identifier_body |
lib.rs | #![allow(unused_doc_comment)]
/// # swiggen
///
/// The `swiggen` library is used to generate `extern "C"` definitions and
/// SWIG wrapper code from Rust functions.
///
/// This basically does two things: generates the `extern "C"` methods by
/// applying typemaps from cbindgen, or some fairly crude heuristics -
/// such as converting an opaque `Foo` into a `*mut Foo`, and running
/// `Box::into_raw(Box::new(foo))` to convert it into a pointer.
///
/// These exported functions all have mangled names like `__SWIG_INJECT_new_Foo`.
/// The code also generates SWIG wrapper code which wraps these functions so
/// that `Foo` behaves like a native object with methods like `Foo.new`.
/// The SWIG code is injected into the expanded Rust source code through doc
/// comments on various structs/functions.
extern crate cbindgen;
#[macro_use]
extern crate log;
extern crate proc_macro2;
#[macro_use]
extern crate quote;
#[macro_use]
extern crate syn;
use proc_macro2::{Span, TokenStream};
use quote::ToTokens;
use quote::TokenStreamExt;
use std::fmt;
use std::fs::File;
use std::io::Write;
use std::str;
use cbindgen::ir::ty;
use cbindgen::utilities::SynAbiHelpers;
use cbindgen::writer::{Source, SourceWriter};
/// Tags used to indicate swig binding code injected into the Rust source.
enum SwigTag {
CodeStart,
CodeEnd,
HdrStart,
HdrEnd,
SwigInject,
}
impl fmt::Display for SwigTag {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let tag = self.to_str();
write!(f, "{}", tag)
}
}
impl SwigTag {
fn to_str(&self) -> &'static str {
match self {
SwigTag::CodeStart => "__SWIG_CODE\n",
SwigTag::CodeEnd => "__SWIG_END_CODE\n",
SwigTag::HdrStart => "__SWIG_HDR\n",
SwigTag::HdrEnd => "__SWIG_END_HDR\n",
SwigTag::SwigInject=> "__SWIG_INJECT_",
}
}
#[inline]
fn len(&self) -> usize {
match self {
SwigTag::CodeStart => "__SWIG_CODE\n",
SwigTag::CodeEnd => "__SWIG_END_CODE\n",
SwigTag::HdrStart => "__SWIG_HDR\n",
SwigTag::HdrEnd => "__SWIG_END_HDR\n",
SwigTag::SwigInject=> "__SWIG_INJECT_",
}.len()
}
}
pub trait ToSwig {
fn to_swig(&self) -> String;
}
/// A type implementing `AsExtern` can be converted into a type compatible with
/// `extern "C"` functions.
pub trait AsExtern {
fn as_extern(&self) -> TokenStream;
}
impl AsExtern for syn::DeriveInput {
fn as_extern(&self) -> TokenStream {
let name = &self.ident;
let free_name = swig_free(&name);
// For a struct we want to derive Swig for, we add a `free_Foo`
// method so we can free it from SWIG code.
let mut tokens = quote! {
#[allow(non_snake_case)]
#[no_mangle]
pub extern "C" fn #free_name(arg: *mut #name) {
unsafe {
assert!(!arg.is_null());
// reclaim the Box so the value is actually dropped
drop(Box::from_raw(arg));
}
}
};
let default_name = swig_fn(&name, "default");
// TODO: Add more derive capabilities
// Extracting the derived methods from `#[swig_derive(...)]`.
// We need to automatically add the SWIG code since we have no way to
// add the `#[swiggen(Foo)]` attribute to the derived methods.
let derivs = get_derives(&self.attrs);
let new_toks = derivs.iter().filter_map(|w| {
match w.as_str() {
"Default" => {
Some(quote! {
#[allow(non_snake_case)]
#[no_mangle]
pub extern "C" fn #default_name() -> *mut #name {
Box::into_raw(Box::new(#name::default()))
}
})
},
_ => None
}
});
tokens.append_all(new_toks);
tokens
}
}
/// A method definition inside an impl block has an additional
/// `base` variable corresponding to the name of the type.
struct InternalFn<'a> {
base: &'a Option<syn::Ident>,
fn_def: &'a syn::ItemFn,
}
/// Convenience method to use cbindgen to convert types into C-compat types.
/// e.g. "input: u32" -> `cbindgen_write((input, u32))` might output `uint32 input`.
fn cbindgen_write<S: Source>(s: &S) -> String {
let mut buf = Vec::new();
{
let cfg = cbindgen::Config::default();
let mut sw = SourceWriter::new(&mut buf, &cfg);
s.write(&cfg, &mut sw);
}
String::from_utf8(buf).unwrap().replace("str", "char")
}
/// Hacky method to take a `&self` or `self` function argument and produce
/// something compatible with an `extern "C"` method. Since we can't use `self`,
/// we coerce this to a pointer, and call the arg `wrapped_self`.
fn convert_self_type(arg: &syn::FnArg, base: &Option<syn::Ident>) -> syn::FnArg {
let base = base.clone().expect("Cannot convert `self` arg without provided base name.
Try: `#[swiggen(Foo)]` in macro");
let mut arg = arg.clone().into_token_stream().to_string();
arg = if arg.starts_with('&') {
arg.replace("&", "*const ")
} else {
"*mut ".to_string() + &arg
};
arg = format!("wrapped_self: {}", arg.replace("self", &base.to_string()));
syn::parse_str(&arg).unwrap()
}
/// For inputs, if the type is a primitive (as defined by cbindgen), we don't
/// do anything. Otherwise, assume we will take it in as a pointer.
fn convert_arg_type(syn::ArgCaptured { ref pat, ref ty, .. }: &syn::ArgCaptured) -> syn::FnArg {
if ty.clone().into_token_stream().to_string().ends_with("str") {
parse_quote!(#pat: *const c_char)
} else {
if needs_ref(ty) {
parse_quote!(#pat: *const #ty)
} else {
parse_quote!(#pat: #ty)
}
}
}
/// Similar to the above, make sure that we return primitives when
/// they are recognised
fn convert_ret_type(rty: &syn::ReturnType, base: &Option<syn::Ident>) -> syn::ReturnType {
match rty {
syn::ReturnType::Default => syn::ReturnType::Default,
syn::ReturnType::Type(_, ty) => {
if needs_ref(ty) {
if ty.clone().into_token_stream().to_string() == "Self" {
let base = base.clone().expect("Cannot convert `Self` return type without provided base name.
Try: `#[swiggen(Foo)]` in macro");
parse_quote!(-> *mut #base)
} else if ty.clone().into_token_stream().to_string() == "String" {
parse_quote!(-> *mut c_char)
} else {
parse_quote!(-> *mut #ty)
}
} else {
parse_quote!(-> #ty)
}
}
}
}
/// For paths, assume we can convert to an opaque pointer.
fn needs_ref(ty: &syn::Type) -> bool {
match ty::Type::load(ty) {
Ok(Some(ty::Type::Primitive(_))) => false,
Ok(Some(ty::Type::Path(_)))=> true,
_ => false,
}
}
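// Taken together, the helpers above map types roughly as follows (summary of
// this file's own rules, not an exhaustive table):
//
//   cbindgen primitives (u32, i64, ...) -> passed through unchanged
//   &str argument                       -> *const c_char
//   Foo / other path-type argument      -> *const Foo
//   Self return (with base `Foo`)       -> *mut Foo
//   String return                       -> *mut c_char
//   other path-type return              -> *mut Ty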
impl<'a> AsExtern for InternalFn<'a> {
fn as_extern(&self) -> TokenStream {
// Messy blob of code to convert function name, arguments, types,
// return type and generate appropriate code.
// Should be extracted out into smaller functions.
let name = &self.fn_def.ident;
let ext_name = swig_fn(&name, "ffi");
let mut args = Vec::<TokenStream>::new();
let mut caller = Vec::<syn::Ident>::new();
let mut caller_ref = Vec::<TokenStream>::new();
self.fn_def.decl.inputs.iter().for_each(|ref arg| {
match arg {
syn::FnArg::SelfRef(_) | syn::FnArg::SelfValue(_) => {
// For self methods, we do some extra work to wrap the
// function so that `impl Foo { fn bar(&self); }`
// becomes `Foo_bar(wrapped_self: *const Foo)`.
let wrapped_self = convert_self_type(&arg, self.base);
args.push(wrapped_self.into_token_stream());
let ws = syn::Ident::new("wrapped_self", Span::call_site());
caller.push(ws.clone());
caller_ref.push(quote!{@ref #ws});
}
syn::FnArg::Captured(ref ac) => {
let id = match &ac.pat {
syn::Pat::Ident(pi) => {
&pi.ident
},
_ => unimplemented!(),
};
args.push(convert_arg_type(ac).into_token_stream());
caller.push(id.clone());
// this later picks the appropriate macro arm depending on whether
// we need to do some pointer/box conversion
if ac.ty.clone().into_token_stream().to_string().ends_with("str") {
caller_ref.push(quote!{@str #id});
} else if let syn::Type::Reference(_) = ac.ty {
caller_ref.push(quote!{@ref #id});
} else {
caller_ref.push(quote!{@prim #id});
}
},
_ => ()
}
});
let base = self.base;
let out = convert_ret_type(&self.fn_def.decl.output, self.base);
// Similar to the above, this later picks the appropriate macro arm
// depending on whether we need to do some pointer/box conversion
let res_ref = if let syn::ReturnType::Type(_, ref ty) = self.fn_def.decl.output {
if ty.clone().into_token_stream().to_string() == "String" {
quote!{@str res}
} else if needs_ref(&ty) {
quote!{res}
} else {
quote!{@prim res}
}
} else {
quote!{@prim res}
};
/// Generate the function. We also inject some macro
/// definitions to help with converting pointers into types and types
/// into pointers.
let tokens = quote! {
#[allow(non_snake_case)]
#[no_mangle]
pub extern "C" fn #ext_name(#(#args),*) #out {
#(ffi_ref!(#caller_ref);)*
let res = #base::#name(#(#caller),*);
box_ptr!(#res_ref)
}
};
tokens
}
}
/// Helper function to define the exported/mangled names.
fn swig_fn(name: &syn::Ident, fn_name: &str) -> syn::Ident {
syn::Ident::new(&format!("{}{}_{}", SwigTag::SwigInject, fn_name, name), Span::call_site())
}
fn swig_free(name: &syn::Ident) -> syn::Ident {
swig_fn(name, "free")
}
impl ToSwig for syn::DeriveInput {
fn to_swig(&self) -> String {
/// Generate the SWIG wrapper code as a string.
/// Basically, a class for the Rust struct `Foo` is just a wrapper
/// class called `Foo` which contains a pointer to the actual Rust
/// object.
// prefix with tag
let mut swigged = SwigTag::CodeStart.to_string();
let mut swigged_h = SwigTag::HdrStart.to_string();
let name = &self.ident;
match &self.data {
syn::Data::Struct(ref _ds) => {
// simple wrapper definition to wrap opaque pointer.
// methods get added elsewhere
swigged.push_str(&format!("\
// Wrapper for Rust class {name}
class {name} {{
public:
ffi::{name} *self;
{name}(ffi::{name} *ptr) {{
self = ptr;
}};
~{name}(){{
ffi::{free_name}(self);
self = NULL;
}};
", name=name, free_name=swig_free(&name))
);
swigged_h.push_str(&format!("\
// Wrapper for Rust class {name}
class {name} {{
ffi::{name} *self;
public:
~{name}();
", name=name)
);
// pull out any derive implementations we want to wrap
// TODO: do this in a less ad-hoc way
get_derives(&self.attrs).iter().for_each(|w| {
match w.as_str() {
"Default" => {
swigged.push_str(&format!(
"{name}() {{ self = {def_name}(); }};\n",
name=name, def_name=swig_fn(&name, "default")
));
swigged_h.push_str(&format!("{}();\n",name));
},
_ => (),
}
});
swigged.push_str("};\n");
swigged_h.push_str("};\n");
},
_ => unimplemented!(),
}
swigged.push_str(&SwigTag::CodeEnd.to_str());
swigged_h.push_str(&SwigTag::HdrEnd.to_str());
swigged.push_str(&swigged_h);
swigged
}
}
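// Filled in for a hypothetical struct `Foo`, the CodeStart payload above reads
// roughly as the following SWIG class (assuming no extra derives):
//
//   class Foo {
//       public:
//           ffi::Foo *self;
//           Foo(ffi::Foo *ptr) { self = ptr; };
//           ~Foo(){ ffi::__SWIG_INJECT_free_Foo(self); self = NULL; };
//   };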
impl<'a> ToSwig for InternalFn<'a> {
fn to_swig(&self) -> String {
// Generate SWIG wrapper for methods.
// Main complication is making sure that namespaces are correct since
// we are basically overwriting names.
// Also a bit of magic to take an impl method, and add it back into
// being a class method.
// prefix with tag
let mut swigged = SwigTag::CodeStart.to_string();
let mut swigged_h = SwigTag::HdrStart.to_string();
let name = &self.fn_def.ident;
let cb_fn = cbindgen::ir::Function::load(name.to_string(),
&self.fn_def.decl,
true,
&[],
&None).unwrap();
let mut args = String::new();
let mut caller = String::new();
// Convert function arguments
cb_fn.args.iter().for_each(|arg| {
if args.len() > 0 {
args += ", ";
}
if caller.len() > 0 {
caller += ", ";
}
if arg.0 == "self" {
caller += "$self->self";
} else {
args += &cbindgen_write(arg);
caller += &arg.0;
}
});
// Convert return type
let mut out = cbindgen_write(&cb_fn.ret);
if out == "Self" {
out = self.base.clone().expect("Cannot convert `Self` return type without provided base name.
Try: `#[swiggen(Foo)]` in macro").to_string();
} else if out == "String" {
out = "char *".to_string()
}
let mut ret_out = out.clone();
// Convert function name.
let name = if name.to_string() == "new" {
// Custom format for new functions
ret_out = "".to_string();
out = "new PKG_NAME::".to_string() + &out;
self.base.clone().expect("Cannot convert `Self` return type without provided base name.
Try: `#[swiggen(Foo)]` in macro").to_string()
} else {
name.to_string()
};
// Get the mangled name exported by Rust
let ext_name = swig_fn(&self.fn_def.ident, "ffi");
// The following code generates the function definitions and the header
// code needed for SWIG to generate bindings.
if self.base.is_none() {
swigged.push_str(&format!("\
{ret_out} {name}({args}) {{
return ({out})(ffi::{ext_name}({caller}));
}}"
, name=name, ext_name=ext_name, out=out, ret_out=ret_out, args=args, caller=caller));
}
if let Some(base) = self.base {
// Note the %extend is used by SWIG to make this a class method for
// `base`.
swigged_h.push_str(&format!("
%extend {base_name} {{
{ret_out} {name}({args}) {{
return ({out})(ffi::{ext_name}({caller}));
}}
}};\n"
,name=name, ext_name=ext_name, base_name=base, ret_out=ret_out, out=out, args=args, caller=caller));
} else {
swigged_h.push_str(&format!("\
{out} {name}({args});"
, name=name, out=out, args=args));
}
swigged.push_str(&SwigTag::CodeEnd.to_str());
swigged_h.push_str(&SwigTag::HdrEnd.to_str());
swigged.push_str(&swigged_h);
swigged
}
}
/// Generate extern and SWIG code for a `#[derive(Swig)]` annotated item.
pub fn impl_extern_it(ast: &syn::DeriveInput) -> TokenStream {
let comment = ast.to_swig();
let comment = format!("#[doc=\"{}\"] #[allow(non_camel_case_types)] struct {}{};", comment, SwigTag::SwigInject, ast.ident);
let doc_comment: syn::ItemStruct = syn::parse_str(&comment).expect("failed to generate SWIG code correctly");
let mut tokens: TokenStream = doc_comment.into_token_stream();
tokens.append_all(ast.as_extern().into_iter());
tokens
}
/// Generate extern and SWIG code for a `#[swiggen]` annotated method.
pub fn impl_extern_fn(base_name: &Option<syn::Ident>, ast: &syn::ItemFn) -> TokenStream {
let ifn = InternalFn {
base: base_name,
fn_def: ast,
};
let tok = ifn.as_extern();
let comment = ifn.to_swig();
let hidden = swig_fn(&ast.ident, "hidden_ffi");
quote! {
#[allow(non_snake_case)]
#[doc=#comment]
fn #hidden(){}
#tok
}
}
/// Write the swig code (injected via doc comments) into `swig.i`.
/// This parses expanded Rust code, and writes the SWIG code to a file.
pub fn gen_swig(pkg_name: &str, src: &str) {
let mut tmp_file = File::create("swig.i").unwrap();
tmp_file.write_all(format!("\
%module {name}
#define PKG_NAME {name}
%include <std_vector.i>
%include <stdint.i>
%include <std_string.i>
%typemap(newfree) char * \"free_string($1);\";
%{{
namespace ffi {{
#include \"bindings.h\"
}}
using namespace ffi;
namespace {name} {{
", name=pkg_name).as_bytes()).unwrap();
let syntax = syn::parse_file(&src).expect("Unable to parse file");
trace!("Syntax: {:#?}", syntax);
let mut hdr = String::new();
// SWIG code is inside doc comments:
// #[doc = "<swig code here>"]
// struct __SWIG_INJECT_Foo;
//
// So we extract this out.
syntax.items.iter().flat_map(|i| {
// Extract out all of the attributes which are attached to structs/functions
// starting with "__SWIG_INJECT"
match i {
syn::Item::Impl(ii) => {
ii.items.iter().fold(Vec::new(), |mut acc, ref ii| {
match ii {
syn::ImplItem::Method(iim) => {
debug!("{:#?}", iim);
if iim.sig.ident.to_string().starts_with(SwigTag::SwigInject.to_str()) {
acc.extend_from_slice(&iim.attrs[..]);
}
acc
},
_ => Vec::new(),
}
})
},
syn::Item::Struct(syn::ItemStruct { attrs, ident, .. }) |
syn::Item::Fn(syn::ItemFn { attrs, ident, ..}) => {
if ident.to_string().starts_with(SwigTag::SwigInject.to_str()) {
debug!("{:#?}", attrs);
attrs.clone()
} else {
Vec::new()
}
},
_ => Vec::new()
}
}).for_each(|ref attr| {
match attr.interpret_meta() {
Some(syn::Meta::NameValue(ref mnv)) if &mnv.ident.to_string() == "doc" => {
// Extract out the doc comment for these attributes
if let syn::Lit::Str(ref ls) = mnv.lit {
let mut swig_class = ls.value().replace("\\n", "\n");
let prefix_offset = swig_class.find(SwigTag::CodeStart.to_str()).expect("no code prefix") + SwigTag::CodeStart.len();
let suffix_offset = swig_class.find(SwigTag::CodeEnd.to_str()).expect("no code suffix");
let final_class = &swig_class[prefix_offset..suffix_offset];
let prefix_offset = swig_class.find(SwigTag::HdrStart.to_str()).expect("no header prefix") + SwigTag::HdrStart.len();
let suffix_offset = swig_class.find(SwigTag::HdrEnd.to_str()).expect("no header suffix");
let final_hdr = &swig_class[prefix_offset..suffix_offset];
tmp_file.write_all(&final_class.replace("\\n", "\n").as_bytes()).unwrap();
hdr += &final_hdr.replace("\\n", "\n");
debug!("{}", final_hdr);
debug!("{}", final_class);
}
},
_ => ()
}
});
tmp_file.write_all(format!("\
}}
%}}
namespace {name} {{
{header}
}}
%ignore {inject};
%include \"bindings.h\";
", name=pkg_name, header=hdr, inject=SwigTag::SwigInject).as_bytes()).unwrap();
}
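// Sketch of the resulting `swig.i` for a crate named `demo` (assembled from the
// format strings above; `...` marks the extracted doc-comment payloads):
//
//   %module demo
//   #define PKG_NAME demo
//   %include <std_vector.i> / <stdint.i> / <std_string.i>
//   %typemap(newfree) char * "free_string($1);";
//   %{
//   namespace ffi { #include "bindings.h" }
//   using namespace ffi;
//   namespace demo { ...CodeStart..CodeEnd payloads... }
//   %}
//   namespace demo { ...HdrStart..HdrEnd payloads... }
//   %ignore __SWIG_INJECT_;
//   %include "bindings.h";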
/// Extract out any `derive(Foo)` attributes.
fn get_derives(attrs: &[syn::Attribute]) -> Vec<String> {
attrs.iter().filter_map(|a| a.interpret_meta())
.filter_map(|a| {
if let syn::Meta::List(ml) = a {
Some(ml)
} else {
None
}
}).filter(|ml| ml.ident.to_string() == "swig_derive")
.flat_map(|ml| ml.nested)
.filter_map(|nm| {
if let syn::NestedMeta::Meta(m) = nm {
if let syn::Meta::Word(w) = m {
Some(w.to_string())
} else {
None
}
} else {
None
}
}).collect()
}
/// Parse a Rust file to extract any extern "C" functions or
/// `#[swiggen]`-annotated methods and move these out of the impl block.
pub fn | (ast: &syn::ItemImpl) -> TokenStream {
let mut tokens = TokenStream::new();
tokens.append_all(ast.items.iter().filter_map(|item| {
match item {
syn::ImplItem::Method(iim) => {
if iim.sig.abi.is_c(){
Some(item.into_token_stream())
} else {
let mut ret = None;
for attr in iim.attrs.iter().filter_map(|a| a.interpret_meta()) {
match attr {
syn::Meta::List(ml) => if ml.ident == syn::Ident::new("swiggen", Span::call_site()) {
if let Some(v) = ml.nested.first().map(|p| p.into_value()) {
match v {
syn::NestedMeta::Meta(m) => {
let base_name = Some(m.name());
ret = Some(impl_extern_fn(&base_name, &iim_to_itemfn(iim.clone())));
},
_ => {}
}
}
},
_ => {}
}
}
ret
}
},
_ => None,
}
}));
quote!{
#ast
#tokens
}
}
#[derive(Debug)]
pub struct Args(pub Option<syn::Ident>);
// Extract an `Option<Ident>` from `(T)` or `""`.
impl syn::synom::Synom for Args {
named!(parse -> Self, map!(option!(map!(
parens!(syn!(syn::Ident)),
|(_parens, id)| id
)), |o| Args(o)));
}
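// For example (hypothetical attribute inputs): `#[swiggen(Foo)]` parses to
// `Args(Some(foo_ident))`, while a bare `#[swiggen]` parses to `Args(None)`.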
fn iim_to_itemfn(iim: syn::ImplItemMethod) -> syn::ItemFn {
syn::ItemFn {
attrs: iim.attrs,
vis: iim.vis,
constness: iim.sig.constness,
unsafety: iim.sig.unsafety,
abi: iim.sig.abi,
ident: iim.sig.ident,
decl: Box::new(iim.sig.decl),
block: Box::new(iim.block),
}
}
| split_out_externs | identifier_name |
park.rs | use std::{
cell::Cell,
sync::atomic::{AtomicBool, Ordering},
thread::{self, Thread},
};
use conquer_util::BackOff;
use crate::{
cell::{Block, Unblock},
state::{
AtomicOnceState, BlockedState,
OnceState::{Ready, Uninit, WouldBlock},
},
POISON_PANIC_MSG,
};
use self::internal::ParkThread;
#[cfg(any(test, feature = "std"))]
/// A type for lazy initialization of e.g. global static variables, which
/// provides the same functionality as the `lazy_static!` macro.
///
/// This type uses the blocking synchronization mechanism provided by the
/// underlying operating system.
///
/// For the API of this type alias, see the API of the generic
/// [`Lazy`](crate::doc::Lazy) type.
///
/// # Examples
///
/// ```
/// use std::sync::Mutex;
///
/// # #[cfg(feature = "std")]
/// use conquer_once::Lazy;
/// # #[cfg(not(feature = "std"))]
/// # use conquer_once::spin::Lazy;
///
/// static MUTEX: Lazy<Mutex<Vec<i32>>> = Lazy::new(Mutex::default);
///
/// let mut lock = MUTEX.lock().unwrap();
///
/// lock.push(1);
/// lock.push(2);
/// lock.push(3);
///
/// assert_eq!(lock.as_slice(), &[1, 2, 3]);
/// ```
///
/// The associated [`new`](crate::lazy::Lazy::new) function can be used with any
/// function or closure that implements `Fn() -> T`.
///
/// ```
/// use std::collections::HashMap;
///
/// # #[cfg(feature = "std")]
/// use conquer_once::Lazy;
/// # #[cfg(not(feature = "std"))]
/// # use conquer_once::spin::Lazy;
///
/// static CAPITALS: Lazy<HashMap<&str, &str>> = Lazy::new(|| {
/// let mut map = HashMap::new();
/// map.insert("Norway", "Oslo");
/// map.insert("Belgium", "Brussels");
/// map.insert("Latvia", "Riga");
/// map
/// });
///
/// assert_eq!(CAPITALS.get(&"Norway"), Some(&"Oslo"));
/// assert_eq!(CAPITALS.get(&"Belgium"), Some(&"Brussels"));
/// assert_eq!(CAPITALS.get(&"Latvia"), Some(&"Riga"));
/// ```
pub type Lazy<T, F = fn() -> T> = crate::lazy::Lazy<T, ParkThread, F>;
#[cfg(any(test, feature = "std"))]
/// An interior mutability cell type which allows synchronized one-time
/// initialization and read-only access exclusively after initialization.
///
/// This type uses the blocking synchronization mechanism provided by the
/// underlying operating system.
///
/// For the API of this type alias, see the generic
/// [`OnceCell`](crate::doc::OnceCell) type.
///
/// # Examples
///
/// ```
/// # #[cfg(feature = "std")]
/// use conquer_once::OnceCell;
/// # #[cfg(not(feature = "std"))]
/// # use conquer_once::spin::OnceCell;
///
/// #[derive(Copy, Clone)]
/// struct Configuration {
/// mode: i32,
/// threshold: u64,
/// msg: &'static str,
/// }
///
/// static CONFIG: OnceCell<Configuration> = OnceCell::uninit();
///
/// // producer thread
/// CONFIG.init_once(|| Configuration {
/// mode: 2,
/// threshold: 128,
/// msg: "..."
/// });
///
/// // consumer thread
/// let res = CONFIG.get().copied();
/// if let Some(config) = res {
/// assert_eq!(config.mode, 2);
/// assert_eq!(config.threshold, 128);
/// }
/// ```
pub type OnceCell<T> = crate::cell::OnceCell<T, ParkThread>;
#[cfg(any(test, feature = "std"))]
/// A synchronization primitive which can be used to run a one-time global
/// initialization.
///
/// This type uses the blocking synchronization mechanism provided by the
/// underlying operating system.
///
/// For the API of this type alias, see the generic
/// [`OnceCell`](crate::doc::OnceCell) type.
/// This is a specialization with `T = ()`.
///
/// # Examples
///
/// ```
/// # #[cfg(feature = "std")]
/// use conquer_once::Once;
/// # #[cfg(not(feature = "std"))]
/// # use conquer_once::spin::Once;
///
/// static mut GLOBAL: usize = 0;
/// static INIT: Once = Once::uninit();
///
/// fn get_global() -> usize {
/// // SAFETY: this is safe because the `Once` ensures the `static mut` is
/// // assigned by only one thread and without data races.
/// unsafe {
/// INIT.init_once(|| {
/// GLOBAL = expensive_computation();
/// });
/// # assert_eq!(GLOBAL, 1);
/// GLOBAL
/// }
/// }
///
/// fn expensive_computation() -> usize {
/// // ...
/// # 1
/// }
/// ```
pub type Once = OnceCell<()>;
mod internal {
/// Blocking strategy using low-level and OS-reliant parking and un-parking
/// mechanisms.
#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialEq, PartialOrd)]
pub struct ParkThread;
}
impl ParkThread {
#[inline]
pub(crate) fn try_block_spinning(
state: &AtomicOnceState,
back_off: &BackOff,
) -> Result<(), BlockedState> {
loop {
// (wait:1) this acquire load syncs-with the release swaps (guard:2)
// and the acq-rel CAS (wait:2)
match state.load(Ordering::Acquire).expect(POISON_PANIC_MSG) {
Ready => return Ok(()),
WouldBlock(blocked) if back_off.advise_yield() => {
back_off.reset();
return Err(blocked);
}
_ => {}
}
back_off.spin();
}
}
}
impl Unblock for ParkThread {
/// Unblocks all blocked waiting threads.
#[inline]
unsafe fn on_unblock(state: BlockedState) {
let mut curr = state.as_ptr() as *const StackWaiter;
while !curr.is_null() {
let thread = {
// SAFETY: no mutable references to a stack waiter can exist
// and the waiter struct is ensured to live while its thread is
// parked, so the pointer can be safely dereferenced
#[allow(unused_unsafe)]
let waiter = unsafe { &*curr };
curr = waiter.next.get();
// there can be no data race when mutating the thread-cell as only the unblocking
// thread will access it; the stack waiter can be dropped as soon as the following
// store becomes visible, so the thread MUST be taken out first
let thread = waiter.thread.take().unwrap();
// (ready:2) this release store syncs-with the acquire load (ready:1)
waiter.ready.store(true, Ordering::Release);
thread
};
thread.unpark();
}
}
}
unsafe impl Block for ParkThread {
/// Blocks (parks) the current thread until it is woken up by the thread
/// with permission to initialize the `OnceCell`.
#[inline]
fn | (state: &AtomicOnceState) {
// spin a little before parking the thread in case the state is
// quickly unlocked again
let back_off = BackOff::new();
let blocked = match Self::try_block_spinning(state, &back_off) {
Ok(_) => return,
Err(blocked) => blocked,
};
// create a linked list node on the current thread's stack, which is
// guaranteed to stay alive while the thread is parked.
let waiter = StackWaiter {
ready: AtomicBool::new(false),
thread: Cell::new(Some(thread::current())),
next: Cell::new(blocked.as_ptr() as *const StackWaiter),
};
let mut curr = blocked;
let head = BlockedState::from(&waiter as *const _);
// SAFETY: `head` is a valid pointer to a `StackWaiter` that will live
// for the duration of this function, which in turn will only return
// when no other thread can still observe any pointer to it
// (wait:2) this acq-rel CAS syncs-with itself and the acq load (wait:1)
while let Err(err) = unsafe { state.try_enqueue_waiter(curr, head, Ordering::AcqRel) } {
match err {
// another parked thread succeeded in placing itself at the queue's front
WouldBlock(queue) => {
// the waiter hasn't been shared yet, so it's still safe to
// mutate the next pointer
curr = queue;
waiter.next.set(queue.as_ptr() as *const StackWaiter);
back_off.spin();
}
// acquire-release is required here to enforce acquire ordering in the failure case,
// which guarantees that any (non-atomic) stores to the cell's inner state preceding
// (guard:2) have become visible, if the function returns;
// (alternatively an explicit acquire fence could be placed into this path)
Ready => return,
Uninit => unreachable!("cell state cannot become `UNINIT` again"),
}
}
// park the thread until it is woken up by the thread that first set the state to blocked.
// the loop guards against spurious wake ups
// (ready:1) this acquire load syncs-with the release store (ready:2)
while !waiter.ready.load(Ordering::Acquire) {
thread::park();
}
// SAFETY: propagates poisoning as required by the trait
// (wait:3) this acquire load syncs-with the acq-rel swap (guard:2)
assert_eq!(state.load(Ordering::Acquire).expect(POISON_PANIC_MSG), Ready);
}
}
/// A linked list node that lives on the stack of a parked thread.
#[repr(align(4))]
pub(crate) struct StackWaiter {
/// The flag marking the waiter as either blocked or ready to proceed.
///
/// This is read by the owning thread and is set by the thread that gets to
/// run the initialization closure and responsible for unparking all blocked
/// threads, which may be either the same or any other thread.
ready: AtomicBool,
/// The handle for the parked thread that is used to unpark it, once the
/// initialization is complete.
///
/// This field is in fact mutated by a thread that is potentially not the
/// same as the owning thread, but exclusively in the case where the
/// mutating thread has exclusive access to this field.
thread: Cell<Option<Thread>>,
/// The pointer to the next blocked thread.
///
/// This field is mutated exclusively by **either** the owning thread
/// **before** the waiter becomes visible to other threads or by the thread
/// responsible for unparking all waiting threads.
next: Cell<*const StackWaiter>,
}
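// Illustrative shape of the resulting intrusive list (each node lives on the
// stack of a parked thread; `block` pushes new waiters at the front and
// `on_unblock` walks the chain from the most recently parked thread back to the
// first one, stopping at the null terminator):
//
//   BlockedState ptr -> StackWaiter C -> StackWaiter B -> StackWaiter A -> null
//                       (parked last)                     (parked first)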
#[cfg(test)]
mod tests {
generate_tests_non_blocking!();
generate_tests!();
}
| block | identifier_name |
park.rs | use std::{
cell::Cell,
sync::atomic::{AtomicBool, Ordering},
thread::{self, Thread},
};
use conquer_util::BackOff;
use crate::{
cell::{Block, Unblock},
state::{
AtomicOnceState, BlockedState,
OnceState::{Ready, Uninit, WouldBlock},
},
POISON_PANIC_MSG,
};
use self::internal::ParkThread;
#[cfg(any(test, feature = "std"))]
/// A type for lazy initialization of e.g. global static variables, which
/// provides the same functionality as the `lazy_static!` macro.
///
/// This type uses the blocking synchronization mechanism provided by the
/// underlying operating system.
///
/// For the API of this type alias, see the API of the generic
/// [`Lazy`](crate::doc::Lazy) type.
///
/// # Examples
///
/// ```
/// use std::sync::Mutex;
///
/// # #[cfg(feature = "std")]
/// use conquer_once::Lazy;
/// # #[cfg(not(feature = "std"))]
/// # use conquer_once::spin::Lazy;
///
/// static MUTEX: Lazy<Mutex<Vec<i32>>> = Lazy::new(Mutex::default);
///
/// let mut lock = MUTEX.lock().unwrap();
///
/// lock.push(1);
/// lock.push(2);
/// lock.push(3);
///
/// assert_eq!(lock.as_slice(), &[1, 2, 3]);
/// ```
///
/// The associated [`new`](crate::lazy::Lazy::new) function can be used with any
/// function or closure that implements `Fn() -> T`.
///
/// ```
/// use std::collections::HashMap;
///
/// # #[cfg(feature = "std")]
/// use conquer_once::Lazy;
/// # #[cfg(not(feature = "std"))]
/// # use conquer_once::spin::Lazy;
///
/// static CAPITALS: Lazy<HashMap<&str, &str>> = Lazy::new(|| {
/// let mut map = HashMap::new();
/// map.insert("Norway", "Oslo");
/// map.insert("Belgium", "Brussels");
/// map.insert("Latvia", "Riga");
/// map
/// });
///
/// assert_eq!(CAPITALS.get(&"Norway"), Some(&"Oslo"));
/// assert_eq!(CAPITALS.get(&"Belgium"), Some(&"Brussels"));
/// assert_eq!(CAPITALS.get(&"Latvia"), Some(&"Riga"));
/// ```
pub type Lazy<T, F = fn() -> T> = crate::lazy::Lazy<T, ParkThread, F>;
#[cfg(any(test, feature = "std"))]
/// An interior mutability cell type which allows synchronized one-time
/// initialization and read-only access exclusively after initialization.
///
/// This type uses the blocking synchronization mechanism provided by the
/// underlying operating system.
///
/// For the API of this type alias, see the generic
/// [`OnceCell`](crate::doc::OnceCell) type.
///
/// # Examples
///
/// ```
/// # #[cfg(feature = "std")]
/// use conquer_once::OnceCell;
/// # #[cfg(not(feature = "std"))]
/// # use conquer_once::spin::OnceCell;
///
/// #[derive(Copy, Clone)]
/// struct Configuration {
/// mode: i32,
/// threshold: u64,
/// msg: &'static str,
/// }
///
/// static CONFIG: OnceCell<Configuration> = OnceCell::uninit();
///
/// // producer thread
/// CONFIG.init_once(|| Configuration {
/// mode: 2,
/// threshold: 128,
/// msg: "..."
/// });
///
/// // consumer thread
/// let res = CONFIG.get().copied();
/// if let Some(config) = res {
/// assert_eq!(config.mode, 2);
/// assert_eq!(config.threshold, 128);
/// }
/// ```
pub type OnceCell<T> = crate::cell::OnceCell<T, ParkThread>;
#[cfg(any(test, feature = "std"))]
/// A synchronization primitive which can be used to run a one-time global
/// initialization.
///
/// This type uses the blocking synchronization mechanism provided by the
/// underlying operating system.
///
/// For the API of this type alias, see the generic
/// [`OnceCell`](crate::doc::OnceCell) type.
/// This is a specialization with `T = ()`.
///
/// # Examples
///
/// ```
/// # #[cfg(feature = "std")]
/// use conquer_once::Once;
/// # #[cfg(not(feature = "std"))]
/// # use conquer_once::spin::Once;
///
/// static mut GLOBAL: usize = 0;
/// static INIT: Once = Once::uninit();
///
/// fn get_global() -> usize {
/// // SAFETY: this is safe because the `Once` ensures the `static mut` is
/// // assigned by only one thread and without data races.
/// unsafe {
/// INIT.init_once(|| {
/// GLOBAL = expensive_computation();
/// });
/// # assert_eq!(GLOBAL, 1);
/// GLOBAL
/// }
/// }
///
/// fn expensive_computation() -> usize {
/// // ...
/// # 1
/// }
/// ```
pub type Once = OnceCell<()>;
mod internal {
/// Blocking strategy using low-level and OS-reliant parking and un-parking
/// mechanisms.
#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialEq, PartialOrd)]
pub struct ParkThread;
}
impl ParkThread {
#[inline]
pub(crate) fn try_block_spinning(
state: &AtomicOnceState,
back_off: &BackOff,
) -> Result<(), BlockedState> {
loop {
// (wait:1) this acquire load syncs-with the release swaps (guard:2)
// and the acq-rel CAS (wait:2)
match state.load(Ordering::Acquire).expect(POISON_PANIC_MSG) {
Ready => return Ok(()),
WouldBlock(blocked) if back_off.advise_yield() => {
back_off.reset();
return Err(blocked);
}
_ => {}
}
back_off.spin();
}
}
}
impl Unblock for ParkThread {
/// Unblocks all blocked waiting threads.
#[inline]
unsafe fn on_unblock(state: BlockedState) {
let mut curr = state.as_ptr() as *const StackWaiter;
while !curr.is_null() {
let thread = {
// SAFETY: no mutable references to a stack waiter can exist
// and the waiter struct is ensured to live while its thread is
// parked, so the pointer can be safely dereferenced
#[allow(unused_unsafe)]
let waiter = unsafe { &*curr };
curr = waiter.next.get();
// there can be no data race when mutating the thread-cell as only the unblocking
// thread will access it; the stack waiter can be dropped as soon as the following
// store becomes visible, so the thread MUST be taken out first
let thread = waiter.thread.take().unwrap();
// (ready:2) this release store syncs-with the acquire load (ready:1)
waiter.ready.store(true, Ordering::Release);
thread
};
thread.unpark();
}
}
}
unsafe impl Block for ParkThread {
/// Blocks (parks) the current thread until it is woken up by the thread
/// with permission to initialize the `OnceCell`.
#[inline]
fn block(state: &AtomicOnceState) |
}
/// A linked list node that lives on the stack of a parked thread.
#[repr(align(4))]
pub(crate) struct StackWaiter {
/// The flag marking the waiter as either blocked or ready to proceed.
///
/// This is read by the owning thread and is set by the thread that gets to
/// run the initialization closure and responsible for unparking all blocked
/// threads, which may be either the same or any other thread.
ready: AtomicBool,
/// The handle for the parked thread that is used to unpark it, once the
/// initialization is complete.
///
/// This field is in fact mutated by a thread that is potentially not the
/// same as the owning thread, but exclusively in the case where the
/// mutating thread has exclusive access to this field.
thread: Cell<Option<Thread>>,
/// The pointer to the next blocked thread.
///
/// This field is mutated exclusively by **either** the owning thread
/// **before** the waiter becomes visible to other threads or by the thread
/// responsible for unparking all waiting threads.
next: Cell<*const StackWaiter>,
}
#[cfg(test)]
mod tests {
generate_tests_non_blocking!();
generate_tests!();
}
| {
// spin a little before parking the thread in case the state is
// quickly unlocked again
let back_off = BackOff::new();
let blocked = match Self::try_block_spinning(state, &back_off) {
Ok(_) => return,
Err(blocked) => blocked,
};
// create a linked list node on the current thread's stack, which is
// guaranteed to stay alive while the thread is parked.
let waiter = StackWaiter {
ready: AtomicBool::new(false),
thread: Cell::new(Some(thread::current())),
next: Cell::new(blocked.as_ptr() as *const StackWaiter),
};
let mut curr = blocked;
let head = BlockedState::from(&waiter as *const _);
// SAFETY: `head` is a valid pointer to a `StackWaiter` that will live
// for the duration of this function, which in turn will only return
// when no other thread can still observe any pointer to it
// (wait:2) this acq-rel CAS syncs-with itself and the acq load (wait:1)
while let Err(err) = unsafe { state.try_enqueue_waiter(curr, head, Ordering::AcqRel) } {
match err {
// another parked thread succeeded in placing itself at the queue's front
WouldBlock(queue) => {
// the waiter hasn't been shared yet, so it's still safe to
// mutate the next pointer
curr = queue;
waiter.next.set(queue.as_ptr() as *const StackWaiter);
back_off.spin();
}
// acquire-release is required here to enforce acquire ordering in the failure case,
// which guarantees that any (non-atomic) stores to the cell's inner state preceding
// (guard:2) have become visible, if the function returns;
// (alternatively an explicit acquire fence could be placed into this path)
Ready => return,
Uninit => unreachable!("cell state cannot become `UNINIT` again"),
}
}
// park the thread until it is woken up by the thread that first set the state to blocked.
// the loop guards against spurious wake ups
// (ready:1) this acquire load syncs-with the release store (ready:2)
while !waiter.ready.load(Ordering::Acquire) {
thread::park();
}
// SAFETY: propagates poisoning as required by the trait
// (wait:3) this acquire load syncs-with the acq-rel swap (guard:2)
assert_eq!(state.load(Ordering::Acquire).expect(POISON_PANIC_MSG), Ready);
} | identifier_body |
park.rs | use std::{
cell::Cell,
sync::atomic::{AtomicBool, Ordering},
thread::{self, Thread},
};
use conquer_util::BackOff;
use crate::{
cell::{Block, Unblock},
state::{
AtomicOnceState, BlockedState,
OnceState::{Ready, Uninit, WouldBlock},
},
POISON_PANIC_MSG,
};
use self::internal::ParkThread;
#[cfg(any(test, feature = "std"))]
/// A type for lazy initialization of e.g. global static variables, which
/// provides the same functionality as the `lazy_static!` macro.
///
/// This type uses the blocking synchronization mechanism provided by the
/// underlying operating system.
///
/// For the API of this type alias, see the API of the generic
/// [`Lazy`](crate::doc::Lazy) type.
///
/// # Examples
///
/// ```
/// use std::sync::Mutex;
///
/// # #[cfg(feature = "std")]
/// use conquer_once::Lazy;
/// # #[cfg(not(feature = "std"))]
/// # use conquer_once::spin::Lazy;
///
/// static MUTEX: Lazy<Mutex<Vec<i32>>> = Lazy::new(Mutex::default);
///
/// let mut lock = MUTEX.lock().unwrap();
///
/// lock.push(1);
/// lock.push(2);
/// lock.push(3);
///
/// assert_eq!(lock.as_slice(), &[1, 2, 3]);
/// ```
///
/// The associated [`new`](crate::lazy::Lazy::new) function can be used with any
/// function or closure that implements `Fn() -> T`.
///
/// ```
/// use std::collections::HashMap;
///
/// # #[cfg(feature = "std")]
/// use conquer_once::Lazy;
/// # #[cfg(not(feature = "std"))]
/// # use conquer_once::spin::Lazy;
///
/// static CAPITALS: Lazy<HashMap<&str, &str>> = Lazy::new(|| {
/// let mut map = HashMap::new();
/// map.insert("Norway", "Oslo");
/// map.insert("Belgium", "Brussels");
/// map.insert("Latvia", "Riga");
/// map
/// });
///
/// assert_eq!(CAPITALS.get(&"Norway"), Some(&"Oslo"));
/// assert_eq!(CAPITALS.get(&"Belgium"), Some(&"Brussels"));
/// assert_eq!(CAPITALS.get(&"Latvia"), Some(&"Riga"));
/// ```
pub type Lazy<T, F = fn() -> T> = crate::lazy::Lazy<T, ParkThread, F>;
#[cfg(any(test, feature = "std"))]
/// An interior mutability cell type which allows synchronized one-time
/// initialization and read-only access exclusively after initialization.
///
/// This type uses the blocking synchronization mechanism provided by the
/// underlying operating system.
///
/// For the API of this type alias, see the generic
/// [`OnceCell`](crate::doc::OnceCell) type.
///
/// # Examples
///
/// ```
/// # #[cfg(feature = "std")]
/// use conquer_once::OnceCell;
/// # #[cfg(not(feature = "std"))]
/// # use conquer_once::spin::OnceCell;
///
/// #[derive(Copy, Clone)]
/// struct Configuration {
/// mode: i32,
/// threshold: u64,
/// msg: &'static str,
/// }
///
/// static CONFIG: OnceCell<Configuration> = OnceCell::uninit();
///
/// // producer thread
/// CONFIG.init_once(|| Configuration {
/// mode: 2,
/// threshold: 128,
/// msg: "..."
/// });
///
/// // consumer thread
/// let res = CONFIG.get().copied();
/// if let Some(config) = res {
/// assert_eq!(config.mode, 2);
/// assert_eq!(config.threshold, 128);
/// }
/// ```
pub type OnceCell<T> = crate::cell::OnceCell<T, ParkThread>;
#[cfg(any(test, feature = "std"))]
/// A synchronization primitive which can be used to run a one-time global
/// initialization.
///
/// This type uses the blocking synchronization mechanism provided by the
/// underlying operating system.
///
/// For the API of this type alias, see the generic
/// [`OnceCell`](crate::doc::OnceCell) type.
/// This is a specialization with `T = ()`.
///
/// # Examples
///
/// ```
/// # #[cfg(feature = "std")]
/// use conquer_once::Once;
/// # #[cfg(not(feature = "std"))]
/// # use conquer_once::spin::Once;
///
/// static mut GLOBAL: usize = 0;
/// static INIT: Once = Once::uninit();
///
/// fn get_global() -> usize {
/// // SAFETY: this is safe because the `Once` ensures the `static mut` is
/// // assigned by only one thread and without data races.
/// unsafe {
/// INIT.init_once(|| {
/// GLOBAL = expensive_computation();
/// });
/// # assert_eq!(GLOBAL, 1);
/// GLOBAL
/// }
/// }
///
/// fn expensive_computation() -> usize {
/// // ...
/// # 1
/// }
/// ```
pub type Once = OnceCell<()>;
mod internal {
/// Blocking strategy using low-level and OS-reliant parking and un-parking
/// mechanisms.
#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialEq, PartialOrd)]
pub struct ParkThread;
}
impl ParkThread {
#[inline]
pub(crate) fn try_block_spinning(
state: &AtomicOnceState,
back_off: &BackOff,
) -> Result<(), BlockedState> {
loop {
// (wait:1) this acquire load syncs-with the release swaps (guard:2)
// and the acq-rel CAS (wait:2)
match state.load(Ordering::Acquire).expect(POISON_PANIC_MSG) {
Ready => return Ok(()),
WouldBlock(blocked) if back_off.advise_yield() => {
back_off.reset();
return Err(blocked);
}
_ => {}
}
| }
}
}
impl Unblock for ParkThread {
/// Unblocks all blocked waiting threads.
#[inline]
unsafe fn on_unblock(state: BlockedState) {
let mut curr = state.as_ptr() as *const StackWaiter;
while !curr.is_null() {
let thread = {
// SAFETY: no mutable references to a stack waiter can exist
// and the waiter struct is ensured to live while its thread is
// parked, so the pointer can be safely dereferenced
#[allow(unused_unsafe)]
let waiter = unsafe { &*curr };
curr = waiter.next.get();
// there can be no data race when mutating the thread-cell as only the unblocking
// thread will access it; the stack waiter can be dropped as soon as the following
// store becomes visible, so the thread MUST be taken out first
let thread = waiter.thread.take().unwrap();
// (ready:2) this release store syncs-with the acquire load (ready:1)
waiter.ready.store(true, Ordering::Release);
thread
};
thread.unpark();
}
}
}
unsafe impl Block for ParkThread {
/// Blocks (parks) the current thread until it is woken up by the thread
/// with permission to initialize the `OnceCell`.
#[inline]
fn block(state: &AtomicOnceState) {
// spin a little before parking the thread in case the state is
// quickly unlocked again
let back_off = BackOff::new();
let blocked = match Self::try_block_spinning(state, &back_off) {
Ok(_) => return,
Err(blocked) => blocked,
};
// create a linked list node on the current thread's stack, which is
// guaranteed to stay alive while the thread is parked.
let waiter = StackWaiter {
ready: AtomicBool::new(false),
thread: Cell::new(Some(thread::current())),
next: Cell::new(blocked.as_ptr() as *const StackWaiter),
};
let mut curr = blocked;
let head = BlockedState::from(&waiter as *const _);
// SAFETY: `head` is a valid pointer to a `StackWaiter` that will live
// for the duration of this function, which in turn will only return
// when no other thread can still observe any pointer to it
// (wait:2) this acq-rel CAS syncs-with itself and the acq load (wait:1)
while let Err(err) = unsafe { state.try_enqueue_waiter(curr, head, Ordering::AcqRel) } {
match err {
// another parked thread succeeded in placing itself at the queue's front
WouldBlock(queue) => {
// the waiter hasn't been shared yet, so it's still safe to
// mutate the next pointer
curr = queue;
waiter.next.set(queue.as_ptr() as *const StackWaiter);
back_off.spin();
}
// acquire-release is required here to enforce acquire ordering in the failure case,
// which guarantees that any (non-atomic) stores to the cell's inner state preceding
// (guard:2) have become visible, if the function returns;
// (alternatively an explicit acquire fence could be placed into this path)
Ready => return,
Uninit => unreachable!("cell state cannot become `UNINIT` again"),
}
}
// park the thread until it is woken up by the thread that first set the state to blocked.
// the loop guards against spurious wake ups
// (ready:1) this acquire load syncs-with the release store (ready:2)
while !waiter.ready.load(Ordering::Acquire) {
thread::park();
}
// SAFETY: propagates poisoning as required by the trait
// (wait:3) this acquire load syncs-with the acq-rel swap (guard:2)
assert_eq!(state.load(Ordering::Acquire).expect(POISON_PANIC_MSG), Ready);
}
}
/// A linked list node that lives on the stack of a parked thread.
#[repr(align(4))]
pub(crate) struct StackWaiter {
/// The flag marking the waiter as either blocked or ready to proceed.
///
/// This is read by the owning thread and is set by the thread that gets to
/// run the initialization closure and is responsible for unparking all blocked
/// threads, which may be either the same or any other thread.
ready: AtomicBool,
/// The handle for the parked thread that is used to unpark it, once the
/// initialization is complete.
///
/// This field is in fact mutated by a thread that is potentially not the
/// same as the owning thread, but exclusively in the case where the
/// mutating thread has exclusive access to this field.
thread: Cell<Option<Thread>>,
/// The pointer to the next blocked thread.
///
/// This field is mutated exclusively by **either** the owning thread
/// **before** the waiter becomes visible to other threads or by the thread
/// responsible for unparking all waiting threads.
next: Cell<*const StackWaiter>,
}
#[cfg(test)]
mod tests {
generate_tests_non_blocking!();
generate_tests!();
} | back_off.spin(); | random_line_split |
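// A minimal cross-thread sketch of the blocking behaviour documented in the
// type aliases above: several threads race to initialize one `OnceCell`, the
// winning closure runs once, and the losers park (via `ParkThread`) until the
// value is published. The thread count and payload are arbitrary illustrative
// choices; `init_once` is assumed to skip the closure once the cell is already
// initialized, as in the doc examples above.
use conquer_once::OnceCell;
use std::thread;

static CELL: OnceCell<usize> = OnceCell::uninit();

fn blocking_init_demo() {
    let handles: Vec<_> = (0..4)
        .map(|i| {
            thread::spawn(move || {
                // every thread attempts initialization; only one closure runs,
                // the rest block until initialization completes
                CELL.init_once(|| i * 10);
                *CELL.get().expect("cell is initialized at this point")
            })
        })
        .collect();
    for handle in handles {
        // all threads observe the single value written by the winning thread
        assert_eq!(handle.join().unwrap(), *CELL.get().unwrap());
    }
}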
zapis.py | # -*- coding: utf-8 -*-
"""
Module for writing out the song (loading the settings (defs.txt), creating the .wav file,
"adjusting the volume of the track")
"""
print("Laduje modul o nazwie: "+__name__)
import numpy as np
def wczytywanie_ustawien(plik_konfiguracyjny = "defs.txt"):
"""
loads the settings file (defs.txt) into a dictionary
args:
    str: plik_konfiguracyjny - name of the configuration file with the
        parameter values (tempo etc.)
returns:
    dict: parametry - the names and values of the parameters in use
"""
import re
import numpy as np
# wczytuje zawartosc pliku (bez pierwszej i ostatniej linijki, jeden wiersz
# wyjsciowej macierzy, zawiera nazwe parametru i jego wartosc, jako
# oddzielne elementy, zapisane jako stringi)
ustawienia = np.genfromtxt(plik_konfiguracyjny, dtype = str, \
skip_header=1, skip_footer=1, delimiter=":")
# tworze slownik, ktory bedzie przechowywal wartosci
parametry = {}
# pozbywam się "" z key
# jesli mamy 1 parametr (1 linijka w pliku, to ustawienia to zmienna o
# shape = (2,), wiec odwoluje sie bezposrednio do zmiennej ustawienia
if ustawienia.shape == (2,):
parametry[re.sub('"','',ustawienia[0])] = ustawienia[1]
# jak mamy wiecej parametrow odwoluje sie do kolejnych linijek macierzy
# ustawienia
else:
for l in ustawienia:
parametry[re.sub('"','',l[0])] = l[1]
# zamieniamy napisy na odpowiednie wartosci - kontroluje te parametry, wiec
# robie to recznie
try:
parametry['tryb'] = parametry['tryb'].strip() #tryb
# jak nie podano danego parametru to idz dalej, nie wyrzucaj bledu
except KeyError:
print("Podaj tryb odczytu!")
try:
parametry['bpm'] = int(parametry['bpm']) # tempo
# jak nie podano danego parametru to idz dalej, nie wyrzucaj bledu
except KeyError:
pass
try:
parametry['freq'] = int(parametry['freq']) # frekwencja wyjsciowego wav
# jak nie podano danego parametru to idz dalej, nie wyrzucaj bledu
except KeyError:
pass
try:
parametry['loud'] = float(parametry['loud'] ) # glosnosc
# jak nie podano danego parametru to idz dalej, nie wyrzucaj bledu
except KeyError:
pass
try:
# lista wag dla sampli
parametry['wages'] = [float(s) for s in parametry['wages'].split(",")]
# jak nie podano danego parametru to idz dalej, nie wyrzucaj bledu
except KeyError:
pass
return parametry
#b = wczytywanie_ustawien("defs.txt")
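# Hedged illustration of the defs.txt layout this parser expects: one header
# line, a block of "name":value pairs, and one footer line (header and footer
# are skipped via skip_header/skip_footer). The header/footer words and the
# concrete values below are made up for the example.
#
#   ustawienia:
#   "tryb": tekst
#   "bpm": 120
#   "freq": 44100
#   "loud": 0.5
#   "wages": 1,0.5,0.25,0.25
#   koniec
#
# wczytywanie_ustawien("defs.txt") would then return roughly:
#   {'tryb': 'tekst', 'bpm': 120, 'freq': 44100, 'loud': 0.5,
#    'wages': [1.0, 0.5, 0.25, 0.25]}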
#zglasnianie utworu
def z | utwor, procent = 0):
"""
changes the volume of the track (its amplitudes)
args:
    numpy.ndarray (numpy.int16): utwor - the sound that is to be made louder
        or quieter
    float: procent - number describing the change in volume; it ranges
        from -1 to 1, where 0 means no change, 1 means "100%
        louder" and -1 means "100% quieter"
returns:
    numpy.ndarray (numpy.int16): glosniejszy - the quieter or louder track
"""
if(-1 <= procent <= 1):
#ile razy mamy pomnozyc amplitude naszego dzwieku
mnoznik = 0
if( procent < 0 ):
mnoznik = 1 + procent
else:
# obliczamy najwyzsza amplitude w danym utworze i ona bedzie
# wyznaczac jak bardzo mozemy podglosnic
maks_ampli = 0
maks_ampli = max(abs(utwor))
mnoznik = 32767/maks_ampli # maksymalny mnoznik
# mnoznik minimalnie moze osiagnac wartosc 1, to co powyzej
# (mnoznik-1) mnozymy o procent zglosnienia
# i dodajemy do podstawy (czyli 1)
mnoznik = 1 + (mnoznik - 1)*procent
glosniej = mnoznik * utwor
#glosniej = np.array(glosniej, dtype=np.int16)
glosniej = glosniej.astype(np.int16)
return glosniej
else:
print("Podaj procent z zakresu -1 do 1")
#wierszyk1 = zmiana_glosnosci(wierszyk, b['loud'])
#wierszyk1
def tworzenie_piosenki(macierz_piosenki, czy_pelna = True, bpm = 120, \
freq = 44100, wages = None, loud = 0):
"""
main function that generates the whole song
args:
    numpy.ndarray (str: U2): macierz_piosenki - matrix containing the
        definition of the consecutive quarter notes (what should be
        played in a given quarter note)
    bool: czy_pelna - flag checking whether macierz_piosenki was filled
        in (it is not when the tracks had the wrong number
        of rows or columns)
    int: bpm - tempo of the song in bpm
    int: freq - number of samples per second
    list (float): wages - weights of the individual samples (how much sample 1,
        sample 2 etc. should contribute)
    float: loud - volume factor: 0 - same as the original samples, 1 - as loud
        as possible, -1 - as quiet as possible
returns:
    numpy.ndarray (numpy.int16): the finished song
"""
# macierz piosenki byla pusta, piosenka nie zostala utworzona
if(czy_pelna == False):
print("Nie utworzono piosenki")
return None
else:
import numpy as np
import scipy.io.wavfile
t_cwiercnuty = 60 / bpm # czas trwania jednej cwiercnuty (zalezy od
#tempa)
ile_cwiercnut = macierz_piosenki.shape[0] # ilosc cwiercnut
kanaly = macierz_piosenki.shape[1] # ilosc uzywanych sampli
frekw = freq
czas_utworu = ile_cwiercnut*t_cwiercnuty
# ile elementow bedzie w nowym utworze
ilosc_probek = int(frekw*czas_utworu)
# bedziemy tylko raz wczytywac zawartosc sampleXY.wav, wiec potrzebuje
# unikalne numery sampli
rozne_sample = np.unique(macierz_piosenki) # bierze lacznie z "--"
# w slownikach zapiszemy parametry tych sampli
# slownik z wartosciami danego sampla (tj. macierze numpy-owe z
# amplitudami)
sample_co = {}
sample_frekw = {} # slownik z ich frekwencjami
sample_dl = {} # slownik z ich dlugosciami
#wczytujemy te sample
# w iteratorze bierzemy napisy "01" "02" "--" itd. stringi!!!
for ktory_sampel in rozne_sample:
if(ktory_sampel != '--'):
# tworzymy napis z nazwa pliku sampla, np. "sample01.wav"
plik = ''.join(['sample',ktory_sampel,'.wav'])
# wczytujemy zawartosc i frekwencje danego sampla do
# odpowiednio nazwanego elementu w slowniku sample_co i
# sample_frekw
sample_frekw[ktory_sampel], sample_co[ktory_sampel] = \
scipy.io.wavfile.read(plik)
# tworzymy mono z naszego sampla
sample_co[ktory_sampel] = np.mean(sample_co[ktory_sampel],\
axis=1)/32767
# normalizujemy te wartosci
sample_co[ktory_sampel] = np.int16(sample_co[ktory_sampel]/ \
max(np.abs(sample_co[ktory_sampel])) * 32767)
# zapisujemy dlugosc sampli, czyli ilosc probek
# ( = czas_trwania*frekwencja)
sample_dl[ktory_sampel] = sample_co[ktory_sampel].shape[0]
else: # to samo robimy dla "--" recznie ustawiamy
# robimy cisze, gdy --
sample_co[ktory_sampel] = np.zeros((1,), dtype=np.int16)
sample_frekw[ktory_sampel] = frekw # taka sama jak domyslna
sample_dl[ktory_sampel] = 0 # zakladamy czas 0 sekund
if wages is None:
wages = np.ones((1,kanaly))
else:
# zeby mialo wymiar (1,kanaly), a nie (kanaly,)
wages = np.array(wages).reshape(1,kanaly)
# definicja nowego utworu
T = np.linspace(0, czas_utworu, ilosc_probek)
for wiersz in range(0, ile_cwiercnut):
sample = [] # wczytamy sample z danej cwiecnuty
dlugosci = [] # tu zapiszemy ich dlugosci w tej cwiercnucie
for i in range(0, kanaly):
sampus = macierz_piosenki[wiersz,i]
sample.append(sample_co[sampus])
dlugosci.append(sample_dl[sampus])
# bierzemy najdluzszy sample i w calosci bedziemy go odtwarzac;
# reszte zatem tez w calosci odtworzymy, a gdy sie skoncza damy
# cisze (zera)
maksik = max(dlugosci)
# mamy tutaj macierz 4 na max dlugosc, przygotowana do zlaczenia
# potem tych dzwiekow w jeden
pusty = np.int16(np.zeros((len(sample), maksik)))
# dodajemy nasze dzwieki do tej pustej
for k in range(0, kanaly):
pusty[k][0:dlugosci[k]] = sample[k]
# mnozymy kolejne elementy wektora pusty (czyli sample) przez
# wagi i sumujemy
cwiercnuta = np.dot(wages, pusty)
#otrzymamy wymiar (1, x), a chcemy (x,), wiec bierzemy pierwszy
# element
cwiercnuta = cwiercnuta[0]
# poczatek biezacej cwiercnuty
poczatek_cwiercnuty = int(wiersz*t_cwiercnuty*frekw)
# jesli dodanie ostatnich cwiercnut bedzie wiazalo sie z
# przekroczeniem dlugosci tworzonego utworu, obcinamy ostatnie
# dzwieki, tak by zmiescic sie w tej dlugosci
if (poczatek_cwiercnuty + maksik) > ilosc_probek:
T[poczatek_cwiercnuty:(poczatek_cwiercnuty + maksik)]=\
cwiercnuta[0:len(T[poczatek_cwiercnuty:(poczatek_cwiercnuty +\
maksik)])]
else:
T[poczatek_cwiercnuty:(poczatek_cwiercnuty + maksik)] += \
cwiercnuta
T= np.array(T, dtype=np.int16)
#ustalamy glosnosc utworu
T = zmiana_glosnosci(T, loud)
return T
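# A small self-contained sketch of the mixing step inside tworzenie_piosenki:
# the samples active in one quarter note are padded to the longest one,
# weighted, and summed into a single quarter-note signal. The arrays below are
# tiny made-up stand-ins for real sample data.
def _przyklad_miksowania():
    import numpy as np
    sample_a = np.array([1000, 2000, 3000], dtype=np.int16)
    sample_b = np.array([500, -500], dtype=np.int16)
    wages = np.array([[1.0, 0.5]])  # shape (1, number of channels)
    maksik = max(len(sample_a), len(sample_b))
    pusty = np.zeros((2, maksik), dtype=np.int16)
    pusty[0, :len(sample_a)] = sample_a
    pusty[1, :len(sample_b)] = sample_b
    cwiercnuta = np.dot(wages, pusty)[0]  # weighted sum, shape (maksik,)
    return cwiercnuta  # -> [1250. 1750. 3000.]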
#pios, k = wczytywanie_sciezek(a)
#wierszyk = tworzenie_piosenki(pios, k, bpm = b['bpm'], freq = b['freq'], \
#wages = b['wages'])
#wierszyk = tworzenie_piosenki(pios, k, **b)
#wierszyk | miana_glosnosci( | identifier_name |
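# A hedged end-to-end usage sketch mirroring the commented-out calls above.
# The track reader `wczytywanie_sciezek` is assumed to live in a sibling
# module (here called `odczyt`) and to return the note matrix plus a
# completeness flag; the file names are placeholders.
def _przyklad_uzycia():
    import scipy.io.wavfile
    from odczyt import wczytywanie_sciezek  # hypothetical module name
    b = wczytywanie_ustawien("defs.txt")
    pios, pelna = wczytywanie_sciezek("sciezki.txt")  # hypothetical input file
    wierszyk = tworzenie_piosenki(pios, pelna, bpm=b['bpm'], freq=b['freq'],
                                  wages=b['wages'], loud=b['loud'])
    if wierszyk is not None:
        scipy.io.wavfile.write("piosenka.wav", b['freq'], wierszyk)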
zapis.py | # -*- coding: utf-8 -*-
"""
Module for writing out the song (loading the settings (defs.txt), creating the .wav file,
"adjusting the volume of the track")
"""
print("Laduje modul o nazwie: "+__name__)
import numpy as np
def wczytywanie_ustawien(plik_konfiguracyjny = "defs.txt"):
"""
loads the settings file (defs.txt) into a dictionary
args:
    str: plik_konfiguracyjny - name of the configuration file with the
        parameter values (tempo etc.)
returns:
    dict: parametry - the names and values of the parameters in use
"""
import re
import numpy as np
# wczytuje zawartosc pliku (bez pierwszej i ostatniej linijki, jeden wiersz
# wyjsciowej macierzy, zawiera nazwe parametru i jego wartosc, jako
# oddzielne elementy, zapisane jako stringi)
ustawienia = np.genfromtxt(plik_konfiguracyjny, dtype = str, \
skip_header=1, skip_footer=1, delimiter=":")
# tworze slownik, ktory bedzie przechowywal wartosci
parametry = {}
# pozbywam się "" z key
# jesli mamy 1 parametr (1 linijka w pliku, to ustawienia to zmienna o
# shape = (2,), wiec odwoluje sie bezposrednio do zmiennej ustawienia
if ustawienia.shape == (2,):
parametry[re.sub('"','',ustawienia[0])] = ustawienia[1]
# jak mamy wiecej parametrow odwoluje sie do kolejnych linijek macierzy
# ustawienia
else:
for l in ustawienia:
parametry[re.sub('"','',l[0])] = l[1]
# zamieniamy napisy na odpowiednie wartosci - kontroluje te parametry, wiec
# robie to recznie
try:
parametry['tryb'] = parametry['tryb'].strip() #tryb
# jak nie podano danego parametru to idz dalej, nie wyrzucaj bledu
except KeyError:
print("Podaj tryb odczytu!")
try:
parametry['bpm'] = int(parametry['bpm']) # tempo
# jak nie podano danego parametru to idz dalej, nie wyrzucaj bledu
except KeyError:
pass
try:
parametry['freq'] = int(parametry['freq']) # frekwencja wyjsciowego wav
# jak nie podano danego parametru to idz dalej, nie wyrzucaj bledu
except KeyError:
pass
try:
parametry['loud'] = float(parametry['loud'] ) # glosnosc
# jak nie podano danego parametru to idz dalej, nie wyrzucaj bledu
except KeyError:
pass
try:
# lista wag dla sampli
parametry['wages'] = [float(s) for s in parametry['wages'].split(",")]
# jak nie podano danego parametru to idz dalej, nie wyrzucaj bledu
except KeyError:
pass
return parametry
#b = wczytywanie_ustawien("defs.txt")
#zglasnianie utworu
def zmiana_glosnosci(utwor, procent = 0):
"""
changes the volume of the track (its amplitudes)
args:
    numpy.ndarray (numpy.int16): utwor - the sound that is to be made louder
        or quieter
    float: procent - number describing the change in volume; it ranges
        from -1 to 1, where 0 means no change, 1 means "100%
        louder" and -1 means "100% quieter"
returns:
    numpy.ndarray (numpy.int16): glosniejszy - the quieter or louder track
"""
if(-1 <= procent <= 1):
#ile razy mamy pomnozyc amplitude naszego dzwieku
mnoznik = 0
if( procent < 0 ):
mnoznik = 1 + procent
else:
# obliczamy najwyzsza amplitude w danym utworze i ona bedzie
# wyznaczac jak bardzo mozemy podglosnic
maks_ampli = 0
maks_ampli = max(abs(utwor))
mnoznik = 32767/maks_ampli # maksymalny mnoznik
# mnoznik minimalnie moze osiagnac wartosc 1, to co powyzej
# (mnoznik-1) mnozymy o procent zglosnienia
# i dodajemy do podstawy (czyli 1)
mnoznik = 1 + (mnoznik - 1)*procent
glosniej = mnoznik * utwor
#glosniej = np.array(glosniej, dtype=np.int16)
glosniej = glosniej.astype(np.int16)
return glosniej
else:
print("Podaj procent z zakresu -1 do 1")
#wierszyk1 = zmiana_glosnosci(wierszyk, b['loud'])
#wierszyk1
def tworzenie_piosenki(macierz_piosenki, czy_pelna = True, bpm = 120, \
freq = 44100, wages = None, loud = 0):
"""
main function that generates the whole song
args:
    numpy.ndarray (str: U2): macierz_piosenki - matrix containing the
        definition of the consecutive quarter notes (what should be
        played in a given quarter note)
    bool: czy_pelna - flag checking whether macierz_piosenki was filled
        in (it is not when the tracks had the wrong number
        of rows or columns)
    int: bpm - tempo of the song in bpm
    int: freq - number of samples per second
    list (float): wages - weights of the individual samples (how much sample 1,
        sample 2 etc. should contribute)
    float: loud - volume factor: 0 - same as the original samples, 1 - as loud
        as possible, -1 - as quiet as possible
returns:
    numpy.ndarray (numpy.int16): the finished song
"""
# macierz piosenki byla pusta, piosenka nie zostala utworzona
if(czy_pelna == False):
print("Nie utworzono piosenki")
return None
else:
import numpy as np
import scipy.io.wavfile
t_cwiercnuty = 60 / bpm # czas trwania jednej cwiercnuty (zalezy od
#tempa)
ile_cwiercnut = macierz_piosenki.shape[0] # ilosc cwiercnut
kanaly = macierz_piosenki.shape[1] # ilosc uzywanych sampli
frekw = freq
czas_utworu = ile_cwiercnut*t_cwiercnuty
# ile elementow bedzie w nowym utworze
ilosc_probek = int(frekw*czas_utworu)
# bedziemy tylko raz wczytywac zawartosc sampleXY.wav, wiec potrzebuje
# unikalne numery sampli
rozne_sample = np.unique(macierz_piosenki) # bierze lacznie z "--"
# w slownikach zapiszemy parametry tych sampli
# slownik z wartosciami danego sampla (tj. macierze numpy-owe z
# amplitudami)
sample_co = {}
sample_frekw = {} # slownik z ich frekwencjami
sample_dl = {} # slownik z ich dlugosciami
#wczytujemy te sample
# w iteratorze bierzemy napisy "01" "02" "--" itd. stringi!!!
for ktory_sampel in rozne_sample:
if(ktory_sampel != '--'):
# tworzymy napis z nazwa pliku sampla, np. "sample01.wav"
plik = ''.join(['sample',ktory_sampel,'.wav'])
# wczytujemy zawartosc i frekwencje danego sampla do
# odpowiednio nazwanego elementu w slowniku sample_co i
# sample_frekw
sample_frekw[ktory_sampel], sample_co[ktory_sampel] = \
scipy.io.wavfile.read(plik)
# tworzymy mono z naszego sampla
sample_co[ktory_sampel] = np.mean(sample_co[ktory_sampel],\
axis=1)/32767
# normalizujemy te wartosci
sample_co[ktory_sampel] = np.int16(sample_co[ktory_sampel]/ \
max(np.abs(sample_co[ktory_sampel])) * 32767)
# zapisujemy dlugosc sampli, czyli ilosc probek
# ( = czas_trwania*frekwencja)
sample_dl[ktory_sampel] = sample_co[ktory_sampel].shape[0]
else: # to samo robimy dla "--" recznie ustawiamy
# robimy cisze, gdy --
sample_co[ktory_sampel] = np.zeros((1,), dtype=np.int16)
sample_frekw[ktory_sampel] = frekw # taka sama jak domyslna
sample_dl[ktory_sampel] = 0 # zakladamy czas 0 sekund
if wages is None:
wages = np.ones((1,kanaly))
else:
# zeby mialo wymiar (1,kanaly), a nie (kanaly,)
wages = np.array(wages).reshape(1,kanaly)
# definicja nowego utworu
T = np.linspace(0, czas_utworu, ilosc_probek)
for wiersz in range(0, ile_cwiercnut):
sample = [] # wczytamy sample z danej cwiecnuty
dlugosci = [] # tu zapiszemy ich dlugosci w tej cwiercnucie
for i in range(0, kanaly):
sampus = macierz_piosenki[wiersz,i]
sample.append(sample_co[sampus])
dlugosci.append(sample_dl[sampus])
# bierzemy najdluzszy sample i w calosci bedziemy go odtwarzac;
# reszte zatem tez w calosci odtworzymy, a gdy sie skoncza damy
# cisze (zera)
maksik = max(dlugosci)
# mamy tutaj macierz 4 na max dlugosc, przygotowana do zlaczenia
# potem tych dzwiekow w jeden
pusty = np.int16(np.zeros((len(sample), maksik)))
# dodajemy nasze dzwieki do tej pustej
for k in range(0, kanaly):
pusty[k][0:dlugosci[k]] = sample[k]
# mnozymy kolejne elementy wektora pusty (czyli sample) przez
# wagi i sumujemy
cwiercnuta = np.dot(wages, pusty)
#otrzymamy wymiar (1, x), a chcemy (x,), wiec bierzemy pierwszy
# element
cwiercnuta = cwiercnuta[0]
# poczatek biezacej cwiercnuty
poczatek_cwiercnuty = int(wiersz*t_cwiercnuty*frekw)
# jesli dodanie ostatnich cwiercnut bedzie wiazalo sie z
# przekroczeniem dlugosci tworzonego utworu, obcinamy ostatnie
# dzwieki, tak by zmiescic sie w tej dlugosci
if (poczatek_cwiercnuty + maksik) > ilosc_probek:
T[ |
else:
T[poczatek_cwiercnuty:(poczatek_cwiercnuty + maksik)] += \
cwiercnuta
T= np.array(T, dtype=np.int16)
#ustalamy glosnosc utworu
T = zmiana_glosnosci(T, loud)
return T
#pios, k = wczytywanie_sciezek(a)
#wierszyk = tworzenie_piosenki(pios, k, bpm = b['bpm'], freq = b['freq'], \
#wages = b['wages'])
#wierszyk = tworzenie_piosenki(pios, k, **b)
#wierszyk | poczatek_cwiercnuty:(poczatek_cwiercnuty + maksik)]=\
cwiercnuta[0:len(T[poczatek_cwiercnuty:(poczatek_cwiercnuty +\
maksik)])]
| conditional_block |
zapis.py | # -*- coding: utf-8 -*-
"""
Module for writing out the song (loading the settings (defs.txt), creating the .wav file,
"adjusting the volume of the track")
"""
print("Laduje modul o nazwie: "+__name__)
import numpy as np
def wczytywanie_ustawien(plik_konfiguracyjny = "defs.txt"):
"""
loads the settings file (defs.txt) into a dictionary
args:
    str: plik_konfiguracyjny - name of the configuration file with the
        parameter values (tempo etc.)
returns:
    dict: parametry - the names and values of the parameters in use
"""
import re
import numpy as np
# wczytuje zawartosc pliku (bez pierwszej i ostatniej linijki, jeden wiersz
# wyjsciowej macierzy, zawiera nazwe parametru i jego wartosc, jako
# oddzielne elementy, zapisane jako stringi)
ustawienia = np.genfromtxt(plik_konfiguracyjny, dtype = str, \
skip_header=1, skip_footer=1, delimiter=":")
# tworze slownik, ktory bedzie przechowywal wartosci
parametry = {}
# pozbywam się "" z key
# jesli mamy 1 parametr (1 linijka w pliku, to ustawienia to zmienna o
# shape = (2,), wiec odwoluje sie bezposrednio do zmiennej ustawienia
if ustawienia.shape == (2,):
parametry[re.sub('"','',ustawienia[0])] = ustawienia[1]
# jak mamy wiecej parametrow odwoluje sie do kolejnych linijek macierzy
# ustawienia
else:
for l in ustawienia:
parametry[re.sub('"','',l[0])] = l[1]
# zamieniamy napisy na odpowiednie wartosci - kontroluje te parametry, wiec
# robie to recznie
try:
parametry['tryb'] = parametry['tryb'].strip() #tryb
# jak nie podano danego parametru to idz dalej, nie wyrzucaj bledu
except KeyError:
print("Podaj tryb odczytu!")
try:
parametry['bpm'] = int(parametry['bpm']) # tempo
# jak nie podano danego parametru to idz dalej, nie wyrzucaj bledu
except KeyError:
pass
try:
parametry['freq'] = int(parametry['freq']) # frekwencja wyjsciowego wav
# jak nie podano danego parametru to idz dalej, nie wyrzucaj bledu
except KeyError:
pass
try:
parametry['loud'] = float(parametry['loud'] ) # glosnosc
# jak nie podano danego parametru to idz dalej, nie wyrzucaj bledu
except KeyError:
pass
try:
# lista wag dla sampli
parametry['wages'] = [float(s) for s in parametry['wages'].split(",")]
# jak nie podano danego parametru to idz dalej, nie wyrzucaj bledu
except KeyError:
pass
return parametry
#b = wczytywanie_ustawien("defs.txt")
#zglasnianie utworu
def zmiana_glosnosci(utwor, procent = 0):
"""
changes the volume of the track (its amplitudes)
args:
    numpy.ndarray (numpy.int16): utwor - the sound that is to be made louder
        or quieter
    float: procent - number describing the change in volume; it ranges
        from -1 to 1, where 0 means no change, 1 means "100%
        louder" and -1 means "100% quieter"
returns:
    numpy.ndarray (numpy.int16): glosniejszy - the quieter or louder track
"""
if(-1 <= procent <= 1):
#ile razy mamy pomnozyc amplitude naszego dzwieku
mnoznik = 0
if( procent < 0 ):
mnoznik = 1 + procent
else:
# obliczamy najwyzsza amplitude w danym utworze i ona bedzie
# wyznaczac jak bardzo mozemy podglosnic
maks_ampli = 0
maks_ampli = max(abs(utwor))
mnoznik = 32767/maks_ampli # maksymalny mnoznik
# mnoznik minimalnie moze osiagnac wartosc 1, to co powyzej
# (mnoznik-1) mnozymy o procent zglosnienia
# i dodajemy do podstawy (czyli 1)
mnoznik = 1 + (mnoznik - 1)*procent
glosniej = mnoznik * utwor
#glosniej = np.array(glosniej, dtype=np.int16)
glosniej = glosniej.astype(np.int16)
return glosniej
else:
print("Podaj procent z zakresu -1 do 1")
#wierszyk1 = zmiana_glosnosci(wierszyk, b['loud'])
#wierszyk1
def tworzenie_piosenki(macierz_piosenki, czy_pelna = True, bpm = 120, \
freq = 44100, wages = None, loud = 0):
"""
main function that generates the whole song
args:
    numpy.ndarray (str: U2): macierz_piosenki - matrix containing the
        definition of the consecutive quarter notes (what should be
        played in a given quarter note)
    bool: czy_pelna - flag checking whether macierz_piosenki was filled
        in (it is not when the tracks had the wrong number
        of rows or columns)
    int: bpm - tempo of the song in bpm
    int: freq - number of samples per second
    list (float): wages - weights of the individual samples (how much sample 1,
        sample 2 etc. should contribute)
    float: loud - volume factor: 0 - same as the original samples, 1 - as loud
        as possible, -1 - as quiet as possible
returns:
    numpy.ndarray (numpy.int16): the finished song
"""
# macierz piosenki byla pusta, piosenka nie zostala utworzona
if(czy_pelna == False):
print("Nie utworzono piosenki")
return None
else:
import numpy as np
import scipy.io.wavfile
t_cwiercnuty = 60 / bpm # czas trwania jednej cwiercnuty (zalezy od
#tempa)
ile_cwiercnut = macierz_piosenki.shape[0] # ilosc cwiercnut
kanaly = macierz_piosenki.shape[1] # ilosc uzywanych sampli
frekw = freq
czas_utworu = ile_cwiercnut*t_cwiercnuty
# ile elementow bedzie w nowym utworze
ilosc_probek = int(frekw*czas_utworu)
# bedziemy tylko raz wczytywac zawartosc sampleXY.wav, wiec potrzebuje
# unikalne numery sampli
rozne_sample = np.unique(macierz_piosenki) # bierze lacznie z "--"
# w slownikach zapiszemy parametry tych sampli
# slownik z wartosciami danego sampla (tj. macierze numpy-owe z
# amplitudami)
sample_co = {}
sample_frekw = {} # slownik z ich frekwencjami
sample_dl = {} # slownik z ich dlugosciami
#wczytujemy te sample
# w iteratorze bierzemy napisy "01" "02" "--" itd. stringi!!!
for ktory_sampel in rozne_sample:
if(ktory_sampel != '--'):
# tworzymy napis z nazwa pliku sampla, np. "sample01.wav"
plik = ''.join(['sample',ktory_sampel,'.wav'])
# wczytujemy zawartosc i frekwencje danego sampla do
# odpowiednio nazwanego elementu w slowniku sample_co i
# sample_frekw
sample_frekw[ktory_sampel], sample_co[ktory_sampel] = \
scipy.io.wavfile.read(plik)
# tworzymy mono z naszego sampla
sample_co[ktory_sampel] = np.mean(sample_co[ktory_sampel],\
axis=1)/32767
# normalizujemy te wartosci
sample_co[ktory_sampel] = np.int16(sample_co[ktory_sampel]/ \
max(np.abs(sample_co[ktory_sampel])) * 32767)
# zapisujemy dlugosc sampli, czyli ilosc probek
# ( = czas_trwania*frekwencja)
sample_dl[ktory_sampel] = sample_co[ktory_sampel].shape[0]
else: # to samo robimy dla "--" recznie ustawiamy
# robimy cisze, gdy --
sample_co[ktory_sampel] = np.zeros((1,), dtype=np.int16)
sample_frekw[ktory_sampel] = frekw # taka sama jak domyslna
sample_dl[ktory_sampel] = 0 # zakladamy czas 0 sekund
if wages is None:
wages = np.ones((1,kanaly))
else:
# zeby mialo wymiar (1,kanaly), a nie (kanaly,)
wages = np.array(wages).reshape(1,kanaly)
# definicja nowego utworu
T = np.linspace(0, czas_utworu, ilosc_probek)
for wiersz in range(0, ile_cwiercnut):
sample = [] # wczytamy sample z danej cwiecnuty
dlugosci = [] # tu zapiszemy ich dlugosci w tej cwiercnucie
for i in range(0, kanaly):
sampus = macierz_piosenki[wiersz,i]
sample.append(sample_co[sampus])
dlugosci.append(sample_dl[sampus])
# bierzemy najdluzszy sample i w calosci bedziemy go odtwarzac;
# reszte zatem tez w calosci odtworzymy, a gdy sie skoncza damy
# cisze (zera)
maksik = max(dlugosci)
# mamy tutaj macierz 4 na max dlugosc, przygotowana do zlaczenia
# potem tych dzwiekow w jeden
pusty = np.int16(np.zeros((len(sample), maksik)))
# dodajemy nasze dzwieki do tej pustej
for k in range(0, kanaly):
pusty[k][0:dlugosci[k]] = sample[k]
# mnozymy kolejne elementy wektora pusty (czyli sample) przez
# wagi i sumujemy
cwiercnuta = np.dot(wages, pusty)
#otrzymamy wymiar (1, x), a chcemy (x,), wiec bierzemy pierwszy
# element
cwiercnuta = cwiercnuta[0]
# poczatek biezacej cwiercnuty
poczatek_cwiercnuty = int(wiersz*t_cwiercnuty*frekw)
# jesli dodanie ostatnich cwiercnut bedzie wiazalo sie z
# przekroczeniem dlugosci tworzonego utworu, obcinamy ostatnie
# dzwieki, tak by zmiescic sie w tej dlugosci
if (poczatek_cwiercnuty + maksik) > ilosc_probek:
T[poczatek_cwiercnuty:(poczatek_cwiercnuty + maksik)]=\
cwiercnuta[0:len(T[poczatek_cwiercnuty:(poczatek_cwiercnuty +\
maksik)])]
else:
T[poczatek_cwiercnuty:(poczatek_cwiercnuty + maksik)] += \
cwiercnuta
T= np.array(T, dtype=np.int16)
#ustalamy glosnosc utworu
T = zmiana_glosnosci(T, loud)
return T
| #wages = b['wages'])
#wierszyk = tworzenie_piosenki(pios, k, **b)
#wierszyk | #pios, k = wczytywanie_sciezek(a)
#wierszyk = tworzenie_piosenki(pios, k, bpm = b['bpm'], freq = b['freq'], \
| random_line_split |
zapis.py | # -*- coding: utf-8 -*-
"""
Module for writing out the song (loading the settings (defs.txt), creating the .wav file,
"adjusting the volume of the track")
"""
print("Laduje modul o nazwie: "+__name__)
import numpy as np
def wczytywanie_ustawien(plik_konfiguracyjny = "defs.txt"):
"""
loads the settings file (defs.txt) into a dictionary
args:
    str: plik_konfiguracyjny - name of the configuration file with the
        parameter values (tempo etc.)
returns:
    dict: parametry - the names and values of the parameters in use
"""
import re
import numpy as np
# wczytuje zawartosc pliku (bez pierwszej i ostatniej linijki, jeden wiersz
# wyjsciowej macierzy, zawiera nazwe parametru i jego wartosc, jako
# oddzielne elementy, zapisane jako stringi)
ustawienia = np.genfromtxt(plik_konfiguracyjny, dtype = str, \
skip_header=1, skip_footer=1, delimiter=":")
# tworze slownik, ktory bedzie przechowywal wartosci
parametry = {}
# pozbywam się "" z key
# jesli mamy 1 parametr (1 linijka w pliku, to ustawienia to zmienna o
# shape = (2,), wiec odwoluje sie bezposrednio do zmiennej ustawienia
if ustawienia.shape == (2,):
parametry[re.sub('"','',ustawienia[0])] = ustawienia[1]
# jak mamy wiecej parametrow odwoluje sie do kolejnych linijek macierzy
# ustawienia
else:
for l in ustawienia:
parametry[re.sub('"','',l[0])] = l[1]
# zamieniamy napisy na odpowiednie wartosci - kontroluje te parametry, wiec
# robie to recznie
try:
parametry['tryb'] = parametry['tryb'].strip() #tryb
# jak nie podano danego parametru to idz dalej, nie wyrzucaj bledu
except KeyError:
print("Podaj tryb odczytu!")
try:
parametry['bpm'] = int(parametry['bpm']) # tempo
# jak nie podano danego parametru to idz dalej, nie wyrzucaj bledu
except KeyError:
pass
try:
parametry['freq'] = int(parametry['freq']) # frekwencja wyjsciowego wav
# jak nie podano danego parametru to idz dalej, nie wyrzucaj bledu
except KeyError:
pass
try:
parametry['loud'] = float(parametry['loud'] ) # glosnosc
# jak nie podano danego parametru to idz dalej, nie wyrzucaj bledu
except KeyError:
pass
try:
# lista wag dla sampli
parametry['wages'] = [float(s) for s in parametry['wages'].split(",")]
# jak nie podano danego parametru to idz dalej, nie wyrzucaj bledu
except KeyError:
pass
return parametry
#b = wczytywanie_ustawien("defs.txt")
#zglasnianie utworu
def zmiana_glosnosci(utwor, procent = 0):
"""
changes the volume of the track (its amplitudes)
args:
    numpy.ndarray (numpy.int16): utwor - the sound that is to be made louder
        or quieter
    float: procent - number describing the change in volume; it ranges
        from -1 to 1, where 0 means no change, 1 means "100%
        louder" and -1 means "100% quieter"
returns:
    numpy.ndarray (numpy.int16): glosniejszy - the quieter or louder track
"""
if(-1 <= procent <= 1):
#ile razy mamy pomnozyc amplitude naszego dzwieku
mnoznik = 0
if( procent < 0 ):
mnoznik = 1 + procent
else:
# obliczamy najwyzsza amplitude w danym utworze i ona bedzie
# wyznaczac jak bardzo mozemy podglosnic
maks_ampli = 0
maks_ampli = max(abs(utwor))
mnoznik = 32767/maks_ampli # maksymalny mnoznik
# mnoznik minimalnie moze osiagnac wartosc 1, to co powyzej
# (mnoznik-1) mnozymy o procent zglosnienia
# i dodajemy do podstawy (czyli 1)
mnoznik = 1 + (mnoznik - 1)*procent
glosniej = mnoznik * utwor
#glosniej = np.array(glosniej, dtype=np.int16)
glosniej = glosniej.astype(np.int16)
return glosniej
else:
print("Podaj procent z zakresu -1 do 1")
#wierszyk1 = zmiana_glosnosci(wierszyk, b['loud'])
#wierszyk1
def tworzenie_piosenki(macierz_piosenki, czy_pelna = True, bpm = 120, \
freq = 44100, wages = None, loud = 0):
" |
#pios, k = wczytywanie_sciezek(a)
#wierszyk = tworzenie_piosenki(pios, k, bpm = b['bpm'], freq = b['freq'], \
#wages = b['wages'])
#wierszyk = tworzenie_piosenki(pios, k, **b)
#wierszyk | ""
main function that generates the whole song
args:
    numpy.ndarray (str: U2): macierz_piosenki - matrix containing the
        definition of the consecutive quarter notes (what should be
        played in a given quarter note)
    bool: czy_pelna - flag checking whether macierz_piosenki was filled
        in (it is not when the tracks had the wrong number
        of rows or columns)
    int: bpm - tempo of the song in bpm
    int: freq - number of samples per second
    list (float): wages - weights of the individual samples (how much sample 1,
        sample 2 etc. should contribute)
    float: loud - volume factor: 0 - same as the original samples, 1 - as loud
        as possible, -1 - as quiet as possible
returns:
    numpy.ndarray (numpy.int16): the finished song
"""
# macierz piosenki byla pusta, piosenka nie zostala utworzona
if(czy_pelna == False):
print("Nie utworzono piosenki")
return None
else:
import numpy as np
import scipy.io.wavfile
t_cwiercnuty = 60 / bpm # czas trwania jednej cwiercnuty (zalezy od
#tempa)
ile_cwiercnut = macierz_piosenki.shape[0] # ilosc cwiercnut
kanaly = macierz_piosenki.shape[1] # ilosc uzywanych sampli
frekw = freq
czas_utworu = ile_cwiercnut*t_cwiercnuty
# ile elementow bedzie w nowym utworze
ilosc_probek = int(frekw*czas_utworu)
# bedziemy tylko raz wczytywac zawartosc sampleXY.wav, wiec potrzebuje
# unikalne numery sampli
rozne_sample = np.unique(macierz_piosenki) # bierze lacznie z "--"
# w slownikach zapiszemy parametry tych sampli
# slownik z wartosciami danego sampla (tj. macierze numpy-owe z
# amplitudami)
sample_co = {}
sample_frekw = {} # slownik z ich frekwencjami
sample_dl = {} # slownik z ich dlugosciami
#wczytujemy te sample
# w iteratorze bierzemy napisy "01" "02" "--" itd. stringi!!!
for ktory_sampel in rozne_sample:
if(ktory_sampel != '--'):
# tworzymy napis z nazwa pliku sampla, np. "sample01.wav"
plik = ''.join(['sample',ktory_sampel,'.wav'])
# wczytujemy zawartosc i frekwencje danego sampla do
# odpowiednio nazwanego elementu w slowniku sample_co i
# sample_frekw
sample_frekw[ktory_sampel], sample_co[ktory_sampel] = \
scipy.io.wavfile.read(plik)
# tworzymy mono z naszego sampla
sample_co[ktory_sampel] = np.mean(sample_co[ktory_sampel],\
axis=1)/32767
# normalizujemy te wartosci
sample_co[ktory_sampel] = np.int16(sample_co[ktory_sampel]/ \
max(np.abs(sample_co[ktory_sampel])) * 32767)
# zapisujemy dlugosc sampli, czyli ilosc probek
# ( = czas_trwania*frekwencja)
sample_dl[ktory_sampel] = sample_co[ktory_sampel].shape[0]
else: # to samo robimy dla "--" recznie ustawiamy
# robimy cisze, gdy --
sample_co[ktory_sampel] = np.zeros((1,), dtype=np.int16)
sample_frekw[ktory_sampel] = frekw # taka sama jak domyslna
sample_dl[ktory_sampel] = 0 # zakladamy czas 0 sekund
if wages is None:
wages = np.ones((1,kanaly))
else:
# zeby mialo wymiar (1,kanaly), a nie (kanaly,)
wages = np.array(wages).reshape(1,kanaly)
# definicja nowego utworu
T = np.linspace(0, czas_utworu, ilosc_probek)
for wiersz in range(0, ile_cwiercnut):
sample = [] # wczytamy sample z danej cwiecnuty
dlugosci = [] # tu zapiszemy ich dlugosci w tej cwiercnucie
for i in range(0, kanaly):
sampus = macierz_piosenki[wiersz,i]
sample.append(sample_co[sampus])
dlugosci.append(sample_dl[sampus])
# bierzemy najdluzszy sample i w calosci bedziemy go odtwarzac;
# reszte zatem tez w calosci odtworzymy, a gdy sie skoncza damy
# cisze (zera)
maksik = max(dlugosci)
# mamy tutaj macierz 4 na max dlugosc, przygotowana do zlaczenia
# potem tych dzwiekow w jeden
pusty = np.int16(np.zeros((len(sample), maksik)))
# dodajemy nasze dzwieki do tej pustej
for k in range(0, kanaly):
pusty[k][0:dlugosci[k]] = sample[k]
# mnozymy kolejne elementy wektora pusty (czyli sample) przez
# wagi i sumujemy
cwiercnuta = np.dot(wages, pusty)
#otrzymamy wymiar (1, x), a chcemy (x,), wiec bierzemy pierwszy
# element
cwiercnuta = cwiercnuta[0]
# poczatek biezacej cwiercnuty
poczatek_cwiercnuty = int(wiersz*t_cwiercnuty*frekw)
# jesli dodanie ostatnich cwiercnut bedzie wiazalo sie z
# przekroczeniem dlugosci tworzonego utworu, obcinamy ostatnie
# dzwieki, tak by zmiescic sie w tej dlugosci
if (poczatek_cwiercnuty + maksik) > ilosc_probek:
T[poczatek_cwiercnuty:(poczatek_cwiercnuty + maksik)]=\
cwiercnuta[0:len(T[poczatek_cwiercnuty:(poczatek_cwiercnuty +\
maksik)])]
else:
T[poczatek_cwiercnuty:(poczatek_cwiercnuty + maksik)] += \
cwiercnuta
T= np.array(T, dtype=np.int16)
#ustalamy glosnosc utworu
T = zmiana_glosnosci(T, loud)
return T
| identifier_body |
proof.rs | use std::cmp::Ordering;
use std::hash::{Hash, Hasher};
use ring::digest::Algorithm;
use crate::hashutils::HashUtils;
use crate::tree::Tree;
/// An inclusion proof represents the fact that a `value` is a member
/// of a `MerkleTree` with root hash `root_hash`, and hash function `algorithm`.
#[cfg_attr(feature = "serialization-serde", derive(Serialize, Deserialize))]
#[derive(Clone, Debug)]
pub struct Proof<T> {
/// The hashing algorithm used in the original `MerkleTree`
#[cfg_attr(feature = "serialization-serde", serde(with = "algorithm_serde"))]
pub algorithm: &'static Algorithm,
/// The hash of the root of the original `MerkleTree`
pub root_hash: Vec<u8>,
/// The first `Lemma` of the `Proof`
pub lemma: Lemma,
/// The value concerned by this `Proof`
pub value: T,
}
#[cfg(feature = "serialization-serde")]
mod algorithm_serde {
use ring::digest::{self, Algorithm};
use serde::de::Error;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
pub fn serialize<S: Serializer>(
algorithm: &'static Algorithm,
se: S,
) -> Result<S::Ok, S::Error> {
// The `Debug` implementation of `Algorithm` prints its ID.
format!("{:?}", algorithm).serialize(se)
}
pub fn deserialize<'de, D: Deserializer<'de>>(de: D) -> Result<&'static Algorithm, D::Error> {
let alg_str: String = Deserialize::deserialize(de)?;
match &*alg_str {
"SHA1" => Ok(&digest::SHA1_FOR_LEGACY_USE_ONLY),
"SHA256" => Ok(&digest::SHA256),
"SHA384" => Ok(&digest::SHA384),
"SHA512" => Ok(&digest::SHA512),
"SHA512_256" => Ok(&digest::SHA512_256),
_ => Err(D::Error::custom("unknown hash algorithm")),
}
}
#[cfg(test)]
mod test {
use super::*;
use ring::digest::{
SHA1_FOR_LEGACY_USE_ONLY as sha1, SHA256 as sha256, SHA384 as sha384, SHA512 as sha512,
SHA512_256 as sha512_256,
};
static SHA1: &Algorithm = &sha1;
static SHA256: &Algorithm = &sha256;
static SHA384: &Algorithm = &sha384;
static SHA512: &Algorithm = &sha512;
static SHA512_256: &Algorithm = &sha512_256;
#[test]
fn test_serialize_known_algorithms() {
extern crate serde_json;
for alg in &[SHA1, SHA256, SHA384, SHA512, SHA512_256] {
let mut serializer = serde_json::Serializer::with_formatter(
vec![],
serde_json::ser::PrettyFormatter::new(),
);
serialize(alg, &mut serializer).unwrap_or_else(|_| panic!("{:?}", alg));
let alg_ = deserialize(&mut serde_json::Deserializer::from_slice(
&serializer.into_inner()[..],
))
.unwrap_or_else(|_| panic!("{:?}", alg));
assert_eq!(*alg, alg_);
}
}
#[test]
#[should_panic(expected = "unknown hash algorithm")]
fn test_serialize_unknown_algorithm() {
extern crate serde_json;
{
let alg_str = "\"BLAKE2b\"";
let mut deserializer = serde_json::Deserializer::from_str(alg_str);
let _ = deserialize(&mut deserializer)
.unwrap_or_else(|_| panic!("unknown hash algorithm {:?}", alg_str));
}
}
}
}
impl<T: PartialEq> PartialEq for Proof<T> {
fn eq(&self, other: &Proof<T>) -> bool {
self.root_hash == other.root_hash && self.lemma == other.lemma && self.value == other.value
}
}
impl<T: Eq> Eq for Proof<T> {}
impl<T: Ord> PartialOrd for Proof<T> {
fn partial_cmp(&self, other: &Proof<T>) -> Option<Ordering> |
}
impl<T: Ord> Ord for Proof<T> {
fn cmp(&self, other: &Proof<T>) -> Ordering {
self.root_hash
.cmp(&other.root_hash)
.then(self.value.cmp(&other.value))
.then_with(|| self.lemma.cmp(&other.lemma))
}
}
impl<T: Hash> Hash for Proof<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.root_hash.hash(state);
self.lemma.hash(state);
self.value.hash(state);
}
}
impl<T> Proof<T> {
/// Constructs a new `Proof`
pub fn new(algorithm: &'static Algorithm, root_hash: Vec<u8>, lemma: Lemma, value: T) -> Self {
Proof {
algorithm,
root_hash,
lemma,
value,
}
}
/// Checks whether this inclusion proof is well-formed,
/// and whether its root hash matches the given `root_hash`.
pub fn validate(&self, root_hash: &[u8]) -> bool {
if self.root_hash != root_hash || self.lemma.node_hash != root_hash {
return false;
}
self.lemma.validate(self.algorithm)
}
/// Returns the index of this proof's value, given the total number of items in the tree.
///
/// # Panics
///
/// Panics if the proof is malformed. Call `validate` first.
pub fn index(&self, count: usize) -> usize {
self.lemma.index(count)
}
}
/// A `Lemma` holds the hash of a node, the hash of its sibling node,
/// and a sub lemma, whose `node_hash`, when combined with this `sibling_hash`
/// must be equal to this `node_hash`.
#[cfg_attr(feature = "serialization-serde", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Lemma {
pub node_hash: Vec<u8>,
pub sibling_hash: Option<Positioned<Vec<u8>>>,
pub sub_lemma: Option<Box<Lemma>>,
}
impl Lemma {
/// Attempts to generate a proof that a value with hash `needle` is a
/// member of the given `tree`.
pub fn new<T>(tree: &Tree<T>, needle: &[u8]) -> Option<Lemma> {
match *tree {
Tree::Empty { .. } => None,
Tree::Leaf { ref hash, .. } => Lemma::new_leaf_proof(hash, needle),
Tree::Node {
ref hash,
ref left,
ref right,
} => Lemma::new_tree_proof(hash, needle, left, right),
}
}
/// Attempts to generate a proof that the `idx`-th leaf is a member of
/// the given tree. The `count` must equal the number of leaves in the
/// `tree`. If `idx >= count`, `None` is returned. Otherwise it returns
/// the new `Lemma` and the `idx`-th value.
pub fn new_by_index<T>(tree: &Tree<T>, idx: usize, count: usize) -> Option<(Lemma, &T)> {
if idx >= count {
return None;
}
match *tree {
Tree::Empty { .. } => None,
Tree::Leaf {
ref hash,
ref value,
..
} => {
if count != 1 {
return None;
}
let lemma = Lemma {
node_hash: hash.clone(),
sibling_hash: None,
sub_lemma: None,
};
Some((lemma, value))
}
Tree::Node {
ref hash,
ref left,
ref right,
} => {
let left_count = count.next_power_of_two() / 2;
let (sub_lem_val, sibling_hash);
if idx < left_count {
sub_lem_val = Lemma::new_by_index(left, idx, left_count);
sibling_hash = Positioned::Right(right.hash().clone());
} else {
sub_lem_val = Lemma::new_by_index(right, idx - left_count, count - left_count);
sibling_hash = Positioned::Left(left.hash().clone());
}
sub_lem_val.map(|(sub_lemma, value)| {
let lemma = Lemma {
node_hash: hash.clone(),
sibling_hash: Some(sibling_hash),
sub_lemma: Some(Box::new(sub_lemma)),
};
(lemma, value)
})
}
}
}
/// Returns the index of this lemma's value, given the total number of items in the tree.
///
/// # Panics
///
/// Panics if the lemma is malformed. Call `validate_lemma` first.
pub fn index(&self, count: usize) -> usize {
let left_count = count.next_power_of_two() / 2;
match (self.sub_lemma.as_ref(), self.sibling_hash.as_ref()) {
(None, None) => 0,
(Some(l), Some(&Positioned::Left(_))) => left_count + l.index(count - left_count),
(Some(l), Some(&Positioned::Right(_))) => l.index(left_count),
(None, Some(_)) | (Some(_), None) => panic!("malformed lemma"),
}
}
fn new_leaf_proof(hash: &[u8], needle: &[u8]) -> Option<Lemma> {
if *hash == *needle {
Some(Lemma {
node_hash: hash.into(),
sibling_hash: None,
sub_lemma: None,
})
} else {
None
}
}
fn new_tree_proof<T>(
hash: &[u8],
needle: &[u8],
left: &Tree<T>,
right: &Tree<T>,
) -> Option<Lemma> {
Lemma::new(left, needle)
.map(|lemma| {
let right_hash = right.hash().clone();
let sub_lemma = Some(Positioned::Right(right_hash));
(lemma, sub_lemma)
})
.or_else(|| {
let sub_lemma = Lemma::new(right, needle);
sub_lemma.map(|lemma| {
let left_hash = left.hash().clone();
let sub_lemma = Some(Positioned::Left(left_hash));
(lemma, sub_lemma)
})
})
.map(|(sub_lemma, sibling_hash)| Lemma {
node_hash: hash.into(),
sibling_hash,
sub_lemma: Some(Box::new(sub_lemma)),
})
}
fn validate(&self, algorithm: &'static Algorithm) -> bool {
match self.sub_lemma {
None => self.sibling_hash.is_none(),
Some(ref sub) => match self.sibling_hash {
None => false,
Some(Positioned::Left(ref hash)) => {
let combined = algorithm.hash_nodes(hash, &sub.node_hash);
let hashes_match = combined.as_ref() == self.node_hash.as_slice();
hashes_match && sub.validate(algorithm)
}
Some(Positioned::Right(ref hash)) => {
let combined = algorithm.hash_nodes(&sub.node_hash, hash);
let hashes_match = combined.as_ref() == self.node_hash.as_slice();
hashes_match && sub.validate(algorithm)
}
},
}
}
}
/// Tags a value so that we know from which branch of a `Tree` (if any) it was found.
#[cfg_attr(feature = "serialization-serde", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Positioned<T> {
/// The value was found in the left branch
Left(T),
/// The value was found in the right branch
Right(T),
}
| {
Some(self.cmp(other))
} | identifier_body |
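// A hedged consumer-side sketch: given a `Proof` received from a peer plus the
// trusted root hash and leaf count, check inclusion and recover the leaf
// position. Only the `Proof` API shown above is used; how the proof was
// produced (e.g. by a Merkle tree elsewhere in this crate) is out of scope.
fn check_inclusion(proof: &Proof<Vec<u8>>, trusted_root: &[u8], leaf_count: usize) -> Option<usize> {
    if proof.validate(trusted_root) {
        // `index` must only be called on a well-formed proof, hence the
        // `validate` call above
        Some(proof.index(leaf_count))
    } else {
        None
    }
}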
proof.rs | use std::cmp::Ordering;
use std::hash::{Hash, Hasher};
use ring::digest::Algorithm;
use crate::hashutils::HashUtils;
use crate::tree::Tree;
/// An inclusion proof represents the fact that a `value` is a member
/// of a `MerkleTree` with root hash `root_hash`, and hash function `algorithm`.
#[cfg_attr(feature = "serialization-serde", derive(Serialize, Deserialize))]
#[derive(Clone, Debug)]
pub struct Proof<T> {
/// The hashing algorithm used in the original `MerkleTree`
#[cfg_attr(feature = "serialization-serde", serde(with = "algorithm_serde"))]
pub algorithm: &'static Algorithm,
/// The hash of the root of the original `MerkleTree`
pub root_hash: Vec<u8>,
/// The first `Lemma` of the `Proof`
pub lemma: Lemma,
/// The value concerned by this `Proof`
pub value: T,
}
#[cfg(feature = "serialization-serde")]
mod algorithm_serde {
use ring::digest::{self, Algorithm};
use serde::de::Error;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
pub fn serialize<S: Serializer>(
algorithm: &'static Algorithm,
se: S,
) -> Result<S::Ok, S::Error> {
// The `Debug` implementation of `Algorithm` prints its ID.
format!("{:?}", algorithm).serialize(se)
}
pub fn deserialize<'de, D: Deserializer<'de>>(de: D) -> Result<&'static Algorithm, D::Error> {
let alg_str: String = Deserialize::deserialize(de)?;
match &*alg_str {
"SHA1" => Ok(&digest::SHA1_FOR_LEGACY_USE_ONLY),
"SHA256" => Ok(&digest::SHA256),
"SHA384" => Ok(&digest::SHA384),
"SHA512" => Ok(&digest::SHA512),
"SHA512_256" => Ok(&digest::SHA512_256),
_ => Err(D::Error::custom("unknown hash algorithm")),
}
}
#[cfg(test)]
mod test {
use super::*;
use ring::digest::{
SHA1_FOR_LEGACY_USE_ONLY as sha1, SHA256 as sha256, SHA384 as sha384, SHA512 as sha512,
SHA512_256 as sha512_256,
};
static SHA1: &Algorithm = &sha1;
static SHA256: &Algorithm = &sha256;
static SHA384: &Algorithm = &sha384;
static SHA512: &Algorithm = &sha512;
static SHA512_256: &Algorithm = &sha512_256;
#[test]
fn test_serialize_known_algorithms() {
extern crate serde_json;
for alg in &[SHA1, SHA256, SHA384, SHA512, SHA512_256] {
let mut serializer = serde_json::Serializer::with_formatter(
vec![],
serde_json::ser::PrettyFormatter::new(),
);
serialize(alg, &mut serializer).unwrap_or_else(|_| panic!("{:?}", alg));
let alg_ = deserialize(&mut serde_json::Deserializer::from_slice(
&serializer.into_inner()[..],
))
.unwrap_or_else(|_| panic!("{:?}", alg));
assert_eq!(*alg, alg_);
}
}
#[test]
#[should_panic(expected = "unknown hash algorithm")]
fn test_serialize_unknown_algorithm() {
extern crate serde_json;
{
let alg_str = "\"BLAKE2b\"";
let mut deserializer = serde_json::Deserializer::from_str(alg_str);
let _ = deserialize(&mut deserializer)
.unwrap_or_else(|_| panic!("unknown hash algorithm {:?}", alg_str));
}
}
}
}
impl<T: PartialEq> PartialEq for Proof<T> {
fn eq(&self, other: &Proof<T>) -> bool {
self.root_hash == other.root_hash && self.lemma == other.lemma && self.value == other.value
}
}
impl<T: Eq> Eq for Proof<T> {}
impl<T: Ord> PartialOrd for Proof<T> {
fn partial_cmp(&self, other: &Proof<T>) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl<T: Ord> Ord for Proof<T> {
fn cmp(&self, other: &Proof<T>) -> Ordering {
self.root_hash
.cmp(&other.root_hash)
.then(self.value.cmp(&other.value))
.then_with(|| self.lemma.cmp(&other.lemma))
}
}
impl<T: Hash> Hash for Proof<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.root_hash.hash(state);
self.lemma.hash(state);
self.value.hash(state);
}
}
impl<T> Proof<T> {
/// Constructs a new `Proof`
pub fn new(algorithm: &'static Algorithm, root_hash: Vec<u8>, lemma: Lemma, value: T) -> Self {
Proof {
algorithm,
root_hash,
lemma,
value,
}
}
/// Checks whether this inclusion proof is well-formed,
/// and whether its root hash matches the given `root_hash`.
pub fn validate(&self, root_hash: &[u8]) -> bool {
if self.root_hash != root_hash || self.lemma.node_hash != root_hash {
return false;
}
self.lemma.validate(self.algorithm)
}
/// Returns the index of this proof's value, given the total number of items in the tree.
///
/// # Panics
///
/// Panics if the proof is malformed. Call `validate` first.
pub fn index(&self, count: usize) -> usize {
self.lemma.index(count)
}
}
/// A `Lemma` holds the hash of a node, the hash of its sibling node,
/// and a sub lemma, whose `node_hash`, when combined with this `sibling_hash`
/// must be equal to this `node_hash`.
#[cfg_attr(feature = "serialization-serde", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Lemma {
pub node_hash: Vec<u8>,
pub sibling_hash: Option<Positioned<Vec<u8>>>,
pub sub_lemma: Option<Box<Lemma>>,
}
impl Lemma {
/// Attempts to generate a proof that a value with hash `needle` is a
/// member of the given `tree`.
pub fn new<T>(tree: &Tree<T>, needle: &[u8]) -> Option<Lemma> {
match *tree {
Tree::Empty { .. } => None,
Tree::Leaf { ref hash, .. } => Lemma::new_leaf_proof(hash, needle),
Tree::Node {
ref hash,
ref left,
ref right,
} => Lemma::new_tree_proof(hash, needle, left, right),
}
}
/// Attempts to generate a proof that the `idx`-th leaf is a member of
/// the given tree. The `count` must equal the number of leaves in the
/// `tree`. If `idx >= count`, `None` is returned. Otherwise it returns
/// the new `Lemma` and the `idx`-th value.
pub fn new_by_index<T>(tree: &Tree<T>, idx: usize, count: usize) -> Option<(Lemma, &T)> {
if idx >= count {
return None;
}
match *tree {
Tree::Empty { .. } => None,
Tree::Leaf {
ref hash,
ref value,
..
} => {
if count != 1 {
return None;
}
let lemma = Lemma {
node_hash: hash.clone(),
sibling_hash: None,
sub_lemma: None,
};
Some((lemma, value))
}
Tree::Node {
ref hash,
ref left,
ref right,
} => {
let left_count = count.next_power_of_two() / 2;
let (sub_lem_val, sibling_hash);
if idx < left_count {
sub_lem_val = Lemma::new_by_index(left, idx, left_count);
sibling_hash = Positioned::Right(right.hash().clone());
} else {
sub_lem_val = Lemma::new_by_index(right, idx - left_count, count - left_count);
sibling_hash = Positioned::Left(left.hash().clone());
}
sub_lem_val.map(|(sub_lemma, value)| {
let lemma = Lemma {
node_hash: hash.clone(),
sibling_hash: Some(sibling_hash),
sub_lemma: Some(Box::new(sub_lemma)),
};
(lemma, value)
})
}
}
}
/// Returns the index of this lemma's value, given the total number of items in the tree.
///
/// # Panics
///
/// Panics if the lemma is malformed. Call `validate_lemma` first.
pub fn index(&self, count: usize) -> usize {
let left_count = count.next_power_of_two() / 2;
match (self.sub_lemma.as_ref(), self.sibling_hash.as_ref()) {
(None, None) => 0,
(Some(l), Some(&Positioned::Left(_))) => left_count + l.index(count - left_count),
(Some(l), Some(&Positioned::Right(_))) => l.index(left_count),
(None, Some(_)) | (Some(_), None) => panic!("malformed lemma"),
}
}
fn new_leaf_proof(hash: &[u8], needle: &[u8]) -> Option<Lemma> {
if *hash == *needle | else {
None
}
}
fn new_tree_proof<T>(
hash: &[u8],
needle: &[u8],
left: &Tree<T>,
right: &Tree<T>,
) -> Option<Lemma> {
Lemma::new(left, needle)
.map(|lemma| {
let right_hash = right.hash().clone();
let sub_lemma = Some(Positioned::Right(right_hash));
(lemma, sub_lemma)
})
.or_else(|| {
let sub_lemma = Lemma::new(right, needle);
sub_lemma.map(|lemma| {
let left_hash = left.hash().clone();
let sub_lemma = Some(Positioned::Left(left_hash));
(lemma, sub_lemma)
})
})
.map(|(sub_lemma, sibling_hash)| Lemma {
node_hash: hash.into(),
sibling_hash,
sub_lemma: Some(Box::new(sub_lemma)),
})
}
fn validate(&self, algorithm: &'static Algorithm) -> bool {
match self.sub_lemma {
None => self.sibling_hash.is_none(),
Some(ref sub) => match self.sibling_hash {
None => false,
Some(Positioned::Left(ref hash)) => {
let combined = algorithm.hash_nodes(hash, &sub.node_hash);
let hashes_match = combined.as_ref() == self.node_hash.as_slice();
hashes_match && sub.validate(algorithm)
}
Some(Positioned::Right(ref hash)) => {
let combined = algorithm.hash_nodes(&sub.node_hash, hash);
let hashes_match = combined.as_ref() == self.node_hash.as_slice();
hashes_match && sub.validate(algorithm)
}
},
}
}
}
/// Tags a value so that we know from which branch of a `Tree` (if any) it was found.
#[cfg_attr(feature = "serialization-serde", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Positioned<T> {
/// The value was found in the left branch
Left(T),
/// The value was found in the right branch
Right(T),
}
| {
Some(Lemma {
node_hash: hash.into(),
sibling_hash: None,
sub_lemma: None,
})
} | conditional_block |
proof.rs | use std::cmp::Ordering;
use std::hash::{Hash, Hasher};
use ring::digest::Algorithm;
use crate::hashutils::HashUtils;
use crate::tree::Tree;
/// An inclusion proof represents the fact that a `value` is a member
/// of a `MerkleTree` with root hash `root_hash`, and hash function `algorithm`.
#[cfg_attr(feature = "serialization-serde", derive(Serialize, Deserialize))]
#[derive(Clone, Debug)]
pub struct Proof<T> {
/// The hashing algorithm used in the original `MerkleTree`
#[cfg_attr(feature = "serialization-serde", serde(with = "algorithm_serde"))]
pub algorithm: &'static Algorithm,
/// The hash of the root of the original `MerkleTree`
pub root_hash: Vec<u8>,
/// The first `Lemma` of the `Proof`
pub lemma: Lemma,
/// The value concerned by this `Proof`
pub value: T,
}
#[cfg(feature = "serialization-serde")]
mod algorithm_serde {
use ring::digest::{self, Algorithm};
use serde::de::Error;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
pub fn serialize<S: Serializer>(
algorithm: &'static Algorithm,
se: S,
) -> Result<S::Ok, S::Error> {
// The `Debug` implementation of `Algorithm` prints its ID.
format!("{:?}", algorithm).serialize(se)
}
pub fn deserialize<'de, D: Deserializer<'de>>(de: D) -> Result<&'static Algorithm, D::Error> {
let alg_str: String = Deserialize::deserialize(de)?;
match &*alg_str {
"SHA1" => Ok(&digest::SHA1_FOR_LEGACY_USE_ONLY),
"SHA256" => Ok(&digest::SHA256),
"SHA384" => Ok(&digest::SHA384),
"SHA512" => Ok(&digest::SHA512),
"SHA512_256" => Ok(&digest::SHA512_256),
_ => Err(D::Error::custom("unknown hash algorithm")),
}
}
#[cfg(test)]
mod test {
use super::*;
use ring::digest::{
SHA1_FOR_LEGACY_USE_ONLY as sha1, SHA256 as sha256, SHA384 as sha384, SHA512 as sha512,
SHA512_256 as sha512_256,
};
static SHA1: &Algorithm = &sha1;
static SHA256: &Algorithm = &sha256;
static SHA384: &Algorithm = &sha384;
static SHA512: &Algorithm = &sha512;
static SHA512_256: &Algorithm = &sha512_256;
#[test]
fn test_serialize_known_algorithms() {
extern crate serde_json;
for alg in &[SHA1, SHA256, SHA384, SHA512, SHA512_256] {
let mut serializer = serde_json::Serializer::with_formatter(
vec![],
serde_json::ser::PrettyFormatter::new(),
);
serialize(alg, &mut serializer).unwrap_or_else(|_| panic!("{:?}", alg));
let alg_ = deserialize(&mut serde_json::Deserializer::from_slice(
&serializer.into_inner()[..],
))
.unwrap_or_else(|_| panic!("{:?}", alg));
assert_eq!(*alg, alg_);
}
}
#[test]
#[should_panic(expected = "unknown hash algorithm")]
fn test_serialize_unknown_algorithm() {
extern crate serde_json;
{
let alg_str = "\"BLAKE2b\"";
let mut deserializer = serde_json::Deserializer::from_str(alg_str);
let _ = deserialize(&mut deserializer)
.unwrap_or_else(|_| panic!("unknown hash algorithm {:?}", alg_str));
}
}
}
}
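// Editor note: with the "serialization-serde" feature enabled, `algorithm` round-trips
// as the string produced by ring's Debug impl (e.g. "SHA256"), so supporting another
// digest only requires extending the match in `deserialize` above.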
impl<T: PartialEq> PartialEq for Proof<T> {
fn eq(&self, other: &Proof<T>) -> bool {
self.root_hash == other.root_hash && self.lemma == other.lemma && self.value == other.value
}
}
impl<T: Eq> Eq for Proof<T> {}
impl<T: Ord> PartialOrd for Proof<T> {
fn partial_cmp(&self, other: &Proof<T>) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl<T: Ord> Ord for Proof<T> {
fn cmp(&self, other: &Proof<T>) -> Ordering {
self.root_hash
.cmp(&other.root_hash)
.then(self.value.cmp(&other.value))
.then_with(|| self.lemma.cmp(&other.lemma))
}
}
impl<T: Hash> Hash for Proof<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.root_hash.hash(state);
self.lemma.hash(state);
self.value.hash(state);
}
}
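// Editor note: the PartialEq, Ord and Hash impls above compare only `root_hash`,
// `lemma` and `value`; the `algorithm` field is not part of the comparison, so two
// proofs that differ only in hash function are considered equal.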
impl<T> Proof<T> {
/// Constructs a new `Proof`
pub fn new(algorithm: &'static Algorithm, root_hash: Vec<u8>, lemma: Lemma, value: T) -> Self {
Proof {
algorithm,
root_hash,
lemma,
value,
}
}
/// Checks whether this inclusion proof is well-formed,
/// and whether its root hash matches the given `root_hash`.
pub fn validate(&self, root_hash: &[u8]) -> bool {
if self.root_hash != root_hash || self.lemma.node_hash != root_hash {
return false;
}
self.lemma.validate(self.algorithm)
}
/// Returns the index of this proof's value, given the total number of items in the tree.
///
/// # Panics
///
/// Panics if the proof is malformed. Call `validate` first.
pub fn index(&self, count: usize) -> usize {
self.lemma.index(count)
}
}
/// A `Lemma` holds the hash of a node, the hash of its sibling node,
/// and a sub lemma, whose `node_hash`, when combined with this `sibling_hash`,
/// must be equal to this `node_hash`.
#[cfg_attr(feature = "serialization-serde", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Lemma {
pub node_hash: Vec<u8>,
pub sibling_hash: Option<Positioned<Vec<u8>>>,
pub sub_lemma: Option<Box<Lemma>>,
}
impl Lemma {
/// Attempts to generate a proof that a value with hash `needle` is a
/// member of the given `tree`.
pub fn new<T>(tree: &Tree<T>, needle: &[u8]) -> Option<Lemma> {
match *tree {
Tree::Empty { .. } => None,
Tree::Leaf { ref hash, .. } => Lemma::new_leaf_proof(hash, needle),
Tree::Node {
ref hash,
ref left,
ref right,
} => Lemma::new_tree_proof(hash, needle, left, right),
}
}
/// Attempts to generate a proof that the `idx`-th leaf is a member of
/// the given tree. The `count` must equal the number of leaves in the
/// `tree`. If `idx >= count`, `None` is returned. Otherwise it returns
/// the new `Lemma` and the `idx`-th value.
pub fn new_by_index<T>(tree: &Tree<T>, idx: usize, count: usize) -> Option<(Lemma, &T)> {
if idx >= count {
return None;
}
match *tree {
Tree::Empty { .. } => None,
Tree::Leaf {
ref hash,
ref value,
..
} => {
if count != 1 {
return None;
}
let lemma = Lemma {
node_hash: hash.clone(),
sibling_hash: None,
sub_lemma: None,
};
Some((lemma, value))
}
Tree::Node {
ref hash,
ref left,
ref right,
} => {
let left_count = count.next_power_of_two() / 2;
let (sub_lem_val, sibling_hash);
if idx < left_count {
sub_lem_val = Lemma::new_by_index(left, idx, left_count);
sibling_hash = Positioned::Right(right.hash().clone());
} else {
sub_lem_val = Lemma::new_by_index(right, idx - left_count, count - left_count);
sibling_hash = Positioned::Left(left.hash().clone());
}
sub_lem_val.map(|(sub_lemma, value)| {
let lemma = Lemma {
node_hash: hash.clone(),
sibling_hash: Some(sibling_hash),
sub_lemma: Some(Box::new(sub_lemma)),
};
(lemma, value)
})
}
}
}
/// Returns the index of this lemma's value, given the total number of items in the tree.
///
/// # Panics
///
/// Panics if the lemma is malformed. Call `validate_lemma` first.
pub fn index(&self, count: usize) -> usize {
let left_count = count.next_power_of_two() / 2;
match (self.sub_lemma.as_ref(), self.sibling_hash.as_ref()) {
(None, None) => 0,
(Some(l), Some(&Positioned::Left(_))) => left_count + l.index(count - left_count),
(Some(l), Some(&Positioned::Right(_))) => l.index(left_count),
(None, Some(_)) | (Some(_), None) => panic!("malformed lemma"),
}
}
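// Worked example (editor-added): for count = 5, count.next_power_of_two() is 8, so
// left_count = 4. A sibling tagged Positioned::Left means the value lives in the
// right subtree, giving 4 + sub.index(5 - 4); Positioned::Right recurses into the
// left subtree with sub.index(4). This mirrors the split used by `new_by_index` above.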
fn new_leaf_proof(hash: &[u8], needle: &[u8]) -> Option<Lemma> {
if *hash == *needle {
Some(Lemma {
node_hash: hash.into(),
sibling_hash: None,
sub_lemma: None,
})
} else {
None
}
}
fn | <T>(
hash: &[u8],
needle: &[u8],
left: &Tree<T>,
right: &Tree<T>,
) -> Option<Lemma> {
Lemma::new(left, needle)
.map(|lemma| {
let right_hash = right.hash().clone();
let sub_lemma = Some(Positioned::Right(right_hash));
(lemma, sub_lemma)
})
.or_else(|| {
let sub_lemma = Lemma::new(right, needle);
sub_lemma.map(|lemma| {
let left_hash = left.hash().clone();
let sub_lemma = Some(Positioned::Left(left_hash));
(lemma, sub_lemma)
})
})
.map(|(sub_lemma, sibling_hash)| Lemma {
node_hash: hash.into(),
sibling_hash,
sub_lemma: Some(Box::new(sub_lemma)),
})
}
fn validate(&self, algorithm: &'static Algorithm) -> bool {
match self.sub_lemma {
None => self.sibling_hash.is_none(),
Some(ref sub) => match self.sibling_hash {
None => false,
Some(Positioned::Left(ref hash)) => {
let combined = algorithm.hash_nodes(hash, &sub.node_hash);
let hashes_match = combined.as_ref() == self.node_hash.as_slice();
hashes_match && sub.validate(algorithm)
}
Some(Positioned::Right(ref hash)) => {
let combined = algorithm.hash_nodes(&sub.node_hash, hash);
let hashes_match = combined.as_ref() == self.node_hash.as_slice();
hashes_match && sub.validate(algorithm)
}
},
}
}
}
/// Tags a value so that we know from which branch of a `Tree` (if any) it was found.
#[cfg_attr(feature = "serialization-serde", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Positioned<T> {
/// The value was found in the left branch
Left(T),
/// The value was found in the right branch
Right(T),
}
| new_tree_proof | identifier_name |
proof.rs | use std::cmp::Ordering;
use std::hash::{Hash, Hasher};
use ring::digest::Algorithm;
use crate::hashutils::HashUtils;
use crate::tree::Tree;
/// An inclusion proof represents the fact that a `value` is a member
/// of a `MerkleTree` with root hash `root_hash`, and hash function `algorithm`.
#[cfg_attr(feature = "serialization-serde", derive(Serialize, Deserialize))]
#[derive(Clone, Debug)]
pub struct Proof<T> {
/// The hashing algorithm used in the original `MerkleTree`
#[cfg_attr(feature = "serialization-serde", serde(with = "algorithm_serde"))]
pub algorithm: &'static Algorithm,
/// The hash of the root of the original `MerkleTree`
pub root_hash: Vec<u8>,
/// The first `Lemma` of the `Proof`
pub lemma: Lemma,
/// The value concerned by this `Proof`
pub value: T,
}
#[cfg(feature = "serialization-serde")]
mod algorithm_serde {
use ring::digest::{self, Algorithm};
use serde::de::Error;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
pub fn serialize<S: Serializer>(
algorithm: &'static Algorithm,
se: S,
) -> Result<S::Ok, S::Error> {
// The `Debug` implementation of `Algorithm` prints its ID.
format!("{:?}", algorithm).serialize(se)
}
pub fn deserialize<'de, D: Deserializer<'de>>(de: D) -> Result<&'static Algorithm, D::Error> {
let alg_str: String = Deserialize::deserialize(de)?;
match &*alg_str {
"SHA1" => Ok(&digest::SHA1_FOR_LEGACY_USE_ONLY),
"SHA256" => Ok(&digest::SHA256),
"SHA384" => Ok(&digest::SHA384),
"SHA512" => Ok(&digest::SHA512),
"SHA512_256" => Ok(&digest::SHA512_256),
_ => Err(D::Error::custom("unknown hash algorithm")),
}
}
#[cfg(test)]
mod test {
use super::*;
use ring::digest::{
SHA1_FOR_LEGACY_USE_ONLY as sha1, SHA256 as sha256, SHA384 as sha384, SHA512 as sha512,
SHA512_256 as sha512_256,
};
static SHA1: &Algorithm = &sha1;
static SHA256: &Algorithm = &sha256;
static SHA384: &Algorithm = &sha384;
static SHA512: &Algorithm = &sha512;
static SHA512_256: &Algorithm = &sha512_256;
#[test]
fn test_serialize_known_algorithms() {
extern crate serde_json;
for alg in &[SHA1, SHA256, SHA384, SHA512, SHA512_256] {
let mut serializer = serde_json::Serializer::with_formatter(
vec![],
serde_json::ser::PrettyFormatter::new(),
);
serialize(alg, &mut serializer).unwrap_or_else(|_| panic!("{:?}", alg));
let alg_ = deserialize(&mut serde_json::Deserializer::from_slice(
&serializer.into_inner()[..],
))
.unwrap_or_else(|_| panic!("{:?}", alg));
assert_eq!(*alg, alg_);
}
}
#[test]
#[should_panic(expected = "unknown hash algorithm")]
fn test_serialize_unknown_algorithm() {
extern crate serde_json;
{
let alg_str = "\"BLAKE2b\"";
let mut deserializer = serde_json::Deserializer::from_str(alg_str);
let _ = deserialize(&mut deserializer)
.unwrap_or_else(|_| panic!("unknown hash algorithm {:?}", alg_str));
}
}
}
}
impl<T: PartialEq> PartialEq for Proof<T> {
fn eq(&self, other: &Proof<T>) -> bool {
self.root_hash == other.root_hash && self.lemma == other.lemma && self.value == other.value
}
}
impl<T: Eq> Eq for Proof<T> {}
impl<T: Ord> PartialOrd for Proof<T> {
fn partial_cmp(&self, other: &Proof<T>) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl<T: Ord> Ord for Proof<T> {
fn cmp(&self, other: &Proof<T>) -> Ordering {
self.root_hash
.cmp(&other.root_hash)
.then(self.value.cmp(&other.value))
.then_with(|| self.lemma.cmp(&other.lemma))
}
}
impl<T: Hash> Hash for Proof<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.root_hash.hash(state);
self.lemma.hash(state);
self.value.hash(state);
}
}
impl<T> Proof<T> {
/// Constructs a new `Proof`
pub fn new(algorithm: &'static Algorithm, root_hash: Vec<u8>, lemma: Lemma, value: T) -> Self {
Proof {
algorithm,
root_hash,
lemma,
value,
}
}
/// Checks whether this inclusion proof is well-formed,
/// and whether its root hash matches the given `root_hash`.
pub fn validate(&self, root_hash: &[u8]) -> bool {
if self.root_hash != root_hash || self.lemma.node_hash != root_hash {
return false;
}
self.lemma.validate(self.algorithm)
}
/// Returns the index of this proof's value, given the total number of items in the tree.
///
/// # Panics
///
/// Panics if the proof is malformed. Call `validate` first.
pub fn index(&self, count: usize) -> usize {
self.lemma.index(count)
}
}
/// A `Lemma` holds the hash of a node, the hash of its sibling node,
/// and a sub lemma, whose `node_hash`, when combined with this `sibling_hash`,
/// must be equal to this `node_hash`.
#[cfg_attr(feature = "serialization-serde", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Lemma {
pub node_hash: Vec<u8>,
pub sibling_hash: Option<Positioned<Vec<u8>>>,
pub sub_lemma: Option<Box<Lemma>>,
}
impl Lemma {
/// Attempts to generate a proof that a value with hash `needle` is a
/// member of the given `tree`.
pub fn new<T>(tree: &Tree<T>, needle: &[u8]) -> Option<Lemma> {
match *tree {
Tree::Empty { .. } => None,
Tree::Leaf { ref hash, .. } => Lemma::new_leaf_proof(hash, needle),
Tree::Node {
ref hash,
ref left,
ref right,
} => Lemma::new_tree_proof(hash, needle, left, right),
}
}
/// Attempts to generate a proof that the `idx`-th leaf is a member of
/// the given tree. The `count` must equal the number of leaves in the
/// `tree`. If `idx >= count`, `None` is returned. Otherwise it returns
/// the new `Lemma` and the `idx`-th value.
pub fn new_by_index<T>(tree: &Tree<T>, idx: usize, count: usize) -> Option<(Lemma, &T)> {
if idx >= count {
return None;
}
match *tree {
Tree::Empty { .. } => None,
Tree::Leaf { | ref hash,
ref value,
..
} => {
if count != 1 {
return None;
}
let lemma = Lemma {
node_hash: hash.clone(),
sibling_hash: None,
sub_lemma: None,
};
Some((lemma, value))
}
Tree::Node {
ref hash,
ref left,
ref right,
} => {
let left_count = count.next_power_of_two() / 2;
let (sub_lem_val, sibling_hash);
if idx < left_count {
sub_lem_val = Lemma::new_by_index(left, idx, left_count);
sibling_hash = Positioned::Right(right.hash().clone());
} else {
sub_lem_val = Lemma::new_by_index(right, idx - left_count, count - left_count);
sibling_hash = Positioned::Left(left.hash().clone());
}
sub_lem_val.map(|(sub_lemma, value)| {
let lemma = Lemma {
node_hash: hash.clone(),
sibling_hash: Some(sibling_hash),
sub_lemma: Some(Box::new(sub_lemma)),
};
(lemma, value)
})
}
}
}
/// Returns the index of this lemma's value, given the total number of items in the tree.
///
/// # Panics
///
/// Panics if the lemma is malformed. Call `validate_lemma` first.
pub fn index(&self, count: usize) -> usize {
let left_count = count.next_power_of_two() / 2;
match (self.sub_lemma.as_ref(), self.sibling_hash.as_ref()) {
(None, None) => 0,
(Some(l), Some(&Positioned::Left(_))) => left_count + l.index(count - left_count),
(Some(l), Some(&Positioned::Right(_))) => l.index(left_count),
(None, Some(_)) | (Some(_), None) => panic!("malformed lemma"),
}
}
fn new_leaf_proof(hash: &[u8], needle: &[u8]) -> Option<Lemma> {
if *hash == *needle {
Some(Lemma {
node_hash: hash.into(),
sibling_hash: None,
sub_lemma: None,
})
} else {
None
}
}
fn new_tree_proof<T>(
hash: &[u8],
needle: &[u8],
left: &Tree<T>,
right: &Tree<T>,
) -> Option<Lemma> {
Lemma::new(left, needle)
.map(|lemma| {
let right_hash = right.hash().clone();
let sub_lemma = Some(Positioned::Right(right_hash));
(lemma, sub_lemma)
})
.or_else(|| {
let sub_lemma = Lemma::new(right, needle);
sub_lemma.map(|lemma| {
let left_hash = left.hash().clone();
let sub_lemma = Some(Positioned::Left(left_hash));
(lemma, sub_lemma)
})
})
.map(|(sub_lemma, sibling_hash)| Lemma {
node_hash: hash.into(),
sibling_hash,
sub_lemma: Some(Box::new(sub_lemma)),
})
}
fn validate(&self, algorithm: &'static Algorithm) -> bool {
match self.sub_lemma {
None => self.sibling_hash.is_none(),
Some(ref sub) => match self.sibling_hash {
None => false,
Some(Positioned::Left(ref hash)) => {
let combined = algorithm.hash_nodes(hash, &sub.node_hash);
let hashes_match = combined.as_ref() == self.node_hash.as_slice();
hashes_match && sub.validate(algorithm)
}
Some(Positioned::Right(ref hash)) => {
let combined = algorithm.hash_nodes(&sub.node_hash, hash);
let hashes_match = combined.as_ref() == self.node_hash.as_slice();
hashes_match && sub.validate(algorithm)
}
},
}
}
}
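// Editor-added sketch (not from the original crate): how `validate` recombines hashes
// for a two-leaf tree. It assumes `hash_nodes` accepts the two child hashes exactly as
// the `validate` arms above call it; the leaf bytes are arbitrary placeholders and the
// parent is rebuilt with the same call, so the assertion holds by construction.
#[cfg(test)]
mod lemma_validation_sketch {
    use super::*;
    use crate::hashutils::HashUtils;
    use ring::digest::SHA256;

    #[test]
    fn two_leaf_lemma_validates() {
        let left = vec![1u8; 32];
        let right = vec![2u8; 32];
        // The sibling sits on the right, so the Positioned::Right arm combines the
        // sub-lemma's hash first: parent = hash_nodes(left, right).
        let parent = SHA256.hash_nodes(&left, &right).as_ref().to_vec();
        let lemma = Lemma {
            node_hash: parent,
            sibling_hash: Some(Positioned::Right(right)),
            sub_lemma: Some(Box::new(Lemma {
                node_hash: left,
                sibling_hash: None,
                sub_lemma: None,
            })),
        };
        assert!(lemma.validate(&SHA256));
    }
}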
/// Tags a value so that we know from which branch of a `Tree` (if any) it was found.
#[cfg_attr(feature = "serialization-serde", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Positioned<T> {
/// The value was found in the left branch
Left(T),
/// The value was found in the right branch
Right(T),
} | random_line_split |
|
main.rs | mod capture;
mod d3d;
mod displays;
mod hotkey;
mod media;
mod resolution;
mod video;
use std::{path::Path, time::Duration};
use clap::{App, Arg, SubCommand};
use hotkey::HotKey;
use windows::{
core::{Result, RuntimeName},
Foundation::Metadata::ApiInformation,
Graphics::{
Capture::{GraphicsCaptureItem, GraphicsCaptureSession},
SizeInt32,
},
Storage::{
CreationCollisionOption, FileAccessMode, StorageFolder, Streams::IRandomAccessStream,
},
Win32::{
Foundation::{HWND, MAX_PATH, PWSTR},
Graphics::Direct3D11::ID3D11Device,
Media::MediaFoundation::{MFStartup, MFSTARTUP_FULL},
Storage::FileSystem::GetFullPathNameW,
System::{
Diagnostics::Debug::{DebugBreak, IsDebuggerPresent},
Threading::GetCurrentProcessId,
WinRT::{RoInitialize, RO_INIT_MULTITHREADED},
},
UI::{
Input::KeyboardAndMouse::{MOD_CONTROL, MOD_SHIFT},
WindowsAndMessaging::{DispatchMessageW, GetMessageW, MSG, WM_HOTKEY},
},
},
};
use crate::{
capture::create_capture_item_for_monitor,
d3d::create_d3d_device,
displays::get_display_handle_from_index,
media::MF_VERSION,
resolution::Resolution,
video::{encoder_device::VideoEncoderDevice, encoding_session::VideoEncodingSession},
};
fn run(
display_index: usize,
output_path: &str,
bit_rate: u32,
frame_rate: u32,
resolution: Resolution,
encoder_index: usize,
verbose: bool,
wait_for_debugger: bool,
console_mode: bool,
) -> Result<()> {
unsafe {
RoInitialize(RO_INIT_MULTITHREADED)?;
}
unsafe { MFStartup(MF_VERSION, MFSTARTUP_FULL)? }
if wait_for_debugger {
let pid = unsafe { GetCurrentProcessId() };
println!("Waiting for a debugger to attach (PID: {})...", pid);
loop {
if unsafe { IsDebuggerPresent().into() } {
break;
}
std::thread::sleep(Duration::from_secs(1));
}
unsafe {
DebugBreak();
}
}
// Check to make sure Windows.Graphics.Capture is available |
if verbose {
println!(
"Using index \"{}\" and path \"{}\".",
display_index, output_path
);
}
// Get the display handle using the provided index
let display_handle = get_display_handle_from_index(display_index)
.expect("The provided display index was out of bounds!");
let item = create_capture_item_for_monitor(display_handle)?;
// Resolve encoding settings
let resolution = if let Some(resolution) = resolution.get_size() {
resolution
} else {
item.Size()?
};
let bit_rate = bit_rate * 1000000;
let encoder_devices = VideoEncoderDevice::enumerate()?;
if encoder_devices.is_empty() {
exit_with_error("No hardware H264 encoders found!");
}
if verbose {
println!("Encoders ({}):", encoder_devices.len());
for encoder_device in &encoder_devices {
println!(" {}", encoder_device.display_name());
}
}
let encoder_device = if let Some(encoder_device) = encoder_devices.get(encoder_index) {
encoder_device
} else {
exit_with_error("Encoder index is out of bounds!");
};
if verbose {
println!("Using: {}", encoder_device.display_name());
}
// Create our file
let path = unsafe {
let mut output_path: Vec<u16> = output_path.encode_utf16().collect();
output_path.push(0);
let mut new_path = vec![0u16; MAX_PATH as usize];
let length = GetFullPathNameW(
PWSTR(output_path.as_mut_ptr()),
new_path.len() as u32,
PWSTR(new_path.as_mut_ptr()),
std::ptr::null_mut(),
);
new_path.resize(length as usize, 0);
String::from_utf16(&new_path).unwrap()
};
let path = Path::new(&path);
let parent_folder_path = path.parent().unwrap();
let parent_folder =
StorageFolder::GetFolderFromPathAsync(parent_folder_path.as_os_str().to_str().unwrap())?
.get()?;
let file_name = path.file_name().unwrap();
let file = parent_folder
.CreateFileAsync(
file_name.to_str().unwrap(),
CreationCollisionOption::ReplaceExisting,
)?
.get()?;
// Start the recording
{
let stream = file.OpenAsync(FileAccessMode::ReadWrite)?.get()?;
let d3d_device = create_d3d_device()?;
let mut session = create_encoding_session(
d3d_device,
item,
encoder_device,
resolution,
bit_rate,
frame_rate,
stream,
)?;
if !console_mode {
let mut is_recording = false;
pump_messages(|| -> Result<bool> {
Ok(if !is_recording {
is_recording = true;
println!("Starting recording...");
session.start()?;
false
} else {
true
})
})?;
println!("Stopping recording...");
} else {
session.start()?;
pause();
}
session.stop()?;
}
Ok(())
}
fn main() {
let mut app = App::new(env!("CARGO_PKG_NAME"))
.version(env!("CARGO_PKG_VERSION"))
.author(env!("CARGO_PKG_AUTHORS"))
.about(env!("CARGO_PKG_DESCRIPTION"))
.arg(
Arg::with_name("display")
.short("d")
.long("display")
.value_name("display index")
.help("The index of the display you'd like to record.")
.takes_value(true)
.default_value("0")
.required(false),
)
.arg(
Arg::with_name("bitRate")
.short("b")
.long("bitRate")
.value_name("bit rate (in Mbps)")
.help("The bit rate you would like to encode at (in Mbps).")
.takes_value(true)
.default_value("18")
.required(false),
)
.arg(
Arg::with_name("frameRate")
.short("f")
.long("frameRate")
.value_name("frame rate")
.help("The frame rate you would like to encode at.")
.takes_value(true)
.default_value("60")
.required(false),
)
.arg(
Arg::with_name("resolution")
.short("r")
.long("resolution")
.value_name("resolution enum")
.help("The resolution you would like to encode at: native, 720p, 1080p, 2160p, or 4320p.")
.takes_value(true)
.default_value("native")
.required(false),
)
.arg(
Arg::with_name("encoder")
.short("e")
.long("encoder")
.value_name("encoder index")
.help("The index of the encoder you'd like to use to record (use enum-encoders command for a list of encoders and their indices).")
.takes_value(true)
.default_value("0")
.required(false),
)
.arg(
Arg::with_name("verbose")
.short("v")
.help("Enables verbose (debug) output.")
.required(false),
)
.arg(
Arg::with_name("waitForDebugger")
.long("waitForDebugger")
.help("The program will wait for a debugger to attach before starting.")
.required(false),
)
.arg(
Arg::with_name("consoleMode")
.long("consoleMode")
.help("Recording immediately starts. End the recording through console input.")
.required(false),
)
.arg(
Arg::with_name("OUTPUT FILE")
.help("The output file that will contain the recording.")
.default_value("recording.mp4")
.required(false),
)
.subcommand(
SubCommand::with_name("enum-encoders")
.about("Lists the available hardware H264 encoders.")
);
// Handle /?
let args: Vec<_> = std::env::args().collect();
if args.contains(&"/?".to_owned()) {
app.print_help().unwrap();
std::process::exit(0);
}
let matches = app.get_matches();
if let Some(name) = matches.subcommand_name() {
if name == "enum-encoders" {
enum_encoders().unwrap();
return;
}
}
let monitor_index: usize = matches
.value_of("display")
.unwrap()
.parse()
.expect("Invalid diplay index value!");
let output_path = matches.value_of("OUTPUT FILE").unwrap();
let verbose = matches.is_present("verbose");
let wait_for_debugger = matches.is_present("waitForDebugger");
let console_mode = matches.is_present("consoleMode");
let bit_rate: u32 = matches
.value_of("bitRate")
.unwrap()
.parse()
.expect("Invalid bit rate value!");
let frame_rate: u32 = matches
.value_of("frameRate")
.unwrap()
.parse()
.expect("Invalid frame rate value!");
let resolution: Resolution = matches
.value_of("resolution")
.unwrap()
.parse()
.expect("Invalid resolution value! Expecting: native, 720p, 1080p, 2160p, or 4320p.");
let encoder_index: usize = matches
.value_of("encoder")
.unwrap()
.parse()
.expect("Invalid encoder index value!");
// Validate some of the params
if !validate_path(output_path) {
exit_with_error("Invalid path specified!");
}
let result = run(
monitor_index,
&output_path,
bit_rate,
frame_rate,
resolution,
encoder_index,
verbose | wait_for_debugger,
wait_for_debugger,
console_mode,
);
// We do this for nicer HRESULT printing when errors occur.
if let Err(error) = result {
error.code().unwrap();
}
}
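// Editor-added usage sketch: example invocations of the CLI defined above. The binary
// name comes from CARGO_PKG_NAME and does not appear in this file, so `displayrecorder`
// below is only a placeholder:
//
//   displayrecorder enum-encoders
//   displayrecorder -d 1 -b 18 -f 60 -r 1080p -e 0 recording.mp4
//   displayrecorder --consoleMode capture.mp4
//
// Without --consoleMode, recording is toggled with the SHIFT+CTRL+R hotkey registered
// in pump_messages below.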
fn pause() {
println!("Press ENTER to stop recording...");
std::io::Read::read(&mut std::io::stdin(), &mut [0]).unwrap();
}
fn enum_encoders() -> Result<()> {
let encoder_devices = VideoEncoderDevice::enumerate()?;
if encoder_devices.is_empty() {
exit_with_error("No hardware H264 encoders found!");
}
println!("Encoders ({}):", encoder_devices.len());
for (i, encoder_device) in encoder_devices.iter().enumerate() {
println!(" {} - {}", i, encoder_device.display_name());
}
Ok(())
}
fn create_encoding_session(
d3d_device: ID3D11Device,
item: GraphicsCaptureItem,
encoder_device: &VideoEncoderDevice,
resolution: SizeInt32,
bit_rate: u32,
frame_rate: u32,
stream: IRandomAccessStream,
) -> Result<VideoEncodingSession> {
let result = VideoEncodingSession::new(
d3d_device,
item,
encoder_device,
resolution,
bit_rate,
frame_rate,
stream,
);
if result.is_err() {
println!("Error during encoder setup, try another set of encoding settings.");
}
result
}
fn validate_path<P: AsRef<Path>>(path: P) -> bool {
let path = path.as_ref();
let mut valid = true;
if let Some(extension) = path.extension() {
if extension != "mp4" {
valid = false;
}
} else {
valid = false;
}
valid
}
fn exit_with_error(message: &str) -> ! {
println!("{}", message);
std::process::exit(1);
}
fn win32_programmatic_capture_supported() -> Result<bool> {
ApiInformation::IsApiContractPresentByMajor("Windows.Foundation.UniversalApiContract", 8)
}
fn required_capture_features_supported() -> Result<bool> {
let result = ApiInformation::IsTypePresent(GraphicsCaptureSession::NAME)? && // Windows.Graphics.Capture is present
GraphicsCaptureSession::IsSupported()? && // The CaptureService is available
win32_programmatic_capture_supported()?;
Ok(result)
}
fn pump_messages<F: FnMut() -> Result<bool>>(mut hot_key_callback: F) -> Result<()> {
let _hot_key = HotKey::new(MOD_SHIFT | MOD_CONTROL, 0x52 /* R */)?;
println!("Press SHIFT+CTRL+R to start/stop the recording...");
unsafe {
let mut message = MSG::default();
while GetMessageW(&mut message, HWND(0), 0, 0).into() {
if message.message == WM_HOTKEY {
if hot_key_callback()? {
break;
}
}
DispatchMessageW(&mut message);
}
}
Ok(())
}
#[cfg(test)]
mod tests {
use crate::validate_path;
#[test]
fn path_parsing_test() {
assert!(validate_path("something.mp4"));
assert!(validate_path("somedir/something.mp4"));
assert!(validate_path("somedir\\something.mp4"));
assert!(validate_path("../something.mp4"));
assert!(!validate_path("."));
assert!(!validate_path("*"));
assert!(!validate_path("something"));
assert!(!validate_path(".mp4"));
assert!(!validate_path("mp4"));
assert!(!validate_path("something.avi"));
}
} | if !required_capture_features_supported()? {
exit_with_error("The required screen capture features are not supported on this device for this release of Windows!\nPlease update your operating system (minimum: Windows 10 Version 1903, Build 18362).");
} | random_line_split |
main.rs | mod capture;
mod d3d;
mod displays;
mod hotkey;
mod media;
mod resolution;
mod video;
use std::{path::Path, time::Duration};
use clap::{App, Arg, SubCommand};
use hotkey::HotKey;
use windows::{
core::{Result, RuntimeName},
Foundation::Metadata::ApiInformation,
Graphics::{
Capture::{GraphicsCaptureItem, GraphicsCaptureSession},
SizeInt32,
},
Storage::{
CreationCollisionOption, FileAccessMode, StorageFolder, Streams::IRandomAccessStream,
},
Win32::{
Foundation::{HWND, MAX_PATH, PWSTR},
Graphics::Direct3D11::ID3D11Device,
Media::MediaFoundation::{MFStartup, MFSTARTUP_FULL},
Storage::FileSystem::GetFullPathNameW,
System::{
Diagnostics::Debug::{DebugBreak, IsDebuggerPresent},
Threading::GetCurrentProcessId,
WinRT::{RoInitialize, RO_INIT_MULTITHREADED},
},
UI::{
Input::KeyboardAndMouse::{MOD_CONTROL, MOD_SHIFT},
WindowsAndMessaging::{DispatchMessageW, GetMessageW, MSG, WM_HOTKEY},
},
},
};
use crate::{
capture::create_capture_item_for_monitor,
d3d::create_d3d_device,
displays::get_display_handle_from_index,
media::MF_VERSION,
resolution::Resolution,
video::{encoder_device::VideoEncoderDevice, encoding_session::VideoEncodingSession},
};
fn run(
display_index: usize,
output_path: &str,
bit_rate: u32,
frame_rate: u32,
resolution: Resolution,
encoder_index: usize,
verbose: bool,
wait_for_debugger: bool,
console_mode: bool,
) -> Result<()> {
unsafe {
RoInitialize(RO_INIT_MULTITHREADED)?;
}
unsafe { MFStartup(MF_VERSION, MFSTARTUP_FULL)? }
if wait_for_debugger {
let pid = unsafe { GetCurrentProcessId() };
println!("Waiting for a debugger to attach (PID: {})...", pid);
loop {
if unsafe { IsDebuggerPresent().into() } {
break;
}
std::thread::sleep(Duration::from_secs(1));
}
unsafe {
DebugBreak();
}
}
// Check to make sure Windows.Graphics.Capture is available
if !required_capture_features_supported()? {
exit_with_error("The required screen capture features are not supported on this device for this release of Windows!\nPlease update your operating system (minimum: Windows 10 Version 1903, Build 18362).");
}
if verbose {
println!(
"Using index \"{}\" and path \"{}\".",
display_index, output_path
);
}
// Get the display handle using the provided index
let display_handle = get_display_handle_from_index(display_index)
.expect("The provided display index was out of bounds!");
let item = create_capture_item_for_monitor(display_handle)?;
// Resolve encoding settings
let resolution = if let Some(resolution) = resolution.get_size() {
resolution
} else {
item.Size()?
};
let bit_rate = bit_rate * 1000000;
let encoder_devices = VideoEncoderDevice::enumerate()?;
if encoder_devices.is_empty() {
exit_with_error("No hardware H264 encoders found!");
}
if verbose {
println!("Encoders ({}):", encoder_devices.len());
for encoder_device in &encoder_devices {
println!(" {}", encoder_device.display_name());
}
}
let encoder_device = if let Some(encoder_device) = encoder_devices.get(encoder_index) {
encoder_device
} else {
exit_with_error("Encoder index is out of bounds!");
};
if verbose {
println!("Using: {}", encoder_device.display_name());
}
// Create our file
let path = unsafe {
let mut output_path: Vec<u16> = output_path.encode_utf16().collect();
output_path.push(0);
let mut new_path = vec![0u16; MAX_PATH as usize];
let length = GetFullPathNameW(
PWSTR(output_path.as_mut_ptr()),
new_path.len() as u32,
PWSTR(new_path.as_mut_ptr()),
std::ptr::null_mut(),
);
new_path.resize(length as usize, 0);
String::from_utf16(&new_path).unwrap()
};
let path = Path::new(&path);
let parent_folder_path = path.parent().unwrap();
let parent_folder =
StorageFolder::GetFolderFromPathAsync(parent_folder_path.as_os_str().to_str().unwrap())?
.get()?;
let file_name = path.file_name().unwrap();
let file = parent_folder
.CreateFileAsync(
file_name.to_str().unwrap(),
CreationCollisionOption::ReplaceExisting,
)?
.get()?;
// Start the recording
{
let stream = file.OpenAsync(FileAccessMode::ReadWrite)?.get()?;
let d3d_device = create_d3d_device()?;
let mut session = create_encoding_session(
d3d_device,
item,
encoder_device,
resolution,
bit_rate,
frame_rate,
stream,
)?;
if !console_mode {
let mut is_recording = false;
pump_messages(|| -> Result<bool> {
Ok(if !is_recording {
is_recording = true;
println!("Starting recording...");
session.start()?;
false
} else {
true
})
})?;
println!("Stopping recording...");
} else {
session.start()?;
pause();
}
session.stop()?;
}
Ok(())
}
fn main() {
let mut app = App::new(env!("CARGO_PKG_NAME"))
.version(env!("CARGO_PKG_VERSION"))
.author(env!("CARGO_PKG_AUTHORS"))
.about(env!("CARGO_PKG_DESCRIPTION"))
.arg(
Arg::with_name("display")
.short("d")
.long("display")
.value_name("display index")
.help("The index of the display you'd like to record.")
.takes_value(true)
.default_value("0")
.required(false),
)
.arg(
Arg::with_name("bitRate")
.short("b")
.long("bitRate")
.value_name("bit rate (in Mbps)")
.help("The bit rate you would like to encode at (in Mbps).")
.takes_value(true)
.default_value("18")
.required(false),
)
.arg(
Arg::with_name("frameRate")
.short("f")
.long("frameRate")
.value_name("frame rate")
.help("The frame rate you would like to encode at.")
.takes_value(true)
.default_value("60")
.required(false),
)
.arg(
Arg::with_name("resolution")
.short("r")
.long("resolution")
.value_name("resolution enum")
.help("The resolution you would like to encode at: native, 720p, 1080p, 2160p, or 4320p.")
.takes_value(true)
.default_value("native")
.required(false),
)
.arg(
Arg::with_name("encoder")
.short("e")
.long("encoder")
.value_name("encoder index")
.help("The index of the encoder you'd like to use to record (use enum-encoders command for a list of encoders and their indices).")
.takes_value(true)
.default_value("0")
.required(false),
)
.arg(
Arg::with_name("verbose")
.short("v")
.help("Enables verbose (debug) output.")
.required(false),
)
.arg(
Arg::with_name("waitForDebugger")
.long("waitForDebugger")
.help("The program will wait for a debugger to attach before starting.")
.required(false),
)
.arg(
Arg::with_name("consoleMode")
.long("consoleMode")
.help("Recording immediately starts. End the recording through console input.")
.required(false),
)
.arg(
Arg::with_name("OUTPUT FILE")
.help("The output file that will contain the recording.")
.default_value("recording.mp4")
.required(false),
)
.subcommand(
SubCommand::with_name("enum-encoders")
.about("Lists the available hardware H264 encoders.")
);
// Handle /?
let args: Vec<_> = std::env::args().collect();
if args.contains(&"/?".to_owned()) {
app.print_help().unwrap();
std::process::exit(0);
}
let matches = app.get_matches();
if let Some(name) = matches.subcommand_name() {
if name == "enum-encoders" {
enum_encoders().unwrap();
return;
}
}
let monitor_index: usize = matches
.value_of("display")
.unwrap()
.parse()
.expect("Invalid diplay index value!");
let output_path = matches.value_of("OUTPUT FILE").unwrap();
let verbose = matches.is_present("verbose");
let wait_for_debugger = matches.is_present("waitForDebugger");
let console_mode = matches.is_present("consoleMode");
let bit_rate: u32 = matches
.value_of("bitRate")
.unwrap()
.parse()
.expect("Invalid bit rate value!");
let frame_rate: u32 = matches
.value_of("frameRate")
.unwrap()
.parse()
.expect("Invalid frame rate value!");
let resolution: Resolution = matches
.value_of("resolution")
.unwrap()
.parse()
.expect("Invalid resolution value! Expecting: native, 720p, 1080p, 2160p, or 4320p.");
let encoder_index: usize = matches
.value_of("encoder")
.unwrap()
.parse()
.expect("Invalid encoder index value!");
// Validate some of the params
if !validate_path(output_path) {
exit_with_error("Invalid path specified!");
}
let result = run(
monitor_index,
&output_path,
bit_rate,
frame_rate,
resolution,
encoder_index,
verbose | wait_for_debugger,
wait_for_debugger,
console_mode,
);
// We do this for nicer HRESULT printing when errors occur.
if let Err(error) = result {
error.code().unwrap();
}
}
fn pause() {
println!("Press ENTER to stop recording...");
std::io::Read::read(&mut std::io::stdin(), &mut [0]).unwrap();
}
fn enum_encoders() -> Result<()> {
let encoder_devices = VideoEncoderDevice::enumerate()?;
if encoder_devices.is_empty() {
exit_with_error("No hardware H264 encoders found!");
}
println!("Encoders ({}):", encoder_devices.len());
for (i, encoder_device) in encoder_devices.iter().enumerate() {
println!(" {} - {}", i, encoder_device.display_name());
}
Ok(())
}
fn create_encoding_session(
d3d_device: ID3D11Device,
item: GraphicsCaptureItem,
encoder_device: &VideoEncoderDevice,
resolution: SizeInt32,
bit_rate: u32,
frame_rate: u32,
stream: IRandomAccessStream,
) -> Result<VideoEncodingSession> {
let result = VideoEncodingSession::new(
d3d_device,
item,
encoder_device,
resolution,
bit_rate,
frame_rate,
stream,
);
if result.is_err() {
println!("Error during encoder setup, try another set of encoding settings.");
}
result
}
fn validate_path<P: AsRef<Path>>(path: P) -> bool {
let path = path.as_ref();
let mut valid = true;
if let Some(extension) = path.extension() {
if extension != "mp4" {
valid = false;
}
} else {
valid = false;
}
valid
}
fn exit_with_error(message: &str) -> ! {
println!("{}", message);
std::process::exit(1);
}
fn win32_programmatic_capture_supported() -> Result<bool> |
fn required_capture_features_supported() -> Result<bool> {
let result = ApiInformation::IsTypePresent(GraphicsCaptureSession::NAME)? && // Windows.Graphics.Capture is present
GraphicsCaptureSession::IsSupported()? && // The CaptureService is available
win32_programmatic_capture_supported()?;
Ok(result)
}
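// Editor note: `win32_programmatic_capture_supported` asks for major version 8 of the
// UniversalApiContract, which matches the Windows 10 Version 1903 (build 18362) minimum
// quoted in the error message above; all three checks must pass before capture starts.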
fn pump_messages<F: FnMut() -> Result<bool>>(mut hot_key_callback: F) -> Result<()> {
let _hot_key = HotKey::new(MOD_SHIFT | MOD_CONTROL, 0x52 /* R */)?;
println!("Press SHIFT+CTRL+R to start/stop the recording...");
unsafe {
let mut message = MSG::default();
while GetMessageW(&mut message, HWND(0), 0, 0).into() {
if message.message == WM_HOTKEY {
if hot_key_callback()? {
break;
}
}
DispatchMessageW(&mut message);
}
}
Ok(())
}
#[cfg(test)]
mod tests {
use crate::validate_path;
#[test]
fn path_parsing_test() {
assert!(validate_path("something.mp4"));
assert!(validate_path("somedir/something.mp4"));
assert!(validate_path("somedir\\something.mp4"));
assert!(validate_path("../something.mp4"));
assert!(!validate_path("."));
assert!(!validate_path("*"));
assert!(!validate_path("something"));
assert!(!validate_path(".mp4"));
assert!(!validate_path("mp4"));
assert!(!validate_path("something.avi"));
}
}
| {
ApiInformation::IsApiContractPresentByMajor("Windows.Foundation.UniversalApiContract", 8)
} | identifier_body |
main.rs | mod capture;
mod d3d;
mod displays;
mod hotkey;
mod media;
mod resolution;
mod video;
use std::{path::Path, time::Duration};
use clap::{App, Arg, SubCommand};
use hotkey::HotKey;
use windows::{
core::{Result, RuntimeName},
Foundation::Metadata::ApiInformation,
Graphics::{
Capture::{GraphicsCaptureItem, GraphicsCaptureSession},
SizeInt32,
},
Storage::{
CreationCollisionOption, FileAccessMode, StorageFolder, Streams::IRandomAccessStream,
},
Win32::{
Foundation::{HWND, MAX_PATH, PWSTR},
Graphics::Direct3D11::ID3D11Device,
Media::MediaFoundation::{MFStartup, MFSTARTUP_FULL},
Storage::FileSystem::GetFullPathNameW,
System::{
Diagnostics::Debug::{DebugBreak, IsDebuggerPresent},
Threading::GetCurrentProcessId,
WinRT::{RoInitialize, RO_INIT_MULTITHREADED},
},
UI::{
Input::KeyboardAndMouse::{MOD_CONTROL, MOD_SHIFT},
WindowsAndMessaging::{DispatchMessageW, GetMessageW, MSG, WM_HOTKEY},
},
},
};
use crate::{
capture::create_capture_item_for_monitor,
d3d::create_d3d_device,
displays::get_display_handle_from_index,
media::MF_VERSION,
resolution::Resolution,
video::{encoder_device::VideoEncoderDevice, encoding_session::VideoEncodingSession},
};
fn run(
display_index: usize,
output_path: &str,
bit_rate: u32,
frame_rate: u32,
resolution: Resolution,
encoder_index: usize,
verbose: bool,
wait_for_debugger: bool,
console_mode: bool,
) -> Result<()> {
unsafe {
RoInitialize(RO_INIT_MULTITHREADED)?;
}
unsafe { MFStartup(MF_VERSION, MFSTARTUP_FULL)? }
if wait_for_debugger {
let pid = unsafe { GetCurrentProcessId() };
println!("Waiting for a debugger to attach (PID: {})...", pid);
loop {
if unsafe { IsDebuggerPresent().into() } {
break;
}
std::thread::sleep(Duration::from_secs(1));
}
unsafe {
DebugBreak();
}
}
// Check to make sure Windows.Graphics.Capture is available
if !required_capture_features_supported()? |
if verbose {
println!(
"Using index \"{}\" and path \"{}\".",
display_index, output_path
);
}
// Get the display handle using the provided index
let display_handle = get_display_handle_from_index(display_index)
.expect("The provided display index was out of bounds!");
let item = create_capture_item_for_monitor(display_handle)?;
// Resolve encoding settings
let resolution = if let Some(resolution) = resolution.get_size() {
resolution
} else {
item.Size()?
};
let bit_rate = bit_rate * 1000000;
let encoder_devices = VideoEncoderDevice::enumerate()?;
if encoder_devices.is_empty() {
exit_with_error("No hardware H264 encoders found!");
}
if verbose {
println!("Encoders ({}):", encoder_devices.len());
for encoder_device in &encoder_devices {
println!(" {}", encoder_device.display_name());
}
}
let encoder_device = if let Some(encoder_device) = encoder_devices.get(encoder_index) {
encoder_device
} else {
exit_with_error("Encoder index is out of bounds!");
};
if verbose {
println!("Using: {}", encoder_device.display_name());
}
// Create our file
let path = unsafe {
let mut output_path: Vec<u16> = output_path.encode_utf16().collect();
output_path.push(0);
let mut new_path = vec![0u16; MAX_PATH as usize];
let length = GetFullPathNameW(
PWSTR(output_path.as_mut_ptr()),
new_path.len() as u32,
PWSTR(new_path.as_mut_ptr()),
std::ptr::null_mut(),
);
new_path.resize(length as usize, 0);
String::from_utf16(&new_path).unwrap()
};
let path = Path::new(&path);
let parent_folder_path = path.parent().unwrap();
let parent_folder =
StorageFolder::GetFolderFromPathAsync(parent_folder_path.as_os_str().to_str().unwrap())?
.get()?;
let file_name = path.file_name().unwrap();
let file = parent_folder
.CreateFileAsync(
file_name.to_str().unwrap(),
CreationCollisionOption::ReplaceExisting,
)?
.get()?;
// Start the recording
{
let stream = file.OpenAsync(FileAccessMode::ReadWrite)?.get()?;
let d3d_device = create_d3d_device()?;
let mut session = create_encoding_session(
d3d_device,
item,
encoder_device,
resolution,
bit_rate,
frame_rate,
stream,
)?;
if !console_mode {
let mut is_recording = false;
pump_messages(|| -> Result<bool> {
Ok(if !is_recording {
is_recording = true;
println!("Starting recording...");
session.start()?;
false
} else {
true
})
})?;
println!("Stopping recording...");
} else {
session.start()?;
pause();
}
session.stop()?;
}
Ok(())
}
fn main() {
let mut app = App::new(env!("CARGO_PKG_NAME"))
.version(env!("CARGO_PKG_VERSION"))
.author(env!("CARGO_PKG_AUTHORS"))
.about(env!("CARGO_PKG_DESCRIPTION"))
.arg(
Arg::with_name("display")
.short("d")
.long("display")
.value_name("display index")
.help("The index of the display you'd like to record.")
.takes_value(true)
.default_value("0")
.required(false),
)
.arg(
Arg::with_name("bitRate")
.short("b")
.long("bitRate")
.value_name("bit rate (in Mbps)")
.help("The bit rate you would like to encode at (in Mbps).")
.takes_value(true)
.default_value("18")
.required(false),
)
.arg(
Arg::with_name("frameRate")
.short("f")
.long("frameRate")
.value_name("frame rate")
.help("The frame rate you would like to encode at.")
.takes_value(true)
.default_value("60")
.required(false),
)
.arg(
Arg::with_name("resolution")
.short("r")
.long("resolution")
.value_name("resolution enum")
.help("The resolution you would like to encode at: native, 720p, 1080p, 2160p, or 4320p.")
.takes_value(true)
.default_value("native")
.required(false),
)
.arg(
Arg::with_name("encoder")
.short("e")
.long("encoder")
.value_name("encoder index")
.help("The index of the encoder you'd like to use to record (use enum-encoders command for a list of encoders and their indices).")
.takes_value(true)
.default_value("0")
.required(false),
)
.arg(
Arg::with_name("verbose")
.short("v")
.help("Enables verbose (debug) output.")
.required(false),
)
.arg(
Arg::with_name("waitForDebugger")
.long("waitForDebugger")
.help("The program will wait for a debugger to attach before starting.")
.required(false),
)
.arg(
Arg::with_name("consoleMode")
.long("consoleMode")
.help("Recording immediately starts. End the recording through console input.")
.required(false),
)
.arg(
Arg::with_name("OUTPUT FILE")
.help("The output file that will contain the recording.")
.default_value("recording.mp4")
.required(false),
)
.subcommand(
SubCommand::with_name("enum-encoders")
.about("Lists the available hardware H264 encoders.")
);
// Handle /?
let args: Vec<_> = std::env::args().collect();
if args.contains(&"/?".to_owned()) {
app.print_help().unwrap();
std::process::exit(0);
}
let matches = app.get_matches();
if let Some(name) = matches.subcommand_name() {
if name == "enum-encoders" {
enum_encoders().unwrap();
return;
}
}
let monitor_index: usize = matches
.value_of("display")
.unwrap()
.parse()
.expect("Invalid diplay index value!");
let output_path = matches.value_of("OUTPUT FILE").unwrap();
let verbose = matches.is_present("verbose");
let wait_for_debugger = matches.is_present("waitForDebugger");
let console_mode = matches.is_present("consoleMode");
let bit_rate: u32 = matches
.value_of("bitRate")
.unwrap()
.parse()
.expect("Invalid bit rate value!");
let frame_rate: u32 = matches
.value_of("frameRate")
.unwrap()
.parse()
.expect("Invalid frame rate value!");
let resolution: Resolution = matches
.value_of("resolution")
.unwrap()
.parse()
.expect("Invalid resolution value! Expecting: native, 720p, 1080p, 2160p, or 4320p.");
let encoder_index: usize = matches
.value_of("encoder")
.unwrap()
.parse()
.expect("Invalid encoder index value!");
// Validate some of the params
if !validate_path(output_path) {
exit_with_error("Invalid path specified!");
}
let result = run(
monitor_index,
&output_path,
bit_rate,
frame_rate,
resolution,
encoder_index,
verbose | wait_for_debugger,
wait_for_debugger,
console_mode,
);
// We do this for nicer HRESULT printing when errors occur.
if let Err(error) = result {
error.code().unwrap();
}
}
fn pause() {
println!("Press ENTER to stop recording...");
std::io::Read::read(&mut std::io::stdin(), &mut [0]).unwrap();
}
fn enum_encoders() -> Result<()> {
let encoder_devices = VideoEncoderDevice::enumerate()?;
if encoder_devices.is_empty() {
exit_with_error("No hardware H264 encoders found!");
}
println!("Encoders ({}):", encoder_devices.len());
for (i, encoder_device) in encoder_devices.iter().enumerate() {
println!(" {} - {}", i, encoder_device.display_name());
}
Ok(())
}
fn create_encoding_session(
d3d_device: ID3D11Device,
item: GraphicsCaptureItem,
encoder_device: &VideoEncoderDevice,
resolution: SizeInt32,
bit_rate: u32,
frame_rate: u32,
stream: IRandomAccessStream,
) -> Result<VideoEncodingSession> {
let result = VideoEncodingSession::new(
d3d_device,
item,
encoder_device,
resolution,
bit_rate,
frame_rate,
stream,
);
if result.is_err() {
println!("Error during encoder setup, try another set of encoding settings.");
}
result
}
fn validate_path<P: AsRef<Path>>(path: P) -> bool {
let path = path.as_ref();
let mut valid = true;
if let Some(extension) = path.extension() {
if extension != "mp4" {
valid = false;
}
} else {
valid = false;
}
valid
}
fn exit_with_error(message: &str) -> ! {
println!("{}", message);
std::process::exit(1);
}
fn win32_programmatic_capture_supported() -> Result<bool> {
ApiInformation::IsApiContractPresentByMajor("Windows.Foundation.UniversalApiContract", 8)
}
fn required_capture_features_supported() -> Result<bool> {
let result = ApiInformation::IsTypePresent(GraphicsCaptureSession::NAME)? && // Windows.Graphics.Capture is present
GraphicsCaptureSession::IsSupported()? && // The CaptureService is available
win32_programmatic_capture_supported()?;
Ok(result)
}
fn pump_messages<F: FnMut() -> Result<bool>>(mut hot_key_callback: F) -> Result<()> {
let _hot_key = HotKey::new(MOD_SHIFT | MOD_CONTROL, 0x52 /* R */)?;
println!("Press SHIFT+CTRL+R to start/stop the recording...");
unsafe {
let mut message = MSG::default();
while GetMessageW(&mut message, HWND(0), 0, 0).into() {
if message.message == WM_HOTKEY {
if hot_key_callback()? {
break;
}
}
DispatchMessageW(&mut message);
}
}
Ok(())
}
#[cfg(test)]
mod tests {
use crate::validate_path;
#[test]
fn path_parsing_test() {
assert!(validate_path("something.mp4"));
assert!(validate_path("somedir/something.mp4"));
assert!(validate_path("somedir\\something.mp4"));
assert!(validate_path("../something.mp4"));
assert!(!validate_path("."));
assert!(!validate_path("*"));
assert!(!validate_path("something"));
assert!(!validate_path(".mp4"));
assert!(!validate_path("mp4"));
assert!(!validate_path("something.avi"));
}
}
| {
exit_with_error("The required screen capture features are not supported on this device for this release of Windows!\nPlease update your operating system (minimum: Windows 10 Version 1903, Build 18362).");
} | conditional_block |
main.rs | mod capture;
mod d3d;
mod displays;
mod hotkey;
mod media;
mod resolution;
mod video;
use std::{path::Path, time::Duration};
use clap::{App, Arg, SubCommand};
use hotkey::HotKey;
use windows::{
core::{Result, RuntimeName},
Foundation::Metadata::ApiInformation,
Graphics::{
Capture::{GraphicsCaptureItem, GraphicsCaptureSession},
SizeInt32,
},
Storage::{
CreationCollisionOption, FileAccessMode, StorageFolder, Streams::IRandomAccessStream,
},
Win32::{
Foundation::{HWND, MAX_PATH, PWSTR},
Graphics::Direct3D11::ID3D11Device,
Media::MediaFoundation::{MFStartup, MFSTARTUP_FULL},
Storage::FileSystem::GetFullPathNameW,
System::{
Diagnostics::Debug::{DebugBreak, IsDebuggerPresent},
Threading::GetCurrentProcessId,
WinRT::{RoInitialize, RO_INIT_MULTITHREADED},
},
UI::{
Input::KeyboardAndMouse::{MOD_CONTROL, MOD_SHIFT},
WindowsAndMessaging::{DispatchMessageW, GetMessageW, MSG, WM_HOTKEY},
},
},
};
use crate::{
capture::create_capture_item_for_monitor,
d3d::create_d3d_device,
displays::get_display_handle_from_index,
media::MF_VERSION,
resolution::Resolution,
video::{encoder_device::VideoEncoderDevice, encoding_session::VideoEncodingSession},
};
fn | (
display_index: usize,
output_path: &str,
bit_rate: u32,
frame_rate: u32,
resolution: Resolution,
encoder_index: usize,
verbose: bool,
wait_for_debugger: bool,
console_mode: bool,
) -> Result<()> {
unsafe {
RoInitialize(RO_INIT_MULTITHREADED)?;
}
unsafe { MFStartup(MF_VERSION, MFSTARTUP_FULL)? }
if wait_for_debugger {
let pid = unsafe { GetCurrentProcessId() };
println!("Waiting for a debugger to attach (PID: {})...", pid);
loop {
if unsafe { IsDebuggerPresent().into() } {
break;
}
std::thread::sleep(Duration::from_secs(1));
}
unsafe {
DebugBreak();
}
}
// Check to make sure Windows.Graphics.Capture is available
if !required_capture_features_supported()? {
exit_with_error("The required screen capture features are not supported on this device for this release of Windows!\nPlease update your operating system (minimum: Windows 10 Version 1903, Build 18362).");
}
if verbose {
println!(
"Using index \"{}\" and path \"{}\".",
display_index, output_path
);
}
// Get the display handle using the provided index
let display_handle = get_display_handle_from_index(display_index)
.expect("The provided display index was out of bounds!");
let item = create_capture_item_for_monitor(display_handle)?;
// Resolve encoding settings
let resolution = if let Some(resolution) = resolution.get_size() {
resolution
} else {
item.Size()?
};
let bit_rate = bit_rate * 1000000;
let encoder_devices = VideoEncoderDevice::enumerate()?;
if encoder_devices.is_empty() {
exit_with_error("No hardware H264 encoders found!");
}
if verbose {
println!("Encoders ({}):", encoder_devices.len());
for encoder_device in &encoder_devices {
println!(" {}", encoder_device.display_name());
}
}
let encoder_device = if let Some(encoder_device) = encoder_devices.get(encoder_index) {
encoder_device
} else {
exit_with_error("Encoder index is out of bounds!");
};
if verbose {
println!("Using: {}", encoder_device.display_name());
}
// Create our file
let path = unsafe {
let mut output_path: Vec<u16> = output_path.encode_utf16().collect();
output_path.push(0);
let mut new_path = vec![0u16; MAX_PATH as usize];
let length = GetFullPathNameW(
PWSTR(output_path.as_mut_ptr()),
new_path.len() as u32,
PWSTR(new_path.as_mut_ptr()),
std::ptr::null_mut(),
);
new_path.resize(length as usize, 0);
String::from_utf16(&new_path).unwrap()
};
let path = Path::new(&path);
let parent_folder_path = path.parent().unwrap();
let parent_folder =
StorageFolder::GetFolderFromPathAsync(parent_folder_path.as_os_str().to_str().unwrap())?
.get()?;
let file_name = path.file_name().unwrap();
let file = parent_folder
.CreateFileAsync(
file_name.to_str().unwrap(),
CreationCollisionOption::ReplaceExisting,
)?
.get()?;
// Start the recording
{
let stream = file.OpenAsync(FileAccessMode::ReadWrite)?.get()?;
let d3d_device = create_d3d_device()?;
let mut session = create_encoding_session(
d3d_device,
item,
encoder_device,
resolution,
bit_rate,
frame_rate,
stream,
)?;
if !console_mode {
let mut is_recording = false;
pump_messages(|| -> Result<bool> {
Ok(if !is_recording {
is_recording = true;
println!("Starting recording...");
session.start()?;
false
} else {
true
})
})?;
println!("Stopping recording...");
} else {
session.start()?;
pause();
}
session.stop()?;
}
Ok(())
}
fn main() {
let mut app = App::new(env!("CARGO_PKG_NAME"))
.version(env!("CARGO_PKG_VERSION"))
.author(env!("CARGO_PKG_AUTHORS"))
.about(env!("CARGO_PKG_DESCRIPTION"))
.arg(
Arg::with_name("display")
.short("d")
.long("display")
.value_name("display index")
.help("The index of the display you'd like to record.")
.takes_value(true)
.default_value("0")
.required(false),
)
.arg(
Arg::with_name("bitRate")
.short("b")
.long("bitRate")
.value_name("bit rate (in Mbps)")
.help("The bit rate you would like to encode at (in Mbps).")
.takes_value(true)
.default_value("18")
.required(false),
)
.arg(
Arg::with_name("frameRate")
.short("f")
.long("frameRate")
.value_name("frame rate")
.help("The frame rate you would like to encode at.")
.takes_value(true)
.default_value("60")
.required(false),
)
.arg(
Arg::with_name("resolution")
.short("r")
.long("resolution")
.value_name("resolution enum")
.help("The resolution you would like to encode at: native, 720p, 1080p, 2160p, or 4320p.")
.takes_value(true)
.default_value("native")
.required(false),
)
.arg(
Arg::with_name("encoder")
.short("e")
.long("encoder")
.value_name("encoder index")
.help("The index of the encoder you'd like to use to record (use enum-encoders command for a list of encoders and their indices).")
.takes_value(true)
.default_value("0")
.required(false),
)
.arg(
Arg::with_name("verbose")
.short("v")
.help("Enables verbose (debug) output.")
.required(false),
)
.arg(
Arg::with_name("waitForDebugger")
.long("waitForDebugger")
.help("The program will wait for a debugger to attach before starting.")
.required(false),
)
.arg(
Arg::with_name("consoleMode")
.long("consoleMode")
.help("Recording immediately starts. End the recording through console input.")
.required(false),
)
.arg(
Arg::with_name("OUTPUT FILE")
.help("The output file that will contain the recording.")
.default_value("recording.mp4")
.required(false),
)
.subcommand(
SubCommand::with_name("enum-encoders")
.about("Lists the available hardware H264 encoders.")
);
// Handle /?
let args: Vec<_> = std::env::args().collect();
if args.contains(&"/?".to_owned()) {
app.print_help().unwrap();
std::process::exit(0);
}
let matches = app.get_matches();
if let Some(name) = matches.subcommand_name() {
if name == "enum-encoders" {
enum_encoders().unwrap();
return;
}
}
let monitor_index: usize = matches
.value_of("display")
.unwrap()
.parse()
.expect("Invalid diplay index value!");
let output_path = matches.value_of("OUTPUT FILE").unwrap();
let verbose = matches.is_present("verbose");
let wait_for_debugger = matches.is_present("waitForDebugger");
let console_mode = matches.is_present("consoleMode");
let bit_rate: u32 = matches
.value_of("bitRate")
.unwrap()
.parse()
.expect("Invalid bit rate value!");
let frame_rate: u32 = matches
.value_of("frameRate")
.unwrap()
.parse()
.expect("Invalid frame rate value!");
let resolution: Resolution = matches
.value_of("resolution")
.unwrap()
.parse()
.expect("Invalid resolution value! Expecting: native, 720p, 1080p, 2160p, or 4320p.");
let encoder_index: usize = matches
.value_of("encoder")
.unwrap()
.parse()
.expect("Invalid encoder index value!");
// Validate some of the params
if !validate_path(output_path) {
exit_with_error("Invalid path specified!");
}
let result = run(
monitor_index,
&output_path,
bit_rate,
frame_rate,
resolution,
encoder_index,
verbose | wait_for_debugger,
wait_for_debugger,
console_mode,
);
// We do this for nicer HRESULT printing when errors occur.
if let Err(error) = result {
error.code().unwrap();
}
}
fn pause() {
println!("Press ENTER to stop recording...");
std::io::Read::read(&mut std::io::stdin(), &mut [0]).unwrap();
}
fn enum_encoders() -> Result<()> {
let encoder_devices = VideoEncoderDevice::enumerate()?;
if encoder_devices.is_empty() {
exit_with_error("No hardware H264 encoders found!");
}
println!("Encoders ({}):", encoder_devices.len());
for (i, encoder_device) in encoder_devices.iter().enumerate() {
println!(" {} - {}", i, encoder_device.display_name());
}
Ok(())
}
fn create_encoding_session(
d3d_device: ID3D11Device,
item: GraphicsCaptureItem,
encoder_device: &VideoEncoderDevice,
resolution: SizeInt32,
bit_rate: u32,
frame_rate: u32,
stream: IRandomAccessStream,
) -> Result<VideoEncodingSession> {
let result = VideoEncodingSession::new(
d3d_device,
item,
encoder_device,
resolution,
bit_rate,
frame_rate,
stream,
);
if result.is_err() {
println!("Error during encoder setup, try another set of encoding settings.");
}
result
}
fn validate_path<P: AsRef<Path>>(path: P) -> bool {
let path = path.as_ref();
let mut valid = true;
if let Some(extension) = path.extension() {
if extension != "mp4" {
valid = false;
}
} else {
valid = false;
}
valid
}
fn exit_with_error(message: &str) -> ! {
println!("{}", message);
std::process::exit(1);
}
fn win32_programmatic_capture_supported() -> Result<bool> {
ApiInformation::IsApiContractPresentByMajor("Windows.Foundation.UniversalApiContract", 8)
}
fn required_capture_features_supported() -> Result<bool> {
let result = ApiInformation::IsTypePresent(GraphicsCaptureSession::NAME)? && // Windows.Graphics.Capture is present
GraphicsCaptureSession::IsSupported()? && // The CaptureService is available
win32_programmatic_capture_supported()?;
Ok(result)
}
fn pump_messages<F: FnMut() -> Result<bool>>(mut hot_key_callback: F) -> Result<()> {
let _hot_key = HotKey::new(MOD_SHIFT | MOD_CONTROL, 0x52 /* R */)?;
println!("Press SHIFT+CTRL+R to start/stop the recording...");
unsafe {
let mut message = MSG::default();
while GetMessageW(&mut message, HWND(0), 0, 0).into() {
if message.message == WM_HOTKEY {
if hot_key_callback()? {
break;
}
}
DispatchMessageW(&mut message);
}
}
Ok(())
}
#[cfg(test)]
mod tests {
use crate::validate_path;
#[test]
fn path_parsing_test() {
assert!(validate_path("something.mp4"));
assert!(validate_path("somedir/something.mp4"));
assert!(validate_path("somedir\\something.mp4"));
assert!(validate_path("../something.mp4"));
assert!(!validate_path("."));
assert!(!validate_path("*"));
assert!(!validate_path("something"));
assert!(!validate_path(".mp4"));
assert!(!validate_path("mp4"));
assert!(!validate_path("something.avi"));
}
}
| run | identifier_name |
utils.py | # Copyright 2019 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for report generation"""
from __future__ import absolute_import
from __future__ import print_function
from collections import OrderedDict
from typing import Set, Dict, List, Union, Text
from ml_eda.proto import analysis_entity_pb2
from ml_eda.preprocessing.analysis_query import query_constants
from ml_eda.reporting import template
from ml_eda.reporting import visualization
from ml_eda.reporting import formatting
Analysis = analysis_entity_pb2.Analysis
TableMetric = analysis_entity_pb2.TableMetric
ScalarMetric = analysis_entity_pb2.ScalarMetric
Attribute = analysis_entity_pb2.Attribute
def create_table_descriptive_row_from_analysis(
attribute_name: Text,
base_analysis: Analysis,
additional_analysis: Analysis,
figure_base_path: Text
) -> Text:
# pylint: disable-msg=too-many-locals
"""Create makrdown formatted descriptive analysis result
Args: | figure_base_path: (string), the folder for holding figures
Returns:
string, markdown formatted content
"""
row_template = template.TABLE_DESCRIPTIVE_ROW_TEMPLATE
stats_template = template.TABLE_DESCRIPTIVE_STATS_TEMPLATE
metrics = base_analysis.smetrics
attribute_type = base_analysis.features[0].type
# Make sure the display order of each attribute is consistent
common_order = query_constants.COMMON_ORDER
if attribute_type == Attribute.NUMERICAL:
detail_order = query_constants.NUMERICAL_ORDER
else:
detail_order = query_constants.CATEGORICAL_ORDER
# Use an OrderedDict to store the result
result_holder = OrderedDict(
[(item, 0) for item in common_order + detail_order])
for item in metrics:
name = ScalarMetric.Name.Name(item.name)
value = formatting.numeric_formatting(item.value)
result_holder[name] = value
# Construct the markdown formatted row
row_stats_contents = []
for item in result_holder:
row_stats_contents.append(stats_template.format(
metric=item,
value=result_holder[item]
))
figure_path = visualization.plot_bar_chart(additional_analysis,
figure_base_path)
return row_template.format(
name=attribute_name,
type=Attribute.Type.Name(attribute_type),
stats=' <br/> '.join(row_stats_contents),
url=figure_path,
alt_text=attribute_name,
)
def create_table_from_table_metric(table_metric: TableMetric) -> Text:
"""Create a table for a TableMetric object. Currently, this function is
used for Contingency_Table and TABLE_DESCRIPTIVE
Examples:
​|Cash|Credit Card|No Charge|Unknown|Mobile|Prcard
:-----:|:-----:|:-----:|:-----:|:-----:|:-----:|:-----:
frequency|108114952.0|74475448.0|797730.0|369844.0|255082.0|192063.0
Args:
table_metric: (analysis_entity_pb2.TableMetric)
Returns:
string
"""
supported_metric = {
TableMetric.CONTINGENCY_TABLE,
TableMetric.TABLE_DESCRIPTIVE
}
assert table_metric.name in supported_metric
table_template = template.TABLE_TEMPLATE
headers = ['​'] + list(table_metric.column_indexes)
header_string = "|".join(headers)
header_separator = "|".join([":-----:" for i in range(len(headers))])
table_content = []
for row in table_metric.rows:
# row header is in BOLD
row_header = template.BOLD.format(
content=str(row.row_index).strip())
row_values = [row_header] + [formatting.numeric_formatting(item.value)
for item in row.cells]
table_content.append("|".join(row_values))
table_content_string = "\n".join(table_content)
return table_template.format(
header=header_string,
header_separator=header_separator,
table_content=table_content_string
)
def create_pairwise_metric_table(
row_list: Set[Text],
column_list: Set[Text],
name_value_map: Dict[Text, float],
same_match_value
) -> Text:
"""Construct table for pair-wise computed metrics, e.g.,
PEARSON_CORRELATION, ANOVA, CHI_SQUARE, INFORMATION_GAIN
Examples:
​|tips|tolls|trip_total
:-----:|:-----:|:-----:|:-----:
tips|1|0.0001942405360750854|0.1952170878648758
tolls|0.0001942405360750854|1|0.22858665883541107
trip_total|0.1952170878648758|0.22858665883541107|1
Args:
row_list: (List[str]), list of attribute names for table row names
column_list: (List[str]), list of attribute names for the table header
name_value_map: (Dict[str, float]), map of name -> value
same_match_value: value if the column and row name are the same. This
could be either float or 'NA', depending on whether the computation of
A-v.s.-A makes sense
Returns:
string
"""
table_template = template.TABLE_TEMPLATE
headers = ['​'] + list(column_list)
header_string = "|".join(headers)
header_separator = "|".join([":-----:" for i in range(len(headers))])
table_content = []
for row_name in row_list:
# row header is in BOLD
row_values = [template.BOLD.format(content=row_name.strip())]
for col_name in column_list:
# same_match_value is used when row_name == column_name
if row_name == col_name:
value = same_match_value
else:
value = name_value_map[row_name + '-' + col_name]
# if the same_match_value is string, simply append it
if isinstance(value, str):
row_values.append(same_match_value)
else:
row_values.append(formatting.numeric_formatting(value))
table_content.append("|".join(row_values))
table_content_string = "\n".join(table_content)
return table_template.format(
header=header_string,
header_separator=header_separator,
table_content=table_content_string
)
def create_no_order_pair_metric_section(
analysis_list: List[Analysis],
same_match_value: Union[Text, float],
figure_base_path: Text,
table_name: Text = "NA"
) -> Text:
"""Create metric table for pairwise comparison
Args:
analysis_list: (List[analysis_entity_pb2.Analysis])
same_match_value: (Union[str, float])
figure_base_path: (string), the folder for holding figures
table_name: (str)
Returns:
string
"""
attribute_list = set()
# a dictionary with {attributeone-attributetwo: metric_value}
analysis_name_value_map = {}
for item in analysis_list:
value = item.smetrics[0].value
name_list = [att.name for att in item.features]
attribute_list.update(name_list)
analysis_name_value_map['-'.join(name_list)] = value
analysis_name_value_map['-'.join(reversed(name_list))] = value
table_content = create_pairwise_metric_table(
row_list=attribute_list,
column_list=attribute_list,
name_value_map=analysis_name_value_map,
same_match_value=same_match_value)
if table_name != "NA":
figure_path = visualization.plot_heat_map_for_metric_table(
heat_map_name=table_name,
row_list=attribute_list,
column_list=attribute_list,
name_value_map=analysis_name_value_map,
same_match_value=same_match_value,
figure_base_path=figure_base_path)
figure_content = template.IMAGE_TEMPLATE.format(
url=figure_path,
alt_text=table_name
)
else:
figure_content = ""
return table_content + figure_content
def create_order_pair_metric_section(
analysis_list: List[Analysis],
same_match_value: Union[Text, float]
) -> Text:
"""Create metric table for pairwise comparison
Args:
analysis_list: (List[analysis_entity_pb2.Analysis])
same_match_value: (Union[str, float])
Returns:
string
"""
row_list = set()
column_list = set()
# a dictionary with {attributeone-attributetwo: metric_value}
analysis_name_value_map = {}
for item in analysis_list:
value = item.smetrics[0].value
name_list = [att.name for att in item.features]
row_list.add(name_list[0])
column_list.add(name_list[1])
analysis_name_value_map['-'.join(name_list)] = value
return create_pairwise_metric_table(
row_list=row_list,
column_list=column_list,
name_value_map=analysis_name_value_map,
same_match_value=same_match_value)
def create_target_metrics_highlight(
target_name: Text,
metric_name_list: List[Text],
metric_analysis_list: List[List[Analysis]]
) -> Text:
# pylint: disable-msg=too-many-locals
"""Create the content for highlight section regarding a target attribute
Args:
target_name: (string)
metric_name_list: (List[string])
metric_analysis_list: (List[List[analysis_entity_pb2.Analysis]])
Returns:
"""
assert len(metric_name_list) == len(metric_analysis_list)
# Every metric should have the same length, i.e., target v.s. remaining
assert len({len(item) for item in metric_analysis_list}) == 1
name_enrich = {
'ANOVA': 'ANOVA P-value',
'CHI_SQUARE': 'Chi-square P-value',
'INFORMATION_GAIN': 'Information Gain',
'PEARSON_CORRELATION': 'Correlation Coefficient'
}
table_template = template.TARGET_METRIC_HIGHLIGHT_TEMPLATE
row_template = template.TARGET_METRIC_HIGHLIGHT_ROW_TEMPLATE
num_metrics = len(metric_name_list)
enrich_name_list = [name_enrich[item] if item in name_enrich else item
for item in metric_name_list]
metric_names_str = '|'.join(enrich_name_list)
separator_str = ':-----:|' * num_metrics
attribute_set = set()
metric_holders = {metric: {} for metric in metric_name_list}
for i in range(num_metrics):
for analysis in metric_analysis_list[i]:
metric_name = Analysis.Name.Name(analysis.name)
attribute_name = [att.name for att in analysis.features
if att.name != target_name][0]
attribute_set.add(attribute_name)
metric_value = analysis.smetrics[0].value
metric_holders[metric_name][attribute_name] = metric_value
row_content_list = []
for attribute in attribute_set:
values_str = '|'.join(
[formatting.numeric_formatting(metric_holders[metric][attribute])
for metric in metric_name_list])
row_content_list.append(row_template.format(
name=attribute,
values=values_str
))
return table_template.format(
target_column=target_name,
metric_names=metric_names_str,
seperators=separator_str,
row_content='\n'.join(row_content_list)
)
def create_content_list(contents: List[Text]) -> Text:
"""Format list of string into markdown list
Args:
contents: (List[string]), list of string to be formatted
Returns:
String
"""
# print(contents)
return '\n'.join(
[template.LIST_TEMPLATE.format(
level='',
content=item
) for item in contents if item.strip()])
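# Illustrative example: create_content_list(['a', '', 'b']) emits one
# LIST_TEMPLATE line for 'a' and one for 'b'; the empty entry is dropped by the
# item.strip() filter above.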
def create_warning_notes(warnings: List[Text]) -> Text:
"""Format list of warnings into markdown list
Args:
warnings: (List(string)), list of warnings
Returns:
String
"""
warning_title = template.SUB_SUB_SECTION_TITLE.format(
content='Warnings'
)
return warning_title + create_content_list(warnings) | attribute_name: (string), name of the attribute
base_analysis: (analysis_entity_pb2.Analysis), analysis holding
all the metrics
additional_analysis: (analysis_entity_pb2.Analysis), histogram for
numerical attribute, value_counts for categorical attributes | random_line_split |
utils.py | # Copyright 2019 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for report generation"""
from __future__ import absolute_import
from __future__ import print_function
from collections import OrderedDict
from typing import Set, Dict, List, Union, Text
from ml_eda.proto import analysis_entity_pb2
from ml_eda.preprocessing.analysis_query import query_constants
from ml_eda.reporting import template
from ml_eda.reporting import visualization
from ml_eda.reporting import formatting
Analysis = analysis_entity_pb2.Analysis
TableMetric = analysis_entity_pb2.TableMetric
ScalarMetric = analysis_entity_pb2.ScalarMetric
Attribute = analysis_entity_pb2.Attribute
def create_table_descriptive_row_from_analysis(
attribute_name: Text,
base_analysis: Analysis,
additional_analysis: Analysis,
figure_base_path: Text
) -> Text:
# pylint: disable-msg=too-many-locals
"""Create makrdown formatted descriptive analysis result
Args:
attribute_name: (string), name of the attribute
base_analysis: (analysis_entity_pb2.Analysis), analysis holding
all the metrics
additional_analysis: (analysis_entity_pb2.Analysis), histogram for
numerical attribute, value_counts for categorical attributes
figure_base_path: (string), the folder for holding figures
Returns:
string, markdown formatted content
"""
row_template = template.TABLE_DESCRIPTIVE_ROW_TEMPLATE
stats_template = template.TABLE_DESCRIPTIVE_STATS_TEMPLATE
metrics = base_analysis.smetrics
attribute_type = base_analysis.features[0].type
# Make sure the display order of each attribute is consistent
common_order = query_constants.COMMON_ORDER
if attribute_type == Attribute.NUMERICAL:
detail_order = query_constants.NUMERICAL_ORDER
else:
detail_order = query_constants.CATEGORICAL_ORDER
# Use an OrderedDict to store the result
result_holder = OrderedDict(
[(item, 0) for item in common_order + detail_order])
for item in metrics:
name = ScalarMetric.Name.Name(item.name)
value = formatting.numeric_formatting(item.value)
result_holder[name] = value
# Construct the markdown formatted row
row_stats_contents = []
for item in result_holder:
row_stats_contents.append(stats_template.format(
metric=item,
value=result_holder[item]
))
figure_path = visualization.plot_bar_chart(additional_analysis,
figure_base_path)
return row_template.format(
name=attribute_name,
type=Attribute.Type.Name(attribute_type),
stats=' <br/> '.join(row_stats_contents),
url=figure_path,
alt_text=attribute_name,
)
def create_table_from_table_metric(table_metric: TableMetric) -> Text:
"""Create a table for a TableMetric object. Currently, this function is
used for Contingency_Table and TABLE_DESCRIPTIVE
Examples:
​|Cash|Credit Card|No Charge|Unknown|Mobile|Prcard
:-----:|:-----:|:-----:|:-----:|:-----:|:-----:|:-----:
frequency|108114952.0|74475448.0|797730.0|369844.0|255082.0|192063.0
Args:
table_metric: (analysis_entity_pb2.TableMetric)
Returns:
string
"""
supported_metric = {
TableMetric.CONTINGENCY_TABLE,
TableMetric.TABLE_DESCRIPTIVE
}
assert table_metric.name in supported_metric
table_template = template.TABLE_TEMPLATE
headers = ['​'] + list(table_metric.column_indexes)
header_string = "|".join(headers)
header_separator = "|".join([":-----:" for i in range(len(headers))])
table_content = []
for row in table_metric.rows:
# row header is in BOLD
row_header = template.BOLD.format(
content=str(row.row_index).strip())
row_values = [row_header] + [formatting.numeric_formatting(item.value)
for item in row.cells]
table_content.append("|".join(row_values))
table_content_string = "\n".join(table_content)
return table_template.format(
header=header_string,
header_separator=header_separator,
table_content=table_content_string
)
def create_pairwise_metric_table(
row_list: Set[Text],
column_list: Set[Text],
name_value_map: Dict[Text, float],
same_match_value
) -> Text:
"""Construct table for pair-wise computed metrics, e.g.,
PEARSON_CORRELATION, ANOVA, CHI_SQUARE, INFORMATION_GAIN
Examples:
​|tips|tolls|trip_total
:-----:|:-----:|:-----:|:-----:
tips|1|0.0001942405360750854|0.1952170878648758
tolls|0.0001942405360750854|1|0.22858665883541107
trip_total|0.1952170878648758|0.22858665883541107|1
Args:
row_list: (List[str]), list of attribute names for table row names
column_list: (List[str]), list of attribute names for the table header
name_value_map: (Dict[str, float]), map of name -> value
same_match_value: value if the column and row name are the same. This
could be either float or 'NA', depending on whether the computation of
A-v.s.-A makes sense
Returns:
string
"""
table_template = template.TABLE_TEMPLATE
headers = ['​'] + list(column_list)
header_string = "|".join(headers)
header_separator = "|".join([":-----:" for i in range(len(headers))])
table_content = []
for row_name in row_list:
# row header is in BOLD
row_values = [template.BOLD.format(content=row_name.strip())]
for col_name in column_list:
# same_match_value is used when row_name == column_name
if row_name == col_name:
value = same_match_value
else:
value = name_value_map[row_name + '-' + col_name]
# if the same_match_value is string, simply append it
if isinstance(value, str):
row_values.append(same_match_value)
else:
row_values.append(formatting.numeric_formatting(value))
table_content.append("|".join(row_values))
table_content_string = "\n".join(table_content)
return table_template.format(
header=header_string,
header_separator=header_separator,
table_content=table_content_string
)
def create_no_order_pair_metric_section(
analysis_list: List[Analysis],
same_match_value: Union[Text, float],
figure_base_path: Text,
table_name: Text = "NA"
) -> Text:
"""Create metric table for pairwise comparison
Args:
analysis_list: (List[analysis_entity_pb2.Analysis])
same_match_value: (Union[str, float])
figure_base_path: (string), the folder for holding figures
table_name: (str)
Returns:
string
"""
attribute_list = set()
# a dictionary with {attributeone-attributetwo: metric_value}
analysis_name_value_map = {}
for item in analysis_list:
value = item.smetrics[0].value
name_list = [att.name for att in item.features]
attribute_list.update(name_list)
analysis_name_value_map['-'.join(name_list)] = value
analysis_name_value_map['-'.join(reversed(name_list))] = value
table_content = create_pairwise_metric_table(
row_list=attribute_list,
column_list=attribute_list,
name_value_map=analysis_name_value_map,
same_match_value=same_match_value)
if table_name != "NA":
figure_path = visualization.plot_heat_map_for_metric_table(
heat_map_name=table_name,
row_list=attribute_list,
column_list=attribute_list,
name_value_map=analysis_name_value_map,
same_match_value=same_match_value,
figure_base_path=figure_base_path)
figure_content = template.IMAGE_TEMPLATE.format(
url=figure_path,
alt_text=table_name
)
else:
figure_content = ""
return table_content + figure_content
def create_order_pair_metric_section(
analysis_list: List[Analysis],
same_match_value: Union[Text, float]
) -> Text:
|
def create_target_metrics_highlight(
target_name: Text,
metric_name_list: List[Text],
metric_analysis_list: List[List[Analysis]]
) -> Text:
# pylint: disable-msg=too-many-locals
"""Create the content for highlight section regarding a target attribute
Args:
target_name: (string)
metric_name_list: (List[string])
metric_analysis_list: (List[List[analysis_entity_pb2.Analysis]])
Returns:
"""
assert len(metric_name_list) == len(metric_analysis_list)
# Every metric should have the same length, i.e., target v.s. remaining
assert len({len(item) for item in metric_analysis_list}) == 1
name_enrich = {
'ANOVA': 'ANOVA P-value',
'CHI_SQUARE': 'Chi-square P-value',
'INFORMATION_GAIN': 'Information Gain',
'PEARSON_CORRELATION': 'Correlation Coefficient'
}
table_template = template.TARGET_METRIC_HIGHLIGHT_TEMPLATE
row_template = template.TARGET_METRIC_HIGHLIGHT_ROW_TEMPLATE
num_metrics = len(metric_name_list)
enrich_name_list = [name_enrich[item] if item in name_enrich else item
for item in metric_name_list]
metric_names_str = '|'.join(enrich_name_list)
separator_str = ':-----:|' * num_metrics
attribute_set = set()
metric_holders = {metric: {} for metric in metric_name_list}
for i in range(num_metrics):
for analysis in metric_analysis_list[i]:
metric_name = Analysis.Name.Name(analysis.name)
attribute_name = [att.name for att in analysis.features
if att.name != target_name][0]
attribute_set.add(attribute_name)
metric_value = analysis.smetrics[0].value
metric_holders[metric_name][attribute_name] = metric_value
row_content_list = []
for attribute in attribute_set:
values_str = '|'.join(
[formatting.numeric_formatting(metric_holders[metric][attribute])
for metric in metric_name_list])
row_content_list.append(row_template.format(
name=attribute,
values=values_str
))
return table_template.format(
target_column=target_name,
metric_names=metric_names_str,
seperators=separator_str,
row_content='\n'.join(row_content_list)
)
def create_content_list(contents: List[Text]) -> Text:
"""Format list of string into markdown list
Args:
contents: (List[string]), list of string to be formatted
Returns:
String
"""
# print(contents)
return '\n'.join(
[template.LIST_TEMPLATE.format(
level='',
content=item
) for item in contents if item.strip()])
def create_warning_notes(warnings: List[Text]) -> Text:
"""Format list of warnings into markdown list
Args:
warnings: (List(string)), list of warnings
Returns:
String
"""
warning_title = template.SUB_SUB_SECTION_TITLE.format(
content='Warnings'
)
return warning_title + create_content_list(warnings)
| """Create metric table for pairwise comparison
Args:
analysis_list: (List[analysis_entity_pb2.Analysis])
same_match_value: (Union[str, float])
Returns:
string
"""
row_list = set()
column_list = set()
# a dictionary with {attributeone-attributetwo: metric_value}
analysis_name_value_map = {}
for item in analysis_list:
value = item.smetrics[0].value
name_list = [att.name for att in item.features]
row_list.add(name_list[0])
column_list.add(name_list[1])
analysis_name_value_map['-'.join(name_list)] = value
return create_pairwise_metric_table(
row_list=row_list,
column_list=column_list,
name_value_map=analysis_name_value_map,
same_match_value=same_match_value) | identifier_body |
utils.py | # Copyright 2019 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for report generation"""
from __future__ import absolute_import
from __future__ import print_function
from collections import OrderedDict
from typing import Set, Dict, List, Union, Text
from ml_eda.proto import analysis_entity_pb2
from ml_eda.preprocessing.analysis_query import query_constants
from ml_eda.reporting import template
from ml_eda.reporting import visualization
from ml_eda.reporting import formatting
Analysis = analysis_entity_pb2.Analysis
TableMetric = analysis_entity_pb2.TableMetric
ScalarMetric = analysis_entity_pb2.ScalarMetric
Attribute = analysis_entity_pb2.Attribute
def create_table_descriptive_row_from_analysis(
attribute_name: Text,
base_analysis: Analysis,
additional_analysis: Analysis,
figure_base_path: Text
) -> Text:
# pylint: disable-msg=too-many-locals
"""Create makrdown formatted descriptive analysis result
Args:
attribute_name: (string), name of the attribute
base_analysis: (analysis_entity_pb2.Analysis), analysis holding
all the metrics
additional_analysis: (analysis_entity_pb2.Analysis), histogram for
numerical attribute, value_counts for categorical attributes
figure_base_path: (string), the folder for holding figures
Returns:
string, markdown formatted content
"""
row_template = template.TABLE_DESCRIPTIVE_ROW_TEMPLATE
stats_template = template.TABLE_DESCRIPTIVE_STATS_TEMPLATE
metrics = base_analysis.smetrics
attribute_type = base_analysis.features[0].type
# Make sure the display order of each attribute is consistent
common_order = query_constants.COMMON_ORDER
if attribute_type == Attribute.NUMERICAL:
detail_order = query_constants.NUMERICAL_ORDER
else:
detail_order = query_constants.CATEGORICAL_ORDER
# Use an OrderedDict to store the result
result_holder = OrderedDict(
[(item, 0) for item in common_order + detail_order])
for item in metrics:
name = ScalarMetric.Name.Name(item.name)
value = formatting.numeric_formatting(item.value)
result_holder[name] = value
# Construct the markdown formatted row
row_stats_contents = []
for item in result_holder:
row_stats_contents.append(stats_template.format(
metric=item,
value=result_holder[item]
))
figure_path = visualization.plot_bar_chart(additional_analysis,
figure_base_path)
return row_template.format(
name=attribute_name,
type=Attribute.Type.Name(attribute_type),
stats=' <br/> '.join(row_stats_contents),
url=figure_path,
alt_text=attribute_name,
)
def create_table_from_table_metric(table_metric: TableMetric) -> Text:
"""Create a table for a TableMetric object. Currently, this function is
used for Contingency_Table and TABLE_DESCRIPTIVE
Examples:
​|Cash|Credit Card|No Charge|Unknown|Mobile|Prcard
:-----:|:-----:|:-----:|:-----:|:-----:|:-----:|:-----:
frequency|108114952.0|74475448.0|797730.0|369844.0|255082.0|192063.0
Args:
table_metric: (analysis_entity_pb2.TableMetric)
Returns:
string
"""
supported_metric = {
TableMetric.CONTINGENCY_TABLE,
TableMetric.TABLE_DESCRIPTIVE
}
assert table_metric.name in supported_metric
table_template = template.TABLE_TEMPLATE
headers = ['​'] + list(table_metric.column_indexes)
header_string = "|".join(headers)
header_separator = "|".join([":-----:" for i in range(len(headers))])
table_content = []
for row in table_metric.rows:
# row header is in BOLD
row_header = template.BOLD.format(
content=str(row.row_index).strip())
row_values = [row_header] + [formatting.numeric_formatting(item.value)
for item in row.cells]
table_content.append("|".join(row_values))
table_content_string = "\n".join(table_content)
return table_template.format(
header=header_string,
header_separator=header_separator,
table_content=table_content_string
)
def | (
row_list: Set[Text],
column_list: Set[Text],
name_value_map: Dict[Text, float],
same_match_value
) -> Text:
"""Construct table for pair-wise computed metrics, e.g.,
PEARSON_CORRELATION, ANOVA, CHI_SQUARE, INFORMATION_GAIN
Examples:
​|tips|tolls|trip_total
:-----:|:-----:|:-----:|:-----:
tips|1|0.0001942405360750854|0.1952170878648758
tolls|0.0001942405360750854|1|0.22858665883541107
trip_total|0.1952170878648758|0.22858665883541107|1
Args:
row_list: (List[str]), list of attribute names for table row names
column_list: (List[str]), list of attribute names for the table header
name_value_map: (Dict[str, float]), map of name -> value
same_match_value: value if the column and row name are the same. This
could be either float or 'NA', depending on whether the computation of
A-v.s.-A makes sense
Returns:
string
"""
table_template = template.TABLE_TEMPLATE
headers = ['​'] + list(column_list)
header_string = "|".join(headers)
header_separator = "|".join([":-----:" for i in range(len(headers))])
table_content = []
for row_name in row_list:
# row header is in BOLD
row_values = [template.BOLD.format(content=row_name.strip())]
for col_name in column_list:
# same_match_value is used when row_name == column_name
if row_name == col_name:
value = same_match_value
else:
value = name_value_map[row_name + '-' + col_name]
# if the same_match_value is string, simply append it
if isinstance(value, str):
row_values.append(same_match_value)
else:
row_values.append(formatting.numeric_formatting(value))
table_content.append("|".join(row_values))
table_content_string = "\n".join(table_content)
return table_template.format(
header=header_string,
header_separator=header_separator,
table_content=table_content_string
)
def create_no_order_pair_metric_section(
analysis_list: List[Analysis],
same_match_value: Union[Text, float],
figure_base_path: Text,
table_name: Text = "NA"
) -> Text:
"""Create metric table for pairwise comparison
Args:
analysis_list: (List[analysis_entity_pb2.Analysis])
same_match_value: (Union[str, float])
figure_base_path: (string), the folder for holding figures
table_name: (str)
Returns:
string
"""
attribute_list = set()
# a dictionary with {attributeone-attributetwo: metric_value}
analysis_name_value_map = {}
for item in analysis_list:
value = item.smetrics[0].value
name_list = [att.name for att in item.features]
attribute_list.update(name_list)
analysis_name_value_map['-'.join(name_list)] = value
analysis_name_value_map['-'.join(reversed(name_list))] = value
table_content = create_pairwise_metric_table(
row_list=attribute_list,
column_list=attribute_list,
name_value_map=analysis_name_value_map,
same_match_value=same_match_value)
if table_name != "NA":
figure_path = visualization.plot_heat_map_for_metric_table(
heat_map_name=table_name,
row_list=attribute_list,
column_list=attribute_list,
name_value_map=analysis_name_value_map,
same_match_value=same_match_value,
figure_base_path=figure_base_path)
figure_content = template.IMAGE_TEMPLATE.format(
url=figure_path,
alt_text=table_name
)
else:
figure_content = ""
return table_content + figure_content
def create_order_pair_metric_section(
analysis_list: List[Analysis],
same_match_value: Union[Text, float]
) -> Text:
"""Create metric table for pairwise comparison
Args:
analysis_list: (List[analysis_entity_pb2.Analysis])
same_match_value: (Union[str, float])
Returns:
string
"""
row_list = set()
column_list = set()
# a dictionary with {attributeone-attributetwo: metric_value}
analysis_name_value_map = {}
for item in analysis_list:
value = item.smetrics[0].value
name_list = [att.name for att in item.features]
row_list.add(name_list[0])
column_list.add(name_list[1])
analysis_name_value_map['-'.join(name_list)] = value
return create_pairwise_metric_table(
row_list=row_list,
column_list=column_list,
name_value_map=analysis_name_value_map,
same_match_value=same_match_value)
def create_target_metrics_highlight(
target_name: Text,
metric_name_list: List[Text],
metric_analysis_list: List[List[Analysis]]
) -> Text:
# pylint: disable-msg=too-many-locals
"""Create the content for highlight section regarding a target attribute
Args:
target_name: (string)
metric_name_list: (List[string])
metric_analysis_list: (List[List[analysis_entity_pb2.Analysis]])
Returns:
"""
assert len(metric_name_list) == len(metric_analysis_list)
# Every metric should have the same length, i.e., target v.s. remaining
assert len({len(item) for item in metric_analysis_list}) == 1
name_enrich = {
'ANOVA': 'ANOVA P-value',
'CHI_SQUARE': 'Chi-square P-value',
'INFORMATION_GAIN': 'Information Gain',
'PEARSON_CORRELATION': 'Correlation Coefficient'
}
table_template = template.TARGET_METRIC_HIGHLIGHT_TEMPLATE
row_template = template.TARGET_METRIC_HIGHLIGHT_ROW_TEMPLATE
num_metrics = len(metric_name_list)
enrich_name_list = [name_enrich[item] if item in name_enrich else item
for item in metric_name_list]
metric_names_str = '|'.join(enrich_name_list)
separator_str = ':-----:|' * num_metrics
attribute_set = set()
metric_holders = {metric: {} for metric in metric_name_list}
for i in range(num_metrics):
for analysis in metric_analysis_list[i]:
metric_name = Analysis.Name.Name(analysis.name)
attribute_name = [att.name for att in analysis.features
if att.name != target_name][0]
attribute_set.add(attribute_name)
metric_value = analysis.smetrics[0].value
metric_holders[metric_name][attribute_name] = metric_value
row_content_list = []
for attribute in attribute_set:
values_str = '|'.join(
[formatting.numeric_formatting(metric_holders[metric][attribute])
for metric in metric_name_list])
row_content_list.append(row_template.format(
name=attribute,
values=values_str
))
return table_template.format(
target_column=target_name,
metric_names=metric_names_str,
seperators=separator_str,
row_content='\n'.join(row_content_list)
)
def create_content_list(contents: List[Text]) -> Text:
"""Format list of string into markdown list
Args:
contents: (List[string]), list of string to be formatted
Returns:
String
"""
# print(contents)
return '\n'.join(
[template.LIST_TEMPLATE.format(
level='',
content=item
) for item in contents if item.strip()])
def create_warning_notes(warnings: List[Text]) -> Text:
"""Format list of warnings into markdown list
Args:
warnings: (List(string)), list of warnings
Returns:
String
"""
warning_title = template.SUB_SUB_SECTION_TITLE.format(
content='Warnings'
)
return warning_title + create_content_list(warnings)
| create_pairwise_metric_table | identifier_name |
utils.py | # Copyright 2019 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for report generation"""
from __future__ import absolute_import
from __future__ import print_function
from collections import OrderedDict
from typing import Set, Dict, List, Union, Text
from ml_eda.proto import analysis_entity_pb2
from ml_eda.preprocessing.analysis_query import query_constants
from ml_eda.reporting import template
from ml_eda.reporting import visualization
from ml_eda.reporting import formatting
Analysis = analysis_entity_pb2.Analysis
TableMetric = analysis_entity_pb2.TableMetric
ScalarMetric = analysis_entity_pb2.ScalarMetric
Attribute = analysis_entity_pb2.Attribute
def create_table_descriptive_row_from_analysis(
attribute_name: Text,
base_analysis: Analysis,
additional_analysis: Analysis,
figure_base_path: Text
) -> Text:
# pylint: disable-msg=too-many-locals
"""Create makrdown formatted descriptive analysis result
Args:
attribute_name: (string), name of the attribute
base_analysis: (analysis_entity_pb2.Analysis), analysis holding
all the metrics
additional_analysis: (analysis_entity_pb2.Analysis), histogram for
numerical attribute, value_counts for categorical attributes
figure_base_path: (string), the folder for holding figures
Returns:
string, markdown formatted content
"""
row_template = template.TABLE_DESCRIPTIVE_ROW_TEMPLATE
stats_template = template.TABLE_DESCRIPTIVE_STATS_TEMPLATE
metrics = base_analysis.smetrics
attribute_type = base_analysis.features[0].type
# Make sure the display order of each attribute is consistent
common_order = query_constants.COMMON_ORDER
if attribute_type == Attribute.NUMERICAL:
detail_order = query_constants.NUMERICAL_ORDER
else:
detail_order = query_constants.CATEGORICAL_ORDER
# Use an OrderedDict to store the result
result_holder = OrderedDict(
[(item, 0) for item in common_order + detail_order])
for item in metrics:
name = ScalarMetric.Name.Name(item.name)
value = formatting.numeric_formatting(item.value)
result_holder[name] = value
# Construct the markdown formatted row
row_stats_contents = []
for item in result_holder:
row_stats_contents.append(stats_template.format(
metric=item,
value=result_holder[item]
))
figure_path = visualization.plot_bar_chart(additional_analysis,
figure_base_path)
return row_template.format(
name=attribute_name,
type=Attribute.Type.Name(attribute_type),
stats=' <br/> '.join(row_stats_contents),
url=figure_path,
alt_text=attribute_name,
)
def create_table_from_table_metric(table_metric: TableMetric) -> Text:
"""Create a table for a TableMetric object. Currently, this function is
used for Contingency_Table and TABLE_DESCRIPTIVE
Examples:
​|Cash|Credit Card|No Charge|Unknown|Mobile|Prcard
:-----:|:-----:|:-----:|:-----:|:-----:|:-----:|:-----:
frequency|108114952.0|74475448.0|797730.0|369844.0|255082.0|192063.0
Args:
table_metric: (analysis_entity_pb2.TableMetric)
Returns:
string
"""
supported_metric = {
TableMetric.CONTINGENCY_TABLE,
TableMetric.TABLE_DESCRIPTIVE
}
assert table_metric.name in supported_metric
table_template = template.TABLE_TEMPLATE
headers = ['​'] + list(table_metric.column_indexes)
header_string = "|".join(headers)
header_separator = "|".join([":-----:" for i in range(len(headers))])
table_content = []
for row in table_metric.rows:
# row header is in BOLD
row_header = template.BOLD.format(
content=str(row.row_index).strip())
row_values = [row_header] + [formatting.numeric_formatting(item.value)
for item in row.cells]
table_content.append("|".join(row_values))
table_content_string = "\n".join(table_content)
return table_template.format(
header=header_string,
header_separator=header_separator,
table_content=table_content_string
)
def create_pairwise_metric_table(
row_list: Set[Text],
column_list: Set[Text],
name_value_map: Dict[Text, float],
same_match_value
) -> Text:
"""Construct table for pair-wise computed metrics, e.g.,
PEARSON_CORRELATION, ANOVA, CHI_SQUARE, INFORMATION_GAIN
Examples:
​|tips|tolls|trip_total
:-----:|:-----:|:-----:|:-----:
tips|1|0.0001942405360750854|0.1952170878648758
tolls|0.0001942405360750854|1|0.22858665883541107
trip_total|0.1952170878648758|0.22858665883541107|1
Args:
row_list: (List[str]), list of attribute names for table row names
column_list: (List[str]), list of attribute names for the table header
name_value_map: (Dict[str, float]), map of name -> value
same_match_value: value if the column and row name are the same. This
could be either float or 'NA', depending on whether the computation of
A-v.s.-A makes sense
Returns:
string
"""
table_template = template.TABLE_TEMPLATE
headers = ['​'] + list(column_list)
header_string = "|".join(headers)
header_separator = "|".join([":-----:" for i in range(len(headers))])
table_content = []
for row_name in row_list:
# row header is in BOLD
row_values = [template.BOLD.format(content=row_name.strip())]
for col_name in column_list:
# same_match_value is used when row_name == column_name
if row_name == col_name:
value = same_match_value
else:
value = name_value_map[row_name + '-' + col_name]
# if the same_match_value is string, simply append it
if isinstance(value, str):
row_values.append(same_match_value)
else:
row_values.append(formatting.numeric_formatting(value))
table_content.append("|".join(row_values))
table_content_string = "\n".join(table_content)
return table_template.format(
header=header_string,
header_separator=header_separator,
table_content=table_content_string
)
def create_no_order_pair_metric_section(
analysis_list: List[Analysis],
same_match_value: Union[Text, float],
figure_base_path: Text,
table_name: Text = "NA"
) -> Text:
"""Create metric table for pairwise comparison
Args:
analysis_list: (List[analysis_entity_pb2.Analysis])
same_match_value: (Union[str, float])
figure_base_path: (string), the folder for holding figures
table_name: (str)
Returns:
string
"""
attribute_list = set()
# a dictionary with {attributeone-attributetwo: metric_value}
analysis_name_value_map = {}
for item in analysis_list:
value = item.smetrics[0].value
name_list = [att.name for att in item.features]
attribute_list.update(name_list)
analysis_name_value_map['-'.join(name_list)] = value
analysis_name_value_map['-'.join(reversed(name_list))] = value
table_content = create_pairwise_metric_table(
row_list=attribute_list,
column_list=attribute_list,
name_value_map=analysis_name_value_map,
same_match_value=same_match_value)
if table_name != "NA":
figure_path = visualization.plot_heat_map_for_metric_table(
heat_map_name=table_name,
row_list=attribute_list,
column_list=attribute_list,
name_value_map=analysis_name_value_map,
same_match_value=same_match_value,
figure_base_path=figure_base_path)
figure_content = template.IMAGE_TEMPLATE.format(
url=figure_path,
alt_text=table_name
)
else:
figure_content = ""
return table_content + figure_content
def create_order_pair_metric_section(
analysis_list: List[Analysis],
same_match_value: Union[Text, float]
) -> Text:
"""Create metric table for pairwise comparison
Args:
analysis_list: (List[analysis_entity_pb2.Analysis])
same_match_value: (Union[str, float])
Returns:
string
"""
row_list = set()
column_list = set()
# a dictionary with {attributeone-attributetwo: metric_value}
analysis_name_value_map = {}
for item in analysis_list:
value = item.smetrics[0].value
name_list = [att.name for att in item.features]
row_list.add(name_list[0])
column_list.add(name_list[1])
analysis_name_value_map['-'.join(name_list)] = value
return create_pairwise_metric_table(
row_list=row_list,
column_list=column_list,
name_value_map=analysis_name_value_map,
same_match_value=same_match_value)
def create_target_metrics_highlight(
target_name: Text,
metric_name_list: List[Text],
metric_analysis_list: List[List[Analysis]]
) -> Text:
# pylint: disable-msg=too-many-locals
"""Create the content for highlight section regarding a target attribute
Args:
target_name: (string)
metric_name_list: (List[string])
metric_analysis_list: (List[List[analysis_entity_pb2.Analysis]])
Returns:
"""
assert len(metric_name_list) == len(metric_analysis_list)
# Every metric should have the same length, i.e., target v.s. remaining
assert len({len(item) for item in metric_analysis_list}) == 1
name_enrich = {
'ANOVA': 'ANOVA P-value',
'CHI_SQUARE': 'Chi-square P-value',
'INFORMATION_GAIN': 'Information Gain',
'PEARSON_CORRELATION': 'Correlation Coefficient'
}
table_template = template.TARGET_METRIC_HIGHLIGHT_TEMPLATE
row_template = template.TARGET_METRIC_HIGHLIGHT_ROW_TEMPLATE
num_metrics = len(metric_name_list)
enrich_name_list = [name_enrich[item] if item in name_enrich else item
for item in metric_name_list]
metric_names_str = '|'.join(enrich_name_list)
separator_str = ':-----:|' * num_metrics
attribute_set = set()
metric_holders = {metric: {} for metric in metric_name_list}
for i in range(num_metrics):
for analysis in metric_analysis_list[i]:
|
row_content_list = []
for attribute in attribute_set:
values_str = '|'.join(
[formatting.numeric_formatting(metric_holders[metric][attribute])
for metric in metric_name_list])
row_content_list.append(row_template.format(
name=attribute,
values=values_str
))
return table_template.format(
target_column=target_name,
metric_names=metric_names_str,
seperators=separator_str,
row_content='\n'.join(row_content_list)
)
def create_content_list(contents: List[Text]) -> Text:
"""Format list of string into markdown list
Args:
contents: (List[string]), list of string to be formatted
Returns:
String
"""
# print(contents)
return '\n'.join(
[template.LIST_TEMPLATE.format(
level='',
content=item
) for item in contents if item.strip()])
def create_warning_notes(warnings: List[Text]) -> Text:
"""Format list of warnings into markdown list
Args:
warnings: (List(string)), list of warnings
Returns:
String
"""
warning_title = template.SUB_SUB_SECTION_TITLE.format(
content='Warnings'
)
return warning_title + create_content_list(warnings)
| metric_name = Analysis.Name.Name(analysis.name)
attribute_name = [att.name for att in analysis.features
if att.name != target_name][0]
attribute_set.add(attribute_name)
metric_value = analysis.smetrics[0].value
metric_holders[metric_name][attribute_name] = metric_value | conditional_block |
receiver.rs | use bytes::Bytes;
use failure::Error;
use futures::prelude::*;
use futures::ready;
use log::{debug, info, trace, warn};
use tokio::time::{self, delay_for, interval, Delay, Interval};
use crate::connection::HandshakeReturner;
use crate::loss_compression::compress_loss_list;
use crate::packet::{ControlPacket, ControlTypes, DataPacket, Packet, SrtControlPacket};
use crate::sink_send_wrapper::SinkSendWrapper;
use crate::{seq_number::seq_num_range, ConnectionSettings, SeqNumber};
use std::cmp;
use std::cmp::Ordering;
use std::iter::Iterator;
use std::net::SocketAddr;
use std::pin::Pin;
use std::task::{Context, Poll};
use std::time::{Duration, Instant};
mod buffer;
use self::buffer::RecvBuffer;
struct LossListEntry {
seq_num: SeqNumber,
// last time it was fed back in a NAK
feedback_time: i32,
// the number of times this entry has been fed back into NAK
k: i32,
}
struct AckHistoryEntry {
/// the highest packet sequence number received that this ACK packet ACKs + 1
ack_number: SeqNumber,
/// the ack sequence number
ack_seq_num: i32,
/// timestamp that it was sent at
timestamp: i32,
}
pub struct Receiver<T> {
settings: ConnectionSettings,
// Function to return handshakes with
hs_returner: Option<HandshakeReturner>,
/// the round trip time, in microseconds
/// is calculated each ACK2
rtt: i32,
/// the round trip time variance, in microseconds
/// is calculated each ACK2
rtt_variance: i32,
/// the future to send or receive packets
sock: T,
/// The time to wait for a packet to arrive
listen_timeout: Duration,
/// https://tools.ietf.org/html/draft-gg-udt-03#page-12
/// Receiver's Loss List: It is a list of tuples whose values include:
/// the sequence numbers of detected lost data packets, the latest
/// feedback time of each tuple, and a parameter k that is the number
/// of times each one has been fed back in NAK. Values are stored in
/// the increasing order of packet sequence numbers.
loss_list: Vec<LossListEntry>,
/// https://tools.ietf.org/html/draft-gg-udt-03#page-12
/// ACK History Window: A circular array of each sent ACK and the time
/// it is sent out. The most recent value will overwrite the oldest
/// one if no more free space in the array.
ack_history_window: Vec<AckHistoryEntry>,
/// https://tools.ietf.org/html/draft-gg-udt-03#page-12
/// PKT History Window: A circular array that records the arrival time
/// of each data packet.
///
/// First is sequence number, second is timestamp
packet_history_window: Vec<(SeqNumber, i32)>,
/// https://tools.ietf.org/html/draft-gg-udt-03#page-12
/// Packet Pair Window: A circular array that records the time
/// interval between each probing packet pair.
///
/// First is seq num, second is time
packet_pair_window: Vec<(SeqNumber, i32)>,
/// Wakes the thread when an ACK is to be sent
ack_interval: Interval,
/// Wakes the thread when a NAK is to be sent
nak_interval: Delay,
/// the highest received packet sequence number + 1
lrsn: SeqNumber,
/// The number of consecutive timeouts
exp_count: i32,
/// The ID of the next ack packet
next_ack: i32,
/// The timestamp of the probe time
/// Used to see duration between packets
probe_time: Option<i32>,
timeout_timer: Delay,
/// The ACK sequence number of the largest ACK2 received, and the ack number
lr_ack_acked: (i32, SeqNumber),
/// The buffer
buffer: RecvBuffer,
/// Shutdown flag. This is set so that when the buffer is flushed, the stream returns Poll::Ready(None)
shutdown_flag: bool,
/// Release delay
/// wakes the thread when there is a new packet to be released
release_delay: Delay,
/// A buffer of packets to send to the underlying sink
send_wrapper: SinkSendWrapper<(Packet, SocketAddr)>,
}
impl<T> Receiver<T>
where
T: Stream<Item = Result<(Packet, SocketAddr), Error>>
+ Sink<(Packet, SocketAddr), Error = Error>
+ Unpin,
{
pub fn new(
sock: T,
settings: ConnectionSettings,
hs_returner: Option<HandshakeReturner>,
) -> Receiver<T> {
let init_seq_num = settings.init_seq_num;
info!(
"Receiving started from {:?}, with latency={:?}",
settings.remote, settings.tsbpd_latency
);
Receiver {
settings,
hs_returner,
sock,
rtt: 10_000,
rtt_variance: 1_000,
listen_timeout: Duration::from_secs(1),
loss_list: Vec::new(),
ack_history_window: Vec::new(),
packet_history_window: Vec::new(),
packet_pair_window: Vec::new(),
ack_interval: interval(Duration::from_millis(10)),
nak_interval: delay_for(Duration::from_millis(10)),
lrsn: init_seq_num, // at start, we have received everything until the first packet, exclusive (aka nothing)
next_ack: 1,
exp_count: 1,
probe_time: None,
timeout_timer: delay_for(Duration::from_secs(1)),
lr_ack_acked: (0, init_seq_num),
buffer: RecvBuffer::new(init_seq_num),
shutdown_flag: false,
release_delay: delay_for(Duration::from_secs(0)), // start with an empty delay
send_wrapper: SinkSendWrapper::new(),
}
}
pub fn settings(&self) -> &ConnectionSettings {
&self.settings
}
pub fn remote(&self) -> SocketAddr {
self.settings.remote
}
fn timeout_timer(&mut self) -> Pin<&mut Delay> {
Pin::new(&mut self.timeout_timer)
}
fn sock(&mut self) -> Pin<&mut T> {
Pin::new(&mut self.sock)
}
fn ack_interval(&mut self) -> Pin<&mut Interval> {
Pin::new(&mut self.ack_interval)
}
fn nak_interval(&mut self) -> Pin<&mut Delay> {
Pin::new(&mut self.nak_interval)
}
fn release_delay(&mut self) -> Pin<&mut Delay> {
Pin::new(&mut self.release_delay)
}
fn send_to_remote(&mut self, cx: &mut Context, packet: Packet) -> Result<(), Error> {
self.send_wrapper
.send(&mut self.sock, (packet, self.settings.remote), cx)
}
fn reset_timeout(&mut self) {
self.timeout_timer.reset(time::Instant::from_std(
Instant::now() + self.listen_timeout,
))
}
fn on_ack_event(&mut self, cx: &mut Context) -> Result<(), Error> {
// get largest inclusive received packet number
let ack_number = match self.loss_list.first() {
// There is an element in the loss list
Some(i) => i.seq_num,
// No elements, use lrsn, as it's already exclusive
None => self.lrsn,
};
// 2) If (a) the ACK number equals to the largest ACK number ever
// acknowledged by ACK2
if ack_number == self.lr_ack_acked.1 {
// stop (do not send this ACK).
return Ok(());
}
// make sure this ACK number is greater than or equal to one sent previously
if let Some(w) = self.ack_history_window.last() {
assert!(w.ack_number <= ack_number);
}
trace!(
"Sending ACK; ack_num={:?}, lr_ack_acked={:?}",
ack_number,
self.lr_ack_acked.1
);
if let Some(&AckHistoryEntry {
ack_number: last_ack_number,
timestamp: last_timestamp,
..
}) = self.ack_history_window.first()
{
// or, (b) it is equal to the ACK number in the
// last ACK
if last_ack_number == ack_number &&
// and the time interval between this two ACK packets is
// less than 2 RTTs,
(self.get_timestamp_now() - last_timestamp) < (self.rtt * 2)
{
// stop (do not send this ACK).
return Ok(());
}
}
// 3) Assign this ACK a unique increasing ACK sequence number.
let ack_seq_num = self.next_ack;
self.next_ack += 1;
// 4) Calculate the packet arrival speed according to the following
// algorithm:
let packet_recv_rate = {
if self.packet_history_window.len() < 16 {
0
} else {
// Calculate the median value of the last 16 packet arrival
// intervals (AI) using the values stored in PKT History Window.
let mut last_16: Vec<_> = self.packet_history_window
[self.packet_history_window.len() - 16..]
.iter()
.map(|&(_, ts)| ts)
.collect();
last_16.sort();
// the median timestamp
let ai = last_16[last_16.len() / 2];
// In these 16 values, remove those either greater than AI*8 or
// less than AI/8.
let filtered: Vec<i32> = last_16
.iter()
.filter(|&&n| n / 8 < ai && n > ai / 8)
.cloned()
.collect();
// If more than 8 values are left, calculate the
// average of the left values AI', and the packet arrival speed is
// 1/AI' (number of packets per second). Otherwise, return 0.
if filtered.len() > 8 {
(filtered.iter().fold(0i64, |sum, &val| sum + i64::from(val))
/ (filtered.len() as i64)) as i32
} else {
0
}
}
};
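// As written, the surviving-intervals branch above yields the mean arrival
// interval AI' in microseconds (e.g. 12 surviving intervals averaging 125 us
// give 125); a packets-per-second figure would be 1_000_000 / AI'.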
// 5) Calculate the estimated link capacity according to the following algorithm:
let est_link_cap = {
if self.packet_pair_window.len() < 16 {
0
} else {
// Calculate the median value of the last 16 packet pair
// intervals (PI) using the values in Packet Pair Window, and the
// link capacity is 1/PI (number of packets per second).
let pi = {
let mut last_16: Vec<_> = self.packet_pair_window
[self.packet_pair_window.len() - 16..]
.iter()
.map(|&(_, time)| time)
.collect();
last_16.sort();
last_16[last_16.len() / 2]
};
// Multiply by 1M because pi is in microseconds
// pi is in us/packet
(1.0e6 / (pi as f32)) as i32
}
};
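// Illustrative example: a median probing-pair interval of 100 us gives
// 1.0e6 / 100.0 = 10_000 packets per second as the estimated link capacity.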
// Pack the ACK packet with RTT, RTT Variance, and flow window size (available
// receiver buffer size).
let ack = self.make_control_packet(ControlTypes::Ack {
ack_seq_num,
ack_number,
rtt: Some(self.rtt),
rtt_variance: Some(self.rtt_variance),
buffer_available: None, // TODO: add this
packet_recv_rate: Some(packet_recv_rate),
est_link_cap: Some(est_link_cap),
});
// add it to the ack history
let now = self.get_timestamp_now();
self.ack_history_window.push(AckHistoryEntry {
ack_number,
ack_seq_num,
timestamp: now,
});
self.send_to_remote(cx, ack)?;
Ok(())
}
fn on_nak_event(&mut self, cx: &mut Context) -> Result<(), Error> {
// reset the NAK timer; rtt and rtt_variance are in microseconds (SYN = 10_000 us)
// NAK is used to trigger a negative acknowledgement (NAK). Its period
// is dynamically updated to 4 * RTT + RTTVar + SYN, where RTTVar is the
// variance of RTT samples.
let nak_interval_us = 4 * self.rtt as u64 + self.rtt_variance as u64 + 10_000;
self.nak_interval.reset(time::Instant::from_std(
Instant::now() + Duration::from_micros(nak_interval_us),
));
// Search the receiver's loss list, find out all those sequence numbers
// whose last feedback time is k*RTT before, where k is initialized as 2
// and increased by 1 each time the number is fed back. Compress
// (according to section 6.4) and send these numbers back to the sender
// in an NAK packet.
let now = self.get_timestamp_now();
// increment k and change feedback time, returning sequence numbers
let seq_nums = {
let mut ret = Vec::new();
let rtt = self.rtt;
for pak in self
.loss_list
.iter_mut()
.filter(|lle| lle.feedback_time < now - lle.k * rtt)
{
pak.k += 1;
pak.feedback_time = now;
ret.push(pak.seq_num);
}
ret
};
if seq_nums.is_empty() {
return Ok(());
}
// send the nak
self.send_nak(cx, seq_nums.into_iter())?;
Ok(())
}
// checks the ACK and NAK timers
// if one of them has fired, run the corresponding event handler
fn check_timers(&mut self, cx: &mut Context) -> Result<(), Error> {
// see if we need to ACK or NAK
if let Poll::Ready(Some(_)) = self.ack_interval().poll_next(cx) {
self.on_ack_event(cx)?;
}
if let Poll::Ready(_) = self.nak_interval().poll(cx) {
self.on_nak_event(cx)?;
}
// no need to do anything specific
let _ = self.release_delay().poll(cx);
Ok(())
}
// handles a SRT control packet
fn handle_srt_control_packet(&mut self, pack: &SrtControlPacket) -> Result<(), Error> {
use self::SrtControlPacket::*;
match pack {
HandshakeRequest(_) | HandshakeResponse(_) => {
warn!("Received handshake SRT packet, HSv5 expected");
}
_ => unimplemented!(),
}
Ok(())
}
// handles an incoming packet
fn handle_packet(
&mut self,
cx: &mut Context,
packet: &Packet,
from: &SocketAddr,
) -> Result<(), Error> {
// We don't care about packets from elsewhere
if *from != self.settings.remote {
info!("Packet received from unknown address: {:?}", from);
return Ok(());
}
if self.settings.local_sockid != packet.dest_sockid() {
// packet isn't applicable
info!(
"Packet send to socket id ({}) that does not match local ({})",
packet.dest_sockid().0,
self.settings.local_sockid.0
);
return Ok(());
}
trace!("Received packet: {:?}", packet);
match packet {
Packet::Control(ctrl) => {
// handle the control packet
match &ctrl.control_type {
ControlTypes::Ack { .. } => warn!("Receiver received ACK packet, unusual"),
ControlTypes::Ack2(seq_num) => self.handle_ack2(*seq_num)?,
ControlTypes::DropRequest { .. } => unimplemented!(),
ControlTypes::Handshake(_) => {
if let Some(ret) = self.hs_returner.as_ref() {
if let Some(pack) = (*ret)(&packet) {
self.send_to_remote(cx, pack)?;
}
}
}
ControlTypes::KeepAlive => {} // TODO: actually reset EXP etc
ControlTypes::Nak { .. } => warn!("Receiver received NAK packet, unusual"),
ControlTypes::Shutdown => {
info!("Shutdown packet received, flushing receiver...");
self.shutdown_flag = true;
} // end of stream
ControlTypes::Srt(srt_packet) => {
self.handle_srt_control_packet(srt_packet)?;
}
}
}
Packet::Data(data) => self.handle_data_packet(cx, &data)?,
};
Ok(())
}
fn handle_ack2(&mut self, seq_num: i32) -> Result<(), Error> {
// 1) Locate the related ACK in the ACK History Window according to the
// ACK sequence number in this ACK2.
let id_in_wnd = match self
.ack_history_window
.as_slice()
.binary_search_by(|entry| entry.ack_seq_num.cmp(&seq_num))
{
Ok(i) => Some(i),
Err(_) => None,
};
if let Some(id) = id_in_wnd {
let AckHistoryEntry {
timestamp: send_timestamp,
ack_number,
..
} = self.ack_history_window[id];
// 2) Update the largest ACK number ever been acknowledged.
self.lr_ack_acked = (seq_num, ack_number);
// 3) Calculate new rtt according to the ACK2 arrival time and the ACK
// departure time, and update the RTT value as: RTT = (RTT * 7 +
// rtt) / 8
let immediate_rtt = self.get_timestamp_now() - send_timestamp;
self.rtt = (self.rtt * 7 + immediate_rtt) / 8;
// 4) Update RTTVar by: RTTVar = (RTTVar * 3 + abs(RTT - rtt)) / 4.
self.rtt_variance =
(self.rtt_variance * 3 + (self.rtt - immediate_rtt).abs()) / 4;
// 5) Update both ACK and NAK period to 4 * RTT + RTTVar + SYN.
let ack_us = 4 * self.rtt as u64 + self.rtt_variance as u64 + 10_000;
self.ack_interval = interval(Duration::from_micros(ack_us));
} else {
warn!(
"ACK sequence number in ACK2 packet not found in ACK history: {}",
seq_num
);
}
Ok(())
}
fn handle_data_packet(&mut self, cx: &mut Context, data: &DataPacket) -> Result<(), Error> {
let now = self.get_timestamp_now();
// 1) Reset the ExpCount to 1. If there is no unacknowledged data
// packet, or if this is an ACK or NAK control packet, reset the EXP
// timer.
self.exp_count = 1;
// 2&3 don't apply
// 4) If the sequence number of the current data packet is 16n + 1,
// where n is an integer, record the time interval between this packet
// and the previous probe packet (they form a probing packet pair)
if data.seq_number % 16 == 0 {
self.probe_time = Some(now)
} else if data.seq_number % 16 == 1 {
// if there is an entry
if let Some(pt) = self.probe_time {
// calculate and insert
self.packet_pair_window.push((data.seq_number, now - pt));
// reset
self.probe_time = None;
}
}
// 5) Record the packet arrival time in PKT History Window.
self.packet_history_window.push((data.seq_number, now));
// 6)
// a. If the sequence number of the current data packet is greater
// than LRSN, put all the sequence numbers between (but
// excluding) these two values into the receiver's loss list and
// send them to the sender in an NAK packet.
match data.seq_number.cmp(&self.lrsn) {
Ordering::Greater => {
// lrsn is one past the latest packet received, so the NAK range starts there
for i in seq_num_range(self.lrsn, data.seq_number) {
self.loss_list.push(LossListEntry {
seq_num: i,
feedback_time: now,
// k is initialized at 2, as stated on page 12 (very end)
k: 2,
})
}
self.send_nak(cx, seq_num_range(self.lrsn, data.seq_number))?;
}
// b. If the sequence number is less than LRSN, remove it from the
// receiver's loss list.
Ordering::Less => {
match self.loss_list[..].binary_search_by(|ll| ll.seq_num.cmp(&data.seq_number)) {
Ok(i) => {
self.loss_list.remove(i);
}
Err(_) => {
debug!(
"Packet received that's not in the loss list: {:?}, loss_list={:?}",
data.seq_number,
self.loss_list
.iter()
.map(|ll| ll.seq_num.as_raw())
.collect::<Vec<_>>()
);
}
};
}
Ordering::Equal => {}
}
// record that we got this packet
self.lrsn = cmp::max(data.seq_number + 1, self.lrsn);
// we've already gotten this packet, drop it
if self.buffer.next_release() > data.seq_number {
debug!("Received packet {:?} twice", data.seq_number);
return Ok(());
}
self.buffer.add(data.clone());
trace!(
"Received data packet seq_num={}, loc={:?}, buffer={:?}",
data.seq_number,
data.message_loc,
self.buffer,
);
Ok(())
}
// send a NAK, and return the future
fn send_nak<I>(&mut self, cx: &mut Context, lost_seq_nums: I) -> Result<(), Error>
where
I: Iterator<Item = SeqNumber>,
|
fn make_control_packet(&self, control_type: ControlTypes) -> Packet {
Packet::Control(ControlPacket {
timestamp: self.get_timestamp_now(),
dest_sockid: self.settings.remote_sockid,
control_type,
})
}
/// Timestamp in us
fn get_timestamp_now(&self) -> i32 {
self.settings.get_timestamp_now()
}
}
impl<T> Stream for Receiver<T>
where
T: Stream<Item = Result<(Packet, SocketAddr), Error>>
+ Sink<(Packet, SocketAddr), Error = Error>
+ Unpin,
{
type Item = Result<(Instant, Bytes), Error>;
fn poll_next(
self: Pin<&mut Self>,
cx: &mut Context,
) -> Poll<Option<Result<(Instant, Bytes), Error>>> {
let pin = self.get_mut();
pin.check_timers(cx)?;
pin.send_wrapper.poll_send(&mut pin.sock, cx)?;
// either way we want to continue
let _ = pin.sock().poll_flush(cx)?;
loop {
// try to release packets
if let Some((ts, p)) = pin
.buffer
.next_msg_tsbpd(pin.settings.tsbpd_latency, pin.settings.socket_start_time)
{
return Poll::Ready(Some(Ok((
pin.settings.socket_start_time + Duration::from_micros(ts as u64),
p,
))));
}
// drop packets
// TODO: do something with this
let _dropped = pin
.buffer
.drop_too_late_packets(pin.settings.tsbpd_latency, pin.settings.socket_start_time);
if let Poll::Ready(_) = pin.timeout_timer().poll(cx) {
pin.exp_count += 1;
pin.reset_timeout();
}
// if there is a packet ready, set the timeout timer for it
if let Some(release_time) = pin.buffer.next_message_release_time(
pin.settings.socket_start_time,
pin.settings.tsbpd_latency,
) {
pin.release_delay
.reset(time::Instant::from_std(release_time));
let _ = Pin::new(&mut pin.release_delay).poll(cx);
// if we are setup to shutdown, then the internal socket
// returned None, so we shouldn't poll it again, as it may panic
// returning Pending is okay assuming release_delay is greater
// than zero. Technically this should check the return value of
// the above poll, but it seems to work.
if pin.shutdown_flag {
return Poll::Pending;
}
}
// if there isn't a complete message at the beginning of the buffer and we are supposed to be shutting down, shut down
if pin.shutdown_flag && pin.buffer.next_msg_ready().is_none() {
info!("Shutdown received and all packets released, finishing up");
return Poll::Ready(None);
}
// TODO: exp_count
let (packet, addr) = match ready!(pin.sock().poll_next(cx)) {
Some(Ok(p)) => p,
Some(Err(e)) => {
warn!("Error reading packet: {:?}", e);
continue;
}
None => {
// end of stream, shutdown
pin.shutdown_flag = true;
continue;
}
};
// handle the socket
// packet was received, reset exp_count
pin.exp_count = 1;
pin.reset_timeout();
pin.handle_packet(cx, &packet, &addr)?;
// TODO: should this be here for optimal performance?
let _ = pin.sock().poll_flush(cx)?;
}
}
}
| {
let vec: Vec<_> = lost_seq_nums.collect();
debug!("Sending NAK for={:?}", vec);
let pack = self.make_control_packet(ControlTypes::Nak(
compress_loss_list(vec.iter().cloned()).collect(),
));
self.send_to_remote(cx, pack)?;
Ok(())
} | identifier_body |
receiver.rs | use bytes::Bytes;
use failure::Error;
use futures::prelude::*;
use futures::ready;
use log::{debug, info, trace, warn};
use tokio::time::{self, delay_for, interval, Delay, Interval};
use crate::connection::HandshakeReturner;
use crate::loss_compression::compress_loss_list;
use crate::packet::{ControlPacket, ControlTypes, DataPacket, Packet, SrtControlPacket};
use crate::sink_send_wrapper::SinkSendWrapper;
use crate::{seq_number::seq_num_range, ConnectionSettings, SeqNumber};
use std::cmp;
use std::cmp::Ordering;
use std::iter::Iterator;
use std::net::SocketAddr;
use std::pin::Pin;
use std::task::{Context, Poll};
use std::time::{Duration, Instant};
mod buffer;
use self::buffer::RecvBuffer;
struct LossListEntry {
seq_num: SeqNumber,
// last time it was fed back in a NAK
feedback_time: i32,
// the number of times this entry has been fed back into NAK
k: i32,
}
struct AckHistoryEntry {
/// the highest packet sequence number received that this ACK packet ACKs + 1
ack_number: SeqNumber,
/// the ack sequence number
ack_seq_num: i32,
/// timestamp that it was sent at
timestamp: i32,
}
pub struct Receiver<T> {
settings: ConnectionSettings,
// Function to return handshakes with
hs_returner: Option<HandshakeReturner>,
/// the round trip time, in microseconds
/// is calculated each ACK2
rtt: i32,
/// the round trip time variance, in microseconds
/// is calculated each ACK2
rtt_variance: i32,
/// the future to send or receive packets
sock: T,
/// The time to wait for a packet to arrive
listen_timeout: Duration,
/// https://tools.ietf.org/html/draft-gg-udt-03#page-12
/// Receiver's Loss List: It is a list of tuples whose values include:
/// the sequence numbers of detected lost data packets, the latest
/// feedback time of each tuple, and a parameter k that is the number
/// of times each one has been fed back in NAK. Values are stored in
/// the increasing order of packet sequence numbers.
loss_list: Vec<LossListEntry>,
/// https://tools.ietf.org/html/draft-gg-udt-03#page-12
/// ACK History Window: A circular array of each sent ACK and the time
/// it is sent out. The most recent value will overwrite the oldest
/// one if no more free space in the array.
ack_history_window: Vec<AckHistoryEntry>,
/// https://tools.ietf.org/html/draft-gg-udt-03#page-12
/// PKT History Window: A circular array that records the arrival time
/// of each data packet.
///
/// First is sequence number, second is timestamp
packet_history_window: Vec<(SeqNumber, i32)>,
/// https://tools.ietf.org/html/draft-gg-udt-03#page-12
/// Packet Pair Window: A circular array that records the time
/// interval between each probing packet pair.
///
/// First is seq num, second is time
packet_pair_window: Vec<(SeqNumber, i32)>,
/// Wakes the thread when an ACK is to be sent
ack_interval: Interval,
/// Wakes the thread when a NAK is to be sent
nak_interval: Delay,
/// the highest received packet sequence number + 1
lrsn: SeqNumber,
/// The number of consecutive timeouts
exp_count: i32,
/// The ID of the next ack packet
next_ack: i32,
/// The timestamp of the probe time
/// Used to see duration between packets
probe_time: Option<i32>,
timeout_timer: Delay,
/// The ACK sequence number of the largest ACK2 received, and the ack number
lr_ack_acked: (i32, SeqNumber),
/// The buffer
buffer: RecvBuffer,
/// Shutdown flag. This is set so when the buffer is flushed, it returns Async::Ready(None)
shutdown_flag: bool,
/// Release delay
/// wakes the thread when there is a new packet to be released
release_delay: Delay,
/// A buffer of packets to send to the underlying sink
send_wrapper: SinkSendWrapper<(Packet, SocketAddr)>,
}
impl<T> Receiver<T>
where
T: Stream<Item = Result<(Packet, SocketAddr), Error>>
+ Sink<(Packet, SocketAddr), Error = Error>
+ Unpin,
{
pub fn new(
sock: T,
settings: ConnectionSettings,
hs_returner: Option<HandshakeReturner>,
) -> Receiver<T> {
let init_seq_num = settings.init_seq_num;
info!(
"Receiving started from {:?}, with latency={:?}",
settings.remote, settings.tsbpd_latency
);
Receiver {
settings,
hs_returner,
sock,
rtt: 10_000,
rtt_variance: 1_000,
listen_timeout: Duration::from_secs(1),
loss_list: Vec::new(),
ack_history_window: Vec::new(),
packet_history_window: Vec::new(),
packet_pair_window: Vec::new(),
ack_interval: interval(Duration::from_millis(10)),
nak_interval: delay_for(Duration::from_millis(10)),
lrsn: init_seq_num, // at start, we have received everything until the first packet, exclusive (aka nothing)
next_ack: 1,
exp_count: 1,
probe_time: None,
timeout_timer: delay_for(Duration::from_secs(1)),
lr_ack_acked: (0, init_seq_num),
buffer: RecvBuffer::new(init_seq_num),
shutdown_flag: false,
release_delay: delay_for(Duration::from_secs(0)), // start with an empty delay
send_wrapper: SinkSendWrapper::new(),
}
}
pub fn settings(&self) -> &ConnectionSettings {
&self.settings
}
pub fn remote(&self) -> SocketAddr {
self.settings.remote
}
fn timeout_timer(&mut self) -> Pin<&mut Delay> {
Pin::new(&mut self.timeout_timer)
}
fn sock(&mut self) -> Pin<&mut T> {
Pin::new(&mut self.sock)
}
fn ack_interval(&mut self) -> Pin<&mut Interval> {
Pin::new(&mut self.ack_interval)
}
fn nak_interval(&mut self) -> Pin<&mut Delay> {
Pin::new(&mut self.nak_interval)
}
fn release_delay(&mut self) -> Pin<&mut Delay> {
Pin::new(&mut self.release_delay)
}
fn | (&mut self, cx: &mut Context, packet: Packet) -> Result<(), Error> {
self.send_wrapper
.send(&mut self.sock, (packet, self.settings.remote), cx)
}
fn reset_timeout(&mut self) {
self.timeout_timer.reset(time::Instant::from_std(
Instant::now() + self.listen_timeout,
))
}
fn on_ack_event(&mut self, cx: &mut Context) -> Result<(), Error> {
// get the ACK number: the first missing sequence number, or lrsn if nothing is missing
let ack_number = match self.loss_list.first() {
// There is an element in the loss list
Some(i) => i.seq_num,
// No elements, use lrsn, as it's already exclusive
None => self.lrsn,
};
// 2) If (a) the ACK number equals to the largest ACK number ever
// acknowledged by ACK2
if ack_number == self.lr_ack_acked.1 {
// stop (do not send this ACK).
return Ok(());
}
// make sure this ACK number is greater than or equal to one sent previously
if let Some(w) = self.ack_history_window.last() {
assert!(w.ack_number <= ack_number);
}
trace!(
"Sending ACK; ack_num={:?}, lr_ack_acked={:?}",
ack_number,
self.lr_ack_acked.1
);
if let Some(&AckHistoryEntry {
ack_number: last_ack_number,
timestamp: last_timestamp,
..
}) = self.ack_history_window.first()
{
// or, (b) it is equal to the ACK number in the
// last ACK
if last_ack_number == ack_number &&
// and the time interval between these two ACK packets is
// less than 2 RTTs,
(self.get_timestamp_now() - last_timestamp) < (self.rtt * 2)
{
// stop (do not send this ACK).
return Ok(());
}
}
// 3) Assign this ACK a unique increasing ACK sequence number.
let ack_seq_num = self.next_ack;
self.next_ack += 1;
// 4) Calculate the packet arrival speed according to the following
// algorithm:
let packet_recv_rate = {
if self.packet_history_window.len() < 16 {
0
} else {
// Calculate the median value of the last 16 packet arrival
// intervals (AI) using the values stored in PKT History Window.
let mut last_16: Vec<_> = self.packet_history_window
[self.packet_history_window.len() - 16..]
.iter()
.map(|&(_, ts)| ts)
.collect();
last_16.sort();
// the median timestamp
let ai = last_16[last_16.len() / 2];
// In these 16 values, remove those either greater than AI*8 or
// less than AI/8.
let filtered: Vec<i32> = last_16
.iter()
.filter(|&&n| n / 8 < ai && n > ai / 8)
.cloned()
.collect();
// If more than 8 values are left, calculate the
// average of the left values AI', and the packet arrival speed is
// 1/AI' (number of packets per second). Otherwise, return 0.
if filtered.len() > 8 {
(filtered.iter().fold(0i64, |sum, &val| sum + i64::from(val))
/ (filtered.len() as i64)) as i32
} else {
0
}
}
};
// 5) Calculate the estimated link capacity according to the following algorithm:
let est_link_cap = {
if self.packet_pair_window.len() < 16 {
0
} else {
// Calculate the median value of the last 16 packet pair
// intervals (PI) using the values in Packet Pair Window, and the
// link capacity is 1/PI (number of packets per second).
let pi = {
let mut last_16: Vec<_> = self.packet_pair_window
[self.packet_pair_window.len() - 16..]
.iter()
.map(|&(_, time)| time)
.collect();
last_16.sort();
last_16[last_16.len() / 2]
};
// Multiply by 1M because pi is in microseconds
// pi is in us/packet
(1.0e6 / (pi as f32)) as i32
}
};
// Pack the ACK packet with RTT, RTT Variance, and flow window size (available
// receiver buffer size).
let ack = self.make_control_packet(ControlTypes::Ack {
ack_seq_num,
ack_number,
rtt: Some(self.rtt),
rtt_variance: Some(self.rtt_variance),
buffer_available: None, // TODO: add this
packet_recv_rate: Some(packet_recv_rate),
est_link_cap: Some(est_link_cap),
});
// add it to the ack history
let now = self.get_timestamp_now();
self.ack_history_window.push(AckHistoryEntry {
ack_number,
ack_seq_num,
timestamp: now,
});
self.send_to_remote(cx, ack)?;
Ok(())
}
fn on_nak_event(&mut self, cx: &mut Context) -> Result<(), Error> {
// reset the NAK timer; rtt and rtt_variance are in microseconds (SYN = 10_000 us)
// NAK is used to trigger a negative acknowledgement (NAK). Its period
// is dynamically updated to 4 * RTT + RTTVar + SYN, where RTTVar is the
// variance of RTT samples.
let nak_interval_us = 4 * self.rtt as u64 + self.rtt_variance as u64 + 10_000;
self.nak_interval.reset(time::Instant::from_std(
Instant::now() + Duration::from_micros(nak_interval_us),
));
// Search the receiver's loss list, find out all those sequence numbers
// whose last feedback time is k*RTT before, where k is initialized as 2
// and increased by 1 each time the number is fed back. Compress
// (according to section 6.4) and send these numbers back to the sender
// in an NAK packet.
let now = self.get_timestamp_now();
// increment k and change feedback time, returning sequence numbers
let seq_nums = {
let mut ret = Vec::new();
let rtt = self.rtt;
for pak in self
.loss_list
.iter_mut()
.filter(|lle| lle.feedback_time < now - lle.k * rtt)
{
pak.k += 1;
pak.feedback_time = now;
ret.push(pak.seq_num);
}
ret
};
if seq_nums.is_empty() {
return Ok(());
}
// send the nak
self.send_nak(cx, seq_nums.into_iter())?;
Ok(())
}
// checks the ACK and NAK timers
// if one of them has fired, run the corresponding event handler
fn check_timers(&mut self, cx: &mut Context) -> Result<(), Error> {
// see if we need to ACK or NAK
if let Poll::Ready(Some(_)) = self.ack_interval().poll_next(cx) {
self.on_ack_event(cx)?;
}
if let Poll::Ready(_) = self.nak_interval().poll(cx) {
self.on_nak_event(cx)?;
}
// no need to do anything specific
let _ = self.release_delay().poll(cx);
Ok(())
}
// handles a SRT control packet
fn handle_srt_control_packet(&mut self, pack: &SrtControlPacket) -> Result<(), Error> {
use self::SrtControlPacket::*;
match pack {
HandshakeRequest(_) | HandshakeResponse(_) => {
warn!("Received handshake SRT packet, HSv5 expected");
}
_ => unimplemented!(),
}
Ok(())
}
// handles an incoming packet
fn handle_packet(
&mut self,
cx: &mut Context,
packet: &Packet,
from: &SocketAddr,
) -> Result<(), Error> {
// We don't care about packets from elsewhere
if *from != self.settings.remote {
info!("Packet received from unknown address: {:?}", from);
return Ok(());
}
if self.settings.local_sockid != packet.dest_sockid() {
// packet isn't applicable
info!(
"Packet send to socket id ({}) that does not match local ({})",
packet.dest_sockid().0,
self.settings.local_sockid.0
);
return Ok(());
}
trace!("Received packet: {:?}", packet);
match packet {
Packet::Control(ctrl) => {
// handle the control packet
match &ctrl.control_type {
ControlTypes::Ack { .. } => warn!("Receiver received ACK packet, unusual"),
ControlTypes::Ack2(seq_num) => self.handle_ack2(*seq_num)?,
ControlTypes::DropRequest { .. } => unimplemented!(),
ControlTypes::Handshake(_) => {
if let Some(ret) = self.hs_returner.as_ref() {
if let Some(pack) = (*ret)(&packet) {
self.send_to_remote(cx, pack)?;
}
}
}
ControlTypes::KeepAlive => {} // TODO: actually reset EXP etc
ControlTypes::Nak { .. } => warn!("Receiver received NAK packet, unusual"),
ControlTypes::Shutdown => {
info!("Shutdown packet received, flushing receiver...");
self.shutdown_flag = true;
} // end of stream
ControlTypes::Srt(srt_packet) => {
self.handle_srt_control_packet(srt_packet)?;
}
}
}
Packet::Data(data) => self.handle_data_packet(cx, &data)?,
};
Ok(())
}
fn handle_ack2(&mut self, seq_num: i32) -> Result<(), Error> {
// 1) Locate the related ACK in the ACK History Window according to the
// ACK sequence number in this ACK2.
let id_in_wnd = match self
.ack_history_window
.as_slice()
.binary_search_by(|entry| entry.ack_seq_num.cmp(&seq_num))
{
Ok(i) => Some(i),
Err(_) => None,
};
if let Some(id) = id_in_wnd {
let AckHistoryEntry {
timestamp: send_timestamp,
ack_number,
..
} = self.ack_history_window[id];
// 2) Update the largest ACK number ever been acknowledged.
self.lr_ack_acked = (seq_num, ack_number);
// 3) Calculate new rtt according to the ACK2 arrival time and the ACK
// departure time, and update the RTT value as: RTT = (RTT * 7 +
// rtt) / 8
let immediate_rtt = self.get_timestamp_now() - send_timestamp;
self.rtt = (self.rtt * 7 + immediate_rtt) / 8;
// 4) Update RTTVar by: RTTVar = (RTTVar * 3 + abs(RTT - rtt)) / 4.
self.rtt_variance =
(self.rtt_variance * 3 + (self.rtt - immediate_rtt).abs()) / 4;
// 5) Update both ACK and NAK period to 4 * RTT + RTTVar + SYN.
let ack_us = 4 * self.rtt as u64 + self.rtt_variance as u64 + 10_000;
self.ack_interval = interval(Duration::from_micros(ack_us));
} else {
warn!(
"ACK sequence number in ACK2 packet not found in ACK history: {}",
seq_num
);
}
Ok(())
}
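// Worked example for the smoothing above (illustrative, values assumed):
// with rtt = 10_000 us and a new sample immediate_rtt = 12_000 us, the
// update gives rtt = (10_000 * 7 + 12_000) / 8 = 10_250 us, and the ACK/NAK
// period becomes 4 * rtt + rtt_variance + 10_000 us (SYN).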
fn handle_data_packet(&mut self, cx: &mut Context, data: &DataPacket) -> Result<(), Error> {
let now = self.get_timestamp_now();
// 1) Reset the ExpCount to 1. If there is no unacknowledged data
// packet, or if this is an ACK or NAK control packet, reset the EXP
// timer.
self.exp_count = 1;
// 2&3 don't apply
// 4) If the sequence number of the current data packet is 16n + 1,
// where n is an integer, record the time interval between this packet
// and the previous probe packet (they form a probing packet pair)
if data.seq_number % 16 == 0 {
self.probe_time = Some(now)
} else if data.seq_number % 16 == 1 {
// if there is an entry
if let Some(pt) = self.probe_time {
// calculate and insert
self.packet_pair_window.push((data.seq_number, now - pt));
// reset
self.probe_time = None;
}
}
// 5) Record the packet arrival time in PKT History Window.
self.packet_history_window.push((data.seq_number, now));
// 6)
// a. If the sequence number of the current data packet is greater
// than LRSN, put all the sequence numbers between (but
// excluding) these two values into the receiver's loss list and
// send them to the sender in an NAK packet.
match data.seq_number.cmp(&self.lrsn) {
Ordering::Greater => {
// lrsn is one past the latest packet received, so the NAK range starts there
for i in seq_num_range(self.lrsn, data.seq_number) {
self.loss_list.push(LossListEntry {
seq_num: i,
feedback_time: now,
// k is initialized at 2, as stated on page 12 (very end)
k: 2,
})
}
self.send_nak(cx, seq_num_range(self.lrsn, data.seq_number))?;
}
// b. If the sequence number is less than LRSN, remove it from the
// receiver's loss list.
Ordering::Less => {
match self.loss_list[..].binary_search_by(|ll| ll.seq_num.cmp(&data.seq_number)) {
Ok(i) => {
self.loss_list.remove(i);
}
Err(_) => {
debug!(
"Packet received that's not in the loss list: {:?}, loss_list={:?}",
data.seq_number,
self.loss_list
.iter()
.map(|ll| ll.seq_num.as_raw())
.collect::<Vec<_>>()
);
}
};
}
Ordering::Equal => {}
}
// record that we got this packet
self.lrsn = cmp::max(data.seq_number + 1, self.lrsn);
// we've already gotten this packet, drop it
if self.buffer.next_release() > data.seq_number {
debug!("Received packet {:?} twice", data.seq_number);
return Ok(());
}
self.buffer.add(data.clone());
trace!(
"Received data packet seq_num={}, loc={:?}, buffer={:?}",
data.seq_number,
data.message_loc,
self.buffer,
);
Ok(())
}
// send a NAK, and return the future
fn send_nak<I>(&mut self, cx: &mut Context, lost_seq_nums: I) -> Result<(), Error>
where
I: Iterator<Item = SeqNumber>,
{
let vec: Vec<_> = lost_seq_nums.collect();
debug!("Sending NAK for={:?}", vec);
let pack = self.make_control_packet(ControlTypes::Nak(
compress_loss_list(vec.iter().cloned()).collect(),
));
self.send_to_remote(cx, pack)?;
Ok(())
}
fn make_control_packet(&self, control_type: ControlTypes) -> Packet {
Packet::Control(ControlPacket {
timestamp: self.get_timestamp_now(),
dest_sockid: self.settings.remote_sockid,
control_type,
})
}
/// Timestamp in us
fn get_timestamp_now(&self) -> i32 {
self.settings.get_timestamp_now()
}
}
impl<T> Stream for Receiver<T>
where
T: Stream<Item = Result<(Packet, SocketAddr), Error>>
+ Sink<(Packet, SocketAddr), Error = Error>
+ Unpin,
{
type Item = Result<(Instant, Bytes), Error>;
fn poll_next(
self: Pin<&mut Self>,
cx: &mut Context,
) -> Poll<Option<Result<(Instant, Bytes), Error>>> {
let pin = self.get_mut();
pin.check_timers(cx)?;
pin.send_wrapper.poll_send(&mut pin.sock, cx)?;
// either way we want to continue
let _ = pin.sock().poll_flush(cx)?;
loop {
// try to release packets
if let Some((ts, p)) = pin
.buffer
.next_msg_tsbpd(pin.settings.tsbpd_latency, pin.settings.socket_start_time)
{
return Poll::Ready(Some(Ok((
pin.settings.socket_start_time + Duration::from_micros(ts as u64),
p,
))));
}
// drop packets
// TODO: do something with this
let _dropped = pin
.buffer
.drop_too_late_packets(pin.settings.tsbpd_latency, pin.settings.socket_start_time);
if let Poll::Ready(_) = pin.timeout_timer().poll(cx) {
pin.exp_count += 1;
pin.reset_timeout();
}
// if there is a packet ready, set the timeout timer for it
if let Some(release_time) = pin.buffer.next_message_release_time(
pin.settings.socket_start_time,
pin.settings.tsbpd_latency,
) {
pin.release_delay
.reset(time::Instant::from_std(release_time));
let _ = Pin::new(&mut pin.release_delay).poll(cx);
// if we are setup to shutdown, then the internal socket
// returned None, so we shouldn't poll it again, as it may panic
// returning Pending is okay assuming release_delay is greater
// than zero. Technically this should check the return value of
// the above poll, but it seems to work.
if pin.shutdown_flag {
return Poll::Pending;
}
}
// if there isn't a complete message at the beginning of the buffer and we are supposed to be shutting down, shut down
if pin.shutdown_flag && pin.buffer.next_msg_ready().is_none() {
info!("Shutdown received and all packets released, finishing up");
return Poll::Ready(None);
}
// TODO: exp_count
let (packet, addr) = match ready!(pin.sock().poll_next(cx)) {
Some(Ok(p)) => p,
Some(Err(e)) => {
warn!("Error reading packet: {:?}", e);
continue;
}
None => {
// end of stream, shutdown
pin.shutdown_flag = true;
continue;
}
};
// handle the socket
// packet was received, reset exp_count
pin.exp_count = 1;
pin.reset_timeout();
pin.handle_packet(cx, &packet, &addr)?;
// TODO: should this be here for optimal performance?
let _ = pin.sock().poll_flush(cx)?;
}
}
}
| send_to_remote | identifier_name |
receiver.rs | use bytes::Bytes;
use failure::Error;
use futures::prelude::*;
use futures::ready;
use log::{debug, info, trace, warn};
use tokio::time::{self, delay_for, interval, Delay, Interval};
use crate::connection::HandshakeReturner;
use crate::loss_compression::compress_loss_list;
use crate::packet::{ControlPacket, ControlTypes, DataPacket, Packet, SrtControlPacket};
use crate::sink_send_wrapper::SinkSendWrapper;
use crate::{seq_number::seq_num_range, ConnectionSettings, SeqNumber};
use std::cmp;
use std::cmp::Ordering;
use std::iter::Iterator;
use std::net::SocketAddr;
use std::pin::Pin;
use std::task::{Context, Poll};
use std::time::{Duration, Instant};
mod buffer;
use self::buffer::RecvBuffer;
struct LossListEntry {
seq_num: SeqNumber,
// last time it was fed back in a NAK
feedback_time: i32,
// the number of times this entry has been fed back into NAK
k: i32,
}
struct AckHistoryEntry {
/// the highest packet sequence number received that this ACK packet ACKs + 1
ack_number: SeqNumber,
/// the ack sequence number
ack_seq_num: i32,
/// timestamp that it was sent at
timestamp: i32,
}
pub struct Receiver<T> {
settings: ConnectionSettings,
// Function to return handshakes with
hs_returner: Option<HandshakeReturner>,
/// the round trip time, in microseconds
/// is calculated each ACK2
rtt: i32,
/// the round trip time variance, in microseconds
/// is calculated each ACK2
rtt_variance: i32,
/// the future to send or receive packets
sock: T,
/// The time to wait for a packet to arrive
listen_timeout: Duration,
/// https://tools.ietf.org/html/draft-gg-udt-03#page-12
/// Receiver's Loss List: It is a list of tuples whose values include:
/// the sequence numbers of detected lost data packets, the latest
/// feedback time of each tuple, and a parameter k that is the number
/// of times each one has been fed back in NAK. Values are stored in
/// the increasing order of packet sequence numbers.
loss_list: Vec<LossListEntry>,
/// https://tools.ietf.org/html/draft-gg-udt-03#page-12
/// ACK History Window: A circular array of each sent ACK and the time
/// it is sent out. The most recent value will overwrite the oldest
/// one if no more free space in the array.
ack_history_window: Vec<AckHistoryEntry>,
/// https://tools.ietf.org/html/draft-gg-udt-03#page-12
/// PKT History Window: A circular array that records the arrival time
/// of each data packet.
///
/// First is sequence number, second is timestamp
packet_history_window: Vec<(SeqNumber, i32)>,
/// https://tools.ietf.org/html/draft-gg-udt-03#page-12
/// Packet Pair Window: A circular array that records the time
/// interval between each probing packet pair.
///
/// First is seq num, second is time
packet_pair_window: Vec<(SeqNumber, i32)>,
/// Wakes the thread when an ACK is to be sent
ack_interval: Interval,
/// Wakes the thread when a NAK is to be sent
nak_interval: Delay,
/// the highest received packet sequence number + 1
lrsn: SeqNumber,
/// The number of consecutive timeouts
exp_count: i32,
/// The ID of the next ack packet
next_ack: i32,
/// The timestamp of the probe time
/// Used to see duration between packets
probe_time: Option<i32>,
timeout_timer: Delay,
/// The ACK sequence number of the largest ACK2 received, and the ack number
lr_ack_acked: (i32, SeqNumber),
/// The buffer
buffer: RecvBuffer,
/// Shutdown flag. This is set so when the buffer is flushed, it returns Async::Ready(None)
shutdown_flag: bool,
/// Release delay
/// wakes the thread when there is a new packet to be released
release_delay: Delay,
/// A buffer of packets to send to the underlying sink
send_wrapper: SinkSendWrapper<(Packet, SocketAddr)>,
}
impl<T> Receiver<T>
where
T: Stream<Item = Result<(Packet, SocketAddr), Error>>
+ Sink<(Packet, SocketAddr), Error = Error>
+ Unpin,
{
pub fn new(
sock: T,
settings: ConnectionSettings,
hs_returner: Option<HandshakeReturner>,
) -> Receiver<T> {
let init_seq_num = settings.init_seq_num;
info!(
"Receiving started from {:?}, with latency={:?}",
settings.remote, settings.tsbpd_latency
);
Receiver {
settings,
hs_returner,
sock,
rtt: 10_000,
rtt_variance: 1_000,
listen_timeout: Duration::from_secs(1),
loss_list: Vec::new(),
ack_history_window: Vec::new(),
packet_history_window: Vec::new(),
packet_pair_window: Vec::new(),
ack_interval: interval(Duration::from_millis(10)),
nak_interval: delay_for(Duration::from_millis(10)),
lrsn: init_seq_num, // at start, we have received everything until the first packet, exclusive (aka nothing)
next_ack: 1,
exp_count: 1,
probe_time: None,
timeout_timer: delay_for(Duration::from_secs(1)),
lr_ack_acked: (0, init_seq_num),
buffer: RecvBuffer::new(init_seq_num),
shutdown_flag: false,
release_delay: delay_for(Duration::from_secs(0)), // start with an empty delay
send_wrapper: SinkSendWrapper::new(),
}
}
pub fn settings(&self) -> &ConnectionSettings {
&self.settings
}
pub fn remote(&self) -> SocketAddr {
self.settings.remote
}
fn timeout_timer(&mut self) -> Pin<&mut Delay> {
Pin::new(&mut self.timeout_timer)
}
fn sock(&mut self) -> Pin<&mut T> {
Pin::new(&mut self.sock)
}
fn ack_interval(&mut self) -> Pin<&mut Interval> {
Pin::new(&mut self.ack_interval)
}
fn nak_interval(&mut self) -> Pin<&mut Delay> {
Pin::new(&mut self.nak_interval)
}
fn release_delay(&mut self) -> Pin<&mut Delay> {
Pin::new(&mut self.release_delay)
}
fn send_to_remote(&mut self, cx: &mut Context, packet: Packet) -> Result<(), Error> {
self.send_wrapper
.send(&mut self.sock, (packet, self.settings.remote), cx)
}
fn reset_timeout(&mut self) {
self.timeout_timer.reset(time::Instant::from_std(
Instant::now() + self.listen_timeout,
))
}
fn on_ack_event(&mut self, cx: &mut Context) -> Result<(), Error> {
// get the ACK number: the first missing sequence number, or lrsn if nothing is missing
let ack_number = match self.loss_list.first() {
// There is an element in the loss list
Some(i) => i.seq_num,
// No elements, use lrsn, as it's already exclusive
None => self.lrsn,
};
// 2) If (a) the ACK number equals to the largest ACK number ever
// acknowledged by ACK2
if ack_number == self.lr_ack_acked.1 {
// stop (do not send this ACK).
return Ok(());
}
// make sure this ACK number is greater than or equal to one sent previously
if let Some(w) = self.ack_history_window.last() {
assert!(w.ack_number <= ack_number);
}
trace!(
"Sending ACK; ack_num={:?}, lr_ack_acked={:?}",
ack_number,
self.lr_ack_acked.1
);
if let Some(&AckHistoryEntry {
ack_number: last_ack_number,
timestamp: last_timestamp,
..
}) = self.ack_history_window.first()
{
// or, (b) it is equal to the ACK number in the
// last ACK
if last_ack_number == ack_number &&
// and the time interval between these two ACK packets is
// less than 2 RTTs,
(self.get_timestamp_now() - last_timestamp) < (self.rtt * 2)
{
// stop (do not send this ACK).
return Ok(());
}
}
// 3) Assign this ACK a unique increasing ACK sequence number.
let ack_seq_num = self.next_ack;
self.next_ack += 1;
// 4) Calculate the packet arrival speed according to the following
// algorithm:
let packet_recv_rate = {
if self.packet_history_window.len() < 16 {
0
} else {
// Calculate the median value of the last 16 packet arrival
// intervals (AI) using the values stored in PKT History Window.
let mut last_16: Vec<_> = self.packet_history_window
[self.packet_history_window.len() - 16..]
.iter()
.map(|&(_, ts)| ts)
.collect();
last_16.sort();
// the median timestamp
let ai = last_16[last_16.len() / 2];
// In these 16 values, remove those either greater than AI*8 or
// less than AI/8.
let filtered: Vec<i32> = last_16
.iter()
.filter(|&&n| n / 8 < ai && n > ai / 8)
.cloned()
.collect();
// If more than 8 values are left, calculate the
// average of the left values AI', and the packet arrival speed is
// 1/AI' (number of packets per second). Otherwise, return 0.
if filtered.len() > 8 {
(filtered.iter().fold(0i64, |sum, &val| sum + i64::from(val))
/ (filtered.len() as i64)) as i32
} else {
0
}
}
};
// 5) Calculate the estimated link capacity according to the following algorithm:
let est_link_cap = {
if self.packet_pair_window.len() < 16 {
0
} else {
// Calculate the median value of the last 16 packet pair
// intervals (PI) using the values in Packet Pair Window, and the
// link capacity is 1/PI (number of packets per second).
let pi = {
let mut last_16: Vec<_> = self.packet_pair_window
[self.packet_pair_window.len() - 16..]
.iter()
.map(|&(_, time)| time)
.collect();
last_16.sort();
last_16[last_16.len() / 2]
};
// Multiply by 1M because pi is in microseconds
// pi is in us/packet
(1.0e6 / (pi as f32)) as i32
}
};
// Pack the ACK packet with RTT, RTT Variance, and flow window size (available
// receiver buffer size).
let ack = self.make_control_packet(ControlTypes::Ack {
ack_seq_num,
ack_number,
rtt: Some(self.rtt),
rtt_variance: Some(self.rtt_variance),
buffer_available: None, // TODO: add this
packet_recv_rate: Some(packet_recv_rate),
est_link_cap: Some(est_link_cap),
});
// add it to the ack history
let now = self.get_timestamp_now();
self.ack_history_window.push(AckHistoryEntry {
ack_number,
ack_seq_num,
timestamp: now,
});
self.send_to_remote(cx, ack)?;
Ok(())
}
fn on_nak_event(&mut self, cx: &mut Context) -> Result<(), Error> {
// reset the NAK timer; rtt and rtt_variance are in microseconds (SYN = 10_000 us)
// NAK is used to trigger a negative acknowledgement (NAK). Its period
// is dynamically updated to 4 * RTT + RTTVar + SYN, where RTTVar is the
// variance of RTT samples.
let nak_interval_us = 4 * self.rtt as u64 + self.rtt_variance as u64 + 10_000;
self.nak_interval.reset(time::Instant::from_std(
Instant::now() + Duration::from_micros(nak_interval_us),
));
// Search the receiver's loss list, find out all those sequence numbers
// whose last feedback time is k*RTT before, where k is initialized as 2
// and increased by 1 each time the number is fed back. Compress
// (according to section 6.4) and send these numbers back to the sender
// in an NAK packet.
let now = self.get_timestamp_now();
// increment k and change feedback time, returning sequence numbers
let seq_nums = {
let mut ret = Vec::new();
let rtt = self.rtt;
for pak in self
.loss_list
.iter_mut()
.filter(|lle| lle.feedback_time < now - lle.k * rtt)
{
pak.k += 1;
pak.feedback_time = now;
ret.push(pak.seq_num);
}
ret
};
if seq_nums.is_empty() {
return Ok(());
}
// send the nak
self.send_nak(cx, seq_nums.into_iter())?;
Ok(())
}
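// Illustrative schedule (values assumed): with rtt = 10_000 us, a loss-list
// entry with k = 2 becomes eligible for re-feedback once its feedback_time
// is more than 20_000 us in the past; each time it is resent, k increments,
// so the next resend waits roughly 30_000 us, then 40_000 us, and so on.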
// checks the ACK and NAK timers
// if one of them has fired, run the corresponding event handler
fn check_timers(&mut self, cx: &mut Context) -> Result<(), Error> {
// see if we need to ACK or NAK
if let Poll::Ready(Some(_)) = self.ack_interval().poll_next(cx) {
self.on_ack_event(cx)?;
}
if let Poll::Ready(_) = self.nak_interval().poll(cx) {
self.on_nak_event(cx)?;
}
// no need to do anything specific
let _ = self.release_delay().poll(cx);
Ok(())
}
// handles a SRT control packet
fn handle_srt_control_packet(&mut self, pack: &SrtControlPacket) -> Result<(), Error> {
use self::SrtControlPacket::*;
match pack {
HandshakeRequest(_) | HandshakeResponse(_) => {
warn!("Received handshake SRT packet, HSv5 expected");
}
_ => unimplemented!(),
}
Ok(())
}
// handles an incoming packet
fn handle_packet(
&mut self,
cx: &mut Context,
packet: &Packet,
from: &SocketAddr,
) -> Result<(), Error> {
// We don't care about packets from elsewhere
if *from != self.settings.remote {
info!("Packet received from unknown address: {:?}", from);
return Ok(());
}
if self.settings.local_sockid != packet.dest_sockid() {
// packet isn't applicable | return Ok(());
}
trace!("Received packet: {:?}", packet);
match packet {
Packet::Control(ctrl) => {
// handle the control packet
match &ctrl.control_type {
ControlTypes::Ack { .. } => warn!("Receiver received ACK packet, unusual"),
ControlTypes::Ack2(seq_num) => self.handle_ack2(*seq_num)?,
ControlTypes::DropRequest { .. } => unimplemented!(),
ControlTypes::Handshake(_) => {
if let Some(ret) = self.hs_returner.as_ref() {
if let Some(pack) = (*ret)(&packet) {
self.send_to_remote(cx, pack)?;
}
}
}
ControlTypes::KeepAlive => {} // TODO: actually reset EXP etc
ControlTypes::Nak { .. } => warn!("Receiver received NAK packet, unusual"),
ControlTypes::Shutdown => {
info!("Shutdown packet received, flushing receiver...");
self.shutdown_flag = true;
} // end of stream
ControlTypes::Srt(srt_packet) => {
self.handle_srt_control_packet(srt_packet)?;
}
}
}
Packet::Data(data) => self.handle_data_packet(cx, &data)?,
};
Ok(())
}
fn handle_ack2(&mut self, seq_num: i32) -> Result<(), Error> {
// 1) Locate the related ACK in the ACK History Window according to the
// ACK sequence number in this ACK2.
let id_in_wnd = match self
.ack_history_window
.as_slice()
.binary_search_by(|entry| entry.ack_seq_num.cmp(&seq_num))
{
Ok(i) => Some(i),
Err(_) => None,
};
if let Some(id) = id_in_wnd {
let AckHistoryEntry {
timestamp: send_timestamp,
ack_number,
..
} = self.ack_history_window[id];
// 2) Update the largest ACK number ever been acknowledged.
self.lr_ack_acked = (seq_num, ack_number);
// 3) Calculate new rtt according to the ACK2 arrival time and the ACK
// departure time, and update the RTT value as: RTT = (RTT * 7 +
// rtt) / 8
let immediate_rtt = self.get_timestamp_now() - send_timestamp;
self.rtt = (self.rtt * 7 + immediate_rtt) / 8;
// 4) Update RTTVar by: RTTVar = (RTTVar * 3 + abs(RTT - rtt)) / 4.
self.rtt_variance =
(self.rtt_variance * 3 + (self.rtt - immediate_rtt).abs()) / 4;
// 5) Update both ACK and NAK period to 4 * RTT + RTTVar + SYN.
let ack_us = 4 * self.rtt as u64 + self.rtt_variance as u64 + 10_000;
self.ack_interval = interval(Duration::from_micros(ack_us));
} else {
warn!(
"ACK sequence number in ACK2 packet not found in ACK history: {}",
seq_num
);
}
Ok(())
}
fn handle_data_packet(&mut self, cx: &mut Context, data: &DataPacket) -> Result<(), Error> {
let now = self.get_timestamp_now();
// 1) Reset the ExpCount to 1. If there is no unacknowledged data
// packet, or if this is an ACK or NAK control packet, reset the EXP
// timer.
self.exp_count = 1;
// 2&3 don't apply
// 4) If the sequence number of the current data packet is 16n + 1,
// where n is an integer, record the time interval between this packet
// and the previous probe packet (they form a probing packet pair)
if data.seq_number % 16 == 0 {
self.probe_time = Some(now)
} else if data.seq_number % 16 == 1 {
// if there is an entry
if let Some(pt) = self.probe_time {
// calculate and insert
self.packet_pair_window.push((data.seq_number, now - pt));
// reset
self.probe_time = None;
}
}
// 5) Record the packet arrival time in PKT History Window.
self.packet_history_window.push((data.seq_number, now));
// 6)
// a. If the sequence number of the current data packet is greater
// than LRSN, put all the sequence numbers between (but
// excluding) these two values into the receiver's loss list and
// send them to the sender in an NAK packet.
match data.seq_number.cmp(&self.lrsn) {
Ordering::Greater => {
// lrsn is one past the latest packet received, so the NAK range starts there
for i in seq_num_range(self.lrsn, data.seq_number) {
self.loss_list.push(LossListEntry {
seq_num: i,
feedback_time: now,
// k is initialized at 2, as stated on page 12 (very end)
k: 2,
})
}
self.send_nak(cx, seq_num_range(self.lrsn, data.seq_number))?;
}
// b. If the sequence number is less than LRSN, remove it from the
// receiver's loss list.
Ordering::Less => {
match self.loss_list[..].binary_search_by(|ll| ll.seq_num.cmp(&data.seq_number)) {
Ok(i) => {
self.loss_list.remove(i);
}
Err(_) => {
debug!(
"Packet received that's not in the loss list: {:?}, loss_list={:?}",
data.seq_number,
self.loss_list
.iter()
.map(|ll| ll.seq_num.as_raw())
.collect::<Vec<_>>()
);
}
};
}
Ordering::Equal => {}
}
// record that we got this packet
self.lrsn = cmp::max(data.seq_number + 1, self.lrsn);
// we've already gotten this packet, drop it
if self.buffer.next_release() > data.seq_number {
debug!("Received packet {:?} twice", data.seq_number);
return Ok(());
}
self.buffer.add(data.clone());
trace!(
"Received data packet seq_num={}, loc={:?}, buffer={:?}",
data.seq_number,
data.message_loc,
self.buffer,
);
Ok(())
}
// send a NAK, and return the future
fn send_nak<I>(&mut self, cx: &mut Context, lost_seq_nums: I) -> Result<(), Error>
where
I: Iterator<Item = SeqNumber>,
{
let vec: Vec<_> = lost_seq_nums.collect();
debug!("Sending NAK for={:?}", vec);
let pack = self.make_control_packet(ControlTypes::Nak(
compress_loss_list(vec.iter().cloned()).collect(),
));
self.send_to_remote(cx, pack)?;
Ok(())
}
fn make_control_packet(&self, control_type: ControlTypes) -> Packet {
Packet::Control(ControlPacket {
timestamp: self.get_timestamp_now(),
dest_sockid: self.settings.remote_sockid,
control_type,
})
}
/// Timestamp in us
fn get_timestamp_now(&self) -> i32 {
self.settings.get_timestamp_now()
}
}
impl<T> Stream for Receiver<T>
where
T: Stream<Item = Result<(Packet, SocketAddr), Error>>
+ Sink<(Packet, SocketAddr), Error = Error>
+ Unpin,
{
type Item = Result<(Instant, Bytes), Error>;
fn poll_next(
self: Pin<&mut Self>,
cx: &mut Context,
) -> Poll<Option<Result<(Instant, Bytes), Error>>> {
let pin = self.get_mut();
pin.check_timers(cx)?;
pin.send_wrapper.poll_send(&mut pin.sock, cx)?;
// either way we want to continue
let _ = pin.sock().poll_flush(cx)?;
loop {
// try to release packets
if let Some((ts, p)) = pin
.buffer
.next_msg_tsbpd(pin.settings.tsbpd_latency, pin.settings.socket_start_time)
{
return Poll::Ready(Some(Ok((
pin.settings.socket_start_time + Duration::from_micros(ts as u64),
p,
))));
}
// drop packets
// TODO: do something with this
let _dropped = pin
.buffer
.drop_too_late_packets(pin.settings.tsbpd_latency, pin.settings.socket_start_time);
if let Poll::Ready(_) = pin.timeout_timer().poll(cx) {
pin.exp_count += 1;
pin.reset_timeout();
}
// if there is a packet ready, set the timeout timer for it
if let Some(release_time) = pin.buffer.next_message_release_time(
pin.settings.socket_start_time,
pin.settings.tsbpd_latency,
) {
pin.release_delay
.reset(time::Instant::from_std(release_time));
let _ = Pin::new(&mut pin.release_delay).poll(cx);
// if we are setup to shutdown, then the internal socket
// returned None, so we shouldn't poll it again, as it may panic
// returning Pending is okay assuming release_delay is greater
// than zero. Technically this should check the return value of
// the above poll, but it seems to work.
if pin.shutdown_flag {
return Poll::Pending;
}
}
// if there isn't a complete message at the beginning of the buffer and we are supposed to be shutting down, shut down
if pin.shutdown_flag && pin.buffer.next_msg_ready().is_none() {
info!("Shutdown received and all packets released, finishing up");
return Poll::Ready(None);
}
// TODO: exp_count
let (packet, addr) = match ready!(pin.sock().poll_next(cx)) {
Some(Ok(p)) => p,
Some(Err(e)) => {
warn!("Error reading packet: {:?}", e);
continue;
}
None => {
// end of stream, shutdown
pin.shutdown_flag = true;
continue;
}
};
// handle the socket
// packet was received, reset exp_count
pin.exp_count = 1;
pin.reset_timeout();
pin.handle_packet(cx, &packet, &addr)?;
// TODO: should this be here for optimal performance?
let _ = pin.sock().poll_flush(cx)?;
}
}
} | info!(
"Packet send to socket id ({}) that does not match local ({})",
packet.dest_sockid().0,
self.settings.local_sockid.0
); | random_line_split |
cluster.go | // Copyright Jetstack Ltd. See LICENSE for details.
// Copyright © 2017 The Kubicorn Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package v1alpha1
import (
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
const (
CloudAmazon = "amazon"
CloudAzure = "azure"
CloudGoogle = "google"
CloudBaremetal = "baremetal"
CloudDigitalOcean = "digitalocean"
)
const (
ClusterTypeHub = "hub"
ClusterTypeClusterSingle = "cluster-single"
ClusterTypeClusterMulti = "cluster-multi"
)
const (
// represents Terraform in a destroy state
StateDestroy = "destroy"
)
const (
PrometheusModeFull = "Full"
PrometheusModeExternalExportersOnly = "ExternalExportersOnly"
PrometheusModeExternalScrapeTargetsOnly = "ExternalScrapeTargetsOnly"
)
const (
CalicoBackendEtcd ClusterKubernetesCalicoBackend = "etcd"
CalicoBackendKubernetes ClusterKubernetesCalicoBackend = "kubernetes"
)
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// +k8s:openapi-gen=true
// +resource:path=clusters
type Cluster struct {
metav1.TypeMeta `json:",inline"`
metav1.ObjectMeta `json:"metadata,omitempty"`
CloudId string `json:"cloudId,omitempty"`
InstancePools []InstancePool `json:"instancePools,omitempty"`
Cloud string `json:"cloud,omitempty"`
Location string `json:"location,omitempty"`
Network *Network `json:"network,omitempty"`
LoggingSinks []*LoggingSink `json:"loggingSinks,omitempty"`
Values *Values `json:"values,omitempty"`
KubernetesAPI *KubernetesAPI `json:"kubernetesAPI,omitempty"`
GroupIdentifier string `json:"groupIdentifier,omitempty"`
VaultHelper *ClusterVaultHelper `json:"vaultHelper,omitempty"`
Environment string `json:"environment,omitempty"`
Kubernetes *ClusterKubernetes `json:"kubernetes,omitempty"`
Type string `json:"-"` // This specifies if a cluster is a hub, single or multi
// Amazon specific options
Amazon *ClusterAmazon `json:"amazon"`
}
// ClusterAmazon offers Amazon-specific settings for that instance pool
type ClusterAmazon struct {
// This fields contains ARNs for additional IAM policies to be added to
// this instance pool
AdditionalIAMPolicies []string `json:"additionalIAMPolicies,omitempty"`
// When set to true, AWS Elastic Block Storage volumes are encrypted
EBSEncrypted *bool `json:"ebsEncrypted,omitempty"`
}
type ClusterKubernetes struct {
Zone string `json:"zone,omitempty"`
Version string `json:"version,omitempty"`
PodCIDR string `json:"podCIDR,omitempty"`
ServiceCIDR string `json:"serviceCIDR,omitempty"`
ClusterAutoscaler *ClusterKubernetesClusterAutoscaler `json:"clusterAutoscaler,omitempty"`
Tiller *ClusterKubernetesTiller `json:"tiller,omitempty"`
Dashboard *ClusterKubernetesDashboard `json:"dashboard,omitempty"`
PodSecurityPolicy *ClusterPodSecurityPolicy `json:"podSecurityPolicy,omitempty"`
Prometheus *ClusterKubernetesPrometheus `json:"prometheus,omitempty"`
Grafana *ClusterKubernetesGrafana `json:"grafana,omitempty"`
Heapster *ClusterKubernetesHeapster `json:"heapster,omitempty"`
InfluxDB *ClusterKubernetesInfluxDB `json:"influxDB,omitempty"`
EncryptionProvider *ClusterEncryptionProvider `json:"encryptionProvider"`
APIServer *ClusterKubernetesAPIServer `json:"apiServer,omitempty"`
Kubelet *ClusterKubernetesKubelet `json:"kubelet,omitempty"`
Scheduler *ClusterKubernetesScheduler `json:"scheduler,omitempty"`
Proxy *ClusterKubernetesProxy `json:"proxy,omitempty"`
ControllerManager *ClusterKubernetesControllerManager `json:"controllerManager,omitempty"`
Calico *ClusterKubernetesCalico `json:"calico,omitempty"`
GlobalFeatureGates map[string]bool `json:"globalFeatureGates,omitempty"`
Hyperkube *bool `json:"hyperkube,omitempty"`
}
type ClusterKubernetesClusterAutoscaler struct {
Enabled bool `json:"enabled,omitempty"`
Image string `json:"image,omitempty"`
Version string `json:"version,omitempty"`
ScaleDownUtilizationThreshold *float64 `json:"scaleDownUtilizationThreshold,omitempty"`
Overprovisioning *ClusterKubernetesClusterAutoscalerOverprovisioning `json:"overprovisioning,omitempty"`
}
type ClusterKubernetesClusterAutoscalerOverprovisioning struct {
Enabled bool `json:"enabled,omitempty"`
Image string `json:"image,omitempty"`
Version string `json:"version,omitempty"`
ReservedMillicoresPerReplica int `json:"reservedMillicoresPerReplica,omitempty"`
ReservedMegabytesPerReplica int `json:"reservedMegabytesPerReplica,omitempty"`
CoresPerReplica int `json:"coresPerReplica,omitempty"`
NodesPerReplica int `json:"nodesPerReplica,omitempty"`
ReplicaCount int `json:"replicaCount,omitempty"`
}
type ClusterKubernetesTiller struct {
Enabled bool `json:"enabled,omitempty"`
Image string `json:"image,omitempty"`
Version string `json:"version,omitempty"`
}
type ClusterKubernetesDashboard struct {
Enabled bool `json:"enabled,omitempty"`
Image string `json:"image,omitempty"`
Version string `json:"version,omitempty"`
}
type ClusterKubernetesAPIServer struct {
// expose the API server through a public load balancer
Public bool `json:"public,omitempty"`
AllowCIDRs []string `json:"allowCIDRs,omitempty"`
// create DNS record for the private load balancer, and optionally lock it down
PrivateRecord bool `json:"privateRecord,omitempty"`
PrivateAllowCIDRs []string `json:"privateAllowCIDRs,omitempty"`
EnableAdmissionControllers []string `json:"enableAdmissionControllers,omitempty"`
DisableAdmissionControllers []string `json:"disableAdmissionControllers,omitempty"`
// OIDC
OIDC *ClusterKubernetesAPIServerOIDC `json:"oidc,omitempty"`
// AWS specific options
Amazon *ClusterKubernetesAPIServerAmazon `json:"amazon,omitempty"`
AuthTokenWebhookFile string `json:"authTokenWebhookFile,omitempty"`
FeatureGates map[string]bool `json:"featureGates,omitempty"`
}
type ClusterKubernetesAPIServerOIDC struct {
// The client ID for the OpenID Connect client, must be set if oidc-issuer-url is set.
ClientID string `json:"clientID,omitempty" hiera:"kubernetes::apiserver::oidc_client_id"`
// If provided, the name of a custom OpenID Connect claim for specifying
// user groups. The claim value is expected to be a string or array of
// strings. This flag is experimental, please see the authentication
// documentation for further details.
GroupsClaim string `json:"groupsClaim,omitempty" hiera:"kubernetes::apiserver::oidc_groups_claim"`
// If provided, all groups will be prefixed with this value to prevent
// conflicts with other authentication strategies.
GroupsPrefix string `json:"groupsPrefix,omitempty" hiera:"kubernetes::apiserver::oidc_groups_prefix"`
// The URL of the OpenID issuer, only HTTPS scheme will be accepted. If
// set, it will be used to verify the OIDC JSON Web Token (JWT).
IssuerURL string `json:"issuerURL,omitempty" hiera:"kubernetes::apiserver::oidc_issuer_url"`
// Comma-separated list of allowed JOSE asymmetric signing algorithms. JWTs
// with a 'alg' header value not in this list will be rejected. Values are
// defined by RFC 7518 https://tools.ietf.org/html/rfc7518#section-3.1.
// (default [RS256])
SigningAlgs []string `json:"signingAlgs,omitempty" hiera:"kubernetes::apiserver::oidc_signing_algs"`
// The OpenID claim to use as the user name. Note that claims other than
// the default ('sub') is not guaranteed to be unique and immutable. This
// flag is experimental, please see the authentication documentation for
// further details. (default "sub")
UsernameClaim string `json:"usernameClaim,omitempty" hiera:"kubernetes::apiserver::oidc_username_claim"`
// If provided, all usernames will be prefixed with this value. If not
// provided, username claims other than 'email' are prefixed by the issuer
// URL to avoid clashes. To skip any prefixing, provide the value '-'.
UsernamePrefix string `json:"usernamePrefix,omitempty" hiera:"kubernetes::apiserver::oidc_username_prefix"`
}
type ClusterKubernetesAPIServerAmazon struct {
PublicELBAccessLogs *ClusterKubernetesAPIServerAmazonAccessLogs `json:"publicELBAccessLogs,omitempty"`
InternalELBAccessLogs *ClusterKubernetesAPIServerAmazonAccessLogs `json:"internalELBAccessLogs,omitempty"`
AwsIAMAuthenticatorInit bool `json:"awsIAMAuthenticatorInit,omitempty"`
}
type ClusterKubernetesAPIServerAmazonAccessLogs struct {
Enabled *bool `json:"enabled,omitempty"`
Bucket string `json:"bucket,omitempty"`
BucketPrefix string `json:"bucketPrefix,omitempty"`
Interval *int `json:"interval,omitempty"`
}
type ClusterPodSecurityPolicy struct {
Enabled bool `json:"enabled,omitempty"`
}
type ClusterEncryptionProvider struct {
Enabled bool `json:"enabled,omitempty"`
Version string `json:"version,omitempty"`
}
// Configure the cluster internal deployment of prometheus
type ClusterKubernetesPrometheus struct {
// Enable a cluster internal prometheus deployment, default: true
Enabled bool `json:"enabled,omitempty"`
// Mode defines which components are installed
Mode string `json:"mode,omitempty"`
}
type ClusterKubernetesGrafana struct {
Enabled bool `json:"enabled,omitempty"`
}
type ClusterKubernetesHeapster struct {
Enabled bool `json:"enabled,omitempty"`
}
type ClusterKubernetesInfluxDB struct {
Enabled bool `json:"enabled,omitempty"`
}
type ClusterVaultHelper struct {
URL string `json:"url,omitempty"`
}
type ClusterKubernetesScheduler struct {
FeatureGates map[string]bool `json:"featureGates,omitempty"`
}
type ClusterKubernetesKubelet struct {
FeatureGates map[string]bool `json:"featureGates,omitempty"`
}
type ClusterKubernetesProxy struct {
FeatureGates map[string]bool `json:"featureGates,omitempty"`
}
type ClusterKubernetesControllerManager struct {
FeatureGates map[string]bool `json:"featureGates,omitempty"`
}
type ClusterKubernetesCalicoBackend string
type ClusterKubernetesCalico struct {
Backend ClusterKubernetesCalicoBackend `json:"backend"`
EnableTypha bool `json:"enableTypha"`
TyphaReplicas *int `json:"typhaReplicas"`
}
// +k8s:openapi-gen=true
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
type ClusterList struct {
metav1.TypeMeta `json:",inline"`
metav1.ObjectMeta `json:"metadata,omitempty"`
Items []Cluster `json:"items"`
}
func NewCluster(name string) *Cluster { |
return &Cluster{
ObjectMeta: metav1.ObjectMeta{
Name: name,
},
}
}
| identifier_body |
|
cluster.go | // Copyright Jetstack Ltd. See LICENSE for details.
// Copyright © 2017 The Kubicorn Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package v1alpha1
import (
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
const (
CloudAmazon = "amazon"
CloudAzure = "azure"
CloudGoogle = "google"
CloudBaremetal = "baremetal"
CloudDigitalOcean = "digitalocean"
)
const (
ClusterTypeHub = "hub"
ClusterTypeClusterSingle = "cluster-single"
ClusterTypeClusterMulti = "cluster-multi"
)
const (
// represents Terraform in a destroy state
StateDestroy = "destroy"
)
const (
PrometheusModeFull = "Full"
PrometheusModeExternalExportersOnly = "ExternalExportersOnly"
PrometheusModeExternalScrapeTargetsOnly = "ExternalScrapeTargetsOnly"
)
const (
CalicoBackendEtcd ClusterKubernetesCalicoBackend = "etcd"
CalicoBackendKubernetes ClusterKubernetesCalicoBackend = "kubernetes"
)
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// +k8s:openapi-gen=true
// +resource:path=clusters
type Cluster struct {
metav1.TypeMeta `json:",inline"`
metav1.ObjectMeta `json:"metadata,omitempty"`
CloudId string `json:"cloudId,omitempty"`
InstancePools []InstancePool `json:"instancePools,omitempty"`
Cloud string `json:"cloud,omitempty"`
Location string `json:"location,omitempty"`
Network *Network `json:"network,omitempty"`
LoggingSinks []*LoggingSink `json:"loggingSinks,omitempty"`
Values *Values `json:"values,omitempty"`
KubernetesAPI *KubernetesAPI `json:"kubernetesAPI,omitempty"`
GroupIdentifier string `json:"groupIdentifier,omitempty"`
VaultHelper *ClusterVaultHelper `json:"vaultHelper,omitempty"`
Environment string `json:"environment,omitempty"`
Kubernetes *ClusterKubernetes `json:"kubernetes,omitempty"`
Type string `json:"-"` // This specifies if a cluster is a hub, single or multi
// Amazon specific options
Amazon *ClusterAmazon `json:"amazon"`
}
// ClusterAmazon offers Amazon-specific settings for that instance pool
type ClusterAmazon struct {
// This field contains ARNs for additional IAM policies to be added to
// this instance pool
AdditionalIAMPolicies []string `json:"additionalIAMPolicies,omitempty"`
// When set to true, AWS Elastic Block Storage volumes are encrypted
EBSEncrypted *bool `json:"ebsEncrypted,omitempty"`
}
type ClusterKubernetes struct {
Zone string `json:"zone,omitempty"`
Version string `json:"version,omitempty"`
PodCIDR string `json:"podCIDR,omitempty"`
ServiceCIDR string `json:"serviceCIDR,omitempty"`
ClusterAutoscaler *ClusterKubernetesClusterAutoscaler `json:"clusterAutoscaler,omitempty"`
Tiller *ClusterKubernetesTiller `json:"tiller,omitempty"`
Dashboard *ClusterKubernetesDashboard `json:"dashboard,omitempty"`
PodSecurityPolicy *ClusterPodSecurityPolicy `json:"podSecurityPolicy,omitempty"`
Prometheus *ClusterKubernetesPrometheus `json:"prometheus,omitempty"`
Grafana *ClusterKubernetesGrafana `json:"grafana,omitempty"`
Heapster *ClusterKubernetesHeapster `json:"heapster,omitempty"`
InfluxDB *ClusterKubernetesInfluxDB `json:"influxDB,omitempty"`
EncryptionProvider *ClusterEncryptionProvider `json:"encryptionProvider"`
APIServer *ClusterKubernetesAPIServer `json:"apiServer,omitempty"`
Kubelet *ClusterKubernetesKubelet `json:"kubelet,omitempty"`
Scheduler *ClusterKubernetesScheduler `json:"scheduler,omitempty"`
Proxy *ClusterKubernetesProxy `json:"proxy,omitempty"`
ControllerManager *ClusterKubernetesControllerManager `json:"controllerManager,omitempty"`
Calico *ClusterKubernetesCalico `json:"calico,omitempty"`
GlobalFeatureGates map[string]bool `json:"globalFeatureGates,omitempty"`
Hyperkube *bool `json:"hyperkube,omitempty"`
}
type ClusterKubernetesClusterAutoscaler struct {
Enabled bool `json:"enabled,omitempty"`
Image string `json:"image,omitempty"`
Version string `json:"version,omitempty"`
ScaleDownUtilizationThreshold *float64 `json:"scaleDownUtilizationThreshold,omitempty"`
Overprovisioning *ClusterKubernetesClusterAutoscalerOverprovisioning `json:"overprovisioning,omitempty"`
}
type ClusterKubernetesClusterAutoscalerOverprovisioning struct {
Enabled bool `json:"enabled,omitempty"`
Image string `json:"image,omitempty"`
Version string `json:"version,omitempty"`
ReservedMillicoresPerReplica int `json:"reservedMillicoresPerReplica,omitempty"`
ReservedMegabytesPerReplica int `json:"reservedMegabytesPerReplica,omitempty"`
CoresPerReplica int `json:"coresPerReplica,omitempty"`
NodesPerReplica int `json:"nodesPerReplica,omitempty"`
ReplicaCount int `json:"replicaCount,omitempty"`
}
type ClusterKubernetesTiller struct {
Enabled bool `json:"enabled,omitempty"`
Image string `json:"image,omitempty"`
Version string `json:"version,omitempty"`
}
type ClusterKubernetesDashboard struct {
Enabled bool `json:"enabled,omitempty"`
Image string `json:"image,omitempty"`
Version string `json:"version,omitempty"`
}
type ClusterKubernetesAPIServer struct {
// expose the API server through a public load balancer
Public bool `json:"public,omitempty"`
AllowCIDRs []string `json:"allowCIDRs,omitempty"`
// create DNS record for the private load balancer, and optionally lock it down
PrivateRecord bool `json:"privateRecord,omitempty"`
PrivateAllowCIDRs []string `json:"privateAllowCIDRs,omitempty"`
EnableAdmissionControllers []string `json:"enableAdmissionControllers,omitempty"`
DisableAdmissionControllers []string `json:"disableAdmissionControllers,omitempty"`
// OIDC
OIDC *ClusterKubernetesAPIServerOIDC `json:"oidc,omitempty"`
// AWS specific options
Amazon *ClusterKubernetesAPIServerAmazon `json:"amazon,omitempty"`
AuthTokenWebhookFile string `json:"authTokenWebhookFile,omitempty"`
FeatureGates map[string]bool `json:"featureGates,omitempty"`
}
type ClusterKubernetesAPIServerOIDC struct {
// The client ID for the OpenID Connect client, must be set if oidc-issuer-url is set.
ClientID string `json:"clientID,omitempty" hiera:"kubernetes::apiserver::oidc_client_id"`
// If provided, the name of a custom OpenID Connect claim for specifying
// user groups. The claim value is expected to be a string or array of
// strings. This flag is experimental, please see the authentication
// documentation for further details.
GroupsClaim string `json:"groupsClaim,omitempty" hiera:"kubernetes::apiserver::oidc_groups_claim"`
// If provided, all groups will be prefixed with this value to prevent
// conflicts with other authentication strategies.
GroupsPrefix string `json:"groupsPrefix,omitempty" hiera:"kubernetes::apiserver::oidc_groups_prefix"`
// The URL of the OpenID issuer, only HTTPS scheme will be accepted. If
// set, it will be used to verify the OIDC JSON Web Token (JWT).
IssuerURL string `json:"issuerURL,omitempty" hiera:"kubernetes::apiserver::oidc_issuer_url"`
// Comma-separated list of allowed JOSE asymmetric signing algorithms. JWTs
// with an 'alg' header value not in this list will be rejected. Values are
// defined by RFC 7518 https://tools.ietf.org/html/rfc7518#section-3.1.
// (default [RS256])
SigningAlgs []string `json:"signingAlgs,omitempty" hiera:"kubernetes::apiserver::oidc_signing_algs"`
// The OpenID claim to use as the user name. Note that claims other than
// the default ('sub') are not guaranteed to be unique and immutable. This
// flag is experimental, please see the authentication documentation for
// further details. (default "sub")
UsernameClaim string `json:"usernameClaim,omitempty" hiera:"kubernetes::apiserver::oidc_username_claim"`
// If provided, all usernames will be prefixed with this value. If not
// provided, username claims other than 'email' are prefixed by the issuer
// URL to avoid clashes. To skip any prefixing, provide the value '-'.
UsernamePrefix string `json:"usernamePrefix,omitempty" hiera:"kubernetes::apiserver::oidc_username_prefix"`
}
type ClusterKubernetesAPIServerAmazon struct {
PublicELBAccessLogs *ClusterKubernetesAPIServerAmazonAccessLogs `json:"publicELBAccessLogs,omitempty"`
InternalELBAccessLogs *ClusterKubernetesAPIServerAmazonAccessLogs `json:"internalELBAccessLogs,omitempty"`
AwsIAMAuthenticatorInit bool `json:"awsIAMAuthenticatorInit,omitempty"`
}
type ClusterKubernetesAPIServerAmazonAccessLogs struct {
Enabled *bool `json:"enabled,omitempty"`
Bucket string `json:"bucket,omitempty"`
BucketPrefix string `json:"bucketPrefix,omitempty"`
Interval *int `json:"interval,omitempty"`
}
type ClusterPodSecurityPolicy struct {
Enabled bool `json:"enabled,omitempty"`
}
type ClusterEncryptionProvider struct {
Enabled bool `json:"enabled,omitempty"`
Version string `json:"version,omitempty"`
}
// Configure the cluster internal deployment of prometheus
type ClusterKubernetesPrometheus struct {
// Enable a cluster internal prometheus deployment, default: true
Enabled bool `json:"enabled,omitempty"`
// Mode defines which components are installed
Mode string `json:"mode,omitempty"`
}
type ClusterKubernetesGrafana struct {
Enabled bool `json:"enabled,omitempty"`
}
type ClusterKubernetesHeapster struct {
Enabled bool `json:"enabled,omitempty"`
}
type ClusterKubernetesInfluxDB struct {
Enabled bool `json:"enabled,omitempty"`
}
type ClusterVaultHelper struct {
URL string `json:"url,omitempty"`
}
type ClusterKubernetesScheduler struct {
FeatureGates map[string]bool `json:"featureGates,omitempty"`
}
type ClusterKubernetesKubelet struct {
FeatureGates map[string]bool `json:"featureGates,omitempty"`
}
type ClusterKubernetesProxy struct {
FeatureGates map[string]bool `json:"featureGates,omitempty"`
}
type ClusterKubernetesControllerManager struct {
FeatureGates map[string]bool `json:"featureGates,omitempty"`
}
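// ClusterKubernetesCalicoBackend selects the Calico datastore backend; see CalicoBackendEtcd and CalicoBackendKubernetes.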
type ClusterKubernetesCalicoBackend string
type ClusterKubernetesCalico struct {
Backend ClusterKubernetesCalicoBackend `json:"backend"`
EnableTypha bool `json:"enableTypha"`
TyphaReplicas *int `json:"typhaReplicas"`
}
// +k8s:openapi-gen=true
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
type ClusterList struct {
metav1.TypeMeta `json:",inline"`
metav1.ObjectMeta `json:"metadata,omitempty"`
Items []Cluster `json:"items"`
}
func N | name string) *Cluster {
return &Cluster{
ObjectMeta: metav1.ObjectMeta{
Name: name,
},
}
}
| ewCluster( | identifier_name |
cluster.go | // Copyright Jetstack Ltd. See LICENSE for details.
// Copyright © 2017 The Kubicorn Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package v1alpha1
import (
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
const (
CloudAmazon = "amazon"
CloudAzure = "azure"
CloudGoogle = "google"
CloudBaremetal = "baremetal"
CloudDigitalOcean = "digitalocean"
)
const (
ClusterTypeHub = "hub"
ClusterTypeClusterSingle = "cluster-single"
ClusterTypeClusterMulti = "cluster-multi"
)
const (
// represents Terraform in a destroy state
StateDestroy = "destroy"
)
const (
PrometheusModeFull = "Full"
PrometheusModeExternalExportersOnly = "ExternalExportersOnly"
PrometheusModeExternalScrapeTargetsOnly = "ExternalScrapeTargetsOnly"
)
const (
CalicoBackendEtcd ClusterKubernetesCalicoBackend = "etcd"
CalicoBackendKubernetes ClusterKubernetesCalicoBackend = "kubernetes"
)
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// +k8s:openapi-gen=true
// +resource:path=clusters
type Cluster struct {
metav1.TypeMeta `json:",inline"`
metav1.ObjectMeta `json:"metadata,omitempty"`
CloudId string `json:"cloudId,omitempty"`
InstancePools []InstancePool `json:"instancePools,omitempty"`
Cloud string `json:"cloud,omitempty"`
Location string `json:"location,omitempty"`
Network *Network `json:"network,omitempty"`
LoggingSinks []*LoggingSink `json:"loggingSinks,omitempty"`
Values *Values `json:"values,omitempty"`
KubernetesAPI *KubernetesAPI `json:"kubernetesAPI,omitempty"`
GroupIdentifier string `json:"groupIdentifier,omitempty"`
VaultHelper *ClusterVaultHelper `json:"vaultHelper,omitempty"`
Environment string `json:"environment,omitempty"`
Kubernetes *ClusterKubernetes `json:"kubernetes,omitempty"`
Type string `json:"-"` // This specifies if a cluster is a hub, single or multi
// Amazon specific options
Amazon *ClusterAmazon `json:"amazon"`
}
// ClusterAmazon offers Amazon-specific settings for that instance pool
type ClusterAmazon struct {
// This field contains ARNs for additional IAM policies to be added to
// this instance pool
AdditionalIAMPolicies []string `json:"additionalIAMPolicies,omitempty"`
// When set to true, AWS Elastic Block Storage volumes are encrypted
EBSEncrypted *bool `json:"ebsEncrypted,omitempty"`
}
type ClusterKubernetes struct {
Zone string `json:"zone,omitempty"`
Version string `json:"version,omitempty"`
PodCIDR string `json:"podCIDR,omitempty"`
ServiceCIDR string `json:"serviceCIDR,omitempty"`
ClusterAutoscaler *ClusterKubernetesClusterAutoscaler `json:"clusterAutoscaler,omitempty"`
Tiller *ClusterKubernetesTiller `json:"tiller,omitempty"`
Dashboard *ClusterKubernetesDashboard `json:"dashboard,omitempty"`
PodSecurityPolicy *ClusterPodSecurityPolicy `json:"podSecurityPolicy,omitempty"`
Prometheus *ClusterKubernetesPrometheus `json:"prometheus,omitempty"`
Grafana *ClusterKubernetesGrafana `json:"grafana,omitempty"`
Heapster *ClusterKubernetesHeapster `json:"heapster,omitempty"`
InfluxDB *ClusterKubernetesInfluxDB `json:"influxDB,omitempty"`
EncryptionProvider *ClusterEncryptionProvider `json:"encryptionProvider"`
APIServer *ClusterKubernetesAPIServer `json:"apiServer,omitempty"`
Kubelet *ClusterKubernetesKubelet `json:"kubelet,omitempty"`
Scheduler *ClusterKubernetesScheduler `json:"scheduler,omitempty"`
Proxy *ClusterKubernetesProxy `json:"proxy,omitempty"`
ControllerManager *ClusterKubernetesControllerManager `json:"controllerManager,omitempty"`
Calico *ClusterKubernetesCalico `json:"calico,omitempty"`
GlobalFeatureGates map[string]bool `json:"globalFeatureGates,omitempty"`
Hyperkube *bool `json:"hyperkube,omitempty"`
}
type ClusterKubernetesClusterAutoscaler struct {
Enabled bool `json:"enabled,omitempty"`
Image string `json:"image,omitempty"`
Version string `json:"version,omitempty"`
ScaleDownUtilizationThreshold *float64 `json:"scaleDownUtilizationThreshold,omitempty"`
Overprovisioning *ClusterKubernetesClusterAutoscalerOverprovisioning `json:"overprovisioning,omitempty"`
}
type ClusterKubernetesClusterAutoscalerOverprovisioning struct {
Enabled bool `json:"enabled,omitempty"`
Image string `json:"image,omitempty"`
Version string `json:"version,omitempty"`
ReservedMillicoresPerReplica int `json:"reservedMillicoresPerReplica,omitempty"`
ReservedMegabytesPerReplica int `json:"reservedMegabytesPerReplica,omitempty"`
CoresPerReplica int `json:"coresPerReplica,omitempty"`
NodesPerReplica int `json:"nodesPerReplica,omitempty"`
ReplicaCount int `json:"replicaCount,omitempty"`
}
type ClusterKubernetesTiller struct {
Enabled bool `json:"enabled,omitempty"`
Image string `json:"image,omitempty"`
Version string `json:"version,omitempty"`
}
type ClusterKubernetesDashboard struct {
Enabled bool `json:"enabled,omitempty"` | // expose the API server through a public load balancer
Public bool `json:"public,omitempty"`
AllowCIDRs []string `json:"allowCIDRs,omitempty"`
// create DNS record for the private load balancer, and optionally lock it down
PrivateRecord bool `json:"privateRecord,omitempty"`
PrivateAllowCIDRs []string `json:"privateAllowCIDRs,omitempty"`
EnableAdmissionControllers []string `json:"enableAdmissionControllers,omitempty"`
DisableAdmissionControllers []string `json:"disableAdmissionControllers,omitempty"`
// OIDC
OIDC *ClusterKubernetesAPIServerOIDC `json:"oidc,omitempty"`
// AWS specific options
Amazon *ClusterKubernetesAPIServerAmazon `json:"amazon,omitempty"`
AuthTokenWebhookFile string `json:"authTokenWebhookFile,omitempty"`
FeatureGates map[string]bool `json:"featureGates,omitempty"`
}
type ClusterKubernetesAPIServerOIDC struct {
// The client ID for the OpenID Connect client, must be set if oidc-issuer-url is set.
ClientID string `json:"clientID,omitempty" hiera:"kubernetes::apiserver::oidc_client_id"`
// If provided, the name of a custom OpenID Connect claim for specifying
// user groups. The claim value is expected to be a string or array of
// strings. This flag is experimental, please see the authentication
// documentation for further details.
GroupsClaim string `json:"groupsClaim,omitempty" hiera:"kubernetes::apiserver::oidc_groups_claim"`
// If provided, all groups will be prefixed with this value to prevent
// conflicts with other authentication strategies.
GroupsPrefix string `json:"groupsPrefix,omitempty" hiera:"kubernetes::apiserver::oidc_groups_prefix"`
// The URL of the OpenID issuer, only HTTPS scheme will be accepted. If
// set, it will be used to verify the OIDC JSON Web Token (JWT).
IssuerURL string `json:"issuerURL,omitempty" hiera:"kubernetes::apiserver::oidc_issuer_url"`
// Comma-separated list of allowed JOSE asymmetric signing algorithms. JWTs
// with an 'alg' header value not in this list will be rejected. Values are
// defined by RFC 7518 https://tools.ietf.org/html/rfc7518#section-3.1.
// (default [RS256])
SigningAlgs []string `json:"signingAlgs,omitempty" hiera:"kubernetes::apiserver::oidc_signing_algs"`
// The OpenID claim to use as the user name. Note that claims other than
// the default ('sub') are not guaranteed to be unique and immutable. This
// flag is experimental, please see the authentication documentation for
// further details. (default "sub")
UsernameClaim string `json:"usernameClaim,omitempty" hiera:"kubernetes::apiserver::oidc_username_claim"`
// If provided, all usernames will be prefixed with this value. If not
// provided, username claims other than 'email' are prefixed by the issuer
// URL to avoid clashes. To skip any prefixing, provide the value '-'.
UsernamePrefix string `json:"usernamePrefix,omitempty" hiera:"kubernetes::apiserver::oidc_username_prefix"`
}
type ClusterKubernetesAPIServerAmazon struct {
PublicELBAccessLogs *ClusterKubernetesAPIServerAmazonAccessLogs `json:"publicELBAccessLogs,omitempty"`
InternalELBAccessLogs *ClusterKubernetesAPIServerAmazonAccessLogs `json:"internalELBAccessLogs,omitempty"`
AwsIAMAuthenticatorInit bool `json:"awsIAMAuthenticatorInit,omitempty"`
}
type ClusterKubernetesAPIServerAmazonAccessLogs struct {
Enabled *bool `json:"enabled,omitempty"`
Bucket string `json:"bucket,omitempty"`
BucketPrefix string `json:"bucketPrefix,omitempty"`
Interval *int `json:"interval,omitempty"`
}
type ClusterPodSecurityPolicy struct {
Enabled bool `json:"enabled,omitempty"`
}
type ClusterEncryptionProvider struct {
Enabled bool `json:"enabled,omitempty"`
Version string `json:"version,omitempty"`
}
// Configure the cluster internal deployment of prometheus
type ClusterKubernetesPrometheus struct {
// Enable a cluster internal prometheus deployment, default: true
Enabled bool `json:"enabled,omitempty"`
// Mode defines which components are installed
Mode string `json:"mode,omitempty"`
}
type ClusterKubernetesGrafana struct {
Enabled bool `json:"enabled,omitempty"`
}
type ClusterKubernetesHeapster struct {
Enabled bool `json:"enabled,omitempty"`
}
type ClusterKubernetesInfluxDB struct {
Enabled bool `json:"enabled,omitempty"`
}
type ClusterVaultHelper struct {
URL string `json:"url,omitempty"`
}
type ClusterKubernetesScheduler struct {
FeatureGates map[string]bool `json:"featureGates,omitempty"`
}
type ClusterKubernetesKubelet struct {
FeatureGates map[string]bool `json:"featureGates,omitempty"`
}
type ClusterKubernetesProxy struct {
FeatureGates map[string]bool `json:"featureGates,omitempty"`
}
type ClusterKubernetesControllerManager struct {
FeatureGates map[string]bool `json:"featureGates,omitempty"`
}
type ClusterKubernetesCalicoBackend string
type ClusterKubernetesCalico struct {
Backend ClusterKubernetesCalicoBackend `json:"backend"`
EnableTypha bool `json:"enableTypha"`
TyphaReplicas *int `json:"typhaReplicas"`
}
// +k8s:openapi-gen=true
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
type ClusterList struct {
metav1.TypeMeta `json:",inline"`
metav1.ObjectMeta `json:"metadata,omitempty"`
Items []Cluster `json:"items"`
}
func NewCluster(name string) *Cluster {
return &Cluster{
ObjectMeta: metav1.ObjectMeta{
Name: name,
},
}
} | Image string `json:"image,omitempty"`
Version string `json:"version,omitempty"`
}
type ClusterKubernetesAPIServer struct { | random_line_split |
orderbook.go | package main
import (
"log"
"math"
"strings"
)
type MD struct {
Open float64
High float64
Low float64
Close float64
Qty float64
Vol float64
Vwap float64
Ask float64
Bid float64
AskSize float64
BidSize float64
}
type Security struct {
Id int64
Symbol string
LocalSymbol string
Bbgid string
Cusip string
Sedol string
Isin string
Market string
Type string
LotSize float64
Multiplier float64
PrevClose float64
Rate float64 // currency rate
Currency string
Adv20 float64
MarketCap float64
Sector string
IndustryGroup string
Industry string
SubIndustry string
MD
}
func (s *Security) GetClose() float64 {
close := s.Close
if close <= 0 {
close = s.PrevClose
}
return close
}
var SecurityMapById = make(map[int64]*Security)
var SecurityMapByMarket = make(map[string]map[string]*Security)
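// ParseSecurity decodes a security definition message, normalizing the CURRENCY market to FX and non-positive multipliers/rates to 1, then indexes the security by id and by market/symbol.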
func ParseSecurity(msg []interface{}) {
sec := &Security{
Id: int64(msg[1].(float64)),
Symbol: msg[2].(string),
Market: msg[3].(string),
Type: msg[4].(string),
LotSize: msg[5].(float64),
Multiplier: msg[6].(float64),
Currency: msg[7].(string),
Rate: msg[8].(float64),
PrevClose: msg[9].(float64),
LocalSymbol: msg[10].(string),
Adv20: msg[11].(float64),
MarketCap: msg[12].(float64),
Sector: msg[13].(string),
IndustryGroup: msg[14].(string),
Industry: msg[15].(string),
SubIndustry: msg[16].(string),
Bbgid: msg[17].(string),
Cusip: msg[18].(string),
Sedol: msg[19].(string),
Isin: msg[20].(string),
}
if sec.Market == "CURRENCY" {
sec.Market = "FX"
}
if sec.Multiplier <= 0 {
sec.Multiplier = 1
}
if sec.Rate <= 0 {
sec.Rate = 1
}
sec0 := SecurityMapById[sec.Id]
if sec0 == nil {
SecurityMapById[sec.Id] = sec
} else {
*sec0 = *sec
sec = sec0
}
tmp := SecurityMapByMarket[sec.Market]
if tmp == nil {
tmp = make(map[string]*Security)
SecurityMapByMarket[sec.Market] = tmp
}
tmp[sec.Symbol] = sec
}
type Order struct {
Id int64
OrigClOrdId int64
// Tm int64
// Seq int64
St string
Security *Security
// UserId int
Acc int
Qty float64
Px float64
Side string
Type string
// Tif string
CumQty float64
AvgPx float64
LastQty float64
LastPx float64
}
var orders = make(map[int64]*Order)
type PositionBase struct {
Qty float64
AvgPx float64
Commission float64
RealizedPnl float64
}
type Position struct {
PositionBase
Bod PositionBase
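// Outstand* track quantity still working in live orders; Buy*/Sell* accumulate filled quantity and notional.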
OutstandBuyQty float64
OutstandSellQty float64
BuyQty float64
BuyValue float64
SellQty float64
SellValue float64
Security *Security
Acc int
Target float64
}
var Positions = make(map[int]map[int64]*Position)
var usedSecurities = make(map[int64]bool)
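// getPos returns the Position for (acc, securityId), creating it on first use and subscribing to market data for securities not seen before.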
func getPos(acc int, securityId int64) *Position {
tmp := Positions[acc]
if tmp == nil {
tmp = make(map[int64]*Position)
Positions[acc] = tmp
}
p := tmp[securityId]
if p == nil {
p = &Position{}
p.Acc = acc
p.Security = SecurityMapById[securityId]
if p.Security == nil {
log.Println("unknown securityId", securityId)
return p
}
tmp[securityId] = p
used := usedSecurities[securityId]
if !used {
Request([]interface{}{"sub", securityId})
usedSecurities[securityId] = true
}
}
return p
}
func isLive(st string) bool {
if st == "" {
return true
}
st = strings.ToLower(st)
return strings.HasPrefix(st, "pending") || strings.HasPrefix(st, "unconfirmed") || strings.HasPrefix(st, "partial") || st == "new" || st == "suspended"
}
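// updatePos applies an order state change to the account position: it maintains outstanding buy/sell quantity and, on fills, traded volume, average price and realized PnL.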
func updatePos(ord *Order) {
securityId := ord.Security.Id
p := getPos(ord.Acc, securityId)
var outstand *float64
if ord.Side == "buy" {
outstand = &p.OutstandBuyQty
} else {
outstand = &p.OutstandSellQty
}
switch ord.St {
case "unconfirmed", "unconfirmed_replace":
*outstand += ord.Qty - ord.CumQty
case "filled", "partial":
if ord.LastQty > 0 && ord.Type != "otc" {
*outstand -= ord.LastQty
if *outstand < 0 {
log.Printf("Outstand < 0: %s", ord)
*outstand = 0
}
}
qty := ord.LastQty
if ord.Side == "buy" {
p.BuyQty += ord.LastQty
p.BuyValue += ord.LastQty * ord.LastPx
} else {
qty = -qty
p.SellQty += ord.LastQty
p.SellValue += ord.LastQty * ord.LastPx
}
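// Average-cost accounting: fills that reduce the position realize PnL against the current average price; fills that extend it re-average the entry price.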
qty0 := p.Qty
px := ord.LastPx
multiplier := ord.Security.Rate * ord.Security.Multiplier
if (qty0 > 0) && (qty < 0) { // sell trade to cover position
if qty0 > -qty {
p.RealizedPnl += (px - p.AvgPx) * -qty * multiplier
} else {
p.RealizedPnl += (px - p.AvgPx) * qty0 * multiplier
p.AvgPx = px
}
} else if (qty0 < 0) && (qty > 0) { // buy trade to cover position
if -qty0 > qty {
p.RealizedPnl += (p.AvgPx - px) * qty * multiplier
} else {
p.RealizedPnl += (p.AvgPx - px) * -qty0 * multiplier
p.AvgPx = px
}
} else { // open position
p.AvgPx = (qty0*p.AvgPx + qty*px) / (qty0 + qty)
}
p.Qty += qty
if p.Qty == 0 {
p.AvgPx = 0
}
default:
*outstand -= ord.Qty - ord.CumQty
if *outstand < 0 {
log.Printf("Outstand < 0: %s", ord)
*outstand = 0
}
}
}
var seqNum int64 = 0
var offlineDone = false
var onlineCache [][]interface{}
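// Order messages that arrive while the offline snapshot is still loading are buffered in onlineCache and replayed once the server reports "complete".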
func ParseOffline(msg []interface{}) {
if msg[1].(string) == "complete" {
for _, msg := range onlineCache {
ParseOrder(msg, false)
}
onlineCache = onlineCache[:0]
offlineDone = true
log.Print("offline done")
}
}
func ParseOrder(msg []interface{}, isOnline bool) {
if isOnline && !offlineDone {
onlineCache = append(onlineCache, msg)
return
}
clOrdId := int64(msg[1].(float64))
// tm := int64(msg[2].(float64))
seq := int64(msg[3].(float64))
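// Drop stale or duplicate updates; sequence numbers must be strictly increasing.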
if seq <= seqNum {
return
}
seqNum = seq
switch st := msg[4].(string); st {
case "unconfirmed", "unconfirmed_replace":
securityId := int64(msg[5].(float64))
security := SecurityMapById[securityId]
if security == nil {
log.Println("not found security", securityId)
return
}
// aid := int(msg[6].(float64))
// userId := int(msg[7].(float64))
acc := int(msg[8].(float64))
// brokerAcc := int(msg[9].(float64))
qty := msg[10].(float64)
px := msg[11].(float64)
side := msg[12].(string)
// ordType := msg[13].(string)
// tif := msg[14].(string)
ord := Order{
Id: clOrdId,
St: st,
Security: security,
Acc: acc,
Qty: qty,
Px: px,
Side: side,
}
if st == "unconfirmed_replace" {
origClOrdId := int64(msg[14].(float64))
ord.OrigClOrdId = origClOrdId
}
orders[clOrdId] = &ord
updatePos(&ord)
case "filled", "partial":
qty := msg[5].(float64)
px := msg[6].(float64)
// tradeId = msg[7].(string)
execTransType := msg[8].(string)
if execTransType == "cancel" {
qty = -qty
}
ord := orders[clOrdId]
if ord != nil {
ord.AvgPx = (ord.CumQty*ord.AvgPx + qty*px) / (ord.CumQty + qty)
ord.CumQty += qty
ord.CumQty = math.Round(ord.CumQty*1e6) / 1e6
if ord.CumQty > ord.Qty {
log.Printf("overfill found: %s", msg)
}
ord.LastQty = qty
ord.LastPx = px
if ord.CumQty >= ord.Qty {
st = "filled"
} else {
st = "partial"
}
ord.St = st
updatePos(ord)
} else {
log.Println("not found order for", clOrdId)
}
case "canceled":
case "cancelled":
case "expired":
case "done_for_day":
case "calculated":
ord := orders[clOrdId]
if ord != nil {
st0 := ord.St
ord.St = st
if isLive(st0) {
updatePos(ord)
}
} else {
log.Println("can not find order for", clOrdId)
}
case "new", "pending", "replaced", "suspended":
ord := orders[clOrdId]
if ord != nil {
if st == "replaced" {
old := orders[ord.OrigClOrdId]
if old == nil {
log.Println("can not find order for", ord.OrigClOrdId)
} else {
old.St = st
}
st = "confirmed"
}
ord.St = st
} else {
log.Println("can not find order for", clOrdId)
}
case "new_rejected", "replace_rejected":
ord := orders[clOrdId]
if ord != nil {
st0 := ord.St
ord.St = st
if isLive(st0) {
updatePos(ord)
}
} else {
log.Println("can not find order for", clOrdId)
}
case "risk_rejected":
ord := orders[clOrdId]
if ord != nil {
ord.St = st
updatePos(ord)
}
}
}
func ParseTarget(msg []interface{}) {
acc := int(msg[1].(float64)) // msg[2] is acc name
for _, v := range Positions[acc] {
v.Target = 0.
}
if len(msg) < 4 {
return
}
if _, ok := msg[3].([]interface{}); !ok {
return
}
for _, v := range msg[3].([]interface{}) {
t := v.([]interface{})
securityId := int64(t[0].(float64))
target := t[1].(float64)
getPos(acc, securityId).Target = target
}
}
func ParseBod(msg []interface{}) {
acc := int(msg[1].(float64))
securityId := int64(msg[2].(float64))
qty := msg[3].(float64)
avgPx := msg[4].(float64)
commission := msg[5].(float64)
realizedPnl := msg[6].(float64)
// brokerAcc := int(msg[7])
// tm := int64(msg[8].(float64))
p := getPos(acc, securityId)
p.Qty = qty
p.AvgPx = avgPx
p.Commission = commission
p.RealizedPnl = realizedPnl
p.Bod.Qty = qty
p.Bod.AvgPx = avgPx
p.Bod.Commission = commission
p.Bod.RealizedPnl = realizedPnl
}
func | (msg []interface{}) {
acc := int(msg[1].(float64))
securityId := int64(msg[2].(float64))
p := getPos(acc, securityId)
// ignore unrealizedPnl and realizedPnl, which we can deduce ourselves
if len(msg) > 4 {
p.Commission = msg[4].(float64)
}
}
func ParseMd(msg []interface{}) {
for i := 1; i < len(msg); i++ {
data := msg[i].([]interface{})
securityId := int64(data[0].(float64))
md := data[1].(map[string]interface{})
for k, _v := range md {
v := _v.(float64)
s := SecurityMapById[securityId]
if s == nil {
log.Println("unknown security id", securityId)
continue
}
switch k {
case "o":
s.Open = v
case "h":
s.High = v
case "l":
s.Low = v
case "c":
s.Close = v
case "q":
s.Qty = v
case "v":
s.Vol = v
case "V":
s.Vwap = v
case "a0":
s.Ask = v
case "b0":
s.Bid = v
case "A0":
s.AskSize = v
case "B0":
s.BidSize = v
}
}
}
}
| ParsePnl | identifier_name |
orderbook.go | package main
import (
"log"
"math"
"strings"
)
type MD struct {
Open float64
High float64
Low float64
Close float64
Qty float64
Vol float64
Vwap float64
Ask float64
Bid float64
AskSize float64
BidSize float64
}
type Security struct {
Id int64
Symbol string
LocalSymbol string
Bbgid string
Cusip string
Sedol string
Isin string
Market string
Type string
LotSize float64
Multiplier float64
PrevClose float64
Rate float64 // currency rate
Currency string
Adv20 float64
MarketCap float64
Sector string
IndustryGroup string
Industry string
SubIndustry string
MD
}
func (s *Security) GetClose() float64 {
close := s.Close
if close <= 0 {
close = s.PrevClose
}
return close
}
var SecurityMapById = make(map[int64]*Security)
var SecurityMapByMarket = make(map[string]map[string]*Security)
func ParseSecurity(msg []interface{}) {
sec := &Security{
Id: int64(msg[1].(float64)),
Symbol: msg[2].(string),
Market: msg[3].(string),
Type: msg[4].(string),
LotSize: msg[5].(float64),
Multiplier: msg[6].(float64),
Currency: msg[7].(string),
Rate: msg[8].(float64),
PrevClose: msg[9].(float64),
LocalSymbol: msg[10].(string),
Adv20: msg[11].(float64),
MarketCap: msg[12].(float64),
Sector: msg[13].(string),
IndustryGroup: msg[14].(string),
Industry: msg[15].(string),
SubIndustry: msg[16].(string),
Bbgid: msg[17].(string),
Cusip: msg[18].(string),
Sedol: msg[19].(string),
Isin: msg[20].(string),
}
if sec.Market == "CURRENCY" {
sec.Market = "FX"
}
if sec.Multiplier <= 0 {
sec.Multiplier = 1
}
if sec.Rate <= 0 {
sec.Rate = 1
}
sec0 := SecurityMapById[sec.Id]
if sec0 == nil {
SecurityMapById[sec.Id] = sec
} else {
*sec0 = *sec
sec = sec0
}
tmp := SecurityMapByMarket[sec.Market]
if tmp == nil {
tmp = make(map[string]*Security)
SecurityMapByMarket[sec.Market] = tmp
}
tmp[sec.Symbol] = sec
}
type Order struct {
Id int64
OrigClOrdId int64
// Tm int64
// Seq int64
St string
Security *Security
// UserId int
Acc int
Qty float64
Px float64
Side string
Type string
// Tif string
CumQty float64
AvgPx float64
LastQty float64
LastPx float64
}
var orders = make(map[int64]*Order)
type PositionBase struct {
Qty float64
AvgPx float64
Commission float64
RealizedPnl float64
}
type Position struct {
PositionBase
Bod PositionBase
OutstandBuyQty float64
OutstandSellQty float64
BuyQty float64
BuyValue float64
SellQty float64
SellValue float64
Security *Security
Acc int
Target float64
}
var Positions = make(map[int]map[int64]*Position)
var usedSecurities = make(map[int64]bool)
func getPos(acc int, securityId int64) *Position {
tmp := Positions[acc]
if tmp == nil {
tmp = make(map[int64]*Position)
Positions[acc] = tmp
}
p := tmp[securityId]
if p == nil {
p = &Position{}
p.Acc = acc
p.Security = SecurityMapById[securityId]
if p.Security == nil {
log.Println("unknown securityId", securityId)
return p
}
tmp[securityId] = p
used := usedSecurities[securityId]
if !used {
Request([]interface{}{"sub", securityId})
usedSecurities[securityId] = true
}
}
return p
}
func isLive(st string) bool {
if st == "" {
return true
}
st = strings.ToLower(st)
return strings.HasPrefix(st, "pending") || strings.HasPrefix(st, "unconfirmed") || strings.HasPrefix(st, "partial") || st == "new" || st == "suspended"
}
func updatePos(ord *Order) {
securityId := ord.Security.Id
p := getPos(ord.Acc, securityId)
var outstand *float64
if ord.Side == "buy" {
outstand = &p.OutstandBuyQty
} else {
outstand = &p.OutstandSellQty
}
switch ord.St {
case "unconfirmed", "unconfirmed_replace":
*outstand += ord.Qty - ord.CumQty
case "filled", "partial":
if ord.LastQty > 0 && ord.Type != "otc" {
*outstand -= ord.LastQty
if *outstand < 0 {
log.Printf("Outstand < 0: %s", ord)
*outstand = 0
}
}
qty := ord.LastQty
if ord.Side == "buy" {
p.BuyQty += ord.LastQty
p.BuyValue += ord.LastQty * ord.LastPx
} else {
qty = -qty
p.SellQty += ord.LastQty
p.SellValue += ord.LastQty * ord.LastPx
}
qty0 := p.Qty
px := ord.LastPx
multiplier := ord.Security.Rate * ord.Security.Multiplier
if (qty0 > 0) && (qty < 0) { // sell trade to cover position
if qty0 > -qty {
p.RealizedPnl += (px - p.AvgPx) * -qty * multiplier
} else {
p.RealizedPnl += (px - p.AvgPx) * qty0 * multiplier
p.AvgPx = px
}
} else if (qty0 < 0) && (qty > 0) { // buy trade to cover position
if -qty0 > qty {
p.RealizedPnl += (p.AvgPx - px) * qty * multiplier
} else {
p.RealizedPnl += (p.AvgPx - px) * -qty0 * multiplier
p.AvgPx = px
}
} else { // open position
p.AvgPx = (qty0*p.AvgPx + qty*px) / (qty0 + qty)
}
p.Qty += qty
if p.Qty == 0 {
p.AvgPx = 0
}
default:
*outstand -= ord.Qty - ord.CumQty
if *outstand < 0 {
log.Printf("Outstand < 0: %s", ord)
*outstand = 0
}
}
}
var seqNum int64 = 0
var offlineDone = false
var onlineCache [][]interface{}
func ParseOffline(msg []interface{}) {
if msg[1].(string) == "complete" {
for _, msg := range onlineCache {
ParseOrder(msg, false)
}
onlineCache = onlineCache[:0]
offlineDone = true
log.Print("offline done")
}
}
func ParseOrder(msg []interface{}, isOnline bool) {
if isOnline && !offlineDone {
onlineCache = append(onlineCache, msg)
return
}
clOrdId := int64(msg[1].(float64))
// tm := int64(msg[2].(float64))
seq := int64(msg[3].(float64))
if seq <= seqNum {
return
}
seqNum = seq
switch st := msg[4].(string); st {
case "unconfirmed", "unconfirmed_replace":
securityId := int64(msg[5].(float64))
security := SecurityMapById[securityId]
if security == nil {
log.Println("not found security", securityId)
return
}
// aid := int(msg[6].(float64))
// userId := int(msg[7].(float64))
acc := int(msg[8].(float64))
// brokerAcc := int(msg[9].(float64))
qty := msg[10].(float64)
px := msg[11].(float64)
side := msg[12].(string)
// ordType := msg[13].(string)
// tif := msg[14].(string)
ord := Order{
Id: clOrdId,
St: st,
Security: security,
Acc: acc,
Qty: qty,
Px: px,
Side: side,
}
if st == "unconfirmed_replace" {
origClOrdId := int64(msg[14].(float64))
ord.OrigClOrdId = origClOrdId
}
orders[clOrdId] = &ord
updatePos(&ord)
case "filled", "partial":
qty := msg[5].(float64)
px := msg[6].(float64)
// tradeId = msg[7].(string)
execTransType := msg[8].(string)
if execTransType == "cancel" {
qty = -qty
}
ord := orders[clOrdId]
if ord != nil {
ord.AvgPx = (ord.CumQty*ord.AvgPx + qty*px) / (ord.CumQty + qty)
ord.CumQty += qty
ord.CumQty = math.Round(ord.CumQty*1e6) / 1e6
if ord.CumQty > ord.Qty {
log.Printf("overfill found: %s", msg)
}
ord.LastQty = qty
ord.LastPx = px
if ord.CumQty >= ord.Qty {
st = "filled"
} else {
st = "partial"
}
ord.St = st
updatePos(ord)
} else {
log.Println("not found order for", clOrdId)
}
case "canceled":
case "cancelled":
case "expired":
case "done_for_day":
case "calculated":
ord := orders[clOrdId]
if ord != nil {
st0 := ord.St
ord.St = st
if isLive(st0) {
updatePos(ord)
}
} else {
log.Println("can not find order for", clOrdId)
}
case "new", "pending", "replaced", "suspended":
ord := orders[clOrdId]
if ord != nil {
if st == "replaced" {
old := orders[ord.OrigClOrdId]
if old == nil {
log.Println("can not find order for", ord.OrigClOrdId)
} else {
old.St = st
}
st = "confirmed"
}
ord.St = st
} else {
log.Println("can not find order for", clOrdId)
}
case "new_rejected", "replace_rejected":
ord := orders[clOrdId]
if ord != nil {
st0 := ord.St
ord.St = st
if isLive(st0) {
updatePos(ord)
}
} else {
log.Println("can not find order for", clOrdId)
}
case "risk_rejected":
ord := orders[clOrdId]
if ord != nil {
ord.St = st
updatePos(ord)
}
}
}
func ParseTarget(msg []interface{}) |
func ParseBod(msg []interface{}) {
acc := int(msg[1].(float64))
securityId := int64(msg[2].(float64))
qty := msg[3].(float64)
avgPx := msg[4].(float64)
commission := msg[5].(float64)
realizedPnl := msg[6].(float64)
// brokerAcc := int(msg[7])
// tm := int64(msg[8].(float64))
p := getPos(acc, securityId)
p.Qty = qty
p.AvgPx = avgPx
p.Commission = commission
p.RealizedPnl = realizedPnl
p.Bod.Qty = qty
p.Bod.AvgPx = avgPx
p.Bod.Commission = commission
p.Bod.RealizedPnl = realizedPnl
}
func ParsePnl(msg []interface{}) {
acc := int(msg[1].(float64))
securityId := int64(msg[2].(float64))
p := getPos(acc, securityId)
// ignore unrealizedPnl and realizedPnl, which we can deduce ourselves
if len(msg) > 4 {
p.Commission = msg[4].(float64)
}
}
func ParseMd(msg []interface{}) {
for i := 1; i < len(msg); i++ {
data := msg[i].([]interface{})
securityId := int64(data[0].(float64))
md := data[1].(map[string]interface{})
for k, _v := range md {
v := _v.(float64)
s := SecurityMapById[securityId]
if s == nil {
log.Println("unknown security id", securityId)
continue
}
switch k {
case "o":
s.Open = v
case "h":
s.High = v
case "l":
s.Low = v
case "c":
s.Close = v
case "q":
s.Qty = v
case "v":
s.Vol = v
case "V":
s.Vwap = v
case "a0":
s.Ask = v
case "b0":
s.Bid = v
case "A0":
s.AskSize = v
case "B0":
s.BidSize = v
}
}
}
}
| {
acc := int(msg[1].(float64)) // msg[2] is acc name
for _, v := range Positions[acc] {
v.Target = 0.
}
if len(msg) < 4 {
return
}
if _, ok := msg[3].([]interface{}); !ok {
return
}
for _, v := range msg[3].([]interface{}) {
t := v.([]interface{})
securityId := int64(t[0].(float64))
target := t[1].(float64)
getPos(acc, securityId).Target = target
}
} | identifier_body |
orderbook.go | package main
import (
"log"
"math"
"strings"
)
type MD struct {
Open float64
High float64
Low float64
Close float64
Qty float64
Vol float64
Vwap float64
Ask float64
Bid float64
AskSize float64
BidSize float64
}
type Security struct {
Id int64
Symbol string
LocalSymbol string
Bbgid string
Cusip string
Sedol string
Isin string
Market string
Type string
LotSize float64
Multiplier float64
PrevClose float64
Rate float64 // currency rate
Currency string
Adv20 float64
MarketCap float64
Sector string
IndustryGroup string
Industry string
SubIndustry string
MD
}
func (s *Security) GetClose() float64 {
close := s.Close
if close <= 0 {
close = s.PrevClose
}
return close
}
var SecurityMapById = make(map[int64]*Security)
var SecurityMapByMarket = make(map[string]map[string]*Security)
func ParseSecurity(msg []interface{}) {
sec := &Security{
Id: int64(msg[1].(float64)),
Symbol: msg[2].(string),
Market: msg[3].(string),
Type: msg[4].(string),
LotSize: msg[5].(float64),
Multiplier: msg[6].(float64),
Currency: msg[7].(string),
Rate: msg[8].(float64),
PrevClose: msg[9].(float64),
LocalSymbol: msg[10].(string),
Adv20: msg[11].(float64),
MarketCap: msg[12].(float64),
Sector: msg[13].(string),
IndustryGroup: msg[14].(string),
Industry: msg[15].(string),
SubIndustry: msg[16].(string),
Bbgid: msg[17].(string),
Cusip: msg[18].(string),
Sedol: msg[19].(string),
Isin: msg[20].(string),
}
if sec.Market == "CURRENCY" |
if sec.Multiplier <= 0 {
sec.Multiplier = 1
}
if sec.Rate <= 0 {
sec.Rate = 1
}
sec0 := SecurityMapById[sec.Id]
if sec0 == nil {
SecurityMapById[sec.Id] = sec
} else {
*sec0 = *sec
sec = sec0
}
tmp := SecurityMapByMarket[sec.Market]
if tmp == nil {
tmp = make(map[string]*Security)
SecurityMapByMarket[sec.Market] = tmp
}
tmp[sec.Symbol] = sec
}
type Order struct {
Id int64
OrigClOrdId int64
// Tm int64
// Seq int64
St string
Security *Security
// UserId int
Acc int
Qty float64
Px float64
Side string
Type string
// Tif string
CumQty float64
AvgPx float64
LastQty float64
LastPx float64
}
var orders = make(map[int64]*Order)
type PositionBase struct {
Qty float64
AvgPx float64
Commission float64
RealizedPnl float64
}
type Position struct {
PositionBase
Bod PositionBase
OutstandBuyQty float64
OutstandSellQty float64
BuyQty float64
BuyValue float64
SellQty float64
SellValue float64
Security *Security
Acc int
Target float64
}
var Positions = make(map[int]map[int64]*Position)
var usedSecurities = make(map[int64]bool)
func getPos(acc int, securityId int64) *Position {
tmp := Positions[acc]
if tmp == nil {
tmp = make(map[int64]*Position)
Positions[acc] = tmp
}
p := tmp[securityId]
if p == nil {
p = &Position{}
p.Acc = acc
p.Security = SecurityMapById[securityId]
if p.Security == nil {
log.Println("unknown securityId", securityId)
return p
}
tmp[securityId] = p
used := usedSecurities[securityId]
if !used {
Request([]interface{}{"sub", securityId})
usedSecurities[securityId] = true
}
}
return p
}
func isLive(st string) bool {
if st == "" {
return true
}
st = strings.ToLower(st)
return strings.HasPrefix(st, "pending") || strings.HasPrefix(st, "unconfirmed") || strings.HasPrefix(st, "partial") || st == "new" || st == "suspended"
}
func updatePos(ord *Order) {
securityId := ord.Security.Id
p := getPos(ord.Acc, securityId)
var outstand *float64
if ord.Side == "buy" {
outstand = &p.OutstandBuyQty
} else {
outstand = &p.OutstandSellQty
}
switch ord.St {
case "unconfirmed", "unconfirmed_replace":
*outstand += ord.Qty - ord.CumQty
case "filled", "partial":
if ord.LastQty > 0 && ord.Type != "otc" {
*outstand -= ord.LastQty
if *outstand < 0 {
log.Printf("Outstand < 0: %s", ord)
*outstand = 0
}
}
qty := ord.LastQty
if ord.Side == "buy" {
p.BuyQty += ord.LastQty
p.BuyValue += ord.LastQty * ord.LastPx
} else {
qty = -qty
p.SellQty += ord.LastQty
p.SellValue += ord.LastQty * ord.LastPx
}
qty0 := p.Qty
px := ord.LastPx
multiplier := ord.Security.Rate * ord.Security.Multiplier
if (qty0 > 0) && (qty < 0) { // sell trade to cover position
if qty0 > -qty {
p.RealizedPnl += (px - p.AvgPx) * -qty * multiplier
} else {
p.RealizedPnl += (px - p.AvgPx) * qty0 * multiplier
p.AvgPx = px
}
} else if (qty0 < 0) && (qty > 0) { // buy trade to cover position
if -qty0 > qty {
p.RealizedPnl += (p.AvgPx - px) * qty * multiplier
} else {
p.RealizedPnl += (p.AvgPx - px) * -qty0 * multiplier
p.AvgPx = px
}
} else { // open position
p.AvgPx = (qty0*p.AvgPx + qty*px) / (qty0 + qty)
}
p.Qty += qty
if p.Qty == 0 {
p.AvgPx = 0
}
default:
*outstand -= ord.Qty - ord.CumQty
if *outstand < 0 {
log.Printf("Outstand < 0: %s", ord)
*outstand = 0
}
}
}
var seqNum int64 = 0
var offlineDone = false
var onlineCache [][]interface{}
func ParseOffline(msg []interface{}) {
if msg[1].(string) == "complete" {
for _, msg := range onlineCache {
ParseOrder(msg, false)
}
onlineCache = onlineCache[:0]
offlineDone = true
log.Print("offline done")
}
}
func ParseOrder(msg []interface{}, isOnline bool) {
if isOnline && !offlineDone {
onlineCache = append(onlineCache, msg)
return
}
clOrdId := int64(msg[1].(float64))
// tm := int64(msg[2].(float64))
seq := int64(msg[3].(float64))
if seq <= seqNum {
return
}
seqNum = seq
switch st := msg[4].(string); st {
case "unconfirmed", "unconfirmed_replace":
securityId := int64(msg[5].(float64))
security := SecurityMapById[securityId]
if security == nil {
log.Println("not found security", securityId)
return
}
// aid := int(msg[6].(float64))
// userId := int(msg[7].(float64))
acc := int(msg[8].(float64))
// brokerAcc := int(msg[9].(float64))
qty := msg[10].(float64)
px := msg[11].(float64)
side := msg[12].(string)
// ordType := msg[13].(string)
// tif := msg[14].(string)
ord := Order{
Id: clOrdId,
St: st,
Security: security,
Acc: acc,
Qty: qty,
Px: px,
Side: side,
}
if st == "unconfirmed_replace" {
origClOrdId := int64(msg[14].(float64))
ord.OrigClOrdId = origClOrdId
}
orders[clOrdId] = &ord
updatePos(&ord)
case "filled", "partial":
qty := msg[5].(float64)
px := msg[6].(float64)
// tradeId = msg[7].(string)
execTransType := msg[8].(string)
if execTransType == "cancel" {
qty = -qty
}
ord := orders[clOrdId]
if ord != nil {
ord.AvgPx = (ord.CumQty*ord.AvgPx + qty*px) / (ord.CumQty + qty)
ord.CumQty += qty
ord.CumQty = math.Round(ord.CumQty*1e6) / 1e6
if ord.CumQty > ord.Qty {
log.Printf("overfill found: %s", msg)
}
ord.LastQty = qty
ord.LastPx = px
if ord.CumQty >= ord.Qty {
st = "filled"
} else {
st = "partial"
}
ord.St = st
updatePos(ord)
} else {
log.Println("not found order for", clOrdId)
}
case "canceled":
case "cancelled":
case "expired":
case "done_for_day":
case "calculated":
ord := orders[clOrdId]
if ord != nil {
st0 := ord.St
ord.St = st
if isLive(st0) {
updatePos(ord)
}
} else {
log.Println("can not find order for", clOrdId)
}
case "new", "pending", "replaced", "suspended":
ord := orders[clOrdId]
if ord != nil {
if st == "replaced" {
old := orders[ord.OrigClOrdId]
if old == nil {
log.Println("can not find order for", ord.OrigClOrdId)
} else {
old.St = st
}
st = "confirmed"
}
ord.St = st
} else {
log.Println("can not find order for", clOrdId)
}
case "new_rejected", "replace_rejected":
ord := orders[clOrdId]
if ord != nil {
st0 := ord.St
ord.St = st
if isLive(st0) {
updatePos(ord)
}
} else {
log.Println("can not find order for", clOrdId)
}
case "risk_rejected":
ord := orders[clOrdId]
if ord != nil {
ord.St = st
updatePos(ord)
}
}
}
func ParseTarget(msg []interface{}) {
acc := int(msg[1].(float64)) // msg[2] is acc name
for _, v := range Positions[acc] {
v.Target = 0.
}
if len(msg) < 4 {
return
}
if _, ok := msg[3].([]interface{}); !ok {
return
}
for _, v := range msg[3].([]interface{}) {
t := v.([]interface{})
securityId := int64(t[0].(float64))
target := t[1].(float64)
getPos(acc, securityId).Target = target
}
}
func ParseBod(msg []interface{}) {
acc := int(msg[1].(float64))
securityId := int64(msg[2].(float64))
qty := msg[3].(float64)
avgPx := msg[4].(float64)
commission := msg[5].(float64)
realizedPnl := msg[6].(float64)
// brokerAcc := int(msg[7])
// tm := int64(msg[8].(float64))
p := getPos(acc, securityId)
p.Qty = qty
p.AvgPx = avgPx
p.Commission = commission
p.RealizedPnl = realizedPnl
p.Bod.Qty = qty
p.Bod.AvgPx = avgPx
p.Bod.Commission = commission
p.Bod.RealizedPnl = realizedPnl
}
func ParsePnl(msg []interface{}) {
acc := int(msg[1].(float64))
securityId := int64(msg[2].(float64))
p := getPos(acc, securityId)
// ignore unrealizedPnl and realizedPnl, which we can deduce ourselves
if len(msg) > 4 {
p.Commission = msg[4].(float64)
}
}
func ParseMd(msg []interface{}) {
for i := 1; i < len(msg); i++ {
data := msg[i].([]interface{})
securityId := int64(data[0].(float64))
md := data[1].(map[string]interface{})
for k, _v := range md {
v := _v.(float64)
s := SecurityMapById[securityId]
if s == nil {
log.Println("unknown security id", securityId)
continue
}
switch k {
case "o":
s.Open = v
case "h":
s.High = v
case "l":
s.Low = v
case "c":
s.Close = v
case "q":
s.Qty = v
case "v":
s.Vol = v
case "V":
s.Vwap = v
case "a0":
s.Ask = v
case "b0":
s.Bid = v
case "A0":
s.AskSize = v
case "B0":
s.BidSize = v
}
}
}
}
| {
sec.Market = "FX"
} | conditional_block |
orderbook.go | package main
import (
"log"
"math"
"strings"
)
type MD struct {
Open float64
High float64
Low float64
Close float64
Qty float64
Vol float64
Vwap float64
Ask float64
Bid float64
AskSize float64
BidSize float64
}
type Security struct {
Id int64
Symbol string
LocalSymbol string
Bbgid string
Cusip string
Sedol string
Isin string
Market string
Type string
LotSize float64
Multiplier float64
PrevClose float64
Rate float64 // currency rate
Currency string
Adv20 float64
MarketCap float64
Sector string
IndustryGroup string
Industry string
SubIndustry string
MD
}
func (s *Security) GetClose() float64 {
close := s.Close
if close <= 0 {
close = s.PrevClose
}
return close
}
var SecurityMapById = make(map[int64]*Security)
var SecurityMapByMarket = make(map[string]map[string]*Security)
func ParseSecurity(msg []interface{}) {
sec := &Security{
Id: int64(msg[1].(float64)),
Symbol: msg[2].(string),
Market: msg[3].(string),
Type: msg[4].(string),
LotSize: msg[5].(float64),
Multiplier: msg[6].(float64),
Currency: msg[7].(string),
Rate: msg[8].(float64),
PrevClose: msg[9].(float64),
LocalSymbol: msg[10].(string),
Adv20: msg[11].(float64),
MarketCap: msg[12].(float64),
Sector: msg[13].(string),
IndustryGroup: msg[14].(string),
Industry: msg[15].(string),
SubIndustry: msg[16].(string),
Bbgid: msg[17].(string),
Cusip: msg[18].(string),
Sedol: msg[19].(string),
Isin: msg[20].(string),
}
if sec.Market == "CURRENCY" {
sec.Market = "FX"
}
if sec.Multiplier <= 0 {
sec.Multiplier = 1
}
if sec.Rate <= 0 {
sec.Rate = 1
}
sec0 := SecurityMapById[sec.Id]
if sec0 == nil {
SecurityMapById[sec.Id] = sec
} else {
*sec0 = *sec
sec = sec0
}
tmp := SecurityMapByMarket[sec.Market]
if tmp == nil {
tmp = make(map[string]*Security)
SecurityMapByMarket[sec.Market] = tmp
}
tmp[sec.Symbol] = sec
}
type Order struct {
Id int64
OrigClOrdId int64
// Tm int64
// Seq int64
St string
Security *Security
// UserId int
Acc int
Qty float64
Px float64
Side string
Type string
// Tif string
CumQty float64
AvgPx float64
LastQty float64
LastPx float64
}
var orders = make(map[int64]*Order)
type PositionBase struct {
Qty float64
AvgPx float64
Commission float64
RealizedPnl float64
}
type Position struct {
PositionBase
Bod PositionBase
OutstandBuyQty float64
OutstandSellQty float64
BuyQty float64
BuyValue float64
SellQty float64
SellValue float64
Security *Security
Acc int
Target float64
}
var Positions = make(map[int]map[int64]*Position)
var usedSecurities = make(map[int64]bool)
func getPos(acc int, securityId int64) *Position {
tmp := Positions[acc]
if tmp == nil {
tmp = make(map[int64]*Position)
Positions[acc] = tmp
}
p := tmp[securityId]
if p == nil {
p = &Position{}
p.Acc = acc
p.Security = SecurityMapById[securityId]
if p.Security == nil {
log.Println("unknown securityId", securityId)
return p
}
tmp[securityId] = p
used := usedSecurities[securityId]
if !used {
Request([]interface{}{"sub", securityId})
usedSecurities[securityId] = true
}
}
return p
}
func isLive(st string) bool {
if st == "" {
return true
}
st = strings.ToLower(st)
return strings.HasPrefix(st, "pending") || strings.HasPrefix(st, "unconfirmed") || strings.HasPrefix(st, "partial") || st == "new" || st == "suspended"
}
func updatePos(ord *Order) {
securityId := ord.Security.Id
p := getPos(ord.Acc, securityId)
var outstand *float64
if ord.Side == "buy" {
outstand = &p.OutstandBuyQty
} else {
outstand = &p.OutstandSellQty
}
switch ord.St {
case "unconfirmed", "unconfirmed_replace":
*outstand += ord.Qty - ord.CumQty
case "filled", "partial":
if ord.LastQty > 0 && ord.Type != "otc" {
*outstand -= ord.LastQty
if *outstand < 0 {
log.Printf("Outstand < 0: %s", ord)
*outstand = 0
}
}
qty := ord.LastQty
if ord.Side == "buy" {
p.BuyQty += ord.LastQty
p.BuyValue += ord.LastQty * ord.LastPx
} else {
qty = -qty
p.SellQty += ord.LastQty
p.SellValue += ord.LastQty * ord.LastPx
}
qty0 := p.Qty
px := ord.LastPx
multiplier := ord.Security.Rate * ord.Security.Multiplier
if (qty0 > 0) && (qty < 0) { // sell trade to cover position
if qty0 > -qty {
p.RealizedPnl += (px - p.AvgPx) * -qty * multiplier
} else {
p.RealizedPnl += (px - p.AvgPx) * qty0 * multiplier
p.AvgPx = px
}
} else if (qty0 < 0) && (qty > 0) { // buy trade to cover position
if -qty0 > qty {
p.RealizedPnl += (p.AvgPx - px) * qty * multiplier
} else {
p.RealizedPnl += (p.AvgPx - px) * -qty0 * multiplier
p.AvgPx = px
}
} else { // open position
p.AvgPx = (qty0*p.AvgPx + qty*px) / (qty0 + qty)
}
p.Qty += qty
if p.Qty == 0 {
p.AvgPx = 0
}
default:
*outstand -= ord.Qty - ord.CumQty
if *outstand < 0 {
log.Printf("Outstand < 0: %s", ord)
*outstand = 0
}
}
}
var seqNum int64 = 0
var offlineDone = false
var onlineCache [][]interface{}
func ParseOffline(msg []interface{}) {
if msg[1].(string) == "complete" {
for _, msg := range onlineCache {
ParseOrder(msg, false)
}
onlineCache = onlineCache[:0]
offlineDone = true
log.Print("offline done")
}
}
func ParseOrder(msg []interface{}, isOnline bool) {
if isOnline && !offlineDone {
onlineCache = append(onlineCache, msg)
return
}
clOrdId := int64(msg[1].(float64))
// tm := int64(msg[2].(float64))
seq := int64(msg[3].(float64))
if seq <= seqNum {
return
}
seqNum = seq
switch st := msg[4].(string); st {
case "unconfirmed", "unconfirmed_replace":
securityId := int64(msg[5].(float64))
security := SecurityMapById[securityId]
if security == nil {
log.Println("not found security", securityId)
return
}
// aid := int(msg[6].(float64))
// userId := int(msg[7].(float64))
acc := int(msg[8].(float64))
// brokerAcc := int(msg[9].(float64))
qty := msg[10].(float64)
px := msg[11].(float64)
side := msg[12].(string)
// ordType := msg[13].(string)
// tif := msg[14].(string)
ord := Order{
Id: clOrdId,
St: st,
Security: security,
Acc: acc,
Qty: qty,
Px: px,
Side: side,
}
if st == "unconfirmed_replace" {
origClOrdId := int64(msg[14].(float64))
ord.OrigClOrdId = origClOrdId
}
orders[clOrdId] = &ord
updatePos(&ord)
case "filled", "partial":
qty := msg[5].(float64)
px := msg[6].(float64)
// tradeId = msg[7].(string)
execTransType := msg[8].(string)
if execTransType == "cancel" {
qty = -qty
}
ord := orders[clOrdId]
if ord != nil {
ord.AvgPx = (ord.CumQty*ord.AvgPx + qty*px) / (ord.CumQty + qty)
ord.CumQty += qty
ord.CumQty = math.Round(ord.CumQty*1e6) / 1e6
if ord.CumQty > ord.Qty {
				log.Printf("overfill found: %v", msg)
}
ord.LastQty = qty
ord.LastPx = px
if ord.CumQty >= ord.Qty {
st = "filled"
} else {
st = "partial"
}
ord.St = st
updatePos(ord)
} else {
log.Println("not found order for", clOrdId)
}
	case "canceled", "cancelled", "expired", "done_for_day", "calculated":
ord := orders[clOrdId]
if ord != nil {
st0 := ord.St
ord.St = st
if isLive(st0) {
updatePos(ord)
}
} else {
log.Println("can not find order for", clOrdId)
}
case "new", "pending", "replaced", "suspended":
ord := orders[clOrdId]
if ord != nil {
if st == "replaced" {
old := orders[ord.OrigClOrdId]
if old == nil {
log.Println("can not find order for", ord.OrigClOrdId)
} else {
old.St = st
}
st = "confirmed"
}
ord.St = st
} else {
log.Println("can not find order for", clOrdId)
		}
	case "new_rejected", "replace_rejected":
ord := orders[clOrdId]
if ord != nil {
st0 := ord.St
ord.St = st
if isLive(st0) {
updatePos(ord)
}
} else {
log.Println("can not find order for", clOrdId)
}
case "risk_rejected":
ord := orders[clOrdId]
if ord != nil {
ord.St = st
updatePos(ord)
}
}
}
func ParseTarget(msg []interface{}) {
acc := int(msg[1].(float64)) // msg[2] is acc name
for _, v := range Positions[acc] {
v.Target = 0.
}
if len(msg) < 4 {
return
}
if _, ok := msg[3].([]interface{}); !ok {
return
}
for _, v := range msg[3].([]interface{}) {
t := v.([]interface{})
securityId := int64(t[0].(float64))
target := t[1].(float64)
getPos(acc, securityId).Target = target
}
}
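// ParseBod records the beginning-of-day snapshot for an (account, security):
// quantity, average price, commission and realized PnL, stored both as the live
// values and under p.Bod for reference.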
func ParseBod(msg []interface{}) {
acc := int(msg[1].(float64))
securityId := int64(msg[2].(float64))
qty := msg[3].(float64)
avgPx := msg[4].(float64)
commission := msg[5].(float64)
realizedPnl := msg[6].(float64)
// brokerAcc := int(msg[7])
// tm := int64(msg[8].(float64))
p := getPos(acc, securityId)
p.Qty = qty
p.AvgPx = avgPx
p.Commission = commission
p.RealizedPnl = realizedPnl
p.Bod.Qty = qty
p.Bod.AvgPx = avgPx
p.Bod.Commission = commission
p.Bod.RealizedPnl = realizedPnl
}
func ParsePnl(msg []interface{}) {
acc := int(msg[1].(float64))
securityId := int64(msg[2].(float64))
p := getPos(acc, securityId)
// ignore unrealizedPnl and realizedPnl which we can deduce ourself
if len(msg) > 4 {
p.Commission = msg[4].(float64)
}
}
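// ParseMd updates cached market data; each entry maps short field keys to
// security fields: o/h/l/c = open/high/low/close, q = last qty, v = volume,
// V = vwap, a0/b0 = best ask/bid, A0/B0 = ask/bid size.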
func ParseMd(msg []interface{}) {
for i := 1; i < len(msg); i++ {
data := msg[i].([]interface{})
securityId := int64(data[0].(float64))
md := data[1].(map[string]interface{})
for k, _v := range md {
v := _v.(float64)
s := SecurityMapById[securityId]
if s == nil {
log.Println("unknown security id", securityId)
continue
}
switch k {
case "o":
s.Open = v
case "h":
s.High = v
case "l":
s.Low = v
case "c":
s.Close = v
case "q":
s.Qty = v
case "v":
s.Vol = v
case "V":
s.Vwap = v
case "a0":
s.Ask = v
case "b0":
s.Bid = v
case "A0":
s.AskSize = v
case "B0":
s.BidSize = v
}
}
}
}
|
main.rs
#[macro_use]
extern crate clap;
extern crate irb;
extern crate las;
extern crate palette;
extern crate riscan_pro;
extern crate scanifc;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate text_io;
extern crate toml;
use clap::{App, ArgMatches};
use irb::Irb;
use las::Color;
use las::point::Format;
use palette::{Gradient, Rgb};
use riscan_pro::{CameraCalibration, MountCalibration, Point, Project, ScanPosition, Socs};
use riscan_pro::scan_position::Image;
use scanifc::point3d::Stream;
use std::fmt;
use std::fs;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::u16;
fn main() {
let yaml = load_yaml!("cli.yml");
let matches = App::from_yaml(yaml).get_matches();
print!("Configuring...");
std::io::stdout().flush().unwrap();
let config = Config::new(&matches);
println!("done.");
println!("{}", config);
loop {
print!("Continue? (y/n) ");
std::io::stdout().flush().unwrap();
let answer: String = read!();
println!();
match answer.to_lowercase().as_str() {
"y" => break,
"n" => return,
_ => println!("Unknown response: {}", answer),
}
}
for scan_position in config.scan_positions() {
println!("Colorizing {}:", scan_position.name);
let translations = config.translations(scan_position);
if translations.is_empty() {
println!(" - No translations found");
} else {
for translation in translations {
println!(
" - Translation:\n - Infile: {}\n - Outfile: {}",
translation.infile.display(),
translation.outfile.display()
);
config.colorize(scan_position, &translation);
}
}
}
println!("Complete!");
}
struct Config {
image_dir: PathBuf,
keep_without_thermal: bool,
las_dir: PathBuf,
max_reflectance: f32,
min_reflectance: f32,
project: Project,
rotate: bool,
scan_position_names: Option<Vec<String>>,
sync_to_pps: bool,
temperature_gradient: Gradient<Rgb>,
use_scanpos_names: bool,
name_map: NameMap,
}
struct ImageGroup<'a> {
camera_calibration: &'a CameraCalibration,
image: &'a Image,
irb: Irb,
irb_path: PathBuf,
mount_calibration: &'a MountCalibration,
rotate: bool,
}
struct Translation {
infile: PathBuf,
outfile: PathBuf,
}
#[derive(Debug, Default, Deserialize)]
struct NameMap {
maps: Vec<FromTo>,
}
#[derive(Debug, Default, Deserialize)]
struct FromTo {
from: String,
to: String,
}
impl Config {
fn new(matches: &ArgMatches) -> Config {
use std::fs::File;
use std::io::Read;
use toml;
let project = Project::from_path(matches.value_of("PROJECT").unwrap()).unwrap();
let image_dir = PathBuf::from(matches.value_of("IMAGE_DIR").unwrap());
let las_dir = Path::new(matches.value_of("LAS_DIR").unwrap()).to_path_buf();
let min_reflectance = value_t!(matches, "min-reflectance", f32).unwrap();
let max_reflectance = value_t!(matches, "max-reflectance", f32).unwrap();
let min_temperature = value_t!(matches, "min-temperature", f32).unwrap();
let max_temperature = value_t!(matches, "max-temperature", f32).unwrap();
let min_temperature_color = Rgb::new(0.0, 0., 1.0);
let max_temperature_color = Rgb::new(1.0, 0., 0.);
let temperature_gradient = Gradient::with_domain(vec![
(min_temperature, min_temperature_color),
(max_temperature, max_temperature_color),
]);
let name_map = if let Some(name_map) = matches.value_of("name-map") {
let mut s = String::new();
File::open(name_map)
.unwrap()
.read_to_string(&mut s)
.unwrap();
toml::from_str(&s).unwrap()
} else {
NameMap::default()
};
Config {
image_dir: image_dir,
keep_without_thermal: matches.is_present("keep-without-thermal"),
las_dir: las_dir,
max_reflectance: max_reflectance,
min_reflectance: min_reflectance,
project: project,
rotate: matches.is_present("rotate"),
scan_position_names: matches.values_of("scan-position").map(|values| {
values.map(|name| name.to_string()).collect()
}),
sync_to_pps: matches.is_present("sync-to-pps"),
temperature_gradient: temperature_gradient,
use_scanpos_names: matches.is_present("use-scanpos-names"),
name_map: name_map,
}
}
fn translations(&self, scan_position: &ScanPosition) -> Vec<Translation> {
let paths = scan_position.singlescan_rxp_paths(&self.project);
if self.use_scanpos_names && paths.len() > 1 {
panic!(
"--use-scanpos-names was provided, but there are {} rxp files for scan position {}",
paths.len(),
scan_position.name
);
}
paths
.into_iter()
.map(|path| {
Translation {
outfile: self.outfile(scan_position, &path),
infile: path,
}
})
.collect()
}
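// Streams points from the rxp file, looks up a temperature for each point by
// projecting it into every thermal image of the scan position, averages the
// per-image readings, and writes a LAS point in the global coordinate system
// with the temperature encoded as color and gps_time.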
fn colorize(&self, scan_position: &ScanPosition, translation: &Translation) {
use std::f64;
let image_groups = self.image_groups(scan_position);
let stream = Stream::from_path(&translation.infile)
.sync_to_pps(self.sync_to_pps)
.open()
.unwrap();
let mut writer = las::Writer::from_path(&translation.outfile, self.las_header()).unwrap();
for point in stream {
let point = point.expect("could not read rxp point");
let socs = Point::socs(point.x, point.y, point.z);
let temperatures = image_groups
.iter()
.filter_map(|image_group| image_group.temperature(&socs))
.collect::<Vec<_>>();
let temperature = if temperatures.is_empty() {
if self.keep_without_thermal {
f64::NAN
} else {
continue;
}
} else {
temperatures.iter().sum::<f64>() / temperatures.len() as f64
};
let glcs = socs.to_prcs(scan_position.sop).to_glcs(self.project.pop);
let point = las::Point {
x: glcs.x,
y: glcs.y,
z: glcs.z,
intensity: self.to_intensity(point.reflectance),
color: Some(self.to_color(temperature as f32)),
gps_time: Some(temperature),
..Default::default()
};
writer.write(point).expect("could not write las point");
}
}
fn scan_positions(&self) -> Vec<&ScanPosition> {
let mut scan_positions: Vec<_> = if let Some(names) = self.scan_position_names.as_ref() {
names
.iter()
.map(|name| self.project.scan_positions.get(name).unwrap())
.collect()
} else {
self.project.scan_positions.values().collect()
};
scan_positions.sort_by_key(|s| &s.name);
scan_positions
}
fn to_color(&self, n: f32) -> Color {
let color = self.temperature_gradient.get(n);
Color {
red: (u16::MAX as f32 * color.red) as u16,
green: (u16::MAX as f32 * color.green) as u16,
blue: (u16::MAX as f32 * color.blue) as u16,
}
}
fn to_intensity(&self, n: f32) -> u16 {
(u16::MAX as f32 * (n - self.min_reflectance) /
(self.max_reflectance - self.min_reflectance)) as u16
}
fn las_header(&self) -> las::Header {
let mut header = las::Header::default();
header.point_format = Format::new(3).unwrap();
header.transforms = las::Vector {
x: las::Transform {
scale: 0.001,
offset: self.project.pop[(0, 3)],
},
y: las::Transform {
scale: 0.001,
offset: self.project.pop[(1, 3)],
},
z: las::Transform {
scale: 0.001,
offset: self.project.pop[(2, 3)],
},
};
header
}
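// Scans <image_dir>/<scan position name> for *.irb files and pairs each one with
// its RiSCAN image, camera calibration and mount calibration; a missing
// directory yields an empty list.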
fn image_groups<'a>(&'a self, scan_position: &'a ScanPosition) -> Vec<ImageGroup<'a>> {
let mut image_dir = self.image_dir.clone();
image_dir.push(&scan_position.name);
match fs::read_dir(image_dir) {
Ok(read_dir) => {
read_dir
.enumerate()
.filter_map(|(i, entry)| {
let entry = entry.unwrap();
if entry.path().extension().map(|e| e == "irb").unwrap_or(
false,
)
{
let image = if let Some(name) = self.name_map(scan_position) {
let image_name = format!("{} - Image{:03}", name, i + 1);
scan_position.images.get(&image_name).expect(&format!(
"Could not find image {}",
image_name
))
} else {
scan_position.image_from_path(entry.path()).unwrap()
};
let irb = Irb::from_path(entry.path().to_string_lossy().as_ref())
.unwrap();
let camera_calibration =
image.camera_calibration(&self.project).unwrap();
let mount_calibration = image.mount_calibration(&self.project).unwrap();
Some(ImageGroup {
camera_calibration: camera_calibration,
image: image,
irb: irb,
irb_path: entry.path(),
mount_calibration: mount_calibration,
rotate: self.rotate,
})
} else {
None
}
})
.collect()
}
Err(err) => {
use std::io::ErrorKind;
match err.kind() {
ErrorKind::NotFound => Vec::new(),
_ => panic!("io error: {}", err),
}
}
}
}
fn outfile<P: AsRef<Path>>(&self, scan_position: &ScanPosition, infile: P) -> PathBuf {
let mut outfile = self.las_dir.clone();
if self.use_scanpos_names {
outfile.push(Path::new(&scan_position.name).with_extension("las"));
} else {
outfile.push(infile.as_ref().with_extension("las").file_name().unwrap());
}
outfile
}
fn name_map(&self, scan_position: &ScanPosition) -> Option<&str> {
self.name_map
.maps
.iter()
.find(|map| map.from == scan_position.name)
.map(|map| map.to.as_str())
}
}
impl fmt::Display for Config {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
writeln!(f, "Configuration:")?;
writeln!(f, " - project: {}", self.project.path.display())?;
writeln!(f, " - image dir: {}", self.image_dir.display())?;
writeln!(f, " - las dir: {}", self.las_dir.display())?;
writeln!(f, " - scan positions:")?;
for scan_position in self.scan_positions() {
writeln!(f, " - name: {}", scan_position.name)?;
let image_groups = self.image_groups(scan_position);
if image_groups.is_empty() {
writeln!(f, " - no images for this scan position")?;
} else {
writeln!(f, " - images:")?;
for image_group in image_groups {
writeln!(f, " - {}", image_group.irb_path.display())?;
}
}
}
Ok(())
}
}
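// Projects a point from the scanner coordinate system into the camera, samples
// the IRB image at the resulting pixel (swapping the coordinates when the rotate
// option is set), and converts the reading from Kelvin to degrees Celsius.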
impl<'a> ImageGroup<'a> {
fn temperature(&self, socs: &Point<Socs>) -> Option<f64> {
let cmcs = socs.to_cmcs(self.image.cop, self.mount_calibration);
self.camera_calibration.cmcs_to_ics(&cmcs).map(|(mut u,
mut v)| {
if self.rotate {
let new_u = self.camera_calibration.height as f64 - v;
v = u;
u = new_u;
}
self.irb
.temperature(u.trunc() as i32, v.trunc() as i32)
.expect("error when retrieving temperature") - 273.15
})
}
}
main.rs
#[macro_use]
extern crate clap;
extern crate irb;
extern crate las;
extern crate palette;
extern crate riscan_pro;
extern crate scanifc;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate text_io;
extern crate toml;
use clap::{App, ArgMatches};
use irb::Irb;
use las::Color;
use las::point::Format;
use palette::{Gradient, Rgb};
use riscan_pro::{CameraCalibration, MountCalibration, Point, Project, ScanPosition, Socs};
use riscan_pro::scan_position::Image;
use scanifc::point3d::Stream;
use std::fmt;
use std::fs;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::u16;
fn main() {
let yaml = load_yaml!("cli.yml");
let matches = App::from_yaml(yaml).get_matches();
print!("Configuring...");
std::io::stdout().flush().unwrap();
let config = Config::new(&matches);
println!("done.");
println!("{}", config);
loop {
print!("Continue? (y/n) ");
std::io::stdout().flush().unwrap();
let answer: String = read!();
println!();
match answer.to_lowercase().as_str() {
"y" => break,
"n" => return,
_ => println!("Unknown response: {}", answer),
}
}
for scan_position in config.scan_positions() {
println!("Colorizing {}:", scan_position.name);
let translations = config.translations(scan_position);
if translations.is_empty() {
println!(" - No translations found");
} else {
for translation in translations {
println!(
" - Translation:\n - Infile: {}\n - Outfile: {}",
translation.infile.display(),
translation.outfile.display()
);
config.colorize(scan_position, &translation);
}
}
}
println!("Complete!");
}
struct Config {
image_dir: PathBuf,
keep_without_thermal: bool,
las_dir: PathBuf,
max_reflectance: f32,
min_reflectance: f32,
project: Project,
rotate: bool,
scan_position_names: Option<Vec<String>>,
sync_to_pps: bool,
temperature_gradient: Gradient<Rgb>,
use_scanpos_names: bool,
name_map: NameMap,
}
struct ImageGroup<'a> {
camera_calibration: &'a CameraCalibration,
image: &'a Image,
irb: Irb,
irb_path: PathBuf,
mount_calibration: &'a MountCalibration,
rotate: bool,
}
struct Translation {
infile: PathBuf,
outfile: PathBuf,
}
#[derive(Debug, Default, Deserialize)]
struct NameMap {
maps: Vec<FromTo>,
}
#[derive(Debug, Default, Deserialize)]
struct FromTo {
from: String,
to: String,
}
impl Config {
fn new(matches: &ArgMatches) -> Config {
use std::fs::File;
use std::io::Read;
use toml;
let project = Project::from_path(matches.value_of("PROJECT").unwrap()).unwrap();
let image_dir = PathBuf::from(matches.value_of("IMAGE_DIR").unwrap());
let las_dir = Path::new(matches.value_of("LAS_DIR").unwrap()).to_path_buf();
let min_reflectance = value_t!(matches, "min-reflectance", f32).unwrap();
let max_reflectance = value_t!(matches, "max-reflectance", f32).unwrap();
let min_temperature = value_t!(matches, "min-temperature", f32).unwrap();
let max_temperature = value_t!(matches, "max-temperature", f32).unwrap();
let min_temperature_color = Rgb::new(0.0, 0., 1.0);
let max_temperature_color = Rgb::new(1.0, 0., 0.);
let temperature_gradient = Gradient::with_domain(vec![
(min_temperature, min_temperature_color),
(max_temperature, max_temperature_color),
]);
let name_map = if let Some(name_map) = matches.value_of("name-map") {
let mut s = String::new();
File::open(name_map)
.unwrap()
.read_to_string(&mut s)
.unwrap();
toml::from_str(&s).unwrap()
} else {
NameMap::default()
};
Config {
image_dir: image_dir,
keep_without_thermal: matches.is_present("keep-without-thermal"),
las_dir: las_dir,
max_reflectance: max_reflectance,
min_reflectance: min_reflectance,
project: project,
rotate: matches.is_present("rotate"),
scan_position_names: matches.values_of("scan-position").map(|values| {
values.map(|name| name.to_string()).collect()
}),
sync_to_pps: matches.is_present("sync-to-pps"),
temperature_gradient: temperature_gradient,
use_scanpos_names: matches.is_present("use-scanpos-names"),
name_map: name_map,
}
}
fn translations(&self, scan_position: &ScanPosition) -> Vec<Translation> {
let paths = scan_position.singlescan_rxp_paths(&self.project);
if self.use_scanpos_names && paths.len() > 1 {
panic!(
"--use-scanpos-names was provided, but there are {} rxp files for scan position {}",
paths.len(),
scan_position.name
);
}
paths
.into_iter()
.map(|path| {
Translation {
outfile: self.outfile(scan_position, &path),
infile: path,
}
})
.collect()
}
fn colorize(&self, scan_position: &ScanPosition, translation: &Translation) {
use std::f64;
let image_groups = self.image_groups(scan_position);
let stream = Stream::from_path(&translation.infile)
.sync_to_pps(self.sync_to_pps)
.open()
.unwrap();
let mut writer = las::Writer::from_path(&translation.outfile, self.las_header()).unwrap();
for point in stream {
let point = point.expect("could not read rxp point");
let socs = Point::socs(point.x, point.y, point.z);
let temperatures = image_groups
.iter()
.filter_map(|image_group| image_group.temperature(&socs))
.collect::<Vec<_>>();
let temperature = if temperatures.is_empty() {
if self.keep_without_thermal {
f64::NAN
} else {
continue;
}
} else {
temperatures.iter().sum::<f64>() / temperatures.len() as f64
};
let glcs = socs.to_prcs(scan_position.sop).to_glcs(self.project.pop);
let point = las::Point {
x: glcs.x,
y: glcs.y,
z: glcs.z,
intensity: self.to_intensity(point.reflectance),
color: Some(self.to_color(temperature as f32)),
gps_time: Some(temperature),
..Default::default()
};
writer.write(point).expect("could not write las point");
}
}
fn scan_positions(&self) -> Vec<&ScanPosition> {
let mut scan_positions: Vec<_> = if let Some(names) = self.scan_position_names.as_ref() {
names
.iter()
.map(|name| self.project.scan_positions.get(name).unwrap())
.collect()
} else {
self.project.scan_positions.values().collect()
};
scan_positions.sort_by_key(|s| &s.name);
scan_positions
}
fn to_color(&self, n: f32) -> Color {
let color = self.temperature_gradient.get(n);
Color {
red: (u16::MAX as f32 * color.red) as u16,
green: (u16::MAX as f32 * color.green) as u16,
blue: (u16::MAX as f32 * color.blue) as u16,
}
}
fn to_intensity(&self, n: f32) -> u16 {
(u16::MAX as f32 * (n - self.min_reflectance) /
(self.max_reflectance - self.min_reflectance)) as u16
}
fn las_header(&self) -> las::Header {
let mut header = las::Header::default();
header.point_format = Format::new(3).unwrap();
header.transforms = las::Vector {
x: las::Transform {
scale: 0.001,
offset: self.project.pop[(0, 3)],
},
y: las::Transform {
scale: 0.001,
offset: self.project.pop[(1, 3)],
},
z: las::Transform {
scale: 0.001,
offset: self.project.pop[(2, 3)],
},
};
header
}
fn image_groups<'a>(&'a self, scan_position: &'a ScanPosition) -> Vec<ImageGroup<'a>> {
let mut image_dir = self.image_dir.clone();
image_dir.push(&scan_position.name);
match fs::read_dir(image_dir) {
Ok(read_dir) => {
read_dir
.enumerate()
.filter_map(|(i, entry)| {
let entry = entry.unwrap();
if entry.path().extension().map(|e| e == "irb").unwrap_or(
false,
)
{
let image = if let Some(name) = self.name_map(scan_position) {
let image_name = format!("{} - Image{:03}", name, i + 1);
scan_position.images.get(&image_name).expect(&format!(
"Could not find image {}",
image_name
))
} else {
scan_position.image_from_path(entry.path()).unwrap()
};
let irb = Irb::from_path(entry.path().to_string_lossy().as_ref())
.unwrap();
let camera_calibration =
image.camera_calibration(&self.project).unwrap();
let mount_calibration = image.mount_calibration(&self.project).unwrap();
Some(ImageGroup {
camera_calibration: camera_calibration,
image: image,
irb: irb,
irb_path: entry.path(),
mount_calibration: mount_calibration,
rotate: self.rotate,
})
} else {
None
}
})
.collect()
}
Err(err) => {
use std::io::ErrorKind;
match err.kind() {
ErrorKind::NotFound => Vec::new(),
_ => panic!("io error: {}", err),
}
}
}
}
fn outfile<P: AsRef<Path>>(&self, scan_position: &ScanPosition, infile: P) -> PathBuf {
let mut outfile = self.las_dir.clone();
if self.use_scanpos_names {
outfile.push(Path::new(&scan_position.name).with_extension("las"));
} else {
outfile.push(infile.as_ref().with_extension("las").file_name().unwrap());
}
outfile
}
fn name_map(&self, scan_position: &ScanPosition) -> Option<&str> {
self.name_map
.maps
.iter()
.find(|map| map.from == scan_position.name)
.map(|map| map.to.as_str())
}
}
impl fmt::Display for Config {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
writeln!(f, "Configuration:")?;
writeln!(f, " - project: {}", self.project.path.display())?;
writeln!(f, " - image dir: {}", self.image_dir.display())?;
writeln!(f, " - las dir: {}", self.las_dir.display())?;
writeln!(f, " - scan positions:")?;
for scan_position in self.scan_positions() {
writeln!(f, " - name: {}", scan_position.name)?;
let image_groups = self.image_groups(scan_position);
if image_groups.is_empty() {
writeln!(f, " - no images for this scan position")?;
} else {
writeln!(f, " - images:")?;
for image_group in image_groups {
writeln!(f, " - {}", image_group.irb_path.display())?;
}
}
}
Ok(())
}
}
impl<'a> ImageGroup<'a> {
fn temperature(&self, socs: &Point<Socs>) -> Option<f64> {
let cmcs = socs.to_cmcs(self.image.cop, self.mount_calibration);
self.camera_calibration.cmcs_to_ics(&cmcs).map(|(mut u,
mut v)| {
if self.rotate {
let new_u = self.camera_calibration.height as f64 - v;
v = u;
u = new_u;
}
self.irb
.temperature(u.trunc() as i32, v.trunc() as i32)
.expect("error when retrieving temperature") - 273.15
})
}
}
main.rs
#[macro_use]
extern crate clap;
extern crate irb;
extern crate las;
extern crate palette;
extern crate riscan_pro;
extern crate scanifc;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate text_io;
extern crate toml;
use clap::{App, ArgMatches};
use irb::Irb;
use las::Color;
use las::point::Format;
use palette::{Gradient, Rgb};
use riscan_pro::{CameraCalibration, MountCalibration, Point, Project, ScanPosition, Socs};
use riscan_pro::scan_position::Image;
use scanifc::point3d::Stream;
use std::fmt;
use std::fs;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::u16;
fn main() {
let yaml = load_yaml!("cli.yml");
let matches = App::from_yaml(yaml).get_matches();
print!("Configuring...");
std::io::stdout().flush().unwrap();
let config = Config::new(&matches);
println!("done.");
println!("{}", config);
loop {
print!("Continue? (y/n) ");
std::io::stdout().flush().unwrap();
let answer: String = read!();
println!();
match answer.to_lowercase().as_str() {
"y" => break,
"n" => return,
_ => println!("Unknown response: {}", answer),
}
}
for scan_position in config.scan_positions() {
println!("Colorizing {}:", scan_position.name);
let translations = config.translations(scan_position);
if translations.is_empty() {
println!(" - No translations found");
} else {
for translation in translations {
println!(
" - Translation:\n - Infile: {}\n - Outfile: {}",
translation.infile.display(),
translation.outfile.display()
);
config.colorize(scan_position, &translation);
}
}
}
println!("Complete!");
}
struct Config {
image_dir: PathBuf,
keep_without_thermal: bool,
las_dir: PathBuf,
max_reflectance: f32,
min_reflectance: f32,
project: Project,
rotate: bool,
scan_position_names: Option<Vec<String>>,
sync_to_pps: bool,
temperature_gradient: Gradient<Rgb>,
use_scanpos_names: bool,
name_map: NameMap,
}
struct ImageGroup<'a> {
camera_calibration: &'a CameraCalibration,
image: &'a Image,
irb: Irb,
irb_path: PathBuf,
mount_calibration: &'a MountCalibration,
rotate: bool,
}
struct Translation {
infile: PathBuf,
outfile: PathBuf,
}
#[derive(Debug, Default, Deserialize)]
struct NameMap {
maps: Vec<FromTo>,
}
#[derive(Debug, Default, Deserialize)]
struct FromTo {
from: String,
to: String,
}
impl Config {
fn new(matches: &ArgMatches) -> Config {
use std::fs::File;
use std::io::Read;
use toml;
let project = Project::from_path(matches.value_of("PROJECT").unwrap()).unwrap();
let image_dir = PathBuf::from(matches.value_of("IMAGE_DIR").unwrap());
let las_dir = Path::new(matches.value_of("LAS_DIR").unwrap()).to_path_buf();
let min_reflectance = value_t!(matches, "min-reflectance", f32).unwrap();
let max_reflectance = value_t!(matches, "max-reflectance", f32).unwrap();
let min_temperature = value_t!(matches, "min-temperature", f32).unwrap();
let max_temperature = value_t!(matches, "max-temperature", f32).unwrap();
let min_temperature_color = Rgb::new(0.0, 0., 1.0);
let max_temperature_color = Rgb::new(1.0, 0., 0.);
let temperature_gradient = Gradient::with_domain(vec![
(min_temperature, min_temperature_color),
(max_temperature, max_temperature_color),
]);
let name_map = if let Some(name_map) = matches.value_of("name-map") {
let mut s = String::new();
File::open(name_map)
.unwrap()
.read_to_string(&mut s)
.unwrap();
toml::from_str(&s).unwrap()
} else {
NameMap::default()
};
Config {
image_dir: image_dir,
keep_without_thermal: matches.is_present("keep-without-thermal"),
las_dir: las_dir,
max_reflectance: max_reflectance,
min_reflectance: min_reflectance,
project: project,
rotate: matches.is_present("rotate"),
scan_position_names: matches.values_of("scan-position").map(|values| {
values.map(|name| name.to_string()).collect()
}),
sync_to_pps: matches.is_present("sync-to-pps"),
temperature_gradient: temperature_gradient,
use_scanpos_names: matches.is_present("use-scanpos-names"),
name_map: name_map,
}
}
fn translations(&self, scan_position: &ScanPosition) -> Vec<Translation> {
let paths = scan_position.singlescan_rxp_paths(&self.project);
if self.use_scanpos_names && paths.len() > 1 {
panic!(
"--use-scanpos-names was provided, but there are {} rxp files for scan position {}",
paths.len(),
scan_position.name
);
}
paths
.into_iter()
.map(|path| {
Translation {
outfile: self.outfile(scan_position, &path),
infile: path,
}
})
.collect()
}
fn colorize(&self, scan_position: &ScanPosition, translation: &Translation) {
use std::f64;
let image_groups = self.image_groups(scan_position);
let stream = Stream::from_path(&translation.infile)
.sync_to_pps(self.sync_to_pps)
.open()
.unwrap();
let mut writer = las::Writer::from_path(&translation.outfile, self.las_header()).unwrap();
for point in stream {
let point = point.expect("could not read rxp point");
let socs = Point::socs(point.x, point.y, point.z);
let temperatures = image_groups
.iter()
.filter_map(|image_group| image_group.temperature(&socs))
.collect::<Vec<_>>();
let temperature = if temperatures.is_empty() {
if self.keep_without_thermal {
f64::NAN
} else {
continue;
}
} else {
temperatures.iter().sum::<f64>() / temperatures.len() as f64
};
let glcs = socs.to_prcs(scan_position.sop).to_glcs(self.project.pop);
let point = las::Point {
x: glcs.x,
y: glcs.y,
z: glcs.z,
intensity: self.to_intensity(point.reflectance),
color: Some(self.to_color(temperature as f32)),
gps_time: Some(temperature),
..Default::default()
};
writer.write(point).expect("could not write las point");
}
}
fn scan_positions(&self) -> Vec<&ScanPosition> {
let mut scan_positions: Vec<_> = if let Some(names) = self.scan_position_names.as_ref() {
names
.iter()
.map(|name| self.project.scan_positions.get(name).unwrap())
.collect()
} else {
self.project.scan_positions.values().collect()
};
scan_positions.sort_by_key(|s| &s.name);
scan_positions
}
fn to_color(&self, n: f32) -> Color {
let color = self.temperature_gradient.get(n);
Color {
red: (u16::MAX as f32 * color.red) as u16,
green: (u16::MAX as f32 * color.green) as u16,
blue: (u16::MAX as f32 * color.blue) as u16,
}
}
fn to_intensity(&self, n: f32) -> u16 {
(u16::MAX as f32 * (n - self.min_reflectance) /
(self.max_reflectance - self.min_reflectance)) as u16
}
fn las_header(&self) -> las::Header {
let mut header = las::Header::default();
header.point_format = Format::new(3).unwrap();
header.transforms = las::Vector {
x: las::Transform {
scale: 0.001,
offset: self.project.pop[(0, 3)],
},
y: las::Transform {
scale: 0.001,
offset: self.project.pop[(1, 3)],
},
z: las::Transform {
scale: 0.001,
offset: self.project.pop[(2, 3)],
},
};
header
}
fn image_groups<'a>(&'a self, scan_position: &'a ScanPosition) -> Vec<ImageGroup<'a>> {
let mut image_dir = self.image_dir.clone();
image_dir.push(&scan_position.name);
match fs::read_dir(image_dir) {
Ok(read_dir) => {
read_dir
.enumerate()
.filter_map(|(i, entry)| {
let entry = entry.unwrap();
if entry.path().extension().map(|e| e == "irb").unwrap_or(
false,
)
{
let image = if let Some(name) = self.name_map(scan_position) {
let image_name = format!("{} - Image{:03}", name, i + 1);
scan_position.images.get(&image_name).expect(&format!(
"Could not find image {}",
image_name
))
} else {
scan_position.image_from_path(entry.path()).unwrap()
};
let irb = Irb::from_path(entry.path().to_string_lossy().as_ref())
.unwrap();
let camera_calibration =
image.camera_calibration(&self.project).unwrap();
let mount_calibration = image.mount_calibration(&self.project).unwrap();
Some(ImageGroup {
camera_calibration: camera_calibration,
image: image,
irb: irb,
irb_path: entry.path(),
mount_calibration: mount_calibration,
rotate: self.rotate,
})
} else {
None
}
})
.collect()
}
Err(err) => {
use std::io::ErrorKind;
match err.kind() {
ErrorKind::NotFound => Vec::new(),
_ => panic!("io error: {}", err),
}
}
}
}
fn outfile<P: AsRef<Path>>(&self, scan_position: &ScanPosition, infile: P) -> PathBuf {
let mut outfile = self.las_dir.clone();
if self.use_scanpos_names {
outfile.push(Path::new(&scan_position.name).with_extension("las"));
} else {
outfile.push(infile.as_ref().with_extension("las").file_name().unwrap());
}
outfile
}
fn name_map(&self, scan_position: &ScanPosition) -> Option<&str> {
self.name_map
.maps
.iter()
.find(|map| map.from == scan_position.name)
.map(|map| map.to.as_str())
}
}
impl fmt::Display for Config {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
writeln!(f, "Configuration:")?;
writeln!(f, " - project: {}", self.project.path.display())?;
writeln!(f, " - image dir: {}", self.image_dir.display())?;
writeln!(f, " - las dir: {}", self.las_dir.display())?;
writeln!(f, " - scan positions:")?;
for scan_position in self.scan_positions() {
writeln!(f, " - name: {}", scan_position.name)?;
let image_groups = self.image_groups(scan_position);
if image_groups.is_empty() {
writeln!(f, " - no images for this scan position")?;
} else {
writeln!(f, " - images:")?;
for image_group in image_groups {
writeln!(f, " - {}", image_group.irb_path.display())?;
}
}
}
Ok(())
}
}
impl<'a> ImageGroup<'a> {
fn temperature(&self, socs: &Point<Socs>) -> Option<f64> {
let cmcs = socs.to_cmcs(self.image.cop, self.mount_calibration);
self.camera_calibration.cmcs_to_ics(&cmcs).map(|(mut u,
mut v)| {
if self.rotate {
let new_u = self.camera_calibration.height as f64 - v;
v = u;
u = new_u;
}
self.irb
.temperature(u.trunc() as i32, v.trunc() as i32)
.expect("error when retrieving temperature") - 273.15
})
}
}
data_collection.py
import pygame
import random
import math
from password_types import PasswordTypes
from textbox import TextBox
from time import time, strftime, gmtime, sleep, mktime
import datetime
import uuid
import asyncio
import threading
import csv
import helpers
import os
from pylsl import StreamInfo, StreamOutlet, LostError
from enum import Enum
from pylsl import StreamInlet, resolve_byprop
from constants import Constants
class DataCollectionState(Enum):
MUSE_DISCONNECTED = 0
RUNNING = 1
FINISHED = 2
class DataCollection:
def __init__(self, user, mode, iterations, museID = None):
self.user = user
self.museID = museID
pygame.init()
self.width = 600
self.height = 600
pygame.display.set_caption(user + ' Data Collection Session')
self.screen = pygame.display.set_mode((self.width, self.height))
self.totalIterations = iterations
self.passwords = self.generate_passwords(mode, iterations)
self.mode = mode
self.currentPassIndex = 0
self.currentCharIndex = 0
self.donePass = False
self.inputSize = (300, 60)
self.inputPosition = (self.width/2 - self.inputSize[0]/2, self.height/2 - self.inputSize[1]/2)
font = pygame.font.Font(None, 50)
inputRect = pygame.Rect(self.inputPosition[0], self.inputPosition[1], self.inputSize[0], self.inputSize[1])
self.input = TextBox(inputRect, clear_on_enter=True, inactive_on_enter=False, font=font)
self.gameRunning = False
self.state = DataCollectionState.MUSE_DISCONNECTED # 0 = Muse Disconnected, 1 = Session Running, 2 = Finished
self.setup_marker_streaming()
self.markers = [[]] # Each item is array of 2 items - timestamp + the key which was pressed.
self.eegData = [[]] # Each item is array of timestamp + data for each channel.
self.get_eeg_stream(0.5)
self.startTime = time() # Timestamp of experiment start.
self.finishTime = 0 # Timestamp of experiment finish.
self.lastEEGSampleTime = self.startTime
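# setup_marker_streaming creates a one-channel string LSL outlet named after the
# user so keystroke markers can be recorded alongside the EEG stream.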
def setup_marker_streaming(self):
streamName = self.user + ' Training Session Markers'
self.markerInfo = StreamInfo(streamName, 'Keystroke Markers', 1, 0, 'string', str(uuid.uuid1()))
self.markerOutlet = StreamOutlet(self.markerInfo)
def get_eeg_stream(self, timeout):
eeg_inlet_streams : StreamInlet = resolve_byprop('type', 'EEG', timeout=timeout)
for stream in eeg_inlet_streams:
if self.museID == None or not stream.name().find(self.museID) == -1:
self.eegInlet = StreamInlet(stream)
self.eegTimeCorrection = self.eegInlet.time_correction()
self.state = DataCollectionState.RUNNING
self.doneCheckEEG = True
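# push_marker publishes the typed character with its timestamp on the LSL marker
# outlet and also buffers it locally for the CSV export.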
def push_marker(self, timestamp, currentChar):
self.markerOutlet.push_sample(currentChar, timestamp) # Push key marker with timestamp via LSL for other programs.
self.markers.append([timestamp, currentChar])
def pull_eeg_data(self, timeout = 0.0, max_samples = 360):
samples, timestamps = self.eegInlet.pull_chunk(timeout, max_samples) # Pull samples.
timestampCount = len(timestamps)
if(timestampCount > 0):
print('Number of samples: {0} | Time since last: {1}'.format(timestampCount, time() - self.lastEEGSampleTime))
self.lastEEGSampleTime = time()
for i in range(0, len(timestamps)):
self.eegData.append([timestamps[i]] + samples[i])
def save_data(self):
info = self.eegInlet.info()
desc = info.desc()
chanNum = info.channel_count()
channels = desc.child('channels').first_child()
channelNames = [channels.child_value('label')]
for i in range(1, chanNum):
channels = channels.next_sibling()
channelNames.append(channels.child_value('label'))
startTime = datetime.datetime.fromtimestamp(self.startTime).strftime(Constants.SESSION_FILE_DATETIME_FORMAT)
finishTime = datetime.datetime.fromtimestamp(self.finishTime).strftime(Constants.SESSION_FILE_DATETIME_FORMAT)
# Save EEG Data
fileBase = os.path.join('session_data', self.user, self.mode.name, self.user + '_' + self.mode.name + '_' + startTime + '_' + finishTime)
file = fileBase + '_EEG.csv'
helpers.ensure_dir(file)
with open(file, 'w', newline='') as csvfile:
writer = csv.writer(csvfile, delimiter=',',
quotechar='"', quoting=csv.QUOTE_MINIMAL)
writer.writerow(['timestamp'] + channelNames)
for data in self.eegData:
writer.writerow(data)
print('Saved EEG data to: ' + file)
# Save Marker Data
file = os.path.join('session_data', self.user, self.mode.name, self.user + '_' + self.mode.name + '_' + startTime + '_' + finishTime).replace(':','\ua789')
file = fileBase + '_MRK.csv'
helpers.ensure_dir(file)
with open(file, 'w', newline='') as csvfile:
writer = csv.writer(csvfile, delimiter=',',
quotechar='"', quoting=csv.QUOTE_MINIMAL)
writer.writerow(['timestamp','key marker'])
for data in self.markers:
writer.writerow(data)
print('Saved Marker data to: ' + file)
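# generate_passwords builds the list of prompts for the session. Characters are
# drawn so that each symbol of the pool appears roughly the same number of times
# across all passwords; once that balanced quota is exhausted the remaining
# characters are sampled uniformly from the full pool.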
def generate_passwords(self, mode, iterations):
passwords = [''] * iterations
if mode == PasswordTypes.PIN_FIXED_4:
length = 4
poolInit = '0123456789'
elif mode == PasswordTypes.MIXED_FIXED_8:
length = 8
poolInit = 'abcdefghijklmnopqrstuvwxyz'
pool = poolInit
# Calculate number of each character required for even distribution.
freq = math.floor(iterations * length / len(pool))
poolTrack = [freq] * len(pool) # Keeps track of how many of each letter has been used.
for i in range(iterations):
for j in range(length):
if len(poolTrack) != 0:
index = random.randint(0, len(poolTrack) - 1)
char = pool[index]
poolTrack[index] -= 1
if poolTrack[index] == 0:
poolTrack.pop(index)
pool = pool.replace(char,'')
# Once we've used the minimum required "freq" of each character, we simply do a random choice from the initial pool.
else: char = random.choice(poolInit)
passwords[i] += char.upper()
return passwords
def draw_static_ui(self):
fontPassEnt = pygame.font.Font(None, 40)
passEnt = 'Passwords Entered: '
passEntS = fontPassEnt.render(passEnt, 1, (0,0,0))
iter = str(self.currentPassIndex) + ' / ' + str(self.totalIterations)
iterS = fontPassEnt.render(iter, 1, (0,0,0))
iterOffsetX = fontPassEnt.size(iter)[0] + 10
self.screen.blit(passEntS, (self.width - iterOffsetX - fontPassEnt.size(passEnt)[0] - 10, 10))
self.screen.blit(iterS, (self.width - iterOffsetX, 10))
if self.state == DataCollectionState.RUNNING:
instruct = 'Type the password below, press ENTER when done:'
elif self.state == DataCollectionState.MUSE_DISCONNECTED:
instruct = 'Error: a Muse LSL stream must be active to continue (Muse ID: {0})'.format(self.museID)
else:
instruct = 'Finished session. This window will close in a moment.'
fontInstruct = pygame.font.Font(None, 24)
instructS = fontInstruct.render(instruct, 1, (0,0,0))
instructSize = fontInstruct.size(instruct)
self.screen.blit(instructS, (self.width/2 - instructSize[0]/2, self.height/4 - instructSize[1]/2))
def process_input(self):
for event in pygame.event.get():
if self.state == DataCollectionState.RUNNING:
currentPass = self.passwords[self.currentPassIndex]
currentChar = currentPass[self.currentCharIndex]
if event.type == pygame.KEYDOWN:
if (event.key == ord(currentChar) or event.key == ord(currentChar.lower())) and not self.donePass:
newEvent = pygame.event.Event(pygame.KEYDOWN, {'unicode': currentChar.upper(),'key': ord(currentChar.upper()), 'mod': None})
self.input.get_event(newEvent)
self.push_marker(float(time()), currentChar)
if self.currentCharIndex < len(currentPass) - 1:
self.currentCharIndex += 1
else: self.donePass = True
elif event.key == pygame.K_RETURN and self.donePass:
self.currentCharIndex = 0
self.currentPassIndex += 1
if self.currentPassIndex == self.totalIterations:
self.state = DataCollectionState.FINISHED
self.input.get_event(event)
self.donePass = False
if event.type == pygame.QUIT:
pygame.quit()
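# process_logic advances the session state machine: while the Muse is
# disconnected it keeps retrying the EEG stream lookup on a worker thread, while
# running it drains the EEG inlet, and once finished it saves the data and closes
# the window after a short delay.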
def process_logic(self):
if self.state == DataCollectionState.MUSE_DISCONNECTED:
if self.doneCheckEEG == True:
self.doneCheckEEG = False
threading.Thread(target = self.get_eeg_stream, kwargs={'timeout' : 5}).start()
elif self.state == DataCollectionState.RUNNING:
self.pull_eeg_data()
elif self.state == DataCollectionState.FINISHED:
if self.finishTime == 0:
self.finishTime = time()
self.save_data()
if time() - self.finishTime >= 3:
self.gameRunning = False
self.input.update()
def draw_password(self):
font = pygame.font.Font(None, 50)
password = self.passwords[self.currentPassIndex]
passwordS = font.render(password, 1, (0,0,0))
passwordSize = font.size(password)
self.screen.blit(passwordS, (self.inputPosition[0], self.height/2 - passwordSize[1]/2 - self.inputSize[1]))
def draw(self):
self.screen.fill((255,255,255))
self.draw_static_ui()
if self.state == DataCollectionState.RUNNING:
self.draw_password()
self.input.draw(self.screen)
pygame.display.flip()
def start(self):
self.gameRunning = True
while self.gameRunning:
self.process_input()
self.process_logic()
self.draw()
pygame.quit()
data_collection.py
import pygame
import random
import math
from password_types import PasswordTypes
from textbox import TextBox
from time import time, strftime, gmtime, sleep, mktime
import datetime
import uuid
import asyncio
import threading
import csv
import helpers
import os
from pylsl import StreamInfo, StreamOutlet, LostError
from enum import Enum
from pylsl import StreamInlet, resolve_byprop
from constants import Constants
class DataCollectionState(Enum):
MUSE_DISCONNECTED = 0
RUNNING = 1
FINISHED = 2
class DataCollection:
def __init__(self, user, mode, iterations, museID = None):
self.user = user
self.museID = museID
pygame.init()
self.width = 600
self.height = 600
pygame.display.set_caption(user + ' Data Collection Session')
self.screen = pygame.display.set_mode((self.width, self.height))
self.totalIterations = iterations
self.passwords = self.generate_passwords(mode, iterations)
self.mode = mode
self.currentPassIndex = 0
self.currentCharIndex = 0
self.donePass = False
self.inputSize = (300, 60)
self.inputPosition = (self.width/2 - self.inputSize[0]/2, self.height/2 - self.inputSize[1]/2)
font = pygame.font.Font(None, 50)
inputRect = pygame.Rect(self.inputPosition[0], self.inputPosition[1], self.inputSize[0], self.inputSize[1])
self.input = TextBox(inputRect, clear_on_enter=True, inactive_on_enter=False, font=font)
self.gameRunning = False
self.state = DataCollectionState.MUSE_DISCONNECTED # 0 = Muse Disconnected, 1 = Session Running, 2 = Finished
self.setup_marker_streaming()
self.markers = [[]] # Each item is array of 2 items - timestamp + the key which was pressed.
self.eegData = [[]] # Each item is array of timestamp + data for each channel.
self.get_eeg_stream(0.5)
self.startTime = time() # Timestamp of experiment start.
self.finishTime = 0 # Timestamp of experiment finish.
self.lastEEGSampleTime = self.startTime
| def setup_marker_streaming(self):
streamName = self.user + ' Training Session Markers'
self.markerInfo = StreamInfo(streamName, 'Keystroke Markers', 1, 0, 'string', str(uuid.uuid1()))
self.markerOutlet = StreamOutlet(self.markerInfo)
def get_eeg_stream(self, timeout):
eeg_inlet_streams : StreamInlet = resolve_byprop('type', 'EEG', timeout=timeout)
for stream in eeg_inlet_streams:
if self.museID == None or not stream.name().find(self.museID) == -1:
self.eegInlet = StreamInlet(stream)
self.eegTimeCorrection = self.eegInlet.time_correction()
self.state = DataCollectionState.RUNNING
self.doneCheckEEG = True
def push_marker(self, timestamp, currentChar):
self.markerOutlet.push_sample(currentChar, timestamp) # Push key marker with timestamp via LSL for other programs.
self.markers.append([timestamp, currentChar])
def pull_eeg_data(self, timeout = 0.0, max_samples = 360):
samples, timestamps = self.eegInlet.pull_chunk(timeout, max_samples) # Pull samples.
timestampCount = len(timestamps)
if(timestampCount > 0):
print('Number of samples: {0} | Time since last: {1}'.format(timestampCount, time() - self.lastEEGSampleTime))
self.lastEEGSampleTime = time()
for i in range(0, len(timestamps)):
self.eegData.append([timestamps[i]] + samples[i])
def save_data(self):
info = self.eegInlet.info()
desc = info.desc()
chanNum = info.channel_count()
channels = desc.child('channels').first_child()
channelNames = [channels.child_value('label')]
for i in range(1, chanNum):
channels = channels.next_sibling()
channelNames.append(channels.child_value('label'))
startTime = datetime.datetime.fromtimestamp(self.startTime).strftime(Constants.SESSION_FILE_DATETIME_FORMAT)
finishTime = datetime.datetime.fromtimestamp(self.finishTime).strftime(Constants.SESSION_FILE_DATETIME_FORMAT)
# Save EEG Data
fileBase = os.path.join('session_data', self.user, self.mode.name, self.user + '_' + self.mode.name + '_' + startTime + '_' + finishTime)
file = fileBase + '_EEG.csv'
helpers.ensure_dir(file)
with open(file, 'w', newline='') as csvfile:
writer = csv.writer(csvfile, delimiter=',',
quotechar='"', quoting=csv.QUOTE_MINIMAL)
writer.writerow(['timestamp'] + channelNames)
for data in self.eegData:
writer.writerow(data)
print('Saved EEG data to: ' + file)
# Save Marker Data
file = os.path.join('session_data', self.user, self.mode.name, self.user + '_' + self.mode.name + '_' + startTime + '_' + finishTime).replace(':','\ua789')
file = fileBase + '_MRK.csv'
helpers.ensure_dir(file)
with open(file, 'w', newline='') as csvfile:
writer = csv.writer(csvfile, delimiter=',',
quotechar='"', quoting=csv.QUOTE_MINIMAL)
writer.writerow(['timestamp','key marker'])
for data in self.markers:
writer.writerow(data)
print('Saved Marker data to: ' + file)
def generate_passwords(self, mode, iterations):
passwords = [''] * iterations
if mode == PasswordTypes.PIN_FIXED_4:
length = 4
poolInit = '0123456789'
elif mode == PasswordTypes.MIXED_FIXED_8:
length = 8
poolInit = 'abcdefghijklmnopqrstuvwxyz'
pool = poolInit
# Calculate number of each character required for even distribution.
freq = math.floor(iterations * length / len(pool))
poolTrack = [freq] * len(pool) # Keeps track of how many of each letter has been used.
for i in range(iterations):
for j in range(length):
if len(poolTrack) != 0:
index = random.randint(0, len(poolTrack) - 1)
char = pool[index]
poolTrack[index] -= 1
if poolTrack[index] == 0:
poolTrack.pop(index)
pool = pool.replace(char,'')
# Once we've used the minimum required "freq" of each character, we simply do a random choice from the initial pool.
else: char = random.choice(poolInit)
passwords[i] += char.upper()
return passwords
def draw_static_ui(self):
fontPassEnt = pygame.font.Font(None, 40)
passEnt = 'Passwords Entered: '
passEntS = fontPassEnt.render(passEnt, 1, (0,0,0))
iter = str(self.currentPassIndex) + ' / ' + str(self.totalIterations)
iterS = fontPassEnt.render(iter, 1, (0,0,0))
iterOffsetX = fontPassEnt.size(iter)[0] + 10
self.screen.blit(passEntS, (self.width - iterOffsetX - fontPassEnt.size(passEnt)[0] - 10, 10))
self.screen.blit(iterS, (self.width - iterOffsetX, 10))
if self.state == DataCollectionState.RUNNING:
instruct = 'Type the password below, press ENTER when done:'
elif self.state == DataCollectionState.MUSE_DISCONNECTED:
instruct = 'Error: a Muse LSL stream must be active to continue (Muse ID: {0})'.format(self.museID)
else:
instruct = 'Finished session. This window will close in a moment.'
fontInstruct = pygame.font.Font(None, 24)
instructS = fontInstruct.render(instruct, 1, (0,0,0))
instructSize = fontInstruct.size(instruct)
self.screen.blit(instructS, (self.width/2 - instructSize[0]/2, self.height/4 - instructSize[1]/2))
def process_input(self):
for event in pygame.event.get():
if self.state == DataCollectionState.RUNNING:
currentPass = self.passwords[self.currentPassIndex]
currentChar = currentPass[self.currentCharIndex]
if event.type == pygame.KEYDOWN:
if (event.key == ord(currentChar) or event.key == ord(currentChar.lower())) and not self.donePass:
newEvent = pygame.event.Event(pygame.KEYDOWN, {'unicode': currentChar.upper(),'key': ord(currentChar.upper()), 'mod': None})
self.input.get_event(newEvent)
self.push_marker(float(time()), currentChar)
if self.currentCharIndex < len(currentPass) - 1:
self.currentCharIndex += 1
else: self.donePass = True
elif event.key == pygame.K_RETURN and self.donePass:
self.currentCharIndex = 0
self.currentPassIndex += 1
if self.currentPassIndex == self.totalIterations:
self.state = DataCollectionState.FINISHED
self.input.get_event(event)
self.donePass = False
if event.type == pygame.QUIT:
pygame.quit()
def process_logic(self):
if self.state == DataCollectionState.MUSE_DISCONNECTED:
if self.doneCheckEEG == True:
self.doneCheckEEG = False
threading.Thread(target = self.get_eeg_stream, kwargs={'timeout' : 5}).start()
elif self.state == DataCollectionState.RUNNING:
self.pull_eeg_data()
elif self.state == DataCollectionState.FINISHED:
if self.finishTime == 0:
self.finishTime = time()
self.save_data()
if time() - self.finishTime >= 3:
self.gameRunning = False
self.input.update()
def draw_password(self):
font = pygame.font.Font(None, 50)
password = self.passwords[self.currentPassIndex]
passwordS = font.render(password, 1, (0,0,0))
passwordSize = font.size(password)
self.screen.blit(passwordS, (self.inputPosition[0], self.height/2 - passwordSize[1]/2 - self.inputSize[1]))
def draw(self):
self.screen.fill((255,255,255))
self.draw_static_ui()
if self.state == DataCollectionState.RUNNING:
self.draw_password()
self.input.draw(self.screen)
pygame.display.flip()
def start(self):
self.gameRunning = True
while self.gameRunning:
self.process_input()
self.process_logic()
self.draw()
pygame.quit()
|
data_collection.py
import pygame
import random
import math
from password_types import PasswordTypes
from textbox import TextBox
from time import time, strftime, gmtime, sleep, mktime
import datetime
import uuid
import asyncio
import threading
import csv
import helpers
import os
from pylsl import StreamInfo, StreamOutlet, LostError
from enum import Enum
from pylsl import StreamInlet, resolve_byprop
from constants import Constants
class DataCollectionState(Enum):
MUSE_DISCONNECTED = 0
RUNNING = 1
FINISHED = 2
class DataCollection:
def __init__(self, user, mode, iterations, museID = None):
self.user = user
self.museID = museID
pygame.init()
self.width = 600
self.height = 600
pygame.display.set_caption(user + ' Data Collection Session')
self.screen = pygame.display.set_mode((self.width, self.height))
self.totalIterations = iterations
self.passwords = self.generate_passwords(mode, iterations)
self.mode = mode
self.currentPassIndex = 0
self.currentCharIndex = 0
self.donePass = False
self.inputSize = (300, 60)
self.inputPosition = (self.width/2 - self.inputSize[0]/2, self.height/2 - self.inputSize[1]/2)
font = pygame.font.Font(None, 50)
inputRect = pygame.Rect(self.inputPosition[0], self.inputPosition[1], self.inputSize[0], self.inputSize[1])
self.input = TextBox(inputRect, clear_on_enter=True, inactive_on_enter=False, font=font)
self.gameRunning = False
self.state = DataCollectionState.MUSE_DISCONNECTED # 0 = Muse Disconnected, 1 = Session Running, 2 = Finished
self.setup_marker_streaming()
self.markers = [[]] # Each item is array of 2 items - timestamp + the key which was pressed.
self.eegData = [[]] # Each item is array of timestamp + data for each channel.
self.get_eeg_stream(0.5)
self.startTime = time() # Timestamp of experiment start.
self.finishTime = 0 # Timestamp of experiment finish.
self.lastEEGSampleTime = self.startTime
def setup_marker_streaming(self):
streamName = self.user + ' Training Session Markers'
self.markerInfo = StreamInfo(streamName, 'Keystroke Markers', 1, 0, 'string', str(uuid.uuid1()))
self.markerOutlet = StreamOutlet(self.markerInfo)
def get_eeg_stream(self, timeout):
eeg_inlet_streams : StreamInlet = resolve_byprop('type', 'EEG', timeout=timeout)
for stream in eeg_inlet_streams:
if self.museID == None or not stream.name().find(self.museID) == -1:
self.eegInlet = StreamInlet(stream)
self.eegTimeCorrection = self.eegInlet.time_correction()
self.state = DataCollectionState.RUNNING
self.doneCheckEEG = True
def push_marker(self, timestamp, currentChar):
self.markerOutlet.push_sample(currentChar, timestamp) # Push key marker with timestamp via LSL for other programs.
self.markers.append([timestamp, currentChar])
def pull_eeg_data(self, timeout = 0.0, max_samples = 360):
samples, timestamps = self.eegInlet.pull_chunk(timeout, max_samples) # Pull samples.
timestampCount = len(timestamps)
if(timestampCount > 0):
print('Number of samples: {0} | Time since last: {1}'.format(timestampCount, time() - self.lastEEGSampleTime))
self.lastEEGSampleTime = time()
for i in range(0, len(timestamps)):
self.eegData.append([timestamps[i]] + samples[i])
def save_data(self):
info = self.eegInlet.info()
desc = info.desc()
chanNum = info.channel_count()
channels = desc.child('channels').first_child()
channelNames = [channels.child_value('label')]
for i in range(1, chanNum):
channels = channels.next_sibling()
channelNames.append(channels.child_value('label'))
startTime = datetime.datetime.fromtimestamp(self.startTime).strftime(Constants.SESSION_FILE_DATETIME_FORMAT)
finishTime = datetime.datetime.fromtimestamp(self.finishTime).strftime(Constants.SESSION_FILE_DATETIME_FORMAT)
# Save EEG Data
fileBase = os.path.join('session_data', self.user, self.mode.name, self.user + '_' + self.mode.name + '_' + startTime + '_' + finishTime)
file = fileBase + '_EEG.csv'
helpers.ensure_dir(file)
with open(file, 'w', newline='') as csvfile:
writer = csv.writer(csvfile, delimiter=',',
quotechar='"', quoting=csv.QUOTE_MINIMAL)
writer.writerow(['timestamp'] + channelNames)
for data in self.eegData:
writer.writerow(data)
print('Saved EEG data to: ' + file)
# Save Marker Data
file = os.path.join('session_data', self.user, self.mode.name, self.user + '_' + self.mode.name + '_' + startTime + '_' + finishTime).replace(':','\ua789')
file = fileBase + '_MRK.csv'
helpers.ensure_dir(file)
with open(file, 'w', newline='') as csvfile:
writer = csv.writer(csvfile, delimiter=',',
quotechar='"', quoting=csv.QUOTE_MINIMAL)
writer.writerow(['timestamp','key marker'])
for data in self.markers:
writer.writerow(data)
print('Saved Marker data to: ' + file)
def generate_passwords(self, mode, iterations):
passwords = [''] * iterations
if mode == PasswordTypes.PIN_FIXED_4:
length = 4
poolInit = '0123456789'
elif mode == PasswordTypes.MIXED_FIXED_8:
length = 8
poolInit = 'abcdefghijklmnopqrstuvwxyz'
pool = poolInit
# Calculate number of each character required for even distribution.
freq = math.floor(iterations * length / len(pool))
poolTrack = [freq] * len(pool) # Keeps track of how many of each letter has been used.
for i in range(iterations):
for j in range(length):
if len(poolTrack) != 0:
index = random.randint(0, len(poolTrack) - 1)
char = pool[index]
poolTrack[index] -= 1
if poolTrack[index] == 0:
poolTrack.pop(index)
pool = pool.replace(char,'')
# Once we've used the minimum required "freq" of each character, we simply do a random choice from the initial pool.
else: char = random.choice(poolInit)
passwords[i] += char.upper()
return passwords
def draw_static_ui(self):
fontPassEnt = pygame.font.Font(None, 40)
passEnt = 'Passwords Entered: '
passEntS = fontPassEnt.render(passEnt, 1, (0,0,0))
iter = str(self.currentPassIndex) + ' / ' + str(self.totalIterations)
iterS = fontPassEnt.render(iter, 1, (0,0,0))
iterOffsetX = fontPassEnt.size(iter)[0] + 10
self.screen.blit(passEntS, (self.width - iterOffsetX - fontPassEnt.size(passEnt)[0] - 10, 10))
self.screen.blit(iterS, (self.width - iterOffsetX, 10))
if self.state == DataCollectionState.RUNNING:
instruct = 'Type the password below, press ENTER when done:'
elif self.state == DataCollectionState.MUSE_DISCONNECTED:
instruct = 'Error: a Muse LSL stream must be active to continue (Muse ID: {0})'.format(self.museID)
else:
instruct = 'Finished session. This window will close in a moment.'
fontInstruct = pygame.font.Font(None, 24)
instructS = fontInstruct.render(instruct, 1, (0,0,0))
instructSize = fontInstruct.size(instruct)
self.screen.blit(instructS, (self.width/2 - instructSize[0]/2, self.height/4 - instructSize[1]/2))
def process_input(self):
for event in pygame.event.get():
if self.state == DataCollectionState.RUNNING:
currentPass = self.passwords[self.currentPassIndex]
currentChar = currentPass[self.currentCharIndex]
if event.type == pygame.KEYDOWN:
if (event.key == ord(currentChar) or event.key == ord(currentChar.lower())) and not self.donePass:
newEvent = pygame.event.Event(pygame.KEYDOWN, {'unicode': currentChar.upper(),'key': ord(currentChar.upper()), 'mod': None})
self.input.get_event(newEvent)
self.push_marker(float(time()), currentChar)
if self.currentCharIndex < len(currentPass) - 1:
self.currentCharIndex += 1
else: self.donePass = True
elif event.key == pygame.K_RETURN and self.donePass:
self.currentCharIndex = 0
self.currentPassIndex += 1
if self.currentPassIndex == self.totalIterations:
self.state = DataCollectionState.FINISHED
self.input.get_event(event)
self.donePass = False
if event.type == pygame.QUIT:
pygame.quit()
def process_logic(self):
if self.state == DataCollectionState.MUSE_DISCONNECTED:
if self.doneCheckEEG == True:
self.doneCheckEEG = False
threading.Thread(target = self.get_eeg_stream, kwargs={'timeout' : 5}).start()
elif self.state == DataCollectionState.RUNNING:
self.pull_eeg_data()
elif self.state == DataCollectionState.FINISHED:
if self.finishTime == 0:
self.finishTime = time()
self.save_data()
if time() - self.finishTime >= 3:
self.gameRunning = False
self.input.update()
def draw_password(self):
font = pygame.font.Font(None, 50)
password = self.passwords[self.currentPassIndex]
passwordS = font.render(password, 1, (0,0,0))
passwordSize = font.size(password)
self.screen.blit(passwordS, (self.inputPosition[0], self.height/2 - passwordSize[1]/2 - self.inputSize[1]))
def draw(self):
self.screen.fill((255,255,255))
self.draw_static_ui()
if self.state == DataCollectionState.RUNNING:
self.draw_password()
self.input.draw(self.screen)
pygame.display.flip()
def start(self):
self.gameRunning = True
while self.gameRunning:
self.process_input()
self.process_logic()
self.draw()
pygame.quit()
data_collection.py
import pygame
import random
import math
from password_types import PasswordTypes
from textbox import TextBox
from time import time, strftime, gmtime, sleep, mktime
import datetime
import uuid
import asyncio
import threading
import csv
import helpers
import os
from pylsl import StreamInfo, StreamOutlet, LostError
from enum import Enum
from pylsl import StreamInlet, resolve_byprop
from constants import Constants
class DataCollectionState(Enum):
MUSE_DISCONNECTED = 0
RUNNING = 1
FINISHED = 2
class DataCollection:
| def __init__(self, user, mode, iterations, museID = None):
self.user = user
self.museID = museID
pygame.init()
self.width = 600
self.height = 600
pygame.display.set_caption(user + ' Data Collection Session')
self.screen = pygame.display.set_mode((self.width, self.height))
self.totalIterations = iterations
self.passwords = self.generate_passwords(mode, iterations)
self.mode = mode
self.currentPassIndex = 0
self.currentCharIndex = 0
self.donePass = False
self.inputSize = (300, 60)
self.inputPosition = (self.width/2 - self.inputSize[0]/2, self.height/2 - self.inputSize[1]/2)
font = pygame.font.Font(None, 50)
inputRect = pygame.Rect(self.inputPosition[0], self.inputPosition[1], self.inputSize[0], self.inputSize[1])
self.input = TextBox(inputRect, clear_on_enter=True, inactive_on_enter=False, font=font)
self.gameRunning = False
self.state = DataCollectionState.MUSE_DISCONNECTED # 0 = Muse Disconnected, 1 = Session Running, 2 = Finished
self.setup_marker_streaming()
self.markers = [[]] # Each item is array of 2 items - timestamp + the key which was pressed.
self.eegData = [[]] # Each item is array of timestamp + data for each channel.
self.get_eeg_stream(0.5)
self.startTime = time() # Timestamp of experiment start.
self.finishTime = 0 # Timestamp of experiment finish.
self.lastEEGSampleTime = self.startTime
def setup_marker_streaming(self):
streamName = self.user + ' Training Session Markers'
# StreamInfo args: name, type, channel_count, nominal_srate (0 = irregular rate), channel_format, source_id.
self.markerInfo = StreamInfo(streamName, 'Keystroke Markers', 1, 0, 'string', str(uuid.uuid1()))
self.markerOutlet = StreamOutlet(self.markerInfo)
def get_eeg_stream(self, timeout):
eeg_inlet_streams = resolve_byprop('type', 'EEG', timeout=timeout) # Returns a list of StreamInfo objects.
for stream in eeg_inlet_streams:
if self.museID == None or not stream.name().find(self.museID) == -1:
self.eegInlet = StreamInlet(stream)
self.eegTimeCorrection = self.eegInlet.time_correction()
self.state = DataCollectionState.RUNNING
self.doneCheckEEG = True
def push_marker(self, timestamp, currentChar):
self.markerOutlet.push_sample(currentChar, timestamp) # Push key marker with timestamp via LSL for other programs.
self.markers.append([timestamp, currentChar])
def pull_eeg_data(self, timeout = 0.0, max_samples = 360):
samples, timestamps = self.eegInlet.pull_chunk(timeout, max_samples) # Pull samples.
timestampCount = len(timestamps)
if(timestampCount > 0):
print('Number of samples: {0} | Time since last: {1}'.format(timestampCount, time() - self.lastEEGSampleTime))
self.lastEEGSampleTime = time()
for i in range(0, len(timestamps)):
self.eegData.append([timestamps[i]] + samples[i])
def save_data(self):
info = self.eegInlet.info()
desc = info.desc()
chanNum = info.channel_count()
channels = desc.child('channels').first_child()
channelNames = [channels.child_value('label')]
for i in range(1, chanNum):
channels = channels.next_sibling()
channelNames.append(channels.child_value('label'))
startTime = datetime.datetime.fromtimestamp(self.startTime).strftime(Constants.SESSION_FILE_DATETIME_FORMAT)
finishTime = datetime.datetime.fromtimestamp(self.finishTime).strftime(Constants.SESSION_FILE_DATETIME_FORMAT)
# Save EEG Data
fileBase = os.path.join('session_data', self.user, self.mode.name, self.user + '_' + self.mode.name + '_' + startTime + '_' + finishTime)
file = fileBase + '_EEG.csv'
helpers.ensure_dir(file)
with open(file, 'w', newline='') as csvfile:
writer = csv.writer(csvfile, delimiter=',',
quotechar='"', quoting=csv.QUOTE_MINIMAL)
writer.writerow(['timestamp'] + channelNames)
for data in self.eegData:
writer.writerow(data)
print('Saved EEG data to: ' + file)
# Save Marker Data
file = os.path.join('session_data', self.user, self.mode.name, self.user + '_' + self.mode.name + '_' + startTime + '_' + finishTime).replace(':','\ua789')
file = fileBase + '_MRK.csv'
helpers.ensure_dir(file)
with open(file, 'w', newline='') as csvfile:
writer = csv.writer(csvfile, delimiter=',',
quotechar='"', quoting=csv.QUOTE_MINIMAL)
writer.writerow(['timestamp','key marker'])
for data in self.markers:
writer.writerow(data)
print('Saved Marker data to: ' + file)
def generate_passwords(self, mode, iterations):
passwords = [''] * iterations
if mode == PasswordTypes.PIN_FIXED_4:
length = 4
poolInit = '0123456789'
elif mode == PasswordTypes.MIXED_FIXED_8:
length = 8
poolInit = 'abcdefghijklmnopqrstuvwxyz'
pool = poolInit
# Calculate number of each character required for even distribution.
freq = math.floor(iterations * length / len(pool))
poolTrack = [freq] * len(pool) # Keeps track of how many of each letter has been used.
for i in range(iterations):
for j in range(length):
if len(poolTrack) != 0:
index = random.randint(0, len(poolTrack) - 1)
char = pool[index]
poolTrack[index] -= 1
if poolTrack[index] == 0:
poolTrack.pop(index)
pool = pool.replace(char,'')
# Once we've used the minimum required "freq" of each character, we simply do a random choice from the initial pool.
else: char = random.choice(poolInit)
passwords[i] += char.upper()
return passwords
def draw_static_ui(self):
fontPassEnt = pygame.font.Font(None, 40)
passEnt = 'Passwords Entered: '
passEntS = fontPassEnt.render(passEnt, 1, (0,0,0))
iter = str(self.currentPassIndex) + ' / ' + str(self.totalIterations)
iterS = fontPassEnt.render(iter, 1, (0,0,0))
iterOffsetX = fontPassEnt.size(iter)[0] + 10
self.screen.blit(passEntS, (self.width - iterOffsetX - fontPassEnt.size(passEnt)[0] - 10, 10))
self.screen.blit(iterS, (self.width - iterOffsetX, 10))
if self.state == DataCollectionState.RUNNING:
instruct = 'Type the password below, press ENTER when done:'
elif self.state == DataCollectionState.MUSE_DISCONNECTED:
instruct = 'Error: a Muse LSL stream must be active to continue (Muse ID: {0})'.format(self.museID)
else:
instruct = 'Finished session. This window will close in a moment.'
fontInstruct = pygame.font.Font(None, 24)
instructS = fontInstruct.render(instruct, 1, (0,0,0))
instructSize = fontInstruct.size(instruct)
self.screen.blit(instructS, (self.width/2 - instructSize[0]/2, self.height/4 - instructSize[1]/2))
def process_input(self):
for event in pygame.event.get():
if self.state == DataCollectionState.RUNNING:
currentPass = self.passwords[self.currentPassIndex]
currentChar = currentPass[self.currentCharIndex]
if event.type == pygame.KEYDOWN:
if (event.key == ord(currentChar) or event.key == ord(currentChar.lower())) and not self.donePass:
newEvent = pygame.event.Event(pygame.KEYDOWN, {'unicode': currentChar.upper(),'key': ord(currentChar.upper()), 'mod': None})
self.input.get_event(newEvent)
self.push_marker(float(time()), currentChar)
if self.currentCharIndex < len(currentPass) - 1:
self.currentCharIndex += 1
else: self.donePass = True
elif event.key == pygame.K_RETURN and self.donePass:
self.currentCharIndex = 0
self.currentPassIndex += 1
if self.currentPassIndex == self.totalIterations:
self.state = DataCollectionState.FINISHED
self.input.get_event(event)
self.donePass = False
if event.type == pygame.QUIT:
pygame.quit()
def process_logic(self):
if self.state == DataCollectionState.MUSE_DISCONNECTED:
if self.doneCheckEEG == True:
self.doneCheckEEG = False
threading.Thread(target = self.get_eeg_stream, kwargs={'timeout' : 5}).start()
elif self.state == DataCollectionState.RUNNING:
self.pull_eeg_data()
elif self.state == DataCollectionState.FINISHED:
if self.finishTime == 0:
self.finishTime = time()
self.save_data()
if time() - self.finishTime >= 3:
self.gameRunning = False
self.input.update()
def draw_password(self):
font = pygame.font.Font(None, 50)
password = self.passwords[self.currentPassIndex]
passwordS = font.render(password, 1, (0,0,0))
passwordSize = font.size(password)
self.screen.blit(passwordS, (self.inputPosition[0], self.height/2 - passwordSize[1]/2 - self.inputSize[1]))
def draw(self):
self.screen.fill((255,255,255))
self.draw_static_ui()
if self.state == DataCollectionState.RUNNING:
self.draw_password()
self.input.draw(self.screen)
pygame.display.flip()
def start(self):
self.gameRunning = True
while self.gameRunning:
self.process_input()
self.process_logic()
self.draw()
pygame.quit() | identifier_body |
|
mp4TagWriter.ts | import { concatArrayBuffers } from "../utils/download";
import { TagWriter } from "./tagWriter";
interface Atom {
length: number;
name?: string;
offset?: number;
children?: Atom[];
data?: ArrayBuffer;
}
interface AtomLevel {
parent: Atom;
offset: number;
childIndex: number;
}
// length(4) + name(4)
const ATOM_HEAD_LENGTH = 8;
// data-length(4) + data-name(4) + data-flags(4)
const ATOM_DATA_HEAD_LENGTH = 16;
const ATOM_HEADER_LENGTH = ATOM_HEAD_LENGTH + ATOM_DATA_HEAD_LENGTH;
class Mp4 {
private readonly _metadataPath = ["moov", "udta", "meta", "ilst"];
private _buffer: ArrayBuffer | null;
private _bufferView: DataView | null;
private _atoms: Atom[] = [];
constructor(buffer: ArrayBuffer) {
this._buffer = buffer;
this._bufferView = new DataView(buffer);
}
parse() {
if (!this._buffer) throw new Error("Buffer can not be null");
if (this._atoms.length > 0) throw new Error("Buffer already parsed");
let offset = 0;
let atom: Atom;
while (true) {
atom = this._readAtom(offset);
if (!atom || atom.length < 1) break;
this._atoms.push(atom);
offset = atom.offset + atom.length;
}
if (this._atoms.length < 1) throw new Error("Buffer could not be parsed");
}
setDuration(duration: number) {
const mvhdAtom: Atom = this._findAtom(this._atoms, ["moov", "mvhd"]);
if (!mvhdAtom) throw new Error("'mvhd' atom could not be found");
// version(4) + created(4) + modified(4) + timescale(4)
const precedingDataLength = 16;
this._bufferView.setUint32(mvhdAtom.offset + ATOM_HEAD_LENGTH + precedingDataLength, duration);
}
| (name: string, data: ArrayBuffer | string | number) {
if (name.length > 4 || name.length < 1) throw new Error(`Unsupported atom name: '${name}'`);
let dataBuffer: ArrayBuffer;
if (data instanceof ArrayBuffer) {
dataBuffer = data;
} else if (typeof data === "string") {
dataBuffer = this._getBufferFromString(data);
} else if (typeof data === "number") {
dataBuffer = new ArrayBuffer(4);
const dataView = new DataView(dataBuffer);
dataView.setUint32(0, data);
} else {
throw new Error(`Unsupported data: '${data}'`);
}
const atom: Atom = {
name,
length: ATOM_HEADER_LENGTH + dataBuffer.byteLength,
data: dataBuffer,
};
this._insertAtom(atom, this._metadataPath);
}
getBuffer() {
const buffers: ArrayBuffer[] = [];
let bufferIndex = 0;
// we don't change the offsets, since it would add needless complexity without benefit
for (const atom of this._atoms) {
if (!atom.children) {
// nothing has been added or removed
const slice = this._buffer.slice(atom.offset, atom.offset + atom.length);
buffers.push(slice);
bufferIndex++;
continue;
}
atom.length = ATOM_HEAD_LENGTH;
const levels: AtomLevel[] = [{ parent: atom, offset: bufferIndex, childIndex: 0 }];
let levelIndex = 0;
while (true) {
const { parent, offset, childIndex } = levels[levelIndex];
if (childIndex >= parent.children.length) {
// move one level up
levelIndex--;
levels.pop();
let parentHeadLength = ATOM_HEAD_LENGTH;
if (parent.name === "meta") {
parent.length += 4;
parentHeadLength += 4;
} else if (parent.name === "stsd") {
parent.length += 8;
parentHeadLength += 8;
}
// set length of parent in buffer
this._bufferView.setUint32(parent.offset, parent.length);
const parentHeader = this._buffer.slice(parent.offset, parent.offset + parentHeadLength);
buffers.splice(offset, 0, parentHeader);
// we completed the last parent - exit
if (levelIndex < 0) break;
// add our current parents length to new parent and move childIndex of new parent one ahead
const newParent = levels[levelIndex].parent;
newParent.length += parent.length;
levels[levelIndex].childIndex++;
continue;
}
const child = parent.children[childIndex];
if (child.children) {
// move one level down
child.length = ATOM_HEAD_LENGTH;
levels.push({ parent: child, offset: bufferIndex, childIndex: 0 });
levelIndex++;
continue;
} else if (child.data) {
// add new data to buffer
const headerBuffer = this._getHeaderBufferFromAtom(child);
buffers.push(headerBuffer);
buffers.push(child.data);
} else {
// add entire child to buffer
const slice = this._buffer.slice(child.offset, child.offset + child.length);
buffers.push(slice);
}
bufferIndex++;
parent.length += child.length;
// move one child ahead
levels[levelIndex].childIndex++;
}
}
this._bufferView = null;
this._buffer = null;
this._atoms = [];
return concatArrayBuffers(buffers);
}
private _insertAtom(atom: Atom, path: string[]) {
if (!path) throw new Error("Path can not be empty");
const parentAtom = this._findAtom(this._atoms, path);
if (!parentAtom) throw new Error(`Parent atom at path '${path.join(" > ")}' could not be found`);
if (parentAtom.children === undefined) {
parentAtom.children = this._readChildAtoms(parentAtom);
}
let offset = parentAtom.offset + ATOM_HEAD_LENGTH;
if (parentAtom.name === "meta") {
offset += 4;
} else if (parentAtom.name === "stsd") {
offset += 8;
}
if (parentAtom.children.length > 0) {
const lastChild = parentAtom.children[parentAtom.children.length - 1];
offset = lastChild.offset + lastChild.length;
}
atom.offset = offset;
parentAtom.children.push(atom);
}
private _findAtom(atoms: Atom[], path: string[]): Atom | null {
if (!path || path.length < 1) throw new Error("Path can not be empty");
const curPath = [...path];
const curName = curPath.shift();
const curElem = atoms.find((i) => i.name === curName);
if (curPath.length < 1) return curElem;
if (!curElem) return null;
if (curElem.children === undefined) {
curElem.children = this._readChildAtoms(curElem);
}
if (curElem.children.length < 1) return null;
return this._findAtom(curElem.children, curPath);
}
private _readChildAtoms(atom: Atom): Atom[] {
const children: Atom[] = [];
const childEnd = atom.offset + atom.length;
let childOffset = atom.offset + ATOM_HEAD_LENGTH;
if (atom.name === "meta") {
childOffset += 4;
} else if (atom.name === "stsd") {
childOffset += 8;
}
while (true) {
if (childOffset >= childEnd) break;
const childAtom = this._readAtom(childOffset);
if (!childAtom || childAtom.length < 1) break;
childOffset = childAtom.offset + childAtom.length;
children.push(childAtom);
}
return children;
}
private _readAtom(offset: number): Atom {
const begin = offset;
const end = offset + ATOM_HEAD_LENGTH;
const buffer = this._buffer.slice(begin, end);
if (buffer.byteLength < ATOM_HEAD_LENGTH) {
return {
length: buffer.byteLength,
offset,
};
}
const dataView = new DataView(buffer);
let length = dataView.getUint32(0, false);
let name = "";
for (let i = 0; i < 4; i++) {
name += String.fromCharCode(dataView.getUint8(4 + i));
}
return {
name,
length,
offset,
};
}
private _getHeaderBufferFromAtom(atom: Atom) {
if (!atom || atom.length < 1 || !atom.name || !atom.data)
throw new Error("Can not compute header buffer for this atom");
const headerBuffer = new ArrayBuffer(ATOM_HEADER_LENGTH);
const headerBufferView = new DataView(headerBuffer);
// length at 0, length = 4
headerBufferView.setUint32(0, atom.length);
// name at 4, length = 4
const nameChars = this._getCharCodes(atom.name);
for (let i = 0; i < nameChars.length; i++) {
headerBufferView.setUint8(4 + i, nameChars[i]);
}
// data length at 8, length = 4
headerBufferView.setUint32(8, ATOM_DATA_HEAD_LENGTH + atom.data.byteLength);
// data name at 12, length = 4
const dataNameChars = this._getCharCodes("data");
for (let i = 0; i < dataNameChars.length; i++) {
headerBufferView.setUint8(12 + i, dataNameChars[i]);
}
// data flags at 16, length = 4
headerBufferView.setUint32(16, this._getFlags(atom.name));
return headerBuffer;
}
private _getBufferFromString(input: string): ArrayBuffer {
// return new TextEncoder().encode(input).buffer;
const buffer = new ArrayBuffer(input.length);
const bufferView = new DataView(buffer);
const chars = this._getCharCodes(input);
for (let i = 0; i < chars.length; i++) {
bufferView.setUint8(i, chars[i]);
}
return buffer;
}
private _getCharCodes(input: string) {
const chars: number[] = [];
for (let i = 0; i < input.length; i++) {
chars.push(input.charCodeAt(i));
}
return chars;
}
private _getFlags(name: string) {
switch (name) {
case "covr":
// 13 for jpeg, 14 for png
return 13;
case "trkn":
case "disk":
return 0;
case "tmpo":
case "cpil":
case "rtng":
return 21;
default:
return 1;
}
}
}
export class Mp4TagWriter implements TagWriter {
private _mp4: Mp4;
constructor(buffer: ArrayBuffer) {
this._mp4 = new Mp4(buffer);
this._mp4.parse();
}
setTitle(title: string): void {
if (!title) throw new Error("Invalid value for title");
this._mp4.addMetadataAtom("©nam", title);
}
setArtists(artists: string[]): void {
if (!artists || artists.length < 1) throw new Error("Invalid value for artists");
this._mp4.addMetadataAtom("©ART", artists.join(", "));
}
setAlbum(album: string): void {
if (!album) throw new Error("Invalid value for album");
this._mp4.addMetadataAtom("©alb", album);
}
setComment(comment: string): void {
if (!comment) throw new Error("Invalid value for comment");
this._mp4.addMetadataAtom("©cmt", comment);
}
setTrackNumber(trackNumber: number): void {
// max trackNumber is the max of a signed 16-bit integer (32767)
if (trackNumber < 1 || trackNumber > 32767) throw new Error("Invalid value for trackNumber");
this._mp4.addMetadataAtom("trkn", trackNumber);
}
setYear(year: number): void {
if (year < 1) throw new Error("Invalid value for year");
this._mp4.addMetadataAtom("©day", year.toString());
}
setArtwork(artworkBuffer: ArrayBuffer): void {
if (!artworkBuffer || artworkBuffer.byteLength < 1) throw new Error("Invalid value for artworkBuffer");
this._mp4.addMetadataAtom("covr", artworkBuffer);
}
setDuration(duration: number): void {
if (duration < 1) throw new Error("Invalid value for duration");
this._mp4.setDuration(duration);
}
getBuffer(): Promise<ArrayBuffer> {
const buffer = this._mp4.getBuffer();
return Promise.resolve(buffer);
}
}
| addMetadataAtom | identifier_name |
mp4TagWriter.ts | import { concatArrayBuffers } from "../utils/download";
import { TagWriter } from "./tagWriter";
interface Atom {
length: number;
name?: string;
offset?: number;
children?: Atom[];
data?: ArrayBuffer;
}
interface AtomLevel {
parent: Atom;
offset: number;
childIndex: number;
}
// length(4) + name(4)
const ATOM_HEAD_LENGTH = 8;
// data-length(4) + data-name(4) + data-flags(4)
const ATOM_DATA_HEAD_LENGTH = 16;
const ATOM_HEADER_LENGTH = ATOM_HEAD_LENGTH + ATOM_DATA_HEAD_LENGTH;
class Mp4 {
private readonly _metadataPath = ["moov", "udta", "meta", "ilst"];
private _buffer: ArrayBuffer | null;
private _bufferView: DataView | null;
private _atoms: Atom[] = [];
constructor(buffer: ArrayBuffer) {
this._buffer = buffer;
this._bufferView = new DataView(buffer);
}
parse() {
if (!this._buffer) throw new Error("Buffer can not be null");
if (this._atoms.length > 0) throw new Error("Buffer already parsed");
let offset = 0;
let atom: Atom;
while (true) {
atom = this._readAtom(offset);
if (!atom || atom.length < 1) break;
this._atoms.push(atom);
offset = atom.offset + atom.length;
}
if (this._atoms.length < 1) throw new Error("Buffer could not be parsed");
}
setDuration(duration: number) {
const mvhdAtom: Atom = this._findAtom(this._atoms, ["moov", "mvhd"]);
if (!mvhdAtom) throw new Error("'mvhd' atom could not be found");
// version(4) + created(4) + modified(4) + timescale(4)
const precedingDataLength = 16;
this._bufferView.setUint32(mvhdAtom.offset + ATOM_HEAD_LENGTH + precedingDataLength, duration);
}
addMetadataAtom(name: string, data: ArrayBuffer | string | number) {
if (name.length > 4 || name.length < 1) throw new Error(`Unsupported atom name: '${name}'`);
let dataBuffer: ArrayBuffer;
if (data instanceof ArrayBuffer) {
dataBuffer = data;
} else if (typeof data === "string") {
dataBuffer = this._getBufferFromString(data);
} else if (typeof data === "number") {
dataBuffer = new ArrayBuffer(4);
const dataView = new DataView(dataBuffer);
dataView.setUint32(0, data);
} else {
throw new Error(`Unsupported data: '${data}'`);
}
const atom: Atom = {
name,
length: ATOM_HEADER_LENGTH + dataBuffer.byteLength,
data: dataBuffer,
};
this._insertAtom(atom, this._metadataPath);
}
getBuffer() {
const buffers: ArrayBuffer[] = [];
let bufferIndex = 0;
// we don't change the offsets, since it would add needless complexity without benefit
for (const atom of this._atoms) {
if (!atom.children) {
// nothing has been added or removed
const slice = this._buffer.slice(atom.offset, atom.offset + atom.length);
buffers.push(slice);
bufferIndex++;
continue;
}
atom.length = ATOM_HEAD_LENGTH;
const levels: AtomLevel[] = [{ parent: atom, offset: bufferIndex, childIndex: 0 }];
let levelIndex = 0;
while (true) {
const { parent, offset, childIndex } = levels[levelIndex];
if (childIndex >= parent.children.length) {
// move one level up
levelIndex--;
levels.pop();
let parentHeadLength = ATOM_HEAD_LENGTH;
if (parent.name === "meta") {
parent.length += 4;
parentHeadLength += 4;
} else if (parent.name === "stsd") {
parent.length += 8;
parentHeadLength += 8;
}
// set length of parent in buffer
this._bufferView.setUint32(parent.offset, parent.length);
const parentHeader = this._buffer.slice(parent.offset, parent.offset + parentHeadLength);
buffers.splice(offset, 0, parentHeader);
// we completed the last parent - exit
if (levelIndex < 0) break;
// add our current parents length to new parent and move childIndex of new parent one ahead
const newParent = levels[levelIndex].parent;
newParent.length += parent.length;
levels[levelIndex].childIndex++;
continue;
}
const child = parent.children[childIndex];
if (child.children) {
// move one level down
child.length = ATOM_HEAD_LENGTH;
levels.push({ parent: child, offset: bufferIndex, childIndex: 0 });
levelIndex++;
continue;
} else if (child.data) {
// add new data to buffer
const headerBuffer = this._getHeaderBufferFromAtom(child);
buffers.push(headerBuffer);
buffers.push(child.data);
} else {
// add entire child to buffer
const slice = this._buffer.slice(child.offset, child.offset + child.length);
buffers.push(slice);
}
bufferIndex++;
parent.length += child.length;
// move one child ahead
levels[levelIndex].childIndex++;
}
}
this._bufferView = null;
this._buffer = null;
this._atoms = [];
return concatArrayBuffers(buffers);
}
private _insertAtom(atom: Atom, path: string[]) {
if (!path) throw new Error("Path can not be empty");
const parentAtom = this._findAtom(this._atoms, path);
if (!parentAtom) throw new Error(`Parent atom at path '${path.join(" > ")}' could not be found`);
if (parentAtom.children === undefined) {
parentAtom.children = this._readChildAtoms(parentAtom);
}
let offset = parentAtom.offset + ATOM_HEAD_LENGTH;
if (parentAtom.name === "meta") {
offset += 4;
} else if (parentAtom.name === "stsd") {
offset += 8;
}
if (parentAtom.children.length > 0) {
const lastChild = parentAtom.children[parentAtom.children.length - 1];
offset = lastChild.offset + lastChild.length;
}
atom.offset = offset;
parentAtom.children.push(atom);
}
private _findAtom(atoms: Atom[], path: string[]): Atom | null {
if (!path || path.length < 1) throw new Error("Path can not be empty");
const curPath = [...path];
const curName = curPath.shift();
const curElem = atoms.find((i) => i.name === curName);
if (curPath.length < 1) return curElem;
if (!curElem) return null;
if (curElem.children === undefined) {
curElem.children = this._readChildAtoms(curElem);
}
if (curElem.children.length < 1) return null;
return this._findAtom(curElem.children, curPath);
}
private _readChildAtoms(atom: Atom): Atom[] {
const children: Atom[] = [];
const childEnd = atom.offset + atom.length;
let childOffset = atom.offset + ATOM_HEAD_LENGTH;
if (atom.name === "meta") {
childOffset += 4;
} else if (atom.name === "stsd") {
childOffset += 8;
}
while (true) {
if (childOffset >= childEnd) break;
const childAtom = this._readAtom(childOffset);
if (!childAtom || childAtom.length < 1) break;
childOffset = childAtom.offset + childAtom.length;
children.push(childAtom);
}
return children;
}
private _readAtom(offset: number): Atom |
private _getHeaderBufferFromAtom(atom: Atom) {
if (!atom || atom.length < 1 || !atom.name || !atom.data)
throw new Error("Can not compute header buffer for this atom");
const headerBuffer = new ArrayBuffer(ATOM_HEADER_LENGTH);
const headerBufferView = new DataView(headerBuffer);
// length at 0, length = 4
headerBufferView.setUint32(0, atom.length);
// name at 4, length = 4
const nameChars = this._getCharCodes(atom.name);
for (let i = 0; i < nameChars.length; i++) {
headerBufferView.setUint8(4 + i, nameChars[i]);
}
// data length at 8, length = 4
headerBufferView.setUint32(8, ATOM_DATA_HEAD_LENGTH + atom.data.byteLength);
// data name at 12, length = 4
const dataNameChars = this._getCharCodes("data");
for (let i = 0; i < dataNameChars.length; i++) {
headerBufferView.setUint8(12 + i, dataNameChars[i]);
}
// data flags at 16, length = 4
headerBufferView.setUint32(16, this._getFlags(atom.name));
return headerBuffer;
}
private _getBufferFromString(input: string): ArrayBuffer {
// return new TextEncoder().encode(input).buffer;
const buffer = new ArrayBuffer(input.length);
const bufferView = new DataView(buffer);
const chars = this._getCharCodes(input);
for (let i = 0; i < chars.length; i++) {
bufferView.setUint8(i, chars[i]);
}
return buffer;
}
private _getCharCodes(input: string) {
const chars: number[] = [];
for (let i = 0; i < input.length; i++) {
chars.push(input.charCodeAt(i));
}
return chars;
}
private _getFlags(name: string) {
switch (name) {
case "covr":
// 13 for jpeg, 14 for png
return 13;
case "trkn":
case "disk":
return 0;
case "tmpo":
case "cpil":
case "rtng":
return 21;
default:
return 1;
}
}
}
export class Mp4TagWriter implements TagWriter {
private _mp4: Mp4;
constructor(buffer: ArrayBuffer) {
this._mp4 = new Mp4(buffer);
this._mp4.parse();
}
setTitle(title: string): void {
if (!title) throw new Error("Invalid value for title");
this._mp4.addMetadataAtom("©nam", title);
}
setArtists(artists: string[]): void {
if (!artists || artists.length < 1) throw new Error("Invalid value for artists");
this._mp4.addMetadataAtom("©ART", artists.join(", "));
}
setAlbum(album: string): void {
if (!album) throw new Error("Invalid value for album");
this._mp4.addMetadataAtom("©alb", album);
}
setComment(comment: string): void {
if (!comment) throw new Error("Invalid value for comment");
this._mp4.addMetadataAtom("©cmt", comment);
}
setTrackNumber(trackNumber: number): void {
// max trackNumber is the max of a signed 16-bit integer (32767)
if (trackNumber < 1 || trackNumber > 32767) throw new Error("Invalid value for trackNumber");
this._mp4.addMetadataAtom("trkn", trackNumber);
}
setYear(year: number): void {
if (year < 1) throw new Error("Invalid value for year");
this._mp4.addMetadataAtom("©day", year.toString());
}
setArtwork(artworkBuffer: ArrayBuffer): void {
if (!artworkBuffer || artworkBuffer.byteLength < 1) throw new Error("Invalid value for artworkBuffer");
this._mp4.addMetadataAtom("covr", artworkBuffer);
}
setDuration(duration: number): void {
if (duration < 1) throw new Error("Invalid value for duration");
this._mp4.setDuration(duration);
}
getBuffer(): Promise<ArrayBuffer> {
const buffer = this._mp4.getBuffer();
return Promise.resolve(buffer);
}
}
| {
const begin = offset;
const end = offset + ATOM_HEAD_LENGTH;
const buffer = this._buffer.slice(begin, end);
if (buffer.byteLength < ATOM_HEAD_LENGTH) {
return {
length: buffer.byteLength,
offset,
};
}
const dataView = new DataView(buffer);
let length = dataView.getUint32(0, false);
let name = "";
for (let i = 0; i < 4; i++) {
name += String.fromCharCode(dataView.getUint8(4 + i));
}
return {
name,
length,
offset,
};
} | identifier_body |
mp4TagWriter.ts | import { concatArrayBuffers } from "../utils/download";
import { TagWriter } from "./tagWriter";
interface Atom {
length: number;
name?: string;
offset?: number;
children?: Atom[];
data?: ArrayBuffer;
}
interface AtomLevel {
parent: Atom;
offset: number;
childIndex: number;
}
// length(4) + name(4)
const ATOM_HEAD_LENGTH = 8;
// data-length(4) + data-name(4) + data-flags(4)
const ATOM_DATA_HEAD_LENGTH = 16;
const ATOM_HEADER_LENGTH = ATOM_HEAD_LENGTH + ATOM_DATA_HEAD_LENGTH;
class Mp4 {
private readonly _metadataPath = ["moov", "udta", "meta", "ilst"];
private _buffer: ArrayBuffer | null;
private _bufferView: DataView | null;
private _atoms: Atom[] = [];
constructor(buffer: ArrayBuffer) {
this._buffer = buffer;
this._bufferView = new DataView(buffer);
}
parse() {
if (!this._buffer) throw new Error("Buffer can not be null");
if (this._atoms.length > 0) throw new Error("Buffer already parsed");
let offset = 0;
let atom: Atom;
while (true) {
atom = this._readAtom(offset);
if (!atom || atom.length < 1) break;
this._atoms.push(atom);
offset = atom.offset + atom.length;
}
if (this._atoms.length < 1) throw new Error("Buffer could not be parsed");
}
setDuration(duration: number) {
const mvhdAtom: Atom = this._findAtom(this._atoms, ["moov", "mvhd"]);
if (!mvhdAtom) throw new Error("'mvhd' atom could not be found");
// version(4) + created(4) + modified(4) + timescale(4)
const precedingDataLength = 16;
this._bufferView.setUint32(mvhdAtom.offset + ATOM_HEAD_LENGTH + precedingDataLength, duration);
}
addMetadataAtom(name: string, data: ArrayBuffer | string | number) {
if (name.length > 4 || name.length < 1) throw new Error(`Unsupported atom name: '${name}'`);
let dataBuffer: ArrayBuffer;
if (data instanceof ArrayBuffer) {
dataBuffer = data;
} else if (typeof data === "string") {
dataBuffer = this._getBufferFromString(data);
} else if (typeof data === "number") {
dataBuffer = new ArrayBuffer(4);
const dataView = new DataView(dataBuffer);
dataView.setUint32(0, data);
} else {
throw new Error(`Unsupported data: '${data}'`);
}
const atom: Atom = {
name,
length: ATOM_HEADER_LENGTH + dataBuffer.byteLength,
data: dataBuffer,
};
this._insertAtom(atom, this._metadataPath);
}
getBuffer() {
const buffers: ArrayBuffer[] = [];
let bufferIndex = 0;
// we don't change the offsets, since it would add needless complexity without benefit
for (const atom of this._atoms) {
if (!atom.children) {
// nothing has been added or removed
const slice = this._buffer.slice(atom.offset, atom.offset + atom.length);
buffers.push(slice);
bufferIndex++;
continue;
}
atom.length = ATOM_HEAD_LENGTH;
const levels: AtomLevel[] = [{ parent: atom, offset: bufferIndex, childIndex: 0 }];
let levelIndex = 0;
while (true) {
const { parent, offset, childIndex } = levels[levelIndex];
if (childIndex >= parent.children.length) {
// move one level up
levelIndex--;
levels.pop();
let parentHeadLength = ATOM_HEAD_LENGTH;
if (parent.name === "meta") {
parent.length += 4;
parentHeadLength += 4;
} else if (parent.name === "stsd") {
parent.length += 8;
parentHeadLength += 8;
}
// set length of parent in buffer
this._bufferView.setUint32(parent.offset, parent.length);
const parentHeader = this._buffer.slice(parent.offset, parent.offset + parentHeadLength);
buffers.splice(offset, 0, parentHeader);
// we completed the last parent - exit
if (levelIndex < 0) break;
// add our current parents length to new parent and move childIndex of new parent one ahead
const newParent = levels[levelIndex].parent;
newParent.length += parent.length;
levels[levelIndex].childIndex++;
continue;
}
const child = parent.children[childIndex];
if (child.children) {
// move one level down
child.length = ATOM_HEAD_LENGTH;
levels.push({ parent: child, offset: bufferIndex, childIndex: 0 });
levelIndex++;
continue;
} else if (child.data) {
// add new data to buffer
const headerBuffer = this._getHeaderBufferFromAtom(child);
buffers.push(headerBuffer);
buffers.push(child.data);
} else {
// add entire child to buffer
const slice = this._buffer.slice(child.offset, child.offset + child.length);
buffers.push(slice);
}
bufferIndex++;
parent.length += child.length;
// move one child ahead
levels[levelIndex].childIndex++;
}
}
this._bufferView = null;
this._buffer = null;
this._atoms = [];
return concatArrayBuffers(buffers);
}
private _insertAtom(atom: Atom, path: string[]) {
if (!path) throw new Error("Path can not be empty");
const parentAtom = this._findAtom(this._atoms, path);
if (!parentAtom) throw new Error(`Parent atom at path '${path.join(" > ")}' could not be found`);
if (parentAtom.children === undefined) {
parentAtom.children = this._readChildAtoms(parentAtom);
}
let offset = parentAtom.offset + ATOM_HEAD_LENGTH;
if (parentAtom.name === "meta") {
offset += 4;
} else if (parentAtom.name === "stsd") {
offset += 8;
}
if (parentAtom.children.length > 0) {
const lastChild = parentAtom.children[parentAtom.children.length - 1];
offset = lastChild.offset + lastChild.length;
}
atom.offset = offset;
parentAtom.children.push(atom);
}
private _findAtom(atoms: Atom[], path: string[]): Atom | null {
if (!path || path.length < 1) throw new Error("Path can not be empty");
const curPath = [...path];
const curName = curPath.shift();
const curElem = atoms.find((i) => i.name === curName);
if (curPath.length < 1) return curElem;
if (!curElem) return null;
if (curElem.children === undefined) {
curElem.children = this._readChildAtoms(curElem);
}
if (curElem.children.length < 1) return null;
return this._findAtom(curElem.children, curPath);
}
private _readChildAtoms(atom: Atom): Atom[] {
const children: Atom[] = [];
const childEnd = atom.offset + atom.length;
let childOffset = atom.offset + ATOM_HEAD_LENGTH;
if (atom.name === "meta") {
childOffset += 4;
} else if (atom.name === "stsd") {
childOffset += 8;
}
while (true) {
if (childOffset >= childEnd) break;
const childAtom = this._readAtom(childOffset);
if (!childAtom || childAtom.length < 1) break;
childOffset = childAtom.offset + childAtom.length;
children.push(childAtom);
}
return children;
}
private _readAtom(offset: number): Atom {
const begin = offset;
const end = offset + ATOM_HEAD_LENGTH;
const buffer = this._buffer.slice(begin, end);
if (buffer.byteLength < ATOM_HEAD_LENGTH) {
return {
length: buffer.byteLength,
offset,
};
}
const dataView = new DataView(buffer);
let length = dataView.getUint32(0, false);
let name = "";
for (let i = 0; i < 4; i++) {
name += String.fromCharCode(dataView.getUint8(4 + i));
}
return {
name,
length,
offset,
};
}
private _getHeaderBufferFromAtom(atom: Atom) {
if (!atom || atom.length < 1 || !atom.name || !atom.data)
throw new Error("Can not compute header buffer for this atom");
const headerBuffer = new ArrayBuffer(ATOM_HEADER_LENGTH);
const headerBufferView = new DataView(headerBuffer);
// length at 0, length = 4
headerBufferView.setUint32(0, atom.length);
// name at 4, length = 4
const nameChars = this._getCharCodes(atom.name);
for (let i = 0; i < nameChars.length; i++) {
headerBufferView.setUint8(4 + i, nameChars[i]);
}
// data length at 8, length = 4
headerBufferView.setUint32(8, ATOM_DATA_HEAD_LENGTH + atom.data.byteLength);
// data name at 12, length = 4
const dataNameChars = this._getCharCodes("data");
for (let i = 0; i < dataNameChars.length; i++) {
headerBufferView.setUint8(12 + i, dataNameChars[i]);
}
// data flags at 16, length = 4
headerBufferView.setUint32(16, this._getFlags(atom.name));
return headerBuffer;
}
private _getBufferFromString(input: string): ArrayBuffer {
// return new TextEncoder().encode(input).buffer;
const buffer = new ArrayBuffer(input.length);
const bufferView = new DataView(buffer);
const chars = this._getCharCodes(input);
for (let i = 0; i < chars.length; i++) {
bufferView.setUint8(i, chars[i]);
}
return buffer;
}
private _getCharCodes(input: string) {
const chars: number[] = [];
for (let i = 0; i < input.length; i++) {
chars.push(input.charCodeAt(i));
}
return chars;
}
private _getFlags(name: string) {
switch (name) {
case "covr":
// 13 for jpeg, 14 for png
return 13;
case "trkn":
case "disk":
return 0;
case "tmpo":
case "cpil":
case "rtng":
return 21;
default:
return 1;
}
}
}
export class Mp4TagWriter implements TagWriter {
private _mp4: Mp4;
constructor(buffer: ArrayBuffer) {
this._mp4 = new Mp4(buffer);
this._mp4.parse();
}
setTitle(title: string): void {
if (!title) throw new Error("Invalid value for title");
|
this._mp4.addMetadataAtom("©ART", artists.join(", "));
}
setAlbum(album: string): void {
if (!album) throw new Error("Invalid value for album");
this._mp4.addMetadataAtom("©alb", album);
}
setComment(comment: string): void {
if (!comment) throw new Error("Invalid value for comment");
this._mp4.addMetadataAtom("©cmt", comment);
}
setTrackNumber(trackNumber: number): void {
// max trackNumber is the max of a signed 16-bit integer (32767)
if (trackNumber < 1 || trackNumber > 32767) throw new Error("Invalid value for trackNumber");
this._mp4.addMetadataAtom("trkn", trackNumber);
}
setYear(year: number): void {
if (year < 1) throw new Error("Invalid value for year");
this._mp4.addMetadataAtom("©day", year.toString());
}
setArtwork(artworkBuffer: ArrayBuffer): void {
if (!artworkBuffer || artworkBuffer.byteLength < 1) throw new Error("Invalid value for artworkBuffer");
this._mp4.addMetadataAtom("covr", artworkBuffer);
}
setDuration(duration: number): void {
if (duration < 1) throw new Error("Invalid value for duration");
this._mp4.setDuration(duration);
}
getBuffer(): Promise<ArrayBuffer> {
const buffer = this._mp4.getBuffer();
return Promise.resolve(buffer);
}
} | this._mp4.addMetadataAtom("©nam", title);
}
setArtists(artists: string[]): void {
if (!artists || artists.length < 1) throw new Error("Invalid value for artists"); | random_line_split |
mp4TagWriter.ts | import { concatArrayBuffers } from "../utils/download";
import { TagWriter } from "./tagWriter";
interface Atom {
length: number;
name?: string;
offset?: number;
children?: Atom[];
data?: ArrayBuffer;
}
interface AtomLevel {
parent: Atom;
offset: number;
childIndex: number;
}
// length(4) + name(4)
const ATOM_HEAD_LENGTH = 8;
// data-length(4) + data-name(4) + data-flags(4)
const ATOM_DATA_HEAD_LENGTH = 16;
const ATOM_HEADER_LENGTH = ATOM_HEAD_LENGTH + ATOM_DATA_HEAD_LENGTH;
class Mp4 {
private readonly _metadataPath = ["moov", "udta", "meta", "ilst"];
private _buffer: ArrayBuffer | null;
private _bufferView: DataView | null;
private _atoms: Atom[] = [];
constructor(buffer: ArrayBuffer) {
this._buffer = buffer;
this._bufferView = new DataView(buffer);
}
parse() {
if (!this._buffer) throw new Error("Buffer can not be null");
if (this._atoms.length > 0) throw new Error("Buffer already parsed");
let offset = 0;
let atom: Atom;
while (true) {
atom = this._readAtom(offset);
if (!atom || atom.length < 1) break;
this._atoms.push(atom);
offset = atom.offset + atom.length;
}
if (this._atoms.length < 1) throw new Error("Buffer could not be parsed");
}
setDuration(duration: number) {
const mvhdAtom: Atom = this._findAtom(this._atoms, ["moov", "mvhd"]);
if (!mvhdAtom) throw new Error("'mvhd' atom could not be found");
// version(4) + created(4) + modified(4) + timescale(4)
const precedingDataLength = 16;
this._bufferView.setUint32(mvhdAtom.offset + ATOM_HEAD_LENGTH + precedingDataLength, duration);
}
addMetadataAtom(name: string, data: ArrayBuffer | string | number) {
if (name.length > 4 || name.length < 1) throw new Error(`Unsupported atom name: '${name}'`);
let dataBuffer: ArrayBuffer;
if (data instanceof ArrayBuffer) {
dataBuffer = data;
} else if (typeof data === "string") {
dataBuffer = this._getBufferFromString(data);
} else if (typeof data === "number") {
dataBuffer = new ArrayBuffer(4);
const dataView = new DataView(dataBuffer);
dataView.setUint32(0, data);
} else {
throw new Error(`Unsupported data: '${data}'`);
}
const atom: Atom = {
name,
length: ATOM_HEADER_LENGTH + dataBuffer.byteLength,
data: dataBuffer,
};
this._insertAtom(atom, this._metadataPath);
}
getBuffer() {
const buffers: ArrayBuffer[] = [];
let bufferIndex = 0;
// we don't change the offsets, since it would add needless complexity without benefit
for (const atom of this._atoms) {
if (!atom.children) {
// nothing has been added or removed
const slice = this._buffer.slice(atom.offset, atom.offset + atom.length);
buffers.push(slice);
bufferIndex++;
continue;
}
atom.length = ATOM_HEAD_LENGTH;
const levels: AtomLevel[] = [{ parent: atom, offset: bufferIndex, childIndex: 0 }];
let levelIndex = 0;
while (true) {
const { parent, offset, childIndex } = levels[levelIndex];
if (childIndex >= parent.children.length) {
// move one level up
levelIndex--;
levels.pop();
let parentHeadLength = ATOM_HEAD_LENGTH;
if (parent.name === "meta") {
parent.length += 4;
parentHeadLength += 4;
} else if (parent.name === "stsd") {
parent.length += 8;
parentHeadLength += 8;
}
// set length of parent in buffer
this._bufferView.setUint32(parent.offset, parent.length);
const parentHeader = this._buffer.slice(parent.offset, parent.offset + parentHeadLength);
buffers.splice(offset, 0, parentHeader);
// we completed the last parent - exit
if (levelIndex < 0) break;
// add our current parents length to new parent and move childIndex of new parent one ahead
const newParent = levels[levelIndex].parent;
newParent.length += parent.length;
levels[levelIndex].childIndex++;
continue;
}
const child = parent.children[childIndex];
if (child.children) {
// move one level down
child.length = ATOM_HEAD_LENGTH;
levels.push({ parent: child, offset: bufferIndex, childIndex: 0 });
levelIndex++;
continue;
} else if (child.data) {
// add new data to buffer
const headerBuffer = this._getHeaderBufferFromAtom(child);
buffers.push(headerBuffer);
buffers.push(child.data);
} else {
// add entire child to buffer
const slice = this._buffer.slice(child.offset, child.offset + child.length);
buffers.push(slice);
}
bufferIndex++;
parent.length += child.length;
// move one child ahead
levels[levelIndex].childIndex++;
}
}
this._bufferView = null;
this._buffer = null;
this._atoms = [];
return concatArrayBuffers(buffers);
}
private _insertAtom(atom: Atom, path: string[]) {
if (!path) throw new Error("Path can not be empty");
const parentAtom = this._findAtom(this._atoms, path);
if (!parentAtom) throw new Error(`Parent atom at path '${path.join(" > ")}' could not be found`);
if (parentAtom.children === undefined) {
parentAtom.children = this._readChildAtoms(parentAtom);
}
let offset = parentAtom.offset + ATOM_HEAD_LENGTH;
if (parentAtom.name === "meta") {
offset += 4;
} else if (parentAtom.name === "stsd") {
offset += 8;
}
if (parentAtom.children.length > 0) {
const lastChild = parentAtom.children[parentAtom.children.length - 1];
offset = lastChild.offset + lastChild.length;
}
atom.offset = offset;
parentAtom.children.push(atom);
}
private _findAtom(atoms: Atom[], path: string[]): Atom | null {
if (!path || path.length < 1) throw new Error("Path can not be empty");
const curPath = [...path];
const curName = curPath.shift();
const curElem = atoms.find((i) => i.name === curName);
if (curPath.length < 1) return curElem;
if (!curElem) return null;
if (curElem.children === undefined) {
curElem.children = this._readChildAtoms(curElem);
}
if (curElem.children.length < 1) return null;
return this._findAtom(curElem.children, curPath);
}
private _readChildAtoms(atom: Atom): Atom[] {
const children: Atom[] = [];
const childEnd = atom.offset + atom.length;
let childOffset = atom.offset + ATOM_HEAD_LENGTH;
if (atom.name === "meta") {
childOffset += 4;
} else if (atom.name === "stsd") {
childOffset += 8;
}
while (true) |
return children;
}
private _readAtom(offset: number): Atom {
const begin = offset;
const end = offset + ATOM_HEAD_LENGTH;
const buffer = this._buffer.slice(begin, end);
if (buffer.byteLength < ATOM_HEAD_LENGTH) {
return {
length: buffer.byteLength,
offset,
};
}
const dataView = new DataView(buffer);
let length = dataView.getUint32(0, false);
let name = "";
for (let i = 0; i < 4; i++) {
name += String.fromCharCode(dataView.getUint8(4 + i));
}
return {
name,
length,
offset,
};
}
private _getHeaderBufferFromAtom(atom: Atom) {
if (!atom || atom.length < 1 || !atom.name || !atom.data)
throw new Error("Can not compute header buffer for this atom");
const headerBuffer = new ArrayBuffer(ATOM_HEADER_LENGTH);
const headerBufferView = new DataView(headerBuffer);
// length at 0, length = 4
headerBufferView.setUint32(0, atom.length);
// name at 4, length = 4
const nameChars = this._getCharCodes(atom.name);
for (let i = 0; i < nameChars.length; i++) {
headerBufferView.setUint8(4 + i, nameChars[i]);
}
// data length at 8, length = 4
headerBufferView.setUint32(8, ATOM_DATA_HEAD_LENGTH + atom.data.byteLength);
// data name at 12, length = 4
const dataNameChars = this._getCharCodes("data");
for (let i = 0; i < dataNameChars.length; i++) {
headerBufferView.setUint8(12 + i, dataNameChars[i]);
}
// data flags at 16, length = 4
headerBufferView.setUint32(16, this._getFlags(atom.name));
return headerBuffer;
}
private _getBufferFromString(input: string): ArrayBuffer {
// return new TextEncoder().encode(input).buffer;
const buffer = new ArrayBuffer(input.length);
const bufferView = new DataView(buffer);
const chars = this._getCharCodes(input);
for (let i = 0; i < chars.length; i++) {
bufferView.setUint8(i, chars[i]);
}
return buffer;
}
private _getCharCodes(input: string) {
const chars: number[] = [];
for (let i = 0; i < input.length; i++) {
chars.push(input.charCodeAt(i));
}
return chars;
}
private _getFlags(name: string) {
switch (name) {
case "covr":
// 13 for jpeg, 14 for png
return 13;
case "trkn":
case "disk":
return 0;
case "tmpo":
case "cpil":
case "rtng":
return 21;
default:
return 1;
}
}
}
export class Mp4TagWriter implements TagWriter {
private _mp4: Mp4;
constructor(buffer: ArrayBuffer) {
this._mp4 = new Mp4(buffer);
this._mp4.parse();
}
setTitle(title: string): void {
if (!title) throw new Error("Invalid value for title");
this._mp4.addMetadataAtom("©nam", title);
}
setArtists(artists: string[]): void {
if (!artists || artists.length < 1) throw new Error("Invalid value for artists");
this._mp4.addMetadataAtom("©ART", artists.join(", "));
}
setAlbum(album: string): void {
if (!album) throw new Error("Invalid value for album");
this._mp4.addMetadataAtom("©alb", album);
}
setComment(comment: string): void {
if (!comment) throw new Error("Invalid value for comment");
this._mp4.addMetadataAtom("©cmt", comment);
}
setTrackNumber(trackNumber: number): void {
// max trackNumber is the max of a signed 16-bit integer (32767)
if (trackNumber < 1 || trackNumber > 32767) throw new Error("Invalid value for trackNumber");
this._mp4.addMetadataAtom("trkn", trackNumber);
}
setYear(year: number): void {
if (year < 1) throw new Error("Invalid value for year");
this._mp4.addMetadataAtom("©day", year.toString());
}
setArtwork(artworkBuffer: ArrayBuffer): void {
if (!artworkBuffer || artworkBuffer.byteLength < 1) throw new Error("Invalid value for artworkBuffer");
this._mp4.addMetadataAtom("covr", artworkBuffer);
}
setDuration(duration: number): void {
if (duration < 1) throw new Error("Invalid value for duration");
this._mp4.setDuration(duration);
}
getBuffer(): Promise<ArrayBuffer> {
const buffer = this._mp4.getBuffer();
return Promise.resolve(buffer);
}
}
| {
if (childOffset >= childEnd) break;
const childAtom = this._readAtom(childOffset);
if (!childAtom || childAtom.length < 1) break;
childOffset = childAtom.offset + childAtom.length;
children.push(childAtom);
} | conditional_block |
types.d.ts | /*-
* Copyright (c) 2018, 2023 Oracle and/or its affiliates. All rights reserved.
*
* Licensed under the Universal Permissive License v 1.0 as shown at
* https://oss.oracle.com/licenses/upl/
*/
/**
* Defines types used for NoSQL driver configuration for Oracle Cloud
* Infrastructure Identity and Access Management (IAM).
*/
import type { Config } from "../../config";
import type { AuthConfig } from "../config";
/**
* This type encapsulates credentials required for generating OCI request
* signature. It is used as a return type for {@link loadIAMCredentials} in
* {@link IAMCredentialsProvider}. The properties of this type are the same
* as in {@link IAMConfig} when credentials are provided directly. See
* {@link IAMConfig} for more information.
* <p>
* When returning this object, you have choices to return <em>privateKey</em>
* or <em>privateKeyFile</em> and to return <em>Buffer</em> or <em>string</em>
* for fields indicated as such.
*
* @see {@link IAMConfig}
* @see {@link loadIAMCredentials}
* @see {@link IAMCredentialsProvider}
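 *
 * @example
 * A minimal sketch of an asynchronous credentials provider function that
 * returns {@link IAMCredentials}. The environment variable names are
 * illustrative assumptions only and not part of the driver's API.
 * ```js
 * // May be supplied as the credentialsProvider property of IAMConfig.
 * const loadMyCredentials = async () => {
 *     // Retrieve the values from a secure source; environment variables
 *     // are used here only for illustration.
 *     return {
 *         tenantId: process.env.MY_OCI_TENANT_OCID,
 *         userId: process.env.MY_OCI_USER_OCID,
 *         fingerprint: process.env.MY_OCI_KEY_FINGERPRINT,
 *         privateKeyFile: process.env.MY_OCI_KEY_FILE,
 *         passphrase: process.env.MY_OCI_KEY_PASSPHRASE
 *     };
 * };
 * ```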
*/
export interface IAMCredentials {
/**
* Tenancy OCID.
*/
tenantId: string;
/**
* User OCID.
*/
userId: string;
/**
* Public key fingerprint.
*/
fingerprint: string;
/**
* PEM-encoded private key data. If specified as {@link !Buffer | Buffer},
 * you may clear the buffer contents after {@link NoSQLClient} instance is
* created for added security. Note that only one of {@link privateKey}
* or {@link privateKeyFile} properties may be specified.
*/
privateKey?: string|Buffer;
/**
* Path to PEM private key file. Path may be absolute or relative to
* current directory. May be <em>string</em> or UTF-8 encoded
* {@link !Buffer | Buffer}. Note that only one of {@link privateKey} or
* {@link privateKeyFile} properties may be specified.
*/
privateKeyFile?: string|Buffer;
/**
* Passphrase for the private key if it is encrypted. If specified as
* {@link !Buffer | Buffer}, you may clear the buffer contents after
* {@link NoSQLClient} instance is created for added security.
*/
passphrase?: Buffer|string;
}
/**
* {@link IAMConfig} is required to authorize operations using Oracle Cloud
* Infrastructure Identity and Access Management (IAM). It should be set as
* {@link AuthConfig#iam}.
* <p>
* See {@link https://docs.cloud.oracle.com/iaas/Content/Identity/Concepts/overview.htm | Overview of Oracle Cloud Infrastructure Identity and Access Management}
* for information on IAM components and how they work together to provide
* security for Oracle Cloud services.
* <p>
* All operations require a request signature that is used by the system to
* authorize the operation. The request signature may be created in one of
* the following ways:
* <ol>
 * <li>Using specific user's identity. See information below on what
* credentials are required and how to obtain them, as well as the ways these
* credentials may be provided to the driver via {@link IAMConfig}.</li>
* <li>Using Instance Principal. You may use Instance Principal when
* calling Oracle NoSQL Database Cloud Service from a compute instance in the
* Oracle Cloud Infrastructure (OCI). See
* {@link https://docs.cloud.oracle.com/en-us/iaas/Content/Identity/Tasks/callingservicesfrominstances.htm | Calling Services from an Instance}
* for more information. Use Instance Principal by setting
* {@link IAMConfig#useInstancePrincipal} property to true.</li>
* <li>Using Resource Principal. You may use Resource Principal when calling
* Oracle NoSQL Database Cloud Service from other Oracle Cloud service
* resource such as
* {@link https://docs.cloud.oracle.com/en-us/iaas/Content/Functions/Concepts/functionsoverview.htm | Functions}.
* See
* {@link https://docs.cloud.oracle.com/en-us/iaas/Content/Functions/Tasks/functionsaccessingociresources.htm | Accessing Other Oracle Cloud Infrastructure Resources from Running Functions}
 * for more information. Use Resource Principal by setting
* {@link IAMConfig#useResourcePrincipal} property to true.</li>
* </ol>
* <p>
* Note that when using Instance Principal or Resource Principal you must
 * specify compartment id (OCID), either as
* {@link Config#compartment} property of the initial configuration or as
* <em>opt.compartment</em> for each {@link NoSQLClient} method call. Note
* that you must use compartment id (OCID) and not compartment name. This
* also means that you may not prefix table name with compartment name when
* calling methods of {@link NoSQLClient}. These restrictions do not apply
* when using specific user identity, which is best when naming flexibility is
* desired.
* <p>
* To use specific user's identity, you must provide the following credentials:
* <ul>
* <li>Tenancy OCID. This is Oracle Cloud ID (OCID) for your tenancy. See
* {@link https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm | Resource Identifiers}
* for information on OCIDs.</li>
* <li>User's OCID. This is Oracle Cloud ID (OCID) for the user in your
* tenancy. See
* {@link https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm | Resource Identifiers}
* for information on OCIDs.</li>
* <li>API Signing Key. This is public-private key pair used to sign the API
* requests, see {@link https://docs.cloud.oracle.com/iaas/Content/API/Concepts/apisigningkey.htm | Required Keys and OCIDs}.
* In particular, private key is needed to generate the request signature.</li>
* <li>Public Key Fingerprint. This is an identifier of the public key of the
* API Signing Key pair.</li>
* <li>Passphrase for the private key of API Signing Key pair if the private
* key is encrypted.</li>
* </ul>
* <p>
* See {@link https://docs.cloud.oracle.com/iaas/Content/API/Concepts/apisigningkey.htm | Required Keys and OCIDs}
* for detailed description of the above credentials and the steps you need to
* perform to enable signing of API requests, which are:
* <ul>
* <li>Generate the key pair described above.</li>
* <li>Upload public key.</li>
* <li>Obtain tenancy and user OCIDs and public key fingerprint.</li>
* </ul> | * <p>
* You may provide these credentials in one of the following ways, in order of
* increased security:
* <ul>
* <li>Directly as properties of {@link IAMConfig}.
* In this case, set properties {@link tenantId}, {@link userId},
* {@link privateKey} or {@link privateKeyFile}, {@link fingerprint} and
* {@link passphrase} (if private key is encrypted)</li>
* <li>As part of an OCI configuration file. See
* {@link https://docs.cloud.oracle.com/iaas/Content/API/Concepts/sdkconfig.htm | SDK and CLI Configuration File}
* for information on OCI configuration file and what entries are used for the
* required credentials. In this case, you may set properties
* {@link configFile} and/or {@link profileName}. If not set,
* appropriate default values will be used, see property descriptions.</li>
* <li>Specify your own credentials provider in the form of
* {@link IAMCredentialsProvider} that implements {@link loadIAMCredentials}
* function. This allows you to store and retrieve credentials in a secure
* manner. In this case, specify {@link credentialsProvider} property.</li>
* </ul>
* Note that the private key must be in PEM format. You may provide a path
* to the PEM key file. Alternatively, except when using OCI configuration
* file, you may provide PEM encoded private key directly as <em>Buffer</em>
* or <em>string</em>. Note that the {@link passphrase} must be provided if
* the private key is encrypted.
 * <p>
* The driver will determine the method of authorization as follows:
* <ol>
* <li>If {@link IAMConfig#useResourcePrincipal} is set to <em>true</em>, then
* Resource Principal authentication will be used. No other properties listed
 * below are allowed for Resource Principal authorization.</li>
* <li>If {@link IAMConfig#useInstancePrincipal} is set to <em>true</em>, then
* Instance Principal authentication will be used. You may also set
 * {@link IAMConfig#federationEndpoint}, although it is not required and in
* most cases federation endpoint will be auto-detected. No other properties
* listed below are allowed for Instance Principal authorization.</li>
* <li>If {@link useSessionToken} is set to <em>true</em>, then session
* token-based authentication will be used. Note that this method uses OCI
* config file, so settings to properties {@link configFile} and
* {@link profileName} also apply. See {@link useSessionToken} for more
 * information.</li>
* <li>If {@link IAMConfig} has any of user identity properties such as
* {@link tenantId}, {@link userId}, {@link privateKey}, {@link fingerprint}
* or {@link passphrase}, the driver assumes that you are using a specific
* user's identity and that the credentials are provided directly in
* {@link IAMConfig}. All required user's credentials, as described above,
* must be present as properties of {@link IAMConfig}, otherwise
* {@link NoSQLArgumentError} will result.</li>
* <li>If {@link IAMConfig} has {@link credentialsProvider} property, the
* driver assumes that you are using a specific user's identity and the
* credentials are obtained through the credentials provider which must be in
* the form of {@link IAMCredentialsProvider}. In this case the credentials
* must not be set directly in {@link IAMConfig}.</li>
* <li>If none of the above, the driver assumes that you are using a specific
* user's identity and the credentials are stored in OCI config
* file and will use {@link configFile} and {@link profileName} properties
* if present, otherwise it will assume their default values. In particular,
* if you specify {@link Config#serviceType} as {@link ServiceType.CLOUD}
 * and omit {@link Config#auth} altogether, the driver will use IAM
* authorization with default OCI config file and default profile name.</li>
* </ol>
* <p>
* Note that if using an OCI configuration file, you may also specify region
* identifier in the same profile as your credentials. In this case, you need
* not specify either region or endpoint in {@link Config}. In particular,
* if you use the default OCI config file (<em>~/.oci/config</em>) and default
* profile name (<em>DEFAULT</em>) and do not need to customize any other
* configuration properties, you may create {@link NoSQLClient} instance
* without providing configuration to {@link NoSQLClient} constructor.
* See {@link NoSQLClient} for more information.
* <p>
* If using Resource Principal, you also need not specify either region or
* endpoint in {@link Config}, as Resource Principal's region will be used.
* In fact, when running in Functions service, you may only access NoSQL
* service in the same region as the running function, so when using Resource
* Principal, it is preferable not to specify either region or endpoint in
* {@link Config}.
* <p>
* Generated authorization signature is valid for a period of time and is
 * cached for efficiency. The caching behavior may be customized with
* properties {@link IAMConfig#durationSeconds} and
* {@link IAMConfig#refreshAheadMs}. See their property descriptions for
* details.
*
* @see {@link AuthConfig}
* @see {@link IAMCredentials}
* @see {@link IAMCredentialsProvider}
* @see {@page connect-cloud.md}
*
* @example
* JSON {@link Config} object supplying user's credentials directly
 * (sensitive info not shown).
* ```json
* {
* "region": "us-phoenix-1",
* "auth": {
* "iam": {
* "tenantId": "ocid1.tenancy.oc...................",
* "userId": "ocid1.user.oc.....................",
* "fingerprint": "aa:aa:aa:aa:.....",
* "privateKeyFile": "~/myapp/security/oci_api_key.pem",
* "passphrase": "..............."
* }
* }
* }
* ```
*
* @example
* JSON {@link Config} object supplying user's credentials through OCI
* configuration file.
* ```json
* {
* "region": "us-phoenix-1",
* "auth": {
* "iam": {
* "configFile": "~/myapp/.oci/config",
* "profileName": "John"
* }
* }
* }
* ```
*
* @example
 * JavaScript {@link Config} object supplying user's credentials via custom
* credentials provider.
* ```js
* {
* region: "us-phoenix-1",
* auth: {
* iam: {
* credentialsProvider: async () => {
* .......... //retrieve credentials somehow
* ..........
* return {
* tenantId: myTenantId,
* userId: myUserId,
* fingerprint: myFingerprint,
* privateKey: myPrivateKey,
* passphrase: myPassphrase
* };
* }
* }
* }
* }
* ```
*
* @example
* JSON {@link Config} object using Instance Principal.
* ```json
* {
* "region": "us-phoenix-1",
* "compartment": "ocid1.compartment.oc1.............................",
* "auth": {
* "iam": {
 *             "useInstancePrincipal": true
* }
* }
* }
* ```
*
* @example
* JSON {@link Config} object using Resource Principal.
* ```json
* {
* "compartment": "ocid1.compartment.oc1.............................",
* "auth": {
* "iam": {
 *             "useResourcePrincipal": true
* }
* }
* }
* ```
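 *
 * @example
 * JSON {@link Config} object using session token-based authentication.
 * This is an illustrative sketch: it assumes the session token, key file and
 * tenancy are already recorded in the default OCI configuration file under
 * the default profile, as described for {@link useSessionToken}.
 * ```json
 * {
 *     "region": "us-phoenix-1",
 *     "compartment": "ocid1.compartment.oc1.............................",
 *     "auth": {
 *         "iam": {
 *             "useSessionToken": true
 *         }
 *     }
 * }
 * ```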
*/
export interface IAMConfig extends Partial<IAMCredentials> {
/**
* If set to true, Instance Principal authorization will be used. May not
* be combined with {@link useResourcePrincipal} or any properties used
* for specific user's identity.
*/
useInstancePrincipal?: boolean;
/**
* When using Instance Principal, specifies endpoint to use to communicate
* with authorization server. Usually this does not need to be specified
* as the driver will detect the federation endpoint automatically.
* Specify this if you need to override the default federation endpoint.
* The endpoint must be in the form
* <em>https://auth.\{region-identifier\}.\{second-level-domain\}</em>,
* e.g. <em>https://auth.ap-hyderabad-1.oraclecloud.com</em>.
*/
federationEndpoint?: string|URL;
/**
* Used only with instance principal (see <em>useInstancePrincipal</em>).
* The delegation token allows the instance to assume the privileges of
* the user for which the token was created and act on behalf of that
* user. Use this property to specify the value of the delegation token
* directly. Otherwise, to use a provider interface or obtain a token
* from a file, use {@link delegationTokenProvider} property. This
* property is exclusive with {@link delegationTokenProvider}.
*/
delegationToken?: string;
/**
* Used only with instance principal (see <em>useInstancePrincipal</em>).
* The delegation token allows the instance to assume the privileges of
* the user for which the token was created and act on behalf of that
* user. Use this property to specify how delegation token is to be
* obtained. If set as a string, it will be interpreted as file path
* (absolute or relative) to load the delegation token from. Otherwise,
* specify {@link DelegationTokenProvider} as a custom provider used to
* load the delegation token. In either case, the delegation token will
* be reloaded each time the authorization signature is refreshed. This
* property is exclusive with {@link delegationToken}.
*/
delegationTokenProvider?: string|DelegationTokenProvider;
/**
* If set to true, Resource Principal authorization will be used. May not
* be combined with {@link useInstancePrincipal} or any properties used
* for specific user's identity.
*/
useResourcePrincipal?: boolean;
/**
* If set to true,
* {@link https://docs.oracle.com/en-us/iaas/Content/API/Concepts/sdk_authentication_methods.htm#sdk_authentication_methods_session_token | Session Token-Based Authentication}
* will be used. This method uses temporary session token read from a
* token file. The path of the token file is read from a profile in OCI
* configuration file as the value of field <em>security_token_file</em>.
* See
* {@link https://docs.oracle.com/en-us/iaas/Content/API/Concepts/sdkconfig.htm | SDK Configuration File} for details of the file's contents and format. In
* addition, see the description of {@link IAMConfig} above.
* <p>
* Because this method uses OCI Configuration File, you may use the
* properties {@link configFile} and {@link profileName} to specify the
* path to the configuration file and the profile name within the
* configuration file. The same defaults apply.
* <p>
* For session token-based authentication, the properties required in the
* OCI config file by the driver are <em>tenancy</em> for tenant OCID,
* <em>security_token_file</em> for security token file and
* <em>key_file</em> for private key file.
* You may also specify <em>pass_phrase</em> property for private key
* passphrase as well as <em>region</em> property (instead of specifying
* {@link Config#region} property in the {@link Config} as previously
* described).
* <p>
* You can use the OCI CLI to authenticate and create a token, see
 * {@link https://docs.oracle.com/en-us/iaas/Content/API/SDKDocs/clitoken.htm | Token-based Authentication for the CLI}.
* <p>
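 * @example
 * A profile in the OCI configuration file suitable for session token-based
 * authentication. This is only an illustrative sketch; the OCID and file
 * paths below are placeholders, not values from a real tenancy.
 * ```ini
 * [DEFAULT]
 * tenancy=ocid1.tenancy.oc1..aaaaaaaa...
 * security_token_file=~/.oci/sessions/DEFAULT/token
 * key_file=~/.oci/sessions/DEFAULT/oci_api_key.pem
 * region=us-phoenix-1
 * ```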
*/
useSessionToken?: boolean;
/**
* OCI configuration file path. May be absolute or relative to current
* directory. May be <em>string</em> or UTF-8 encoded
* {@link !Buffer | Buffer}.
* @defaultValue Path "\~/.oci/config", where "\~" represents user's home
* directory on Unix systems and %USERPROFILE% directory on Windows
* (see USERPROFILE environment variable).
*/
configFile?: string|Buffer;
/**
* Profile name within the OCI configuration file, used only if
* credentials are obtained from the configuration file as described.
* @defaultValue If not set, the name "DEFAULT" is used.
*/
profileName?: string;
/**
* Custom credentials provider to use to obtain credentials in the form of
 * {@link IAMCredentials}. You may also specify a string for a module name
* or path that exports {@link IAMCredentialsProvider}.
*/
credentialsProvider?: IAMCredentialsProvider|string;
/**
* Cache duration of the signature in seconds. Specifies how long cached
 * signature may be used before a new one has to be created. Maximum allowed
* duration is 5 minutes (300 seconds), which is also the default.
* @defaultValue 300 (5 minutes)
*/
durationSeconds?: number;
/**
* Tells the driver when to automatically refresh the signature before its
* expiration in the cache, measured in number of milliseconds before
* expiration. E.g. value 10000 means that the driver will attempt to
* refresh the signature 10 seconds before its expiration. Using refresh
 * allows you to avoid slowing down database operations by creating the
* signature asynchronously. You can set this property to <em>null</em>
* to disable automatic refresh.
* @defaultValue 10000 (10 seconds)
*/
refreshAheadMs?: number|null;
/**
* Timeout in milliseconds used for requests to the authorization server.
* Currently this is only used with Instance Principal.
* @defaultValue 120000 (2 minutes)
*/
timeout?: number;
}
/**
* Interface to asynchronously load credentials required for generating OCI
* request signature. Used in {@link IAMCredentialsProvider}.
* @see {@link IAMCredentialsProvider}
* @see {@link IAMCredentials}
* @async
* @returns {Promise} Promise resolved with {@link IAMCredentials} or
 * rejected with an error. Properties of type {@link !Buffer | Buffer},
* such as {@link IAMCredentials#privateKey} or
* {@link IAMCredentials#passphrase} will be erased once the signature is
* generated.
*/
export type loadIAMCredentials = () => Promise<IAMCredentials>;
/**
* You may implement {@link IAMCredentialsProvider} interface to securely
* obtain credentials required for generation of an OCI request signature, as
* described in {@link IAMConfig}. {@link IAMCredentialsProvider} is
* set as {@link IAMConfig#credentialsProvider} property and may be specified
* either as {@link loadIAMCredentials} function or as an object
* implementing <em>loadCredentials</em> function.
*
* @see {@link loadIAMCredentials}
* @see {@link IAMCredentials}
* @see {@link IAMConfig}
* @see {@page connect-cloud.md}
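 *
 * @example
 * Specifying {@link IAMConfig#credentialsProvider} as an object implementing
 * <em>loadCredentials</em>. This is only an illustrative sketch: the
 * environment variable names and the secret-store helper are assumptions and
 * not part of the driver API.
 * ```js
 * const provider = {
 *     loadCredentials: async () => {
 *         // Retrieve the private key from wherever you keep secrets,
 *         // e.g. a vault or an encrypted file (assumed helper, not driver API).
 *         const privateKey = await mySecretStore.getPrivateKeyPem();
 *         return {
 *             tenantId: process.env.MY_TENANT_OCID,
 *             userId: process.env.MY_USER_OCID,
 *             fingerprint: process.env.MY_KEY_FINGERPRINT,
 *             privateKey
 *         };
 *     }
 * };
 * const config = {
 *     region: "us-phoenix-1",
 *     auth: { iam: { credentialsProvider: provider } }
 * };
 * ```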
*/
export type IAMCredentialsProvider = loadIAMCredentials |
{ loadCredentials: loadIAMCredentials };
/**
* Interface to load delegation token, as used in
* {@link DelegationTokenProvider}.
* @see {@link DelegationTokenProvider}
* @async
* @returns {Promise} Promise resolved with a <em>string</em> delegation token
* or rejected with an error
*/
export type loadDelegationToken = () => Promise<string>;
/**
* You may implement {@link DelegationTokenProvider} interface to securely
* obtain delegation token when using instance principal.
* {@link DelegationTokenProvider} may be set as
 * {@link IAMConfig#delegationTokenProvider} and may be specified as
 * either a {@link loadDelegationToken} function or as an object implementing
* {@link loadDelegationToken} function.
*
* @see {@link loadDelegationToken}
* @see {@link IAMConfig#delegationToken}
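 *
 * @example
 * Supplying a delegation token through a custom provider when using instance
 * principal. A minimal sketch: the token service helper is an assumption,
 * not part of the driver API.
 * ```js
 * const config = {
 *     region: "us-phoenix-1",
 *     auth: {
 *         iam: {
 *             useInstancePrincipal: true,
 *             delegationTokenProvider: {
 *                 // Reloaded each time the authorization signature is refreshed.
 *                 loadDelegationToken: async () =>
 *                     await myTokenService.currentDelegationToken()
 *             }
 *         }
 *     }
 * };
 * ```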
*/
export type DelegationTokenProvider = loadDelegationToken |
{ loadDelegationToken: loadDelegationToken }; | random_line_split |
|
mqttclient.go | package kvm
// Connect to the broker, subscribe, and write messages received to a file
import (
"encoding/json"
"fmt"
"os"
"os/signal"
"sync"
"syscall"
"time"
//"github.com/didi/nightingale/src/modules/agent/config"
//"github.com/didi/nightingale/src/modules/agent/wol"
"github.com/pion/rtsp-bench/server/config"
"github.com/pion/rtsp-bench/server/wol"
mqtt "github.com/eclipse/paho.mqtt.golang"
enc "github.com/pion/rtsp-bench/server/signal"
)
/*
const (
TOPIC = "topic1"
QOS = 1
SERVERADDRESS = "tcp://mosquitto:1883"
CLIENTID = "mqtt_subscriber"
WRITETOLOG = true // If true then received messages will be written to the console
WRITETODISK = false // If true then received messages will be written to the file below
OUTPUTFILE = "/binds/receivedMessages.txt"
)
*/
// handler is a simple struct that provides a function to be called when a message is received. The message is parsed
// and the count followed by the raw message is written to the file (this makes it easier to sort the file)
type handler struct {
f *os.File
}
//var mqttclient mqtt.Client
var (
msgChans chan PublishMsg //prompb.WriteRequest //multi node one chan
)
type heartmsg struct {
Count uint64
}
func NewHandler() *handler {
var f *os.File
if config.Config.Mqtt.WRITETODISK {
var err error
f, err = os.Create(config.Config.Mqtt.OUTPUTFILE)
if err != nil {
panic(err)
}
}
return &handler{f: f}
}
// Close closes the file
func (o *handler) Close() {
if o.f != nil {
if err := o.f.Close(); err != nil {
fmt.Printf("ERROR closing file: %s", err)
}
o.f = nil
}
}
// handle is called when a message is received
func (o *handler) handle(client mqtt.Client, msg mqtt.Message) {
	// Parse the incoming payload as a Session and dispatch on its message type.
var m Message
var resp Session
if err := json.Unmarshal(msg.Payload(), &resp); err != nil {
fmt.Printf("Message could not be parsed (%s): %s", msg.Payload(), err)
return
}
fmt.Println(resp)
switch resp.Type {
case CMDMSG_OFFER:
enc.Decode(resp.Data, &m)
Notice(m)
case CMDMSG_DISC:
var devcmd DiscoveryCmd
enc.Decode(resp.Data, &devcmd)
DiscoveryDev(&devcmd)
case CMDMSG_WAKE:
var fing Fing
enc.Decode(resp.Data, &fing)
wakemac(fing)
case CMDMSG_UPDATE:
var newver *versionUpdate
GetUpdateMyself(newver)
case CMDMSG_MR2:
var mr2info Mr2Msg
enc.Decode(resp.Data, &mr2info)
Mr2HostPort(&mr2info)
}
}
func Mr2HostPort(mr2info *Mr2Msg) {
arg := fmt.Sprintf("client -s %s -p %s -P %d -c %s", mr2info.ServerAddr, mr2info.Password, mr2info.ExposePort, mr2info.ExposeAddr)
fmt.Println("mr2", arg)
err := fmt.Errorf("")
//err := sys.CmdRun("./mr2", arg)
if err != nil {
CmdFeedBack(CMDMSG_MR2, 0, err.Error(), time.Now().String())
return
} else {
		CmdFeedBack(CMDMSG_MR2, 1, "success", time.Now().String())
}
}
func wakemac(fing Fing) {
for _, v := range fing.Devices {
wol.Wake(v.Mac, "", "", "")
}
}
func DiscoveryDev(devcmd *DiscoveryCmd) {
go func() {
switch devcmd.DevType {
case DEVICE_IP:
dev := DiscoveryDevice()
| req := &Session{}
req.Type = "discoveryrsp"
req.DeviceId = "kvm1"
req.Data = enc.Encode(dev) //enc.Encode(answer)
answermsg := PublishMsg{
Topic: "discoveryrsp",
Msg: req,
}
fmt.Println("discoveryrsp", answermsg)
SendMsg(answermsg) //response)
case DEVICE_ONVIF:
case DEVICE_SNMP:
case DEVICE_MODBUS:
case DEVICE_BACNET:
case DEVICE_CAN:
case DEVICE_UPCA:
}
}()
}
func CmdFeedBack(cmdstr string, status int, err string, sid string) {
resp := ResponseMsg{
Cmdstr: cmdstr,
Status: status,
Err: err,
Sid: sid,
}
req := &Session{}
req.Type = "cmdFeedback"
req.DeviceId = "kvm1"
req.Data = enc.Encode(resp) //enc.Encode(answer)
answermsg := PublishMsg{
Topic: "cmdFeedback",
Msg: req,
}
fmt.Println("cmdFeedback", answermsg)
SendMsg(answermsg) //response)
}
func GetCurrentPath() string {
getwd, err := os.Getwd()
if err != nil {
fmt.Print(err.Error())
} else {
fmt.Print(getwd)
}
return getwd
}
func GetUpdateMyself(newver *versionUpdate) {
if newver.ForceUpdate == 1 {
if newver.DownLoadUrl != "" {
go func() {
filepath := GetCurrentPath() + "/" + newver.Version
fileext := ".zip"
filename, err := DownloadFile(filepath, newver.DownLoadUrl, fileext)
if err != nil {
CmdFeedBack(CMDMSG_UPDATE, 0, err.Error(), "1")
} else {
fmt.Println("Download Finished")
CmdFeedBack(CMDMSG_UPDATE, 1, "Download Finished", "1")
if IsZip(filename) {
err = Unzip(filename, filepath)
if err != nil {
CmdFeedBack(CMDMSG_UPDATE, 0, err.Error(), "1")
} else {
CmdFeedBack(CMDMSG_UPDATE, 1, "zip ok", "1")
}
}
					//Create a new directory named after the version number and unzip the package into it.
					//Later, write an update config file to tell the Process daemon that an update is needed,
					//then exit immediately or update according to some policy.
					//When the daemon sees that an update is needed, it runs the program from the new path in the update config file.
}
}()
}
}
}
/*
func SendMsgAnswer(msg Answer) {
msgChans <- msg
fmt.Print("SendMsg OK")
//mqttclient.Publish(Config.Mqtt.PUBTOPIC+"/"+Config.Report.SN, Config.Mqtt.QOS, false, msg)
}
*/
func SendMsg(msg PublishMsg) {
msgChans <- msg
fmt.Print("SendMsg OK")
//mqttclient.Publish(Config.Mqtt.PUBTOPIC+"/"+Config.Report.SN, Config.Mqtt.QOS, false, msg)
}
func StartMqtt() {
// Enable logging by uncommenting the below
// mqtt.ERROR = log.New(os.Stdout, "[ERROR] ", 0)
// mqtt.CRITICAL = log.New(os.Stdout, "[CRITICAL] ", 0)
// mqtt.WARN = log.New(os.Stdout, "[WARN] ", 0)
// mqtt.DEBUG = log.New(os.Stdout, "[DEBUG] ", 0)
// Create a handler that will deal with incoming messages
h := NewHandler()
defer h.Close()
msgChans = make(chan PublishMsg, 10)
// Now we establish the connection to the mqtt broker
opts := mqtt.NewClientOptions()
opts.AddBroker(config.Config.Mqtt.SERVERADDRESS)
opts.SetClientID(config.Config.Mqtt.CLIENTID)
opts.ConnectTimeout = time.Second // Minimal delays on connect
opts.WriteTimeout = time.Second // Minimal delays on writes
	opts.KeepAlive = 30 // Keepalive every 30 seconds so we quickly detect network outages
opts.PingTimeout = time.Second // local broker so response should be quick
// Automate connection management (will keep trying to connect and will reconnect if network drops)
opts.ConnectRetry = true
opts.AutoReconnect = true
// If using QOS2 and CleanSession = FALSE then it is possible that we will receive messages on topics that we
// have not subscribed to here (if they were previously subscribed to they are part of the session and survive
// disconnect/reconnect). Adding a DefaultPublishHandler lets us detect this.
opts.DefaultPublishHandler = func(_ mqtt.Client, msg mqtt.Message) {
fmt.Printf("UNEXPECTED MESSAGE: %s\n", msg)
}
// Log events
opts.OnConnectionLost = func(cl mqtt.Client, err error) {
fmt.Println("connection lost")
}
opts.OnConnect = func(c mqtt.Client) {
fmt.Println("connection established")
		// Establish the subscription - doing this here means that it will happen every time a connection is established
// (useful if opts.CleanSession is TRUE or the broker does not reliably store session data)
t := c.Subscribe(config.Config.Mqtt.SUBTOPIC, config.Config.Mqtt.QOS, h.handle)
		// the connection handler is called in a goroutine so blocking here would not cause an issue. However, as blocking
		// in other handlers does cause problems, it's best to just assume we should not block
go func() {
_ = t.Wait() // Can also use '<-t.Done()' in releases > 1.2.0
if t.Error() != nil {
fmt.Printf("ERROR SUBSCRIBING: %s\n", t.Error())
} else {
fmt.Println("subscribed to: ", config.Config.Mqtt.SUBTOPIC)
}
}()
}
opts.OnReconnecting = func(mqtt.Client, *mqtt.ClientOptions) {
fmt.Println("attempting to reconnect")
}
//
// Connect to the broker
//
client := mqtt.NewClient(opts)
// If using QOS2 and CleanSession = FALSE then messages may be transmitted to us before the subscribe completes.
// Adding routes prior to connecting is a way of ensuring that these messages are processed
client.AddRoute(config.Config.Mqtt.SUBTOPIC, h.handle)
if token := client.Connect(); token.Wait() && token.Error() != nil {
panic(token.Error())
}
fmt.Println("Connection is up")
done := make(chan struct{})
var wg sync.WaitGroup
wg.Add(1)
go func() {
var count uint64
for {
select {
case data := <-msgChans:
msg, err := json.Marshal(data.Msg)
if err != nil {
panic(err)
}
//t := client.Publish(Config.Mqtt.PUBTOPIC+"/"+Config.Report.SN, Config.Mqtt.QOS, false, msg)
t := client.Publish(config.Config.Mqtt.PUBTOPIC+"/"+data.Topic, config.Config.Mqtt.QOS, false, msg)
go func() {
_ = t.Wait() // Can also use '<-t.Done()' in releases > 1.2.0
if t.Error() != nil {
fmt.Printf("msg PUBLISHING: %s\n", t.Error().Error())
} else {
//fmt.Println("msg PUBLISHING:", msg)
}
}()
case <-time.After(time.Second * time.Duration(config.Config.Mqtt.HEARTTIME)):
req := &Session{}
req.Type = "heart"
req.DeviceId = config.Config.Mqtt.CLIENTID //"kvm1"
count += 1
msg, err := json.Marshal(heartmsg{Count: count})
if err != nil {
panic(err)
}
req.Data = enc.Encode(msg)
//data := signal.Encode(*peerConnection.LocalDescription())
answermsg := PublishMsg{
Topic: "heart",
Msg: req,
}
msg, err = json.Marshal(answermsg.Msg)
if err != nil {
panic(err)
}
t := client.Publish(config.Config.Mqtt.PUBTOPIC+"/"+answermsg.Topic, config.Config.Mqtt.QOS, false, msg)
// Handle the token in a go routine so this loop keeps sending messages regardless of delivery status
go func() {
_ = t.Wait() // Can also use '<-t.Done()' in releases > 1.2.0
if t.Error() != nil {
fmt.Printf("ERROR PUBLISHING: %s\n", t.Error().Error())
} else {
//fmt.Println("HEART PUBLISHING: ", msg)
}
}()
case <-done:
fmt.Println("publisher done")
wg.Done()
return
}
}
}()
// Messages will be delivered asynchronously so we just need to wait for a signal to shutdown
sig := make(chan os.Signal, 1)
signal.Notify(sig, os.Interrupt)
signal.Notify(sig, syscall.SIGTERM)
<-sig
fmt.Println("signal caught - exiting")
client.Disconnect(1000)
fmt.Println("shutdown complete")
}
/*
// Connect to the broker and publish a message periodically
const (
TOPIC = "topic1"
QOS = 1
SERVERADDRESS = "tcp://mosquitto:1883"
DELAY = time.Second
CLIENTID = "mqtt_publisher"
)
func main() {
// Enable logging by uncommenting the below
// mqtt.ERROR = log.New(os.Stdout, "[ERROR] ", 0)
// mqtt.CRITICAL = log.New(os.Stdout, "[CRITICAL] ", 0)
// mqtt.WARN = log.New(os.Stdout, "[WARN] ", 0)
// mqtt.DEBUG = log.New(os.Stdout, "[DEBUG] ", 0)
opts := mqtt.NewClientOptions()
opts.AddBroker(SERVERADDRESS)
opts.SetClientID(CLIENTID)
opts.ConnectTimeout = time.Second // Minimal delays on connect
opts.WriteTimeout = time.Second // Minimal delays on writes
opts.KeepAlive = 10 // Keepalive every 10 seconds so we quickly detect network outages
opts.PingTimeout = time.Second // local broker so response should be quick
// Automate connection management (will keep trying to connect and will reconnect if network drops)
opts.ConnectRetry = true
opts.AutoReconnect = true
// Log events
opts.OnConnectionLost = func(cl mqtt.Client, err error) {
fmt.Println("connection lost")
}
opts.OnConnect = func(mqtt.Client) {
fmt.Println("connection established")
}
opts.OnReconnecting = func(mqtt.Client, *mqtt.ClientOptions) {
fmt.Println("attempting to reconnect")
}
//
// Connect to the broker
//
client := mqtt.NewClient(opts)
if token := client.Connect(); token.Wait() && token.Error() != nil {
panic(token.Error())
}
fmt.Println("Connection is up")
//
// Publish messages until we receive a signal
//
done := make(chan struct{})
var wg sync.WaitGroup
// The message could be anything; lets make it JSON containing a simple count (makes it simpler to track the messages)
type msg struct {
Count uint64
}
wg.Add(1)
go func() {
var count uint64
for {
select {
case <-time.After(DELAY):
count += 1
msg, err := json.Marshal(msg{Count: count})
if err != nil {
panic(err)
}
t := client.Publish(TOPIC, QOS, false, msg)
// Handle the token in a go routine so this loop keeps sending messages regardless of delivery status
go func() {
_ = t.Wait() // Can also use '<-t.Done()' in releases > 1.2.0
if t.Error() != nil {
fmt.Printf("ERROR PUBLISHING: %s\n", err)
}
}()
case <-done:
fmt.Println("publisher done")
wg.Done()
return
}
}
}()
// Wait for a signal before exiting
sig := make(chan os.Signal, 1)
signal.Notify(sig, os.Interrupt)
signal.Notify(sig, syscall.SIGTERM)
<-sig
fmt.Println("signal caught - exiting")
close(done)
wg.Wait()
fmt.Println("shutdown complete")
}
*/ | random_line_split |
|
mqttclient.go | package kvm
// Connect to the broker, subscribe, and write messages received to a file
import (
"encoding/json"
"fmt"
"os"
"os/signal"
"sync"
"syscall"
"time"
//"github.com/didi/nightingale/src/modules/agent/config"
//"github.com/didi/nightingale/src/modules/agent/wol"
"github.com/pion/rtsp-bench/server/config"
"github.com/pion/rtsp-bench/server/wol"
mqtt "github.com/eclipse/paho.mqtt.golang"
enc "github.com/pion/rtsp-bench/server/signal"
)
/*
const (
TOPIC = "topic1"
QOS = 1
SERVERADDRESS = "tcp://mosquitto:1883"
CLIENTID = "mqtt_subscriber"
WRITETOLOG = true // If true then received messages will be written to the console
WRITETODISK = false // If true then received messages will be written to the file below
OUTPUTFILE = "/binds/receivedMessages.txt"
)
*/
// handler is a simple struct that provides a function to be called when a message is received. The message is parsed
// and the count followed by the raw message is written to the file (this makes it easier to sort the file)
type handler struct {
f *os.File
}
//var mqttclient mqtt.Client
var (
msgChans chan PublishMsg //prompb.WriteRequest //multi node one chan
)
type heartmsg struct {
Count uint64
}
func NewHandler() *handler {
var f *os.File
if config.Config.Mqtt.WRITETODISK {
var err error
f, err = os.Create(config.Config.Mqtt.OUTPUTFILE)
if err != nil {
panic(err)
}
}
return &handler{f: f}
}
// Close closes the file
func (o *handler) Close() {
if o.f != nil {
if err := o.f.Close(); err != nil {
fmt.Printf("ERROR closing file: %s", err)
}
o.f = nil
}
}
// handle is called when a message is received
func (o *handler) handle(client mqtt.Client, msg mqtt.Message) {
	// Parse the incoming payload as a Session and dispatch on its message type.
var m Message
var resp Session
if err := json.Unmarshal(msg.Payload(), &resp); err != nil {
fmt.Printf("Message could not be parsed (%s): %s", msg.Payload(), err)
return
}
fmt.Println(resp)
switch resp.Type {
case CMDMSG_OFFER:
enc.Decode(resp.Data, &m)
Notice(m)
case CMDMSG_DISC:
var devcmd DiscoveryCmd
enc.Decode(resp.Data, &devcmd)
DiscoveryDev(&devcmd)
case CMDMSG_WAKE:
var fing Fing
enc.Decode(resp.Data, &fing)
wakemac(fing)
case CMDMSG_UPDATE:
var newver *versionUpdate
GetUpdateMyself(newver)
case CMDMSG_MR2:
var mr2info Mr2Msg
enc.Decode(resp.Data, &mr2info)
Mr2HostPort(&mr2info)
}
}
func Mr2HostPort(mr2info *Mr2Msg) {
arg := fmt.Sprintf("client -s %s -p %s -P %d -c %s", mr2info.ServerAddr, mr2info.Password, mr2info.ExposePort, mr2info.ExposeAddr)
fmt.Println("mr2", arg)
err := fmt.Errorf("")
//err := sys.CmdRun("./mr2", arg)
if err != nil {
CmdFeedBack(CMDMSG_MR2, 0, err.Error(), time.Now().String())
return
} else {
		CmdFeedBack(CMDMSG_MR2, 1, "success", time.Now().String())
}
}
func wakemac(fing Fing) {
for _, v := range fing.Devices {
wol.Wake(v.Mac, "", "", "")
}
}
func DiscoveryDev(devcmd *DiscoveryCmd) {
go func() {
switch devcmd.DevType {
case DEVICE_IP:
dev := DiscoveryDevice()
req := &Session{}
req.Type = "discoveryrsp"
req.DeviceId = "kvm1"
req.Data = enc.Encode(dev) //enc.Encode(answer)
answermsg := PublishMsg{
Topic: "discoveryrsp",
Msg: req,
}
fmt.Println("discoveryrsp", answermsg)
SendMsg(answermsg) //response)
case DEVICE_ONVIF:
case DEVICE_SNMP:
case DEVICE_MODBUS:
case DEVICE_BACNET:
case DEVICE_CAN:
case DEVICE_UPCA:
}
}()
}
func CmdFeedBack(cmdstr string, status int, err string, sid string) {
resp := ResponseMsg{
Cmdstr: cmdstr,
Status: status,
Err: err,
Sid: sid,
}
req := &Session{}
req.Type = "cmdFeedback"
req.DeviceId = "kvm1"
req.Data = enc.Encode(resp) //enc.Encode(answer)
answermsg := PublishMsg{
Topic: "cmdFeedback",
Msg: req,
}
fmt.Println("cmdFeedback", answermsg)
SendMsg(answermsg) //response)
}
func GetCurrentPath() string {
getwd, err := os.Getwd()
if err != nil {
fmt.Print(err.Error())
} else {
fmt.Print(getwd)
}
return getwd
}
func GetUpdateMyself(newver *versionUpdate) {
if newver.ForceUpdate == 1 {
if newver.DownLoadUrl != "" {
go func() {
filepath := GetCurrentPath() + "/" + newver.Version
fileext := ".zip"
filename, err := DownloadFile(filepath, newver.DownLoadUrl, fileext)
if err != nil {
CmdFeedBack(CMDMSG_UPDATE, 0, err.Error(), "1")
} else {
fmt.Println("Download Finished")
CmdFeedBack(CMDMSG_UPDATE, 1, "Download Finished", "1")
if IsZip(filename) {
err = Unzip(filename, filepath)
if err != nil {
CmdFeedBack(CMDMSG_UPDATE, 0, err.Error(), "1")
} else {
CmdFeedBack(CMDMSG_UPDATE, 1, "zip ok", "1")
}
}
					//Create a new directory named after the version number and unzip the package into it.
					//Later, write an update config file to tell the Process daemon that an update is needed,
					//then exit immediately or update according to some policy.
					//When the daemon sees that an update is needed, it runs the program from the new path in the update config file.
}
}()
}
}
}
/*
func SendMsgAnswer(msg Answer) {
msgChans <- msg
fmt.Print("SendMsg OK")
//mqttclient.Publish(Config.Mqtt.PUBTOPIC+"/"+Config.Report.SN, Config.Mqtt.QOS, false, msg)
}
*/
func SendMsg(msg PublishMsg) {
msgChans <- msg
fmt.Print("SendMsg OK")
//mqttclient.Publish(Config.Mqtt.PUBTOPIC+"/"+Config.Report.SN, Config.Mqtt.QOS, false, msg)
}
func Star | t, "[CRITICAL] ", 0)
// mqtt.WARN = log.New(os.Stdout, "[WARN] ", 0)
// mqtt.DEBUG = log.New(os.Stdout, "[DEBUG] ", 0)
// Create a handler that will deal with incoming messages
h := NewHandler()
defer h.Close()
msgChans = make(chan PublishMsg, 10)
// Now we establish the connection to the mqtt broker
opts := mqtt.NewClientOptions()
opts.AddBroker(config.Config.Mqtt.SERVERADDRESS)
opts.SetClientID(config.Config.Mqtt.CLIENTID)
opts.ConnectTimeout = time.Second // Minimal delays on connect
opts.WriteTimeout = time.Second // Minimal delays on writes
	opts.KeepAlive = 30 // Keepalive every 30 seconds so we quickly detect network outages
opts.PingTimeout = time.Second // local broker so response should be quick
// Automate connection management (will keep trying to connect and will reconnect if network drops)
opts.ConnectRetry = true
opts.AutoReconnect = true
// If using QOS2 and CleanSession = FALSE then it is possible that we will receive messages on topics that we
// have not subscribed to here (if they were previously subscribed to they are part of the session and survive
// disconnect/reconnect). Adding a DefaultPublishHandler lets us detect this.
opts.DefaultPublishHandler = func(_ mqtt.Client, msg mqtt.Message) {
fmt.Printf("UNEXPECTED MESSAGE: %s\n", msg)
}
// Log events
opts.OnConnectionLost = func(cl mqtt.Client, err error) {
fmt.Println("connection lost")
}
opts.OnConnect = func(c mqtt.Client) {
fmt.Println("connection established")
		// Establish the subscription - doing this here means that it will happen every time a connection is established
// (useful if opts.CleanSession is TRUE or the broker does not reliably store session data)
t := c.Subscribe(config.Config.Mqtt.SUBTOPIC, config.Config.Mqtt.QOS, h.handle)
		// the connection handler is called in a goroutine so blocking here would not cause an issue. However, as blocking
		// in other handlers does cause problems, it's best to just assume we should not block
go func() {
_ = t.Wait() // Can also use '<-t.Done()' in releases > 1.2.0
if t.Error() != nil {
fmt.Printf("ERROR SUBSCRIBING: %s\n", t.Error())
} else {
fmt.Println("subscribed to: ", config.Config.Mqtt.SUBTOPIC)
}
}()
}
opts.OnReconnecting = func(mqtt.Client, *mqtt.ClientOptions) {
fmt.Println("attempting to reconnect")
}
//
// Connect to the broker
//
client := mqtt.NewClient(opts)
// If using QOS2 and CleanSession = FALSE then messages may be transmitted to us before the subscribe completes.
// Adding routes prior to connecting is a way of ensuring that these messages are processed
client.AddRoute(config.Config.Mqtt.SUBTOPIC, h.handle)
if token := client.Connect(); token.Wait() && token.Error() != nil {
panic(token.Error())
}
fmt.Println("Connection is up")
done := make(chan struct{})
var wg sync.WaitGroup
wg.Add(1)
go func() {
var count uint64
for {
select {
case data := <-msgChans:
msg, err := json.Marshal(data.Msg)
if err != nil {
panic(err)
}
//t := client.Publish(Config.Mqtt.PUBTOPIC+"/"+Config.Report.SN, Config.Mqtt.QOS, false, msg)
t := client.Publish(config.Config.Mqtt.PUBTOPIC+"/"+data.Topic, config.Config.Mqtt.QOS, false, msg)
go func() {
_ = t.Wait() // Can also use '<-t.Done()' in releases > 1.2.0
if t.Error() != nil {
fmt.Printf("msg PUBLISHING: %s\n", t.Error().Error())
} else {
//fmt.Println("msg PUBLISHING:", msg)
}
}()
case <-time.After(time.Second * time.Duration(config.Config.Mqtt.HEARTTIME)):
req := &Session{}
req.Type = "heart"
req.DeviceId = config.Config.Mqtt.CLIENTID //"kvm1"
count += 1
msg, err := json.Marshal(heartmsg{Count: count})
if err != nil {
panic(err)
}
req.Data = enc.Encode(msg)
//data := signal.Encode(*peerConnection.LocalDescription())
answermsg := PublishMsg{
Topic: "heart",
Msg: req,
}
msg, err = json.Marshal(answermsg.Msg)
if err != nil {
panic(err)
}
t := client.Publish(config.Config.Mqtt.PUBTOPIC+"/"+answermsg.Topic, config.Config.Mqtt.QOS, false, msg)
// Handle the token in a go routine so this loop keeps sending messages regardless of delivery status
go func() {
_ = t.Wait() // Can also use '<-t.Done()' in releases > 1.2.0
if t.Error() != nil {
fmt.Printf("ERROR PUBLISHING: %s\n", t.Error().Error())
} else {
//fmt.Println("HEART PUBLISHING: ", msg)
}
}()
case <-done:
fmt.Println("publisher done")
wg.Done()
return
}
}
}()
// Messages will be delivered asynchronously so we just need to wait for a signal to shutdown
sig := make(chan os.Signal, 1)
signal.Notify(sig, os.Interrupt)
signal.Notify(sig, syscall.SIGTERM)
<-sig
fmt.Println("signal caught - exiting")
client.Disconnect(1000)
fmt.Println("shutdown complete")
}
/*
// Connect to the broker and publish a message periodically
const (
TOPIC = "topic1"
QOS = 1
SERVERADDRESS = "tcp://mosquitto:1883"
DELAY = time.Second
CLIENTID = "mqtt_publisher"
)
func main() {
// Enable logging by uncommenting the below
// mqtt.ERROR = log.New(os.Stdout, "[ERROR] ", 0)
// mqtt.CRITICAL = log.New(os.Stdout, "[CRITICAL] ", 0)
// mqtt.WARN = log.New(os.Stdout, "[WARN] ", 0)
// mqtt.DEBUG = log.New(os.Stdout, "[DEBUG] ", 0)
opts := mqtt.NewClientOptions()
opts.AddBroker(SERVERADDRESS)
opts.SetClientID(CLIENTID)
opts.ConnectTimeout = time.Second // Minimal delays on connect
opts.WriteTimeout = time.Second // Minimal delays on writes
opts.KeepAlive = 10 // Keepalive every 10 seconds so we quickly detect network outages
opts.PingTimeout = time.Second // local broker so response should be quick
// Automate connection management (will keep trying to connect and will reconnect if network drops)
opts.ConnectRetry = true
opts.AutoReconnect = true
// Log events
opts.OnConnectionLost = func(cl mqtt.Client, err error) {
fmt.Println("connection lost")
}
opts.OnConnect = func(mqtt.Client) {
fmt.Println("connection established")
}
opts.OnReconnecting = func(mqtt.Client, *mqtt.ClientOptions) {
fmt.Println("attempting to reconnect")
}
//
// Connect to the broker
//
client := mqtt.NewClient(opts)
if token := client.Connect(); token.Wait() && token.Error() != nil {
panic(token.Error())
}
fmt.Println("Connection is up")
//
// Publish messages until we receive a signal
//
done := make(chan struct{})
var wg sync.WaitGroup
// The message could be anything; lets make it JSON containing a simple count (makes it simpler to track the messages)
type msg struct {
Count uint64
}
wg.Add(1)
go func() {
var count uint64
for {
select {
case <-time.After(DELAY):
count += 1
msg, err := json.Marshal(msg{Count: count})
if err != nil {
panic(err)
}
t := client.Publish(TOPIC, QOS, false, msg)
// Handle the token in a go routine so this loop keeps sending messages regardless of delivery status
go func() {
_ = t.Wait() // Can also use '<-t.Done()' in releases > 1.2.0
if t.Error() != nil {
fmt.Printf("ERROR PUBLISHING: %s\n", err)
}
}()
case <-done:
fmt.Println("publisher done")
wg.Done()
return
}
}
}()
// Wait for a signal before exiting
sig := make(chan os.Signal, 1)
signal.Notify(sig, os.Interrupt)
signal.Notify(sig, syscall.SIGTERM)
<-sig
fmt.Println("signal caught - exiting")
close(done)
wg.Wait()
fmt.Println("shutdown complete")
}
*/
| tMqtt() {
// Enable logging by uncommenting the below
// mqtt.ERROR = log.New(os.Stdout, "[ERROR] ", 0)
// mqtt.CRITICAL = log.New(os.Stdou | identifier_body |
mqttclient.go | package kvm
// Connect to the broker, subscribe, and write messages received to a file
import (
"encoding/json"
"fmt"
"os"
"os/signal"
"sync"
"syscall"
"time"
//"github.com/didi/nightingale/src/modules/agent/config"
//"github.com/didi/nightingale/src/modules/agent/wol"
"github.com/pion/rtsp-bench/server/config"
"github.com/pion/rtsp-bench/server/wol"
mqtt "github.com/eclipse/paho.mqtt.golang"
enc "github.com/pion/rtsp-bench/server/signal"
)
/*
const (
TOPIC = "topic1"
QOS = 1
SERVERADDRESS = "tcp://mosquitto:1883"
CLIENTID = "mqtt_subscriber"
WRITETOLOG = true // If true then received messages will be written to the console
WRITETODISK = false // If true then received messages will be written to the file below
OUTPUTFILE = "/binds/receivedMessages.txt"
)
*/
// handler is a simple struct that provides a function to be called when a message is received. The message is parsed
// and the count followed by the raw message is written to the file (this makes it easier to sort the file)
type handler struct {
f *os.File
}
//var mqttclient mqtt.Client
var (
msgChans chan PublishMsg //prompb.WriteRequest //multi node one chan
)
type heartmsg struct {
Count uint64
}
func NewHandler() *handler {
var f *os.File
if config.Config.Mqtt.WRITETODISK {
var err error
f, err = os.Create(config.Config.Mqtt.OUTPUTFILE)
if err != nil {
panic(err)
}
}
return &handler{f: f}
}
// Close closes the file
func (o *handler) Close() {
if o.f != nil {
if err := o.f.Close(); err != nil {
fmt.Printf("ERROR closing file: %s", err)
}
o.f = nil
}
}
// handle is called when a message is received
func (o *handler) handle(client mqtt.Client, msg mqtt.Message) {
	// Parse the incoming payload as a Session and dispatch on its message type.
var m Message
var resp Session
if err := json.Unmarshal(msg.Payload(), &resp); err != nil {
fmt.Printf("Message could not be parsed (%s): %s", msg.Payload(), err)
return
}
fmt.Println(resp)
switch resp.Type {
case CMDMSG_OFFER:
enc.Decode(resp.Data, &m)
Notice(m)
case CMDMSG_DISC:
var devcmd DiscoveryCmd
enc.Decode(resp.Data, &devcmd)
DiscoveryDev(&devcmd)
case CMDMSG_WAKE:
var fing Fing
enc.Decode(resp.Data, &fing)
wakemac(fing)
case CMDMSG_UPDATE:
var newver *versionUpdate
GetUpdateMyself(newver)
case CMDMSG_MR2:
var mr2info Mr2Msg
enc.Decode(resp.Data, &mr2info)
Mr2HostPort(&mr2info)
}
}
func Mr2HostPort(mr2info *Mr2Msg) {
arg := fmt.Sprintf("client -s %s -p %s -P %d -c %s", mr2info.ServerAddr, mr2info.Password, mr2info.ExposePort, mr2info.ExposeAddr)
fmt.Println("mr2", arg)
err := fmt.Errorf("")
//err := sys.CmdRun("./mr2", arg)
if err != nil {
CmdFeedBack(CMDMSG_MR2, 0, err.Error(), time.Now().String())
return
} else {
		CmdFeedBack(CMDMSG_MR2, 1, "success", time.Now().String())
}
}
func wakemac(fing Fing) {
for _, v := range fing.Devices {
wol.Wake(v.Mac, "", "", "")
}
}
func DiscoveryDev(devcmd *DiscoveryCmd) {
go func() {
switch devcmd.DevType {
case DEVICE_IP:
dev := DiscoveryDevice()
req := &Session{}
req.Type = "discoveryrsp"
req.DeviceId = "kvm1"
req.Data = enc.Encode(dev) //enc.Encode(answer)
answermsg := PublishMsg{
Topic: "discoveryrsp",
Msg: req,
}
fmt.Println("discoveryrsp", answermsg)
SendMsg(answermsg) //response)
case DEVICE_ONVIF:
case DEVICE_SNMP:
case DEVICE_MODBUS:
case DEVICE_BACNET:
case DEVICE_CAN:
case DEVICE_UPCA:
}
}()
}
func CmdFeedBack(cmdstr string, status int, err string, sid string) {
resp := ResponseMsg{
Cmdstr: cmdstr,
Status: status,
Err: err,
Sid: sid,
}
req := &Session{}
req.Type = "cmdFeedback"
req.DeviceId = "kvm1"
req.Data = enc.Encode(resp) //enc.Encode(answer)
answermsg := PublishMsg{
Topic: "cmdFeedback",
Msg: req,
}
fmt.Println("cmdFeedback", answermsg)
SendMsg(answermsg) //response)
}
func GetCurrentPath() string {
getwd, err := os.Getwd()
if err != nil {
fmt.Print(err.Error())
} else {
fmt.Print(getwd)
}
return getwd
}
func GetUpdateMyself(newver *versionUpdate) {
if newver.ForceUpdate == 1 {
if newver.DownLoadUrl != "" {
go func() {
filepath := GetCurrentPath() + "/" + newver.Version
fileext := ".zip"
filename, err := DownloadFile(filepath, newver.DownLoadUrl, fileext)
if err != nil {
CmdFeedBack(CMDMSG_UPDATE, 0, err.Error(), "1")
} else {
fmt.Println("Download Finished")
CmdFeedBack(CMDMSG_UPDATE, 1, "Download Finished", "1")
if IsZip(filename) {
err = Unzip(filename, filepath)
if err != nil {
CmdFeedBack(CMDMSG_UPDATE, 0, err.Error(), "1")
} else {
CmdFeedBack(CMDMSG_UPDATE, 1, "zip ok", "1")
}
}
					//Create a new directory named after the version number and unzip the package into it.
					//Later, write an update config file to tell the Process daemon that an update is needed,
					//then exit immediately or update according to some policy.
					//When the daemon sees that an update is needed, it runs the program from the new path in the update config file.
}
}()
}
}
}
/*
func SendMsgAnswer(msg Answer) {
msgChans <- msg
fmt.Print("SendMsg OK")
//mqttclient.Publish(Config.Mqtt.PUBTOPIC+"/"+Config.Report.SN, Config.Mqtt.QOS, false, msg)
}
*/
func SendMsg(msg PublishMsg) {
msgChans <- msg
fmt.Print("SendMsg OK")
//mqttclient.Publish(Config.Mqtt.PUBTOPIC+"/"+Config.Report.SN, Config.Mqtt.QOS, false, msg)
}
func StartMqtt() {
// Enable logging by uncommenting the below
// mqtt.ERROR = log.New(os.Stdout, "[ERROR] ", 0)
// mqtt.CRITICAL = log.New(os.Stdout, "[CRIT | 0)
// mqtt.WARN = log.New(os.Stdout, "[WARN] ", 0)
// mqtt.DEBUG = log.New(os.Stdout, "[DEBUG] ", 0)
// Create a handler that will deal with incoming messages
h := NewHandler()
defer h.Close()
msgChans = make(chan PublishMsg, 10)
// Now we establish the connection to the mqtt broker
opts := mqtt.NewClientOptions()
opts.AddBroker(config.Config.Mqtt.SERVERADDRESS)
opts.SetClientID(config.Config.Mqtt.CLIENTID)
opts.ConnectTimeout = time.Second // Minimal delays on connect
opts.WriteTimeout = time.Second // Minimal delays on writes
	opts.KeepAlive = 30 // Keepalive every 30 seconds so we quickly detect network outages
opts.PingTimeout = time.Second // local broker so response should be quick
// Automate connection management (will keep trying to connect and will reconnect if network drops)
opts.ConnectRetry = true
opts.AutoReconnect = true
// If using QOS2 and CleanSession = FALSE then it is possible that we will receive messages on topics that we
// have not subscribed to here (if they were previously subscribed to they are part of the session and survive
// disconnect/reconnect). Adding a DefaultPublishHandler lets us detect this.
opts.DefaultPublishHandler = func(_ mqtt.Client, msg mqtt.Message) {
fmt.Printf("UNEXPECTED MESSAGE: %s\n", msg)
}
// Log events
opts.OnConnectionLost = func(cl mqtt.Client, err error) {
fmt.Println("connection lost")
}
opts.OnConnect = func(c mqtt.Client) {
fmt.Println("connection established")
		// Establish the subscription - doing this here means that it will happen every time a connection is established
// (useful if opts.CleanSession is TRUE or the broker does not reliably store session data)
t := c.Subscribe(config.Config.Mqtt.SUBTOPIC, config.Config.Mqtt.QOS, h.handle)
		// the connection handler is called in a goroutine so blocking here would not cause an issue. However, as blocking
		// in other handlers does cause problems, it's best to just assume we should not block
go func() {
_ = t.Wait() // Can also use '<-t.Done()' in releases > 1.2.0
if t.Error() != nil {
fmt.Printf("ERROR SUBSCRIBING: %s\n", t.Error())
} else {
fmt.Println("subscribed to: ", config.Config.Mqtt.SUBTOPIC)
}
}()
}
opts.OnReconnecting = func(mqtt.Client, *mqtt.ClientOptions) {
fmt.Println("attempting to reconnect")
}
//
// Connect to the broker
//
client := mqtt.NewClient(opts)
// If using QOS2 and CleanSession = FALSE then messages may be transmitted to us before the subscribe completes.
// Adding routes prior to connecting is a way of ensuring that these messages are processed
client.AddRoute(config.Config.Mqtt.SUBTOPIC, h.handle)
if token := client.Connect(); token.Wait() && token.Error() != nil {
panic(token.Error())
}
fmt.Println("Connection is up")
done := make(chan struct{})
var wg sync.WaitGroup
wg.Add(1)
go func() {
var count uint64
for {
select {
case data := <-msgChans:
msg, err := json.Marshal(data.Msg)
if err != nil {
panic(err)
}
//t := client.Publish(Config.Mqtt.PUBTOPIC+"/"+Config.Report.SN, Config.Mqtt.QOS, false, msg)
t := client.Publish(config.Config.Mqtt.PUBTOPIC+"/"+data.Topic, config.Config.Mqtt.QOS, false, msg)
go func() {
_ = t.Wait() // Can also use '<-t.Done()' in releases > 1.2.0
if t.Error() != nil {
fmt.Printf("msg PUBLISHING: %s\n", t.Error().Error())
} else {
//fmt.Println("msg PUBLISHING:", msg)
}
}()
case <-time.After(time.Second * time.Duration(config.Config.Mqtt.HEARTTIME)):
req := &Session{}
req.Type = "heart"
req.DeviceId = config.Config.Mqtt.CLIENTID //"kvm1"
count += 1
msg, err := json.Marshal(heartmsg{Count: count})
if err != nil {
panic(err)
}
req.Data = enc.Encode(msg)
//data := signal.Encode(*peerConnection.LocalDescription())
answermsg := PublishMsg{
Topic: "heart",
Msg: req,
}
msg, err = json.Marshal(answermsg.Msg)
if err != nil {
panic(err)
}
t := client.Publish(config.Config.Mqtt.PUBTOPIC+"/"+answermsg.Topic, config.Config.Mqtt.QOS, false, msg)
// Handle the token in a go routine so this loop keeps sending messages regardless of delivery status
go func() {
_ = t.Wait() // Can also use '<-t.Done()' in releases > 1.2.0
if t.Error() != nil {
fmt.Printf("ERROR PUBLISHING: %s\n", t.Error().Error())
} else {
//fmt.Println("HEART PUBLISHING: ", msg)
}
}()
case <-done:
fmt.Println("publisher done")
wg.Done()
return
}
}
}()
// Messages will be delivered asynchronously so we just need to wait for a signal to shutdown
sig := make(chan os.Signal, 1)
signal.Notify(sig, os.Interrupt)
signal.Notify(sig, syscall.SIGTERM)
<-sig
fmt.Println("signal caught - exiting")
client.Disconnect(1000)
fmt.Println("shutdown complete")
}
/*
// Connect to the broker and publish a message periodically
const (
TOPIC = "topic1"
QOS = 1
SERVERADDRESS = "tcp://mosquitto:1883"
DELAY = time.Second
CLIENTID = "mqtt_publisher"
)
func main() {
// Enable logging by uncommenting the below
// mqtt.ERROR = log.New(os.Stdout, "[ERROR] ", 0)
// mqtt.CRITICAL = log.New(os.Stdout, "[CRITICAL] ", 0)
// mqtt.WARN = log.New(os.Stdout, "[WARN] ", 0)
// mqtt.DEBUG = log.New(os.Stdout, "[DEBUG] ", 0)
opts := mqtt.NewClientOptions()
opts.AddBroker(SERVERADDRESS)
opts.SetClientID(CLIENTID)
opts.ConnectTimeout = time.Second // Minimal delays on connect
opts.WriteTimeout = time.Second // Minimal delays on writes
opts.KeepAlive = 10 // Keepalive every 10 seconds so we quickly detect network outages
opts.PingTimeout = time.Second // local broker so response should be quick
// Automate connection management (will keep trying to connect and will reconnect if network drops)
opts.ConnectRetry = true
opts.AutoReconnect = true
// Log events
opts.OnConnectionLost = func(cl mqtt.Client, err error) {
fmt.Println("connection lost")
}
opts.OnConnect = func(mqtt.Client) {
fmt.Println("connection established")
}
opts.OnReconnecting = func(mqtt.Client, *mqtt.ClientOptions) {
fmt.Println("attempting to reconnect")
}
//
// Connect to the broker
//
client := mqtt.NewClient(opts)
if token := client.Connect(); token.Wait() && token.Error() != nil {
panic(token.Error())
}
fmt.Println("Connection is up")
//
// Publish messages until we receive a signal
//
done := make(chan struct{})
var wg sync.WaitGroup
// The message could be anything; lets make it JSON containing a simple count (makes it simpler to track the messages)
type msg struct {
Count uint64
}
wg.Add(1)
go func() {
var count uint64
for {
select {
case <-time.After(DELAY):
count += 1
msg, err := json.Marshal(msg{Count: count})
if err != nil {
panic(err)
}
t := client.Publish(TOPIC, QOS, false, msg)
// Handle the token in a go routine so this loop keeps sending messages regardless of delivery status
go func() {
_ = t.Wait() // Can also use '<-t.Done()' in releases > 1.2.0
if t.Error() != nil {
fmt.Printf("ERROR PUBLISHING: %s\n", err)
}
}()
case <-done:
fmt.Println("publisher done")
wg.Done()
return
}
}
}()
// Wait for a signal before exiting
sig := make(chan os.Signal, 1)
signal.Notify(sig, os.Interrupt)
signal.Notify(sig, syscall.SIGTERM)
<-sig
fmt.Println("signal caught - exiting")
close(done)
wg.Wait()
fmt.Println("shutdown complete")
}
*/
| ICAL] ", | identifier_name |
mqttclient.go | package kvm
// Connect to the broker, subscribe, and write messages received to a file
import (
"encoding/json"
"fmt"
"os"
"os/signal"
"sync"
"syscall"
"time"
//"github.com/didi/nightingale/src/modules/agent/config"
//"github.com/didi/nightingale/src/modules/agent/wol"
"github.com/pion/rtsp-bench/server/config"
"github.com/pion/rtsp-bench/server/wol"
mqtt "github.com/eclipse/paho.mqtt.golang"
enc "github.com/pion/rtsp-bench/server/signal"
)
/*
const (
TOPIC = "topic1"
QOS = 1
SERVERADDRESS = "tcp://mosquitto:1883"
CLIENTID = "mqtt_subscriber"
WRITETOLOG = true // If true then received messages will be written to the console
WRITETODISK = false // If true then received messages will be written to the file below
OUTPUTFILE = "/binds/receivedMessages.txt"
)
*/
// handler is a simple struct that provides a function to be called when a message is received. The message is parsed
// and the count followed by the raw message is written to the file (this makes it easier to sort the file)
type handler struct {
f *os.File
}
//var mqttclient mqtt.Client
var (
msgChans chan PublishMsg //prompb.WriteRequest //multi node one chan
)
type heartmsg struct {
Count uint64
}
func NewHandler() *handler {
var f *os.File
if config.Config.Mqtt.WRITETODISK {
var err error
f, err = os.Create(config.Config.Mqtt.OUTPUTFILE)
if err != nil {
panic(err)
}
}
return &handler{f: f}
}
// Close closes the file
func (o *handler) Close() {
if o.f != nil {
if err := o.f.Close(); err != nil |
o.f = nil
}
}
// handle is called when a message is received
func (o *handler) handle(client mqtt.Client, msg mqtt.Message) {
	// Parse the incoming payload as a Session and dispatch on its message type.
var m Message
var resp Session
if err := json.Unmarshal(msg.Payload(), &resp); err != nil {
fmt.Printf("Message could not be parsed (%s): %s", msg.Payload(), err)
return
}
fmt.Println(resp)
switch resp.Type {
case CMDMSG_OFFER:
enc.Decode(resp.Data, &m)
Notice(m)
case CMDMSG_DISC:
var devcmd DiscoveryCmd
enc.Decode(resp.Data, &devcmd)
DiscoveryDev(&devcmd)
case CMDMSG_WAKE:
var fing Fing
enc.Decode(resp.Data, &fing)
wakemac(fing)
case CMDMSG_UPDATE:
var newver *versionUpdate
GetUpdateMyself(newver)
case CMDMSG_MR2:
var mr2info Mr2Msg
enc.Decode(resp.Data, &mr2info)
Mr2HostPort(&mr2info)
}
}
func Mr2HostPort(mr2info *Mr2Msg) {
arg := fmt.Sprintf("client -s %s -p %s -P %d -c %s", mr2info.ServerAddr, mr2info.Password, mr2info.ExposePort, mr2info.ExposeAddr)
fmt.Println("mr2", arg)
err := fmt.Errorf("")
//err := sys.CmdRun("./mr2", arg)
if err != nil {
CmdFeedBack(CMDMSG_MR2, 0, err.Error(), time.Now().String())
return
} else {
		CmdFeedBack(CMDMSG_MR2, 1, "success", time.Now().String())
}
}
func wakemac(fing Fing) {
for _, v := range fing.Devices {
wol.Wake(v.Mac, "", "", "")
}
}
func DiscoveryDev(devcmd *DiscoveryCmd) {
go func() {
switch devcmd.DevType {
case DEVICE_IP:
dev := DiscoveryDevice()
req := &Session{}
req.Type = "discoveryrsp"
req.DeviceId = "kvm1"
req.Data = enc.Encode(dev) //enc.Encode(answer)
answermsg := PublishMsg{
Topic: "discoveryrsp",
Msg: req,
}
fmt.Println("discoveryrsp", answermsg)
SendMsg(answermsg) //response)
case DEVICE_ONVIF:
case DEVICE_SNMP:
case DEVICE_MODBUS:
case DEVICE_BACNET:
case DEVICE_CAN:
case DEVICE_UPCA:
}
}()
}
func CmdFeedBack(cmdstr string, status int, err string, sid string) {
resp := ResponseMsg{
Cmdstr: cmdstr,
Status: status,
Err: err,
Sid: sid,
}
req := &Session{}
req.Type = "cmdFeedback"
req.DeviceId = "kvm1"
req.Data = enc.Encode(resp) //enc.Encode(answer)
answermsg := PublishMsg{
Topic: "cmdFeedback",
Msg: req,
}
fmt.Println("cmdFeedback", answermsg)
SendMsg(answermsg) //response)
}
func GetCurrentPath() string {
getwd, err := os.Getwd()
if err != nil {
fmt.Print(err.Error())
} else {
fmt.Print(getwd)
}
return getwd
}
func GetUpdateMyself(newver *versionUpdate) {
if newver.ForceUpdate == 1 {
if newver.DownLoadUrl != "" {
go func() {
filepath := GetCurrentPath() + "/" + newver.Version
fileext := ".zip"
filename, err := DownloadFile(filepath, newver.DownLoadUrl, fileext)
if err != nil {
CmdFeedBack(CMDMSG_UPDATE, 0, err.Error(), "1")
} else {
fmt.Println("Download Finished")
CmdFeedBack(CMDMSG_UPDATE, 1, "Download Finished", "1")
if IsZip(filename) {
err = Unzip(filename, filepath)
if err != nil {
CmdFeedBack(CMDMSG_UPDATE, 0, err.Error(), "1")
} else {
CmdFeedBack(CMDMSG_UPDATE, 1, "zip ok", "1")
}
}
					//Create a new directory named after the version number and unzip the package into it.
					//Later, write an update config file to tell the Process daemon that an update is needed,
					//then exit immediately or update according to some policy.
					//When the daemon sees that an update is needed, it runs the program from the new path in the update config file.
}
}()
}
}
}
/*
func SendMsgAnswer(msg Answer) {
msgChans <- msg
fmt.Print("SendMsg OK")
//mqttclient.Publish(Config.Mqtt.PUBTOPIC+"/"+Config.Report.SN, Config.Mqtt.QOS, false, msg)
}
*/
func SendMsg(msg PublishMsg) {
msgChans <- msg
fmt.Print("SendMsg OK")
//mqttclient.Publish(Config.Mqtt.PUBTOPIC+"/"+Config.Report.SN, Config.Mqtt.QOS, false, msg)
}
func StartMqtt() {
// Enable logging by uncommenting the below
// mqtt.ERROR = log.New(os.Stdout, "[ERROR] ", 0)
// mqtt.CRITICAL = log.New(os.Stdout, "[CRITICAL] ", 0)
// mqtt.WARN = log.New(os.Stdout, "[WARN] ", 0)
// mqtt.DEBUG = log.New(os.Stdout, "[DEBUG] ", 0)
// Create a handler that will deal with incoming messages
h := NewHandler()
defer h.Close()
msgChans = make(chan PublishMsg, 10)
// Now we establish the connection to the mqtt broker
opts := mqtt.NewClientOptions()
opts.AddBroker(config.Config.Mqtt.SERVERADDRESS)
opts.SetClientID(config.Config.Mqtt.CLIENTID)
opts.ConnectTimeout = time.Second // Minimal delays on connect
opts.WriteTimeout = time.Second // Minimal delays on writes
	opts.KeepAlive = 30 // Keepalive every 30 seconds so we quickly detect network outages
opts.PingTimeout = time.Second // local broker so response should be quick
// Automate connection management (will keep trying to connect and will reconnect if network drops)
opts.ConnectRetry = true
opts.AutoReconnect = true
// If using QOS2 and CleanSession = FALSE then it is possible that we will receive messages on topics that we
// have not subscribed to here (if they were previously subscribed to they are part of the session and survive
// disconnect/reconnect). Adding a DefaultPublishHandler lets us detect this.
opts.DefaultPublishHandler = func(_ mqtt.Client, msg mqtt.Message) {
fmt.Printf("UNEXPECTED MESSAGE: %s\n", msg)
}
// Log events
opts.OnConnectionLost = func(cl mqtt.Client, err error) {
fmt.Println("connection lost")
}
opts.OnConnect = func(c mqtt.Client) {
fmt.Println("connection established")
		// Establish the subscription - doing this here means that it will happen every time a connection is established
// (useful if opts.CleanSession is TRUE or the broker does not reliably store session data)
t := c.Subscribe(config.Config.Mqtt.SUBTOPIC, config.Config.Mqtt.QOS, h.handle)
		// the connection handler is called in a goroutine so blocking here would not cause an issue. However, as blocking
		// in other handlers does cause problems, it's best to just assume we should not block
go func() {
_ = t.Wait() // Can also use '<-t.Done()' in releases > 1.2.0
if t.Error() != nil {
fmt.Printf("ERROR SUBSCRIBING: %s\n", t.Error())
} else {
fmt.Println("subscribed to: ", config.Config.Mqtt.SUBTOPIC)
}
}()
}
opts.OnReconnecting = func(mqtt.Client, *mqtt.ClientOptions) {
fmt.Println("attempting to reconnect")
}
//
// Connect to the broker
//
client := mqtt.NewClient(opts)
// If using QOS2 and CleanSession = FALSE then messages may be transmitted to us before the subscribe completes.
// Adding routes prior to connecting is a way of ensuring that these messages are processed
client.AddRoute(config.Config.Mqtt.SUBTOPIC, h.handle)
if token := client.Connect(); token.Wait() && token.Error() != nil {
panic(token.Error())
}
fmt.Println("Connection is up")
done := make(chan struct{})
var wg sync.WaitGroup
wg.Add(1)
go func() {
var count uint64
for {
select {
case data := <-msgChans:
msg, err := json.Marshal(data.Msg)
if err != nil {
panic(err)
}
//t := client.Publish(Config.Mqtt.PUBTOPIC+"/"+Config.Report.SN, Config.Mqtt.QOS, false, msg)
t := client.Publish(config.Config.Mqtt.PUBTOPIC+"/"+data.Topic, config.Config.Mqtt.QOS, false, msg)
go func() {
_ = t.Wait() // Can also use '<-t.Done()' in releases > 1.2.0
if t.Error() != nil {
fmt.Printf("msg PUBLISHING: %s\n", t.Error().Error())
} else {
//fmt.Println("msg PUBLISHING:", msg)
}
}()
case <-time.After(time.Second * time.Duration(config.Config.Mqtt.HEARTTIME)):
req := &Session{}
req.Type = "heart"
req.DeviceId = config.Config.Mqtt.CLIENTID //"kvm1"
count += 1
msg, err := json.Marshal(heartmsg{Count: count})
if err != nil {
panic(err)
}
req.Data = enc.Encode(msg)
//data := signal.Encode(*peerConnection.LocalDescription())
answermsg := PublishMsg{
Topic: "heart",
Msg: req,
}
msg, err = json.Marshal(answermsg.Msg)
if err != nil {
panic(err)
}
t := client.Publish(config.Config.Mqtt.PUBTOPIC+"/"+answermsg.Topic, config.Config.Mqtt.QOS, false, msg)
// Handle the token in a go routine so this loop keeps sending messages regardless of delivery status
go func() {
_ = t.Wait() // Can also use '<-t.Done()' in releases > 1.2.0
if t.Error() != nil {
fmt.Printf("ERROR PUBLISHING: %s\n", t.Error().Error())
} else {
//fmt.Println("HEART PUBLISHING: ", msg)
}
}()
case <-done:
fmt.Println("publisher done")
wg.Done()
return
}
}
}()
// Messages will be delivered asynchronously so we just need to wait for a signal to shutdown
sig := make(chan os.Signal, 1)
signal.Notify(sig, os.Interrupt)
signal.Notify(sig, syscall.SIGTERM)
<-sig
fmt.Println("signal caught - exiting")
client.Disconnect(1000)
fmt.Println("shutdown complete")
}
/*
// Connect to the broker and publish a message periodically
const (
TOPIC = "topic1"
QOS = 1
SERVERADDRESS = "tcp://mosquitto:1883"
DELAY = time.Second
CLIENTID = "mqtt_publisher"
)
func main() {
// Enable logging by uncommenting the below
// mqtt.ERROR = log.New(os.Stdout, "[ERROR] ", 0)
// mqtt.CRITICAL = log.New(os.Stdout, "[CRITICAL] ", 0)
// mqtt.WARN = log.New(os.Stdout, "[WARN] ", 0)
// mqtt.DEBUG = log.New(os.Stdout, "[DEBUG] ", 0)
opts := mqtt.NewClientOptions()
opts.AddBroker(SERVERADDRESS)
opts.SetClientID(CLIENTID)
opts.ConnectTimeout = time.Second // Minimal delays on connect
opts.WriteTimeout = time.Second // Minimal delays on writes
opts.KeepAlive = 10 // Keepalive every 10 seconds so we quickly detect network outages
opts.PingTimeout = time.Second // local broker so response should be quick
// Automate connection management (will keep trying to connect and will reconnect if network drops)
opts.ConnectRetry = true
opts.AutoReconnect = true
// Log events
opts.OnConnectionLost = func(cl mqtt.Client, err error) {
fmt.Println("connection lost")
}
opts.OnConnect = func(mqtt.Client) {
fmt.Println("connection established")
}
opts.OnReconnecting = func(mqtt.Client, *mqtt.ClientOptions) {
fmt.Println("attempting to reconnect")
}
//
// Connect to the broker
//
client := mqtt.NewClient(opts)
if token := client.Connect(); token.Wait() && token.Error() != nil {
panic(token.Error())
}
fmt.Println("Connection is up")
//
// Publish messages until we receive a signal
//
done := make(chan struct{})
var wg sync.WaitGroup
// The message could be anything; let's make it JSON containing a simple count (makes it simpler to track the messages)
type msg struct {
Count uint64
}
wg.Add(1)
go func() {
var count uint64
for {
select {
case <-time.After(DELAY):
count += 1
msg, err := json.Marshal(msg{Count: count})
if err != nil {
panic(err)
}
t := client.Publish(TOPIC, QOS, false, msg)
// Handle the token in a go routine so this loop keeps sending messages regardless of delivery status
go func() {
_ = t.Wait() // Can also use '<-t.Done()' in releases > 1.2.0
if t.Error() != nil {
fmt.Printf("ERROR PUBLISHING: %s\n", err)
}
}()
case <-done:
fmt.Println("publisher done")
wg.Done()
return
}
}
}()
// Wait for a signal before exiting
sig := make(chan os.Signal, 1)
signal.Notify(sig, os.Interrupt)
signal.Notify(sig, syscall.SIGTERM)
<-sig
fmt.Println("signal caught - exiting")
close(done)
wg.Wait()
fmt.Println("shutdown complete")
}
*/
| {
fmt.Printf("ERROR closing file: %s", err)
} | conditional_block |
background.js | 'use strict'
import { app, protocol, BrowserWindow, globalShortcut, nativeTheme, ipcMain, dialog, Notification, shell, powerMonitor, session } from 'electron'
import { createProtocol } from 'vue-cli-plugin-electron-builder/lib'
import installExtension, { VUEJS_DEVTOOLS } from 'electron-devtools-installer'
import {
ElectronBlocker
} from '@cliqz/adblocker-electron'
import fetch from 'cross-fetch'
import { touchBar } from './main/touchbar'
import {
createMenu,
createFeedMenu,
createCategoryMenu,
createArticleItemMenu
} from './main/menu'
import axios from 'axios'
import os from 'os'
import Store from 'electron-store'
import log from 'electron-log'
import contextMenu from 'electron-context-menu'
import { autoUpdater } from 'electron-updater'
import fs from 'fs'
import path from 'path'
import { URL, URLSearchParams } from 'url'
import dayjs from 'dayjs'
import i18nextMainBackend from './i18nmain.config'
import {
parseArticle
} from './main/article'
require('v8-compile-cache')
const FormData = require('form-data')
const i18nextBackend = require('i18next-electron-fs-backend')
const isDevelopment = process.env.NODE_ENV !== 'production'
autoUpdater.logger = log
autoUpdater.logger.transports.file.level = 'info'
contextMenu({
showInspectElement: false
})
const store = new Store({
encryptionKey: process.env.VUE_APP_ENCRYPT_KEY,
clearInvalidConfig: true
})
// Keep a global reference of the window object, if you don't, the window will
// be closed automatically when the JavaScript object is garbage collected.
let win
let menu
let winUrl
let consumerKey
let code
// Scheme must be registered before the app is ready
protocol.registerSchemesAsPrivileged([
{ scheme: 'app', privileges: { secure: true, standard: true } }
])
async function createWindow () {
// Create the browser window.
win = new BrowserWindow({
minWidth: 1280,
minHeight: 720,
width: 1400,
height: 768,
title: 'Raven Reader',
maximizable: true,
webPreferences: {
// Use pluginOptions.nodeIntegration, leave this alone
// See nklayman.github.io/vue-cli-plugin-electron-builder/guide/security.html#node-integration for more info
webviewTag: true,
contextIsolation: true,
nodeIntegration: process.env.ELECTRON_NODE_INTEGRATION,
preload: path.join(__dirname, 'preload.js'),
disableBlinkFeatures: 'Auxclick'
}
})
// Maximize window on startup when not in development
if (!isDevelopment && win !== null) {
win.maximize()
}
i18nextBackend.mainBindings(ipcMain, win, fs)
ElectronBlocker.fromPrebuiltAdsAndTracking(fetch).then((blocker) => {
blocker.enableBlockingInSession(session.defaultSession)
})
win.setTouchBar(touchBar)
if (process.env.WEBPACK_DEV_SERVER_URL) {
// Load the url of the dev server if in development mode
winUrl = process.env.WEBPACK_DEV_SERVER_URL
if (!process.env.IS_TEST) win.webContents.openDevTools()
} else {
createProtocol('app')
winUrl = 'app://./index.html'
autoUpdater.checkForUpdatesAndNotify()
}
// Load the index.html when not in development
win.loadURL(winUrl)
const proxy = store.get('settings.proxy') ? store.get('settings.proxy') : null
let proxyRules = 'direct://'
if (proxy) {
if (proxy.http !== null && proxy.https === null) {
proxyRules = `http=${proxy.http},${proxyRules}`
}
if (proxy.http !== null && proxy.https !== null) {
proxyRules = `http=${proxy.http};https=${proxy.https},${proxyRules}`
}
}
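// Illustration with hypothetical values: settings.proxy = { http: '10.0.0.1:8080', https: '10.0.0.1:8443' }
// produces proxyRules 'http=10.0.0.1:8080;https=10.0.0.1:8443,direct://', i.e. fall back to a direct
// connection when the proxy cannot be used.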
win.webContents.session.setProxy({
proxyRules: proxyRules,
proxyBypassRules: proxy && proxy.bypass ? proxy.bypass : '<local>'
}, () => {
if (win) {
win.loadURL(winUrl)
}
})
win.on('closed', () => {
win = null
})
if (process.platform !== 'darwin') {
globalShortcut.register('Alt+M', () => {
const visible = win.isMenuBarVisible()
// toggle: flip the current visibility rather than re-applying it
win.setMenuBarVisibility(!visible)
})
}
// Set up necessary bindings to update the menu items
// based on the current language selected
i18nextMainBackend.on('loaded', (loaded) => {
i18nextMainBackend.changeLanguage(app.getLocale())
i18nextMainBackend.off('loaded')
})
menu = createMenu(win, i18nextMainBackend)
i18nextMainBackend.on('languageChanged', (lng) => {
log.info('Language changed')
menu = createMenu(win, i18nextMainBackend)
})
if (store.get('settings.start_in_trays')) { win.hide() }
}
function signInInoreader () {
shell.openExternal(`https://www.inoreader.com/oauth2/auth?client_id=${process.env.VUE_APP_INOREADER_CLIENT_ID}&redirect_uri=ravenreader://inoreader/auth&response_type=code&scope=read%20write&state=ravenreader`)
}
function signInPocketWithPopUp () {
if (os.platform() === 'darwin') {
consumerKey = process.env.VUE_APP_POCKET_MAC_KEY
}
if (os.platform() === 'win32') {
consumerKey = process.env.VUE_APP_POCKET_WINDOWS_KEY
}
if (os.platform() === 'linux') {
consumerKey = process.env.VUE_APP_POCKET_LINUX_KEY
}
axios
.post(
'https://getpocket.com/v3/oauth/request', {
consumer_key: consumerKey,
redirect_uri: 'http://127.0.0.1'
}, {
withCredentials: true,
headers: {
'Content-Type': 'application/json',
'X-Accept': 'application/json'
}
}
)
.then(data => {
code = data.data.code
shell.openExternal(`https://getpocket.com/auth/authorize?request_token=${code}&redirect_uri=ravenreader://pocket/auth`)
})
}
function registerLocalResourceProtocol () {
protocol.registerFileProtocol('local-resource', (request, callback) => {
const url = request.url.replace(/^local-resource:\/\//, '')
// Decode URL to prevent errors when loading filenames with UTF-8 chars or chars like "#"
const decodedUrl = decodeURI(url) // Needed in case URL contains spaces
try {
return callback(decodedUrl)
} catch (error) {
console.error('ERROR: registerLocalResourceProtocol: Could not get file path:', error)
}
})
}
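// Usage sketch (hypothetical path): a renderer request for 'local-resource://covers/My%20Feed.png'
// is decoded above to 'covers/My Feed.png' before the file is served from disk.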
function handleInoreader (url) {
if (url.includes('ravenreader://inoreader/auth')) {
const q = new URL(url).searchParams
if (q.has('code')) {
axios.post('https://www.inoreader.com/oauth2/token', {
code: q.get('code'),
client_id: process.env.VUE_APP_INOREADER_CLIENT_ID,
client_secret: process.env.VUE_APP_INOREADER_CLIENT_SECRET,
redirect_uri: 'ravenreader://inoreader/auth',
scope: null,
grant_type: 'authorization_code'
}).then((data) => {
data.data.expires_in = dayjs().add(data.data.expires_in, 'second').valueOf()
win.webContents.send('inoreader-authenticated', data.data)
})
}
}
if (url === 'ravenreader://pocket/auth') {
axios
.post(
'https://getpocket.com/v3/oauth/authorize', {
consumer_key: consumerKey,
code: code
}, {
withCredentials: true,
headers: {
'Content-Type': 'application/json',
'X-Accept': 'application/json'
}
}
)
.then(data => {
data.data.consumer_key = consumerKey
win.webContents.send('pocket-authenticated', data.data)
})
}
}
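// A typical deep link handled above looks like (placeholder values):
//   ravenreader://inoreader/auth?code=abc123&state=ravenreader
// The code is exchanged for a token payload ({ access_token, refresh_token, expires_in, ... }) and
// forwarded to the renderer on the 'inoreader-authenticated' channel.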
app.setAsDefaultProtocolClient('ravenreader')
const primaryInstance = app.requestSingleInstanceLock()
if (!primaryInstance) {
app.quit()
} else {
app.on('second-instance', (event, argv, cmd) => {
event.preventDefault()
const url = argv[argv.length - 1]
if (win) {
if (win.isMinimized()) {
win.restore()
}
win.focus()
}
if (process.platform !== 'darwin') {
handleInoreader(url)
}
})
}
app.commandLine.appendSwitch('lang', app.getLocale())
app.commandLine.appendSwitch('disable-features', 'OutOfBlinkCors')
// Quit when all windows are closed.
app.on('window-all-closed', () => {
// On macOS it is common for applications and their menu bar
// to stay active until the user quits explicitly with Cmd + Q
if (process.platform !== 'darwin') {
app.quit()
} else {
i18nextBackend.clearMainBindings(ipcMain)
}
})
app.on('open-url', (event, url) => { | handleInoreader(url)
})
nativeTheme.on('updated', () => {
store.set('isDarkMode', nativeTheme.shouldUseDarkColors)
win.webContents.send('Dark mode', {
darkmode: nativeTheme.shouldUseDarkColors
})
})
ipcMain.handle('article-selected', (event, status) => {
const menuItemViewBrowser = menu.getMenuItemById('view-browser')
const menuItemToggleFavourite = menu.getMenuItemById('toggle-favourite')
const menuItemSaveOffline = menu.getMenuItemById('save-offline')
const menuItemToggleRead = menu.getMenuItemById('toggle-read')
menuItemViewBrowser.enabled = true
menuItemToggleFavourite.enabled = true
menuItemSaveOffline.enabled = true
menuItemToggleRead.enabled = true
})
ipcMain.on('online-status-changed', (event, status) => {
event.sender.send('onlinestatus', status)
})
app.on('activate', () => {
// On macOS it's common to re-create a window in the app when the
// dock icon is clicked and there are no other windows open.
if (BrowserWindow.getAllWindows().length === 0) {
createWindow()
}
})
app.on('web-contents-created', (event, contents) => {
contents.on('will-navigate', (event, navigationUrl) => {
event.preventDefault()
})
})
// This method will be called when Electron has finished
// initialization and is ready to create browser windows.
// Some APIs can only be used after this event occurs.
app.on('ready', async () => {
if (isDevelopment && !process.env.IS_TEST) {
// Install Vue Devtools
try {
await installExtension(VUEJS_DEVTOOLS)
} catch (e) {
console.error('Vue Devtools failed to install:', e.toString())
}
}
// Register the custom local-resource:// protocol before creating the window.
registerLocalResourceProtocol()
createWindow()
})
app.whenReady().then(() => {
store.set('isDarkMode', nativeTheme.shouldUseDarkColors)
if (!store.has('settings.theme_option')) {
store.set('settings.theme_option', 'system')
}
})
app.on('before-quit', () => {
app.isQuiting = true
globalShortcut.unregisterAll()
})
// Exit cleanly on request from parent process in development mode.
if (isDevelopment) {
if (process.platform === 'win32') {
process.on('message', (data) => {
if (data === 'graceful-exit') {
app.quit()
}
})
} else {
process.on('SIGTERM', () => {
app.quit()
})
}
}
ipcMain.handle('set-feedbin-last-fetched', (event, arg) => {
if (arg) {
store.set('feedbin_fetched_lastime', arg)
}
})
ipcMain.on('get-inoreader-last', (event, arg) => {
event.returnValue = store.get('inoreader_fetched_lastime')
})
ipcMain.on('get-feedbin-last', (event, arg) => {
event.returnValue = store.get('feedbin_fetched_lastime')
})
ipcMain.on('sort-preference', (event, arg) => {
event.returnValue = store.get('settings.oldestArticles', 'on')
})
ipcMain.on('get-settings', (event, arg) => {
const state = {}
state.cronSettings = store.get('settings.cronjob', '*/5 * * * *')
state.themeOption = store.get('settings.theme_option', 'system')
state.oldestArticles = store.get('settings.oldestArticles', false)
state.disableImages = store.get('settings.imagePreference', false)
state.fullArticleDefault = store.get('settings.fullArticlePreference', false)
state.viewOriginalDefault = store.get('settings.viewOriginalPreference', false)
state.recentlyReadPreference = store.get('settings.recentlyReadPreference', false)
state.proxy = store.get('settings.proxy', {
http: '',
https: '',
bypass: ''
})
state.keepRead = store.get('settings.keepread', 1)
if (store.has('inoreader_creds')) {
state.inoreader_connected = true
state.inoreader = store.get('inoreader_creds')
}
if (store.has('inoreader_fetched_lasttime')) {
state.inoreader_last_fetched = store.get('inoreader_fetched_lasttime')
}
if (store.has('pocket_creds')) {
state.pocket_connected = true
state.pocket = store.get('pocket_creds')
}
if (store.has('instapaper_creds')) {
state.instapaper_connected = true
state.instapaper = store.get('instapaper_creds')
}
if (store.has('fever_creds')) {
state.fever_connected = true
state.fever = store.get('fever_creds')
}
if (store.has('selfhost_creds')) {
state.selfhost_connected = true
state.selfhost = store.get('selfhost_creds')
}
if (store.has('feedbin_creds')) {
state.feedbin_connected = true
state.feedbin = store.get('feedbin_creds', JSON.stringify({
endpoint: 'https://api.feedbin.com/v2/',
email: null,
password: null
}))
}
event.returnValue = state
})
ipcMain.on('get-setting-item', (event, arg) => {
event.returnValue = store.get(arg)
})
ipcMain.handle('set-settings-item', (event, arg) => {
switch (arg.type) {
case 'set':
store.set(arg.key, arg.data)
break
case 'delete':
store.delete(arg.key, arg.data)
break
}
})
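// Renderer-side usage sketch, assuming the preload script exposes ipcRenderer.invoke (the window.api name is hypothetical):
//   await window.api.invoke('set-settings-item', { type: 'set', key: 'settings.keepread', data: 7 })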
ipcMain.on('get-locale', (event) => {
event.returnValue = app.getLocale()
})
ipcMain.on('get-dark', (event) => {
event.returnValue = store.get('isDarkMode')
})
ipcMain.on('proxy-settings-get', (event) => {
event.returnValue = store.get('settings.proxy', null)
})
ipcMain.handle('export-opml', (event, arg) => {
fs.unlink(
`${app.getPath('downloads')}/subscriptions.opml`,
err => {
if (err && err.code !== 'ENOENT') throw err
fs.writeFile(
`${app.getPath(
'downloads'
)}/subscriptions.opml`,
arg, {
flag: 'w',
encoding: 'utf8'
},
err => {
if (err) throw err
log.info('XML Saved')
const notification = new Notification({
title: 'Raven Reader',
body: 'Exported all feeds successfully to downloads folder.'
})
notification.show()
}
)
}
)
})
ipcMain.on('login-pocket', (event) => {
event.returnValue = signInPocketWithPopUp()
})
ipcMain.on('login-inoreader', (event) => {
event.returnValue = signInInoreader()
})
ipcMain.handle('context-menu', (event, arg) => {
if (arg.type === 'feed') {
createFeedMenu(arg.data, win, i18nextMainBackend)
}
if (arg.type === 'category') {
createCategoryMenu(arg.data, win, i18nextMainBackend)
}
if (arg.type === 'article') {
createArticleItemMenu(arg.data, win, i18nextMainBackend)
}
})
ipcMain.handle('parse-article', async (event, url) => {
return await parseArticle(url)
})
ipcMain.handle('instapaper-login', async (event, data) => {
const result = await axios.post('https://www.instapaper.com/api/authenticate', {}, {
auth: data
})
return result.data
})
ipcMain.handle('instapaper-save', async (event, data) => {
const result = await axios.post(`https://www.instapaper.com/api/add?url=${data.url}`, {}, {
auth: {
username: data.username,
password: data.password
}
})
return result.data
})
ipcMain.handle('save-pocket', async (event, data) => {
const result = await axios.post('https://getpocket.com/v3/add', {
url: data.url,
access_token: data.credential.access_token,
consumer_key: data.credential.consumer_key
})
return result.data
})
ipcMain.handle('fever-login', async (event, data) => {
const formData = new FormData()
formData.append('api_key', data.formData)
const config = {
url: `${data.endpoint}?api`,
method: 'post',
data: formData,
headers: {
...formData.getHeaders()
}
}
const result = await axios(config)
return result.data
})
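// Fever-compatible servers authenticate with an api_key posted as multipart form data (typically the
// MD5 hash of "email:password"); the request built above is a POST to `${endpoint}?api` with that field.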
ipcMain.handle('fever-endpoint-execute', async (event, data) => {
const formData = new FormData()
formData.append('api_key', data.formData)
const result = await axios.post(data.endpoint, formData, {
headers: {
...formData.getHeaders()
}
})
return result.data
})
ipcMain.handle('google-login', async (event, data) => {
const params = new URLSearchParams(data.formData)
const result = await axios.post(data.endpoint, params.toString())
return result.data
})
ipcMain.handle('inoreader-endpoint-fetch', async (event, data) => {
const result = await axios.get(data.endpoint, {
headers: {
Authorization: `Bearer ${data.access_token}`
}
})
return result.data
})
ipcMain.handle('inoreader-endpoint-refresh', async (event, data) => {
const result = await axios.post(data.endpoint, data.formData)
return result.data
})
ipcMain.handle('inoreader-endpoint-execute', async (event, data) => {
const result = await axios.post(data.endpoint, data.formData, {
headers: {
Authorization: `Bearer ${data.access_token}`
}
})
return result.data
})
ipcMain.handle('google-endpoint-fetch', async (event, data) => {
const result = await axios.get(data.endpoint, {
headers: {
Authorization: `GoogleLogin auth=${data.formData.auth}`
}
})
return result.data
})
ipcMain.handle('google-endpoint-execute', async (event, data) => {
const result = await axios.post(data.endpoint, data.formData.data, {
headers: {
Authorization: `GoogleLogin auth=${data.formData.auth}`
}
})
return result.data
})
ipcMain.handle('feedbin-login', async (event, data) => {
const result = await axios.get(data.endpoint, {
auth: {
username: data.creds.email,
password: data.creds.password
}
})
return result.data
})
ipcMain.handle('feedbin-endpoint-fetch', async (event, data) => {
const result = await axios.get(data.endpoint, {
auth: {
username: data.creds.email,
password: data.creds.password
}
})
return result.data
})
ipcMain.handle('feedbin-endpoint-execute', async (event, data) => {
const result = await axios.post(data.endpoint, data.formData, {
auth: {
username: data.creds.email,
password: data.creds.password
}
})
return result.data
})
powerMonitor.on('resume', () => {
win.webContents.send('power-resume')
})
autoUpdater.on('checking-for-update', () => {
log.info('Checking for update...')
})
autoUpdater.on('update-not-available', (info) => {
log.info('Update not available.')
})
autoUpdater.on('error', (error) => {
log.info(error == null ? 'unknown' : (error.stack || error).toString())
})
autoUpdater.on('update-downloaded', (info) => {
log.info('Update downloaded')
dialog.showMessageBox({
title: 'Install Updates',
message: 'Updates downloaded, application will be quit for update...'
}, () => {
setImmediate(() => autoUpdater.quitAndInstall())
})
})
autoUpdater.on('download-progress', (progressObj) => {
let logMessage = 'Download speed: ' + progressObj.bytesPerSecond
logMessage = logMessage + ' - Downloaded ' + progressObj.percent + '%'
logMessage = logMessage + ' (' + progressObj.transferred + '/' + progressObj.total + ')'
log.info(logMessage)
}) | random_line_split |
|
background.js | 'use strict'
import { app, protocol, BrowserWindow, globalShortcut, nativeTheme, ipcMain, dialog, Notification, shell, powerMonitor, session } from 'electron'
import { createProtocol } from 'vue-cli-plugin-electron-builder/lib'
import installExtension, { VUEJS_DEVTOOLS } from 'electron-devtools-installer'
import {
ElectronBlocker
} from '@cliqz/adblocker-electron'
import fetch from 'cross-fetch'
import { touchBar } from './main/touchbar'
import {
createMenu,
createFeedMenu,
createCategoryMenu,
createArticleItemMenu
} from './main/menu'
import axios from 'axios'
import os from 'os'
import Store from 'electron-store'
import log from 'electron-log'
import contextMenu from 'electron-context-menu'
import { autoUpdater } from 'electron-updater'
import fs from 'fs'
import path from 'path'
import { URL, URLSearchParams } from 'url'
import dayjs from 'dayjs'
import i18nextMainBackend from './i18nmain.config'
import {
parseArticle
} from './main/article'
require('v8-compile-cache')
const FormData = require('form-data')
const i18nextBackend = require('i18next-electron-fs-backend')
const isDevelopment = process.env.NODE_ENV !== 'production'
autoUpdater.logger = log
autoUpdater.logger.transports.file.level = 'info'
contextMenu({
showInspectElement: false
})
const store = new Store({
encryptionKey: process.env.VUE_APP_ENCRYPT_KEY,
clearInvalidConfig: true
})
// Keep a global reference of the window object, if you don't, the window will
// be closed automatically when the JavaScript object is garbage collected.
let win
let menu
let winUrl
let consumerKey
let code
// Scheme must be registered before the app is ready
protocol.registerSchemesAsPrivileged([
{ scheme: 'app', privileges: { secure: true, standard: true } }
])
async function createWindow () |
function signInInoreader () {
shell.openExternal(`https://www.inoreader.com/oauth2/auth?client_id=${process.env.VUE_APP_INOREADER_CLIENT_ID}&redirect_uri=ravenreader://inoreader/auth&response_type=code&scope=read%20write&state=ravenreader`)
}
function signInPocketWithPopUp () {
if (os.platform() === 'darwin') {
consumerKey = process.env.VUE_APP_POCKET_MAC_KEY
}
if (os.platform() === 'win32') {
consumerKey = process.env.VUE_APP_POCKET_WINDOWS_KEY
}
if (os.platform() === 'linux') {
consumerKey = process.env.VUE_APP_POCKET_LINUX_KEY
}
axios
.post(
'https://getpocket.com/v3/oauth/request', {
consumer_key: consumerKey,
redirect_uri: 'http://127.0.0.1'
}, {
withCredentials: true,
headers: {
'Content-Type': 'application/json',
'X-Accept': 'application/json'
}
}
)
.then(data => {
code = data.data.code
shell.openExternal(`https://getpocket.com/auth/authorize?request_token=${code}&redirect_uri=ravenreader://pocket/auth`)
})
}
function registerLocalResourceProtocol () {
protocol.registerFileProtocol('local-resource', (request, callback) => {
const url = request.url.replace(/^local-resource:\/\//, '')
// Decode URL to prevent errors when loading filenames with UTF-8 chars or chars like "#"
const decodedUrl = decodeURI(url) // Needed in case URL contains spaces
try {
return callback(decodedUrl)
} catch (error) {
console.error('ERROR: registerLocalResourceProtocol: Could not get file path:', error)
}
})
}
function handleInoreader (url) {
if (url.includes('ravenreader://inoreader/auth')) {
const q = new URL(url).searchParams
if (q.has('code')) {
axios.post('https://www.inoreader.com/oauth2/token', {
code: q.get('code'),
client_id: process.env.VUE_APP_INOREADER_CLIENT_ID,
client_secret: process.env.VUE_APP_INOREADER_CLIENT_SECRET,
redirect_uri: 'ravenreader://inoreader/auth',
scope: null,
grant_type: 'authorization_code'
}).then((data) => {
data.data.expires_in = dayjs().add(data.data.expires_in, 'second').valueOf()
win.webContents.send('inoreader-authenticated', data.data)
})
}
}
if (url === 'ravenreader://pocket/auth') {
axios
.post(
'https://getpocket.com/v3/oauth/authorize', {
consumer_key: consumerKey,
code: code
}, {
withCredentials: true,
headers: {
'Content-Type': 'application/json',
'X-Accept': 'application/json'
}
}
)
.then(data => {
data.data.consumer_key = consumerKey
win.webContents.send('pocket-authenticated', data.data)
})
}
}
app.setAsDefaultProtocolClient('ravenreader')
const primaryInstance = app.requestSingleInstanceLock()
if (!primaryInstance) {
app.quit()
} else {
app.on('second-instance', (event, argv, cmd) => {
event.preventDefault()
const url = argv[argv.length - 1]
if (win) {
if (win.isMinimized()) {
win.restore()
}
win.focus()
}
if (process.platform !== 'darwin') {
handleInoreader(url)
}
})
}
app.commandLine.appendSwitch('lang', app.getLocale())
app.commandLine.appendSwitch('disable-features', 'OutOfBlinkCors')
// Quit when all windows are closed.
app.on('window-all-closed', () => {
// On macOS it is common for applications and their menu bar
// to stay active until the user quits explicitly with Cmd + Q
if (process.platform !== 'darwin') {
app.quit()
} else {
i18nextBackend.clearMainBindings(ipcMain)
}
})
app.on('open-url', (event, url) => {
handleInoreader(url)
})
nativeTheme.on('updated', () => {
store.set('isDarkMode', nativeTheme.shouldUseDarkColors)
win.webContents.send('Dark mode', {
darkmode: nativeTheme.shouldUseDarkColors
})
})
ipcMain.handle('article-selected', (event, status) => {
const menuItemViewBrowser = menu.getMenuItemById('view-browser')
const menuItemToggleFavourite = menu.getMenuItemById('toggle-favourite')
const menuItemSaveOffline = menu.getMenuItemById('save-offline')
const menuItemToggleRead = menu.getMenuItemById('toggle-read')
menuItemViewBrowser.enabled = true
menuItemToggleFavourite.enabled = true
menuItemSaveOffline.enabled = true
menuItemToggleRead.enabled = true
})
ipcMain.on('online-status-changed', (event, status) => {
event.sender.send('onlinestatus', status)
})
app.on('activate', () => {
// On macOS it's common to re-create a window in the app when the
// dock icon is clicked and there are no other windows open.
if (BrowserWindow.getAllWindows().length === 0) {
createWindow()
}
})
app.on('web-contents-created', (event, contents) => {
contents.on('will-navigate', (event, navigationUrl) => {
event.preventDefault()
})
})
// This method will be called when Electron has finished
// initialization and is ready to create browser windows.
// Some APIs can only be used after this event occurs.
app.on('ready', async () => {
if (isDevelopment && !process.env.IS_TEST) {
// Install Vue Devtools
try {
await installExtension(VUEJS_DEVTOOLS)
} catch (e) {
console.error('Vue Devtools failed to install:', e.toString())
}
}
// Register the custom local-resource:// protocol before creating the window.
registerLocalResourceProtocol()
createWindow()
})
app.whenReady().then(() => {
store.set('isDarkMode', nativeTheme.shouldUseDarkColors)
if (!store.has('settings.theme_option')) {
store.set('settings.theme_option', 'system')
}
})
app.on('before-quit', () => {
app.isQuiting = true
globalShortcut.unregisterAll()
})
// Exit cleanly on request from parent process in development mode.
if (isDevelopment) {
if (process.platform === 'win32') {
process.on('message', (data) => {
if (data === 'graceful-exit') {
app.quit()
}
})
} else {
process.on('SIGTERM', () => {
app.quit()
})
}
}
ipcMain.handle('set-feedbin-last-fetched', (event, arg) => {
if (arg) {
store.set('feedbin_fetched_lastime', arg)
}
})
ipcMain.on('get-inoreader-last', (event, arg) => {
event.returnValue = store.get('inoreader_fetched_lastime')
})
ipcMain.on('get-feedbin-last', (event, arg) => {
event.returnValue = store.get('feedbin_fetched_lastime')
})
ipcMain.on('sort-preference', (event, arg) => {
event.returnValue = store.get('settings.oldestArticles', 'on')
})
ipcMain.on('get-settings', (event, arg) => {
const state = {}
state.cronSettings = store.get('settings.cronjob', '*/5 * * * *')
state.themeOption = store.get('settings.theme_option', 'system')
state.oldestArticles = store.get('settings.oldestArticles', false)
state.disableImages = store.get('settings.imagePreference', false)
state.fullArticleDefault = store.get('settings.fullArticlePreference', false)
state.viewOriginalDefault = store.get('settings.viewOriginalPreference', false)
state.recentlyReadPreference = store.get('settings.recentlyReadPreference', false)
state.proxy = store.get('settings.proxy', {
http: '',
https: '',
bypass: ''
})
state.keepRead = store.get('settings.keepread', 1)
if (store.has('inoreader_creds')) {
state.inoreader_connected = true
state.inoreader = store.get('inoreader_creds')
}
if (store.has('inoreader_fetched_lasttime')) {
state.inoreader_last_fetched = store.get('inoreader_fetched_lasttime')
}
if (store.has('pocket_creds')) {
state.pocket_connected = true
state.pocket = store.get('pocket_creds')
}
if (store.has('instapaper_creds')) {
state.instapaper_connected = true
state.instapaper = store.get('instapaper_creds')
}
if (store.has('fever_creds')) {
state.fever_connected = true
state.fever = store.get('fever_creds')
}
if (store.has('selfhost_creds')) {
state.selfhost_connected = true
state.selfhost = store.get('selfhost_creds')
}
if (store.has('feedbin_creds')) {
state.feedbin_connected = true
state.feedbin = store.get('feedbin_creds', JSON.stringify({
endpoint: 'https://api.feedbin.com/v2/',
email: null,
password: null
}))
}
event.returnValue = state
})
ipcMain.on('get-setting-item', (event, arg) => {
event.returnValue = store.get(arg)
})
ipcMain.handle('set-settings-item', (event, arg) => {
switch (arg.type) {
case 'set':
store.set(arg.key, arg.data)
break
case 'delete':
store.delete(arg.key, arg.data)
break
}
})
ipcMain.on('get-locale', (event) => {
event.returnValue = app.getLocale()
})
ipcMain.on('get-dark', (event) => {
event.returnValue = store.get('isDarkMode')
})
ipcMain.on('proxy-settings-get', (event) => {
event.returnValue = store.get('settings.proxy', null)
})
ipcMain.handle('export-opml', (event, arg) => {
fs.unlink(
`${app.getPath('downloads')}/subscriptions.opml`,
err => {
if (err && err.code !== 'ENOENT') throw err
fs.writeFile(
`${app.getPath(
'downloads'
)}/subscriptions.opml`,
arg, {
flag: 'w',
encoding: 'utf8'
},
err => {
if (err) throw err
log.info('XML Saved')
const notification = new Notification({
title: 'Raven Reader',
body: 'Exported all feeds successfully to downloads folder.'
})
notification.show()
}
)
}
)
})
ipcMain.on('login-pocket', (event) => {
event.returnValue = signInPocketWithPopUp()
})
ipcMain.on('login-inoreader', (event) => {
event.returnValue = signInInoreader()
})
ipcMain.handle('context-menu', (event, arg) => {
if (arg.type === 'feed') {
createFeedMenu(arg.data, win, i18nextMainBackend)
}
if (arg.type === 'category') {
createCategoryMenu(arg.data, win, i18nextMainBackend)
}
if (arg.type === 'article') {
createArticleItemMenu(arg.data, win, i18nextMainBackend)
}
})
ipcMain.handle('parse-article', async (event, url) => {
return await parseArticle(url)
})
ipcMain.handle('instapaper-login', async (event, data) => {
const result = await axios.post('https://www.instapaper.com/api/authenticate', {}, {
auth: data
})
return result.data
})
ipcMain.handle('instapaper-save', async (event, data) => {
const result = await axios.post(`https://www.instapaper.com/api/add?url=${data.url}`, {}, {
auth: {
username: data.username,
password: data.password
}
})
return result.data
})
ipcMain.handle('save-pocket', async (event, data) => {
const result = await axios.post('https://getpocket.com/v3/add', {
url: data.url,
access_token: data.credential.access_token,
consumer_key: data.credential.consumer_key
})
return result.data
})
ipcMain.handle('fever-login', async (event, data) => {
const formData = new FormData()
formData.append('api_key', data.formData)
const config = {
url: `${data.endpoint}?api`,
method: 'post',
data: formData,
headers: {
...formData.getHeaders()
}
}
const result = await axios(config)
return result.data
})
ipcMain.handle('fever-endpoint-execute', async (event, data) => {
const formData = new FormData()
formData.append('api_key', data.formData)
const result = await axios.post(data.endpoint, formData, {
headers: {
...formData.getHeaders()
}
})
return result.data
})
ipcMain.handle('google-login', async (event, data) => {
const params = new URLSearchParams(data.formData)
const result = await axios.post(data.endpoint, params.toString())
return result.data
})
ipcMain.handle('inoreader-endpoint-fetch', async (event, data) => {
const result = await axios.get(data.endpoint, {
headers: {
Authorization: `Bearer ${data.access_token}`
}
})
return result.data
})
ipcMain.handle('inoreader-endpoint-refresh', async (event, data) => {
const result = await axios.post(data.endpoint, data.formData)
return result.data
})
ipcMain.handle('inoreader-endpoint-execute', async (event, data) => {
const result = await axios.post(data.endpoint, data.formData, {
headers: {
Authorization: `Bearer ${data.access_token}`
}
})
return result.data
})
ipcMain.handle('google-endpoint-fetch', async (event, data) => {
const result = await axios.get(data.endpoint, {
headers: {
Authorization: `GoogleLogin auth=${data.formData.auth}`
}
})
return result.data
})
ipcMain.handle('google-endpoint-execute', async (event, data) => {
const result = await axios.post(data.endpoint, data.formData.data, {
headers: {
Authorization: `GoogleLogin auth=${data.formData.auth}`
}
})
return result.data
})
ipcMain.handle('feedbin-login', async (event, data) => {
const result = await axios.get(data.endpoint, {
auth: {
username: data.creds.email,
password: data.creds.password
}
})
return result.data
})
ipcMain.handle('feedbin-endpoint-fetch', async (event, data) => {
const result = await axios.get(data.endpoint, {
auth: {
username: data.creds.email,
password: data.creds.password
}
})
return result.data
})
ipcMain.handle('feedbin-endpoint-execute', async (event, data) => {
const result = await axios.post(data.endpoint, data.formData, {
auth: {
username: data.creds.email,
password: data.creds.password
}
})
return result.data
})
powerMonitor.on('resume', () => {
win.webContents.send('power-resume')
})
autoUpdater.on('checking-for-update', () => {
log.info('Checking for update...')
})
autoUpdater.on('update-not-available', (info) => {
log.info('Update not available.')
})
autoUpdater.on('error', (error) => {
log.info(error == null ? 'unknown' : (error.stack || error).toString())
})
autoUpdater.on('update-downloaded', (info) => {
log.info('Update downloaded')
dialog.showMessageBox({
title: 'Install Updates',
message: 'Updates downloaded, application will be quit for update...'
}, () => {
setImmediate(() => autoUpdater.quitAndInstall())
})
})
autoUpdater.on('download-progress', (progressObj) => {
let logMessage = 'Download speed: ' + progressObj.bytesPerSecond
logMessage = logMessage + ' - Downloaded ' + progressObj.percent + '%'
logMessage = logMessage + ' (' + progressObj.transferred + '/' + progressObj.total + ')'
log.info(logMessage)
})
| {
// Create the browser window.
win = new BrowserWindow({
minWidth: 1280,
minHeight: 720,
width: 1400,
height: 768,
title: 'Raven Reader',
maximizable: true,
webPreferences: {
// Use pluginOptions.nodeIntegration, leave this alone
// See nklayman.github.io/vue-cli-plugin-electron-builder/guide/security.html#node-integration for more info
webviewTag: true,
contextIsolation: true,
nodeIntegration: process.env.ELECTRON_NODE_INTEGRATION,
preload: path.join(__dirname, 'preload.js'),
disableBlinkFeatures: 'Auxclick'
}
})
// Maximize window on startup when not in development
if (!isDevelopment && win !== null) {
win.maximize()
}
i18nextBackend.mainBindings(ipcMain, win, fs)
ElectronBlocker.fromPrebuiltAdsAndTracking(fetch).then((blocker) => {
blocker.enableBlockingInSession(session.defaultSession)
})
win.setTouchBar(touchBar)
if (process.env.WEBPACK_DEV_SERVER_URL) {
// Load the url of the dev server if in development mode
winUrl = process.env.WEBPACK_DEV_SERVER_URL
if (!process.env.IS_TEST) win.webContents.openDevTools()
} else {
createProtocol('app')
winUrl = 'app://./index.html'
autoUpdater.checkForUpdatesAndNotify()
}
// Load the index.html when not in development
win.loadURL(winUrl)
const proxy = store.get('settings.proxy') ? store.get('settings.proxy') : null
let proxyRules = 'direct://'
if (proxy) {
if (proxy.http !== null && proxy.https === null) {
proxyRules = `http=${proxy.http},${proxyRules}`
}
if (proxy.http !== null && proxy.https !== null) {
proxyRules = `http=${proxy.http};https=${proxy.https},${proxyRules}`
}
}
win.webContents.session.setProxy({
proxyRules: proxyRules,
proxyBypassRules: proxy && proxy.bypass ? proxy.bypass : '<local>'
}, () => {
if (win) {
win.loadURL(winUrl)
}
})
win.on('closed', () => {
win = null
})
if (process.platform !== 'darwin') {
globalShortcut.register('Alt+M', () => {
const visible = win.isMenuBarVisible()
// toggle: flip the current visibility rather than re-applying it
win.setMenuBarVisibility(!visible)
})
}
// Set up necessary bindings to update the menu items
// based on the current language selected
i18nextMainBackend.on('loaded', (loaded) => {
i18nextMainBackend.changeLanguage(app.getLocale())
i18nextMainBackend.off('loaded')
})
menu = createMenu(win, i18nextMainBackend)
i18nextMainBackend.on('languageChanged', (lng) => {
log.info('Language changed')
menu = createMenu(win, i18nextMainBackend)
})
if (store.get('settings.start_in_trays')) { win.hide() }
} | identifier_body |
background.js | 'use strict'
import { app, protocol, BrowserWindow, globalShortcut, nativeTheme, ipcMain, dialog, Notification, shell, powerMonitor, session } from 'electron'
import { createProtocol } from 'vue-cli-plugin-electron-builder/lib'
import installExtension, { VUEJS_DEVTOOLS } from 'electron-devtools-installer'
import {
ElectronBlocker
} from '@cliqz/adblocker-electron'
import fetch from 'cross-fetch'
import { touchBar } from './main/touchbar'
import {
createMenu,
createFeedMenu,
createCategoryMenu,
createArticleItemMenu
} from './main/menu'
import axios from 'axios'
import os from 'os'
import Store from 'electron-store'
import log from 'electron-log'
import contextMenu from 'electron-context-menu'
import { autoUpdater } from 'electron-updater'
import fs from 'fs'
import path from 'path'
import { URL, URLSearchParams } from 'url'
import dayjs from 'dayjs'
import i18nextMainBackend from './i18nmain.config'
import {
parseArticle
} from './main/article'
require('v8-compile-cache')
const FormData = require('form-data')
const i18nextBackend = require('i18next-electron-fs-backend')
const isDevelopment = process.env.NODE_ENV !== 'production'
autoUpdater.logger = log
autoUpdater.logger.transports.file.level = 'info'
contextMenu({
showInspectElement: false
})
const store = new Store({
encryptionKey: process.env.VUE_APP_ENCRYPT_KEY,
clearInvalidConfig: true
})
// Keep a global reference of the window object, if you don't, the window will
// be closed automatically when the JavaScript object is garbage collected.
let win
let menu
let winUrl
let consumerKey
let code
// Scheme must be registered before the app is ready
protocol.registerSchemesAsPrivileged([
{ scheme: 'app', privileges: { secure: true, standard: true } }
])
async function createWindow () {
// Create the browser window.
win = new BrowserWindow({
minWidth: 1280,
minHeight: 720,
width: 1400,
height: 768,
title: 'Raven Reader',
maximizable: true,
webPreferences: {
// Use pluginOptions.nodeIntegration, leave this alone
// See nklayman.github.io/vue-cli-plugin-electron-builder/guide/security.html#node-integration for more info
webviewTag: true,
contextIsolation: true,
nodeIntegration: process.env.ELECTRON_NODE_INTEGRATION,
preload: path.join(__dirname, 'preload.js'),
disableBlinkFeatures: 'Auxclick'
}
})
// Maximize window on startup when not in development
if (!isDevelopment && win !== null) |
i18nextBackend.mainBindings(ipcMain, win, fs)
ElectronBlocker.fromPrebuiltAdsAndTracking(fetch).then((blocker) => {
blocker.enableBlockingInSession(session.defaultSession)
})
win.setTouchBar(touchBar)
if (process.env.WEBPACK_DEV_SERVER_URL) {
// Load the url of the dev server if in development mode
winUrl = process.env.WEBPACK_DEV_SERVER_URL
if (!process.env.IS_TEST) win.webContents.openDevTools()
} else {
createProtocol('app')
winUrl = 'app://./index.html'
autoUpdater.checkForUpdatesAndNotify()
}
// Load the index.html when not in development
win.loadURL(winUrl)
const proxy = store.get('settings.proxy') ? store.get('settings.proxy') : null
let proxyRules = 'direct://'
if (proxy) {
if (proxy.http !== null && proxy.https === null) {
proxyRules = `http=${proxy.http},${proxyRules}`
}
if (proxy.http !== null && proxy.https !== null) {
proxyRules = `http=${proxy.http};https=${proxy.https},${proxyRules}`
}
}
win.webContents.session.setProxy({
proxyRules: proxyRules,
proxyBypassRules: proxy && proxy.bypass ? proxy.bypass : '<local>'
}, () => {
if (win) {
win.loadURL(winUrl)
}
})
win.on('closed', () => {
win = null
})
if (process.platform !== 'darwin') {
globalShortcut.register('Alt+M', () => {
const visible = win.isMenuBarVisible()
// toggle: flip the current visibility rather than re-applying it
win.setMenuBarVisibility(!visible)
})
}
// Set up necessary bindings to update the menu items
// based on the current language selected
i18nextMainBackend.on('loaded', (loaded) => {
i18nextMainBackend.changeLanguage(app.getLocale())
i18nextMainBackend.off('loaded')
})
menu = createMenu(win, i18nextMainBackend)
i18nextMainBackend.on('languageChanged', (lng) => {
log.info('Language changed')
menu = createMenu(win, i18nextMainBackend)
})
if (store.get('settings.start_in_trays')) { win.hide() }
}
function signInInoreader () {
shell.openExternal(`https://www.inoreader.com/oauth2/auth?client_id=${process.env.VUE_APP_INOREADER_CLIENT_ID}&redirect_uri=ravenreader://inoreader/auth&response_type=code&scope=read%20write&state=ravenreader`)
}
function signInPocketWithPopUp () {
if (os.platform() === 'darwin') {
consumerKey = process.env.VUE_APP_POCKET_MAC_KEY
}
if (os.platform() === 'win32') {
consumerKey = process.env.VUE_APP_POCKET_WINDOWS_KEY
}
if (os.platform() === 'linux') {
consumerKey = process.env.VUE_APP_POCKET_LINUX_KEY
}
axios
.post(
'https://getpocket.com/v3/oauth/request', {
consumer_key: consumerKey,
redirect_uri: 'http://127.0.0.1'
}, {
withCredentials: true,
headers: {
'Content-Type': 'application/json',
'X-Accept': 'application/json'
}
}
)
.then(data => {
code = data.data.code
shell.openExternal(`https://getpocket.com/auth/authorize?request_token=${code}&redirect_uri=ravenreader://pocket/auth`)
})
}
function registerLocalResourceProtocol () {
protocol.registerFileProtocol('local-resource', (request, callback) => {
const url = request.url.replace(/^local-resource:\/\//, '')
// Decode URL to prevent errors when loading filenames with UTF-8 chars or chars like "#"
const decodedUrl = decodeURI(url) // Needed in case URL contains spaces
try {
return callback(decodedUrl)
} catch (error) {
console.error('ERROR: registerLocalResourceProtocol: Could not get file path:', error)
}
})
}
function handleInoreader (url) {
if (url.includes('ravenreader://inoreader/auth')) {
const q = new URL(url).searchParams
if (q.has('code')) {
axios.post('https://www.inoreader.com/oauth2/token', {
code: q.get('code'),
client_id: process.env.VUE_APP_INOREADER_CLIENT_ID,
client_secret: process.env.VUE_APP_INOREADER_CLIENT_SECRET,
redirect_uri: 'ravenreader://inoreader/auth',
scope: null,
grant_type: 'authorization_code'
}).then((data) => {
data.data.expires_in = dayjs().add(data.data.expires_in, 'second').valueOf()
win.webContents.send('inoreader-authenticated', data.data)
})
}
}
if (url === 'ravenreader://pocket/auth') {
axios
.post(
'https://getpocket.com/v3/oauth/authorize', {
consumer_key: consumerKey,
code: code
}, {
withCredentials: true,
headers: {
'Content-Type': 'application/json',
'X-Accept': 'application/json'
}
}
)
.then(data => {
data.data.consumer_key = consumerKey
win.webContents.send('pocket-authenticated', data.data)
})
}
}
app.setAsDefaultProtocolClient('ravenreader')
const primaryInstance = app.requestSingleInstanceLock()
if (!primaryInstance) {
app.quit()
} else {
app.on('second-instance', (event, argv, cmd) => {
event.preventDefault()
const url = argv[argv.length - 1]
if (win) {
if (win.isMinimized()) {
win.restore()
}
win.focus()
}
if (process.platform !== 'darwin') {
handleInoreader(url)
}
})
}
app.commandLine.appendSwitch('lang', app.getLocale())
app.commandLine.appendSwitch('disable-features', 'OutOfBlinkCors')
// Quit when all windows are closed.
app.on('window-all-closed', () => {
// On macOS it is common for applications and their menu bar
// to stay active until the user quits explicitly with Cmd + Q
if (process.platform !== 'darwin') {
app.quit()
} else {
i18nextBackend.clearMainBindings(ipcMain)
}
})
app.on('open-url', (event, url) => {
handleInoreader(url)
})
nativeTheme.on('updated', () => {
store.set('isDarkMode', nativeTheme.shouldUseDarkColors)
win.webContents.send('Dark mode', {
darkmode: nativeTheme.shouldUseDarkColors
})
})
ipcMain.handle('article-selected', (event, status) => {
const menuItemViewBrowser = menu.getMenuItemById('view-browser')
const menuItemToggleFavourite = menu.getMenuItemById('toggle-favourite')
const menuItemSaveOffline = menu.getMenuItemById('save-offline')
const menuItemToggleRead = menu.getMenuItemById('toggle-read')
menuItemViewBrowser.enabled = true
menuItemToggleFavourite.enabled = true
menuItemSaveOffline.enabled = true
menuItemToggleRead.enabled = true
})
ipcMain.on('online-status-changed', (event, status) => {
event.sender.send('onlinestatus', status)
})
app.on('activate', () => {
// On macOS it's common to re-create a window in the app when the
// dock icon is clicked and there are no other windows open.
if (BrowserWindow.getAllWindows().length === 0) {
createWindow()
}
})
app.on('web-contents-created', (event, contents) => {
contents.on('will-navigate', (event, navigationUrl) => {
event.preventDefault()
})
})
// This method will be called when Electron has finished
// initialization and is ready to create browser windows.
// Some APIs can only be used after this event occurs.
app.on('ready', async () => {
if (isDevelopment && !process.env.IS_TEST) {
// Install Vue Devtools
try {
await installExtension(VUEJS_DEVTOOLS)
} catch (e) {
console.error('Vue Devtools failed to install:', e.toString())
}
}
// Register the custom local-resource:// protocol before creating the window.
registerLocalResourceProtocol()
createWindow()
})
app.whenReady().then(() => {
store.set('isDarkMode', nativeTheme.shouldUseDarkColors)
if (!store.has('settings.theme_option')) {
store.set('settings.theme_option', 'system')
}
})
app.on('before-quit', () => {
app.isQuiting = true
globalShortcut.unregisterAll()
})
// Exit cleanly on request from parent process in development mode.
if (isDevelopment) {
if (process.platform === 'win32') {
process.on('message', (data) => {
if (data === 'graceful-exit') {
app.quit()
}
})
} else {
process.on('SIGTERM', () => {
app.quit()
})
}
}
ipcMain.handle('set-feedbin-last-fetched', (event, arg) => {
if (arg) {
store.set('feedbin_fetched_lastime', arg)
}
})
ipcMain.on('get-inoreader-last', (event, arg) => {
event.returnValue = store.get('inoreader_fetched_lastime')
})
ipcMain.on('get-feedbin-last', (event, arg) => {
event.returnValue = store.get('feedbin_fetched_lastime')
})
ipcMain.on('sort-preference', (event, arg) => {
event.returnValue = store.get('settings.oldestArticles', 'on')
})
ipcMain.on('get-settings', (event, arg) => {
const state = {}
state.cronSettings = store.get('settings.cronjob', '*/5 * * * *')
state.themeOption = store.get('settings.theme_option', 'system')
state.oldestArticles = store.get('settings.oldestArticles', false)
state.disableImages = store.get('settings.imagePreference', false)
state.fullArticleDefault = store.get('settings.fullArticlePreference', false)
state.viewOriginalDefault = store.get('settings.viewOriginalPreference', false)
state.recentlyReadPreference = store.get('settings.recentlyReadPreference', false)
state.proxy = store.get('settings.proxy', {
http: '',
https: '',
bypass: ''
})
state.keepRead = store.get('settings.keepread', 1)
if (store.has('inoreader_creds')) {
state.inoreader_connected = true
state.inoreader = store.get('inoreader_creds')
}
if (store.has('inoreader_fetched_lasttime')) {
state.inoreader_last_fetched = store.get('inoreader_fetched_lasttime')
}
if (store.has('pocket_creds')) {
state.pocket_connected = true
state.pocket = store.get('pocket_creds')
}
if (store.has('instapaper_creds')) {
state.instapaper_connected = true
state.instapaper = store.get('instapaper_creds')
}
if (store.has('fever_creds')) {
state.fever_connected = true
state.fever = store.get('fever_creds')
}
if (store.has('selfhost_creds')) {
state.selfhost_connected = true
state.selfhost = store.get('selfhost_creds')
}
if (store.has('feedbin_creds')) {
state.feedbin_connected = true
state.feedbin = store.get('feedbin_creds', JSON.stringify({
endpoint: 'https://api.feedbin.com/v2/',
email: null,
password: null
}))
}
event.returnValue = state
})
ipcMain.on('get-setting-item', (event, arg) => {
event.returnValue = store.get(arg)
})
ipcMain.handle('set-settings-item', (event, arg) => {
switch (arg.type) {
case 'set':
store.set(arg.key, arg.data)
break
case 'delete':
store.delete(arg.key, arg.data)
break
}
})
ipcMain.on('get-locale', (event) => {
event.returnValue = app.getLocale()
})
ipcMain.on('get-dark', (event) => {
event.returnValue = store.get('isDarkMode')
})
ipcMain.on('proxy-settings-get', (event) => {
event.returnValue = store.get('settings.proxy', null)
})
ipcMain.handle('export-opml', (event, arg) => {
fs.unlink(
`${app.getPath('downloads')}/subscriptions.opml`,
err => {
if (err && err.code !== 'ENOENT') throw err
fs.writeFile(
`${app.getPath(
'downloads'
)}/subscriptions.opml`,
arg, {
flag: 'w',
encoding: 'utf8'
},
err => {
if (err) throw err
log.info('XML Saved')
const notification = new Notification({
title: 'Raven Reader',
body: 'Exported all feeds successfully to downloads folder.'
})
notification.show()
}
)
}
)
})
ipcMain.on('login-pocket', (event) => {
event.returnValue = signInPocketWithPopUp()
})
ipcMain.on('login-inoreader', (event) => {
event.returnValue = signInInoreader()
})
ipcMain.handle('context-menu', (event, arg) => {
if (arg.type === 'feed') {
createFeedMenu(arg.data, win, i18nextMainBackend)
}
if (arg.type === 'category') {
createCategoryMenu(arg.data, win, i18nextMainBackend)
}
if (arg.type === 'article') {
createArticleItemMenu(arg.data, win, i18nextMainBackend)
}
})
ipcMain.handle('parse-article', async (event, url) => {
return await parseArticle(url)
})
ipcMain.handle('instapaper-login', async (event, data) => {
const result = await axios.post('https://www.instapaper.com/api/authenticate', {}, {
auth: data
})
return result.data
})
ipcMain.handle('instapaper-save', async (event, data) => {
const result = await axios.post(`https://www.instapaper.com/api/add?url=${data.url}`, {}, {
auth: {
username: data.username,
password: data.password
}
})
return result.data
})
ipcMain.handle('save-pocket', async (event, data) => {
const result = await axios.post('https://getpocket.com/v3/add', {
url: data.url,
access_token: data.credential.access_token,
consumer_key: data.credential.consumer_key
})
return result.data
})
ipcMain.handle('fever-login', async (event, data) => {
const formData = new FormData()
formData.append('api_key', data.formData)
const config = {
url: `${data.endpoint}?api`,
method: 'post',
data: formData,
headers: {
...formData.getHeaders()
}
}
const result = await axios(config)
return result.data
})
ipcMain.handle('fever-endpoint-execute', async (event, data) => {
const formData = new FormData()
formData.append('api_key', data.formData)
const result = await axios.post(data.endpoint, formData, {
headers: {
...formData.getHeaders()
}
})
return result.data
})
ipcMain.handle('google-login', async (event, data) => {
const params = new URLSearchParams(data.formData)
const result = await axios.post(data.endpoint, params.toString())
return result.data
})
ipcMain.handle('inoreader-endpoint-fetch', async (event, data) => {
const result = await axios.get(data.endpoint, {
headers: {
Authorization: `Bearer ${data.access_token}`
}
})
return result.data
})
ipcMain.handle('inoreader-endpoint-refresh', async (event, data) => {
const result = await axios.post(data.endpoint, data.formData)
return result.data
})
ipcMain.handle('inoreader-endpoint-execute', async (event, data) => {
const result = await axios.post(data.endpoint, data.formData, {
headers: {
Authorization: `Bearer ${data.access_token}`
}
})
return result.data
})
ipcMain.handle('google-endpoint-fetch', async (event, data) => {
const result = await axios.get(data.endpoint, {
headers: {
Authorization: `GoogleLogin auth=${data.formData.auth}`
}
})
return result.data
})
ipcMain.handle('google-endpoint-execute', async (event, data) => {
const result = await axios.post(data.endpoint, data.formData.data, {
headers: {
Authorization: `GoogleLogin auth=${data.formData.auth}`
}
})
return result.data
})
ipcMain.handle('feedbin-login', async (event, data) => {
const result = await axios.get(data.endpoint, {
auth: {
username: data.creds.email,
password: data.creds.password
}
})
return result.data
})
ipcMain.handle('feedbin-endpoint-fetch', async (event, data) => {
const result = await axios.get(data.endpoint, {
auth: {
username: data.creds.email,
password: data.creds.password
}
})
return result.data
})
ipcMain.handle('feedbin-endpoint-execute', async (event, data) => {
const result = await axios.post(data.endpoint, data.formData, {
auth: {
username: data.creds.email,
password: data.creds.password
}
})
return result.data
})
powerMonitor.on('resume', () => {
win.webContents.send('power-resume')
})
autoUpdater.on('checking-for-update', () => {
log.info('Checking for update...')
})
autoUpdater.on('update-not-available', (info) => {
log.info('Update not available.')
})
autoUpdater.on('error', (error) => {
log.info(error == null ? 'unknown' : (error.stack || error).toString())
})
autoUpdater.on('update-downloaded', (info) => {
log.info('Update downloaded')
dialog.showMessageBox({
title: 'Install Updates',
message: 'Updates downloaded, application will be quit for update...'
}, () => {
setImmediate(() => autoUpdater.quitAndInstall())
})
})
autoUpdater.on('download-progress', (progressObj) => {
let logMessage = 'Download speed: ' + progressObj.bytesPerSecond
logMessage = logMessage + ' - Downloaded ' + progressObj.percent + '%'
logMessage = logMessage + ' (' + progressObj.transferred + '/' + progressObj.total + ')'
log.info(logMessage)
})
| {
win.maximize()
} | conditional_block |
background.js | 'use strict'
import { app, protocol, BrowserWindow, globalShortcut, nativeTheme, ipcMain, dialog, Notification, shell, powerMonitor, session } from 'electron'
import { createProtocol } from 'vue-cli-plugin-electron-builder/lib'
import installExtension, { VUEJS_DEVTOOLS } from 'electron-devtools-installer'
import {
ElectronBlocker
} from '@cliqz/adblocker-electron'
import fetch from 'cross-fetch'
import { touchBar } from './main/touchbar'
import {
createMenu,
createFeedMenu,
createCategoryMenu,
createArticleItemMenu
} from './main/menu'
import axios from 'axios'
import os from 'os'
import Store from 'electron-store'
import log from 'electron-log'
import contextMenu from 'electron-context-menu'
import { autoUpdater } from 'electron-updater'
import fs from 'fs'
import path from 'path'
import { URL, URLSearchParams } from 'url'
import dayjs from 'dayjs'
import i18nextMainBackend from './i18nmain.config'
import {
parseArticle
} from './main/article'
require('v8-compile-cache')
const FormData = require('form-data')
const i18nextBackend = require('i18next-electron-fs-backend')
const isDevelopment = process.env.NODE_ENV !== 'production'
autoUpdater.logger = log
autoUpdater.logger.transports.file.level = 'info'
contextMenu({
showInspectElement: false
})
const store = new Store({
encryptionKey: process.env.VUE_APP_ENCRYPT_KEY,
clearInvalidConfig: true
})
// Keep a global reference of the window object, if you don't, the window will
// be closed automatically when the JavaScript object is garbage collected.
let win
let menu
let winUrl
let consumerKey
let code
// Scheme must be registered before the app is ready
protocol.registerSchemesAsPrivileged([
{ scheme: 'app', privileges: { secure: true, standard: true } }
])
async function | () {
// Create the browser window.
win = new BrowserWindow({
minWidth: 1280,
minHeight: 720,
width: 1400,
height: 768,
title: 'Raven Reader',
maximizable: true,
webPreferences: {
// Use pluginOptions.nodeIntegration, leave this alone
// See nklayman.github.io/vue-cli-plugin-electron-builder/guide/security.html#node-integration for more info
webviewTag: true,
contextIsolation: true,
nodeIntegration: process.env.ELECTRON_NODE_INTEGRATION,
preload: path.join(__dirname, 'preload.js'),
disableBlinkFeatures: 'Auxclick'
}
})
// Maximize window on startup when not in development
if (!isDevelopment && win !== null) {
win.maximize()
}
i18nextBackend.mainBindings(ipcMain, win, fs)
ElectronBlocker.fromPrebuiltAdsAndTracking(fetch).then((blocker) => {
blocker.enableBlockingInSession(session.defaultSession)
})
win.setTouchBar(touchBar)
if (process.env.WEBPACK_DEV_SERVER_URL) {
// Load the url of the dev server if in development mode
winUrl = process.env.WEBPACK_DEV_SERVER_URL
if (!process.env.IS_TEST) win.webContents.openDevTools()
} else {
createProtocol('app')
winUrl = 'app://./index.html'
autoUpdater.checkForUpdatesAndNotify()
}
// Load the index.html when not in development
win.loadURL(winUrl)
const proxy = store.get('settings.proxy') ? store.get('settings.proxy') : null
let proxyRules = 'direct://'
if (proxy) {
if (proxy.http !== null && proxy.https === null) {
proxyRules = `http=${proxy.http},${proxyRules}`
}
if (proxy.http !== null && proxy.https !== null) {
proxyRules = `http=${proxy.http};https=${proxy.https},${proxyRules}`
}
}
win.webContents.session.setProxy({
proxyRules: proxyRules,
proxyBypassRules: proxy && proxy.bypass ? proxy.bypass : '<local>'
}, () => {
if (win) {
win.loadURL(winUrl)
}
})
win.on('closed', () => {
win = null
})
if (process.platform !== 'darwin') {
globalShortcut.register('Alt+M', () => {
const visible = win.isMenuBarVisible()
      win.setMenuBarVisibility(!visible)
})
}
// Set up necessary bindings to update the menu items
// based on the current language selected
i18nextMainBackend.on('loaded', (loaded) => {
i18nextMainBackend.changeLanguage(app.getLocale())
i18nextMainBackend.off('loaded')
})
menu = createMenu(win, i18nextMainBackend)
i18nextMainBackend.on('languageChanged', (lng) => {
log.info('Language changed')
menu = createMenu(win, i18nextMainBackend)
})
if (store.get('settings.start_in_trays')) { win.hide() }
}
function signInInoreader () {
shell.openExternal(`https://www.inoreader.com/oauth2/auth?client_id=${process.env.VUE_APP_INOREADER_CLIENT_ID}&redirect_uri=ravenreader://inoreader/auth&response_type=code&scope=read%20write&state=ravenreader`)
}
function signInPocketWithPopUp () {
if (os.platform() === 'darwin') {
consumerKey = process.env.VUE_APP_POCKET_MAC_KEY
}
if (os.platform() === 'win32') {
consumerKey = process.env.VUE_APP_POCKET_WINDOWS_KEY
}
if (os.platform() === 'linux') {
consumerKey = process.env.VUE_APP_POCKET_LINUX_KEY
}
axios
.post(
'https://getpocket.com/v3/oauth/request', {
consumer_key: consumerKey,
redirect_uri: 'http://127.0.0.1'
}, {
withCredentials: true,
headers: {
'Content-Type': 'application/json',
'X-Accept': 'application/json'
}
}
)
.then(data => {
code = data.data.code
shell.openExternal(`https://getpocket.com/auth/authorize?request_token=${code}&redirect_uri=ravenreader://pocket/auth`)
})
}
function registerLocalResourceProtocol () {
protocol.registerFileProtocol('local-resource', (request, callback) => {
const url = request.url.replace(/^local-resource:\/\//, '')
// Decode URL to prevent errors when loading filenames with UTF-8 chars or chars like "#"
const decodedUrl = decodeURI(url) // Needed in case URL contains spaces
try {
return callback(decodedUrl)
} catch (error) {
console.error('ERROR: registerLocalResourceProtocol: Could not get file path:', error)
}
})
}
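// Handle ravenreader:// deep links: exchange the Inoreader authorization code for tokens, or finish the Pocket OAuth flow.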
function handleInoreader (url) {
if (url.includes('ravenreader://inoreader/auth')) {
const q = new URL(url).searchParams
if (q.has('code')) {
axios.post('https://www.inoreader.com/oauth2/token', {
code: q.get('code'),
client_id: process.env.VUE_APP_INOREADER_CLIENT_ID,
client_secret: process.env.VUE_APP_INOREADER_CLIENT_SECRET,
redirect_uri: 'ravenreader://inoreader/auth',
scope: null,
grant_type: 'authorization_code'
}).then((data) => {
data.data.expires_in = dayjs().add(data.data.expires_in, 'second').valueOf()
win.webContents.send('inoreader-authenticated', data.data)
})
}
}
if (url === 'ravenreader://pocket/auth') {
axios
.post(
'https://getpocket.com/v3/oauth/authorize', {
consumer_key: consumerKey,
code: code
}, {
withCredentials: true,
headers: {
'Content-Type': 'application/json',
'X-Accept': 'application/json'
}
}
)
.then(data => {
data.data.consumer_key = consumerKey
win.webContents.send('pocket-authenticated', data.data)
})
}
}
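// Register the custom URL scheme and enforce a single running instance; a second launch forwards its URL argument to the existing window.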
app.setAsDefaultProtocolClient('ravenreader')
const primaryInstance = app.requestSingleInstanceLock()
if (!primaryInstance) {
app.quit()
} else {
app.on('second-instance', (event, argv, cmd) => {
event.preventDefault()
const url = argv[argv.length - 1]
if (win) {
if (win.isMinimized()) {
win.restore()
}
win.focus()
}
if (process.platform !== 'darwin') {
handleInoreader(url)
}
})
}
app.commandLine.appendSwitch('lang', app.getLocale())
app.commandLine.appendSwitch('disable-features', 'OutOfBlinkCors')
// Quit when all windows are closed.
app.on('window-all-closed', () => {
// On macOS it is common for applications and their menu bar
// to stay active until the user quits explicitly with Cmd + Q
if (process.platform !== 'darwin') {
app.quit()
} else {
i18nextBackend.clearMainBindings(ipcMain)
}
})
app.on('open-url', (event, url) => {
handleInoreader(url)
})
nativeTheme.on('updated', () => {
store.set('isDarkMode', nativeTheme.shouldUseDarkColors)
win.webContents.send('Dark mode', {
darkmode: nativeTheme.shouldUseDarkColors
})
})
ipcMain.handle('article-selected', (event, status) => {
const menuItemViewBrowser = menu.getMenuItemById('view-browser')
const menuItemToggleFavourite = menu.getMenuItemById('toggle-favourite')
const menuItemSaveOffline = menu.getMenuItemById('save-offline')
const menuItemToggleRead = menu.getMenuItemById('toggle-read')
menuItemViewBrowser.enabled = true
menuItemToggleFavourite.enabled = true
menuItemSaveOffline.enabled = true
menuItemToggleRead.enabled = true
})
ipcMain.on('online-status-changed', (event, status) => {
event.sender.send('onlinestatus', status)
})
app.on('activate', () => {
// On macOS it's common to re-create a window in the app when the
// dock icon is clicked and there are no other windows open.
if (BrowserWindow.getAllWindows().length === 0) {
createWindow()
}
})
app.on('web-contents-created', (event, contents) => {
contents.on('will-navigate', (event, navigationUrl) => {
event.preventDefault()
})
})
// This method will be called when Electron has finished
// initialization and is ready to create browser windows.
// Some APIs can only be used after this event occurs.
app.on('ready', async () => {
if (isDevelopment && !process.env.IS_TEST) {
// Install Vue Devtools
try {
await installExtension(VUEJS_DEVTOOLS)
} catch (e) {
console.error('Vue Devtools failed to install:', e.toString())
}
}
// Modify the origin for all requests to the following urls.
registerLocalResourceProtocol()
createWindow()
})
app.whenReady().then(() => {
store.set('isDarkMode', nativeTheme.shouldUseDarkColors)
if (!store.has('settings.theme_option')) {
store.set('settings.theme_option', 'system')
}
})
app.on('before-quit', () => {
app.isQuiting = true
globalShortcut.unregisterAll()
})
// Exit cleanly on request from parent process in development mode.
if (isDevelopment) {
if (process.platform === 'win32') {
process.on('message', (data) => {
if (data === 'graceful-exit') {
app.quit()
}
})
} else {
process.on('SIGTERM', () => {
app.quit()
})
}
}
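// IPC handlers below let the renderer read and persist settings, sync timestamps and service credentials via electron-store.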
ipcMain.handle('set-feedbin-last-fetched', (event, arg) => {
if (arg) {
store.set('feedbin_fetched_lastime', arg)
}
})
ipcMain.on('get-inoreader-last', (event, arg) => {
event.returnValue = store.get('inoreader_fetched_lastime')
})
ipcMain.on('get-feedbin-last', (event, arg) => {
event.returnValue = store.get('feedbin_fetched_lastime')
})
ipcMain.on('sort-preference', (event, arg) => {
event.returnValue = store.get('settings.oldestArticles', 'on')
})
ipcMain.on('get-settings', (event, arg) => {
const state = {}
state.cronSettings = store.get('settings.cronjob', '*/5 * * * *')
state.themeOption = store.get('settings.theme_option', 'system')
state.oldestArticles = store.get('settings.oldestArticles', false)
state.disableImages = store.get('settings.imagePreference', false)
state.fullArticleDefault = store.get('settings.fullArticlePreference', false)
state.viewOriginalDefault = store.get('settings.viewOriginalPreference', false)
state.recentlyReadPreference = store.get('settings.recentlyReadPreference', false)
state.proxy = store.get('settings.proxy', {
http: '',
https: '',
bypass: ''
})
state.keepRead = store.get('settings.keepread', 1)
if (store.has('inoreader_creds')) {
state.inoreader_connected = true
state.inoreader = store.get('inoreader_creds')
}
if (store.has('inoreader_fetched_lasttime')) {
state.inoreader_last_fetched = store.get('inoreader_fetched_lasttime')
}
if (store.has('pocket_creds')) {
state.pocket_connected = true
state.pocket = store.get('pocket_creds')
}
if (store.has('instapaper_creds')) {
state.instapaper_connected = true
state.instapaper = store.get('instapaper_creds')
}
if (store.has('fever_creds')) {
state.fever_connected = true
state.fever = store.get('fever_creds')
}
if (store.has('selfhost_creds')) {
state.selfhost_connected = true
state.selfhost = store.get('selfhost_creds')
}
if (store.has('feedbin_creds')) {
state.feedbin_connected = true
state.feedbin = store.get('feedbin_creds', JSON.stringify({
endpoint: 'https://api.feedbin.com/v2/',
email: null,
password: null
}))
}
event.returnValue = state
})
ipcMain.on('get-setting-item', (event, arg) => {
event.returnValue = store.get(arg)
})
ipcMain.handle('set-settings-item', (event, arg) => {
switch (arg.type) {
case 'set':
store.set(arg.key, arg.data)
break
case 'delete':
store.delete(arg.key, arg.data)
break
}
})
ipcMain.on('get-locale', (event) => {
event.returnValue = app.getLocale()
})
ipcMain.on('get-dark', (event) => {
event.returnValue = store.get('isDarkMode')
})
ipcMain.on('proxy-settings-get', (event) => {
event.returnValue = store.get('settings.proxy', null)
})
ipcMain.handle('export-opml', (event, arg) => {
fs.unlink(
`${app.getPath('downloads')}/subscriptions.opml`,
err => {
if (err && err.code !== 'ENOENT') throw err
fs.writeFile(
`${app.getPath(
'downloads'
)}/subscriptions.opml`,
arg, {
flag: 'w',
encoding: 'utf8'
},
err => {
if (err) throw err
log.info('XML Saved')
const notification = new Notification({
title: 'Raven Reader',
body: 'Exported all feeds successfully to downloads folder.'
})
notification.show()
}
)
}
)
})
ipcMain.on('login-pocket', (event) => {
event.returnValue = signInPocketWithPopUp()
})
ipcMain.on('login-inoreader', (event) => {
event.returnValue = signInInoreader()
})
ipcMain.handle('context-menu', (event, arg) => {
if (arg.type === 'feed') {
createFeedMenu(arg.data, win, i18nextMainBackend)
}
if (arg.type === 'category') {
createCategoryMenu(arg.data, win, i18nextMainBackend)
}
if (arg.type === 'article') {
createArticleItemMenu(arg.data, win, i18nextMainBackend)
}
})
ipcMain.handle('parse-article', async (event, url) => {
return await parseArticle(url)
})
ipcMain.handle('instapaper-login', async (event, data) => {
const result = await axios.post('https://www.instapaper.com/api/authenticate', {}, {
auth: data
})
return result.data
})
ipcMain.handle('instapaper-save', async (event, data) => {
const result = await axios.post(`https://www.instapaper.com/api/add?url=${data.url}`, {}, {
auth: {
username: data.username,
password: data.password
}
})
return result.data
})
ipcMain.handle('save-pocket', async (event, data) => {
const result = await axios.post('https://getpocket.com/v3/add', {
url: data.url,
access_token: data.credential.access_token,
consumer_key: data.credential.consumer_key
})
return result.data
})
ipcMain.handle('fever-login', async (event, data) => {
const formData = new FormData()
formData.append('api_key', data.formData)
const config = {
url: `${data.endpoint}?api`,
method: 'post',
data: formData,
headers: {
...formData.getHeaders()
}
}
const result = await axios(config)
return result.data
})
ipcMain.handle('fever-endpoint-execute', async (event, data) => {
const formData = new FormData()
formData.append('api_key', data.formData)
const result = await axios.post(data.endpoint, formData, {
headers: {
...formData.getHeaders()
}
})
return result.data
})
ipcMain.handle('google-login', async (event, data) => {
const params = new URLSearchParams(data.formData)
const result = await axios.post(data.endpoint, params.toString())
return result.data
})
ipcMain.handle('inoreader-endpoint-fetch', async (event, data) => {
const result = await axios.get(data.endpoint, {
headers: {
Authorization: `Bearer ${data.access_token}`
}
})
return result.data
})
ipcMain.handle('inoreader-endpoint-refresh', async (event, data) => {
  const result = await axios.post(data.endpoint, data.formData)
return result.data
})
ipcMain.handle('inoreader-endpoint-execute', async (event, data) => {
const result = await axios.post(data.endpoint, data.formData, {
headers: {
Authorization: `Bearer ${data.access_token}`
}
})
return result.data
})
ipcMain.handle('google-endpoint-fetch', async (event, data) => {
const result = await axios.get(data.endpoint, {
headers: {
Authorization: `GoogleLogin auth=${data.formData.auth}`
}
})
return result.data
})
ipcMain.handle('google-endpoint-execute', async (event, data) => {
const result = await axios.post(data.endpoint, data.formData.data, {
headers: {
Authorization: `GoogleLogin auth=${data.formData.auth}`
}
})
return result.data
})
ipcMain.handle('feedbin-login', async (event, data) => {
const result = await axios.get(data.endpoint, {
auth: {
username: data.creds.email,
password: data.creds.password
}
})
return result.data
})
ipcMain.handle('feedbin-endpoint-fetch', async (event, data) => {
const result = await axios.get(data.endpoint, {
auth: {
username: data.creds.email,
password: data.creds.password
}
})
return result.data
})
ipcMain.handle('feedbin-endpoint-execute', async (event, data) => {
const result = await axios.post(data.endpoint, data.formData, {
auth: {
username: data.creds.email,
password: data.creds.password
}
})
return result.data
})
powerMonitor.on('resume', () => {
win.webContents.send('power-resume')
})
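// Auto-update lifecycle: log check/download progress and prompt the user to restart once an update has been downloaded.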
autoUpdater.on('checking-for-update', () => {
log.info('Checking for update...')
})
autoUpdater.on('update-not-available', (info) => {
log.info('Update not available.')
})
autoUpdater.on('error', (error) => {
log.info(error == null ? 'unknown' : (error.stack || error).toString())
})
autoUpdater.on('update-downloaded', (info) => {
log.info('Update downloaded')
dialog.showMessageBox({
title: 'Install Updates',
message: 'Updates downloaded, application will be quit for update...'
}, () => {
setImmediate(() => autoUpdater.quitAndInstall())
})
})
autoUpdater.on('download-progress', (progressObj) => {
let logMessage = 'Download speed: ' + progressObj.bytesPerSecond
logMessage = logMessage + ' - Downloaded ' + progressObj.percent + '%'
logMessage = logMessage + ' (' + progressObj.transferred + '/' + progressObj.total + ')'
log.info(logMessage)
})
| createWindow | identifier_name |
click.component.ts | import { Component, OnInit, OnDestroy, Input, ViewChild, ViewEncapsulation,ViewContainerRef } from '@angular/core';
import { FormBuilder, FormGroup, Validators, FormControl } from '@angular/forms';
import { DatatableComponent } from '@swimlane/ngx-datatable';
import { NgbModal, ModalDismissReasons, NgbTabChangeEvent,NgbModalRef } from '@ng-bootstrap/ng-bootstrap';
import { CommonService } from '../../common.service';
import { Http,Headers,RequestOptions,Response } from '@angular/http';
import { NgbTabset} from "@ng-bootstrap/ng-bootstrap";
import { Router } from '@angular/router';
import { BackendHost } from '../../../assets/varconfig';
import { ToastrService } from 'ngx-toastr';
import { CookieService } from 'ngx-cookie-service';
import { ngxCsv } from 'ngx-csv/ngx-csv';
import { NgForm } from '@angular/forms';
@Component({
selector: 'ngx-click',
templateUrl: './click.component.html',
styleUrls: ['./click.component.scss']
})
export class ClickComponent implements OnInit {
@ViewChild(DatatableComponent) newstable:DatatableComponent;
@ViewChild('form') form;
value:any;
serviceHost = BackendHost;
bannerblockrows = [];
paring=[];
bannerblocktemp = [];
bannerblocklist = [];
modalRef: NgbModalRef;
img_selected=false;
closeResult: string;
ban:any={}
banner_Status;
currencyObj;
question="";
select:any={};
answer="";
limits = [
{ key: '5', value: 5 },
{ key: '10', value: 10 },
{ key: '25', value: 25 },
{ key: '50', value: 50 },
{ key: '100', value: 100 }
];
limit: number = this.limits[0].value;
rowLimits: Array<any> = this.limits;
page = {
size: this.limits[0].value,totalElements:0,totalPages:0,pageNumber:0
}
defsort: any = {dir: "desc", prop: "datetime"};
defsearch = "";
loading: boolean = false;
showtblview: boolean = true;
limits_banner = [
{ key: '5', value: 5 },
{ key: '10', value: 10 },
{ key: '25', value: 25 },
{ key: '50', value: 50 },
{ key: '100', value: 100 }
];
limit_banner: number = this.limits_banner[0].value;
rowLimits_news: Array<any> = this.limits_banner;
page_banner = {
size: this.limits[0].value,totalElements:0,totalPages:0,pageNumber:0
}
defsort_banner: any = {dir: "desc", prop: "datetime"};
defsearch_news = "";
loading_banner: boolean = false;
constructor(
private modalService: NgbModal,private router: Router,
private CommonService: CommonService,
private toastr: ToastrService,
private http:Http,
private cookieService: CookieService
){
this.newsloadfn();
}
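  // Permission guard: only sub-admins whose token_details flag is set may stay on this page; others are sent back to the dashboard.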
ngOnInit() {
var data = {"_id":this.cookieService.get('session')}
this.CommonService.requestData('admin/getsubadmin',data).subscribe(resData => {
if(resData.token_details == 1){
return true
}
else{
this.router.navigate(['/pages/dashboard'])
}
})
this.ban.base_coin ="ETH"
}
newsloadfn(){
var lstinput = {"page":this.page_banner,"sorting":this.defsort_banner,"search": this.defsearch_news};
this.loading_banner = true;
this.CommonService.requestData('token/gettokendetails',lstinput)
.subscribe(resData => {
this.page_banner.totalElements = resData.bannerTotalCount;
this.page_banner.totalPages = this.page_banner.totalElements / this.page_banner.size;
this.bannerblocklist = resData.data;
this.bannerblocktemp = this.bannerblocklist;
this.bannerblockrows = this.bannerblocklist;
this.loading_banner = false;
});
}
//reset the banner list
resetnewslist(){
this.limits_banner = [
{ key: '5', value: 5 },
{ key: '10', value: 10 },
{ key: '25', value: 25 },
{ key: '50', value: 50 },
{ key: '100',value: 100 }
];
this.limit_banner = this.limits_banner[0].value;
this.rowLimits_news = this.limits_banner;
this.page_banner = {
size: this.limits_banner[0].value,totalElements:0,totalPages:0,pageNumber:0
}
this.defsort_banner= {dir: "desc", prop: "datetime"};
this.defsearch_news = "";
this.newsloadfn();
}
//end of the function
setPage_banner(pageInfo){
this.bannerblocklist = [];
this.bannerblockrows = this.bannerblocklist;
this.bannerblocktemp = this.bannerblocklist;
this.page_banner.pageNumber = pageInfo.offset;
this.loadbannerlist(this.page_banner,this.defsort_banner,this.defsearch_news);
}
  // load the banner list for the given page, sort and search parameters
loadbannerlist(page,sort,search){
this.loading_banner = true;
var lstinput = {"page":page,"sorting":sort,"search":search};
this.CommonService.requestData('token/gettokendetails',lstinput)
.subscribe(resData => {
this.page_banner.totalElements = resData.bannerTotalCount;
this.bannerblocklist = resData.data;
this.bannerblockrows = this.bannerblocklist;
this.bannerblocktemp = this.bannerblocklist;
this.loading_banner = false;
});
}
onSort_banner(event) {
this.page_banner.pageNumber = 0;
this.defsort_banner = event.sorts[0];
this.loadbannerlist(this.page_banner,this.defsort_banner,this.defsearch_news);
}
//search bar function
updateFilter_news() {
this.limits_banner = [
{ key: '5', value: 5 },
{ key: '10', value: 10 },
{ key: '25', value: 25 },
{ key: '50', value: 50 },
{ key: '100',value: 100 }
];
this.limit_banner = this.limits_banner[0].value;
this.rowLimits_news = this.limits_banner;
this.page_banner.pageNumber = 0;
this.newsloadfn();
}
//change the row limit
changeRowLimits_news(event) {
this.bannerblocklist = [];
this.bannerblockrows = this.bannerblocklist;
this.bannerblocktemp = this.bannerblocklist;
this.page_banner.size = +event.target.value;
this.page_banner.pageNumber = 0;
this.newstable.limit = +event.target.value;
this.loadbannerlist(this.page_banner,this.defsort_banner,this.defsearch_news);
}
//add pop up display
addnewsClick(){
return true
}
bannerIndx = 0;
opennewsmodel(content) {
this.modalRef = this.modalService.open(content,{size:'lg'});
this.modalRef.result.then((result) => {
this.closeResult = `Closed with: ${result}`;
}, (reason) => {
this.closeResult = `Dismissed ${this.getDismissReason(reason)}`;
});
}
//edit pop up display
editbanners = {};
openeditmodel(content,bannerrow) {
this.editbanners = bannerrow
this.modalRef = this.modalService.open(content,{size:'lg'});
this.modalRef.result.then((result) => {
this.closeResult = `Closed with: ${result}`;
}, (reason) => {
this.closeResult = `Dismissed ${this.getDismissReason(reason)}`;
});
}
bannerindx = -1;
bannersvalue = {};
banner = "";
changebanner(content, bannerrow){
this.bannersvalue = bannerrow;
this.bannerindx = bannerrow.$$index;
this.modalRef = this.modalService.open(content,{size:'sm'});
this.modalRef.result.then((result) => {
this.closeResult = `Closed with: ${result}`;
}, (reason) => {
this.closeResult = `Dismissed ${this.getDismissReason(reason)}`;
});
}
cancelemailtemp(){
this.loadbannerlist(this.page,this.defsort,this.defsearch);
}
closeAddToken(){
this.ban = {};
this.ban.base_coin = 'ETH';
}
private getDismissReason(reason: any): string {
if (reason === ModalDismissReasons.ESC) {
return 'by pressing ESC';
} else if (reason === ModalDismissReasons.BACKDROP_CLICK) {
return 'by clicking on a backdrop';
} else |
}
filesToUploads: Array<File> = [];
urls=[];
filenames = "";
fileChangeEvents(fileInput: any) {
this.filesToUploads=[];
var path = fileInput.target.files[0].type;
if(path == "image/jpeg" || path == "image/gif" || path == "image/jpg" || path == "image/png")
{
this.filesToUploads = <Array<File>>fileInput.target.files;
this.filenames = fileInput.target.files[0].name;
let files = fileInput.target.files;
if (files) {
for (let file of files) {
let reader = new FileReader();
reader.onload = (e: any) => {
this.urls = e.target.result;
}
reader.readAsDataURL(file);
}
}
this.img_selected=true;
}
else{
this.toastr.error('Please choose a right file!', 'Error');
this.filesToUploads=[];
this.urls=[];
this.filenames='';
this.img_selected=false;
}
}
onsubmit(form: NgForm){
if(this.paring.length > 0){
if(this.img_selected == false){
this.toastr.error('Please select image to continue','Error')
}
else{
const formData: any = new FormData();
const files: Array<File> = this.filesToUploads;
for(let i =0; i < files.length; i++){
formData.append("uploads[]", this.filesToUploads[i], files[i]['name']);
}
this.http.post(this.serviceHost+'currencydetails/uploadPhoto',formData)
.map(files => files.json())
.subscribe(files => {
this.ban.logo=files.result.secure_url
this.add_content(form)
})
}
}
else{
this.toastr.error('Please select a pair','Error')
}
}
checkbox(val){
if((<HTMLInputElement> document.getElementById(val)).checked == true){
this.paring.push(val)
}
else{
var filtered = this.paring.filter(function(value, index, arr){
return value != val;
});
this.paring = filtered;
}
}
add_content(form: NgForm){
var pairs=''
for(var i=0;i<this.paring.length;i++){
if(i+1 != this.paring.length){
pairs += this.ban.token_symbol.toUpperCase()+"/"+this.paring[i].toUpperCase()+",";
}
else{
pairs += this.ban.token_symbol.toUpperCase()+"/"+this.paring[i].toUpperCase();
}
}
var req={
"name" : this.ban.token_name,
"symbol" : this.ban.token_symbol.toUpperCase(),
"logo" : this.ban.logo,
"minimum" : this.ban.minwithdraw,
"pairlist" : pairs,
"maximum" : this.ban.maxwithdraw,
"fee" : this.ban.withdrawfee,
"decimal" : this.ban.decimal,
"conaddress" : this.ban.contract_address,
"status" : 'Approved',
"baseCoin" : this.ban.base_coin
}
this.CommonService.requestData('token/addtoken',req).subscribe(res=>{
if(res.status == true){
this.toastr.success(res.Message,"Success");
var button=document.getElementById('shortcut_close');
button.click();
location.reload();
}
else{
this.toastr.error(res.Message,"Error");
}
})
}
}
| {
      return `with: ${reason}`;
} | conditional_block |
click.component.ts | import { Component, OnInit, OnDestroy, Input, ViewChild, ViewEncapsulation,ViewContainerRef } from '@angular/core';
import { FormBuilder, FormGroup, Validators, FormControl } from '@angular/forms';
import { DatatableComponent } from '@swimlane/ngx-datatable';
import { NgbModal, ModalDismissReasons, NgbTabChangeEvent,NgbModalRef } from '@ng-bootstrap/ng-bootstrap';
import { CommonService } from '../../common.service';
import { Http,Headers,RequestOptions,Response } from '@angular/http';
import { NgbTabset} from "@ng-bootstrap/ng-bootstrap";
import { Router } from '@angular/router';
import { BackendHost } from '../../../assets/varconfig';
import { ToastrService } from 'ngx-toastr';
import { CookieService } from 'ngx-cookie-service';
import { ngxCsv } from 'ngx-csv/ngx-csv';
import { NgForm } from '@angular/forms';
@Component({
selector: 'ngx-click',
templateUrl: './click.component.html',
styleUrls: ['./click.component.scss']
})
export class ClickComponent implements OnInit {
@ViewChild(DatatableComponent) newstable:DatatableComponent;
@ViewChild('form') form;
value:any;
serviceHost = BackendHost;
bannerblockrows = [];
paring=[];
bannerblocktemp = [];
bannerblocklist = [];
modalRef: NgbModalRef;
img_selected=false;
closeResult: string;
ban:any={}
banner_Status;
currencyObj;
question="";
select:any={};
answer="";
limits = [
{ key: '5', value: 5 },
{ key: '10', value: 10 },
{ key: '25', value: 25 },
{ key: '50', value: 50 },
{ key: '100', value: 100 }
];
limit: number = this.limits[0].value;
rowLimits: Array<any> = this.limits;
page = {
size: this.limits[0].value,totalElements:0,totalPages:0,pageNumber:0
}
defsort: any = {dir: "desc", prop: "datetime"};
defsearch = "";
loading: boolean = false;
showtblview: boolean = true;
limits_banner = [
{ key: '5', value: 5 },
{ key: '10', value: 10 },
{ key: '25', value: 25 },
{ key: '50', value: 50 },
{ key: '100', value: 100 }
];
limit_banner: number = this.limits_banner[0].value;
rowLimits_news: Array<any> = this.limits_banner;
page_banner = {
size: this.limits[0].value,totalElements:0,totalPages:0,pageNumber:0
}
defsort_banner: any = {dir: "desc", prop: "datetime"};
defsearch_news = "";
loading_banner: boolean = false;
constructor(
private modalService: NgbModal,private router: Router,
private CommonService: CommonService,
private toastr: ToastrService,
private http:Http,
private cookieService: CookieService
){
this.newsloadfn();
}
ngOnInit() {
var data = {"_id":this.cookieService.get('session')}
this.CommonService.requestData('admin/getsubadmin',data).subscribe(resData => {
if(resData.token_details == 1){
return true
}
else{
this.router.navigate(['/pages/dashboard'])
}
})
this.ban.base_coin ="ETH"
}
newsloadfn(){
var lstinput = {"page":this.page_banner,"sorting":this.defsort_banner,"search": this.defsearch_news};
this.loading_banner = true;
this.CommonService.requestData('token/gettokendetails',lstinput)
.subscribe(resData => {
this.page_banner.totalElements = resData.bannerTotalCount;
this.page_banner.totalPages = this.page_banner.totalElements / this.page_banner.size;
this.bannerblocklist = resData.data;
this.bannerblocktemp = this.bannerblocklist;
this.bannerblockrows = this.bannerblocklist;
this.loading_banner = false;
});
}
//reset the banner list
resetnewslist(){
this.limits_banner = [
{ key: '5', value: 5 },
{ key: '10', value: 10 },
{ key: '25', value: 25 },
{ key: '50', value: 50 },
{ key: '100',value: 100 }
];
this.limit_banner = this.limits_banner[0].value;
this.rowLimits_news = this.limits_banner;
this.page_banner = { | this.defsearch_news = "";
this.newsloadfn();
}
//end of the function
setPage_banner(pageInfo){
this.bannerblocklist = [];
this.bannerblockrows = this.bannerblocklist;
this.bannerblocktemp = this.bannerblocklist;
this.page_banner.pageNumber = pageInfo.offset;
this.loadbannerlist(this.page_banner,this.defsort_banner,this.defsearch_news);
}
  // load the banner list for the given page, sort and search parameters
loadbannerlist(page,sort,search){
this.loading_banner = true;
var lstinput = {"page":page,"sorting":sort,"search":search};
this.CommonService.requestData('token/gettokendetails',lstinput)
.subscribe(resData => {
this.page_banner.totalElements = resData.bannerTotalCount;
this.bannerblocklist = resData.data;
this.bannerblockrows = this.bannerblocklist;
this.bannerblocktemp = this.bannerblocklist;
this.loading_banner = false;
});
}
onSort_banner(event) {
this.page_banner.pageNumber = 0;
this.defsort_banner = event.sorts[0];
this.loadbannerlist(this.page_banner,this.defsort_banner,this.defsearch_news);
}
//search bar function
updateFilter_news() {
this.limits_banner = [
{ key: '5', value: 5 },
{ key: '10', value: 10 },
{ key: '25', value: 25 },
{ key: '50', value: 50 },
{ key: '100',value: 100 }
];
this.limit_banner = this.limits_banner[0].value;
this.rowLimits_news = this.limits_banner;
this.page_banner.pageNumber = 0;
this.newsloadfn();
}
//change the row limit
changeRowLimits_news(event) {
this.bannerblocklist = [];
this.bannerblockrows = this.bannerblocklist;
this.bannerblocktemp = this.bannerblocklist;
this.page_banner.size = +event.target.value;
this.page_banner.pageNumber = 0;
this.newstable.limit = +event.target.value;
this.loadbannerlist(this.page_banner,this.defsort_banner,this.defsearch_news);
}
//add pop up display
addnewsClick(){
return true
}
bannerIndx = 0;
opennewsmodel(content) {
this.modalRef = this.modalService.open(content,{size:'lg'});
this.modalRef.result.then((result) => {
this.closeResult = `Closed with: ${result}`;
}, (reason) => {
this.closeResult = `Dismissed ${this.getDismissReason(reason)}`;
});
}
//edit pop up display
editbanners = {};
openeditmodel(content,bannerrow) {
this.editbanners = bannerrow
this.modalRef = this.modalService.open(content,{size:'lg'});
this.modalRef.result.then((result) => {
this.closeResult = `Closed with: ${result}`;
}, (reason) => {
this.closeResult = `Dismissed ${this.getDismissReason(reason)}`;
});
}
bannerindx = -1;
bannersvalue = {};
banner = "";
changebanner(content, bannerrow){
this.bannersvalue = bannerrow;
this.bannerindx = bannerrow.$$index;
this.modalRef = this.modalService.open(content,{size:'sm'});
this.modalRef.result.then((result) => {
this.closeResult = `Closed with: ${result}`;
}, (reason) => {
this.closeResult = `Dismissed ${this.getDismissReason(reason)}`;
});
}
cancelemailtemp(){
this.loadbannerlist(this.page,this.defsort,this.defsearch);
}
closeAddToken(){
this.ban = {};
this.ban.base_coin = 'ETH';
}
private getDismissReason(reason: any): string {
if (reason === ModalDismissReasons.ESC) {
return 'by pressing ESC';
} else if (reason === ModalDismissReasons.BACKDROP_CLICK) {
return 'by clicking on a backdrop';
} else {
      return `with: ${reason}`;
}
}
filesToUploads: Array<File> = [];
urls=[];
filenames = "";
fileChangeEvents(fileInput: any) {
this.filesToUploads=[];
var path = fileInput.target.files[0].type;
if(path == "image/jpeg" || path == "image/gif" || path == "image/jpg" || path == "image/png")
{
this.filesToUploads = <Array<File>>fileInput.target.files;
this.filenames = fileInput.target.files[0].name;
let files = fileInput.target.files;
if (files) {
for (let file of files) {
let reader = new FileReader();
reader.onload = (e: any) => {
this.urls = e.target.result;
}
reader.readAsDataURL(file);
}
}
this.img_selected=true;
}
else{
this.toastr.error('Please choose a right file!', 'Error');
this.filesToUploads=[];
this.urls=[];
this.filenames='';
this.img_selected=false;
}
}
onsubmit(form: NgForm){
if(this.paring.length > 0){
if(this.img_selected == false){
this.toastr.error('Please select image to continue','Error')
}
else{
const formData: any = new FormData();
const files: Array<File> = this.filesToUploads;
for(let i =0; i < files.length; i++){
formData.append("uploads[]", this.filesToUploads[i], files[i]['name']);
}
this.http.post(this.serviceHost+'currencydetails/uploadPhoto',formData)
.map(files => files.json())
.subscribe(files => {
this.ban.logo=files.result.secure_url
this.add_content(form)
})
}
}
else{
this.toastr.error('Please select a pair','Error')
}
}
checkbox(val){
if((<HTMLInputElement> document.getElementById(val)).checked == true){
this.paring.push(val)
}
else{
var filtered = this.paring.filter(function(value, index, arr){
return value != val;
});
this.paring = filtered;
}
}
add_content(form: NgForm){
var pairs=''
for(var i=0;i<this.paring.length;i++){
if(i+1 != this.paring.length){
pairs += this.ban.token_symbol.toUpperCase()+"/"+this.paring[i].toUpperCase()+",";
}
else{
pairs += this.ban.token_symbol.toUpperCase()+"/"+this.paring[i].toUpperCase();
}
}
var req={
"name" : this.ban.token_name,
"symbol" : this.ban.token_symbol.toUpperCase(),
"logo" : this.ban.logo,
"minimum" : this.ban.minwithdraw,
"pairlist" : pairs,
"maximum" : this.ban.maxwithdraw,
"fee" : this.ban.withdrawfee,
"decimal" : this.ban.decimal,
"conaddress" : this.ban.contract_address,
"status" : 'Approved',
"baseCoin" : this.ban.base_coin
}
this.CommonService.requestData('token/addtoken',req).subscribe(res=>{
if(res.status == true){
this.toastr.success(res.Message,"Success");
var button=document.getElementById('shortcut_close');
button.click();
location.reload();
}
else{
this.toastr.error(res.Message,"Error");
}
})
}
} | size: this.limits_banner[0].value,totalElements:0,totalPages:0,pageNumber:0
}
this.defsort_banner= {dir: "desc", prop: "datetime"}; | random_line_split |
click.component.ts | import { Component, OnInit, OnDestroy, Input, ViewChild, ViewEncapsulation,ViewContainerRef } from '@angular/core';
import { FormBuilder, FormGroup, Validators, FormControl } from '@angular/forms';
import { DatatableComponent } from '@swimlane/ngx-datatable';
import { NgbModal, ModalDismissReasons, NgbTabChangeEvent,NgbModalRef } from '@ng-bootstrap/ng-bootstrap';
import { CommonService } from '../../common.service';
import { Http,Headers,RequestOptions,Response } from '@angular/http';
import { NgbTabset} from "@ng-bootstrap/ng-bootstrap";
import { Router } from '@angular/router';
import { BackendHost } from '../../../assets/varconfig';
import { ToastrService } from 'ngx-toastr';
import { CookieService } from 'ngx-cookie-service';
import { ngxCsv } from 'ngx-csv/ngx-csv';
import { NgForm } from '@angular/forms';
@Component({
selector: 'ngx-click',
templateUrl: './click.component.html',
styleUrls: ['./click.component.scss']
})
export class ClickComponent implements OnInit {
@ViewChild(DatatableComponent) newstable:DatatableComponent;
@ViewChild('form') form;
value:any;
serviceHost = BackendHost;
bannerblockrows = [];
paring=[];
bannerblocktemp = [];
bannerblocklist = [];
modalRef: NgbModalRef;
img_selected=false;
closeResult: string;
ban:any={}
banner_Status;
currencyObj;
question="";
select:any={};
answer="";
limits = [
{ key: '5', value: 5 },
{ key: '10', value: 10 },
{ key: '25', value: 25 },
{ key: '50', value: 50 },
{ key: '100', value: 100 }
];
limit: number = this.limits[0].value;
rowLimits: Array<any> = this.limits;
page = {
size: this.limits[0].value,totalElements:0,totalPages:0,pageNumber:0
}
defsort: any = {dir: "desc", prop: "datetime"};
defsearch = "";
loading: boolean = false;
showtblview: boolean = true;
limits_banner = [
{ key: '5', value: 5 },
{ key: '10', value: 10 },
{ key: '25', value: 25 },
{ key: '50', value: 50 },
{ key: '100', value: 100 }
];
limit_banner: number = this.limits_banner[0].value;
rowLimits_news: Array<any> = this.limits_banner;
page_banner = {
size: this.limits[0].value,totalElements:0,totalPages:0,pageNumber:0
}
defsort_banner: any = {dir: "desc", prop: "datetime"};
defsearch_news = "";
loading_banner: boolean = false;
constructor(
private modalService: NgbModal,private router: Router,
private CommonService: CommonService,
private toastr: ToastrService,
private http:Http,
private cookieService: CookieService
){
this.newsloadfn();
}
ngOnInit() {
var data = {"_id":this.cookieService.get('session')}
this.CommonService.requestData('admin/getsubadmin',data).subscribe(resData => {
if(resData.token_details == 1){
return true
}
else{
this.router.navigate(['/pages/dashboard'])
}
})
this.ban.base_coin ="ETH"
}
newsloadfn(){
var lstinput = {"page":this.page_banner,"sorting":this.defsort_banner,"search": this.defsearch_news};
this.loading_banner = true;
this.CommonService.requestData('token/gettokendetails',lstinput)
.subscribe(resData => {
this.page_banner.totalElements = resData.bannerTotalCount;
this.page_banner.totalPages = this.page_banner.totalElements / this.page_banner.size;
this.bannerblocklist = resData.data;
this.bannerblocktemp = this.bannerblocklist;
this.bannerblockrows = this.bannerblocklist;
this.loading_banner = false;
});
}
//reset the banner list
resetnewslist(){
this.limits_banner = [
{ key: '5', value: 5 },
{ key: '10', value: 10 },
{ key: '25', value: 25 },
{ key: '50', value: 50 },
{ key: '100',value: 100 }
];
this.limit_banner = this.limits_banner[0].value;
this.rowLimits_news = this.limits_banner;
this.page_banner = {
size: this.limits_banner[0].value,totalElements:0,totalPages:0,pageNumber:0
}
this.defsort_banner= {dir: "desc", prop: "datetime"};
this.defsearch_news = "";
this.newsloadfn();
}
//end of the function
setPage_banner(pageInfo){
this.bannerblocklist = [];
this.bannerblockrows = this.bannerblocklist;
this.bannerblocktemp = this.bannerblocklist;
this.page_banner.pageNumber = pageInfo.offset;
this.loadbannerlist(this.page_banner,this.defsort_banner,this.defsearch_news);
}
  // load the banner list for the given page, sort and search parameters
loadbannerlist(page,sort,search){
this.loading_banner = true;
var lstinput = {"page":page,"sorting":sort,"search":search};
this.CommonService.requestData('token/gettokendetails',lstinput)
.subscribe(resData => {
this.page_banner.totalElements = resData.bannerTotalCount;
this.bannerblocklist = resData.data;
this.bannerblockrows = this.bannerblocklist;
this.bannerblocktemp = this.bannerblocklist;
this.loading_banner = false;
});
}
| (event) {
this.page_banner.pageNumber = 0;
this.defsort_banner = event.sorts[0];
this.loadbannerlist(this.page_banner,this.defsort_banner,this.defsearch_news);
}
//search bar function
updateFilter_news() {
this.limits_banner = [
{ key: '5', value: 5 },
{ key: '10', value: 10 },
{ key: '25', value: 25 },
{ key: '50', value: 50 },
{ key: '100',value: 100 }
];
this.limit_banner = this.limits_banner[0].value;
this.rowLimits_news = this.limits_banner;
this.page_banner.pageNumber = 0;
this.newsloadfn();
}
//change the row limit
changeRowLimits_news(event) {
this.bannerblocklist = [];
this.bannerblockrows = this.bannerblocklist;
this.bannerblocktemp = this.bannerblocklist;
this.page_banner.size = +event.target.value;
this.page_banner.pageNumber = 0;
this.newstable.limit = +event.target.value;
this.loadbannerlist(this.page_banner,this.defsort_banner,this.defsearch_news);
}
//add pop up display
addnewsClick(){
return true
}
bannerIndx = 0;
opennewsmodel(content) {
this.modalRef = this.modalService.open(content,{size:'lg'});
this.modalRef.result.then((result) => {
this.closeResult = `Closed with: ${result}`;
}, (reason) => {
this.closeResult = `Dismissed ${this.getDismissReason(reason)}`;
});
}
//edit pop up display
editbanners = {};
openeditmodel(content,bannerrow) {
this.editbanners = bannerrow
this.modalRef = this.modalService.open(content,{size:'lg'});
this.modalRef.result.then((result) => {
this.closeResult = `Closed with: ${result}`;
}, (reason) => {
this.closeResult = `Dismissed ${this.getDismissReason(reason)}`;
});
}
bannerindx = -1;
bannersvalue = {};
banner = "";
changebanner(content, bannerrow){
this.bannersvalue = bannerrow;
this.bannerindx = bannerrow.$$index;
this.modalRef = this.modalService.open(content,{size:'sm'});
this.modalRef.result.then((result) => {
this.closeResult = `Closed with: ${result}`;
}, (reason) => {
this.closeResult = `Dismissed ${this.getDismissReason(reason)}`;
});
}
cancelemailtemp(){
this.loadbannerlist(this.page,this.defsort,this.defsearch);
}
closeAddToken(){
this.ban = {};
this.ban.base_coin = 'ETH';
}
private getDismissReason(reason: any): string {
if (reason === ModalDismissReasons.ESC) {
return 'by pressing ESC';
} else if (reason === ModalDismissReasons.BACKDROP_CLICK) {
return 'by clicking on a backdrop';
} else {
      return `with: ${reason}`;
}
}
filesToUploads: Array<File> = [];
urls=[];
filenames = "";
fileChangeEvents(fileInput: any) {
this.filesToUploads=[];
var path = fileInput.target.files[0].type;
if(path == "image/jpeg" || path == "image/gif" || path == "image/jpg" || path == "image/png")
{
this.filesToUploads = <Array<File>>fileInput.target.files;
this.filenames = fileInput.target.files[0].name;
let files = fileInput.target.files;
if (files) {
for (let file of files) {
let reader = new FileReader();
reader.onload = (e: any) => {
this.urls = e.target.result;
}
reader.readAsDataURL(file);
}
}
this.img_selected=true;
}
else{
this.toastr.error('Please choose a right file!', 'Error');
this.filesToUploads=[];
this.urls=[];
this.filenames='';
this.img_selected=false;
}
}
onsubmit(form: NgForm){
if(this.paring.length > 0){
if(this.img_selected == false){
this.toastr.error('Please select image to continue','Error')
}
else{
const formData: any = new FormData();
const files: Array<File> = this.filesToUploads;
for(let i =0; i < files.length; i++){
formData.append("uploads[]", this.filesToUploads[i], files[i]['name']);
}
this.http.post(this.serviceHost+'currencydetails/uploadPhoto',formData)
.map(files => files.json())
.subscribe(files => {
this.ban.logo=files.result.secure_url
this.add_content(form)
})
}
}
else{
this.toastr.error('Please select a pair','Error')
}
}
checkbox(val){
if((<HTMLInputElement> document.getElementById(val)).checked == true){
this.paring.push(val)
}
else{
var filtered = this.paring.filter(function(value, index, arr){
return value != val;
});
this.paring = filtered;
}
}
add_content(form: NgForm){
var pairs=''
for(var i=0;i<this.paring.length;i++){
if(i+1 != this.paring.length){
pairs += this.ban.token_symbol.toUpperCase()+"/"+this.paring[i].toUpperCase()+",";
}
else{
pairs += this.ban.token_symbol.toUpperCase()+"/"+this.paring[i].toUpperCase();
}
}
var req={
"name" : this.ban.token_name,
"symbol" : this.ban.token_symbol.toUpperCase(),
"logo" : this.ban.logo,
"minimum" : this.ban.minwithdraw,
"pairlist" : pairs,
"maximum" : this.ban.maxwithdraw,
"fee" : this.ban.withdrawfee,
"decimal" : this.ban.decimal,
"conaddress" : this.ban.contract_address,
"status" : 'Approved',
"baseCoin" : this.ban.base_coin
}
this.CommonService.requestData('token/addtoken',req).subscribe(res=>{
if(res.status == true){
this.toastr.success(res.Message,"Success");
var button=document.getElementById('shortcut_close');
button.click();
location.reload();
}
else{
this.toastr.error(res.Message,"Error");
}
})
}
}
| onSort_banner | identifier_name |
click.component.ts | import { Component, OnInit, OnDestroy, Input, ViewChild, ViewEncapsulation,ViewContainerRef } from '@angular/core';
import { FormBuilder, FormGroup, Validators, FormControl } from '@angular/forms';
import { DatatableComponent } from '@swimlane/ngx-datatable';
import { NgbModal, ModalDismissReasons, NgbTabChangeEvent,NgbModalRef } from '@ng-bootstrap/ng-bootstrap';
import { CommonService } from '../../common.service';
import { Http,Headers,RequestOptions,Response } from '@angular/http';
import { NgbTabset} from "@ng-bootstrap/ng-bootstrap";
import { Router } from '@angular/router';
import { BackendHost } from '../../../assets/varconfig';
import { ToastrService } from 'ngx-toastr';
import { CookieService } from 'ngx-cookie-service';
import { ngxCsv } from 'ngx-csv/ngx-csv';
import { NgForm } from '@angular/forms';
@Component({
selector: 'ngx-click',
templateUrl: './click.component.html',
styleUrls: ['./click.component.scss']
})
export class ClickComponent implements OnInit {
@ViewChild(DatatableComponent) newstable:DatatableComponent;
@ViewChild('form') form;
value:any;
serviceHost = BackendHost;
bannerblockrows = [];
paring=[];
bannerblocktemp = [];
bannerblocklist = [];
modalRef: NgbModalRef;
img_selected=false;
closeResult: string;
ban:any={}
banner_Status;
currencyObj;
question="";
select:any={};
answer="";
limits = [
{ key: '5', value: 5 },
{ key: '10', value: 10 },
{ key: '25', value: 25 },
{ key: '50', value: 50 },
{ key: '100', value: 100 }
];
limit: number = this.limits[0].value;
rowLimits: Array<any> = this.limits;
page = {
size: this.limits[0].value,totalElements:0,totalPages:0,pageNumber:0
}
defsort: any = {dir: "desc", prop: "datetime"};
defsearch = "";
loading: boolean = false;
showtblview: boolean = true;
limits_banner = [
{ key: '5', value: 5 },
{ key: '10', value: 10 },
{ key: '25', value: 25 },
{ key: '50', value: 50 },
{ key: '100', value: 100 }
];
limit_banner: number = this.limits_banner[0].value;
rowLimits_news: Array<any> = this.limits_banner;
page_banner = {
size: this.limits[0].value,totalElements:0,totalPages:0,pageNumber:0
}
defsort_banner: any = {dir: "desc", prop: "datetime"};
defsearch_news = "";
loading_banner: boolean = false;
constructor(
private modalService: NgbModal,private router: Router,
private CommonService: CommonService,
private toastr: ToastrService,
private http:Http,
private cookieService: CookieService
){
this.newsloadfn();
}
ngOnInit() {
var data = {"_id":this.cookieService.get('session')}
this.CommonService.requestData('admin/getsubadmin',data).subscribe(resData => {
if(resData.token_details == 1){
return true
}
else{
this.router.navigate(['/pages/dashboard'])
}
})
this.ban.base_coin ="ETH"
}
newsloadfn(){
var lstinput = {"page":this.page_banner,"sorting":this.defsort_banner,"search": this.defsearch_news};
this.loading_banner = true;
this.CommonService.requestData('token/gettokendetails',lstinput)
.subscribe(resData => {
this.page_banner.totalElements = resData.bannerTotalCount;
this.page_banner.totalPages = this.page_banner.totalElements / this.page_banner.size;
this.bannerblocklist = resData.data;
this.bannerblocktemp = this.bannerblocklist;
this.bannerblockrows = this.bannerblocklist;
this.loading_banner = false;
});
}
//reset the banner list
resetnewslist(){
this.limits_banner = [
{ key: '5', value: 5 },
{ key: '10', value: 10 },
{ key: '25', value: 25 },
{ key: '50', value: 50 },
{ key: '100',value: 100 }
];
this.limit_banner = this.limits_banner[0].value;
this.rowLimits_news = this.limits_banner;
this.page_banner = {
size: this.limits_banner[0].value,totalElements:0,totalPages:0,pageNumber:0
}
this.defsort_banner= {dir: "desc", prop: "datetime"};
this.defsearch_news = "";
this.newsloadfn();
}
//end of the function
setPage_banner(pageInfo){
this.bannerblocklist = [];
this.bannerblockrows = this.bannerblocklist;
this.bannerblocktemp = this.bannerblocklist;
this.page_banner.pageNumber = pageInfo.offset;
this.loadbannerlist(this.page_banner,this.defsort_banner,this.defsearch_news);
}
  // load the banner list for the given page, sort and search parameters
loadbannerlist(page,sort,search){
this.loading_banner = true;
var lstinput = {"page":page,"sorting":sort,"search":search};
this.CommonService.requestData('token/gettokendetails',lstinput)
.subscribe(resData => {
this.page_banner.totalElements = resData.bannerTotalCount;
this.bannerblocklist = resData.data;
this.bannerblockrows = this.bannerblocklist;
this.bannerblocktemp = this.bannerblocklist;
this.loading_banner = false;
});
}
onSort_banner(event) {
this.page_banner.pageNumber = 0;
this.defsort_banner = event.sorts[0];
this.loadbannerlist(this.page_banner,this.defsort_banner,this.defsearch_news);
}
//search bar function
updateFilter_news() {
this.limits_banner = [
{ key: '5', value: 5 },
{ key: '10', value: 10 },
{ key: '25', value: 25 },
{ key: '50', value: 50 },
{ key: '100',value: 100 }
];
this.limit_banner = this.limits_banner[0].value;
this.rowLimits_news = this.limits_banner;
this.page_banner.pageNumber = 0;
this.newsloadfn();
}
//change the row limit
changeRowLimits_news(event) {
this.bannerblocklist = [];
this.bannerblockrows = this.bannerblocklist;
this.bannerblocktemp = this.bannerblocklist;
this.page_banner.size = +event.target.value;
this.page_banner.pageNumber = 0;
this.newstable.limit = +event.target.value;
this.loadbannerlist(this.page_banner,this.defsort_banner,this.defsearch_news);
}
//add pop up display
addnewsClick(){
return true
}
bannerIndx = 0;
opennewsmodel(content) {
this.modalRef = this.modalService.open(content,{size:'lg'});
this.modalRef.result.then((result) => {
this.closeResult = `Closed with: ${result}`;
}, (reason) => {
this.closeResult = `Dismissed ${this.getDismissReason(reason)}`;
});
}
//edit pop up display
editbanners = {};
openeditmodel(content,bannerrow) {
this.editbanners = bannerrow
this.modalRef = this.modalService.open(content,{size:'lg'});
this.modalRef.result.then((result) => {
this.closeResult = `Closed with: ${result}`;
}, (reason) => {
this.closeResult = `Dismissed ${this.getDismissReason(reason)}`;
});
}
bannerindx = -1;
bannersvalue = {};
banner = "";
changebanner(content, bannerrow){
this.bannersvalue = bannerrow;
this.bannerindx = bannerrow.$$index;
this.modalRef = this.modalService.open(content,{size:'sm'});
this.modalRef.result.then((result) => {
this.closeResult = `Closed with: ${result}`;
}, (reason) => {
this.closeResult = `Dismissed ${this.getDismissReason(reason)}`;
});
}
cancelemailtemp(){
this.loadbannerlist(this.page,this.defsort,this.defsearch);
}
closeAddToken(){
this.ban = {};
this.ban.base_coin = 'ETH';
}
private getDismissReason(reason: any): string {
if (reason === ModalDismissReasons.ESC) {
return 'by pressing ESC';
} else if (reason === ModalDismissReasons.BACKDROP_CLICK) {
return 'by clicking on a backdrop';
} else {
      return `with: ${reason}`;
}
}
filesToUploads: Array<File> = [];
urls=[];
filenames = "";
fileChangeEvents(fileInput: any) |
onsubmit(form: NgForm){
if(this.paring.length > 0){
if(this.img_selected == false){
this.toastr.error('Please select image to continue','Error')
}
else{
const formData: any = new FormData();
const files: Array<File> = this.filesToUploads;
for(let i =0; i < files.length; i++){
formData.append("uploads[]", this.filesToUploads[i], files[i]['name']);
}
this.http.post(this.serviceHost+'currencydetails/uploadPhoto',formData)
.map(files => files.json())
.subscribe(files => {
this.ban.logo=files.result.secure_url
this.add_content(form)
})
}
}
else{
this.toastr.error('Please select a pair','Error')
}
}
checkbox(val){
if((<HTMLInputElement> document.getElementById(val)).checked == true){
this.paring.push(val)
}
else{
var filtered = this.paring.filter(function(value, index, arr){
return value != val;
});
this.paring = filtered;
}
}
add_content(form: NgForm){
var pairs=''
for(var i=0;i<this.paring.length;i++){
if(i+1 != this.paring.length){
pairs += this.ban.token_symbol.toUpperCase()+"/"+this.paring[i].toUpperCase()+",";
}
else{
pairs += this.ban.token_symbol.toUpperCase()+"/"+this.paring[i].toUpperCase();
}
}
var req={
"name" : this.ban.token_name,
"symbol" : this.ban.token_symbol.toUpperCase(),
"logo" : this.ban.logo,
"minimum" : this.ban.minwithdraw,
"pairlist" : pairs,
"maximum" : this.ban.maxwithdraw,
"fee" : this.ban.withdrawfee,
"decimal" : this.ban.decimal,
"conaddress" : this.ban.contract_address,
"status" : 'Approved',
"baseCoin" : this.ban.base_coin
}
this.CommonService.requestData('token/addtoken',req).subscribe(res=>{
if(res.status == true){
this.toastr.success(res.Message,"Success");
var button=document.getElementById('shortcut_close');
button.click();
location.reload();
}
else{
this.toastr.error(res.Message,"Error");
}
})
}
}
| {
this.filesToUploads=[];
var path = fileInput.target.files[0].type;
if(path == "image/jpeg" || path == "image/gif" || path == "image/jpg" || path == "image/png")
{
this.filesToUploads = <Array<File>>fileInput.target.files;
this.filenames = fileInput.target.files[0].name;
let files = fileInput.target.files;
if (files) {
for (let file of files) {
let reader = new FileReader();
reader.onload = (e: any) => {
this.urls = e.target.result;
}
reader.readAsDataURL(file);
}
}
this.img_selected=true;
}
else{
this.toastr.error('Please choose a right file!', 'Error');
this.filesToUploads=[];
this.urls=[];
this.filenames='';
this.img_selected=false;
}
} | identifier_body |
main.go | package main
import (
"bufio"
"encoding/base64"
"encoding/json"
"encoding/xml"
"flag"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"strconv"
"strings"
"time"
"license/public"
)
const (
OneYearSeconds = 31536000
OneDaySeconds = 86400
OneMinuteSeconds = 60
DataDir = "data"
)
var (
LicenseID = "LicenseID"
ProductName = "ProductName"
EndTime = "EndTime"
kvMap map[string]bool = map[string]bool{
LicenseID: true,
ProductName: true,
EndTime: true,
}
PriKeyFilePath = ""
)
type Products struct {
ProductExplan string
ProductName string
}
type AttrKV struct {
Desc string
Key string
Val string
}
var (
inputReader *bufio.Reader
products = []*Products{
&Products{
ProductExplan: "产品1",
ProductName: "product_name1",
},
&Products{
ProductExplan: "产品2",
ProductName: "product_name2",
},
&Products{
ProductExplan: "产品2",
ProductName: "product_name3",
},
}
)
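// SelectProduct prompts the operator to pick a product by number and returns its name and description.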
func SelectProduct() (string, string, error) {
var (
index int
err error
productExplan string
productName string
)
fmt.Printf("%s\n", "请选择需要激活的产品(输入数字):")
for i := 1; i <= len(products); i++ {
fmt.Printf("%d %s\n", i, products[i-1].ProductExplan)
}
input, err := inputReader.ReadString('\n')
if err != nil {
os.Exit(0)
}
defer inputReader.Reset(os.Stdin)
inputString := strings.TrimSpace(input)
if inputString != "" {
if strings.HasPrefix(inputString, "q") {
os.Exit(0)
}
index, err = strconv.Atoi(inputString)
if err != nil {
return "", "", err
}
if index <= 0 || index > len(products) {
return "", "", fmt.Errorf("invalid input, please input 1 ~ %d number", len(products))
}
fmt.Println()
fmt.Printf("你选择的产品是: %s\n", products[index-1].ProductExplan)
fmt.Println()
productName = products[index-1].ProductName
productExplan = products[index-1].ProductExplan
}
return productName, productExplan, nil
}
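// InputExpiresTime reads a duration such as "12d" (y/d/m/s units) from stdin and returns the license lifetime in seconds.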
func InputExpiresTime() (int64, error) {
var (
err error
expiresAt int64
)
fmt.Printf("%s\n", "请输入过期时间(格式:数字+单位,例如12d, 单位:天[d] 分钟[m] 秒[s] 年[y]):")
input, err := inputReader.ReadString('\n')
if err != nil {
return 0, err
}
defer inputReader.Reset(os.Stdin)
inputString := strings.TrimSpace(input)
if inputString != "" {
if strings.HasPrefix(inputString, "q") {
os.Exit(0)
}
if strings.HasSuffix(inputString, "y") {
inputString = inputString[:len(inputString)-1]
years, err := strconv.ParseInt(inputString, 10, 64)
// days, err := strconv.ParseFloat(inputString, 64)
if err != nil {
return 0, err
}
if years <= 0 || years > 100 {
return 0, fmt.Errorf("输入年数不能小于0,大于%d年", 100)
}
expiresAt = years * OneYearSeconds
duration := time.Duration(expiresAt) * time.Second
fmt.Println()
fmt.Printf("过期年数: %d years, 过期日期:%s \n", years, time.Now().Add(duration).Format("2006-01-02 15:04:05"))
fmt.Println()
} else if strings.HasSuffix(inputString, "d") {
inputString = inputString[:len(inputString)-1]
days, err := strconv.ParseInt(inputString, 10, 64)
// days, err := strconv.ParseFloat(inputString, 64)
if err != nil {
return 0, err
}
if days <= 0 || days > 100*356 {
return 0, fmt.Errorf("输入天数不能小于0,大于%d天", 100*356) | }
expiresAt = int64(days * OneDaySeconds)
duration := time.Duration(expiresAt) * time.Second
fmt.Println()
fmt.Printf("过期天数: %d days, 过期日期:%s \n", days, time.Now().Add(duration).Format("2006-01-02 15:04:05"))
fmt.Println()
} else if strings.HasSuffix(inputString, "m") {
inputString = inputString[:len(inputString)-1]
minute, err := strconv.ParseInt(inputString, 10, 64)
if err != nil {
return 0, err
}
if minute <= 0 || minute > 100*356*24*60 {
return 0, fmt.Errorf("输入分钟不能小于0,大于%d分钟", 100*356*24*60)
}
expiresAt = minute * OneMinuteSeconds
duration := time.Duration(expiresAt) * time.Second
fmt.Println()
fmt.Printf("过期分钟数: %d minute, 过期日期:%s \n", minute, time.Now().Add(duration).Format("2006-01-02 15:04:05"))
fmt.Println()
} else if strings.HasSuffix(inputString, "s") {
inputString = inputString[:len(inputString)-1]
seconds, err := strconv.ParseInt(inputString, 10, 64)
if err != nil {
return 0, err
}
if seconds <= 0 || seconds > 100*356*24*60*60 {
return 0, fmt.Errorf("输入秒不能小于0,大于%d秒", 100*356*24*60*60)
}
expiresAt = seconds
duration := time.Duration(expiresAt) * time.Second
fmt.Println()
fmt.Printf("过期秒数: %d seconds, 过期日期:%s \n", seconds, time.Now().Add(duration).Format("2006-01-02 15:04:05"))
fmt.Println()
} else {
return 0, fmt.Errorf("%s", "输入不正确,请输入时间单位")
}
}
// fmt.Println("\033[H\033[2J")
return expiresAt, nil
}
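// InputMachineID reads the target machine code from stdin.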
func InputMachineID() (string, error) {
fmt.Printf("%s\n", "请输入机器码:")
input, err := inputReader.ReadString('\n')
if err != nil {
return "", err
}
defer inputReader.Reset(os.Stdin)
inputString := strings.TrimSpace(input)
if inputString != "" {
if strings.HasPrefix(inputString, "q") {
os.Exit(0)
}
fmt.Println()
fmt.Printf("你输入的机器码是: %s\n", inputString)
fmt.Println()
}
return inputString, nil
}
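// ShowActiveCode reads the generated license file and prints it as a base64-encoded activation code.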
func ShowActiveCode(dir, fileName, uuid string) {
fmt.Printf("序号:%s \n", uuid)
fmt.Printf("\n%s\n", "激活码是:")
readPath := filepath.Join(dir, fileName)
licenseActive, err := public.ReadLicensePem(readPath)
if err != nil {
fmt.Println(err.Error())
return
}
fmt.Println(base64.URLEncoding.EncodeToString(licenseActive))
// fmt.Println(string(licenseActive))
}
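// ReadCustomKV loads per-product options from data/<product>.xml, writing a sample options file on first use.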
func ReadCustomKV(productName string) ([]AttrKV, error) {
type Option struct {
XMLName xml.Name `xml:"option"`
Desc string `xml:"desc"`
Key string `xml:"key"`
Value string `xml:"val"`
}
type XMLProduct struct {
XMLName xml.Name `xml:"options"`
Version string `xml:"version,attr"`
Options []Option `xml:"option"`
}
filePath := filepath.Join(DataDir, strings.Join([]string{productName, ".xml"}, ""))
if public.Exists(filePath) {
var (
attr = XMLProduct{}
attrKV []AttrKV
)
fd, err := os.OpenFile(filePath, os.O_RDONLY, 0644)
if err != nil {
return nil, err
}
defer fd.Close()
attrKVBytes, err := ioutil.ReadAll(fd)
if err != nil {
return nil, err
}
err = xml.Unmarshal(attrKVBytes, &attr)
if err != nil {
return nil, err
}
// fmt.Printf("%s特性选择:\n", productExplan)
for i := 0; i < len(attr.Options); i++ {
// fmt.Println(i+1, attr.Options[i].Desc, attr.Options[i].Key, attr.Options[i].Value)
attrKV = append(attrKV, AttrKV{Desc: attr.Options[i].Desc, Key: attr.Options[i].Key, Val: attr.Options[i].Value})
}
// fmt.Println("请输入数字序号,以分号间隔:")
return attrKV, nil
} else {
fd, err := os.OpenFile(filePath, os.O_RDWR|os.O_CREATE, 0644)
if err != nil {
return nil, err
}
defer fd.Close()
attr := &XMLProduct{
Version: "1",
}
attr.Options = append(attr.Options, Option{
Desc: "示例配置",
Key: "key1",
Value: "val1",
})
output, err := xml.MarshalIndent(attr, "", " ")
if err != nil {
fmt.Println(err)
return nil, err
}
_, err = fd.Write([]byte(xml.Header))
if err != nil {
return nil, err
}
_, err = fd.Write(output)
if err != nil {
return nil, err
}
}
return nil, nil
}
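// SelectCustomKV prints the available options and reads a comma-separated set
// of 1-based indexes from stdin; duplicates are removed and the chosen entries
// are returned.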
func SelectCustomKV(productExplan string, kv []AttrKV) ([]AttrKV, error) {
var (
arrayIdx []int
kvs []AttrKV
)
if kv != nil {
fmt.Printf("%s启用配置选择(请输入数字序号,以逗号间隔,跳过按回车):\n", productExplan)
for i := 0; i < len(kv); i++ {
fmt.Println(i+1, kv[i].Desc)
}
input, err := inputReader.ReadString('\n')
if err != nil {
return nil, err
}
defer inputReader.Reset(os.Stdin)
inputString := strings.TrimSpace(input)
if inputString != "" {
if strings.HasPrefix(inputString, "q") {
os.Exit(0)
}
arrayIndx := strings.Split(inputString, ",")
for i := 0; i < len(arrayIndx); i++ {
num := strings.TrimSpace(arrayIndx[i])
if num == "" {
continue
}
idx, err := strconv.Atoi(num)
if err != nil {
return nil, err
}
if idx <= 0 || idx > len(kv) {
return nil, fmt.Errorf("输入不能小于等于0或大于%d", len(kv))
}
arrayIdx = append(arrayIdx, idx)
}
arrayIdx = public.RemoveDuplicate(arrayIdx)
fmt.Printf("\n你选择的是%v,启用的配置是:\n", arrayIdx)
for _, indx := range arrayIdx {
fmt.Printf("%d %s\n", indx, kv[indx-1].Desc)
kvs = append(kvs, kv[indx-1])
}
fmt.Println()
}
return kvs, nil
}
fmt.Println()
return nil, nil
}
func IsQuit() bool {
input, err := inputReader.ReadString('\n')
if err != nil {
return false
}
defer inputReader.Reset(os.Stdin)
inputString := strings.TrimSpace(input)
if inputString != "" {
if strings.HasPrefix(inputString, "q") {
os.Exit(0)
}
}
return true
}
func init() {
flag.StringVar(&PriKeyFilePath, "prikey", "", "prikey.pem file path")
flag.Usage = usage
}
func usage() {
fmt.Println("input 'quit' or 'q' to exit the program")
fmt.Println("input '-prikey' set prikey.pem file path,for example -prikey=keys/v1/prikey.pem")
fmt.Println(public.GetAppInfo())
}
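// LoadConfig reads data/products.json, creating the data directory and a
// default products.json from the built-in product list on first run.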
func LoadConfig() ([]*Products, error) {
var (
productList = []*Products{}
)
isExist := public.Exists(DataDir)
if isExist == false && DataDir != "." {
err := os.MkdirAll(DataDir, os.ModePerm)
if err != nil {
return nil, err
}
}
filePath := filepath.Join(DataDir, "products.json")
if public.Exists(filePath) { // the products file already exists
fd, err := os.OpenFile(filePath, os.O_RDONLY, 0644)
if err != nil {
return nil, err
}
defer fd.Close()
configBytes, err := ioutil.ReadAll(fd)
if err != nil {
return nil, err
}
err = json.Unmarshal(configBytes, &productList)
if err != nil {
return nil, err
}
} else {
encByte, err := json.Marshal(products)
if err != nil {
return nil, err
}
err = ioutil.WriteFile(filePath, encByte, 0644)
if err != nil {
return nil, err
}
productList = products
}
return productList, nil
}
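// main drives the interactive flow: pick a product, optionally pick custom
// attributes, enter the machine code and expiry, then sign the license with
// the ECDSA key (embedded, or loaded via -prikey) and print the activation code.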
func main() {
var (
err error
productName string
productExplan string
expiresAt int64
machineID string
kv []AttrKV
)
flag.Parse()
//load config file
products, err = LoadConfig()
if err != nil {
fmt.Println(err.Error())
return
}
inputReader = bufio.NewReader(os.Stdin)
for {
productName, productExplan, err = SelectProduct()
if err != nil {
fmt.Println(err.Error())
continue
}
if productName == "" {
continue
}
break
}
attrKV, err := ReadCustomKV(productName)
if err != nil {
fmt.Println(err.Error())
return
}
for {
if len(attrKV) == 1 && attrKV[0].Desc == "示例配置" {
break
}
kv, err = SelectCustomKV(productExplan, attrKV)
if err != nil {
fmt.Println(err.Error())
continue
}
break
}
for {
machineID, err = InputMachineID()
if err != nil {
fmt.Println(err.Error())
continue
}
if machineID == "" {
continue
}
break
}
for {
expiresAt, err = InputExpiresTime()
if err != nil {
fmt.Println(err.Error())
continue
}
if expiresAt <= 0 {
continue
}
break
}
// alg, err := public.GetNonEquAlgorthm([]byte(public.ECDSA_PRIVATE), []byte(public.ECDSA_PUBLICKEY))
var (
alg *public.NonEquAlgorthm
)
if PriKeyFilePath == "" {
alg, err = public.GetNonEquAlgorthm([]byte(public.ECDSA_PRIVATE), []byte(public.ECDSA_PUBLICKEY))
if err != nil {
fmt.Println(err.Error())
return
}
} else {
priBytes, err := public.LoadKey(PriKeyFilePath)
if err != nil {
fmt.Println(err.Error())
return
}
alg, err = public.GetNonEquAlgorthm(priBytes, nil)
if err != nil {
fmt.Println(err.Error())
return
}
}
duration := time.Duration(expiresAt) * time.Second
// define the license header key/value entries
uuid := public.GetUUID()
expiresTime := time.Now().Add(duration)
customKV := map[string]string{LicenseID: uuid, ProductName: productName, EndTime: expiresTime.Format(time.RFC3339)}
for _, v := range kv {
if _, ok := kvMap[v.Key]; ok {
fmt.Printf("模板定义字段%s与系统定义字段冲突\n", v.Key)
return
}
customKV[v.Key] = v.Val
}
// build the license structure
licenseIns := public.GenerateLicense(uuid, productName, machineID, expiresTime.Unix(), customKV)
enCodeBytes, err := licenseIns.ToBytes()
if err != nil {
fmt.Println(err.Error())
return
}
// sign the license
licenseString, err := alg.SignedBytes(enCodeBytes)
if err != nil {
fmt.Println(err.Error())
return
}
dir := filepath.Join(DataDir, "db")
fileName := strings.Join([]string{"license", licenseIns.LicenseUUID, "dat"}, ".")
err = public.SaveLicensePem(dir, fileName, licenseString, customKV)
if err != nil {
fmt.Println(err.Error())
return
}
ShowActiveCode(dir, fileName, licenseIns.LicenseUUID)
for {
IsQuit()
}
}
// main.go
package main
import (
"bufio"
"encoding/base64"
"encoding/json"
"encoding/xml"
"flag"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"strconv"
"strings"
"time"
"license/public"
)
const (
OneYearSeconds = 31536000
OneDaySeconds = 86400
OneMinuteSeconds = 60
DataDir = "data"
)
var (
LicenseID = "LicenseID"
ProductName = "ProductName"
EndTime = "EndTime"
kvMap map[string]bool = map[string]bool{
LicenseID: true,
ProductName: true,
EndTime: true,
}
PriKeyFilePath = ""
)
type Products struct {
ProductExplan string
ProductName string
}
type AttrKV struct {
Desc string
Key string
Val string
}
var (
inputReader *bufio.Reader
products = []*Products{
&Products{
ProductExplan: "产品1",
ProductName: "product_name1",
},
&Products{
ProductExplan: "产品2",
ProductName: "product_name2",
},
&Products{
ProductExplan: "产品2",
ProductName: "product_name3",
},
}
)
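// SelectProduct prints the product list and reads a 1-based index from stdin,
// returning the chosen product's name and description.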
func SelectProduct() (string, string, error) {
var (
index int
err error
productExplan string
productName string
)
fmt.Printf("%s\n", "请选择需要激活的产品(输入数字):")
for i := 1; i <= len(products); i++ {
fmt.Printf("%d %s\n", i, products[i-1].ProductExplan)
}
input, err := inputReader.ReadString('\n')
if err != nil {
os.Exit(0)
}
defer inputReader.Reset(os.Stdin)
inputString := strings.TrimSpace(input)
if inputString != "" {
if strings.HasPrefix(inputString, "q") {
os.Exit(0)
}
index, err = strconv.Atoi(inputString)
if err != nil {
return "", "", err
}
if index <= 0 || index > len(products) {
return "", "", fmt.Errorf("invalid input, please input 1 ~ %d number", len(products))
}
fmt.Println()
fmt.Printf("你选择的产品是: %s\n", products[index-1].ProductExplan)
fmt.Println()
productName = products[index-1].ProductName
productExplan = products[index-1].ProductExplan
}
return productName, productExplan, nil
}
func InputExpiresTime() (int64, error) {
var (
err error
expiresAt int64
)
fmt.Printf("%s\n", "请输入过期时间(格式:数字+单位,例如12d, 单位:天[d] 分钟[m] 秒[s] 年[y]):")
input, err := inputReader.ReadString('\n')
if err != nil {
return 0, err
}
defer inputReader.Reset(os.Stdin)
inputString := strings.TrimSpace(input)
if inputString != "" {
if strings.HasPrefix(inputString, "q") {
os.Exit(0)
}
if strings.HasSuffix(inputString, "y") {
inputString = inputString[:len(inputString)-1]
years, err := strconv.ParseInt(inputString, 10, 64)
// days, err := strconv.ParseFloat(inputString, 64)
if err != nil {
return 0, err
}
if years <= 0 || years > 100 {
return 0, fmt.Errorf("输入年数不能小于0,大于%d年", 100)
}
expiresAt = years * OneYearSeconds
duration := time.Duration(expiresAt) * time.Second
fmt.Println()
fmt.Printf("过期年数: %d years, 过期日期:%s \n", years, time.Now().Add(duration).Format("2006-01-02 15:04:05"))
fmt.Println()
} else if strings.HasSuffix(inputString, "d") {
inputString = inputString[:len(inputString)-1]
days, err := strconv.ParseInt(inputString, 10, 64)
// days, err := strconv.ParseFloat(inputString, 64)
if err != nil {
return 0, err
}
if days <= 0 || days > 100*356 {
return 0, fmt.Errorf("输入天数不能小于0,大于%d天", 100*356)
}
expiresAt = int64(days * OneDaySeconds)
duration := time.Duration(expiresAt) * time.Second
fmt.Println()
fmt.Printf("过期天数: %d days, 过期日期:%s \n", days, time.Now().Add(duration).Format("2006-01-02 15:04:05"))
fmt.Println()
} else if strings.HasSuffix(inputString, "m") {
inputString = inputString[:len(inputString)-1]
minute, err := strconv.ParseInt(inputString, 10, 64)
if err != nil {
return 0, err
}
if minute <= 0 || minute > 100*356*24*60 {
return 0, fmt.Errorf("输入分钟不能小于0,大于%d分钟", 100*356*24*60)
}
expiresAt = minute * OneMinuteSeconds
duration := time.Duration(expiresAt) * time.Second
fmt.Println()
fmt.Printf("过期分钟数: %d minute, 过期日期:%s \n", minute, time.Now().Add(duration).Format("2006-01-02 15:04:05"))
fmt.Println()
} else if strings.HasSuffix(inputString, "s") {
inputString = inputString[:len(inputString)-1]
seconds, err := strconv.ParseInt(inputString, 10, 64)
if err != nil {
return 0, err
}
if seconds <= 0 || seconds > 100*356*24*60*60 {
return 0, fmt.Errorf("输入秒不能小于0,大于%d秒", 100*356*24*60*60)
}
expiresAt = seconds
duration := time.Duration(expiresAt) * time.Second
fmt.Println()
fmt.Printf("过期秒数: %d seconds, 过期日期:%s \n", seconds, time.Now().Add(duration).Format("2006-01-02 15:04:05"))
fmt.Println()
} else {
return 0, fmt.Errorf("%s", "输入不正确,请输入时间单位")
}
}
// fmt.Println("\033[H\033[2J")
return expiresAt, nil
}
func InputMachineID() (string, error) {
fmt.Printf("%s\n", "请输入机器码:")
input, err := inputReader.ReadString('\n')
if err != nil {
return "", err
}
defer inputReader.Reset(os.Stdin)
inputString := strings.TrimSpace(input)
if inputString != "" {
if strings.HasPrefix(inputString, "q") {
os.Exit(0)
}
fmt.Println()
fmt.Printf("你输入的机器码是: %s\n", inputString)
fmt.Println()
}
return inputString, nil
}
func ShowActiveCode(dir, fileName, uuid string) {
fmt.Printf("序号:%s \n", uuid)
fmt.Printf("\n%s\n", "激活码是:")
readPath := filepath.Join(dir, fileName)
licenseActive, err := public.ReadLicensePem(readPath)
if err != nil {
fmt.Println(err.Error())
return
}
fmt.Println(base64.URLEncoding.EncodeToString(licenseActive))
// fmt.Println(string(licenseActive))
}
func ReadCustomKV(productName string) ([]AttrKV, error) {
type Option struct {
XMLName xml.Name `xml:"option"`
Desc string `xml:"desc"`
Key string `xml:"key"`
Value string `xml:"val"`
}
type XMLProduct struct {
XMLName xml.Name `xml:"options"`
Version string `xml:"version,attr"`
Options []Option `xml:"option"`
}
filePath := filepath.Join(DataDir, strings.Join([]string{productName, ".xml"}, ""))
if public.Exists(filePath) {
var (
attr = XMLProduct{}
attrKV []AttrKV
)
fd, err := os.OpenFile(filePath, os.O_RDONLY, 0644)
if err != nil {
return nil, err
}
defer fd.Close()
attrKVBytes, err := ioutil.ReadAll(fd)
if err != nil {
return nil, err
}
err = xml.Unmarshal(attrKVBytes, &attr)
if err != nil {
return nil, err
}
// fmt.Printf("%s特性选择:\n", productExplan)
for i := 0; i < len(attr.Options); i++ {
// fmt.Println(i+1, attr.Options[i].Desc, attr.Options[i].Key, attr.Options[i].Value)
attrKV = append(attrKV, AttrKV{Desc: attr.Options[i].Desc, Key: attr.Options[i].Key, Val: attr.Options[i].Value})
}
// fmt.Println("请输入数字序号,以分号间隔:")
return attrKV, nil
} else {
fd, err := os.OpenFile(filePath, os.O_RDWR|os.O_CREATE, 0644)
if err != nil {
return nil, err
}
defer fd.Close()
attr := &XMLProduct{
Version: "1",
}
attr.Options = append(attr.Options, Option{
Desc: "示例配置",
Key: "key1",
Value: "val1",
})
output, err := xml.MarshalIndent(attr, "", " ")
if err != nil {
fmt.Println(err)
return nil, err
}
_, err = fd.Write([]byte(xml.Header))
if err != nil {
return nil, err
}
_, err = fd.Write(output)
if err != nil {
return nil, err
}
}
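// At this point a skeleton options file has been written; its content looks
// roughly like the following (indentation approximate):
//
//  <?xml version="1.0" encoding="UTF-8"?>
//  <options version="1">
//   <option>
//    <desc>示例配置</desc>
//    <key>key1</key>
//    <val>val1</val>
//   </option>
//  </options>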
return nil, nil
}
func SelectCustomKV(productExplan string, kv []AttrKV) ([]AttrKV, error) {
var (
arrayIdx []int
kvs []AttrKV
)
if kv != nil {
fmt.Printf("%s启用配置选择(请输入数字序号,以逗号间隔,跳过按回车):\n", productExplan)
for i := 0; i < len(kv); i++ {
fmt.Println(i+1, kv[i].Desc)
}
input, err := inputReader.ReadString('\n')
if err != nil {
return nil, err
}
defer inputReader.Reset(os.Stdin)
inputString := strings.TrimSpace(input)
if inputString != "" {
if strings.HasPrefix(inputString, "q") {
os.Exit(0)
}
arrayIndx := strings.Split(inputString, ",")
for i := 0; i < len(arrayIndx); i++ {
num := strings.TrimSpace(arrayIndx[i])
if num == "" {
continue
}
idx, err := strconv.Atoi(num)
if err != nil {
return nil, err
}
if idx <= 0 || idx > len(kv) {
return nil, fmt.Errorf("输入不能小于等于0或大于%d", len(kv))
}
arrayIdx = append(arrayIdx, idx)
}
arrayIdx = public.RemoveDuplicate(arrayIdx)
fmt.Printf("\n你选择的是%v,启用的配置是:\n", arrayIdx)
for _, indx := range arrayIdx {
fmt.Printf("%d %s\n", indx, kv[indx-1].Desc)
kvs = append(kvs, kv[indx-1])
}
fmt.Println()
}
return kvs, nil
}
fmt.Println()
return nil, nil
}
func IsQuit() bool {
input, err := inputReader.ReadString('\n')
if err != nil {
return false
}
defer inputReader.Reset(os.Stdin)
inputString := strings.TrimSpace(input)
if inputString != "" {
if strings.HasPrefix(inputString, "q") {
os.Exit(0)
}
}
return true
}
func init() {
flag.StringVar(&PriKeyFilePath, "prikey", "", "prikey.pem file path")
flag.Usage = usage
}
func usage() {
fmt.Println("input 'quit' or 'q' to exit the program")
fmt.Println("input '-prikey' set prikey.pem file path,for example -prikey=keys/v1/prikey.pem")
fmt.Println(public.GetAppInfo())
}
func LoadConfig() ([]*Products, error) {
var (
productList = []*Products{}
)
isExist := public.Exists(DataDir)
if isExist == false && DataDir != "." {
err := os.MkdirAll(DataDir, os.ModePerm)
if err != nil {
return nil, err
}
}
filePath := filepath.Join(DataDir, "products.json")
if public.Exists(filePath) { // the products file already exists
fd, err := os.OpenFile(filePath, os.O_RDONLY, 0644)
if err != nil {
return nil, err
}
defer fd.Close()
configBytes, err := ioutil.ReadAll(fd)
if err != nil {
return nil, err
}
err = json.Unmarshal(configBytes, &productList)
if err != nil {
return nil, err
}
} else {
encByte, err := json.Marshal(products)
if err != nil {
return nil, err
}
err = ioutil.WriteFile(filePath, encByte, 0644)
if err != nil {
return nil, err
}
productList = products
}
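// On first run the file written above holds the built-in product list; shown
// pretty-printed here for readability (the actual file is one compact line):
//
//  [
//   {"ProductExplan":"产品1","ProductName":"product_name1"},
//   {"ProductExplan":"产品2","ProductName":"product_name2"},
//   ...
//  ]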
return productList, nil
}
func main() {
var (
err error
productName string
productExplan string
expiresAt int64
machineID string
kv []AttrKV
)
flag.Parse()
//load config file
products, err = LoadConfig()
if err != nil {
fmt.Println(err.Error())
return
}
inputReader = bufio.NewReader(os.Stdin)
for {
productName, productExplan, err = SelectProduct()
if err != nil {
fmt.Println(err.Error())
continue
}
if productName == "" {
continue
}
break
}
attrKV, err := ReadCustomKV(productName)
if err != nil {
fmt.Println(err.Error())
return
}
for {
if len(attrKV) == 1 && attrKV[0].Desc == "示例配置" {
break
}
kv, err = SelectCustomKV(productExplan, attrKV)
if err != nil {
fmt.Println(err.Error())
continue
}
break
}
for {
machineID, err = InputMachineID()
if err != nil {
fmt.Println(err.Error())
continue
}
if machineID == "" {
continue
}
break
}
for {
expiresAt, err = InputExpiresTime()
if err != nil {
fmt.Println(err.Error())
continue
}
if expiresAt <= 0 {
continue
}
break
}
// alg, err := public.GetNonEquAlgorthm([]byte(public.ECDSA_PRIVATE), []byte(public.ECDSA_PUBLICKEY))
var (
alg *public.NonEquAlgorthm
)
if PriKeyFilePath == "" {
alg, err = public.GetNonEquAlgorthm([]byte(public.ECDSA_PRIVATE), []byte(public.ECDSA_PUBLICKEY))
if err != nil {
fmt.Println(err.Error())
return
}
} else {
priBytes, err := public.LoadKey(PriKeyFilePath)
if err != nil {
fmt.Println(err.Error())
return
}
alg, err = public.GetNonEquAlgorthm(priBytes, nil)
if err != nil {
fmt.Println(err.Error())
return
}
}
duration := time.Duration(expiresAt) * time.Second
// define the license header key/value entries
uuid := public.GetUUID()
expiresTime := time.Now().Add(duration)
customKV := map[string]string{LicenseID: uuid, ProductName: productName, EndTime: expiresTime.Format(time.RFC3339)}
for _, v := range kv {
if _, ok := kvMap[v.Key]; ok {
fmt.Printf("模板定义字段%s与系统定义字段冲突\n", v.Key)
return
}
customKV[v.Key] = v.Val
}
// build the license structure
licenseIns := public.GenerateLicense(uuid, productName, machineID, expiresTime.Unix(), customKV)
enCodeBytes, err := licenseIns.ToBytes()
if err != nil {
fmt.Println(err.Error())
return
}
// sign the license
licenseString, err := alg.SignedBytes(enCodeBytes)
if err != nil {
fmt.Println(err.Error())
return
}
dir := filepath.Join(DataDir, "db")
fileName := strings.Join([]string{"license", licenseIns.LicenseUUID, "dat"}, ".")
err = public.SaveLicensePem(dir, fileName, licenseString, customKV)
if err != nil {
fmt.Println(err.Error())
return
}
ShowActiveCode(dir, fileName, licenseIns.LicenseUUID)
for {
IsQuit()
}
}
// main.go
package main
import (
"bufio"
"encoding/base64"
"encoding/json"
"encoding/xml"
"flag"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"strconv"
"strings"
"time"
"license/public"
)
const (
OneYearSeconds = 31536000
OneDaySeconds = 86400
OneMinuteSeconds = 60
DataDir = "data"
)
var (
LicenseID = "LicenseID"
ProductName = "ProductName"
EndTime = "EndTime"
kvMap map[string]bool = map[string]bool{
LicenseID: true,
ProductName: true,
EndTime: true,
}
PriKeyFilePath = ""
)
type Products struct {
ProductExplan string
ProductName string
}
type AttrKV struct {
Desc string
Key string
Val string
}
var (
inputReader *bufio.Reader
products = []*Products{
&Products{
ProductExplan: "产品1",
ProductName: "product_name1",
},
&Products{
ProductExplan: "产品2",
ProductName: "product_name2",
},
&Products{
ProductExplan: "产品2",
ProductName: "product_name3",
},
}
)
func SelectProduct() (string, string, error) {
var (
index int
err error
productExplan string
productName string
)
fmt.Printf("%s\n", "请选择需要激活的产品(输入数字):")
for i := 1; i <= len(products); i++ {
fmt.Printf("%d %s\n", i, products[i-1].ProductExplan)
}
input, err := inputReader.ReadString('\n')
if err != nil {
os.Exit(0)
}
defer inputReader.Reset(os.Stdin)
inputString := strings.TrimSpace(input)
if inputString != "" {
if strings.HasPrefix(inputString, "q") {
os.Exit(0)
}
index, err = strconv.Atoi(inputString)
if err != nil {
return "", "", err
}
if index <= 0 || index > len(products) {
return "", "", fmt.Errorf("invalid input, please input 1 ~ %d number", len(products))
}
fmt.Println()
fmt.Printf("你选择的产品是: %s\n", products[index-1].ProductExplan)
fmt.Println()
productName = products[index-1].ProductName
productExplan = products[index-1].ProductExplan
}
return productName, productExplan, nil
}
func InputExpiresTime() (int64, error) {
var (
err error
expiresAt int64
)
fmt.Printf("%s\n", "请输入过期时间(格式:数字+单位,例如12d, 单位:天[d] 分钟[m] 秒[s] 年[y]):")
input, err := inputReader.ReadString('\n')
if err != nil {
return 0, err
}
defer inputReader.Reset(os.Stdin)
inputString := strings.TrimSpace(input)
if inputString != "" {
if strings.HasPrefix(inputString, "q") {
os.Exit(0)
}
if strings.HasSuffix(inputString, "y") {
inputString = inputString[:len(inputString)-1]
years, err := strconv.ParseInt(inputString, 10, 64)
// days, err := strconv.ParseFloat(inputString, 64)
if err != nil {
return 0, err
}
if years <= 0 || years > 100 {
return 0, fmt.Errorf("输入年数不能小于0,大于%d年", 100)
}
expiresAt = years * OneYearSeconds
duration := time.Duration(expiresAt) * time.Second
fmt.Println()
fmt.Printf("过期年数: %d years, 过期日期:%s \n", years, time.Now().Add(duration).Format("2006-01-02 15:04:05"))
fmt.Println()
} else if strings.HasSuffix(inputString, "d") {
inputString = inputString[:len(inputString)-1]
days, err := strconv.ParseInt(inputString, 10, 64)
// days, err := strconv.ParseFloat(inputString, 64)
if err != nil {
return 0, err
}
if days <= 0 || days > 100*356 {
return 0, fmt.Errorf("输入天数不能小于0,大于%d天", 100*356)
}
expiresAt = int64(days * OneDaySeconds)
duration := time.Duration(expiresAt) * time.Second
fmt.Println()
fmt.Printf("过期天数: %d days, 过期日期:%s \n", days, time.Now().Add(duration).Format("2006-01-02 15:04:05"))
fmt.Println()
} else if strings.HasSuffix(inputString, "m") {
inputString = inputString[:len(inputString)-1]
minute, err := strconv.ParseInt(inputString, 10, 64)
if err != nil {
return 0, err
}
if minute <= 0 || minute > 100*356*24*60 {
return 0, fmt.Errorf("输入分钟不能小于0,大于%d分钟", 100*356*24*60)
}
expiresAt = minute * OneMinuteSeconds
duration := time.Duration(expiresAt) * time.Second
fmt.Println()
fmt.Printf("过期分钟数: %d minute, 过期日期:%s \n", minute, time.Now().Add(duration).Format("2006-01-02 15:04:05"))
fmt.Println()
} else if strings.HasSuffix(inputString, "s") {
inputString = inputString[:len(inputString)-1]
seconds, err := strconv.ParseInt(inputString, 10, 64)
if err != nil {
return 0, err
}
if seconds <= 0 || seconds > 100*356*24*60*60 {
return 0, fmt.Errorf("输入秒不能小于0,大于%d秒", 100*356*24*60*60)
}
expiresAt = seconds
duration := time.Duration(expiresAt) * time.Second
fmt.Println()
fmt.Printf("过期秒数: %d seconds, 过期日期:%s \n", seconds, time.Now().Add(duration).Format("2006-01-02 15:04:05"))
fmt.Println()
} else {
return 0, fmt.Errorf("%s", "输入不正确,请输入时间单位")
}
}
// fmt.Println("\033[H\033[2J")
return expiresAt, nil
}
func InputMachineID() (string, error) {
fmt.Printf("%s\n", "请输入机器码:")
input, err := inputReader.ReadString('\n')
if err != nil {
return "", err
}
defer inputReader.Reset(os.Stdin)
inputString := strings.TrimSpace(input)
if inputString != "" {
if strings.HasPrefix(inputString, "q") {
os.Exit(0)
}
fmt.Println()
fmt.Printf("你输入的机器码是: %s\n", inputString)
fmt.Println()
}
return inputString, nil
}
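// ShowActiveCode reads the saved license file back from disk and prints the
// license UUID together with the base64url-encoded activation code.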
func ShowActiveCode(dir, fileName, uuid string) {
fmt.Printf("序号:%s \n", uuid)
fmt.Printf("\n%s\n", "激活码是:")
readPath := filepath.Join(dir, fileName)
licenseActive, err := public.ReadLicensePem(readPath)
if err != nil {
fmt.Println(err.Error())
return
}
fmt.Println(base64.URLEncoding.EncodeToString(licenseActive))
// fmt.Println(string(licenseActive))
}
func ReadCustomKV(productName string) ([]AttrKV, error) {
type Option struct {
XMLName xml.Name `xml:"option"`
Desc string `xml:"desc"`
Key string `xml:"key"`
Value string `xml:"val"`
}
type XMLProduct struct {
XMLName xml.Name `xml:"options"`
Version string `xml:"version,attr"`
Options []Option `xml:"option"`
}
filePath := filepath.Join(DataDir, strings.Join([]string{productName, ".xml"}, ""))
if public.Exists(filePath) {
var (
attr = XMLProduct{}
attrKV []AttrKV
)
fd, err := os.OpenFile(filePath, os.O_RDONLY, 0644)
if err != nil {
return nil, err
}
defer fd.Close()
attrKVBytes, err := ioutil.ReadAll(fd)
if err != nil {
return nil, err
}
err = xml.Unmarshal(attrKVBytes, &attr)
if err != nil {
return nil, err
}
// fmt.Printf("%s特性选择:\n", productExplan)
for i := 0; i < len(attr.Options); i++ {
// fmt.Println(i+1, attr.Options[i].Desc, attr.Options[i].Key, attr.Options[i].Value)
attrKV = append(attrKV, AttrKV{Desc: attr.Options[i].Desc, Key: attr.Options[i].Key, Val: attr.Options[i].Value})
}
// fmt.Println("请输入数字序号,以分号间隔:")
return attrKV, nil
} else {
fd, err := os.OpenFile(filePath, os.O_RDWR|os.O_CREATE, 0644)
if err != nil {
return nil, err
}
defer fd.Close()
attr := &XMLProduct{
Version: "1",
}
attr.Options = append(attr.Options, Option{
Desc: "示例配置",
Key: "key1",
Value: "val1",
})
output, err := xml.MarshalIndent(attr, "", " ")
if err != nil {
fmt.Println(err)
return nil, err
}
_, err = fd.Write([]byte(xml.Header))
if err != nil {
return nil, err
}
_, err = fd.Write(output)
if err != nil {
return nil, err
}
}
return nil, nil
}
func SelectCustomKV(productExplan string, kv []AttrKV) ([]AttrKV, error) {
var (
arrayIdx []int
kvs []AttrKV
)
if kv != nil {
fmt.Printf("%s启用配置选择(请输入数字序号,以逗号间隔,跳过按回车):\n", productExplan)
for i := 0; i < len(kv); i++ {
fmt.Println(i+1, kv[i].Desc)
}
input, err := inputReader.ReadString('\n')
if err != nil {
return nil, err
}
defer inputReader.Reset(os.Stdin)
inputString := strings.TrimSpace(input)
if inputString != "" {
if strings.HasPrefix(inputString, "q") {
os.Exit(0)
}
arrayIndx := strings.Split(inputString, ",")
for i := 0; i < len(arrayIndx); i++ {
num := strings.TrimSpace(arrayIndx[i])
if num == "" {
continue
}
idx, err := strconv.Atoi(num)
if err != nil {
return nil, err
}
if idx <= 0 || idx > len(kv) {
return nil, fmt.Errorf("输入不能小于等于0或大于%d", len(kv))
}
arrayIdx = append(arrayIdx, idx)
}
arrayIdx = public.RemoveDuplicate(arrayIdx)
fmt.Printf("\n你选择的是%v,启用的配置是:\n", arrayIdx)
for _, indx := range arrayIdx {
fmt.Printf("%d %s\n", indx, kv[indx-1].Desc)
kvs = append(kvs, kv[indx-1])
}
fmt.Println()
}
return kvs, nil
}
fmt.Println()
return nil, nil
}
func IsQuit() bool {
input, err := inputReader.ReadString('\n')
if err != nil {
return false
}
defer inputReader.Reset(os.Stdin)
inputString := strings.TrimSpace(input)
if inputString != "" {
if strings.HasPrefix(inputString, "q") {
os.Exit(0)
}
}
return true
}
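// init registers the -prikey flag and installs the custom usage message.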
func init() {
flag.StringVar(&PriKeyFilePath, "prikey", "", "prikey.pem file path")
flag.Usage = usage
}
func usage() {
fmt.Println("input 'quit' or 'q' to exit the program")
fmt.Println("input '-prikey' set prikey.pem file path,for example -prikey=keys/v1/prikey.pem")
fmt.Println(public.GetAppInfo())
}
func LoadConfig() ([]*Products, error) {
var (
productList = []*Products{}
)
isExist := public.Exists(DataDir)
if isExist == false && DataDir != "." {
err := os.MkdirAll(DataDir, os.ModePerm)
if err != nil {
return nil, err
}
}
filePath := filepath.Join(DataDir, "products.json")
if public.Exists(filePath) { //文件存在
fd, err := os.OpenFile(filePath, os.O_RDONLY, 0644)
if err != nil {
return nil, err
}
defer fd.Close()
configBytes, err := ioutil.ReadAll(fd)
if err != nil {
return nil, err
}
err = json.Unmarshal(configBytes, &productList)
if err != nil {
return nil, err
}
} else {
encByte, err := json.Marshal(products)
if err != nil {
return nil, err
}
err = ioutil.WriteFile(filePath, encByte, 0644)
if err != nil {
return nil, err
}
productList = products
}
return productList, nil
}
func main() {
var (
err error
productName string
productExplan string
expiresAt int64
machineID string
kv []AttrKV
)
flag.Parse()
//load config file
products, err = LoadConfig()
if err != nil {
fmt.Println(err.Error())
return
}
inputReader = bufio.NewReader(os.Stdin)
for {
productName, productExplan, err = SelectProduct()
if err != nil {
fmt.Println(err.Error())
continue
}
if productName == "" {
continue
}
break
}
attrKV, err := ReadCustomKV(productName)
if err != nil {
fmt.Println(err.Error())
return
}
for {
if len(attrKV) == 1 && attrKV[0].Desc == "示例配置" {
break
}
kv, err = SelectCustomKV(productExplan, attrKV)
if err != nil {
fmt.Println(err.Error())
continue
}
break
}
for {
machineID, err = InputMachineID()
if err != nil {
fmt.Println(err.Error())
continue
}
if machineID == "" {
continue
}
break
}
for {
expiresAt, err = InputExpiresTime()
if err != nil {
fmt.Println(err.Error())
continue
}
if expiresAt <= 0 {
continue
}
break
}
// alg, err := public.GetNonEquAlgorthm([]byte(public.ECDSA_PRIVATE), []byte(public.ECDSA_PUBLICKEY))
var (
alg *public.NonEquAlgorthm
)
if PriKeyFilePath == "" {
alg, err = public.GetNonEquAlgorthm([]byte(public.ECDSA_PRIVATE), []byte(public.ECDSA_PUBLICKEY))
if err != nil {
fmt.Println(err.Error())
return
}
} else {
priBytes, err := public.LoadKey(PriKeyFilePath)
if err != nil {
fmt.Println(err.Error())
return
}
alg, err = public.GetNonEquAlgorthm(priBytes, nil)
if err != nil {
fmt.Println(err.Error())
return
}
}
duration := time.Duration(expiresAt) * time.Second
//定义License HEAD KV
uuid := public.GetUUID()
expiresTime := time.Now().Add(duration)
customKV := map[string]string{LicenseID: uuid, ProductName: productName, EndTime: expiresTime.Format(time.RFC3339)}
for _, v := range kv {
if _, ok := kvMap[v.Key]; ok {
fmt.Printf("模板定义字段%s与系统定义字段冲突\n", v.Key)
return
}
customKV[v.Key] = v.Val
}
//构造license结构
licenseIns := public.GenerateLicense(uuid, productName, machineID, expiresTime.Unix(), customKV)
enCodeBytes, err := licenseIns.ToBytes()
if err != nil {
fmt.Println(err.Error())
return
}
//签名license
licenseString, err := alg.SignedBytes(enCodeBytes)
if err != nil {
fmt.Println(err.Error())
return
}
dir := filepath.Join(DataDir, "db")
fileName := strings.Join([]string{"license", licenseIns.LicenseUUID, "dat"}, ".")
err = public.SaveLicensePem(dir, fileName, licenseString, customKV)
if err != nil {
fmt.Println(err.Error())
return
}
ShowActiveCode(dir, fileName, licenseIns.LicenseUUID)
for {
IsQuit()
}
}
// main.go
package main
import (
"bufio"
"encoding/base64"
"encoding/json"
"encoding/xml"
"flag"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"strconv"
"strings"
"time"
"license/public"
)
const (
OneYearSeconds = 31536000
OneDaySeconds = 86400
OneMinuteSeconds = 60
DataDir = "data"
)
var (
LicenseID = "LicenseID"
ProductName = "ProductName"
EndTime = "EndTime"
kvMap map[string]bool = map[string]bool{
LicenseID: true,
ProductName: true,
EndTime: true,
}
PriKeyFilePath = ""
)
type Products struct {
ProductExplan string
ProductName string
}
type AttrKV struct {
Desc string
Key string
Val string
}
var (
inputReader *bufio.Reader
products = []*Products{
&Products{
ProductExplan: "产品1",
ProductName: "product_name1",
},
&Products{
ProductExplan: "产品2",
ProductName: "product_name2",
},
&Products{
ProductExplan: "产品2",
ProductName: "product_name3",
},
}
)
func SelectProduct() (string, string, error) {
var (
index int
err error
productExplan string
productName string
)
fmt.Printf("%s\n", "请选择需要激活的产品(输入数字):")
for i := 1; i <= len(products); i++ {
fmt.Printf("%d %s\n", i, products[i-1].ProductExplan)
}
input, err := inputReader.ReadString('\n')
if err != nil {
os.Exit(0)
}
defer inputReader.Reset(os.Stdin)
inputString := strings.TrimSpace(input)
if inputString != "" {
if strings.HasPrefix(inputString, "q") {
os.Exit(0)
}
index, err = strconv.Atoi(inputString)
if err != nil {
return "", "", err
}
if index <= 0 || index > len(products) {
return "", "", fmt.Errorf("invalid input, please input 1 ~ %d number", len(products))
}
fmt.Println()
fmt.Printf("你选择的产品是: %s\n", products[index-1].ProductExplan)
fmt.Println()
productName = products[index-1].ProductName
productExplan = products[index-1].ProductExplan
}
return productName, productExplan, nil
}
func InputExpiresTime() (int64, error) {
var (
err error
expiresAt int64
)
fmt.Printf("%s\n", "请输入过期时间(格式:数字+单位,例如12d, 单位:天[d] 分钟[m] 秒[s] 年[y]):")
input, err := inputReader.ReadString('\n')
if err != nil {
return 0, err
}
defer inputReader.Reset(os.Stdin)
inputString := strings.TrimSpace(input)
if inputString != "" {
if strings.HasPrefix(inputString, "q") {
os.Exit(0)
}
if strings.HasSuffix(inputString, "y") {
inputString = inputString[:len(inputString)-1]
years, err := strconv.ParseInt(inputString, 10, 64)
// days, err := strconv.ParseFloat(inputString, 64)
if err != nil {
return 0, err
}
if years <= 0 || years > 100 {
return 0, fmt.Errorf("输入年数不能小于0,大于%d年", 100)
}
expiresAt = years * OneYearSeconds
duration := time.Duration(expiresAt) * time.Second
fmt.Println()
fmt.Printf("过期年数: %d years, 过期日期:%s \n", years, time.Now().Add(duration).Format("2006-01-02 15:04:05"))
fmt.Println()
} else if strings.HasSuffix(inputString, "d") {
inputString = inputString[:len(inputString)-1]
days, err := strconv.ParseInt(inputString, 10, 64)
// days, err := strconv.ParseFloat(inputString, 64)
if err != nil {
return 0, err
}
if days <= 0 || days > 100*356 {
return 0, fmt.Errorf("输入天数不能小于0,大于%d天", 100*356)
}
expiresAt = int64(days * OneDaySeconds)
duration := time.Duration(expiresAt) * time.Second
fmt.Println()
fmt.Printf("过期天数: %d days, 过期日期:%s \n", days, time.Now().Add(duration).Format("2006-01-02 15:04:05"))
fmt.Println()
} else if strings.HasSuffix(inputString, "m") {
inputString = inputString[:len(inputString)-1]
minute, err := strconv.ParseInt(inputString, 10, 64)
if err != nil {
return 0, err
}
if minute <= 0 || minute > 100*356*24*60 {
return 0, fmt.Errorf("输入分钟不能小于0,大于%d分钟", 100*356*24*60)
}
expiresAt = minute * OneMinuteSeconds
duration := time.Duration(expiresAt) * time.Second
fmt.Println()
fmt.Printf("过期分钟数: %d minute, 过期日期:%s \n", minute, time.Now().Add(duration).Format("2006-01-02 15:04:05"))
fmt.Println()
} else if strings.HasSuffix(inputString, "s") {
inputString = inputString[:len(inputString)-1]
seconds, err := strconv.ParseInt(inputString, 10, 64)
if err != nil {
return 0, err
}
if seconds <= 0 || seconds > 100*356*24*60*60 {
return 0, fmt.Errorf("输入秒不能小于0,大于%d秒", 100*356*24*60*60)
}
expiresAt = seconds
duration := time.Duration(expiresAt) * time.Second
fmt.Println()
fmt.Printf("过期秒数: %d seconds, 过期日期:%s \n", seconds, time.Now().Add(duration).Format("2006-01-02 15:04:05"))
fmt.Println()
} else {
return 0, fmt.Errorf("%s", "输入不正确,请输入时间单位")
}
}
// fmt.Println("\033[H\033[2J")
return expiresAt, nil
}
func InputMachineID() (string, error) {
fmt.Printf("%s\n", "请输入机器码:")
input, err := inputReader.ReadString('\n')
if err != nil {
return "", err
}
defer inputReader.Reset(os.Stdin)
inputString := strings.TrimSpace(input)
if inputString != "" {
if strings.HasPrefix(inputString, "q") {
os.Exit(0)
}
fmt.Println()
fmt.Printf("你输入的机器码是: %s\n", inputString)
fmt.Println()
}
return inputString, nil
}
func ShowActiveCode(dir, fileName, uuid string) {
fmt.Printf("序号:%s \n", uuid)
fmt.Printf("\n%s\n", "激活码是:")
readPath := filepath.Join(dir, fileName)
licenseActive, err := public.ReadLicensePem(readPath)
if err != nil {
fmt.Println(err.Error())
return
}
fmt.Println(base64.URLEncoding.EncodeToString(licenseActive))
// fmt.Println(string(licenseActive))
}
func ReadCustomKV(productName string) ([]AttrKV, error) {
type Option struct {
XMLName xml.Name `xml:"option"`
Desc string `xml:"desc"`
Key string `xml:"key"`
Value string `xml:"val"`
}
type XMLProduct struct {
XMLName xml.Name `xml:"options"`
Version string `xml:"version,attr"`
Options []Option `xml:"option"`
}
filePath := filepath.Join(DataDir, strings.Join([]string{productName, ".xml"}, ""))
if public.Exists(filePath) {
var (
attr = XMLProduct{}
attrKV []AttrKV
)
fd, err := os.OpenFile(filePath, os.O_RDONLY, 0644)
if err != nil {
return nil, err
}
defer fd.Close()
attrKVBytes, err := ioutil.ReadAll(fd)
if err != nil {
return nil, err
}
err = xml.Unmarshal(attrKVBytes, &attr)
if err != nil {
return nil, err
}
// fmt.Printf("%s特性选择:\n", productExplan)
for i := 0; i < len(attr.Options); i++ {
// fmt.Println(i+1, attr.Options[i].Desc, attr.Options[i].Key, attr.Options[i].Value)
attrKV = append(attrKV, AttrKV{Desc: attr.Options[i].Desc, Key: attr.Options[i].Key, Val: attr.Options[i].Value})
}
// fmt.Println("请输入数字序号,以分号间隔:")
return attrKV, nil
} else {
fd, err := os.OpenFile(filePath, os.O_RDWR|os.O_CREATE, 0644)
if err != nil {
return nil, err
}
defer fd.Close()
attr := &XMLProduct{
Version: "1",
}
attr.Options = append(attr.Options, Option{
Desc: "示例配置",
Key: "key1",
Value: "val1",
})
output, err := xml.MarshalIndent(attr, "", " ")
if err != nil {
fmt.Println(err)
return nil, err
}
_, err = fd.Write([]byte(xml.Header))
if err != nil {
return nil, err
}
_, err = fd.Write(output)
if err != nil {
return nil, err
}
}
return nil, nil
}
func SelectCustomKV(productExplan string, kv []AttrKV) ([]AttrKV, error) {
var (
arrayIdx []int
kvs []AttrKV
)
if kv != nil {
fmt.Printf("%s启用配置选择(请输入数字序号,以逗号间隔,跳过按回车):\n", productExplan)
for i := 0; i < len(kv); i++ {
fmt.Println(i+1, kv[i].Desc)
}
input, err := inputReader.ReadString('\n')
if err != nil {
return nil, err
}
defer inputReader.Reset(os.Stdin)
inputString := strings.TrimSpace(input)
if inputString != "" {
if strings.HasPrefix(inputString, "q") {
os.Exit(0)
}
arrayIndx := strings.Split(inputString, ",")
for i := 0; i < len(arrayIndx); i++ {
num := strings.TrimSpace(arrayIndx[i])
if num == "" {
continue
}
idx, err := strconv.Atoi(num)
if err != nil {
return nil, err
}
if idx <= 0 || idx > len(kv) {
return nil, fmt.Errorf("输入不能小于等于0或大于%d", len(kv))
}
arrayIdx = append(arrayIdx, idx)
}
arrayIdx = public.RemoveDuplicate(arrayIdx)
fmt.Printf("\n你选择的是%v,启用的配置是:\n", arrayIdx)
for _, indx := range arrayIdx {
fmt.Printf("%d %s\n", indx, kv[indx-1].Desc)
kvs = append(kvs, kv[indx-1])
}
fmt.Println()
}
return kvs, nil
}
fmt.Println()
return nil, nil
}
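// IsQuit waits for one more line of input, exits the process if it starts
// with "q", and returns false only when reading stdin fails.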
func IsQuit() bool {
input, err := inputReader.ReadString('\n')
if err != nil {
return false
}
defer inputReader.Reset(os.Stdin)
inputString := strings.TrimSpace(input)
if inputString != "" {
if strings.HasPrefix(inputString, "q") {
os.Exit(0)
}
}
return true
}
func init() {
flag.StringVar(&PriKeyFilePath, "prikey", "", "prikey.pem file path")
flag.Usage = usage
}
func usage() {
fmt.Println("input 'quit' or 'q' to exit the program")
fmt.Println("input '-prikey' set prikey.pem file path,for example -prikey=keys/v1/prikey.pem")
fmt.Println(public.GetAppInfo())
}
func LoadConfig() ([]*Products, error) {
var (
productList = []*Products{}
)
isExist := public.Exists(DataDir)
if isExist == false && DataDir != "." {
err := os.MkdirAll(DataDir, os.ModePerm)
if err != nil {
return nil, err
}
}
filePath := filepath.Join(DataDir, "products.json")
if public.Exists(filePath) { //文件存在
fd, err := os.OpenFile(filePath, os.O_RDONLY, 0644)
if err != nil {
return nil, err
}
defer fd.Close()
configBytes, err := ioutil.ReadAll(fd)
if err != nil {
return nil, err
}
err = json.Unmarshal(configBytes, &productList)
if err != nil {
return nil, err
}
} else {
encByte, err := json.Marshal(products)
if err != nil {
return nil, err
}
err = ioutil.WriteFile(filePath, encByte, 0644)
if err != nil {
return nil, err
}
productList = products
}
return productList, nil
}
func main() {
var (
err error
productName string
productExplan string
expiresAt int64
machineID string
kv []AttrKV
)
flag.Parse()
//load config file
products, err = LoadConfig()
if err != nil {
fmt.Println(err.Error())
return
}
inputReader = bufio.NewReader(os.Stdin)
for {
productName, productExplan, err = SelectProduct()
if err != nil {
fmt.Println(err.Error())
continue
}
if productName == "" {
continue
}
break
}
attrKV, err := ReadCustomKV(productName)
if err != nil {
fmt.Println(err.Error())
return
}
for {
if len(attrKV) == 1 && attrKV[0].Desc == "示例配置" {
break
}
kv, err = SelectCustomKV(productExplan, attrKV)
if err != nil {
fmt.Println(err.Error())
continue
}
break
}
for {
machineID, err = InputMachineID()
if err != nil {
fmt.Println(err.Error())
continue
}
if machineID == "" {
continue
}
break
}
for {
expiresAt, err = InputExpiresTime()
if err != nil {
fmt.Println(err.Error())
continue
}
if expiresAt <= 0 {
continue
}
break
}
// alg, err := public.GetNonEquAlgorthm([]byte(public.ECDSA_PRIVATE), []byte(public.ECDSA_PUBLICKEY))
var (
alg *public.NonEquAlgorthm
)
if PriKeyFilePath == "" {
alg, err = public.GetNonEquAlgorthm([]byte(public.ECDSA_PRIVATE), []byte(public.ECDSA_PUBLICKEY))
if err != nil {
fmt.Println(err.Error())
return
}
} else {
priBytes, err := public.LoadKey(PriKeyFilePath)
if err != nil {
fmt.Println(err.Error())
return
}
alg, err = public.GetNonEquAlgorthm(priBytes, nil)
if err != nil {
fmt.Println(err.Error())
return
}
}
duration := time.Duration(expiresAt) * time.Second
//定义License HEAD KV
uuid := public.GetUUID()
expiresTime := time.Now().Add(duration)
customKV := map[string]string{LicenseID: uuid, ProductName: productName, EndTime: expiresTime.Format(time.RFC3339)}
for _, v := range kv {
if _, ok := kvMap[v.Key]; ok {
fmt.Printf("模板定义字段%s与系统定义字段冲突\n", v.Key)
return
}
customKV[v.Key] = v.Val
}
//构造license结构
licenseIns := public.GenerateLicense(uuid, productName, machineID, expiresTime.Unix(), customKV)
enCodeBytes, err := licenseIns.ToBytes()
if err != nil {
fmt.Println(err.Error())
return
}
//签名license
licenseString, err := alg.SignedBytes(enCodeBytes)
if err != nil {
fmt.Println(err.Error())
return
}
dir := filepath.Join(DataDir, "db")
fileName := strings.Join([]string{"license", licenseIns.LicenseUUID, "dat"}, ".")
err = public.SaveLicensePem(dir, fileName, licenseString, customKV)
if err != nil {
fmt.Println(err.Error())
return
}
ShowActiveCode(dir, fileName, licenseIns.LicenseUUID)
for {
IsQuit()
}
}
// scope-auth.go
package middleware
import (
"fmt"
"net/http"
"strings"
"time"
"github.com/gin-gonic/gin"
"github.com/satori/go.uuid"
cobxtypes "github.com/jiarung/mochi/apps/exchange/cobx-types"
"github.com/jiarung/mochi/cache"
"github.com/jiarung/mochi/cache/helper"
"github.com/jiarung/mochi/cache/keys"
apicontext "github.com/jiarung/mochi/common/api/context"
apierrors "github.com/jiarung/mochi/common/api/errors"
jwtFactory "github.com/jiarung/mochi/common/jwt"
"github.com/jiarung/mochi/common/logging"
"github.com/jiarung/mochi/common/scope-auth"
"github.com/jiarung/mochi/database"
"github.com/jiarung/mochi/gcp/kms"
"github.com/jiarung/mochi/infra/api/middleware/logger"
models "github.com/jiarung/mochi/models/exchange"
"github.com/jiarung/mochi/types"
)
const tokensWriteTimeout = 1 << 3
// OAuth2AccessTokenType is the type of token issued by our OAuth2 server.
var OAuth2AccessTokenType = "Bearer"
// ScopeAuth return a middleware that validates the scopes of each endpoints
// from `scopeMap` and JWT from users if needed. This middleware should be placed
// - AFTER `AppContextMiddleware` and `ErrorHandler`
// - BEFORE all the handler functions
//
// - Check if JWT exists
// - Get scopes of the endpoint from `scopeMap`
// - if no JWT exists
// - Proceed only if requiring public scope
// - else
// - Proceed if JWT is valid and one of the followings:
// - `validScopes` contains `ScopePublic`
// - `userScopes` contains at least one valid scope
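//
// Example wiring (illustrative sketch only; the router setup and the concrete
// service-name value are assumptions, not taken from this package):
//
//	router := gin.New()
//	// AppContextMiddleware and ErrorHandler must already be installed here.
//	router.Use(ScopeAuth(cobxtypes.ServiceName("exchange-api")))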
func ScopeAuth(service cobxtypes.ServiceName, opt ...interface{}) gin.HandlerFunc {
store := jwtFactory.NewAPIKeySecret()
return func(ctx *gin.Context) {
// Always allow OPTIONS to pass through.
if ctx.Request.Method == http.MethodOptions {
return
}
appCtx, err := apicontext.GetAppContext(ctx)
if err != nil {
logger := logger.Get(ctx)
logger.Error("Fail to obtain AppContext Error: %v", err)
ctx.Abort()
return
}
logger := appCtx.Logger()
logger.SetLabel(logging.LabelApp, "scope-auth:middleware")
defer logger.DeleteLabel(logging.LabelApp)
var jwtStr string
var jwtExists bool
if len(strings.Split(ctx.GetHeader("Authorization"), ".")) == 4 {
// API token
vErr := validateAPIToken(appCtx, store,
ctx.GetHeader("Authorization"))
if vErr != nil {
logger.Error("api token is invalid: %v", vErr)
} else {
jwtExists = true
}
} else if len(strings.Split(ctx.GetHeader("Authorization"), " ")) == 2 {
// OAuth2 token
authParts := strings.SplitN(ctx.GetHeader("Authorization"), " ", 2)
authType, authCredentials := authParts[0], authParts[1]
if authType == OAuth2AccessTokenType && len(authCredentials) > 0 {
vErr := validateOAuth2Token(appCtx, authCredentials)
if vErr != nil {
logger.Error("OAuth2 access token is invalid: %v", vErr)
} else {
jwtExists = true
}
}
} else {
// Access token: get the authorization from the header or cookie.
jwtStr, jwtExists = extractJWT(ctx, logger)
}
// Get scopes of endpoint.
validScopes, err := scopeauth.GetScopes(service,
strings.ToUpper(ctx.Request.Method), ctx.Request.URL.Path)
if err != nil {
logger.Info("can't find scopes of [%s] [%s] %s. err(%s)",
service, ctx.Request.Method, ctx.Request.URL.Path, err)
appCtx.SetError(apierrors.ResourceNotFound)
return
}
if len(validScopes) == 0 {
logger.Error("empty scopes of [%s] %s.%s.",
ctx.Request.Method, service, ctx.Request.URL.Path)
ctx.Abort()
return
}
appCtx.RequiredScopes = validScopes
logger.Debug("required scopes: %s", validScopes)
validScopeMap := make(map[types.Scope]struct{})
hasPublicScope := false
for _, s := range validScopes {
if s == types.ScopePublic {
hasPublicScope = true
}
validScopeMap[s] = struct{}{}
}
// No JWT.
if !jwtExists {
if hasPublicScope {
ctx.Next()
} else {
logger.Error("authentication failed with no JWT found")
appCtx.SetError(apierrors.AuthenticationError)
}
return
}
if !appCtx.IsAPIToken() && !appCtx.IsOAuth2Token() {
skipIPCheck := false
if len(opt) > 0 {
skipIPCheck = opt[0].(bool)
}
// With JWT, validation
userID, deviceAuthorizationID, accessTokenID, userScopes,
devicePlatform, err := authenticateJWT(jwtStr, appCtx.RequestIP,
skipIPCheck, appCtx.ServiceName)
if err != nil {
if hasPublicScope {
ctx.Next()
} else {
logger.Error("authentication failed with invalid JWT. err: %+v", err)
appCtx.SetError(apierrors.AuthenticationError)
}
return
}
logger.SetLabel(logging.LabelUserDeviceID, deviceAuthorizationID.String())
appCtx.Platform = devicePlatform
appCtx.UserID = userID
appCtx.AccessTokenID = accessTokenID
appCtx.DeviceAuthorizationID = deviceAuthorizationID
appCtx.UserAuthorizationScopes = userScopes
}
logger.SetLabel(logging.LabelUserID, appCtx.UserID.String())
logger.Debug("user scopes: %s", appCtx.UserAuthorizationScopes)
if hasPublicScope {
ctx.Next()
return
}
// Validate scopes.
for _, s := range appCtx.UserAuthorizationScopes {
if _, ok := validScopeMap[s]; ok {
// user scope is in valid scope map, proceed
ctx.Next()
return
}
}
// User scope is not in valid, abort
logger.Error("unauthorized scopes. user scopes [%s]. required scopes[%s]",
appCtx.UserAuthorizationScopes, validScopes)
appCtx.SetError(apierrors.UnauthorizedScope)
}
}
func validateAPIToken(appCtx *apicontext.AppContext,
store *jwtFactory.APIKeySecret, token string) (err error) {
err = jwtFactory.ValidateCOBSecret(token, store)
if err != nil {
return
}
claimMap, err := jwtFactory.ParseJWTPayload(token)
if err != nil {
return
}
// check scope
scopesFromClaim, sExist := claimMap["scope"].([]interface{})
if !sExist {
err = fmt.Errorf("scope not exist")
return
}
userScopes := make([]types.Scope, 0)
for _, scope := range scopesFromClaim {
str, ok := scope.(string)
if ok {
s := types.Scope(str)
userScopes = append(userScopes, s)
}
}
// check user id
userIDStr := claimMap["user_id"].(string)
userID, err := uuid.FromString(userIDStr)
if err != nil {
return
}
// check api token
apiTokenIDStr, exist := claimMap["api_token_id"].(string)
if !exist {
// it should panic if api_token_id is not present in the payload
panic(fmt.Errorf("api_token_id not exist"))
}
apiTokenID, err := uuid.FromString(apiTokenIDStr)
if err != nil {
return
}
// create shared key
apiTokenKey := keys.GetAPITokenKeyByUserStr(userIDStr)
// check secret
secret, err := appCtx.Cache.GetFieldOfMap(apiTokenKey, keys.APITokenSecretKey)
if err != nil {
errorCode := cache.ParseCacheErrorCode(err)
if errorCode != cache.ErrNilKey {
return
}
// lazy loading
var client *kms.Client
client, err = kms.NewDefaultClient(appCtx, kms.KeyAPIToken)
if err != nil {
return
}
// set secret
apiSecret := models.APISecret{}
err = appCtx.DB.Where("user_id = ?", userID).First(&apiSecret).Error
if err != nil {
return
}
var secretBytes []byte
secretBytes, err = client.Decrypt(apiSecret.Secret)
if err != nil {
return
}
secret = string(secretBytes)
err = appCtx.Cache.SetFieldOfMap(apiTokenKey, "secret", secret)
if err != nil {
return
}
}
// validate the token with the secret
partialToken := strings.Join(strings.Split(token, ".")[:3], ".")
_, _, err = jwtFactory.BuildWithSecret(jwtFactory.APITokenObj{}, secret).
Validate(partialToken, appCtx.ServiceName)
if err != nil {
return
}
// check cached data and lazy loading with db query
_, err = appCtx.Cache.GetFieldOfMap(apiTokenKey, apiTokenIDStr)
if err != nil {
errorCode := cache.ParseCacheErrorCode(err)
if errorCode != cache.ErrNilKey {
return
}
lockKey := apiTokenKey + "_lock"
// lock while updating tokens
appCtx.Cache.Lock(lockKey, appCtx.RequestTag(), tokensWriteTimeout)
defer appCtx.Cache.UnLock(lockKey, appCtx.RequestTag())
// set api tokens
apiTokens := []models.APIToken{}
err = appCtx.DB.Where("user_id = ? AND revoked_at IS NULL",
userID).Find(&apiTokens).Error
if err != nil {
return
}
var isTokenExisting bool
for _, at := range apiTokens {
id := at.ID.String()
err = appCtx.Cache.SetFieldOfMap(apiTokenKey, id, "")
if err != nil {
return
}
if !isTokenExisting {
isTokenExisting = id == apiTokenIDStr
}
}
if !isTokenExisting {
err = fmt.Errorf("user<%s> api token<%s> error: %v",
userIDStr, apiTokenIDStr, err)
return
}
}
appCtx.UserAuthorizationScopes = userScopes
appCtx.UserID = &userID
appCtx.APITokenID = &apiTokenID
appCtx.Logger().SetLabel(logging.LabelAuthMethod, "api_token")
return nil
}
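// validateOAuth2Token validates a Bearer access token: it checks the JWT
// claims, confirms the token has not been revoked (via the cache, falling back
// to the database), and copies the granted scopes onto the request context.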
func validateOAuth2Token(appCtx *apicontext.AppContext, token string) error {
claims, _, err := jwtFactory.Build(
jwtFactory.OAuth2AccessTokenObj{}).Validate(token, appCtx.ServiceName)
if err != nil {
return err
}
oauth2AccessTokenIDStr, found := claims["oauth2_access_token_id"].(string)
if !found {
return fmt.Errorf("oauth2_access_token_id could not be found")
}
oauth2AccessTokenID, err := uuid.FromString(oauth2AccessTokenIDStr)
if err != nil {
return err
}
clientIDStr, found := claims["client_id"].(string)
if !found {
return fmt.Errorf("client_id could not be found")
}
clientID, err := uuid.FromString(clientIDStr)
if err != nil {
return err
}
userIDStr, found := claims["user_id"].(string)
if !found {
return fmt.Errorf("user_id could not be found")
}
userID, err := uuid.FromString(userIDStr)
if err != nil {
return err
}
scopes, found := claims["scope"].([]interface{})
if !found {
return fmt.Errorf("scope could not be found")
}
oauth2TokenKey := "oauth2_access_token:" + oauth2AccessTokenIDStr
found, err = appCtx.Cache.Exist(oauth2TokenKey)
if err != nil {
return err
}
if !found {
oauth2Token := models.OAuth2Token{}
result := appCtx.DB.Where("id = ? AND type = ? AND revoked_at IS NULL",
oauth2AccessTokenID, types.OAuth2AccessToken).First(&oauth2Token)
if result.Error != nil {
return result.Error
}
expireSec := int(oauth2Token.ExpireAt.Sub(time.Now()) / time.Second)
appCtx.Cache.Set(oauth2TokenKey, 1, expireSec)
}
userScopes := make([]types.Scope, 0)
for _, scope := range scopes {
scopeStr, ok := scope.(string)
if ok {
userScopes = append(userScopes, types.Scope(scopeStr))
}
}
appCtx.UserAuthorizationScopes = userScopes
appCtx.UserID = &userID
appCtx.OAuth2TokenID = &oauth2AccessTokenID
appCtx.OAuth2ClientID = clientID
appCtx.Logger().SetLabel(logging.LabelAuthMethod, "oauth2_access_token")
return nil
}
func extractJWT(ctx *gin.Context, logger logging.Logger) (jwtString string, exists bool) {
var jwtStr string
jwtStr = ctx.GetHeader("Authorization")
if len(jwtStr) != 0 {
jwtString = jwtStr
exists = true
return
}
var err error
jwtStr, err = ctx.Cookie("Authorization")
if err == nil && len(jwtStr) != 0 {
jwtString = jwtStr
exists = true
return
}
jwtString = ""
exists = false
return
}
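// authenticateJWT validates an access-token JWT, loads the cached token
// payload, optionally enforces that web requests come from the IP the token
// was issued to (revoking the token on mismatch), and maps the user's roles
// to authorization scopes.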
func authenticateJWT(jwtStr string, requestIP string, skipIPCheck bool,
serviceName cobxtypes.ServiceName) (userID *uuid.UUID,
deviceAuthorizationID *uuid.UUID, accessTokenID *uuid.UUID,
userScopes []types.Scope, devicePlatform types.DevicePlatform, err error) {
claims, isExpired, delErr := jwtFactory.Build(jwtFactory.AccessTokenObj{}).
Validate(jwtStr, serviceName)
if delErr != nil || isExpired {
userID = nil
deviceAuthorizationID = nil
accessTokenID = nil
userScopes = nil
if isExpired {
err = fmt.Errorf("token <%v> expired", accessTokenID)
} else {
err = delErr
}
return
}
userIDFromClaim, uExist := claims["user_id"].(string)
accessTokenIDFromClaim, aExist := claims["access_token_id"].(string)
devAuthIDFromClaim, dExist := claims["device_authorization_id"].(string)
platformFromClaim, pExist := claims["platform"].(string)
if !uExist || !aExist || !dExist || !pExist {
userID = nil
deviceAuthorizationID = nil
accessTokenID = nil
userScopes = nil
err = fmt.Errorf(
"invalid claims [user_id: %v, access_token_id: %v,"+
" device_authorization_id: %v, platform: %v]",
uExist, aExist, dExist, pExist,
)
return
}
accessTokenPayload := &helper.AccessTokenPayload{}
if redisErr := accessTokenPayload.Get(
database.GetDB(database.Default), accessTokenIDFromClaim); redisErr != nil {
userID = nil
deviceAuthorizationID = nil
accessTokenID = nil
userScopes = nil
err = fmt.Errorf(
"can't validate token on cache with key: <token:%v>. Err: %+v",
accessTokenIDFromClaim, redisErr,
)
return
}
if !skipIPCheck && platformFromClaim == "Web" &&
requestIP != accessTokenPayload.IP {
delErr = cache.GetRedis().Delete(
keys.GetAccessTokenCacheKey(accessTokenIDFromClaim))
if delErr != nil {
userID = nil
deviceAuthorizationID = nil
accessTokenID = nil
userScopes = nil
err = fmt.Errorf("Delete token with key: <token:%v>. Err: %+v",
accessTokenIDFromClaim, delErr)
return
}
result := database.GetDB(database.Default).Model(models.AccessToken{}).Where("id = ?",
accessTokenIDFromClaim).Update("revoked_at", time.Now())
if result.Error != nil || result.RowsAffected != 1 {
err = fmt.Errorf("revoke access token <%v> error: %v",
accessTokenIDFromClaim, result.Error)
return
}
userID = nil
deviceAuthorizationID = nil
accessTokenID = nil
userScopes = nil
err = fmt.Errorf("JWT IP is different from request IP. JWT IP (%s). RequestIP (%s)",
accessTokenPayload.IP, requestIP)
return
}
userIDValue := uuid.FromStringOrNil(userIDFromClaim)
userID = &userIDValue
deviceAuthorizationIDValue := uuid.FromStringOrNil(devAuthIDFromClaim)
deviceAuthorizationID = &deviceAuthorizationIDValue
accessTokenIDValue := uuid.FromStringOrNil(accessTokenIDFromClaim)
accessTokenID = &accessTokenIDValue
for _, r := range accessTokenPayload.Roles {
userScopes = append(userScopes, types.GetScopesOfRole(r)...)
}
devicePlatform = types.DevicePlatform(platformFromClaim)
err = nil
return
}
// scope-auth.go
package middleware
import (
"fmt"
"net/http"
"strings"
"time"
"github.com/gin-gonic/gin"
"github.com/satori/go.uuid"
cobxtypes "github.com/jiarung/mochi/apps/exchange/cobx-types"
"github.com/jiarung/mochi/cache"
"github.com/jiarung/mochi/cache/helper"
"github.com/jiarung/mochi/cache/keys"
apicontext "github.com/jiarung/mochi/common/api/context"
apierrors "github.com/jiarung/mochi/common/api/errors"
jwtFactory "github.com/jiarung/mochi/common/jwt"
"github.com/jiarung/mochi/common/logging"
"github.com/jiarung/mochi/common/scope-auth"
"github.com/jiarung/mochi/database"
"github.com/jiarung/mochi/gcp/kms"
"github.com/jiarung/mochi/infra/api/middleware/logger"
models "github.com/jiarung/mochi/models/exchange"
"github.com/jiarung/mochi/types"
)
const tokensWriteTimeout = 1 << 3
// OAuth2AccessTokenType is the type of token issued by our OAuth2 server.
var OAuth2AccessTokenType = "Bearer"
// ScopeAuth return a middleware that validates the scopes of each endpoints
// from `scopeMap` and JWT from users if needed. This middleware should be placed
// - AFTER `AppContextMiddleware` and `ErrorHandler`
// - BEFORE all the handler functions
//
// - Check if JWT exists
// - Get scopes of the endpoint from `scopeMap`
// - if no JWT exists
// - Proceed only if requiring public scope
// - else
// - Proceed if JWT is valid and one of the followings:
// - `validScopes` contains `ScopePublic`
// - `userScopes` contains at least one valid scope
func ScopeAuth(service cobxtypes.ServiceName, opt ...interface{}) gin.HandlerFunc {
store := jwtFactory.NewAPIKeySecret()
return func(ctx *gin.Context) {
// Always allow OPTIONS to pass through.
if ctx.Request.Method == http.MethodOptions {
return
}
appCtx, err := apicontext.GetAppContext(ctx)
if err != nil {
logger := logger.Get(ctx)
logger.Error("Fail to obtain AppContext Error: %v", err)
ctx.Abort()
return
}
logger := appCtx.Logger()
logger.SetLabel(logging.LabelApp, "scope-auth:middleware")
defer logger.DeleteLabel(logging.LabelApp)
var jwtStr string
var jwtExists bool
if len(strings.Split(ctx.GetHeader("Authorization"), ".")) == 4 {
// API token
vErr := validateAPIToken(appCtx, store,
ctx.GetHeader("Authorization"))
if vErr != nil {
logger.Error("api token is invalid: %v", vErr)
} else {
jwtExists = true
}
} else if len(strings.Split(ctx.GetHeader("Authorization"), " ")) == 2 {
// OAuth2 token
authParts := strings.SplitN(ctx.GetHeader("Authorization"), " ", 2)
authType, authCredentials := authParts[0], authParts[1]
if authType == OAuth2AccessTokenType && len(authCredentials) > 0 {
vErr := validateOAuth2Token(appCtx, authCredentials)
if vErr != nil {
logger.Error("OAuth2 access token is invalid: %v", vErr)
} else {
jwtExists = true
}
}
} else {
// Access token: get the authorization from the header or cookie.
jwtStr, jwtExists = extractJWT(ctx, logger)
}
// Get scopes of endpoint.
validScopes, err := scopeauth.GetScopes(service,
strings.ToUpper(ctx.Request.Method), ctx.Request.URL.Path)
if err != nil {
logger.Info("can't find scopes of [%s] [%s] %s. err(%s)",
service, ctx.Request.Method, ctx.Request.URL.Path, err)
appCtx.SetError(apierrors.ResourceNotFound)
return
}
if len(validScopes) == 0 {
logger.Error("empty scopes of [%s] %s.%s.",
ctx.Request.Method, service, ctx.Request.URL.Path)
ctx.Abort()
return
}
appCtx.RequiredScopes = validScopes
logger.Debug("required scopes: %s", validScopes)
validScopeMap := make(map[types.Scope]struct{})
hasPublicScope := false
for _, s := range validScopes {
if s == types.ScopePublic {
hasPublicScope = true
}
validScopeMap[s] = struct{}{}
}
// No JWT.
if !jwtExists {
if hasPublicScope {
ctx.Next()
} else {
logger.Error("authentication failed with no JWT found")
appCtx.SetError(apierrors.AuthenticationError)
}
return
}
if !appCtx.IsAPIToken() && !appCtx.IsOAuth2Token() {
skipIPCheck := false
if len(opt) > 0 {
skipIPCheck = opt[0].(bool)
}
// With JWT, validation
userID, deviceAuthorizationID, accessTokenID, userScopes,
devicePlatform, err := authenticateJWT(jwtStr, appCtx.RequestIP,
skipIPCheck, appCtx.ServiceName)
if err != nil {
if hasPublicScope {
ctx.Next()
} else {
logger.Error("authentication failed with invalid JWT. err: %+v", err)
appCtx.SetError(apierrors.AuthenticationError)
}
return
}
logger.SetLabel(logging.LabelUserDeviceID, deviceAuthorizationID.String())
appCtx.Platform = devicePlatform
appCtx.UserID = userID
appCtx.AccessTokenID = accessTokenID
appCtx.DeviceAuthorizationID = deviceAuthorizationID
appCtx.UserAuthorizationScopes = userScopes
}
logger.SetLabel(logging.LabelUserID, appCtx.UserID.String())
logger.Debug("user scopes: %s", appCtx.UserAuthorizationScopes)
if hasPublicScope {
ctx.Next()
return
}
// Validate scopes.
for _, s := range appCtx.UserAuthorizationScopes {
if _, ok := validScopeMap[s]; ok {
// user scope is in valid scope map, proceed
ctx.Next()
return
}
}
// User scope is not valid, abort
logger.Error("unauthorized scopes. user scopes [%s]. required scopes[%s]",
appCtx.UserAuthorizationScopes, validScopes)
appCtx.SetError(apierrors.UnauthorizedScope)
}
}
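// validateAPIToken checks a 4-segment COB API token: it verifies the extra
// secret segment, parses the JWT claims (scope, user_id, api_token_id), lazily
// loads the per-user API secret and the set of non-revoked token IDs into the
// cache, validates the first three segments against that secret, and populates
// the request context on success.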
func validateAPIToken(appCtx *apicontext.AppContext,
store *jwtFactory.APIKeySecret, token string) (err error) {
err = jwtFactory.ValidateCOBSecret(token, store)
if err != nil {
return
}
claimMap, err := jwtFactory.ParseJWTPayload(token)
if err != nil {
return
}
// check scope
scopesFromClaim, sExist := claimMap["scope"].([]interface{})
if !sExist {
err = fmt.Errorf("scope not exist")
return
}
userScopes := make([]types.Scope, 0)
for _, scope := range scopesFromClaim {
str, ok := scope.(string)
if ok {
s := types.Scope(str)
userScopes = append(userScopes, s)
}
}
// check user id
userIDStr := claimMap["user_id"].(string)
userID, err := uuid.FromString(userIDStr)
if err != nil {
return
}
// check api token
apiTokenIDStr, exist := claimMap["api_token_id"].(string)
if !exist {
// it should panic if api_token_id does not exist in the payload
panic(fmt.Errorf("api_token_id not exist"))
}
apiTokenID, err := uuid.FromString(apiTokenIDStr)
if err != nil {
return
}
// create shared key
apiTokenKey := keys.GetAPITokenKeyByUserStr(userIDStr)
// check secret
secret, err := appCtx.Cache.GetFieldOfMap(apiTokenKey, keys.APITokenSecretKey)
if err != nil {
errorCode := cache.ParseCacheErrorCode(err)
if errorCode != cache.ErrNilKey {
return
}
// lazy loading
var client *kms.Client
client, err = kms.NewDefaultClient(appCtx, kms.KeyAPIToken)
if err != nil {
return
}
// set secret
apiSecret := models.APISecret{}
err = appCtx.DB.Where("user_id = ?", userID).First(&apiSecret).Error
if err != nil {
return
}
var secretBytes []byte
secretBytes, err = client.Decrypt(apiSecret.Secret)
if err != nil {
return
}
secret = string(secretBytes)
err = appCtx.Cache.SetFieldOfMap(apiTokenKey, "secret", secret)
if err != nil {
return
}
}
// validate token with secret
partialToken := strings.Join(strings.Split(token, ".")[:3], ".")
_, _, err = jwtFactory.BuildWithSecret(jwtFactory.APITokenObj{}, secret).
Validate(partialToken, appCtx.ServiceName)
if err != nil {
return
}
// check cached data and lazy loading with db query
_, err = appCtx.Cache.GetFieldOfMap(apiTokenKey, apiTokenIDStr)
if err != nil {
errorCode := cache.ParseCacheErrorCode(err)
if errorCode != cache.ErrNilKey {
return
}
lockKey := apiTokenKey + "_lock"
// lock while updating tokens
appCtx.Cache.Lock(lockKey, appCtx.RequestTag(), tokensWriteTimeout)
defer appCtx.Cache.UnLock(lockKey, appCtx.RequestTag())
// set api tokens
apiTokens := []models.APIToken{}
err = appCtx.DB.Where("user_id = ? AND revoked_at IS NULL",
userID).Find(&apiTokens).Error
if err != nil {
return
}
var isTokenExisting bool
for _, at := range apiTokens {
id := at.ID.String()
err = appCtx.Cache.SetFieldOfMap(apiTokenKey, id, "")
if err != nil {
return
}
if !isTokenExisting {
isTokenExisting = id == apiTokenIDStr
}
}
if !isTokenExisting {
err = fmt.Errorf("user<%s> api token<%s> error: %v",
userIDStr, apiTokenIDStr, err)
return
}
}
appCtx.UserAuthorizationScopes = userScopes
appCtx.UserID = &userID
appCtx.APITokenID = &apiTokenID
appCtx.Logger().SetLabel(logging.LabelAuthMethod, "api_token")
return nil
}
func validateOAuth2Token(appCtx *apicontext.AppContext, token string) error {
claims, _, err := jwtFactory.Build(
jwtFactory.OAuth2AccessTokenObj{}).Validate(token, appCtx.ServiceName)
if err != nil {
return err
}
oauth2AccessTokenIDStr, found := claims["oauth2_access_token_id"].(string)
if !found {
return fmt.Errorf("oauth2_access_token_id could not be found")
}
oauth2AccessTokenID, err := uuid.FromString(oauth2AccessTokenIDStr)
if err != nil {
return err
}
clientIDStr, found := claims["client_id"].(string)
if !found {
return fmt.Errorf("client_id could not be found")
}
clientID, err := uuid.FromString(clientIDStr)
if err != nil {
return err
}
userIDStr, found := claims["user_id"].(string)
if !found {
return fmt.Errorf("user_id could not be found")
}
userID, err := uuid.FromString(userIDStr)
if err != nil {
return err
}
scopes, found := claims["scope"].([]interface{})
if !found {
return fmt.Errorf("scope could not be found")
}
oauth2TokenKey := "oauth2_access_token:" + oauth2AccessTokenIDStr
found, err = appCtx.Cache.Exist(oauth2TokenKey)
if err != nil {
return err
}
if !found {
oauth2Token := models.OAuth2Token{}
result := appCtx.DB.Where("id = ? AND type = ? AND revoked_at IS NULL",
oauth2AccessTokenID, types.OAuth2AccessToken).First(&oauth2Token)
if result.Error != nil {
return result.Error
}
expireSec := int(oauth2Token.ExpireAt.Sub(time.Now()) / time.Second)
appCtx.Cache.Set(oauth2TokenKey, 1, expireSec)
}
userScopes := make([]types.Scope, 0)
for _, scope := range scopes {
scopeStr, ok := scope.(string)
if ok {
userScopes = append(userScopes, types.Scope(scopeStr))
}
}
appCtx.UserAuthorizationScopes = userScopes
appCtx.UserID = &userID
appCtx.OAuth2TokenID = &oauth2AccessTokenID
appCtx.OAuth2ClientID = clientID
appCtx.Logger().SetLabel(logging.LabelAuthMethod, "oauth2_access_token")
return nil
}
func | (ctx *gin.Context, logger logging.Logger) (jwtString string, exists bool) {
var jwtStr string
jwtStr = ctx.GetHeader("Authorization")
if len(jwtStr) != 0 {
jwtString = jwtStr
exists = true
return
}
var err error
jwtStr, err = ctx.Cookie("Authorization")
if err == nil && len(jwtStr) != 0 {
jwtString = jwtStr
exists = true
return
}
jwtString = ""
exists = false
return
}
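// authenticateJWT validates an access-token JWT for the given service, loads
// the cached access-token payload, and, for Web tokens (unless skipIPCheck is
// set), revokes the token when the request IP differs from the cached IP. On
// success it returns the user, device-authorization and access-token IDs, the
// scopes derived from the cached roles, and the device platform.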
func authenticateJWT(jwtStr string, requestIP string, skipIPCheck bool,
serviceName cobxtypes.ServiceName) (userID *uuid.UUID,
deviceAuthorizationID *uuid.UUID, accessTokenID *uuid.UUID,
userScopes []types.Scope, devicePlatform types.DevicePlatform, err error) {
claims, isExpired, delErr := jwtFactory.Build(jwtFactory.AccessTokenObj{}).
Validate(jwtStr, serviceName)
if delErr != nil || isExpired {
userID = nil
deviceAuthorizationID = nil
accessTokenID = nil
userScopes = nil
if isExpired {
err = fmt.Errorf("token <%v> expired", accessTokenID)
} else {
err = delErr
}
return
}
userIDFromClaim, uExist := claims["user_id"].(string)
accessTokenIDFromClaim, aExist := claims["access_token_id"].(string)
devAuthIDFromClaim, dExist := claims["device_authorization_id"].(string)
platformFromClaim, pExist := claims["platform"].(string)
if !uExist || !aExist || !dExist || !pExist {
userID = nil
deviceAuthorizationID = nil
accessTokenID = nil
userScopes = nil
err = fmt.Errorf(
"invalid claims [user_id: %v, access_token_id: %v,"+
" device_authorization_id: %v, platform: %v]",
uExist, aExist, dExist, pExist,
)
return
}
accessTokenPayload := &helper.AccessTokenPayload{}
if redisErr := accessTokenPayload.Get(
database.GetDB(database.Default), accessTokenIDFromClaim); redisErr != nil {
userID = nil
deviceAuthorizationID = nil
accessTokenID = nil
userScopes = nil
err = fmt.Errorf(
"can't validate token on cache with key: <token:%v>. Err: %+v",
accessTokenIDFromClaim, redisErr,
)
return
}
if !skipIPCheck && platformFromClaim == "Web" &&
requestIP != accessTokenPayload.IP {
delErr = cache.GetRedis().Delete(
keys.GetAccessTokenCacheKey(accessTokenIDFromClaim))
if delErr != nil {
userID = nil
deviceAuthorizationID = nil
accessTokenID = nil
userScopes = nil
err = fmt.Errorf("Delete token with key: <token:%v>. Err: %+v",
accessTokenIDFromClaim, delErr)
return
}
result := database.GetDB(database.Default).Model(models.AccessToken{}).Where("id = ?",
accessTokenIDFromClaim).Update("revoked_at", time.Now())
if result.Error != nil || result.RowsAffected != 1 {
err = fmt.Errorf("revoke access token <%v> error: %v",
accessTokenIDFromClaim, result.Error)
return
}
userID = nil
deviceAuthorizationID = nil
accessTokenID = nil
userScopes = nil
err = fmt.Errorf("JWT IP is different from request IP. JWT IP (%s). RequestIP (%s)",
accessTokenPayload.IP, requestIP)
return
}
userIDValue := uuid.FromStringOrNil(userIDFromClaim)
userID = &userIDValue
deviceAuthorizationIDValue := uuid.FromStringOrNil(devAuthIDFromClaim)
deviceAuthorizationID = &deviceAuthorizationIDValue
accessTokenIDValue := uuid.FromStringOrNil(accessTokenIDFromClaim)
accessTokenID = &accessTokenIDValue
for _, r := range accessTokenPayload.Roles {
userScopes = append(userScopes, types.GetScopesOfRole(r)...)
}
devicePlatform = types.DevicePlatform(platformFromClaim)
err = nil
return
}
| extractJWT | identifier_name |
scope-auth.go | package middleware
import (
"fmt"
"net/http"
"strings"
"time"
"github.com/gin-gonic/gin"
"github.com/satori/go.uuid"
cobxtypes "github.com/jiarung/mochi/apps/exchange/cobx-types"
"github.com/jiarung/mochi/cache"
"github.com/jiarung/mochi/cache/helper"
"github.com/jiarung/mochi/cache/keys"
apicontext "github.com/jiarung/mochi/common/api/context"
apierrors "github.com/jiarung/mochi/common/api/errors"
jwtFactory "github.com/jiarung/mochi/common/jwt"
"github.com/jiarung/mochi/common/logging"
"github.com/jiarung/mochi/common/scope-auth"
"github.com/jiarung/mochi/database"
"github.com/jiarung/mochi/gcp/kms"
"github.com/jiarung/mochi/infra/api/middleware/logger"
models "github.com/jiarung/mochi/models/exchange"
"github.com/jiarung/mochi/types"
)
const tokensWriteTimeout = 1 << 3
// OAuth2AccessTokenType is the type of token issued by our OAuth2 server.
var OAuth2AccessTokenType = "Bearer"
// ScopeAuth returns a middleware that validates the scopes of each endpoint
// from `scopeMap` and the user's JWT if needed. This middleware should be placed
// - AFTER `AppContextMiddleware` and `ErrorHandler`
// - BEFORE all the handler functions
//
// - Check if JWT exists
// - Get scopes of the endpoint from `scopeMap`
// - if no JWT exists
// - Proceed only if requiring public scope
// - else
// - Proceed if JWT is valid and one of the following holds:
// - `validScopes` contains `ScopePublic`
// - `userScopes` contains at least one valid scope
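//
// Example registration (hypothetical router setup and service name, meant only
// as a sketch of how the middleware is mounted):
//
//	r := gin.New()
//	// the optional bool argument skips the Web IP check on access tokens
//	r.Use(ScopeAuth(cobxtypes.ServiceName("exchange-api"), false))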
func ScopeAuth(service cobxtypes.ServiceName, opt ...interface{}) gin.HandlerFunc {
store := jwtFactory.NewAPIKeySecret()
return func(ctx *gin.Context) {
// Always allow OPTIONS to pass through.
if ctx.Request.Method == http.MethodOptions {
return
}
appCtx, err := apicontext.GetAppContext(ctx)
if err != nil {
logger := logger.Get(ctx)
logger.Error("Fail to obtain AppContext Error: %v", err)
ctx.Abort()
return
}
logger := appCtx.Logger()
logger.SetLabel(logging.LabelApp, "scope-auth:middleware")
defer logger.DeleteLabel(logging.LabelApp)
var jwtStr string
var jwtExists bool
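// Pick a validation path from the shape of the Authorization value: four
// dot-separated segments are treated as a COB API token (a JWT plus an extra
// secret segment), "Bearer <token>" as an OAuth2 access token, and anything
// else as a plain access-token JWT taken from the header or cookie.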
if len(strings.Split(ctx.GetHeader("Authorization"), ".")) == 4 {
// API token
vErr := validateAPIToken(appCtx, store,
ctx.GetHeader("Authorization"))
if vErr != nil {
logger.Error("api token is invalid: %v", vErr)
} else {
jwtExists = true
}
} else if len(strings.Split(ctx.GetHeader("Authorization"), " ")) == 2 {
// OAuth2 token
authParts := strings.SplitN(ctx.GetHeader("Authorization"), " ", 2)
authType, authCredentials := authParts[0], authParts[1]
if authType == OAuth2AccessTokenType && len(authCredentials) > 0 {
vErr := validateOAuth2Token(appCtx, authCredentials)
if vErr != nil {
logger.Error("OAuth2 access token is invalid: %v", vErr)
} else {
jwtExists = true
}
}
} else {
// Access token: Get authorization from header or cookie.
jwtStr, jwtExists = extractJWT(ctx, logger)
}
// Get scopes of endpoint.
validScopes, err := scopeauth.GetScopes(service,
strings.ToUpper(ctx.Request.Method), ctx.Request.URL.Path)
if err != nil {
logger.Info("can't find scopes of [%s] [%s] %s. err(%s)",
service, ctx.Request.Method, ctx.Request.URL.Path, err)
appCtx.SetError(apierrors.ResourceNotFound)
return
}
if len(validScopes) == 0 {
logger.Error("empty scopes of [%s] %s.%s.",
ctx.Request.Method, service, ctx.Request.URL.Path)
ctx.Abort()
return
}
appCtx.RequiredScopes = validScopes
logger.Debug("required scopes: %s", validScopes)
validScopeMap := make(map[types.Scope]struct{})
hasPublicScope := false
for _, s := range validScopes {
if s == types.ScopePublic {
hasPublicScope = true
}
validScopeMap[s] = struct{}{}
}
// No JWT.
if !jwtExists {
if hasPublicScope {
ctx.Next()
} else {
logger.Error("authentication failed with no JWT found")
appCtx.SetError(apierrors.AuthenticationError)
}
return
}
if !appCtx.IsAPIToken() && !appCtx.IsOAuth2Token() {
skipIPCheck := false
if len(opt) > 0 {
skipIPCheck = opt[0].(bool)
}
// With JWT, validation
userID, deviceAuthorizationID, accessTokenID, userScopes,
devicePlatform, err := authenticateJWT(jwtStr, appCtx.RequestIP,
skipIPCheck, appCtx.ServiceName)
if err != nil {
if hasPublicScope { | logger.Error("authentication failed with invalid JWT. err: %+v", err)
appCtx.SetError(apierrors.AuthenticationError)
}
return
}
logger.SetLabel(logging.LabelUserDeviceID, deviceAuthorizationID.String())
appCtx.Platform = devicePlatform
appCtx.UserID = userID
appCtx.AccessTokenID = accessTokenID
appCtx.DeviceAuthorizationID = deviceAuthorizationID
appCtx.UserAuthorizationScopes = userScopes
}
logger.SetLabel(logging.LabelUserID, appCtx.UserID.String())
logger.Debug("user scopes: %s", appCtx.UserAuthorizationScopes)
if hasPublicScope {
ctx.Next()
return
}
// Validate scopes.
for _, s := range appCtx.UserAuthorizationScopes {
if _, ok := validScopeMap[s]; ok {
// user scope is in valid scope map, proceed
ctx.Next()
return
}
}
// User scope is not valid, abort
logger.Error("unauthorized scopes. user scopes [%s]. required scopes[%s]",
appCtx.UserAuthorizationScopes, validScopes)
appCtx.SetError(apierrors.UnauthorizedScope)
}
}
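// validateAPIToken checks a 4-segment COB API token: it verifies the extra
// secret segment, parses the JWT claims (scope, user_id, api_token_id), lazily
// loads the per-user API secret and the set of non-revoked token IDs into the
// cache, validates the first three segments against that secret, and populates
// the request context on success.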
func validateAPIToken(appCtx *apicontext.AppContext,
store *jwtFactory.APIKeySecret, token string) (err error) {
err = jwtFactory.ValidateCOBSecret(token, store)
if err != nil {
return
}
claimMap, err := jwtFactory.ParseJWTPayload(token)
if err != nil {
return
}
// check scope
scopesFromClaim, sExist := claimMap["scope"].([]interface{})
if !sExist {
err = fmt.Errorf("scope not exist")
return
}
userScopes := make([]types.Scope, 0)
for _, scope := range scopesFromClaim {
str, ok := scope.(string)
if ok {
s := types.Scope(str)
userScopes = append(userScopes, s)
}
}
// check user id
userIDStr := claimMap["user_id"].(string)
userID, err := uuid.FromString(userIDStr)
if err != nil {
return
}
// check api token
apiTokenIDStr, exist := claimMap["api_token_id"].(string)
if !exist {
// it should panic if api_token_id does not exist in the payload
panic(fmt.Errorf("api_token_id not exist"))
}
apiTokenID, err := uuid.FromString(apiTokenIDStr)
if err != nil {
return
}
// create shared key
apiTokenKey := keys.GetAPITokenKeyByUserStr(userIDStr)
// check secret
secret, err := appCtx.Cache.GetFieldOfMap(apiTokenKey, keys.APITokenSecretKey)
if err != nil {
errorCode := cache.ParseCacheErrorCode(err)
if errorCode != cache.ErrNilKey {
return
}
// lazy loading
var client *kms.Client
client, err = kms.NewDefaultClient(appCtx, kms.KeyAPIToken)
if err != nil {
return
}
// set secret
apiSecret := models.APISecret{}
err = appCtx.DB.Where("user_id = ?", userID).First(&apiSecret).Error
if err != nil {
return
}
var secretBytes []byte
secretBytes, err = client.Decrypt(apiSecret.Secret)
if err != nil {
return
}
secret = string(secretBytes)
err = appCtx.Cache.SetFieldOfMap(apiTokenKey, "secret", secret)
if err != nil {
return
}
}
// validate token with secret
partialToken := strings.Join(strings.Split(token, ".")[:3], ".")
_, _, err = jwtFactory.BuildWithSecret(jwtFactory.APITokenObj{}, secret).
Validate(partialToken, appCtx.ServiceName)
if err != nil {
return
}
// check cached data and lazy loading with db query
_, err = appCtx.Cache.GetFieldOfMap(apiTokenKey, apiTokenIDStr)
if err != nil {
errorCode := cache.ParseCacheErrorCode(err)
if errorCode != cache.ErrNilKey {
return
}
lockKey := apiTokenKey + "_lock"
// lock while updating tokens
appCtx.Cache.Lock(lockKey, appCtx.RequestTag(), tokensWriteTimeout)
defer appCtx.Cache.UnLock(lockKey, appCtx.RequestTag())
// set api tokens
apiTokens := []models.APIToken{}
err = appCtx.DB.Where("user_id = ? AND revoked_at IS NULL",
userID).Find(&apiTokens).Error
if err != nil {
return
}
var isTokenExisting bool
for _, at := range apiTokens {
id := at.ID.String()
err = appCtx.Cache.SetFieldOfMap(apiTokenKey, id, "")
if err != nil {
return
}
if !isTokenExisting {
isTokenExisting = id == apiTokenIDStr
}
}
if !isTokenExisting {
err = fmt.Errorf("user<%s> api token<%s> error: %v",
userIDStr, apiTokenIDStr, err)
return
}
}
appCtx.UserAuthorizationScopes = userScopes
appCtx.UserID = &userID
appCtx.APITokenID = &apiTokenID
appCtx.Logger().SetLabel(logging.LabelAuthMethod, "api_token")
return nil
}
func validateOAuth2Token(appCtx *apicontext.AppContext, token string) error {
claims, _, err := jwtFactory.Build(
jwtFactory.OAuth2AccessTokenObj{}).Validate(token, appCtx.ServiceName)
if err != nil {
return err
}
oauth2AccessTokenIDStr, found := claims["oauth2_access_token_id"].(string)
if !found {
return fmt.Errorf("oauth2_access_token_id could not be found")
}
oauth2AccessTokenID, err := uuid.FromString(oauth2AccessTokenIDStr)
if err != nil {
return err
}
clientIDStr, found := claims["client_id"].(string)
if !found {
return fmt.Errorf("client_id could not be found")
}
clientID, err := uuid.FromString(clientIDStr)
if err != nil {
return err
}
userIDStr, found := claims["user_id"].(string)
if !found {
return fmt.Errorf("user_id could not be found")
}
userID, err := uuid.FromString(userIDStr)
if err != nil {
return err
}
scopes, found := claims["scope"].([]interface{})
if !found {
return fmt.Errorf("scope could not be found")
}
oauth2TokenKey := "oauth2_access_token:" + oauth2AccessTokenIDStr
found, err = appCtx.Cache.Exist(oauth2TokenKey)
if err != nil {
return err
}
if !found {
oauth2Token := models.OAuth2Token{}
result := appCtx.DB.Where("id = ? AND type = ? AND revoked_at IS NULL",
oauth2AccessTokenID, types.OAuth2AccessToken).First(&oauth2Token)
if result.Error != nil {
return result.Error
}
expireSec := int(oauth2Token.ExpireAt.Sub(time.Now()) / time.Second)
appCtx.Cache.Set(oauth2TokenKey, 1, expireSec)
}
userScopes := make([]types.Scope, 0)
for _, scope := range scopes {
scopeStr, ok := scope.(string)
if ok {
userScopes = append(userScopes, types.Scope(scopeStr))
}
}
appCtx.UserAuthorizationScopes = userScopes
appCtx.UserID = &userID
appCtx.OAuth2TokenID = &oauth2AccessTokenID
appCtx.OAuth2ClientID = clientID
appCtx.Logger().SetLabel(logging.LabelAuthMethod, "oauth2_access_token")
return nil
}
func extractJWT(ctx *gin.Context, logger logging.Logger) (jwtString string, exists bool) {
var jwtStr string
jwtStr = ctx.GetHeader("Authorization")
if len(jwtStr) != 0 {
jwtString = jwtStr
exists = true
return
}
var err error
jwtStr, err = ctx.Cookie("Authorization")
if err == nil && len(jwtStr) != 0 {
jwtString = jwtStr
exists = true
return
}
jwtString = ""
exists = false
return
}
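// authenticateJWT validates an access-token JWT for the given service, loads
// the cached access-token payload, and, for Web tokens (unless skipIPCheck is
// set), revokes the token when the request IP differs from the cached IP. On
// success it returns the user, device-authorization and access-token IDs, the
// scopes derived from the cached roles, and the device platform.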
func authenticateJWT(jwtStr string, requestIP string, skipIPCheck bool,
serviceName cobxtypes.ServiceName) (userID *uuid.UUID,
deviceAuthorizationID *uuid.UUID, accessTokenID *uuid.UUID,
userScopes []types.Scope, devicePlatform types.DevicePlatform, err error) {
claims, isExpired, delErr := jwtFactory.Build(jwtFactory.AccessTokenObj{}).
Validate(jwtStr, serviceName)
if delErr != nil || isExpired {
userID = nil
deviceAuthorizationID = nil
accessTokenID = nil
userScopes = nil
if isExpired {
err = fmt.Errorf("token <%v> expired", accessTokenID)
} else {
err = delErr
}
return
}
userIDFromClaim, uExist := claims["user_id"].(string)
accessTokenIDFromClaim, aExist := claims["access_token_id"].(string)
devAuthIDFromClaim, dExist := claims["device_authorization_id"].(string)
platformFromClaim, pExist := claims["platform"].(string)
if !uExist || !aExist || !dExist || !pExist {
userID = nil
deviceAuthorizationID = nil
accessTokenID = nil
userScopes = nil
err = fmt.Errorf(
"invalid claims [user_id: %v, access_token_id: %v,"+
" device_authorization_id: %v, platform: %v]",
uExist, aExist, dExist, pExist,
)
return
}
accessTokenPayload := &helper.AccessTokenPayload{}
if redisErr := accessTokenPayload.Get(
database.GetDB(database.Default), accessTokenIDFromClaim); redisErr != nil {
userID = nil
deviceAuthorizationID = nil
accessTokenID = nil
userScopes = nil
err = fmt.Errorf(
"can't validate token on cache with key: <token:%v>. Err: %+v",
accessTokenIDFromClaim, redisErr,
)
return
}
if !skipIPCheck && platformFromClaim == "Web" &&
requestIP != accessTokenPayload.IP {
delErr = cache.GetRedis().Delete(
keys.GetAccessTokenCacheKey(accessTokenIDFromClaim))
if delErr != nil {
userID = nil
deviceAuthorizationID = nil
accessTokenID = nil
userScopes = nil
err = fmt.Errorf("Delete token with key: <token:%v>. Err: %+v",
accessTokenIDFromClaim, delErr)
return
}
result := database.GetDB(database.Default).Model(models.AccessToken{}).Where("id = ?",
accessTokenIDFromClaim).Update("revoked_at", time.Now())
if result.Error != nil || result.RowsAffected != 1 {
err = fmt.Errorf("revoke access token <%v> error: %v",
accessTokenIDFromClaim, result.Error)
return
}
userID = nil
deviceAuthorizationID = nil
accessTokenID = nil
userScopes = nil
err = fmt.Errorf("JWT IP is different from request IP. JWT IP (%s). RequestIP (%s)",
accessTokenPayload.IP, requestIP)
return
}
userIDValue := uuid.FromStringOrNil(userIDFromClaim)
userID = &userIDValue
deviceAuthorizationIDValue := uuid.FromStringOrNil(devAuthIDFromClaim)
deviceAuthorizationID = &deviceAuthorizationIDValue
accessTokenIDValue := uuid.FromStringOrNil(accessTokenIDFromClaim)
accessTokenID = &accessTokenIDValue
for _, r := range accessTokenPayload.Roles {
userScopes = append(userScopes, types.GetScopesOfRole(r)...)
}
devicePlatform = types.DevicePlatform(platformFromClaim)
err = nil
return
} | ctx.Next()
} else { | random_line_split |
scope-auth.go | package middleware
import (
"fmt"
"net/http"
"strings"
"time"
"github.com/gin-gonic/gin"
"github.com/satori/go.uuid"
cobxtypes "github.com/jiarung/mochi/apps/exchange/cobx-types"
"github.com/jiarung/mochi/cache"
"github.com/jiarung/mochi/cache/helper"
"github.com/jiarung/mochi/cache/keys"
apicontext "github.com/jiarung/mochi/common/api/context"
apierrors "github.com/jiarung/mochi/common/api/errors"
jwtFactory "github.com/jiarung/mochi/common/jwt"
"github.com/jiarung/mochi/common/logging"
"github.com/jiarung/mochi/common/scope-auth"
"github.com/jiarung/mochi/database"
"github.com/jiarung/mochi/gcp/kms"
"github.com/jiarung/mochi/infra/api/middleware/logger"
models "github.com/jiarung/mochi/models/exchange"
"github.com/jiarung/mochi/types"
)
const tokensWriteTimeout = 1 << 3
// OAuth2AccessTokenType is the type of token issued by our OAuth2 server.
var OAuth2AccessTokenType = "Bearer"
// ScopeAuth returns a middleware that validates the scopes of each endpoint
// from `scopeMap` and the user's JWT if needed. This middleware should be placed
// - AFTER `AppContextMiddleware` and `ErrorHandler`
// - BEFORE all the handler functions
//
// - Check if JWT exists
// - Get scopes of the endpoint from `scopeMap`
// - if no JWT exists
// - Proceed only if requiring public scope
// - else
// - Proceed if JWT is valid and one of the following holds:
// - `validScopes` contains `ScopePublic`
// - `userScopes` contains at least one valid scope
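//
// Example registration (hypothetical router setup and service name, meant only
// as a sketch of how the middleware is mounted):
//
//	r := gin.New()
//	// the optional bool argument skips the Web IP check on access tokens
//	r.Use(ScopeAuth(cobxtypes.ServiceName("exchange-api"), false))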
func ScopeAuth(service cobxtypes.ServiceName, opt ...interface{}) gin.HandlerFunc {
store := jwtFactory.NewAPIKeySecret()
return func(ctx *gin.Context) {
// Always allow OPTIONS to pass through.
if ctx.Request.Method == http.MethodOptions {
return
}
appCtx, err := apicontext.GetAppContext(ctx)
if err != nil {
logger := logger.Get(ctx)
logger.Error("Fail to obtain AppContext Error: %v", err)
ctx.Abort()
return
}
logger := appCtx.Logger()
logger.SetLabel(logging.LabelApp, "scope-auth:middleware")
defer logger.DeleteLabel(logging.LabelApp)
var jwtStr string
var jwtExists bool
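// Pick a validation path from the shape of the Authorization value: four
// dot-separated segments are treated as a COB API token (a JWT plus an extra
// secret segment), "Bearer <token>" as an OAuth2 access token, and anything
// else as a plain access-token JWT taken from the header or cookie.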
if len(strings.Split(ctx.GetHeader("Authorization"), ".")) == 4 {
// API token
vErr := validateAPIToken(appCtx, store,
ctx.GetHeader("Authorization"))
if vErr != nil {
logger.Error("api token is invalid: %v", vErr)
} else {
jwtExists = true
}
} else if len(strings.Split(ctx.GetHeader("Authorization"), " ")) == 2 {
// OAuth2 token
authParts := strings.SplitN(ctx.GetHeader("Authorization"), " ", 2)
authType, authCredentials := authParts[0], authParts[1]
if authType == OAuth2AccessTokenType && len(authCredentials) > 0 {
vErr := validateOAuth2Token(appCtx, authCredentials)
if vErr != nil {
logger.Error("OAuth2 access token is invalid: %v", vErr)
} else {
jwtExists = true
}
}
} else {
// Access token: Get authorization from header or cookie.
jwtStr, jwtExists = extractJWT(ctx, logger)
}
// Get scopes of endpoint.
validScopes, err := scopeauth.GetScopes(service,
strings.ToUpper(ctx.Request.Method), ctx.Request.URL.Path)
if err != nil {
logger.Info("can't find scopes of [%s] [%s] %s. err(%s)",
service, ctx.Request.Method, ctx.Request.URL.Path, err)
appCtx.SetError(apierrors.ResourceNotFound)
return
}
if len(validScopes) == 0 {
logger.Error("empty scopes of [%s] %s.%s.",
ctx.Request.Method, service, ctx.Request.URL.Path)
ctx.Abort()
return
}
appCtx.RequiredScopes = validScopes
logger.Debug("required scopes: %s", validScopes)
validScopeMap := make(map[types.Scope]struct{})
hasPublicScope := false
for _, s := range validScopes {
if s == types.ScopePublic {
hasPublicScope = true
}
validScopeMap[s] = struct{}{}
}
// No JWT.
if !jwtExists {
if hasPublicScope {
ctx.Next()
} else {
logger.Error("authentication failed with no JWT found")
appCtx.SetError(apierrors.AuthenticationError)
}
return
}
if !appCtx.IsAPIToken() && !appCtx.IsOAuth2Token() {
skipIPCheck := false
if len(opt) > 0 {
skipIPCheck = opt[0].(bool)
}
// With JWT, validation
userID, deviceAuthorizationID, accessTokenID, userScopes,
devicePlatform, err := authenticateJWT(jwtStr, appCtx.RequestIP,
skipIPCheck, appCtx.ServiceName)
if err != nil {
if hasPublicScope {
ctx.Next()
} else {
logger.Error("authentication failed with invalid JWT. err: %+v", err)
appCtx.SetError(apierrors.AuthenticationError)
}
return
}
logger.SetLabel(logging.LabelUserDeviceID, deviceAuthorizationID.String())
appCtx.Platform = devicePlatform
appCtx.UserID = userID
appCtx.AccessTokenID = accessTokenID
appCtx.DeviceAuthorizationID = deviceAuthorizationID
appCtx.UserAuthorizationScopes = userScopes
}
logger.SetLabel(logging.LabelUserID, appCtx.UserID.String())
logger.Debug("user scopes: %s", appCtx.UserAuthorizationScopes)
if hasPublicScope {
ctx.Next()
return
}
// Validate scopes.
for _, s := range appCtx.UserAuthorizationScopes {
if _, ok := validScopeMap[s]; ok {
// user scope is in valid scope map, proceed
ctx.Next()
return
}
}
// User scope is not valid, abort
logger.Error("unauthorized scopes. user scopes [%s]. required scopes[%s]",
appCtx.UserAuthorizationScopes, validScopes)
appCtx.SetError(apierrors.UnauthorizedScope)
}
}
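// validateAPIToken checks a 4-segment COB API token: it verifies the extra
// secret segment, parses the JWT claims (scope, user_id, api_token_id), lazily
// loads the per-user API secret and the set of non-revoked token IDs into the
// cache, validates the first three segments against that secret, and populates
// the request context on success.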
func validateAPIToken(appCtx *apicontext.AppContext,
store *jwtFactory.APIKeySecret, token string) (err error) {
err = jwtFactory.ValidateCOBSecret(token, store)
if err != nil {
return
}
claimMap, err := jwtFactory.ParseJWTPayload(token)
if err != nil {
return
}
// check scope
scopesFromClaim, sExist := claimMap["scope"].([]interface{})
if !sExist {
err = fmt.Errorf("scope not exist")
return
}
userScopes := make([]types.Scope, 0)
for _, scope := range scopesFromClaim {
str, ok := scope.(string)
if ok {
s := types.Scope(str)
userScopes = append(userScopes, s)
}
}
// check user id
userIDStr := claimMap["user_id"].(string)
userID, err := uuid.FromString(userIDStr)
if err != nil {
return
}
// check api token
apiTokenIDStr, exist := claimMap["api_token_id"].(string)
if !exist {
// it should panic if api_token_id does not exist in the payload
panic(fmt.Errorf("api_token_id not exist"))
}
apiTokenID, err := uuid.FromString(apiTokenIDStr)
if err != nil {
return
}
// create shared key
apiTokenKey := keys.GetAPITokenKeyByUserStr(userIDStr)
// check secret
secret, err := appCtx.Cache.GetFieldOfMap(apiTokenKey, keys.APITokenSecretKey)
if err != nil {
errorCode := cache.ParseCacheErrorCode(err)
if errorCode != cache.ErrNilKey {
return
}
// lazy loading
var client *kms.Client
client, err = kms.NewDefaultClient(appCtx, kms.KeyAPIToken)
if err != nil {
return
}
// set secret
apiSecret := models.APISecret{}
err = appCtx.DB.Where("user_id = ?", userID).First(&apiSecret).Error
if err != nil {
return
}
var secretBytes []byte
secretBytes, err = client.Decrypt(apiSecret.Secret)
if err != nil {
return
}
secret = string(secretBytes)
err = appCtx.Cache.SetFieldOfMap(apiTokenKey, "secret", secret)
if err != nil {
return
}
}
// validate token with secret
partialToken := strings.Join(strings.Split(token, ".")[:3], ".")
_, _, err = jwtFactory.BuildWithSecret(jwtFactory.APITokenObj{}, secret).
Validate(partialToken, appCtx.ServiceName)
if err != nil {
return
}
// check cached data and lazy loading with db query
_, err = appCtx.Cache.GetFieldOfMap(apiTokenKey, apiTokenIDStr)
if err != nil {
errorCode := cache.ParseCacheErrorCode(err)
if errorCode != cache.ErrNilKey {
return
}
lockKey := apiTokenKey + "_lock"
// lock while updating tokens
appCtx.Cache.Lock(lockKey, appCtx.RequestTag(), tokensWriteTimeout)
defer appCtx.Cache.UnLock(lockKey, appCtx.RequestTag())
// set api tokens
apiTokens := []models.APIToken{}
err = appCtx.DB.Where("user_id = ? AND revoked_at IS NULL",
userID).Find(&apiTokens).Error
if err != nil {
return
}
var isTokenExisting bool
for _, at := range apiTokens {
id := at.ID.String()
err = appCtx.Cache.SetFieldOfMap(apiTokenKey, id, "")
if err != nil {
return
}
if !isTokenExisting {
isTokenExisting = id == apiTokenIDStr
}
}
if !isTokenExisting {
err = fmt.Errorf("user<%s> api token<%s> error: %v",
userIDStr, apiTokenIDStr, err)
return
}
}
appCtx.UserAuthorizationScopes = userScopes
appCtx.UserID = &userID
appCtx.APITokenID = &apiTokenID
appCtx.Logger().SetLabel(logging.LabelAuthMethod, "api_token")
return nil
}
func validateOAuth2Token(appCtx *apicontext.AppContext, token string) error {
claims, _, err := jwtFactory.Build(
jwtFactory.OAuth2AccessTokenObj{}).Validate(token, appCtx.ServiceName)
if err != nil {
return err
}
oauth2AccessTokenIDStr, found := claims["oauth2_access_token_id"].(string)
if !found {
return fmt.Errorf("oauth2_access_token_id could not be found")
}
oauth2AccessTokenID, err := uuid.FromString(oauth2AccessTokenIDStr)
if err != nil {
return err
}
clientIDStr, found := claims["client_id"].(string)
if !found {
return fmt.Errorf("client_id could not be found")
}
clientID, err := uuid.FromString(clientIDStr)
if err != nil {
return err
}
userIDStr, found := claims["user_id"].(string)
if !found {
return fmt.Errorf("user_id could not be found")
}
userID, err := uuid.FromString(userIDStr)
if err != nil {
return err
}
scopes, found := claims["scope"].([]interface{})
if !found {
return fmt.Errorf("scope could not be found")
}
oauth2TokenKey := "oauth2_access_token:" + oauth2AccessTokenIDStr
found, err = appCtx.Cache.Exist(oauth2TokenKey)
if err != nil {
return err
}
if !found {
oauth2Token := models.OAuth2Token{}
result := appCtx.DB.Where("id = ? AND type = ? AND revoked_at IS NULL",
oauth2AccessTokenID, types.OAuth2AccessToken).First(&oauth2Token)
if result.Error != nil {
return result.Error
}
expireSec := int(oauth2Token.ExpireAt.Sub(time.Now()) / time.Second)
appCtx.Cache.Set(oauth2TokenKey, 1, expireSec)
}
userScopes := make([]types.Scope, 0)
for _, scope := range scopes {
scopeStr, ok := scope.(string)
if ok {
userScopes = append(userScopes, types.Scope(scopeStr))
}
}
appCtx.UserAuthorizationScopes = userScopes
appCtx.UserID = &userID
appCtx.OAuth2TokenID = &oauth2AccessTokenID
appCtx.OAuth2ClientID = clientID
appCtx.Logger().SetLabel(logging.LabelAuthMethod, "oauth2_access_token")
return nil
}
func extractJWT(ctx *gin.Context, logger logging.Logger) (jwtString string, exists bool) {
var jwtStr string
jwtStr = ctx.GetHeader("Authorization")
if len(jwtStr) != 0 {
jwtString = jwtStr
exists = true
return
}
var err error
jwtStr, err = ctx.Cookie("Authorization")
if err == nil && len(jwtStr) != 0 {
jwtString = jwtStr
exists = true
return
}
jwtString = ""
exists = false
return
}
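// authenticateJWT validates an access-token JWT for the given service, loads
// the cached access-token payload, and, for Web tokens (unless skipIPCheck is
// set), revokes the token when the request IP differs from the cached IP. On
// success it returns the user, device-authorization and access-token IDs, the
// scopes derived from the cached roles, and the device platform.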
func authenticateJWT(jwtStr string, requestIP string, skipIPCheck bool,
serviceName cobxtypes.ServiceName) (userID *uuid.UUID,
deviceAuthorizationID *uuid.UUID, accessTokenID *uuid.UUID,
userScopes []types.Scope, devicePlatform types.DevicePlatform, err error) | {
claims, isExpired, delErr := jwtFactory.Build(jwtFactory.AccessTokenObj{}).
Validate(jwtStr, serviceName)
if delErr != nil || isExpired {
userID = nil
deviceAuthorizationID = nil
accessTokenID = nil
userScopes = nil
if isExpired {
err = fmt.Errorf("token <%v> expired", accessTokenID)
} else {
err = delErr
}
return
}
userIDFromClaim, uExist := claims["user_id"].(string)
accessTokenIDFromClaim, aExist := claims["access_token_id"].(string)
devAuthIDFromClaim, dExist := claims["device_authorization_id"].(string)
platformFromClaim, pExist := claims["platform"].(string)
if !uExist || !aExist || !dExist || !pExist {
userID = nil
deviceAuthorizationID = nil
accessTokenID = nil
userScopes = nil
err = fmt.Errorf(
"invalid claims [user_id: %v, access_token_id: %v,"+
" device_authorization_id: %v, platform: %v]",
uExist, aExist, dExist, pExist,
)
return
}
accessTokenPayload := &helper.AccessTokenPayload{}
if redisErr := accessTokenPayload.Get(
database.GetDB(database.Default), accessTokenIDFromClaim); redisErr != nil {
userID = nil
deviceAuthorizationID = nil
accessTokenID = nil
userScopes = nil
err = fmt.Errorf(
"can't validate token on cache with key: <token:%v>. Err: %+v",
accessTokenIDFromClaim, redisErr,
)
return
}
if !skipIPCheck && platformFromClaim == "Web" &&
requestIP != accessTokenPayload.IP {
delErr = cache.GetRedis().Delete(
keys.GetAccessTokenCacheKey(accessTokenIDFromClaim))
if delErr != nil {
userID = nil
deviceAuthorizationID = nil
accessTokenID = nil
userScopes = nil
err = fmt.Errorf("Delete token with key: <token:%v>. Err: %+v",
accessTokenIDFromClaim, delErr)
return
}
result := database.GetDB(database.Default).Model(models.AccessToken{}).Where("id = ?",
accessTokenIDFromClaim).Update("revoked_at", time.Now())
if result.Error != nil || result.RowsAffected != 1 {
err = fmt.Errorf("revoke access token <%v> error: %v",
accessTokenIDFromClaim, result.Error)
return
}
userID = nil
deviceAuthorizationID = nil
accessTokenID = nil
userScopes = nil
err = fmt.Errorf("JWT IP is different from request IP. JWT IP (%s). RequestIP (%s)",
accessTokenPayload.IP, requestIP)
return
}
userIDValue := uuid.FromStringOrNil(userIDFromClaim)
userID = &userIDValue
deviceAuthorizationIDValue := uuid.FromStringOrNil(devAuthIDFromClaim)
deviceAuthorizationID = &deviceAuthorizationIDValue
accessTokenIDValue := uuid.FromStringOrNil(accessTokenIDFromClaim)
accessTokenID = &accessTokenIDValue
for _, r := range accessTokenPayload.Roles {
userScopes = append(userScopes, types.GetScopesOfRole(r)...)
}
devicePlatform = types.DevicePlatform(platformFromClaim)
err = nil
return
} | identifier_body |
|
ts_analyzer.py | #!/usr/bin/env python
from __future__ import print_function
from colorprint import *
#import tornado_pyuv
#tornado_pyuv.install()
from tornado.ioloop import IOLoop
from tornado_pyuv import UVLoop
IOLoop.configure(UVLoop)
import signal
import tornado.ioloop
import tornado.web
import os
import sys
import pyuv
import struct
import pprint
from sys import stdout
import syslog
import datetime
import ts_analyzer
import argparse
import binascii
def handle_signal(sig, frame):
tornado.ioloop.IOLoop.instance().add_callback(tornado.ioloop.IOLoop.instance().stop)
if sys.version_info >= (3, 0):
LINESEP = os.linesep.encode()
else:
LINESEP = os.linesep
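# output_program_association_table parses one PAT (Program Association Table)
# section from a TS packet payload: the pointer_field/table_id/section_length
# header, the transport_stream_id, the (program_number, PMT PID) pairs and the
# trailing CRC32. Most per-field XML output is left commented out; the function
# returns a dict describing the last program entry it saw.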
def output_program_association_table(f, length, payload_start):
#pids[mcast][pid]['extra']='test'
#print(" <program_association_table>")
pointer_field = None
cursor = 0
if payload_start:
pointer_field = ord(f[0:1])
# if pointer_field:
# print(" <pointer_field>"+str(pointer_field)+"</pointer_field>")
cursor+=1
table_id = ord(f[1:2]); cursor+=1
# if table_id:
# #=str(pointer_field)
# print(" <table_id>"+str(pointer_field)+"</table_id>")
byte3 = ord(f[2:3]) ; cursor+=1
# if byte3 & 0x80 != 0x80:
# print(" <!-- selection_syntax_indicator is not 1 -->")
# if byte3 & 0x40 != 0x00:
# print(" <!-- reserved1 is not 0 -->")
# if byte3 & 0x30 != 0x30:
# print(" <!-- reserved2 is not 11 -->")
# if byte3 & 0x0C != 0x00:
# print(" <!-- two higher bits of secrion_length is are not 00 -->")
byte4 = ord(f[3:4]) ; cursor+=1
section_length = byte4 | ((byte3 & 0x07) << 8)
# if section_length:
# print(" <section_length>"+str(section_length)+"</section_length>")
byte5 = ord(f[4:5]) ; cursor += 1
byte6 = ord(f[5:6]) ; cursor += 1
transport_stream_ID = byte5 << 8 | byte6
# if transport_stream_ID:
# print(" <transport_stream_ID>"+str(transport_stream_ID)+"</transport_stream_ID>")
byte7 = ord(f[6:7]) ; cursor += 1
# if byte7 & 0xC0 != 0xC0:
# # print(" <!-- reserved3 is not 11 -->")
version_number = (byte7 & 0x3E) >> 1
# print(" <version_number>"+str(version_number)+"</version_number>")
current_indicator = bool(byte7 & 0x01)
# if not current_indicator:
# print(" <not_appliable_yet/>")
section_number = ord(f[7:8]) ; cursor += 1
last_section_number = ord(f[8:9]) ; cursor += 1
# if last_section_number:
# print(" <section_number>"+str(section_number)+"</section_number>")
# print(" <last_section_number>"+str(last_section_number)+"</last_section_number>")
for i in range(0,(section_length-5-4)/4):
# print(" <program>")
cursor+=4
program_num = (ord(f[9+i:10+i]) << 8) | ord(f[10+i:11+i])
b1 = ord(f[11+i:12+i])
b2 = ord(f[12+i:13+i])
if b1 & 0xE0 != 0xE0:
print(" <!-- reserved is not 111 -->")
program_pid = b2 | ((b1 & 0x1F) << 8)
# print(" <program_num>"+str(program_num)+"</program_num>")
# print(" <program_pid>"+hex(program_pid)+"</program_pid>")
# print(" </program>\n")
#program_map_pids.add(program_pid)
crc32 = f[cursor:cursor+4]; cursor+=4
length -= cursor
if length>0:
rest = f[cursor:cursor+length]
if (rest != '\xff' * length) and (rest != '\x00' * length):
print(" <rest>"+binascii.hexlify(rest)+"</rest>\n")
# print(" </program_association_table>\n")
return({'table_id':str(pointer_field),'transportstream_id':str(transport_stream_ID),'program':str(program_num),'pmt':hex(program_pid) })
def output_adaptation_field(f):
print(" <adaptation_field>\n")
additional_length = ord(f.read(1))
if additional_length == 0:
print(" </adaptation_field>\n")
return 1
flags = ord(f.read(1))
discontinuity = bool(flags & 0x80)
random_access = bool(flags & 0x40)
elementary_stream_priority = bool(flags & 0x20)
pcr = bool(flags & 0x10)
opcr = bool(flags & 0x08)
splicing_point = bool(flags & 0x04)
transport_private = bool(flags & 0x02)
adaptation_field_extension = bool(flags & 0x01)
if discontinuity: print(" <discontinuity/>\n")
if random_access: print(" <random_access/>\n")
if elementary_stream_priority: |
length = additional_length+1 # size byte
additional_length-=1 # flags
def read_pcr():
pcr_byte_1 = ord(f.read(1)) # base
pcr_byte_2 = ord(f.read(1)) # base
pcr_byte_3 = ord(f.read(1)) # base
pcr_byte_4 = ord(f.read(1)) # base
pcr_byte_5 = ord(f.read(1)) # 1 bit base, 6 bits padding, 1 bit ext
pcr_byte_6 = ord(f.read(1)) # 8 bits ext
base = (pcr_byte_1 << (1+8*3)) + \
(pcr_byte_2 << (1+8*2)) + \
(pcr_byte_3 << (1+8*1)) + \
(pcr_byte_4 << (1+8*0)) + \
(pcr_byte_5 >> 7)
ext = ((pcr_byte_5 & 0x01) << 8) + pcr_byte_6
time = base / 90000.0 + ext / 27000000.0
return time
if pcr:
if additional_length>=6:
additional_length-=6
val = read_pcr()
print(" <program_clock_reference>"+str(val)+"</program_clock_reference>\n")
if opcr:
if additional_length>=6:
additional_length-=6
val = read_pcr()
print(" <original_program_clock_reference>"+str(val)+"</original_program_clock_reference>\n")
if splicing_point:
if additional_length>=1:
additional_length-=1
splice_count = ord(f.read(1))
print(" <splice_countdown>"+str(splice_count)+"</splice_countdown>\n")
if additional_length:
print(" <!-- ignoring " + str(additional_length) + " bytes -->\n")
f.read(additional_length)
print(" </adaptation_field>\n")
return length
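# on_read is the pyuv receive callback. Each UDP datagram is assumed to carry
# seven 188-byte TS packets (7 * 188 = 1316 bytes, the constant used for the
# bitrate estimate in MainHandler). Every packet is checked for the 0x47 sync
# byte, tracked per (multicast group, PID), and its continuity counter is
# compared with the previous value to count CC errors.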
def on_read(handle, ip_port, flags, data, error):
global bits_second, start_time_packet
if error is not None:
print (error,color='red')
return
if start_time_packet == 'unset':
start_time_packet=datetime.datetime.now()
data = data.strip()
mcast=handle.getsockname()
if data:
ip, port = ip_port
diff = datetime.datetime.now()-start_time_packet
if diff.total_seconds() >= 30:
bits_second=1
start_time_packet=datetime.datetime.now()
else:
bits_second=1+bits_second
for i in range(0,len(data),188):
offset = i
#print(offset)
sync = ord(data[offset:offset+1])
header1 = ord(data[offset+1:offset+2])
header2 = ord(data[offset+2:offset+3])
header3 = ord(data[offset+3:offset+4])
transport_error = bool(header1 & 0x80)
payload_unit_start = bool(header1 & 0x40)
transport_priority = bool(header1 & 0x20)
pid = header2 | ((header1 & 0x1F) << 8)
scrambling = ((header3 & 0xC0) >> 6)
have_adaptation_field = bool(header3 & 0x20)
adaptation_field = ((header3 & 0x30) >> 4)
have_payload = bool(header3 & 0x10)
cc = header3 & 0x0F
length = len(data)
# We have sync:
if sync == 0x47:
if mcast not in pids:
pids[mcast]={}
if pid not in pids[mcast]:
pids[mcast][pid]={'packets': 1, 'cc': cc, 'error': 0, 'ip': ip, 'type': 'unknown', 'extra': {}}
print ("===> Found new PID in stream %s (src=%s)" % (mcast,ip),end='')
if pid == 0:
pids[mcast][pid]['type'] = "PAT"
print (" [PAT] ",end='')
buf.append("%s NEW PID: %s (src=%s)(PID: %s [%s]) [PAT]" %
(datetime.datetime.now(),mcast,ip,hex(pid),pid))
buf.append("%s NEW PID: %s (src=%s) (PID: %s [%s])" %
(datetime.datetime.now(),mcast,ip,hex(pid),pid))
print ("(PID: ",end='')
print ("%s"% hex(pid), color='green',end='')
print (" [%s])"% pid)
else:
pids[mcast][pid]['packets']= pids[mcast][pid]['packets']+1
if adaptation_field != 2:
cc_com = (pids[mcast][pid]['cc']+1) % 16
pids[mcast][pid]['cc'] = cc
if cc != cc_com:
pids[mcast][pid]['error'] = pids[mcast][pid]['error']+1
print ("%s Error expected %s got %s (%s) %s %s" %
(datetime.datetime.now(), cc_com, cc,
mcast, hex(pid), length),
color='red')
syslog.syslog(syslog.LOG_ERR, "%s Error expected %s got %s (%s) %s %s" %
(datetime.datetime.now(), cc_com, cc, mcast, hex(pid), length))
buf.append( "%s ERR: expected %s got %s (%s) %s %s" %
(datetime.datetime.now(), cc_com, cc, mcast, hex(pid), length))
if pid == 0x00:
#adaptation_field_size = 0
#print(have_adaptation_field)
#if have_adaptation_field:
adaptation_field_size = 167
payload_size = 188 - 4 - adaptation_field_size
#print(payload_unit_start);
#print(mcast)
pids[mcast][pid]['extra']=output_program_association_table(data[offset+4:offset+188], payload_size, payload_unit_start)
#pp = pprint.PrettyPrinter(indent=4)
#pp.pprint(pids)
class MainHandler(tornado.web.RequestHandler):
def get(self):
from platform import uname
hostname = uname()[1]
run_time = datetime.datetime.now() - start_time
packet_time = datetime.datetime.now() - start_time_packet
bits=((bits_second*1316)*8/packet_time.total_seconds())/1000000
self.render('index.html',version=ts_analyzer.__version__,addresses=dict(addresses),hostname=hostname,
location=location,bits=round(bits,2), run_time=run_time,buf=buf,peers=peers)
class LogHandler(tornado.web.RequestHandler):
def get(self):
from platform import uname
hostname = uname()[1]
self.render('log.html',buf=buf, hostname=hostname)
class SelfLogHandler(tornado.web.RequestHandler):
def get(self):
from platform import uname
hostname = uname()[1]
self.render('self_log.html',buf=buf, hostname=hostname)
class LogsHandler(tornado.web.RequestHandler):
def get(self):
from platform import uname
hostname = uname()[1]
self.render('logs.html',buf=buf, peers=peers, hostname=hostname )
class ChannelHandler(tornado.web.RequestHandler):
def get(self):
pids_new=pids.copy()
for key in pids_new.keys():
if type(key) is not str:
try:
pids_new[str(key)] = pids_new[key]
except:
try:
pids_new[repr(key)] == pids_new[key]
except:
pass
del pids_new[key]
self.write(pids_new)
class RingBuffer:
def __init__(self, size):
self.data = [None for i in xrange(size)]
def append(self, x):
self.data.pop(0)
self.data.append(x)
def get(self):
return reversed(self.data)
class ChannelOverviewHandler(tornado.web.RequestHandler):
def get(self):
pids_new=pids.copy()
for key in pids_new.keys():
if type(key) is not str:
try:
pids_new[str(key)] = pids_new[key]
except:
try:
pids_new[repr(key)] == pids_new[key]
except:
pass
del pids_new[key]
self.render('base.html',pids_new=pids_new)
#pp = pprint.PrettyPrinter(indent=4)
#pp.pprint(pids)
class NewChannelHandler(tornado.web.RequestHandler):
def post(self):
# Debug
#self.write(tornado.escape.json_encode(self.request.arguments["post"]))
try:
posted_config = tornado.escape.json_decode(self.request.body)
except:
print("Invalid JSON")
pp = pprint.PrettyPrinter(indent=4)
pp.pprint(posted_config)
#class Server(object):
# def __init__(self,address)
# self.server = pyuv.UDP(loop._loop)
# self.server.bind(key)
# self.server.set_membership(key[0], pyuv.UV_JOIN_GROUP)
# self.server.start_recv(on_read)
if __name__ == "__main__":
application = tornado.web.Application([
(r"/", MainHandler),
(r"/channels/overview", ChannelOverviewHandler),
(r"/channels", ChannelHandler),
(r"/channels/new", NewChannelHandler),
(r"/logs", LogsHandler),
(r"/log", LogHandler),
(r"/selflog", SelfLogHandler)
])
parser = argparse.ArgumentParser()
parser.add_argument('--version', action='version', version="ts_analyzer %s" % ts_analyzer.__version__)
args = parser.parse_args()
os.system(['clear', 'cls'][os.name == 'nt'])
print ("TS_Analyzer version %s (Using PyUV version %s)" % (ts_analyzer.__version__, pyuv.__version__), color='white', background='blue')
template_path = os.path.join(os.path.dirname(__file__), "templates")
syslog.syslog("TS_Analyzer version %s (Using PyUV version %s)" % (ts_analyzer.__version__, pyuv.__version__))
pids = {}
location = ''
location = 'Vrijhof - 253'
addresses = {}
addresses[("239.192.71.3", 1234)] = 1
addresses[("239.192.27.1", 1234)] = 1
addresses[("239.192.23.1", 1234)] = 1
buf = RingBuffer(100)
peers = {}
peers["iptv2-cam"]=("130.89.175.42",8889)
start_time_packet='unset'
bits_second = 1
start_time=datetime.datetime.now()
#pp2 = pprint.PrettyPrinter(indent=4)
#pp2.pprint(addresses)
signal.signal(signal.SIGINT, handle_signal)
signal.signal(signal.SIGTERM, handle_signal)
application.listen(8889)
loop = tornado.ioloop.IOLoop.instance()
# for addr in addresses.keys():
# print ("In dict: %s" % (addr))
counter=0
servers={}
for key in addresses:
print ('%s corresponds to' % key[0])
servers[counter] = pyuv.UDP(loop._loop)
servers[counter].bind(key)
servers[counter].set_membership(key[0], pyuv.UV_JOIN_GROUP)
servers[counter].start_recv(on_read)
counter = counter + 1
# server1 = pyuv.UDP(loop._loop)
# server1.bind(("239.192.27.1", 1234))
# server1.set_membership("239.192.27.1", pyuv.UV_JOIN_GROUP)
# server1.start_recv(on_read)
# server = pyuv.UDP(loop._loop)
# server.bind(("239.192.71.3", 1234))
# server.set_membership("239.192.71.3", pyuv.UV_JOIN_GROUP)
# server.start_recv(on_read)
# server1 = pyuv.UDP(loop._loop)
# server1.bind(("239.192.27.1", 1234))
# server1.set_membership("239.192.27.1", pyuv.UV_JOIN_GROUP)
# server1.start_recv(on_read)
# server2 = pyuv.UDP(loop._loop)
# server2.bind(("239.192.27.2", 1234))
# server2.set_membership("239.192.27.2", pyuv.UV_JOIN_GROUP)
# server2.start_recv(on_read)
# server3 = pyuv.UDP(loop._loop)
# server3.bind(("239.192.27.1", 1234))
# server3.set_membership("239.192.27.1", pyuv.UV_JOIN_GROUP)
# server3.start_recv(on_read)
# server5 = pyuv.UDP(loop._loop)
# server5.bind(("239.192.49.2", 1234))
# server5.set_membership("239.192.49.2", pyuv.UV_JOIN_GROUP)
# server5.start_recv(on_read)
# server4 = pyuv.UDP(loop._loop)
# server4.bind(("239.192.72.1", 1234))
# server4.set_membership("239.192.72.1", pyuv.UV_JOIN_GROUP)
# server4.start_recv(on_read)
# server6 = pyuv.UDP(loop._loop)
# server6.bind(("239.192.23.2", 1234))
# server6.set_membership("239.192.23.2", pyuv.UV_JOIN_GROUP)
# server6.start_recv(on_read)
# server7 = pyuv.UDP(loop._loop)
# server7.bind(("239.192.25.2", 1234))
# server7.set_membership("239.192.25.2", pyuv.UV_JOIN_GROUP)
# server7.start_recv(on_read)
loop.start()
tornado.ioloop.IOLoop.instance().close()
| print(" <elementary_stream_priority/>\n") | conditional_block |
ts_analyzer.py | #!/usr/bin/env python
from __future__ import print_function
from colorprint import *
#import tornado_pyuv
#tornado_pyuv.install()
from tornado.ioloop import IOLoop
from tornado_pyuv import UVLoop
IOLoop.configure(UVLoop)
import signal
import tornado.ioloop
import tornado.web
import os
import sys
import pyuv
import struct
import pprint
from sys import stdout
import syslog
import datetime
import ts_analyzer
import argparse
import binascii
def handle_signal(sig, frame):
tornado.ioloop.IOLoop.instance().add_callback(tornado.ioloop.IOLoop.instance().stop)
if sys.version_info >= (3, 0):
LINESEP = os.linesep.encode()
else:
LINESEP = os.linesep
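# output_program_association_table parses one PAT (Program Association Table)
# section from a TS packet payload: the pointer_field/table_id/section_length
# header, the transport_stream_id, the (program_number, PMT PID) pairs and the
# trailing CRC32. Most per-field XML output is left commented out; the function
# returns a dict describing the last program entry it saw.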
def output_program_association_table(f, length, payload_start):
#pids[mcast][pid]['extra']='test'
#print(" <program_association_table>")
pointer_field = None
cursor = 0
if payload_start:
pointer_field = ord(f[0:1])
# if pointer_field:
# print(" <pointer_field>"+str(pointer_field)+"</pointer_field>")
cursor+=1
table_id = ord(f[1:2]); cursor+=1
# if table_id:
# #=str(pointer_field)
# print(" <table_id>"+str(pointer_field)+"</table_id>")
byte3 = ord(f[2:3]) ; cursor+=1
# if byte3 & 0x80 != 0x80:
# print(" <!-- selection_syntax_indicator is not 1 -->")
# if byte3 & 0x40 != 0x00:
# print(" <!-- reserved1 is not 0 -->")
# if byte3 & 0x30 != 0x30:
# print(" <!-- reserved2 is not 11 -->")
# if byte3 & 0x0C != 0x00:
# print(" <!-- two higher bits of secrion_length is are not 00 -->")
byte4 = ord(f[3:4]) ; cursor+=1
section_length = byte4 | ((byte3 & 0x07) << 8)
# if section_length:
# print(" <section_length>"+str(section_length)+"</section_length>")
byte5 = ord(f[4:5]) ; cursor += 1
byte6 = ord(f[5:6]) ; cursor += 1
transport_stream_ID = byte5 << 8 | byte6
# if transport_stream_ID:
# print(" <transport_stream_ID>"+str(transport_stream_ID)+"</transport_stream_ID>")
byte7 = ord(f[6:7]) ; cursor += 1
# if byte7 & 0xC0 != 0xC0:
# # print(" <!-- reserved3 is not 11 -->")
version_number = (byte7 & 0x3E) >> 1
# print(" <version_number>"+str(version_number)+"</version_number>")
current_indicator = bool(byte7 & 0x01)
# if not current_indicator:
# print(" <not_appliable_yet/>")
section_number = ord(f[7:8]) ; cursor += 1
last_section_number = ord(f[8:9]) ; cursor += 1
# if last_section_number:
# print(" <section_number>"+str(section_number)+"</section_number>")
# print(" <last_section_number>"+str(last_section_number)+"</last_section_number>")
for i in range(0,(section_length-5-4)/4):
# print(" <program>")
cursor+=4
program_num = (ord(f[9+i:10+i]) << 8) | ord(f[10+i:11+i])
b1 = ord(f[11+i:12+i])
b2 = ord(f[12+i:13+i])
if b1 & 0xE0 != 0xE0:
print(" <!-- reserved is not 111 -->")
program_pid = b2 | ((b1 & 0x1F) << 8)
# print(" <program_num>"+str(program_num)+"</program_num>")
# print(" <program_pid>"+hex(program_pid)+"</program_pid>")
# print(" </program>\n")
#program_map_pids.add(program_pid)
crc32 = f[cursor:cursor+4]; cursor+=4
length -= cursor
if length>0:
rest = f[cursor:cursor+length]
if (rest != '\xff' * length) and (rest != '\x00' * length):
print(" <rest>"+binascii.hexlify(rest)+"</rest>\n")
# print(" </program_association_table>\n")
return({'table_id':str(pointer_field),'transportstream_id':str(transport_stream_ID),'program':str(program_num),'pmt':hex(program_pid) })
def output_adaptation_field(f):
print(" <adaptation_field>\n")
additional_length = ord(f.read(1))
if additional_length == 0:
print(" </adaptation_field>\n")
return 1
flags = ord(f.read(1))
discontinuity = bool(flags & 0x80)
random_access = bool(flags & 0x40)
elementary_stream_priority = bool(flags & 0x20)
pcr = bool(flags & 0x10)
opcr = bool(flags & 0x08)
splicing_point = bool(flags & 0x04)
transport_private = bool(flags & 0x02)
adaptation_field_extension = bool(flags & 0x01)
if discontinuity: print(" <discontinuity/>\n")
if random_access: print(" <random_access/>\n")
if elementary_stream_priority: print(" <elementary_stream_priority/>\n")
length = additional_length+1 # size byte
additional_length-=1 # flags
def read_pcr():
pcr_byte_1 = ord(f.read(1)) # base
pcr_byte_2 = ord(f.read(1)) # base
pcr_byte_3 = ord(f.read(1)) # base
pcr_byte_4 = ord(f.read(1)) # base
pcr_byte_5 = ord(f.read(1)) # 1 bit base, 6 bits padding, 1 bit ext
pcr_byte_6 = ord(f.read(1)) # 8 bits ext
base = (pcr_byte_1 << (1+8*3)) + \
(pcr_byte_2 << (1+8*2)) + \
(pcr_byte_3 << (1+8*1)) + \
(pcr_byte_4 << (1+8*0)) + \
(pcr_byte_5 >> 7)
ext = ((pcr_byte_5 & 0x01) << 8) + pcr_byte_6
time = base / 90000.0 + ext / 27000000.0
return time
if pcr:
if additional_length>=6:
additional_length-=6
val = read_pcr()
print(" <program_clock_reference>"+str(val)+"</program_clock_reference>\n")
if opcr:
if additional_length>=6:
additional_length-=6
val = read_pcr()
print(" <original_program_clock_reference>"+str(val)+"</original_program_clock_reference>\n")
if splicing_point:
if additional_length>=1:
additional_length-=1
splice_count = ord(f.read(1))
print(" <splice_countdown>"+str(splice_count)+"</splice_countdown>\n")
if additional_length:
print(" <!-- ignoring " + str(additional_length) + " bytes -->\n")
f.read(additional_length)
print(" </adaptation_field>\n")
return length
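# on_read is the pyuv receive callback. Each UDP datagram is assumed to carry
# seven 188-byte TS packets (7 * 188 = 1316 bytes, the constant used for the
# bitrate estimate in MainHandler). Every packet is checked for the 0x47 sync
# byte, tracked per (multicast group, PID), and its continuity counter is
# compared with the previous value to count CC errors.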
def on_read(handle, ip_port, flags, data, error):
global bits_second, start_time_packet
if error is not None:
print (error,color='red')
return
if start_time_packet == 'unset':
start_time_packet=datetime.datetime.now()
data = data.strip()
mcast=handle.getsockname()
if data:
ip, port = ip_port
diff = datetime.datetime.now()-start_time_packet
if diff.total_seconds() >= 30:
bits_second=1
start_time_packet=datetime.datetime.now()
else:
bits_second=1+bits_second
for i in range(0,len(data),188):
offset = i
#print(offset)
sync = ord(data[offset:offset+1])
header1 = ord(data[offset+1:offset+2])
header2 = ord(data[offset+2:offset+3])
header3 = ord(data[offset+3:offset+4])
transport_error = bool(header1 & 0x80)
payload_unit_start = bool(header1 & 0x40)
transport_priority = bool(header1 & 0x20)
pid = header2 | ((header1 & 0x1F) << 8)
scrambling = ((header3 & 0xC0) >> 6)
have_adaptation_field = bool(header3 & 0x20)
adaptation_field = ((header3 & 0x30) >> 4)
have_payload = bool(header3 & 0x10)
cc = header3 & 0x0F
length = len(data)
# We have sync:
if sync == 0x47:
if mcast not in pids:
pids[mcast]={}
if pid not in pids[mcast]:
pids[mcast][pid]={'packets': 1, 'cc': cc, 'error': 0, 'ip': ip, 'type': 'unknown', 'extra': {}}
print ("===> Found new PID in stream %s (src=%s)" % (mcast,ip),end='')
if pid == 0:
pids[mcast][pid]['type'] = "PAT"
print (" [PAT] ",end='')
buf.append("%s NEW PID: %s (src=%s)(PID: %s [%s]) [PAT]" %
(datetime.datetime.now(),mcast,ip,hex(pid),pid))
buf.append("%s NEW PID: %s (src=%s) (PID: %s [%s])" %
(datetime.datetime.now(),mcast,ip,hex(pid),pid))
print ("(PID: ",end='')
print ("%s"% hex(pid), color='green',end='')
print (" [%s])"% pid)
else:
pids[mcast][pid]['packets']= pids[mcast][pid]['packets']+1
if adaptation_field != 2:
cc_com = (pids[mcast][pid]['cc']+1) % 16
pids[mcast][pid]['cc'] = cc
if cc != cc_com:
pids[mcast][pid]['error'] = pids[mcast][pid]['error']+1
print ("%s Error expected %s got %s (%s) %s %s" %
(datetime.datetime.now(), cc_com, cc,
mcast, hex(pid), length),
color='red')
syslog.syslog(syslog.LOG_ERR, "%s Error expected %s got %s (%s) %s %s" %
(datetime.datetime.now(), cc_com, cc, mcast, hex(pid), length))
buf.append( "%s ERR: expected %s got %s (%s) %s %s" %
(datetime.datetime.now(), cc_com, cc, mcast, hex(pid), length))
if pid == 0x00:
#adaptation_field_size = 0
#print(have_adaptation_field)
#if have_adaptation_field:
adaptation_field_size = 167
payload_size = 188 - 4 - adaptation_field_size
#print(payload_unit_start);
#print(mcast)
pids[mcast][pid]['extra']=output_program_association_table(data[offset+4:offset+188], payload_size, payload_unit_start)
#pp = pprint.PrettyPrinter(indent=4)
#pp.pprint(pids)
class MainHandler(tornado.web.RequestHandler):
def get(self):
from platform import uname
hostname = uname()[1]
run_time = datetime.datetime.now() - start_time
packet_time = datetime.datetime.now() - start_time_packet
bits=((bits_second*1316)*8/packet_time.total_seconds())/1000000
self.render('index.html',version=ts_analyzer.__version__,addresses=dict(addresses),hostname=hostname,
location=location,bits=round(bits,2), run_time=run_time,buf=buf,peers=peers)
class LogHandler(tornado.web.RequestHandler):
def get(self):
from platform import uname
hostname = uname()[1]
self.render('log.html',buf=buf, hostname=hostname)
class SelfLogHandler(tornado.web.RequestHandler):
def get(self):
|
class LogsHandler(tornado.web.RequestHandler):
def get(self):
from platform import uname
hostname = uname()[1]
self.render('logs.html',buf=buf, peers=peers, hostname=hostname )
class ChannelHandler(tornado.web.RequestHandler):
def get(self):
pids_new=pids.copy()
for key in pids_new.keys():
if type(key) is not str:
try:
pids_new[str(key)] = pids_new[key]
except:
try:
pids_new[repr(key)] = pids_new[key]
except:
pass
del pids_new[key]
self.write(pids_new)
class RingBuffer:
def __init__(self, size):
self.data = [None for i in range(size)]
def append(self, x):
self.data.pop(0)
self.data.append(x)
def get(self):
return reversed(self.data)
class ChannelOverviewHandler(tornado.web.RequestHandler):
def get(self):
pids_new=pids.copy()
for key in pids_new.keys():
if type(key) is not str:
try:
pids_new[str(key)] = pids_new[key]
except:
try:
pids_new[repr(key)] = pids_new[key]
except:
pass
del pids_new[key]
self.render('base.html',pids_new=pids_new)
#pp = pprint.PrettyPrinter(indent=4)
#pp.pprint(pids)
class NewChannelHandler(tornado.web.RequestHandler):
def post(self):
# Debug
#self.write(tornado.escape.json_encode(self.request.arguments["post"]))
try:
posted_config = tornado.escape.json_decode(self.request.body)
except:
print("Invalid JSON")
pp = pprint.PrettyPrinter(indent=4)
pp.pprint(posted_config)
#class Server(object):
# def __init__(self,address)
# self.server = pyuv.UDP(loop._loop)
# self.server.bind(key)
# self.server.set_membership(key[0], pyuv.UV_JOIN_GROUP)
# self.server.start_recv(on_read)
if __name__ == "__main__":
application = tornado.web.Application([
(r"/", MainHandler),
(r"/channels/overview", ChannelOverviewHandler),
(r"/channels", ChannelHandler),
(r"/channels/new", NewChannelHandler),
(r"/logs", LogsHandler),
(r"/log", LogHandler),
(r"/selflog", SelfLogHandler)
])
parser = argparse.ArgumentParser()
parser.add_argument('--version', action='version', version="ts_analyzer %s" % ts_analyzer.__version__)
args = parser.parse_args()
os.system(['clear', 'cls'][os.name == 'nt'])
print ("TS_Analyzer version %s (Using PyUV version %s)" % (ts_analyzer.__version__, pyuv.__version__), color='white', background='blue')
template_path = os.path.join(os.path.dirname(__file__), "templates")
syslog.syslog("TS_Analyzer version %s (Using PyUV version %s)" % (ts_analyzer.__version__, pyuv.__version__))
pids = {}
location = ''
location = 'Vrijhof - 253'
addresses = {}
addresses[("239.192.71.3", 1234)] = 1
addresses[("239.192.27.1", 1234)] = 1
addresses[("239.192.23.1", 1234)] = 1
buf = RingBuffer(100)
peers = {}
peers["iptv2-cam"]=("130.89.175.42",8889)
start_time_packet='unset'
bits_second = 1
start_time=datetime.datetime.now()
#pp2 = pprint.PrettyPrinter(indent=4)
#pp2.pprint(addresses)
signal.signal(signal.SIGINT, handle_signal)
signal.signal(signal.SIGTERM, handle_signal)
application.listen(8889)
loop = tornado.ioloop.IOLoop.instance()
# for addr in addresses.keys():
# print ("In dict: %s" % (addr))
counter=0
servers={}
for key in addresses:
print ('%s corresponds to' % key[0])
servers[counter] = pyuv.UDP(loop._loop)
servers[counter].bind(key)
servers[counter].set_membership(key[0], pyuv.UV_JOIN_GROUP)
servers[counter].start_recv(on_read)
counter = counter + 1
# server1 = pyuv.UDP(loop._loop)
# server1.bind(("239.192.27.1", 1234))
# server1.set_membership("239.192.27.1", pyuv.UV_JOIN_GROUP)
# server1.start_recv(on_read)
# server = pyuv.UDP(loop._loop)
# server.bind(("239.192.71.3", 1234))
# server.set_membership("239.192.71.3", pyuv.UV_JOIN_GROUP)
# server.start_recv(on_read)
# server1 = pyuv.UDP(loop._loop)
# server1.bind(("239.192.27.1", 1234))
# server1.set_membership("239.192.27.1", pyuv.UV_JOIN_GROUP)
# server1.start_recv(on_read)
# server2 = pyuv.UDP(loop._loop)
# server2.bind(("239.192.27.2", 1234))
# server2.set_membership("239.192.27.2", pyuv.UV_JOIN_GROUP)
# server2.start_recv(on_read)
# server3 = pyuv.UDP(loop._loop)
# server3.bind(("239.192.27.1", 1234))
# server3.set_membership("239.192.27.1", pyuv.UV_JOIN_GROUP)
# server3.start_recv(on_read)
# server5 = pyuv.UDP(loop._loop)
# server5.bind(("239.192.49.2", 1234))
# server5.set_membership("239.192.49.2", pyuv.UV_JOIN_GROUP)
# server5.start_recv(on_read)
# server4 = pyuv.UDP(loop._loop)
# server4.bind(("239.192.72.1", 1234))
# server4.set_membership("239.192.72.1", pyuv.UV_JOIN_GROUP)
# server4.start_recv(on_read)
# server6 = pyuv.UDP(loop._loop)
# server6.bind(("239.192.23.2", 1234))
# server6.set_membership("239.192.23.2", pyuv.UV_JOIN_GROUP)
# server6.start_recv(on_read)
# server7 = pyuv.UDP(loop._loop)
# server7.bind(("239.192.25.2", 1234))
# server7.set_membership("239.192.25.2", pyuv.UV_JOIN_GROUP)
# server7.start_recv(on_read)
loop.start()
tornado.ioloop.IOLoop.instance().close()
| from platform import uname
hostname = uname()[1]
self.render('self_log.html',buf=buf, hostname=hostname) | identifier_body |
ts_analyzer.py | #!/usr/bin/env python
from __future__ import print_function
from colorprint import *
#import tornado_pyuv
#tornado_pyuv.install()
from tornado.ioloop import IOLoop
from tornado_pyuv import UVLoop
IOLoop.configure(UVLoop)
import signal
import tornado.ioloop
import tornado.web
import os
import sys
import pyuv
import struct
import pprint
from sys import stdout
import syslog
import datetime
import ts_analyzer
import argparse
def handle_signal(sig, frame):
tornado.ioloop.IOLoop.instance().add_callback(tornado.ioloop.IOLoop.instance().stop)
if sys.version_info >= (3, 0):
LINESEP = os.linesep.encode()
else:
LINESEP = os.linesep
def output_program_association_table(f, length, payload_start):
#pids[mcast][pid]['extra']='test'
#print(" <program_association_table>")
pointer_field = None
cursor = 0
if payload_start:
pointer_field = ord(f[0:1])
# if pointer_field:
# print(" <pointer_field>"+str(pointer_field)+"</pointer_field>")
cursor+=1
table_id = ord(f[1:2]); cursor+=1
# if table_id:
# #=str(pointer_field)
# print(" <table_id>"+str(pointer_field)+"</table_id>")
byte3 = ord(f[2:3]) ; cursor+=1
# if byte3 & 0x80 != 0x80:
# print(" <!-- selection_syntax_indicator is not 1 -->")
# if byte3 & 0x40 != 0x00:
# print(" <!-- reserved1 is not 0 -->")
# if byte3 & 0x30 != 0x30:
# print(" <!-- reserved2 is not 11 -->")
# if byte3 & 0x0C != 0x00:
# print(" <!-- two higher bits of secrion_length is are not 00 -->")
byte4 = ord(f[3:4]) ; cursor+=1
section_length = byte4 | ((byte3 & 0x07) << 8)
# if section_length:
# print(" <section_length>"+str(section_length)+"</section_length>")
byte5 = ord(f[4:5]) ; cursor += 1
byte6 = ord(f[5:6]) ; cursor += 1
transport_stream_ID = byte5 << 8 | byte6
# if transport_stream_ID:
# print(" <transport_stream_ID>"+str(transport_stream_ID)+"</transport_stream_ID>")
byte7 = ord(f[6:7]) ; cursor += 1
# if byte7 & 0xC0 != 0xC0:
# # print(" <!-- reserved3 is not 11 -->")
version_number = (byte7 & 0x3E) >> 1
# print(" <version_number>"+str(version_number)+"</version_number>")
current_indicator = bool(byte7 & 0x01)
# if not current_indicator:
# print(" <not_appliable_yet/>")
section_number = ord(f[7:8]) ; cursor += 1
last_section_number = ord(f[8:9]) ; cursor += 1
# if last_section_number:
# print(" <section_number>"+str(section_number)+"</section_number>")
# print(" <last_section_number>"+str(last_section_number)+"</last_section_number>")
for i in range(0,(section_length-5-4)/4):
# print(" <program>")
cursor+=4
program_num = (ord(f[9+4*i:10+4*i]) << 8) | ord(f[10+4*i:11+4*i])
b1 = ord(f[11+4*i:12+4*i])
b2 = ord(f[12+4*i:13+4*i])
if b1 & 0xE0 != 0xE0:
print(" <!-- reserved is not 111 -->")
program_pid = b2 | ((b1 & 0x1F) << 8)
# print(" <program_num>"+str(program_num)+"</program_num>")
# print(" <program_pid>"+hex(program_pid)+"</program_pid>")
# print(" </program>\n")
#program_map_pids.add(program_pid)
crc32 = f[cursor:cursor+4]; cursor+=4
length -= cursor
if length>0:
rest = f[cursor:cursor+length]
if (rest != '\xff' * length) and (rest != '\x00' * length):
print(" <rest>"+binascii.hexlify(rest)+"</rest>\n")
# print(" </program_association_table>\n")
return({'table_id':str(pointer_field),'transportstream_id':str(transport_stream_ID),'program':str(program_num),'pmt':hex(program_pid) })
def output_adaptation_field(f):
print(" <adaptation_field>\n")
additional_length = ord(f.read(1))
if additional_length == 0:
print(" </adaptation_field>\n")
return 1
flags = ord(f.read(1))
discontinuity = bool(flags & 0x80)
random_access = bool(flags & 0x40)
elementary_stream_priority = bool(flags & 0x20)
pcr = bool(flags & 0x10)
opcr = bool(flags & 0x08)
splicing_point = bool(flags & 0x04)
transport_private = bool(flags & 0x02)
adaptation_field_extension = bool(flags & 0x01)
if discontinuity: print(" <discontinuity/>\n")
if random_access: print(" <random_access/>\n")
if elementary_stream_priority: print(" <elementary_stream_priority/>\n")
length = additional_length+1 # size byte
additional_length-=1 # flags
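# PCR layout (ISO/IEC 13818-1): a 33-bit base clocked at 90 kHz, 6 reserved bits, then a 9-bit extension clocked at 27 MHz; read_pcr() converts it to seconds.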
def read_pcr():
pcr_byte_1 = ord(f.read(1)) # base
pcr_byte_2 = ord(f.read(1)) # base
pcr_byte_3 = ord(f.read(1)) # base
pcr_byte_4 = ord(f.read(1)) # base
pcr_byte_5 = ord(f.read(1)) # 1 bit base, 6 bits padding, 1 bit ext
pcr_byte_6 = ord(f.read(1)) # 8 bits ext
base = (pcr_byte_1 << (1+8*3)) + \
(pcr_byte_2 << (1+8*2)) + \
(pcr_byte_3 << (1+8*1)) + \
(pcr_byte_4 << (1+8*0)) + \
(pcr_byte_5 >> 7)
ext = ((pcr_byte_5 & 0x01) << 8) + pcr_byte_6
time = base / 90000.0 + ext / 27000000.0
return time
if pcr:
if additional_length>=6:
additional_length-=6
val = read_pcr()
print(" <program_clock_reference>"+str(val)+"</program_clock_reference>\n")
if opcr:
if additional_length>=6:
additional_length-=6
val = read_pcr()
print(" <original_program_clock_reference>"+str(val)+"</original_program_clock_reference>\n")
if splicing_point:
if additional_length>=1:
additional_length-=1
splice_count = ord(f.read(1))
print(" <splice_countdown>"+str(splice_count)+"</splice_countdown>\n")
if additional_length:
print(" <!-- ignoring " + str(additional_length) + " bytes -->\n")
f.read(additional_length)
print(" </adaptation_field>\n")
return length
def on_read(handle, ip_port, flags, data, error):
global bits_second, start_time_packet
if error is not None:
print (error,color='red')
return
if start_time_packet == 'unset':
start_time_packet=datetime.datetime.now()
data = data.strip()
mcast=handle.getsockname()
if data:
ip, port = ip_port
diff = datetime.datetime.now()-start_time_packet
if diff.total_seconds() >= 30:
bits_second=1
start_time_packet=datetime.datetime.now()
else:
bits_second=1+bits_second
for i in range(0,len(data),188):
offset = i
#print(offset)
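# Parse the fixed 4-byte TS packet header: 0x47 sync byte, error/PUSI/priority flags, 13-bit PID, scrambling control, adaptation-field control and 4-bit continuity counter.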
sync = ord(data[offset:offset+1])
header1 = ord(data[offset+1:offset+2])
header2 = ord(data[offset+2:offset+3])
header3 = ord(data[offset+3:offset+4])
transport_error = bool(header1 & 0x80)
payload_unit_start = bool(header1 & 0x40)
transport_priority = bool(header1 & 0x20)
pid = header2 | ((header1 & 0x1F) << 8)
scrambling = ((header3 & 0xC0) >> 6)
have_adaptation_field = bool(header3 & 0x20)
adaptation_field = ((header3 & 0x30) >> 4)
have_payload = bool(header3 & 0x10)
cc = header3 & 0x0F
length = len(data)
# We have sync:
if sync == 0x47:
if mcast not in pids:
pids[mcast]={}
if pid not in pids[mcast]:
pids[mcast][pid]={'packets': 1, 'cc': cc, 'error': 0, 'ip': ip, 'type': 'unknown', 'extra': {}}
print ("===> Found new PID in stream %s (src=%s)" % (mcast,ip),end='')
if pid == 0:
pids[mcast][pid]['type'] = "PAT"
print (" [PAT] ",end='')
buf.append("%s NEW PID: %s (src=%s)(PID: %s [%s]) [PAT]" %
(datetime.datetime.now(),mcast,ip,hex(pid),pid))
buf.append("%s NEW PID: %s (src=%s) (PID: %s [%s])" %
(datetime.datetime.now(),mcast,ip,hex(pid),pid))
print ("(PID: ",end='')
print ("%s"% hex(pid), color='green',end='')
print (" [%s])"% pid)
else:
pids[mcast][pid]['packets']= pids[mcast][pid]['packets']+1
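# Continuity-counter check: the CC is not incremented for packets that carry no payload (adaptation_field control == 2), so those are excluded from the check.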
if adaptation_field != 2:
cc_com = (pids[mcast][pid]['cc']+1) % 16
pids[mcast][pid]['cc'] = cc
if cc != cc_com:
pids[mcast][pid]['error'] = pids[mcast][pid]['error']+1
print ("%s Error expected %s got %s (%s) %s %s" %
(datetime.datetime.now(), cc_com, cc,
mcast, hex(pid), length),
color='red')
syslog.syslog(syslog.LOG_ERR, "%s Error expected %s got %s (%s) %s %s" %
(datetime.datetime.now(), cc_com, cc, mcast, hex(pid), length))
buf.append( "%s ERR: expected %s got %s (%s) %s %s" %
(datetime.datetime.now(), cc_com, cc, mcast, hex(pid), length))
if pid == 0x00:
#adaptation_field_size = 0
#print(have_adaptation_field)
#if have_adaptation_field:
adaptation_field_size = 167
payload_size = 188 - 4 - adaptation_field_size
#print(payload_unit_start);
#print(mcast)
pids[mcast][pid]['extra']=output_program_association_table(data[offset+4:offset+188], payload_size, payload_unit_start)
#pp = pprint.PrettyPrinter(indent=4)
#pp.pprint(pids)
class MainHandler(tornado.web.RequestHandler):
def get(self):
from platform import uname
hostname = uname()[1]
run_time = datetime.datetime.now() - start_time
packet_time = datetime.datetime.now() - start_time_packet
bits=((bits_second*1316)*8/packet_time.total_seconds())/1000000
self.render('index.html',version=ts_analyzer.__version__,addresses=dict(addresses),hostname=hostname,
location=location,bits=round(bits,2), run_time=run_time,buf=buf,peers=peers)
class LogHandler(tornado.web.RequestHandler):
def get(self):
from platform import uname
hostname = uname()[1]
self.render('log.html',buf=buf, hostname=hostname)
class SelfLogHandler(tornado.web.RequestHandler):
def get(self):
from platform import uname
hostname = uname()[1]
self.render('self_log.html',buf=buf, hostname=hostname)
class LogsHandler(tornado.web.RequestHandler):
def get(self):
from platform import uname
hostname = uname()[1]
self.render('logs.html',buf=buf, peers=peers, hostname=hostname )
class ChannelHandler(tornado.web.RequestHandler):
def get(self):
pids_new=pids.copy()
for key in pids_new.keys():
if type(key) is not str:
try:
pids_new[str(key)] = pids_new[key]
except:
try:
pids_new[repr(key)] = pids_new[key]
except:
pass
del pids_new[key]
self.write(pids_new)
class RingBuffer:
def __init__(self, size):
self.data = [None for i in range(size)]
def append(self, x):
self.data.pop(0)
self.data.append(x)
def get(self):
return reversed(self.data)
class ChannelOverviewHandler(tornado.web.RequestHandler):
def get(self):
pids_new=pids.copy()
for key in pids_new.keys():
if type(key) is not str:
try:
pids_new[str(key)] = pids_new[key]
except:
try:
pids_new[repr(key)] = pids_new[key]
except:
pass
del pids_new[key]
self.render('base.html',pids_new=pids_new)
#pp = pprint.PrettyPrinter(indent=4)
#pp.pprint(pids)
class NewChannelHandler(tornado.web.RequestHandler):
def post(self):
# Debug
#self.write(tornado.escape.json_encode(self.request.arguments["post"]))
try:
posted_config = tornado.escape.json_decode(self.request.body)
except:
print("Invalid JSON")
pp = pprint.PrettyPrinter(indent=4)
pp.pprint(posted_config)
#class Server(object):
# def __init__(self,address)
# self.server = pyuv.UDP(loop._loop)
# self.server.bind(key)
# self.server.set_membership(key[0], pyuv.UV_JOIN_GROUP)
# self.server.start_recv(on_read)
if __name__ == "__main__":
application = tornado.web.Application([
(r"/", MainHandler),
(r"/channels/overview", ChannelOverviewHandler),
(r"/channels", ChannelHandler),
(r"/channels/new", NewChannelHandler),
(r"/logs", LogsHandler),
(r"/log", LogHandler),
(r"/selflog", SelfLogHandler)
])
parser = argparse.ArgumentParser()
parser.add_argument('--version', action='version', version="ts_analyzer %s" % ts_analyzer.__version__)
args = parser.parse_args()
os.system(['clear', 'cls'][os.name == 'nt'])
print ("TS_Analyzer version %s (Using PyUV version %s)" % (ts_analyzer.__version__, pyuv.__version__), color='white', background='blue')
template_path = os.path.join(os.path.dirname(__file__), "templates")
syslog.syslog("TS_Analyzer version %s (Using PyUV version %s)" % (ts_analyzer.__version__, pyuv.__version__))
pids = {}
location = ''
location = 'Vrijhof - 253'
addresses = {}
addresses[("239.192.71.3", 1234)] = 1
addresses[("239.192.27.1", 1234)] = 1
addresses[("239.192.23.1", 1234)] = 1
buf = RingBuffer(100)
peers = {}
peers["iptv2-cam"]=("130.89.175.42",8889)
start_time_packet='unset'
bits_second = 1
start_time=datetime.datetime.now()
#pp2 = pprint.PrettyPrinter(indent=4)
#pp2.pprint(addresses)
signal.signal(signal.SIGINT, handle_signal)
signal.signal(signal.SIGTERM, handle_signal)
application.listen(8889)
loop = tornado.ioloop.IOLoop.instance()
# for addr in addresses.keys():
# print ("In dict: %s" % (addr))
counter=0 | servers={}
for key in addresses:
print ('%s corresponds to' % key[0])
servers[counter] = pyuv.UDP(loop._loop)
servers[counter].bind(key)
servers[counter].set_membership(key[0], pyuv.UV_JOIN_GROUP)
servers[counter].start_recv(on_read)
counter = counter + 1
# server1 = pyuv.UDP(loop._loop)
# server1.bind(("239.192.27.1", 1234))
# server1.set_membership("239.192.27.1", pyuv.UV_JOIN_GROUP)
# server1.start_recv(on_read)
# server = pyuv.UDP(loop._loop)
# server.bind(("239.192.71.3", 1234))
# server.set_membership("239.192.71.3", pyuv.UV_JOIN_GROUP)
# server.start_recv(on_read)
# server1 = pyuv.UDP(loop._loop)
# server1.bind(("239.192.27.1", 1234))
# server1.set_membership("239.192.27.1", pyuv.UV_JOIN_GROUP)
# server1.start_recv(on_read)
# server2 = pyuv.UDP(loop._loop)
# server2.bind(("239.192.27.2", 1234))
# server2.set_membership("239.192.27.2", pyuv.UV_JOIN_GROUP)
# server2.start_recv(on_read)
# server3 = pyuv.UDP(loop._loop)
# server3.bind(("239.192.27.1", 1234))
# server3.set_membership("239.192.27.1", pyuv.UV_JOIN_GROUP)
# server3.start_recv(on_read)
# server5 = pyuv.UDP(loop._loop)
# server5.bind(("239.192.49.2", 1234))
# server5.set_membership("239.192.49.2", pyuv.UV_JOIN_GROUP)
# server5.start_recv(on_read)
# server4 = pyuv.UDP(loop._loop)
# server4.bind(("239.192.72.1", 1234))
# server4.set_membership("239.192.72.1", pyuv.UV_JOIN_GROUP)
# server4.start_recv(on_read)
# server6 = pyuv.UDP(loop._loop)
# server6.bind(("239.192.23.2", 1234))
# server6.set_membership("239.192.23.2", pyuv.UV_JOIN_GROUP)
# server6.start_recv(on_read)
# server7 = pyuv.UDP(loop._loop)
# server7.bind(("239.192.25.2", 1234))
# server7.set_membership("239.192.25.2", pyuv.UV_JOIN_GROUP)
# server7.start_recv(on_read)
loop.start()
tornado.ioloop.IOLoop.instance().close() | random_line_split |
|
ts_analyzer.py | #!/usr/bin/env python
from __future__ import print_function
from colorprint import *
#import tornado_pyuv
#tornado_pyuv.install()
from tornado.ioloop import IOLoop
from tornado_pyuv import UVLoop
IOLoop.configure(UVLoop)
import signal
import tornado.ioloop
import tornado.web
import os
import sys
import pyuv
import struct
import pprint
from sys import stdout
import syslog
import datetime
import ts_analyzer
import argparse
def | (sig, frame):
tornado.ioloop.IOLoop.instance().add_callback(tornado.ioloop.IOLoop.instance().stop)
if sys.version_info >= (3, 0):
LINESEP = os.linesep.encode()
else:
LINESEP = os.linesep
def output_program_association_table(f, length, payload_start):
#pids[mcast][pid]['extra']='test'
#print(" <program_association_table>")
pointer_field = None
cursor = 0
if payload_start:
pointer_field = ord(f[0:1])
# if pointer_field:
# print(" <pointer_field>"+str(pointer_field)+"</pointer_field>")
cursor+=1
table_id = ord(f[1:2]); cursor+=1
# if table_id:
# #=str(pointer_field)
# print(" <table_id>"+str(pointer_field)+"</table_id>")
byte3 = ord(f[2:3]) ; cursor+=1
# if byte3 & 0x80 != 0x80:
# print(" <!-- selection_syntax_indicator is not 1 -->")
# if byte3 & 0x40 != 0x00:
# print(" <!-- reserved1 is not 0 -->")
# if byte3 & 0x30 != 0x30:
# print(" <!-- reserved2 is not 11 -->")
# if byte3 & 0x0C != 0x00:
# print(" <!-- two higher bits of secrion_length is are not 00 -->")
byte4 = ord(f[3:4]) ; cursor+=1
section_length = byte4 | ((byte3 & 0x07) << 8)
# if section_length:
# print(" <section_length>"+str(section_length)+"</section_length>")
byte5 = ord(f[4:5]) ; cursor += 1
byte6 = ord(f[5:6]) ; cursor += 1
transport_stream_ID = byte5 << 8 | byte6
# if transport_stream_ID:
# print(" <transport_stream_ID>"+str(transport_stream_ID)+"</transport_stream_ID>")
byte7 = ord(f[6:7]) ; cursor += 1
# if byte7 & 0xC0 != 0xC0:
# # print(" <!-- reserved3 is not 11 -->")
version_number = (byte7 & 0x3E) >> 1
# print(" <version_number>"+str(version_number)+"</version_number>")
current_indicator = bool(byte7 & 0x01)
# if not current_indicator:
# print(" <not_appliable_yet/>")
section_number = ord(f[7:8]) ; cursor += 1
last_section_number = ord(f[8:9]) ; cursor += 1
# if last_section_number:
# print(" <section_number>"+str(section_number)+"</section_number>")
# print(" <last_section_number>"+str(last_section_number)+"</last_section_number>")
for i in range(0,(section_length-5-4)/4):
# print(" <program>")
cursor+=4
program_num = (ord(f[9+4*i:10+4*i]) << 8) | ord(f[10+4*i:11+4*i])
b1 = ord(f[11+4*i:12+4*i])
b2 = ord(f[12+4*i:13+4*i])
if b1 & 0xE0 != 0xE0:
print(" <!-- reserved is not 111 -->")
program_pid = b2 | ((b1 & 0x1F) << 8)
# print(" <program_num>"+str(program_num)+"</program_num>")
# print(" <program_pid>"+hex(program_pid)+"</program_pid>")
# print(" </program>\n")
#program_map_pids.add(program_pid)
crc32 = f[cursor:cursor+4]; cursor+=4
length -= cursor
if length>0:
rest = f[cursor:cursor+length]
if (rest != '\xff' * length) and (rest != '\x00' * length):
print(" <rest>"+binascii.hexlify(rest)+"</rest>\n")
# print(" </program_association_table>\n")
return({'table_id':str(pointer_field),'transportstream_id':str(transport_stream_ID),'program':str(program_num),'pmt':hex(program_pid) })
def output_adaptation_field(f):
print(" <adaptation_field>\n")
additional_length = ord(f.read(1))
if additional_length == 0:
print(" </adaptation_field>\n")
return 1
flags = ord(f.read(1))
discontinuity = bool(flags & 0x80)
random_access = bool(flags & 0x40)
elementary_stream_priority = bool(flags & 0x20)
pcr = bool(flags & 0x10)
opcr = bool(flags & 0x08)
splicing_point = bool(flags & 0x04)
transport_private = bool(flags & 0x02)
adaptation_field_extension = bool(flags & 0x01)
if discontinuity: print(" <discontinuity/>\n")
if random_access: print(" <random_access/>\n")
if elementary_stream_priority: print(" <elementary_stream_priority/>\n")
length = additional_length+1 # size byte
additional_length-=1 # flags
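# PCR layout (ISO/IEC 13818-1): a 33-bit base clocked at 90 kHz, 6 reserved bits, then a 9-bit extension clocked at 27 MHz; read_pcr() converts it to seconds.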
def read_pcr():
pcr_byte_1 = ord(f.read(1)) # base
pcr_byte_2 = ord(f.read(1)) # base
pcr_byte_3 = ord(f.read(1)) # base
pcr_byte_4 = ord(f.read(1)) # base
pcr_byte_5 = ord(f.read(1)) # 1 bit base, 6 bits padding, 1 bit ext
pcr_byte_6 = ord(f.read(1)) # 8 bits ext
base = (pcr_byte_1 << (1+8*3)) + \
(pcr_byte_2 << (1+8*2)) + \
(pcr_byte_3 << (1+8*1)) + \
(pcr_byte_4 << (1+8*0)) + \
(pcr_byte_5 >> 7)
ext = ((pcr_byte_5 & 0x01) << 8) + pcr_byte_6
time = base / 90000.0 + ext / 27000000.0
return time
if pcr:
if additional_length>=6:
additional_length-=6
val = read_pcr()
print(" <program_clock_reference>"+str(val)+"</program_clock_reference>\n")
if opcr:
if additional_length>=6:
additional_length-=6
val = read_pcr()
print(" <original_program_clock_reference>"+str(val)+"</original_program_clock_reference>\n")
if splicing_point:
if additional_length>=1:
additional_length-=1
splice_count = ord(f.read(1))
print(" <splice_countdown>"+str(splice_count)+"</splice_countdown>\n")
if additional_length:
print(" <!-- ignoring " + str(additional_length) + " bytes -->\n")
f.read(additional_length)
print(" </adaptation_field>\n")
return length
def on_read(handle, ip_port, flags, data, error):
global bits_second, start_time_packet
if error is not None:
print (error,color='red')
return
if start_time_packet == 'unset':
start_time_packet=datetime.datetime.now()
data = data.strip()
mcast=handle.getsockname()
if data:
ip, port = ip_port
diff = datetime.datetime.now()-start_time_packet
if diff.total_seconds() >= 30:
bits_second=1
start_time_packet=datetime.datetime.now()
else:
bits_second=1+bits_second
for i in range(0,len(data),188):
offset = i
#print(offset)
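# Parse the fixed 4-byte TS packet header: 0x47 sync byte, error/PUSI/priority flags, 13-bit PID, scrambling control, adaptation-field control and 4-bit continuity counter.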
sync = ord(data[offset:offset+1])
header1 = ord(data[offset+1:offset+2])
header2 = ord(data[offset+2:offset+3])
header3 = ord(data[offset+3:offset+4])
transport_error = bool(header1 & 0x80)
payload_unit_start = bool(header1 & 0x40)
transport_priority = bool(header1 & 0x20)
pid = header2 | ((header1 & 0x1F) << 8)
scrambling = ((header3 & 0xC0) >> 6)
have_adaptation_field = bool(header3 & 0x20)
adaptation_field = ((header3 & 0x30) >> 4)
have_payload = bool(header3 & 0x10)
cc = header3 & 0x0F
length = len(data)
# We have sync:
if sync == 0x47:
if mcast not in pids:
pids[mcast]={}
if pid not in pids[mcast]:
pids[mcast][pid]={'packets': 1, 'cc': cc, 'error': 0, 'ip': ip, 'type': 'unknown', 'extra': {}}
print ("===> Found new PID in stream %s (src=%s)" % (mcast,ip),end='')
if pid == 0:
pids[mcast][pid]['type'] = "PAT"
print (" [PAT] ",end='')
buf.append("%s NEW PID: %s (src=%s)(PID: %s [%s]) [PAT]" %
(datetime.datetime.now(),mcast,ip,hex(pid),pid))
buf.append("%s NEW PID: %s (src=%s) (PID: %s [%s])" %
(datetime.datetime.now(),mcast,ip,hex(pid),pid))
print ("(PID: ",end='')
print ("%s"% hex(pid), color='green',end='')
print (" [%s])"% pid)
else:
pids[mcast][pid]['packets']= pids[mcast][pid]['packets']+1
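# Continuity-counter check: the CC is not incremented for packets that carry no payload (adaptation_field control == 2), so those are excluded from the check.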
if adaptation_field != 2:
cc_com = (pids[mcast][pid]['cc']+1) % 16
pids[mcast][pid]['cc'] = cc
if cc != cc_com:
pids[mcast][pid]['error'] = pids[mcast][pid]['error']+1
print ("%s Error expected %s got %s (%s) %s %s" %
(datetime.datetime.now(), cc_com, cc,
mcast, hex(pid), length),
color='red')
syslog.syslog(syslog.LOG_ERR, "%s Error expected %s got %s (%s) %s %s" %
(datetime.datetime.now(), cc_com, cc, mcast, hex(pid), length))
buf.append( "%s ERR: expected %s got %s (%s) %s %s" %
(datetime.datetime.now(), cc_com, cc, mcast, hex(pid), length))
if pid == 0x00:
#adaptation_field_size = 0
#print(have_adaptation_field)
#if have_adaptation_field:
adaptation_field_size = 167
payload_size = 188 - 4 - adaptation_field_size
#print(payload_unit_start);
#print(mcast)
pids[mcast][pid]['extra']=output_program_association_table(data[offset+4:offset+188], payload_size, payload_unit_start)
#pp = pprint.PrettyPrinter(indent=4)
#pp.pprint(pids)
class MainHandler(tornado.web.RequestHandler):
def get(self):
from platform import uname
hostname = uname()[1]
run_time = datetime.datetime.now() - start_time
packet_time = datetime.datetime.now() - start_time_packet
bits=((bits_second*1316)*8/packet_time.total_seconds())/1000000
self.render('index.html',version=ts_analyzer.__version__,addresses=dict(addresses),hostname=hostname,
location=location,bits=round(bits,2), run_time=run_time,buf=buf,peers=peers)
class LogHandler(tornado.web.RequestHandler):
def get(self):
from platform import uname
hostname = uname()[1]
self.render('log.html',buf=buf, hostname=hostname)
class SelfLogHandler(tornado.web.RequestHandler):
def get(self):
from platform import uname
hostname = uname()[1]
self.render('self_log.html',buf=buf, hostname=hostname)
class LogsHandler(tornado.web.RequestHandler):
def get(self):
from platform import uname
hostname = uname()[1]
self.render('logs.html',buf=buf, peers=peers, hostname=hostname )
class ChannelHandler(tornado.web.RequestHandler):
def get(self):
pids_new=pids.copy()
for key in pids_new.keys():
if type(key) is not str:
try:
pids_new[str(key)] = pids_new[key]
except:
try:
pids_new[repr(key)] = pids_new[key]
except:
pass
del pids_new[key]
self.write(pids_new)
class RingBuffer:
def __init__(self, size):
self.data = [None for i in range(size)]
def append(self, x):
self.data.pop(0)
self.data.append(x)
def get(self):
return reversed(self.data)
class ChannelOverviewHandler(tornado.web.RequestHandler):
def get(self):
pids_new=pids.copy()
for key in pids_new.keys():
if type(key) is not str:
try:
pids_new[str(key)] = pids_new[key]
except:
try:
pids_new[repr(key)] = pids_new[key]
except:
pass
del pids_new[key]
self.render('base.html',pids_new=pids_new)
#pp = pprint.PrettyPrinter(indent=4)
#pp.pprint(pids)
class NewChannelHandler(tornado.web.RequestHandler):
def post(self):
# Debug
#self.write(tornado.escape.json_encode(self.request.arguments["post"]))
try:
posted_config = tornado.escape.json_decode(self.request.body)
except:
print("Invalid JSON")
pp = pprint.PrettyPrinter(indent=4)
pp.pprint(posted_config)
#class Server(object):
# def __init__(self,address)
# self.server = pyuv.UDP(loop._loop)
# self.server.bind(key)
# self.server.set_membership(key[0], pyuv.UV_JOIN_GROUP)
# self.server.start_recv(on_read)
if __name__ == "__main__":
application = tornado.web.Application([
(r"/", MainHandler),
(r"/channels/overview", ChannelOverviewHandler),
(r"/channels", ChannelHandler),
(r"/channels/new", NewChannelHandler),
(r"/logs", LogsHandler),
(r"/log", LogHandler),
(r"/selflog", SelfLogHandler)
])
parser = argparse.ArgumentParser()
parser.add_argument('--version', action='version', version="ts_analyzer %s" % ts_analyzer.__version__)
args = parser.parse_args()
os.system(['clear', 'cls'][os.name == 'nt'])
print ("TS_Analyzer version %s (Using PyUV version %s)" % (ts_analyzer.__version__, pyuv.__version__), color='white', background='blue')
template_path = os.path.join(os.path.dirname(__file__), "templates")
syslog.syslog("TS_Analyzer version %s (Using PyUV version %s)" % (ts_analyzer.__version__, pyuv.__version__))
pids = {}
location = ''
location = 'Vrijhof - 253'
addresses = {}
addresses[("239.192.71.3", 1234)] = 1
addresses[("239.192.27.1", 1234)] = 1
addresses[("239.192.23.1", 1234)] = 1
buf = RingBuffer(100)
peers = {}
peers["iptv2-cam"]=("130.89.175.42",8889)
start_time_packet='unset'
bits_second = 1
start_time=datetime.datetime.now()
#pp2 = pprint.PrettyPrinter(indent=4)
#pp2.pprint(addresses)
signal.signal(signal.SIGINT, handle_signal)
signal.signal(signal.SIGTERM, handle_signal)
application.listen(8889)
loop = tornado.ioloop.IOLoop.instance()
# for addr in addresses.keys():
# print ("In dict: %s" % (addr))
counter=0
servers={}
for key in addresses:
print ('%s corresponds to' % key[0])
servers[counter] = pyuv.UDP(loop._loop)
servers[counter].bind(key)
servers[counter].set_membership(key[0], pyuv.UV_JOIN_GROUP)
servers[counter].start_recv(on_read)
counter = counter + 1
# server1 = pyuv.UDP(loop._loop)
# server1.bind(("239.192.27.1", 1234))
# server1.set_membership("239.192.27.1", pyuv.UV_JOIN_GROUP)
# server1.start_recv(on_read)
# server = pyuv.UDP(loop._loop)
# server.bind(("239.192.71.3", 1234))
# server.set_membership("239.192.71.3", pyuv.UV_JOIN_GROUP)
# server.start_recv(on_read)
# server1 = pyuv.UDP(loop._loop)
# server1.bind(("239.192.27.1", 1234))
# server1.set_membership("239.192.27.1", pyuv.UV_JOIN_GROUP)
# server1.start_recv(on_read)
# server2 = pyuv.UDP(loop._loop)
# server2.bind(("239.192.27.2", 1234))
# server2.set_membership("239.192.27.2", pyuv.UV_JOIN_GROUP)
# server2.start_recv(on_read)
# server3 = pyuv.UDP(loop._loop)
# server3.bind(("239.192.27.1", 1234))
# server3.set_membership("239.192.27.1", pyuv.UV_JOIN_GROUP)
# server3.start_recv(on_read)
# server5 = pyuv.UDP(loop._loop)
# server5.bind(("239.192.49.2", 1234))
# server5.set_membership("239.192.49.2", pyuv.UV_JOIN_GROUP)
# server5.start_recv(on_read)
# server4 = pyuv.UDP(loop._loop)
# server4.bind(("239.192.72.1", 1234))
# server4.set_membership("239.192.72.1", pyuv.UV_JOIN_GROUP)
# server4.start_recv(on_read)
# server6 = pyuv.UDP(loop._loop)
# server6.bind(("239.192.23.2", 1234))
# server6.set_membership("239.192.23.2", pyuv.UV_JOIN_GROUP)
# server6.start_recv(on_read)
# server7 = pyuv.UDP(loop._loop)
# server7.bind(("239.192.25.2", 1234))
# server7.set_membership("239.192.25.2", pyuv.UV_JOIN_GROUP)
# server7.start_recv(on_read)
loop.start()
tornado.ioloop.IOLoop.instance().close()
| handle_signal | identifier_name |
map_output_tracker.rs | use std::net::{IpAddr, Ipv4Addr, SocketAddr};
use std::sync::Arc;
use std::time::Duration;
use crate::serialized_data_capnp::serialized_data;
use crate::{Error, NetworkError, Result};
use capnp::message::{Builder as MsgBuilder, ReaderOptions};
use capnp_futures::serialize as capnp_serialize;
use dashmap::{DashMap, DashSet};
use parking_lot::Mutex;
use thiserror::Error;
use tokio::{
net::{TcpListener, TcpStream},
stream::StreamExt,
};
use tokio_util::compat::{Tokio02AsyncReadCompatExt, Tokio02AsyncWriteCompatExt};
const CAPNP_BUF_READ_OPTS: ReaderOptions = ReaderOptions {
traversal_limit_in_words: std::u64::MAX,
nesting_limit: 64,
};
pub(crate) enum MapOutputTrackerMessage {
// Contains shuffle_id
GetMapOutputLocations(i64),
StopMapOutputTracker,
}
/// The key is the shuffle_id
pub type ServerUris = Arc<DashMap<usize, Vec<Option<String>>>>;
// Starts the server in master node and client in slave nodes. Similar to cache tracker.
#[derive(Clone, Debug)]
pub(crate) struct MapOutputTracker {
is_master: bool,
pub server_uris: ServerUris,
fetching: Arc<DashSet<usize>>,
generation: Arc<Mutex<i64>>,
master_addr: SocketAddr,
}
// Only master_addr doesn't have a default.
impl Default for MapOutputTracker {
fn default() -> Self {
MapOutputTracker {
is_master: Default::default(),
server_uris: Default::default(),
fetching: Default::default(),
generation: Default::default(),
master_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), 0),
}
}
}
impl MapOutputTracker {
pub fn new(is_master: bool, master_addr: SocketAddr) -> Self {
let output_tracker = MapOutputTracker {
is_master,
server_uris: Arc::new(DashMap::new()),
fetching: Arc::new(DashSet::new()),
generation: Arc::new(Mutex::new(0)),
master_addr,
};
output_tracker.server();
output_tracker
}
async fn client(&self, shuffle_id: usize) -> Result<Vec<String>> {
let mut stream = loop {
match TcpStream::connect(self.master_addr).await {
Ok(stream) => break stream,
Err(_) => continue,
}
};
let (reader, writer) = stream.split();
let reader = reader.compat();
let mut writer = writer.compat_write();
log::debug!(
"connected to master to fetch shuffle task #{} data hosts",
shuffle_id
);
let shuffle_id_bytes = bincode::serialize(&shuffle_id)?;
let mut message = MsgBuilder::new_default();
let mut shuffle_data = message.init_root::<serialized_data::Builder>();
shuffle_data.set_msg(&shuffle_id_bytes);
capnp_serialize::write_message(&mut writer, &message).await?;
let message_reader = capnp_serialize::read_message(reader, CAPNP_BUF_READ_OPTS)
.await?
.ok_or_else(|| NetworkError::NoMessageReceived)?;
let shuffle_data = message_reader.get_root::<serialized_data::Reader>()?;
let locs: Vec<String> = bincode::deserialize(&shuffle_data.get_msg()?)?;
Ok(locs)
}
fn server(&self) {
if !self.is_master {
return;
}
log::debug!("map output tracker server starting");
let master_addr = self.master_addr;
let server_uris = self.server_uris.clone();
tokio::spawn(async move {
let mut listener = TcpListener::bind(master_addr)
.await
.map_err(NetworkError::TcpListener)?;
log::debug!("map output tracker server started");
while let Some(Ok(mut stream)) = listener.incoming().next().await {
let server_uris_clone = server_uris.clone();
tokio::spawn(async move {
let (reader, writer) = stream.split();
let reader = reader.compat();
let writer = writer.compat_write();
// reading
let message_reader = capnp_serialize::read_message(reader, CAPNP_BUF_READ_OPTS)
.await?
.ok_or_else(|| NetworkError::NoMessageReceived)?;
let shuffle_id = {
let data = message_reader.get_root::<serialized_data::Reader>()?;
bincode::deserialize(data.get_msg()?)?
};
while server_uris_clone
.get(&shuffle_id)
.ok_or_else(|| MapOutputError::ShuffleIdNotFound(shuffle_id))?
.iter()
.filter(|x| !x.is_none())
.count()
== 0
{
//check whether this will hurt the performance or not
tokio::time::delay_for(Duration::from_millis(1)).await;
}
let locs = server_uris_clone
.get(&shuffle_id)
.map(|kv| {
kv.value()
.iter()
.cloned()
.map(|x| x.unwrap())
.collect::<Vec<_>>()
})
.unwrap_or_default();
log::debug!(
"locs inside map output tracker server for shuffle id #{}: {:?}",
shuffle_id,
locs
);
// writing response
let result = bincode::serialize(&locs)?;
let mut message = MsgBuilder::new_default();
let mut locs_data = message.init_root::<serialized_data::Builder>();
locs_data.set_msg(&result);
// TODO: remove blocking call when possible
futures::executor::block_on(async {
capnp_futures::serialize::write_message(writer, message)
.await
.map_err(Error::CapnpDeserialization)?;
Ok::<_, Error>(())
})?;
Ok::<_, Error>(())
});
}
Err::<(), _>(Error::ExecutorShutdown)
});
}
pub fn register_shuffle(&self, shuffle_id: usize, num_maps: usize) {
log::debug!("inside register shuffle");
if self.server_uris.get(&shuffle_id).is_some() {
// TODO: error handling
log::debug!("map tracker register shuffle none");
return;
}
self.server_uris.insert(shuffle_id, vec![None; num_maps]);
log::debug!("server_uris after register_shuffle {:?}", self.server_uris);
}
pub fn register_map_output(&self, shuffle_id: usize, map_id: usize, server_uri: String) {
log::debug!(
"registering map output from shuffle task #{} with map id #{} at server: {}",
shuffle_id,
map_id,
server_uri
);
self.server_uris.get_mut(&shuffle_id).unwrap()[map_id] = Some(server_uri);
}
pub fn register_map_outputs(&self, shuffle_id: usize, locs: Vec<Option<String>>) {
log::debug!(
"registering map outputs inside map output tracker for shuffle id #{}: {:?}",
shuffle_id,
locs
);
self.server_uris.insert(shuffle_id, locs);
}
pub fn unregister_map_output(&self, shuffle_id: usize, map_id: usize, server_uri: String) {
let array = self.server_uris.get(&shuffle_id);
if let Some(arr) = array {
if arr.get(map_id).unwrap() == &Some(server_uri) {
self.server_uris
.get_mut(&shuffle_id)
.unwrap()
.insert(map_id, None)
}
self.increment_generation();
} else {
// TODO: error logging
}
}
pub async fn get_server_uris(&self, shuffle_id: usize) -> Result<Vec<String>> {
log::debug!(
"trying to get uri for shuffle task #{}, current server uris: {:?}",
shuffle_id,
self.server_uris
);
if self
.server_uris
.get(&shuffle_id)
.map(|some| some.iter().filter_map(|x| x.clone()).next())
.flatten()
.is_none()
{
if self.fetching.contains(&shuffle_id) {
while self.fetching.contains(&shuffle_id) {
// TODO: check whether this will hurt the performance or not
tokio::time::delay_for(Duration::from_millis(1)).await;
}
let servers = self
.server_uris
.get(&shuffle_id)
.ok_or_else(|| MapOutputError::ShuffleIdNotFound(shuffle_id))?
.iter()
.filter(|x| !x.is_none())
.map(|x| x.clone().unwrap())
.collect::<Vec<_>>();
log::debug!("returning after fetching done, return: {:?}", servers);
return Ok(servers);
} else {
log::debug!("adding to fetching queue");
self.fetching.insert(shuffle_id);
}
let fetched = self.client(shuffle_id).await?;
log::debug!("fetched locs from client: {:?}", fetched);
self.server_uris.insert(
shuffle_id,
fetched.iter().map(|x| Some(x.clone())).collect(),
);
log::debug!("added locs to server uris after fetching");
self.fetching.remove(&shuffle_id);
Ok(fetched)
} else {
Ok(self
.server_uris
.get(&shuffle_id)
.ok_or_else(|| MapOutputError::ShuffleIdNotFound(shuffle_id))?
.iter()
.filter(|x| !x.is_none())
.map(|x| x.clone().unwrap())
.collect())
}
}
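// The generation counter is bumped whenever a map output is unregistered; a node that observes a newer generation drops its cached server URIs (see update_generation).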
pub fn increment_generation(&self) {
*self.generation.lock() += 1;
}
pub fn get_generation(&self) -> i64 {
*self.generation.lock()
} | pub fn update_generation(&mut self, new_gen: i64) {
if new_gen > *self.generation.lock() {
self.server_uris = Arc::new(DashMap::new());
*self.generation.lock() = new_gen;
}
}
}
#[derive(Debug, Error)]
pub enum MapOutputError {
#[error("Shuffle id output #{0} not found in the map")]
ShuffleIdNotFound(usize),
} | random_line_split |
|
map_output_tracker.rs | use std::net::{IpAddr, Ipv4Addr, SocketAddr};
use std::sync::Arc;
use std::time::Duration;
use crate::serialized_data_capnp::serialized_data;
use crate::{Error, NetworkError, Result};
use capnp::message::{Builder as MsgBuilder, ReaderOptions};
use capnp_futures::serialize as capnp_serialize;
use dashmap::{DashMap, DashSet};
use parking_lot::Mutex;
use thiserror::Error;
use tokio::{
net::{TcpListener, TcpStream},
stream::StreamExt,
};
use tokio_util::compat::{Tokio02AsyncReadCompatExt, Tokio02AsyncWriteCompatExt};
const CAPNP_BUF_READ_OPTS: ReaderOptions = ReaderOptions {
traversal_limit_in_words: std::u64::MAX,
nesting_limit: 64,
};
pub(crate) enum MapOutputTrackerMessage {
// Contains shuffle_id
GetMapOutputLocations(i64),
StopMapOutputTracker,
}
/// The key is the shuffle_id
pub type ServerUris = Arc<DashMap<usize, Vec<Option<String>>>>;
// Starts the server in master node and client in slave nodes. Similar to cache tracker.
#[derive(Clone, Debug)]
pub(crate) struct MapOutputTracker {
is_master: bool,
pub server_uris: ServerUris,
fetching: Arc<DashSet<usize>>,
generation: Arc<Mutex<i64>>,
master_addr: SocketAddr,
}
// Only master_addr doesn't have a default.
impl Default for MapOutputTracker {
fn default() -> Self {
MapOutputTracker {
is_master: Default::default(),
server_uris: Default::default(),
fetching: Default::default(),
generation: Default::default(),
master_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), 0),
}
}
}
impl MapOutputTracker {
pub fn | (is_master: bool, master_addr: SocketAddr) -> Self {
let output_tracker = MapOutputTracker {
is_master,
server_uris: Arc::new(DashMap::new()),
fetching: Arc::new(DashSet::new()),
generation: Arc::new(Mutex::new(0)),
master_addr,
};
output_tracker.server();
output_tracker
}
async fn client(&self, shuffle_id: usize) -> Result<Vec<String>> {
let mut stream = loop {
match TcpStream::connect(self.master_addr).await {
Ok(stream) => break stream,
Err(_) => continue,
}
};
let (reader, writer) = stream.split();
let reader = reader.compat();
let mut writer = writer.compat_write();
log::debug!(
"connected to master to fetch shuffle task #{} data hosts",
shuffle_id
);
let shuffle_id_bytes = bincode::serialize(&shuffle_id)?;
let mut message = MsgBuilder::new_default();
let mut shuffle_data = message.init_root::<serialized_data::Builder>();
shuffle_data.set_msg(&shuffle_id_bytes);
capnp_serialize::write_message(&mut writer, &message).await?;
let message_reader = capnp_serialize::read_message(reader, CAPNP_BUF_READ_OPTS)
.await?
.ok_or_else(|| NetworkError::NoMessageReceived)?;
let shuffle_data = message_reader.get_root::<serialized_data::Reader>()?;
let locs: Vec<String> = bincode::deserialize(&shuffle_data.get_msg()?)?;
Ok(locs)
}
fn server(&self) {
if !self.is_master {
return;
}
log::debug!("map output tracker server starting");
let master_addr = self.master_addr;
let server_uris = self.server_uris.clone();
tokio::spawn(async move {
let mut listener = TcpListener::bind(master_addr)
.await
.map_err(NetworkError::TcpListener)?;
log::debug!("map output tracker server started");
while let Some(Ok(mut stream)) = listener.incoming().next().await {
let server_uris_clone = server_uris.clone();
tokio::spawn(async move {
let (reader, writer) = stream.split();
let reader = reader.compat();
let writer = writer.compat_write();
// reading
let message_reader = capnp_serialize::read_message(reader, CAPNP_BUF_READ_OPTS)
.await?
.ok_or_else(|| NetworkError::NoMessageReceived)?;
let shuffle_id = {
let data = message_reader.get_root::<serialized_data::Reader>()?;
bincode::deserialize(data.get_msg()?)?
};
while server_uris_clone
.get(&shuffle_id)
.ok_or_else(|| MapOutputError::ShuffleIdNotFound(shuffle_id))?
.iter()
.filter(|x| !x.is_none())
.count()
== 0
{
//check whether this will hurt the performance or not
tokio::time::delay_for(Duration::from_millis(1)).await;
}
let locs = server_uris_clone
.get(&shuffle_id)
.map(|kv| {
kv.value()
.iter()
.cloned()
.map(|x| x.unwrap())
.collect::<Vec<_>>()
})
.unwrap_or_default();
log::debug!(
"locs inside map output tracker server for shuffle id #{}: {:?}",
shuffle_id,
locs
);
// writing response
let result = bincode::serialize(&locs)?;
let mut message = MsgBuilder::new_default();
let mut locs_data = message.init_root::<serialized_data::Builder>();
locs_data.set_msg(&result);
// TODO: remove blocking call when possible
futures::executor::block_on(async {
capnp_futures::serialize::write_message(writer, message)
.await
.map_err(Error::CapnpDeserialization)?;
Ok::<_, Error>(())
})?;
Ok::<_, Error>(())
});
}
Err::<(), _>(Error::ExecutorShutdown)
});
}
pub fn register_shuffle(&self, shuffle_id: usize, num_maps: usize) {
log::debug!("inside register shuffle");
if self.server_uris.get(&shuffle_id).is_some() {
// TODO: error handling
log::debug!("map tracker register shuffle none");
return;
}
self.server_uris.insert(shuffle_id, vec![None; num_maps]);
log::debug!("server_uris after register_shuffle {:?}", self.server_uris);
}
pub fn register_map_output(&self, shuffle_id: usize, map_id: usize, server_uri: String) {
log::debug!(
"registering map output from shuffle task #{} with map id #{} at server: {}",
shuffle_id,
map_id,
server_uri
);
self.server_uris.get_mut(&shuffle_id).unwrap()[map_id] = Some(server_uri);
}
pub fn register_map_outputs(&self, shuffle_id: usize, locs: Vec<Option<String>>) {
log::debug!(
"registering map outputs inside map output tracker for shuffle id #{}: {:?}",
shuffle_id,
locs
);
self.server_uris.insert(shuffle_id, locs);
}
pub fn unregister_map_output(&self, shuffle_id: usize, map_id: usize, server_uri: String) {
let array = self.server_uris.get(&shuffle_id);
if let Some(arr) = array {
if arr.get(map_id).unwrap() == &Some(server_uri) {
self.server_uris
.get_mut(&shuffle_id)
.unwrap()
.insert(map_id, None)
}
self.increment_generation();
} else {
// TODO: error logging
}
}
pub async fn get_server_uris(&self, shuffle_id: usize) -> Result<Vec<String>> {
log::debug!(
"trying to get uri for shuffle task #{}, current server uris: {:?}",
shuffle_id,
self.server_uris
);
if self
.server_uris
.get(&shuffle_id)
.map(|some| some.iter().filter_map(|x| x.clone()).next())
.flatten()
.is_none()
{
if self.fetching.contains(&shuffle_id) {
while self.fetching.contains(&shuffle_id) {
// TODO: check whether this will hurt the performance or not
tokio::time::delay_for(Duration::from_millis(1)).await;
}
let servers = self
.server_uris
.get(&shuffle_id)
.ok_or_else(|| MapOutputError::ShuffleIdNotFound(shuffle_id))?
.iter()
.filter(|x| !x.is_none())
.map(|x| x.clone().unwrap())
.collect::<Vec<_>>();
log::debug!("returning after fetching done, return: {:?}", servers);
return Ok(servers);
} else {
log::debug!("adding to fetching queue");
self.fetching.insert(shuffle_id);
}
let fetched = self.client(shuffle_id).await?;
log::debug!("fetched locs from client: {:?}", fetched);
self.server_uris.insert(
shuffle_id,
fetched.iter().map(|x| Some(x.clone())).collect(),
);
log::debug!("added locs to server uris after fetching");
self.fetching.remove(&shuffle_id);
Ok(fetched)
} else {
Ok(self
.server_uris
.get(&shuffle_id)
.ok_or_else(|| MapOutputError::ShuffleIdNotFound(shuffle_id))?
.iter()
.filter(|x| !x.is_none())
.map(|x| x.clone().unwrap())
.collect())
}
}
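// The generation counter is bumped whenever a map output is unregistered; a node that observes a newer generation drops its cached server URIs (see update_generation).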
pub fn increment_generation(&self) {
*self.generation.lock() += 1;
}
pub fn get_generation(&self) -> i64 {
*self.generation.lock()
}
pub fn update_generation(&mut self, new_gen: i64) {
if new_gen > *self.generation.lock() {
self.server_uris = Arc::new(DashMap::new());
*self.generation.lock() = new_gen;
}
}
}
#[derive(Debug, Error)]
pub enum MapOutputError {
#[error("Shuffle id output #{0} not found in the map")]
ShuffleIdNotFound(usize),
}
| new | identifier_name |
map_output_tracker.rs | use std::net::{IpAddr, Ipv4Addr, SocketAddr};
use std::sync::Arc;
use std::time::Duration;
use crate::serialized_data_capnp::serialized_data;
use crate::{Error, NetworkError, Result};
use capnp::message::{Builder as MsgBuilder, ReaderOptions};
use capnp_futures::serialize as capnp_serialize;
use dashmap::{DashMap, DashSet};
use parking_lot::Mutex;
use thiserror::Error;
use tokio::{
net::{TcpListener, TcpStream},
stream::StreamExt,
};
use tokio_util::compat::{Tokio02AsyncReadCompatExt, Tokio02AsyncWriteCompatExt};
const CAPNP_BUF_READ_OPTS: ReaderOptions = ReaderOptions {
traversal_limit_in_words: std::u64::MAX,
nesting_limit: 64,
};
pub(crate) enum MapOutputTrackerMessage {
// Contains shuffle_id
GetMapOutputLocations(i64),
StopMapOutputTracker,
}
/// The key is the shuffle_id
pub type ServerUris = Arc<DashMap<usize, Vec<Option<String>>>>;
// Starts the server in master node and client in slave nodes. Similar to cache tracker.
#[derive(Clone, Debug)]
pub(crate) struct MapOutputTracker {
is_master: bool,
pub server_uris: ServerUris,
fetching: Arc<DashSet<usize>>,
generation: Arc<Mutex<i64>>,
master_addr: SocketAddr,
}
// Only master_addr doesn't have a default.
impl Default for MapOutputTracker {
fn default() -> Self {
MapOutputTracker {
is_master: Default::default(),
server_uris: Default::default(),
fetching: Default::default(),
generation: Default::default(),
master_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), 0),
}
}
}
impl MapOutputTracker {
pub fn new(is_master: bool, master_addr: SocketAddr) -> Self {
let output_tracker = MapOutputTracker {
is_master,
server_uris: Arc::new(DashMap::new()),
fetching: Arc::new(DashSet::new()),
generation: Arc::new(Mutex::new(0)),
master_addr,
};
output_tracker.server();
output_tracker
}
async fn client(&self, shuffle_id: usize) -> Result<Vec<String>> {
let mut stream = loop {
match TcpStream::connect(self.master_addr).await {
Ok(stream) => break stream,
Err(_) => continue,
}
};
let (reader, writer) = stream.split();
let reader = reader.compat();
let mut writer = writer.compat_write();
log::debug!(
"connected to master to fetch shuffle task #{} data hosts",
shuffle_id
);
let shuffle_id_bytes = bincode::serialize(&shuffle_id)?;
let mut message = MsgBuilder::new_default();
let mut shuffle_data = message.init_root::<serialized_data::Builder>();
shuffle_data.set_msg(&shuffle_id_bytes);
capnp_serialize::write_message(&mut writer, &message).await?;
let message_reader = capnp_serialize::read_message(reader, CAPNP_BUF_READ_OPTS)
.await?
.ok_or_else(|| NetworkError::NoMessageReceived)?;
let shuffle_data = message_reader.get_root::<serialized_data::Reader>()?;
let locs: Vec<String> = bincode::deserialize(&shuffle_data.get_msg()?)?;
Ok(locs)
}
fn server(&self) {
if !self.is_master {
return;
}
log::debug!("map output tracker server starting");
let master_addr = self.master_addr;
let server_uris = self.server_uris.clone();
tokio::spawn(async move {
let mut listener = TcpListener::bind(master_addr)
.await
.map_err(NetworkError::TcpListener)?;
log::debug!("map output tracker server started");
while let Some(Ok(mut stream)) = listener.incoming().next().await {
let server_uris_clone = server_uris.clone();
tokio::spawn(async move {
let (reader, writer) = stream.split();
let reader = reader.compat();
let writer = writer.compat_write();
// reading
let message_reader = capnp_serialize::read_message(reader, CAPNP_BUF_READ_OPTS)
.await?
.ok_or_else(|| NetworkError::NoMessageReceived)?;
let shuffle_id = {
let data = message_reader.get_root::<serialized_data::Reader>()?;
bincode::deserialize(data.get_msg()?)?
};
while server_uris_clone
.get(&shuffle_id)
.ok_or_else(|| MapOutputError::ShuffleIdNotFound(shuffle_id))?
.iter()
.filter(|x| !x.is_none())
.count()
== 0
{
//check whether this will hurt the performance or not
tokio::time::delay_for(Duration::from_millis(1)).await;
}
let locs = server_uris_clone
.get(&shuffle_id)
.map(|kv| {
kv.value()
.iter()
.cloned()
.map(|x| x.unwrap())
.collect::<Vec<_>>()
})
.unwrap_or_default();
log::debug!(
"locs inside map output tracker server for shuffle id #{}: {:?}",
shuffle_id,
locs
);
// writing response
let result = bincode::serialize(&locs)?;
let mut message = MsgBuilder::new_default();
let mut locs_data = message.init_root::<serialized_data::Builder>();
locs_data.set_msg(&result);
// TODO: remove blocking call when possible
futures::executor::block_on(async {
capnp_futures::serialize::write_message(writer, message)
.await
.map_err(Error::CapnpDeserialization)?;
Ok::<_, Error>(())
})?;
Ok::<_, Error>(())
});
}
Err::<(), _>(Error::ExecutorShutdown)
});
}
pub fn register_shuffle(&self, shuffle_id: usize, num_maps: usize) {
log::debug!("inside register shuffle");
if self.server_uris.get(&shuffle_id).is_some() {
// TODO: error handling
log::debug!("map tracker register shuffle none");
return;
}
self.server_uris.insert(shuffle_id, vec![None; num_maps]);
log::debug!("server_uris after register_shuffle {:?}", self.server_uris);
}
pub fn register_map_output(&self, shuffle_id: usize, map_id: usize, server_uri: String) {
log::debug!(
"registering map output from shuffle task #{} with map id #{} at server: {}",
shuffle_id,
map_id,
server_uri
);
self.server_uris.get_mut(&shuffle_id).unwrap()[map_id] = Some(server_uri);
}
pub fn register_map_outputs(&self, shuffle_id: usize, locs: Vec<Option<String>>) {
log::debug!(
"registering map outputs inside map output tracker for shuffle id #{}: {:?}",
shuffle_id,
locs
);
self.server_uris.insert(shuffle_id, locs);
}
pub fn unregister_map_output(&self, shuffle_id: usize, map_id: usize, server_uri: String) {
let array = self.server_uris.get(&shuffle_id);
if let Some(arr) = array | else {
// TODO: error logging
}
}
pub async fn get_server_uris(&self, shuffle_id: usize) -> Result<Vec<String>> {
log::debug!(
"trying to get uri for shuffle task #{}, current server uris: {:?}",
shuffle_id,
self.server_uris
);
if self
.server_uris
.get(&shuffle_id)
.map(|some| some.iter().filter_map(|x| x.clone()).next())
.flatten()
.is_none()
{
if self.fetching.contains(&shuffle_id) {
while self.fetching.contains(&shuffle_id) {
// TODO: check whether this will hurt the performance or not
tokio::time::delay_for(Duration::from_millis(1)).await;
}
let servers = self
.server_uris
.get(&shuffle_id)
.ok_or_else(|| MapOutputError::ShuffleIdNotFound(shuffle_id))?
.iter()
.filter(|x| !x.is_none())
.map(|x| x.clone().unwrap())
.collect::<Vec<_>>();
log::debug!("returning after fetching done, return: {:?}", servers);
return Ok(servers);
} else {
log::debug!("adding to fetching queue");
self.fetching.insert(shuffle_id);
}
let fetched = self.client(shuffle_id).await?;
log::debug!("fetched locs from client: {:?}", fetched);
self.server_uris.insert(
shuffle_id,
fetched.iter().map(|x| Some(x.clone())).collect(),
);
log::debug!("added locs to server uris after fetching");
self.fetching.remove(&shuffle_id);
Ok(fetched)
} else {
Ok(self
.server_uris
.get(&shuffle_id)
.ok_or_else(|| MapOutputError::ShuffleIdNotFound(shuffle_id))?
.iter()
.filter(|x| !x.is_none())
.map(|x| x.clone().unwrap())
.collect())
}
}
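// The generation counter is bumped whenever a map output is unregistered; a node that observes a newer generation drops its cached server URIs (see update_generation).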
pub fn increment_generation(&self) {
*self.generation.lock() += 1;
}
pub fn get_generation(&self) -> i64 {
*self.generation.lock()
}
pub fn update_generation(&mut self, new_gen: i64) {
if new_gen > *self.generation.lock() {
self.server_uris = Arc::new(DashMap::new());
*self.generation.lock() = new_gen;
}
}
}
#[derive(Debug, Error)]
pub enum MapOutputError {
#[error("Shuffle id output #{0} not found in the map")]
ShuffleIdNotFound(usize),
}
| {
if arr.get(map_id).unwrap() == &Some(server_uri) {
self.server_uris
.get_mut(&shuffle_id)
.unwrap()
.insert(map_id, None)
}
self.increment_generation();
} | conditional_block |
core.py | import wx
import sys
from wx.lib.mixins.listctrl import ListCtrlAutoWidthMixin
from player import ViewPlayer
OK = wx.OK | wx.ICON_EXCLAMATION
ACV = wx.ALIGN_CENTER_VERTICAL
YN = wx.YES_NO | wx.ICON_WARNING
class AutoWidthListCtrl(wx.ListCtrl, ListCtrlAutoWidthMixin):
def __init__(self, parent):
wx.ListCtrl.__init__(self, parent, -1, style=wx.LC_REPORT)
ListCtrlAutoWidthMixin.__init__(self)
class Core(wx.Frame):
def __init__(self, parent, controller, title):
super(Core, self).__init__(parent=parent, title=title)
self.parent = parent
self.controller = controller
self.child = None
self.panel = Panel(self)
self.panel.SetBackgroundColour('LightGray')
self.status_bar = self.CreateStatusBar(2)
self.status_bar.SetStatusWidths([200, -1])
gauge_pos, gauge_size = self.get_gauge_dimensions()
self.gauge = wx.Gauge(self.status_bar, -1, 100, gauge_pos, gauge_size)
# Menues
self.menubar = wx.MenuBar()
self.SetMenuBar(self.menubar)
# Player menu
menu_player = wx.Menu()
self.menubar.Append(menu_player, "Players")
self.menu_new_player = menu_player.Append(-1, "New Player",
"New Player")
menu_player.AppendSeparator()
self.menu_players_import = menu_player.Append(-1, "import Players",
"import Players")
menu_player.AppendSeparator()
self.menu_evaluations = menu_player.Append(-1, "extract evaluations",
"extract evaluations")
# Bindings
# player bindings
self.Bind(wx.EVT_SIZE, self.on_size)
self.Bind(wx.EVT_MENU, self.on_new_player, self.menu_new_player)
self.Bind(wx.EVT_MENU, self.on_import, self.menu_players_import)
self.Bind(wx.EVT_MENU, self.on_extract, self.menu_evaluations)
self.Bind(wx.EVT_BUTTON, self.quit, self.panel.btn_quit)
self.Bind(wx.EVT_BUTTON, self.on_refresh, self.panel.btn_refresh)
self.Bind(wx.EVT_LIST_ITEM_SELECTED, self.on_list,
self.panel.players)
self.Bind(wx.EVT_LIST_COL_CLICK, self.on_list_column,
self.panel.players) # Players initialization
players = self.controller.get_players()
if players:
self.fill_players(players)
self.set_status_text('Players on database: %s' % len(players))
else:
self.set_status_text('No Players on database')
size = (450, 500)
self.SetSize(size)
self.Show()
# noinspection PyUnusedLocal
def quit(self, event):
self.Destroy()
# noinspection PyUnusedLocal
def on_extract(self, event):
self.child = ViewExtract(parent=self, title="extract evaluations")
self.show_child()
# noinspection PyUnusedLocal
def on_import(self, event):
choice = wx.MessageBox('Deleting All Players?', 'warning', YN)
if choice == wx.YES:
self.controller.delete_all_players()
self.controller.import_players()
wx.MessageBox('Players successfully imported!', '', OK)
players = self.controller.get_players()
self.fill_players(players)
# noinspection PyUnusedLocal
def edit_player(self, event):
self.Disable()
ViewPlayer(parent=self, title='Edit Player', is_editor=True)
# noinspection PyUnusedLocal
def on_new_player(self, event):
self.child = ViewPlayer(parent=self, title='New Player')
self.show_child()
# noinspection PyUnusedLocal
def on_quit(self, event):
self.Destroy()
# noinspection PyUnusedLocal
def on_refresh(self, event):
self.panel.players.DeleteAllItems()
players = self.controller.get_players()
self.fill_players(players)
# noinspection PyUnusedLocal
def on_list(self, event):
item_id = event.m_itemIndex
player_code = self.panel.players.GetItemText(item_id)
player = self.controller.get_player_by_code(player_code)
self.controller.set_temporary_object(player)
item_name = self.panel.players.GetItem(item_id, 1)
player_name = item_name.GetText()
item_fullname = self.panel.players.GetItem(item_id, 2)
player_fullname = item_fullname.GetText()
view_edit = ViewPlayer(self, "Edit player", is_editor=True)
view_edit.Show()
view_edit.panel.code.SetValue(player_code)
view_edit.panel.name.SetValue(player_name)
view_edit.panel.fullname.SetValue(player_fullname)
        # ChangeValue is used here instead of SetValue so that EVT_TEXT is not
        # fired. That EVT_TEXT would look for a handler at this level (the
        # Frame) and, not finding one, would bubble up to the parent, which
        # does bind EVT_TEXT on the 'ppl' text control; that would invoke the
        # 'on_text_entry' callback and make a mess.
# view_edit.panel.btn_delete.Enable()
view_edit.SetWindowStyle(wx.STAY_ON_TOP)
def on_list_column(self, event):
self.panel.players.DeleteAllItems()
id_column = event.GetColumn()
players = self.controller.get_sorted_players(id_column)
self.fill_players(players)
def fill_players(self, players):
for player in players:
index = self.panel.players.InsertStringItem(sys.maxint,
str(player.code))
self.panel.players.SetStringItem(index, 1, str(player.name))
self.panel.players.SetStringItem(index, 2, str(player.fullname))
@staticmethod
def show_message(string):
wx.MessageBox(string, 'core info', wx.OK | wx.ICON_EXCLAMATION)
def show_child(self):
self.Disable()
self.child.Centre()
self.child.Show()
def get_gauge_dimensions(self):
"""
get_gauge_dimensions(self) -> tuple_a, tuple_b
        tuple_a is a tuple with the x and y position of the second field
of the StatusBar
"""
pos_x, pos_y, dim_x, dim_y = self.status_bar.GetFieldRect(1)
return (pos_x, pos_y), (dim_x, dim_y)
def on_size(self, event):
"""
on_size()
        it redraws and repositions the gauge rectangle when the frame window is
resized
"""
size = self.GetSize()
self.SetSize(size)
gauge_pos, gauge_size = self.get_gauge_dimensions()
self.gauge.SetSize(gauge_size)
event.Skip()
self.Update()
def set_range(self, value):
"""
set_range(value)
It sets the maximum value of gauge widget
"""
self.gauge.SetRange(value)
def set_progress(self, value):
"""
set_progress(value)
It sets the actual progress value to gauge widget
"""
self.gauge.SetValue(value)
def set_status_text(self, value):
"""
set_status_text(value)
It sets the text to the first field of StatusBar
"""
self.status_bar.SetStatusText(value)
class Panel(wx.Panel):
def __init__(self, parent):
super(Panel, self).__init__(parent=parent)
self.players = AutoWidthListCtrl(self)
self.players.InsertColumn(0, 'code', wx.LIST_FORMAT_RIGHT, 50)
self.players.InsertColumn(1, 'name', width=125)
self.players.InsertColumn(2, 'fullname', width=175)
players_box = wx.BoxSizer(wx.HORIZONTAL)
players_box.Add(self.players, 1, wx.EXPAND)
btn_sizer = wx.FlexGridSizer(rows=1, cols=2, hgap=5, vgap=5)
self.btn_quit = wx.Button(self, wx.ID_CANCEL, label="Quit")
self.btn_refresh = wx.Button(self, wx.ID_OK, label="Refresh")
btn_sizer.Add(self.btn_quit, 0, wx.EXPAND)
btn_sizer.Add(self.btn_refresh, 0, wx.EXPAND)
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(players_box, 1, wx.EXPAND | wx.ALL, 5)
sizer.Add(btn_sizer, 0, wx.EXPAND | wx.ALL, 5)
self.SetSizer(sizer)
class ViewExtract(wx.Frame):
def __init__(self, parent, title):
self.parent = parent
self.title = title
super(ViewExtract, self).__init__(parent=self.parent, title=title)
self.controller = self.parent.controller
self.panel = PanelExtract(parent=self)
self.SetSize((300, 150))
# bindings
self.Bind(wx.EVT_CLOSE, self.on_quit)
self.Bind(wx.EVT_BUTTON, self.on_quit, self.panel.btn_quit)
self.Bind(wx.EVT_BUTTON, self.on_extract, self.panel.btn_extract)
# noinspection PyUnusedLocal
def on_extract(self, event):
day = self.panel.day.GetValue()
if day:
if self.controller.are_evaluations_ready(day):
self.controller.extract_evaluations(day)
self.parent.Enable()
self.Destroy()
else:
wx.MessageBox('Evaluations for day %s not ready!' % day, '', OK)
else:
|
# noinspection PyUnusedLocal
def on_quit(self, event):
self.parent.Enable()
self.Destroy()
class PanelExtract(wx.Panel):
def __init__(self, parent):
super(PanelExtract, self).__init__(parent)
# Attributes
self.day = wx.TextCtrl(self)
# Layout
text_sizer = wx.FlexGridSizer(rows=1, cols=2, hgap=5, vgap=5)
text_sizer.Add(wx.StaticText(self, label="Day:"), 0, ACV)
text_sizer.Add(self.day, 0, ACV)
text_sizer.AddGrowableCol(1)
button_sizer = wx.FlexGridSizer(rows=1, cols=2, hgap=5, vgap=5)
self.btn_extract = wx.Button(self, wx.ID_OK, label="Extract")
self.btn_quit = wx.Button(self, wx.ID_CANCEL, label="Quit")
self.btn_quit.SetDefault()
button_sizer.Add(self.btn_extract, 0, ACV)
button_sizer.Add(self.btn_quit, 0, ACV)
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(text_sizer, 1, wx.EXPAND | wx.ALL, 5)
sizer.Add(button_sizer, 0, wx.ALIGN_CENTER | wx.ALL, 5)
self.SetBackgroundColour('LightGray')
self.SetSizer(sizer)
| wx.MessageBox('Please set a day to extract!', '', OK) | conditional_block |
core.py | import wx
import sys
from wx.lib.mixins.listctrl import ListCtrlAutoWidthMixin
from player import ViewPlayer
OK = wx.OK | wx.ICON_EXCLAMATION
ACV = wx.ALIGN_CENTER_VERTICAL
YN = wx.YES_NO | wx.ICON_WARNING
class AutoWidthListCtrl(wx.ListCtrl, ListCtrlAutoWidthMixin):
def __init__(self, parent):
wx.ListCtrl.__init__(self, parent, -1, style=wx.LC_REPORT)
ListCtrlAutoWidthMixin.__init__(self)
class Core(wx.Frame):
def __init__(self, parent, controller, title):
super(Core, self).__init__(parent=parent, title=title)
self.parent = parent
self.controller = controller
self.child = None
self.panel = Panel(self)
self.panel.SetBackgroundColour('LightGray')
self.status_bar = self.CreateStatusBar(2)
self.status_bar.SetStatusWidths([200, -1])
gauge_pos, gauge_size = self.get_gauge_dimensions()
self.gauge = wx.Gauge(self.status_bar, -1, 100, gauge_pos, gauge_size)
        # Menus
self.menubar = wx.MenuBar()
self.SetMenuBar(self.menubar)
# Player menu
menu_player = wx.Menu()
self.menubar.Append(menu_player, "Players")
self.menu_new_player = menu_player.Append(-1, "New Player",
"New Player")
menu_player.AppendSeparator()
self.menu_players_import = menu_player.Append(-1, "import Players",
"import Players")
menu_player.AppendSeparator()
self.menu_evaluations = menu_player.Append(-1, "extract evaluations",
"extract evaluations")
# Bindings
# player bindings
self.Bind(wx.EVT_SIZE, self.on_size)
self.Bind(wx.EVT_MENU, self.on_new_player, self.menu_new_player)
self.Bind(wx.EVT_MENU, self.on_import, self.menu_players_import)
self.Bind(wx.EVT_MENU, self.on_extract, self.menu_evaluations)
self.Bind(wx.EVT_BUTTON, self.quit, self.panel.btn_quit)
self.Bind(wx.EVT_BUTTON, self.on_refresh, self.panel.btn_refresh)
self.Bind(wx.EVT_LIST_ITEM_SELECTED, self.on_list,
self.panel.players)
self.Bind(wx.EVT_LIST_COL_CLICK, self.on_list_column,
self.panel.players) # Players initialization
players = self.controller.get_players()
if players:
self.fill_players(players)
self.set_status_text('Players on database: %s' % len(players))
else:
self.set_status_text('No Players on database')
size = (450, 500)
self.SetSize(size)
self.Show()
# noinspection PyUnusedLocal
def quit(self, event):
self.Destroy()
# noinspection PyUnusedLocal
def on_extract(self, event):
self.child = ViewExtract(parent=self, title="extract evaluations")
self.show_child()
# noinspection PyUnusedLocal
def on_import(self, event):
choice = wx.MessageBox('Deleting All Players?', 'warning', YN)
if choice == wx.YES:
self.controller.delete_all_players()
self.controller.import_players()
wx.MessageBox('Players successfully imported!', '', OK)
players = self.controller.get_players()
self.fill_players(players)
# noinspection PyUnusedLocal
def edit_player(self, event):
self.Disable() | def on_new_player(self, event):
self.child = ViewPlayer(parent=self, title='New Player')
self.show_child()
# noinspection PyUnusedLocal
def on_quit(self, event):
self.Destroy()
# noinspection PyUnusedLocal
def on_refresh(self, event):
self.panel.players.DeleteAllItems()
players = self.controller.get_players()
self.fill_players(players)
# noinspection PyUnusedLocal
def on_list(self, event):
item_id = event.m_itemIndex
player_code = self.panel.players.GetItemText(item_id)
player = self.controller.get_player_by_code(player_code)
self.controller.set_temporary_object(player)
item_name = self.panel.players.GetItem(item_id, 1)
player_name = item_name.GetText()
item_fullname = self.panel.players.GetItem(item_id, 2)
player_fullname = item_fullname.GetText()
view_edit = ViewPlayer(self, "Edit player", is_editor=True)
view_edit.Show()
view_edit.panel.code.SetValue(player_code)
view_edit.panel.name.SetValue(player_name)
view_edit.panel.fullname.SetValue(player_fullname)
        # ChangeValue is used here instead of SetValue so that EVT_TEXT is not
        # fired. That EVT_TEXT would look for a handler at this level (the
        # Frame) and, not finding one, would bubble up to the parent, which
        # does bind EVT_TEXT on the 'ppl' text control; that would invoke the
        # 'on_text_entry' callback and make a mess.
# view_edit.panel.btn_delete.Enable()
view_edit.SetWindowStyle(wx.STAY_ON_TOP)
def on_list_column(self, event):
self.panel.players.DeleteAllItems()
id_column = event.GetColumn()
players = self.controller.get_sorted_players(id_column)
self.fill_players(players)
def fill_players(self, players):
for player in players:
index = self.panel.players.InsertStringItem(sys.maxint,
str(player.code))
self.panel.players.SetStringItem(index, 1, str(player.name))
self.panel.players.SetStringItem(index, 2, str(player.fullname))
@staticmethod
def show_message(string):
wx.MessageBox(string, 'core info', wx.OK | wx.ICON_EXCLAMATION)
def show_child(self):
self.Disable()
self.child.Centre()
self.child.Show()
def get_gauge_dimensions(self):
"""
get_gauge_dimensions(self) -> tuple_a, tuple_b
        tuple_a is a tuple with the x and y position of the second field
of the StatusBar
"""
pos_x, pos_y, dim_x, dim_y = self.status_bar.GetFieldRect(1)
return (pos_x, pos_y), (dim_x, dim_y)
def on_size(self, event):
"""
on_size()
        it redraws and repositions the gauge rectangle when the frame window is
resized
"""
size = self.GetSize()
self.SetSize(size)
gauge_pos, gauge_size = self.get_gauge_dimensions()
self.gauge.SetSize(gauge_size)
event.Skip()
self.Update()
def set_range(self, value):
"""
set_range(value)
It sets the maximum value of gauge widget
"""
self.gauge.SetRange(value)
def set_progress(self, value):
"""
set_progress(value)
It sets the actual progress value to gauge widget
"""
self.gauge.SetValue(value)
def set_status_text(self, value):
"""
set_status_text(value)
It sets the text to the first field of StatusBar
"""
self.status_bar.SetStatusText(value)
class Panel(wx.Panel):
def __init__(self, parent):
super(Panel, self).__init__(parent=parent)
self.players = AutoWidthListCtrl(self)
self.players.InsertColumn(0, 'code', wx.LIST_FORMAT_RIGHT, 50)
self.players.InsertColumn(1, 'name', width=125)
self.players.InsertColumn(2, 'fullname', width=175)
players_box = wx.BoxSizer(wx.HORIZONTAL)
players_box.Add(self.players, 1, wx.EXPAND)
btn_sizer = wx.FlexGridSizer(rows=1, cols=2, hgap=5, vgap=5)
self.btn_quit = wx.Button(self, wx.ID_CANCEL, label="Quit")
self.btn_refresh = wx.Button(self, wx.ID_OK, label="Refresh")
btn_sizer.Add(self.btn_quit, 0, wx.EXPAND)
btn_sizer.Add(self.btn_refresh, 0, wx.EXPAND)
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(players_box, 1, wx.EXPAND | wx.ALL, 5)
sizer.Add(btn_sizer, 0, wx.EXPAND | wx.ALL, 5)
self.SetSizer(sizer)
class ViewExtract(wx.Frame):
def __init__(self, parent, title):
self.parent = parent
self.title = title
super(ViewExtract, self).__init__(parent=self.parent, title=title)
self.controller = self.parent.controller
self.panel = PanelExtract(parent=self)
self.SetSize((300, 150))
# bindings
self.Bind(wx.EVT_CLOSE, self.on_quit)
self.Bind(wx.EVT_BUTTON, self.on_quit, self.panel.btn_quit)
self.Bind(wx.EVT_BUTTON, self.on_extract, self.panel.btn_extract)
# noinspection PyUnusedLocal
def on_extract(self, event):
day = self.panel.day.GetValue()
if day:
if self.controller.are_evaluations_ready(day):
self.controller.extract_evaluations(day)
self.parent.Enable()
self.Destroy()
else:
wx.MessageBox('Evaluations for day %s not ready!' % day, '', OK)
else:
wx.MessageBox('Please set a day to extract!', '', OK)
# noinspection PyUnusedLocal
def on_quit(self, event):
self.parent.Enable()
self.Destroy()
class PanelExtract(wx.Panel):
def __init__(self, parent):
super(PanelExtract, self).__init__(parent)
# Attributes
self.day = wx.TextCtrl(self)
# Layout
text_sizer = wx.FlexGridSizer(rows=1, cols=2, hgap=5, vgap=5)
text_sizer.Add(wx.StaticText(self, label="Day:"), 0, ACV)
text_sizer.Add(self.day, 0, ACV)
text_sizer.AddGrowableCol(1)
button_sizer = wx.FlexGridSizer(rows=1, cols=2, hgap=5, vgap=5)
self.btn_extract = wx.Button(self, wx.ID_OK, label="Extract")
self.btn_quit = wx.Button(self, wx.ID_CANCEL, label="Quit")
self.btn_quit.SetDefault()
button_sizer.Add(self.btn_extract, 0, ACV)
button_sizer.Add(self.btn_quit, 0, ACV)
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(text_sizer, 1, wx.EXPAND | wx.ALL, 5)
sizer.Add(button_sizer, 0, wx.ALIGN_CENTER | wx.ALL, 5)
self.SetBackgroundColour('LightGray')
self.SetSizer(sizer) | ViewPlayer(parent=self, title='Edit Player', is_editor=True)
# noinspection PyUnusedLocal | random_line_split |
core.py | import wx
import sys
from wx.lib.mixins.listctrl import ListCtrlAutoWidthMixin
from player import ViewPlayer
OK = wx.OK | wx.ICON_EXCLAMATION
ACV = wx.ALIGN_CENTER_VERTICAL
YN = wx.YES_NO | wx.ICON_WARNING
class AutoWidthListCtrl(wx.ListCtrl, ListCtrlAutoWidthMixin):
def __init__(self, parent):
wx.ListCtrl.__init__(self, parent, -1, style=wx.LC_REPORT)
ListCtrlAutoWidthMixin.__init__(self)
class Core(wx.Frame):
def __init__(self, parent, controller, title):
super(Core, self).__init__(parent=parent, title=title)
self.parent = parent
self.controller = controller
self.child = None
self.panel = Panel(self)
self.panel.SetBackgroundColour('LightGray')
self.status_bar = self.CreateStatusBar(2)
self.status_bar.SetStatusWidths([200, -1])
gauge_pos, gauge_size = self.get_gauge_dimensions()
self.gauge = wx.Gauge(self.status_bar, -1, 100, gauge_pos, gauge_size)
        # Menus
self.menubar = wx.MenuBar()
self.SetMenuBar(self.menubar)
# Player menu
menu_player = wx.Menu()
self.menubar.Append(menu_player, "Players")
self.menu_new_player = menu_player.Append(-1, "New Player",
"New Player")
menu_player.AppendSeparator()
self.menu_players_import = menu_player.Append(-1, "import Players",
"import Players")
menu_player.AppendSeparator()
self.menu_evaluations = menu_player.Append(-1, "extract evaluations",
"extract evaluations")
# Bindings
# player bindings
self.Bind(wx.EVT_SIZE, self.on_size)
self.Bind(wx.EVT_MENU, self.on_new_player, self.menu_new_player)
self.Bind(wx.EVT_MENU, self.on_import, self.menu_players_import)
self.Bind(wx.EVT_MENU, self.on_extract, self.menu_evaluations)
self.Bind(wx.EVT_BUTTON, self.quit, self.panel.btn_quit)
self.Bind(wx.EVT_BUTTON, self.on_refresh, self.panel.btn_refresh)
self.Bind(wx.EVT_LIST_ITEM_SELECTED, self.on_list,
self.panel.players)
self.Bind(wx.EVT_LIST_COL_CLICK, self.on_list_column,
self.panel.players) # Players initialization
players = self.controller.get_players()
if players:
self.fill_players(players)
self.set_status_text('Players on database: %s' % len(players))
else:
self.set_status_text('No Players on database')
size = (450, 500)
self.SetSize(size)
self.Show()
# noinspection PyUnusedLocal
def quit(self, event):
self.Destroy()
# noinspection PyUnusedLocal
def on_extract(self, event):
self.child = ViewExtract(parent=self, title="extract evaluations")
self.show_child()
# noinspection PyUnusedLocal
def on_import(self, event):
choice = wx.MessageBox('Deleting All Players?', 'warning', YN)
if choice == wx.YES:
self.controller.delete_all_players()
self.controller.import_players()
wx.MessageBox('Players successfully imported!', '', OK)
players = self.controller.get_players()
self.fill_players(players)
# noinspection PyUnusedLocal
def edit_player(self, event):
self.Disable()
ViewPlayer(parent=self, title='Edit Player', is_editor=True)
# noinspection PyUnusedLocal
def on_new_player(self, event):
self.child = ViewPlayer(parent=self, title='New Player')
self.show_child()
# noinspection PyUnusedLocal
def on_quit(self, event):
self.Destroy()
# noinspection PyUnusedLocal
def on_refresh(self, event):
self.panel.players.DeleteAllItems()
players = self.controller.get_players()
self.fill_players(players)
# noinspection PyUnusedLocal
def on_list(self, event):
item_id = event.m_itemIndex
player_code = self.panel.players.GetItemText(item_id)
player = self.controller.get_player_by_code(player_code)
self.controller.set_temporary_object(player)
item_name = self.panel.players.GetItem(item_id, 1)
player_name = item_name.GetText()
item_fullname = self.panel.players.GetItem(item_id, 2)
player_fullname = item_fullname.GetText()
view_edit = ViewPlayer(self, "Edit player", is_editor=True)
view_edit.Show()
view_edit.panel.code.SetValue(player_code)
view_edit.panel.name.SetValue(player_name)
view_edit.panel.fullname.SetValue(player_fullname)
        # ChangeValue is used here instead of SetValue so that EVT_TEXT is not
        # fired. That EVT_TEXT would look for a handler at this level (the
        # Frame) and, not finding one, would bubble up to the parent, which
        # does bind EVT_TEXT on the 'ppl' text control; that would invoke the
        # 'on_text_entry' callback and make a mess.
# view_edit.panel.btn_delete.Enable()
view_edit.SetWindowStyle(wx.STAY_ON_TOP)
def on_list_column(self, event):
self.panel.players.DeleteAllItems()
id_column = event.GetColumn()
players = self.controller.get_sorted_players(id_column)
self.fill_players(players)
def fill_players(self, players):
for player in players:
index = self.panel.players.InsertStringItem(sys.maxint,
str(player.code))
self.panel.players.SetStringItem(index, 1, str(player.name))
self.panel.players.SetStringItem(index, 2, str(player.fullname))
@staticmethod
def show_message(string):
wx.MessageBox(string, 'core info', wx.OK | wx.ICON_EXCLAMATION)
def show_child(self):
self.Disable()
self.child.Centre()
self.child.Show()
def get_gauge_dimensions(self):
"""
get_gauge_dimensions(self) -> tuple_a, tuple_b
        tuple_a is a tuple with the x and y position of the second field
of the StatusBar
"""
pos_x, pos_y, dim_x, dim_y = self.status_bar.GetFieldRect(1)
return (pos_x, pos_y), (dim_x, dim_y)
def on_size(self, event):
"""
on_size()
        it redraws and repositions the gauge rectangle when the frame window is
resized
"""
size = self.GetSize()
self.SetSize(size)
gauge_pos, gauge_size = self.get_gauge_dimensions()
self.gauge.SetSize(gauge_size)
event.Skip()
self.Update()
def set_range(self, value):
"""
set_range(value)
It sets the maximum value of gauge widget
"""
self.gauge.SetRange(value)
def set_progress(self, value):
"""
set_progress(value)
It sets the actual progress value to gauge widget
"""
self.gauge.SetValue(value)
def set_status_text(self, value):
"""
set_status_text(value)
It sets the text to the first field of StatusBar
"""
self.status_bar.SetStatusText(value)
class Panel(wx.Panel):
def __init__(self, parent):
super(Panel, self).__init__(parent=parent)
self.players = AutoWidthListCtrl(self)
self.players.InsertColumn(0, 'code', wx.LIST_FORMAT_RIGHT, 50)
self.players.InsertColumn(1, 'name', width=125)
self.players.InsertColumn(2, 'fullname', width=175)
players_box = wx.BoxSizer(wx.HORIZONTAL)
players_box.Add(self.players, 1, wx.EXPAND)
btn_sizer = wx.FlexGridSizer(rows=1, cols=2, hgap=5, vgap=5)
self.btn_quit = wx.Button(self, wx.ID_CANCEL, label="Quit")
self.btn_refresh = wx.Button(self, wx.ID_OK, label="Refresh")
btn_sizer.Add(self.btn_quit, 0, wx.EXPAND)
btn_sizer.Add(self.btn_refresh, 0, wx.EXPAND)
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(players_box, 1, wx.EXPAND | wx.ALL, 5)
sizer.Add(btn_sizer, 0, wx.EXPAND | wx.ALL, 5)
self.SetSizer(sizer)
class ViewExtract(wx.Frame):
def __init__(self, parent, title):
|
# noinspection PyUnusedLocal
def on_extract(self, event):
day = self.panel.day.GetValue()
if day:
if self.controller.are_evaluations_ready(day):
self.controller.extract_evaluations(day)
self.parent.Enable()
self.Destroy()
else:
wx.MessageBox('Evaluations for day %s not ready!' % day, '', OK)
else:
wx.MessageBox('Please set a day to extract!', '', OK)
# noinspection PyUnusedLocal
def on_quit(self, event):
self.parent.Enable()
self.Destroy()
class PanelExtract(wx.Panel):
def __init__(self, parent):
super(PanelExtract, self).__init__(parent)
# Attributes
self.day = wx.TextCtrl(self)
# Layout
text_sizer = wx.FlexGridSizer(rows=1, cols=2, hgap=5, vgap=5)
text_sizer.Add(wx.StaticText(self, label="Day:"), 0, ACV)
text_sizer.Add(self.day, 0, ACV)
text_sizer.AddGrowableCol(1)
button_sizer = wx.FlexGridSizer(rows=1, cols=2, hgap=5, vgap=5)
self.btn_extract = wx.Button(self, wx.ID_OK, label="Extract")
self.btn_quit = wx.Button(self, wx.ID_CANCEL, label="Quit")
self.btn_quit.SetDefault()
button_sizer.Add(self.btn_extract, 0, ACV)
button_sizer.Add(self.btn_quit, 0, ACV)
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(text_sizer, 1, wx.EXPAND | wx.ALL, 5)
sizer.Add(button_sizer, 0, wx.ALIGN_CENTER | wx.ALL, 5)
self.SetBackgroundColour('LightGray')
self.SetSizer(sizer)
| self.parent = parent
self.title = title
super(ViewExtract, self).__init__(parent=self.parent, title=title)
self.controller = self.parent.controller
self.panel = PanelExtract(parent=self)
self.SetSize((300, 150))
# bindings
self.Bind(wx.EVT_CLOSE, self.on_quit)
self.Bind(wx.EVT_BUTTON, self.on_quit, self.panel.btn_quit)
self.Bind(wx.EVT_BUTTON, self.on_extract, self.panel.btn_extract) | identifier_body |
core.py | import wx
import sys
from wx.lib.mixins.listctrl import ListCtrlAutoWidthMixin
from player import ViewPlayer
OK = wx.OK | wx.ICON_EXCLAMATION
ACV = wx.ALIGN_CENTER_VERTICAL
YN = wx.YES_NO | wx.ICON_WARNING
class AutoWidthListCtrl(wx.ListCtrl, ListCtrlAutoWidthMixin):
def __init__(self, parent):
wx.ListCtrl.__init__(self, parent, -1, style=wx.LC_REPORT)
ListCtrlAutoWidthMixin.__init__(self)
class Core(wx.Frame):
def __init__(self, parent, controller, title):
super(Core, self).__init__(parent=parent, title=title)
self.parent = parent
self.controller = controller
self.child = None
self.panel = Panel(self)
self.panel.SetBackgroundColour('LightGray')
self.status_bar = self.CreateStatusBar(2)
self.status_bar.SetStatusWidths([200, -1])
gauge_pos, gauge_size = self.get_gauge_dimensions()
self.gauge = wx.Gauge(self.status_bar, -1, 100, gauge_pos, gauge_size)
        # Menus
self.menubar = wx.MenuBar()
self.SetMenuBar(self.menubar)
# Player menu
menu_player = wx.Menu()
self.menubar.Append(menu_player, "Players")
self.menu_new_player = menu_player.Append(-1, "New Player",
"New Player")
menu_player.AppendSeparator()
self.menu_players_import = menu_player.Append(-1, "import Players",
"import Players")
menu_player.AppendSeparator()
self.menu_evaluations = menu_player.Append(-1, "extract evaluations",
"extract evaluations")
# Bindings
# player bindings
self.Bind(wx.EVT_SIZE, self.on_size)
self.Bind(wx.EVT_MENU, self.on_new_player, self.menu_new_player)
self.Bind(wx.EVT_MENU, self.on_import, self.menu_players_import)
self.Bind(wx.EVT_MENU, self.on_extract, self.menu_evaluations)
self.Bind(wx.EVT_BUTTON, self.quit, self.panel.btn_quit)
self.Bind(wx.EVT_BUTTON, self.on_refresh, self.panel.btn_refresh)
self.Bind(wx.EVT_LIST_ITEM_SELECTED, self.on_list,
self.panel.players)
self.Bind(wx.EVT_LIST_COL_CLICK, self.on_list_column,
self.panel.players) # Players initialization
players = self.controller.get_players()
if players:
self.fill_players(players)
self.set_status_text('Players on database: %s' % len(players))
else:
self.set_status_text('No Players on database')
size = (450, 500)
self.SetSize(size)
self.Show()
# noinspection PyUnusedLocal
def quit(self, event):
self.Destroy()
# noinspection PyUnusedLocal
def on_extract(self, event):
self.child = ViewExtract(parent=self, title="extract evaluations")
self.show_child()
# noinspection PyUnusedLocal
def on_import(self, event):
choice = wx.MessageBox('Deleting All Players?', 'warning', YN)
if choice == wx.YES:
self.controller.delete_all_players()
self.controller.import_players()
wx.MessageBox('Players successfully imported!', '', OK)
players = self.controller.get_players()
self.fill_players(players)
# noinspection PyUnusedLocal
def edit_player(self, event):
self.Disable()
ViewPlayer(parent=self, title='Edit Player', is_editor=True)
# noinspection PyUnusedLocal
def on_new_player(self, event):
self.child = ViewPlayer(parent=self, title='New Player')
self.show_child()
# noinspection PyUnusedLocal
def on_quit(self, event):
self.Destroy()
# noinspection PyUnusedLocal
def on_refresh(self, event):
self.panel.players.DeleteAllItems()
players = self.controller.get_players()
self.fill_players(players)
# noinspection PyUnusedLocal
def on_list(self, event):
item_id = event.m_itemIndex
player_code = self.panel.players.GetItemText(item_id)
player = self.controller.get_player_by_code(player_code)
self.controller.set_temporary_object(player)
item_name = self.panel.players.GetItem(item_id, 1)
player_name = item_name.GetText()
item_fullname = self.panel.players.GetItem(item_id, 2)
player_fullname = item_fullname.GetText()
view_edit = ViewPlayer(self, "Edit player", is_editor=True)
view_edit.Show()
view_edit.panel.code.SetValue(player_code)
view_edit.panel.name.SetValue(player_name)
view_edit.panel.fullname.SetValue(player_fullname)
        # ChangeValue is used here instead of SetValue so that EVT_TEXT is not
        # fired. That EVT_TEXT would look for a handler at this level (the
        # Frame) and, not finding one, would bubble up to the parent, which
        # does bind EVT_TEXT on the 'ppl' text control; that would invoke the
        # 'on_text_entry' callback and make a mess.
# view_edit.panel.btn_delete.Enable()
view_edit.SetWindowStyle(wx.STAY_ON_TOP)
def on_list_column(self, event):
self.panel.players.DeleteAllItems()
id_column = event.GetColumn()
players = self.controller.get_sorted_players(id_column)
self.fill_players(players)
def fill_players(self, players):
for player in players:
index = self.panel.players.InsertStringItem(sys.maxint,
str(player.code))
self.panel.players.SetStringItem(index, 1, str(player.name))
self.panel.players.SetStringItem(index, 2, str(player.fullname))
@staticmethod
def show_message(string):
wx.MessageBox(string, 'core info', wx.OK | wx.ICON_EXCLAMATION)
def show_child(self):
self.Disable()
self.child.Centre()
self.child.Show()
def get_gauge_dimensions(self):
"""
get_gauge_dimensions(self) -> tuple_a, tuple_b
        tuple_a is a tuple with the x and y position of the second field
of the StatusBar
"""
pos_x, pos_y, dim_x, dim_y = self.status_bar.GetFieldRect(1)
return (pos_x, pos_y), (dim_x, dim_y)
def on_size(self, event):
"""
on_size()
        it redraws and repositions the gauge rectangle when the frame window is
resized
"""
size = self.GetSize()
self.SetSize(size)
gauge_pos, gauge_size = self.get_gauge_dimensions()
self.gauge.SetSize(gauge_size)
event.Skip()
self.Update()
def set_range(self, value):
"""
set_range(value)
It sets the maximum value of gauge widget
"""
self.gauge.SetRange(value)
def set_progress(self, value):
"""
set_progress(value)
It sets the actual progress value to gauge widget
"""
self.gauge.SetValue(value)
def set_status_text(self, value):
"""
set_status_text(value)
It sets the text to the first field of StatusBar
"""
self.status_bar.SetStatusText(value)
class Panel(wx.Panel):
def __init__(self, parent):
super(Panel, self).__init__(parent=parent)
self.players = AutoWidthListCtrl(self)
self.players.InsertColumn(0, 'code', wx.LIST_FORMAT_RIGHT, 50)
self.players.InsertColumn(1, 'name', width=125)
self.players.InsertColumn(2, 'fullname', width=175)
players_box = wx.BoxSizer(wx.HORIZONTAL)
players_box.Add(self.players, 1, wx.EXPAND)
btn_sizer = wx.FlexGridSizer(rows=1, cols=2, hgap=5, vgap=5)
self.btn_quit = wx.Button(self, wx.ID_CANCEL, label="Quit")
self.btn_refresh = wx.Button(self, wx.ID_OK, label="Refresh")
btn_sizer.Add(self.btn_quit, 0, wx.EXPAND)
btn_sizer.Add(self.btn_refresh, 0, wx.EXPAND)
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(players_box, 1, wx.EXPAND | wx.ALL, 5)
sizer.Add(btn_sizer, 0, wx.EXPAND | wx.ALL, 5)
self.SetSizer(sizer)
class ViewExtract(wx.Frame):
def __init__(self, parent, title):
self.parent = parent
self.title = title
super(ViewExtract, self).__init__(parent=self.parent, title=title)
self.controller = self.parent.controller
self.panel = PanelExtract(parent=self)
self.SetSize((300, 150))
# bindings
self.Bind(wx.EVT_CLOSE, self.on_quit)
self.Bind(wx.EVT_BUTTON, self.on_quit, self.panel.btn_quit)
self.Bind(wx.EVT_BUTTON, self.on_extract, self.panel.btn_extract)
# noinspection PyUnusedLocal
def on_extract(self, event):
day = self.panel.day.GetValue()
if day:
if self.controller.are_evaluations_ready(day):
self.controller.extract_evaluations(day)
self.parent.Enable()
self.Destroy()
else:
wx.MessageBox('Evaluations for day %s not ready!' % day, '', OK)
else:
wx.MessageBox('Please set a day to extract!', '', OK)
# noinspection PyUnusedLocal
def on_quit(self, event):
self.parent.Enable()
self.Destroy()
class PanelExtract(wx.Panel):
def | (self, parent):
super(PanelExtract, self).__init__(parent)
# Attributes
self.day = wx.TextCtrl(self)
# Layout
text_sizer = wx.FlexGridSizer(rows=1, cols=2, hgap=5, vgap=5)
text_sizer.Add(wx.StaticText(self, label="Day:"), 0, ACV)
text_sizer.Add(self.day, 0, ACV)
text_sizer.AddGrowableCol(1)
button_sizer = wx.FlexGridSizer(rows=1, cols=2, hgap=5, vgap=5)
self.btn_extract = wx.Button(self, wx.ID_OK, label="Extract")
self.btn_quit = wx.Button(self, wx.ID_CANCEL, label="Quit")
self.btn_quit.SetDefault()
button_sizer.Add(self.btn_extract, 0, ACV)
button_sizer.Add(self.btn_quit, 0, ACV)
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(text_sizer, 1, wx.EXPAND | wx.ALL, 5)
sizer.Add(button_sizer, 0, wx.ALIGN_CENTER | wx.ALL, 5)
self.SetBackgroundColour('LightGray')
self.SetSizer(sizer)
| __init__ | identifier_name |
index.js | const _Page = require("/__antmove/component/componentClass.js")("Page");
const _my = require("/__antmove/api/index.js")(my);
const app = app || getApp();
const zutils = require("../../utils/zutils.js");
_Page({
data: {
hideCoupon: true,
hideBanners: false,
banners: [
[
"https://cdn.chinaruankao.com/fs/20180123/akwarg2q20k37zkk.png",
"/pages/acts/share-guide"
]
],
openAis: false
},
onLoad: function(e) {
e = e || {};
let osi = app.GLOBAL_DATA.SYS_INFO;
if (osi && osi.screenWidth != 375) {
let swiperHeight = "height:" + osi.screenWidth / 2.5 + "px";
this.setData({
swiperHeight: swiperHeight
});
}
this.setData({
isAndroid: app.GLOBAL_DATA.IS_ANDROID
});
const that = this;
app.getUserInfo(function(u) {
that.__loadComdata();
that.__loadRecent();
that.__loadRecommend();
// that.__checkTwxx();
that.__checkToken();
setTimeout(function() {
that.__checkCoupon();
}, 666);
});
_my.getStorage({
key: "FOLLOW_SUBJECT",
success: function(res) {
let fs = (res.data || '').split(",");
app.GLOBAL_DATA.FOLLOW_SUBJECT = fs;
if (fs.length > 0) {
that.__loadFollowSubject(fs);
}
}
        }); // jump straight to a specific page, if one was requested
if (e.nextpage) {
app.gotoPage(decodeURIComponent(e.nextpage));
return;
        } // show the add-to-favorites reminder
if (
!(
e.scene == 1089 ||
e.scene == 1001 ||
e.scene == 1022 ||
e.scene == 1023 ||
e.scene == 1027
)
) {
let showFav = function(t) {
that.setData({
showFav: true,
showFavClazz: "animated bounceIn slow"
});
_my.setStorage({
key: "SHOW_FAV",
data: t || 1
});
};
setTimeout(() => {
_my.getStorage({
key: "SHOW_FAV",
success: function(res) {
if (res.data < 1) showFav(res.data + 1);
},
fail: function(res) {
showFav(1);
}
});
}, 1500);
}
},
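    // Loads homepage data (title, banners, red-dot badges, feature flags) from the
    // backend; retries once if the payload comes back empty.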
__loadComdata: function(retry) {
let that = this;
zutils.get(app, "api/home/comdata", function(res) {
if (res.data.error_code > 1000) {
_my.redirectTo({
url: "/pages/index/tips?msg=" + res.data.error_msg
});
return;
}
let _data = res.data.data;
if (!_data) {
if (retry) {
_my.showToast({
icon: "none",
duration: 4000,
title: "请求失败,请稍后重试"
});
} else {
that.__loadComdata(true);
}
}
if (!_data) return;
_my.setNavigationBarTitle({
title: _data.title || "软考必备"
}); // Banner
if (!_data.banners || _data.banners.length == 0) {
that.setData({
hideBanners: true
});
} else {
that.setData({
banners: _data.banners
});
            } // red-dot badges
if (_data.reddot) {
for (let k in _data.reddot) {
app.showReddot(_data.reddot[k], k);
}
}
that.setData({
icontext: _data.icontext || null,
declaration: _data.declaration || null,
openAis: _data.open_ais === true
}); // 0, 1, 99
app.GLOBAL_DATA.RUN_MODE = _data.runMode || 0;
});
},
onPullDownRefresh: function() {
this.onLoad();
setTimeout(function() {
_my.stopPullDownRefresh();
}, 800);
},
onShow: function() {
if (zutils.array.inAndErase(app.GLOBAL_DATA.RELOAD_EXAM, "Index")) {
this.__loadRecent();
}
if (zutils.array.inAndErase(app.GLOBAL_DATA.RELOAD_SUBJECT, "Index")) {
this.__loadComdata();
this.__loadRecommend();
}
let fs = app.GLOBAL_DATA.FOLLOW_SUBJECT;
if (fs && fs.length > 0) {
let lastFs = fs[fs.length - 1];
if (lastFs != this.__lastFs) {
this.__loadFollowSubject(fs);
}
} else {
this.setData({
followSubjects: null
});
}
},
    // Parse shared content (entry via QR-code scan)
__checkTwxx: function() {
return
let q = app.enterSource.query.q;
if (q && decodeURIComponent(q).indexOf("/t/wxx/") > -1) {
zutils.get(app, "api/share/parse-twxx?q=" + q, function(res) {
if (res.data.error_code == 0) {
_my.navigateTo({
url: res.data.data
});
}
});
}
},
    // Parse a share token found on the clipboard
__checkToken: function() {
if (this.__checkToken_OK == true) return;
        this.__checkToken_OK = true; // the clipboard token is cleared below
var rktk_token = false;
setTimeout(function() {
if (rktk_token == true) {
_my.setClipboardData({
data: "",
complete: () => _my.hideToast()
});
}
}, 1500);
let that = this;
_my.getClipboardData({
success: function(res) {
if (res.data && res.data.substr(0, 6) == "#考题解析#") {
                    // entry via QR-code scan takes priority over the clipboard
let scene = app.enterSource.scene;
if (
scene == 1011 ||
scene == 1012 ||
scene == 1013 ||
scene == 1047 ||
scene == 1048 ||
scene == 1049
) {
console.log("扫码进入" + scene + ": " + res.data);
rktk_token = true;
return;
                    } // skip tokens this user shared themselves
if (zutils.array.in(app.GLOBAL_DATA.KT_TOKENS, res.data)) {
return;
}
rktk_token = true;
zutils.get(
app,
"api/share/token-parse?text=" +
encodeURIComponent(res.data),
function(res2) {
if (res2.data.error_code == 0) {
let _data = res2.data.data;
_my.showModal({
title: _data.title,
confirmText: "立即查看",
content: _data.content,
success: function(res3) {
if (res3.confirm) {
_my.navigateTo({
url: _data.page
});
}
}
});
}
}
);
}
}
});
},
    // Recently followed question banks
__loadFollowSubject: function(fs) {
if (!fs || fs.length < 3) return;
this.__lastFs = fs[fs.length - 1];
zutils.get(app, "api/home/subject-names?ids=" + fs.join(","), res => {
if (res.data && res.data.data && res.data.data.length > 0) {
let _subjects = res.data.data;
_subjects.reverse();
this.__formatSubject(_subjects);
this.setData({
followSubjects: _subjects
});
}
});
},
    // Recent answering activity
__loadRecent: function() {
zutils.get(app, "api/home/recent-exams", res => {
this.setData(res.data.data);
        }); // the count of incorrectly answered questions is loaded/refreshed here
zutils.get(app, "api/fav/incorrect-stats?d=1", res => {
this.setData(res.data.data);
});
},
    // Recommended question banks
__loadRecommend: function() {
zutils.get(app, "api/home/recommend-subjects", res => {
this.setData({
recommendSubjectsLoaded: true
});
let _data = res.data.data;
if (!_data) return;
let _subjects = _data.recommend_subjects;
this.__formatSubject(_subjects);
_data = {};
_data.recommendSubjects = [
_subjects[0],
_subjects[1],
_subjects[2]
];
if (_subjects.length > 3) {
_data.recommendSubjects2 = [
_subjects[3],
_subjects[4],
_subjects[5]
];
}
this.s | _subjects[i][10] = sname.substr(0, 7);
_subjects[i][11] = sname.substr(7);
if (sname.indexOf("下午题") > -1) {
_subjects[i][12] = "T2";
if (sname.indexOf("Ⅱ") > -1) {
_subjects[i][12] = "T3";
}
}
if (_subjects[i][3] == 2) {
_subjects[i][12] = "T4";
_subjects[i][10] = "知识点";
_subjects[i][11] = null;
_subjects[i][2] = _subjects[i][1];
}
}
},
todayExam: function(e) {
zutils.post(
app,
"api/exam/today-exam?formId=" + (e.detail.formId || ""),
function(res) {
if (res.data.error_code == 0) {
let _data = res.data.data;
_my.navigateTo({
url:
"../exam/exam?subject=" +
_data.subject_id +
"&exam=" +
_data.exam_id
});
} else {
let error_msg = res.data.error_msg || "系统错误";
if (
error_msg.indexOf("考试类型") > -1 ||
error_msg.indexOf("尚未选择") > -1
) {
_my.navigateTo({
url: "../question/subject-choice?back=1"
});
} else {
app.alert(error_msg);
}
}
}
);
},
gotoPage: function(e) {
let formId = e && e.detail ? e.detail.formId || "" : "";
zutils.post(app, "api/user/report-formid?noloading&formId=" + formId);
let url = e.currentTarget.dataset.url;
if (url) app.gotoPage(e.currentTarget.dataset.url);
else app.alert("暂未开放");
},
onShareAppMessage: function() {
return app.warpShareData();
},
    // Coupons
__checkCoupon: function() {
if (app.GLOBAL_DATA.IS_IOS === true) return;
let that = this;
zutils.get(app, "api/user/check-coupon?noloading", function(res) {
if (res.data.error_code == 0 && res.data.data) {
let _data = res.data.data;
_data.hideCoupon = true;
_data.showConponHighbar = true;
that.setData(_data);
let tdshow_key = "COUPONSHOW" + zutils.formatDate("yyMMdd");
_my.getStorage({
key: tdshow_key,
success: function(res) {
                    // already shown today
},
fail: function() {
_my.setStorage({
key: tdshow_key,
data: "1"
});
that.setData({
hideCoupon: false
});
}
});
}
});
},
hideCoupon: function(e) {
let formId = e && e.detail ? e.detail.formId || "" : "";
if (formId)
zutils.post(
app,
"api/user/report-formid?noloading&formId=" + formId
);
let that = this;
that.setData({
hideCoupon: true
});
app.reportKpi("COUPON.CLOSE");
}
});
| etData(_data);
});
},
__formatSubject: function(_subjects) {
for (let i = 0; i < _subjects.length; i++) {
let sname = _subjects[i][1];
| conditional_block |
index.js | const _Page = require("/__antmove/component/componentClass.js")("Page");
const _my = require("/__antmove/api/index.js")(my);
const app = app || getApp();
const zutils = require("../../utils/zutils.js");
_Page({
data: {
hideCoupon: true,
hideBanners: false,
banners: [
[
"https://cdn.chinaruankao.com/fs/20180123/akwarg2q20k37zkk.png",
"/pages/acts/share-guide"
]
],
openAis: false
},
onLoad: function(e) {
e = e || {};
let osi = app.GLOBAL_DATA.SYS_INFO;
if (osi && osi.screenWidth != 375) {
let swiperHeight = "height:" + osi.screenWidth / 2.5 + "px";
this.setData({
swiperHeight: swiperHeight
});
}
this.setData({
isAndroid: app.GLOBAL_DATA.IS_ANDROID
});
const that = this;
app.getUserInfo(function(u) {
that.__loadComdata();
that.__loadRecent();
that.__loadRecommend();
// that.__checkTwxx();
that.__checkToken();
setTimeout(function() {
that.__checkCoupon();
}, 666);
});
_my.getStorage({
key: "FOLLOW_SUBJECT",
success: function(res) {
let fs = (res.data || '').split(",");
app.GLOBAL_DATA.FOLLOW_SUBJECT = fs;
if (fs.length > 0) {
that.__loadFollowSubject(fs);
}
}
        }); // jump straight to a specific page, if one was requested
if (e.nextpage) {
app.gotoPage(decodeURIComponent(e.nextpage));
return;
        } // show the add-to-favorites reminder
if (
!(
e.scene == 1089 ||
e.scene == 1001 ||
e.scene == 1022 ||
e.scene == 1023 ||
e.scene == 1027
)
) {
let showFav = function(t) {
that.setData({
showFav: true,
showFavClazz: "animated bounceIn slow"
});
_my.setStorage({
key: "SHOW_FAV",
data: t || 1
});
};
setTimeout(() => {
_my.getStorage({
key: "SHOW_FAV",
success: function(res) {
if (res.data < 1) showFav(res.data + 1);
},
fail: function(res) {
showFav(1);
}
});
}, 1500);
}
},
__loadComdata: function(retry) {
let that = this;
zutils.get(app, "api/home/comdata", function(res) {
if (res.data.error_code > 1000) {
_my.redirectTo({
url: "/pages/index/tips?msg=" + res.data.error_msg
});
return;
}
let _data = res.data.data;
if (!_data) {
if (retry) {
_my.showToast({
icon: "none",
duration: 4000,
title: "请求失败,请稍后重试"
});
} else {
that.__loadComdata(true);
}
}
if (!_data) return;
_my.setNavigationBarTitle({
title: _data.title || "软考必备"
}); // Banner
if (!_data.banners || _data.banners.length == 0) {
that.setData({
hideBanners: true
});
} else {
that.setData({
banners: _data.banners | for (let k in _data.reddot) {
app.showReddot(_data.reddot[k], k);
}
}
that.setData({
icontext: _data.icontext || null,
declaration: _data.declaration || null,
openAis: _data.open_ais === true
}); // 0, 1, 99
app.GLOBAL_DATA.RUN_MODE = _data.runMode || 0;
});
},
onPullDownRefresh: function() {
this.onLoad();
setTimeout(function() {
_my.stopPullDownRefresh();
}, 800);
},
onShow: function() {
if (zutils.array.inAndErase(app.GLOBAL_DATA.RELOAD_EXAM, "Index")) {
this.__loadRecent();
}
if (zutils.array.inAndErase(app.GLOBAL_DATA.RELOAD_SUBJECT, "Index")) {
this.__loadComdata();
this.__loadRecommend();
}
let fs = app.GLOBAL_DATA.FOLLOW_SUBJECT;
if (fs && fs.length > 0) {
let lastFs = fs[fs.length - 1];
if (lastFs != this.__lastFs) {
this.__loadFollowSubject(fs);
}
} else {
this.setData({
followSubjects: null
});
}
},
    // Parse shared content (entry via QR-code scan)
__checkTwxx: function() {
return
let q = app.enterSource.query.q;
if (q && decodeURIComponent(q).indexOf("/t/wxx/") > -1) {
zutils.get(app, "api/share/parse-twxx?q=" + q, function(res) {
if (res.data.error_code == 0) {
_my.navigateTo({
url: res.data.data
});
}
});
}
},
    // Parse a share token found on the clipboard
__checkToken: function() {
if (this.__checkToken_OK == true) return;
        this.__checkToken_OK = true; // the clipboard token is cleared below
var rktk_token = false;
setTimeout(function() {
if (rktk_token == true) {
_my.setClipboardData({
data: "",
complete: () => _my.hideToast()
});
}
}, 1500);
let that = this;
_my.getClipboardData({
success: function(res) {
if (res.data && res.data.substr(0, 6) == "#考题解析#") {
                    // entry via QR-code scan takes priority over the clipboard
let scene = app.enterSource.scene;
if (
scene == 1011 ||
scene == 1012 ||
scene == 1013 ||
scene == 1047 ||
scene == 1048 ||
scene == 1049
) {
console.log("扫码进入" + scene + ": " + res.data);
rktk_token = true;
return;
                    } // skip tokens this user shared themselves
if (zutils.array.in(app.GLOBAL_DATA.KT_TOKENS, res.data)) {
return;
}
rktk_token = true;
zutils.get(
app,
"api/share/token-parse?text=" +
encodeURIComponent(res.data),
function(res2) {
if (res2.data.error_code == 0) {
let _data = res2.data.data;
_my.showModal({
title: _data.title,
confirmText: "立即查看",
content: _data.content,
success: function(res3) {
if (res3.confirm) {
_my.navigateTo({
url: _data.page
});
}
}
});
}
}
);
}
}
});
},
    // Recently followed question banks
__loadFollowSubject: function(fs) {
if (!fs || fs.length < 3) return;
this.__lastFs = fs[fs.length - 1];
zutils.get(app, "api/home/subject-names?ids=" + fs.join(","), res => {
if (res.data && res.data.data && res.data.data.length > 0) {
let _subjects = res.data.data;
_subjects.reverse();
this.__formatSubject(_subjects);
this.setData({
followSubjects: _subjects
});
}
});
},
    // Recent answering activity
__loadRecent: function() {
zutils.get(app, "api/home/recent-exams", res => {
this.setData(res.data.data);
        }); // the count of incorrectly answered questions is loaded/refreshed here
zutils.get(app, "api/fav/incorrect-stats?d=1", res => {
this.setData(res.data.data);
});
},
    // Recommended question banks
__loadRecommend: function() {
zutils.get(app, "api/home/recommend-subjects", res => {
this.setData({
recommendSubjectsLoaded: true
});
let _data = res.data.data;
if (!_data) return;
let _subjects = _data.recommend_subjects;
this.__formatSubject(_subjects);
_data = {};
_data.recommendSubjects = [
_subjects[0],
_subjects[1],
_subjects[2]
];
if (_subjects.length > 3) {
_data.recommendSubjects2 = [
_subjects[3],
_subjects[4],
_subjects[5]
];
}
this.setData(_data);
});
},
__formatSubject: function(_subjects) {
for (let i = 0; i < _subjects.length; i++) {
let sname = _subjects[i][1];
_subjects[i][10] = sname.substr(0, 7);
_subjects[i][11] = sname.substr(7);
if (sname.indexOf("下午题") > -1) {
_subjects[i][12] = "T2";
if (sname.indexOf("Ⅱ") > -1) {
_subjects[i][12] = "T3";
}
}
if (_subjects[i][3] == 2) {
_subjects[i][12] = "T4";
_subjects[i][10] = "知识点";
_subjects[i][11] = null;
_subjects[i][2] = _subjects[i][1];
}
}
},
todayExam: function(e) {
zutils.post(
app,
"api/exam/today-exam?formId=" + (e.detail.formId || ""),
function(res) {
if (res.data.error_code == 0) {
let _data = res.data.data;
_my.navigateTo({
url:
"../exam/exam?subject=" +
_data.subject_id +
"&exam=" +
_data.exam_id
});
} else {
let error_msg = res.data.error_msg || "系统错误";
if (
error_msg.indexOf("考试类型") > -1 ||
error_msg.indexOf("尚未选择") > -1
) {
_my.navigateTo({
url: "../question/subject-choice?back=1"
});
} else {
app.alert(error_msg);
}
}
}
);
},
gotoPage: function(e) {
let formId = e && e.detail ? e.detail.formId || "" : "";
zutils.post(app, "api/user/report-formid?noloading&formId=" + formId);
let url = e.currentTarget.dataset.url;
if (url) app.gotoPage(e.currentTarget.dataset.url);
else app.alert("暂未开放");
},
onShareAppMessage: function() {
return app.warpShareData();
},
    // Coupons
__checkCoupon: function() {
if (app.GLOBAL_DATA.IS_IOS === true) return;
let that = this;
zutils.get(app, "api/user/check-coupon?noloading", function(res) {
if (res.data.error_code == 0 && res.data.data) {
let _data = res.data.data;
_data.hideCoupon = true;
_data.showConponHighbar = true;
that.setData(_data);
let tdshow_key = "COUPONSHOW" + zutils.formatDate("yyMMdd");
_my.getStorage({
key: tdshow_key,
success: function(res) {
                    // already shown today
},
fail: function() {
_my.setStorage({
key: tdshow_key,
data: "1"
});
that.setData({
hideCoupon: false
});
}
});
}
});
},
hideCoupon: function(e) {
let formId = e && e.detail ? e.detail.formId || "" : "";
if (formId)
zutils.post(
app,
"api/user/report-formid?noloading&formId=" + formId
);
let that = this;
that.setData({
hideCoupon: true
});
app.reportKpi("COUPON.CLOSE");
}
}); | });
            } // red-dot badges
if (_data.reddot) { | random_line_split |
main.py | from sklearn import tree
from sklearn.feature_extraction.text import CountVectorizer
import http.client
import json
import requests
print("This program takes the movies you like, the ones you don't, and the ones you are curious about if you will like or not. It uses Machine Learning with the data obtained from The Movie Database through an API to understand what you like and don't like. The data it uses to predict is the overview, genre, and director of each movie. It then predicts if you will like/probably like/probably not like/not like the movies you are curious about. As with all programs with Machine Learning, the more data (in our case, movies you like and don't) you can give the program, the better it will predict. As an added bonus, the program also predicts which top 20 rated movies on the entire database you will like. Please be careful to input a movie title as it is so that you get the movie you actually mean and the prgram doesn't throw an error. Happy Testing and Happy Watching! - Kaustubh")
print()
#APIKey I got by making a dev account at TMDB.
APIKey = ""
#empty lists designed to store user input and different parameters of the user's input
#Movies that the user likes and doesn't like go here
positive_titles = []
negative_titles = []
#The overview, genre IDs, and Director Name for each movie are stored here for training the machine.
training_texts = []
#Movies the user wants to know if he'll like or not, based on the ones he knows he likes and dislikes
tobetested = []
#The overview, genre IDs, and Director Name for each movie are stored here for the machine to decide if it is a movie the user will like or not
tobetested_texts = []
#This array is for the titles of the top rated 20 movies on TMDB
mostpopular_titles = []
#The overview, genre IDs, and Director Name for the top 20 rated movies are stored here for the machine to decide if it is a movie the user will like or not
mostpopular_texts = []
#The following lines take input from the user and populate the arrays initialized above.
i = int(input("How many movies that you like are you entering? "))
for x in range(i):
movieName = input("Enter the name of a movie you liked: ")
positive_titles.append(movieName)
print()
i = int(input("How many movies that you don't like are you entering? "))
for x in range(i):
movieName = input("Enter the name of a movie you didn't like: ")
negative_titles.append(movieName)
print()
i = int(input("How many movies you're curious about are you entering?"))
for x in range(i):
movieName = input("Enter the name of a movie you want to check: ")
tobetested.append(movieName)
print()
print()
print()
#lists of movies to make testing easier and faster. You will have to uncomment this block and comment out the block above for using this.
'''
negative_titles = ["Avatar", "1917", "Joker", "Inception", "Interstellar", "Inglourious Basterds", "The Platform", "Titanic", "The Wolf of Wall Street", "El Camino"]
positive_titles = ["Toy Story", "Big Hero 6", "Trolls World Tour", "Jumanji: The Next Level", "Ninja Turtles", "Tangled", "Despicable Me 2", "Finding Nemo", "Garfield", "Toy Story 2"]
tobetested = ["Alladin", "Frozen", "Coco", "Mad Max: Fury Road", "Pets 2", "Incredibles 2", "Sing", "Zootopia", "The Revenant", "Inside Out", "Ad Astra", "Trolls", "Shrek"]
'''
#Goes through the movies that the user likes and pulls the above mentioned parameters from TMDB to store in training_texts
for x in range(len(positive_titles)):
directors = []
movieName = positive_titles[x]
httpRequest = "https://api.themoviedb.org/3/search/movie?include_adult=false&page=1&query="+movieName+"&language=en-US&api_key="+APIKey
response = requests.get(httpRequest)
data = response.json()
training_texts.append(data["results"][0]["overview"])
training_texts.append(str(data["results"][0]["genre_ids"]))
movie_id = data["results"][0]["id"]
httpRequest2 = "https://api.themoviedb.org/3/movie/" + str(movie_id) + "?api_key=" + APIKey + "&append_to_response=credits"
response = requests.get(httpRequest2)
data = response.json()
hello = data["credits"]
hello1 = hello["crew"]
for x in hello1:
if x["job"] == "Director":
directors.append(x["name"])
training_texts.append(str(directors))
#Goes through the movies that the user doesn't like and pulls the above mentioned parameters from TMDB to store in training_texts
for x in range(len(negative_titles)):
directors = []
movieName = negative_titles[x]
httpRequest = "https://api.themoviedb.org/3/search/movie?include_adult=false&page=1&query="+movieName+"&language=en-US&api_key="+APIKey
response = requests.get(httpRequest)
data = response.json()
training_texts.append(data["results"][0]["overview"])
training_texts.append(str(data["results"][0]["genre_ids"]))
movie_id = data["results"][0]["id"]
httpRequest2 = "https://api.themoviedb.org/3/movie/" + str(movie_id) + "?api_key=" + APIKey + "&append_to_response=credits"
response = requests.get(httpRequest2)
data = response.json()
hello = data["credits"]
hello1 = hello["crew"]
for x in hello1:
if x["job"] == "Director":
directors.append(x["name"])
training_texts.append(str(directors))
#Goes through the movies that the user wants to test and pulls the above-mentioned parameters from TMDB to store in tobetested_texts
for x in range(len(tobetested)):
directors = []
movieName = tobetested[x]
httpRequest = "https://api.themoviedb.org/3/search/movie?include_adult=false&page=1&query="+movieName+"&language=en-US&api_key="+APIKey
response = requests.get(httpRequest)
data = response.json()
tobetested_texts.append(data["results"][0]["overview"])
tobetested_texts.append(str(data["results"][0]["genre_ids"]))
movie_id = data["results"][0]["id"]
httpRequest2 = "https://api.themoviedb.org/3/movie/" + str(movie_id) + "?api_key=" + APIKey + "&append_to_response=credits"
response = requests.get(httpRequest2)
data = response.json()
hello = data["credits"]
hello1 = hello["crew"]
for x in hello1:
if x["job"] == "Director":
directors.append(x["name"])
tobetested_texts.append(str(directors))
#Pulls the parameters for the current 20 top rated movies and puts them in mostpopular_texts
directors = []
httpRequest = "https://api.themoviedb.org/3/movie/top_rated?api_key=e20e035943ec00333eb2a1d09ea93a5c&language=en-US&page=1"
response = requests.get(httpRequest)
data = response.json()
bye = data["results"]
for x in bye:
    directors = []  # reset per movie so earlier movies' directors don't accumulate
    if x["overview"] != "" or x["genre_ids"] != "":
mostpopular_titles.append(x["title"])
mostpopular_texts.append(x["overview"])
mostpopular_texts.append(str(x["genre_ids"]))
movie_id = x["id"]
httpRequest2 = "https://api.themoviedb.org/3/movie/" + str(movie_id) + "?api_key=" + APIKey + "&append_to_response=credits"
response = requests.get(httpRequest2)
data = response.json()
hello = data["credits"]
hello1 = hello["crew"]
for x in hello1:
if x["job"] == "Director":
directors.append(x["name"])
mostpopular_texts.append(str(directors))
#Here I prepare an equivalent set of labels, to tell the machine
# that the first texts that came from movies the user liked are positive and the rest are negative.
# When I feed these into the classifier, it'll use indices to match up
# the texts and qualify what parameters are good and bad.
training_labels = ["good"] * (3*len(positive_titles)) + ["bad"] * (3*len(negative_titles))
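#Illustrative example: with 2 liked and 1 disliked movie this evaluates to
#["good"]*6 + ["bad"]*3, since three texts (overview, genre IDs, director list)
#are stored per movie.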
#The vectorizer is set up here: the first main component of machine learning
vectorizer = CountVectorizer(stop_words='english')
#Here I feed the data we have into the vectorizer so it can keep a
# consistent mapping.
vectorizer.fit(training_texts)
# Here I transform all of the training texts into vector form. Basically makes it a list of numbers because code makes decisions quantitatively
training_vectors = vectorizer.transform(training_texts)
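#Illustrative note: each row of training_vectors is a sparse bag-of-words count of
#one stored text, using the vocabulary the vectorizer learned in fit() above.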
#I also convert the texts we are going to test and classify as good and bad into vector form
test_texts = tobetested_texts
test_populartexts = mostpopular_texts
testing_vectors = vectorizer.transform(test_texts)
testing_vectors_popular = vectorizer.transform(test_populartexts)
#This is where the real machine learning happens, as the code "connects the dots" between the training data and what is considered good and bad using the labels.
classifier = tree.DecisionTreeClassifier()
classifier.fit(training_vectors, training_labels)
#Uses the connections the code made in previous steps to test each of the parameters of each movie the user wants to test and returns whether the user will like/not like/probably like/probably not like it based on the results.
likeDict = {
"will like" : "",
"will probably like" : "",
"will probably not like" : "",
"will not like" : ""
}
print("Out of the movies you wanted to test:")
print()
for i, movie in enumerate(tobetested):
listFormat = [tobetested_texts[i*3], tobetested_texts[i*3+1], tobetested_texts[i*3+2]]
vectorFormat = vectorizer.transform(listFormat)
result = classifier.predict(vectorFormat)
if result[0] == 'good' and result [1] == 'good' and result[2] == 'good':
likeDict['will like'] += (movie + ", ")
elif result[0] == 'bad' and result [1] == 'bad' and result[2] == 'bad':
likeDict['will not like'] += (movie + ", ")
elif result[0] == 'good' and result [1] == 'good' and result[2] == 'bad':
likeDict['will probably like'] += (movie + ", ")
elif result[0] == 'bad' and result [1] == 'good' and result[2] == 'good':
likeDict['will probably like'] += (movie + ", ")
elif result[0] == 'good' and result [1] == 'bad' and result[2] == 'good':
likeDict['will probably like'] += (movie + ", ")
else:
likeDict['will probably not like'] += (movie + ", ")
for x in likeDict:
if likeDict[x] != "":
print("You", x, likeDict[x][0:-2])
print()
print()
print()
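# --- Illustrative aside (not part of the original script) ---
# The if/elif chain above can be read as a vote count over the three per-movie
# predictions (overview, genres, director). A minimal helper expressing the same
# rule, assuming the 'good'/'bad' labels used throughout this script:
def verdict(predictions):
    """predictions: a sequence of three 'good'/'bad' labels for one movie."""
    votes = sum(1 for p in predictions if p == 'good')
    if votes == 3:
        return 'will like'
    if votes == 2:
        return 'will probably like'
    if votes == 1:
        return 'will probably not like'
    return 'will not like'
# e.g. verdict(classifier.predict(vectorFormat)) returns one of the four likeDict keys.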
#Uses the connections the code made in previous steps to test each of the parameters of each movie in the top 20 rated list and reports whether the user will like/not like/probably like/probably not like it based on the results.
likeDict = {
"will like" : "",
"will probably like" : "",
"will probably not like" : "",
"will not like" : ""
}
print("Using your likes and dislikes, out of the top 20 top rated movies in the entire movie database:")
print()
for i, movie in enumerate(mostpopular_titles):
listFormat = [mostpopular_texts[i*3], mostpopular_texts[i*3+1], mostpopular_texts[i*3+2]]
vectorFormat = vectorizer.transform(listFormat)
result = classifier.predict(vectorFormat)
if result[0] == 'good' and result [1] == 'good' and result[2] == 'good':
likeDict['will like'] += (movie + ", ")
elif result[0] == 'bad' and result [1] == 'bad' and result[2] == 'bad':
likeDict['will not like'] += (movie + ", ")
elif result[0] == 'good' and result [1] == 'good' and result[2] == 'bad':
likeDict['will probably like'] += (movie + ", ")
elif result[0] == 'bad' and result [1] == 'good' and result[2] == 'good':
likeDict['will probably like'] += (movie + ", ")
elif result[0] == 'good' and result [1] == 'bad' and result[2] == 'good':
likeDict['will probably like'] += (movie + ", ")
else:
likeDict['will probably not like'] += (movie + ", ")
for x in likeDict:
if likeDict[x] != "":
print("You", x, likeDict[x][0:-2])
print()
| classifier,
out_file='tree.dot',
feature_names=vectorizer.get_feature_names(),
class_names=["bad","good"]
) |
#Looking at how the code makes its decisions visually is a lot easier, so I export the model to the tree.dot file. Upon copying all the data in tree.dot and pasting it into the textbox on http://www.webgraphviz.com/ you can see what the decision-making process looks like.
tree.export_graphviz( | random_line_split |
main.py |
from sklearn import tree
from sklearn.feature_extraction.text import CountVectorizer
import http.client
import json
import requests
print("This program takes the movies you like, the ones you don't, and the ones you are curious about if you will like or not. It uses Machine Learning with the data obtained from The Movie Database through an API to understand what you like and don't like. The data it uses to predict is the overview, genre, and director of each movie. It then predicts if you will like/probably like/probably not like/not like the movies you are curious about. As with all programs with Machine Learning, the more data (in our case, movies you like and don't) you can give the program, the better it will predict. As an added bonus, the program also predicts which top 20 rated movies on the entire database you will like. Please be careful to input a movie title as it is so that you get the movie you actually mean and the prgram doesn't throw an error. Happy Testing and Happy Watching! - Kaustubh")
print()
#APIKey I got by making a dev account at TMDB.
APIKey = ""
#empty lists designed to store user input and different parameters of the user's input
#Movies that the user likes and doesn't like go here
positive_titles = []
negative_titles = []
#The overview, genre IDs, and Director Name for each movie are stored here for training the machine.
training_texts = []
#Movies the user wants to know if he'll like or not, based on the ones he knows he likes and dislikes
tobetested = []
#The overview, genre IDs, and Director Name for each movie are stored here for the machine to decide if it is a movie the user will like or not
tobetested_texts = []
#This array is for the titles of the top rated 20 movies on TMDB
mostpopular_titles = []
#The overview, genre IDs, and Director Name for the top 20 rated movies are stored here for the machine to decide if each is a movie the user will like or not
mostpopular_texts = []
#The following lines are for taking input for the user and populating the arrays initialized above.
i = int(input("How many movies that you like are you entering? "))
for x in range(i):
movieName = input("Enter the name of a movie you liked: ")
positive_titles.append(movieName)
print()
i = int(input("How many movies that you don't like are you entering? "))
for x in range(i):
movieName = input("Enter the name of a movie you didn't like: ")
negative_titles.append(movieName)
print()
i = int(input("How many movies you're curious about are you entering?"))
for x in range(i):
movieName = input("Enter the name of a movie you want to check: ")
tobetested.append(movieName)
print()
print()
print()
#lists of movies to make testing easier and faster. You will have to uncomment this block and comment out the block above for using this.
'''
negative_titles = ["Avatar", "1917", "Joker", "Inception", "Interstellar", "Inglourious Basterds", "The Platform", "Titanic", "The Wolf of Wall Street", "El Camino"]
positive_titles = ["Toy Story", "Big Hero 6", "Trolls World Tour", "Jumanji: The Next Level", "Ninja Turtles", "Tangled", "Despicable Me 2", "Finding Nemo", "Garfield", "Toy Story 2"]
tobetested = ["Alladin", "Frozen", "Coco", "Mad Max: Fury Road", "Pets 2", "Incredibles 2", "Sing", "Zootopia", "The Revenant", "Inside Out", "Ad Astra", "Trolls", "Shrek"]
'''
#Goes through the movies that the user likes and pulls the above mentioned parameters from TMDB to store in training_texts
for x in range(len(positive_titles)):
directors = []
movieName = positive_titles[x]
httpRequest = "https://api.themoviedb.org/3/search/movie?include_adult=false&page=1&query="+movieName+"&language=en-US&api_key="+APIKey
response = requests.get(httpRequest)
data = response.json()
training_texts.append(data["results"][0]["overview"])
training_texts.append(str(data["results"][0]["genre_ids"]))
movie_id = data["results"][0]["id"]
httpRequest2 = "https://api.themoviedb.org/3/movie/" + str(movie_id) + "?api_key=" + APIKey + "&append_to_response=credits"
response = requests.get(httpRequest2)
data = response.json()
hello = data["credits"]
hello1 = hello["crew"]
for x in hello1:
if x["job"] == "Director":
directors.append(x["name"])
training_texts.append(str(directors))
#Goes through the movies that the user doesn't like and pulls the above mentioned parameters from TMDB to store in training_texts
for x in range(len(negative_titles)):
directors = []
movieName = negative_titles[x]
httpRequest = "https://api.themoviedb.org/3/search/movie?include_adult=false&page=1&query="+movieName+"&language=en-US&api_key="+APIKey
response = requests.get(httpRequest)
data = response.json()
training_texts.append(data["results"][0]["overview"])
training_texts.append(str(data["results"][0]["genre_ids"]))
movie_id = data["results"][0]["id"]
httpRequest2 = "https://api.themoviedb.org/3/movie/" + str(movie_id) + "?api_key=" + APIKey + "&append_to_response=credits"
response = requests.get(httpRequest2)
data = response.json()
hello = data["credits"]
hello1 = hello["crew"]
for x in hello1:
if x["job"] == "Director":
directors.append(x["name"])
training_texts.append(str(directors))
#Goes through the movies that the user is curious about and pulls the above mentioned parameters from TMDB to store in tobetested_texts
for x in range(len(tobetested)):
directors = []
movieName = tobetested[x]
httpRequest = "https://api.themoviedb.org/3/search/movie?include_adult=false&page=1&query="+movieName+"&language=en-US&api_key="+APIKey
response = requests.get(httpRequest)
data = response.json()
tobetested_texts.append(data["results"][0]["overview"])
tobetested_texts.append(str(data["results"][0]["genre_ids"]))
movie_id = data["results"][0]["id"]
httpRequest2 = "https://api.themoviedb.org/3/movie/" + str(movie_id) + "?api_key=" + APIKey + "&append_to_response=credits"
response = requests.get(httpRequest2)
data = response.json()
hello = data["credits"]
hello1 = hello["crew"]
for x in hello1:
if x["job"] == "Director":
directors.append(x["name"])
tobetested_texts.append(str(directors))
#Pulls the parameters for the current 20 top rated movies and puts them in mostpopular_texts
directors = []
httpRequest = "https://api.themoviedb.org/3/movie/top_rated?api_key=e20e035943ec00333eb2a1d09ea93a5c&language=en-US&page=1"
response = requests.get(httpRequest)
data = response.json()
bye = data["results"]
for x in bye:
if x["overview"] != "" or x["genre_ids"] != "":
mostpopular_titles.append(x["title"])
mostpopular_texts.append(x["overview"])
mostpopular_texts.append(str(x["genre_ids"]))
movie_id = x["id"]
httpRequest2 = "https://api.themoviedb.org/3/movie/" + str(movie_id) + "?api_key=" + APIKey + "&append_to_response=credits"
response = requests.get(httpRequest2)
data = response.json()
hello = data["credits"]
hello1 = hello["crew"]
for x in hello1:
if x["job"] == "Director":
directors.append(x["name"])
mostpopular_texts.append(str(directors))
#Here I prepare an equivalent set of labels, to tell the machine
# that the first texts that came from movies the user liked are positive and the rest are negative.
# When I feed these into the classifier, it'll use indices to match up
# the texts and qualify what parameters are good and bad.
training_labels = ["good"] * (3*len(positive_titles)) + ["bad"] * (3*len(negative_titles))
#The vectorizer is set up here: the first main component of machine learning
vectorizer = CountVectorizer(stop_words='english')
#Here I feed the data we have into the vectorizer so it can keep a
# consistent mapping.
vectorizer.fit(training_texts)
# Here I transform all of the training texts into vector form. Basically makes it a list of numbers because code makes decisions quantitatively
training_vectors = vectorizer.transform(training_texts)
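# Tiny illustration of the fit/transform split (toy data, not from the original):
# fit() learns the vocabulary, transform() returns a sparse document-term matrix
# with one row of word counts per text, using that fixed vocabulary.
# vec = CountVectorizer(stop_words='english')
# vec.fit(["a space adventure", "a crime drama"])
# vec.transform(["space crime"]).toarray()  # -> one row counting 'space' and 'crime'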
#I also convert the texts we are going to test and classify as good and bad into vector form
test_texts = tobetested_texts
test_populartexts = mostpopular_texts
testing_vectors = vectorizer.transform(test_texts)
testing_vectors_popular = vectorizer.transform(test_populartexts)
#This is where the real machine learning happens, as the code "connects the dots" between the training data and what is considered good and bad using the labels.
classifier = tree.DecisionTreeClassifier()
classifier.fit(training_vectors, training_labels)
#Uses the connections the code made in previous steps to test each of the parameters of each movie the user wants to test and reports whether the user will like/not like/probably like/probably not like it based on the results.
likeDict = {
"will like" : "",
"will probably like" : "",
"will probably not like" : "",
"will not like" : ""
}
print("Out of the movies you wanted to test:")
print()
for i, movie in enumerate(tobetested):
listFormat = [tobetested_texts[i*3], tobetested_texts[i*3+1], tobetested_texts[i*3+2]]
vectorFormat = vectorizer.transform(listFormat)
result = classifier.predict(vectorFormat)
if result[0] == 'good' and result [1] == 'good' and result[2] == 'good':
likeDict['will like'] += (movie + ", ")
elif result[0] == 'bad' and result [1] == 'bad' and result[2] == 'bad':
likeDict['will not like'] += (movie + ", ")
elif result[0] == 'good' and result [1] == 'good' and result[2] == 'bad':
likeDict['will probably like'] += (movie + ", ")
elif result[0] == 'bad' and result [1] == 'good' and result[2] == 'good':
likeDict['will probably like'] += (movie + ", ")
elif result[0] == 'good' and result [1] == 'bad' and result[2] == 'good':
likeDict['will probably like'] += (movie + ", ")
else:
likeDict['will probably not like'] += (movie + ", ")
for x in likeDict:
if likeDict[x] != "":
print("You", x, likeDict[x][0:-2])
print()
print()
print()
#Uses the connections the code made in previous steps to test each of the parameters of each movie in the top 20 rated list and reports whether the user will like/not like/probably like/probably not like it based on the results.
likeDict = {
"will like" : "",
"will probably like" : "",
"will probably not like" : "",
"will not like" : ""
}
print("Using your likes and dislikes, out of the top 20 top rated movies in the entire movie database:")
print()
for i, movie in enumerate(mostpopular_titles):
listFormat = [mostpopular_texts[i*3], mostpopular_texts[i*3+1], mostpopular_texts[i*3+2]]
vectorFormat = vectorizer.transform(listFormat)
result = classifier.predict(vectorFormat)
if result[0] == 'good' and result [1] == 'good' and result[2] == 'good':
likeDict['will like'] += (movie + ", ")
elif result[0] == 'bad' and result [1] == 'bad' and result[2] == 'bad':
likeDict['will not like'] += (movie + ", ")
elif result[0] == 'good' and result [1] == 'good' and result[2] == 'bad':
likeDict['will probably like'] += (movie + ", ")
elif result[0] == 'bad' and result [1] == 'good' and result[2] == 'good':
likeDict['will probably like'] += (movie + ", ")
elif result[0] == 'good' and result [1] == 'bad' and result[2] == 'good':
likeDict['will probably like'] += (movie + ", ")
else:
|
for x in likeDict:
if likeDict[x] != "":
print("You", x, likeDict[x][0:-2])
print()
#Looking at how the code makes its decisions visually is a lot easier, so I export the model to the tree.dot file. Upon copying all the data in tree.dot and pasting it into the textbox on http://www.webgraphviz.com/ you can see what the decision-making process looks like. (An offline rendering option is sketched just below.)
tree.export_graphviz(
classifier,
out_file='tree.dot',
feature_names=vectorizer.get_feature_names(),
class_names=["bad","good"]
)
| likeDict['will probably not like'] += (movie + ", ") | conditional_block |
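# --- Optional aside (not part of the original script) ---
# The tree.dot file written above can also be rendered locally instead of via
# webgraphviz, assuming the Graphviz tools and the Python 'graphviz' package are
# installed (both are assumptions, not requirements of the original code):
#   dot -Tpng tree.dot -o tree.png
# or, from Python:
try:
    import graphviz
    with open('tree.dot') as f:
        graphviz.Source(f.read()).render('tree', format='png', cleanup=True)
except Exception:
    pass  # the online viewer mentioned above works just as well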
data.py | from ..core.helpers import itemize
from ..core.files import backendRep, expandDir, prefixSlash, normpath
from .helpers import splitModRef
from .repo import checkoutRepo
from .links import provenanceLink
# GET DATA FOR MAIN SOURCE AND ALL MODULES
class AppData:
def __init__(
self, app, backend, moduleRefs, locations, modules, version, checkout, silent
):
"""Collects TF data according to specifications.
The specifications are passed as arguments when the object is initialized.
Parameters
----------
backend: string
`github` or `gitlab` or a GitLab instance such as `gitlab.huc.knaw.nl`.
app: obj
The high-level API object
moduleRefs: tuple
Each member consists of a module ref, which is a tuple of information
that defines a module.
locations: string|tuple
One or more directory paths. They will be combined with the `modules`
argument and used as locations to search for TF data files.
modules: string|tuple
One or more directory path segments. They will be appended to the
paths given by the `locations` argument to form search locations
for TF data files.
version: string
The version of TF data that should be retrieved. Version is a directory
level just below the search locations.
checkout: string
A specifier to use a specific release or commit of a data repository.
silent: string, optional tf.core.timestamp.SILENT_D
See `tf.core.timestamp.Timestamp`
"""
self.backend = backend
self.app = app
self.moduleRefs = (
[]
if moduleRefs is None
else moduleRefs.split(",")
if type(moduleRefs) is str
else list(moduleRefs)
)
self.locationsArg = locations
self.modulesArg = modules
self.version = version
self.checkout = checkout
self.silent = silent
def getMain(self):
"""Get the main data of the corpus.
This is specified by the `org`, `repo` and `relative` settings under
`provenanceSpec` in `config.yaml`.
See Also
--------
tf.advanced.settings: options allowed in `config.yaml`
"""
app = self.app
checkout = self.checkout
aContext = app.context
org = aContext.org
repo = aContext.repo
relative = prefixSlash(aContext.relative)
appPath = aContext.appPath
appName = aContext.appName
if appName.startswith("app:"):
appParent = appPath.rsplit("/", 1)[0]
relative = f"{appParent}{relative}"
elif org is None or repo is None:
appPathRep = f"{appPath}/" if appPath else ""
relative = f"{appPathRep}{appName}"
self.checkout = "local"
if not self.getModule(org, repo, prefixSlash(relative), checkout, isBase=True):
self.good = False
def getStandard(self):
"""Get the data of the standard modules specified by the settings of the corpus.
These are specified in the `moduleSpecs` setting under
`provenanceSpecs` in `config.yaml`.
They will be loaded *after* the extra modules specified in the **mod**
parameter, and only insofar as they have not been specified in the
**mod** parameter. In this way you can pass overriding
checkout specifiers to the standard modules.
See Also
--------
tf.advanced.settings: options allowed in `config.yaml`
"""
app = self.app
loadData = app.loadData
if not loadData or loadData == "core":
return
aContext = app.context
moduleSpecs = aContext.moduleSpecs
seen = self.seen
checkout = self.checkout
backend = self.backend
for m in moduleSpecs or []:
org = m["org"]
repo = m["repo"]
relative = m["relative"]
theCheckout = m.get("checkout", checkout)
theBackend = m.get("backend", backend)
bRep = backendRep(theBackend, "spec", default=backend)
ref = f"{bRep}{org}/{repo}{relative}"
if ref in seen:
continue
if not self.getModule(
org,
repo,
relative,
theCheckout,
backend=theBackend,
specs=m,
):
self.good = False
def getRefs(self):
"""Get data from additional modules.
These are specified in the `moduleRefs` parameter of `AppData`.
We store the set of special modules in order to skip them
later when we are loading the standard modules.
"""
backend = self.backend
refs = self.moduleRefs
for ref in refs:
refPure = ref.rsplit(":", 1)[0]
if refPure in self.seen:
continue
parts = splitModRef(ref)
if not parts:
self.good = False
continue
parts[2] = prefixSlash(normpath(parts[2])) # the relative bit
theBackend = (
None if parts[-1] is None or parts[-1] == backend else parts[-1]
)
if not self.getModule(*parts[0:-1], backend=theBackend):
self.good = False
def getModules(self):
"""Get data from additional local directories.
These are specified in the `locations` and `modules` parameters of `AppData`.
"""
self.provenance = [] | provenance = self.provenance
self.mLocations = []
mLocations = self.mLocations
self.locations = None
self.modules = None
self.good = True
self.seen = set()
self.getMain()
self.getRefs()
self.getStandard()
version = self.version
good = self.good
app = self.app
if good:
app.mLocations = mLocations
app.provenance = provenance
else:
return
mModules = []
if mLocations:
mModules.append(version or "")
locations = self.locationsArg
modules = self.modulesArg
givenLocations = (
[]
if locations is None
else [expandDir(app, x.strip()) for x in itemize(locations, "\n")]
if type(locations) is str
else [str(x) for x in locations]
)
givenModules = (
[]
if modules is None
else [normpath(x.strip()) for x in itemize(modules, "\n")]
if type(modules) is str
else [normpath(str(x)) for x in modules]
)
self.locations = mLocations + givenLocations
self.modules = mModules + givenModules
def getModule(
self, org, repo, relative, checkout, backend=None, isBase=False, specs=None
):
"""Prepare to load a single module.
Eventually, all TF data will be downloaded from local directories, based
on a list of location paths and module paths.
This function computes the contribution of a single module to both the
location paths and the module paths.
Parameters
----------
org: string
GitHub organization or GitLab group of the module
repo: string:
GitHub repository or GitLab project of the module
relative: string
Path within the repository of the module
checkout: string
A specifier to use a specific release or commit of a data repository.
backend: string
The backend if different from the backend of the main module
isBase: boolean, optional False
Whether this module is the main data of the corpus.
specs: dict, optional False
Additional informational attributes of the module, e.g. a DOI
"""
backend = self.backend if backend is None else backendRep(backend, "norm")
bRep = backendRep(backend, "spec", default=self.backend)
version = self.version
silent = self.silent
mLocations = self.mLocations
provenance = self.provenance
seen = self.seen
app = self.app
_browse = app._browse
aContext = app.context
branch = aContext.provenanceSpec["branch"]
relative = prefixSlash(normpath(relative))
moduleRef = f"{bRep}{org}/{repo}{relative}"
if moduleRef in self.seen:
return True
if org is None or repo is None:
relativeBare = relative.removeprefix("/")
repoLocation = relativeBare
mLocations.append(relativeBare)
(commit, local, release) = (None, None, None)
else:
(commit, release, local, localBase, localDir) = checkoutRepo(
backend,
_browse=_browse,
org=org,
repo=repo,
folder=relative,
version=version,
checkout=checkout,
withPaths=False,
keep=False,
silent=silent,
)
if not localBase:
return False
repoLocation = f"{localBase}/{org}/{repo}"
mLocations.append(f"{localBase}/{localDir}")
seen.add(moduleRef)
if isBase:
app.repoLocation = repoLocation
info = {}
for item in (
("doi", None),
("corpus", f"{org}/{repo}{relative}"),
):
(key, default) = item
info[key] = (
getattr(aContext, key)
if isBase
else specs[key]
if specs and key in specs
else default
)
provenance.append(
(
("corpus", info["corpus"]),
("version", version),
("commit", commit or "??"),
("release", release or "none"),
(
"live",
provenanceLink(
backend, org, repo, version, branch, commit, local, release, relative
),
),
("doi", info["doi"]),
)
)
return True
def getModulesData(*args):
"""Retrieve all data for a corpus.
Parameters
----------
args: list
All parameters needed to retrieve all associated data.
They are the same as are needed to construct an `AppData` object.
"""
mData = AppData(*args)
mData.getModules()
if not mData.good or mData.locations is None:
return None
return (mData.locations, mData.modules) | random_line_split |
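# Usage sketch (illustrative, not part of data.py): how a caller might consume the
# (locations, modules) pair returned by getModulesData. Names such as app, backend,
# moduleRefs, version, checkout and silent are placeholders assumed to be prepared
# by the surrounding advanced-API code.
#
# from tf.fabric import Fabric
#
# res = getModulesData(app, backend, moduleRefs, locations, modules, version, checkout, silent)
# if res is not None:
#     locations, modules = res
#     TF = Fabric(locations=locations, modules=modules)
#     api = TF.load("")  # load the default feature set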
|
data.py | from ..core.helpers import itemize
from ..core.files import backendRep, expandDir, prefixSlash, normpath
from .helpers import splitModRef
from .repo import checkoutRepo
from .links import provenanceLink
# GET DATA FOR MAIN SOURCE AND ALL MODULES
class AppData:
def __init__(
self, app, backend, moduleRefs, locations, modules, version, checkout, silent
):
"""Collects TF data according to specifications.
The specifications are passed as arguments when the object is initialized.
Parameters
----------
backend: string
`github` or `gitlab` or a GitLab instance such as `gitlab.huc.knaw.nl`.
app: obj
The high-level API object
moduleRefs: tuple
Each member consists of a module ref, which is a tuple of information
that defines a module.
locations: string|tuple
One or more directory paths. They will be combined with the `modules`
argument and used as locations to search for TF data files.
modules: string|tuple
One or more directory path segments. They will be appended to the
paths given by the `locations` argument to form search locations
for TF data files.
version: string
The version of TF data that should be retrieved. Version is a directory
level just below the search locations.
checkout: string
A specifier to use a specific release or commit of a data repository.
silent: string, optional tf.core.timestamp.SILENT_D
See `tf.core.timestamp.Timestamp`
"""
self.backend = backend
self.app = app
self.moduleRefs = (
[]
if moduleRefs is None
else moduleRefs.split(",")
if type(moduleRefs) is str
else list(moduleRefs)
)
self.locationsArg = locations
self.modulesArg = modules
self.version = version
self.checkout = checkout
self.silent = silent
def getMain(self):
"""Get the main data of the corpus.
This is specified by the `org`, `repo` and `relative` settings under
`provenanceSpec` in `config.yaml`.
See Also
--------
tf.advanced.settings: options allowed in `config.yaml`
"""
app = self.app
checkout = self.checkout
aContext = app.context
org = aContext.org
repo = aContext.repo
relative = prefixSlash(aContext.relative)
appPath = aContext.appPath
appName = aContext.appName
if appName.startswith("app:"):
appParent = appPath.rsplit("/", 1)[0]
relative = f"{appParent}{relative}"
elif org is None or repo is None:
appPathRep = f"{appPath}/" if appPath else ""
relative = f"{appPathRep}{appName}"
self.checkout = "local"
if not self.getModule(org, repo, prefixSlash(relative), checkout, isBase=True):
self.good = False
def getStandard(self):
"""Get the data of the standard modules specified by the settings of the corpus.
These are specified in the `moduleSpecs` setting under
`provenanceSpecs` in `config.yaml`.
They will be loaded *after* the extra modules specified in the **mod**
parameter, and only insofar as they have not been specified in the
**mod** parameter. In this way you can pass overriding
checkout specifiers to the standard modules.
See Also
--------
tf.advanced.settings: options allowed in `config.yaml`
"""
app = self.app
loadData = app.loadData
if not loadData or loadData == "core":
return
aContext = app.context
moduleSpecs = aContext.moduleSpecs
seen = self.seen
checkout = self.checkout
backend = self.backend
for m in moduleSpecs or []:
org = m["org"]
repo = m["repo"]
relative = m["relative"]
theCheckout = m.get("checkout", checkout)
theBackend = m.get("backend", backend)
bRep = backendRep(theBackend, "spec", default=backend)
ref = f"{bRep}{org}/{repo}{relative}"
if ref in seen:
continue
if not self.getModule(
org,
repo,
relative,
theCheckout,
backend=theBackend,
specs=m,
):
self.good = False
def getRefs(self):
"""Get data from additional modules.
These are specified in the `moduleRefs` parameter of `AppData`.
We store the set of special modules in order to skip them
later when we are loading the standard modules.
"""
backend = self.backend
refs = self.moduleRefs
for ref in refs:
refPure = ref.rsplit(":", 1)[0]
if refPure in self.seen:
continue
parts = splitModRef(ref)
if not parts:
self.good = False
continue
parts[2] = prefixSlash(normpath(parts[2])) # the relative bit
theBackend = (
None if parts[-1] is None or parts[-1] == backend else parts[-1]
)
if not self.getModule(*parts[0:-1], backend=theBackend):
self.good = False
def getModules(self):
"""Get data from additional local directories.
These are specified in the `locations` and `modules` parameters of `AppData`.
"""
self.provenance = []
provenance = self.provenance
self.mLocations = []
mLocations = self.mLocations
self.locations = None
self.modules = None
self.good = True
self.seen = set()
self.getMain()
self.getRefs()
self.getStandard()
version = self.version
good = self.good
app = self.app
if good:
app.mLocations = mLocations
app.provenance = provenance
else:
return
mModules = []
if mLocations:
mModules.append(version or "")
locations = self.locationsArg
modules = self.modulesArg
givenLocations = (
[]
if locations is None
else [expandDir(app, x.strip()) for x in itemize(locations, "\n")]
if type(locations) is str
else [str(x) for x in locations]
)
givenModules = (
[]
if modules is None
else [normpath(x.strip()) for x in itemize(modules, "\n")]
if type(modules) is str
else [normpath(str(x)) for x in modules]
)
self.locations = mLocations + givenLocations
self.modules = mModules + givenModules
def getModule(
self, org, repo, relative, checkout, backend=None, isBase=False, specs=None
):
"""Prepare to load a single module.
Eventually, all TF data will be downloaded from local directories, based
on a list of location paths and module paths.
This function computes the contribution of a single module to both the
location paths and the module paths.
Parameters
----------
org: string
GitHub organization or GitLab group of the module
repo: string:
GitHub repository or GitLab project of the module
relative: string
Path within the repository of the module
checkout: string
A specifier to use a specific release or commit of a data repository.
backend: string
The backend if different from the backend of the main module
isBase: boolean, optional False
Whether this module is the main data of the corpus.
specs: dict, optional False
Additional informational attributes of the module, e.g. a DOI
"""
backend = self.backend if backend is None else backendRep(backend, "norm")
bRep = backendRep(backend, "spec", default=self.backend)
version = self.version
silent = self.silent
mLocations = self.mLocations
provenance = self.provenance
seen = self.seen
app = self.app
_browse = app._browse
aContext = app.context
branch = aContext.provenanceSpec["branch"]
relative = prefixSlash(normpath(relative))
moduleRef = f"{bRep}{org}/{repo}{relative}"
if moduleRef in self.seen:
return True
if org is None or repo is None:
relativeBare = relative.removeprefix("/")
repoLocation = relativeBare
mLocations.append(relativeBare)
(commit, local, release) = (None, None, None)
else:
(commit, release, local, localBase, localDir) = checkoutRepo(
backend,
_browse=_browse,
org=org,
repo=repo,
folder=relative,
version=version,
checkout=checkout,
withPaths=False,
keep=False,
silent=silent,
)
if not localBase:
return False
repoLocation = f"{localBase}/{org}/{repo}"
mLocations.append(f"{localBase}/{localDir}")
seen.add(moduleRef)
if isBase:
|
info = {}
for item in (
("doi", None),
("corpus", f"{org}/{repo}{relative}"),
):
(key, default) = item
info[key] = (
getattr(aContext, key)
if isBase
else specs[key]
if specs and key in specs
else default
)
provenance.append(
(
("corpus", info["corpus"]),
("version", version),
("commit", commit or "??"),
("release", release or "none"),
(
"live",
provenanceLink(
backend, org, repo, version, branch, commit, local, release, relative
),
),
("doi", info["doi"]),
)
)
return True
def getModulesData(*args):
"""Retrieve all data for a corpus.
Parameters
----------
args: list
All parameters needed to retrieve all associated data.
They are the same as are needed to construct an `AppData` object.
"""
mData = AppData(*args)
mData.getModules()
if not mData.good or mData.locations is None:
return None
return (mData.locations, mData.modules)
| app.repoLocation = repoLocation | conditional_block |
data.py | from ..core.helpers import itemize
from ..core.files import backendRep, expandDir, prefixSlash, normpath
from .helpers import splitModRef
from .repo import checkoutRepo
from .links import provenanceLink
# GET DATA FOR MAIN SOURCE AND ALL MODULES
class | :
def __init__(
self, app, backend, moduleRefs, locations, modules, version, checkout, silent
):
"""Collects TF data according to specifications.
The specifications are passed as arguments when the object is initialized.
Parameters
----------
backend: string
`github` or `gitlab` or a GitLab instance such as `gitlab.huc.knaw.nl`.
app: obj
The high-level API object
moduleRefs: tuple
Each member consists of a module ref, which is a tuple of information
that defines a module.
locations: string|tuple
One or more directory paths. They will be combined with the `modules`
argument and used as locations to search for TF data files.
modules: string|tuple
One or more directory path segments. They will be appended to the
paths given by the `locations` argument to form search locations
for TF data files.
version: string
The version of TF data that should be retrieved. Version is a directory
level just below the search locations.
checkout: string
A specifier to use a specific release or commit of a data repository.
silent: string, optional tf.core.timestamp.SILENT_D
See `tf.core.timestamp.Timestamp`
"""
self.backend = backend
self.app = app
self.moduleRefs = (
[]
if moduleRefs is None
else moduleRefs.split(",")
if type(moduleRefs) is str
else list(moduleRefs)
)
self.locationsArg = locations
self.modulesArg = modules
self.version = version
self.checkout = checkout
self.silent = silent
def getMain(self):
"""Get the main data of the corpus.
This is specified by the `org`, `repo` and `relative` settings under
`provenanceSpec` in `config.yaml`.
See Also
--------
tf.advanced.settings: options allowed in `config.yaml`
"""
app = self.app
checkout = self.checkout
aContext = app.context
org = aContext.org
repo = aContext.repo
relative = prefixSlash(aContext.relative)
appPath = aContext.appPath
appName = aContext.appName
if appName.startswith("app:"):
appParent = appPath.rsplit("/", 1)[0]
relative = f"{appParent}{relative}"
elif org is None or repo is None:
appPathRep = f"{appPath}/" if appPath else ""
relative = f"{appPathRep}{appName}"
self.checkout = "local"
if not self.getModule(org, repo, prefixSlash(relative), checkout, isBase=True):
self.good = False
def getStandard(self):
"""Get the data of the standard modules specified by the settings of the corpus.
These are specified in the `moduleSpecs` setting under
`provenanceSpecs` in `config.yaml`.
They will be loaded *after* the extra modules specified in the **mod**
parameter, and only insofar as they have not been specified in the
**mod** parameter. In this way you can pass overriding
checkout specifiers to the standard modules.
See Also
--------
tf.advanced.settings: options allowed in `config.yaml`
"""
app = self.app
loadData = app.loadData
if not loadData or loadData == "core":
return
aContext = app.context
moduleSpecs = aContext.moduleSpecs
seen = self.seen
checkout = self.checkout
backend = self.backend
for m in moduleSpecs or []:
org = m["org"]
repo = m["repo"]
relative = m["relative"]
theCheckout = m.get("checkout", checkout)
theBackend = m.get("backend", backend)
bRep = backendRep(theBackend, "spec", default=backend)
ref = f"{bRep}{org}/{repo}{relative}"
if ref in seen:
continue
if not self.getModule(
org,
repo,
relative,
theCheckout,
backend=theBackend,
specs=m,
):
self.good = False
def getRefs(self):
"""Get data from additional modules.
These are specified in the `moduleRefs` parameter of `AppData`.
We store the set of special modules in order to skip them
later when we are loading the standard modules.
"""
backend = self.backend
refs = self.moduleRefs
for ref in refs:
refPure = ref.rsplit(":", 1)[0]
if refPure in self.seen:
continue
parts = splitModRef(ref)
if not parts:
self.good = False
continue
parts[2] = prefixSlash(normpath(parts[2])) # the relative bit
theBackend = (
None if parts[-1] is None or parts[-1] == backend else parts[-1]
)
if not self.getModule(*parts[0:-1], backend=theBackend):
self.good = False
def getModules(self):
"""Get data from additional local directories.
These are specified in the `locations` and `modules` parameters of `AppData`.
"""
self.provenance = []
provenance = self.provenance
self.mLocations = []
mLocations = self.mLocations
self.locations = None
self.modules = None
self.good = True
self.seen = set()
self.getMain()
self.getRefs()
self.getStandard()
version = self.version
good = self.good
app = self.app
if good:
app.mLocations = mLocations
app.provenance = provenance
else:
return
mModules = []
if mLocations:
mModules.append(version or "")
locations = self.locationsArg
modules = self.modulesArg
givenLocations = (
[]
if locations is None
else [expandDir(app, x.strip()) for x in itemize(locations, "\n")]
if type(locations) is str
else [str(x) for x in locations]
)
givenModules = (
[]
if modules is None
else [normpath(x.strip()) for x in itemize(modules, "\n")]
if type(modules) is str
else [normpath(str(x)) for x in modules]
)
self.locations = mLocations + givenLocations
self.modules = mModules + givenModules
def getModule(
self, org, repo, relative, checkout, backend=None, isBase=False, specs=None
):
"""Prepare to load a single module.
Eventually, all TF data will be downloaded from local directories, based
on a list of location paths and module paths.
This function computes the contribution of a single module to both the
location paths and the module paths.
Parameters
----------
org: string
GitHub organization or GitLab group of the module
repo: string:
GitHub repository or GitLab project of the module
relative: string
Path within the repository of the module
checkout: string
A specifier to use a specific release or commit of a data repository.
backend: string
The backend if different from the backend of the main module
isBase: boolean, optional False
Whether this module is the main data of the corpus.
specs: dict, optional False
Additional informational attributes of the module, e.g. a DOI
"""
backend = self.backend if backend is None else backendRep(backend, "norm")
bRep = backendRep(backend, "spec", default=self.backend)
version = self.version
silent = self.silent
mLocations = self.mLocations
provenance = self.provenance
seen = self.seen
app = self.app
_browse = app._browse
aContext = app.context
branch = aContext.provenanceSpec["branch"]
relative = prefixSlash(normpath(relative))
moduleRef = f"{bRep}{org}/{repo}{relative}"
if moduleRef in self.seen:
return True
if org is None or repo is None:
relativeBare = relative.removeprefix("/")
repoLocation = relativeBare
mLocations.append(relativeBare)
(commit, local, release) = (None, None, None)
else:
(commit, release, local, localBase, localDir) = checkoutRepo(
backend,
_browse=_browse,
org=org,
repo=repo,
folder=relative,
version=version,
checkout=checkout,
withPaths=False,
keep=False,
silent=silent,
)
if not localBase:
return False
repoLocation = f"{localBase}/{org}/{repo}"
mLocations.append(f"{localBase}/{localDir}")
seen.add(moduleRef)
if isBase:
app.repoLocation = repoLocation
info = {}
for item in (
("doi", None),
("corpus", f"{org}/{repo}{relative}"),
):
(key, default) = item
info[key] = (
getattr(aContext, key)
if isBase
else specs[key]
if specs and key in specs
else default
)
provenance.append(
(
("corpus", info["corpus"]),
("version", version),
("commit", commit or "??"),
("release", release or "none"),
(
"live",
provenanceLink(
backend, org, repo, version, branch, commit, local, release, relative
),
),
("doi", info["doi"]),
)
)
return True
def getModulesData(*args):
"""Retrieve all data for a corpus.
Parameters
----------
args: list
All parameters needed to retrieve all associated data.
They are the same as are needed to construct an `AppData` object.
"""
mData = AppData(*args)
mData.getModules()
if not mData.good or mData.locations is None:
return None
return (mData.locations, mData.modules)
| AppData | identifier_name |
data.py | from ..core.helpers import itemize
from ..core.files import backendRep, expandDir, prefixSlash, normpath
from .helpers import splitModRef
from .repo import checkoutRepo
from .links import provenanceLink
# GET DATA FOR MAIN SOURCE AND ALL MODULES
class AppData:
|
def getModulesData(*args):
"""Retrieve all data for a corpus.
Parameters
----------
args: list
All parameters needed to retrieve all associated data.
They are the same as are needed to construct an `AppData` object.
"""
mData = AppData(*args)
mData.getModules()
if not mData.good or mData.locations is None:
return None
return (mData.locations, mData.modules)
| def __init__(
self, app, backend, moduleRefs, locations, modules, version, checkout, silent
):
"""Collects TF data according to specifications.
The specifications are passed as arguments when the object is initialized.
Parameters
----------
backend: string
`github` or `gitlab` or a GitLab instance such as `gitlab.huc.knaw.nl`.
app: obj
The high-level API object
moduleRefs: tuple
Each member consists of a module ref, which is a tuple of information
that defines a module.
locations: string|tuple
One or more directory paths. They will be combined with the `modules`
argument and used as locations to search for TF data files.
modules: string|tuple
One or more directory path segments. They will be appended to the
paths given by the `locations` argument to form search locations
for TF data files.
version: string
The version of TF data that should be retrieved. Version is a directory
level just below the search locations.
checkout: string
A specifier to use a specific release or commit of a data repository.
silent: string, optional tf.core.timestamp.SILENT_D
See `tf.core.timestamp.Timestamp`
"""
self.backend = backend
self.app = app
self.moduleRefs = (
[]
if moduleRefs is None
else moduleRefs.split(",")
if type(moduleRefs) is str
else list(moduleRefs)
)
self.locationsArg = locations
self.modulesArg = modules
self.version = version
self.checkout = checkout
self.silent = silent
def getMain(self):
"""Get the main data of the corpus.
This is specified by the `org`, `repo` and `relative` settings under
`provenanceSpec` in `config.yaml`.
See Also
--------
tf.advanced.settings: options allowed in `config.yaml`
"""
app = self.app
checkout = self.checkout
aContext = app.context
org = aContext.org
repo = aContext.repo
relative = prefixSlash(aContext.relative)
appPath = aContext.appPath
appName = aContext.appName
if appName.startswith("app:"):
appParent = appPath.rsplit("/", 1)[0]
relative = f"{appParent}{relative}"
elif org is None or repo is None:
appPathRep = f"{appPath}/" if appPath else ""
relative = f"{appPathRep}{appName}"
self.checkout = "local"
if not self.getModule(org, repo, prefixSlash(relative), checkout, isBase=True):
self.good = False
def getStandard(self):
"""Get the data of the standard modules specified by the settings of the corpus.
These are specified in the `moduleSpecs` setting under
`provenanceSpecs` in `config.yaml`.
They will be loaded *after* the extra modules specified in the **mod**
parameter, and only insofar as they have not been specified in the
**mod** parameter. In this way you can pass overriding
checkout specifiers to the standard modules.
See Also
--------
tf.advanced.settings: options allowed in `config.yaml`
"""
app = self.app
loadData = app.loadData
if not loadData or loadData == "core":
return
aContext = app.context
moduleSpecs = aContext.moduleSpecs
seen = self.seen
checkout = self.checkout
backend = self.backend
for m in moduleSpecs or []:
org = m["org"]
repo = m["repo"]
relative = m["relative"]
theCheckout = m.get("checkout", checkout)
theBackend = m.get("backend", backend)
bRep = backendRep(theBackend, "spec", default=backend)
ref = f"{bRep}{org}/{repo}{relative}"
if ref in seen:
continue
if not self.getModule(
org,
repo,
relative,
theCheckout,
backend=theBackend,
specs=m,
):
self.good = False
def getRefs(self):
"""Get data from additional modules.
These are specified in the `moduleRefs` parameter of `AppData`.
We store the set of special modules in order to skip them
later when we are loading the standard modules.
"""
backend = self.backend
refs = self.moduleRefs
for ref in refs:
refPure = ref.rsplit(":", 1)[0]
if refPure in self.seen:
continue
parts = splitModRef(ref)
if not parts:
self.good = False
continue
parts[2] = prefixSlash(normpath(parts[2])) # the relative bit
theBackend = (
None if parts[-1] is None or parts[-1] == backend else parts[-1]
)
if not self.getModule(*parts[0:-1], backend=theBackend):
self.good = False
def getModules(self):
"""Get data from additional local directories.
These are specified in the `locations` and `modules` parameters of `AppData`.
"""
self.provenance = []
provenance = self.provenance
self.mLocations = []
mLocations = self.mLocations
self.locations = None
self.modules = None
self.good = True
self.seen = set()
self.getMain()
self.getRefs()
self.getStandard()
version = self.version
good = self.good
app = self.app
if good:
app.mLocations = mLocations
app.provenance = provenance
else:
return
mModules = []
if mLocations:
mModules.append(version or "")
locations = self.locationsArg
modules = self.modulesArg
givenLocations = (
[]
if locations is None
else [expandDir(app, x.strip()) for x in itemize(locations, "\n")]
if type(locations) is str
else [str(x) for x in locations]
)
givenModules = (
[]
if modules is None
else [normpath(x.strip()) for x in itemize(modules, "\n")]
if type(modules) is str
else [normpath(str(x)) for x in modules]
)
self.locations = mLocations + givenLocations
self.modules = mModules + givenModules
def getModule(
self, org, repo, relative, checkout, backend=None, isBase=False, specs=None
):
"""Prepare to load a single module.
Eventually, all TF data will be downloaded from local directories, based
on a list of location paths and module paths.
This function computes the contribution of a single module to both the
location paths and the module paths.
Parameters
----------
org: string
GitHub organization or GitLab group of the module
repo: string:
GitHub repository or GitLab project of the module
relative: string
Path within the repository of the module
checkout: string
A specifier to use a specific release or commit of a data repository.
backend: string
The backend if different from the backend of the main module
isBase: boolean, optional False
Whether this module is the main data of the corpus.
specs: dict, optional False
Additional informational attributes of the module, e.g. a DOI
"""
backend = self.backend if backend is None else backendRep(backend, "norm")
bRep = backendRep(backend, "spec", default=self.backend)
version = self.version
silent = self.silent
mLocations = self.mLocations
provenance = self.provenance
seen = self.seen
app = self.app
_browse = app._browse
aContext = app.context
branch = aContext.provenanceSpec["branch"]
relative = prefixSlash(normpath(relative))
moduleRef = f"{bRep}{org}/{repo}{relative}"
if moduleRef in self.seen:
return True
if org is None or repo is None:
relativeBare = relative.removeprefix("/")
repoLocation = relativeBare
mLocations.append(relativeBare)
(commit, local, release) = (None, None, None)
else:
(commit, release, local, localBase, localDir) = checkoutRepo(
backend,
_browse=_browse,
org=org,
repo=repo,
folder=relative,
version=version,
checkout=checkout,
withPaths=False,
keep=False,
silent=silent,
)
if not localBase:
return False
repoLocation = f"{localBase}/{org}/{repo}"
mLocations.append(f"{localBase}/{localDir}")
seen.add(moduleRef)
if isBase:
app.repoLocation = repoLocation
info = {}
for item in (
("doi", None),
("corpus", f"{org}/{repo}{relative}"),
):
(key, default) = item
info[key] = (
getattr(aContext, key)
if isBase
else specs[key]
if specs and key in specs
else default
)
provenance.append(
(
("corpus", info["corpus"]),
("version", version),
("commit", commit or "??"),
("release", release or "none"),
(
"live",
provenanceLink(
backend, org, repo, version, branch, commit, local, release, relative
),
),
("doi", info["doi"]),
)
)
return True | identifier_body |
preprocess_03.py | """
DESCRIPTION
Preprocesses audio data before sending to Neural Network
See demo in in main()
MIT License
Copyright (c) 2018 The-Instrumental-Specialists
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import neuralnet_02 as NN
import numpy as np
import os
import glob
import json
import time
import scipy
import matplotlib.pylab as plt
import scipy.io.wavfile as wavfile
import scipy.fftpack
from scipy.fftpack import dct
def getMax(array_list):
"""Returns a tuple (index,value) of the maximum in an 1D array or list"""
m = array_list[0]
m_index = 0
for i,value in enumerate(array_list):
if value > m:
m = value
m_index = i
return (m_index,m)
def processFile(filename,length = 256,q=1,fs_in=8000,divide=4,plot=False):
"""returns one sided FFT amplitudes of filename
filename (string): ex) 'sax.wav'
length (int): Number of datapoints of one-sided fft (must be even,preferably a power of 2)
q (int): (optional argument) Downsampling Rate
fs_in (int): (optional argument) throw ValueError if fs of filename != fs_in
divide (int): (optional argument) 1/divide*Nsamples is taken from FFT (preferably even)
plot (bool): (optional argument) plots the one sided FFT if True, otherwise does not plot
Note: length < total_time*fs//(2*q*divide)
Ex) length = 256 < (0.25sec)*(44100Hz)//(2*4*2) = 689
"""
length = length*divide
#fs = sample rate, sound = multichannel sound signal
try:
fs1, sound = wavfile.read(filename)
except ValueError:
print(str(filename) + ' failed to process')
return 'failed'
if fs1 != fs_in:
raise ValueError('Sampling rate should be ' + str(fs_in) + ' for: ' + filename)
sig1 = sound[:, 0] #left channel
pre_emphasis = 0.97
sig1 = np.append(sig1[0], sig1[1:] - pre_emphasis * sig1[:-1])
fs2, sig2 = downsample(sig1,fs1,q)
N2 = len(sig2)
sig3 = sig2[N2//2-length:N2//2+length]
#print(len(sig3))
FFT = abs(scipy.fftpack.fft(sig3))
FFT_side = FFT[range(len(FFT)//2)]
#freqs = scipy.fftpack.fftfreq(sig3.size, 1/fs2)
#plt.plot(freqs,FFT)
if len(FFT_side) != length:
print('ERROR MESSAGE DETAILS')
print('filename: ' + filename)
print('length = ' + str(length))
print('fs_in = ' + str(fs_in))
print('q = ' + str(q))
print('divide = ' + str(divide))
total_time = len(sig1)/fs1
print('total_time = ' + str(total_time))
print('Please check: length < total_time*fs//(2*q)')
print('Check: ' + str(length) + ' < ' + str(total_time*fs1//(2*q)))
raise ValueError('Length FFT_side != length: ' + str(len(FFT_side)) + ' != ' + str(length))
FFT_log = []
# normalize FFT
for value in FFT_side:
value = np.log(value)
FFT_log.append(value)
max_val = getMax(FFT_log)[1]
FFT_norm = []
for value in FFT_log:
FFT_norm.append(value/max_val)
FFT_side = np.array(FFT_norm)
FFT_divided = FFT_side[range(length//divide)]
#plot = True
if plot == True:
freqs = scipy.fftpack.fftfreq(sig3.size, 1/fs2)
freqs_divided = np.array(freqs[range(len(FFT_divided))])
plt.plot(freqs_divided,FFT_divided) # plotting the complete fft spectrum
plt.show()
return FFT_divided
def processMPCC(filename,subsample=2048):
#assume 8000Hz
#amplify high frequencies
#Setup
try:
fs, signal = wavfile.read(filename) # File assumed to be in the same directory
except (ValueError, UnboundLocalError):
print(filename + ' failed to process.')
print('Failed Read')
print()
return 'failed'
half = len(signal)//2
side = subsample//2
signal = signal[half-side:half+side]
if side != len(signal)//2:
print(filename + ' failed to process.')
print('N too small, N: ' + str(len(signal)) + ', subsample: ' + str(subsample))
print()
return 'failed'
sig = signal[:,0] #get first channel
#Pre-Emphasis
pre_emphasis = 0.97
e_sig = sig[1:] - pre_emphasis * sig[0:-1] #emphasized signal
sig_len = len(e_sig)
#Framing
fr_size = 0.025 # frame size (sec)
fr_overlap = 0.01 # frame stride, frame overlap (sec)
fr_len = int(round(fr_size * fs)) # frame length (sec/sec)
fr_step = int(round(fr_overlap * fs)) # amt to step frame each time
num_fr = int(np.ceil(np.abs(sig_len - fr_len) / fr_step)) #Number of Frames
padding = num_fr * fr_step + fr_len # Amount of padding between frames
z = [0 for _ in range(padding-sig_len)]
z = np.array(z)
pad_sig = np.append(e_sig, z) # Pad Signal so frames equal size
#idx = np.tile(np.linspace(0, fr_len,fr_len), (num_fr, 1)) + np.tile(np.linspace(0, num_fr * fr_step, fr_step * num_fr), (fr_len, 1)).T
#fr = pad_sig[idx]
idx = np.tile(np.arange(0, fr_len), (num_fr, 1)) + np.transpose(np.tile(np.arange(0, num_fr * fr_step, fr_step), (fr_len, 1)))
fr = pad_sig[idx.astype(np.int32)]
#Window
NFFT = 512
fr = fr * ( 0.54 - 0.46 * np.cos((2 * np.pi * np.arange(fr_len)) / (fr_len - 1)) ) # Hamming Window applied per sample
#Fourier-Transform and Power Spectrum
#NFFT = NFFT
mag_fr = np.absolute(np.fft.rfft(fr, NFFT)) # Magnitude of the FFT
pow_fr = (1.0 / NFFT) * ((mag_fr) ** 2) # Power Spectrum
#Filter Banks
nfilt = 40
f_low = 0
f_high = (2595 * np.log10(1 + (fs / 2) / 700)) # Convert Hz to Mel
mel_points = np.linspace(f_low, f_high, nfilt + 2) # Equally spaced in Mel scale
hz_points = (700 * (10**(mel_points / 2595) - 1)) # Convert Mel to Hz
b = np.floor((NFFT + 1) * hz_points / fs) #bin
fbank = np.zeros((nfilt, int(np.floor(NFFT / 2 + 1))))
for i in range(1, nfilt + 1):
f_m_minus = int(b[i - 1]) # left
f_m = int(b[i]) # center
f_m_plus = int(b[i + 1]) # right
for j in range(f_m_minus, f_m):
fbank[i - 1, j] = (j - b[i - 1]) / (b[i] - b[i - 1])
for j in range(f_m, f_m_plus):
fbank[i - 1, j] = (b[i + 1] - j) / (b[i + 1] - b[i])
fb = np.dot(pow_fr, np.transpose(fbank)) # filter banks
fb = np.where(fb == 0, np.finfo(float).eps, fb) # Numerical Stability
fb = 20 * np.log10(fb) # convert to dB
#Mel-frequency Cepstral Coefficients (MFCCs)
num_ceps = 12
mfcc = dct(fb, type=2, axis=1, norm='ortho')[:, 1 : (num_ceps + 1)] # Keep 2-13
#Sinusoidal Filtering
c_lift = 22 # dim of MFCC vector
(n_fr, n_coeff) = mfcc.shape #number of frames number of coeff
ncoeff_array = np.arange(n_coeff)
lift = 1 + (c_lift / 2) * np.sin(np.pi * ncoeff_array / c_lift)
mfcc = mfcc * lift
#Mean Normalization
epsilon = 1e-8
for i in range(len(fb)):
fb[i] -= mean(fb) + epsilon
for i in range(len(mfcc)):
mfcc[i] -= mean(mfcc) + epsilon
output = []
for i in range(len(mfcc)):
for j in range(len(mfcc[0])):
output.append(mfcc[i][j])
m = getMax(output)[1]
for i,value in enumerate(output):
output[i] = value/m
return np.array(output)
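# --- Illustrative check (not part of the original) of the arithmetic in processMPCC ---
# With fs = 8000 Hz and the defaults above: a 0.025 s frame is 200 samples, a 0.01 s
# step is 80 samples, and a 2048-sample subsample (2047 after pre-emphasis) gives
# ceil((2047 - 200) / 80) = 24 frames before padding. Keeping coefficients 2-13 of the
# DCT yields 12 MFCCs per frame, so the flattened output vector has 24 * 12 = 288 values.
# fr_len = int(round(0.025 * 8000))          # 200
# fr_step = int(round(0.01 * 8000))          # 80
# num_fr = int(np.ceil((2047 - 200) / 80))   # 24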
def mean(array_list):
"""Returns the mean of an array or list"""
count = 0.0
for value in array_list:
count += value
return count/len(array_list)
def downsample(sig,fs,q):
"""
sig (list,array): sound/data signal
q (int): downsample factor
"""
N = len(sig)//q
new_sig = []
for i in range(len(sig)//q):
new_sig.append(sig[i*q])
new_sig = np.array(new_sig)
return (fs//q,new_sig)
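# --- Minimal usage sketch for downsample (illustrative, not part of the original) ---
# Keeps every q-th sample of a synthetic 440 Hz tone, so both fs and the length drop
# by a factor of q. Note that no anti-aliasing filter is applied before decimation.
# (numpy is already imported as np at the top of this file.)
_fs_demo = 8000
_t_demo = np.arange(_fs_demo) / _fs_demo             # one second of samples
_tone_demo = np.sin(2 * np.pi * 440 * _t_demo)
_fs_half, _tone_half = downsample(_tone_demo, _fs_demo, 2)   # _fs_half == 4000, len(_tone_half) == 4000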
class Preprocess:
|
def main():
# Note: Preprocessed data should be in folder preprocessed
v = processMPCC('instruments_07/banjo/banjo_A3_very-long_forte_normal.wav')
print('len(input layer) = ' + str(len(v)))
#raise Exception
P = Preprocess()
#P.processData('preprocessed/processed_01.txt',directory='instruments_07',fs_in=8000,length=input_length,q=1,divide=1,comment = 'Instrument Data')
P.processData('preprocessed/processed_01.txt',directory='instruments_03',way='mpcc',opt = [2048])
P.loadData('preprocessed/processed_01.txt')
X, Y = P.getXY()
print('Input Layer Length: ' + str(len(X[0])))
print('Output Layer Length: ' + str(len(Y[0])))
input_size = P.getInputLength()
output_size = P.getOutputLength()
net = NN.NeuralNetwork([input_size,100,output_size],'sigmoid')
net.storeWeights('weights/weights_01')
#net.loadWeights('weights/weights_01')
net.trainWithPlots(X,Y,learning_rate=1,intervals = 100,way='max')
net.testBatch(X,Y)
# Test print functions, these print statements can be used to figure
# out how to use code
# X, Y = P.getXY()
# files = P.getFileList()
# output_vectors = P.getOutputVectors()
# output_names = P.getOutputNames()
# print()
# print('X = ' + str(X))
# print()
# print('Y = ' + str(Y))
# print()
# print('File List = ' + str(files))
# print()
# print('Output Vectors = ' + str(output_vectors))
# print()
# print('Output Names = ' + str(output_names))
if __name__ == '__main__':
main()
| def __init__(self):
"""data_file (string): contains the file to load or store data, ex)data.txt
process (bool): if False, load data from data_file,
if True, process data in directory & store in data_file
directory (string): (optional) directory of data to be processed
"""
# Ex) self.output['cel']: np.array([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
self.output = {}
# directory names are names of instruments
#self.dirs =
# ['cel', 'cla', 'flu', 'gac', 'gel', 'org', 'pia', 'sax', 'tru', 'vio', 'voi']
self.dirs = [] # list of names of subdirectories in directory
# example: self.files['sax'] =
# IRMAS-TrainingData\sax\006__[sax][nod][cla]1686__1.wav
self.files = {} # dictionary of dir:[file,file,file]
# self.data = {} # dictionary of dir:[input_nodes,input_nodes]
# self.X is dictionary of dir:[input_nodes1,input_nodes2]
# self.Y is dictionary of dir:[output_nodes1,output_nodes2]
# self.Y corresponds to self.X
self.X = [] # list of input vectors
self.Y = [] # list of output vectors
#if process == False:
#self.loadData(data_file)
#else: #process == True:
#self.processData(data_file,directory,comment)
def getXY(self):
"""Returns X (List of Input Vectors), and Y (List of Output Vectors)
for preprocessed data
ex) X = [[0,0],[0,1],[1,0],[1,1]]
ex) Y = [[0],[1],[1],[0]]
"""
return (self.X,self.Y)
def getInputLength(self):
"""Returns length of Input Layer"""
return len(self.X[0])
def getOutputLength(self):
"""Returns length of Output Layer"""
return len(self.Y[0])
def getFileList(self):
"""Returns a dictionary with key:value 'Output Name':[file list]
ex) {'sax':['sax1.wav','sax2.wav','sax3.wav']}
"""
return self.files
def getOutputVectors(self):
""" Returns a dictionary with key:value 'OutputName':output vector
Ex) output['cel']: np.array([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
"""
return self.output
def getOutputNames(self):
"""Returns a list of the names of the output vectors
ex) ['cel', 'cla', 'flu', 'gac', 'gel', 'org', 'pia', 'sax', 'tru', 'vio', 'voi']
"""
return self.dirs
def loadData(self,data_file):
"""Loads the data in data_file into Trainer"""
#Load the data from the json
with open(data_file) as json_file:
data = json.load(json_file)
# Clear all instance variables
self.dirs = []
self.files = {}
self.X = []
self.Y = []
self.output = {}
# stored the data into the instance variables
self.dirs = data['dirs'] #good
self.files = data['files'] # good
# self.output is a dict() with string:np.array
output = data['output']
for e in output:
self.output[e] = np.array(output[e]) # -> fine
#self.X is a list of np.arrays
X = data['X']
for x in X:
self.X.append(np.array(x))# -> fine
#self.Y is a list of np.arrays
Y = data['Y']
for y in Y:
self.Y.append(list(y))# -> fine
#Test prints, uncomment to test if data looks correct
#print('self.dirs = ' + str(self.dirs))
#print()
#print('self.files = ' + str(self.files))
#print()
#print('self.output = ' + str(self.output))
#print()
#print('self.X = ' + str(self.X))
#print()
#print('self.Y = ' + str(self.Y))
#print()
print('Preprocessed data loaded from ' + str(data_file))
print(data['comment'])
return
def processData(self,data_file,directory,comment = '',way='mpcc',opt=[1024]):
"""Processes the data in directory and stores it in data_file
directory (string): folder of data to be processed
data_file (string): name of file for data to be stored ex) data.txt
comment (string): optional message to be stored with data
way = 'fft', opts is a list containing
length (int): Number of datapoints of one-sided fft (must be even,preferably a power of 2)
q (int): Downsampling Rate (must be even, preferably power of 2)
fs_in (int): throw ValueError if fs of filename != fs_in
divide (int): 1/divide*Nsamples is taken from FFT (preferably even)
plot (bool): plots the one sided FFT if True, otherwise does not plot
Note: length < total_time*fs/(q)
Ex) length = 1024 < (0.25sec)*(44100Hz)/(4) = 2756
way = 'mpcc', opts is a list containing
subsample (int) = Number of subsamples to take from audio file.
"""
# directory names are names of instruments
#self.dirs =
# ['cel', 'cla', 'flu', 'gac', 'gel', 'org', 'pia', 'sax', 'tru', 'vio', 'voi']
self.dirs = [name for name in os.listdir(directory)
if os.path.isdir(os.path.join(directory, name))]
# example: self.files['sax'] =
# IRMAS-TrainingData\sax\006__[sax][nod][cla]1686__1.wav
self.files = {}
for d in self.dirs:
self.files[d] = []
sub_dir = os.path.join(directory, d)
for filename in glob.glob(os.path.join(sub_dir, '*.wav')):
self.files[d].append(filename)
# Ex) self.output['cel']: np.array([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
i = 0
for name in self.dirs:
temp = []
for j in range(len(self.dirs)):
if i == j:
temp.append(1)
else:
temp.append(0)
self.output[name] = np.array(temp)
i +=1
#self.X = [] # list of input vectors
#self.Y = [] # list of output vectors
t0 = time.time()
for name in self.dirs:
t1 = time.time()
for file in self.files[name]:
#input_vector = processFile(file,length=length1,q=q1,fs_in=fs_in1,divide=divide1,plot = False)
if way == 'mpcc':
input_vector = processMPCC(file,*opt)
elif way == 'fft':
input_vector = processFile(file,*opt) # the FFT helper in this module is named processFile
else:
raise ValueError('Invalid Way, valid types include: \'mpcc\' or \'fft\'')
if not isinstance(input_vector, str): # processFile/processMPCC signal failure by returning the string 'failed'
self.X.append(input_vector)
self.Y.append(self.output[name])
print('Time taken to process '+str(name)+ ': ' + str((time.time()-t1)/60)[0:4] + ' min.')
print('Total Processing Time: ' + str((time.time()-t0)/60)[0:4] + ' min.')
# Now we can store all of the data in a json
# Need to store self.X, self.Y, self.dirs,self.output,self.files,self.data
# self.dirs is a list of strings -> fine
# self.files is a dict() with string:string -> fine
# self.output is a dict() with string:np.array
output = {}
for d in self.output:
out_list = []
for value in self.output[d]:
out_list.append(int(value))
output[d] = out_list # -> fine
#self.X is a list of np.arrays
X = []
for i in range(len(self.X)):
x = []
for ele in self.X[i]:
x.append(float(ele))
X.append(x) # -> fine
#self.Y is a list of np.arrays
Y = []
for i in range(len(self.Y)):
y = []
for ele in self.Y[i]:
y.append(float(ele))
Y.append(y) # -> fine
store = {}
store['dirs'] = self.dirs # good
store['output'] = output # good
store['files'] = self.files # good
store['X'] = X # good
store['Y'] = Y # good
store['comment'] = comment
with open(data_file, 'w') as outfile:
json.dump(store, outfile)
print('Preprocessed data stored in ' + str(data_file))
return | identifier_body |
preprocess_03.py | """
DESCRIPTION
Preprocesses audio data before sending to Neural Network
See demo in in main()
MIT License
Copyright (c) 2018 The-Instrumental-Specialists
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import neuralnet_02 as NN
import numpy as np
import os
import glob
import json
import time
import scipy
import matplotlib.pylab as plt
import scipy.io.wavfile as wavfile
import scipy.fftpack
from scipy.fftpack import dct
def getMax(array_list):
"""Returns a tuple (index,value) of the maximum in an 1D array or list"""
m = array_list[0]
m_index = 0
for i,value in enumerate(array_list):
if value > m:
m = value
m_index = i
return (m_index,m)
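# Minimal usage sketch for getMax() (illustrative only; the list values are assumptions).
def _demo_getMax():
    index, value = getMax([0.2, 0.9, 0.5])
    # The largest entry, 0.9, sits at index 1.
    assert (index, value) == (1, 0.9)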
def processFile(filename,length = 256,q=1,fs_in=8000,divide=4,plot=False):
"""returns one sided FFT amplitudes of filename
filename (string): ex) 'sax.wav'
length (int): Number of datapoints of one-sided fft (must be even,preferably a power of 2)
q (int): (optional argument) Downsampling Rate
fs_in (int): (optional argument) throw ValueError if fs of filename != fs_in
divide (int): (optional argument) 1/divide*Nsamples is taken from FFT (preferably even)
plot (bool): (optional argument) plots the one sided FFT if True, otherwise does not plot
Note: length < total_time*fs//(2*q*divide)
Ex) length = 256 < (0.25sec)*(44100Hz)//(2*4*2) = 689
"""
length = length*divide
#fs = sample rate, sound = multichannel sound signal
try:
fs1, sound = wavfile.read(filename)
except ValueError:
print(str(filename) + ' failed to process')
return 'failed'
if fs1 != fs_in:
raise ValueError('Sampling rate should be ' + str(fs_in) + ' for: ' + filename)
sig1 = sound[:,0] #left channel
pre_emphasis = 0.97
sig1 = np.append(sig1[0], sig1[1:] - pre_emphasis * sig1[:-1])
fs2, sig2 = downsample(sig1,fs1,q)
N2 = len(sig2)
sig3 = sig2[N2//2-length:N2//2+length]
#print(len(sig3))
FFT = abs(scipy.fftpack.fft(sig3))
FFT_side = FFT[range(len(FFT)//2)]
#freqs = scipy.fftpack.fftfreq(sig3.size, 1/fs2)
#plt.plot(freqs,FFT)
if len(FFT_side) != length:
print('ERROR MESSAGE DETAILS')
print('filename: ' + filename)
print('length = ' + str(length))
print('fs_in = ' + str(fs_in))
print('q = ' + str(q))
print('divide = ' + str(divide))
total_time = len(sig1)/fs1
print('total_time = ' + str(total_time))
print('Please check: length < total_time*fs//(2*q)')
print('Check: ' + str(length) + ' < ' + str(total_time*fs1//(2*q)))
raise ValueError('Length FFT_side != length: ' + str(len(FFT_side)) + ' != ' + str(length))
FFT_log = []
# normalize FFT
for value in FFT_side:
value = np.log(value)
FFT_log.append(value)
max_val = getMax(FFT_log)[1]
FFT_norm = []
for value in FFT_log:
FFT_norm.append(value/max_val)
FFT_side = np.array(FFT_norm)
FFT_divided = FFT_side[range(length//divide)]
#plot = True
if plot == True:
freqs = scipy.fftpack.fftfreq(sig3.size, 1/fs2)
freqs_divided = np.array(freqs[range(len(FFT_divided))])
plt.plot(freqs_divided,FFT_divided) # plotting the complete fft spectrum
plt.show()
return FFT_divided
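# Hedged usage sketch for processFile() (not part of the original flow). It reuses the
# banjo sample referenced in main() and assumes it is a stereo 8 kHz recording.
def _demo_processFile():
    fft_vector = processFile('instruments_07/banjo/banjo_A3_very-long_forte_normal.wav', length=256, q=1, fs_in=8000, divide=4)
    # processFile returns the string 'failed' if the WAV cannot be read.
    if not isinstance(fft_vector, str):
        print('FFT feature length: ' + str(len(fft_vector)))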
def processMPCC(filename,subsample=2048):
#assume 8000Hz
#amplify high frequencies
#Setup
try:
fs, signal = wavfile.read(filename) # File assumed to be in the same directory
except (ValueError, UnboundLocalError):
print(filename + ' failed to process.')
print('Failed Read')
print()
return 'failed'
half = len(signal)//2
side = subsample//2
signal = signal[half-side:half+side]
if side != len(signal)//2:
print(filename + ' failed to process.')
print('N too small, N: ' + str(len(signal)) + ', subsample: ' + str(subsample))
print()
return 'failed'
sig = signal[:,0] #get first channel
#Pre-Emphasis
pre_emphasis = 0.97
e_sig = sig[1:] - pre_emphasis * sig[0:-1] #emphasized signal
sig_len = len(e_sig)
#Framing
fr_size = 0.025 # frame size (sec)
fr_overlap = 0.01 # frame stride, frame overlap (sec)
fr_len = int(round(fr_size * fs)) # frame length (sec/sec)
fr_step = int(round(fr_overlap * fs)) # amt to step frame each time
num_fr = int(np.ceil(np.abs(sig_len - fr_len) / fr_step)) #Number of Frames
padding = num_fr * fr_step + fr_len # Amount of padding between frames
z = [0 for _ in range(padding-sig_len)]
z = np.array(z)
pad_sig = np.append(e_sig, z) # Pad Signal so frames equal size
#idx = np.tile(np.linspace(0, fr_len,fr_len), (num_fr, 1)) + np.tile(np.linspace(0, num_fr * fr_step, fr_step * num_fr), (fr_len, 1)).T
#fr = pad_sig[idx]
idx = np.tile(np.arange(0, fr_len), (num_fr, 1)) + np.transpose(np.tile(np.arange(0, num_fr * fr_step, fr_step), (fr_len, 1)))
fr = pad_sig[idx.astype(np.int32)]
#Window
NFFT = 512
fr = fr * ( 0.54 - 0.46 * np.cos((2 * np.pi * np.arange(fr_len)) / (fr_len - 1)) ) # Hamming Window
#Fourier-Transform and Power Spectrum
#NFFT = NFFT
mag_fr = np.absolute(np.fft.rfft(fr, NFFT)) # Magnitude of the FFT
pow_fr = (1.0 / NFFT) * ((mag_fr) ** 2) # Power Spectrum
#Filter Banks
nfilt = 40
f_low = 0
f_high = (2595 * np.log10(1 + (fs / 2) / 700)) # Convert Hz to Mel
mel_points = np.linspace(f_low, f_high, nfilt + 2) # Equally spaced in Mel scale
hz_points = (700 * (10**(mel_points / 2595) - 1)) # Convert Mel to Hz
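# Worked example of the Hz<->Mel mapping above (sanity check, assuming fs = 8000 Hz):
# f_high = 2595*log10(1 + 4000/700) ~= 2146 mels, and mapping 2146 mels back through
# 700*(10**(2146/2595) - 1) returns ~4000 Hz, i.e. the Nyquist frequency.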
b = np.floor((NFFT + 1) * hz_points / fs) #bin
fbank = np.zeros((nfilt, int(np.floor(NFFT / 2 + 1))))
for i in range(1, nfilt + 1):
f_m_minus = int(b[i - 1]) # left
f_m = int(b[i]) # center
f_m_plus = int(b[i + 1]) # right
for j in range(f_m_minus, f_m):
fbank[i - 1, j] = (j - b[i - 1]) / (b[i] - b[i - 1])
for j in range(f_m, f_m_plus):
fbank[i - 1, j] = (b[i + 1] - j) / (b[i + 1] - b[i])
fb = np.dot(pow_fr, np.transpose(fbank)) # filter banks
fb = np.where(fb == 0, np.finfo(float).eps, fb) # Numerical Stability
fb = 20 * np.log10(fb) # convert to dB
#Mel-frequency Cepstral Coefficients (MFCCs)
num_ceps = 12
mfcc = dct(fb, type=2, axis=1, norm='ortho')[:, 1 : (num_ceps + 1)] # Keep 2-13
#Sinusoidal Filtering
c_lift = 22 # dim of MFCC vector
(n_fr, n_coeff) = mfcc.shape #number of frames number of coeff
ncoeff_array = np.arange(n_coeff)
lift = 1 + (c_lift / 2) * np.sin(np.pi * ncoeff_array / c_lift)
mfcc = mfcc * lift
#Mean Normalization
epsilon = 1e-8
for i in range(len(fb)):
fb[i] -= mean(fb) + epsilon
for i in range(len(mfcc)):
mfcc[i] -= mean(mfcc) + epsilon
output = [] |
m = getMax(output)[1]
for i,value in enumerate(output):
output[i] = value/m
return np.array(output)
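# Hedged usage sketch for processMPCC() (illustrative; reuses the banjo sample that
# main() already references and assumes it is a stereo 8 kHz recording).
def _demo_processMPCC():
    mfcc_vector = processMPCC('instruments_07/banjo/banjo_A3_very-long_forte_normal.wav', subsample=2048)
    # processMPCC returns the string 'failed' when the file is unreadable or too short.
    if not isinstance(mfcc_vector, str):
        print('MFCC feature length: ' + str(len(mfcc_vector)))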
def mean(array_list):
"""Returns the mean of an array or list"""
count = 0.0
for value in array_list:
count += value
return count/len(array_list)
def downsample(sig,fs,q):
"""
sig (list,array): sound/data signal
q (int): downsample factor
"""
N = len(sig)//q
new_sig = []
for i in range(len(sig)//q):
new_sig.append(sig[i*q])
new_sig = np.array(new_sig)
return (fs//q,new_sig)
class Preprocess:
def __init__(self):
"""data_file (string): contains the file to load or store data, ex)data.txt
process (bool): if False, load data from data_file,
if True, process data in directory & store in data_file
directory (string): (optional) directory of data to be processed
"""
# Ex) self.output['cel']: np.array([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
self.output = {}
# directory names are names of instruments
#self.dirs =
# ['cel', 'cla', 'flu', 'gac', 'gel', 'org', 'pia', 'sax', 'tru', 'vio', 'voi']
self.dirs = [] # list of names of subdirectories in directory
# example: self.files['sax'] =
# IRMAS-TrainingData\sax\006__[sax][nod][cla]1686__1.wav
self.files = {} # dictionary of dir:[file,file,file]
# self.data = {} # dictionary of dir:[input_nodes,input_nodes]
# self.X is dictionary of dir:[input_nodes1,input_nodes2]
# self.Y is dictionary of dir:[output_nodes1,output_nodes2]
# self.Y corresponds to self.X
self.X = [] # list of input vectors
self.Y = [] # list of output vectors
#if process == False:
#self.loadData(data_file)
#else: #process == True:
#self.processData(data_file,directory,comment)
def getXY(self):
"""Returns X (List of Input Vectors), and Y (List of Output Vectors)
for preprocessed data
ex) X = [[0,0],[0,1],[1,0],[1,1]]
ex) Y = [[0],[1],[1],[0]]
"""
return (self.X,self.Y)
def getInputLength(self):
"""Returns length of Input Layer"""
return len(self.X[0])
def getOutputLength(self):
"""Returns length of Output Layer"""
return len(self.Y[0])
def getFileList(self):
"""Returns a dictionary with key:value 'Output Name':[file list]
ex) {'sax':['sax1.wav','sax2.wav','sax3.wav']}
"""
return self.files
def getOutputVectors(self):
""" Returns a dictionary with key:value 'OutputName':output vector
Ex) output['cel']: np.array([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
"""
return self.output
def getOutputNames(self):
"""Returns a list of the names of the output vectors
ex) ['cel', 'cla', 'flu', 'gac', 'gel', 'org', 'pia', 'sax', 'tru', 'vio', 'voi']
"""
return self.dirs
def loadData(self,data_file):
"""Loads the data in data_file into Trainer"""
#Load the data from the json
with open(data_file) as json_file:
data = json.load(json_file)
# Clear all instance variables
self.dirs = []
self.files = {}
self.X = []
self.Y = []
self.output = {}
# stored the data into the instance variables
self.dirs = data['dirs'] #good
self.files = data['files'] # good
# self.output is a dict() with string:np.array
output = data['output']
for e in output:
self.output[e] = np.array(output[e]) # -> fine
#self.X is a list of np.arrays
X = data['X']
for x in X:
self.X.append(np.array(x))# -> fine
#self.Y is a list of np.arrays
Y = data['Y']
for y in Y:
self.Y.append(list(y))# -> fine
#Test prints, uncomment to test if data looks correct
#print('self.dirs = ' + str(self.dirs))
#print()
#print('self.files = ' + str(self.files))
#print()
#print('self.output = ' + str(self.output))
#print()
#print('self.X = ' + str(self.X))
#print()
#print('self.Y = ' + str(self.Y))
#print()
print('Preprocessed data loaded from ' + str(data_file))
print(data['comment'])
return
def processData(self,data_file,directory,comment = '',way='mpcc',opt=[1024]):
"""Processes the data in directory and stores it in data_file
directory (string): folder of data to be processed
data_file (string): name of file for data to be stored ex) data.txt
comment (string): optional message to be stored with data
way = 'fft', opts is a list containing
length (int): Number of datapoints of one-sided fft (must be even,preferably a power of 2)
q (int): Downsampling Rate (must be even, preferably power of 2)
fs_in (int): throw ValueError if fs of filename != fs_in
divide (int): 1/divide*Nsamples is taken from FFT (preferably even)
plot (bool): plots the one sided FFT if True, otherwise does not plot
Note: length < total_time*fs/(q)
Ex) length = 1024 < (0.25sec)*(44100Hz)/(4) = 2756
way = 'mpcc', opts is a list containing
subsample (int) = Number of subsamples to take from audio file.
"""
# directory names are names of instruments
#self.dirs =
# ['cel', 'cla', 'flu', 'gac', 'gel', 'org', 'pia', 'sax', 'tru', 'vio', 'voi']
self.dirs = [name for name in os.listdir(directory)
if os.path.isdir(os.path.join(directory, name))]
# example: self.files['sax'] =
# IRMAS-TrainingData\sax\006__[sax][nod][cla]1686__1.wav
self.files = {}
for d in self.dirs:
self.files[d] = []
sub_dir = os.path.join(directory, d)
for filename in glob.glob(os.path.join(sub_dir, '*.wav')):
self.files[d].append(filename)
# Ex) self.output['cel']: np.array([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
i = 0
for name in self.dirs:
temp = []
for j in range(len(self.dirs)):
if i == j:
temp.append(1)
else:
temp.append(0)
self.output[name] = np.array(temp)
i +=1
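# e.g. if self.dirs were ['banjo', 'cello', 'flute'] (an assumed example), the loop above
# would give self.output['banjo'] = [1,0,0], ['cello'] = [0,1,0], ['flute'] = [0,0,1]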
#self.X = [] # list of input vectors
#self.Y = [] # list of output vectors
t0 = time.time()
for name in self.dirs:
t1 = time.time()
for file in self.files[name]:
#input_vector = processFile(file,length=length1,q=q1,fs_in=fs_in1,divide=divide1,plot = False)
if way == 'mpcc':
input_vector = processMPCC(file,*opt)
elif way == 'fft':
input_vector = processFile(file,*opt) # the FFT helper in this module is named processFile
else:
raise ValueError('Invalid Way, valid types include: \'mpcc\' or \'fft\'')
if not isinstance(input_vector, str): # processFile/processMPCC signal failure by returning the string 'failed'
self.X.append(input_vector)
self.Y.append(self.output[name])
print('Time taken to process '+str(name)+ ': ' + str((time.time()-t1)/60)[0:4] + ' min.')
print('Total Processing Time: ' + str((time.time()-t0)/60)[0:4] + ' min.')
# Now we can store all of the data in a json
# Need to store self.X, self.Y, self.dirs,self.output,self.files,self.data
# self.dirs is a list of strings -> fine
# self.files is a dict() with string:string -> fine
# self.output is a dict() with string:np.array
output = {}
for d in self.output:
out_list = []
for value in self.output[d]:
out_list.append(int(value))
output[d] = out_list # -> fine
#self.X is a list of np.arrays
X = []
for i in range(len(self.X)):
x = []
for ele in self.X[i]:
x.append(float(ele))
X.append(x) # -> fine
#self.Y is a list of np.arrays
Y = []
for i in range(len(self.Y)):
y = []
for ele in self.Y[i]:
y.append(float(ele))
Y.append(y) # -> fine
store = {}
store['dirs'] = self.dirs # good
store['output'] = output # good
store['files'] = self.files # good
store['X'] = X # good
store['Y'] = Y # good
store['comment'] = comment
with open(data_file, 'w') as outfile:
json.dump(store, outfile)
print('Preprocessed data stored in ' + str(data_file))
return
def main():
# Note: Preprocessed data should be in folder preprocessed
v = processMPCC('instruments_07/banjo/banjo_A3_very-long_forte_normal.wav')
print('len(input layer) = ' + str(len(v)))
#raise Exception
P = Preprocess()
#P.processData('preprocessed/processed_01.txt',directory='instruments_07',fs_in=8000,length=input_length,q=1,divide=1,comment = 'Instrument Data')
P.processData('preprocessed/processed_01.txt',directory='instruments_03',way='mpcc',opt = [2048])
P.loadData('preprocessed/processed_01.txt')
X, Y = P.getXY()
print('Input Layer Length: ' + str(len(X[0])))
print('Output Layer Length: ' + str(len(Y[0])))
input_size = P.getInputLength()
output_size = P.getOutputLength()
net = NN.NeuralNetwork([input_size,100,output_size],'sigmoid')
net.storeWeights('weights/weights_01')
#net.loadWeights('weights/weights_01')
net.trainWithPlots(X,Y,learning_rate=1,intervals = 100,way='max')
net.testBatch(X,Y)
# Test print functions, these print statements can be used to figure
# out how to use code
# X, Y = P.getXY()
# files = P.getFileList()
# output_vectors = P.getOutputVectors()
# output_names = P.getOutputNames()
# print()
# print('X = ' + str(X))
# print()
# print('Y = ' + str(Y))
# print()
# print('File List = ' + str(files))
# print()
# print('Output Vectors = ' + str(output_vectors))
# print()
# print('Output Names = ' + str(output_names))
if __name__ == '__main__':
main() | for i in range(len(mfcc)):
for j in range(len(mfcc[0])):
output.append(mfcc[i][j]) | random_line_split |
preprocess_03.py | """
DESCRIPTION
Preprocesses audio data before sending to Neural Network
See demo in in main()
MIT License
Copyright (c) 2018 The-Instrumental-Specialists
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import neuralnet_02 as NN
import numpy as np
import os
import glob
import json
import time
import scipy
import matplotlib.pylab as plt
import scipy.io.wavfile as wavfile
import scipy.fftpack
from scipy.fftpack import dct
def getMax(array_list):
"""Returns a tuple (index,value) of the maximum in an 1D array or list"""
m = array_list[0]
m_index = 0
for i,value in enumerate(array_list):
if value > m:
m = value
m_index = i
return (m_index,m)
def processFile(filename,length = 256,q=1,fs_in=8000,divide=4,plot=False):
"""returns one sided FFT amplitudes of filename
filename (string): ex) 'sax.wav'
length (int): Number of datapoints of one-sided fft (must be even,preferably a power of 2)
q (int): (optional argument) Downsampling Rate
fs_in (int): (optional argument) throw ValueError if fs of filename != fs_in
divide (int): (optional argument) 1/divide*Nsamples is taken from FFT (preferably even)
plot (bool): (optional argument) plots the one sided FFT if True, otherwise does not plot
Note: length < total_time*fs//(2*q*divide)
Ex) length = 256 < (0.25sec)*(44100Hz)//(2*4*2) = 689
"""
length = length*divide
#fs = sample rate, sound = multichannel sound signal
try:
fs1, sound = wavfile.read(filename)
except ValueError:
print(str(filename) + ' failed to process')
return 'failed'
if fs1 != fs_in:
raise ValueError('Sampling rate should be ' + str(fs_in) + ' for: ' + filename)
sig1 = sound[:,0] #left channel
pre_emphasis = 0.97
sig1 = np.append(sig1[0], sig1[1:] - pre_emphasis * sig1[:-1])
fs2, sig2 = downsample(sig1,fs1,q)
N2 = len(sig2)
sig3 = sig2[N2//2-length:N2//2+length]
#print(len(sig3))
FFT = abs(scipy.fftpack.fft(sig3))
FFT_side = FFT[range(len(FFT)//2)]
#freqs = scipy.fftpack.fftfreq(sig3.size, 1/fs2)
#plt.plot(freqs,FFT)
if len(FFT_side) != length:
print('ERROR MESSAGE DETAILS')
print('filename: ' + filename)
print('length = ' + str(length))
print('fs_in = ' + str(fs_in))
print('q = ' + str(q))
print('divide = ' + str(divide))
total_time = len(sig1)/fs1
print('total_time = ' + str(total_time))
print('Please check: length < total_time*fs//(2*q)')
print('Check: ' + str(length) + ' < ' + str(total_time*fs1//(2*q)))
raise ValueError('Length FFT_side != length: ' + str(len(FFT_side)) + ' != ' + str(length))
FFT_log = []
# normalize FFT
for value in FFT_side:
value = np.log(value)
FFT_log.append(value)
max_val = getMax(FFT_log)[1]
FFT_norm = []
for value in FFT_log:
FFT_norm.append(value/max_val)
FFT_side = np.array(FFT_norm)
FFT_divided = FFT_side[range(length//divide)]
#plot = True
if plot == True:
freqs = scipy.fftpack.fftfreq(sig3.size, 1/fs2)
freqs_divided = np.array(freqs[range(len(FFT_divided))])
plt.plot(freqs_divided,FFT_divided) # plotting the complete fft spectrum
plt.show()
return FFT_divided
def processMPCC(filename,subsample=2048):
#assume 8000Hz
#amplify high frequencies
#Setup
try:
fs, signal = wavfile.read(filename) # File assumed to be in the same directory
except (ValueError, UnboundLocalError):
print(filename + ' failed to process.')
print('Failed Read')
print()
return 'failed'
half = len(signal)//2
side = subsample//2
signal = signal[half-side:half+side]
if side != len(signal)//2:
print(filename + ' failed to process.')
print('N too small, N: ' + str(len(signal)) + ', subsample: ' + str(subsample))
print()
return 'failed'
sig = signal[:,0] #get first channel
#Pre-Emphasis
pre_emphasis = 0.97
e_sig = sig[1:] - pre_emphasis * sig[0:-1] #emphasized signal
sig_len = len(e_sig)
#Framing
fr_size = 0.025 # frame size (sec)
fr_overlap = 0.01 # frame stride, frame overlap (sec)
fr_len = int(round(fr_size * fs)) # frame length (sec/sec)
fr_step = int(round(fr_overlap * fs)) # amt to step frame each time
num_fr = int(np.ceil(np.abs(sig_len - fr_len) / fr_step)) #Number of Frames
padding = num_fr * fr_step + fr_len # Amount of padding between frames
z = [0 for _ in range(padding-sig_len)]
z = np.array(z)
pad_sig = np.append(e_sig, z) # Pad Signal so frames equal size
#idx = np.tile(np.linspace(0, fr_len,fr_len), (num_fr, 1)) + np.tile(np.linspace(0, num_fr * fr_step, fr_step * num_fr), (fr_len, 1)).T
#fr = pad_sig[idx]
idx = np.tile(np.arange(0, fr_len), (num_fr, 1)) + np.transpose(np.tile(np.arange(0, num_fr * fr_step, fr_step), (fr_len, 1)))
fr = pad_sig[idx.astype(np.int32)]
#Window
NFFT = 512
fr = fr * ( 0.54 - 0.46 * np.cos((2 * np.pi * np.arange(fr_len)) / (fr_len - 1)) ) # Hamming Window
#Fourier-Transform and Power Spectrum
#NFFT = NFFT
mag_fr = np.absolute(np.fft.rfft(fr, NFFT)) # Magnitude of the FFT
pow_fr = (1.0 / NFFT) * ((mag_fr) ** 2) # Power Spectrum
#Filter Banks
nfilt = 40
f_low = 0
f_high = (2595 * np.log10(1 + (fs / 2) / 700)) # Convert Hz to Mel
mel_points = np.linspace(f_low, f_high, nfilt + 2) # Equally spaced in Mel scale
hz_points = (700 * (10**(mel_points / 2595) - 1)) # Convert Mel to Hz
b = np.floor((NFFT + 1) * hz_points / fs) #bin
fbank = np.zeros((nfilt, int(np.floor(NFFT / 2 + 1))))
for i in range(1, nfilt + 1):
f_m_minus = int(b[i - 1]) # left
f_m = int(b[i]) # center
f_m_plus = int(b[i + 1]) # right
for j in range(f_m_minus, f_m):
fbank[i - 1, j] = (j - b[i - 1]) / (b[i] - b[i - 1])
for j in range(f_m, f_m_plus):
fbank[i - 1, j] = (b[i + 1] - j) / (b[i + 1] - b[i])
fb = np.dot(pow_fr, np.transpose(fbank)) # filter banks
fb = np.where(fb == 0, np.finfo(float).eps, fb) # Numerical Stability
fb = 20 * np.log10(fb) # convert to dB
#Mel-frequency Cepstral Coefficients (MFCCs)
num_ceps = 12
mfcc = dct(fb, type=2, axis=1, norm='ortho')[:, 1 : (num_ceps + 1)] # Keep 2-13
#Sinusoidal Filtering
c_lift = 22 # dim of MFCC vector
(n_fr, n_coeff) = mfcc.shape #number of frames number of coeff
ncoeff_array = np.arange(n_coeff)
lift = 1 + (c_lift / 2) * np.sin(np.pi * ncoeff_array / c_lift)
mfcc = mfcc * lift
#Mean Normalization
epsilon = 1e-8
for i in range(len(fb)):
fb[i] -= mean(fb) + epsilon
for i in range(len(mfcc)):
mfcc[i] -= mean(mfcc) + epsilon
output = []
for i in range(len(mfcc)):
for j in range(len(mfcc[0])):
output.append(mfcc[i][j])
m = getMax(output)[1]
for i,value in enumerate(output):
output[i] = value/m
return np.array(output)
def mean(array_list):
"""Returns the mean of an array or list"""
count = 0.0
for value in array_list:
count += value
return count/len(array_list)
def downsample(sig,fs,q):
"""
sig (list,array): sound/data signal
q (int): downsample factor
"""
N = len(sig)//q
new_sig = []
for i in range(len(sig)//q):
new_sig.append(sig[i*q])
new_sig = np.array(new_sig)
return (fs//q,new_sig)
class Preprocess:
def __init__(self):
"""data_file (string): contains the file to load or store data, ex)data.txt
process (bool): if False, load data from data_file,
if True, process data in directory & store in data_file
directory (string): (optional) directory of data to be processed
"""
# Ex) self.output['cel']: np.array([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
self.output = {}
# directory names are names of instruments
#self.dirs =
# ['cel', 'cla', 'flu', 'gac', 'gel', 'org', 'pia', 'sax', 'tru', 'vio', 'voi']
self.dirs = [] # list of names of subdirectories in directory
# example: self.files['sax'] =
# IRMAS-TrainingData\sax\006__[sax][nod][cla]1686__1.wav
self.files = {} # dictionary of dir:[file,file,file]
# self.data = {} # dictionary of dir:[input_nodes,input_nodes]
# self.X is dictionary of dir:[input_nodes1,input_nodes2]
# self.Y is dictionary of dir:[output_nodes1,output_nodes2]
# self.Y corresponds to self.X
self.X = [] # list of input vectors
self.Y = [] # list of output vectors
#if process == False:
#self.loadData(data_file)
#else: #process == True:
#self.processData(data_file,directory,comment)
def getXY(self):
"""Returns X (List of Input Vectors), and Y (List of Output Vectors)
for preprocessed data
ex) X = [[0,0],[0,1],[1,0],[1,1]]
ex) Y = [[0],[1],[1],[0]]
"""
return (self.X,self.Y)
def getInputLength(self):
"""Returns length of Input Layer"""
return len(self.X[0])
def getOutputLength(self):
"""Returns length of Output Layer"""
return len(self.Y[0])
def getFileList(self):
"""Returns a dictionary with key:value 'Output Name':[file list]
ex) {'sax':['sax1.wav','sax2.wav','sax3.wav']}
"""
return self.files
def getOutputVectors(self):
""" Returns a dictionary with key:value 'OutputName':output vector
Ex) output['cel']: np.array([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
"""
return self.output
def | (self):
"""Returns a list of the names of the output vectors
ex) ['cel', 'cla', 'flu', 'gac', 'gel', 'org', 'pia', 'sax', 'tru', 'vio', 'voi']
"""
return self.dirs
def loadData(self,data_file):
"""Loads the data in data_file into Trainer"""
#Load the data from the json
with open(data_file) as json_file:
data = json.load(json_file)
# Clear all instance variables
self.dirs = []
self.files = {}
self.X = []
self.Y = []
self.output = {}
# stored the data into the instance variables
self.dirs = data['dirs'] #good
self.files = data['files'] # good
# self.output is a dict() with string:np.array
output = data['output']
for e in output:
self.output[e] = np.array(output[e]) # -> fine
#self.X is a list of np.arrays
X = data['X']
for x in X:
self.X.append(np.array(x))# -> fine
#self.Y is a list of np.arrays
Y = data['Y']
for y in Y:
self.Y.append(list(y))# -> fine
#Test prints, uncomment to test if data looks correct
#print('self.dirs = ' + str(self.dirs))
#print()
#print('self.files = ' + str(self.files))
#print()
#print('self.output = ' + str(self.output))
#print()
#print('self.X = ' + str(self.X))
#print()
#print('self.Y = ' + str(self.Y))
#print()
print('Preprocessed data loaded from ' + str(data_file))
print(data['comment'])
return
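# Load-only usage sketch (comment only, assuming processData() has already written the
# file; the path mirrors the one used in main()):
# P = Preprocess()
# P.loadData('preprocessed/processed_01.txt')
# X, Y = P.getXY() # X[i] is an MFCC/FFT vector, Y[i] the matching one-hot instrument label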
def processData(self,data_file,directory,comment = '',way='mpcc',opt=[1024]):
"""Processes the data in directory and stores it in data_file
directory (string): folder of data to be processed
data_file (string): name of file for data to be stored ex) data.txt
comment (string): optional message to be stored with data
way = 'fft', opts is a list containing
length (int): Number of datapoints of one-sided fft (must be even,preferably a power of 2)
q (int): Downsampling Rate (must be even, preferably power of 2)
fs_in (int): throw ValueError if fs of filename != fs_in
divide (int): 1/divide*Nsamples is taken from FFT (preferably even)
plot (bool): plots the one sided FFT if True, otherwise does not plot
Note: length < total_time*fs/(q)
Ex) length = 1024 < (0.25sec)*(44100Hz)/(4) = 2756
way = 'mpcc', opts is a list containing
subsample (int) = Number of subsamples to take from audio file.
"""
# directory names are names of instruments
#self.dirs =
# ['cel', 'cla', 'flu', 'gac', 'gel', 'org', 'pia', 'sax', 'tru', 'vio', 'voi']
self.dirs = [name for name in os.listdir(directory)
if os.path.isdir(os.path.join(directory, name))]
# example: self.files['sax'] =
# IRMAS-TrainingData\sax\006__[sax][nod][cla]1686__1.wav
self.files = {}
for d in self.dirs:
self.files[d] = []
sub_dir = os.path.join(directory, d)
for filename in glob.glob(os.path.join(sub_dir, '*.wav')):
self.files[d].append(filename)
# Ex) self.output['cel']: np.array([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
i = 0
for name in self.dirs:
temp = []
for j in range(len(self.dirs)):
if i == j:
temp.append(1)
else:
temp.append(0)
self.output[name] = np.array(temp)
i +=1
#self.X = [] # list of input vectors
#self.Y = [] # list of output vectors
t0 = time.time()
for name in self.dirs:
t1 = time.time()
for file in self.files[name]:
#input_vector = processFile(file,length=length1,q=q1,fs_in=fs_in1,divide=divide1,plot = False)
if way == 'mpcc':
input_vector = processMPCC(file,*opt)
elif way == 'fft':
input_vector = processFile(file,*opt) # the FFT helper in this module is named processFile
else:
raise ValueError('Invalid Way, valid types include: \'mpcc\' or \'fft\'')
if not isinstance(input_vector, str): # processFile/processMPCC signal failure by returning the string 'failed'
self.X.append(input_vector)
self.Y.append(self.output[name])
print('Time taken to process '+str(name)+ ': ' + str((time.time()-t1)/60)[0:4] + ' min.')
print('Total Processing Time: ' + str((time.time()-t0)/60)[0:4] + ' min.')
# Now we can store all of the data in a json
# Need to store self.X, self.Y, self.dirs,self.output,self.files,self.data
# self.dirs is a list of strings -> fine
# self.files is a dict() with string:string -> fine
# self.output is a dict() with string:np.array
output = {}
for d in self.output:
out_list = []
for value in self.output[d]:
out_list.append(int(value))
output[d] = out_list # -> fine
#self.X is a list of np.arrays
X = []
for i in range(len(self.X)):
x = []
for ele in self.X[i]:
x.append(float(ele))
X.append(x) # -> fine
#self.Y is a list of np.arrays
Y = []
for i in range(len(self.Y)):
y = []
for ele in self.Y[i]:
y.append(float(ele))
Y.append(y) # -> fine
store = {}
store['dirs'] = self.dirs # good
store['output'] = output # good
store['files'] = self.files # good
store['X'] = X # good
store['Y'] = Y # good
store['comment'] = comment
with open(data_file, 'w') as outfile:
json.dump(store, outfile)
print('Preprocessed data stored in ' + str(data_file))
return
def main():
# Note: Preprocessed data should be in folder preprocessed
v = processMPCC('instruments_07/banjo/banjo_A3_very-long_forte_normal.wav')
print('len(input layer) = ' + str(len(v)))
#raise Exception
P = Preprocess()
#P.processData('preprocessed/processed_01.txt',directory='instruments_07',fs_in=8000,length=input_length,q=1,divide=1,comment = 'Instrument Data')
P.processData('preprocessed/processed_01.txt',directory='instruments_03',way='mpcc',opt = [2048])
P.loadData('preprocessed/processed_01.txt')
X, Y = P.getXY()
print('Input Layer Length: ' + str(len(X[0])))
print('Output Layer Length: ' + str(len(Y[0])))
input_size = P.getInputLength()
output_size = P.getOutputLength()
net = NN.NeuralNetwork([input_size,100,output_size],'sigmoid')
net.storeWeights('weights/weights_01')
#net.loadWeights('weights/weights_01')
net.trainWithPlots(X,Y,learning_rate=1,intervals = 100,way='max')
net.testBatch(X,Y)
# Test print functions, these print statements can be used to figure
# out how to use code
# X, Y = P.getXY()
# files = P.getFileList()
# output_vectors = P.getOutputVectors()
# output_names = P.getOutputNames()
# print()
# print('X = ' + str(X))
# print()
# print('Y = ' + str(Y))
# print()
# print('File List = ' + str(files))
# print()
# print('Output Vectors = ' + str(output_vectors))
# print()
# print('Output Names = ' + str(output_names))
if __name__ == '__main__':
main()
| getOutputNames | identifier_name |
preprocess_03.py | """
DESCRIPTION
Preprocesses audio data before sending to Neural Network
See demo in in main()
MIT License
Copyright (c) 2018 The-Instrumental-Specialists
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import neuralnet_02 as NN
import numpy as np
import os
import glob
import json
import time
import scipy
import matplotlib.pylab as plt
import scipy.io.wavfile as wavfile
import scipy.fftpack
from scipy.fftpack import dct
def getMax(array_list):
"""Returns a tuple (index,value) of the maximum in an 1D array or list"""
m = array_list[0]
m_index = 0
for i,value in enumerate(array_list):
if value > m:
m = value
m_index = i
return (m_index,m)
def processFile(filename,length = 256,q=1,fs_in=8000,divide=4,plot=False):
"""returns one sided FFT amplitudes of filename
filename (string): ex) 'sax.wav'
length (int): Number of datapoints of one-sided fft (must be even,preferably a power of 2)
q (int): (optional argument) Downsampling Rate
fs_in (int): (optional argument) throw ValueError if fs of filename != fs_in
divide (int): (optional argument) 1/divide*Nsamples is taken from FFT (preferably even)
plot (bool): (optional argument) plots the one sided FFT if True, otherwise does not plot
Note: length < total_time*fs//(2*q*divide)
Ex) length = 256 < (0.25sec)*(44100Hz)//(2*4*2) = 689
"""
length = length*divide
#fs = sample rate, sound = multichannel sound signal
try:
fs1, sound = wavfile.read(filename)
except ValueError:
print(str(filename) + ' failed to process')
return 'failed'
if fs1 != fs_in:
raise ValueError('Sampling rate should be ' + str(fs_in) + ' for: ' + filename)
sig1 = sound[:,0] #left channel
pre_emphasis = 0.97
sig1 = np.append(sig1[0], sig1[1:] - pre_emphasis * sig1[:-1])
fs2, sig2 = downsample(sig1,fs1,q)
N2 = len(sig2)
sig3 = sig2[N2//2-length:N2//2+length]
#print(len(sig3))
FFT = abs(scipy.fftpack.fft(sig3))
FFT_side = FFT[range(len(FFT)//2)]
#freqs = scipy.fftpack.fftfreq(sig3.size, 1/fs2)
#plt.plot(freqs,FFT)
if len(FFT_side) != length:
print('ERROR MESSAGE DETAILS')
print('filename: ' + filename)
print('length = ' + str(length))
print('fs_in = ' + str(fs_in))
print('q = ' + str(q))
print('divide = ' + str(divide))
total_time = len(sig1)/fs1
print('total_time = ' + str(total_time))
print('Please check: length < total_time*fs//(2*q)')
print('Check: ' + str(length) + ' < ' + str(total_time*fs1//(2*q)))
raise ValueError('Length FFT_side != length: ' + str(len(FFT_side)) + ' != ' + str(length))
FFT_log = []
# normalize FFT
for value in FFT_side:
value = np.log(value)
FFT_log.append(value)
max_val = getMax(FFT_log)[1]
FFT_norm = []
for value in FFT_log:
FFT_norm.append(value/max_val)
FFT_side = np.array(FFT_norm)
FFT_divided = FFT_side[range(length//divide)]
#plot = True
if plot == True:
freqs = scipy.fftpack.fftfreq(sig3.size, 1/fs2)
freqs_divided = np.array(freqs[range(len(FFT_divided))])
plt.plot(freqs_divided,FFT_divided) # plotting the complete fft spectrum
plt.show()
return FFT_divided
def processMPCC(filename,subsample=2048):
#assume 8000Hz
#amplify high frequencies
#Setup
try:
fs, signal = wavfile.read(filename) # File assumed to be in the same directory
except (ValueError, UnboundLocalError):
print(filename + ' failed to process.')
print('Failed Read')
print()
return 'failed'
half = len(signal)//2
side = subsample//2
signal = signal[half-side:half+side]
if side != len(signal)//2:
print(filename + ' failed to process.')
print('N too small, N: ' + str(len(signal)) + ', subsample: ' + str(subsample))
print()
return 'failed'
sig = signal[:,0] #get first channel
#Pre-Emphasis
pre_emphasis = 0.97
e_sig = sig[1:] - pre_emphasis * sig[0:-1] #emphasized signal
sig_len = len(e_sig)
#Framing
fr_size = 0.025 # frame size (sec)
fr_overlap = 0.01 # frame stride, frame overlap (sec)
fr_len = int(round(fr_size * fs)) # frame length (sec/sec)
fr_step = int(round(fr_overlap * fs)) # amt to step frame each time
num_fr = int(np.ceil(np.abs(sig_len - fr_len) / fr_step)) #Number of Frames
padding = num_fr * fr_step + fr_len # Amount of padding between frames
z = [0 for _ in range(padding-sig_len)]
z = np.array(z)
pad_sig = np.append(e_sig, z) # Pad Signal so frames equal size
#idx = np.tile(np.linspace(0, fr_len,fr_len), (num_fr, 1)) + np.tile(np.linspace(0, num_fr * fr_step, fr_step * num_fr), (fr_len, 1)).T
#fr = pad_sig[idx]
idx = np.tile(np.arange(0, fr_len), (num_fr, 1)) + np.transpose(np.tile(np.arange(0, num_fr * fr_step, fr_step), (fr_len, 1)))
fr = pad_sig[idx.astype(np.int32)]
#Window
NFFT = 512
fr = fr * ( 0.54 - 0.46 * np.cos((2 * np.pi * np.arange(fr_len)) / (fr_len - 1)) ) # Hamming Window
#Fourier-Transform and Power Spectrum
#NFFT = NFFT
mag_fr = np.absolute(np.fft.rfft(fr, NFFT)) # Magnitude of the FFT
pow_fr = (1.0 / NFFT) * ((mag_fr) ** 2) # Power Spectrum
#Filter Banks
nfilt = 40
f_low = 0
f_high = (2595 * np.log10(1 + (fs / 2) / 700)) # Convert Hz to Mel
mel_points = np.linspace(f_low, f_high, nfilt + 2) # Equally spaced in Mel scale
hz_points = (700 * (10**(mel_points / 2595) - 1)) # Convert Mel to Hz
b = np.floor((NFFT + 1) * hz_points / fs) #bin
fbank = np.zeros((nfilt, int(np.floor(NFFT / 2 + 1))))
for i in range(1, nfilt + 1):
f_m_minus = int(b[i - 1]) # left
f_m = int(b[i]) # center
f_m_plus = int(b[i + 1]) # right
for j in range(f_m_minus, f_m):
fbank[i - 1, j] = (j - b[i - 1]) / (b[i] - b[i - 1])
for j in range(f_m, f_m_plus):
fbank[i - 1, j] = (b[i + 1] - j) / (b[i + 1] - b[i])
fb = np.dot(pow_fr, np.transpose(fbank)) # filter banks
fb = np.where(fb == 0, np.finfo(float).eps, fb) # Numerical Stability
fb = 20 * np.log10(fb) # convert to dB
#Mel-frequency Cepstral Coefficients (MFCCs)
num_ceps = 12
mfcc = dct(fb, type=2, axis=1, norm='ortho')[:, 1 : (num_ceps + 1)] # Keep 2-13
#Sinusoidal Filtering
c_lift = 22 # dim of MFCC vector
(n_fr, n_coeff) = mfcc.shape #number of frames number of coeff
ncoeff_array = np.arange(n_coeff)
lift = 1 + (c_lift / 2) * np.sin(np.pi * ncoeff_array / c_lift)
mfcc = mfcc * lift
#Mean Normalization
epsilon = 1e-8
for i in range(len(fb)):
fb[i] -= mean(fb) + epsilon
for i in range(len(mfcc)):
mfcc[i] -= mean(mfcc) + epsilon
output = []
for i in range(len(mfcc)):
for j in range(len(mfcc[0])):
output.append(mfcc[i][j])
m = getMax(output)[1]
for i,value in enumerate(output):
output[i] = value/m
return np.array(output)
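# Rough size check for the returned vector (an estimate, assuming fs = 8000 Hz, a stereo
# file, and the default subsample = 2048): frame length = 0.025*8000 = 200 samples,
# step = 0.01*8000 = 80 samples, so num_fr = ceil((2047-200)/80) = 24 frames, giving
# 24*12 = 288 MFCC values, which would be the 'len(input layer)' that main() prints
# for such a sample.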
def mean(array_list):
"""Returns the mean of an array or list"""
count = 0.0
for value in array_list:
count += value
return count/len(array_list)
def downsample(sig,fs,q):
"""
sig (list,array): sound/data signal
q (int): downsample factor
"""
N = len(sig)//q
new_sig = []
for i in range(len(sig)//q):
new_sig.append(sig[i*q])
new_sig = np.array(new_sig)
return (fs//q,new_sig)
class Preprocess:
def __init__(self):
"""data_file (string): contains the file to load or store data, ex)data.txt
process (bool): if False, load data from data_file,
if True, process data in directory & store in data_file
directory (string): (optional) directory of data to be processed
"""
# Ex) self.output['cel']: np.array([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
self.output = {}
# directory names are names of instruments
#self.dirs =
# ['cel', 'cla', 'flu', 'gac', 'gel', 'org', 'pia', 'sax', 'tru', 'vio', 'voi']
self.dirs = [] # list of names of subdirectories in directory
# example: self.files['sax'] =
# IRMAS-TrainingData\sax\006__[sax][nod][cla]1686__1.wav
self.files = {} # dictionary of dir:[file,file,file]
# self.data = {} # dictionary of dir:[input_nodes,input_nodes]
# self.X is dictionary of dir:[input_nodes1,input_nodes2]
# self.Y is dictionary of dir:[output_nodes1,output_nodes2]
# self.Y corresponds to self.X
self.X = [] # list of input vectors
self.Y = [] # list of output vectors
#if process == False:
#self.loadData(data_file)
#else: #process == True:
#self.processData(data_file,directory,comment)
def getXY(self):
"""Returns X (List of Input Vectors), and Y (List of Output Vectors)
for preprocessed data
ex) X = [[0,0],[0,1],[1,0],[1,1]]
ex) Y = [[0],[1],[1],[0]]
"""
return (self.X,self.Y)
def getInputLength(self):
"""Returns length of Input Layer"""
return len(self.X[0])
def getOutputLength(self):
"""Returns length of Output Layer"""
return len(self.Y[0])
def getFileList(self):
"""Returns a dictionary with key:value 'Output Name':[file list]
ex) {'sax':['sax1.wav','sax2.wav','sax3.wav']}
"""
return self.files
def getOutputVectors(self):
""" Returns a dictionary with key:value 'OutputName':output vector
Ex) output['cel']: np.array([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
"""
return self.output
def getOutputNames(self):
"""Returns a list of the names of the output vectors
ex) ['cel', 'cla', 'flu', 'gac', 'gel', 'org', 'pia', 'sax', 'tru', 'vio', 'voi']
"""
return self.dirs
def loadData(self,data_file):
"""Loads the data in data_file into Trainer"""
#Load the data from the json
with open(data_file) as json_file:
data = json.load(json_file)
# Clear all instance variables
self.dirs = []
self.files = {}
self.X = []
self.Y = []
self.output = {}
# stored the data into the instance variables
self.dirs = data['dirs'] #good
self.files = data['files'] # good
# self.output is a dict() with string:np.array
output = data['output']
for e in output:
self.output[e] = np.array(output[e]) # -> fine
#self.X is a list of np.arrays
X = data['X']
for x in X:
self.X.append(np.array(x))# -> fine
#self.Y is a list of np.arrays
Y = data['Y']
for y in Y:
self.Y.append(list(y))# -> fine
#Test prints, uncomment to test if data looks correct
#print('self.dirs = ' + str(self.dirs))
#print()
#print('self.files = ' + str(self.files))
#print()
#print('self.output = ' + str(self.output))
#print()
#print('self.X = ' + str(self.X))
#print()
#print('self.Y = ' + str(self.Y))
#print()
print('Preprocessed data loaded from ' + str(data_file))
print(data['comment'])
return
def processData(self,data_file,directory,comment = '',way='mpcc',opt=[1024]):
"""Processes the data in directory and stores it in data_file
directory (string): folder of data to be processed
data_file (string): name of file for data to be stored ex) data.txt
comment (string): optional message to be stored with data
way = 'fft', opts is a list containing
length (int): Number of datapoints of one-sided fft (must be even,preferably a power of 2)
q (int): Downsampling Rate (must be even, preferably power of 2)
fs_in (int): throw ValueError if fs of filename != fs_in
divide (int): 1/divide*Nsamples is taken from FFT (preferably even)
plot (bool): plots the one sided FFT if True, otherwise does not plot
Note: length < total_time*fs/(q)
Ex) length = 1024 < (0.25sec)*(44100Hz)/(4) = 2756
way = 'mpcc', opts is a list containing
subsample (int) = Number of subsamples to take from audio file.
"""
# directory names are names of instruments
#self.dirs =
# ['cel', 'cla', 'flu', 'gac', 'gel', 'org', 'pia', 'sax', 'tru', 'vio', 'voi']
self.dirs = [name for name in os.listdir(directory)
if os.path.isdir(os.path.join(directory, name))]
# example: self.files['sax'] =
# IRMAS-TrainingData\sax\006__[sax][nod][cla]1686__1.wav
self.files = {}
for d in self.dirs:
self.files[d] = []
sub_dir = os.path.join(directory, d)
for filename in glob.glob(os.path.join(sub_dir, '*.wav')):
self.files[d].append(filename)
# Ex) self.output['cel']: np.array([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
i = 0
for name in self.dirs:
temp = []
for j in range(len(self.dirs)):
if i == j:
temp.append(1)
else:
temp.append(0)
self.output[name] = np.array(temp)
i +=1
#self.X = [] # list of input vectors
#self.Y = [] # list of output vectors
t0 = time.time()
for name in self.dirs:
t1 = time.time()
for file in self.files[name]:
#input_vector = processFile(file,length=length1,q=q1,fs_in=fs_in1,divide=divide1,plot = False)
if way == 'mpcc':
input_vector = processMPCC(file,*opt)
elif way == 'fft':
input_vector = processFile(file,*opt) # the FFT helper in this module is named processFile
else:
raise ValueError('Invalid Way, valid types include: \'mpcc\' or \'fft\'')
if not isinstance(input_vector, str): # processFile/processMPCC signal failure by returning the string 'failed'
self.X.append(input_vector)
self.Y.append(self.output[name])
print('Time taken to process '+str(name)+ ': ' + str((time.time()-t1)/60)[0:4] + ' min.')
print('Total Processing Time: ' + str((time.time()-t0)/60)[0:4] + ' min.')
# Now we can store all of the data in a json
# Need to store self.X, self.Y, self.dirs,self.output,self.files,self.data
# self.dirs is a list of strings -> fine
# self.files is a dict() with string:string -> fine
# self.output is a dict() with string:np.array
output = {}
for d in self.output:
out_list = []
for value in self.output[d]:
out_list.append(int(value))
output[d] = out_list # -> fine
#self.X is a list of np.arrays
X = []
for i in range(len(self.X)):
x = []
for ele in self.X[i]:
x.append(float(ele))
X.append(x) # -> fine
#self.Y is a list of np.arrays
Y = []
for i in range(len(self.Y)):
|
store = {}
store['dirs'] = self.dirs # good
store['output'] = output # good
store['files'] = self.files # good
store['X'] = X # good
store['Y'] = Y # good
store['comment'] = comment
with open(data_file, 'w') as outfile:
json.dump(store, outfile)
print('Preprocessed data stored in ' + str(data_file))
return
def main():
# Note: Preprocessed data should be in folder preprocessed
v = processMPCC('instruments_07/banjo/banjo_A3_very-long_forte_normal.wav')
print('len(input layer) = ' + str(len(v)))
#raise Exception
P = Preprocess()
#P.processData('preprocessed/processed_01.txt',directory='instruments_07',fs_in=8000,length=input_length,q=1,divide=1,comment = 'Instrument Data')
P.processData('preprocessed/processed_01.txt',directory='instruments_03',way='mpcc',opt = [2048])
P.loadData('preprocessed/processed_01.txt')
X, Y = P.getXY()
print('Input Layer Length: ' + str(len(X[0])))
print('Output Layer Length: ' + str(len(Y[0])))
input_size = P.getInputLength()
output_size = P.getOutputLength()
net = NN.NeuralNetwork([input_size,100,output_size],'sigmoid')
net.storeWeights('weights/weights_01')
#net.loadWeights('weights/weights_01')
net.trainWithPlots(X,Y,learning_rate=1,intervals = 100,way='max')
net.testBatch(X,Y)
# Test print functions, these print statements can be used to figure
# out how to use code
# X, Y = P.getXY()
# files = P.getFileList()
# output_vectors = P.getOutputVectors()
# output_names = P.getOutputNames()
# print()
# print('X = ' + str(X))
# print()
# print('Y = ' + str(Y))
# print()
# print('File List = ' + str(files))
# print()
# print('Output Vectors = ' + str(output_vectors))
# print()
# print('Output Names = ' + str(output_names))
if __name__ == '__main__':
main()
| y = []
for ele in self.Y[i]:
y.append(float(ele))
Y.append(y) # -> fine | conditional_block |
lib.rs | //! Everything related to meshes.
//!
//! **TODO**: Everything.
#![feature(trivial_bounds)]
#![feature(never_type)]
#![feature(doc_cfg)]
#![feature(proc_macro_hygiene)]
#![feature(try_blocks)]
#![feature(specialization)]
#![feature(associated_type_defaults)]
#![feature(associated_type_bounds)]
#![feature(array_value_iter)]
#![deny(missing_debug_implementations)]
#![deny(intra_doc_link_resolution_failure)]
// TODO: specialization now warns, but min_specialization is not yet ready to be used here
#![allow(incomplete_features)]
pub extern crate cgmath;
// This is done for proc macros from `lox-macros`. These use paths starting
// with `lox`. This makes sense for all crates using `lox` as dependency. But
// we also want to use proc macros in this library. So we alias `crate` with
// `lox`.
extern crate self as lox;
#[cfg(test)]
#[macro_use]
mod test_utils;
pub mod algo;
pub mod cast;
pub mod ds;
#[cfg(feature = "io")]
pub mod fat;
pub mod handle;
#[cfg(feature = "io")]
pub mod io;
pub mod map;
pub mod math;
pub mod mesh;
pub mod prop;
pub mod traits;
pub mod prelude;
pub mod refs;
#[cfg(feature = "io")]
pub mod shape;
pub mod util;
pub use crate::handle::{EdgeHandle, FaceHandle, VertexHandle};
/// The three basic elements in a polygon mesh.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum | {
Edge,
Face,
Vertex,
}
// ===========================================================================
// ===== `Sealed` trait
// ===========================================================================
pub(crate) mod sealed {
/// A trait that cannot be implemented outside of this crate.
///
/// This is helpful for all "real" traits in this library that only
/// abstract over a closed set of types. Thus, users shouldn't be able to
/// implement those traits for their types. Adding `Sealed` as supertrait
/// solves this problem.
pub trait Sealed {}
}
// ===========================================================================
// ===== Macros
// ===========================================================================
/// Derive macro for the [`Empty` trait][traits::Empty].
///
// lib.rs
//! Everything related to meshes.
//!
//! **TODO**: Everything.
#![feature(trivial_bounds)]
#![feature(never_type)]
#![feature(doc_cfg)]
#![feature(proc_macro_hygiene)]
#![feature(try_blocks)]
#![feature(specialization)]
#![feature(associated_type_defaults)]
#![feature(associated_type_bounds)]
#![feature(array_value_iter)]
#![deny(missing_debug_implementations)]
#![deny(intra_doc_link_resolution_failure)]
// TODO: specialization now warns, but min_specialization is not yet ready to be used here
#![allow(incomplete_features)]
pub extern crate cgmath;
// Proc macros from `lox-macros` emit paths starting with `lox`. That works for
// every crate that uses `lox` as a dependency, but we also want to use those
// proc macros inside this library itself, so we alias the crate as `lox` here.
extern crate self as lox;
#[cfg(test)]
#[macro_use]
mod test_utils;
pub mod algo;
pub mod cast;
pub mod ds;
#[cfg(feature = "io")]
pub mod fat;
pub mod handle;
#[cfg(feature = "io")]
pub mod io;
pub mod map;
pub mod math;
pub mod mesh;
pub mod prop;
pub mod traits;
pub mod prelude;
pub mod refs;
#[cfg(feature = "io")]
pub mod shape;
pub mod util;
pub use crate::handle::{EdgeHandle, FaceHandle, VertexHandle};
/// The three basic elements in a polygon mesh.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum MeshElement {
Edge,
Face,
Vertex,
}
// ===========================================================================
// ===== `Sealed` trait
// ===========================================================================
pub(crate) mod sealed {
/// A trait that cannot be implemented outside of this crate.
///
/// This is helpful for all "real" traits in this library that only
/// abstract over a closed set of types. Thus, users shouldn't be able to
/// implement those traits for their types. Adding `Sealed` as supertrait
/// solves this problem.
pub trait Sealed {}
}
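// Illustrative sketch only (the trait and type names below are hypothetical):
// a public trait in this crate opts into the pattern described above roughly
// like this:
//
//     pub trait SomeClosedTrait: sealed::Sealed { /* ... */ }
//     impl sealed::Sealed for SomeConcreteType {}
//
// Downstream crates can use `SomeClosedTrait`, but cannot implement it, since
// they cannot implement the private `Sealed` supertrait.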
// ===========================================================================
// ===== Macros
// ===========================================================================
/// Derive macro for the [`Empty` trait][traits::Empty].
///
/// ```
/// use lox::Empty; // this imports the custom-derive and not the trait!
///
/// #[derive(Empty)]
/// struct MyStruct {
/// a: Vec<u32>, // => `vec![]`
/// b: Option<String>, // => `None`
/// c: (), // => `()`
/// }
/// ```
///
/// This can only be derived for structs. All struct fields need to implement
/// `Empty` in order for the derive to work. If your struct has generic
/// parameters, they won't be bounded with `Empty` in the generated impl block.
/// This is useful most of the time, because things like `Vec<T>` and
/// `Option<T>` don't require `T: Empty` to implement `Empty`. But this means
/// that you sometimes have to add a global `Empty` bound to your parameter or
/// implement `Empty` manually.
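///
/// For illustration (a sketch, assuming the trait lives at [`traits::Empty`]
/// and exposes an `empty()` constructor as its only method), a manual
/// implementation that adds the bound explicitly could look like this:
///
/// ```
/// use lox::traits::Empty;
///
/// struct Wrapper<T> {
///     inner: T,
/// }
///
/// impl<T: Empty> Empty for Wrapper<T> {
///     fn empty() -> Self {
///         Wrapper { inner: T::empty() }
///     }
/// }
/// ```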
pub use lox_macros::Empty;
/// Derive macro for [the `MemSink` trait][io::MemSink].
///
/// You can easily derive `MemSink` for your own types. To do that, you have to
/// attach `#[derive(MemSink)]` to your struct definition (note: currently, the
/// trait can only be derived for structs with named fields). You also have to
/// annotate your fields with `#[lox(...)]` attributes to tell the derive macro
/// what a field should be used for. Example:
///
/// ```
/// use lox::{
/// MemSink, VertexHandle,
/// cgmath::Point3,
/// ds::HalfEdgeMesh,
/// map::DenseMap,
/// };
///
///
/// #[derive(MemSink)]
/// struct MyMesh {
/// #[lox(core_mesh)]
/// mesh: HalfEdgeMesh,
///
/// #[lox(vertex_position)]
/// positions: DenseMap<VertexHandle, Point3<f32>>,
/// }
/// ```
///
/// There is one required field: the core mesh field. That field's type has to
/// implement several mesh traits, in particular `MeshMut` and `TriMeshMut`.
/// You have to annotate that mesh with `#[lox(core_mesh)]`.
///
/// Additionally, you can have fields for each mesh property, like vertex
/// position or face colors. The type of those fields has to implement
/// `PropStoreMut` with a compatible element type. You have to annotate these
/// property fields with the corresponding attribute. The available properties
/// are:
///
/// - `vertex_position`
/// - `vertex_normal`
/// - `vertex_color`
/// - `face_normal`
/// - `face_color`
///
/// Furthermore, there are some configurations (like the cast mode) that can be
/// configured via `lox(...)` attributes as well. See below for more
/// information.
///
///
/// ## Cast modes
///
/// You can set a *cast mode* for each field. A `MemSink` has to be able to
/// "handle" any primitive type as the source is allowed to call the property
/// methods with any type. The sink can handle types either by casting or by
/// returning an error. The field's cast mode determines which casts are
/// allowed and which are not. Possible cast modes:
///
/// - `cast = "none"`
/// - `cast = "lossless"`
/// - `cast = "rounding"`
/// - `cast = "clamping"`
/// - `cast = "lossy"` (*default*)
///
/// The `none` mode does not allow casting at all. If the type provided by the
/// source does not match the type in your struct, an error is returned. All
/// other modes correspond to the cast modes in the [`cast`
/// module][crate::cast].
///
/// Note that the cast modes are used by `derive(MemSource)` as well.
///
/// You can specify the cast mode either per field or globally on the whole
/// struct. The mode of the struct applies to all fields that don't have a
/// field-specific mode.
///
/// ```
/// # use lox::{
/// # MemSink, VertexHandle,
/// # cgmath::{Point3, Vector3},
/// # ds::HalfEdgeMesh,
/// # map::DenseMap,
/// # };
/// #
/// #[derive(MemSink)]
/// #[lox(cast = "none")]
/// struct MyMesh {
/// #[lox(core_mesh)]
/// mesh: HalfEdgeMesh,
///
/// #[lox(vertex_position)]
/// positions: DenseMap<VertexHandle, Point3<f32>>,
///
/// #[lox(vertex_normal, cast = "lossy")]
/// normals: DenseMap<VertexHandle, Vector3<f32>>,
/// }
/// ```
///
/// In this example, the vertex positions inherit the "struct global" cast mode
/// (`none`), while the vertex normals override that mode to `lossy`.
///
///
/// ### Exact traits required for each field
///
/// Traits required for the `core_mesh` field:
/// - TODO
///
/// Traits required for property fields. For type `T` of the field:
/// - `T` must implement [`PropStoreMut`][crate::map::PropStoreMut] (with
/// fitting handle type). Additionally:
/// - For `vertex_position`: `T::Target` must implement
/// [`Pos3Like`][crate::prop::Pos3Like].
/// - For `*_normal`: `T::Target` must implement
/// [`Vec3Like`][crate::prop::Vec3Like].
/// - For `*_color`: `T::Target` must implement
/// [`ColorLike`][crate::prop::ColorLike] and `T::Target::Channel` must
/// implement [`Primitive`].
#[cfg(feature = "io")]
pub use lox_macros::MemSink;
/// Derive macro for [the `MemSource` trait][io::MemSource].
///
/// You can easily derive `MemSource` for your own types. To do that, you have
/// to attach `#[derive(MemSource)]` to your struct definition (note:
/// currently, the trait can only be derived for structs with named fields).
/// You also have to annotate your fields with `#[lox(...)]` attributes to tell
/// the derive macro what a field should be used for. Example:
///
/// ```
/// use lox::{
///     MemSource, VertexHandle,
///     cgmath::Point3,
///     ds::SharedVertexMesh,
///     map::DenseMap,
/// };
///
///
/// #[derive(MemSource)]
/// struct MyMesh {
/// #[lox(core_mesh)]
/// mesh: SharedVertexMesh,
///
/// #[lox(vertex_position)]
/// positions: DenseMap<VertexHandle, Point3<f32>>,
/// }
/// ```
///
/// Deriving this trait works very similarly to deriving [`MemSink`]. See its
/// documentation for more information on the custom derive.
///
///
/// ### Exact traits required for each field
///
/// Traits required for the `core_mesh` field:
/// - TODO
///
/// Traits required for property fields. For type `T` of the field:
/// - `T` must implement [`PropStore`][crate::map::PropStore] (with fitting
/// handle type). Additionally:
/// - For `vertex_position`: `T::Target` must implement
/// [`Pos3Like`][crate::prop::Pos3Like] and `T::Target::Scalar` must
/// implement [`Primitive`].
/// - For `*_normal`: `T::Target` must implement
/// [`Vec3Like`][crate::prop::Vec3Like] and `T::Target::Scalar` must
/// implement [`Primitive`].
/// - For `*_color`: `T::Target` must implement
/// [`ColorLike`][crate::prop::ColorLike] and `T::Target::Channel` must
/// implement [`Primitive`].
#[cfg(feature = "io")]
pub use lox_macros::MemSource;
/// Convenience macro to quickly create a small mesh.
///
/// (This is just a dummy macro to add documentation to the actual proc-macro
/// reexported from `lox-macros`. See [#58700][i58700] and [#58696][i58696] for
/// more information.)
///
/// **Note about unstable features**: this proc macro needs to be invoked in
/// expression context, which is still unstable. So your crate needs to enable
/// the `proc_macro_hygiene` feature for this to work.
///
/// [i58700]: https://github.com/rust-lang/rust/issues/58700
/// [i58696]: https://github.com/rust-lang/rust/issues/58696
///
/// # Examples
///
/// Here we create two triangles:
///
/// ```
/// #![feature(proc_macro_hygiene)]
/// use lox::{
/// mesh,
/// prelude::*,
/// ds::SharedVertexMesh,
/// };
///
///
/// let (mesh, positions, distances, face_colors) = mesh! {
/// type: SharedVertexMesh,
/// vertices: [
/// v0: ([0.0, 0.0, 0.0], 0.0),
/// v1: ([0.0, 1.0, 0.0], 1.0),
/// v2: ([1.0, 0.0, 0.0], 1.0),
/// v3: ([1.0, 1.0, 0.0], 1.414),
/// ],
/// faces: [
/// [v0, v2, v1]: ("red"),
/// [v3, v1, v2]: ("green"),
/// ],
/// };
///
/// assert_eq!(mesh.num_vertices(), 4);
/// assert_eq!(mesh.num_faces(), 2);
/// ```
///
/// In the code above, we associate a position and a scalar value with each
/// vertex and a color (or rather, a color name) with each face. Properties of
/// vertices and faces are specified after a colon (`:`) in parentheses (like a
/// tuple).
///
/// For each property you add in those parentheses, the macro returns an
/// additional property map. The full return value is:
///
/// ```text
/// (mesh, /* vertex property maps */, /* face property maps*/)
/// ```
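///
/// For the two-triangle example above this means
/// `(mesh, positions, distances, face_colors)`: two vertex property maps
/// followed by one face property map.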
///
/// ## Without properties
///
/// We don't need to specify any properties. We can either write empty
/// parentheses (`()`) or just omit the colon and the parentheses:
///
/// ```
/// #![feature(proc_macro_hygiene)]
/// use lox::{
/// mesh,
/// ds::SharedVertexMesh,
/// };
///
///
/// let mesh = mesh! {
/// type: SharedVertexMesh,
/// vertices: [
/// v0: (), // <-- this is equivalent to:
/// v1, // <-- this
/// v2,
/// v3,
/// ],
/// faces: [
/// [v0, v2, v1],
/// [v3, v1, v2],
/// ],
/// };
/// ```
///
/// Of course, you can also add properties to the vertices, but not the faces,
/// or the other way around. However, you always have to specify the same
/// number of properties for all vertices and the same number of properties for
/// all faces.
///
/// ## An empty mesh
///
/// This is not particularly useful in itself, but it works. You can use this
/// syntax when you haven't yet decided what your mesh should look like.
///
/// ```
/// #![feature(proc_macro_hygiene)]
/// use lox::{
/// mesh,
/// ds::SharedVertexMesh,
/// };
///
///
/// let empty_mesh = mesh! {
/// type: SharedVertexMesh,
/// vertices: [],
/// faces: [],
/// };
/// ```
pub use lox_macros::mesh;
// main.rs
#[macro_use]
extern crate log;
extern crate simplelog;
use futures::future;
use futures::future::{BoxFuture, FutureExt};
use reqwest as request;
use base64::encode;
use dirs::home_dir;
use futures::io::SeekFrom;
use regex::Regex;
use request::header::{HeaderMap, AUTHORIZATION, CONTENT_TYPE, USER_AGENT};
use scraper::{Html, Selector};
use serde::{Deserialize, Serialize};
use simplelog::*;
use std::collections::HashMap;
use std::fs::{File, OpenOptions};
use std::io::{self, BufRead, LineWriter, Seek, Write};
use std::str;
const SURE_USER_AGENT: &str = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.0.2 Safari/605.1.15";
const TWILIO_BASE_URL: &str = "https://api.twilio.com/2010-04-01";
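// High-level flow: open a URE session, fetch the current search results,
// drop listings that were already reported, scrape each remaining listing
// page, keep the active/interesting ones, and text them out via Twilio.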
#[tokio::main]
async fn main() -> SureResult<()> {
init_logging()?;
let client = request::Client::new();
let sess_id = get_session_id(&client).await?;
let mut listings = get_listings(&client, &sess_id, 0).await?;
remove_duplicates(&mut listings);
if listings.markers.len() > 0 {
let listings_map = scrape_listings(&client, &listings).await?;
let desired_listings = get_desired_listings(&listings_map);
if desired_listings.len() > 0 {
let listing_message = build_listing_message(&desired_listings);
send_messages(&client, &listing_message).await?;
}
}
Ok(())
}
fn init_logging() -> SureResult<()> {
let log_file = OpenOptions::new()
.append(true)
.create(true)
.open(&get_sure_filepath("sure.log"))?;
let config = ConfigBuilder::new()
.set_time_format_str("%c")
.set_time_to_local(true)
.build();
CombinedLogger::init(vec![WriteLogger::new(LevelFilter::Info, config, log_file)]).unwrap();
Ok(())
}
async fn get_session_id(client: &request::Client) -> SureResult<String> {
let re = Regex::new(r#"(PHPSESSID=[\w\S]+);"#).unwrap();
let res = client
.get("https://www.utahrealestate.com/index/public.index")
.header(USER_AGENT, SURE_USER_AGENT)
.send()
.await?;
let sessid = res.headers().get("set-cookie").unwrap().to_str().unwrap();
let mut id = String::from("");
for cap in re.captures_iter(sessid) {
id = String::from(&cap[1]);
}
if id == "" {
panic!("unable to find session id");
}
Ok(id)
}
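// Returns a boxed future instead of being an `async fn`: the function calls
// itself recursively when the response cannot be parsed, and async recursion
// requires the future to be boxed (hence the `.boxed()` at the end).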
fn get_listings<'a>(
client: &'a request::Client,
session_id: &'a str,
retry_count: usize,
) -> BoxFuture<'a, SureResult<UreData>> {
if retry_count > 3 {
error!("exceeded retry count - URE must be down");
std::process::exit(0);
}
async move {
let params = get_ure_search_params();
let mut headers = HeaderMap::new();
headers.insert(USER_AGENT, SURE_USER_AGENT.parse().unwrap());
headers.insert(
CONTENT_TYPE,
"application/x-www-form-urlencoded".parse().unwrap(),
);
headers.insert("PHPSESSID", session_id.parse().unwrap());
let res = client
.post("https://www.utahrealestate.com/search/chained.update/param_reset/county_code,o_county_code,city,o_city,zip,o_zip,geometry,o_geometry/count/false/criteria/false/pg/1/limit/50/dh/1190")
.headers(headers)
.body(params)
.send()
.await?;
let res_text = res.text().await?;
match serde_json::from_str(&res_text) {
Ok(v) => Ok(v),
Err(_) => {
error!("failed to parse text, retrying");
Ok(get_listings(client, session_id, retry_count + 1).await?)
}
}
}.boxed()
}
async fn scrape_listings(
client: &request::Client,
data: &UreData,
) -> SureResult<HashMap<String, Html>> {
let mut raw_futures = vec![];
for (index, marker) in data.markers.iter().enumerate() {
raw_futures.push(get_listing(&client, &marker.id, index));
}
let unpin_futures: Vec<_> = raw_futures.into_iter().map(Box::pin).collect();
let mut mut_futures = unpin_futures;
let mut documents: HashMap<String, Html> = HashMap::new();
let mut size: usize = 0;
let mut current: f32 = 0.0;
let total: usize = mut_futures.len();
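    // Poll every pending download concurrently: `select_all` resolves with the
    // first future that finishes plus the vector of still-pending ones, so each
    // iteration records one completed listing and keeps waiting on the rest.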
while !mut_futures.is_empty() {
match future::select_all(mut_futures).await {
(Ok((id, _idx, document, content_length)), _index, remaining) => {
current += 1.0;
let percentage = (((current / total as f32) * 100.0) / 2.0) as usize;
io::stdout()
.write(
format!(
"\rdownloading listings {}/{}: [{}>{}]",
current,
total,
"=".repeat(percentage),
" ".repeat(50 - percentage),
)
.as_bytes(),
)
.unwrap();
io::stdout().flush().unwrap();
size += content_length;
documents.insert(id, document);
mut_futures = remaining;
}
(Err(_e), _index, remaining) => {
error!("document failed");
mut_futures = remaining;
}
}
}
println!("\n");
info!(
"downloaded {:.2?}MB from {} listings\n\t\t\t\t└──{:?}{}",
size as f32 / 1000000.0,
total,
documents.iter().map(|v| v.0).collect::<Vec<&String>>(),
" ".repeat(50)
);
Ok(documents)
}
fn get_desired_listings(listing_map: &HashMap<String, Html>) -> Vec<DesiredListing> {
let selector = Selector::parse(".facts___list___items .facts___item").unwrap();
let mut desired_listings: Vec<DesiredListing> = vec![];
for (key, value) in listing_map {
let mut dl = DesiredListing::new();
let div = value.select(&selector).collect::<Vec<_>>();
for node in div {
let mut node_vec = node
.text()
.collect::<Vec<&str>>()
.iter()
.map(|&v| v.trim())
.collect::<Vec<&str>>();
node_vec.retain(|&v| v != "");
if node_vec[0] == "Days on URE"
&& (node_vec[1] == "Just Listed"
|| node_vec[1].to_string().parse::<usize>().unwrap() >= 20)
{
dl.interested = true;
}
if node_vec[0] == "Status" && node_vec[1] == "Active" {
dl.active = true;
}
}
if dl.is_desired() {
dl.mls = String::from(key);
desired_listings.push(dl);
}
}
desired_listings
}
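// Drop any listing that was already reported by a previous run (tracked in
// the listings file under ~/.sure) and remember the remaining new ones so
// they are not reported twice.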
fn remove_duplicates(listings: &mut UreData) {
let mut dup_idx: Vec<usize> = vec![];
let mut existing = get_checked_listings();
for (idx, listing) in listings.markers.iter().enumerate() {
if existing.contains(&listing.id) {
dup_idx.push(idx);
}
}
if dup_idx.len() > 0 {
for i in dup_idx.into_iter().rev() {
listings.markers.remove(i);
}
}
if listings.markers.len() > 0 {
for listing in listings.markers.iter() {
existing.push(listing.id.clone());
}
write_checked_listings(&existing).unwrap();
} else {
info!("no new listings");
}
}
fn build_listing_message(listings: &Vec<DesiredListing>) -> String {
let mut message_str = String::from("");
for listing in listings {
message_str.push_str(&format!(
"https://www.utahrealestate.com/{}\n\n",
listing.mls
));
}
message_str
}
async fn send_messages(client: &request::Client, message: &str) -> SureResult<()> {
let credentials = get_twilio_credentials();
let mut raw_futures = vec![];
for number in credentials.alert_numbers.iter() {
raw_futures.push(send_message(&client, &message, number))
}
let unpin_futures: Vec<_> = raw_futures.into_iter().map(Box::pin).collect();
let mut mut_futures = unpin_futures;
while !mut_futures.is_empty() {
match future::select_all(mut_futures).await {
(Ok(_res), _index, remaining) => mut_futures = remaining,
(Err(_e), _index, remaining) => mut_futures = remaining,
}
}
Ok(())
}
async fn get_listing(
client: &request::Client,
id: &str,
index: usize,
) -> SureResult<(String, usize, Html, usize)> {
let url = format!("https://www.utahrealestate.com/{}", id);
let res = client
.get(&url)
.header(USER_AGENT, SURE_USER_AGENT)
.send()
.await?;
let body = res.text().await?;
let document = Html::parse_document(&body);
Ok((String::from(id), index, document, body.len()))
}
async fn send_message(client: &request::Client, message: &str, to: &str) -> SureResult<()> {
let credentials = get_twilio_credentials();
let message_url = format!(
"{}/Accounts/{}/Messages.json",
TWILIO_BASE_URL, credentials.sid
);
let mut headers = HeaderMap::new();
headers.insert(
AUTHORIZATION,
format!("Basic {}", credentials.basic_auth())
.parse()
.unwrap(),
);
let params = [
("From", &credentials.number),
("Body", &message.to_string()),
("To", &to.to_string()),
];
let res = client
.post(&message_url)
.headers(headers)
.form(¶ms)
.send()
.await?;
if res.status() == 201 {
info!("message sent");
} else {
error!(
"error sending message: {:?}\n\t└──{}\n\t└──{:?}",
res.status(),
res.text().await?,
params
)
}
Ok(())
}
///
/// Utility Functions
///
fn get_checked_listings() -> Vec<String> {
let mut checked_mls: Vec<String> = vec![];
if let Ok(lines) = read_lines(&get_sure_filepath("listings.txt")) {
for line in lines {
if let Ok(l) = line {
checked_mls.push(String::from(l.trim()))
}
}
}
checked_mls
}
fn write_checked_listings(checked: &Vec<String>) -> SureResult<()> {
let mut contents = String::from("");
let mut file = OpenOptions::new()
.write(true)
.create(true)
.open(&get_sure_filepath("listings.txt"))?;
file.set_len(0)?;
file.seek(SeekFrom::Start(0))?;
let mut file = LineWriter::new(file);
let mut sorted = checked
.iter()
.map(|v| v.parse::<usize>().unwrap())
.collect::<Vec<usize>>();
sorted.sort();
for c in sorted {
contents.push_str(&format!("{}\n", c));
}
file.write_all(contents.as_bytes())?;
Ok(())
}
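// Reads ~/.sure/queries.env and joins its lines with '&' to form the
// x-www-form-urlencoded body sent to the search endpoint. Each line is
// expected to already be a URL-encoded `name=value` pair; the exact parameter
// names depend on the UtahRealEstate search form and are not listed here.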
fn get_ure_search_params() -> String {
let mut param_encoded = String::from("");
if let Ok(lines) = read_lines(&get_sure_filepath("queries.env")) {
for line in lines {
if let Ok(l) = line {
param_encoded.push_str(&format!("{}&", l));
}
}
}
String::from(param_encoded)
}
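// Reads the Twilio configuration from ~/.sure/twilio.env, which is expected to
// contain `key=value` lines. A plausible example (placeholder values only):
//
//   AccountSID=ACxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
//   AuthToken=your_auth_token
//   TwilioNumber=+15551234567
//   AlertNumbers=+15557654321,+15550001111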
fn get_twilio_credentials() -> TwilioAuth {
let mut auth = TwilioAuth::new();
if let Ok(lines) = read_lines(&get_sure_filepath("twilio.env")) {
for line in lines {
if let Ok(i) = line {
let config_item: Vec<&str> = i.split('=').collect();
if config_item[0] == "AccountSID" {
auth.sid = String::from(config_item[1]);
}
if config_item[0] == "AuthToken" {
auth.auth_token = String::from(config_item[1]);
}
if config_item[0] == "TwilioNumber" {
auth.number = String::from(config_item[1]);
}
if config_item[0] == "AlertNumbers" {
let numbers: Vec<String> = config_item[1]
.split(",")
.into_iter()
.map(String::from)
.collect();
auth.alert_numbers = numbers;
}
}
}
}
auth
}
fn read_lines(filename: &str) -> io::Result<io::Lines<io::BufReader<File>>> {
let file = File::open(filename)?;
Ok(io::BufReader::new(file).lines())
}
fn get_sure_filepath(filename: &str) -> String {
let mut home_path = home_dir().unwrap();
home_path.push(".sure");
home_path.push(filename);
String::from(home_path.to_str().unwrap())
}
///
///
/// Definitions and Implementations
///
///
///
/// DesiredListing
///
#[derive(Debug)]
struct DesiredListing {
active: bool,
interested: bool,
mls: String,
}
impl DesiredListing {
fn new() -> DesiredListing {
Default::default()
}
fn is_desired(&self) -> bool {
self.active && self.interested
}
}
impl Default for DesiredListing {
fn default() -> Self {
DesiredListing {
active: false,
interested: false,
mls: String::from(""),
}
}
}
///
/// Twilio
///
pub struct TwilioAuth {
sid: String,
auth_token: String,
number: String,
alert_numbers: Vec<String>,
}
impl TwilioAuth {
fn new() -> TwilioAuth {
Default::default()
}
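    // Value expected after the `Basic ` prefix of the Authorization header:
    // base64("<AccountSID>:<AuthToken>").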
fn basic_auth(&self) -> String {
encode(format!("{}:{}", &self.sid, &self.auth_token).as_bytes())
}
}
impl Default for TwilioAuth {
fn default() -> Self {
TwilioAuth {
sid: String::from(""),
auth_token: String::from(""),
number: String::from(""),
alert_numbers: vec![],
}
}
}
#[derive(Debug, Serialize, Deserialize)]
struct TwilioResponse {
error_code: String,
status: String,
}
///
/// SureResult and SureError
///
type SureResult<T> = Result<T, SureError>;
#[derive(Debug)]
enum SureError {
IoError(std::io::Error),
ReqwestError(request::Error),
StdError(Box<dyn std::error::Error>),
JsonError(serde_json::Error),
}
impl From<std::io::Error> for SureError {
fn from(error: std::io::Error) -> Self {
SureError::IoError(error)
}
}
impl From<reqwest::Error> for SureError {
fn from(error: reqwest::Error) -> Self {
SureError::ReqwestError(error)
}
}
impl From<Box<dyn std::error::Error>> for SureError {
fn from(error: Box<dyn std::error::Error>) -> Self {
SureError::StdError(error)
}
}
impl From<serde_json::Error> for SureError {
fn from(error: serde_json::Error) -> Self {
SureError::JsonError(error)
}
}
///
/// UreData
/// └── Vec<Marker>
///
#[derive(Debug, Serialize, Deserialize)]
struct UreData {
markers: Vec<Marker>,
}
#[derive(Debug, Serialize, Deserialize)]
struct Marker {
price: String,
id: String,
}
// main.rs
// Copyright 2020 Xavier Gillard
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
// the Software, and to permit persons to whom the Software is furnished to do so,
// subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//! This example show how to implement a solver for the maximum independent set problem
//! using ddo. It is a fairly simple example but it features most of the aspects you will
//! want to copy when implementing your own solver.
use std::{cell::RefCell, path::Path, fs::File, io::{BufReader, BufRead}, num::ParseIntError, time::{Duration, Instant}};
use bit_set::BitSet;
use clap::Parser;
use ddo::*;
use regex::Regex;
#[cfg(test)]
mod tests;
/// This structure represents an instance of the Maximum Independent Set Problem.
/// It is this structure that implements a simple dynamic programming model for the
/// MISP. In that model, the state is simply a bitset where each bit represents
/// a node that may be kept or left out of the MIS.
pub struct Misp {
/// The number of variables in the problem instance
nb_vars: usize,
/// For each vertex 'i' of the original graph, the field 'neighbors[i]' contains
/// a bitmask representing the COMPLEMENT of the adjacency list of i in the
/// original graph. While this may seem a complicated take on the representation
/// of this problem instance, using the complement is helpful because it makes it
/// possible to remove all the neighbors of a vertex from a state very efficiently.
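    /// For example, in a 4-vertex graph where vertex 1 is adjacent to vertices 0
    /// and 2, `neighbors[1]` is the bitset {1, 3}; intersecting a state with it
    /// removes vertices 0 and 2 (the actual neighbors) in a single operation.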
neighbors: Vec<BitSet>,
/// For each vertex 'i', the value of 'weight[i]' denotes the weight associated
/// to vertex i in the problem instance. The goal of MISP is to select the nodes
/// from the underlying graph such that the resulting set is an independent set
/// where the sum of the weights of selected vertices is maximum.
weight: Vec<isize>,
}
/// A constant to mean take the node in the independent set.
const YES: isize = 1;
/// A constant to mean leave the node out of the independent set.
const NO: isize = 0;
/// The Misp structure implements the 'Problem' trait. This means Misp is the definition
/// of the DP model. That DP model is pretty straightforward, still you might want
/// to check the implementation of the branching heuristic (next_variable method)
/// since it does interesting stuff.
impl Problem for Misp {
type State = BitSet;
fn nb_variables(&self) -> usize {
self.nb_vars
}
fn initial_state(&self) -> Self::State {
(0..self.nb_variables()).collect()
}
fn initial_value(&self) -> isize {
0
}
fn transition(&self, state: &Self::State, decision: Decision) -> Self::State {
let mut res = state.clone();
res.remove(decision.variable.id());
if decision.value == YES {
// intersect with complement of the neighbors for fast set difference
res.intersect_with(&self.neighbors[decision.variable.id()]);
}
res
}
fn transition_cost(&self, _: &Self::State, decision: Decision) -> isize {
if decision.value == NO {
0
} else {
self.weight[decision.variable.id()]
}
}
fn for_each_in_domain(&self, variable: Variable, state: &Self::State, f: &mut dyn DecisionCallback) {
if state.contains(variable.id()) {
f.apply(Decision{variable, value: YES});
f.apply(Decision{variable, value: NO });
} else {
f.apply(Decision{variable, value: NO });
}
}
    /// This method is (apparently) a bit more hairy. It simply decides to branch on
/// the variable that occurs in the least number of states present in the next layer. The intuition
/// here is to limit the max width as much as possible when developing the layers since all
    /// nodes that are not impacted by the change on the selected vertex are simply copied over to the
/// next layer.
fn next_variable(&self, _: usize, next_layer: &mut dyn Iterator<Item = &Self::State>) -> Option<Variable> {
// The thread local stuff is possibly one of the most surprising bits of this code. It declares
// a static variable called VAR_HEURISTIC storing the counts of each vertex in the next layer.
// The fact that it is static means that it will not be re-created (re allocated) upon each
// pass. The fact that it is declared within a thread local block, means that this static var
// will be created with a potentially mutable access for each thread.
thread_local! {
static VAR_HEURISTIC: RefCell<Vec<usize>> = RefCell::new(vec![]);
}
VAR_HEURISTIC.with(|heu| {
let mut heu = heu.borrow_mut();
let heu: &mut Vec<usize> = heu.as_mut();
// initialize
heu.reserve_exact(self.nb_variables());
if heu.is_empty() {
for _ in 0..self.nb_variables() { heu.push(0); }
} else {
heu.iter_mut().for_each(|i| *i = 0);
}
// count the occurrence of each var
for s in next_layer {
for sit in s.iter() {
heu[sit] += 1;
}
}
// take the one occurring the least often
heu.iter().copied().enumerate()
.filter(|(_, v)| *v > 0)
.min_by_key(|(_, v)| *v)
.map(|(x, _)| Variable(x))
})
}
fn is_impacted_by(&self, var: Variable, state: &Self::State) -> bool |
}
/// In addition to a dynamic programming (DP) model of the problem you want to solve,
/// the branch and bound with MDD algorithm (and thus ddo) requires that you provide
/// an additional relaxation allowing to control the maximum amount of space used by
/// the decision diagrams that are compiled.
///
/// That relaxation requires two operations: one to merge several nodes into one
/// merged node that acts as an over approximation of the other nodes. The second
/// operation is used to possibly offset some weight that would otherwise be lost
/// to the arcs entering the newly created merged node.
///
/// The role of this very simple structure is simply to provide an implementation
/// of that relaxation.
///
/// # Note:
/// In addition to the aforementioned two operations, the MispRelax structure implements
/// an optional `fast_upper_bound` method, which provides a useful bound to
/// prune some portions of the state-space as the decision diagrams are compiled
/// (aka rough upper bound pruning).
pub struct MispRelax<'a>{pb: &'a Misp}
impl Relaxation for MispRelax<'_> {
type State = BitSet;
fn merge(&self, states: &mut dyn Iterator<Item = &Self::State>) -> Self::State {
let mut state = BitSet::with_capacity(self.pb.nb_variables());
for s in states {
state.union_with(s);
}
state
}
fn relax(
&self,
_source: &Self::State,
_dest: &Self::State,
_new: &Self::State,
_decision: Decision,
cost: isize,
) -> isize {
cost
}
fn fast_upper_bound(&self, state: &Self::State) -> isize {
state.iter().map(|x| self.pb.weight[x]).sum()
}
}
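// A small worked example of the relaxation above (numbers chosen for illustration only):
// merging the states {0, 2} and {1, 2} yields their union {0, 1, 2}, so the merged node
// over-approximates every vertex that is still selectable in at least one of the merged
// states. With weights [3, 7, 5], `fast_upper_bound` on that merged state returns
// 3 + 7 + 5 = 15, an optimistic estimate of the weight that can still be collected.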
/// The last bit of information which we need to provide when implementing a ddo-based
/// solver is a `StateRanking`. This is a heuristic which is used to select the most
/// and least promising nodes as a means to only delete/merge the *least* promising nodes
/// when compiling restricted and relaxed DDs.
pub struct MispRanking;
impl StateRanking for MispRanking {
type State = BitSet;
fn compare(&self, a: &Self::State, b: &Self::State) -> std::cmp::Ordering {
a.len().cmp(&b.len())
.then_with(|| a.cmp(b))
}
}
// #########################################################################################
// # THE INFORMATION BEYOND THIS LINE IS NOT DIRECTLY RELATED TO THE IMPLEMENTATION OF #
// # A SOLVER BASED ON DDO. INSTEAD, THAT PORTION OF THE CODE CONTAINS GENERIC FUNCTION #
// # THAT ARE USED TO READ AN INSTANCE FROM FILE, PROCESS COMMAND LINE ARGUMENTS, AND #
// # THE MAIN FUNCTION. THESE ARE THUS NOT REQUIRED 'PER-SE', BUT I BELIEVE IT IS USEFUL #
// # TO SHOW HOW IT CAN BE DONE IN AN EXAMPLE. #
// #########################################################################################
/// This structure uses `clap-derive` annotations and defines the arguments that can
/// be passed on to the executable solver.
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
struct Args {
/// The path to the instance file
fname: String,
/// The number of concurrent threads
#[clap(short, long, default_value = "8")]
threads: usize,
/// The maximum amount of time you would like this solver to run
#[clap(short, long)]
duration: Option<u64>,
/// The maximum number of nodes per layer
#[clap(short, long)]
width: Option<usize>,
}
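// For illustration only: with the `clap` declaration above, a typical invocation of the
// compiled solver could look like the line below. The example/binary name and the
// instance path are assumptions made for the sake of the example, not something mandated
// by this file:
//
//     cargo run --release --example misp -- my_instance.clq --threads 8 --duration 60 --width 1000
//
// `fname` is positional; `--threads`, `--duration` and `--width` can also be passed via
// their generated short forms `-t`, `-d` and `-w`.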
/// This enumeration simply groups the kind of errors that might occur when parsing a
/// misp instance from file. There can be io errors (file unavailable ?), format error
/// (e.g. the file is not an instance but contains the text of your next paper),
/// or parse int errors (which are actually a variant of the format error since it tells
/// you that the parser expected an integer number but got ... something else).
#[derive(Debug, thiserror::Error)]
enum Error {
/// There was an io related error
#[error("io error {0}")]
Io(#[from] std::io::Error),
/// The parser expected to read something that was an integer but got some garbage
#[error("parse int {0}")]
ParseInt(#[from] ParseIntError),
/// The file was not properly formatted.
#[error("ill formed instance")]
Format,
}
/// This function is used to read a misp instance from file. It returns either a
/// misp instance if everything went well or an error describing the problem.
fn read_instance<P: AsRef<Path>>(fname: P) -> Result<Misp, Error> {
let f = File::open(fname)?;
let f = BufReader::new(f);
let comment = Regex::new(r"^c\s.*$").unwrap();
let pb_decl = Regex::new(r"^p\s+edge\s+(?P<vars>\d+)\s+(?P<edges>\d+)$").unwrap();
let node_decl = Regex::new(r"^n\s+(?P<node>\d+)\s+(?P<weight>-?\d+)").unwrap();
let edge_decl = Regex::new(r"^e\s+(?P<src>\d+)\s+(?P<dst>\d+)").unwrap();
let mut g = Misp{nb_vars: 0, neighbors: vec![], weight: vec![]};
for line in f.lines() {
let line = line?;
let line = line.trim();
if line.is_empty() {
continue;
}
if comment.is_match(line) {
continue;
}
if let Some(caps) = pb_decl.captures(line) {
let n = caps["vars"].to_string().parse::<usize>()?;
let full = (0..n).collect();
g.nb_vars = n;
g.neighbors = vec![full; n];
g.weight = vec![1; n];
continue;
}
if let Some(caps) = node_decl.captures(line) {
let n = caps["node"].to_string().parse::<usize>()?;
let w = caps["weight"].to_string().parse::<isize>()?;
let n = n - 1;
g.weight[n] = w;
continue;
}
if let Some(caps) = edge_decl.captures(line) {
let src = caps["src"].to_string().parse::<usize>()?;
let dst = caps["dst"].to_string().parse::<usize>()?;
let src = src-1;
let dst = dst-1;
g.neighbors[src].remove(dst);
g.neighbors[dst].remove(src);
continue;
}
// skip
return Err(Error::Format)
}
Ok(g)
}
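// For reference, a tiny instance in the DIMACS-like format accepted by `read_instance`
// could look as follows (an illustrative sketch, not a file distributed with this crate):
//
//     c an optional comment line
//     p edge 3 2
//     n 2 5
//     e 1 2
//     e 2 3
//
// This declares 3 vertices and 2 edges, gives vertex 2 a weight of 5 (all other vertices
// keep the default weight of 1), and connects vertices 1-2 and 2-3. Vertices are
// 1-indexed in the file and shifted to 0-indexed ids by the parser.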
/// A utility function to return a max width heuristic that can either be a fixed width
/// policy (if w is fixed) or an adaptive policy returning the number of unassigned variables
/// in the overall problem.
fn max_width<P: Problem>(p: &P, w: Option<usize>) -> Box<dyn WidthHeuristic<P::State> + Send + Sync> {
if let Some(w) = w {
Box::new(FixedWidth(w))
} else {
Box::new(NbUnassignedWidth(p.nb_variables()))
}
}
/// A utility function to return a cutoff heuristic that can either be a time budget policy
/// (if timeout is fixed) or no cutoff policy.
fn cutoff(timeout: Option<u64>) -> Box<dyn Cutoff + Send + Sync> {
if let Some(t) = timeout {
Box::new(TimeBudget::new(Duration::from_secs(t)))
} else {
Box::new(NoCutoff)
}
}
/// This is your executable's entry point. It is the place where all the pieces are put together
/// to create a fast and effective solver for the misp problem.
fn main() {
let args = Args::parse();
let fname = &args.fname;
let problem = read_instance(fname).unwrap();
let relaxation = MispRelax {pb: &problem};
let ranking = MispRanking;
let width = max_width(&problem, args.width);
let dominance = EmptyDominanceChecker::default();
let cutoff = cutoff(args.duration);
let mut fringe = NoDupFringe::new(MaxUB::new(&ranking));
// This solver compiles DDs that allow the definition of long arcs spanning over several layers.
let mut solver = ParNoBarrierSolverLel::custom(
&problem,
&relaxation,
&ranking,
width.as_ref(),
&dominance,
cutoff.as_ref(),
&mut fringe,
args.threads,
);
let start = Instant::now();
let Completion{ is_exact, best_value } = solver.maximize();
let duration = start.elapsed();
let upper_bound = solver.best_upper_bound();
let lower_bound = solver.best_lower_bound();
let gap = solver.gap();
let best_solution: Option<Vec<_>> = solver.best_solution().map(|mut decisions|{
decisions.sort_unstable_by_key(|d| d.variable.id());
decisions.iter()
.filter(|d| d.value == 1)
.map(|d| d.variable.id())
.collect()
});
// check solution
if let Some(bs) = best_solution.as_ref() {
for (i, a) in bs.iter().copied().enumerate() {
for b in bs.iter().copied().skip(i+1) {
if !problem.neighbors[a].contains(b) {
println!("not a solution ! {a} -- {b}");
}
}
}
}
println!("Duration: {:.3} seconds", duration.as_secs_f32());
println!("Objective: {}", best_value.unwrap_or(-1));
println!("Upper Bnd: {}", upper_bound);
println!("Lower Bnd: {}", lower_bound);
println!("Gap: {:.3}", gap);
println!("Aborted: {}", !is_exact);
println!("Solution: {:?}", best_solution.unwrap_or_default());
}
| {
state.contains(var.id())
} | identifier_body |
main.rs | // Copyright 2020 Xavier Gillard
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
// the Software, and to permit persons to whom the Software is furnished to do so,
// subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//! This example shows how to implement a solver for the maximum independent set problem
//! using ddo. It is a fairly simple example but it features most of the aspects you will
//! want to copy when implementing your own solver.
use std::{cell::RefCell, path::Path, fs::File, io::{BufReader, BufRead}, num::ParseIntError, time::{Duration, Instant}};
use bit_set::BitSet;
use clap::Parser;
use ddo::*;
use regex::Regex;
#[cfg(test)]
mod tests;
/// This structure represents an instance of the Maximum Independent Set Problem.
/// It is this structure that implements a simple dynamic programming model for the
/// MISP. In that model, the state is simply a bitset where each bit represents
/// a node that may be kept or left out of the MIS.
pub struct Misp {
/// The number of variables in the problem instance
nb_vars: usize,
/// For each vertex 'i' of the original graph, the field 'neighbors[i]' contains
/// a bitmask representing the COMPLEMENT of the adjacency list of i in the
/// original graph. While this may seem a complicated take on the representation
/// of this problem instance, using the complement is helpful as it allows one to
/// easily remove all the neighbors of a vertex from a state very efficiently.
neighbors: Vec<BitSet>,
/// For each vertex 'i', the value of 'weight[i]' denotes the weight associated
/// to vertex i in the problem instance. The goal of MISP is to select the nodes
/// from the underlying graph such that the resulting set is an independent set
/// where the sum of the weights of selected vertices is maximum.
weight: Vec<isize>,
}
/// A constant to mean take the node in the independent set.
const YES: isize = 1;
/// A constant to mean leave the node out of the independent set.
const NO: isize = 0;
/// The Misp class implements the 'Problem' trait. This means Misp is the definition
/// of the DP model. That DP model is pretty straightforward, still you might want
/// to check the implementation of the branching heuristic (next_variable method)
/// since it does interesting stuff.
impl Problem for Misp {
type State = BitSet;
fn nb_variables(&self) -> usize {
self.nb_vars
}
fn initial_state(&self) -> Self::State {
(0..self.nb_variables()).collect()
}
fn initial_value(&self) -> isize {
0
}
fn transition(&self, state: &Self::State, decision: Decision) -> Self::State {
let mut res = state.clone();
res.remove(decision.variable.id());
if decision.value == YES {
// intersect with complement of the neighbors for fast set difference
res.intersect_with(&self.neighbors[decision.variable.id()]);
}
res
}
fn transition_cost(&self, _: &Self::State, decision: Decision) -> isize {
if decision.value == NO {
0
} else {
self.weight[decision.variable.id()]
}
}
fn for_each_in_domain(&self, variable: Variable, state: &Self::State, f: &mut dyn DecisionCallback) {
if state.contains(variable.id()) {
f.apply(Decision{variable, value: YES});
f.apply(Decision{variable, value: NO });
} else {
f.apply(Decision{variable, value: NO });
}
}
/// This method is (apparently) a bit more hairy. It simply decides to branch on
/// the variable that occurs in the least number of states present in the next layer. The intuition
/// here is to limit the max width as much as possible when developing the layers since all
/// nodes that are not impacted by the change on the selected vertex are simply copied over to the
/// next layer.
fn next_variable(&self, _: usize, next_layer: &mut dyn Iterator<Item = &Self::State>) -> Option<Variable> {
// The thread local stuff is possibly one of the most surprising bits of this code. It declares
// a static variable called VAR_HEURISTIC storing the counts of each vertex in the next layer.
// The fact that it is static means that it will not be re-created (re allocated) upon each
// pass. The fact that it is declared within a thread local block, means that this static var
// will be created with a potentially mutable access for each thread.
thread_local! {
static VAR_HEURISTIC: RefCell<Vec<usize>> = RefCell::new(vec![]);
}
VAR_HEURISTIC.with(|heu| {
let mut heu = heu.borrow_mut();
let heu: &mut Vec<usize> = heu.as_mut();
// initialize
heu.reserve_exact(self.nb_variables());
if heu.is_empty() {
for _ in 0..self.nb_variables() { heu.push(0); }
} else {
heu.iter_mut().for_each(|i| *i = 0);
}
// count the occurrence of each var
for s in next_layer {
for sit in s.iter() {
heu[sit] += 1;
}
}
// take the one occurring the least often
heu.iter().copied().enumerate()
.filter(|(_, v)| *v > 0)
.min_by_key(|(_, v)| *v)
.map(|(x, _)| Variable(x))
})
}
fn is_impacted_by(&self, var: Variable, state: &Self::State) -> bool {
state.contains(var.id())
}
}
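// A small, self-contained sanity check of the DP model above (illustrative only; the
// example's real test-suite lives in the `tests` sub-module declared at the top of this
// file). It builds a 3-vertex path graph 1 -- 2 -- 3 by hand and checks that taking the
// middle vertex removes both of its neighbors from the state.
#[cfg(test)]
mod inline_dp_sanity {
    use super::*;

    fn tiny_path() -> Misp {
        let full: BitSet = (0..3).collect();
        let mut neighbors = vec![full.clone(), full.clone(), full];
        // `neighbors[i]` stores the COMPLEMENT of the adjacency list of i, so adding the
        // edge (0, 1) means removing each endpoint from the other's mask; likewise for (1, 2).
        neighbors[0].remove(1);
        neighbors[1].remove(0);
        neighbors[1].remove(2);
        neighbors[2].remove(1);
        Misp { nb_vars: 3, neighbors, weight: vec![1, 10, 1] }
    }

    #[test]
    fn taking_a_vertex_discards_its_neighbors() {
        let pb = tiny_path();
        let root = pb.initial_state();
        // Deciding YES on the middle vertex removes it and, via the complement mask,
        // both of its neighbors from the set of still-selectable vertices.
        let next = pb.transition(&root, Decision { variable: Variable(1), value: YES });
        assert!(!next.contains(0));
        assert!(!next.contains(1));
        assert!(!next.contains(2));
    }
}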
/// In addition to a dynamic programming (DP) model of the problem you want to solve,
/// the branch and bound with MDD algorithm (and thus ddo) requires that you provide
/// an additional relaxation allowing to control the maximum amount of space used by
/// the decision diagrams that are compiled.
///
/// That relaxation requires two operations: one to merge several nodes into one
/// merged node that acts as an over approximation of the other nodes. The second
/// operation is used to possibly offset some weight that would otherwise be lost
/// to the arcs entering the newly created merged node.
///
/// The role of this very simple structure is simply to provide an implementation
/// of that relaxation.
///
/// # Note:
/// In addition to the aforementioned two operations, the MispRelax structure implements
/// an optional `fast_upper_bound` method, which provides a useful bound to
/// prune some portions of the state-space as the decision diagrams are compiled
/// (aka rough upper bound pruning).
pub struct MispRelax<'a>{pb: &'a Misp}
impl Relaxation for MispRelax<'_> {
type State = BitSet;
fn merge(&self, states: &mut dyn Iterator<Item = &Self::State>) -> Self::State {
let mut state = BitSet::with_capacity(self.pb.nb_variables());
for s in states {
state.union_with(s);
}
state
}
fn relax(
&self,
_source: &Self::State,
_dest: &Self::State,
_new: &Self::State,
_decision: Decision,
cost: isize,
) -> isize {
cost
}
fn fast_upper_bound(&self, state: &Self::State) -> isize {
state.iter().map(|x| self.pb.weight[x]).sum()
}
}
/// The last bit of information which we need to provide when implementing a ddo-based
/// solver is a `StateRanking`. This is a heuristic which is used to select the most
/// and least promising nodes as a means to only delete/merge the *least* promising nodes
/// when compiling restricted and relaxed DDs.
pub struct MispRanking;
impl StateRanking for MispRanking {
type State = BitSet;
fn compare(&self, a: &Self::State, b: &Self::State) -> std::cmp::Ordering {
a.len().cmp(&b.len())
.then_with(|| a.cmp(b))
}
}
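// Note: this ranking is consumed twice in `main` below. `NoDupFringe::new(MaxUB::new(&ranking))`
// orders the branch-and-bound fringe so that subproblems with the largest upper bound are
// explored first (as the name `MaxUB` suggests), with the ranking used to discriminate further,
// and the same ranking tells the solver which nodes to merge or delete when a layer exceeds
// the maximum width.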
// #########################################################################################
// # THE INFORMATION BEYOND THIS LINE IS NOT DIRECTLY RELATED TO THE IMPLEMENTATION OF #
// # A SOLVER BASED ON DDO. INSTEAD, THAT PORTION OF THE CODE CONTAINS GENERIC FUNCTION #
// # THAT ARE USED TO READ AN INSTANCE FROM FILE, PROCESS COMMAND LINE ARGUMENTS, AND #
// # THE MAIN FUNCTION. THESE ARE THUS NOT REQUIRED 'PER-SE', BUT I BELIEVE IT IS USEFUL #
// # TO SHOW HOW IT CAN BE DONE IN AN EXAMPLE. #
// #########################################################################################
/// This structure uses `clap-derive` annotations and defines the arguments that can
/// be passed on to the executable solver.
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
struct Args {
/// The path to the instance file
fname: String,
/// The number of concurrent threads
#[clap(short, long, default_value = "8")]
threads: usize,
/// The maximum amount of time you would like this solver to run
#[clap(short, long)]
duration: Option<u64>,
/// The maximum number of nodes per layer
#[clap(short, long)]
width: Option<usize>,
}
/// This enumeration simply groups the kind of errors that might occur when parsing a
/// misp instance from file. There can be io errors (file unavailable ?), format error
/// (e.g. the file is not an instance but contains the text of your next paper),
/// or parse int errors (which are actually a variant of the format error since it tells
/// you that the parser expected an integer number but got ... something else).
#[derive(Debug, thiserror::Error)]
enum Error {
/// There was an io related error
#[error("io error {0}")]
Io(#[from] std::io::Error),
/// The parser expected to read something that was an integer but got some garbage
#[error("parse int {0}")]
ParseInt(#[from] ParseIntError),
/// The file was not properly formatted.
#[error("ill formed instance")]
Format,
}
/// This function is used to read a misp instance from file. It returns either a
/// misp instance if everything went well or an error describing the problem.
fn read_instance<P: AsRef<Path>>(fname: P) -> Result<Misp, Error> {
let f = File::open(fname)?;
let f = BufReader::new(f);
let comment = Regex::new(r"^c\s.*$").unwrap();
let pb_decl = Regex::new(r"^p\s+edge\s+(?P<vars>\d+)\s+(?P<edges>\d+)$").unwrap();
let node_decl = Regex::new(r"^n\s+(?P<node>\d+)\s+(?P<weight>-?\d+)").unwrap();
let edge_decl = Regex::new(r"^e\s+(?P<src>\d+)\s+(?P<dst>\d+)").unwrap();
let mut g = Misp{nb_vars: 0, neighbors: vec![], weight: vec![]};
for line in f.lines() {
let line = line?;
let line = line.trim();
if line.is_empty() |
if comment.is_match(line) {
continue;
}
if let Some(caps) = pb_decl.captures(line) {
let n = caps["vars"].to_string().parse::<usize>()?;
let full = (0..n).collect();
g.nb_vars = n;
g.neighbors = vec![full; n];
g.weight = vec![1; n];
continue;
}
if let Some(caps) = node_decl.captures(line) {
let n = caps["node"].to_string().parse::<usize>()?;
let w = caps["weight"].to_string().parse::<isize>()?;
let n = n - 1;
g.weight[n] = w;
continue;
}
if let Some(caps) = edge_decl.captures(line) {
let src = caps["src"].to_string().parse::<usize>()?;
let dst = caps["dst"].to_string().parse::<usize>()?;
let src = src-1;
let dst = dst-1;
g.neighbors[src].remove(dst);
g.neighbors[dst].remove(src);
continue;
}
// skip
return Err(Error::Format)
}
Ok(g)
}
/// A utility function to return a max width heuristic that can either be a fixed width
/// policy (if w is fixed) or an adaptive policy returning the number of unassigned variables
/// in the overall problem.
fn max_width<P: Problem>(p: &P, w: Option<usize>) -> Box<dyn WidthHeuristic<P::State> + Send + Sync> {
if let Some(w) = w {
Box::new(FixedWidth(w))
} else {
Box::new(NbUnassignedWidth(p.nb_variables()))
}
}
/// A utility function to return a cutoff heuristic that can either be a time budget policy
/// (if timeout is fixed) or no cutoff policy.
fn cutoff(timeout: Option<u64>) -> Box<dyn Cutoff + Send + Sync> {
if let Some(t) = timeout {
Box::new(TimeBudget::new(Duration::from_secs(t)))
} else {
Box::new(NoCutoff)
}
}
/// This is your executable's entry point. It is the place where all the pieces are put together
/// to create a fast and effective solver for the misp problem.
fn main() {
let args = Args::parse();
let fname = &args.fname;
let problem = read_instance(fname).unwrap();
let relaxation = MispRelax {pb: &problem};
let ranking = MispRanking;
let width = max_width(&problem, args.width);
let dominance = EmptyDominanceChecker::default();
let cutoff = cutoff(args.duration);
let mut fringe = NoDupFringe::new(MaxUB::new(&ranking));
// This solver compiles DDs that allow the definition of long arcs spanning over several layers.
let mut solver = ParNoBarrierSolverLel::custom(
&problem,
&relaxation,
&ranking,
width.as_ref(),
&dominance,
cutoff.as_ref(),
&mut fringe,
args.threads,
);
let start = Instant::now();
let Completion{ is_exact, best_value } = solver.maximize();
let duration = start.elapsed();
let upper_bound = solver.best_upper_bound();
let lower_bound = solver.best_lower_bound();
let gap = solver.gap();
let best_solution: Option<Vec<_>> = solver.best_solution().map(|mut decisions|{
decisions.sort_unstable_by_key(|d| d.variable.id());
decisions.iter()
.filter(|d| d.value == 1)
.map(|d| d.variable.id())
.collect()
});
// check solution
if let Some(bs) = best_solution.as_ref() {
for (i, a) in bs.iter().copied().enumerate() {
for b in bs.iter().copied().skip(i+1) {
if !problem.neighbors[a].contains(b) {
println!("not a solution ! {a} -- {b}");
}
}
}
}
println!("Duration: {:.3} seconds", duration.as_secs_f32());
println!("Objective: {}", best_value.unwrap_or(-1));
println!("Upper Bnd: {}", upper_bound);
println!("Lower Bnd: {}", lower_bound);
println!("Gap: {:.3}", gap);
println!("Aborted: {}", !is_exact);
println!("Solution: {:?}", best_solution.unwrap_or_default());
}
| {
continue;
} | conditional_block |
main.rs | // Copyright 2020 Xavier Gillard
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
// the Software, and to permit persons to whom the Software is furnished to do so,
// subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//! This example shows how to implement a solver for the maximum independent set problem
//! using ddo. It is a fairly simple example but it features most of the aspects you will
//! want to copy when implementing your own solver.
use std::{cell::RefCell, path::Path, fs::File, io::{BufReader, BufRead}, num::ParseIntError, time::{Duration, Instant}};
use bit_set::BitSet;
use clap::Parser;
use ddo::*;
use regex::Regex;
#[cfg(test)]
mod tests;
/// This structure represents an instance of the Maximum Independent Set Problem.
/// It is this structure that implements a simple dynamic programming model for the
/// MISP. In that model, the state is simply a bitset where each bit represents
/// a node that may be kept or left out of the MIS.
pub struct Misp {
/// The number of variables in the problem instance
nb_vars: usize,
/// For each vertex 'i' of the original graph, the field 'neighbors[i]' contains
/// a bitmask representing the COMPLEMENT of the adjacency list of i in the
/// original graph. While this may seem a complicated take on the representation
/// of this problem instance, using the complement is helpful as it allows one to
/// easily remove all the neighbors of a vertex from a state very efficiently.
neighbors: Vec<BitSet>,
/// For each vertex 'i', the value of 'weight[i]' denotes the weight associated
/// to vertex i in the problem instance. The goal of MISP is to select the nodes
/// from the underlying graph such that the resulting set is an independent set
/// where the sum of the weights of selected vertices is maximum.
weight: Vec<isize>,
}
/// A constant to mean take the node in the independent set.
const YES: isize = 1;
/// A constant to mean leave the node out of the independent set.
const NO: isize = 0;
/// The Misp class implements the 'Problem' trait. This means Misp is the definition
/// of the DP model. That DP model is pretty straightforward, still you might want
/// to check the implementation of the branching heuristic (next_variable method)
/// since it does interesting stuff.
impl Problem for Misp {
type State = BitSet;
|
fn initial_state(&self) -> Self::State {
(0..self.nb_variables()).collect()
}
fn initial_value(&self) -> isize {
0
}
fn transition(&self, state: &Self::State, decision: Decision) -> Self::State {
let mut res = state.clone();
res.remove(decision.variable.id());
if decision.value == YES {
// intersect with complement of the neighbors for fast set difference
res.intersect_with(&self.neighbors[decision.variable.id()]);
}
res
}
fn transition_cost(&self, _: &Self::State, decision: Decision) -> isize {
if decision.value == NO {
0
} else {
self.weight[decision.variable.id()]
}
}
fn for_each_in_domain(&self, variable: Variable, state: &Self::State, f: &mut dyn DecisionCallback) {
if state.contains(variable.id()) {
f.apply(Decision{variable, value: YES});
f.apply(Decision{variable, value: NO });
} else {
f.apply(Decision{variable, value: NO });
}
}
/// This method is (apparently) a bit more hairy. It simply decides to branch on
/// the variable that occurs in the least number of states present in the next layer. The intuition
/// here is to limit the max width as much as possible when developing the layers since all
/// nodes that are not impacted by the change on the selected vertex are simply copied over to the
/// next layer.
fn next_variable(&self, _: usize, next_layer: &mut dyn Iterator<Item = &Self::State>) -> Option<Variable> {
// The thread local stuff is possibly one of the most surprising bits of this code. It declares
// a static variable called VAR_HEURISTIC storing the counts of each vertex in the next layer.
// The fact that it is static means that it will not be re-created (re allocated) upon each
// pass. The fact that it is declared within a thread local block, means that this static var
// will be created with a potentially mutable access for each thread.
thread_local! {
static VAR_HEURISTIC: RefCell<Vec<usize>> = RefCell::new(vec![]);
}
VAR_HEURISTIC.with(|heu| {
let mut heu = heu.borrow_mut();
let heu: &mut Vec<usize> = heu.as_mut();
// initialize
heu.reserve_exact(self.nb_variables());
if heu.is_empty() {
for _ in 0..self.nb_variables() { heu.push(0); }
} else {
heu.iter_mut().for_each(|i| *i = 0);
}
// count the occurrence of each var
for s in next_layer {
for sit in s.iter() {
heu[sit] += 1;
}
}
// take the one occurring the least often
heu.iter().copied().enumerate()
.filter(|(_, v)| *v > 0)
.min_by_key(|(_, v)| *v)
.map(|(x, _)| Variable(x))
})
}
fn is_impacted_by(&self, var: Variable, state: &Self::State) -> bool {
state.contains(var.id())
}
}
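// A small illustration of the `next_variable` heuristic above (made-up layer contents):
// if the next layer holds the states {0, 1}, {0, 2} and {0}, the occurrence counts are
// vertex 0 -> 3, vertex 1 -> 1, vertex 2 -> 1. Vertices with a zero count are filtered
// out, and branching happens on the least frequent remaining vertex (here Variable(1),
// the first of the tied minima). Once no vertex occurs in any state anymore, the method
// returns None, signalling that there is nothing left to branch on.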
/// In addition to a dynamic programming (DP) model of the problem you want to solve,
/// the branch and bound with MDD algorithm (and thus ddo) requires that you provide
/// an additional relaxation allowing to control the maximum amount of space used by
/// the decision diagrams that are compiled.
///
/// That relaxation requires two operations: one to merge several nodes into one
/// merged node that acts as an over approximation of the other nodes. The second
/// operation is used to possibly offset some weight that would otherwise be lost
/// to the arcs entering the newly created merged node.
///
/// The role of this very simple structure is simply to provide an implementation
/// of that relaxation.
///
/// # Note:
/// In addition to the aforementioned two operations, the MispRelax structure implements
/// an optional `fast_upper_bound` method, which provides a useful bound to
/// prune some portions of the state-space as the decision diagrams are compiled
/// (aka rough upper bound pruning).
pub struct MispRelax<'a>{pb: &'a Misp}
impl Relaxation for MispRelax<'_> {
type State = BitSet;
fn merge(&self, states: &mut dyn Iterator<Item = &Self::State>) -> Self::State {
let mut state = BitSet::with_capacity(self.pb.nb_variables());
for s in states {
state.union_with(s);
}
state
}
fn relax(
&self,
_source: &Self::State,
_dest: &Self::State,
_new: &Self::State,
_decision: Decision,
cost: isize,
) -> isize {
cost
}
fn fast_upper_bound(&self, state: &Self::State) -> isize {
state.iter().map(|x| self.pb.weight[x]).sum()
}
}
/// The last bit of information which we need to provide when implementing a ddo-based
/// solver is a `StateRanking`. This is a heuristic which is used to select the most
/// and least promising nodes as a means to only delete/merge the *least* promising nodes
/// when compiling restricted and relaxed DDs.
pub struct MispRanking;
impl StateRanking for MispRanking {
type State = BitSet;
fn compare(&self, a: &Self::State, b: &Self::State) -> std::cmp::Ordering {
a.len().cmp(&b.len())
.then_with(|| a.cmp(b))
}
}
// #########################################################################################
// # THE INFORMATION BEYOND THIS LINE IS NOT DIRECTLY RELATED TO THE IMPLEMENTATION OF #
// # A SOLVER BASED ON DDO. INSTEAD, THAT PORTION OF THE CODE CONTAINS GENERIC FUNCTION #
// # THAT ARE USED TO READ AN INSTANCE FROM FILE, PROCESS COMMAND LINE ARGUMENTS, AND #
// # THE MAIN FUNCTION. THESE ARE THUS NOT REQUIRED 'PER-SE', BUT I BELIEVE IT IS USEFUL #
// # TO SHOW HOW IT CAN BE DONE IN AN EXAMPLE. #
// #########################################################################################
/// This structure uses `clap-derive` annotations and defines the arguments that can
/// be passed on to the executable solver.
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
struct Args {
/// The path to the instance file
fname: String,
/// The number of concurrent threads
#[clap(short, long, default_value = "8")]
threads: usize,
/// The maximum amount of time you would like this solver to run
#[clap(short, long)]
duration: Option<u64>,
/// The maximum number of nodes per layer
#[clap(short, long)]
width: Option<usize>,
}
/// This enumeration simply groups the kind of errors that might occur when parsing a
/// misp instance from file. There can be io errors (file unavailable ?), format error
/// (e.g. the file is not an instance but contains the text of your next paper),
/// or parse int errors (which are actually a variant of the format error since it tells
/// you that the parser expected an integer number but got ... something else).
#[derive(Debug, thiserror::Error)]
enum Error {
/// There was an io related error
#[error("io error {0}")]
Io(#[from] std::io::Error),
/// The parser expected to read something that was an integer but got some garbage
#[error("parse int {0}")]
ParseInt(#[from] ParseIntError),
/// The file was not properly formatted.
#[error("ill formed instance")]
Format,
}
/// This function is used to read a misp instance from file. It returns either a
/// misp instance if everything went well or an error describing the problem.
fn read_instance<P: AsRef<Path>>(fname: P) -> Result<Misp, Error> {
let f = File::open(fname)?;
let f = BufReader::new(f);
let comment = Regex::new(r"^c\s.*$").unwrap();
let pb_decl = Regex::new(r"^p\s+edge\s+(?P<vars>\d+)\s+(?P<edges>\d+)$").unwrap();
let node_decl = Regex::new(r"^n\s+(?P<node>\d+)\s+(?P<weight>-?\d+)").unwrap();
let edge_decl = Regex::new(r"^e\s+(?P<src>\d+)\s+(?P<dst>\d+)").unwrap();
let mut g = Misp{nb_vars: 0, neighbors: vec![], weight: vec![]};
for line in f.lines() {
let line = line?;
let line = line.trim();
if line.is_empty() {
continue;
}
if comment.is_match(line) {
continue;
}
if let Some(caps) = pb_decl.captures(line) {
let n = caps["vars"].to_string().parse::<usize>()?;
let full = (0..n).collect();
g.nb_vars = n;
g.neighbors = vec![full; n];
g.weight = vec![1; n];
continue;
}
if let Some(caps) = node_decl.captures(line) {
let n = caps["node"].to_string().parse::<usize>()?;
let w = caps["weight"].to_string().parse::<isize>()?;
let n = n - 1;
g.weight[n] = w;
continue;
}
if let Some(caps) = edge_decl.captures(line) {
let src = caps["src"].to_string().parse::<usize>()?;
let dst = caps["dst"].to_string().parse::<usize>()?;
let src = src-1;
let dst = dst-1;
g.neighbors[src].remove(dst);
g.neighbors[dst].remove(src);
continue;
}
// skip
return Err(Error::Format)
}
Ok(g)
}
/// A utility function to return a max width heuristic that can either be a fixed width
/// policy (if w is fixed) or an adaptive policy returning the number of unassigned variables
/// in the overall problem.
fn max_width<P: Problem>(p: &P, w: Option<usize>) -> Box<dyn WidthHeuristic<P::State> + Send + Sync> {
if let Some(w) = w {
Box::new(FixedWidth(w))
} else {
Box::new(NbUnassignedWidth(p.nb_variables()))
}
}
/// A utility function to return a cutoff heuristic that can either be a time budget policy
/// (if timeout is fixed) or no cutoff policy.
fn cutoff(timeout: Option<u64>) -> Box<dyn Cutoff + Send + Sync> {
if let Some(t) = timeout {
Box::new(TimeBudget::new(Duration::from_secs(t)))
} else {
Box::new(NoCutoff)
}
}
/// This is your executable's entry point. It is the place where all the pieces are put together
/// to create a fast and effective solver for the misp problem.
fn main() {
let args = Args::parse();
let fname = &args.fname;
let problem = read_instance(fname).unwrap();
let relaxation = MispRelax {pb: &problem};
let ranking = MispRanking;
let width = max_width(&problem, args.width);
let dominance = EmptyDominanceChecker::default();
let cutoff = cutoff(args.duration);
let mut fringe = NoDupFringe::new(MaxUB::new(&ranking));
// This solver compiles DDs that allow the definition of long arcs spanning over several layers.
let mut solver = ParNoBarrierSolverLel::custom(
&problem,
&relaxation,
&ranking,
width.as_ref(),
&dominance,
cutoff.as_ref(),
&mut fringe,
args.threads,
);
let start = Instant::now();
let Completion{ is_exact, best_value } = solver.maximize();
let duration = start.elapsed();
let upper_bound = solver.best_upper_bound();
let lower_bound = solver.best_lower_bound();
let gap = solver.gap();
let best_solution: Option<Vec<_>> = solver.best_solution().map(|mut decisions|{
decisions.sort_unstable_by_key(|d| d.variable.id());
decisions.iter()
.filter(|d| d.value == 1)
.map(|d| d.variable.id())
.collect()
});
// check solution
if let Some(bs) = best_solution.as_ref() {
for (i, a) in bs.iter().copied().enumerate() {
for b in bs.iter().copied().skip(i+1) {
if !problem.neighbors[a].contains(b) {
println!("not a solution ! {a} -- {b}");
}
}
}
}
println!("Duration: {:.3} seconds", duration.as_secs_f32());
println!("Objective: {}", best_value.unwrap_or(-1));
println!("Upper Bnd: {}", upper_bound);
println!("Lower Bnd: {}", lower_bound);
println!("Gap: {:.3}", gap);
println!("Aborted: {}", !is_exact);
println!("Solution: {:?}", best_solution.unwrap_or_default());
} | fn nb_variables(&self) -> usize {
self.nb_vars
} | random_line_split |